1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 2, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the
20 Free Software Foundation, 59 Temple Place - Suite 330, Boston,
21 MA 02111-1307, USA. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
43 #include "basic-block.h"
44 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
54 #ifndef TARGET_NO_PROTOTYPE
55 #define TARGET_NO_PROTOTYPE 0
58 #define EASY_VECTOR_15(n, x, y) ((n) >= -16 && (n) <= 15 \
59 && easy_vector_same (x, y))
61 #define EASY_VECTOR_15_ADD_SELF(n, x, y) ((n) >= 0x10 && (n) <= 0x1e \
63 && easy_vector_same (x, y))
65 #define min(A,B) ((A) < (B) ? (A) : (B))
66 #define max(A,B) ((A) > (B) ? (A) : (B))
70 enum processor_type rs6000_cpu
;
71 struct rs6000_cpu_select rs6000_select
[3] =
73 /* switch name, tune arch */
74 { (const char *)0, "--with-cpu=", 1, 1 },
75 { (const char *)0, "-mcpu=", 1, 1 },
76 { (const char *)0, "-mtune=", 1, 0 },
79 /* Size of long double */
80 const char *rs6000_long_double_size_string
;
81 int rs6000_long_double_type_size
;
83 /* Whether -mabi=altivec has appeared */
84 int rs6000_altivec_abi
;
86 /* Whether VRSAVE instructions should be generated. */
87 int rs6000_altivec_vrsave
;
89 /* String from -mvrsave= option. */
90 const char *rs6000_altivec_vrsave_string
;
92 /* Nonzero if we want SPE ABI extensions. */
95 /* Whether isel instructions should be generated. */
98 /* Whether SPE simd instructions should be generated. */
101 /* Nonzero if floating point operations are done in the GPRs. */
102 int rs6000_float_gprs
= 0;
104 /* String from -mfloat-gprs=. */
105 const char *rs6000_float_gprs_string
;
107 /* String from -misel=. */
108 const char *rs6000_isel_string
;
110 /* String from -mspe=. */
111 const char *rs6000_spe_string
;
113 /* Set to nonzero once AIX common-mode calls have been defined. */
114 static GTY(()) int common_mode_defined
;
116 /* Save information from a "cmpxx" operation until the branch or scc is
118 rtx rs6000_compare_op0
, rs6000_compare_op1
;
119 int rs6000_compare_fp_p
;
121 /* Label number of label created for -mrelocatable, to call to so we can
122 get the address of the GOT section */
123 int rs6000_pic_labelno
;
126 /* Which abi to adhere to */
127 const char *rs6000_abi_name
= RS6000_ABI_NAME
;
129 /* Semantics of the small data area */
130 enum rs6000_sdata_type rs6000_sdata
= SDATA_DATA
;
132 /* Which small data model to use */
133 const char *rs6000_sdata_name
= (char *)0;
135 /* Counter for labels which are to be placed in .fixup. */
136 int fixuplabelno
= 0;
139 /* Bit size of immediate TLS offsets and string from which it is decoded. */
140 int rs6000_tls_size
= 32;
141 const char *rs6000_tls_size_string
;
143 /* ABI enumeration available for subtarget to use. */
144 enum rs6000_abi rs6000_current_abi
;
146 /* ABI string from -mabi= option. */
147 const char *rs6000_abi_string
;
150 const char *rs6000_debug_name
;
151 int rs6000_debug_stack
; /* debug stack applications */
152 int rs6000_debug_arg
; /* debug argument handling */
155 static GTY(()) tree opaque_V2SI_type_node
;
156 static GTY(()) tree opaque_V2SF_type_node
;
157 static GTY(()) tree opaque_p_V2SI_type_node
;
159 const char *rs6000_traceback_name
;
161 traceback_default
= 0,
167 /* Flag to say the TOC is initialized */
169 char toc_label_name
[10];
171 /* Alias set for saves and restores from the rs6000 stack. */
172 static int rs6000_sr_alias_set
;
174 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
175 The only place that looks at this is rs6000_set_default_type_attributes;
176 everywhere else should rely on the presence or absence of a longcall
177 attribute on the function declaration. */
178 int rs6000_default_long_calls
;
179 const char *rs6000_longcall_switch
;
181 struct builtin_description
183 /* mask is not const because we're going to alter it below. This
184 nonsense will go away when we rewrite the -march infrastructure
185 to give us more target flag bits. */
187 const enum insn_code icode
;
188 const char *const name
;
189 const enum rs6000_builtins code
;
192 static bool rs6000_function_ok_for_sibcall
PARAMS ((tree
, tree
));
193 static int num_insns_constant_wide
PARAMS ((HOST_WIDE_INT
));
194 static void validate_condition_mode
195 PARAMS ((enum rtx_code
, enum machine_mode
));
196 static rtx rs6000_generate_compare
PARAMS ((enum rtx_code
));
197 static void rs6000_maybe_dead
PARAMS ((rtx
));
198 static void rs6000_emit_stack_tie
PARAMS ((void));
199 static void rs6000_frame_related
PARAMS ((rtx
, rtx
, HOST_WIDE_INT
, rtx
, rtx
));
200 static rtx spe_synthesize_frame_save
PARAMS ((rtx
));
201 static bool spe_func_has_64bit_regs_p
PARAMS ((void));
202 static void emit_frame_save
PARAMS ((rtx
, rtx
, enum machine_mode
,
203 unsigned int, int, int));
204 static rtx gen_frame_mem_offset
PARAMS ((enum machine_mode
, rtx
, int));
205 static void rs6000_emit_allocate_stack
PARAMS ((HOST_WIDE_INT
, int));
206 static unsigned rs6000_hash_constant
PARAMS ((rtx
));
207 static unsigned toc_hash_function
PARAMS ((const void *));
208 static int toc_hash_eq
PARAMS ((const void *, const void *));
209 static int constant_pool_expr_1
PARAMS ((rtx
, int *, int *));
210 static bool constant_pool_expr_p
PARAMS ((rtx
));
211 static bool toc_relative_expr_p
PARAMS ((rtx
));
212 static bool legitimate_small_data_p
PARAMS ((enum machine_mode
, rtx
));
213 static bool legitimate_offset_address_p
PARAMS ((enum machine_mode
, rtx
, int));
214 static bool legitimate_indexed_address_p
PARAMS ((rtx
, int));
215 static bool legitimate_indirect_address_p
PARAMS ((rtx
, int));
216 static bool legitimate_lo_sum_address_p
PARAMS ((enum machine_mode
, rtx
, int));
217 static struct machine_function
* rs6000_init_machine_status
PARAMS ((void));
218 static bool rs6000_assemble_integer
PARAMS ((rtx
, unsigned int, int));
219 #ifdef HAVE_GAS_HIDDEN
220 static void rs6000_assemble_visibility
PARAMS ((tree
, int));
222 static int rs6000_ra_ever_killed
PARAMS ((void));
223 static tree rs6000_handle_longcall_attribute
PARAMS ((tree
*, tree
, tree
, int, bool *));
224 extern const struct attribute_spec rs6000_attribute_table
[];
225 static void rs6000_set_default_type_attributes
PARAMS ((tree
));
226 static void rs6000_output_function_prologue
PARAMS ((FILE *, HOST_WIDE_INT
));
227 static void rs6000_output_function_epilogue
PARAMS ((FILE *, HOST_WIDE_INT
));
228 static void rs6000_output_mi_thunk
PARAMS ((FILE *, tree
, HOST_WIDE_INT
,
229 HOST_WIDE_INT
, tree
));
230 static rtx rs6000_emit_set_long_const
PARAMS ((rtx
,
231 HOST_WIDE_INT
, HOST_WIDE_INT
));
233 static unsigned int rs6000_elf_section_type_flags
PARAMS ((tree
, const char *,
235 static void rs6000_elf_asm_out_constructor
PARAMS ((rtx
, int));
236 static void rs6000_elf_asm_out_destructor
PARAMS ((rtx
, int));
237 static void rs6000_elf_select_section
PARAMS ((tree
, int,
238 unsigned HOST_WIDE_INT
));
239 static void rs6000_elf_unique_section
PARAMS ((tree
, int));
240 static void rs6000_elf_select_rtx_section
PARAMS ((enum machine_mode
, rtx
,
241 unsigned HOST_WIDE_INT
));
242 static void rs6000_elf_encode_section_info
PARAMS ((tree
, rtx
, int))
244 static bool rs6000_elf_in_small_data_p
PARAMS ((tree
));
247 static void rs6000_xcoff_asm_globalize_label
PARAMS ((FILE *, const char *));
248 static void rs6000_xcoff_asm_named_section
PARAMS ((const char *, unsigned int));
249 static void rs6000_xcoff_select_section
PARAMS ((tree
, int,
250 unsigned HOST_WIDE_INT
));
251 static void rs6000_xcoff_unique_section
PARAMS ((tree
, int));
252 static void rs6000_xcoff_select_rtx_section
PARAMS ((enum machine_mode
, rtx
,
253 unsigned HOST_WIDE_INT
));
254 static const char * rs6000_xcoff_strip_name_encoding
PARAMS ((const char *));
255 static unsigned int rs6000_xcoff_section_type_flags
PARAMS ((tree
, const char *, int));
258 static bool rs6000_binds_local_p
PARAMS ((tree
));
260 static int rs6000_use_dfa_pipeline_interface
PARAMS ((void));
261 static int rs6000_variable_issue
PARAMS ((FILE *, int, rtx
, int));
262 static bool rs6000_rtx_costs
PARAMS ((rtx
, int, int, int *));
263 static int rs6000_adjust_cost
PARAMS ((rtx
, rtx
, rtx
, int));
264 static int rs6000_adjust_priority
PARAMS ((rtx
, int));
265 static int rs6000_issue_rate
PARAMS ((void));
266 static int rs6000_use_sched_lookahead
PARAMS ((void));
268 static void rs6000_init_builtins
PARAMS ((void));
269 static rtx rs6000_expand_unop_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
270 static rtx rs6000_expand_binop_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
271 static rtx rs6000_expand_ternop_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
272 static rtx rs6000_expand_builtin
PARAMS ((tree
, rtx
, rtx
, enum machine_mode
, int));
273 static void altivec_init_builtins
PARAMS ((void));
274 static void rs6000_common_init_builtins
PARAMS ((void));
276 static void enable_mask_for_builtins
PARAMS ((struct builtin_description
*,
277 int, enum rs6000_builtins
,
278 enum rs6000_builtins
));
279 static void spe_init_builtins
PARAMS ((void));
280 static rtx spe_expand_builtin
PARAMS ((tree
, rtx
, bool *));
281 static rtx spe_expand_predicate_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
282 static rtx spe_expand_evsel_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
283 static int rs6000_emit_int_cmove
PARAMS ((rtx
, rtx
, rtx
, rtx
));
285 static rtx altivec_expand_builtin
PARAMS ((tree
, rtx
, bool *));
286 static rtx altivec_expand_ld_builtin
PARAMS ((tree
, rtx
, bool *));
287 static rtx altivec_expand_st_builtin
PARAMS ((tree
, rtx
, bool *));
288 static rtx altivec_expand_dst_builtin
PARAMS ((tree
, rtx
, bool *));
289 static rtx altivec_expand_abs_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
290 static rtx altivec_expand_predicate_builtin
PARAMS ((enum insn_code
, const char *, tree
, rtx
));
291 static rtx altivec_expand_stv_builtin
PARAMS ((enum insn_code
, tree
));
292 static void rs6000_parse_abi_options
PARAMS ((void));
293 static void rs6000_parse_tls_size_option
PARAMS ((void));
294 static void rs6000_parse_yes_no_option (const char *, const char *, int *);
295 static int first_altivec_reg_to_save
PARAMS ((void));
296 static unsigned int compute_vrsave_mask
PARAMS ((void));
297 static void is_altivec_return_reg
PARAMS ((rtx
, void *));
298 static rtx generate_set_vrsave
PARAMS ((rtx
, rs6000_stack_t
*, int));
299 int easy_vector_constant
PARAMS ((rtx
, enum machine_mode
));
300 static int easy_vector_same
PARAMS ((rtx
, enum machine_mode
));
301 static bool is_ev64_opaque_type
PARAMS ((tree
));
302 static rtx rs6000_dwarf_register_span
PARAMS ((rtx
));
303 static rtx rs6000_legitimize_tls_address
PARAMS ((rtx
, enum tls_model
));
304 static rtx rs6000_tls_get_addr
PARAMS ((void));
305 static rtx rs6000_got_sym
PARAMS ((void));
306 static inline int rs6000_tls_symbol_ref_1
PARAMS ((rtx
*, void *));
307 static const char *rs6000_get_some_local_dynamic_name
PARAMS ((void));
308 static int rs6000_get_some_local_dynamic_name_1
PARAMS ((rtx
*, void *));
310 /* Hash table stuff for keeping track of TOC entries. */
312 struct toc_hash_struct
GTY(())
314 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
315 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
317 enum machine_mode key_mode
;
321 static GTY ((param_is (struct toc_hash_struct
))) htab_t toc_hash_table
;
323 /* Default register names. */
324 char rs6000_reg_names
[][8] =
326 "0", "1", "2", "3", "4", "5", "6", "7",
327 "8", "9", "10", "11", "12", "13", "14", "15",
328 "16", "17", "18", "19", "20", "21", "22", "23",
329 "24", "25", "26", "27", "28", "29", "30", "31",
330 "0", "1", "2", "3", "4", "5", "6", "7",
331 "8", "9", "10", "11", "12", "13", "14", "15",
332 "16", "17", "18", "19", "20", "21", "22", "23",
333 "24", "25", "26", "27", "28", "29", "30", "31",
334 "mq", "lr", "ctr","ap",
335 "0", "1", "2", "3", "4", "5", "6", "7",
337 /* AltiVec registers. */
338 "0", "1", "2", "3", "4", "5", "6", "7",
339 "8", "9", "10", "11", "12", "13", "14", "15",
340 "16", "17", "18", "19", "20", "21", "22", "23",
341 "24", "25", "26", "27", "28", "29", "30", "31",
347 #ifdef TARGET_REGNAMES
348 static const char alt_reg_names
[][8] =
350 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
351 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
352 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
353 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
354 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
355 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
356 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
357 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
358 "mq", "lr", "ctr", "ap",
359 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
361 /* AltiVec registers. */
362 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
363 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
364 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
365 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
372 #ifndef MASK_STRICT_ALIGN
373 #define MASK_STRICT_ALIGN 0
375 #ifndef TARGET_PROFILE_KERNEL
376 #define TARGET_PROFILE_KERNEL 0
379 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
380 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
382 /* Return 1 for a symbol ref for a thread-local storage symbol. */
383 #define RS6000_SYMBOL_REF_TLS_P(RTX) \
384 (GET_CODE (RTX) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (RTX) != 0)
386 /* Initialize the GCC target structure. */
387 #undef TARGET_ATTRIBUTE_TABLE
388 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
389 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
390 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
392 #undef TARGET_ASM_ALIGNED_DI_OP
393 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
395 /* Default unaligned ops are only provided for ELF. Find the ops needed
396 for non-ELF systems. */
397 #ifndef OBJECT_FORMAT_ELF
399 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
401 #undef TARGET_ASM_UNALIGNED_HI_OP
402 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
403 #undef TARGET_ASM_UNALIGNED_SI_OP
404 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
405 #undef TARGET_ASM_UNALIGNED_DI_OP
406 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
409 #undef TARGET_ASM_UNALIGNED_HI_OP
410 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
411 #undef TARGET_ASM_UNALIGNED_SI_OP
412 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
416 /* This hook deals with fixups for relocatable code and DI-mode objects
418 #undef TARGET_ASM_INTEGER
419 #define TARGET_ASM_INTEGER rs6000_assemble_integer
421 #ifdef HAVE_GAS_HIDDEN
422 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
423 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
426 #undef TARGET_HAVE_TLS
427 #define TARGET_HAVE_TLS HAVE_AS_TLS
429 #undef TARGET_CANNOT_FORCE_CONST_MEM
430 #define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
432 #undef TARGET_ASM_FUNCTION_PROLOGUE
433 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
434 #undef TARGET_ASM_FUNCTION_EPILOGUE
435 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
437 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
438 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE rs6000_use_dfa_pipeline_interface
439 #undef TARGET_SCHED_VARIABLE_ISSUE
440 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
442 #undef TARGET_SCHED_ISSUE_RATE
443 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
444 #undef TARGET_SCHED_ADJUST_COST
445 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
446 #undef TARGET_SCHED_ADJUST_PRIORITY
447 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
449 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
450 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
452 #undef TARGET_INIT_BUILTINS
453 #define TARGET_INIT_BUILTINS rs6000_init_builtins
455 #undef TARGET_EXPAND_BUILTIN
456 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
459 #undef TARGET_BINDS_LOCAL_P
460 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
463 #undef TARGET_ASM_OUTPUT_MI_THUNK
464 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
466 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
467 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
469 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
470 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
472 #undef TARGET_RTX_COSTS
473 #define TARGET_RTX_COSTS rs6000_rtx_costs
474 #undef TARGET_ADDRESS_COST
475 #define TARGET_ADDRESS_COST hook_int_rtx_0
477 #undef TARGET_VECTOR_OPAQUE_P
478 #define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type
480 #undef TARGET_DWARF_REGISTER_SPAN
481 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
483 struct gcc_target targetm
= TARGET_INITIALIZER
;
485 /* Override command line options. Mostly we process the processor
486 type and sometimes adjust other TARGET_ options. */
489 rs6000_override_options (default_cpu
)
490 const char *default_cpu
;
493 struct rs6000_cpu_select
*ptr
;
495 /* Simplify the entries below by making a mask for any POWER
496 variant and any PowerPC variant. */
498 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
499 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
500 | MASK_PPC_GFXOPT | MASK_POWERPC64)
501 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
505 const char *const name
; /* Canonical processor name. */
506 const enum processor_type processor
; /* Processor type enum value. */
507 const int target_enable
; /* Target flags to enable. */
508 const int target_disable
; /* Target flags to disable. */
509 } const processor_target_table
[]
510 = {{"common", PROCESSOR_COMMON
, MASK_NEW_MNEMONICS
,
511 POWER_MASKS
| POWERPC_MASKS
},
512 {"power", PROCESSOR_POWER
,
513 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
514 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
515 {"power2", PROCESSOR_POWER
,
516 MASK_POWER
| MASK_POWER2
| MASK_MULTIPLE
| MASK_STRING
,
517 POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
518 {"power3", PROCESSOR_PPC630
,
519 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
521 {"power4", PROCESSOR_POWER4
,
522 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
524 {"powerpc", PROCESSOR_POWERPC
,
525 MASK_POWERPC
| MASK_NEW_MNEMONICS
,
526 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
527 {"powerpc64", PROCESSOR_POWERPC64
,
528 MASK_POWERPC
| MASK_POWERPC64
| MASK_NEW_MNEMONICS
,
529 POWER_MASKS
| POWERPC_OPT_MASKS
},
530 {"rios", PROCESSOR_RIOS1
,
531 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
532 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
533 {"rios1", PROCESSOR_RIOS1
,
534 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
535 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
536 {"rsc", PROCESSOR_PPC601
,
537 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
538 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
539 {"rsc1", PROCESSOR_PPC601
,
540 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
541 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
542 {"rios2", PROCESSOR_RIOS2
,
543 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
| MASK_POWER2
,
544 POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
545 {"rs64a", PROCESSOR_RS64A
,
546 MASK_POWERPC
| MASK_NEW_MNEMONICS
,
547 POWER_MASKS
| POWERPC_OPT_MASKS
},
548 {"401", PROCESSOR_PPC403
,
549 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
550 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
551 {"403", PROCESSOR_PPC403
,
552 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
| MASK_STRICT_ALIGN
,
553 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
554 {"405", PROCESSOR_PPC405
,
555 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
556 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
557 {"405fp", PROCESSOR_PPC405
,
558 MASK_POWERPC
| MASK_NEW_MNEMONICS
,
559 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
560 {"440", PROCESSOR_PPC440
,
561 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
562 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
563 {"440fp", PROCESSOR_PPC440
,
564 MASK_POWERPC
| MASK_NEW_MNEMONICS
,
565 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
566 {"505", PROCESSOR_MPCCORE
,
567 MASK_POWERPC
| MASK_NEW_MNEMONICS
,
568 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
569 {"601", PROCESSOR_PPC601
,
570 MASK_POWER
| MASK_POWERPC
| MASK_NEW_MNEMONICS
| MASK_MULTIPLE
| MASK_STRING
,
571 MASK_POWER2
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
572 {"602", PROCESSOR_PPC603
,
573 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
574 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
575 {"603", PROCESSOR_PPC603
,
576 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
577 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
578 {"603e", PROCESSOR_PPC603
,
579 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
580 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
581 {"ec603e", PROCESSOR_PPC603
,
582 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
583 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
584 {"604", PROCESSOR_PPC604
,
585 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
586 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
587 {"604e", PROCESSOR_PPC604e
,
588 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
589 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
590 {"620", PROCESSOR_PPC620
,
591 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
593 {"630", PROCESSOR_PPC630
,
594 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
596 {"740", PROCESSOR_PPC750
,
597 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
598 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
599 {"750", PROCESSOR_PPC750
,
600 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
601 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
602 {"7400", PROCESSOR_PPC7400
,
603 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
604 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
605 {"7450", PROCESSOR_PPC7450
,
606 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
607 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
608 {"8540", PROCESSOR_PPC8540
,
609 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
610 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
611 {"801", PROCESSOR_MPCCORE
,
612 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
613 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
614 {"821", PROCESSOR_MPCCORE
,
615 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
616 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
617 {"823", PROCESSOR_MPCCORE
,
618 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
619 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
620 {"860", PROCESSOR_MPCCORE
,
621 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
622 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
}};
624 const size_t ptt_size
= ARRAY_SIZE (processor_target_table
);
626 /* Save current -mmultiple/-mno-multiple status. */
627 int multiple
= TARGET_MULTIPLE
;
628 /* Save current -mstring/-mno-string status. */
629 int string
= TARGET_STRING
;
631 /* Identify the processor type. */
632 rs6000_select
[0].string
= default_cpu
;
633 rs6000_cpu
= TARGET_POWERPC64
? PROCESSOR_DEFAULT64
: PROCESSOR_DEFAULT
;
635 for (i
= 0; i
< ARRAY_SIZE (rs6000_select
); i
++)
637 ptr
= &rs6000_select
[i
];
638 if (ptr
->string
!= (char *)0 && ptr
->string
[0] != '\0')
640 for (j
= 0; j
< ptt_size
; j
++)
641 if (! strcmp (ptr
->string
, processor_target_table
[j
].name
))
644 rs6000_cpu
= processor_target_table
[j
].processor
;
648 target_flags
|= processor_target_table
[j
].target_enable
;
649 target_flags
&= ~processor_target_table
[j
].target_disable
;
655 error ("bad value (%s) for %s switch", ptr
->string
, ptr
->name
);
662 /* If we are optimizing big endian systems for space, use the load/store
663 multiple and string instructions. */
664 if (BYTES_BIG_ENDIAN
&& optimize_size
)
665 target_flags
|= MASK_MULTIPLE
| MASK_STRING
;
667 /* If -mmultiple or -mno-multiple was explicitly used, don't
668 override with the processor default */
669 if ((target_flags_explicit
& MASK_MULTIPLE
) != 0)
670 target_flags
= (target_flags
& ~MASK_MULTIPLE
) | multiple
;
672 /* If -mstring or -mno-string was explicitly used, don't override
673 with the processor default. */
674 if ((target_flags_explicit
& MASK_STRING
) != 0)
675 target_flags
= (target_flags
& ~MASK_STRING
) | string
;
677 /* Don't allow -mmultiple or -mstring on little endian systems
678 unless the cpu is a 750, because the hardware doesn't support the
679 instructions used in little endian mode, and causes an alignment
680 trap. The 750 does not cause an alignment trap (except when the
681 target is unaligned). */
683 if (!BYTES_BIG_ENDIAN
&& rs6000_cpu
!= PROCESSOR_PPC750
)
687 target_flags
&= ~MASK_MULTIPLE
;
688 if ((target_flags_explicit
& MASK_MULTIPLE
) != 0)
689 warning ("-mmultiple is not supported on little endian systems");
694 target_flags
&= ~MASK_STRING
;
695 if ((target_flags_explicit
& MASK_STRING
) != 0)
696 warning ("-mstring is not supported on little endian systems");
700 /* Set debug flags */
701 if (rs6000_debug_name
)
703 if (! strcmp (rs6000_debug_name
, "all"))
704 rs6000_debug_stack
= rs6000_debug_arg
= 1;
705 else if (! strcmp (rs6000_debug_name
, "stack"))
706 rs6000_debug_stack
= 1;
707 else if (! strcmp (rs6000_debug_name
, "arg"))
708 rs6000_debug_arg
= 1;
710 error ("unknown -mdebug-%s switch", rs6000_debug_name
);
713 if (rs6000_traceback_name
)
715 if (! strncmp (rs6000_traceback_name
, "full", 4))
716 rs6000_traceback
= traceback_full
;
717 else if (! strncmp (rs6000_traceback_name
, "part", 4))
718 rs6000_traceback
= traceback_part
;
719 else if (! strncmp (rs6000_traceback_name
, "no", 2))
720 rs6000_traceback
= traceback_none
;
722 error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
723 rs6000_traceback_name
);
726 /* Set size of long double */
727 rs6000_long_double_type_size
= 64;
728 if (rs6000_long_double_size_string
)
731 int size
= strtol (rs6000_long_double_size_string
, &tail
, 10);
732 if (*tail
!= '\0' || (size
!= 64 && size
!= 128))
733 error ("Unknown switch -mlong-double-%s",
734 rs6000_long_double_size_string
);
736 rs6000_long_double_type_size
= size
;
739 /* Handle -mabi= options. */
740 rs6000_parse_abi_options ();
742 /* Handle generic -mFOO=YES/NO options. */
743 rs6000_parse_yes_no_option ("vrsave", rs6000_altivec_vrsave_string
,
744 &rs6000_altivec_vrsave
);
745 rs6000_parse_yes_no_option ("isel", rs6000_isel_string
,
747 rs6000_parse_yes_no_option ("spe", rs6000_spe_string
, &rs6000_spe
);
748 rs6000_parse_yes_no_option ("float-gprs", rs6000_float_gprs_string
,
751 /* Handle -mtls-size option. */
752 rs6000_parse_tls_size_option ();
754 #ifdef SUBTARGET_OVERRIDE_OPTIONS
755 SUBTARGET_OVERRIDE_OPTIONS
;
757 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
758 SUBSUBTARGET_OVERRIDE_OPTIONS
;
763 /* The e500 does not have string instructions, and we set
764 MASK_STRING above when optimizing for size. */
765 if ((target_flags
& MASK_STRING
) != 0)
766 target_flags
= target_flags
& ~MASK_STRING
;
768 /* No SPE means 64-bit long doubles, even if an E500. */
769 if (rs6000_spe_string
!= 0
770 && !strcmp (rs6000_spe_string
, "no"))
771 rs6000_long_double_type_size
= 64;
773 else if (rs6000_select
[1].string
!= NULL
)
775 /* For the powerpc-eabispe configuration, we set all these by
776 default, so let's unset them if we manually set another
777 CPU that is not the E500. */
778 if (rs6000_abi_string
== 0)
780 if (rs6000_spe_string
== 0)
782 if (rs6000_float_gprs_string
== 0)
783 rs6000_float_gprs
= 0;
784 if (rs6000_isel_string
== 0)
786 if (rs6000_long_double_size_string
== 0)
787 rs6000_long_double_type_size
= 64;
790 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
791 using TARGET_OPTIONS to handle a toggle switch, but we're out of
792 bits in target_flags so TARGET_SWITCHES cannot be used.
793 Assumption here is that rs6000_longcall_switch points into the
794 text of the complete option, rather than being a copy, so we can
795 scan back for the presence or absence of the no- modifier. */
796 if (rs6000_longcall_switch
)
798 const char *base
= rs6000_longcall_switch
;
799 while (base
[-1] != 'm') base
--;
801 if (*rs6000_longcall_switch
!= '\0')
802 error ("invalid option `%s'", base
);
803 rs6000_default_long_calls
= (base
[0] != 'n');
806 #ifdef TARGET_REGNAMES
807 /* If the user desires alternate register names, copy in the
808 alternate names now. */
810 memcpy (rs6000_reg_names
, alt_reg_names
, sizeof (rs6000_reg_names
));
813 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
814 If -maix-struct-return or -msvr4-struct-return was explicitly
815 used, don't override with the ABI default. */
816 if ((target_flags_explicit
& MASK_AIX_STRUCT_RET
) == 0)
818 if (DEFAULT_ABI
== ABI_V4
&& !DRAFT_V4_STRUCT_RET
)
819 target_flags
= (target_flags
& ~MASK_AIX_STRUCT_RET
);
821 target_flags
|= MASK_AIX_STRUCT_RET
;
824 if (TARGET_LONG_DOUBLE_128
825 && (DEFAULT_ABI
== ABI_AIX
|| DEFAULT_ABI
== ABI_DARWIN
))
826 real_format_for_mode
[TFmode
- QFmode
] = &ibm_extended_format
;
828 /* Allocate an alias set for register saves & restores from stack. */
829 rs6000_sr_alias_set
= new_alias_set ();
832 ASM_GENERATE_INTERNAL_LABEL (toc_label_name
, "LCTOC", 1);
834 /* We can only guarantee the availability of DI pseudo-ops when
835 assembling for 64-bit targets. */
838 targetm
.asm_out
.aligned_op
.di
= NULL
;
839 targetm
.asm_out
.unaligned_op
.di
= NULL
;
842 /* Set maximum branch target alignment at two instructions, eight bytes. */
843 align_jumps_max_skip
= 8;
844 align_loops_max_skip
= 8;
846 /* Arrange to save and restore machine status around nested functions. */
847 init_machine_status
= rs6000_init_machine_status
;
/* Handle generic options of the form -mfoo=yes/no.
   NAME is the option name, used only for diagnostics.
   VALUE is the option value (may be NULL when the option was not given).
   FLAG is the pointer to the flag where to store a 1 or 0, depending on
   whether the option value is 'yes' or 'no' respectively.  */
static void
rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
{
  /* Option not specified on the command line: leave the flag at its
     current (default) setting.  */
  if (value == 0)
    return;
  else if (!strcmp (value, "yes"))
    *flag = 1;
  else if (!strcmp (value, "no"))
    *flag = 0;
  else
    /* Anything other than yes/no is a user error.  */
    error ("unknown -m%s= option specified: '%s'", name, value);
}
868 /* Handle -mabi= options. */
870 rs6000_parse_abi_options ()
872 if (rs6000_abi_string
== 0)
874 else if (! strcmp (rs6000_abi_string
, "altivec"))
875 rs6000_altivec_abi
= 1;
876 else if (! strcmp (rs6000_abi_string
, "no-altivec"))
877 rs6000_altivec_abi
= 0;
878 else if (! strcmp (rs6000_abi_string
, "spe"))
882 error ("not configured for ABI: '%s'", rs6000_abi_string
);
885 else if (! strcmp (rs6000_abi_string
, "no-spe"))
888 error ("unknown ABI specified: '%s'", rs6000_abi_string
);
891 /* Validate and record the size specified with the -mtls-size option. */
894 rs6000_parse_tls_size_option ()
896 if (rs6000_tls_size_string
== 0)
898 else if (strcmp (rs6000_tls_size_string
, "16") == 0)
899 rs6000_tls_size
= 16;
900 else if (strcmp (rs6000_tls_size_string
, "32") == 0)
901 rs6000_tls_size
= 32;
902 else if (strcmp (rs6000_tls_size_string
, "64") == 0)
903 rs6000_tls_size
= 64;
905 error ("bad value `%s' for -mtls-size switch", rs6000_tls_size_string
);
909 optimization_options (level
, size
)
910 int level ATTRIBUTE_UNUSED
;
911 int size ATTRIBUTE_UNUSED
;
915 /* Do anything needed at the start of the asm file. */
918 rs6000_file_start (file
, default_cpu
)
920 const char *default_cpu
;
924 const char *start
= buffer
;
925 struct rs6000_cpu_select
*ptr
;
927 if (flag_verbose_asm
)
929 sprintf (buffer
, "\n%s rs6000/powerpc options:", ASM_COMMENT_START
);
930 rs6000_select
[0].string
= default_cpu
;
932 for (i
= 0; i
< ARRAY_SIZE (rs6000_select
); i
++)
934 ptr
= &rs6000_select
[i
];
935 if (ptr
->string
!= (char *)0 && ptr
->string
[0] != '\0')
937 fprintf (file
, "%s %s%s", start
, ptr
->name
, ptr
->string
);
943 switch (rs6000_sdata
)
945 case SDATA_NONE
: fprintf (file
, "%s -msdata=none", start
); start
= ""; break;
946 case SDATA_DATA
: fprintf (file
, "%s -msdata=data", start
); start
= ""; break;
947 case SDATA_SYSV
: fprintf (file
, "%s -msdata=sysv", start
); start
= ""; break;
948 case SDATA_EABI
: fprintf (file
, "%s -msdata=eabi", start
); start
= ""; break;
951 if (rs6000_sdata
&& g_switch_value
)
953 fprintf (file
, "%s -G %d", start
, g_switch_value
);
963 /* Return nonzero if this function is known to have a null epilogue. */
968 if (reload_completed
)
970 rs6000_stack_t
*info
= rs6000_stack_info ();
972 if (info
->first_gp_reg_save
== 32
973 && info
->first_fp_reg_save
== 64
974 && info
->first_altivec_reg_save
== LAST_ALTIVEC_REGNO
+ 1
977 && info
->vrsave_mask
== 0
985 /* Returns 1 always. */
988 any_operand (op
, mode
)
989 rtx op ATTRIBUTE_UNUSED
;
990 enum machine_mode mode ATTRIBUTE_UNUSED
;
995 /* Returns 1 if op is the count register. */
997 count_register_operand (op
, mode
)
999 enum machine_mode mode ATTRIBUTE_UNUSED
;
1001 if (GET_CODE (op
) != REG
)
1004 if (REGNO (op
) == COUNT_REGISTER_REGNUM
)
1007 if (REGNO (op
) > FIRST_PSEUDO_REGISTER
)
1013 /* Returns 1 if op is an altivec register. */
1015 altivec_register_operand (op
, mode
)
1017 enum machine_mode mode ATTRIBUTE_UNUSED
;
1020 return (register_operand (op
, mode
)
1021 && (GET_CODE (op
) != REG
1022 || REGNO (op
) > FIRST_PSEUDO_REGISTER
1023 || ALTIVEC_REGNO_P (REGNO (op
))));
1027 xer_operand (op
, mode
)
1029 enum machine_mode mode ATTRIBUTE_UNUSED
;
1031 if (GET_CODE (op
) != REG
)
1034 if (XER_REGNO_P (REGNO (op
)))
1040 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
1041 by such constants completes more quickly. */
1044 s8bit_cint_operand (op
, mode
)
1046 enum machine_mode mode ATTRIBUTE_UNUSED
;
1048 return ( GET_CODE (op
) == CONST_INT
1049 && (INTVAL (op
) >= -128 && INTVAL (op
) <= 127));
1052 /* Return 1 if OP is a constant that can fit in a D field. */
1055 short_cint_operand (op
, mode
)
1057 enum machine_mode mode ATTRIBUTE_UNUSED
;
1059 return (GET_CODE (op
) == CONST_INT
1060 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I'));
1063 /* Similar for an unsigned D field. */
1066 u_short_cint_operand (op
, mode
)
1068 enum machine_mode mode ATTRIBUTE_UNUSED
;
1070 return (GET_CODE (op
) == CONST_INT
1071 && CONST_OK_FOR_LETTER_P (INTVAL (op
) & GET_MODE_MASK (mode
), 'K'));
1074 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
1077 non_short_cint_operand (op
, mode
)
1079 enum machine_mode mode ATTRIBUTE_UNUSED
;
1081 return (GET_CODE (op
) == CONST_INT
1082 && (unsigned HOST_WIDE_INT
) (INTVAL (op
) + 0x8000) >= 0x10000);
1085 /* Returns 1 if OP is a CONST_INT that is a positive value
1086 and an exact power of 2. */
1089 exact_log2_cint_operand (op
, mode
)
1091 enum machine_mode mode ATTRIBUTE_UNUSED
;
1093 return (GET_CODE (op
) == CONST_INT
1095 && exact_log2 (INTVAL (op
)) >= 0);
1098 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
1102 gpc_reg_operand (op
, mode
)
1104 enum machine_mode mode
;
1106 return (register_operand (op
, mode
)
1107 && (GET_CODE (op
) != REG
1108 || (REGNO (op
) >= ARG_POINTER_REGNUM
1109 && !XER_REGNO_P (REGNO (op
)))
1110 || REGNO (op
) < MQ_REGNO
));
1113 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1117 cc_reg_operand (op
, mode
)
1119 enum machine_mode mode
;
1121 return (register_operand (op
, mode
)
1122 && (GET_CODE (op
) != REG
1123 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
1124 || CR_REGNO_P (REGNO (op
))));
1127 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1128 CR field that isn't CR0. */
1131 cc_reg_not_cr0_operand (op
, mode
)
1133 enum machine_mode mode
;
1135 return (register_operand (op
, mode
)
1136 && (GET_CODE (op
) != REG
1137 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
1138 || CR_REGNO_NOT_CR0_P (REGNO (op
))));
1141 /* Returns 1 if OP is either a constant integer valid for a D-field or
1142 a non-special register. If a register, it must be in the proper
1143 mode unless MODE is VOIDmode. */
1146 reg_or_short_operand (op
, mode
)
1148 enum machine_mode mode
;
1150 return short_cint_operand (op
, mode
) || gpc_reg_operand (op
, mode
);
1153 /* Similar, except check if the negation of the constant would be
1154 valid for a D-field. */
1157 reg_or_neg_short_operand (op
, mode
)
1159 enum machine_mode mode
;
1161 if (GET_CODE (op
) == CONST_INT
)
1162 return CONST_OK_FOR_LETTER_P (INTVAL (op
), 'P');
1164 return gpc_reg_operand (op
, mode
);
1167 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1168 a non-special register. If a register, it must be in the proper
1169 mode unless MODE is VOIDmode. */
1172 reg_or_aligned_short_operand (op
, mode
)
1174 enum machine_mode mode
;
1176 if (gpc_reg_operand (op
, mode
))
1178 else if (short_cint_operand (op
, mode
) && !(INTVAL (op
) & 3))
1185 /* Return 1 if the operand is either a register or an integer whose
1186 high-order 16 bits are zero. */
1189 reg_or_u_short_operand (op
, mode
)
1191 enum machine_mode mode
;
1193 return u_short_cint_operand (op
, mode
) || gpc_reg_operand (op
, mode
);
1196 /* Return 1 is the operand is either a non-special register or ANY
1197 constant integer. */
1200 reg_or_cint_operand (op
, mode
)
1202 enum machine_mode mode
;
1204 return (GET_CODE (op
) == CONST_INT
|| gpc_reg_operand (op
, mode
));
1207 /* Return 1 is the operand is either a non-special register or ANY
1208 32-bit signed constant integer. */
1211 reg_or_arith_cint_operand (op
, mode
)
1213 enum machine_mode mode
;
1215 return (gpc_reg_operand (op
, mode
)
1216 || (GET_CODE (op
) == CONST_INT
1217 #if HOST_BITS_PER_WIDE_INT != 32
1218 && ((unsigned HOST_WIDE_INT
) (INTVAL (op
) + 0x80000000)
1219 < (unsigned HOST_WIDE_INT
) 0x100000000ll
)
1224 /* Return 1 is the operand is either a non-special register or a 32-bit
1225 signed constant integer valid for 64-bit addition. */
1228 reg_or_add_cint64_operand (op
, mode
)
1230 enum machine_mode mode
;
1232 return (gpc_reg_operand (op
, mode
)
1233 || (GET_CODE (op
) == CONST_INT
1234 #if HOST_BITS_PER_WIDE_INT == 32
1235 && INTVAL (op
) < 0x7fff8000
1237 && ((unsigned HOST_WIDE_INT
) (INTVAL (op
) + 0x80008000)
1243 /* Return 1 is the operand is either a non-special register or a 32-bit
1244 signed constant integer valid for 64-bit subtraction. */
1247 reg_or_sub_cint64_operand (op
, mode
)
1249 enum machine_mode mode
;
1251 return (gpc_reg_operand (op
, mode
)
1252 || (GET_CODE (op
) == CONST_INT
1253 #if HOST_BITS_PER_WIDE_INT == 32
1254 && (- INTVAL (op
)) < 0x7fff8000
1256 && ((unsigned HOST_WIDE_INT
) ((- INTVAL (op
)) + 0x80008000)
1262 /* Return 1 is the operand is either a non-special register or ANY
1263 32-bit unsigned constant integer. */
1266 reg_or_logical_cint_operand (op
, mode
)
1268 enum machine_mode mode
;
1270 if (GET_CODE (op
) == CONST_INT
)
1272 if (GET_MODE_BITSIZE (mode
) > HOST_BITS_PER_WIDE_INT
)
1274 if (GET_MODE_BITSIZE (mode
) <= 32)
1277 if (INTVAL (op
) < 0)
1281 return ((INTVAL (op
) & GET_MODE_MASK (mode
)
1282 & (~ (unsigned HOST_WIDE_INT
) 0xffffffff)) == 0);
1284 else if (GET_CODE (op
) == CONST_DOUBLE
)
1286 if (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
1290 return CONST_DOUBLE_HIGH (op
) == 0;
1293 return gpc_reg_operand (op
, mode
);
1296 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1299 got_operand (op
, mode
)
1301 enum machine_mode mode ATTRIBUTE_UNUSED
;
1303 return (GET_CODE (op
) == SYMBOL_REF
1304 || GET_CODE (op
) == CONST
1305 || GET_CODE (op
) == LABEL_REF
);
1308 /* Return 1 if the operand is a simple references that can be loaded via
1309 the GOT (labels involving addition aren't allowed). */
1312 got_no_const_operand (op
, mode
)
1314 enum machine_mode mode ATTRIBUTE_UNUSED
;
1316 return (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == LABEL_REF
);
1319 /* Return the number of instructions it takes to form a constant in an
1320 integer register. */
1323 num_insns_constant_wide (value
)
1324 HOST_WIDE_INT value
;
1326 /* signed constant loadable with {cal|addi} */
1327 if (CONST_OK_FOR_LETTER_P (value
, 'I'))
1330 /* constant loadable with {cau|addis} */
1331 else if (CONST_OK_FOR_LETTER_P (value
, 'L'))
1334 #if HOST_BITS_PER_WIDE_INT == 64
1335 else if (TARGET_POWERPC64
)
1337 HOST_WIDE_INT low
= ((value
& 0xffffffff) ^ 0x80000000) - 0x80000000;
1338 HOST_WIDE_INT high
= value
>> 31;
1340 if (high
== 0 || high
== -1)
1346 return num_insns_constant_wide (high
) + 1;
1348 return (num_insns_constant_wide (high
)
1349 + num_insns_constant_wide (low
) + 1);
1358 num_insns_constant (op
, mode
)
1360 enum machine_mode mode
;
1362 if (GET_CODE (op
) == CONST_INT
)
1364 #if HOST_BITS_PER_WIDE_INT == 64
1365 if ((INTVAL (op
) >> 31) != 0 && (INTVAL (op
) >> 31) != -1
1366 && mask64_operand (op
, mode
))
1370 return num_insns_constant_wide (INTVAL (op
));
1373 else if (GET_CODE (op
) == CONST_DOUBLE
&& mode
== SFmode
)
1378 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1379 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
1380 return num_insns_constant_wide ((HOST_WIDE_INT
) l
);
1383 else if (GET_CODE (op
) == CONST_DOUBLE
)
1389 int endian
= (WORDS_BIG_ENDIAN
== 0);
1391 if (mode
== VOIDmode
|| mode
== DImode
)
1393 high
= CONST_DOUBLE_HIGH (op
);
1394 low
= CONST_DOUBLE_LOW (op
);
1398 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1399 REAL_VALUE_TO_TARGET_DOUBLE (rv
, l
);
1401 low
= l
[1 - endian
];
1405 return (num_insns_constant_wide (low
)
1406 + num_insns_constant_wide (high
));
1410 if (high
== 0 && low
>= 0)
1411 return num_insns_constant_wide (low
);
1413 else if (high
== -1 && low
< 0)
1414 return num_insns_constant_wide (low
);
1416 else if (mask64_operand (op
, mode
))
1420 return num_insns_constant_wide (high
) + 1;
1423 return (num_insns_constant_wide (high
)
1424 + num_insns_constant_wide (low
) + 1);
1432 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1433 register with one instruction per word. We only do this if we can
1434 safely read CONST_DOUBLE_{LOW,HIGH}. */
1437 easy_fp_constant (op
, mode
)
1439 enum machine_mode mode
;
1441 if (GET_CODE (op
) != CONST_DOUBLE
1442 || GET_MODE (op
) != mode
1443 || (GET_MODE_CLASS (mode
) != MODE_FLOAT
&& mode
!= DImode
))
1446 /* Consider all constants with -msoft-float to be easy. */
1447 if ((TARGET_SOFT_FLOAT
|| !TARGET_FPRS
)
1451 /* If we are using V.4 style PIC, consider all constants to be hard. */
1452 if (flag_pic
&& DEFAULT_ABI
== ABI_V4
)
1455 #ifdef TARGET_RELOCATABLE
1456 /* Similarly if we are using -mrelocatable, consider all constants
1458 if (TARGET_RELOCATABLE
)
1467 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1468 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv
, k
);
1470 return (num_insns_constant_wide ((HOST_WIDE_INT
) k
[0]) == 1
1471 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[1]) == 1
1472 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[2]) == 1
1473 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[3]) == 1);
1476 else if (mode
== DFmode
)
1481 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1482 REAL_VALUE_TO_TARGET_DOUBLE (rv
, k
);
1484 return (num_insns_constant_wide ((HOST_WIDE_INT
) k
[0]) == 1
1485 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[1]) == 1);
1488 else if (mode
== SFmode
)
1493 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1494 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
1496 return num_insns_constant_wide (l
) == 1;
1499 else if (mode
== DImode
)
1500 return ((TARGET_POWERPC64
1501 && GET_CODE (op
) == CONST_DOUBLE
&& CONST_DOUBLE_LOW (op
) == 0)
1502 || (num_insns_constant (op
, DImode
) <= 2));
1504 else if (mode
== SImode
)
1510 /* Return non zero if all elements of a vector have the same value. */
1513 easy_vector_same (op
, mode
)
1515 enum machine_mode mode ATTRIBUTE_UNUSED
;
1519 units
= CONST_VECTOR_NUNITS (op
);
1521 cst
= INTVAL (CONST_VECTOR_ELT (op
, 0));
1522 for (i
= 1; i
< units
; ++i
)
1523 if (INTVAL (CONST_VECTOR_ELT (op
, i
)) != cst
)
1530 /* Return 1 if the operand is a CONST_INT and can be put into a
1531 register without using memory. */
1534 easy_vector_constant (op
, mode
)
1536 enum machine_mode mode
;
1540 if (GET_CODE (op
) != CONST_VECTOR
1545 if (zero_constant (op
, mode
)
1546 && ((TARGET_ALTIVEC
&& ALTIVEC_VECTOR_MODE (mode
))
1547 || (TARGET_SPE
&& SPE_VECTOR_MODE (mode
))))
1550 if (GET_MODE_CLASS (mode
) != MODE_VECTOR_INT
)
1553 if (TARGET_SPE
&& mode
== V1DImode
)
1556 cst
= INTVAL (CONST_VECTOR_ELT (op
, 0));
1557 cst2
= INTVAL (CONST_VECTOR_ELT (op
, 1));
1559 /* Limit SPE vectors to 15 bits signed. These we can generate with:
1561 evmergelo r0, r0, r0
1564 I don't know how efficient it would be to allow bigger constants,
1565 considering we'll have an extra 'ori' for every 'li'. I doubt 5
1566 instructions is better than a 64-bit memory load, but I don't
1567 have the e500 timing specs. */
1568 if (TARGET_SPE
&& mode
== V2SImode
1569 && cst
>= -0x7fff && cst
<= 0x7fff
1570 && cst2
>= -0x7fff && cst2
<= 0x7fff)
1573 if (TARGET_ALTIVEC
&& EASY_VECTOR_15 (cst
, op
, mode
))
1576 if (TARGET_ALTIVEC
&& EASY_VECTOR_15_ADD_SELF (cst
, op
, mode
))
1582 /* Same as easy_vector_constant but only for EASY_VECTOR_15_ADD_SELF. */
1585 easy_vector_constant_add_self (op
, mode
)
1587 enum machine_mode mode
;
1591 if (!easy_vector_constant (op
, mode
))
1594 cst
= INTVAL (CONST_VECTOR_ELT (op
, 0));
1596 return TARGET_ALTIVEC
&& EASY_VECTOR_15_ADD_SELF (cst
, op
, mode
);
1600 output_vec_const_move (operands
)
1604 enum machine_mode mode
;
1610 cst
= INTVAL (CONST_VECTOR_ELT (vec
, 0));
1611 cst2
= INTVAL (CONST_VECTOR_ELT (vec
, 1));
1612 mode
= GET_MODE (dest
);
1616 if (zero_constant (vec
, mode
))
1617 return "vxor %0,%0,%0";
1618 else if (EASY_VECTOR_15 (cst
, vec
, mode
))
1620 operands
[1] = GEN_INT (cst
);
1624 return "vspltisw %0,%1";
1626 return "vspltish %0,%1";
1628 return "vspltisb %0,%1";
1633 else if (EASY_VECTOR_15_ADD_SELF (cst
, vec
, mode
))
1641 /* Vector constant 0 is handled as a splitter of V2SI, and in the
1642 pattern of V1DI, V4HI, and V2SF.
1644 FIXME: We should probabl return # and add post reload
1645 splitters for these, but this way is so easy ;-).
1647 operands
[1] = GEN_INT (cst
);
1648 operands
[2] = GEN_INT (cst2
);
1650 return "li %0,%1\n\tevmergelo %0,%0,%0";
1652 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
1658 /* Return 1 if the operand is the constant 0. This works for scalars
1659 as well as vectors. */
1661 zero_constant (op
, mode
)
1663 enum machine_mode mode
;
1665 return op
== CONST0_RTX (mode
);
1668 /* Return 1 if the operand is 0.0. */
1670 zero_fp_constant (op
, mode
)
1672 enum machine_mode mode
;
1674 return GET_MODE_CLASS (mode
) == MODE_FLOAT
&& op
== CONST0_RTX (mode
);
1677 /* Return 1 if the operand is in volatile memory. Note that during
1678 the RTL generation phase, memory_operand does not return TRUE for
1679 volatile memory references. So this function allows us to
1680 recognize volatile references where its safe. */
1683 volatile_mem_operand (op
, mode
)
1685 enum machine_mode mode
;
1687 if (GET_CODE (op
) != MEM
)
1690 if (!MEM_VOLATILE_P (op
))
1693 if (mode
!= GET_MODE (op
))
1696 if (reload_completed
)
1697 return memory_operand (op
, mode
);
1699 if (reload_in_progress
)
1700 return strict_memory_address_p (mode
, XEXP (op
, 0));
1702 return memory_address_p (mode
, XEXP (op
, 0));
1705 /* Return 1 if the operand is an offsettable memory operand. */
1708 offsettable_mem_operand (op
, mode
)
1710 enum machine_mode mode
;
1712 return ((GET_CODE (op
) == MEM
)
1713 && offsettable_address_p (reload_completed
|| reload_in_progress
,
1714 mode
, XEXP (op
, 0)));
1717 /* Return 1 if the operand is either an easy FP constant (see above) or
1721 mem_or_easy_const_operand (op
, mode
)
1723 enum machine_mode mode
;
1725 return memory_operand (op
, mode
) || easy_fp_constant (op
, mode
);
1728 /* Return 1 if the operand is either a non-special register or an item
1729 that can be used as the operand of a `mode' add insn. */
1732 add_operand (op
, mode
)
1734 enum machine_mode mode
;
1736 if (GET_CODE (op
) == CONST_INT
)
1737 return (CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I')
1738 || CONST_OK_FOR_LETTER_P (INTVAL (op
), 'L'));
1740 return gpc_reg_operand (op
, mode
);
1743 /* Return 1 if OP is a constant but not a valid add_operand. */
1746 non_add_cint_operand (op
, mode
)
1748 enum machine_mode mode ATTRIBUTE_UNUSED
;
1750 return (GET_CODE (op
) == CONST_INT
1751 && !CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I')
1752 && !CONST_OK_FOR_LETTER_P (INTVAL (op
), 'L'));
1755 /* Return 1 if the operand is a non-special register or a constant that
1756 can be used as the operand of an OR or XOR insn on the RS/6000. */
1759 logical_operand (op
, mode
)
1761 enum machine_mode mode
;
1763 HOST_WIDE_INT opl
, oph
;
1765 if (gpc_reg_operand (op
, mode
))
1768 if (GET_CODE (op
) == CONST_INT
)
1770 opl
= INTVAL (op
) & GET_MODE_MASK (mode
);
1772 #if HOST_BITS_PER_WIDE_INT <= 32
1773 if (GET_MODE_BITSIZE (mode
) > HOST_BITS_PER_WIDE_INT
&& opl
< 0)
1777 else if (GET_CODE (op
) == CONST_DOUBLE
)
1779 if (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
1782 opl
= CONST_DOUBLE_LOW (op
);
1783 oph
= CONST_DOUBLE_HIGH (op
);
1790 return ((opl
& ~ (unsigned HOST_WIDE_INT
) 0xffff) == 0
1791 || (opl
& ~ (unsigned HOST_WIDE_INT
) 0xffff0000) == 0);
1794 /* Return 1 if C is a constant that is not a logical operand (as
1795 above), but could be split into one. */
1798 non_logical_cint_operand (op
, mode
)
1800 enum machine_mode mode
;
1802 return ((GET_CODE (op
) == CONST_INT
|| GET_CODE (op
) == CONST_DOUBLE
)
1803 && ! logical_operand (op
, mode
)
1804 && reg_or_logical_cint_operand (op
, mode
));
1807 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1808 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1809 Reject all ones and all zeros, since these should have been optimized
1810 away and confuse the making of MB and ME. */
1813 mask_operand (op
, mode
)
1815 enum machine_mode mode ATTRIBUTE_UNUSED
;
1817 HOST_WIDE_INT c
, lsb
;
1819 if (GET_CODE (op
) != CONST_INT
)
1824 /* Fail in 64-bit mode if the mask wraps around because the upper
1825 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
1826 if (TARGET_POWERPC64
&& (c
& 0x80000001) == 0x80000001)
1829 /* We don't change the number of transitions by inverting,
1830 so make sure we start with the LS bit zero. */
1834 /* Reject all zeros or all ones. */
1838 /* Find the first transition. */
1841 /* Invert to look for a second transition. */
1844 /* Erase first transition. */
1847 /* Find the second transition (if any). */
1850 /* Match if all the bits above are 1's (or c is zero). */
1854 /* Return 1 for the PowerPC64 rlwinm corner case. */
1857 mask_operand_wrap (op
, mode
)
1859 enum machine_mode mode ATTRIBUTE_UNUSED
;
1861 HOST_WIDE_INT c
, lsb
;
1863 if (GET_CODE (op
) != CONST_INT
)
1868 if ((c
& 0x80000001) != 0x80000001)
1882 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
1883 It is if there are no more than one 1->0 or 0->1 transitions.
1884 Reject all zeros, since zero should have been optimized away and
1885 confuses the making of MB and ME. */
1888 mask64_operand (op
, mode
)
1890 enum machine_mode mode ATTRIBUTE_UNUSED
;
1892 if (GET_CODE (op
) == CONST_INT
)
1894 HOST_WIDE_INT c
, lsb
;
1898 /* Reject all zeros. */
1902 /* We don't change the number of transitions by inverting,
1903 so make sure we start with the LS bit zero. */
1907 /* Find the transition, and check that all bits above are 1's. */
1910 /* Match if all the bits above are 1's (or c is zero). */
1916 /* Like mask64_operand, but allow up to three transitions. This
1917 predicate is used by insn patterns that generate two rldicl or
1918 rldicr machine insns. */
1921 mask64_2_operand (op
, mode
)
1923 enum machine_mode mode ATTRIBUTE_UNUSED
;
1925 if (GET_CODE (op
) == CONST_INT
)
1927 HOST_WIDE_INT c
, lsb
;
1931 /* Disallow all zeros. */
1935 /* We don't change the number of transitions by inverting,
1936 so make sure we start with the LS bit zero. */
1940 /* Find the first transition. */
1943 /* Invert to look for a second transition. */
1946 /* Erase first transition. */
1949 /* Find the second transition. */
1952 /* Invert to look for a third transition. */
1955 /* Erase second transition. */
1958 /* Find the third transition (if any). */
1961 /* Match if all the bits above are 1's (or c is zero). */
1967 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
1968 implement ANDing by the mask IN. */
1970 build_mask64_2_operands (in
, out
)
1974 #if HOST_BITS_PER_WIDE_INT >= 64
1975 unsigned HOST_WIDE_INT c
, lsb
, m1
, m2
;
1978 if (GET_CODE (in
) != CONST_INT
)
1984 /* Assume c initially something like 0x00fff000000fffff. The idea
1985 is to rotate the word so that the middle ^^^^^^ group of zeros
1986 is at the MS end and can be cleared with an rldicl mask. We then
1987 rotate back and clear off the MS ^^ group of zeros with a
1989 c
= ~c
; /* c == 0xff000ffffff00000 */
1990 lsb
= c
& -c
; /* lsb == 0x0000000000100000 */
1991 m1
= -lsb
; /* m1 == 0xfffffffffff00000 */
1992 c
= ~c
; /* c == 0x00fff000000fffff */
1993 c
&= -lsb
; /* c == 0x00fff00000000000 */
1994 lsb
= c
& -c
; /* lsb == 0x0000100000000000 */
1995 c
= ~c
; /* c == 0xff000fffffffffff */
1996 c
&= -lsb
; /* c == 0xff00000000000000 */
1998 while ((lsb
>>= 1) != 0)
1999 shift
++; /* shift == 44 on exit from loop */
2000 m1
<<= 64 - shift
; /* m1 == 0xffffff0000000000 */
2001 m1
= ~m1
; /* m1 == 0x000000ffffffffff */
2002 m2
= ~c
; /* m2 == 0x00ffffffffffffff */
2006 /* Assume c initially something like 0xff000f0000000000. The idea
2007 is to rotate the word so that the ^^^ middle group of zeros
2008 is at the LS end and can be cleared with an rldicr mask. We then
2009 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
2011 lsb
= c
& -c
; /* lsb == 0x0000010000000000 */
2012 m2
= -lsb
; /* m2 == 0xffffff0000000000 */
2013 c
= ~c
; /* c == 0x00fff0ffffffffff */
2014 c
&= -lsb
; /* c == 0x00fff00000000000 */
2015 lsb
= c
& -c
; /* lsb == 0x0000100000000000 */
2016 c
= ~c
; /* c == 0xff000fffffffffff */
2017 c
&= -lsb
; /* c == 0xff00000000000000 */
2019 while ((lsb
>>= 1) != 0)
2020 shift
++; /* shift == 44 on exit from loop */
2021 m1
= ~c
; /* m1 == 0x00ffffffffffffff */
2022 m1
>>= shift
; /* m1 == 0x0000000000000fff */
2023 m1
= ~m1
; /* m1 == 0xfffffffffffff000 */
2026 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
2027 masks will be all 1's. We are guaranteed more than one transition. */
2028 out
[0] = GEN_INT (64 - shift
);
2029 out
[1] = GEN_INT (m1
);
2030 out
[2] = GEN_INT (shift
);
2031 out
[3] = GEN_INT (m2
);
2039 /* Return 1 if the operand is either a non-special register or a constant
2040 that can be used as the operand of a PowerPC64 logical AND insn. */
2043 and64_operand (op
, mode
)
2045 enum machine_mode mode
;
2047 if (fixed_regs
[CR0_REGNO
]) /* CR0 not available, don't do andi./andis. */
2048 return (gpc_reg_operand (op
, mode
) || mask64_operand (op
, mode
));
2050 return (logical_operand (op
, mode
) || mask64_operand (op
, mode
));
2053 /* Like the above, but also match constants that can be implemented
2054 with two rldicl or rldicr insns. */
2057 and64_2_operand (op
, mode
)
2059 enum machine_mode mode
;
2061 if (fixed_regs
[CR0_REGNO
]) /* CR0 not available, don't do andi./andis. */
2062 return gpc_reg_operand (op
, mode
) || mask64_2_operand (op
, mode
);
2064 return logical_operand (op
, mode
) || mask64_2_operand (op
, mode
);
2067 /* Return 1 if the operand is either a non-special register or a
2068 constant that can be used as the operand of an RS/6000 logical AND insn. */
2071 and_operand (op
, mode
)
2073 enum machine_mode mode
;
2075 if (fixed_regs
[CR0_REGNO
]) /* CR0 not available, don't do andi./andis. */
2076 return (gpc_reg_operand (op
, mode
) || mask_operand (op
, mode
));
2078 return (logical_operand (op
, mode
) || mask_operand (op
, mode
));
2081 /* Return 1 if the operand is a general register or memory operand. */
2084 reg_or_mem_operand (op
, mode
)
2086 enum machine_mode mode
;
2088 return (gpc_reg_operand (op
, mode
)
2089 || memory_operand (op
, mode
)
2090 || volatile_mem_operand (op
, mode
));
2093 /* Return 1 if the operand is a general register or memory operand without
2094 pre_inc or pre_dec which produces invalid form of PowerPC lwa
2098 lwa_operand (op
, mode
)
2100 enum machine_mode mode
;
2104 if (reload_completed
&& GET_CODE (inner
) == SUBREG
)
2105 inner
= SUBREG_REG (inner
);
2107 return gpc_reg_operand (inner
, mode
)
2108 || (memory_operand (inner
, mode
)
2109 && GET_CODE (XEXP (inner
, 0)) != PRE_INC
2110 && GET_CODE (XEXP (inner
, 0)) != PRE_DEC
2111 && (GET_CODE (XEXP (inner
, 0)) != PLUS
2112 || GET_CODE (XEXP (XEXP (inner
, 0), 1)) != CONST_INT
2113 || INTVAL (XEXP (XEXP (inner
, 0), 1)) % 4 == 0));
2116 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
2119 symbol_ref_operand (op
, mode
)
2121 enum machine_mode mode
;
2123 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
2126 return (GET_CODE (op
) == SYMBOL_REF
2127 && (DEFAULT_ABI
!= ABI_AIX
|| SYMBOL_REF_FUNCTION_P (op
)));
2130 /* Return 1 if the operand, used inside a MEM, is a valid first argument
2131 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
2134 call_operand (op
, mode
)
2136 enum machine_mode mode
;
2138 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
2141 return (GET_CODE (op
) == SYMBOL_REF
2142 || (GET_CODE (op
) == REG
2143 && (REGNO (op
) == LINK_REGISTER_REGNUM
2144 || REGNO (op
) == COUNT_REGISTER_REGNUM
2145 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
)));
2148 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
2152 current_file_function_operand (op
, mode
)
2154 enum machine_mode mode ATTRIBUTE_UNUSED
;
2156 return (GET_CODE (op
) == SYMBOL_REF
2157 && (DEFAULT_ABI
!= ABI_AIX
|| SYMBOL_REF_FUNCTION_P (op
))
2158 && (SYMBOL_REF_LOCAL_P (op
)
2159 || (op
== XEXP (DECL_RTL (current_function_decl
), 0))));
2162 /* Return 1 if this operand is a valid input for a move insn. */
2165 input_operand (op
, mode
)
2167 enum machine_mode mode
;
2169 /* Memory is always valid. */
2170 if (memory_operand (op
, mode
))
2173 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
2174 if (GET_CODE (op
) == CONSTANT_P_RTX
)
2177 /* For floating-point, easy constants are valid. */
2178 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
2180 && easy_fp_constant (op
, mode
))
2183 /* Allow any integer constant. */
2184 if (GET_MODE_CLASS (mode
) == MODE_INT
2185 && (GET_CODE (op
) == CONST_INT
2186 || GET_CODE (op
) == CONST_DOUBLE
))
2189 /* Allow easy vector constants. */
2190 if (GET_CODE (op
) == CONST_VECTOR
2191 && easy_vector_constant (op
, mode
))
2194 /* For floating-point or multi-word mode, the only remaining valid type
2196 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
2197 || GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
2198 return register_operand (op
, mode
);
2200 /* The only cases left are integral modes one word or smaller (we
2201 do not get called for MODE_CC values). These can be in any
2203 if (register_operand (op
, mode
))
2206 /* A SYMBOL_REF referring to the TOC is valid. */
2207 if (legitimate_constant_pool_address_p (op
))
2210 /* A constant pool expression (relative to the TOC) is valid */
2211 if (toc_relative_expr_p (op
))
2214 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
2216 if (DEFAULT_ABI
== ABI_V4
2217 && (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == CONST
)
2218 && small_data_operand (op
, Pmode
))
2224 /* Return 1 for an operand in small memory on V.4/eabi. */
2227 small_data_operand (op
, mode
)
2228 rtx op ATTRIBUTE_UNUSED
;
2229 enum machine_mode mode ATTRIBUTE_UNUSED
;
2234 if (rs6000_sdata
== SDATA_NONE
|| rs6000_sdata
== SDATA_DATA
)
2237 if (DEFAULT_ABI
!= ABI_V4
)
2240 if (GET_CODE (op
) == SYMBOL_REF
)
2243 else if (GET_CODE (op
) != CONST
2244 || GET_CODE (XEXP (op
, 0)) != PLUS
2245 || GET_CODE (XEXP (XEXP (op
, 0), 0)) != SYMBOL_REF
2246 || GET_CODE (XEXP (XEXP (op
, 0), 1)) != CONST_INT
)
2251 rtx sum
= XEXP (op
, 0);
2252 HOST_WIDE_INT summand
;
2254 /* We have to be careful here, because it is the referenced address
2255 that must be 32k from _SDA_BASE_, not just the symbol. */
2256 summand
= INTVAL (XEXP (sum
, 1));
2257 if (summand
< 0 || summand
> g_switch_value
)
2260 sym_ref
= XEXP (sum
, 0);
2263 return SYMBOL_REF_SMALL_P (sym_ref
);
2269 /* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
2272 constant_pool_expr_1 (op
, have_sym
, have_toc
)
2277 switch (GET_CODE(op
))
2280 if (RS6000_SYMBOL_REF_TLS_P (op
))
2282 else if (CONSTANT_POOL_ADDRESS_P (op
))
2284 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op
), Pmode
))
2292 else if (! strcmp (XSTR (op
, 0), toc_label_name
))
2301 return (constant_pool_expr_1 (XEXP (op
, 0), have_sym
, have_toc
)
2302 && constant_pool_expr_1 (XEXP (op
, 1), have_sym
, have_toc
));
2304 return constant_pool_expr_1 (XEXP (op
, 0), have_sym
, have_toc
);
2313 constant_pool_expr_p (op
)
2318 return constant_pool_expr_1 (op
, &have_sym
, &have_toc
) && have_sym
;
2322 toc_relative_expr_p (op
)
2327 return constant_pool_expr_1 (op
, &have_sym
, &have_toc
) && have_toc
;
2330 /* SPE offset addressing is limited to 5-bits worth of double words. */
2331 #define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
2334 legitimate_constant_pool_address_p (x
)
2338 && GET_CODE (x
) == PLUS
2339 && GET_CODE (XEXP (x
, 0)) == REG
2340 && (TARGET_MINIMAL_TOC
|| REGNO (XEXP (x
, 0)) == TOC_REGISTER
)
2341 && constant_pool_expr_p (XEXP (x
, 1)));
2345 legitimate_small_data_p (mode
, x
)
2346 enum machine_mode mode
;
2349 return (DEFAULT_ABI
== ABI_V4
2350 && !flag_pic
&& !TARGET_TOC
2351 && (GET_CODE (x
) == SYMBOL_REF
|| GET_CODE (x
) == CONST
)
2352 && small_data_operand (x
, mode
));
2356 legitimate_offset_address_p (mode
, x
, strict
)
2357 enum machine_mode mode
;
2361 unsigned HOST_WIDE_INT offset
, extra
;
2363 if (GET_CODE (x
) != PLUS
)
2365 if (GET_CODE (XEXP (x
, 0)) != REG
)
2367 if (!INT_REG_OK_FOR_BASE_P (XEXP (x
, 0), strict
))
2369 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
2372 offset
= INTVAL (XEXP (x
, 1));
2380 /* AltiVec vector modes. Only reg+reg addressing is valid here,
2381 which leaves the only valid constant offset of zero, which by
2382 canonicalization rules is also invalid. */
2389 /* SPE vector modes. */
2390 return SPE_CONST_OFFSET_OK (offset
);
2396 else if (offset
& 3)
2404 else if (offset
& 3)
2414 return (offset
+ extra
>= offset
) && (offset
+ extra
+ 0x8000 < 0x10000);
2418 legitimate_indexed_address_p (x
, strict
)
2424 if (GET_CODE (x
) != PLUS
)
2429 if (!REG_P (op0
) || !REG_P (op1
))
2432 return ((INT_REG_OK_FOR_BASE_P (op0
, strict
)
2433 && INT_REG_OK_FOR_INDEX_P (op1
, strict
))
2434 || (INT_REG_OK_FOR_BASE_P (op1
, strict
)
2435 && INT_REG_OK_FOR_INDEX_P (op0
, strict
)));
2439 legitimate_indirect_address_p (x
, strict
)
2443 return GET_CODE (x
) == REG
&& INT_REG_OK_FOR_BASE_P (x
, strict
);
2447 legitimate_lo_sum_address_p (mode
, x
, strict
)
2448 enum machine_mode mode
;
2452 if (GET_CODE (x
) != LO_SUM
)
2454 if (GET_CODE (XEXP (x
, 0)) != REG
)
2456 if (!INT_REG_OK_FOR_BASE_P (XEXP (x
, 0), strict
))
2462 if (DEFAULT_ABI
!= ABI_AIX
&& flag_pic
)
2466 if (GET_MODE_NUNITS (mode
) != 1)
2468 if (GET_MODE_BITSIZE (mode
) > 32
2469 && !(TARGET_HARD_FLOAT
&& TARGET_FPRS
&& mode
== DFmode
))
2472 return CONSTANT_P (x
);
2479 /* Try machine-dependent ways of modifying an illegitimate address
2480 to be legitimate. If we find one, return the new, valid address.
2481 This is used from only one place: `memory_address' in explow.c.
2483 OLDX is the address as it was before break_out_memory_refs was
2484 called. In some cases it is useful to look at this to decide what
2487 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2489 It is always safe for this function to do nothing. It exists to
2490 recognize opportunities to optimize the output.
2492 On RS/6000, first check for the sum of a register with a constant
2493 integer that is out of range. If so, generate code to add the
2494 constant with the low-order 16 bits masked to the register and force
2495 this result into another register (this can be done with `cau').
2496 Then generate an address of REG+(CONST&0xffff), allowing for the
2497 possibility of bit 16 being a one.
2499 Then check for the sum of a register and something not constant, try to
2500 load the other things into a register and return the sum. */
2503 rs6000_legitimize_address (x
, oldx
, mode
)
2505 rtx oldx ATTRIBUTE_UNUSED
;
2506 enum machine_mode mode
;
2508 if (GET_CODE (x
) == SYMBOL_REF
)
2510 enum tls_model model
= SYMBOL_REF_TLS_MODEL (x
);
2512 return rs6000_legitimize_tls_address (x
, model
);
2515 if (GET_CODE (x
) == PLUS
2516 && GET_CODE (XEXP (x
, 0)) == REG
2517 && GET_CODE (XEXP (x
, 1)) == CONST_INT
2518 && (unsigned HOST_WIDE_INT
) (INTVAL (XEXP (x
, 1)) + 0x8000) >= 0x10000)
2520 HOST_WIDE_INT high_int
, low_int
;
2522 low_int
= ((INTVAL (XEXP (x
, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2523 high_int
= INTVAL (XEXP (x
, 1)) - low_int
;
2524 sum
= force_operand (gen_rtx_PLUS (Pmode
, XEXP (x
, 0),
2525 GEN_INT (high_int
)), 0);
2526 return gen_rtx_PLUS (Pmode
, sum
, GEN_INT (low_int
));
2528 else if (GET_CODE (x
) == PLUS
2529 && GET_CODE (XEXP (x
, 0)) == REG
2530 && GET_CODE (XEXP (x
, 1)) != CONST_INT
2531 && GET_MODE_NUNITS (mode
) == 1
2532 && ((TARGET_HARD_FLOAT
&& TARGET_FPRS
)
2534 || (mode
!= DFmode
&& mode
!= TFmode
))
2535 && (TARGET_POWERPC64
|| mode
!= DImode
)
2538 return gen_rtx_PLUS (Pmode
, XEXP (x
, 0),
2539 force_reg (Pmode
, force_operand (XEXP (x
, 1), 0)));
2541 else if (ALTIVEC_VECTOR_MODE (mode
))
2545 /* Make sure both operands are registers. */
2546 if (GET_CODE (x
) == PLUS
)
2547 return gen_rtx_PLUS (Pmode
, force_reg (Pmode
, XEXP (x
, 0)),
2548 force_reg (Pmode
, XEXP (x
, 1)));
2550 reg
= force_reg (Pmode
, x
);
2553 else if (SPE_VECTOR_MODE (mode
))
2555 /* We accept [reg + reg] and [reg + OFFSET]. */
2557 if (GET_CODE (x
) == PLUS
)
2559 rtx op1
= XEXP (x
, 0);
2560 rtx op2
= XEXP (x
, 1);
2562 op1
= force_reg (Pmode
, op1
);
2564 if (GET_CODE (op2
) != REG
2565 && (GET_CODE (op2
) != CONST_INT
2566 || !SPE_CONST_OFFSET_OK (INTVAL (op2
))))
2567 op2
= force_reg (Pmode
, op2
);
2569 return gen_rtx_PLUS (Pmode
, op1
, op2
);
2572 return force_reg (Pmode
, x
);
2578 && GET_CODE (x
) != CONST_INT
2579 && GET_CODE (x
) != CONST_DOUBLE
2581 && GET_MODE_NUNITS (mode
) == 1
2582 && (GET_MODE_BITSIZE (mode
) <= 32
2583 || ((TARGET_HARD_FLOAT
&& TARGET_FPRS
) && mode
== DFmode
)))
2585 rtx reg
= gen_reg_rtx (Pmode
);
2586 emit_insn (gen_elf_high (reg
, (x
)));
2587 return gen_rtx_LO_SUM (Pmode
, reg
, (x
));
2589 else if (TARGET_MACHO
&& TARGET_32BIT
&& TARGET_NO_TOC
2592 && ! MACHO_DYNAMIC_NO_PIC_P
2594 && GET_CODE (x
) != CONST_INT
2595 && GET_CODE (x
) != CONST_DOUBLE
2597 && ((TARGET_HARD_FLOAT
&& TARGET_FPRS
) || mode
!= DFmode
)
2601 rtx reg
= gen_reg_rtx (Pmode
);
2602 emit_insn (gen_macho_high (reg
, (x
)));
2603 return gen_rtx_LO_SUM (Pmode
, reg
, (x
));
2606 && constant_pool_expr_p (x
)
2607 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x
), Pmode
))
2609 return create_TOC_reference (x
);
2615 /* Construct the SYMBOL_REF for the tls_get_addr function. */
2617 static GTY(()) rtx rs6000_tls_symbol
;
2619 rs6000_tls_get_addr ()
2621 if (!rs6000_tls_symbol
)
2622 rs6000_tls_symbol
= init_one_libfunc ("__tls_get_addr");
2624 return rs6000_tls_symbol
;
2627 /* Construct the SYMBOL_REF for TLS GOT references. */
2629 static GTY(()) rtx rs6000_got_symbol
;
2633 if (!rs6000_got_symbol
)
2635 rs6000_got_symbol
= gen_rtx_SYMBOL_REF (Pmode
, "_GLOBAL_OFFSET_TABLE_");
2636 SYMBOL_REF_FLAGS (rs6000_got_symbol
) |= SYMBOL_FLAG_LOCAL
;
2637 SYMBOL_REF_FLAGS (rs6000_got_symbol
) |= SYMBOL_FLAG_EXTERNAL
;
2640 return rs6000_got_symbol
;
2643 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
2644 this (thread-local) address. */
2647 rs6000_legitimize_tls_address (addr
, model
)
2649 enum tls_model model
;
2653 dest
= gen_reg_rtx (Pmode
);
2654 if (model
== TLS_MODEL_LOCAL_EXEC
&& rs6000_tls_size
== 16)
2660 tlsreg
= gen_rtx_REG (Pmode
, 13);
2661 insn
= gen_tls_tprel_64 (dest
, tlsreg
, addr
);
2665 tlsreg
= gen_rtx_REG (Pmode
, 2);
2666 insn
= gen_tls_tprel_32 (dest
, tlsreg
, addr
);
2670 else if (model
== TLS_MODEL_LOCAL_EXEC
&& rs6000_tls_size
== 32)
2674 tmp
= gen_reg_rtx (Pmode
);
2677 tlsreg
= gen_rtx_REG (Pmode
, 13);
2678 insn
= gen_tls_tprel_ha_64 (tmp
, tlsreg
, addr
);
2682 tlsreg
= gen_rtx_REG (Pmode
, 2);
2683 insn
= gen_tls_tprel_ha_32 (tmp
, tlsreg
, addr
);
2687 insn
= gen_tls_tprel_lo_64 (dest
, tmp
, addr
);
2689 insn
= gen_tls_tprel_lo_32 (dest
, tmp
, addr
);
2694 rtx r3
, got
, tga
, tmp1
, tmp2
, eqv
;
2697 got
= gen_rtx_REG (Pmode
, TOC_REGISTER
);
2701 got
= gen_rtx_REG (Pmode
, RS6000_PIC_OFFSET_TABLE_REGNUM
);
2704 rtx gsym
= rs6000_got_sym ();
2705 got
= gen_reg_rtx (Pmode
);
2707 rs6000_emit_move (got
, gsym
, Pmode
);
2711 static int tls_got_labelno
= 0;
2712 rtx tempLR
, lab
, tmp3
, mem
;
2715 ASM_GENERATE_INTERNAL_LABEL (buf
, "LTLS", tls_got_labelno
++);
2716 lab
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
2717 tempLR
= gen_reg_rtx (Pmode
);
2718 tmp1
= gen_reg_rtx (Pmode
);
2719 tmp2
= gen_reg_rtx (Pmode
);
2720 tmp3
= gen_reg_rtx (Pmode
);
2721 mem
= gen_rtx_MEM (Pmode
, tmp1
);
2722 RTX_UNCHANGING_P (mem
) = 1;
2724 first
= emit_insn (gen_load_toc_v4_PIC_1b (tempLR
, lab
,
2726 emit_move_insn (tmp1
, tempLR
);
2727 emit_move_insn (tmp2
, mem
);
2728 emit_insn (gen_addsi3 (tmp3
, tmp1
, tmp2
));
2729 last
= emit_move_insn (got
, tmp3
);
2730 REG_NOTES (last
) = gen_rtx_EXPR_LIST (REG_EQUAL
, gsym
,
2732 REG_NOTES (first
) = gen_rtx_INSN_LIST (REG_LIBCALL
, last
,
2734 REG_NOTES (last
) = gen_rtx_INSN_LIST (REG_RETVAL
, first
,
2740 if (model
== TLS_MODEL_GLOBAL_DYNAMIC
)
2742 r3
= gen_rtx_REG (Pmode
, 3);
2744 insn
= gen_tls_gd_64 (r3
, got
, addr
);
2746 insn
= gen_tls_gd_32 (r3
, got
, addr
);
2749 tga
= gen_rtx_MEM (Pmode
, rs6000_tls_get_addr ());
2750 insn
= gen_call_value (r3
, tga
, const0_rtx
, const0_rtx
);
2751 insn
= emit_call_insn (insn
);
2752 CONST_OR_PURE_CALL_P (insn
) = 1;
2753 use_reg (&CALL_INSN_FUNCTION_USAGE (insn
), r3
);
2754 insn
= get_insns ();
2756 emit_libcall_block (insn
, dest
, r3
, addr
);
2758 else if (model
== TLS_MODEL_LOCAL_DYNAMIC
)
2760 r3
= gen_rtx_REG (Pmode
, 3);
2762 insn
= gen_tls_ld_64 (r3
, got
);
2764 insn
= gen_tls_ld_32 (r3
, got
);
2767 tga
= gen_rtx_MEM (Pmode
, rs6000_tls_get_addr ());
2768 insn
= gen_call_value (r3
, tga
, const0_rtx
, const0_rtx
);
2769 insn
= emit_call_insn (insn
);
2770 CONST_OR_PURE_CALL_P (insn
) = 1;
2771 use_reg (&CALL_INSN_FUNCTION_USAGE (insn
), r3
);
2772 insn
= get_insns ();
2774 tmp1
= gen_reg_rtx (Pmode
);
2775 eqv
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, const0_rtx
),
2777 emit_libcall_block (insn
, tmp1
, r3
, eqv
);
2778 if (rs6000_tls_size
== 16)
2781 insn
= gen_tls_dtprel_64 (dest
, tmp1
, addr
);
2783 insn
= gen_tls_dtprel_32 (dest
, tmp1
, addr
);
2785 else if (rs6000_tls_size
== 32)
2787 tmp2
= gen_reg_rtx (Pmode
);
2789 insn
= gen_tls_dtprel_ha_64 (tmp2
, tmp1
, addr
);
2791 insn
= gen_tls_dtprel_ha_32 (tmp2
, tmp1
, addr
);
2794 insn
= gen_tls_dtprel_lo_64 (dest
, tmp2
, addr
);
2796 insn
= gen_tls_dtprel_lo_32 (dest
, tmp2
, addr
);
2800 tmp2
= gen_reg_rtx (Pmode
);
2802 insn
= gen_tls_got_dtprel_64 (tmp2
, got
, addr
);
2804 insn
= gen_tls_got_dtprel_32 (tmp2
, got
, addr
);
2806 insn
= gen_rtx_SET (Pmode
, dest
,
2807 gen_rtx_PLUS (Pmode
, tmp2
, tmp1
));
2813 /* IE, or 64 bit offset LE. */
2814 tmp2
= gen_reg_rtx (Pmode
);
2816 insn
= gen_tls_got_tprel_64 (tmp2
, got
, addr
);
2818 insn
= gen_tls_got_tprel_32 (tmp2
, got
, addr
);
2821 insn
= gen_tls_tls_64 (dest
, tmp2
, addr
);
2823 insn
= gen_tls_tls_32 (dest
, tmp2
, addr
);
2831 /* Return 1 if X is a SYMBOL_REF for a TLS symbol. This is used in
2832 instruction definitions. */
2835 rs6000_tls_symbol_ref (x
, mode
)
2837 enum machine_mode mode ATTRIBUTE_UNUSED
;
2839 return RS6000_SYMBOL_REF_TLS_P (x
);
2842 /* Return 1 if X contains a thread-local symbol. */
2845 rs6000_tls_referenced_p (x
)
2848 return for_each_rtx (&x
, &rs6000_tls_symbol_ref_1
, 0);
2851 /* Return 1 if *X is a thread-local symbol. This is the same as
2852 rs6000_tls_symbol_ref except for the type of the unused argument. */
2855 rs6000_tls_symbol_ref_1 (x
, data
)
2857 void *data ATTRIBUTE_UNUSED
;
2859 return RS6000_SYMBOL_REF_TLS_P (*x
);
2862 /* The convention appears to be to define this wherever it is used.
2863 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
2864 is now used here. */
2865 #ifndef REG_MODE_OK_FOR_BASE_P
2866 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
2869 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
2870 replace the input X, or the original X if no replacement is called for.
2871 The output parameter *WIN is 1 if the calling macro should goto WIN,
2874 For RS/6000, we wish to handle large displacements off a base
2875 register by splitting the addend across an addiu/addis and the mem insn.
2876 This cuts number of extra insns needed from 3 to 1.
2878 On Darwin, we use this to generate code for floating point constants.
2879 A movsf_low is generated so we wind up with 2 instructions rather than 3.
2880 The Darwin code is inside #if TARGET_MACHO because only then is
2881 machopic_function_base_name() defined. */
2883 rs6000_legitimize_reload_address (x
, mode
, opnum
, type
, ind_levels
, win
)
2885 enum machine_mode mode
;
2888 int ind_levels ATTRIBUTE_UNUSED
;
2891 /* We must recognize output that we have already generated ourselves. */
2892 if (GET_CODE (x
) == PLUS
2893 && GET_CODE (XEXP (x
, 0)) == PLUS
2894 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == REG
2895 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
2896 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
2898 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2899 BASE_REG_CLASS
, GET_MODE (x
), VOIDmode
, 0, 0,
2900 opnum
, (enum reload_type
)type
);
2906 if (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
2907 && GET_CODE (x
) == LO_SUM
2908 && GET_CODE (XEXP (x
, 0)) == PLUS
2909 && XEXP (XEXP (x
, 0), 0) == pic_offset_table_rtx
2910 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == HIGH
2911 && GET_CODE (XEXP (XEXP (XEXP (x
, 0), 1), 0)) == CONST
2912 && XEXP (XEXP (XEXP (x
, 0), 1), 0) == XEXP (x
, 1)
2913 && GET_CODE (XEXP (XEXP (x
, 1), 0)) == MINUS
2914 && GET_CODE (XEXP (XEXP (XEXP (x
, 1), 0), 0)) == SYMBOL_REF
2915 && GET_CODE (XEXP (XEXP (XEXP (x
, 1), 0), 1)) == SYMBOL_REF
)
2917 /* Result of previous invocation of this function on Darwin
2918 floating point constant. */
2919 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2920 BASE_REG_CLASS
, Pmode
, VOIDmode
, 0, 0,
2921 opnum
, (enum reload_type
)type
);
2926 if (GET_CODE (x
) == PLUS
2927 && GET_CODE (XEXP (x
, 0)) == REG
2928 && REGNO (XEXP (x
, 0)) < FIRST_PSEUDO_REGISTER
2929 && REG_MODE_OK_FOR_BASE_P (XEXP (x
, 0), mode
)
2930 && GET_CODE (XEXP (x
, 1)) == CONST_INT
2931 && !SPE_VECTOR_MODE (mode
)
2932 && !ALTIVEC_VECTOR_MODE (mode
))
2934 HOST_WIDE_INT val
= INTVAL (XEXP (x
, 1));
2935 HOST_WIDE_INT low
= ((val
& 0xffff) ^ 0x8000) - 0x8000;
2937 = (((val
- low
) & 0xffffffff) ^ 0x80000000) - 0x80000000;
2939 /* Check for 32-bit overflow. */
2940 if (high
+ low
!= val
)
2946 /* Reload the high part into a base reg; leave the low part
2947 in the mem directly. */
2949 x
= gen_rtx_PLUS (GET_MODE (x
),
2950 gen_rtx_PLUS (GET_MODE (x
), XEXP (x
, 0),
2954 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2955 BASE_REG_CLASS
, GET_MODE (x
), VOIDmode
, 0, 0,
2956 opnum
, (enum reload_type
)type
);
2961 if (GET_CODE (x
) == SYMBOL_REF
2962 && DEFAULT_ABI
== ABI_DARWIN
2963 && !ALTIVEC_VECTOR_MODE (mode
)
2966 /* Darwin load of floating point constant. */
2967 rtx offset
= gen_rtx (CONST
, Pmode
,
2968 gen_rtx (MINUS
, Pmode
, x
,
2969 gen_rtx (SYMBOL_REF
, Pmode
,
2970 machopic_function_base_name ())));
2971 x
= gen_rtx (LO_SUM
, GET_MODE (x
),
2972 gen_rtx (PLUS
, Pmode
, pic_offset_table_rtx
,
2973 gen_rtx (HIGH
, Pmode
, offset
)), offset
);
2974 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2975 BASE_REG_CLASS
, Pmode
, VOIDmode
, 0, 0,
2976 opnum
, (enum reload_type
)type
);
2980 if (GET_CODE (x
) == SYMBOL_REF
2981 && DEFAULT_ABI
== ABI_DARWIN
2982 && !ALTIVEC_VECTOR_MODE (mode
)
2983 && MACHO_DYNAMIC_NO_PIC_P
)
2985 /* Darwin load of floating point constant. */
2986 x
= gen_rtx (LO_SUM
, GET_MODE (x
),
2987 gen_rtx (HIGH
, Pmode
, x
), x
);
2988 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2989 BASE_REG_CLASS
, Pmode
, VOIDmode
, 0, 0,
2990 opnum
, (enum reload_type
)type
);
2996 && constant_pool_expr_p (x
)
2997 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x
), mode
))
2999 (x
) = create_TOC_reference (x
);
3007 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
3008 that is a valid memory address for an instruction.
3009 The MODE argument is the machine mode for the MEM expression
3010 that wants to use this address.
3012 On the RS/6000, there are four valid address: a SYMBOL_REF that
3013 refers to a constant pool entry of an address (or the sum of it
3014 plus a constant), a short (16-bit signed) constant plus a register,
3015 the sum of two registers, or a register indirect, possibly with an
3016 auto-increment. For DFmode and DImode with a constant plus register,
3017 we must ensure that both words are addressable or PowerPC64 with offset
3020 For modes spanning multiple registers (DFmode in 32-bit GPRs,
3021 32-bit DImode, TImode), indexed addressing cannot be used because
3022 adjacent memory cells are accessed by adding word-sized offsets
3023 during assembly output. */
3025 rs6000_legitimate_address (mode
, x
, reg_ok_strict
)
3026 enum machine_mode mode
;
3030 if (RS6000_SYMBOL_REF_TLS_P (x
))
3032 if (legitimate_indirect_address_p (x
, reg_ok_strict
))
3034 if ((GET_CODE (x
) == PRE_INC
|| GET_CODE (x
) == PRE_DEC
)
3035 && !ALTIVEC_VECTOR_MODE (mode
)
3036 && !SPE_VECTOR_MODE (mode
)
3038 && legitimate_indirect_address_p (XEXP (x
, 0), reg_ok_strict
))
3040 if (legitimate_small_data_p (mode
, x
))
3042 if (legitimate_constant_pool_address_p (x
))
3044 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
3046 && GET_CODE (x
) == PLUS
3047 && GET_CODE (XEXP (x
, 0)) == REG
3048 && XEXP (x
, 0) == virtual_stack_vars_rtx
3049 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
3051 if (legitimate_offset_address_p (mode
, x
, reg_ok_strict
))
3054 && ((TARGET_HARD_FLOAT
&& TARGET_FPRS
)
3056 || (mode
!= DFmode
&& mode
!= TFmode
))
3057 && (TARGET_POWERPC64
|| mode
!= DImode
)
3058 && legitimate_indexed_address_p (x
, reg_ok_strict
))
3060 if (legitimate_lo_sum_address_p (mode
, x
, reg_ok_strict
))
3065 /* Go to LABEL if ADDR (a legitimate address expression)
3066 has an effect that depends on the machine mode it is used for.
3068 On the RS/6000 this is true of all integral offsets (since AltiVec
3069 modes don't allow them) or is a pre-increment or decrement.
3071 ??? Except that due to conceptual problems in offsettable_address_p
3072 we can't really report the problems of integral offsets. So leave
3073 this assuming that the adjustable offset must be valid for the
3074 sub-words of a TFmode operand, which is what we had before. */
3077 rs6000_mode_dependent_address (addr
)
3080 switch (GET_CODE (addr
))
3083 if (GET_CODE (XEXP (addr
, 1)) == CONST_INT
)
3085 unsigned HOST_WIDE_INT val
= INTVAL (XEXP (addr
, 1));
3086 return val
+ 12 + 0x8000 >= 0x10000;
3095 return TARGET_UPDATE
;
3104 /* Try to output insns to set TARGET equal to the constant C if it can
3105 be done in less than N insns. Do all computations in MODE.
3106 Returns the place where the output has been placed if it can be
3107 done and the insns have been emitted. If it would take more than N
3108 insns, zero is returned and no insns and emitted. */
3111 rs6000_emit_set_const (dest
, mode
, source
, n
)
3113 enum machine_mode mode
;
3114 int n ATTRIBUTE_UNUSED
;
3116 rtx result
, insn
, set
;
3117 HOST_WIDE_INT c0
, c1
;
3119 if (mode
== QImode
|| mode
== HImode
)
3122 dest
= gen_reg_rtx (mode
);
3123 emit_insn (gen_rtx_SET (VOIDmode
, dest
, source
));
3126 else if (mode
== SImode
)
3128 result
= no_new_pseudos
? dest
: gen_reg_rtx (SImode
);
3130 emit_insn (gen_rtx_SET (VOIDmode
, result
,
3131 GEN_INT (INTVAL (source
)
3132 & (~ (HOST_WIDE_INT
) 0xffff))));
3133 emit_insn (gen_rtx_SET (VOIDmode
, dest
,
3134 gen_rtx_IOR (SImode
, result
,
3135 GEN_INT (INTVAL (source
) & 0xffff))));
3138 else if (mode
== DImode
)
3140 if (GET_CODE (source
) == CONST_INT
)
3142 c0
= INTVAL (source
);
3145 else if (GET_CODE (source
) == CONST_DOUBLE
)
3147 #if HOST_BITS_PER_WIDE_INT >= 64
3148 c0
= CONST_DOUBLE_LOW (source
);
3151 c0
= CONST_DOUBLE_LOW (source
);
3152 c1
= CONST_DOUBLE_HIGH (source
);
3158 result
= rs6000_emit_set_long_const (dest
, c0
, c1
);
3163 insn
= get_last_insn ();
3164 set
= single_set (insn
);
3165 if (! CONSTANT_P (SET_SRC (set
)))
3166 set_unique_reg_note (insn
, REG_EQUAL
, source
);
3171 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
3172 fall back to a straight forward decomposition. We do this to avoid
3173 exponential run times encountered when looking for longer sequences
3174 with rs6000_emit_set_const. */
3176 rs6000_emit_set_long_const (dest
, c1
, c2
)
3178 HOST_WIDE_INT c1
, c2
;
3180 if (!TARGET_POWERPC64
)
3182 rtx operand1
, operand2
;
3184 operand1
= operand_subword_force (dest
, WORDS_BIG_ENDIAN
== 0,
3186 operand2
= operand_subword_force (dest
, WORDS_BIG_ENDIAN
!= 0,
3188 emit_move_insn (operand1
, GEN_INT (c1
));
3189 emit_move_insn (operand2
, GEN_INT (c2
));
3193 HOST_WIDE_INT ud1
, ud2
, ud3
, ud4
;
3196 ud2
= (c1
& 0xffff0000) >> 16;
3197 #if HOST_BITS_PER_WIDE_INT >= 64
3201 ud4
= (c2
& 0xffff0000) >> 16;
3203 if ((ud4
== 0xffff && ud3
== 0xffff && ud2
== 0xffff && (ud1
& 0x8000))
3204 || (ud4
== 0 && ud3
== 0 && ud2
== 0 && ! (ud1
& 0x8000)))
3207 emit_move_insn (dest
, GEN_INT (((ud1
^ 0x8000) - 0x8000)));
3209 emit_move_insn (dest
, GEN_INT (ud1
));
3212 else if ((ud4
== 0xffff && ud3
== 0xffff && (ud2
& 0x8000))
3213 || (ud4
== 0 && ud3
== 0 && ! (ud2
& 0x8000)))
3216 emit_move_insn (dest
, GEN_INT (((ud2
<< 16) ^ 0x80000000)
3219 emit_move_insn (dest
, GEN_INT (ud2
<< 16));
3221 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
3223 else if ((ud4
== 0xffff && (ud3
& 0x8000))
3224 || (ud4
== 0 && ! (ud3
& 0x8000)))
3227 emit_move_insn (dest
, GEN_INT (((ud3
<< 16) ^ 0x80000000)
3230 emit_move_insn (dest
, GEN_INT (ud3
<< 16));
3233 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud2
)));
3234 emit_move_insn (dest
, gen_rtx_ASHIFT (DImode
, dest
, GEN_INT (16)));
3236 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
3241 emit_move_insn (dest
, GEN_INT (((ud4
<< 16) ^ 0x80000000)
3244 emit_move_insn (dest
, GEN_INT (ud4
<< 16));
3247 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud3
)));
3249 emit_move_insn (dest
, gen_rtx_ASHIFT (DImode
, dest
, GEN_INT (32)));
3251 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
,
3252 GEN_INT (ud2
<< 16)));
3254 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
3260 /* Emit a move from SOURCE to DEST in mode MODE. */
3262 rs6000_emit_move (dest
, source
, mode
)
3265 enum machine_mode mode
;
3269 operands
[1] = source
;
3271 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
3272 if (GET_CODE (operands
[1]) == CONST_DOUBLE
3273 && ! FLOAT_MODE_P (mode
)
3274 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
3276 /* FIXME. This should never happen. */
3277 /* Since it seems that it does, do the safe thing and convert
3279 operands
[1] = gen_int_mode (CONST_DOUBLE_LOW (operands
[1]), mode
);
3281 if (GET_CODE (operands
[1]) == CONST_DOUBLE
3282 && ! FLOAT_MODE_P (mode
)
3283 && ((CONST_DOUBLE_HIGH (operands
[1]) == 0
3284 && CONST_DOUBLE_LOW (operands
[1]) >= 0)
3285 || (CONST_DOUBLE_HIGH (operands
[1]) == -1
3286 && CONST_DOUBLE_LOW (operands
[1]) < 0)))
3289 /* Check if GCC is setting up a block move that will end up using FP
3290 registers as temporaries. We must make sure this is acceptable. */
3291 if (GET_CODE (operands
[0]) == MEM
3292 && GET_CODE (operands
[1]) == MEM
3294 && (SLOW_UNALIGNED_ACCESS (DImode
, MEM_ALIGN (operands
[0]))
3295 || SLOW_UNALIGNED_ACCESS (DImode
, MEM_ALIGN (operands
[1])))
3296 && ! (SLOW_UNALIGNED_ACCESS (SImode
, (MEM_ALIGN (operands
[0]) > 32
3297 ? 32 : MEM_ALIGN (operands
[0])))
3298 || SLOW_UNALIGNED_ACCESS (SImode
, (MEM_ALIGN (operands
[1]) > 32
3300 : MEM_ALIGN (operands
[1]))))
3301 && ! MEM_VOLATILE_P (operands
[0])
3302 && ! MEM_VOLATILE_P (operands
[1]))
3304 emit_move_insn (adjust_address (operands
[0], SImode
, 0),
3305 adjust_address (operands
[1], SImode
, 0));
3306 emit_move_insn (adjust_address (operands
[0], SImode
, 4),
3307 adjust_address (operands
[1], SImode
, 4));
3311 if (!no_new_pseudos
)
3313 if (GET_CODE (operands
[1]) == MEM
&& optimize
> 0
3314 && (mode
== QImode
|| mode
== HImode
|| mode
== SImode
)
3315 && GET_MODE_SIZE (mode
) < GET_MODE_SIZE (word_mode
))
3317 rtx reg
= gen_reg_rtx (word_mode
);
3319 emit_insn (gen_rtx_SET (word_mode
, reg
,
3320 gen_rtx_ZERO_EXTEND (word_mode
,
3322 operands
[1] = gen_lowpart (mode
, reg
);
3324 if (GET_CODE (operands
[0]) != REG
)
3325 operands
[1] = force_reg (mode
, operands
[1]);
3328 if (mode
== SFmode
&& ! TARGET_POWERPC
3329 && TARGET_HARD_FLOAT
&& TARGET_FPRS
3330 && GET_CODE (operands
[0]) == MEM
)
3334 if (reload_in_progress
|| reload_completed
)
3335 regnum
= true_regnum (operands
[1]);
3336 else if (GET_CODE (operands
[1]) == REG
)
3337 regnum
= REGNO (operands
[1]);
3341 /* If operands[1] is a register, on POWER it may have
3342 double-precision data in it, so truncate it to single
3344 if (FP_REGNO_P (regnum
) || regnum
>= FIRST_PSEUDO_REGISTER
)
3347 newreg
= (no_new_pseudos
? operands
[1] : gen_reg_rtx (mode
));
3348 emit_insn (gen_aux_truncdfsf2 (newreg
, operands
[1]));
3349 operands
[1] = newreg
;
3353 /* Recognize the case where operand[1] is a reference to thread-local
3354 data and load its address to a register. */
3355 if (GET_CODE (operands
[1]) == SYMBOL_REF
)
3357 enum tls_model model
= SYMBOL_REF_TLS_MODEL (operands
[1]);
3359 operands
[1] = rs6000_legitimize_tls_address (operands
[1], model
);
3362 /* Handle the case where reload calls us with an invalid address. */
3363 if (reload_in_progress
&& mode
== Pmode
3364 && (! general_operand (operands
[1], mode
)
3365 || ! nonimmediate_operand (operands
[0], mode
)))
3368 /* Handle the case of CONSTANT_P_RTX. */
3369 if (GET_CODE (operands
[1]) == CONSTANT_P_RTX
)
3372 /* FIXME: In the long term, this switch statement should go away
3373 and be replaced by a sequence of tests based on things like
3379 if (CONSTANT_P (operands
[1])
3380 && GET_CODE (operands
[1]) != CONST_INT
)
3381 operands
[1] = force_const_mem (mode
, operands
[1]);
3387 if (CONSTANT_P (operands
[1])
3388 && ! easy_fp_constant (operands
[1], mode
))
3389 operands
[1] = force_const_mem (mode
, operands
[1]);
3400 if (CONSTANT_P (operands
[1])
3401 && !easy_vector_constant (operands
[1], mode
))
3402 operands
[1] = force_const_mem (mode
, operands
[1]);
3407 /* Use default pattern for address of ELF small data */
3410 && DEFAULT_ABI
== ABI_V4
3411 && (GET_CODE (operands
[1]) == SYMBOL_REF
3412 || GET_CODE (operands
[1]) == CONST
)
3413 && small_data_operand (operands
[1], mode
))
3415 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
3419 if (DEFAULT_ABI
== ABI_V4
3420 && mode
== Pmode
&& mode
== SImode
3421 && flag_pic
== 1 && got_operand (operands
[1], mode
))
3423 emit_insn (gen_movsi_got (operands
[0], operands
[1]));
3427 if ((TARGET_ELF
|| DEFAULT_ABI
== ABI_DARWIN
)
3431 && CONSTANT_P (operands
[1])
3432 && GET_CODE (operands
[1]) != HIGH
3433 && GET_CODE (operands
[1]) != CONST_INT
)
3435 rtx target
= (no_new_pseudos
? operands
[0] : gen_reg_rtx (mode
));
3437 /* If this is a function address on -mcall-aixdesc,
3438 convert it to the address of the descriptor. */
3439 if (DEFAULT_ABI
== ABI_AIX
3440 && GET_CODE (operands
[1]) == SYMBOL_REF
3441 && XSTR (operands
[1], 0)[0] == '.')
3443 const char *name
= XSTR (operands
[1], 0);
3445 while (*name
== '.')
3447 new_ref
= gen_rtx_SYMBOL_REF (Pmode
, name
);
3448 CONSTANT_POOL_ADDRESS_P (new_ref
)
3449 = CONSTANT_POOL_ADDRESS_P (operands
[1]);
3450 SYMBOL_REF_FLAGS (new_ref
) = SYMBOL_REF_FLAGS (operands
[1]);
3451 SYMBOL_REF_USED (new_ref
) = SYMBOL_REF_USED (operands
[1]);
3452 SYMBOL_REF_DECL (new_ref
) = SYMBOL_REF_DECL (operands
[1]);
3453 operands
[1] = new_ref
;
3456 if (DEFAULT_ABI
== ABI_DARWIN
)
3459 if (MACHO_DYNAMIC_NO_PIC_P
)
3461 /* Take care of any required data indirection. */
3462 operands
[1] = rs6000_machopic_legitimize_pic_address (
3463 operands
[1], mode
, operands
[0]);
3464 if (operands
[0] != operands
[1])
3465 emit_insn (gen_rtx_SET (VOIDmode
,
3466 operands
[0], operands
[1]));
3470 emit_insn (gen_macho_high (target
, operands
[1]));
3471 emit_insn (gen_macho_low (operands
[0], target
, operands
[1]));
3475 emit_insn (gen_elf_high (target
, operands
[1]));
3476 emit_insn (gen_elf_low (operands
[0], target
, operands
[1]));
3480 /* If this is a SYMBOL_REF that refers to a constant pool entry,
3481 and we have put it in the TOC, we just need to make a TOC-relative
3484 && GET_CODE (operands
[1]) == SYMBOL_REF
3485 && constant_pool_expr_p (operands
[1])
3486 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands
[1]),
3487 get_pool_mode (operands
[1])))
3489 operands
[1] = create_TOC_reference (operands
[1]);
3491 else if (mode
== Pmode
3492 && CONSTANT_P (operands
[1])
3493 && ((GET_CODE (operands
[1]) != CONST_INT
3494 && ! easy_fp_constant (operands
[1], mode
))
3495 || (GET_CODE (operands
[1]) == CONST_INT
3496 && num_insns_constant (operands
[1], mode
) > 2)
3497 || (GET_CODE (operands
[0]) == REG
3498 && FP_REGNO_P (REGNO (operands
[0]))))
3499 && GET_CODE (operands
[1]) != HIGH
3500 && ! legitimate_constant_pool_address_p (operands
[1])
3501 && ! toc_relative_expr_p (operands
[1]))
3503 /* Emit a USE operation so that the constant isn't deleted if
3504 expensive optimizations are turned on because nobody
3505 references it. This should only be done for operands that
3506 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
3507 This should not be done for operands that contain LABEL_REFs.
3508 For now, we just handle the obvious case. */
3509 if (GET_CODE (operands
[1]) != LABEL_REF
)
3510 emit_insn (gen_rtx_USE (VOIDmode
, operands
[1]));
3513 /* Darwin uses a special PIC legitimizer. */
3514 if (DEFAULT_ABI
== ABI_DARWIN
&& MACHOPIC_INDIRECT
)
3517 rs6000_machopic_legitimize_pic_address (operands
[1], mode
,
3519 if (operands
[0] != operands
[1])
3520 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
3525 /* If we are to limit the number of things we put in the TOC and
3526 this is a symbol plus a constant we can add in one insn,
3527 just put the symbol in the TOC and add the constant. Don't do
3528 this if reload is in progress. */
3529 if (GET_CODE (operands
[1]) == CONST
3530 && TARGET_NO_SUM_IN_TOC
&& ! reload_in_progress
3531 && GET_CODE (XEXP (operands
[1], 0)) == PLUS
3532 && add_operand (XEXP (XEXP (operands
[1], 0), 1), mode
)
3533 && (GET_CODE (XEXP (XEXP (operands
[1], 0), 0)) == LABEL_REF
3534 || GET_CODE (XEXP (XEXP (operands
[1], 0), 0)) == SYMBOL_REF
)
3535 && ! side_effects_p (operands
[0]))
3538 force_const_mem (mode
, XEXP (XEXP (operands
[1], 0), 0));
3539 rtx other
= XEXP (XEXP (operands
[1], 0), 1);
3541 sym
= force_reg (mode
, sym
);
3543 emit_insn (gen_addsi3 (operands
[0], sym
, other
));
3545 emit_insn (gen_adddi3 (operands
[0], sym
, other
));
3549 operands
[1] = force_const_mem (mode
, operands
[1]);
3552 && constant_pool_expr_p (XEXP (operands
[1], 0))
3553 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
3554 get_pool_constant (XEXP (operands
[1], 0)),
3555 get_pool_mode (XEXP (operands
[1], 0))))
3558 = gen_rtx_MEM (mode
,
3559 create_TOC_reference (XEXP (operands
[1], 0)));
3560 set_mem_alias_set (operands
[1], get_TOC_alias_set ());
3561 RTX_UNCHANGING_P (operands
[1]) = 1;
3567 if (GET_CODE (operands
[0]) == MEM
3568 && GET_CODE (XEXP (operands
[0], 0)) != REG
3569 && ! reload_in_progress
)
3571 = replace_equiv_address (operands
[0],
3572 copy_addr_to_reg (XEXP (operands
[0], 0)));
3574 if (GET_CODE (operands
[1]) == MEM
3575 && GET_CODE (XEXP (operands
[1], 0)) != REG
3576 && ! reload_in_progress
)
3578 = replace_equiv_address (operands
[1],
3579 copy_addr_to_reg (XEXP (operands
[1], 0)));
3582 emit_insn (gen_rtx_PARALLEL (VOIDmode
,
3584 gen_rtx_SET (VOIDmode
,
3585 operands
[0], operands
[1]),
3586 gen_rtx_CLOBBER (VOIDmode
,
3587 gen_rtx_SCRATCH (SImode
)))));
3596 /* Above, we may have called force_const_mem which may have returned
3597 an invalid address. If we can, fix this up; otherwise, reload will
3598 have to deal with it. */
3599 if (GET_CODE (operands
[1]) == MEM
&& ! reload_in_progress
)
3600 operands
[1] = validize_mem (operands
[1]);
3603 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
3606 /* Initialize a variable CUM of type CUMULATIVE_ARGS
3607 for a call to a function whose data type is FNTYPE.
3608 For a library call, FNTYPE is 0.
3610 For incoming args we set the number of arguments in the prototype large
3611 so we never return a PARALLEL. */
3614 init_cumulative_args (cum
, fntype
, libname
, incoming
)
3615 CUMULATIVE_ARGS
*cum
;
3617 rtx libname ATTRIBUTE_UNUSED
;
3620 static CUMULATIVE_ARGS zero_cumulative
;
3622 *cum
= zero_cumulative
;
3624 cum
->fregno
= FP_ARG_MIN_REG
;
3625 cum
->vregno
= ALTIVEC_ARG_MIN_REG
;
3626 cum
->prototype
= (fntype
&& TYPE_ARG_TYPES (fntype
));
3627 cum
->call_cookie
= CALL_NORMAL
;
3628 cum
->sysv_gregno
= GP_ARG_MIN_REG
;
3631 cum
->nargs_prototype
= 1000; /* don't return a PARALLEL */
3633 else if (cum
->prototype
)
3634 cum
->nargs_prototype
= (list_length (TYPE_ARG_TYPES (fntype
)) - 1
3635 + (TYPE_MODE (TREE_TYPE (fntype
)) == BLKmode
3636 || RETURN_IN_MEMORY (TREE_TYPE (fntype
))));
3639 cum
->nargs_prototype
= 0;
3641 cum
->orig_nargs
= cum
->nargs_prototype
;
3643 /* Check for a longcall attribute. */
3645 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype
))
3646 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype
)))
3647 cum
->call_cookie
= CALL_LONG
;
3649 if (TARGET_DEBUG_ARG
)
3651 fprintf (stderr
, "\ninit_cumulative_args:");
3654 tree ret_type
= TREE_TYPE (fntype
);
3655 fprintf (stderr
, " ret code = %s,",
3656 tree_code_name
[ (int)TREE_CODE (ret_type
) ]);
3659 if (cum
->call_cookie
& CALL_LONG
)
3660 fprintf (stderr
, " longcall,");
3662 fprintf (stderr
, " proto = %d, nargs = %d\n",
3663 cum
->prototype
, cum
->nargs_prototype
);
3667 /* If defined, a C expression which determines whether, and in which
3668 direction, to pad out an argument with extra space. The value
3669 should be of type `enum direction': either `upward' to pad above
3670 the argument, `downward' to pad below, or `none' to inhibit
3673 For the AIX ABI structs are always stored left shifted in their
3677 function_arg_padding (mode
, type
)
3678 enum machine_mode mode
;
3681 if (type
!= 0 && AGGREGATE_TYPE_P (type
))
3684 /* This is the default definition. */
3685 return (! BYTES_BIG_ENDIAN
3688 ? (type
&& TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
3689 && int_size_in_bytes (type
) < (PARM_BOUNDARY
/ BITS_PER_UNIT
))
3690 : GET_MODE_BITSIZE (mode
) < PARM_BOUNDARY
)
3691 ? downward
: upward
));
3694 /* If defined, a C expression that gives the alignment boundary, in bits,
3695 of an argument with the specified mode and type. If it is not defined,
3696 PARM_BOUNDARY is used for all arguments.
3698 V.4 wants long longs to be double word aligned. */
3701 function_arg_boundary (mode
, type
)
3702 enum machine_mode mode
;
3703 tree type ATTRIBUTE_UNUSED
;
3705 if (DEFAULT_ABI
== ABI_V4
&& (mode
== DImode
|| mode
== DFmode
))
3707 else if (SPE_VECTOR_MODE (mode
))
3709 else if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
3712 return PARM_BOUNDARY
;
3715 /* Update the data in CUM to advance over an argument
3716 of mode MODE and data type TYPE.
3717 (TYPE is null for libcalls where that information may not be available.) */
3720 function_arg_advance (cum
, mode
, type
, named
)
3721 CUMULATIVE_ARGS
*cum
;
3722 enum machine_mode mode
;
3726 cum
->nargs_prototype
--;
3728 if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
3730 if (cum
->vregno
<= ALTIVEC_ARG_MAX_REG
&& cum
->nargs_prototype
>= 0)
3733 cum
->words
+= RS6000_ARG_SIZE (mode
, type
);
3735 else if (TARGET_SPE_ABI
&& TARGET_SPE
&& SPE_VECTOR_MODE (mode
)
3736 && named
&& cum
->sysv_gregno
<= GP_ARG_MAX_REG
)
3738 else if (DEFAULT_ABI
== ABI_V4
)
3740 if (TARGET_HARD_FLOAT
&& TARGET_FPRS
3741 && (mode
== SFmode
|| mode
== DFmode
))
3743 if (cum
->fregno
<= FP_ARG_V4_MAX_REG
)
3748 cum
->words
+= cum
->words
& 1;
3749 cum
->words
+= RS6000_ARG_SIZE (mode
, type
);
3755 int gregno
= cum
->sysv_gregno
;
3757 /* Aggregates and IEEE quad get passed by reference. */
3758 if ((type
&& AGGREGATE_TYPE_P (type
))
3762 n_words
= RS6000_ARG_SIZE (mode
, type
);
3764 /* Long long and SPE vectors are put in odd registers. */
3765 if (n_words
== 2 && (gregno
& 1) == 0)
3768 /* Long long and SPE vectors are not split between registers
3770 if (gregno
+ n_words
- 1 > GP_ARG_MAX_REG
)
3772 /* Long long is aligned on the stack. */
3774 cum
->words
+= cum
->words
& 1;
3775 cum
->words
+= n_words
;
3778 /* Note: continuing to accumulate gregno past when we've started
3779 spilling to the stack indicates the fact that we've started
3780 spilling to the stack to expand_builtin_saveregs. */
3781 cum
->sysv_gregno
= gregno
+ n_words
;
3784 if (TARGET_DEBUG_ARG
)
3786 fprintf (stderr
, "function_adv: words = %2d, fregno = %2d, ",
3787 cum
->words
, cum
->fregno
);
3788 fprintf (stderr
, "gregno = %2d, nargs = %4d, proto = %d, ",
3789 cum
->sysv_gregno
, cum
->nargs_prototype
, cum
->prototype
);
3790 fprintf (stderr
, "mode = %4s, named = %d\n",
3791 GET_MODE_NAME (mode
), named
);
3796 int align
= (TARGET_32BIT
&& (cum
->words
& 1) != 0
3797 && function_arg_boundary (mode
, type
) == 64) ? 1 : 0;
3799 cum
->words
+= align
+ RS6000_ARG_SIZE (mode
, type
);
3801 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
3802 && TARGET_HARD_FLOAT
&& TARGET_FPRS
)
3803 cum
->fregno
+= (mode
== TFmode
? 2 : 1);
3805 if (TARGET_DEBUG_ARG
)
3807 fprintf (stderr
, "function_adv: words = %2d, fregno = %2d, ",
3808 cum
->words
, cum
->fregno
);
3809 fprintf (stderr
, "nargs = %4d, proto = %d, mode = %4s, ",
3810 cum
->nargs_prototype
, cum
->prototype
, GET_MODE_NAME (mode
));
3811 fprintf (stderr
, "named = %d, align = %d\n", named
, align
);
3816 /* Determine where to put an argument to a function.
3817 Value is zero to push the argument on the stack,
3818 or a hard register in which to store the argument.
3820 MODE is the argument's machine mode.
3821 TYPE is the data type of the argument (as a tree).
3822 This is null for libcalls where that information may
3824 CUM is a variable of type CUMULATIVE_ARGS which gives info about
3825 the preceding args and about the function being called.
3826 NAMED is nonzero if this argument is a named parameter
3827 (otherwise it is an extra parameter matching an ellipsis).
3829 On RS/6000 the first eight words of non-FP are normally in registers
3830 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
3831 Under V.4, the first 8 FP args are in registers.
3833 If this is floating-point and no prototype is specified, we use
3834 both an FP and integer register (or possibly FP reg and stack). Library
3835 functions (when TYPE is zero) always have the proper types for args,
3836 so we can pass the FP value just in one register. emit_library_function
3837 doesn't support PARALLEL anyway. */
3840 function_arg (cum
, mode
, type
, named
)
3841 CUMULATIVE_ARGS
*cum
;
3842 enum machine_mode mode
;
3846 enum rs6000_abi abi
= DEFAULT_ABI
;
3848 /* Return a marker to indicate whether CR1 needs to set or clear the
3849 bit that V.4 uses to say fp args were passed in registers.
3850 Assume that we don't need the marker for software floating point,
3851 or compiler generated library calls. */
3852 if (mode
== VOIDmode
)
3855 && cum
->nargs_prototype
< 0
3856 && type
&& (cum
->prototype
|| TARGET_NO_PROTOTYPE
))
3858 /* For the SPE, we need to crxor CR6 always. */
3860 return GEN_INT (cum
->call_cookie
| CALL_V4_SET_FP_ARGS
);
3861 else if (TARGET_HARD_FLOAT
&& TARGET_FPRS
)
3862 return GEN_INT (cum
->call_cookie
3863 | ((cum
->fregno
== FP_ARG_MIN_REG
)
3864 ? CALL_V4_SET_FP_ARGS
3865 : CALL_V4_CLEAR_FP_ARGS
));
3868 return GEN_INT (cum
->call_cookie
);
3871 if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
3873 if (named
&& cum
->vregno
<= ALTIVEC_ARG_MAX_REG
)
3874 return gen_rtx_REG (mode
, cum
->vregno
);
3878 else if (TARGET_SPE_ABI
&& TARGET_SPE
&& SPE_VECTOR_MODE (mode
) && named
)
3880 if (cum
->sysv_gregno
<= GP_ARG_MAX_REG
)
3881 return gen_rtx_REG (mode
, cum
->sysv_gregno
);
3885 else if (abi
== ABI_V4
)
3887 if (TARGET_HARD_FLOAT
&& TARGET_FPRS
3888 && (mode
== SFmode
|| mode
== DFmode
))
3890 if (cum
->fregno
<= FP_ARG_V4_MAX_REG
)
3891 return gen_rtx_REG (mode
, cum
->fregno
);
3898 int gregno
= cum
->sysv_gregno
;
3900 /* Aggregates and IEEE quad get passed by reference. */
3901 if ((type
&& AGGREGATE_TYPE_P (type
))
3905 n_words
= RS6000_ARG_SIZE (mode
, type
);
3907 /* Long long and SPE vectors are put in odd registers. */
3908 if (n_words
== 2 && (gregno
& 1) == 0)
3911 /* Long long and SPE vectors are not split between registers
3913 if (gregno
+ n_words
- 1 <= GP_ARG_MAX_REG
)
3915 /* SPE vectors in ... get split into 2 registers. */
3916 if (TARGET_SPE
&& TARGET_SPE_ABI
3917 && SPE_VECTOR_MODE (mode
) && !named
)
3920 enum machine_mode m
= SImode
;
3922 r1
= gen_rtx_REG (m
, gregno
);
3923 r1
= gen_rtx_EXPR_LIST (m
, r1
, const0_rtx
);
3924 r2
= gen_rtx_REG (m
, gregno
+ 1);
3925 r2
= gen_rtx_EXPR_LIST (m
, r2
, GEN_INT (4));
3926 return gen_rtx_PARALLEL (mode
, gen_rtvec (2, r1
, r2
));
3928 return gen_rtx_REG (mode
, gregno
);
3936 int align
= (TARGET_32BIT
&& (cum
->words
& 1) != 0
3937 && function_arg_boundary (mode
, type
) == 64) ? 1 : 0;
3938 int align_words
= cum
->words
+ align
;
3940 if (type
&& TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
)
3943 if (USE_FP_FOR_ARG_P (*cum
, mode
, type
))
3946 || ((cum
->nargs_prototype
> 0)
3947 /* IBM AIX extended its linkage convention definition always
3948 to require FP args after register save area hole on the
3950 && (DEFAULT_ABI
!= ABI_AIX
3952 || (align_words
< GP_ARG_NUM_REG
))))
3953 return gen_rtx_REG (mode
, cum
->fregno
);
3955 return gen_rtx_PARALLEL (mode
,
3957 gen_rtx_EXPR_LIST (VOIDmode
,
3958 ((align_words
>= GP_ARG_NUM_REG
)
3961 + RS6000_ARG_SIZE (mode
, type
)
3963 /* If this is partially on the stack, then
3964 we only include the portion actually
3965 in registers here. */
3966 ? gen_rtx_REG (SImode
,
3967 GP_ARG_MIN_REG
+ align_words
)
3968 : gen_rtx_REG (mode
,
3969 GP_ARG_MIN_REG
+ align_words
))),
3971 gen_rtx_EXPR_LIST (VOIDmode
,
3972 gen_rtx_REG (mode
, cum
->fregno
),
3975 else if (align_words
< GP_ARG_NUM_REG
)
3976 return gen_rtx_REG (mode
, GP_ARG_MIN_REG
+ align_words
);
3982 /* For an arg passed partly in registers and partly in memory,
3983 this is the number of registers used.
3984 For args passed entirely in registers or entirely in memory, zero. */
3987 function_arg_partial_nregs (cum
, mode
, type
, named
)
3988 CUMULATIVE_ARGS
*cum
;
3989 enum machine_mode mode
;
3991 int named ATTRIBUTE_UNUSED
;
3993 if (DEFAULT_ABI
== ABI_V4
)
3996 if (USE_FP_FOR_ARG_P (*cum
, mode
, type
)
3997 || USE_ALTIVEC_FOR_ARG_P (*cum
, mode
, type
))
3999 if (cum
->nargs_prototype
>= 0)
4003 if (cum
->words
< GP_ARG_NUM_REG
4004 && GP_ARG_NUM_REG
< (cum
->words
+ RS6000_ARG_SIZE (mode
, type
)))
4006 int ret
= GP_ARG_NUM_REG
- cum
->words
;
4007 if (ret
&& TARGET_DEBUG_ARG
)
4008 fprintf (stderr
, "function_arg_partial_nregs: %d\n", ret
);
4016 /* A C expression that indicates when an argument must be passed by
4017 reference. If nonzero for an argument, a copy of that argument is
4018 made in memory and a pointer to the argument is passed instead of
4019 the argument itself. The pointer is passed in whatever way is
4020 appropriate for passing a pointer to that type.
4022 Under V.4, structures and unions are passed by reference.
4024 As an extension to all ABIs, variable sized types are passed by
4028 function_arg_pass_by_reference (cum
, mode
, type
, named
)
4029 CUMULATIVE_ARGS
*cum ATTRIBUTE_UNUSED
;
4030 enum machine_mode mode ATTRIBUTE_UNUSED
;
4032 int named ATTRIBUTE_UNUSED
;
4034 if (DEFAULT_ABI
== ABI_V4
4035 && ((type
&& AGGREGATE_TYPE_P (type
))
4038 if (TARGET_DEBUG_ARG
)
4039 fprintf (stderr
, "function_arg_pass_by_reference: aggregate\n");
4043 return type
&& int_size_in_bytes (type
) <= 0;
4046 /* Perform any needed actions needed for a function that is receiving a
4047 variable number of arguments.
4051 MODE and TYPE are the mode and type of the current parameter.
4053 PRETEND_SIZE is a variable that should be set to the amount of stack
4054 that must be pushed by the prolog to pretend that our caller pushed
4057 Normally, this macro will push all remaining incoming registers on the
4058 stack and set PRETEND_SIZE to the length of the registers pushed. */
4061 setup_incoming_varargs (cum
, mode
, type
, pretend_size
, no_rtl
)
4062 CUMULATIVE_ARGS
*cum
;
4063 enum machine_mode mode
;
4065 int *pretend_size ATTRIBUTE_UNUSED
;
4069 CUMULATIVE_ARGS next_cum
;
4070 int reg_size
= TARGET_32BIT
? 4 : 8;
4071 rtx save_area
= NULL_RTX
, mem
;
4072 int first_reg_offset
, set
;
4076 fntype
= TREE_TYPE (current_function_decl
);
4077 stdarg_p
= (TYPE_ARG_TYPES (fntype
) != 0
4078 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype
)))
4079 != void_type_node
));
4081 /* For varargs, we do not want to skip the dummy va_dcl argument.
4082 For stdargs, we do want to skip the last named argument. */
4085 function_arg_advance (&next_cum
, mode
, type
, 1);
4087 if (DEFAULT_ABI
== ABI_V4
)
4089 /* Indicate to allocate space on the stack for varargs save area. */
4090 cfun
->machine
->sysv_varargs_p
= 1;
4092 save_area
= plus_constant (virtual_stack_vars_rtx
,
4093 - RS6000_VARARGS_SIZE
);
4095 first_reg_offset
= next_cum
.sysv_gregno
- GP_ARG_MIN_REG
;
4099 first_reg_offset
= next_cum
.words
;
4100 save_area
= virtual_incoming_args_rtx
;
4101 cfun
->machine
->sysv_varargs_p
= 0;
4103 if (MUST_PASS_IN_STACK (mode
, type
))
4104 first_reg_offset
+= RS6000_ARG_SIZE (TYPE_MODE (type
), type
);
4107 set
= get_varargs_alias_set ();
4108 if (! no_rtl
&& first_reg_offset
< GP_ARG_NUM_REG
)
4110 mem
= gen_rtx_MEM (BLKmode
,
4111 plus_constant (save_area
,
4112 first_reg_offset
* reg_size
)),
4113 set_mem_alias_set (mem
, set
);
4114 set_mem_align (mem
, BITS_PER_WORD
);
4116 move_block_from_reg (GP_ARG_MIN_REG
+ first_reg_offset
, mem
,
4117 GP_ARG_NUM_REG
- first_reg_offset
);
4120 /* Save FP registers if needed. */
4121 if (DEFAULT_ABI
== ABI_V4
4122 && TARGET_HARD_FLOAT
&& TARGET_FPRS
4124 && next_cum
.fregno
<= FP_ARG_V4_MAX_REG
)
4126 int fregno
= next_cum
.fregno
;
4127 rtx cr1
= gen_rtx_REG (CCmode
, CR1_REGNO
);
4128 rtx lab
= gen_label_rtx ();
4129 int off
= (GP_ARG_NUM_REG
* reg_size
) + ((fregno
- FP_ARG_MIN_REG
) * 8);
4131 emit_jump_insn (gen_rtx_SET (VOIDmode
,
4133 gen_rtx_IF_THEN_ELSE (VOIDmode
,
4134 gen_rtx_NE (VOIDmode
, cr1
,
4136 gen_rtx_LABEL_REF (VOIDmode
, lab
),
4139 while (fregno
<= FP_ARG_V4_MAX_REG
)
4141 mem
= gen_rtx_MEM (DFmode
, plus_constant (save_area
, off
));
4142 set_mem_alias_set (mem
, set
);
4143 emit_move_insn (mem
, gen_rtx_REG (DFmode
, fregno
));
4152 /* Create the va_list data type. */
4155 rs6000_build_va_list ()
4157 tree f_gpr
, f_fpr
, f_ovf
, f_sav
, record
, type_decl
;
4159 /* For AIX, prefer 'char *' because that's what the system
4160 header files like. */
4161 if (DEFAULT_ABI
!= ABI_V4
)
4162 return build_pointer_type (char_type_node
);
4164 record
= (*lang_hooks
.types
.make_type
) (RECORD_TYPE
);
4165 type_decl
= build_decl (TYPE_DECL
, get_identifier ("__va_list_tag"), record
);
4167 f_gpr
= build_decl (FIELD_DECL
, get_identifier ("gpr"),
4168 unsigned_char_type_node
);
4169 f_fpr
= build_decl (FIELD_DECL
, get_identifier ("fpr"),
4170 unsigned_char_type_node
);
4171 f_ovf
= build_decl (FIELD_DECL
, get_identifier ("overflow_arg_area"),
4173 f_sav
= build_decl (FIELD_DECL
, get_identifier ("reg_save_area"),
4176 DECL_FIELD_CONTEXT (f_gpr
) = record
;
4177 DECL_FIELD_CONTEXT (f_fpr
) = record
;
4178 DECL_FIELD_CONTEXT (f_ovf
) = record
;
4179 DECL_FIELD_CONTEXT (f_sav
) = record
;
4181 TREE_CHAIN (record
) = type_decl
;
4182 TYPE_NAME (record
) = type_decl
;
4183 TYPE_FIELDS (record
) = f_gpr
;
4184 TREE_CHAIN (f_gpr
) = f_fpr
;
4185 TREE_CHAIN (f_fpr
) = f_ovf
;
4186 TREE_CHAIN (f_ovf
) = f_sav
;
4188 layout_type (record
);
4190 /* The correct type is an array type of one element. */
4191 return build_array_type (record
, build_index_type (size_zero_node
));
4194 /* Implement va_start. */
4197 rs6000_va_start (valist
, nextarg
)
4201 HOST_WIDE_INT words
, n_gpr
, n_fpr
;
4202 tree f_gpr
, f_fpr
, f_ovf
, f_sav
;
4203 tree gpr
, fpr
, ovf
, sav
, t
;
4205 /* Only SVR4 needs something special. */
4206 if (DEFAULT_ABI
!= ABI_V4
)
4208 std_expand_builtin_va_start (valist
, nextarg
);
4212 f_gpr
= TYPE_FIELDS (TREE_TYPE (va_list_type_node
));
4213 f_fpr
= TREE_CHAIN (f_gpr
);
4214 f_ovf
= TREE_CHAIN (f_fpr
);
4215 f_sav
= TREE_CHAIN (f_ovf
);
4217 valist
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (valist
)), valist
);
4218 gpr
= build (COMPONENT_REF
, TREE_TYPE (f_gpr
), valist
, f_gpr
);
4219 fpr
= build (COMPONENT_REF
, TREE_TYPE (f_fpr
), valist
, f_fpr
);
4220 ovf
= build (COMPONENT_REF
, TREE_TYPE (f_ovf
), valist
, f_ovf
);
4221 sav
= build (COMPONENT_REF
, TREE_TYPE (f_sav
), valist
, f_sav
);
4223 /* Count number of gp and fp argument registers used. */
4224 words
= current_function_args_info
.words
;
4225 n_gpr
= current_function_args_info
.sysv_gregno
- GP_ARG_MIN_REG
;
4226 n_fpr
= current_function_args_info
.fregno
- FP_ARG_MIN_REG
;
4228 if (TARGET_DEBUG_ARG
)
4230 fputs ("va_start: words = ", stderr
);
4231 fprintf (stderr
, HOST_WIDE_INT_PRINT_DEC
, words
);
4232 fputs (", n_gpr = ", stderr
);
4233 fprintf (stderr
, HOST_WIDE_INT_PRINT_DEC
, n_gpr
);
4234 fputs (", n_fpr = ", stderr
);
4235 fprintf (stderr
, HOST_WIDE_INT_PRINT_DEC
, n_fpr
);
4236 putc ('\n', stderr
);
4239 t
= build (MODIFY_EXPR
, TREE_TYPE (gpr
), gpr
, build_int_2 (n_gpr
, 0));
4240 TREE_SIDE_EFFECTS (t
) = 1;
4241 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4243 t
= build (MODIFY_EXPR
, TREE_TYPE (fpr
), fpr
, build_int_2 (n_fpr
, 0));
4244 TREE_SIDE_EFFECTS (t
) = 1;
4245 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4247 /* Find the overflow area. */
4248 t
= make_tree (TREE_TYPE (ovf
), virtual_incoming_args_rtx
);
4250 t
= build (PLUS_EXPR
, TREE_TYPE (ovf
), t
,
4251 build_int_2 (words
* UNITS_PER_WORD
, 0));
4252 t
= build (MODIFY_EXPR
, TREE_TYPE (ovf
), ovf
, t
);
4253 TREE_SIDE_EFFECTS (t
) = 1;
4254 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4256 /* Find the register save area. */
4257 t
= make_tree (TREE_TYPE (sav
), virtual_stack_vars_rtx
);
4258 t
= build (PLUS_EXPR
, TREE_TYPE (sav
), t
,
4259 build_int_2 (-RS6000_VARARGS_SIZE
, -1));
4260 t
= build (MODIFY_EXPR
, TREE_TYPE (sav
), sav
, t
);
4261 TREE_SIDE_EFFECTS (t
) = 1;
4262 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4265 /* Implement va_arg. */
4268 rs6000_va_arg (valist
, type
)
4271 tree f_gpr
, f_fpr
, f_ovf
, f_sav
;
4272 tree gpr
, fpr
, ovf
, sav
, reg
, t
, u
;
4273 int indirect_p
, size
, rsize
, n_reg
, sav_ofs
, sav_scale
;
4274 rtx lab_false
, lab_over
, addr_rtx
, r
;
4276 if (DEFAULT_ABI
!= ABI_V4
)
4278 /* Variable sized types are passed by reference. */
4279 if (int_size_in_bytes (type
) <= 0)
4281 u
= build_pointer_type (type
);
4283 /* Args grow upward. */
4284 t
= build (POSTINCREMENT_EXPR
, TREE_TYPE (valist
), valist
,
4285 build_int_2 (POINTER_SIZE
/ BITS_PER_UNIT
, 0));
4286 TREE_SIDE_EFFECTS (t
) = 1;
4288 t
= build1 (NOP_EXPR
, build_pointer_type (u
), t
);
4289 TREE_SIDE_EFFECTS (t
) = 1;
4291 t
= build1 (INDIRECT_REF
, u
, t
);
4292 TREE_SIDE_EFFECTS (t
) = 1;
4294 return expand_expr (t
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
4297 return std_expand_builtin_va_arg (valist
, type
);
4300 f_gpr
= TYPE_FIELDS (TREE_TYPE (va_list_type_node
));
4301 f_fpr
= TREE_CHAIN (f_gpr
);
4302 f_ovf
= TREE_CHAIN (f_fpr
);
4303 f_sav
= TREE_CHAIN (f_ovf
);
4305 valist
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (valist
)), valist
);
4306 gpr
= build (COMPONENT_REF
, TREE_TYPE (f_gpr
), valist
, f_gpr
);
4307 fpr
= build (COMPONENT_REF
, TREE_TYPE (f_fpr
), valist
, f_fpr
);
4308 ovf
= build (COMPONENT_REF
, TREE_TYPE (f_ovf
), valist
, f_ovf
);
4309 sav
= build (COMPONENT_REF
, TREE_TYPE (f_sav
), valist
, f_sav
);
4311 size
= int_size_in_bytes (type
);
4312 rsize
= (size
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
4314 if (AGGREGATE_TYPE_P (type
) || TYPE_MODE (type
) == TFmode
)
4316 /* Aggregates and long doubles are passed by reference. */
4322 size
= UNITS_PER_WORD
;
4325 else if (FLOAT_TYPE_P (type
) && TARGET_HARD_FLOAT
&& TARGET_FPRS
)
4327 /* FP args go in FP registers, if present. */
4336 /* Otherwise into GP registers. */
4344 /* Pull the value out of the saved registers ... */
4346 lab_false
= gen_label_rtx ();
4347 lab_over
= gen_label_rtx ();
4348 addr_rtx
= gen_reg_rtx (Pmode
);
4350 /* AltiVec vectors never go in registers. */
4351 if (!TARGET_ALTIVEC
|| TREE_CODE (type
) != VECTOR_TYPE
)
4353 TREE_THIS_VOLATILE (reg
) = 1;
4354 emit_cmp_and_jump_insns
4355 (expand_expr (reg
, NULL_RTX
, QImode
, EXPAND_NORMAL
),
4356 GEN_INT (8 - n_reg
+ 1), GE
, const1_rtx
, QImode
, 1,
4359 /* Long long is aligned in the registers. */
4362 u
= build (BIT_AND_EXPR
, TREE_TYPE (reg
), reg
,
4363 build_int_2 (n_reg
- 1, 0));
4364 u
= build (PLUS_EXPR
, TREE_TYPE (reg
), reg
, u
);
4365 u
= build (MODIFY_EXPR
, TREE_TYPE (reg
), reg
, u
);
4366 TREE_SIDE_EFFECTS (u
) = 1;
4367 expand_expr (u
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4371 t
= build (PLUS_EXPR
, ptr_type_node
, sav
, build_int_2 (sav_ofs
, 0));
4375 u
= build (POSTINCREMENT_EXPR
, TREE_TYPE (reg
), reg
,
4376 build_int_2 (n_reg
, 0));
4377 TREE_SIDE_EFFECTS (u
) = 1;
4379 u
= build1 (CONVERT_EXPR
, integer_type_node
, u
);
4380 TREE_SIDE_EFFECTS (u
) = 1;
4382 u
= build (MULT_EXPR
, integer_type_node
, u
, build_int_2 (sav_scale
, 0));
4383 TREE_SIDE_EFFECTS (u
) = 1;
4385 t
= build (PLUS_EXPR
, ptr_type_node
, t
, u
);
4386 TREE_SIDE_EFFECTS (t
) = 1;
4388 r
= expand_expr (t
, addr_rtx
, Pmode
, EXPAND_NORMAL
);
4390 emit_move_insn (addr_rtx
, r
);
4392 emit_jump_insn (gen_jump (lab_over
));
4396 emit_label (lab_false
);
4398 /* ... otherwise out of the overflow area. */
4400 /* Make sure we don't find reg 7 for the next int arg.
4402 All AltiVec vectors go in the overflow area. So in the AltiVec
4403 case we need to get the vectors from the overflow area, but
4404 remember where the GPRs and FPRs are. */
4405 if (n_reg
> 1 && (TREE_CODE (type
) != VECTOR_TYPE
4406 || !TARGET_ALTIVEC
))
4408 t
= build (MODIFY_EXPR
, TREE_TYPE (reg
), reg
, build_int_2 (8, 0));
4409 TREE_SIDE_EFFECTS (t
) = 1;
4410 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4413 /* Care for on-stack alignment if needed. */
4420 /* AltiVec vectors are 16 byte aligned. */
4421 if (TARGET_ALTIVEC
&& TREE_CODE (type
) == VECTOR_TYPE
)
4426 t
= build (PLUS_EXPR
, TREE_TYPE (ovf
), ovf
, build_int_2 (align
, 0));
4427 t
= build (BIT_AND_EXPR
, TREE_TYPE (t
), t
, build_int_2 (-align
-1, -1));
4431 r
= expand_expr (t
, addr_rtx
, Pmode
, EXPAND_NORMAL
);
4433 emit_move_insn (addr_rtx
, r
);
4435 t
= build (PLUS_EXPR
, TREE_TYPE (t
), t
, build_int_2 (size
, 0));
4436 t
= build (MODIFY_EXPR
, TREE_TYPE (ovf
), ovf
, t
);
4437 TREE_SIDE_EFFECTS (t
) = 1;
4438 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4440 emit_label (lab_over
);
4444 r
= gen_rtx_MEM (Pmode
, addr_rtx
);
4445 set_mem_alias_set (r
, get_varargs_alias_set ());
4446 emit_move_insn (addr_rtx
, r
);
/* Register a target builtin, but only when the corresponding target
   mask bit is enabled.  */
#define def_builtin(MASK, NAME, TYPE, CODE)			\
do {								\
  if ((MASK) & target_flags)					\
    builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,	\
		      NULL, NULL_TREE);				\
} while (0)
4461 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
4463 static const struct builtin_description bdesc_3arg
[] =
4465 { MASK_ALTIVEC
, CODE_FOR_altivec_vmaddfp
, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP
},
4466 { MASK_ALTIVEC
, CODE_FOR_altivec_vmhaddshs
, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS
},
4467 { MASK_ALTIVEC
, CODE_FOR_altivec_vmhraddshs
, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS
},
4468 { MASK_ALTIVEC
, CODE_FOR_altivec_vmladduhm
, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM
},
4469 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumubm
, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM
},
4470 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsummbm
, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM
},
4471 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumuhm
, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM
},
4472 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumshm
, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM
},
4473 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumuhs
, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS
},
4474 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumshs
, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS
},
4475 { MASK_ALTIVEC
, CODE_FOR_altivec_vnmsubfp
, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP
},
4476 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_4sf
, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF
},
4477 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_4si
, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI
},
4478 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_8hi
, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI
},
4479 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_16qi
, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI
},
4480 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_4sf
, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF
},
4481 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_4si
, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI
},
4482 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_8hi
, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI
},
4483 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_16qi
, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI
},
4484 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_16qi
, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI
},
4485 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_8hi
, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI
},
4486 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_4si
, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI
},
4487 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_4sf
, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF
},
4490 /* DST operations: void foo (void *, const int, const char). */
4492 static const struct builtin_description bdesc_dst
[] =
4494 { MASK_ALTIVEC
, CODE_FOR_altivec_dst
, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST
},
4495 { MASK_ALTIVEC
, CODE_FOR_altivec_dstt
, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT
},
4496 { MASK_ALTIVEC
, CODE_FOR_altivec_dstst
, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST
},
4497 { MASK_ALTIVEC
, CODE_FOR_altivec_dststt
, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT
}
4500 /* Simple binary operations: VECc = foo (VECa, VECb). */
4502 static struct builtin_description bdesc_2arg
[] =
4504 { MASK_ALTIVEC
, CODE_FOR_addv16qi3
, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM
},
4505 { MASK_ALTIVEC
, CODE_FOR_addv8hi3
, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM
},
4506 { MASK_ALTIVEC
, CODE_FOR_addv4si3
, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM
},
4507 { MASK_ALTIVEC
, CODE_FOR_addv4sf3
, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP
},
4508 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddcuw
, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW
},
4509 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddubs
, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS
},
4510 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddsbs
, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS
},
4511 { MASK_ALTIVEC
, CODE_FOR_altivec_vadduhs
, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS
},
4512 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddshs
, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS
},
4513 { MASK_ALTIVEC
, CODE_FOR_altivec_vadduws
, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS
},
4514 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddsws
, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS
},
4515 { MASK_ALTIVEC
, CODE_FOR_andv4si3
, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND
},
4516 { MASK_ALTIVEC
, CODE_FOR_altivec_vandc
, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC
},
4517 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgub
, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB
},
4518 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgsb
, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB
},
4519 { MASK_ALTIVEC
, CODE_FOR_altivec_vavguh
, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH
},
4520 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgsh
, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH
},
4521 { MASK_ALTIVEC
, CODE_FOR_altivec_vavguw
, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW
},
4522 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgsw
, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW
},
4523 { MASK_ALTIVEC
, CODE_FOR_altivec_vcfux
, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX
},
4524 { MASK_ALTIVEC
, CODE_FOR_altivec_vcfsx
, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX
},
4525 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpbfp
, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP
},
4526 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpequb
, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB
},
4527 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpequh
, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH
},
4528 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpequw
, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW
},
4529 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpeqfp
, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP
},
4530 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgefp
, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP
},
4531 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtub
, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB
},
4532 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtsb
, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB
},
4533 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtuh
, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH
},
4534 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtsh
, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH
},
4535 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtuw
, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW
},
4536 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtsw
, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW
},
4537 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtfp
, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP
},
4538 { MASK_ALTIVEC
, CODE_FOR_altivec_vctsxs
, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS
},
4539 { MASK_ALTIVEC
, CODE_FOR_altivec_vctuxs
, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS
},
4540 { MASK_ALTIVEC
, CODE_FOR_umaxv16qi3
, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB
},
4541 { MASK_ALTIVEC
, CODE_FOR_smaxv16qi3
, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB
},
4542 { MASK_ALTIVEC
, CODE_FOR_umaxv8hi3
, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH
},
4543 { MASK_ALTIVEC
, CODE_FOR_smaxv8hi3
, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH
},
4544 { MASK_ALTIVEC
, CODE_FOR_umaxv4si3
, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW
},
4545 { MASK_ALTIVEC
, CODE_FOR_smaxv4si3
, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW
},
4546 { MASK_ALTIVEC
, CODE_FOR_smaxv4sf3
, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP
},
4547 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrghb
, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB
},
4548 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrghh
, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH
},
4549 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrghw
, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW
},
4550 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrglb
, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB
},
4551 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrglh
, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH
},
4552 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrglw
, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW
},
4553 { MASK_ALTIVEC
, CODE_FOR_uminv16qi3
, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB
},
4554 { MASK_ALTIVEC
, CODE_FOR_sminv16qi3
, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB
},
4555 { MASK_ALTIVEC
, CODE_FOR_uminv8hi3
, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH
},
4556 { MASK_ALTIVEC
, CODE_FOR_sminv8hi3
, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH
},
4557 { MASK_ALTIVEC
, CODE_FOR_uminv4si3
, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW
},
4558 { MASK_ALTIVEC
, CODE_FOR_sminv4si3
, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW
},
4559 { MASK_ALTIVEC
, CODE_FOR_sminv4sf3
, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP
},
4560 { MASK_ALTIVEC
, CODE_FOR_altivec_vmuleub
, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB
},
4561 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulesb
, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB
},
4562 { MASK_ALTIVEC
, CODE_FOR_altivec_vmuleuh
, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH
},
4563 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulesh
, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH
},
4564 { MASK_ALTIVEC
, CODE_FOR_altivec_vmuloub
, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB
},
4565 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulosb
, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB
},
4566 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulouh
, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH
},
4567 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulosh
, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH
},
4568 { MASK_ALTIVEC
, CODE_FOR_altivec_vnor
, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR
},
4569 { MASK_ALTIVEC
, CODE_FOR_iorv4si3
, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR
},
4570 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuhum
, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM
},
4571 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuwum
, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM
},
4572 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkpx
, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX
},
4573 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuhss
, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS
},
4574 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkshss
, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS
},
4575 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuwss
, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS
},
4576 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkswss
, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS
},
4577 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuhus
, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS
},
4578 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkshus
, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS
},
4579 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuwus
, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS
},
4580 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkswus
, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS
},
4581 { MASK_ALTIVEC
, CODE_FOR_altivec_vrlb
, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB
},
4582 { MASK_ALTIVEC
, CODE_FOR_altivec_vrlh
, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH
},
4583 { MASK_ALTIVEC
, CODE_FOR_altivec_vrlw
, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW
},
4584 { MASK_ALTIVEC
, CODE_FOR_altivec_vslb
, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB
},
4585 { MASK_ALTIVEC
, CODE_FOR_altivec_vslh
, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH
},
4586 { MASK_ALTIVEC
, CODE_FOR_altivec_vslw
, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW
},
4587 { MASK_ALTIVEC
, CODE_FOR_altivec_vsl
, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL
},
4588 { MASK_ALTIVEC
, CODE_FOR_altivec_vslo
, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO
},
4589 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltb
, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB
},
4590 { MASK_ALTIVEC
, CODE_FOR_altivec_vsplth
, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH
},
4591 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltw
, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW
},
4592 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrb
, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB
},
4593 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrh
, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH
},
4594 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrw
, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW
},
4595 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrab
, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB
},
4596 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrah
, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH
},
4597 { MASK_ALTIVEC
, CODE_FOR_altivec_vsraw
, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW
},
4598 { MASK_ALTIVEC
, CODE_FOR_altivec_vsr
, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR
},
4599 { MASK_ALTIVEC
, CODE_FOR_altivec_vsro
, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO
},
4600 { MASK_ALTIVEC
, CODE_FOR_subv16qi3
, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM
},
4601 { MASK_ALTIVEC
, CODE_FOR_subv8hi3
, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM
},
4602 { MASK_ALTIVEC
, CODE_FOR_subv4si3
, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM
},
4603 { MASK_ALTIVEC
, CODE_FOR_subv4sf3
, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP
},
4604 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubcuw
, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW
},
4605 { MASK_ALTIVEC
, CODE_FOR_altivec_vsububs
, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS
},
4606 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubsbs
, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS
},
4607 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubuhs
, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS
},
4608 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubshs
, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS
},
4609 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubuws
, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS
},
4610 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubsws
, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS
},
4611 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum4ubs
, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS
},
4612 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum4sbs
, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS
},
4613 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum4shs
, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS
},
4614 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum2sws
, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS
},
4615 { MASK_ALTIVEC
, CODE_FOR_altivec_vsumsws
, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS
},
4616 { MASK_ALTIVEC
, CODE_FOR_xorv4si3
, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR
},
4618 /* Place holder, leave as first spe builtin. */
4619 { 0, CODE_FOR_spe_evaddw
, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW
},
4620 { 0, CODE_FOR_spe_evand
, "__builtin_spe_evand", SPE_BUILTIN_EVAND
},
4621 { 0, CODE_FOR_spe_evandc
, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC
},
4622 { 0, CODE_FOR_spe_evdivws
, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS
},
4623 { 0, CODE_FOR_spe_evdivwu
, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU
},
4624 { 0, CODE_FOR_spe_eveqv
, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV
},
4625 { 0, CODE_FOR_spe_evfsadd
, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD
},
4626 { 0, CODE_FOR_spe_evfsdiv
, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV
},
4627 { 0, CODE_FOR_spe_evfsmul
, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL
},
4628 { 0, CODE_FOR_spe_evfssub
, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB
},
4629 { 0, CODE_FOR_spe_evmergehi
, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI
},
4630 { 0, CODE_FOR_spe_evmergehilo
, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO
},
4631 { 0, CODE_FOR_spe_evmergelo
, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO
},
4632 { 0, CODE_FOR_spe_evmergelohi
, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI
},
4633 { 0, CODE_FOR_spe_evmhegsmfaa
, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA
},
4634 { 0, CODE_FOR_spe_evmhegsmfan
, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN
},
4635 { 0, CODE_FOR_spe_evmhegsmiaa
, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA
},
4636 { 0, CODE_FOR_spe_evmhegsmian
, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN
},
4637 { 0, CODE_FOR_spe_evmhegumiaa
, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA
},
4638 { 0, CODE_FOR_spe_evmhegumian
, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN
},
4639 { 0, CODE_FOR_spe_evmhesmf
, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF
},
4640 { 0, CODE_FOR_spe_evmhesmfa
, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA
},
4641 { 0, CODE_FOR_spe_evmhesmfaaw
, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW
},
4642 { 0, CODE_FOR_spe_evmhesmfanw
, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW
},
4643 { 0, CODE_FOR_spe_evmhesmi
, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI
},
4644 { 0, CODE_FOR_spe_evmhesmia
, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA
},
4645 { 0, CODE_FOR_spe_evmhesmiaaw
, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW
},
4646 { 0, CODE_FOR_spe_evmhesmianw
, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW
},
4647 { 0, CODE_FOR_spe_evmhessf
, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF
},
4648 { 0, CODE_FOR_spe_evmhessfa
, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA
},
4649 { 0, CODE_FOR_spe_evmhessfaaw
, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW
},
4650 { 0, CODE_FOR_spe_evmhessfanw
, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW
},
4651 { 0, CODE_FOR_spe_evmhessiaaw
, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW
},
4652 { 0, CODE_FOR_spe_evmhessianw
, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW
},
4653 { 0, CODE_FOR_spe_evmheumi
, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI
},
4654 { 0, CODE_FOR_spe_evmheumia
, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA
},
4655 { 0, CODE_FOR_spe_evmheumiaaw
, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW
},
4656 { 0, CODE_FOR_spe_evmheumianw
, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW
},
4657 { 0, CODE_FOR_spe_evmheusiaaw
, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW
},
4658 { 0, CODE_FOR_spe_evmheusianw
, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW
},
4659 { 0, CODE_FOR_spe_evmhogsmfaa
, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA
},
4660 { 0, CODE_FOR_spe_evmhogsmfan
, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN
},
4661 { 0, CODE_FOR_spe_evmhogsmiaa
, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA
},
4662 { 0, CODE_FOR_spe_evmhogsmian
, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN
},
4663 { 0, CODE_FOR_spe_evmhogumiaa
, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA
},
4664 { 0, CODE_FOR_spe_evmhogumian
, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN
},
4665 { 0, CODE_FOR_spe_evmhosmf
, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF
},
4666 { 0, CODE_FOR_spe_evmhosmfa
, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA
},
4667 { 0, CODE_FOR_spe_evmhosmfaaw
, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW
},
4668 { 0, CODE_FOR_spe_evmhosmfanw
, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW
},
4669 { 0, CODE_FOR_spe_evmhosmi
, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI
},
4670 { 0, CODE_FOR_spe_evmhosmia
, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA
},
4671 { 0, CODE_FOR_spe_evmhosmiaaw
, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW
},
4672 { 0, CODE_FOR_spe_evmhosmianw
, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW
},
4673 { 0, CODE_FOR_spe_evmhossf
, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF
},
4674 { 0, CODE_FOR_spe_evmhossfa
, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA
},
4675 { 0, CODE_FOR_spe_evmhossfaaw
, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW
},
4676 { 0, CODE_FOR_spe_evmhossfanw
, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW
},
4677 { 0, CODE_FOR_spe_evmhossiaaw
, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW
},
4678 { 0, CODE_FOR_spe_evmhossianw
, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW
},
4679 { 0, CODE_FOR_spe_evmhoumi
, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI
},
4680 { 0, CODE_FOR_spe_evmhoumia
, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA
},
4681 { 0, CODE_FOR_spe_evmhoumiaaw
, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW
},
4682 { 0, CODE_FOR_spe_evmhoumianw
, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW
},
4683 { 0, CODE_FOR_spe_evmhousiaaw
, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW
},
4684 { 0, CODE_FOR_spe_evmhousianw
, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW
},
4685 { 0, CODE_FOR_spe_evmwhsmf
, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF
},
4686 { 0, CODE_FOR_spe_evmwhsmfa
, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA
},
4687 { 0, CODE_FOR_spe_evmwhsmi
, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI
},
4688 { 0, CODE_FOR_spe_evmwhsmia
, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA
},
4689 { 0, CODE_FOR_spe_evmwhssf
, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF
},
4690 { 0, CODE_FOR_spe_evmwhssfa
, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA
},
4691 { 0, CODE_FOR_spe_evmwhumi
, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI
},
4692 { 0, CODE_FOR_spe_evmwhumia
, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA
},
4693 { 0, CODE_FOR_spe_evmwlsmiaaw
, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW
},
4694 { 0, CODE_FOR_spe_evmwlsmianw
, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW
},
4695 { 0, CODE_FOR_spe_evmwlssiaaw
, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW
},
4696 { 0, CODE_FOR_spe_evmwlssianw
, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW
},
4697 { 0, CODE_FOR_spe_evmwlumi
, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI
},
4698 { 0, CODE_FOR_spe_evmwlumia
, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA
},
4699 { 0, CODE_FOR_spe_evmwlumiaaw
, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW
},
4700 { 0, CODE_FOR_spe_evmwlumianw
, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW
},
4701 { 0, CODE_FOR_spe_evmwlusiaaw
, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW
},
4702 { 0, CODE_FOR_spe_evmwlusianw
, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW
},
4703 { 0, CODE_FOR_spe_evmwsmf
, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF
},
4704 { 0, CODE_FOR_spe_evmwsmfa
, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA
},
4705 { 0, CODE_FOR_spe_evmwsmfaa
, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA
},
4706 { 0, CODE_FOR_spe_evmwsmfan
, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN
},
4707 { 0, CODE_FOR_spe_evmwsmi
, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI
},
4708 { 0, CODE_FOR_spe_evmwsmia
, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA
},
4709 { 0, CODE_FOR_spe_evmwsmiaa
, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA
},
4710 { 0, CODE_FOR_spe_evmwsmian
, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN
},
4711 { 0, CODE_FOR_spe_evmwssf
, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF
},
4712 { 0, CODE_FOR_spe_evmwssfa
, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA
},
4713 { 0, CODE_FOR_spe_evmwssfaa
, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA
},
4714 { 0, CODE_FOR_spe_evmwssfan
, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN
},
4715 { 0, CODE_FOR_spe_evmwumi
, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI
},
4716 { 0, CODE_FOR_spe_evmwumia
, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA
},
4717 { 0, CODE_FOR_spe_evmwumiaa
, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA
},
4718 { 0, CODE_FOR_spe_evmwumian
, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN
},
4719 { 0, CODE_FOR_spe_evnand
, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND
},
4720 { 0, CODE_FOR_spe_evnor
, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR
},
4721 { 0, CODE_FOR_spe_evor
, "__builtin_spe_evor", SPE_BUILTIN_EVOR
},
4722 { 0, CODE_FOR_spe_evorc
, "__builtin_spe_evorc", SPE_BUILTIN_EVORC
},
4723 { 0, CODE_FOR_spe_evrlw
, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW
},
4724 { 0, CODE_FOR_spe_evslw
, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW
},
4725 { 0, CODE_FOR_spe_evsrws
, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS
},
4726 { 0, CODE_FOR_spe_evsrwu
, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU
},
4727 { 0, CODE_FOR_spe_evsubfw
, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW
},
4729 /* SPE binary operations expecting a 5-bit unsigned literal. */
4730 { 0, CODE_FOR_spe_evaddiw
, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW
},
4732 { 0, CODE_FOR_spe_evrlwi
, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI
},
4733 { 0, CODE_FOR_spe_evslwi
, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI
},
4734 { 0, CODE_FOR_spe_evsrwis
, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS
},
4735 { 0, CODE_FOR_spe_evsrwiu
, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU
},
4736 { 0, CODE_FOR_spe_evsubifw
, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW
},
4737 { 0, CODE_FOR_spe_evmwhssfaa
, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA
},
4738 { 0, CODE_FOR_spe_evmwhssmaa
, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA
},
4739 { 0, CODE_FOR_spe_evmwhsmfaa
, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA
},
4740 { 0, CODE_FOR_spe_evmwhsmiaa
, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA
},
4741 { 0, CODE_FOR_spe_evmwhusiaa
, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA
},
4742 { 0, CODE_FOR_spe_evmwhumiaa
, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA
},
4743 { 0, CODE_FOR_spe_evmwhssfan
, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN
},
4744 { 0, CODE_FOR_spe_evmwhssian
, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN
},
4745 { 0, CODE_FOR_spe_evmwhsmfan
, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN
},
4746 { 0, CODE_FOR_spe_evmwhsmian
, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN
},
4747 { 0, CODE_FOR_spe_evmwhusian
, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN
},
4748 { 0, CODE_FOR_spe_evmwhumian
, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN
},
4749 { 0, CODE_FOR_spe_evmwhgssfaa
, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA
},
4750 { 0, CODE_FOR_spe_evmwhgsmfaa
, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA
},
4751 { 0, CODE_FOR_spe_evmwhgsmiaa
, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA
},
4752 { 0, CODE_FOR_spe_evmwhgumiaa
, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA
},
4753 { 0, CODE_FOR_spe_evmwhgssfan
, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN
},
4754 { 0, CODE_FOR_spe_evmwhgsmfan
, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN
},
4755 { 0, CODE_FOR_spe_evmwhgsmian
, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN
},
4756 { 0, CODE_FOR_spe_evmwhgumian
, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN
},
4757 { 0, CODE_FOR_spe_brinc
, "__builtin_spe_brinc", SPE_BUILTIN_BRINC
},
4759 /* Place-holder. Leave as last binary SPE builtin. */
4760 { 0, CODE_FOR_xorv2si3
, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR
},
4763 /* AltiVec predicates. */
4765 struct builtin_description_predicates
4767 const unsigned int mask
;
4768 const enum insn_code icode
;
4770 const char *const name
;
4771 const enum rs6000_builtins code
;
4774 static const struct builtin_description_predicates bdesc_altivec_preds
[] =
4776 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P
},
4777 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P
},
4778 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P
},
4779 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P
},
4780 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4si
, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P
},
4781 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4si
, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P
},
4782 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4si
, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P
},
4783 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v8hi
, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P
},
4784 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v8hi
, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P
},
4785 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v8hi
, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P
},
4786 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v16qi
, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P
},
4787 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v16qi
, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P
},
4788 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v16qi
, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P
}
4791 /* SPE predicates. */
4792 static struct builtin_description bdesc_spe_predicates
[] =
4794 /* Place-holder. Leave as first. */
4795 { 0, CODE_FOR_spe_evcmpeq
, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ
},
4796 { 0, CODE_FOR_spe_evcmpgts
, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS
},
4797 { 0, CODE_FOR_spe_evcmpgtu
, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU
},
4798 { 0, CODE_FOR_spe_evcmplts
, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS
},
4799 { 0, CODE_FOR_spe_evcmpltu
, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU
},
4800 { 0, CODE_FOR_spe_evfscmpeq
, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ
},
4801 { 0, CODE_FOR_spe_evfscmpgt
, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT
},
4802 { 0, CODE_FOR_spe_evfscmplt
, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT
},
4803 { 0, CODE_FOR_spe_evfststeq
, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ
},
4804 { 0, CODE_FOR_spe_evfststgt
, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT
},
4805 /* Place-holder. Leave as last. */
4806 { 0, CODE_FOR_spe_evfststlt
, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT
},
4809 /* SPE evsel predicates. */
4810 static struct builtin_description bdesc_spe_evsel
[] =
4812 /* Place-holder. Leave as first. */
4813 { 0, CODE_FOR_spe_evcmpgts
, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS
},
4814 { 0, CODE_FOR_spe_evcmpgtu
, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU
},
4815 { 0, CODE_FOR_spe_evcmplts
, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS
},
4816 { 0, CODE_FOR_spe_evcmpltu
, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU
},
4817 { 0, CODE_FOR_spe_evcmpeq
, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ
},
4818 { 0, CODE_FOR_spe_evfscmpgt
, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT
},
4819 { 0, CODE_FOR_spe_evfscmplt
, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT
},
4820 { 0, CODE_FOR_spe_evfscmpeq
, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ
},
4821 { 0, CODE_FOR_spe_evfststgt
, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT
},
4822 { 0, CODE_FOR_spe_evfststlt
, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT
},
4823 /* Place-holder. Leave as last. */
4824 { 0, CODE_FOR_spe_evfststeq
, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ
},
4827 /* ABS* operations. */
4829 static const struct builtin_description bdesc_abs
[] =
4831 { MASK_ALTIVEC
, CODE_FOR_absv4si2
, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI
},
4832 { MASK_ALTIVEC
, CODE_FOR_absv8hi2
, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI
},
4833 { MASK_ALTIVEC
, CODE_FOR_absv4sf2
, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF
},
4834 { MASK_ALTIVEC
, CODE_FOR_absv16qi2
, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI
},
4835 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v4si
, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI
},
4836 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v8hi
, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI
},
4837 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v16qi
, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI
}
4840 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
4843 static struct builtin_description bdesc_1arg
[] =
4845 { MASK_ALTIVEC
, CODE_FOR_altivec_vexptefp
, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP
},
4846 { MASK_ALTIVEC
, CODE_FOR_altivec_vlogefp
, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP
},
4847 { MASK_ALTIVEC
, CODE_FOR_altivec_vrefp
, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP
},
4848 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfim
, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM
},
4849 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfin
, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN
},
4850 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfip
, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP
},
4851 { MASK_ALTIVEC
, CODE_FOR_ftruncv4sf2
, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ
},
4852 { MASK_ALTIVEC
, CODE_FOR_altivec_vrsqrtefp
, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP
},
4853 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltisb
, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB
},
4854 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltish
, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH
},
4855 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltisw
, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW
},
4856 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhsb
, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB
},
4857 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhpx
, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX
},
4858 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhsh
, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH
},
4859 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklsb
, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB
},
4860 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklpx
, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX
},
4861 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklsh
, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH
},
4863 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
4864 end with SPE_BUILTIN_EVSUBFUSIAAW. */
4865 { 0, CODE_FOR_spe_evabs
, "__builtin_spe_evabs", SPE_BUILTIN_EVABS
},
4866 { 0, CODE_FOR_spe_evaddsmiaaw
, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW
},
4867 { 0, CODE_FOR_spe_evaddssiaaw
, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW
},
4868 { 0, CODE_FOR_spe_evaddumiaaw
, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW
},
4869 { 0, CODE_FOR_spe_evaddusiaaw
, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW
},
4870 { 0, CODE_FOR_spe_evcntlsw
, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW
},
4871 { 0, CODE_FOR_spe_evcntlzw
, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW
},
4872 { 0, CODE_FOR_spe_evextsb
, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB
},
4873 { 0, CODE_FOR_spe_evextsh
, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH
},
4874 { 0, CODE_FOR_spe_evfsabs
, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS
},
4875 { 0, CODE_FOR_spe_evfscfsf
, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF
},
4876 { 0, CODE_FOR_spe_evfscfsi
, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI
},
4877 { 0, CODE_FOR_spe_evfscfuf
, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF
},
4878 { 0, CODE_FOR_spe_evfscfui
, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI
},
4879 { 0, CODE_FOR_spe_evfsctsf
, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF
},
4880 { 0, CODE_FOR_spe_evfsctsi
, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI
},
4881 { 0, CODE_FOR_spe_evfsctsiz
, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ
},
4882 { 0, CODE_FOR_spe_evfsctuf
, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF
},
4883 { 0, CODE_FOR_spe_evfsctui
, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI
},
4884 { 0, CODE_FOR_spe_evfsctuiz
, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ
},
4885 { 0, CODE_FOR_spe_evfsnabs
, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS
},
4886 { 0, CODE_FOR_spe_evfsneg
, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG
},
4887 { 0, CODE_FOR_spe_evmra
, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA
},
4888 { 0, CODE_FOR_spe_evneg
, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG
},
4889 { 0, CODE_FOR_spe_evrndw
, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW
},
4890 { 0, CODE_FOR_spe_evsubfsmiaaw
, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW
},
4891 { 0, CODE_FOR_spe_evsubfssiaaw
, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW
},
4892 { 0, CODE_FOR_spe_evsubfumiaaw
, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW
},
4893 { 0, CODE_FOR_spe_evsplatfi
, "__builtin_spe_evsplatfi", SPE_BUILTIN_EVSPLATFI
},
4894 { 0, CODE_FOR_spe_evsplati
, "__builtin_spe_evsplati", SPE_BUILTIN_EVSPLATI
},
4896 /* Place-holder. Leave as last unary SPE builtin. */
4897 { 0, CODE_FOR_spe_evsubfusiaaw
, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW
},
4901 rs6000_expand_unop_builtin (icode
, arglist
, target
)
4902 enum insn_code icode
;
4907 tree arg0
= TREE_VALUE (arglist
);
4908 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4909 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
4910 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4912 if (icode
== CODE_FOR_nothing
)
4913 /* Builtin not supported on this processor. */
4916 /* If we got invalid arguments bail out before generating bad rtl. */
4917 if (arg0
== error_mark_node
)
4920 if (icode
== CODE_FOR_altivec_vspltisb
4921 || icode
== CODE_FOR_altivec_vspltish
4922 || icode
== CODE_FOR_altivec_vspltisw
4923 || icode
== CODE_FOR_spe_evsplatfi
4924 || icode
== CODE_FOR_spe_evsplati
)
4926 /* Only allow 5-bit *signed* literals. */
4927 if (GET_CODE (op0
) != CONST_INT
4928 || INTVAL (op0
) > 0x1f
4929 || INTVAL (op0
) < -0x1f)
4931 error ("argument 1 must be a 5-bit signed literal");
4937 || GET_MODE (target
) != tmode
4938 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4939 target
= gen_reg_rtx (tmode
);
4941 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4942 op0
= copy_to_mode_reg (mode0
, op0
);
4944 pat
= GEN_FCN (icode
) (target
, op0
);
4953 altivec_expand_abs_builtin (icode
, arglist
, target
)
4954 enum insn_code icode
;
4958 rtx pat
, scratch1
, scratch2
;
4959 tree arg0
= TREE_VALUE (arglist
);
4960 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4961 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
4962 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4964 /* If we have invalid arguments, bail out before generating bad rtl. */
4965 if (arg0
== error_mark_node
)
4969 || GET_MODE (target
) != tmode
4970 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4971 target
= gen_reg_rtx (tmode
);
4973 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4974 op0
= copy_to_mode_reg (mode0
, op0
);
4976 scratch1
= gen_reg_rtx (mode0
);
4977 scratch2
= gen_reg_rtx (mode0
);
4979 pat
= GEN_FCN (icode
) (target
, op0
, scratch1
, scratch2
);
4988 rs6000_expand_binop_builtin (icode
, arglist
, target
)
4989 enum insn_code icode
;
4994 tree arg0
= TREE_VALUE (arglist
);
4995 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4996 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4997 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4998 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
4999 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
5000 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
5002 if (icode
== CODE_FOR_nothing
)
5003 /* Builtin not supported on this processor. */
5006 /* If we got invalid arguments bail out before generating bad rtl. */
5007 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
5010 if (icode
== CODE_FOR_altivec_vcfux
5011 || icode
== CODE_FOR_altivec_vcfsx
5012 || icode
== CODE_FOR_altivec_vctsxs
5013 || icode
== CODE_FOR_altivec_vctuxs
5014 || icode
== CODE_FOR_altivec_vspltb
5015 || icode
== CODE_FOR_altivec_vsplth
5016 || icode
== CODE_FOR_altivec_vspltw
5017 || icode
== CODE_FOR_spe_evaddiw
5018 || icode
== CODE_FOR_spe_evldd
5019 || icode
== CODE_FOR_spe_evldh
5020 || icode
== CODE_FOR_spe_evldw
5021 || icode
== CODE_FOR_spe_evlhhesplat
5022 || icode
== CODE_FOR_spe_evlhhossplat
5023 || icode
== CODE_FOR_spe_evlhhousplat
5024 || icode
== CODE_FOR_spe_evlwhe
5025 || icode
== CODE_FOR_spe_evlwhos
5026 || icode
== CODE_FOR_spe_evlwhou
5027 || icode
== CODE_FOR_spe_evlwhsplat
5028 || icode
== CODE_FOR_spe_evlwwsplat
5029 || icode
== CODE_FOR_spe_evrlwi
5030 || icode
== CODE_FOR_spe_evslwi
5031 || icode
== CODE_FOR_spe_evsrwis
5032 || icode
== CODE_FOR_spe_evsubifw
5033 || icode
== CODE_FOR_spe_evsrwiu
)
5035 /* Only allow 5-bit unsigned literals. */
5036 if (TREE_CODE (arg1
) != INTEGER_CST
5037 || TREE_INT_CST_LOW (arg1
) & ~0x1f)
5039 error ("argument 2 must be a 5-bit unsigned literal");
5045 || GET_MODE (target
) != tmode
5046 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
5047 target
= gen_reg_rtx (tmode
);
5049 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5050 op0
= copy_to_mode_reg (mode0
, op0
);
5051 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
5052 op1
= copy_to_mode_reg (mode1
, op1
);
5054 pat
= GEN_FCN (icode
) (target
, op0
, op1
);
5063 altivec_expand_predicate_builtin (icode
, opcode
, arglist
, target
)
5064 enum insn_code icode
;
5070 tree cr6_form
= TREE_VALUE (arglist
);
5071 tree arg0
= TREE_VALUE (TREE_CHAIN (arglist
));
5072 tree arg1
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
5073 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5074 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
5075 enum machine_mode tmode
= SImode
;
5076 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
5077 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
5080 if (TREE_CODE (cr6_form
) != INTEGER_CST
)
5082 error ("argument 1 of __builtin_altivec_predicate must be a constant");
5086 cr6_form_int
= TREE_INT_CST_LOW (cr6_form
);
5091 /* If we have invalid arguments, bail out before generating bad rtl. */
5092 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
5096 || GET_MODE (target
) != tmode
5097 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
5098 target
= gen_reg_rtx (tmode
);
5100 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5101 op0
= copy_to_mode_reg (mode0
, op0
);
5102 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
5103 op1
= copy_to_mode_reg (mode1
, op1
);
5105 scratch
= gen_reg_rtx (mode0
);
5107 pat
= GEN_FCN (icode
) (scratch
, op0
, op1
,
5108 gen_rtx (SYMBOL_REF
, Pmode
, opcode
));
5113 /* The vec_any* and vec_all* predicates use the same opcodes for two
5114 different operations, but the bits in CR6 will be different
5115 depending on what information we want. So we have to play tricks
5116 with CR6 to get the right bits out.
5118 If you think this is disgusting, look at the specs for the
5119 AltiVec predicates. */
5121 switch (cr6_form_int
)
5124 emit_insn (gen_cr6_test_for_zero (target
));
5127 emit_insn (gen_cr6_test_for_zero_reverse (target
));
5130 emit_insn (gen_cr6_test_for_lt (target
));
5133 emit_insn (gen_cr6_test_for_lt_reverse (target
));
5136 error ("argument 1 of __builtin_altivec_predicate is out of range");
5144 altivec_expand_stv_builtin (icode
, arglist
)
5145 enum insn_code icode
;
5148 tree arg0
= TREE_VALUE (arglist
);
5149 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
5150 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
5151 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5152 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
5153 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
5155 enum machine_mode mode0
= insn_data
[icode
].operand
[0].mode
;
5156 enum machine_mode mode1
= insn_data
[icode
].operand
[1].mode
;
5157 enum machine_mode mode2
= insn_data
[icode
].operand
[2].mode
;
5159 /* Invalid arguments. Bail before doing anything stoopid! */
5160 if (arg0
== error_mark_node
5161 || arg1
== error_mark_node
5162 || arg2
== error_mark_node
)
5165 if (! (*insn_data
[icode
].operand
[2].predicate
) (op0
, mode2
))
5166 op0
= copy_to_mode_reg (mode2
, op0
);
5167 if (! (*insn_data
[icode
].operand
[0].predicate
) (op1
, mode0
))
5168 op1
= copy_to_mode_reg (mode0
, op1
);
5169 if (! (*insn_data
[icode
].operand
[1].predicate
) (op2
, mode1
))
5170 op2
= copy_to_mode_reg (mode1
, op2
);
5172 pat
= GEN_FCN (icode
) (op1
, op2
, op0
);
5179 rs6000_expand_ternop_builtin (icode
, arglist
, target
)
5180 enum insn_code icode
;
5185 tree arg0
= TREE_VALUE (arglist
);
5186 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
5187 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
5188 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5189 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
5190 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
5191 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
5192 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
5193 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
5194 enum machine_mode mode2
= insn_data
[icode
].operand
[3].mode
;
5196 if (icode
== CODE_FOR_nothing
)
5197 /* Builtin not supported on this processor. */
5200 /* If we got invalid arguments bail out before generating bad rtl. */
5201 if (arg0
== error_mark_node
5202 || arg1
== error_mark_node
5203 || arg2
== error_mark_node
)
5206 if (icode
== CODE_FOR_altivec_vsldoi_4sf
5207 || icode
== CODE_FOR_altivec_vsldoi_4si
5208 || icode
== CODE_FOR_altivec_vsldoi_8hi
5209 || icode
== CODE_FOR_altivec_vsldoi_16qi
)
5211 /* Only allow 4-bit unsigned literals. */
5212 if (TREE_CODE (arg2
) != INTEGER_CST
5213 || TREE_INT_CST_LOW (arg2
) & ~0xf)
5215 error ("argument 3 must be a 4-bit unsigned literal");
5221 || GET_MODE (target
) != tmode
5222 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
5223 target
= gen_reg_rtx (tmode
);
5225 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5226 op0
= copy_to_mode_reg (mode0
, op0
);
5227 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
5228 op1
= copy_to_mode_reg (mode1
, op1
);
5229 if (! (*insn_data
[icode
].operand
[3].predicate
) (op2
, mode2
))
5230 op2
= copy_to_mode_reg (mode2
, op2
);
5232 pat
= GEN_FCN (icode
) (target
, op0
, op1
, op2
);
5240 /* Expand the lvx builtins. */
5242 altivec_expand_ld_builtin (exp
, target
, expandedp
)
5247 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
5248 tree arglist
= TREE_OPERAND (exp
, 1);
5249 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
5251 enum machine_mode tmode
, mode0
;
5253 enum insn_code icode
;
5257 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi
:
5258 icode
= CODE_FOR_altivec_lvx_16qi
;
5260 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi
:
5261 icode
= CODE_FOR_altivec_lvx_8hi
;
5263 case ALTIVEC_BUILTIN_LD_INTERNAL_4si
:
5264 icode
= CODE_FOR_altivec_lvx_4si
;
5266 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf
:
5267 icode
= CODE_FOR_altivec_lvx_4sf
;
5276 arg0
= TREE_VALUE (arglist
);
5277 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5278 tmode
= insn_data
[icode
].operand
[0].mode
;
5279 mode0
= insn_data
[icode
].operand
[1].mode
;
5282 || GET_MODE (target
) != tmode
5283 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
5284 target
= gen_reg_rtx (tmode
);
5286 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5287 op0
= gen_rtx_MEM (mode0
, copy_to_mode_reg (Pmode
, op0
));
5289 pat
= GEN_FCN (icode
) (target
, op0
);
5296 /* Expand the stvx builtins. */
5298 altivec_expand_st_builtin (exp
, target
, expandedp
)
5300 rtx target ATTRIBUTE_UNUSED
;
5303 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
5304 tree arglist
= TREE_OPERAND (exp
, 1);
5305 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
5307 enum machine_mode mode0
, mode1
;
5309 enum insn_code icode
;
5313 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi
:
5314 icode
= CODE_FOR_altivec_stvx_16qi
;
5316 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi
:
5317 icode
= CODE_FOR_altivec_stvx_8hi
;
5319 case ALTIVEC_BUILTIN_ST_INTERNAL_4si
:
5320 icode
= CODE_FOR_altivec_stvx_4si
;
5322 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf
:
5323 icode
= CODE_FOR_altivec_stvx_4sf
;
5330 arg0
= TREE_VALUE (arglist
);
5331 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
5332 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5333 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
5334 mode0
= insn_data
[icode
].operand
[0].mode
;
5335 mode1
= insn_data
[icode
].operand
[1].mode
;
5337 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
5338 op0
= gen_rtx_MEM (mode0
, copy_to_mode_reg (Pmode
, op0
));
5339 if (! (*insn_data
[icode
].operand
[1].predicate
) (op1
, mode1
))
5340 op1
= copy_to_mode_reg (mode1
, op1
);
5342 pat
= GEN_FCN (icode
) (op0
, op1
);
5350 /* Expand the dst builtins. */
5352 altivec_expand_dst_builtin (exp
, target
, expandedp
)
5354 rtx target ATTRIBUTE_UNUSED
;
5357 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
5358 tree arglist
= TREE_OPERAND (exp
, 1);
5359 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
5360 tree arg0
, arg1
, arg2
;
5361 enum machine_mode mode0
, mode1
, mode2
;
5362 rtx pat
, op0
, op1
, op2
;
5363 struct builtin_description
*d
;
5368 /* Handle DST variants. */
5369 d
= (struct builtin_description
*) bdesc_dst
;
5370 for (i
= 0; i
< ARRAY_SIZE (bdesc_dst
); i
++, d
++)
5371 if (d
->code
== fcode
)
5373 arg0
= TREE_VALUE (arglist
);
5374 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
5375 arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
5376 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5377 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
5378 op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
5379 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
5380 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
5381 mode2
= insn_data
[d
->icode
].operand
[2].mode
;
5383 /* Invalid arguments, bail out before generating bad rtl. */
5384 if (arg0
== error_mark_node
5385 || arg1
== error_mark_node
5386 || arg2
== error_mark_node
)
5389 if (TREE_CODE (arg2
) != INTEGER_CST
5390 || TREE_INT_CST_LOW (arg2
) & ~0x3)
5392 error ("argument to `%s' must be a 2-bit unsigned literal", d
->name
);
5396 if (! (*insn_data
[d
->icode
].operand
[0].predicate
) (op0
, mode0
))
5397 op0
= copy_to_mode_reg (mode0
, op0
);
5398 if (! (*insn_data
[d
->icode
].operand
[1].predicate
) (op1
, mode1
))
5399 op1
= copy_to_mode_reg (mode1
, op1
);
5401 pat
= GEN_FCN (d
->icode
) (op0
, op1
, op2
);
5412 /* Expand the builtin in EXP and store the result in TARGET. Store
5413 true in *EXPANDEDP if we found a builtin to expand. */
5415 altivec_expand_builtin (exp
, target
, expandedp
)
5420 struct builtin_description
*d
;
5421 struct builtin_description_predicates
*dp
;
5423 enum insn_code icode
;
5424 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
5425 tree arglist
= TREE_OPERAND (exp
, 1);
5428 enum machine_mode tmode
, mode0
;
5429 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
5431 target
= altivec_expand_ld_builtin (exp
, target
, expandedp
);
5435 target
= altivec_expand_st_builtin (exp
, target
, expandedp
);
5439 target
= altivec_expand_dst_builtin (exp
, target
, expandedp
);
5447 case ALTIVEC_BUILTIN_STVX
:
5448 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx
, arglist
);
5449 case ALTIVEC_BUILTIN_STVEBX
:
5450 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx
, arglist
);
5451 case ALTIVEC_BUILTIN_STVEHX
:
5452 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx
, arglist
);
5453 case ALTIVEC_BUILTIN_STVEWX
:
5454 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx
, arglist
);
5455 case ALTIVEC_BUILTIN_STVXL
:
5456 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl
, arglist
);
5458 case ALTIVEC_BUILTIN_MFVSCR
:
5459 icode
= CODE_FOR_altivec_mfvscr
;
5460 tmode
= insn_data
[icode
].operand
[0].mode
;
5463 || GET_MODE (target
) != tmode
5464 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
5465 target
= gen_reg_rtx (tmode
);
5467 pat
= GEN_FCN (icode
) (target
);
5473 case ALTIVEC_BUILTIN_MTVSCR
:
5474 icode
= CODE_FOR_altivec_mtvscr
;
5475 arg0
= TREE_VALUE (arglist
);
5476 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5477 mode0
= insn_data
[icode
].operand
[0].mode
;
5479 /* If we got invalid arguments bail out before generating bad rtl. */
5480 if (arg0
== error_mark_node
)
5483 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
5484 op0
= copy_to_mode_reg (mode0
, op0
);
5486 pat
= GEN_FCN (icode
) (op0
);
5491 case ALTIVEC_BUILTIN_DSSALL
:
5492 emit_insn (gen_altivec_dssall ());
5495 case ALTIVEC_BUILTIN_DSS
:
5496 icode
= CODE_FOR_altivec_dss
;
5497 arg0
= TREE_VALUE (arglist
);
5498 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5499 mode0
= insn_data
[icode
].operand
[0].mode
;
5501 /* If we got invalid arguments bail out before generating bad rtl. */
5502 if (arg0
== error_mark_node
)
5505 if (TREE_CODE (arg0
) != INTEGER_CST
5506 || TREE_INT_CST_LOW (arg0
) & ~0x3)
5508 error ("argument to dss must be a 2-bit unsigned literal");
5512 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
5513 op0
= copy_to_mode_reg (mode0
, op0
);
5515 emit_insn (gen_altivec_dss (op0
));
5519 /* Expand abs* operations. */
5520 d
= (struct builtin_description
*) bdesc_abs
;
5521 for (i
= 0; i
< ARRAY_SIZE (bdesc_abs
); i
++, d
++)
5522 if (d
->code
== fcode
)
5523 return altivec_expand_abs_builtin (d
->icode
, arglist
, target
);
5525 /* Expand the AltiVec predicates. */
5526 dp
= (struct builtin_description_predicates
*) bdesc_altivec_preds
;
5527 for (i
= 0; i
< ARRAY_SIZE (bdesc_altivec_preds
); i
++, dp
++)
5528 if (dp
->code
== fcode
)
5529 return altivec_expand_predicate_builtin (dp
->icode
, dp
->opcode
, arglist
, target
);
5531 /* LV* are funky. We initialized them differently. */
5534 case ALTIVEC_BUILTIN_LVSL
:
5535 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsl
,
5537 case ALTIVEC_BUILTIN_LVSR
:
5538 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsr
,
5540 case ALTIVEC_BUILTIN_LVEBX
:
5541 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvebx
,
5543 case ALTIVEC_BUILTIN_LVEHX
:
5544 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvehx
,
5546 case ALTIVEC_BUILTIN_LVEWX
:
5547 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvewx
,
5549 case ALTIVEC_BUILTIN_LVXL
:
5550 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvxl
,
5552 case ALTIVEC_BUILTIN_LVX
:
5553 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvx
,
5564 /* Binops that need to be initialized manually, but can be expanded
5565 automagically by rs6000_expand_binop_builtin. */
5566 static struct builtin_description bdesc_2arg_spe
[] =
5568 { 0, CODE_FOR_spe_evlddx
, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX
},
5569 { 0, CODE_FOR_spe_evldwx
, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX
},
5570 { 0, CODE_FOR_spe_evldhx
, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX
},
5571 { 0, CODE_FOR_spe_evlwhex
, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX
},
5572 { 0, CODE_FOR_spe_evlwhoux
, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX
},
5573 { 0, CODE_FOR_spe_evlwhosx
, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX
},
5574 { 0, CODE_FOR_spe_evlwwsplatx
, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX
},
5575 { 0, CODE_FOR_spe_evlwhsplatx
, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX
},
5576 { 0, CODE_FOR_spe_evlhhesplatx
, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX
},
5577 { 0, CODE_FOR_spe_evlhhousplatx
, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX
},
5578 { 0, CODE_FOR_spe_evlhhossplatx
, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX
},
5579 { 0, CODE_FOR_spe_evldd
, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD
},
5580 { 0, CODE_FOR_spe_evldw
, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW
},
5581 { 0, CODE_FOR_spe_evldh
, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH
},
5582 { 0, CODE_FOR_spe_evlwhe
, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE
},
5583 { 0, CODE_FOR_spe_evlwhou
, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU
},
5584 { 0, CODE_FOR_spe_evlwhos
, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS
},
5585 { 0, CODE_FOR_spe_evlwwsplat
, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT
},
5586 { 0, CODE_FOR_spe_evlwhsplat
, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT
},
5587 { 0, CODE_FOR_spe_evlhhesplat
, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT
},
5588 { 0, CODE_FOR_spe_evlhhousplat
, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT
},
5589 { 0, CODE_FOR_spe_evlhhossplat
, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT
}
5592 /* Expand the builtin in EXP and store the result in TARGET. Store
5593 true in *EXPANDEDP if we found a builtin to expand.
5595 This expands the SPE builtins that are not simple unary and binary
5598 spe_expand_builtin (exp
, target
, expandedp
)
5603 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
5604 tree arglist
= TREE_OPERAND (exp
, 1);
5606 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
5607 enum insn_code icode
;
5608 enum machine_mode tmode
, mode0
;
5610 struct builtin_description
*d
;
5615 /* Syntax check for a 5-bit unsigned immediate. */
5618 case SPE_BUILTIN_EVSTDD
:
5619 case SPE_BUILTIN_EVSTDH
:
5620 case SPE_BUILTIN_EVSTDW
:
5621 case SPE_BUILTIN_EVSTWHE
:
5622 case SPE_BUILTIN_EVSTWHO
:
5623 case SPE_BUILTIN_EVSTWWE
:
5624 case SPE_BUILTIN_EVSTWWO
:
5625 arg1
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
5626 if (TREE_CODE (arg1
) != INTEGER_CST
5627 || TREE_INT_CST_LOW (arg1
) & ~0x1f)
5629 error ("argument 2 must be a 5-bit unsigned literal");
5637 d
= (struct builtin_description
*) bdesc_2arg_spe
;
5638 for (i
= 0; i
< ARRAY_SIZE (bdesc_2arg_spe
); ++i
, ++d
)
5639 if (d
->code
== fcode
)
5640 return rs6000_expand_binop_builtin (d
->icode
, arglist
, target
);
5642 d
= (struct builtin_description
*) bdesc_spe_predicates
;
5643 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_predicates
); ++i
, ++d
)
5644 if (d
->code
== fcode
)
5645 return spe_expand_predicate_builtin (d
->icode
, arglist
, target
);
5647 d
= (struct builtin_description
*) bdesc_spe_evsel
;
5648 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_evsel
); ++i
, ++d
)
5649 if (d
->code
== fcode
)
5650 return spe_expand_evsel_builtin (d
->icode
, arglist
, target
);
5654 case SPE_BUILTIN_EVSTDDX
:
5655 return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx
, arglist
);
5656 case SPE_BUILTIN_EVSTDHX
:
5657 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx
, arglist
);
5658 case SPE_BUILTIN_EVSTDWX
:
5659 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx
, arglist
);
5660 case SPE_BUILTIN_EVSTWHEX
:
5661 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex
, arglist
);
5662 case SPE_BUILTIN_EVSTWHOX
:
5663 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox
, arglist
);
5664 case SPE_BUILTIN_EVSTWWEX
:
5665 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex
, arglist
);
5666 case SPE_BUILTIN_EVSTWWOX
:
5667 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox
, arglist
);
5668 case SPE_BUILTIN_EVSTDD
:
5669 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd
, arglist
);
5670 case SPE_BUILTIN_EVSTDH
:
5671 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh
, arglist
);
5672 case SPE_BUILTIN_EVSTDW
:
5673 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw
, arglist
);
5674 case SPE_BUILTIN_EVSTWHE
:
5675 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe
, arglist
);
5676 case SPE_BUILTIN_EVSTWHO
:
5677 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho
, arglist
);
5678 case SPE_BUILTIN_EVSTWWE
:
5679 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe
, arglist
);
5680 case SPE_BUILTIN_EVSTWWO
:
5681 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo
, arglist
);
5682 case SPE_BUILTIN_MFSPEFSCR
:
5683 icode
= CODE_FOR_spe_mfspefscr
;
5684 tmode
= insn_data
[icode
].operand
[0].mode
;
5687 || GET_MODE (target
) != tmode
5688 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
5689 target
= gen_reg_rtx (tmode
);
5691 pat
= GEN_FCN (icode
) (target
);
5696 case SPE_BUILTIN_MTSPEFSCR
:
5697 icode
= CODE_FOR_spe_mtspefscr
;
5698 arg0
= TREE_VALUE (arglist
);
5699 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5700 mode0
= insn_data
[icode
].operand
[0].mode
;
5702 if (arg0
== error_mark_node
)
5705 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
5706 op0
= copy_to_mode_reg (mode0
, op0
);
5708 pat
= GEN_FCN (icode
) (op0
);
5721 spe_expand_predicate_builtin (icode
, arglist
, target
)
5722 enum insn_code icode
;
5726 rtx pat
, scratch
, tmp
;
5727 tree form
= TREE_VALUE (arglist
);
5728 tree arg0
= TREE_VALUE (TREE_CHAIN (arglist
));
5729 tree arg1
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
5730 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5731 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
5732 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
5733 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
5737 if (TREE_CODE (form
) != INTEGER_CST
)
5739 error ("argument 1 of __builtin_spe_predicate must be a constant");
5743 form_int
= TREE_INT_CST_LOW (form
);
5748 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
5752 || GET_MODE (target
) != SImode
5753 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, SImode
))
5754 target
= gen_reg_rtx (SImode
);
5756 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5757 op0
= copy_to_mode_reg (mode0
, op0
);
5758 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
5759 op1
= copy_to_mode_reg (mode1
, op1
);
5761 scratch
= gen_reg_rtx (CCmode
);
5763 pat
= GEN_FCN (icode
) (scratch
, op0
, op1
);
5768 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
5769 _lower_. We use one compare, but look in different bits of the
5770 CR for each variant.
5772 There are 2 elements in each SPE simd type (upper/lower). The CR
5773 bits are set as follows:
5775 BIT0 | BIT 1 | BIT 2 | BIT 3
5776 U | L | (U | L) | (U & L)
5778 So, for an "all" relationship, BIT 3 would be set.
5779 For an "any" relationship, BIT 2 would be set. Etc.
5781 Following traditional nomenclature, these bits map to:
5783 BIT0 | BIT 1 | BIT 2 | BIT 3
5786 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
5791 /* All variant. OV bit. */
5793 /* We need to get to the OV bit, which is the ORDERED bit. We
5794 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
5795 that's ugly and will trigger a validate_condition_mode abort.
5796 So let's just use another pattern. */
5797 emit_insn (gen_move_from_CR_ov_bit (target
, scratch
));
5799 /* Any variant. EQ bit. */
5803 /* Upper variant. LT bit. */
5807 /* Lower variant. GT bit. */
5812 error ("argument 1 of __builtin_spe_predicate is out of range");
5816 tmp
= gen_rtx_fmt_ee (code
, SImode
, scratch
, const0_rtx
);
5817 emit_move_insn (target
, tmp
);
5822 /* The evsel builtins look like this:
5824 e = __builtin_spe_evsel_OP (a, b, c, d);
5828 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
5829 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
5833 spe_expand_evsel_builtin (icode
, arglist
, target
)
5834 enum insn_code icode
;
5839 tree arg0
= TREE_VALUE (arglist
);
5840 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
5841 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
5842 tree arg3
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist
))));
5843 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5844 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
5845 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
5846 rtx op3
= expand_expr (arg3
, NULL_RTX
, VOIDmode
, 0);
5847 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
5848 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
5853 if (arg0
== error_mark_node
|| arg1
== error_mark_node
5854 || arg2
== error_mark_node
|| arg3
== error_mark_node
)
5858 || GET_MODE (target
) != mode0
5859 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, mode0
))
5860 target
= gen_reg_rtx (mode0
);
5862 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5863 op0
= copy_to_mode_reg (mode0
, op0
);
5864 if (! (*insn_data
[icode
].operand
[1].predicate
) (op1
, mode1
))
5865 op1
= copy_to_mode_reg (mode0
, op1
);
5866 if (! (*insn_data
[icode
].operand
[1].predicate
) (op2
, mode1
))
5867 op2
= copy_to_mode_reg (mode0
, op2
);
5868 if (! (*insn_data
[icode
].operand
[1].predicate
) (op3
, mode1
))
5869 op3
= copy_to_mode_reg (mode0
, op3
);
5871 /* Generate the compare. */
5872 scratch
= gen_reg_rtx (CCmode
);
5873 pat
= GEN_FCN (icode
) (scratch
, op0
, op1
);
5878 if (mode0
== V2SImode
)
5879 emit_insn (gen_spe_evsel (target
, op2
, op3
, scratch
));
5881 emit_insn (gen_spe_evsel_fs (target
, op2
, op3
, scratch
));
5886 /* Expand an expression EXP that calls a built-in function,
5887 with result going to TARGET if that's convenient
5888 (and in mode MODE if that's convenient).
5889 SUBTARGET may be used as the target for computing one of EXP's operands.
5890 IGNORE is nonzero if the value is to be ignored. */
5893 rs6000_expand_builtin (exp
, target
, subtarget
, mode
, ignore
)
5896 rtx subtarget ATTRIBUTE_UNUSED
;
5897 enum machine_mode mode ATTRIBUTE_UNUSED
;
5898 int ignore ATTRIBUTE_UNUSED
;
5900 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
5901 tree arglist
= TREE_OPERAND (exp
, 1);
5902 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
5903 struct builtin_description
*d
;
5910 ret
= altivec_expand_builtin (exp
, target
, &success
);
5917 ret
= spe_expand_builtin (exp
, target
, &success
);
5923 if (TARGET_ALTIVEC
|| TARGET_SPE
)
5925 /* Handle simple unary operations. */
5926 d
= (struct builtin_description
*) bdesc_1arg
;
5927 for (i
= 0; i
< ARRAY_SIZE (bdesc_1arg
); i
++, d
++)
5928 if (d
->code
== fcode
)
5929 return rs6000_expand_unop_builtin (d
->icode
, arglist
, target
);
5931 /* Handle simple binary operations. */
5932 d
= (struct builtin_description
*) bdesc_2arg
;
5933 for (i
= 0; i
< ARRAY_SIZE (bdesc_2arg
); i
++, d
++)
5934 if (d
->code
== fcode
)
5935 return rs6000_expand_binop_builtin (d
->icode
, arglist
, target
);
5937 /* Handle simple ternary operations. */
5938 d
= (struct builtin_description
*) bdesc_3arg
;
5939 for (i
= 0; i
< ARRAY_SIZE (bdesc_3arg
); i
++, d
++)
5940 if (d
->code
== fcode
)
5941 return rs6000_expand_ternop_builtin (d
->icode
, arglist
, target
);
5949 rs6000_init_builtins ()
5951 opaque_V2SI_type_node
= copy_node (V2SI_type_node
);
5952 opaque_V2SF_type_node
= copy_node (V2SF_type_node
);
5953 opaque_p_V2SI_type_node
= build_pointer_type (opaque_V2SI_type_node
);
5956 spe_init_builtins ();
5958 altivec_init_builtins ();
5959 if (TARGET_ALTIVEC
|| TARGET_SPE
)
5960 rs6000_common_init_builtins ();
5963 /* Search through a set of builtins and enable the mask bits.
5964 DESC is an array of builtins.
5965 SIZE is the total number of builtins.
5966 START is the builtin enum at which to start.
5967 END is the builtin enum at which to end. */
5969 enable_mask_for_builtins (desc
, size
, start
, end
)
5970 struct builtin_description
*desc
;
5972 enum rs6000_builtins start
, end
;
5976 for (i
= 0; i
< size
; ++i
)
5977 if (desc
[i
].code
== start
)
5983 for (; i
< size
; ++i
)
5985 /* Flip all the bits on. */
5986 desc
[i
].mask
= target_flags
;
5987 if (desc
[i
].code
== end
)
5993 spe_init_builtins ()
5995 tree endlink
= void_list_node
;
5996 tree puint_type_node
= build_pointer_type (unsigned_type_node
);
5997 tree pushort_type_node
= build_pointer_type (short_unsigned_type_node
);
5998 struct builtin_description
*d
;
6001 tree v2si_ftype_4_v2si
6002 = build_function_type
6003 (opaque_V2SI_type_node
,
6004 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
6005 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
6006 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
6007 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
6010 tree v2sf_ftype_4_v2sf
6011 = build_function_type
6012 (opaque_V2SF_type_node
,
6013 tree_cons (NULL_TREE
, opaque_V2SF_type_node
,
6014 tree_cons (NULL_TREE
, opaque_V2SF_type_node
,
6015 tree_cons (NULL_TREE
, opaque_V2SF_type_node
,
6016 tree_cons (NULL_TREE
, opaque_V2SF_type_node
,
6019 tree int_ftype_int_v2si_v2si
6020 = build_function_type
6022 tree_cons (NULL_TREE
, integer_type_node
,
6023 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
6024 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
6027 tree int_ftype_int_v2sf_v2sf
6028 = build_function_type
6030 tree_cons (NULL_TREE
, integer_type_node
,
6031 tree_cons (NULL_TREE
, opaque_V2SF_type_node
,
6032 tree_cons (NULL_TREE
, opaque_V2SF_type_node
,
6035 tree void_ftype_v2si_puint_int
6036 = build_function_type (void_type_node
,
6037 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
6038 tree_cons (NULL_TREE
, puint_type_node
,
6039 tree_cons (NULL_TREE
,
6043 tree void_ftype_v2si_puint_char
6044 = build_function_type (void_type_node
,
6045 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
6046 tree_cons (NULL_TREE
, puint_type_node
,
6047 tree_cons (NULL_TREE
,
6051 tree void_ftype_v2si_pv2si_int
6052 = build_function_type (void_type_node
,
6053 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
6054 tree_cons (NULL_TREE
, opaque_p_V2SI_type_node
,
6055 tree_cons (NULL_TREE
,
6059 tree void_ftype_v2si_pv2si_char
6060 = build_function_type (void_type_node
,
6061 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
6062 tree_cons (NULL_TREE
, opaque_p_V2SI_type_node
,
6063 tree_cons (NULL_TREE
,
6068 = build_function_type (void_type_node
,
6069 tree_cons (NULL_TREE
, integer_type_node
, endlink
));
6072 = build_function_type (integer_type_node
, endlink
);
6074 tree v2si_ftype_pv2si_int
6075 = build_function_type (opaque_V2SI_type_node
,
6076 tree_cons (NULL_TREE
, opaque_p_V2SI_type_node
,
6077 tree_cons (NULL_TREE
, integer_type_node
,
6080 tree v2si_ftype_puint_int
6081 = build_function_type (opaque_V2SI_type_node
,
6082 tree_cons (NULL_TREE
, puint_type_node
,
6083 tree_cons (NULL_TREE
, integer_type_node
,
6086 tree v2si_ftype_pushort_int
6087 = build_function_type (opaque_V2SI_type_node
,
6088 tree_cons (NULL_TREE
, pushort_type_node
,
6089 tree_cons (NULL_TREE
, integer_type_node
,
6092 /* The initialization of the simple binary and unary builtins is
6093 done in rs6000_common_init_builtins, but we have to enable the
6094 mask bits here manually because we have run out of `target_flags'
6095 bits. We really need to redesign this mask business. */
6097 enable_mask_for_builtins ((struct builtin_description
*) bdesc_2arg
,
6098 ARRAY_SIZE (bdesc_2arg
),
6101 enable_mask_for_builtins ((struct builtin_description
*) bdesc_1arg
,
6102 ARRAY_SIZE (bdesc_1arg
),
6104 SPE_BUILTIN_EVSUBFUSIAAW
);
6105 enable_mask_for_builtins ((struct builtin_description
*) bdesc_spe_predicates
,
6106 ARRAY_SIZE (bdesc_spe_predicates
),
6107 SPE_BUILTIN_EVCMPEQ
,
6108 SPE_BUILTIN_EVFSTSTLT
);
6109 enable_mask_for_builtins ((struct builtin_description
*) bdesc_spe_evsel
,
6110 ARRAY_SIZE (bdesc_spe_evsel
),
6111 SPE_BUILTIN_EVSEL_CMPGTS
,
6112 SPE_BUILTIN_EVSEL_FSTSTEQ
);
6114 /* Initialize irregular SPE builtins. */
6116 def_builtin (target_flags
, "__builtin_spe_mtspefscr", void_ftype_int
, SPE_BUILTIN_MTSPEFSCR
);
6117 def_builtin (target_flags
, "__builtin_spe_mfspefscr", int_ftype_void
, SPE_BUILTIN_MFSPEFSCR
);
6118 def_builtin (target_flags
, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int
, SPE_BUILTIN_EVSTDDX
);
6119 def_builtin (target_flags
, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int
, SPE_BUILTIN_EVSTDHX
);
6120 def_builtin (target_flags
, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int
, SPE_BUILTIN_EVSTDWX
);
6121 def_builtin (target_flags
, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWHEX
);
6122 def_builtin (target_flags
, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWHOX
);
6123 def_builtin (target_flags
, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWWEX
);
6124 def_builtin (target_flags
, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWWOX
);
6125 def_builtin (target_flags
, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char
, SPE_BUILTIN_EVSTDD
);
6126 def_builtin (target_flags
, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char
, SPE_BUILTIN_EVSTDH
);
6127 def_builtin (target_flags
, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char
, SPE_BUILTIN_EVSTDW
);
6128 def_builtin (target_flags
, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWHE
);
6129 def_builtin (target_flags
, "__builtin_spe_evstwho", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWHO
);
6130 def_builtin (target_flags
, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWWE
);
6131 def_builtin (target_flags
, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWWO
);
6134 def_builtin (target_flags
, "__builtin_spe_evlddx", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDDX
);
6135 def_builtin (target_flags
, "__builtin_spe_evldwx", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDWX
);
6136 def_builtin (target_flags
, "__builtin_spe_evldhx", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDHX
);
6137 def_builtin (target_flags
, "__builtin_spe_evlwhex", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHEX
);
6138 def_builtin (target_flags
, "__builtin_spe_evlwhoux", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOUX
);
6139 def_builtin (target_flags
, "__builtin_spe_evlwhosx", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOSX
);
6140 def_builtin (target_flags
, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWWSPLATX
);
6141 def_builtin (target_flags
, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHSPLATX
);
6142 def_builtin (target_flags
, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHESPLATX
);
6143 def_builtin (target_flags
, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOUSPLATX
);
6144 def_builtin (target_flags
, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOSSPLATX
);
6145 def_builtin (target_flags
, "__builtin_spe_evldd", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDD
);
6146 def_builtin (target_flags
, "__builtin_spe_evldw", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDW
);
6147 def_builtin (target_flags
, "__builtin_spe_evldh", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDH
);
6148 def_builtin (target_flags
, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHESPLAT
);
6149 def_builtin (target_flags
, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOSSPLAT
);
6150 def_builtin (target_flags
, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOUSPLAT
);
6151 def_builtin (target_flags
, "__builtin_spe_evlwhe", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHE
);
6152 def_builtin (target_flags
, "__builtin_spe_evlwhos", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOS
);
6153 def_builtin (target_flags
, "__builtin_spe_evlwhou", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOU
);
6154 def_builtin (target_flags
, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHSPLAT
);
6155 def_builtin (target_flags
, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWWSPLAT
);
6158 d
= (struct builtin_description
*) bdesc_spe_predicates
;
6159 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_predicates
); ++i
, d
++)
6163 switch (insn_data
[d
->icode
].operand
[1].mode
)
6166 type
= int_ftype_int_v2si_v2si
;
6169 type
= int_ftype_int_v2sf_v2sf
;
6175 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
6178 /* Evsel predicates. */
6179 d
= (struct builtin_description
*) bdesc_spe_evsel
;
6180 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_evsel
); ++i
, d
++)
6184 switch (insn_data
[d
->icode
].operand
[1].mode
)
6187 type
= v2si_ftype_4_v2si
;
6190 type
= v2sf_ftype_4_v2sf
;
6196 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
6201 altivec_init_builtins ()
6203 struct builtin_description
*d
;
6204 struct builtin_description_predicates
*dp
;
6206 tree pfloat_type_node
= build_pointer_type (float_type_node
);
6207 tree pint_type_node
= build_pointer_type (integer_type_node
);
6208 tree pshort_type_node
= build_pointer_type (short_integer_type_node
);
6209 tree pchar_type_node
= build_pointer_type (char_type_node
);
6211 tree pvoid_type_node
= build_pointer_type (void_type_node
);
6213 tree pcfloat_type_node
= build_pointer_type (build_qualified_type (float_type_node
, TYPE_QUAL_CONST
));
6214 tree pcint_type_node
= build_pointer_type (build_qualified_type (integer_type_node
, TYPE_QUAL_CONST
));
6215 tree pcshort_type_node
= build_pointer_type (build_qualified_type (short_integer_type_node
, TYPE_QUAL_CONST
));
6216 tree pcchar_type_node
= build_pointer_type (build_qualified_type (char_type_node
, TYPE_QUAL_CONST
));
6218 tree pcvoid_type_node
= build_pointer_type (build_qualified_type (void_type_node
, TYPE_QUAL_CONST
));
6220 tree int_ftype_int_v4si_v4si
6221 = build_function_type_list (integer_type_node
,
6222 integer_type_node
, V4SI_type_node
,
6223 V4SI_type_node
, NULL_TREE
);
6224 tree v4sf_ftype_pcfloat
6225 = build_function_type_list (V4SF_type_node
, pcfloat_type_node
, NULL_TREE
);
6226 tree void_ftype_pfloat_v4sf
6227 = build_function_type_list (void_type_node
,
6228 pfloat_type_node
, V4SF_type_node
, NULL_TREE
);
6229 tree v4si_ftype_pcint
6230 = build_function_type_list (V4SI_type_node
, pcint_type_node
, NULL_TREE
);
6231 tree void_ftype_pint_v4si
6232 = build_function_type_list (void_type_node
,
6233 pint_type_node
, V4SI_type_node
, NULL_TREE
);
6234 tree v8hi_ftype_pcshort
6235 = build_function_type_list (V8HI_type_node
, pcshort_type_node
, NULL_TREE
);
6236 tree void_ftype_pshort_v8hi
6237 = build_function_type_list (void_type_node
,
6238 pshort_type_node
, V8HI_type_node
, NULL_TREE
);
6239 tree v16qi_ftype_pcchar
6240 = build_function_type_list (V16QI_type_node
, pcchar_type_node
, NULL_TREE
);
6241 tree void_ftype_pchar_v16qi
6242 = build_function_type_list (void_type_node
,
6243 pchar_type_node
, V16QI_type_node
, NULL_TREE
);
6244 tree void_ftype_v4si
6245 = build_function_type_list (void_type_node
, V4SI_type_node
, NULL_TREE
);
6246 tree v8hi_ftype_void
6247 = build_function_type (V8HI_type_node
, void_list_node
);
6248 tree void_ftype_void
6249 = build_function_type (void_type_node
, void_list_node
);
6251 = build_function_type_list (void_type_node
, char_type_node
, NULL_TREE
);
6253 tree v16qi_ftype_int_pcvoid
6254 = build_function_type_list (V16QI_type_node
,
6255 integer_type_node
, pcvoid_type_node
, NULL_TREE
);
6256 tree v8hi_ftype_int_pcvoid
6257 = build_function_type_list (V8HI_type_node
,
6258 integer_type_node
, pcvoid_type_node
, NULL_TREE
);
6259 tree v4si_ftype_int_pcvoid
6260 = build_function_type_list (V4SI_type_node
,
6261 integer_type_node
, pcvoid_type_node
, NULL_TREE
);
6263 tree void_ftype_v4si_int_pvoid
6264 = build_function_type_list (void_type_node
,
6265 V4SI_type_node
, integer_type_node
,
6266 pvoid_type_node
, NULL_TREE
);
6267 tree void_ftype_v16qi_int_pvoid
6268 = build_function_type_list (void_type_node
,
6269 V16QI_type_node
, integer_type_node
,
6270 pvoid_type_node
, NULL_TREE
);
6271 tree void_ftype_v8hi_int_pvoid
6272 = build_function_type_list (void_type_node
,
6273 V8HI_type_node
, integer_type_node
,
6274 pvoid_type_node
, NULL_TREE
);
6275 tree int_ftype_int_v8hi_v8hi
6276 = build_function_type_list (integer_type_node
,
6277 integer_type_node
, V8HI_type_node
,
6278 V8HI_type_node
, NULL_TREE
);
6279 tree int_ftype_int_v16qi_v16qi
6280 = build_function_type_list (integer_type_node
,
6281 integer_type_node
, V16QI_type_node
,
6282 V16QI_type_node
, NULL_TREE
);
6283 tree int_ftype_int_v4sf_v4sf
6284 = build_function_type_list (integer_type_node
,
6285 integer_type_node
, V4SF_type_node
,
6286 V4SF_type_node
, NULL_TREE
);
6287 tree v4si_ftype_v4si
6288 = build_function_type_list (V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
6289 tree v8hi_ftype_v8hi
6290 = build_function_type_list (V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
6291 tree v16qi_ftype_v16qi
6292 = build_function_type_list (V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
6293 tree v4sf_ftype_v4sf
6294 = build_function_type_list (V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
6295 tree void_ftype_pcvoid_int_char
6296 = build_function_type_list (void_type_node
,
6297 pcvoid_type_node
, integer_type_node
,
6298 char_type_node
, NULL_TREE
);
6300 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat
,
6301 ALTIVEC_BUILTIN_LD_INTERNAL_4sf
);
6302 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf
,
6303 ALTIVEC_BUILTIN_ST_INTERNAL_4sf
);
6304 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint
,
6305 ALTIVEC_BUILTIN_LD_INTERNAL_4si
);
6306 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si
,
6307 ALTIVEC_BUILTIN_ST_INTERNAL_4si
);
6308 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort
,
6309 ALTIVEC_BUILTIN_LD_INTERNAL_8hi
);
6310 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi
,
6311 ALTIVEC_BUILTIN_ST_INTERNAL_8hi
);
6312 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar
,
6313 ALTIVEC_BUILTIN_LD_INTERNAL_16qi
);
6314 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi
,
6315 ALTIVEC_BUILTIN_ST_INTERNAL_16qi
);
6316 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_mtvscr", void_ftype_v4si
, ALTIVEC_BUILTIN_MTVSCR
);
6317 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_mfvscr", v8hi_ftype_void
, ALTIVEC_BUILTIN_MFVSCR
);
6318 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_dssall", void_ftype_void
, ALTIVEC_BUILTIN_DSSALL
);
6319 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_dss", void_ftype_qi
, ALTIVEC_BUILTIN_DSS
);
6320 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvsl", v16qi_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVSL
);
6321 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvsr", v16qi_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVSR
);
6322 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvebx", v16qi_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVEBX
);
6323 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvehx", v8hi_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVEHX
);
6324 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvewx", v4si_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVEWX
);
6325 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvxl", v4si_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVXL
);
6326 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvx", v4si_ftype_int_pcvoid
, ALTIVEC_BUILTIN_LVX
);
6327 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid
, ALTIVEC_BUILTIN_STVX
);
6328 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid
, ALTIVEC_BUILTIN_STVEWX
);
6329 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid
, ALTIVEC_BUILTIN_STVXL
);
6330 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid
, ALTIVEC_BUILTIN_STVEBX
);
6331 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid
, ALTIVEC_BUILTIN_STVEHX
);
6333 /* Add the DST variants. */
6334 d
= (struct builtin_description
*) bdesc_dst
;
6335 for (i
= 0; i
< ARRAY_SIZE (bdesc_dst
); i
++, d
++)
6336 def_builtin (d
->mask
, d
->name
, void_ftype_pcvoid_int_char
, d
->code
);
6338 /* Initialize the predicates. */
6339 dp
= (struct builtin_description_predicates
*) bdesc_altivec_preds
;
6340 for (i
= 0; i
< ARRAY_SIZE (bdesc_altivec_preds
); i
++, dp
++)
6342 enum machine_mode mode1
;
6345 mode1
= insn_data
[dp
->icode
].operand
[1].mode
;
6350 type
= int_ftype_int_v4si_v4si
;
6353 type
= int_ftype_int_v8hi_v8hi
;
6356 type
= int_ftype_int_v16qi_v16qi
;
6359 type
= int_ftype_int_v4sf_v4sf
;
6365 def_builtin (dp
->mask
, dp
->name
, type
, dp
->code
);
6368 /* Initialize the abs* operators. */
6369 d
= (struct builtin_description
*) bdesc_abs
;
6370 for (i
= 0; i
< ARRAY_SIZE (bdesc_abs
); i
++, d
++)
6372 enum machine_mode mode0
;
6375 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
6380 type
= v4si_ftype_v4si
;
6383 type
= v8hi_ftype_v8hi
;
6386 type
= v16qi_ftype_v16qi
;
6389 type
= v4sf_ftype_v4sf
;
6395 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
6400 rs6000_common_init_builtins ()
6402 struct builtin_description
*d
;
6405 tree v4sf_ftype_v4sf_v4sf_v16qi
6406 = build_function_type_list (V4SF_type_node
,
6407 V4SF_type_node
, V4SF_type_node
,
6408 V16QI_type_node
, NULL_TREE
);
6409 tree v4si_ftype_v4si_v4si_v16qi
6410 = build_function_type_list (V4SI_type_node
,
6411 V4SI_type_node
, V4SI_type_node
,
6412 V16QI_type_node
, NULL_TREE
);
6413 tree v8hi_ftype_v8hi_v8hi_v16qi
6414 = build_function_type_list (V8HI_type_node
,
6415 V8HI_type_node
, V8HI_type_node
,
6416 V16QI_type_node
, NULL_TREE
);
6417 tree v16qi_ftype_v16qi_v16qi_v16qi
6418 = build_function_type_list (V16QI_type_node
,
6419 V16QI_type_node
, V16QI_type_node
,
6420 V16QI_type_node
, NULL_TREE
);
6421 tree v4si_ftype_char
6422 = build_function_type_list (V4SI_type_node
, char_type_node
, NULL_TREE
);
6423 tree v8hi_ftype_char
6424 = build_function_type_list (V8HI_type_node
, char_type_node
, NULL_TREE
);
6425 tree v16qi_ftype_char
6426 = build_function_type_list (V16QI_type_node
, char_type_node
, NULL_TREE
);
6427 tree v8hi_ftype_v16qi
6428 = build_function_type_list (V8HI_type_node
, V16QI_type_node
, NULL_TREE
);
6429 tree v4sf_ftype_v4sf
6430 = build_function_type_list (V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
6432 tree v2si_ftype_v2si_v2si
6433 = build_function_type_list (opaque_V2SI_type_node
,
6434 opaque_V2SI_type_node
,
6435 opaque_V2SI_type_node
, NULL_TREE
);
6437 tree v2sf_ftype_v2sf_v2sf
6438 = build_function_type_list (opaque_V2SF_type_node
,
6439 opaque_V2SF_type_node
,
6440 opaque_V2SF_type_node
, NULL_TREE
);
6442 tree v2si_ftype_int_int
6443 = build_function_type_list (opaque_V2SI_type_node
,
6444 integer_type_node
, integer_type_node
,
6447 tree v2si_ftype_v2si
6448 = build_function_type_list (opaque_V2SI_type_node
,
6449 opaque_V2SI_type_node
, NULL_TREE
);
6451 tree v2sf_ftype_v2sf
6452 = build_function_type_list (opaque_V2SF_type_node
,
6453 opaque_V2SF_type_node
, NULL_TREE
);
6455 tree v2sf_ftype_v2si
6456 = build_function_type_list (opaque_V2SF_type_node
,
6457 opaque_V2SI_type_node
, NULL_TREE
);
6459 tree v2si_ftype_v2sf
6460 = build_function_type_list (opaque_V2SI_type_node
,
6461 opaque_V2SF_type_node
, NULL_TREE
);
6463 tree v2si_ftype_v2si_char
6464 = build_function_type_list (opaque_V2SI_type_node
,
6465 opaque_V2SI_type_node
,
6466 char_type_node
, NULL_TREE
);
6468 tree v2si_ftype_int_char
6469 = build_function_type_list (opaque_V2SI_type_node
,
6470 integer_type_node
, char_type_node
, NULL_TREE
);
6472 tree v2si_ftype_char
6473 = build_function_type_list (opaque_V2SI_type_node
,
6474 char_type_node
, NULL_TREE
);
6476 tree int_ftype_int_int
6477 = build_function_type_list (integer_type_node
,
6478 integer_type_node
, integer_type_node
,
6481 tree v4si_ftype_v4si_v4si
6482 = build_function_type_list (V4SI_type_node
,
6483 V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
6484 tree v4sf_ftype_v4si_char
6485 = build_function_type_list (V4SF_type_node
,
6486 V4SI_type_node
, char_type_node
, NULL_TREE
);
6487 tree v4si_ftype_v4sf_char
6488 = build_function_type_list (V4SI_type_node
,
6489 V4SF_type_node
, char_type_node
, NULL_TREE
);
6490 tree v4si_ftype_v4si_char
6491 = build_function_type_list (V4SI_type_node
,
6492 V4SI_type_node
, char_type_node
, NULL_TREE
);
6493 tree v8hi_ftype_v8hi_char
6494 = build_function_type_list (V8HI_type_node
,
6495 V8HI_type_node
, char_type_node
, NULL_TREE
);
6496 tree v16qi_ftype_v16qi_char
6497 = build_function_type_list (V16QI_type_node
,
6498 V16QI_type_node
, char_type_node
, NULL_TREE
);
6499 tree v16qi_ftype_v16qi_v16qi_char
6500 = build_function_type_list (V16QI_type_node
,
6501 V16QI_type_node
, V16QI_type_node
,
6502 char_type_node
, NULL_TREE
);
6503 tree v8hi_ftype_v8hi_v8hi_char
6504 = build_function_type_list (V8HI_type_node
,
6505 V8HI_type_node
, V8HI_type_node
,
6506 char_type_node
, NULL_TREE
);
6507 tree v4si_ftype_v4si_v4si_char
6508 = build_function_type_list (V4SI_type_node
,
6509 V4SI_type_node
, V4SI_type_node
,
6510 char_type_node
, NULL_TREE
);
6511 tree v4sf_ftype_v4sf_v4sf_char
6512 = build_function_type_list (V4SF_type_node
,
6513 V4SF_type_node
, V4SF_type_node
,
6514 char_type_node
, NULL_TREE
);
6515 tree v4sf_ftype_v4sf_v4sf
6516 = build_function_type_list (V4SF_type_node
,
6517 V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
6518 tree v4sf_ftype_v4sf_v4sf_v4si
6519 = build_function_type_list (V4SF_type_node
,
6520 V4SF_type_node
, V4SF_type_node
,
6521 V4SI_type_node
, NULL_TREE
);
6522 tree v4sf_ftype_v4sf_v4sf_v4sf
6523 = build_function_type_list (V4SF_type_node
,
6524 V4SF_type_node
, V4SF_type_node
,
6525 V4SF_type_node
, NULL_TREE
);
6526 tree v4si_ftype_v4si_v4si_v4si
6527 = build_function_type_list (V4SI_type_node
,
6528 V4SI_type_node
, V4SI_type_node
,
6529 V4SI_type_node
, NULL_TREE
);
6530 tree v8hi_ftype_v8hi_v8hi
6531 = build_function_type_list (V8HI_type_node
,
6532 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
6533 tree v8hi_ftype_v8hi_v8hi_v8hi
6534 = build_function_type_list (V8HI_type_node
,
6535 V8HI_type_node
, V8HI_type_node
,
6536 V8HI_type_node
, NULL_TREE
);
6537 tree v4si_ftype_v8hi_v8hi_v4si
6538 = build_function_type_list (V4SI_type_node
,
6539 V8HI_type_node
, V8HI_type_node
,
6540 V4SI_type_node
, NULL_TREE
);
6541 tree v4si_ftype_v16qi_v16qi_v4si
6542 = build_function_type_list (V4SI_type_node
,
6543 V16QI_type_node
, V16QI_type_node
,
6544 V4SI_type_node
, NULL_TREE
);
6545 tree v16qi_ftype_v16qi_v16qi
6546 = build_function_type_list (V16QI_type_node
,
6547 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
6548 tree v4si_ftype_v4sf_v4sf
6549 = build_function_type_list (V4SI_type_node
,
6550 V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
6551 tree v8hi_ftype_v16qi_v16qi
6552 = build_function_type_list (V8HI_type_node
,
6553 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
6554 tree v4si_ftype_v8hi_v8hi
6555 = build_function_type_list (V4SI_type_node
,
6556 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
6557 tree v8hi_ftype_v4si_v4si
6558 = build_function_type_list (V8HI_type_node
,
6559 V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
6560 tree v16qi_ftype_v8hi_v8hi
6561 = build_function_type_list (V16QI_type_node
,
6562 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
6563 tree v4si_ftype_v16qi_v4si
6564 = build_function_type_list (V4SI_type_node
,
6565 V16QI_type_node
, V4SI_type_node
, NULL_TREE
);
6566 tree v4si_ftype_v16qi_v16qi
6567 = build_function_type_list (V4SI_type_node
,
6568 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
6569 tree v4si_ftype_v8hi_v4si
6570 = build_function_type_list (V4SI_type_node
,
6571 V8HI_type_node
, V4SI_type_node
, NULL_TREE
);
6572 tree v4si_ftype_v8hi
6573 = build_function_type_list (V4SI_type_node
, V8HI_type_node
, NULL_TREE
);
6574 tree int_ftype_v4si_v4si
6575 = build_function_type_list (integer_type_node
,
6576 V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
6577 tree int_ftype_v4sf_v4sf
6578 = build_function_type_list (integer_type_node
,
6579 V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
6580 tree int_ftype_v16qi_v16qi
6581 = build_function_type_list (integer_type_node
,
6582 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
6583 tree int_ftype_v8hi_v8hi
6584 = build_function_type_list (integer_type_node
,
6585 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
6587 /* Add the simple ternary operators. */
6588 d
= (struct builtin_description
*) bdesc_3arg
;
6589 for (i
= 0; i
< ARRAY_SIZE (bdesc_3arg
); i
++, d
++)
6592 enum machine_mode mode0
, mode1
, mode2
, mode3
;
6595 if (d
->name
== 0 || d
->icode
== CODE_FOR_nothing
)
6598 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
6599 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
6600 mode2
= insn_data
[d
->icode
].operand
[2].mode
;
6601 mode3
= insn_data
[d
->icode
].operand
[3].mode
;
6603 /* When all four are of the same mode. */
6604 if (mode0
== mode1
&& mode1
== mode2
&& mode2
== mode3
)
6609 type
= v4si_ftype_v4si_v4si_v4si
;
6612 type
= v4sf_ftype_v4sf_v4sf_v4sf
;
6615 type
= v8hi_ftype_v8hi_v8hi_v8hi
;
6618 type
= v16qi_ftype_v16qi_v16qi_v16qi
;
6624 else if (mode0
== mode1
&& mode1
== mode2
&& mode3
== V16QImode
)
6629 type
= v4si_ftype_v4si_v4si_v16qi
;
6632 type
= v4sf_ftype_v4sf_v4sf_v16qi
;
6635 type
= v8hi_ftype_v8hi_v8hi_v16qi
;
6638 type
= v16qi_ftype_v16qi_v16qi_v16qi
;
6644 else if (mode0
== V4SImode
&& mode1
== V16QImode
&& mode2
== V16QImode
6645 && mode3
== V4SImode
)
6646 type
= v4si_ftype_v16qi_v16qi_v4si
;
6647 else if (mode0
== V4SImode
&& mode1
== V8HImode
&& mode2
== V8HImode
6648 && mode3
== V4SImode
)
6649 type
= v4si_ftype_v8hi_v8hi_v4si
;
6650 else if (mode0
== V4SFmode
&& mode1
== V4SFmode
&& mode2
== V4SFmode
6651 && mode3
== V4SImode
)
6652 type
= v4sf_ftype_v4sf_v4sf_v4si
;
6654 /* vchar, vchar, vchar, 4 bit literal. */
6655 else if (mode0
== V16QImode
&& mode1
== mode0
&& mode2
== mode0
6657 type
= v16qi_ftype_v16qi_v16qi_char
;
6659 /* vshort, vshort, vshort, 4 bit literal. */
6660 else if (mode0
== V8HImode
&& mode1
== mode0
&& mode2
== mode0
6662 type
= v8hi_ftype_v8hi_v8hi_char
;
6664 /* vint, vint, vint, 4 bit literal. */
6665 else if (mode0
== V4SImode
&& mode1
== mode0
&& mode2
== mode0
6667 type
= v4si_ftype_v4si_v4si_char
;
6669 /* vfloat, vfloat, vfloat, 4 bit literal. */
6670 else if (mode0
== V4SFmode
&& mode1
== mode0
&& mode2
== mode0
6672 type
= v4sf_ftype_v4sf_v4sf_char
;
6677 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
6680 /* Add the simple binary operators. */
6681 d
= (struct builtin_description
*) bdesc_2arg
;
6682 for (i
= 0; i
< ARRAY_SIZE (bdesc_2arg
); i
++, d
++)
6684 enum machine_mode mode0
, mode1
, mode2
;
6687 if (d
->name
== 0 || d
->icode
== CODE_FOR_nothing
)
6690 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
6691 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
6692 mode2
= insn_data
[d
->icode
].operand
[2].mode
;
6694 /* When all three operands are of the same mode. */
6695 if (mode0
== mode1
&& mode1
== mode2
)
6700 type
= v4sf_ftype_v4sf_v4sf
;
6703 type
= v4si_ftype_v4si_v4si
;
6706 type
= v16qi_ftype_v16qi_v16qi
;
6709 type
= v8hi_ftype_v8hi_v8hi
;
6712 type
= v2si_ftype_v2si_v2si
;
6715 type
= v2sf_ftype_v2sf_v2sf
;
6718 type
= int_ftype_int_int
;
6725 /* A few other combos we really don't want to do manually. */
6727 /* vint, vfloat, vfloat. */
6728 else if (mode0
== V4SImode
&& mode1
== V4SFmode
&& mode2
== V4SFmode
)
6729 type
= v4si_ftype_v4sf_v4sf
;
6731 /* vshort, vchar, vchar. */
6732 else if (mode0
== V8HImode
&& mode1
== V16QImode
&& mode2
== V16QImode
)
6733 type
= v8hi_ftype_v16qi_v16qi
;
6735 /* vint, vshort, vshort. */
6736 else if (mode0
== V4SImode
&& mode1
== V8HImode
&& mode2
== V8HImode
)
6737 type
= v4si_ftype_v8hi_v8hi
;
6739 /* vshort, vint, vint. */
6740 else if (mode0
== V8HImode
&& mode1
== V4SImode
&& mode2
== V4SImode
)
6741 type
= v8hi_ftype_v4si_v4si
;
6743 /* vchar, vshort, vshort. */
6744 else if (mode0
== V16QImode
&& mode1
== V8HImode
&& mode2
== V8HImode
)
6745 type
= v16qi_ftype_v8hi_v8hi
;
6747 /* vint, vchar, vint. */
6748 else if (mode0
== V4SImode
&& mode1
== V16QImode
&& mode2
== V4SImode
)
6749 type
= v4si_ftype_v16qi_v4si
;
6751 /* vint, vchar, vchar. */
6752 else if (mode0
== V4SImode
&& mode1
== V16QImode
&& mode2
== V16QImode
)
6753 type
= v4si_ftype_v16qi_v16qi
;
6755 /* vint, vshort, vint. */
6756 else if (mode0
== V4SImode
&& mode1
== V8HImode
&& mode2
== V4SImode
)
6757 type
= v4si_ftype_v8hi_v4si
;
6759 /* vint, vint, 5 bit literal. */
6760 else if (mode0
== V4SImode
&& mode1
== V4SImode
&& mode2
== QImode
)
6761 type
= v4si_ftype_v4si_char
;
6763 /* vshort, vshort, 5 bit literal. */
6764 else if (mode0
== V8HImode
&& mode1
== V8HImode
&& mode2
== QImode
)
6765 type
= v8hi_ftype_v8hi_char
;
6767 /* vchar, vchar, 5 bit literal. */
6768 else if (mode0
== V16QImode
&& mode1
== V16QImode
&& mode2
== QImode
)
6769 type
= v16qi_ftype_v16qi_char
;
6771 /* vfloat, vint, 5 bit literal. */
6772 else if (mode0
== V4SFmode
&& mode1
== V4SImode
&& mode2
== QImode
)
6773 type
= v4sf_ftype_v4si_char
;
6775 /* vint, vfloat, 5 bit literal. */
6776 else if (mode0
== V4SImode
&& mode1
== V4SFmode
&& mode2
== QImode
)
6777 type
= v4si_ftype_v4sf_char
;
6779 else if (mode0
== V2SImode
&& mode1
== SImode
&& mode2
== SImode
)
6780 type
= v2si_ftype_int_int
;
6782 else if (mode0
== V2SImode
&& mode1
== V2SImode
&& mode2
== QImode
)
6783 type
= v2si_ftype_v2si_char
;
6785 else if (mode0
== V2SImode
&& mode1
== SImode
&& mode2
== QImode
)
6786 type
= v2si_ftype_int_char
;
6789 else if (mode0
== SImode
)
6794 type
= int_ftype_v4si_v4si
;
6797 type
= int_ftype_v4sf_v4sf
;
6800 type
= int_ftype_v16qi_v16qi
;
6803 type
= int_ftype_v8hi_v8hi
;
6813 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
6816 /* Add the simple unary operators. */
6817 d
= (struct builtin_description
*) bdesc_1arg
;
6818 for (i
= 0; i
< ARRAY_SIZE (bdesc_1arg
); i
++, d
++)
6820 enum machine_mode mode0
, mode1
;
6823 if (d
->name
== 0 || d
->icode
== CODE_FOR_nothing
)
6826 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
6827 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
6829 if (mode0
== V4SImode
&& mode1
== QImode
)
6830 type
= v4si_ftype_char
;
6831 else if (mode0
== V8HImode
&& mode1
== QImode
)
6832 type
= v8hi_ftype_char
;
6833 else if (mode0
== V16QImode
&& mode1
== QImode
)
6834 type
= v16qi_ftype_char
;
6835 else if (mode0
== V4SFmode
&& mode1
== V4SFmode
)
6836 type
= v4sf_ftype_v4sf
;
6837 else if (mode0
== V8HImode
&& mode1
== V16QImode
)
6838 type
= v8hi_ftype_v16qi
;
6839 else if (mode0
== V4SImode
&& mode1
== V8HImode
)
6840 type
= v4si_ftype_v8hi
;
6841 else if (mode0
== V2SImode
&& mode1
== V2SImode
)
6842 type
= v2si_ftype_v2si
;
6843 else if (mode0
== V2SFmode
&& mode1
== V2SFmode
)
6844 type
= v2sf_ftype_v2sf
;
6845 else if (mode0
== V2SFmode
&& mode1
== V2SImode
)
6846 type
= v2sf_ftype_v2si
;
6847 else if (mode0
== V2SImode
&& mode1
== V2SFmode
)
6848 type
= v2si_ftype_v2sf
;
6849 else if (mode0
== V2SImode
&& mode1
== QImode
)
6850 type
= v2si_ftype_char
;
6854 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
6859 /* Expand a block move operation, and return 1 if successful. Return 0
6860 if we should let the compiler generate normal code.
6862 operands[0] is the destination
6863 operands[1] is the source
6864 operands[2] is the length
6865 operands[3] is the alignment */
6867 #define MAX_MOVE_REG 4
6870 expand_block_move (operands
)
6873 rtx orig_dest
= operands
[0];
6874 rtx orig_src
= operands
[1];
6875 rtx bytes_rtx
= operands
[2];
6876 rtx align_rtx
= operands
[3];
6877 int constp
= (GET_CODE (bytes_rtx
) == CONST_INT
);
6883 /* If this is not a fixed size move, just call memcpy */
6887 /* If this is not a fixed size alignment, abort */
6888 if (GET_CODE (align_rtx
) != CONST_INT
)
6890 align
= INTVAL (align_rtx
);
6892 /* Anything to move? */
6893 bytes
= INTVAL (bytes_rtx
);
6897 /* store_one_arg depends on expand_block_move to handle at least the size of
6898 reg_parm_stack_space. */
6899 if (bytes
> (TARGET_POWERPC64
? 64 : 32))
6902 if (TARGET_STRING
) /* string instructions are available */
6904 for (offset
= 0; bytes
> 0; offset
+= move_bytes
, bytes
-= move_bytes
)
6907 rtx (*movstrsi
) PARAMS ((rtx
, rtx
, rtx
, rtx
));
6908 rtx (*mov
) PARAMS ((rtx
, rtx
));
6910 enum machine_mode mode
= BLKmode
;
6913 if (bytes
> 24 /* move up to 32 bytes at a time */
6921 && ! fixed_regs
[12])
6923 move_bytes
= (bytes
> 32) ? 32 : bytes
;
6924 gen_func
.movstrsi
= gen_movstrsi_8reg
;
6926 else if (bytes
> 16 /* move up to 24 bytes at a time */
6932 && ! fixed_regs
[10])
6934 move_bytes
= (bytes
> 24) ? 24 : bytes
;
6935 gen_func
.movstrsi
= gen_movstrsi_6reg
;
6937 else if (bytes
> 8 /* move up to 16 bytes at a time */
6943 move_bytes
= (bytes
> 16) ? 16 : bytes
;
6944 gen_func
.movstrsi
= gen_movstrsi_4reg
;
6946 else if (bytes
>= 8 && TARGET_POWERPC64
6947 /* 64-bit loads and stores require word-aligned
6949 && (align
>= 8 || (! STRICT_ALIGNMENT
&& align
>= 4)))
6953 gen_func
.mov
= gen_movdi
;
6955 else if (bytes
> 4 && !TARGET_POWERPC64
)
6956 { /* move up to 8 bytes at a time */
6957 move_bytes
= (bytes
> 8) ? 8 : bytes
;
6958 gen_func
.movstrsi
= gen_movstrsi_2reg
;
6960 else if (bytes
>= 4 && (align
>= 4 || ! STRICT_ALIGNMENT
))
6961 { /* move 4 bytes */
6964 gen_func
.mov
= gen_movsi
;
6966 else if (bytes
== 2 && (align
>= 2 || ! STRICT_ALIGNMENT
))
6967 { /* move 2 bytes */
6970 gen_func
.mov
= gen_movhi
;
6972 else if (bytes
== 1) /* move 1 byte */
6976 gen_func
.mov
= gen_movqi
;
6979 { /* move up to 4 bytes at a time */
6980 move_bytes
= (bytes
> 4) ? 4 : bytes
;
6981 gen_func
.movstrsi
= gen_movstrsi_1reg
;
6984 src
= adjust_address (orig_src
, mode
, offset
);
6985 dest
= adjust_address (orig_dest
, mode
, offset
);
6987 if (mode
== BLKmode
)
6989 /* Move the address into scratch registers. The movstrsi
6990 patterns require zero offset. */
6991 if (!REG_P (XEXP (src
, 0)))
6993 rtx src_reg
= copy_addr_to_reg (XEXP (src
, 0));
6994 src
= replace_equiv_address (src
, src_reg
);
6996 set_mem_size (src
, GEN_INT (move_bytes
));
6998 if (!REG_P (XEXP (dest
, 0)))
7000 rtx dest_reg
= copy_addr_to_reg (XEXP (dest
, 0));
7001 dest
= replace_equiv_address (dest
, dest_reg
);
7003 set_mem_size (dest
, GEN_INT (move_bytes
));
7005 emit_insn ((*gen_func
.movstrsi
) (dest
, src
,
7006 GEN_INT (move_bytes
& 31),
7011 rtx tmp_reg
= gen_reg_rtx (mode
);
7013 emit_insn ((*gen_func
.mov
) (tmp_reg
, src
));
7014 emit_insn ((*gen_func
.mov
) (dest
, tmp_reg
));
7019 else /* string instructions not available */
7021 rtx stores
[MAX_MOVE_REG
];
7025 for (offset
= 0; bytes
> 0; offset
+= move_bytes
, bytes
-= move_bytes
)
7027 rtx (*gen_mov_func
) PARAMS ((rtx
, rtx
));
7028 enum machine_mode mode
;
7029 rtx src
, dest
, tmp_reg
;
7031 /* Generate the appropriate load and store, saving the stores
7033 if (bytes
>= 8 && TARGET_POWERPC64
7034 /* 64-bit loads and stores require word-aligned
7036 && (align
>= 8 || (! STRICT_ALIGNMENT
&& align
>= 4)))
7040 gen_mov_func
= gen_movdi
;
7042 else if (bytes
>= 4 && (align
>= 4 || ! STRICT_ALIGNMENT
))
7046 gen_mov_func
= gen_movsi
;
7048 else if (bytes
>= 2 && (align
>= 2 || ! STRICT_ALIGNMENT
))
7052 gen_mov_func
= gen_movhi
;
7058 gen_mov_func
= gen_movqi
;
7061 src
= adjust_address (orig_src
, mode
, offset
);
7062 dest
= adjust_address (orig_dest
, mode
, offset
);
7063 tmp_reg
= gen_reg_rtx (mode
);
7065 emit_insn ((*gen_mov_func
) (tmp_reg
, src
));
7066 stores
[num_reg
++] = (*gen_mov_func
) (dest
, tmp_reg
);
7068 if (num_reg
>= MAX_MOVE_REG
)
7070 for (i
= 0; i
< num_reg
; i
++)
7071 emit_insn (stores
[i
]);
7076 for (i
= 0; i
< num_reg
; i
++)
7077 emit_insn (stores
[i
]);
7084 /* Return 1 if OP is a load multiple operation. It is known to be a
7085 PARALLEL and the first section will be tested. */
7088 load_multiple_operation (op
, mode
)
7090 enum machine_mode mode ATTRIBUTE_UNUSED
;
7092 int count
= XVECLEN (op
, 0);
7093 unsigned int dest_regno
;
7097 /* Perform a quick check so we don't blow up below. */
7099 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
7100 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != REG
7101 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != MEM
)
7104 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, 0)));
7105 src_addr
= XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0);
7107 for (i
= 1; i
< count
; i
++)
7109 rtx elt
= XVECEXP (op
, 0, i
);
7111 if (GET_CODE (elt
) != SET
7112 || GET_CODE (SET_DEST (elt
)) != REG
7113 || GET_MODE (SET_DEST (elt
)) != SImode
7114 || REGNO (SET_DEST (elt
)) != dest_regno
+ i
7115 || GET_CODE (SET_SRC (elt
)) != MEM
7116 || GET_MODE (SET_SRC (elt
)) != SImode
7117 || GET_CODE (XEXP (SET_SRC (elt
), 0)) != PLUS
7118 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt
), 0), 0), src_addr
)
7119 || GET_CODE (XEXP (XEXP (SET_SRC (elt
), 0), 1)) != CONST_INT
7120 || INTVAL (XEXP (XEXP (SET_SRC (elt
), 0), 1)) != i
* 4)
7127 /* Similar, but tests for store multiple. Here, the second vector element
7128 is a CLOBBER. It will be tested later. */
7131 store_multiple_operation (op
, mode
)
7133 enum machine_mode mode ATTRIBUTE_UNUSED
;
7135 int count
= XVECLEN (op
, 0) - 1;
7136 unsigned int src_regno
;
7140 /* Perform a quick check so we don't blow up below. */
7142 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
7143 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != MEM
7144 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != REG
)
7147 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, 0)));
7148 dest_addr
= XEXP (SET_DEST (XVECEXP (op
, 0, 0)), 0);
7150 for (i
= 1; i
< count
; i
++)
7152 rtx elt
= XVECEXP (op
, 0, i
+ 1);
7154 if (GET_CODE (elt
) != SET
7155 || GET_CODE (SET_SRC (elt
)) != REG
7156 || GET_MODE (SET_SRC (elt
)) != SImode
7157 || REGNO (SET_SRC (elt
)) != src_regno
+ i
7158 || GET_CODE (SET_DEST (elt
)) != MEM
7159 || GET_MODE (SET_DEST (elt
)) != SImode
7160 || GET_CODE (XEXP (SET_DEST (elt
), 0)) != PLUS
7161 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt
), 0), 0), dest_addr
)
7162 || GET_CODE (XEXP (XEXP (SET_DEST (elt
), 0), 1)) != CONST_INT
7163 || INTVAL (XEXP (XEXP (SET_DEST (elt
), 0), 1)) != i
* 4)
7170 /* Return a string to perform a load_multiple operation.
7171 operands[0] is the vector.
7172 operands[1] is the source address.
7173 operands[2] is the first destination register. */
7176 rs6000_output_load_multiple (operands
)
7179 /* We have to handle the case where the pseudo used to contain the address
7180 is assigned to one of the output registers. */
7182 int words
= XVECLEN (operands
[0], 0);
7185 if (XVECLEN (operands
[0], 0) == 1)
7186 return "{l|lwz} %2,0(%1)";
7188 for (i
= 0; i
< words
; i
++)
7189 if (refers_to_regno_p (REGNO (operands
[2]) + i
,
7190 REGNO (operands
[2]) + i
+ 1, operands
[1], 0))
7194 xop
[0] = GEN_INT (4 * (words
-1));
7195 xop
[1] = operands
[1];
7196 xop
[2] = operands
[2];
7197 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop
);
7202 xop
[0] = GEN_INT (4 * (words
-1));
7203 xop
[1] = operands
[1];
7204 xop
[2] = gen_rtx_REG (SImode
, REGNO (operands
[2]) + 1);
7205 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop
);
7210 for (j
= 0; j
< words
; j
++)
7213 xop
[0] = GEN_INT (j
* 4);
7214 xop
[1] = operands
[1];
7215 xop
[2] = gen_rtx_REG (SImode
, REGNO (operands
[2]) + j
);
7216 output_asm_insn ("{l|lwz} %2,%0(%1)", xop
);
7218 xop
[0] = GEN_INT (i
* 4);
7219 xop
[1] = operands
[1];
7220 output_asm_insn ("{l|lwz} %1,%0(%1)", xop
);
7225 return "{lsi|lswi} %2,%1,%N0";
7228 /* Return 1 for a parallel vrsave operation. */
7231 vrsave_operation (op
, mode
)
7233 enum machine_mode mode ATTRIBUTE_UNUSED
;
7235 int count
= XVECLEN (op
, 0);
7236 unsigned int dest_regno
, src_regno
;
7240 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
7241 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != REG
7242 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != UNSPEC_VOLATILE
)
7245 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, 0)));
7246 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, 0)));
7248 if (dest_regno
!= VRSAVE_REGNO
7249 && src_regno
!= VRSAVE_REGNO
)
7252 for (i
= 1; i
< count
; i
++)
7254 rtx elt
= XVECEXP (op
, 0, i
);
7256 if (GET_CODE (elt
) != CLOBBER
7257 && GET_CODE (elt
) != SET
)
7264 /* Return 1 for an PARALLEL suitable for mtcrf. */
7267 mtcrf_operation (op
, mode
)
7269 enum machine_mode mode ATTRIBUTE_UNUSED
;
7271 int count
= XVECLEN (op
, 0);
7275 /* Perform a quick check so we don't blow up below. */
7277 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
7278 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != UNSPEC
7279 || XVECLEN (SET_SRC (XVECEXP (op
, 0, 0)), 0) != 2)
7281 src_reg
= XVECEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0, 0);
7283 if (GET_CODE (src_reg
) != REG
7284 || GET_MODE (src_reg
) != SImode
7285 || ! INT_REGNO_P (REGNO (src_reg
)))
7288 for (i
= 0; i
< count
; i
++)
7290 rtx exp
= XVECEXP (op
, 0, i
);
7294 if (GET_CODE (exp
) != SET
7295 || GET_CODE (SET_DEST (exp
)) != REG
7296 || GET_MODE (SET_DEST (exp
)) != CCmode
7297 || ! CR_REGNO_P (REGNO (SET_DEST (exp
))))
7299 unspec
= SET_SRC (exp
);
7300 maskval
= 1 << (MAX_CR_REGNO
- REGNO (SET_DEST (exp
)));
7302 if (GET_CODE (unspec
) != UNSPEC
7303 || XINT (unspec
, 1) != UNSPEC_MOVESI_TO_CR
7304 || XVECLEN (unspec
, 0) != 2
7305 || XVECEXP (unspec
, 0, 0) != src_reg
7306 || GET_CODE (XVECEXP (unspec
, 0, 1)) != CONST_INT
7307 || INTVAL (XVECEXP (unspec
, 0, 1)) != maskval
)
7313 /* Return 1 for an PARALLEL suitable for lmw. */
7316 lmw_operation (op
, mode
)
7318 enum machine_mode mode ATTRIBUTE_UNUSED
;
7320 int count
= XVECLEN (op
, 0);
7321 unsigned int dest_regno
;
7323 unsigned int base_regno
;
7324 HOST_WIDE_INT offset
;
7327 /* Perform a quick check so we don't blow up below. */
7329 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
7330 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != REG
7331 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != MEM
)
7334 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, 0)));
7335 src_addr
= XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0);
7338 || count
!= 32 - (int) dest_regno
)
7341 if (legitimate_indirect_address_p (src_addr
, 0))
7344 base_regno
= REGNO (src_addr
);
7345 if (base_regno
== 0)
7348 else if (legitimate_offset_address_p (SImode
, src_addr
, 0))
7350 offset
= INTVAL (XEXP (src_addr
, 1));
7351 base_regno
= REGNO (XEXP (src_addr
, 0));
7356 for (i
= 0; i
< count
; i
++)
7358 rtx elt
= XVECEXP (op
, 0, i
);
7361 HOST_WIDE_INT newoffset
;
7363 if (GET_CODE (elt
) != SET
7364 || GET_CODE (SET_DEST (elt
)) != REG
7365 || GET_MODE (SET_DEST (elt
)) != SImode
7366 || REGNO (SET_DEST (elt
)) != dest_regno
+ i
7367 || GET_CODE (SET_SRC (elt
)) != MEM
7368 || GET_MODE (SET_SRC (elt
)) != SImode
)
7370 newaddr
= XEXP (SET_SRC (elt
), 0);
7371 if (legitimate_indirect_address_p (newaddr
, 0))
7376 else if (legitimate_offset_address_p (SImode
, newaddr
, 0))
7378 addr_reg
= XEXP (newaddr
, 0);
7379 newoffset
= INTVAL (XEXP (newaddr
, 1));
7383 if (REGNO (addr_reg
) != base_regno
7384 || newoffset
!= offset
+ 4 * i
)
7391 /* Return 1 for an PARALLEL suitable for stmw. */
7394 stmw_operation (op
, mode
)
7396 enum machine_mode mode ATTRIBUTE_UNUSED
;
7398 int count
= XVECLEN (op
, 0);
7399 unsigned int src_regno
;
7401 unsigned int base_regno
;
7402 HOST_WIDE_INT offset
;
7405 /* Perform a quick check so we don't blow up below. */
7407 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
7408 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != MEM
7409 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != REG
)
7412 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, 0)));
7413 dest_addr
= XEXP (SET_DEST (XVECEXP (op
, 0, 0)), 0);
7416 || count
!= 32 - (int) src_regno
)
7419 if (legitimate_indirect_address_p (dest_addr
, 0))
7422 base_regno
= REGNO (dest_addr
);
7423 if (base_regno
== 0)
7426 else if (legitimate_offset_address_p (SImode
, dest_addr
, 0))
7428 offset
= INTVAL (XEXP (dest_addr
, 1));
7429 base_regno
= REGNO (XEXP (dest_addr
, 0));
7434 for (i
= 0; i
< count
; i
++)
7436 rtx elt
= XVECEXP (op
, 0, i
);
7439 HOST_WIDE_INT newoffset
;
7441 if (GET_CODE (elt
) != SET
7442 || GET_CODE (SET_SRC (elt
)) != REG
7443 || GET_MODE (SET_SRC (elt
)) != SImode
7444 || REGNO (SET_SRC (elt
)) != src_regno
+ i
7445 || GET_CODE (SET_DEST (elt
)) != MEM
7446 || GET_MODE (SET_DEST (elt
)) != SImode
)
7448 newaddr
= XEXP (SET_DEST (elt
), 0);
7449 if (legitimate_indirect_address_p (newaddr
, 0))
7454 else if (legitimate_offset_address_p (SImode
, newaddr
, 0))
7456 addr_reg
= XEXP (newaddr
, 0);
7457 newoffset
= INTVAL (XEXP (newaddr
, 1));
7461 if (REGNO (addr_reg
) != base_regno
7462 || newoffset
!= offset
+ 4 * i
)
7469 /* A validation routine: say whether CODE, a condition code, and MODE
7470 match. The other alternatives either don't make sense or should
7471 never be generated. */
7474 validate_condition_mode (code
, mode
)
7476 enum machine_mode mode
;
7478 if (GET_RTX_CLASS (code
) != '<'
7479 || GET_MODE_CLASS (mode
) != MODE_CC
)
7482 /* These don't make sense. */
7483 if ((code
== GT
|| code
== LT
|| code
== GE
|| code
== LE
)
7484 && mode
== CCUNSmode
)
7487 if ((code
== GTU
|| code
== LTU
|| code
== GEU
|| code
== LEU
)
7488 && mode
!= CCUNSmode
)
7491 if (mode
!= CCFPmode
7492 && (code
== ORDERED
|| code
== UNORDERED
7493 || code
== UNEQ
|| code
== LTGT
7494 || code
== UNGT
|| code
== UNLT
7495 || code
== UNGE
|| code
== UNLE
))
7498 /* These should never be generated except for
7499 flag_finite_math_only. */
7500 if (mode
== CCFPmode
7501 && ! flag_finite_math_only
7502 && (code
== LE
|| code
== GE
7503 || code
== UNEQ
|| code
== LTGT
7504 || code
== UNGT
|| code
== UNLT
))
7507 /* These are invalid; the information is not there. */
7508 if (mode
== CCEQmode
7509 && code
!= EQ
&& code
!= NE
)
7513 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
7514 We only check the opcode against the mode of the CC value here. */
7517 branch_comparison_operator (op
, mode
)
7519 enum machine_mode mode ATTRIBUTE_UNUSED
;
7521 enum rtx_code code
= GET_CODE (op
);
7522 enum machine_mode cc_mode
;
7524 if (GET_RTX_CLASS (code
) != '<')
7527 cc_mode
= GET_MODE (XEXP (op
, 0));
7528 if (GET_MODE_CLASS (cc_mode
) != MODE_CC
)
7531 validate_condition_mode (code
, cc_mode
);
7536 /* Return 1 if OP is a comparison operation that is valid for a branch
7537 insn and which is true if the corresponding bit in the CC register
7541 branch_positive_comparison_operator (op
, mode
)
7543 enum machine_mode mode
;
7547 if (! branch_comparison_operator (op
, mode
))
7550 code
= GET_CODE (op
);
7551 return (code
== EQ
|| code
== LT
|| code
== GT
7552 || (TARGET_E500
&& TARGET_HARD_FLOAT
&& !TARGET_FPRS
&& code
== NE
)
7553 || code
== LTU
|| code
== GTU
7554 || code
== UNORDERED
);
7557 /* Return 1 if OP is a comparison operation that is valid for an scc
7558 insn: it must be a positive comparison. */
7561 scc_comparison_operator (op
, mode
)
7563 enum machine_mode mode
;
7565 return branch_positive_comparison_operator (op
, mode
);
7569 trap_comparison_operator (op
, mode
)
7571 enum machine_mode mode
;
7573 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
7575 return GET_RTX_CLASS (GET_CODE (op
)) == '<';
7579 boolean_operator (op
, mode
)
7581 enum machine_mode mode ATTRIBUTE_UNUSED
;
7583 enum rtx_code code
= GET_CODE (op
);
7584 return (code
== AND
|| code
== IOR
|| code
== XOR
);
7588 boolean_or_operator (op
, mode
)
7590 enum machine_mode mode ATTRIBUTE_UNUSED
;
7592 enum rtx_code code
= GET_CODE (op
);
7593 return (code
== IOR
|| code
== XOR
);
7597 min_max_operator (op
, mode
)
7599 enum machine_mode mode ATTRIBUTE_UNUSED
;
7601 enum rtx_code code
= GET_CODE (op
);
7602 return (code
== SMIN
|| code
== SMAX
|| code
== UMIN
|| code
== UMAX
);
7605 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
7606 mask required to convert the result of a rotate insn into a shift
7607 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
7610 includes_lshift_p (shiftop
, andop
)
7614 unsigned HOST_WIDE_INT shift_mask
= ~(unsigned HOST_WIDE_INT
) 0;
7616 shift_mask
<<= INTVAL (shiftop
);
7618 return (INTVAL (andop
) & 0xffffffff & ~shift_mask
) == 0;
7621 /* Similar, but for right shift. */
7624 includes_rshift_p (shiftop
, andop
)
7628 unsigned HOST_WIDE_INT shift_mask
= ~(unsigned HOST_WIDE_INT
) 0;
7630 shift_mask
>>= INTVAL (shiftop
);
7632 return (INTVAL (andop
) & 0xffffffff & ~shift_mask
) == 0;
7635 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
7636 to perform a left shift. It must have exactly SHIFTOP least
7637 significant 0's, then one or more 1's, then zero or more 0's. */
7640 includes_rldic_lshift_p (shiftop
, andop
)
7644 if (GET_CODE (andop
) == CONST_INT
)
7646 HOST_WIDE_INT c
, lsb
, shift_mask
;
7649 if (c
== 0 || c
== ~0)
7653 shift_mask
<<= INTVAL (shiftop
);
7655 /* Find the least significant one bit. */
7658 /* It must coincide with the LSB of the shift mask. */
7659 if (-lsb
!= shift_mask
)
7662 /* Invert to look for the next transition (if any). */
7665 /* Remove the low group of ones (originally low group of zeros). */
7668 /* Again find the lsb, and check we have all 1's above. */
7672 else if (GET_CODE (andop
) == CONST_DOUBLE
7673 && (GET_MODE (andop
) == VOIDmode
|| GET_MODE (andop
) == DImode
))
7675 HOST_WIDE_INT low
, high
, lsb
;
7676 HOST_WIDE_INT shift_mask_low
, shift_mask_high
;
7678 low
= CONST_DOUBLE_LOW (andop
);
7679 if (HOST_BITS_PER_WIDE_INT
< 64)
7680 high
= CONST_DOUBLE_HIGH (andop
);
7682 if ((low
== 0 && (HOST_BITS_PER_WIDE_INT
>= 64 || high
== 0))
7683 || (low
== ~0 && (HOST_BITS_PER_WIDE_INT
>= 64 || high
== ~0)))
7686 if (HOST_BITS_PER_WIDE_INT
< 64 && low
== 0)
7688 shift_mask_high
= ~0;
7689 if (INTVAL (shiftop
) > 32)
7690 shift_mask_high
<<= INTVAL (shiftop
) - 32;
7694 if (-lsb
!= shift_mask_high
|| INTVAL (shiftop
) < 32)
7701 return high
== -lsb
;
7704 shift_mask_low
= ~0;
7705 shift_mask_low
<<= INTVAL (shiftop
);
7709 if (-lsb
!= shift_mask_low
)
7712 if (HOST_BITS_PER_WIDE_INT
< 64)
7717 if (HOST_BITS_PER_WIDE_INT
< 64 && low
== 0)
7720 return high
== -lsb
;
7724 return low
== -lsb
&& (HOST_BITS_PER_WIDE_INT
>= 64 || high
== ~0);
7730 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
7731 to perform a left shift. It must have SHIFTOP or more least
7732 signifigant 0's, with the remainder of the word 1's. */
7735 includes_rldicr_lshift_p (shiftop
, andop
)
7739 if (GET_CODE (andop
) == CONST_INT
)
7741 HOST_WIDE_INT c
, lsb
, shift_mask
;
7744 shift_mask
<<= INTVAL (shiftop
);
7747 /* Find the least signifigant one bit. */
7750 /* It must be covered by the shift mask.
7751 This test also rejects c == 0. */
7752 if ((lsb
& shift_mask
) == 0)
7755 /* Check we have all 1's above the transition, and reject all 1's. */
7756 return c
== -lsb
&& lsb
!= 1;
7758 else if (GET_CODE (andop
) == CONST_DOUBLE
7759 && (GET_MODE (andop
) == VOIDmode
|| GET_MODE (andop
) == DImode
))
7761 HOST_WIDE_INT low
, lsb
, shift_mask_low
;
7763 low
= CONST_DOUBLE_LOW (andop
);
7765 if (HOST_BITS_PER_WIDE_INT
< 64)
7767 HOST_WIDE_INT high
, shift_mask_high
;
7769 high
= CONST_DOUBLE_HIGH (andop
);
7773 shift_mask_high
= ~0;
7774 if (INTVAL (shiftop
) > 32)
7775 shift_mask_high
<<= INTVAL (shiftop
) - 32;
7779 if ((lsb
& shift_mask_high
) == 0)
7782 return high
== -lsb
;
7788 shift_mask_low
= ~0;
7789 shift_mask_low
<<= INTVAL (shiftop
);
7793 if ((lsb
& shift_mask_low
) == 0)
7796 return low
== -lsb
&& lsb
!= 1;
7802 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
7803 for lfq and stfq insns.
7805 Note reg1 and reg2 *must* be hard registers. To be sure we will
7806 abort if we are passed pseudo registers. */
7809 registers_ok_for_quad_peep (reg1
, reg2
)
7812 /* We might have been passed a SUBREG. */
7813 if (GET_CODE (reg1
) != REG
|| GET_CODE (reg2
) != REG
)
7816 return (REGNO (reg1
) == REGNO (reg2
) - 1);
7819 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
7820 addr1 and addr2 must be in consecutive memory locations
7821 (addr2 == addr1 + 8). */
7824 addrs_ok_for_quad_peep (addr1
, addr2
)
7831 /* Extract an offset (if used) from the first addr. */
7832 if (GET_CODE (addr1
) == PLUS
)
7834 /* If not a REG, return zero. */
7835 if (GET_CODE (XEXP (addr1
, 0)) != REG
)
7839 reg1
= REGNO (XEXP (addr1
, 0));
7840 /* The offset must be constant! */
7841 if (GET_CODE (XEXP (addr1
, 1)) != CONST_INT
)
7843 offset1
= INTVAL (XEXP (addr1
, 1));
7846 else if (GET_CODE (addr1
) != REG
)
7850 reg1
= REGNO (addr1
);
7851 /* This was a simple (mem (reg)) expression. Offset is 0. */
7855 /* Make sure the second address is a (mem (plus (reg) (const_int)))
7856 or if it is (mem (reg)) then make sure that offset1 is -8 and the same
7857 register as addr1. */
7858 if (offset1
== -8 && GET_CODE (addr2
) == REG
&& reg1
== REGNO (addr2
))
7860 if (GET_CODE (addr2
) != PLUS
)
7863 if (GET_CODE (XEXP (addr2
, 0)) != REG
7864 || GET_CODE (XEXP (addr2
, 1)) != CONST_INT
)
7867 if (reg1
!= REGNO (XEXP (addr2
, 0)))
7870 /* The offset for the second addr must be 8 more than the first addr. */
7871 if (INTVAL (XEXP (addr2
, 1)) != offset1
+ 8)
7874 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
7879 /* Return the register class of a scratch register needed to copy IN into
7880 or out of a register in CLASS in MODE. If it can be done directly,
7881 NO_REGS is returned. */
7884 secondary_reload_class (class, mode
, in
)
7885 enum reg_class
class;
7886 enum machine_mode mode ATTRIBUTE_UNUSED
;
7891 if (TARGET_ELF
|| (DEFAULT_ABI
== ABI_DARWIN
7893 && MACHOPIC_INDIRECT
7897 /* We cannot copy a symbolic operand directly into anything
7898 other than BASE_REGS for TARGET_ELF. So indicate that a
7899 register from BASE_REGS is needed as an intermediate
7902 On Darwin, pic addresses require a load from memory, which
7903 needs a base register. */
7904 if (class != BASE_REGS
7905 && (GET_CODE (in
) == SYMBOL_REF
7906 || GET_CODE (in
) == HIGH
7907 || GET_CODE (in
) == LABEL_REF
7908 || GET_CODE (in
) == CONST
))
7912 if (GET_CODE (in
) == REG
)
7915 if (regno
>= FIRST_PSEUDO_REGISTER
)
7917 regno
= true_regnum (in
);
7918 if (regno
>= FIRST_PSEUDO_REGISTER
)
7922 else if (GET_CODE (in
) == SUBREG
)
7924 regno
= true_regnum (in
);
7925 if (regno
>= FIRST_PSEUDO_REGISTER
)
7931 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
7933 if (class == GENERAL_REGS
|| class == BASE_REGS
7934 || (regno
>= 0 && INT_REGNO_P (regno
)))
7937 /* Constants, memory, and FP registers can go into FP registers. */
7938 if ((regno
== -1 || FP_REGNO_P (regno
))
7939 && (class == FLOAT_REGS
|| class == NON_SPECIAL_REGS
))
7942 /* Memory, and AltiVec registers can go into AltiVec registers. */
7943 if ((regno
== -1 || ALTIVEC_REGNO_P (regno
))
7944 && class == ALTIVEC_REGS
)
7947 /* We can copy among the CR registers. */
7948 if ((class == CR_REGS
|| class == CR0_REGS
)
7949 && regno
>= 0 && CR_REGNO_P (regno
))
7952 /* Otherwise, we need GENERAL_REGS. */
7953 return GENERAL_REGS
;
7956 /* Given a comparison operation, return the bit number in CCR to test. We
7957 know this is a valid comparison.
7959 SCC_P is 1 if this is for an scc. That means that %D will have been
7960 used instead of %C, so the bits will be in different places.
7962 Return -1 if OP isn't a valid comparison for some reason. */
7969 enum rtx_code code
= GET_CODE (op
);
7970 enum machine_mode cc_mode
;
7975 if (GET_RTX_CLASS (code
) != '<')
7980 if (GET_CODE (reg
) != REG
7981 || ! CR_REGNO_P (REGNO (reg
)))
7984 cc_mode
= GET_MODE (reg
);
7985 cc_regnum
= REGNO (reg
);
7986 base_bit
= 4 * (cc_regnum
- CR0_REGNO
);
7988 validate_condition_mode (code
, cc_mode
);
7990 /* When generating a sCOND operation, only positive conditions are
7992 if (scc_p
&& code
!= EQ
&& code
!= GT
&& code
!= LT
&& code
!= UNORDERED
7993 && code
!= GTU
&& code
!= LTU
)
7999 if (TARGET_E500
&& !TARGET_FPRS
8000 && TARGET_HARD_FLOAT
&& cc_mode
== CCFPmode
)
8001 return base_bit
+ 1;
8002 return scc_p
? base_bit
+ 3 : base_bit
+ 2;
8004 if (TARGET_E500
&& !TARGET_FPRS
8005 && TARGET_HARD_FLOAT
&& cc_mode
== CCFPmode
)
8006 return base_bit
+ 1;
8007 return base_bit
+ 2;
8008 case GT
: case GTU
: case UNLE
:
8009 return base_bit
+ 1;
8010 case LT
: case LTU
: case UNGE
:
8012 case ORDERED
: case UNORDERED
:
8013 return base_bit
+ 3;
8016 /* If scc, we will have done a cror to put the bit in the
8017 unordered position. So test that bit. For integer, this is ! LT
8018 unless this is an scc insn. */
8019 return scc_p
? base_bit
+ 3 : base_bit
;
8022 return scc_p
? base_bit
+ 3 : base_bit
+ 1;
8029 /* Return the GOT register. */
8032 rs6000_got_register (value
)
8033 rtx value ATTRIBUTE_UNUSED
;
8035 /* The second flow pass currently (June 1999) can't update
8036 regs_ever_live without disturbing other parts of the compiler, so
8037 update it here to make the prolog/epilogue code happy. */
8038 if (no_new_pseudos
&& ! regs_ever_live
[RS6000_PIC_OFFSET_TABLE_REGNUM
])
8039 regs_ever_live
[RS6000_PIC_OFFSET_TABLE_REGNUM
] = 1;
8041 current_function_uses_pic_offset_table
= 1;
8043 return pic_offset_table_rtx
;
8046 /* Function to init struct machine_function.
8047 This will be called, via a pointer variable,
8048 from push_function_context. */
8050 static struct machine_function
*
8051 rs6000_init_machine_status ()
8053 return ggc_alloc_cleared (sizeof (machine_function
));
/* These macros test for integers and extract the low-order bits.  */

/* Nonzero if X is an integer constant: a CONST_INT, or a VOIDmode
   CONST_DOUBLE holding a wide integer.
   NOTE(review): the macro name line was lost in extraction; `INT_P'
   reconstructed from upstream rs6000.c — verify.  */
#define INT_P(X)							\
  ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE)		\
   && GET_MODE (X) == VOIDmode)

/* Low-order HOST_WIDE_INT's worth of bits of integer constant X.  */
#define INT_LOWPART(X) \
  (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
8069 unsigned long val
= INT_LOWPART (op
);
8071 /* If the high bit is zero, the value is the first 1 bit we find
8073 if ((val
& 0x80000000) == 0)
8075 if ((val
& 0xffffffff) == 0)
8079 while (((val
<<= 1) & 0x80000000) == 0)
8084 /* If the high bit is set and the low bit is not, or the mask is all
8085 1's, the value is zero. */
8086 if ((val
& 1) == 0 || (val
& 0xffffffff) == 0xffffffff)
8089 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
8092 while (((val
>>= 1) & 1) != 0)
8103 unsigned long val
= INT_LOWPART (op
);
8105 /* If the low bit is zero, the value is the first 1 bit we find from
8109 if ((val
& 0xffffffff) == 0)
8113 while (((val
>>= 1) & 1) == 0)
8119 /* If the low bit is set and the high bit is not, or the mask is all
8120 1's, the value is 31. */
8121 if ((val
& 0x80000000) == 0 || (val
& 0xffffffff) == 0xffffffff)
8124 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
8127 while (((val
<<= 1) & 0x80000000) != 0)
8133 /* Locate some local-dynamic symbol still in use by this function
8134 so that we can print its name in some tls_ld pattern. */
8137 rs6000_get_some_local_dynamic_name ()
8141 if (cfun
->machine
->some_ld_name
)
8142 return cfun
->machine
->some_ld_name
;
8144 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
8146 && for_each_rtx (&PATTERN (insn
),
8147 rs6000_get_some_local_dynamic_name_1
, 0))
8148 return cfun
->machine
->some_ld_name
;
8153 /* Helper function for rs6000_get_some_local_dynamic_name. */
8156 rs6000_get_some_local_dynamic_name_1 (px
, data
)
8158 void *data ATTRIBUTE_UNUSED
;
8162 if (GET_CODE (x
) == SYMBOL_REF
)
8164 const char *str
= XSTR (x
, 0);
8165 if (SYMBOL_REF_TLS_MODEL (x
) == TLS_MODEL_LOCAL_DYNAMIC
)
8167 cfun
->machine
->some_ld_name
= str
;
8175 /* Print an operand. Recognize special options, documented below. */
8178 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
8179 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
8181 #define SMALL_DATA_RELOC "sda21"
8182 #define SMALL_DATA_REG 0
8186 print_operand (file
, x
, code
)
8193 unsigned HOST_WIDE_INT uval
;
8198 /* Write out an instruction after the call which may be replaced
8199 with glue code by the loader. This depends on the AIX version. */
8200 asm_fprintf (file
, RS6000_CALL_GLUE
);
8203 /* %a is output_address. */
8206 /* If X is a constant integer whose low-order 5 bits are zero,
8207 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
8208 in the AIX assembler where "sri" with a zero shift count
8209 writes a trash instruction. */
8210 if (GET_CODE (x
) == CONST_INT
&& (INTVAL (x
) & 31) == 0)
8217 /* If constant, low-order 16 bits of constant, unsigned.
8218 Otherwise, write normally. */
8220 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 0xffff);
8222 print_operand (file
, x
, 0);
8226 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
8227 for 64-bit mask direction. */
8228 putc (((INT_LOWPART(x
) & 1) == 0 ? 'r' : 'l'), file
);
8231 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
8235 /* X is a CR register. Print the number of the EQ bit of the CR */
8236 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
8237 output_operand_lossage ("invalid %%E value");
8239 fprintf (file
, "%d", 4 * (REGNO (x
) - CR0_REGNO
) + 2);
8243 /* X is a CR register. Print the shift count needed to move it
8244 to the high-order four bits. */
8245 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
8246 output_operand_lossage ("invalid %%f value");
8248 fprintf (file
, "%d", 4 * (REGNO (x
) - CR0_REGNO
));
8252 /* Similar, but print the count for the rotate in the opposite
8254 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
8255 output_operand_lossage ("invalid %%F value");
8257 fprintf (file
, "%d", 32 - 4 * (REGNO (x
) - CR0_REGNO
));
8261 /* X is a constant integer. If it is negative, print "m",
8262 otherwise print "z". This is to make an aze or ame insn. */
8263 if (GET_CODE (x
) != CONST_INT
)
8264 output_operand_lossage ("invalid %%G value");
8265 else if (INTVAL (x
) >= 0)
8272 /* If constant, output low-order five bits. Otherwise, write
8275 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 31);
8277 print_operand (file
, x
, 0);
8281 /* If constant, output low-order six bits. Otherwise, write
8284 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 63);
8286 print_operand (file
, x
, 0);
8290 /* Print `i' if this is a constant, else nothing. */
8296 /* Write the bit number in CCR for jump. */
8299 output_operand_lossage ("invalid %%j code");
8301 fprintf (file
, "%d", i
);
8305 /* Similar, but add one for shift count in rlinm for scc and pass
8306 scc flag to `ccr_bit'. */
8309 output_operand_lossage ("invalid %%J code");
8311 /* If we want bit 31, write a shift count of zero, not 32. */
8312 fprintf (file
, "%d", i
== 31 ? 0 : i
+ 1);
8316 /* X must be a constant. Write the 1's complement of the
8319 output_operand_lossage ("invalid %%k value");
8321 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ~ INT_LOWPART (x
));
8325 /* X must be a symbolic constant on ELF. Write an
8326 expression suitable for an 'addi' that adds in the low 16
8328 if (GET_CODE (x
) != CONST
)
8330 print_operand_address (file
, x
);
8335 if (GET_CODE (XEXP (x
, 0)) != PLUS
8336 || (GET_CODE (XEXP (XEXP (x
, 0), 0)) != SYMBOL_REF
8337 && GET_CODE (XEXP (XEXP (x
, 0), 0)) != LABEL_REF
)
8338 || GET_CODE (XEXP (XEXP (x
, 0), 1)) != CONST_INT
)
8339 output_operand_lossage ("invalid %%K value");
8340 print_operand_address (file
, XEXP (XEXP (x
, 0), 0));
8342 /* For GNU as, there must be a non-alphanumeric character
8343 between 'l' and the number. The '-' is added by
8344 print_operand() already. */
8345 if (INTVAL (XEXP (XEXP (x
, 0), 1)) >= 0)
8347 print_operand (file
, XEXP (XEXP (x
, 0), 1), 0);
8351 /* %l is output_asm_label. */
8354 /* Write second word of DImode or DFmode reference. Works on register
8355 or non-indexed memory only. */
8356 if (GET_CODE (x
) == REG
)
8357 fprintf (file
, "%s", reg_names
[REGNO (x
) + 1]);
8358 else if (GET_CODE (x
) == MEM
)
8360 /* Handle possible auto-increment. Since it is pre-increment and
8361 we have already done it, we can just use an offset of word. */
8362 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
8363 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
8364 output_address (plus_constant (XEXP (XEXP (x
, 0), 0),
8367 output_address (XEXP (adjust_address_nv (x
, SImode
,
8371 if (small_data_operand (x
, GET_MODE (x
)))
8372 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
8373 reg_names
[SMALL_DATA_REG
]);
8378 /* MB value for a mask operand. */
8379 if (! mask_operand (x
, SImode
))
8380 output_operand_lossage ("invalid %%m value");
8382 fprintf (file
, "%d", extract_MB (x
));
8386 /* ME value for a mask operand. */
8387 if (! mask_operand (x
, SImode
))
8388 output_operand_lossage ("invalid %%M value");
8390 fprintf (file
, "%d", extract_ME (x
));
8393 /* %n outputs the negative of its operand. */
8396 /* Write the number of elements in the vector times 4. */
8397 if (GET_CODE (x
) != PARALLEL
)
8398 output_operand_lossage ("invalid %%N value");
8400 fprintf (file
, "%d", XVECLEN (x
, 0) * 4);
8404 /* Similar, but subtract 1 first. */
8405 if (GET_CODE (x
) != PARALLEL
)
8406 output_operand_lossage ("invalid %%O value");
8408 fprintf (file
, "%d", (XVECLEN (x
, 0) - 1) * 4);
8412 /* X is a CONST_INT that is a power of two. Output the logarithm. */
8414 || INT_LOWPART (x
) < 0
8415 || (i
= exact_log2 (INT_LOWPART (x
))) < 0)
8416 output_operand_lossage ("invalid %%p value");
8418 fprintf (file
, "%d", i
);
8422 /* The operand must be an indirect memory reference. The result
8423 is the register number. */
8424 if (GET_CODE (x
) != MEM
|| GET_CODE (XEXP (x
, 0)) != REG
8425 || REGNO (XEXP (x
, 0)) >= 32)
8426 output_operand_lossage ("invalid %%P value");
8428 fprintf (file
, "%d", REGNO (XEXP (x
, 0)));
8432 /* This outputs the logical code corresponding to a boolean
8433 expression. The expression may have one or both operands
8434 negated (if one, only the first one). For condition register
8435 logical operations, it will also treat the negated
8436 CR codes as NOTs, but not handle NOTs of them. */
8438 const char *const *t
= 0;
8440 enum rtx_code code
= GET_CODE (x
);
8441 static const char * const tbl
[3][3] = {
8442 { "and", "andc", "nor" },
8443 { "or", "orc", "nand" },
8444 { "xor", "eqv", "xor" } };
8448 else if (code
== IOR
)
8450 else if (code
== XOR
)
8453 output_operand_lossage ("invalid %%q value");
8455 if (GET_CODE (XEXP (x
, 0)) != NOT
)
8459 if (GET_CODE (XEXP (x
, 1)) == NOT
)
8470 /* X is a CR register. Print the mask for `mtcrf'. */
8471 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
8472 output_operand_lossage ("invalid %%R value");
8474 fprintf (file
, "%d", 128 >> (REGNO (x
) - CR0_REGNO
));
8478 /* Low 5 bits of 32 - value */
8480 output_operand_lossage ("invalid %%s value");
8482 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, (32 - INT_LOWPART (x
)) & 31);
8486 /* PowerPC64 mask position. All 0's is excluded.
8487 CONST_INT 32-bit mask is considered sign-extended so any
8488 transition must occur within the CONST_INT, not on the boundary. */
8489 if (! mask64_operand (x
, DImode
))
8490 output_operand_lossage ("invalid %%S value");
8492 uval
= INT_LOWPART (x
);
8494 if (uval
& 1) /* Clear Left */
8496 #if HOST_BITS_PER_WIDE_INT > 64
8497 uval
&= ((unsigned HOST_WIDE_INT
) 1 << 64) - 1;
8501 else /* Clear Right */
8504 #if HOST_BITS_PER_WIDE_INT > 64
8505 uval
&= ((unsigned HOST_WIDE_INT
) 1 << 64) - 1;
8513 fprintf (file
, "%d", i
);
8517 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
8518 if (GET_CODE (x
) != REG
|| GET_MODE (x
) != CCmode
)
8521 /* Bit 3 is OV bit. */
8522 i
= 4 * (REGNO (x
) - CR0_REGNO
) + 3;
8524 /* If we want bit 31, write a shift count of zero, not 32. */
8525 fprintf (file
, "%d", i
== 31 ? 0 : i
+ 1);
8529 /* Print the symbolic name of a branch target register. */
8530 if (GET_CODE (x
) != REG
|| (REGNO (x
) != LINK_REGISTER_REGNUM
8531 && REGNO (x
) != COUNT_REGISTER_REGNUM
))
8532 output_operand_lossage ("invalid %%T value");
8533 else if (REGNO (x
) == LINK_REGISTER_REGNUM
)
8534 fputs (TARGET_NEW_MNEMONICS
? "lr" : "r", file
);
8536 fputs ("ctr", file
);
8540 /* High-order 16 bits of constant for use in unsigned operand. */
8542 output_operand_lossage ("invalid %%u value");
8544 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
8545 (INT_LOWPART (x
) >> 16) & 0xffff);
8549 /* High-order 16 bits of constant for use in signed operand. */
8551 output_operand_lossage ("invalid %%v value");
8553 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
8554 (INT_LOWPART (x
) >> 16) & 0xffff);
8558 /* Print `u' if this has an auto-increment or auto-decrement. */
8559 if (GET_CODE (x
) == MEM
8560 && (GET_CODE (XEXP (x
, 0)) == PRE_INC
8561 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
))
8566 /* Print the trap code for this operand. */
8567 switch (GET_CODE (x
))
8570 fputs ("eq", file
); /* 4 */
8573 fputs ("ne", file
); /* 24 */
8576 fputs ("lt", file
); /* 16 */
8579 fputs ("le", file
); /* 20 */
8582 fputs ("gt", file
); /* 8 */
8585 fputs ("ge", file
); /* 12 */
8588 fputs ("llt", file
); /* 2 */
8591 fputs ("lle", file
); /* 6 */
8594 fputs ("lgt", file
); /* 1 */
8597 fputs ("lge", file
); /* 5 */
8605 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
8608 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
,
8609 ((INT_LOWPART (x
) & 0xffff) ^ 0x8000) - 0x8000);
8611 print_operand (file
, x
, 0);
8615 /* MB value for a PowerPC64 rldic operand. */
8616 val
= (GET_CODE (x
) == CONST_INT
8617 ? INTVAL (x
) : CONST_DOUBLE_HIGH (x
));
8622 for (i
= 0; i
< HOST_BITS_PER_WIDE_INT
; i
++)
8623 if ((val
<<= 1) < 0)
8626 #if HOST_BITS_PER_WIDE_INT == 32
8627 if (GET_CODE (x
) == CONST_INT
&& i
>= 0)
8628 i
+= 32; /* zero-extend high-part was all 0's */
8629 else if (GET_CODE (x
) == CONST_DOUBLE
&& i
== 32)
8631 val
= CONST_DOUBLE_LOW (x
);
8638 for ( ; i
< 64; i
++)
8639 if ((val
<<= 1) < 0)
8644 fprintf (file
, "%d", i
+ 1);
8648 if (GET_CODE (x
) == MEM
8649 && legitimate_indexed_address_p (XEXP (x
, 0), 0))
8654 /* Like 'L', for third word of TImode */
8655 if (GET_CODE (x
) == REG
)
8656 fprintf (file
, "%s", reg_names
[REGNO (x
) + 2]);
8657 else if (GET_CODE (x
) == MEM
)
8659 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
8660 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
8661 output_address (plus_constant (XEXP (XEXP (x
, 0), 0), 8));
8663 output_address (XEXP (adjust_address_nv (x
, SImode
, 8), 0));
8664 if (small_data_operand (x
, GET_MODE (x
)))
8665 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
8666 reg_names
[SMALL_DATA_REG
]);
8671 /* X is a SYMBOL_REF. Write out the name preceded by a
8672 period and without any trailing data in brackets. Used for function
8673 names. If we are configured for System V (or the embedded ABI) on
8674 the PowerPC, do not emit the period, since those systems do not use
8675 TOCs and the like. */
8676 if (GET_CODE (x
) != SYMBOL_REF
)
8679 if (XSTR (x
, 0)[0] != '.')
8681 switch (DEFAULT_ABI
)
8696 RS6000_OUTPUT_BASENAME (file
, XSTR (x
, 0));
8698 assemble_name (file
, XSTR (x
, 0));
8703 /* Like 'L', for last word of TImode. */
8704 if (GET_CODE (x
) == REG
)
8705 fprintf (file
, "%s", reg_names
[REGNO (x
) + 3]);
8706 else if (GET_CODE (x
) == MEM
)
8708 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
8709 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
8710 output_address (plus_constant (XEXP (XEXP (x
, 0), 0), 12));
8712 output_address (XEXP (adjust_address_nv (x
, SImode
, 12), 0));
8713 if (small_data_operand (x
, GET_MODE (x
)))
8714 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
8715 reg_names
[SMALL_DATA_REG
]);
8719 /* Print AltiVec or SPE memory operand. */
8724 if (GET_CODE (x
) != MEM
)
8732 if (GET_CODE (tmp
) == REG
)
8734 fprintf (file
, "0(%s)", reg_names
[REGNO (tmp
)]);
8737 /* Handle [reg+UIMM]. */
8738 else if (GET_CODE (tmp
) == PLUS
&&
8739 GET_CODE (XEXP (tmp
, 1)) == CONST_INT
)
8743 if (GET_CODE (XEXP (tmp
, 0)) != REG
)
8746 x
= INTVAL (XEXP (tmp
, 1));
8747 fprintf (file
, "%d(%s)", x
, reg_names
[REGNO (XEXP (tmp
, 0))]);
8751 /* Fall through. Must be [reg+reg]. */
8753 if (GET_CODE (tmp
) == REG
)
8754 fprintf (file
, "0,%s", reg_names
[REGNO (tmp
)]);
8755 else if (GET_CODE (tmp
) == PLUS
&& GET_CODE (XEXP (tmp
, 1)) == REG
)
8757 if (REGNO (XEXP (tmp
, 0)) == 0)
8758 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (tmp
, 1)) ],
8759 reg_names
[ REGNO (XEXP (tmp
, 0)) ]);
8761 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (tmp
, 0)) ],
8762 reg_names
[ REGNO (XEXP (tmp
, 1)) ]);
8770 if (GET_CODE (x
) == REG
)
8771 fprintf (file
, "%s", reg_names
[REGNO (x
)]);
8772 else if (GET_CODE (x
) == MEM
)
8774 /* We need to handle PRE_INC and PRE_DEC here, since we need to
8775 know the width from the mode. */
8776 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
)
8777 fprintf (file
, "%d(%s)", GET_MODE_SIZE (GET_MODE (x
)),
8778 reg_names
[REGNO (XEXP (XEXP (x
, 0), 0))]);
8779 else if (GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
8780 fprintf (file
, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x
)),
8781 reg_names
[REGNO (XEXP (XEXP (x
, 0), 0))]);
8783 output_address (XEXP (x
, 0));
8786 output_addr_const (file
, x
);
8790 assemble_name (file
, rs6000_get_some_local_dynamic_name ());
8794 output_operand_lossage ("invalid %%xn code");
8798 /* Print the address of an operand. */
8801 print_operand_address (file
, x
)
8805 if (GET_CODE (x
) == REG
)
8806 fprintf (file
, "0(%s)", reg_names
[ REGNO (x
) ]);
8807 else if (GET_CODE (x
) == SYMBOL_REF
|| GET_CODE (x
) == CONST
8808 || GET_CODE (x
) == LABEL_REF
)
8810 output_addr_const (file
, x
);
8811 if (small_data_operand (x
, GET_MODE (x
)))
8812 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
8813 reg_names
[SMALL_DATA_REG
]);
8814 else if (TARGET_TOC
)
8817 else if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 1)) == REG
)
8819 if (REGNO (XEXP (x
, 0)) == 0)
8820 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (x
, 1)) ],
8821 reg_names
[ REGNO (XEXP (x
, 0)) ]);
8823 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (x
, 0)) ],
8824 reg_names
[ REGNO (XEXP (x
, 1)) ]);
8826 else if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 1)) == CONST_INT
)
8828 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (XEXP (x
, 1)));
8829 fprintf (file
, "(%s)", reg_names
[ REGNO (XEXP (x
, 0)) ]);
8832 else if (GET_CODE (x
) == LO_SUM
&& GET_CODE (XEXP (x
, 0)) == REG
8833 && CONSTANT_P (XEXP (x
, 1)))
8835 output_addr_const (file
, XEXP (x
, 1));
8836 fprintf (file
, "@l(%s)", reg_names
[ REGNO (XEXP (x
, 0)) ]);
8840 else if (GET_CODE (x
) == LO_SUM
&& GET_CODE (XEXP (x
, 0)) == REG
8841 && CONSTANT_P (XEXP (x
, 1)))
8843 fprintf (file
, "lo16(");
8844 output_addr_const (file
, XEXP (x
, 1));
8845 fprintf (file
, ")(%s)", reg_names
[ REGNO (XEXP (x
, 0)) ]);
8848 else if (legitimate_constant_pool_address_p (x
))
8850 if (TARGET_AIX
&& (!TARGET_ELF
|| !TARGET_MINIMAL_TOC
))
8852 rtx contains_minus
= XEXP (x
, 1);
8856 /* Find the (minus (sym) (toc)) buried in X, and temporarily
8857 turn it into (sym) for output_addr_const. */
8858 while (GET_CODE (XEXP (contains_minus
, 0)) != MINUS
)
8859 contains_minus
= XEXP (contains_minus
, 0);
8861 minus
= XEXP (contains_minus
, 0);
8862 symref
= XEXP (minus
, 0);
8863 XEXP (contains_minus
, 0) = symref
;
8868 name
= XSTR (symref
, 0);
8869 newname
= alloca (strlen (name
) + sizeof ("@toc"));
8870 strcpy (newname
, name
);
8871 strcat (newname
, "@toc");
8872 XSTR (symref
, 0) = newname
;
8874 output_addr_const (file
, XEXP (x
, 1));
8876 XSTR (symref
, 0) = name
;
8877 XEXP (contains_minus
, 0) = minus
;
8880 output_addr_const (file
, XEXP (x
, 1));
8882 fprintf (file
, "(%s)", reg_names
[REGNO (XEXP (x
, 0))]);
8888 /* Target hook for assembling integer objects. The PowerPC version has
8889 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
8890 is defined. It also needs to handle DI-mode objects on 64-bit
8894 rs6000_assemble_integer (x
, size
, aligned_p
)
8899 #ifdef RELOCATABLE_NEEDS_FIXUP
8900 /* Special handling for SI values. */
8901 if (size
== 4 && aligned_p
)
8903 extern int in_toc_section
PARAMS ((void));
8904 static int recurse
= 0;
8906 /* For -mrelocatable, we mark all addresses that need to be fixed up
8907 in the .fixup section. */
8908 if (TARGET_RELOCATABLE
8909 && !in_toc_section ()
8910 && !in_text_section ()
8912 && GET_CODE (x
) != CONST_INT
8913 && GET_CODE (x
) != CONST_DOUBLE
8919 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCP", fixuplabelno
);
8921 ASM_OUTPUT_LABEL (asm_out_file
, buf
);
8922 fprintf (asm_out_file
, "\t.long\t(");
8923 output_addr_const (asm_out_file
, x
);
8924 fprintf (asm_out_file
, ")@fixup\n");
8925 fprintf (asm_out_file
, "\t.section\t\".fixup\",\"aw\"\n");
8926 ASM_OUTPUT_ALIGN (asm_out_file
, 2);
8927 fprintf (asm_out_file
, "\t.long\t");
8928 assemble_name (asm_out_file
, buf
);
8929 fprintf (asm_out_file
, "\n\t.previous\n");
8933 /* Remove initial .'s to turn a -mcall-aixdesc function
8934 address into the address of the descriptor, not the function
8936 else if (GET_CODE (x
) == SYMBOL_REF
8937 && XSTR (x
, 0)[0] == '.'
8938 && DEFAULT_ABI
== ABI_AIX
)
8940 const char *name
= XSTR (x
, 0);
8941 while (*name
== '.')
8944 fprintf (asm_out_file
, "\t.long\t%s\n", name
);
8948 #endif /* RELOCATABLE_NEEDS_FIXUP */
8949 return default_assemble_integer (x
, size
, aligned_p
);
8952 #ifdef HAVE_GAS_HIDDEN
8953 /* Emit an assembler directive to set symbol visibility for DECL to
8957 rs6000_assemble_visibility (decl
, vis
)
8961 /* Functions need to have their entry point symbol visibility set as
8962 well as their descriptor symbol visibility. */
8963 if (DEFAULT_ABI
== ABI_AIX
&& TREE_CODE (decl
) == FUNCTION_DECL
)
8965 static const char * const visibility_types
[] = {
8966 NULL
, "internal", "hidden", "protected"
8969 const char *name
, *type
;
8971 name
= ((* targetm
.strip_name_encoding
)
8972 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
))));
8973 type
= visibility_types
[vis
];
8975 fprintf (asm_out_file
, "\t.%s\t%s\n", type
, name
);
8976 fprintf (asm_out_file
, "\t.%s\t.%s\n", type
, name
);
8979 default_assemble_visibility (decl
, vis
);
8984 rs6000_reverse_condition (mode
, code
)
8985 enum machine_mode mode
;
8988 /* Reversal of FP compares takes care -- an ordered compare
8989 becomes an unordered compare and vice versa. */
8990 if (mode
== CCFPmode
8991 && (!flag_finite_math_only
8992 || code
== UNLT
|| code
== UNLE
|| code
== UNGT
|| code
== UNGE
8993 || code
== UNEQ
|| code
== LTGT
))
8994 return reverse_condition_maybe_unordered (code
);
8996 return reverse_condition (code
);
8999 /* Generate a compare for CODE. Return a brand-new rtx that
9000 represents the result of the compare. */
9003 rs6000_generate_compare (code
)
9006 enum machine_mode comp_mode
;
9009 if (rs6000_compare_fp_p
)
9010 comp_mode
= CCFPmode
;
9011 else if (code
== GTU
|| code
== LTU
9012 || code
== GEU
|| code
== LEU
)
9013 comp_mode
= CCUNSmode
;
9017 /* First, the compare. */
9018 compare_result
= gen_reg_rtx (comp_mode
);
9020 /* SPE FP compare instructions on the GPRs. Yuck! */
9021 if ((TARGET_E500
&& !TARGET_FPRS
&& TARGET_HARD_FLOAT
)
9022 && rs6000_compare_fp_p
)
9024 rtx cmp
, or1
, or2
, or_result
, compare_result2
;
9032 cmp
= flag_finite_math_only
9033 ? gen_tstsfeq_gpr (compare_result
, rs6000_compare_op0
,
9035 : gen_cmpsfeq_gpr (compare_result
, rs6000_compare_op0
,
9036 rs6000_compare_op1
);
9044 cmp
= flag_finite_math_only
9045 ? gen_tstsfgt_gpr (compare_result
, rs6000_compare_op0
,
9047 : gen_cmpsfgt_gpr (compare_result
, rs6000_compare_op0
,
9048 rs6000_compare_op1
);
9056 cmp
= flag_finite_math_only
9057 ? gen_tstsflt_gpr (compare_result
, rs6000_compare_op0
,
9059 : gen_cmpsflt_gpr (compare_result
, rs6000_compare_op0
,
9060 rs6000_compare_op1
);
9066 /* Synthesize LE and GE from LT/GT || EQ. */
9067 if (code
== LE
|| code
== GE
|| code
== LEU
|| code
== GEU
)
9069 /* Synthesize GE/LE from GT/LT || EQ. */
9075 case LE
: code
= LT
; break;
9076 case GE
: code
= GT
; break;
9077 case LEU
: code
= LT
; break;
9078 case GEU
: code
= GT
; break;
9082 or1
= gen_reg_rtx (SImode
);
9083 or2
= gen_reg_rtx (SImode
);
9084 or_result
= gen_reg_rtx (CCEQmode
);
9085 compare_result2
= gen_reg_rtx (CCFPmode
);
9088 cmp
= flag_finite_math_only
9089 ? gen_tstsfeq_gpr (compare_result2
, rs6000_compare_op0
,
9091 : gen_cmpsfeq_gpr (compare_result2
, rs6000_compare_op0
,
9092 rs6000_compare_op1
);
9095 /* The MC8540 FP compare instructions set the CR bits
9096 differently than other PPC compare instructions. For
9097 that matter, there is no generic test instruction, but a
9098 testgt, testlt, and testeq. For a true condition, bit 2
9099 is set (x1xx) in the CR. Following the traditional CR
9105 ... bit 2 would be a GT CR alias, so later on we
9106 look in the GT bits for the branch instructions.
9107 However, we must be careful to emit correct RTL in
9108 the meantime, so optimizations don't get confused. */
9110 or1
= gen_rtx (NE
, SImode
, compare_result
, const0_rtx
);
9111 or2
= gen_rtx (NE
, SImode
, compare_result2
, const0_rtx
);
9113 /* OR them together. */
9114 cmp
= gen_rtx_SET (VOIDmode
, or_result
,
9115 gen_rtx_COMPARE (CCEQmode
,
9116 gen_rtx_IOR (SImode
, or1
, or2
),
9118 compare_result
= or_result
;
9123 /* We only care about 1 bit (x1xx), so map everything to NE to
9124 maintain rtl sanity. We'll get to the right bit (x1xx) at
9125 code output time. */
9126 if (code
== NE
|| code
== LTGT
)
9127 /* Do the inverse here because we have no cmpne
9128 instruction. We use the cmpeq instruction and expect
9129 to get a 0 instead. */
9138 emit_insn (gen_rtx_SET (VOIDmode
, compare_result
,
9139 gen_rtx_COMPARE (comp_mode
,
9141 rs6000_compare_op1
)));
9143 /* Some kinds of FP comparisons need an OR operation;
9144 under flag_finite_math_only we don't bother. */
9145 if (rs6000_compare_fp_p
9146 && ! flag_finite_math_only
9147 && ! (TARGET_HARD_FLOAT
&& TARGET_E500
&& !TARGET_FPRS
)
9148 && (code
== LE
|| code
== GE
9149 || code
== UNEQ
|| code
== LTGT
9150 || code
== UNGT
|| code
== UNLT
))
9152 enum rtx_code or1
, or2
;
9153 rtx or1_rtx
, or2_rtx
, compare2_rtx
;
9154 rtx or_result
= gen_reg_rtx (CCEQmode
);
9158 case LE
: or1
= LT
; or2
= EQ
; break;
9159 case GE
: or1
= GT
; or2
= EQ
; break;
9160 case UNEQ
: or1
= UNORDERED
; or2
= EQ
; break;
9161 case LTGT
: or1
= LT
; or2
= GT
; break;
9162 case UNGT
: or1
= UNORDERED
; or2
= GT
; break;
9163 case UNLT
: or1
= UNORDERED
; or2
= LT
; break;
9166 validate_condition_mode (or1
, comp_mode
);
9167 validate_condition_mode (or2
, comp_mode
);
9168 or1_rtx
= gen_rtx (or1
, SImode
, compare_result
, const0_rtx
);
9169 or2_rtx
= gen_rtx (or2
, SImode
, compare_result
, const0_rtx
);
9170 compare2_rtx
= gen_rtx_COMPARE (CCEQmode
,
9171 gen_rtx_IOR (SImode
, or1_rtx
, or2_rtx
),
9173 emit_insn (gen_rtx_SET (VOIDmode
, or_result
, compare2_rtx
));
9175 compare_result
= or_result
;
9179 validate_condition_mode (code
, GET_MODE (compare_result
));
9181 return gen_rtx (code
, VOIDmode
, compare_result
, const0_rtx
);
9185 /* Emit the RTL for an sCOND pattern. */
9188 rs6000_emit_sCOND (code
, result
)
9193 enum machine_mode op_mode
;
9194 enum rtx_code cond_code
;
9196 condition_rtx
= rs6000_generate_compare (code
);
9197 cond_code
= GET_CODE (condition_rtx
);
9200 || cond_code
== GE
|| cond_code
== LE
9201 || cond_code
== GEU
|| cond_code
== LEU
9202 || cond_code
== ORDERED
|| cond_code
== UNGE
|| cond_code
== UNLE
)
9204 rtx not_result
= gen_reg_rtx (CCEQmode
);
9205 rtx not_op
, rev_cond_rtx
;
9206 enum machine_mode cc_mode
;
9208 cc_mode
= GET_MODE (XEXP (condition_rtx
, 0));
9210 rev_cond_rtx
= gen_rtx (rs6000_reverse_condition (cc_mode
, cond_code
),
9211 SImode
, XEXP (condition_rtx
, 0), const0_rtx
);
9212 not_op
= gen_rtx_COMPARE (CCEQmode
, rev_cond_rtx
, const0_rtx
);
9213 emit_insn (gen_rtx_SET (VOIDmode
, not_result
, not_op
));
9214 condition_rtx
= gen_rtx_EQ (VOIDmode
, not_result
, const0_rtx
);
9217 op_mode
= GET_MODE (rs6000_compare_op0
);
9218 if (op_mode
== VOIDmode
)
9219 op_mode
= GET_MODE (rs6000_compare_op1
);
9221 if (TARGET_POWERPC64
&& (op_mode
== DImode
|| rs6000_compare_fp_p
))
9223 PUT_MODE (condition_rtx
, DImode
);
9224 convert_move (result
, condition_rtx
, 0);
9228 PUT_MODE (condition_rtx
, SImode
);
9229 emit_insn (gen_rtx_SET (VOIDmode
, result
, condition_rtx
));
9233 /* Emit a branch of kind CODE to location LOC. */
9236 rs6000_emit_cbranch (code
, loc
)
9240 rtx condition_rtx
, loc_ref
;
9242 condition_rtx
= rs6000_generate_compare (code
);
9243 loc_ref
= gen_rtx_LABEL_REF (VOIDmode
, loc
);
9244 emit_jump_insn (gen_rtx_SET (VOIDmode
, pc_rtx
,
9245 gen_rtx_IF_THEN_ELSE (VOIDmode
, condition_rtx
,
9249 /* Return the string to output a conditional branch to LABEL, which is
9250 the operand number of the label, or -1 if the branch is really a
9253 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
9254 condition code register and its mode specifies what kind of
9257 REVERSED is nonzero if we should reverse the sense of the comparison.
9259 INSN is the insn. */
9262 output_cbranch (op
, label
, reversed
, insn
)
9268 static char string
[64];
9269 enum rtx_code code
= GET_CODE (op
);
9270 rtx cc_reg
= XEXP (op
, 0);
9271 enum machine_mode mode
= GET_MODE (cc_reg
);
9272 int cc_regno
= REGNO (cc_reg
) - CR0_REGNO
;
9273 int need_longbranch
= label
!= NULL
&& get_attr_length (insn
) == 8;
9274 int really_reversed
= reversed
^ need_longbranch
;
9280 validate_condition_mode (code
, mode
);
9282 /* Work out which way this really branches. We could use
9283 reverse_condition_maybe_unordered here always but this
9284 makes the resulting assembler clearer. */
9285 if (really_reversed
)
9287 /* Reversal of FP compares takes care -- an ordered compare
9288 becomes an unordered compare and vice versa. */
9289 if (mode
== CCFPmode
)
9290 code
= reverse_condition_maybe_unordered (code
);
9292 code
= reverse_condition (code
);
9295 if ((TARGET_E500
&& !TARGET_FPRS
&& TARGET_HARD_FLOAT
) && mode
== CCFPmode
)
9297 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
9300 /* Opposite of GT. */
9302 else if (code
== NE
)
9310 /* Not all of these are actually distinct opcodes, but
9311 we distinguish them for clarity of the resulting assembler. */
9313 ccode
= "ne"; break;
9315 ccode
= "eq"; break;
9317 ccode
= "ge"; break;
9318 case GT
: case GTU
: case UNGT
:
9319 ccode
= "gt"; break;
9321 ccode
= "le"; break;
9322 case LT
: case LTU
: case UNLT
:
9323 ccode
= "lt"; break;
9324 case UNORDERED
: ccode
= "un"; break;
9325 case ORDERED
: ccode
= "nu"; break;
9326 case UNGE
: ccode
= "nl"; break;
9327 case UNLE
: ccode
= "ng"; break;
9332 /* Maybe we have a guess as to how likely the branch is.
9333 The old mnemonics don't have a way to specify this information. */
9335 note
= find_reg_note (insn
, REG_BR_PROB
, NULL_RTX
);
9336 if (note
!= NULL_RTX
)
9338 /* PROB is the difference from 50%. */
9339 int prob
= INTVAL (XEXP (note
, 0)) - REG_BR_PROB_BASE
/ 2;
9340 bool always_hint
= rs6000_cpu
!= PROCESSOR_POWER4
;
9342 /* Only hint for highly probable/improbable branches on newer
9343 cpus as static prediction overrides processor dynamic
9344 prediction. For older cpus we may as well always hint, but
9345 assume not taken for branches that are very close to 50% as a
9346 mispredicted taken branch is more expensive than a
9347 mispredicted not-taken branch. */
9349 || abs (prob
) > REG_BR_PROB_BASE
/ 100 * 48)
9351 if (abs (prob
) > REG_BR_PROB_BASE
/ 20
9352 && ((prob
> 0) ^ need_longbranch
))
9360 s
+= sprintf (s
, "{b%sr|b%slr%s} ", ccode
, ccode
, pred
);
9362 s
+= sprintf (s
, "{b%s|b%s%s} ", ccode
, ccode
, pred
);
9364 /* We need to escape any '%' characters in the reg_names string.
9365 Assume they'd only be the first character... */
9366 if (reg_names
[cc_regno
+ CR0_REGNO
][0] == '%')
9368 s
+= sprintf (s
, "%s", reg_names
[cc_regno
+ CR0_REGNO
]);
9372 /* If the branch distance was too far, we may have to use an
9373 unconditional branch to go the distance. */
9374 if (need_longbranch
)
9375 s
+= sprintf (s
, ",$+8\n\tb %s", label
);
9377 s
+= sprintf (s
, ",%s", label
);
9383 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
9384 operands of the last comparison is nonzero/true, FALSE_COND if it
9385 is zero/false. Return 0 if the hardware has no such operation. */
9388 rs6000_emit_cmove (dest
, op
, true_cond
, false_cond
)
9394 enum rtx_code code
= GET_CODE (op
);
9395 rtx op0
= rs6000_compare_op0
;
9396 rtx op1
= rs6000_compare_op1
;
9398 enum machine_mode compare_mode
= GET_MODE (op0
);
9399 enum machine_mode result_mode
= GET_MODE (dest
);
9402 /* These modes should always match. */
9403 if (GET_MODE (op1
) != compare_mode
9404 /* In the isel case however, we can use a compare immediate, so
9405 op1 may be a small constant. */
9406 && (!TARGET_ISEL
|| !short_cint_operand (op1
, VOIDmode
)))
9408 if (GET_MODE (true_cond
) != result_mode
)
9410 if (GET_MODE (false_cond
) != result_mode
)
9413 /* First, work out if the hardware can do this at all, or
9414 if it's too slow... */
9415 if (! rs6000_compare_fp_p
)
9418 return rs6000_emit_int_cmove (dest
, op
, true_cond
, false_cond
);
9422 /* Eliminate half of the comparisons by switching operands, this
9423 makes the remaining code simpler. */
9424 if (code
== UNLT
|| code
== UNGT
|| code
== UNORDERED
|| code
== NE
9425 || code
== LTGT
|| code
== LT
|| code
== UNLE
)
9427 code
= reverse_condition_maybe_unordered (code
);
9429 true_cond
= false_cond
;
9433 /* UNEQ and LTGT take four instructions for a comparison with zero,
9434 it'll probably be faster to use a branch here too. */
9435 if (code
== UNEQ
&& HONOR_NANS (compare_mode
))
9438 if (GET_CODE (op1
) == CONST_DOUBLE
)
9439 REAL_VALUE_FROM_CONST_DOUBLE (c1
, op1
);
9441 /* We're going to try to implement comparisons by performing
9442 a subtract, then comparing against zero. Unfortunately,
9443 Inf - Inf is NaN which is not zero, and so if we don't
9444 know that the operand is finite and the comparison
9445 would treat EQ different to UNORDERED, we can't do it. */
9446 if (HONOR_INFINITIES (compare_mode
)
9447 && code
!= GT
&& code
!= UNGE
9448 && (GET_CODE (op1
) != CONST_DOUBLE
|| real_isinf (&c1
))
9449 /* Constructs of the form (a OP b ? a : b) are safe. */
9450 && ((! rtx_equal_p (op0
, false_cond
) && ! rtx_equal_p (op1
, false_cond
))
9451 || (! rtx_equal_p (op0
, true_cond
)
9452 && ! rtx_equal_p (op1
, true_cond
))))
9454 /* At this point we know we can use fsel. */
9456 /* Reduce the comparison to a comparison against zero. */
9457 temp
= gen_reg_rtx (compare_mode
);
9458 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
9459 gen_rtx_MINUS (compare_mode
, op0
, op1
)));
9461 op1
= CONST0_RTX (compare_mode
);
9463 /* If we don't care about NaNs we can reduce some of the comparisons
9464 down to faster ones. */
9465 if (! HONOR_NANS (compare_mode
))
9471 true_cond
= false_cond
;
9484 /* Now, reduce everything down to a GE. */
9491 temp
= gen_reg_rtx (compare_mode
);
9492 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (compare_mode
, op0
)));
9497 temp
= gen_reg_rtx (compare_mode
);
9498 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_ABS (compare_mode
, op0
)));
9503 temp
= gen_reg_rtx (compare_mode
);
9504 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
9505 gen_rtx_NEG (compare_mode
,
9506 gen_rtx_ABS (compare_mode
, op0
))));
9511 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
9512 temp
= gen_reg_rtx (result_mode
);
9513 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
9514 gen_rtx_IF_THEN_ELSE (result_mode
,
9515 gen_rtx_GE (VOIDmode
,
9517 true_cond
, false_cond
)));
9518 false_cond
= true_cond
;
9521 temp
= gen_reg_rtx (compare_mode
);
9522 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (compare_mode
, op0
)));
9527 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
9528 temp
= gen_reg_rtx (result_mode
);
9529 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
9530 gen_rtx_IF_THEN_ELSE (result_mode
,
9531 gen_rtx_GE (VOIDmode
,
9533 true_cond
, false_cond
)));
9534 true_cond
= false_cond
;
9537 temp
= gen_reg_rtx (compare_mode
);
9538 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (compare_mode
, op0
)));
9546 emit_insn (gen_rtx_SET (VOIDmode
, dest
,
9547 gen_rtx_IF_THEN_ELSE (result_mode
,
9548 gen_rtx_GE (VOIDmode
,
9550 true_cond
, false_cond
)));
9554 /* Same as above, but for ints (isel). */
9557 rs6000_emit_int_cmove (dest
, op
, true_cond
, false_cond
)
9563 rtx condition_rtx
, cr
;
9565 /* All isel implementations thus far are 32-bits. */
9566 if (GET_MODE (rs6000_compare_op0
) != SImode
)
9569 /* We still have to do the compare, because isel doesn't do a
9570 compare, it just looks at the CRx bits set by a previous compare
9572 condition_rtx
= rs6000_generate_compare (GET_CODE (op
));
9573 cr
= XEXP (condition_rtx
, 0);
9575 if (GET_MODE (cr
) == CCmode
)
9576 emit_insn (gen_isel_signed (dest
, condition_rtx
,
9577 true_cond
, false_cond
, cr
));
9579 emit_insn (gen_isel_unsigned (dest
, condition_rtx
,
9580 true_cond
, false_cond
, cr
));
9586 output_isel (operands
)
9591 code
= GET_CODE (operands
[1]);
9592 if (code
== GE
|| code
== GEU
|| code
== LE
|| code
== LEU
|| code
== NE
)
9594 PUT_CODE (operands
[1], reverse_condition (code
));
9595 return "isel %0,%3,%2,%j1";
9598 return "isel %0,%2,%3,%j1";
9602 rs6000_emit_minmax (dest
, code
, op0
, op1
)
9608 enum machine_mode mode
= GET_MODE (op0
);
9612 if (code
== SMAX
|| code
== SMIN
)
9617 if (code
== SMAX
|| code
== UMAX
)
9618 target
= emit_conditional_move (dest
, c
, op0
, op1
, mode
,
9621 target
= emit_conditional_move (dest
, c
, op0
, op1
, mode
,
9623 if (target
== NULL_RTX
)
9626 emit_move_insn (dest
, target
);
9629 /* This page contains routines that are used to determine what the
9630 function prologue and epilogue code will do and write them out. */
9632 /* Return the first fixed-point register that is required to be
9633 saved. 32 if none. */
9636 first_reg_to_save ()
9640 /* Find lowest numbered live register. */
9641 for (first_reg
= 13; first_reg
<= 31; first_reg
++)
9642 if (regs_ever_live
[first_reg
]
9643 && (! call_used_regs
[first_reg
]
9644 || (first_reg
== RS6000_PIC_OFFSET_TABLE_REGNUM
9645 && ((DEFAULT_ABI
== ABI_V4
&& flag_pic
!= 0)
9646 || (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
)))))
9651 && current_function_uses_pic_offset_table
9652 && first_reg
> RS6000_PIC_OFFSET_TABLE_REGNUM
)
9653 return RS6000_PIC_OFFSET_TABLE_REGNUM
;
9659 /* Similar, for FP regs. */
9662 first_fp_reg_to_save ()
9666 /* Find lowest numbered live register. */
9667 for (first_reg
= 14 + 32; first_reg
<= 63; first_reg
++)
9668 if (regs_ever_live
[first_reg
])
9674 /* Similar, for AltiVec regs. */
9677 first_altivec_reg_to_save ()
9681 /* Stack frame remains as is unless we are in AltiVec ABI. */
9682 if (! TARGET_ALTIVEC_ABI
)
9683 return LAST_ALTIVEC_REGNO
+ 1;
9685 /* Find lowest numbered live register. */
9686 for (i
= FIRST_ALTIVEC_REGNO
+ 20; i
<= LAST_ALTIVEC_REGNO
; ++i
)
9687 if (regs_ever_live
[i
])
9693 /* Return a 32-bit mask of the AltiVec registers we need to set in
9694 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
9695 the 32-bit word is 0. */
9698 compute_vrsave_mask ()
9700 unsigned int i
, mask
= 0;
9702 /* First, find out if we use _any_ altivec registers. */
9703 for (i
= FIRST_ALTIVEC_REGNO
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
9704 if (regs_ever_live
[i
])
9705 mask
|= ALTIVEC_REG_BIT (i
);
9710 /* Next, remove the argument registers from the set. These must
9711 be in the VRSAVE mask set by the caller, so we don't need to add
9712 them in again. More importantly, the mask we compute here is
9713 used to generate CLOBBERs in the set_vrsave insn, and we do not
9714 wish the argument registers to die. */
9715 for (i
= cfun
->args_info
.vregno
; i
>= ALTIVEC_ARG_MIN_REG
; --i
)
9716 mask
&= ~ALTIVEC_REG_BIT (i
);
9718 /* Similarly, remove the return value from the set. */
9721 diddle_return_value (is_altivec_return_reg
, &yes
);
9723 mask
&= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN
);
9730 is_altivec_return_reg (reg
, xyes
)
9734 bool *yes
= (bool *) xyes
;
9735 if (REGNO (reg
) == ALTIVEC_ARG_RETURN
)
9740 /* Calculate the stack information for the current function. This is
9741 complicated by having two separate calling sequences, the AIX calling
9742 sequence and the V.4 calling sequence.
9744 AIX (and Darwin/Mac OS X) stack frames look like:
9746 SP----> +---------------------------------------+
9747 | back chain to caller | 0 0
9748 +---------------------------------------+
9749 | saved CR | 4 8 (8-11)
9750 +---------------------------------------+
9752 +---------------------------------------+
9753 | reserved for compilers | 12 24
9754 +---------------------------------------+
9755 | reserved for binders | 16 32
9756 +---------------------------------------+
9757 | saved TOC pointer | 20 40
9758 +---------------------------------------+
9759 | Parameter save area (P) | 24 48
9760 +---------------------------------------+
9761 | Alloca space (A) | 24+P etc.
9762 +---------------------------------------+
9763 | Local variable space (L) | 24+P+A
9764 +---------------------------------------+
9765 | Float/int conversion temporary (X) | 24+P+A+L
9766 +---------------------------------------+
9767 | Save area for AltiVec registers (W) | 24+P+A+L+X
9768 +---------------------------------------+
9769 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
9770 +---------------------------------------+
9771 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
9772 +---------------------------------------+
9773 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
9774 +---------------------------------------+
9775 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
9776 +---------------------------------------+
9777 old SP->| back chain to caller's caller |
9778 +---------------------------------------+
9780 The required alignment for AIX configurations is two words (i.e., 8
9784 V.4 stack frames look like:
9786 SP----> +---------------------------------------+
9787 | back chain to caller | 0
9788 +---------------------------------------+
9789 | caller's saved LR | 4
9790 +---------------------------------------+
9791 | Parameter save area (P) | 8
9792 +---------------------------------------+
9793 | Alloca space (A) | 8+P
9794 +---------------------------------------+
9795 | Varargs save area (V) | 8+P+A
9796 +---------------------------------------+
9797 | Local variable space (L) | 8+P+A+V
9798 +---------------------------------------+
9799 | Float/int conversion temporary (X) | 8+P+A+V+L
9800 +---------------------------------------+
9801 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
9802 +---------------------------------------+
9803 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
9804 +---------------------------------------+
9805 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
9806 +---------------------------------------+
9807 | SPE: area for 64-bit GP registers |
9808 +---------------------------------------+
9809 | SPE alignment padding |
9810 +---------------------------------------+
9811 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
9812 +---------------------------------------+
9813 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
9814 +---------------------------------------+
9815 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
9816 +---------------------------------------+
9817 old SP->| back chain to caller's caller |
9818 +---------------------------------------+
9820 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
9821 given. (But note below and in sysv4.h that we require only 8 and
9822 may round up the size of our stack frame anyways. The historical
9823 reason is early versions of powerpc-linux which didn't properly
9824 align the stack at program startup. A happy side-effect is that
9825 -mno-eabi libraries can be used with -meabi programs.)
9827 The EABI configuration defaults to the V.4 layout. However,
9828 the stack alignment requirements may differ. If -mno-eabi is not
9829 given, the required stack alignment is 8 bytes; if -mno-eabi is
9830 given, the required alignment is 16 bytes. (But see V.4 comment
9833 #ifndef ABI_STACK_BOUNDARY
9834 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
9838 rs6000_stack_info ()
9840 static rs6000_stack_t info
, zero_info
;
9841 rs6000_stack_t
*info_ptr
= &info
;
9842 int reg_size
= TARGET_POWERPC64
? 8 : 4;
9846 /* Zero all fields portably. */
9851 /* Cache value so we don't rescan instruction chain over and over. */
9852 if (cfun
->machine
->insn_chain_scanned_p
== 0)
9854 cfun
->machine
->insn_chain_scanned_p
= 1;
9855 info_ptr
->spe_64bit_regs_used
= (int) spe_func_has_64bit_regs_p ();
9859 /* Select which calling sequence. */
9860 info_ptr
->abi
= DEFAULT_ABI
;
9862 /* Calculate which registers need to be saved & save area size. */
9863 info_ptr
->first_gp_reg_save
= first_reg_to_save ();
9864 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
9865 even if it currently looks like we won't. */
9866 if (((TARGET_TOC
&& TARGET_MINIMAL_TOC
)
9867 || (flag_pic
== 1 && DEFAULT_ABI
== ABI_V4
)
9868 || (flag_pic
&& DEFAULT_ABI
== ABI_DARWIN
))
9869 && info_ptr
->first_gp_reg_save
> RS6000_PIC_OFFSET_TABLE_REGNUM
)
9870 info_ptr
->gp_size
= reg_size
* (32 - RS6000_PIC_OFFSET_TABLE_REGNUM
);
9872 info_ptr
->gp_size
= reg_size
* (32 - info_ptr
->first_gp_reg_save
);
9874 /* For the SPE, we have an additional upper 32-bits on each GPR.
9875 Ideally we should save the entire 64-bits only when the upper
9876 half is used in SIMD instructions. Since we only record
9877 registers live (not the size they are used in), this proves
9878 difficult because we'd have to traverse the instruction chain at
9879 the right time, taking reload into account. This is a real pain,
9880 so we opt to save the GPRs in 64-bits always if but one register
9881 gets used in 64-bits. Otherwise, all the registers in the frame
9882 get saved in 32-bits.
9884 So... since when we save all GPRs (except the SP) in 64-bits, the
9885 traditional GP save area will be empty. */
9886 if (TARGET_SPE_ABI
&& info_ptr
->spe_64bit_regs_used
!= 0)
9887 info_ptr
->gp_size
= 0;
9889 info_ptr
->first_fp_reg_save
= first_fp_reg_to_save ();
9890 info_ptr
->fp_size
= 8 * (64 - info_ptr
->first_fp_reg_save
);
9892 info_ptr
->first_altivec_reg_save
= first_altivec_reg_to_save ();
9893 info_ptr
->altivec_size
= 16 * (LAST_ALTIVEC_REGNO
+ 1
9894 - info_ptr
->first_altivec_reg_save
);
9896 /* Does this function call anything? */
9897 info_ptr
->calls_p
= (! current_function_is_leaf
9898 || cfun
->machine
->ra_needs_full_frame
);
9900 /* Determine if we need to save the link register. */
9901 if (rs6000_ra_ever_killed ()
9902 || (DEFAULT_ABI
== ABI_AIX
9903 && current_function_profile
9904 && !TARGET_PROFILE_KERNEL
)
9905 #ifdef TARGET_RELOCATABLE
9906 || (TARGET_RELOCATABLE
&& (get_pool_size () != 0))
9908 || (info_ptr
->first_fp_reg_save
!= 64
9909 && !FP_SAVE_INLINE (info_ptr
->first_fp_reg_save
))
9910 || info_ptr
->first_altivec_reg_save
<= LAST_ALTIVEC_REGNO
9911 || (DEFAULT_ABI
== ABI_V4
&& current_function_calls_alloca
)
9912 || (DEFAULT_ABI
== ABI_DARWIN
9914 && current_function_uses_pic_offset_table
)
9915 || info_ptr
->calls_p
)
9917 info_ptr
->lr_save_p
= 1;
9918 regs_ever_live
[LINK_REGISTER_REGNUM
] = 1;
9921 /* Determine if we need to save the condition code registers. */
9922 if (regs_ever_live
[CR2_REGNO
]
9923 || regs_ever_live
[CR3_REGNO
]
9924 || regs_ever_live
[CR4_REGNO
])
9926 info_ptr
->cr_save_p
= 1;
9927 if (DEFAULT_ABI
== ABI_V4
)
9928 info_ptr
->cr_size
= reg_size
;
9931 /* If the current function calls __builtin_eh_return, then we need
9932 to allocate stack space for registers that will hold data for
9933 the exception handler. */
9934 if (current_function_calls_eh_return
)
9937 for (i
= 0; EH_RETURN_DATA_REGNO (i
) != INVALID_REGNUM
; ++i
)
9940 /* SPE saves EH registers in 64-bits. */
9941 ehrd_size
= i
* (TARGET_SPE_ABI
9942 && info_ptr
->spe_64bit_regs_used
!= 0
9943 ? UNITS_PER_SPE_WORD
: UNITS_PER_WORD
);
9948 /* Determine various sizes. */
9949 info_ptr
->reg_size
= reg_size
;
9950 info_ptr
->fixed_size
= RS6000_SAVE_AREA
;
9951 info_ptr
->varargs_size
= RS6000_VARARGS_AREA
;
9952 info_ptr
->vars_size
= RS6000_ALIGN (get_frame_size (), 8);
9953 info_ptr
->parm_size
= RS6000_ALIGN (current_function_outgoing_args_size
,
9956 if (TARGET_SPE_ABI
&& info_ptr
->spe_64bit_regs_used
!= 0)
9957 info_ptr
->spe_gp_size
= 8 * (32 - info_ptr
->first_gp_reg_save
);
9959 info_ptr
->spe_gp_size
= 0;
9961 if (TARGET_ALTIVEC_ABI
&& TARGET_ALTIVEC_VRSAVE
)
9963 info_ptr
->vrsave_mask
= compute_vrsave_mask ();
9964 info_ptr
->vrsave_size
= info_ptr
->vrsave_mask
? 4 : 0;
9968 info_ptr
->vrsave_mask
= 0;
9969 info_ptr
->vrsave_size
= 0;
9972 /* Calculate the offsets. */
9973 switch (DEFAULT_ABI
)
9981 info_ptr
->fp_save_offset
= - info_ptr
->fp_size
;
9982 info_ptr
->gp_save_offset
= info_ptr
->fp_save_offset
- info_ptr
->gp_size
;
9984 if (TARGET_ALTIVEC_ABI
)
9986 info_ptr
->vrsave_save_offset
9987 = info_ptr
->gp_save_offset
- info_ptr
->vrsave_size
;
9989 /* Align stack so vector save area is on a quadword boundary. */
9990 if (info_ptr
->altivec_size
!= 0)
9991 info_ptr
->altivec_padding_size
9992 = 16 - (-info_ptr
->vrsave_save_offset
% 16);
9994 info_ptr
->altivec_padding_size
= 0;
9996 info_ptr
->altivec_save_offset
9997 = info_ptr
->vrsave_save_offset
9998 - info_ptr
->altivec_padding_size
9999 - info_ptr
->altivec_size
;
10001 /* Adjust for AltiVec case. */
10002 info_ptr
->ehrd_offset
= info_ptr
->altivec_save_offset
- ehrd_size
;
10005 info_ptr
->ehrd_offset
= info_ptr
->gp_save_offset
- ehrd_size
;
10006 info_ptr
->cr_save_offset
= reg_size
; /* first word when 64-bit. */
10007 info_ptr
->lr_save_offset
= 2*reg_size
;
10011 info_ptr
->fp_save_offset
= - info_ptr
->fp_size
;
10012 info_ptr
->gp_save_offset
= info_ptr
->fp_save_offset
- info_ptr
->gp_size
;
10013 info_ptr
->cr_save_offset
= info_ptr
->gp_save_offset
- info_ptr
->cr_size
;
10015 if (TARGET_SPE_ABI
&& info_ptr
->spe_64bit_regs_used
!= 0)
10017 /* Align stack so SPE GPR save area is aligned on a
10018 double-word boundary. */
10019 if (info_ptr
->spe_gp_size
!= 0)
10020 info_ptr
->spe_padding_size
10021 = 8 - (-info_ptr
->cr_save_offset
% 8);
10023 info_ptr
->spe_padding_size
= 0;
10025 info_ptr
->spe_gp_save_offset
10026 = info_ptr
->cr_save_offset
10027 - info_ptr
->spe_padding_size
10028 - info_ptr
->spe_gp_size
;
10030 /* Adjust for SPE case. */
10031 info_ptr
->toc_save_offset
10032 = info_ptr
->spe_gp_save_offset
- info_ptr
->toc_size
;
10034 else if (TARGET_ALTIVEC_ABI
)
10036 info_ptr
->vrsave_save_offset
10037 = info_ptr
->cr_save_offset
- info_ptr
->vrsave_size
;
10039 /* Align stack so vector save area is on a quadword boundary. */
10040 if (info_ptr
->altivec_size
!= 0)
10041 info_ptr
->altivec_padding_size
10042 = 16 - (-info_ptr
->vrsave_save_offset
% 16);
10044 info_ptr
->altivec_padding_size
= 0;
10046 info_ptr
->altivec_save_offset
10047 = info_ptr
->vrsave_save_offset
10048 - info_ptr
->altivec_padding_size
10049 - info_ptr
->altivec_size
;
10051 /* Adjust for AltiVec case. */
10052 info_ptr
->toc_save_offset
10053 = info_ptr
->altivec_save_offset
- info_ptr
->toc_size
;
10056 info_ptr
->toc_save_offset
= info_ptr
->cr_save_offset
- info_ptr
->toc_size
;
10057 info_ptr
->ehrd_offset
= info_ptr
->toc_save_offset
- ehrd_size
;
10058 info_ptr
->lr_save_offset
= reg_size
;
10062 info_ptr
->save_size
= RS6000_ALIGN (info_ptr
->fp_size
10063 + info_ptr
->gp_size
10064 + info_ptr
->altivec_size
10065 + info_ptr
->altivec_padding_size
10066 + info_ptr
->vrsave_size
10067 + info_ptr
->spe_gp_size
10068 + info_ptr
->spe_padding_size
10070 + info_ptr
->cr_size
10071 + info_ptr
->lr_size
10072 + info_ptr
->vrsave_size
10073 + info_ptr
->toc_size
,
10074 (TARGET_ALTIVEC_ABI
|| ABI_DARWIN
)
10077 total_raw_size
= (info_ptr
->vars_size
10078 + info_ptr
->parm_size
10079 + info_ptr
->save_size
10080 + info_ptr
->varargs_size
10081 + info_ptr
->fixed_size
);
10083 info_ptr
->total_size
=
10084 RS6000_ALIGN (total_raw_size
, ABI_STACK_BOUNDARY
/ BITS_PER_UNIT
);
10086 /* Determine if we need to allocate any stack frame:
10088 For AIX we need to push the stack if a frame pointer is needed
10089 (because the stack might be dynamically adjusted), if we are
10090 debugging, if we make calls, or if the sum of fp_save, gp_save,
10091 and local variables are more than the space needed to save all
10092 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
10093 + 18*8 = 288 (GPR13 reserved).
10095 For V.4 we don't have the stack cushion that AIX uses, but assume
10096 that the debugger can handle stackless frames. */
10098 if (info_ptr
->calls_p
)
10099 info_ptr
->push_p
= 1;
10101 else if (DEFAULT_ABI
== ABI_V4
)
10102 info_ptr
->push_p
= total_raw_size
> info_ptr
->fixed_size
;
10104 else if (frame_pointer_needed
)
10105 info_ptr
->push_p
= 1;
10107 else if (TARGET_XCOFF
&& write_symbols
!= NO_DEBUG
)
10108 info_ptr
->push_p
= 1;
10112 = total_raw_size
- info_ptr
->fixed_size
> (TARGET_32BIT
? 220 : 288);
10114 /* Zero offsets if we're not saving those registers. */
10115 if (info_ptr
->fp_size
== 0)
10116 info_ptr
->fp_save_offset
= 0;
10118 if (info_ptr
->gp_size
== 0)
10119 info_ptr
->gp_save_offset
= 0;
10121 if (! TARGET_ALTIVEC_ABI
|| info_ptr
->altivec_size
== 0)
10122 info_ptr
->altivec_save_offset
= 0;
10124 if (! TARGET_ALTIVEC_ABI
|| info_ptr
->vrsave_mask
== 0)
10125 info_ptr
->vrsave_save_offset
= 0;
10127 if (! TARGET_SPE_ABI
10128 || info_ptr
->spe_64bit_regs_used
== 0
10129 || info_ptr
->spe_gp_size
== 0)
10130 info_ptr
->spe_gp_save_offset
= 0;
10132 if (! info_ptr
->lr_save_p
)
10133 info_ptr
->lr_save_offset
= 0;
10135 if (! info_ptr
->cr_save_p
)
10136 info_ptr
->cr_save_offset
= 0;
10138 if (! info_ptr
->toc_save_p
)
10139 info_ptr
->toc_save_offset
= 0;
/* NOTE(review): this chunk is a lossy extraction -- the embedded original
   line numbers (10144, 10148, ...) jump, so braces, return statements and
   declarations are missing from view.  The annotations below describe only
   what is visible; do not treat this text as compilable.  */
/* NOTE(review): predicate -- apparently returns nonzero when the current
   function must save/restore GPRs as 64-bit SPE registers.  The early test
   on eh_return/setjmp/nonlocal-goto and the scan for SPE-vector-mode SETs
   support that reading, but the actual return statements were lost in
   extraction -- confirm against the full source.  */
10144 /* Return true if the current function uses any GPRs in 64-bit SIMD
10148 spe_func_has_64bit_regs_p ()
10152 /* Functions that save and restore all the call-saved registers will
10153 need to save/restore the registers in 64-bits. */
10154 if (current_function_calls_eh_return
10155 || current_function_calls_setjmp
10156 || current_function_has_nonlocal_goto
)
/* Scan the insn stream for a SET whose source has an SPE vector mode.  */
10159 insns
= get_insns ();
10161 for (insn
= NEXT_INSN (insns
); insn
!= NULL_RTX
; insn
= NEXT_INSN (insn
))
10167 i
= PATTERN (insn
);
10168 if (GET_CODE (i
) == SET
10169 && SPE_VECTOR_MODE (GET_MODE (SET_SRC (i
))))
10178 debug_stack_info (info
)
10179 rs6000_stack_t
*info
;
10181 const char *abi_string
;
10184 info
= rs6000_stack_info ();
10186 fprintf (stderr
, "\nStack information for function %s:\n",
10187 ((current_function_decl
&& DECL_NAME (current_function_decl
))
10188 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl
))
10193 default: abi_string
= "Unknown"; break;
10194 case ABI_NONE
: abi_string
= "NONE"; break;
10195 case ABI_AIX
: abi_string
= "AIX"; break;
10196 case ABI_DARWIN
: abi_string
= "Darwin"; break;
10197 case ABI_V4
: abi_string
= "V.4"; break;
10200 fprintf (stderr
, "\tABI = %5s\n", abi_string
);
10202 if (TARGET_ALTIVEC_ABI
)
10203 fprintf (stderr
, "\tALTIVEC ABI extensions enabled.\n");
10205 if (TARGET_SPE_ABI
)
10206 fprintf (stderr
, "\tSPE ABI extensions enabled.\n");
10208 if (info
->first_gp_reg_save
!= 32)
10209 fprintf (stderr
, "\tfirst_gp_reg_save = %5d\n", info
->first_gp_reg_save
);
10211 if (info
->first_fp_reg_save
!= 64)
10212 fprintf (stderr
, "\tfirst_fp_reg_save = %5d\n", info
->first_fp_reg_save
);
10214 if (info
->first_altivec_reg_save
<= LAST_ALTIVEC_REGNO
)
10215 fprintf (stderr
, "\tfirst_altivec_reg_save = %5d\n",
10216 info
->first_altivec_reg_save
);
10218 if (info
->lr_save_p
)
10219 fprintf (stderr
, "\tlr_save_p = %5d\n", info
->lr_save_p
);
10221 if (info
->cr_save_p
)
10222 fprintf (stderr
, "\tcr_save_p = %5d\n", info
->cr_save_p
);
10224 if (info
->toc_save_p
)
10225 fprintf (stderr
, "\ttoc_save_p = %5d\n", info
->toc_save_p
);
10227 if (info
->vrsave_mask
)
10228 fprintf (stderr
, "\tvrsave_mask = 0x%x\n", info
->vrsave_mask
);
10231 fprintf (stderr
, "\tpush_p = %5d\n", info
->push_p
);
10234 fprintf (stderr
, "\tcalls_p = %5d\n", info
->calls_p
);
10236 if (info
->gp_save_offset
)
10237 fprintf (stderr
, "\tgp_save_offset = %5d\n", info
->gp_save_offset
);
10239 if (info
->fp_save_offset
)
10240 fprintf (stderr
, "\tfp_save_offset = %5d\n", info
->fp_save_offset
);
10242 if (info
->altivec_save_offset
)
10243 fprintf (stderr
, "\taltivec_save_offset = %5d\n",
10244 info
->altivec_save_offset
);
10246 if (info
->spe_gp_save_offset
)
10247 fprintf (stderr
, "\tspe_gp_save_offset = %5d\n",
10248 info
->spe_gp_save_offset
);
10250 if (info
->vrsave_save_offset
)
10251 fprintf (stderr
, "\tvrsave_save_offset = %5d\n",
10252 info
->vrsave_save_offset
);
10254 if (info
->lr_save_offset
)
10255 fprintf (stderr
, "\tlr_save_offset = %5d\n", info
->lr_save_offset
);
10257 if (info
->cr_save_offset
)
10258 fprintf (stderr
, "\tcr_save_offset = %5d\n", info
->cr_save_offset
);
10260 if (info
->toc_save_offset
)
10261 fprintf (stderr
, "\ttoc_save_offset = %5d\n", info
->toc_save_offset
);
10263 if (info
->varargs_save_offset
)
10264 fprintf (stderr
, "\tvarargs_save_offset = %5d\n", info
->varargs_save_offset
);
10266 if (info
->total_size
)
10267 fprintf (stderr
, "\ttotal_size = %5d\n", info
->total_size
);
10269 if (info
->varargs_size
)
10270 fprintf (stderr
, "\tvarargs_size = %5d\n", info
->varargs_size
);
10272 if (info
->vars_size
)
10273 fprintf (stderr
, "\tvars_size = %5d\n", info
->vars_size
);
10275 if (info
->parm_size
)
10276 fprintf (stderr
, "\tparm_size = %5d\n", info
->parm_size
);
10278 if (info
->fixed_size
)
10279 fprintf (stderr
, "\tfixed_size = %5d\n", info
->fixed_size
);
10282 fprintf (stderr
, "\tgp_size = %5d\n", info
->gp_size
);
10284 if (info
->spe_gp_size
)
10285 fprintf (stderr
, "\tspe_gp_size = %5d\n", info
->spe_gp_size
);
10288 fprintf (stderr
, "\tfp_size = %5d\n", info
->fp_size
);
10290 if (info
->altivec_size
)
10291 fprintf (stderr
, "\taltivec_size = %5d\n", info
->altivec_size
);
10293 if (info
->vrsave_size
)
10294 fprintf (stderr
, "\tvrsave_size = %5d\n", info
->vrsave_size
);
10296 if (info
->altivec_padding_size
)
10297 fprintf (stderr
, "\taltivec_padding_size= %5d\n",
10298 info
->altivec_padding_size
);
10300 if (info
->spe_padding_size
)
10301 fprintf (stderr
, "\tspe_padding_size = %5d\n",
10302 info
->spe_padding_size
);
10305 fprintf (stderr
, "\tlr_size = %5d\n", info
->lr_size
);
10308 fprintf (stderr
, "\tcr_size = %5d\n", info
->cr_size
);
10310 if (info
->toc_size
)
10311 fprintf (stderr
, "\ttoc_size = %5d\n", info
->toc_size
);
10313 if (info
->save_size
)
10314 fprintf (stderr
, "\tsave_size = %5d\n", info
->save_size
);
10316 if (info
->reg_size
!= 4)
10317 fprintf (stderr
, "\treg_size = %5d\n", info
->reg_size
);
10319 fprintf (stderr
, "\n");
/* NOTE(review): returns RTL for a function's return address.  Two visible
   paths: for COUNT != 0, or PIC code under a non-AIX ABI, it forces a full
   frame (cfun->machine->ra_needs_full_frame) and loads the address from
   memory at FRAME + RETURN_ADDRESS_OFFSET; otherwise it returns the
   entry-time value of LR via get_hard_reg_initial_val.  Original lines
   10324-10326 and 10333-10338 (incl. the return of the memory-load path)
   were lost in extraction -- confirm against the full source.  */
10323 rs6000_return_addr (count
, frame
)
10327 /* Currently we don't optimize very well between prolog and body
10328 code and for PIC code the code can be actually quite bad, so
10329 don't try to be too clever here. */
10330 if (count
!= 0 || (DEFAULT_ABI
!= ABI_AIX
&& flag_pic
))
10332 cfun
->machine
->ra_needs_full_frame
= 1;
/* NOTE(review): the expression below is the tail of a larger statement
   whose head (original lines 10333-10338) is missing.  */
10339 plus_constant (copy_to_reg
10340 (gen_rtx_MEM (Pmode
,
10341 memory_address (Pmode
, frame
))),
10342 RETURN_ADDRESS_OFFSET
)));
/* Default: the link register's value at function entry.  */
10345 return get_hard_reg_initial_val (Pmode
, LINK_REGISTER_REGNUM
);
10348 /* Say whether a function is a candidate for sibcall handling or not.
10349 We do not allow indirect calls to be optimized into sibling calls.
10350 Also, we can't do it if there are any vector parameters; there's
10351 nowhere to put the VRsave code so it works; note that functions with
10352 vector parameters are required to have a prototype, so the argument
10353 type info must be available here. (The tail recursion case can work
10354 with vector parameters, but there's no way to distinguish here.) */
/* NOTE(review): extraction dropped this function's return statements and
   braces (original lines 10355, 10357, 10359-10362, 10364, 10367,
   10369-10371, 10374, 10376, 10379 ff.), so only the decision conditions
   are visible: reject on vector-typed parameters when VRSAVE is enabled;
   for Darwin or locally-bound DECLs, consult longcall/shortcall
   attributes.  Confirm the return values against the full source.  */
10356 rs6000_function_ok_for_sibcall (decl
, exp
)
10358 tree exp ATTRIBUTE_UNUSED
;
10363 if (TARGET_ALTIVEC_VRSAVE
)
10365 for (type
= TYPE_ARG_TYPES (TREE_TYPE (decl
));
10366 type
; type
= TREE_CHAIN (type
))
10368 if (TREE_CODE (TREE_VALUE (type
)) == VECTOR_TYPE
)
10372 if (DEFAULT_ABI
== ABI_DARWIN
10373 || (*targetm
.binds_local_p
) (decl
))
10375 tree attr_list
= TYPE_ATTRIBUTES (TREE_TYPE (decl
));
10377 if (!lookup_attribute ("longcall", attr_list
)
10378 || lookup_attribute ("shortcall", attr_list
))
/* NOTE(review): predicate -- scans the topmost insn sequence for anything
   that clobbers the link register (auto-inc notes on LR, non-sibling
   calls, or explicit sets outside prologue/epilogue).  The return
   statements and local declarations were lost in extraction (original
   lines 10387-10391, 10397-10398, 10403, 10406-10411, 10414, 10419,
   10421-10423, 10425, 10428, 10431 ff.) -- confirm against the full
   source.  */
10386 rs6000_ra_ever_killed ()
10392 /* Irritatingly, there are two kinds of thunks -- those created with
10393 TARGET_ASM_OUTPUT_MI_THUNK and those with DECL_THUNK_P that go
10394 through the regular part of the compiler. This is a very hacky
10395 way to tell them apart. */
10396 if (current_function_is_thunk
&& !no_new_pseudos
)
10399 /* regs_ever_live has LR marked as used if any sibcalls are present,
10400 but this should not force saving and restoring in the
10401 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
10402 clobbers LR, so that is inappropriate. */
10404 /* Also, the prologue can generate a store into LR that
10405 doesn't really count, like this:
10408 bcl to set PIC register
10412 When we're called from the epilogue, we need to avoid counting
10413 this as a store. */
/* Grab the outermost insn sequence without disturbing nesting.  */
10415 push_topmost_sequence ();
10416 top
= get_insns ();
10417 pop_topmost_sequence ();
10418 reg
= gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
);
10420 for (insn
= NEXT_INSN (top
); insn
!= NULL_RTX
; insn
= NEXT_INSN (insn
))
10424 if (FIND_REG_INC_NOTE (insn
, reg
))
10426 else if (GET_CODE (insn
) == CALL_INSN
10427 && !SIBLING_CALL_P (insn
))
10429 else if (set_of (reg
, insn
) != NULL_RTX
10430 && !prologue_epilogue_contains (insn
))
10437 /* Add a REG_MAYBE_DEAD note to the insn. */
/* NOTE(review): prepends a REG_MAYBE_DEAD note to INSN's REG_NOTES chain.
   The trailing arguments of the gen_rtx_EXPR_LIST call (original line
   10443, presumably the note datum and the existing REG_NOTES) were lost
   in extraction -- confirm against the full source.  */
10439 rs6000_maybe_dead (insn
)
10442 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD
,
10447 /* Emit instructions needed to load the TOC register.
10448 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
10449 a constant pool; or for SVR4 -fpic. */
10452 rs6000_emit_load_toc_table (fromprolog
)
10456 dest
= gen_rtx_REG (Pmode
, RS6000_PIC_OFFSET_TABLE_REGNUM
);
10458 if (TARGET_ELF
&& DEFAULT_ABI
== ABI_V4
&& flag_pic
== 1)
10460 rtx temp
= (fromprolog
10461 ? gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
)
10462 : gen_reg_rtx (Pmode
));
10463 insn
= emit_insn (gen_load_toc_v4_pic_si (temp
));
10465 rs6000_maybe_dead (insn
);
10466 insn
= emit_move_insn (dest
, temp
);
10468 rs6000_maybe_dead (insn
);
10470 else if (TARGET_ELF
&& DEFAULT_ABI
!= ABI_AIX
&& flag_pic
== 2)
10473 rtx tempLR
= (fromprolog
10474 ? gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
)
10475 : gen_reg_rtx (Pmode
));
10476 rtx temp0
= (fromprolog
10477 ? gen_rtx_REG (Pmode
, 0)
10478 : gen_reg_rtx (Pmode
));
10481 /* possibly create the toc section */
10482 if (! toc_initialized
)
10485 function_section (current_function_decl
);
10492 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCF", rs6000_pic_labelno
);
10493 symF
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
10495 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCL", rs6000_pic_labelno
);
10496 symL
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
10498 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR
,
10500 rs6000_maybe_dead (emit_move_insn (dest
, tempLR
));
10501 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0
, dest
,
10508 static int reload_toc_labelno
= 0;
10510 tocsym
= gen_rtx_SYMBOL_REF (Pmode
, toc_label_name
);
10512 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCG", reload_toc_labelno
++);
10513 symF
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
10515 emit_insn (gen_load_toc_v4_PIC_1b (tempLR
, symF
, tocsym
));
10516 emit_move_insn (dest
, tempLR
);
10517 emit_move_insn (temp0
, gen_rtx_MEM (Pmode
, dest
));
10519 insn
= emit_insn (gen_addsi3 (dest
, temp0
, dest
));
10521 rs6000_maybe_dead (insn
);
10523 else if (TARGET_ELF
&& !TARGET_AIX
&& flag_pic
== 0 && TARGET_MINIMAL_TOC
)
10525 /* This is for AIX code running in non-PIC ELF32. */
10528 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCTOC", 1);
10529 realsym
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
10531 insn
= emit_insn (gen_elf_high (dest
, realsym
));
10533 rs6000_maybe_dead (insn
);
10534 insn
= emit_insn (gen_elf_low (dest
, dest
, realsym
));
10536 rs6000_maybe_dead (insn
);
10538 else if (DEFAULT_ABI
== ABI_AIX
)
10541 insn
= emit_insn (gen_load_toc_aix_si (dest
));
10543 insn
= emit_insn (gen_load_toc_aix_di (dest
));
10545 rs6000_maybe_dead (insn
);
/* NOTE(review): lazily allocates and caches an alias set for TOC
   references.  The guard that tests `set == -1` before calling
   new_alias_set (original line 10555) and the return statement were lost
   in extraction -- confirm against the full source.  */
10552 get_TOC_alias_set ()
10554 static int set
= -1;
10556 set
= new_alias_set ();
10560 /* This returns nonzero if the current function uses the TOC. This is
10561 determined by the presence of (unspec ... UNSPEC_TOC) or
10562 use (unspec ... UNSPEC_TOC), which are generated by the various
10563 load_toc_* patterns. */
/* NOTE(review): the defining line of this function was lost in extraction
   (original lines 10565-10569); per the comment above it is presumably the
   TOC-usage predicate (uses_TOC) -- confirm against the full source.  The
   visible body scans every insn, and for PARALLEL patterns looks for a
   (use (unspec ... UNSPEC_TOC)) element.  The non-PARALLEL branch and the
   return statements are also missing from view.  */
10570 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
10573 rtx pat
= PATTERN (insn
);
10576 if (GET_CODE (pat
) == PARALLEL
)
10577 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
10579 rtx sub
= XVECEXP (pat
, 0, i
);
10580 if (GET_CODE (sub
) == USE
)
10582 sub
= XEXP (sub
, 0);
10583 if (GET_CODE (sub
) == UNSPEC
10584 && XINT (sub
, 1) == UNSPEC_TOC
)
/* NOTE(review): builds the RTL for a TOC-relative reference to SYMBOL:
   (plus TOC_REGISTER (const (minus SYMBOL toc_label))).  The return type
   and parameter declaration lines (original 10592, 10594-10595) were lost
   in extraction.  */
10593 create_TOC_reference (symbol
)
10596 return gen_rtx_PLUS (Pmode
,
10597 gen_rtx_REG (Pmode
, TOC_REGISTER
),
10598 gen_rtx_CONST (Pmode
,
10599 gen_rtx_MINUS (Pmode
, symbol
,
10600 gen_rtx_SYMBOL_REF (Pmode
, toc_label_name
))));
10604 /* __throw will restore its own return address to be the same as the
10605 return address of the function that the throw is being made to.
10606 This is unfortunate, because we want to check the original
10607 return address to see if we need to restore the TOC.
10608 So we have to squirrel it away here.
10609 This is used only in compiling __throw and __rethrow.
10611 Most of this code should be removed by CSE. */
/* File-scope cache holding (as a pseudo) the opcode word found at the
   caller's return address; consumed by rs6000_emit_eh_toc_restore.  */
10612 static rtx insn_after_throw
;
10614 /* This does the saving... */
/* NOTE(review): walks one frame up via the saved back-chain at
   [hard_frame_pointer], reads the return-address slot at offset
   2*wordsize, and loads the 32-bit opcode found there into
   insn_after_throw.  Return type, locals (e.g. `mem`) and braces were
   lost in extraction -- confirm against the full source.  */
10616 rs6000_aix_emit_builtin_unwind_init ()
10619 rtx stack_top
= gen_reg_rtx (Pmode
);
10620 rtx opcode_addr
= gen_reg_rtx (Pmode
);
10622 insn_after_throw
= gen_reg_rtx (SImode
);
10624 mem
= gen_rtx_MEM (Pmode
, hard_frame_pointer_rtx
);
10625 emit_move_insn (stack_top
, mem
);
10627 mem
= gen_rtx_MEM (Pmode
,
10628 gen_rtx_PLUS (Pmode
, stack_top
,
10629 GEN_INT (2 * GET_MODE_SIZE (Pmode
))));
10630 emit_move_insn (opcode_addr
, mem
);
10631 emit_move_insn (insn_after_throw
, gen_rtx_MEM (SImode
, opcode_addr
));
10634 /* Emit insns to _restore_ the TOC register, at runtime (specifically
10635 in _eh.o). Only used on AIX.
10637 The idea is that on AIX, function calls look like this:
10638 bl somefunction-trampoline
10642 somefunction-trampoline:
10644 ... load function address in the count register ...
10646 or like this, if the linker determines that this is not a cross-module call
10647 and so the TOC need not be restored:
10650 or like this, if the compiler could determine that this is not a
10653 now, the tricky bit here is that register 2 is saved and restored
10654 by the _linker_, so we can't readily generate debugging information
10655 for it. So we need to go back up the call chain looking at the
10656 insns at return addresses to see which calls saved the TOC register
10657 and so see where it gets restored from.
10659 Oh, and all this gets done in RTL inside the eh_epilogue pattern,
10660 just before the actual epilogue.
10662 On the bright side, this incurs no space or time overhead unless an
10663 exception is thrown, except for the extra code in libgcc.a.
10665 The parameter STACKSIZE is a register containing (at runtime)
10666 the amount to be popped off the stack in addition to the stack frame
10667 of this routine (which will be __throw or __rethrow, and so is
10668 guaranteed to have a stack frame). */
10671 rs6000_emit_eh_toc_restore (stacksize
)
10675 rtx bottom_of_stack
= gen_reg_rtx (Pmode
);
10676 rtx tocompare
= gen_reg_rtx (SImode
);
10677 rtx opcode
= gen_reg_rtx (SImode
);
10678 rtx opcode_addr
= gen_reg_rtx (Pmode
);
10680 rtx loop_start
= gen_label_rtx ();
10681 rtx no_toc_restore_needed
= gen_label_rtx ();
10682 rtx loop_exit
= gen_label_rtx ();
10684 mem
= gen_rtx_MEM (Pmode
, hard_frame_pointer_rtx
);
10685 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10686 emit_move_insn (bottom_of_stack
, mem
);
10688 top_of_stack
= expand_binop (Pmode
, add_optab
,
10689 bottom_of_stack
, stacksize
,
10690 NULL_RTX
, 1, OPTAB_WIDEN
);
10692 emit_move_insn (tocompare
, gen_int_mode (TARGET_32BIT
? 0x80410014
10693 : 0xE8410028, SImode
));
10695 if (insn_after_throw
== NULL_RTX
)
10697 emit_move_insn (opcode
, insn_after_throw
);
10699 emit_note (NULL
, NOTE_INSN_LOOP_BEG
);
10700 emit_label (loop_start
);
10702 do_compare_rtx_and_jump (opcode
, tocompare
, NE
, 1,
10703 SImode
, NULL_RTX
, NULL_RTX
,
10704 no_toc_restore_needed
);
10706 mem
= gen_rtx_MEM (Pmode
,
10707 gen_rtx_PLUS (Pmode
, bottom_of_stack
,
10708 GEN_INT (5 * GET_MODE_SIZE (Pmode
))));
10709 emit_move_insn (gen_rtx_REG (Pmode
, 2), mem
);
10711 emit_label (no_toc_restore_needed
);
10712 do_compare_rtx_and_jump (top_of_stack
, bottom_of_stack
, EQ
, 1,
10713 Pmode
, NULL_RTX
, NULL_RTX
,
10716 mem
= gen_rtx_MEM (Pmode
, bottom_of_stack
);
10717 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10718 emit_move_insn (bottom_of_stack
, mem
);
10720 mem
= gen_rtx_MEM (Pmode
,
10721 gen_rtx_PLUS (Pmode
, bottom_of_stack
,
10722 GEN_INT (2 * GET_MODE_SIZE (Pmode
))));
10723 emit_move_insn (opcode_addr
, mem
);
10724 emit_move_insn (opcode
, gen_rtx_MEM (SImode
, opcode_addr
));
10726 emit_note (NULL
, NOTE_INSN_LOOP_CONT
);
10727 emit_jump (loop_start
);
10728 emit_note (NULL
, NOTE_INSN_LOOP_END
);
10729 emit_label (loop_exit
);
10731 #endif /* TARGET_AIX */
10733 /* This ties together stack memory (MEM with an alias set of
10734 rs6000_sr_alias_set) and the change to the stack pointer. */
/* NOTE(review): emits a blockage-style `stack_tie` insn on a BLKmode MEM
   at the stack pointer so the scheduler cannot move stack-slot accesses
   across a stack-pointer adjustment.  Return type and braces were lost in
   extraction.  */
10737 rs6000_emit_stack_tie ()
10739 rtx mem
= gen_rtx_MEM (BLKmode
, gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
));
10741 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10742 emit_insn (gen_stack_tie (mem
));
10745 /* Emit the correct code for allocating stack space, as insns.
10746 If COPY_R12, make sure a copy of the old frame is left in r12.
10747 The generated code may use hard register 0 as a temporary. */
10750 rs6000_emit_allocate_stack (size
, copy_r12
)
10751 HOST_WIDE_INT size
;
10755 rtx stack_reg
= gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
);
10756 rtx tmp_reg
= gen_rtx_REG (Pmode
, 0);
10757 rtx todec
= GEN_INT (-size
);
10759 if (current_function_limit_stack
)
10761 if (REG_P (stack_limit_rtx
)
10762 && REGNO (stack_limit_rtx
) > 1
10763 && REGNO (stack_limit_rtx
) <= 31)
10765 emit_insn (TARGET_32BIT
10766 ? gen_addsi3 (tmp_reg
,
10769 : gen_adddi3 (tmp_reg
,
10773 emit_insn (gen_cond_trap (LTU
, stack_reg
, tmp_reg
,
10776 else if (GET_CODE (stack_limit_rtx
) == SYMBOL_REF
10778 && DEFAULT_ABI
== ABI_V4
)
10780 rtx toload
= gen_rtx_CONST (VOIDmode
,
10781 gen_rtx_PLUS (Pmode
,
10785 emit_insn (gen_elf_high (tmp_reg
, toload
));
10786 emit_insn (gen_elf_low (tmp_reg
, tmp_reg
, toload
));
10787 emit_insn (gen_cond_trap (LTU
, stack_reg
, tmp_reg
,
10791 warning ("stack limit expression is not supported");
10794 if (copy_r12
|| ! TARGET_UPDATE
)
10795 emit_move_insn (gen_rtx_REG (Pmode
, 12), stack_reg
);
10801 /* Need a note here so that try_split doesn't get confused. */
10802 if (get_last_insn() == NULL_RTX
)
10803 emit_note (0, NOTE_INSN_DELETED
);
10804 insn
= emit_move_insn (tmp_reg
, todec
);
10805 try_split (PATTERN (insn
), insn
, 0);
10809 insn
= emit_insn (TARGET_32BIT
10810 ? gen_movsi_update (stack_reg
, stack_reg
,
10812 : gen_movdi_update (stack_reg
, stack_reg
,
10813 todec
, stack_reg
));
10817 insn
= emit_insn (TARGET_32BIT
10818 ? gen_addsi3 (stack_reg
, stack_reg
, todec
)
10819 : gen_adddi3 (stack_reg
, stack_reg
, todec
));
10820 emit_move_insn (gen_rtx_MEM (Pmode
, stack_reg
),
10821 gen_rtx_REG (Pmode
, 12));
10824 RTX_FRAME_RELATED_P (insn
) = 1;
10826 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
10827 gen_rtx_SET (VOIDmode
, stack_reg
,
10828 gen_rtx_PLUS (Pmode
, stack_reg
,
10833 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
10834 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
10835 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
10836 deduce these equivalences by itself so it wasn't necessary to hold
10837 its hand so much. */
10840 rs6000_frame_related (insn
, reg
, val
, reg2
, rreg
)
10849 /* copy_rtx will not make unique copies of registers, so we need to
10850 ensure we don't have unwanted sharing here. */
10852 reg
= gen_raw_REG (GET_MODE (reg
), REGNO (reg
));
10855 reg
= gen_raw_REG (GET_MODE (reg
), REGNO (reg
));
10857 real
= copy_rtx (PATTERN (insn
));
10859 if (reg2
!= NULL_RTX
)
10860 real
= replace_rtx (real
, reg2
, rreg
);
10862 real
= replace_rtx (real
, reg
,
10863 gen_rtx_PLUS (Pmode
, gen_rtx_REG (Pmode
,
10864 STACK_POINTER_REGNUM
),
10867 /* We expect that 'real' is either a SET or a PARALLEL containing
10868 SETs (and possibly other stuff). In a PARALLEL, all the SETs
10869 are important so they all have to be marked RTX_FRAME_RELATED_P. */
10871 if (GET_CODE (real
) == SET
)
10875 temp
= simplify_rtx (SET_SRC (set
));
10877 SET_SRC (set
) = temp
;
10878 temp
= simplify_rtx (SET_DEST (set
));
10880 SET_DEST (set
) = temp
;
10881 if (GET_CODE (SET_DEST (set
)) == MEM
)
10883 temp
= simplify_rtx (XEXP (SET_DEST (set
), 0));
10885 XEXP (SET_DEST (set
), 0) = temp
;
10888 else if (GET_CODE (real
) == PARALLEL
)
10891 for (i
= 0; i
< XVECLEN (real
, 0); i
++)
10892 if (GET_CODE (XVECEXP (real
, 0, i
)) == SET
)
10894 rtx set
= XVECEXP (real
, 0, i
);
10896 temp
= simplify_rtx (SET_SRC (set
));
10898 SET_SRC (set
) = temp
;
10899 temp
= simplify_rtx (SET_DEST (set
));
10901 SET_DEST (set
) = temp
;
10902 if (GET_CODE (SET_DEST (set
)) == MEM
)
10904 temp
= simplify_rtx (XEXP (SET_DEST (set
), 0));
10906 XEXP (SET_DEST (set
), 0) = temp
;
10908 RTX_FRAME_RELATED_P (set
) = 1;
10915 real
= spe_synthesize_frame_save (real
);
10917 RTX_FRAME_RELATED_P (insn
) = 1;
10918 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
10923 /* Given an SPE frame note, return a PARALLEL of SETs with the
10924 original note, plus a synthetic register save. */
10927 spe_synthesize_frame_save (real
)
10930 rtx synth
, offset
, reg
, real2
;
10932 if (GET_CODE (real
) != SET
10933 || GET_MODE (SET_SRC (real
)) != V2SImode
)
10936 /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
10937 frame related note. The parallel contains a set of the register
10938 being saved, and another set to a synthetic register (n+1200).
10939 This is so we can differentiate between 64-bit and 32-bit saves.
10940 Words cannot describe this nastiness. */
10942 if (GET_CODE (SET_DEST (real
)) != MEM
10943 || GET_CODE (XEXP (SET_DEST (real
), 0)) != PLUS
10944 || GET_CODE (SET_SRC (real
)) != REG
)
10948 (set (mem (plus (reg x) (const y)))
10951 (set (mem (plus (reg x) (const y+4)))
10955 real2
= copy_rtx (real
);
10956 PUT_MODE (SET_DEST (real2
), SImode
);
10957 reg
= SET_SRC (real2
);
10958 real2
= replace_rtx (real2
, reg
, gen_rtx_REG (SImode
, REGNO (reg
)));
10959 synth
= copy_rtx (real2
);
10961 if (BYTES_BIG_ENDIAN
)
10963 offset
= XEXP (XEXP (SET_DEST (real2
), 0), 1);
10964 real2
= replace_rtx (real2
, offset
, GEN_INT (INTVAL (offset
) + 4));
10967 reg
= SET_SRC (synth
);
10969 synth
= replace_rtx (synth
, reg
,
10970 gen_rtx_REG (SImode
, REGNO (reg
) + 1200));
10972 offset
= XEXP (XEXP (SET_DEST (synth
), 0), 1);
10973 synth
= replace_rtx (synth
, offset
,
10974 GEN_INT (INTVAL (offset
)
10975 + (BYTES_BIG_ENDIAN
? 0 : 4)));
10977 RTX_FRAME_RELATED_P (synth
) = 1;
10978 RTX_FRAME_RELATED_P (real2
) = 1;
10979 if (BYTES_BIG_ENDIAN
)
10980 real
= gen_rtx_PARALLEL (VOIDmode
, gen_rtvec (2, synth
, real2
));
10982 real
= gen_rtx_PARALLEL (VOIDmode
, gen_rtvec (2, real2
, synth
));
10987 /* Returns an insn that has a vrsave set operation with the
10988 appropriate CLOBBERs. */
10991 generate_set_vrsave (reg
, info
, epiloguep
)
10993 rs6000_stack_t
*info
;
10997 rtx insn
, clobs
[TOTAL_ALTIVEC_REGS
+ 1];
10998 rtx vrsave
= gen_rtx_REG (SImode
, VRSAVE_REGNO
);
11001 = gen_rtx_SET (VOIDmode
,
11003 gen_rtx_UNSPEC_VOLATILE (SImode
,
11004 gen_rtvec (2, reg
, vrsave
),
11009 /* We need to clobber the registers in the mask so the scheduler
11010 does not move sets to VRSAVE before sets of AltiVec registers.
11012 However, if the function receives nonlocal gotos, reload will set
11013 all call saved registers live. We will end up with:
11015 (set (reg 999) (mem))
11016 (parallel [ (set (reg vrsave) (unspec blah))
11017 (clobber (reg 999))])
11019 The clobber will cause the store into reg 999 to be dead, and
11020 flow will attempt to delete an epilogue insn. In this case, we
11021 need an unspec use/set of the register. */
11023 for (i
= FIRST_ALTIVEC_REGNO
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
11024 if (info
->vrsave_mask
!= 0 && ALTIVEC_REG_BIT (i
) != 0)
11026 if (!epiloguep
|| call_used_regs
[i
])
11027 clobs
[nclobs
++] = gen_rtx_CLOBBER (VOIDmode
,
11028 gen_rtx_REG (V4SImode
, i
));
11031 rtx reg
= gen_rtx_REG (V4SImode
, i
);
11034 = gen_rtx_SET (VOIDmode
,
11036 gen_rtx_UNSPEC (V4SImode
,
11037 gen_rtvec (1, reg
), 27));
11041 insn
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (nclobs
));
11043 for (i
= 0; i
< nclobs
; ++i
)
11044 XVECEXP (insn
, 0, i
) = clobs
[i
];
11049 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
11050 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
/* NOTE(review): extraction dropped lines of this function (original
   11051-11052, 11054-11055, 11058-11060, 11065, 11068, 11071, 11076,
   11079-11081, 11085-11086, 11091, 11093), including part of the
   register-indexed-addressing condition (the `|| (TARGET_SPE_ABI` arm at
   11068 appears to be missing, leaving the visible parentheses
   unbalanced) and the else-branch introducers.  Confirm against the full
   source.  */
11053 emit_frame_save (frame_reg
, frame_ptr
, mode
, regno
, offset
, total_size
)
11056 enum machine_mode mode
;
11057 unsigned int regno
;
11061 rtx reg
, offset_rtx
, insn
, mem
, addr
, int_rtx
;
11062 rtx replacea
, replaceb
;
11064 int_rtx
= GEN_INT (offset
);
11066 /* Some cases that need register indexed addressing. */
11067 if ((TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
11069 && SPE_VECTOR_MODE (mode
)
11070 && !SPE_CONST_OFFSET_OK (offset
)))
11072 /* Whomever calls us must make sure r11 is available in the
11073 flow path of instructions in the prologue. */
11074 offset_rtx
= gen_rtx_REG (Pmode
, 11);
11075 emit_move_insn (offset_rtx
, int_rtx
);
11077 replacea
= offset_rtx
;
11078 replaceb
= int_rtx
;
/* Else path: constant offset is usable directly; no replacements needed
   for the frame-related note rewrite.  */
11082 offset_rtx
= int_rtx
;
11083 replacea
= NULL_RTX
;
11084 replaceb
= NULL_RTX
;
/* Build [FRAME_REG + offset] in MODE, tag it with the stack alias set,
   store the register, then record the save for unwind info.  */
11087 reg
= gen_rtx_REG (mode
, regno
);
11088 addr
= gen_rtx_PLUS (Pmode
, frame_reg
, offset_rtx
);
11089 mem
= gen_rtx_MEM (mode
, addr
);
11090 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11092 insn
= emit_move_insn (mem
, reg
);
11094 rs6000_frame_related (insn
, frame_ptr
, total_size
, replacea
, replaceb
);
11097 /* Emit an offset memory reference suitable for a frame store, while
11098 converting to a valid addressing mode. */
/* NOTE(review): for SPE vector modes (whose load/store offsets are
   limited) the constant offset is first moved into the FIXED_SCRATCH
   register so a reg+reg address can be used; otherwise the constant is
   used directly.  Return type, other parameter declarations, braces and
   the `else` keyword (original lines 11099-11100, 11103-11105, 11107,
   11109, 11111, 11114-11115, 11117) were lost in extraction.  */
11101 gen_frame_mem_offset (mode
, reg
, offset
)
11102 enum machine_mode mode
;
11106 rtx int_rtx
, offset_rtx
;
11108 int_rtx
= GEN_INT (offset
);
11110 if (TARGET_SPE_ABI
&& SPE_VECTOR_MODE (mode
))
11112 offset_rtx
= gen_rtx_REG (Pmode
, FIXED_SCRATCH
);
11113 emit_move_insn (offset_rtx
, int_rtx
);
11116 offset_rtx
= int_rtx
;
11118 return gen_rtx_MEM (mode
, gen_rtx_PLUS (Pmode
, reg
, offset_rtx
));
11121 /* Emit function prologue as insns. */
/* NOTE(review): this whole function is a line-shattered extraction of
   the original source; the `void' header line, all brace-only lines,
   several declarations and a number of expression lines were dropped by
   the extraction (visible as gaps in the embedded original line
   numbers).  Only comments have been added here; every surviving code
   token is reproduced byte-for-byte.  The function emits, as RTL, the
   stack allocation and register saves (AltiVec, VRSAVE, FPRs, GPRs,
   EH data regs, LR, CR), plus frame pointer and TOC/PIC setup.  */
11124 rs6000_emit_prologue ()
11126 rs6000_stack_t
*info
= rs6000_stack_info ();
11127 enum machine_mode reg_mode
= TARGET_POWERPC64
? DImode
: SImode
;
11128 int reg_size
= TARGET_POWERPC64
? 8 : 4;
11129 rtx sp_reg_rtx
= gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
);
11130 rtx frame_ptr_rtx
= gen_rtx_REG (Pmode
, 12);
11131 rtx frame_reg_rtx
= sp_reg_rtx
;
11132 rtx cr_save_rtx
= NULL
;
11134 int saving_FPRs_inline
;
11135 int using_store_multiple
;
11136 HOST_WIDE_INT sp_offset
= 0;
/* Under the SPE ABI with 64-bit registers in use, GPR saves are done
   in V2SImode so the full 64 bits are preserved.  */
11138 if (TARGET_SPE_ABI
&& info
->spe_64bit_regs_used
!= 0)
11140 reg_mode
= V2SImode
;
11144 using_store_multiple
= (TARGET_MULTIPLE
&& ! TARGET_POWERPC64
11145 && (!TARGET_SPE_ABI
11146 || info
->spe_64bit_regs_used
== 0)
11147 && info
->first_gp_reg_save
< 31);
11148 saving_FPRs_inline
= (info
->first_fp_reg_save
== 64
11149 || FP_SAVE_INLINE (info
->first_fp_reg_save
));
11151 /* For V.4, update stack before we do any saving and set back pointer. */
11152 if (info
->push_p
&& DEFAULT_ABI
== ABI_V4
)
11154 if (info
->total_size
< 32767)
11155 sp_offset
= info
->total_size
;
11157 frame_reg_rtx
= frame_ptr_rtx
;
11158 rs6000_emit_allocate_stack (info
->total_size
,
11159 (frame_reg_rtx
!= sp_reg_rtx
11160 && (info
->cr_save_p
11162 || info
->first_fp_reg_save
< 64
11163 || info
->first_gp_reg_save
< 32
11165 if (frame_reg_rtx
!= sp_reg_rtx
)
11166 rs6000_emit_stack_tie ();
11169 /* Save AltiVec registers if needed. */
11170 if (TARGET_ALTIVEC_ABI
&& info
->altivec_size
!= 0)
11174 /* There should be a non inline version of this, for when we
11175 are saving lots of vector registers. */
11176 for (i
= info
->first_altivec_reg_save
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
11177 if (info
->vrsave_mask
& ALTIVEC_REG_BIT (i
))
11179 rtx areg
, savereg
, mem
;
11182 offset
= info
->altivec_save_offset
+ sp_offset
11183 + 16 * (i
- info
->first_altivec_reg_save
);
11185 savereg
= gen_rtx_REG (V4SImode
, i
);
11187 areg
= gen_rtx_REG (Pmode
, 0);
11188 emit_move_insn (areg
, GEN_INT (offset
));
11190 /* AltiVec addressing mode is [reg+reg]. */
11191 mem
= gen_rtx_MEM (V4SImode
,
11192 gen_rtx_PLUS (Pmode
, frame_reg_rtx
, areg
));
11194 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11196 insn
= emit_move_insn (mem
, savereg
);
11198 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
11199 areg
, GEN_INT (offset
));
11203 /* VRSAVE is a bit vector representing which AltiVec registers
11204 are used. The OS uses this to determine which vector
11205 registers to save on a context switch. We need to save
11206 VRSAVE on the stack frame, add whatever AltiVec registers we
11207 used in this function, and do the corresponding magic in the
11210 if (TARGET_ALTIVEC
&& info
->vrsave_mask
!= 0)
11212 rtx reg
, mem
, vrsave
;
11215 /* Get VRSAVE onto a GPR. */
11216 reg
= gen_rtx_REG (SImode
, 12);
11217 vrsave
= gen_rtx_REG (SImode
, VRSAVE_REGNO
);
11219 emit_insn (gen_get_vrsave_internal (reg
));
11221 emit_insn (gen_rtx_SET (VOIDmode
, reg
, vrsave
));
/* Save VRSAVE to its slot in the frame.  (The `mem' assignment target
   line was dropped by the extraction before `= gen_rtx_MEM'.)  */
11224 offset
= info
->vrsave_save_offset
+ sp_offset
;
11226 = gen_rtx_MEM (SImode
,
11227 gen_rtx_PLUS (Pmode
, frame_reg_rtx
, GEN_INT (offset
)));
11228 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11229 insn
= emit_move_insn (mem
, reg
);
11231 /* Include the registers in the mask. */
11232 emit_insn (gen_iorsi3 (reg
, reg
, GEN_INT ((int) info
->vrsave_mask
)));
11234 insn
= emit_insn (generate_set_vrsave (reg
, info
, 0));
11237 /* If we use the link register, get it into r0. */
11238 if (info
->lr_save_p
)
11239 emit_move_insn (gen_rtx_REG (Pmode
, 0),
11240 gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
));
11242 /* If we need to save CR, put it into r12. */
11243 if (info
->cr_save_p
&& frame_reg_rtx
!= frame_ptr_rtx
)
11245 cr_save_rtx
= gen_rtx_REG (SImode
, 12);
11246 emit_insn (gen_movesi_from_cr (cr_save_rtx
))
;
11249 /* Do any required saving of fpr's. If only one or two to save, do
11250 it ourselves. Otherwise, call function. */
11251 if (saving_FPRs_inline
)
11254 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
11255 if ((regs_ever_live
[info
->first_fp_reg_save
+i
]
11256 && ! call_used_regs
[info
->first_fp_reg_save
+i
]))
11257 emit_frame_save (frame_reg_rtx
, frame_ptr_rtx
, DFmode
,
11258 info
->first_fp_reg_save
+ i
,
11259 info
->fp_save_offset
+ sp_offset
+ 8 * i
,
/* NOTE(review): the final `info->total_size)' argument line (orig.
   11260) was dropped by the extraction.  */
11262 else if (info
->first_fp_reg_save
!= 64)
/* Out-of-line FPR save: build a PARALLEL that calls the _savefN
   world-save routine and records each store for unwind info.  */
11266 const char *alloc_rname
;
11268 p
= rtvec_alloc (2 + 64 - info
->first_fp_reg_save
);
11270 RTVEC_ELT (p
, 0) = gen_rtx_CLOBBER (VOIDmode
,
11271 gen_rtx_REG (Pmode
,
11272 LINK_REGISTER_REGNUM
));
11273 sprintf (rname
, "%s%d%s", SAVE_FP_PREFIX
,
11274 info
->first_fp_reg_save
- 32, SAVE_FP_SUFFIX
);
11275 alloc_rname
= ggc_strdup (rname
);
11276 RTVEC_ELT (p
, 1) = gen_rtx_USE (VOIDmode
,
11277 gen_rtx_SYMBOL_REF (Pmode
,
11279 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
11281 rtx addr
, reg
, mem
;
11282 reg
= gen_rtx_REG (DFmode
, info
->first_fp_reg_save
+ i
);
11283 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
11284 GEN_INT (info
->fp_save_offset
11285 + sp_offset
+ 8*i
));
11286 mem
= gen_rtx_MEM (DFmode
, addr
);
11287 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11289 RTVEC_ELT (p
, i
+ 2) = gen_rtx_SET (VOIDmode
, mem
, reg
);
11291 insn
= emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
11292 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
11293 NULL_RTX
, NULL_RTX
);
11296 /* Save GPRs. This is done as a PARALLEL if we are using
11297 the store-multiple instructions. */
11298 if (using_store_multiple
)
11302 p
= rtvec_alloc (32 - info
->first_gp_reg_save
);
11303 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
11305 rtx addr
, reg
, mem
;
11306 reg
= gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
);
11307 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
11308 GEN_INT (info
->gp_save_offset
11311 mem
= gen_rtx_MEM (reg_mode
, addr
);
11312 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11314 RTVEC_ELT (p
, i
) = gen_rtx_SET (VOIDmode
, mem
, reg
);
11316 insn
= emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
11317 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
11318 NULL_RTX
, NULL_RTX
);
/* Otherwise save live call-saved GPRs one at a time; the PIC register
   is also saved when the ABI requires it.  */
11323 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
11324 if ((regs_ever_live
[info
->first_gp_reg_save
+i
]
11325 && ! call_used_regs
[info
->first_gp_reg_save
+i
])
11326 || (i
+info
->first_gp_reg_save
== RS6000_PIC_OFFSET_TABLE_REGNUM
11327 && ((DEFAULT_ABI
== ABI_V4
&& flag_pic
!= 0)
11328 || (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
))))
11330 rtx addr
, reg
, mem
;
11331 reg
= gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
);
11333 if (TARGET_SPE_ABI
&& info
->spe_64bit_regs_used
!= 0)
11335 int offset
= info
->spe_gp_save_offset
+ sp_offset
+ 8 * i
;
11338 if (!SPE_CONST_OFFSET_OK (offset
))
11340 b
= gen_rtx_REG (Pmode
, FIXED_SCRATCH
);
11341 emit_move_insn (b
, GEN_INT (offset
));
11344 b
= GEN_INT (offset
);
11346 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
, b
);
11347 mem
= gen_rtx_MEM (V2SImode
, addr
);
11348 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11349 insn
= emit_move_insn (mem
, reg
);
11351 if (GET_CODE (b
) == CONST_INT
)
11352 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
11353 NULL_RTX
, NULL_RTX
);
11355 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
11356 b
, GEN_INT (offset
));
11360 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
11361 GEN_INT (info
->gp_save_offset
11364 mem
= gen_rtx_MEM (reg_mode
, addr
);
11365 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11367 insn
= emit_move_insn (mem
, reg
);
11368 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
11369 NULL_RTX
, NULL_RTX
);
11374 /* ??? There's no need to emit actual instructions here, but it's the
11375 easiest way to get the frame unwind information emitted. */
11376 if (current_function_calls_eh_return
)
11378 unsigned int i
, regno
;
/* NOTE(review): the `for (i = 0; ; ++i)' loop-header line and its
   `break' were dropped by the extraction.  */
11382 regno
= EH_RETURN_DATA_REGNO (i
);
11383 if (regno
== INVALID_REGNUM
)
11386 emit_frame_save (frame_reg_rtx
, frame_ptr_rtx
, reg_mode
, regno
,
11387 info
->ehrd_offset
+ sp_offset
11388 + reg_size
* (int) i
,
11393 /* Save lr if we used it. */
11394 if (info
->lr_save_p
)
11396 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
11397 GEN_INT (info
->lr_save_offset
+ sp_offset
));
11398 rtx reg
= gen_rtx_REG (Pmode
, 0);
11399 rtx mem
= gen_rtx_MEM (Pmode
, addr
);
11400 /* This should not be of rs6000_sr_alias_set, because of
11401 __builtin_return_address. */
11403 insn
= emit_move_insn (mem
, reg
);
11404 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
11405 reg
, gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
));
11408 /* Save CR if we use any that must be preserved. */
11409 if (info
->cr_save_p
)
11411 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
11412 GEN_INT (info
->cr_save_offset
+ sp_offset
));
11413 rtx mem
= gen_rtx_MEM (SImode
, addr
);
11415 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11417 /* If r12 was used to hold the original sp, copy cr into r0 now
11419 if (REGNO (frame_reg_rtx
) == 12)
11421 cr_save_rtx
= gen_rtx_REG (SImode
, 0);
11422 emit_insn (gen_movesi_from_cr (cr_save_rtx
));
11424 insn
= emit_move_insn (mem
, cr_save_rtx
);
11426 /* Now, there's no way that dwarf2out_frame_debug_expr is going
11427 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
11428 But that's OK. All we have to do is specify that _one_ condition
11429 code register is saved in this stack slot. The thrower's epilogue
11430 will then restore all the call-saved registers.
11431 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
11432 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
11433 cr_save_rtx
, gen_rtx_REG (SImode
, CR2_REGNO
));
11436 /* Update stack and set back pointer unless this is V.4,
11437 for which it was done previously. */
11438 if (info
->push_p
&& DEFAULT_ABI
!= ABI_V4
)
11439 rs6000_emit_allocate_stack (info
->total_size
, FALSE
);
11441 /* Set frame pointer, if needed. */
11442 if (frame_pointer_needed
)
11444 insn
= emit_move_insn (gen_rtx_REG (Pmode
, FRAME_POINTER_REGNUM
),
11446 RTX_FRAME_RELATED_P (insn
) = 1;
11449 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
11450 if ((TARGET_TOC
&& TARGET_MINIMAL_TOC
&& get_pool_size () != 0)
11451 || (DEFAULT_ABI
== ABI_V4
&& flag_pic
== 1
11452 && regs_ever_live
[RS6000_PIC_OFFSET_TABLE_REGNUM
]))
11454 /* If emit_load_toc_table will use the link register, we need to save
11455 it. We use R12 for this purpose because emit_load_toc_table
11456 can use register 0. This allows us to use a plain 'blr' to return
11457 from the procedure more often. */
11458 int save_LR_around_toc_setup
= (TARGET_ELF
11459 && DEFAULT_ABI
!= ABI_AIX
11461 && ! info
->lr_save_p
11462 && EXIT_BLOCK_PTR
->pred
!= NULL
);
11463 if (save_LR_around_toc_setup
)
11465 rtx lr
= gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
);
11466 rs6000_maybe_dead (emit_move_insn (frame_ptr_rtx
, lr
));
11467 rs6000_emit_load_toc_table (TRUE
);
11468 rs6000_maybe_dead (emit_move_insn (lr
, frame_ptr_rtx
));
11471 rs6000_emit_load_toc_table (TRUE
);
/* Darwin PIC: load the picbase into LR, then copy it into the PIC
   offset-table register.  */
11475 if (DEFAULT_ABI
== ABI_DARWIN
11476 && flag_pic
&& current_function_uses_pic_offset_table
)
11478 rtx dest
= gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
);
11479 const char *picbase
= machopic_function_base_name ();
11480 rtx src
= gen_rtx_SYMBOL_REF (Pmode
, picbase
);
11482 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest
, src
)));
11484 rs6000_maybe_dead (
11485 emit_move_insn (gen_rtx_REG (Pmode
, RS6000_PIC_OFFSET_TABLE_REGNUM
),
11486 gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
)));
11491 /* Write function prologue. */
/* NOTE(review): line-shattered extraction; the `static void' header,
   the `FILE *file' declaration, brace lines and a few statement lines
   were dropped.  Only comments added; code tokens byte-identical.
   Emits .extern directives for out-of-line save/restore helpers and,
   when the target has no RTL prologue (! HAVE_prologue), expands and
   assembles the prologue insns directly into FILE.  */
11494 rs6000_output_function_prologue (file
, size
)
11496 HOST_WIDE_INT size ATTRIBUTE_UNUSED
;
11498 rs6000_stack_t
*info
= rs6000_stack_info ();
11500 if (TARGET_DEBUG_STACK
)
11501 debug_stack_info (info
);
11503 /* Write .extern for any function we will call to save and restore
11505 if (info
->first_fp_reg_save
< 64
11506 && !FP_SAVE_INLINE (info
->first_fp_reg_save
))
11507 fprintf (file
, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
11508 SAVE_FP_PREFIX
, info
->first_fp_reg_save
- 32, SAVE_FP_SUFFIX
,
11509 RESTORE_FP_PREFIX
, info
->first_fp_reg_save
- 32,
11510 RESTORE_FP_SUFFIX
);
11512 /* Write .extern for AIX common mode routines, if needed. */
11513 if (! TARGET_POWER
&& ! TARGET_POWERPC
&& ! common_mode_defined
)
11515 fputs ("\t.extern __mulh\n", file
);
11516 fputs ("\t.extern __mull\n", file
);
11517 fputs ("\t.extern __divss\n", file
);
11518 fputs ("\t.extern __divus\n", file
);
11519 fputs ("\t.extern __quoss\n", file
);
11520 fputs ("\t.extern __quous\n", file
);
11521 common_mode_defined
= 1;
11524 if (! HAVE_prologue
)
11528 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
11529 the "toplevel" insn chain. */
11530 emit_note (0, NOTE_INSN_DELETED
);
11531 rs6000_emit_prologue ();
11532 emit_note (0, NOTE_INSN_DELETED
);
11534 /* Expand INSN_ADDRESSES so final() doesn't crash. */
11538 for (insn
= get_insns (); insn
!= 0; insn
= NEXT_INSN (insn
))
11540 INSN_ADDRESSES_NEW (insn
, addr
);
/* Assemble the just-emitted insn chain into the output file.  */
11545 if (TARGET_DEBUG_STACK
)
11546 debug_rtx_list (get_insns (), 100);
11547 final (get_insns (), file
, FALSE
, FALSE
);
11551 rs6000_pic_labelno
++;
11554 /* Emit function epilogue as insns.
11556 At present, dwarf2out_frame_debug_expr doesn't understand
11557 register restores, so we don't bother setting RTX_FRAME_RELATED_P
11558 anywhere in the epilogue. Most of the insns below would in any case
11559 need special notes to explain where r11 is in relation to the stack. */
/* NOTE(review): line-shattered extraction; the `void' header line, the
   `int sibcall' declaration, brace lines and several statement lines
   were dropped (gaps visible in the embedded original line numbers).
   Only comments added; surviving code tokens are byte-identical.
   Restores saved registers (AltiVec, VRSAVE, LR, CR, EH data, GPRs,
   FPRs), unwinds the stack pointer, and emits the return (or the jump
   to the out-of-line FPR-restore routine).  */
11562 rs6000_emit_epilogue (sibcall
)
11565 rs6000_stack_t
*info
;
11566 int restoring_FPRs_inline
;
11567 int using_load_multiple
;
11568 int using_mfcr_multiple
;
11569 int use_backchain_to_restore_sp
;
11571 rtx sp_reg_rtx
= gen_rtx_REG (Pmode
, 1);
11572 rtx frame_reg_rtx
= sp_reg_rtx
;
11573 enum machine_mode reg_mode
= TARGET_POWERPC64
? DImode
: SImode
;
11574 int reg_size
= TARGET_POWERPC64
? 8 : 4;
11577 info
= rs6000_stack_info ();
11579 if (TARGET_SPE_ABI
&& info
->spe_64bit_regs_used
!= 0)
11581 reg_mode
= V2SImode
;
11585 using_load_multiple
= (TARGET_MULTIPLE
&& ! TARGET_POWERPC64
11586 && (!TARGET_SPE_ABI
11587 || info
->spe_64bit_regs_used
== 0)
11588 && info
->first_gp_reg_save
< 31);
11589 restoring_FPRs_inline
= (sibcall
11590 || current_function_calls_eh_return
11591 || info
->first_fp_reg_save
== 64
11592 || FP_SAVE_INLINE (info
->first_fp_reg_save
));
11593 use_backchain_to_restore_sp
= (frame_pointer_needed
11594 || current_function_calls_alloca
11595 || info
->total_size
> 32767);
11596 using_mfcr_multiple
= (rs6000_cpu
== PROCESSOR_PPC601
11597 || rs6000_cpu
== PROCESSOR_PPC603
11598 || rs6000_cpu
== PROCESSOR_PPC750
11601 /* If we have a frame pointer, a call to alloca, or a large stack
11602 frame, restore the old stack pointer using the backchain. Otherwise,
11603 we know what size to update it with. */
11604 if (use_backchain_to_restore_sp
)
11606 /* Under V.4, don't reset the stack pointer until after we're done
11607 loading the saved registers. */
11608 if (DEFAULT_ABI
== ABI_V4
)
11609 frame_reg_rtx
= gen_rtx_REG (Pmode
, 11);
11611 emit_move_insn (frame_reg_rtx
,
11612 gen_rtx_MEM (Pmode
, sp_reg_rtx
));
11615 else if (info
->push_p
)
11617 if (DEFAULT_ABI
== ABI_V4
)
11618 sp_offset
= info
->total_size
;
11621 emit_insn (TARGET_32BIT
11622 ? gen_addsi3 (sp_reg_rtx
, sp_reg_rtx
,
11623 GEN_INT (info
->total_size
))
11624 : gen_adddi3 (sp_reg_rtx
, sp_reg_rtx
,
11625 GEN_INT (info
->total_size
)));
11629 /* Restore AltiVec registers if needed. */
11630 if (TARGET_ALTIVEC_ABI
&& info
->altivec_size
!= 0)
11634 for (i
= info
->first_altivec_reg_save
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
11635 if (info
->vrsave_mask
& ALTIVEC_REG_BIT (i
))
11637 rtx addr
, areg
, mem
;
11639 areg
= gen_rtx_REG (Pmode
, 0);
/* NOTE(review): the `emit_move_insn' call line preceding this argument
   list (orig. 11640) was dropped by the extraction.  */
11641 (areg
, GEN_INT (info
->altivec_save_offset
11643 + 16 * (i
- info
->first_altivec_reg_save
)));
11645 /* AltiVec addressing mode is [reg+reg]. */
11646 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
, areg
);
11647 mem
= gen_rtx_MEM (V4SImode
, addr
);
11648 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11650 emit_move_insn (gen_rtx_REG (V4SImode
, i
), mem
);
11654 /* Restore VRSAVE if needed. */
11655 if (TARGET_ALTIVEC_ABI
&& info
->vrsave_mask
!= 0)
11657 rtx addr
, mem
, reg
;
11659 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
11660 GEN_INT (info
->vrsave_save_offset
+ sp_offset
));
11661 mem
= gen_rtx_MEM (SImode
, addr
);
11662 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11663 reg
= gen_rtx_REG (SImode
, 12);
11664 emit_move_insn (reg
, mem
);
11666 emit_insn (generate_set_vrsave (reg
, info
, 1));
11669 /* Get the old lr if we saved it. */
11670 if (info
->lr_save_p
)
11672 rtx mem
= gen_frame_mem_offset (Pmode
, frame_reg_rtx
,
11673 info
->lr_save_offset
+ sp_offset
);
11675 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11677 emit_move_insn (gen_rtx_REG (Pmode
, 0), mem
);
11680 /* Get the old cr if we saved it. */
11681 if (info
->cr_save_p
)
11683 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
11684 GEN_INT (info
->cr_save_offset
+ sp_offset
));
11685 rtx mem
= gen_rtx_MEM (SImode
, addr
);
11687 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11689 emit_move_insn (gen_rtx_REG (SImode
, 12), mem
);
11692 /* Set LR here to try to overlap restores below. */
11693 if (info
->lr_save_p
)
11694 emit_move_insn (gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
),
11695 gen_rtx_REG (Pmode
, 0));
11697 /* Load exception handler data registers, if needed. */
11698 if (current_function_calls_eh_return
)
11700 unsigned int i
, regno
;
/* NOTE(review): the `for (i = 0; ; ++i)' loop header and `break' lines
   were dropped by the extraction.  */
11706 regno
= EH_RETURN_DATA_REGNO (i
);
11707 if (regno
== INVALID_REGNUM
)
11710 mem
= gen_frame_mem_offset (reg_mode
, frame_reg_rtx
,
11711 info
->ehrd_offset
+ sp_offset
11712 + reg_size
* (int) i
);
11713 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11715 emit_move_insn (gen_rtx_REG (reg_mode
, regno
), mem
);
11719 /* Restore GPRs. This is done as a PARALLEL if we are using
11720 the load-multiple instructions. */
11721 if (using_load_multiple
)
11724 p
= rtvec_alloc (32 - info
->first_gp_reg_save
);
11725 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
11727 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
11728 GEN_INT (info
->gp_save_offset
11731 rtx mem
= gen_rtx_MEM (reg_mode
, addr
);
11733 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
/* NOTE(review): the `RTVEC_ELT (p, i) =' line (orig. 11735) was dropped
   by the extraction before this SET.  */
11736 gen_rtx_SET (VOIDmode
,
11737 gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
),
11740 emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
/* Otherwise restore live call-saved GPRs (and the PIC register when
   the ABI requires it) one at a time.  */
11743 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
11744 if ((regs_ever_live
[info
->first_gp_reg_save
+i
]
11745 && ! call_used_regs
[info
->first_gp_reg_save
+i
])
11746 || (i
+info
->first_gp_reg_save
== RS6000_PIC_OFFSET_TABLE_REGNUM
11747 && ((DEFAULT_ABI
== ABI_V4
&& flag_pic
!= 0)
11748 || (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
))))
11750 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
11751 GEN_INT (info
->gp_save_offset
11754 rtx mem
= gen_rtx_MEM (reg_mode
, addr
);
11756 /* Restore 64-bit quantities for SPE. */
11757 if (TARGET_SPE_ABI
&& info
->spe_64bit_regs_used
!= 0)
11759 int offset
= info
->spe_gp_save_offset
+ sp_offset
+ 8 * i
;
11762 if (!SPE_CONST_OFFSET_OK (offset
))
11764 b
= gen_rtx_REG (Pmode
, FIXED_SCRATCH
);
11765 emit_move_insn (b
, GEN_INT (offset
));
11768 b
= GEN_INT (offset
);
11770 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
, b
);
11771 mem
= gen_rtx_MEM (V2SImode
, addr
);
11774 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11776 emit_move_insn (gen_rtx_REG (reg_mode
,
11777 info
->first_gp_reg_save
+ i
), mem
);
11780 /* Restore fpr's if we need to do it without calling a function. */
11781 if (restoring_FPRs_inline
)
11782 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
11783 if ((regs_ever_live
[info
->first_fp_reg_save
+i
]
11784 && ! call_used_regs
[info
->first_fp_reg_save
+i
]))
11787 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
11788 GEN_INT (info
->fp_save_offset
11791 mem
= gen_rtx_MEM (DFmode
, addr
);
11792 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11794 emit_move_insn (gen_rtx_REG (DFmode
,
11795 info
->first_fp_reg_save
+ i
),
11799 /* If we saved cr, restore it here. Just those that were used. */
11800 if (info
->cr_save_p
)
11802 rtx r12_rtx
= gen_rtx_REG (SImode
, 12);
/* Count how many CR fields need restoring; on some CPUs a single
   mtcrf of multiple fields is used, otherwise one insn per field.  */
11805 if (using_mfcr_multiple
)
11807 for (i
= 0; i
< 8; i
++)
11808 if (regs_ever_live
[CR0_REGNO
+i
] && ! call_used_regs
[CR0_REGNO
+i
])
11814 if (using_mfcr_multiple
&& count
> 1)
11819 p
= rtvec_alloc (count
);
11822 for (i
= 0; i
< 8; i
++)
11823 if (regs_ever_live
[CR0_REGNO
+i
] && ! call_used_regs
[CR0_REGNO
+i
])
11825 rtvec r
= rtvec_alloc (2);
11826 RTVEC_ELT (r
, 0) = r12_rtx
;
11827 RTVEC_ELT (r
, 1) = GEN_INT (1 << (7-i
));
11828 RTVEC_ELT (p
, ndx
) =
11829 gen_rtx_SET (VOIDmode
, gen_rtx_REG (CCmode
, CR0_REGNO
+i
),
11830 gen_rtx_UNSPEC (CCmode
, r
, UNSPEC_MOVESI_TO_CR
));
11833 emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
11838 for (i
= 0; i
< 8; i
++)
11839 if (regs_ever_live
[CR0_REGNO
+i
] && ! call_used_regs
[CR0_REGNO
+i
])
11841 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode
,
11847 /* If this is V.4, unwind the stack pointer after all of the loads
11848 have been done. We need to emit a block here so that sched
11849 doesn't decide to move the sp change before the register restores
11850 (which may not have any obvious dependency on the stack). This
11851 doesn't hurt performance, because there is no scheduling that can
11852 be done after this point. */
11853 if (DEFAULT_ABI
== ABI_V4
)
11855 if (frame_reg_rtx
!= sp_reg_rtx
)
11856 rs6000_emit_stack_tie ();
11858 if (use_backchain_to_restore_sp
)
11860 emit_move_insn (sp_reg_rtx
, frame_reg_rtx
);
11862 else if (sp_offset
!= 0)
11864 emit_insn (TARGET_32BIT
11865 ? gen_addsi3 (sp_reg_rtx
, sp_reg_rtx
,
11866 GEN_INT (sp_offset
))
11867 : gen_adddi3 (sp_reg_rtx
, sp_reg_rtx
,
11868 GEN_INT (sp_offset
)));
/* Apply the EH return stack adjustment before returning.  */
11872 if (current_function_calls_eh_return
)
11874 rtx sa
= EH_RETURN_STACKADJ_RTX
;
11875 emit_insn (TARGET_32BIT
11876 ? gen_addsi3 (sp_reg_rtx
, sp_reg_rtx
, sa
)
11877 : gen_adddi3 (sp_reg_rtx
, sp_reg_rtx
, sa
));
/* Build the return PARALLEL: a RETURN, a USE of LR, and — when FPRs
   are restored out of line — the restore-routine reference plus one
   SET per FPR.  */
11883 if (! restoring_FPRs_inline
)
11884 p
= rtvec_alloc (3 + 64 - info
->first_fp_reg_save
);
11886 p
= rtvec_alloc (2);
11888 RTVEC_ELT (p
, 0) = gen_rtx_RETURN (VOIDmode
);
11889 RTVEC_ELT (p
, 1) = gen_rtx_USE (VOIDmode
,
11890 gen_rtx_REG (Pmode
,
11891 LINK_REGISTER_REGNUM
));
11893 /* If we have to restore more than two FP registers, branch to the
11894 restore function. It will return to our caller. */
11895 if (! restoring_FPRs_inline
)
11899 const char *alloc_rname
;
11901 sprintf (rname
, "%s%d%s", RESTORE_FP_PREFIX
,
11902 info
->first_fp_reg_save
- 32, RESTORE_FP_SUFFIX
);
11903 alloc_rname
= ggc_strdup (rname
);
11904 RTVEC_ELT (p
, 2) = gen_rtx_USE (VOIDmode
,
11905 gen_rtx_SYMBOL_REF (Pmode
,
11908 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
11911 addr
= gen_rtx_PLUS (Pmode
, sp_reg_rtx
,
11912 GEN_INT (info
->fp_save_offset
+ 8*i
));
11913 mem
= gen_rtx_MEM (DFmode
, addr
);
11914 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
11916 RTVEC_ELT (p
, i
+3) =
11917 gen_rtx_SET (VOIDmode
,
11918 gen_rtx_REG (DFmode
, info
->first_fp_reg_save
+ i
),
11923 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
11927 /* Write function epilogue. */
/* NOTE(review): line-shattered extraction; the `static void' header,
   the `FILE *file' declaration, brace lines and several statement
   lines (e.g. the `i = ...' language-code assignments and `else'
   lines) were dropped.  Only comments added; code tokens are
   byte-identical.  Assembles the epilogue insns when ! HAVE_epilogue,
   then (for AIX) writes the traceback table that debuggers and the
   system unwinder read; see sys/debug.h for the field layout.  */
11930 rs6000_output_function_epilogue (file
, size
)
11932 HOST_WIDE_INT size ATTRIBUTE_UNUSED
;
11934 rs6000_stack_t
*info
= rs6000_stack_info ();
11936 if (! HAVE_epilogue
)
11938 rtx insn
= get_last_insn ();
11939 /* If the last insn was a BARRIER, we don't have to write anything except
11940 the trace table. */
11941 if (GET_CODE (insn
) == NOTE
)
11942 insn
= prev_nonnote_insn (insn
);
11943 if (insn
== 0 || GET_CODE (insn
) != BARRIER
)
11945 /* This is slightly ugly, but at least we don't have two
11946 copies of the epilogue-emitting code. */
11949 /* A NOTE_INSN_DELETED is supposed to be at the start
11950 and end of the "toplevel" insn chain. */
11951 emit_note (0, NOTE_INSN_DELETED
);
11952 rs6000_emit_epilogue (FALSE
);
11953 emit_note (0, NOTE_INSN_DELETED
);
11955 /* Expand INSN_ADDRESSES so final() doesn't crash. */
11959 for (insn
= get_insns (); insn
!= 0; insn
= NEXT_INSN (insn
))
11961 INSN_ADDRESSES_NEW (insn
, addr
);
11966 if (TARGET_DEBUG_STACK
)
11967 debug_rtx_list (get_insns (), 100);
11968 final (get_insns (), file
, FALSE
, FALSE
);
11973 /* Output a traceback table here. See /usr/include/sys/debug.h for info
11976 We don't output a traceback table if -finhibit-size-directive was
11977 used. The documentation for -finhibit-size-directive reads
11978 ``don't output a @code{.size} assembler directive, or anything
11979 else that would cause trouble if the function is split in the
11980 middle, and the two halves are placed at locations far apart in
11981 memory.'' The traceback table has this property, since it
11982 includes the offset from the start of the function to the
11983 traceback table itself.
11985 System V.4 Powerpc's (and the embedded ABI derived from it) use a
11986 different traceback table. */
11987 if (DEFAULT_ABI
== ABI_AIX
&& ! flag_inhibit_size_directive
11988 && rs6000_traceback
!= traceback_none
)
11990 const char *fname
= NULL
;
11991 const char *language_string
= lang_hooks
.name
;
11992 int fixed_parms
= 0, float_parms
= 0, parm_info
= 0;
11994 int optional_tbtab
;
/* optional_tbtab selects whether the variable-length optional fields
   (name, parameter info, alloca register) are emitted.  */
11996 if (rs6000_traceback
== traceback_full
)
11997 optional_tbtab
= 1;
11998 else if (rs6000_traceback
== traceback_part
)
11999 optional_tbtab
= 0;
12001 optional_tbtab
= !optimize_size
&& !TARGET_ELF
;
12003 if (optional_tbtab
)
12005 fname
= XSTR (XEXP (DECL_RTL (current_function_decl
), 0), 0);
12006 while (*fname
== '.') /* V.4 encodes . in the name */
12009 /* Need label immediately before tbtab, so we can compute
12010 its offset from the function start. */
12011 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LT");
12012 ASM_OUTPUT_LABEL (file
, fname
);
12015 /* The .tbtab pseudo-op can only be used for the first eight
12016 expressions, since it can't handle the possibly variable
12017 length fields that follow. However, if you omit the optional
12018 fields, the assembler outputs zeros for all optional fields
12019 anyways, giving each variable length field is minimum length
12020 (as defined in sys/debug.h). Thus we can not use the .tbtab
12021 pseudo-op at all. */
12023 /* An all-zero word flags the start of the tbtab, for debuggers
12024 that have to find it by searching forward from the entry
12025 point or from the current pc. */
12026 fputs ("\t.long 0\n", file
);
12028 /* Tbtab format type. Use format type 0. */
12029 fputs ("\t.byte 0,", file
);
12031 /* Language type. Unfortunately, there doesn't seem to be any
12032 official way to get this info, so we use language_string. C
12033 is 0. C++ is 9. No number defined for Obj-C, so use the
12034 value for C for now. There is no official value for Java,
12035 although IBM appears to be using 13. There is no official value
12036 for Chill, so we've chosen 44 pseudo-randomly. */
/* NOTE(review): the `i = <code>;' assignment lines between the strcmp
   branches were dropped by the extraction.  */
12037 if (! strcmp (language_string
, "GNU C")
12038 || ! strcmp (language_string
, "GNU Objective-C"))
12040 else if (! strcmp (language_string
, "GNU F77"))
12042 else if (! strcmp (language_string
, "GNU Ada"))
12044 else if (! strcmp (language_string
, "GNU Pascal"))
12046 else if (! strcmp (language_string
, "GNU C++"))
12048 else if (! strcmp (language_string
, "GNU Java"))
12050 else if (! strcmp (language_string
, "GNU CHILL"))
12054 fprintf (file
, "%d,", i
);
12056 /* 8 single bit fields: global linkage (not set for C extern linkage,
12057 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
12058 from start of procedure stored in tbtab, internal function, function
12059 has controlled storage, function has no toc, function uses fp,
12060 function logs/aborts fp operations. */
12061 /* Assume that fp operations are used if any fp reg must be saved. */
12062 fprintf (file
, "%d,",
12063 (optional_tbtab
<< 5) | ((info
->first_fp_reg_save
!= 64) << 1));
12065 /* 6 bitfields: function is interrupt handler, name present in
12066 proc table, function calls alloca, on condition directives
12067 (controls stack walks, 3 bits), saves condition reg, saves
12069 /* The `function calls alloca' bit seems to be set whenever reg 31 is
12070 set up as a frame pointer, even when there is no alloca call. */
12071 fprintf (file
, "%d,",
12072 ((optional_tbtab
<< 6)
12073 | ((optional_tbtab
& frame_pointer_needed
) << 5)
12074 | (info
->cr_save_p
<< 1)
12075 | (info
->lr_save_p
)));
12077 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
12079 fprintf (file
, "%d,",
12080 (info
->push_p
<< 7) | (64 - info
->first_fp_reg_save
));
12082 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
12083 fprintf (file
, "%d,", (32 - first_reg_to_save ()));
12085 if (optional_tbtab
)
12087 /* Compute the parameter info from the function decl argument
12090 int next_parm_info_bit
= 31;
12092 for (decl
= DECL_ARGUMENTS (current_function_decl
);
12093 decl
; decl
= TREE_CHAIN (decl
))
12095 rtx parameter
= DECL_INCOMING_RTL (decl
);
12096 enum machine_mode mode
= GET_MODE (parameter
);
12098 if (GET_CODE (parameter
) == REG
)
12100 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
)
/* Float parameter: two bits per entry (10 single, 11 double).
   NOTE(review): the `int bits;'/`float_parms++;' and `bits = ...'
   lines were dropped by the extraction.  */
12106 if (mode
== SFmode
)
12108 else if (mode
== DFmode
|| mode
== TFmode
)
12113 /* If only one bit will fit, don't or in this entry. */
12114 if (next_parm_info_bit
> 0)
12115 parm_info
|= (bits
<< (next_parm_info_bit
- 1));
12116 next_parm_info_bit
-= 2;
/* Fixed-point parameter: count whole words.  */
12120 fixed_parms
+= ((GET_MODE_SIZE (mode
)
12121 + (UNITS_PER_WORD
- 1))
12123 next_parm_info_bit
-= 1;
12129 /* Number of fixed point parameters. */
12130 /* This is actually the number of words of fixed point parameters; thus
12131 an 8 byte struct counts as 2; and thus the maximum value is 8. */
12132 fprintf (file
, "%d,", fixed_parms
);
12134 /* 2 bitfields: number of floating point parameters (7 bits), parameters
12136 /* This is actually the number of fp registers that hold parameters;
12137 and thus the maximum value is 13. */
12138 /* Set parameters on stack bit if parameters are not in their original
12139 registers, regardless of whether they are on the stack? Xlc
12140 seems to set the bit when not optimizing. */
12141 fprintf (file
, "%d\n", ((float_parms
<< 1) | (! optimize
)));
12143 if (! optional_tbtab
)
12146 /* Optional fields follow. Some are variable length. */
12148 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
12149 11 double float. */
12150 /* There is an entry for each parameter in a register, in the order that
12151 they occur in the parameter list. Any intervening arguments on the
12152 stack are ignored. If the list overflows a long (max possible length
12153 34 bits) then completely leave off all elements that don't fit. */
12154 /* Only emit this long if there was at least one parameter. */
12155 if (fixed_parms
|| float_parms
)
12156 fprintf (file
, "\t.long %d\n", parm_info
);
12158 /* Offset from start of code to tb table. */
12159 fputs ("\t.long ", file
);
12160 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LT");
12162 RS6000_OUTPUT_BASENAME (file
, fname
);
12164 assemble_name (file
, fname
);
12166 fputs ("-.", file
);
12168 RS6000_OUTPUT_BASENAME (file
, fname
);
12170 assemble_name (file
, fname
);
12174 /* Interrupt handler mask. */
12175 /* Omit this long, since we never set the interrupt handler bit
12178 /* Number of CTL (controlled storage) anchors. */
12179 /* Omit this long, since the has_ctl bit is never set above. */
12181 /* Displacement into stack of each CTL anchor. */
12182 /* Omit this list of longs, because there are no CTL anchors. */
12184 /* Length of function name. */
12187 fprintf (file
, "\t.short %d\n", (int) strlen (fname
));
12189 /* Function name. */
12190 assemble_string (fname
, strlen (fname
));
12192 /* Register for alloca automatic storage; this is always reg 31.
12193 Only emit this if the alloca bit was set above. */
12194 if (frame_pointer_needed
)
12195 fputs ("\t.byte 31\n", file
);
12197 fputs ("\t.align 2\n", file
);
12201 /* A C compound statement that outputs the assembler code for a thunk
12202 function, used to implement C++ virtual function calls with
12203 multiple inheritance. The thunk acts as a wrapper around a virtual
12204 function, adjusting the implicit object parameter before handing
12205 control off to the real function.
12207 First, emit code to add the integer DELTA to the location that
12208 contains the incoming first argument. Assume that this argument
12209 contains a pointer, and is the one used to pass the `this' pointer
12210 in C++. This is the incoming argument *before* the function
12211 prologue, e.g. `%o0' on a sparc. The addition must preserve the
12212 values of all other incoming arguments.
12214 After the addition, emit code to jump to FUNCTION, which is a
12215 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
12216 not touch the return address. Hence returning from FUNCTION will
12217 return to whoever called the current `thunk'.
12219 The effect must be as if FUNCTION had been called directly with the
12220 adjusted first argument. This macro is responsible for emitting
12221 all of the code for a thunk function; output_function_prologue()
12222 and output_function_epilogue() are not invoked.
12224 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
12225 been extracted from it.) It might possibly be useful on some
12226 targets, but probably not.
12228 If you do not define this macro, the target-independent code in the
12229 C++ frontend will generate a less efficient heavyweight thunk that
12230 calls FUNCTION instead of jumping to it. The generic approach does
12231 not support varargs. */
/* Output the assembler code for a C++ MI thunk: add DELTA (and, if
   nonzero, the vtable offset VCALL_OFFSET) to the incoming `this'
   pointer, then tail-jump to FUNCTION.  The thunk emits no prologue
   or epilogue of its own; it runs a mini rest_of_compilation over
   hand-built RTL.  */
static void
rs6000_output_mi_thunk (file, thunk_fndecl, delta, vcall_offset, function)
     FILE *file;
     tree thunk_fndecl ATTRIBUTE_UNUSED;
     HOST_WIDE_INT delta;
     HOST_WIDE_INT vcall_offset;
     tree function;
{
  rtx this, insn, funexp;

  /* Pretend reload has run: we emit hard-register RTL directly.  */
  reload_completed = 1;
  no_new_pseudos = 1;

  /* Mark the end of the (empty) prologue.  */
  emit_note (NULL, NOTE_INSN_PROLOGUE_END);

  /* Find the "this" pointer.  If the function returns a structure,
     the structure return pointer is in r3, so "this" arrives in r4.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function))))
    this = gen_rtx_REG (Pmode, 4);
  else
    this = gen_rtx_REG (Pmode, 3);

  /* Apply the constant offset, if required.  */
  if (delta)
    {
      rtx delta_rtx = GEN_INT (delta);
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (this, this, delta_rtx)
		 : gen_adddi3 (this, this, delta_rtx));
    }

  /* Apply the offset from the vtable, if required: load the vtable
     pointer through "this", load the offset slot into r12, and add.  */
  if (vcall_offset)
    {
      rtx vcall_offset_rtx = GEN_INT (vcall_offset);
      rtx tmp = gen_rtx_REG (Pmode, 12);

      emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
		 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
      emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (this, this, tmp)
		 : gen_adddi3 (this, this, tmp));
    }

  /* Generate a tail call to the target function.  */
  if (!TREE_USED (function))
    {
      assemble_external (function);
      TREE_USED (function) = 1;
    }
  funexp = XEXP (DECL_RTL (function), 0);
  /* Clear the local flag: the branch must reach the global entry.  */
  SYMBOL_REF_FLAGS (funexp) &= ~SYMBOL_FLAG_LOCAL;
  funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);

#if TARGET_MACHO
  if (MACHOPIC_INDIRECT)
    funexp = machopic_indirect_call_target (funexp);
#endif

  /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
     generate sibcall RTL explicitly to avoid constraint abort.  */
  insn = emit_call_insn (
	   gen_rtx_PARALLEL (VOIDmode,
	     gen_rtvec (4,
			gen_rtx_CALL (VOIDmode,
				      funexp, const0_rtx),
			gen_rtx_USE (VOIDmode, const0_rtx),
			gen_rtx_USE (VOIDmode,
				     gen_rtx_REG (SImode,
						  LINK_REGISTER_REGNUM)),
			gen_rtx_RETURN (VOIDmode))));
  SIBLING_CALL_P (insn) = 1;
  emit_barrier ();

  /* Run just enough of rest_of_compilation to get the insns emitted.
     There's not really enough bulk here to make other passes such as
     instruction scheduling worth while.  Note that use_thunk calls
     assemble_start_function and assemble_end_function.  */
  insn = get_insns ();
  shorten_branches (insn);
  final_start_function (insn, file, 1);
  final (insn, file, 1, 0);
  final_end_function ();

  /* Restore the globals we faked above.  */
  reload_completed = 0;
  no_new_pseudos = 0;
}
12325 /* A quick summary of the various types of 'constant-pool tables'
12328 Target Flags Name One table per
12329 AIX (none) AIX TOC object file
12330 AIX -mfull-toc AIX TOC object file
12331 AIX -mminimal-toc AIX minimal TOC translation unit
12332 SVR4/EABI (none) SVR4 SDATA object file
12333 SVR4/EABI -fpic SVR4 pic object file
12334 SVR4/EABI -fPIC SVR4 PIC translation unit
12335 SVR4/EABI -mrelocatable EABI TOC function
12336 SVR4/EABI -maix AIX TOC object file
12337 SVR4/EABI -maix -mminimal-toc
12338 AIX minimal TOC translation unit
12340 Name Reg. Set by entries contains:
12341 made by addrs? fp? sum?
12343 AIX TOC 2 crt0 as Y option option
12344 AIX minimal TOC 30 prolog gcc Y Y option
12345 SVR4 SDATA 13 crt0 gcc N Y N
12346 SVR4 pic 30 prolog ld Y not yet N
12347 SVR4 PIC 30 prolog gcc Y option option
12348 EABI TOC 30 prolog gcc Y option option
/* Hash functions for the hash table.  */

/* Return a hash value for constant RTX K, used to key TOC entries.
   Mixes the rtx code and mode, then folds in each operand according
   to its RTX format letter.  NOTE(review): several case labels and the
   final return were lost in extraction and are reconstructed here from
   the visible arithmetic -- confirm against the repository copy.  */
static unsigned
rs6000_hash_constant (k)
     rtx k;
{
  enum rtx_code code = GET_CODE (k);
  enum machine_mode mode = GET_MODE (k);
  unsigned result = (code << 3) ^ mode;
  const char *format;
  int flen, fidx;

  format = GET_RTX_FORMAT (code);
  flen = strlen (format);
  fidx = 0;

  switch (code)
    {
    case LABEL_REF:
      /* Hash labels by the UID of the CODE_LABEL they reference.  */
      return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));

    case CONST_DOUBLE:
      if (mode != VOIDmode)
	return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
      /* Integer CONST_DOUBLE: hash only the two value words.  */
      flen = 2;
      break;

    case CODE_LABEL:
      /* Skip the insn-chain operands; start at the label number.  */
      fidx = 3;
      break;

    default:
      break;
    }

  for (; fidx < flen; fidx++)
    switch (format[fidx])
      {
      case 's':
	{
	  unsigned i, len;
	  const char *str = XSTR (k, fidx);
	  len = strlen (str);
	  result = result * 613 + len;
	  for (i = 0; i < len; i++)
	    result = result * 613 + (unsigned) str[i];
	  break;
	}
      case 'u':
      case 'e':
	/* Recurse into sub-expressions.  */
	result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
	break;
      case 'i':
      case 'n':
	result = result * 613 + (unsigned) XINT (k, fidx);
	break;
      case 'w':
	if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
	  result = result * 613 + (unsigned) XWINT (k, fidx);
	else
	  {
	    size_t i;
	    /* Fold a wide int one `unsigned'-sized chunk at a time.  */
	    for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
	      result = result * 613 + (unsigned) (XWINT (k, fidx)
						  >> CHAR_BIT * i
						     * sizeof (unsigned));
	  }
	break;
      default:
	abort ();
      }

  return result;
}
12429 toc_hash_function (hash_entry
)
12430 const void * hash_entry
;
12432 const struct toc_hash_struct
*thc
=
12433 (const struct toc_hash_struct
*) hash_entry
;
12434 return rs6000_hash_constant (thc
->key
) ^ thc
->key_mode
;
12437 /* Compare H1 and H2 for equivalence. */
12440 toc_hash_eq (h1
, h2
)
12444 rtx r1
= ((const struct toc_hash_struct
*) h1
)->key
;
12445 rtx r2
= ((const struct toc_hash_struct
*) h2
)->key
;
12447 if (((const struct toc_hash_struct
*) h1
)->key_mode
12448 != ((const struct toc_hash_struct
*) h2
)->key_mode
)
12451 return rtx_equal_p (r1
, r2
);
/* These are the names given by the C++ front-end to vtables, and
   vtable-like objects.  Ideally, this logic should not be here;
   instead, there should be some programmatic way of inquiring as
   to whether or not an object is a vtable.  */

/* Return nonzero if the string NAME names a vtable or vtable-like
   object.  Bug fix: the old body tested a variable literally named
   `name' instead of the macro argument NAME, so it only worked when
   the caller happened to use that exact identifier.  All existing
   call sites pass `name', so this change is behavior-compatible.  */
#define VTABLE_NAME_P(NAME)				\
  (strncmp ("_vt.", (NAME), strlen ("_vt.")) == 0	\
  || strncmp ("_ZTV", (NAME), strlen ("_ZTV")) == 0	\
  || strncmp ("_ZTT", (NAME), strlen ("_ZTT")) == 0	\
  || strncmp ("_ZTC", (NAME), strlen ("_ZTC")) == 0)
/* Output a reference to symbol X to FILE, preferring the bare basename
   for vtable symbols (see comment below).  */
void
rs6000_output_symbol_ref (file, x)
     FILE *file;
     rtx x;
{
  /* Currently C++ toc references to vtables can be emitted before it
     is decided whether the vtable is public or private.  If this is
     the case, then the linker will eventually complain that there is
     a reference to an unknown section.  Thus, for vtables only,
     we emit the TOC reference to reference the symbol and not the
     section.  */
  const char *name = XSTR (x, 0);

  if (VTABLE_NAME_P (name))
    RS6000_OUTPUT_BASENAME (file, name);
  else
    assemble_name (file, name);
}
/* Output a TOC entry.  We derive the entry name from what is being
   written.  Emits the ".tc"/".long"/DOUBLE_INT_ASM_OP directives for
   constant X (with label number LABELNO, mode MODE) into FILE,
   de-duplicating via toc_hash_table where the linker won't.
   NOTE(review): reconstructed from a mangled extraction -- the
   alternative branches (#else arms, abort()s, returns) were dropped
   and are restored here from the visible halves; confirm against
   the repository copy.  */
void
output_toc (file, x, labelno, mode)
     FILE *file;
     rtx x;
     int labelno;
     enum machine_mode mode;
{
  char buf[256];
  const char *name = buf;
  const char *real_name;
  rtx base = x;
  int offset = 0;

  if (TARGET_NO_TOC)
    abort ();

  /* When the linker won't eliminate them, don't output duplicate
     TOC entries (this happens on AIX if there is any kind of TOC,
     and on SVR4 under -fPIC or -mrelocatable).  Don't do this for
     labels, which are always unique.  */
  if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
    {
      struct toc_hash_struct *h;
      void * * found;

      /* Create toc_hash_table.  This can't be done at OVERRIDE_OPTIONS
	 time because GGC is not initialised at that point.  */
      if (toc_hash_table == NULL)
	toc_hash_table = htab_create_ggc (1021, toc_hash_function,
					  toc_hash_eq, NULL);

      h = ggc_alloc (sizeof (*h));
      h->key = x;
      h->key_mode = mode;
      h->labelno = labelno;

      found = htab_find_slot (toc_hash_table, h, 1);
      if (*found == NULL)
	*found = h;
      else  /* This is indeed a duplicate.
	       Set this label equal to that label.  */
	{
	  fputs ("\t.set ", file);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d,", labelno);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
					      found)->labelno));
	  return;
	}
    }

  /* If we're going to put a double constant in the TOC, make sure it's
     aligned properly when strict alignment is on.  */
  if (GET_CODE (x) == CONST_DOUBLE
      && STRICT_ALIGNMENT
      && GET_MODE_BITSIZE (mode) >= 64
      && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
    ASM_OUTPUT_ALIGN (file, 3);
  }

  (*targetm.asm_out.internal_label) (file, "LC", labelno);

  /* Handle FP constants specially.  Note that if we have a minimal
     TOC, things we put here aren't actually in the TOC, so we can allow
     FP constants.  */
  if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
    {
      REAL_VALUE_TYPE rv;
      long k[4];

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff,
		     k[2] & 0xffffffff, k[3] & 0xffffffff);
	  fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff,
		   k[2] & 0xffffffff, k[3] & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff,
		     k[2] & 0xffffffff, k[3] & 0xffffffff);
	  fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff,
		   k[2] & 0xffffffff, k[3] & 0xffffffff);
	  return;
	}
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
    {
      REAL_VALUE_TYPE rv;
      long k[2];

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff);
	  fprintf (file, "0x%lx%08lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff);
	  fprintf (file, "0x%lx,0x%lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff);
	  return;
	}
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
    {
      REAL_VALUE_TYPE rv;
      long l;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
	  /* Pad the single-precision value to a doubleword.  */
	  fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
	  fprintf (file, "0x%lx\n", l & 0xffffffff);
	  return;
	}
    }
  else if (GET_MODE (x) == VOIDmode
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      unsigned HOST_WIDE_INT low;
      HOST_WIDE_INT high;

      if (GET_CODE (x) == CONST_DOUBLE)
	{
	  low = CONST_DOUBLE_LOW (x);
	  high = CONST_DOUBLE_HIGH (x);
	}
      else
#if HOST_BITS_PER_WIDE_INT == 32
	{
	  low = INTVAL (x);
	  /* Sign-extend by hand on a 32-bit host.  */
	  high = (low & 0x80000000) ? ~0 : 0;
	}
#else
	{
	  low = INTVAL (x) & 0xffffffff;
	  high = (HOST_WIDE_INT) INTVAL (x) >> 32;
	}
#endif

      /* TOC entries are always Pmode-sized, but since this
	 is a bigendian machine then if we're putting smaller
	 integer constants in the TOC we have to pad them.
	 (This is still a win over putting the constants in
	 a separate constant pool, because then we'd have
	 to have both a TOC entry _and_ the actual constant.)

	 For a 32-bit target, CONST_INT values are loaded and shifted
	 entirely within `low' and can be stored in one TOC entry.  */

      if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
	abort ();/* It would be easy to make this work, but it doesn't now.  */

      if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
	{
#if HOST_BITS_PER_WIDE_INT == 32
	  lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
			 POINTER_SIZE, &low, &high, 0);
#else
	  low |= high << 32;
	  low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
	  high = (HOST_WIDE_INT) low >> 32;
	  low &= 0xffffffff;
#endif
	}

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc ID_%lx_%lx[TC],",
		     (long) high & 0xffffffff, (long) low & 0xffffffff);
	  fprintf (file, "0x%lx%08lx\n",
		   (long) high & 0xffffffff, (long) low & 0xffffffff);
	  return;
	}
      else
	{
	  if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
	    {
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc ID_%lx_%lx[TC],",
			 (long) high & 0xffffffff, (long) low & 0xffffffff);
	      fprintf (file, "0x%lx,0x%lx\n",
		       (long) high & 0xffffffff, (long) low & 0xffffffff);
	      return;
	    }
	  else
	    {
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
	      fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
	      return;
	    }
	}
    }

  /* Symbolic constant: peel off a CONST (base + offset) wrapper.  */
  if (GET_CODE (x) == CONST)
    {
      if (GET_CODE (XEXP (x, 0)) != PLUS)
	abort ();

      base = XEXP (XEXP (x, 0), 0);
      offset = INTVAL (XEXP (XEXP (x, 0), 1));
    }

  if (GET_CODE (base) == SYMBOL_REF)
    name = XSTR (base, 0);
  else if (GET_CODE (base) == LABEL_REF)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
  else if (GET_CODE (base) == CODE_LABEL)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
  else
    abort ();

  real_name = (*targetm.strip_name_encoding) (name);
  if (TARGET_MINIMAL_TOC)
    fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
  else
    {
      fprintf (file, "\t.tc %s", real_name);

      /* Encode the offset in the entry name: .N for negative,
	 .P for positive.  */
      if (offset < 0)
	fprintf (file, ".N%d", - offset);
      else if (offset)
	fprintf (file, ".P%d", offset);

      fputs ("[TC],", file);
    }

  /* Currently C++ toc references to vtables can be emitted before it
     is decided whether the vtable is public or private.  If this is
     the case, then the linker will eventually complain that there is
     a TOC reference to an unknown section.  Thus, for vtables only,
     we emit the TOC reference to reference the symbol and not the
     section.  */
  if (VTABLE_NAME_P (name))
    {
      RS6000_OUTPUT_BASENAME (file, name);
      if (offset < 0)
	fprintf (file, "%d", offset);
      else if (offset > 0)
	fprintf (file, "+%d", offset);
    }
  else
    output_addr_const (file, x);
  putc ('\n', file);
}
/* Output an assembler pseudo-op to write an ASCII string of N characters
   starting at P to FILE.

   On the RS/6000, we have to do this using the .byte operation and
   write out special characters outside the quoted string.
   Also, the assembler is broken; very long strings are truncated,
   so we must artificially break them up early.  */
void
output_ascii (file, p, n)
     FILE *file;
     const char *p;
     int n;
{
  char c;
  int i, count_string;
  /* Pending directive prefixes: non-NULL means "emit this before the
     next character of that class"; to_close is the text that ends an
     open quoted string.  */
  const char *for_string = "\t.byte \"";
  const char *for_decimal = "\t.byte ";
  const char *to_close = NULL;

  count_string = 0;
  for (i = 0; i < n; i++)
    {
      c = *p++;
      if (c >= ' ' && c < 0177)
	{
	  /* Printable: goes inside a quoted string.  */
	  if (for_string)
	    fputs (for_string, file);
	  putc (c, file);

	  /* Write two quotes to get one.  */
	  if (c == '"')
	    {
	      putc (c, file);
	      ++count_string;
	    }

	  for_string = NULL;
	  for_decimal = "\"\n\t.byte ";
	  to_close = "\"\n";
	  ++count_string;

	  /* Break overly long strings to work around assembler limits.  */
	  if (count_string >= 512)
	    {
	      fputs (to_close, file);

	      for_string = "\t.byte \"";
	      for_decimal = "\t.byte ";
	      to_close = NULL;
	      count_string = 0;
	    }
	}
      else
	{
	  /* Non-printable: emit as a decimal .byte value.  */
	  if (for_decimal)
	    fputs (for_decimal, file);
	  fprintf (file, "%d", c);

	  for_string = "\n\t.byte \"";
	  for_decimal = ", ";
	  to_close = "\n";
	  count_string = 0;
	}
    }

  /* Now close the string if we have written one.  Then end the line.  */
  if (to_close)
    fputs (to_close, file);
}
/* Generate a unique section name for FILENAME for a section type
   represented by SECTION_DESC.  Output goes into BUF.

   SECTION_DESC can be any string, as long as it is different for each
   possible section type.

   We name the section in the same manner as xlc.  The name begins with an
   underscore followed by the filename (after stripping any leading directory
   names) with the last period replaced by the string SECTION_DESC.  If
   FILENAME does not contain a period, SECTION_DESC is appended to the end of
   the name.  The result is heap-allocated via xmalloc; caller owns it.  */
void
rs6000_gen_section_name (buf, filename, section_desc)
     char **buf;
     const char *filename;
     const char *section_desc;
{
  const char *q, *after_last_slash, *last_period = 0;
  char *p;
  int len;

  /* Locate the basename and the final '.' in one scan.  */
  after_last_slash = filename;
  for (q = filename; *q; q++)
    {
      if (*q == '/')
	after_last_slash = q + 1;
      else if (*q == '.')
	last_period = q;
    }

  /* '_' + basename (with desc substituted) + NUL.  */
  len = strlen (after_last_slash) + strlen (section_desc) + 2;
  *buf = (char *) xmalloc (len);

  p = *buf;
  *p++ = '_';

  for (q = after_last_slash; *q; q++)
    {
      if (q == last_period)
	{
	  /* Replace the final period with SECTION_DESC and stop.  */
	  strcpy (p, section_desc);
	  p += strlen (section_desc);
	  break;
	}

      else if (ISALNUM (*q))
	/* Copy only alphanumerics, like xlc does.  */
	*p++ = *q;
    }

  if (last_period == 0)
    /* No period in the basename: append SECTION_DESC.  */
    strcpy (p, section_desc);
  else
    *p = '\0';
}
/* Emit profile function.  Emits the RTL call to the profiling routine
   (mcount) for the current function; LABELNO names the per-function
   counter label where counters are in use.  */
void
output_profile_hook (labelno)
     int labelno ATTRIBUTE_UNUSED;
{
  /* Kernel profiling emits its code in output_function_profiler
     instead; nothing to do here.  */
  if (TARGET_PROFILE_KERNEL)
    return;

  if (DEFAULT_ABI == ABI_AIX)
    {
#ifdef NO_PROFILE_COUNTERS
      emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
#else
      char buf[30];
      const char *label_name;
      rtx fun;

      /* Pass the address of the counter label to mcount.  */
      ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
      label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
      fun = gen_rtx_SYMBOL_REF (Pmode, label_name);

      emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
			 fun, Pmode);
#endif
    }
  else if (DEFAULT_ABI == ABI_DARWIN)
    {
      const char *mcount_name = RS6000_MCOUNT;
      int caller_addr_regno = LINK_REGISTER_REGNUM;

      /* Be conservative and always set this, at least for now.  */
      current_function_uses_pic_offset_table = 1;

#if TARGET_MACHO
      /* For PIC code, set up a stub and collect the caller's address
	 from r0, which is where the prologue puts it.  */
      if (MACHOPIC_INDIRECT)
	{
	  mcount_name = machopic_stub_name (mcount_name);
	  if (current_function_uses_pic_offset_table)
	    caller_addr_regno = 0;
	}
#endif
      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
			 0, VOIDmode, 1,
			 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
    }
}
/* Write function profiler code.  Emits the textual assembly that calls
   mcount at function entry; used for the V.4 ABI and for kernel
   profiling under AIX/Darwin (the normal AIX/Darwin path goes through
   output_profile_hook instead).  NOTE(review): save_lr initialisation
   and the case labels were lost in extraction and are reconstructed;
   confirm against the repository copy.  */
void
output_function_profiler (file, labelno)
  FILE *file;
  int labelno;
{
  char buf[100];
  int save_lr = 8;

  switch (DEFAULT_ABI)
    {
    default:
      abort ();

    case ABI_V4:
      /* V.4 stores the saved LR at 4(r1).  */
      save_lr = 4;
      if (TARGET_64BIT)
	{
	  warning ("no profiling of 64-bit code for this ABI");
	  return;
	}
      ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
      fprintf (file, "\tmflr %s\n", reg_names[0]);
      if (flag_pic == 1)
	{
	  /* Small PIC: fetch the counter address through the GOT.  */
	  fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
	  asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@got(%s)\n", reg_names[12]);
	}
      else if (flag_pic > 1)
	{
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  /* Now, we need to get the address of the label.  */
	  fputs ("\tbl 1f\n\t.long ", file);
	  assemble_name (file, buf);
	  fputs ("-.\n1:", file);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
	  asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
		       reg_names[0], reg_names[11]);
	  asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
		       reg_names[0], reg_names[0], reg_names[11]);
	}
      else
	{
	  /* Non-PIC: build the counter address with lis/la.  */
	  asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
	  assemble_name (file, buf);
	  fputs ("@ha\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@l(%s)\n", reg_names[12]);
	}

      /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH.  */
      fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
      break;

    case ABI_AIX:
    case ABI_DARWIN:
      if (!TARGET_PROFILE_KERNEL)
	{
	  /* Don't do anything, done in output_profile_hook ().  */
	}
      else
	{
	  if (TARGET_32BIT)
	    abort ();

	  asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
	  asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);

	  if (current_function_needs_context)
	    {
	      /* Preserve the static chain register around the call.  */
	      asm_fprintf (file, "\tstd %s,24(%s)\n",
			   reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
	      fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
	      asm_fprintf (file, "\tld %s,24(%s)\n",
			   reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
	    }
	  else
	    fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
	}
      break;
    }
}
/* Target hook: nonzero means the scheduler should use the DFA pipeline
   description.  NOTE(review): the body was lost in extraction; the
   conventional definition simply returns 1 -- confirm.  */
static int
rs6000_use_dfa_pipeline_interface ()
{
  return 1;
}
/* Power4 load update and store update instructions are cracked into a
   load or store and an integer insn which are executed in the same cycle.
   Branches have their own dispatch slot which does not count against the
   GCC issue rate, but it changes the program flow so there are no other
   instructions to issue in this cycle.  */

/* Target hook: return how many more insns can issue this cycle after
   issuing INSN, given MORE slots remained before it.  */
static int
rs6000_variable_issue (stream, verbose, insn, more)
  FILE *stream ATTRIBUTE_UNUSED;
  int verbose ATTRIBUTE_UNUSED;
  rtx insn;
  int more;
{
  /* USE/CLOBBER markers consume no issue slot.  */
  if (GET_CODE (PATTERN (insn)) == USE
      || GET_CODE (PATTERN (insn)) == CLOBBER)
    return more;

  if (rs6000_cpu == PROCESSOR_POWER4)
    {
      enum attr_type type = get_attr_type (insn);
      /* These end the dispatch group entirely.  */
      if (type == TYPE_LOAD_EXT_U || type == TYPE_LOAD_EXT_UX
	  || type == TYPE_LOAD_UX || type == TYPE_STORE_UX)
	return 0;
      /* Cracked insns occupy two issue slots.  */
      else if (type == TYPE_LOAD_U || type == TYPE_STORE_U
	       || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
	       || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
	       || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
	       || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
	       || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
	       || type == TYPE_IDIV || type == TYPE_LDIV)
	return more > 2 ? more - 2 : 0;
    }

  return more - 1;
}
/* Adjust the cost of a scheduling dependency.  Return the new cost of
   a dependency LINK or INSN on DEP_INSN.  COST is the current cost.  */
static int
rs6000_adjust_cost (insn, link, dep_insn, cost)
     rtx insn;
     rtx link;
     rtx dep_insn ATTRIBUTE_UNUSED;
     int cost;
{
  /* Unrecognizable insns carry no machine-specific cost.  */
  if (! recog_memoized (insn))
    return 0;

  /* Anti- and output dependencies are free on this target.  */
  if (REG_NOTE_KIND (link) != 0)
    return 0;

  if (REG_NOTE_KIND (link) == 0)
    {
      /* Data dependency; DEP_INSN writes a register that INSN reads
	 some cycles later.  */
      switch (get_attr_type (insn))
	{
	case TYPE_JMPREG:
	  /* Tell the first scheduling pass about the latency between
	     a mtctr and bctr (and mtlr and br/blr).  The first
	     scheduling pass will not know about this latency since
	     the mtctr instruction, which has the latency associated
	     to it, will be generated by reload.  */
	  return TARGET_POWER ? 5 : 4;

	case TYPE_BRANCH:
	  /* Leave some extra cycles between a compare and its
	     dependent branch, to inhibit expensive mispredicts.  */
	  if ((rs6000_cpu_attr == CPU_PPC603
	       || rs6000_cpu_attr == CPU_PPC604
	       || rs6000_cpu_attr == CPU_PPC604E
	       || rs6000_cpu_attr == CPU_PPC620
	       || rs6000_cpu_attr == CPU_PPC630
	       || rs6000_cpu_attr == CPU_PPC750
	       || rs6000_cpu_attr == CPU_PPC7400
	       || rs6000_cpu_attr == CPU_PPC7450
	       || rs6000_cpu_attr == CPU_POWER4)
	      && recog_memoized (dep_insn)
	      && (INSN_CODE (dep_insn) >= 0)
	      && (get_attr_type (dep_insn) == TYPE_CMP
		  || get_attr_type (dep_insn) == TYPE_COMPARE
		  || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
		  || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
		  || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
		  || get_attr_type (dep_insn) == TYPE_FPCOMPARE
		  || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
		  || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
	    return cost + 2;

	default:
	  break;
	}
      /* Fall out to return default cost.  */
    }

  return cost;
}
/* A C statement (sans semicolon) to update the integer scheduling
   priority INSN_PRIORITY (INSN).  Reduce the priority to execute the
   INSN earlier, increase the priority to execute INSN later.  Do not
   define this macro if you do not need to adjust the scheduling
   priorities of insns.  */
static int
rs6000_adjust_priority (insn, priority)
     rtx insn ATTRIBUTE_UNUSED;
     int priority;
{
  /* On machines (like the 750) which have asymmetric integer units,
     where one integer unit can do multiply and divides and the other
     can't, reduce the priority of multiply/divide so it is scheduled
     before other integer operations.  */

  /* NOTE(review): in the upstream source this adjustment (with its
     debugging fprintf) is disabled under `#if 0'; the #if/#endif lines
     were lost in extraction and are restored here -- confirm.  */
#if 0
  if (! INSN_P (insn))
    return priority;

  if (GET_CODE (PATTERN (insn)) == USE)
    return priority;

  switch (rs6000_cpu_attr) {
  case CPU_PPC750:
    switch (get_attr_type (insn))
      {
      default:
	break;

      case TYPE_IMUL:
      case TYPE_IDIV:
	fprintf (stderr, "priority was %#x (%d) before adjustment\n",
		 priority, priority);
	if (priority >= 0 && priority < 0x01000000)
	  priority >>= 3;
	break;
      }
  }
#endif

  return priority;
}
/* Return how many instructions the machine can issue per cycle.  */
static int
rs6000_issue_rate ()
{
  /* Use issue rate of 1 for first scheduling pass to decrease degradation.  */
  if (!reload_completed)
    return 1;

  /* NOTE(review): only the RIOS1 and PPC601 labels survived extraction;
     the remaining CPU groupings are reconstructed -- confirm against
     the repository copy.  */
  switch (rs6000_cpu_attr) {
  case CPU_RIOS1:  /* ? */
  case CPU_RS64A:
  case CPU_PPC601: /* ? */
  case CPU_PPC7450:
    return 3;
  case CPU_PPC603:
  case CPU_PPC750:
  case CPU_PPC7400:
  case CPU_PPC8540:
    return 2;
  case CPU_RIOS2:
  case CPU_PPC604:
  case CPU_PPC604E:
  case CPU_PPC620:
  case CPU_PPC630:
  case CPU_POWER4:
    return 4;
  default:
    return 1;
  }
}
/* Return how many instructions to look ahead for better insn
   scheduling.  */
static int
rs6000_use_sched_lookahead ()
{
  /* Only the 8540 benefits from multi-insn lookahead here.  */
  if (rs6000_cpu_attr == CPU_PPC8540)
    return 4;
  return 0;
}
/* Length in units of the trampoline for entering a nested function.  */
int
rs6000_trampoline_size ()
{
  int ret = 0;

  switch (DEFAULT_ABI)
    {
    default:
      abort ();

    case ABI_AIX:
      /* AIX: a 3-word function descriptor (words are 4 or 8 bytes).  */
      ret = (TARGET_32BIT) ? 12 : 24;
      break;

    case ABI_DARWIN:
    case ABI_V4:
      ret = (TARGET_32BIT) ? 40 : 48;
      break;
    }

  return ret;
}
/* Emit RTL insns to initialize the variable parts of a trampoline.
   FNADDR is an RTX for the address of the function's pure code.
   CXT is an RTX for the static chain value for the function.  */
void
rs6000_initialize_trampoline (addr, fnaddr, cxt)
     rtx addr;
     rtx fnaddr;
     rtx cxt;
{
  enum machine_mode pmode = Pmode;
  int regsize = (TARGET_32BIT) ? 4 : 8;
  rtx ctx_reg = force_reg (pmode, cxt);

  switch (DEFAULT_ABI)
    {
    default:
      abort ();

/* Macros to shorten the code expansions below.  */
#define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
#define MEM_PLUS(addr,offset) \
  gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))

    /* Under AIX, just build the 3 word function descriptor */
    case ABI_AIX:
      {
	rtx fn_reg = gen_reg_rtx (pmode);
	rtx toc_reg = gen_reg_rtx (pmode);
	/* Copy entry point and TOC word from FNADDR's descriptor,
	   then store entry, TOC, and static chain into ADDR's.  */
	emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
	emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
	emit_move_insn (MEM_DEREF (addr), fn_reg);
	emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
	emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
      }
      break;

    /* Under V.4/eabi/darwin, __trampoline_setup does the real work.  */
    case ABI_DARWIN:
    case ABI_V4:
      emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
			 FALSE, VOIDmode, 4,
			 addr, pmode,
			 GEN_INT (rs6000_trampoline_size ()), SImode,
			 fnaddr, pmode,
			 ctx_reg, pmode);
      break;

#undef MEM_DEREF
#undef MEM_PLUS
    }

  return;
}
/* Table of valid machine attributes.  Both "longcall" and "shortcall"
   share one handler, which validates the decorated node; the table is
   NULL-terminated as required by the attribute_spec protocol.  */
const struct attribute_spec rs6000_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "longcall",  0, 0, false, true,  true,  rs6000_handle_longcall_attribute },
  { "shortcall", 0, 0, false, true,  true,  rs6000_handle_longcall_attribute },
  { NULL,        0, 0, false, false, false, NULL }
};
/* Handle a "longcall" or "shortcall" attribute; arguments as in
   struct attribute_spec.handler.  Warns and suppresses the attribute
   when applied to anything that is not function-typed.  */
static tree
rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
     tree *node;
     tree name;
     tree args ATTRIBUTE_UNUSED;
     int flags ATTRIBUTE_UNUSED;
     bool *no_add_attrs;
{
  if (TREE_CODE (*node) != FUNCTION_TYPE
      && TREE_CODE (*node) != FIELD_DECL
      && TREE_CODE (*node) != TYPE_DECL)
    {
      warning ("`%s' attribute only applies to functions",
	       IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
/* Set longcall attributes on all functions declared when
   rs6000_default_long_calls is true.  */
void
rs6000_set_default_type_attributes (type)
     tree type;
{
  if (rs6000_default_long_calls
      && (TREE_CODE (type) == FUNCTION_TYPE
	  || TREE_CODE (type) == METHOD_TYPE))
    TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
					NULL_TREE,
					TYPE_ATTRIBUTES (type));
}
/* Return a reference suitable for calling a function with the
   longcall attribute.  Forces the (possibly renamed) symbol into a
   register so the call goes through CTR/LR rather than a direct
   branch with limited reach.  */
struct rtx_def *
rs6000_longcall_ref (call_ref)
     rtx call_ref;
{
  const char *call_name;
  tree node;

  if (GET_CODE (call_ref) != SYMBOL_REF)
    return call_ref;

  /* System V adds '.' to the internal name, so skip them.  */
  call_name = XSTR (call_ref, 0);
  if (*call_name == '.')
    {
      while (*call_name == '.')
	call_name++;

      node = get_identifier (call_name);
      call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
    }

  return force_reg (Pmode, call_ref);
}
13399 #ifdef USING_ELFOS_H
/* A C statement or statements to switch to the appropriate section
   for output of RTX in mode MODE.  You can assume that RTX is some
   kind of constant in RTL.  The argument MODE is redundant except in
   the case of a `const_int' rtx.  Select the section by calling
   `text_section' or one of the alternatives for other sections.

   Do not define this macro if you put all constants in the read-only
   data section.  */
static void
rs6000_elf_select_rtx_section (mode, x, align)
     enum machine_mode mode;
     rtx x;
     unsigned HOST_WIDE_INT align;
{
  /* Constants that belong in the TOC go to the .toc section;
     everything else uses the generic ELF selection logic.  */
  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
    toc_section ();
  else
    default_elf_select_rtx_section (mode, x, align);
}
/* A C statement or statements to switch to the appropriate
   section for output of DECL.  DECL is either a `VAR_DECL' node
   or a constant of some sort.  RELOC indicates whether forming
   the initial value of DECL requires link-time relocations.  */
static void
rs6000_elf_select_section (decl, reloc, align)
     tree decl;
     int reloc;
     unsigned HOST_WIDE_INT align;
{
  /* Pretend that we're always building for a shared library when
     ABI_AIX, because otherwise we end up with dynamic relocations
     in read-only sections.  This happens for function pointers,
     references to vtables in typeinfo, and probably other cases.  */
  default_elf_select_section_1 (decl, reloc, align,
				flag_pic || DEFAULT_ABI == ABI_AIX);
}
/* A C statement to build up a unique section name, expressed as a
   STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
   RELOC indicates whether the initial value of EXP requires
   link-time relocations.  If you do not define this macro, GCC will use
   the symbol name prefixed by `.' as the section name.  Note - this
   macro can now be called for uninitialized data items as well as
   initialized data and functions.  */
static void
rs6000_elf_unique_section (decl, reloc)
     tree decl;
     int reloc;
{
  /* As above, pretend that we're always building for a shared library
     when ABI_AIX, to avoid dynamic relocations in read-only sections.  */
  default_unique_section_1 (decl, reloc,
			    flag_pic || DEFAULT_ABI == ABI_AIX);
}
/* For a SYMBOL_REF, set generic flags and then perform some
   target-specific processing.

   When the AIX ABI is requested on a non-AIX system, replace the
   function name with the real name (with a leading .) rather than the
   function descriptor name.  This saves a lot of overriding code to
   read the prefixes.  */
static void
rs6000_elf_encode_section_info (decl, rtl, first)
     tree decl;
     rtx rtl;
     int first;
{
  default_encode_section_info (decl, rtl, first);

  if (first
      && TREE_CODE (decl) == FUNCTION_DECL
      && !TARGET_AIX
      && DEFAULT_ABI == ABI_AIX)
    {
      rtx sym_ref = XEXP (rtl, 0);
      size_t len = strlen (XSTR (sym_ref, 0));
      char *str = alloca (len + 2);
      /* Prepend '.' to get the code-entry-point name.  */
      str[0] = '.';
      memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
      XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
    }
}
13491 rs6000_elf_in_small_data_p (decl
)
13494 if (rs6000_sdata
== SDATA_NONE
)
13497 if (TREE_CODE (decl
) == VAR_DECL
&& DECL_SECTION_NAME (decl
))
13499 const char *section
= TREE_STRING_POINTER (DECL_SECTION_NAME (decl
));
13500 if (strcmp (section
, ".sdata") == 0
13501 || strcmp (section
, ".sdata2") == 0
13502 || strcmp (section
, ".sbss") == 0
13503 || strcmp (section
, ".sbss2") == 0
13504 || strcmp (section
, ".PPC.EMB.sdata0") == 0
13505 || strcmp (section
, ".PPC.EMB.sbss0") == 0)
13510 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (decl
));
13513 && size
<= g_switch_value
13514 /* If it's not public, and we're not going to reference it there,
13515 there's no need to put it in the small data section. */
13516 && (rs6000_sdata
!= SDATA_DATA
|| TREE_PUBLIC (decl
)))
13523 #endif /* USING_ELFOS_H */
13526 /* Return a REG that occurs in ADDR with coefficient 1.
13527 ADDR can be effectively incremented by incrementing REG.
13529 r0 is special and we must not select it as an address
13530 register by this routine since our caller will try to
13531 increment the returned register via an "la" instruction. */
13534 find_addr_reg (addr
)
13537 while (GET_CODE (addr
) == PLUS
)
13539 if (GET_CODE (XEXP (addr
, 0)) == REG
13540 && REGNO (XEXP (addr
, 0)) != 0)
13541 addr
= XEXP (addr
, 0);
13542 else if (GET_CODE (XEXP (addr
, 1)) == REG
13543 && REGNO (XEXP (addr
, 1)) != 0)
13544 addr
= XEXP (addr
, 1);
13545 else if (CONSTANT_P (XEXP (addr
, 0)))
13546 addr
= XEXP (addr
, 1);
13547 else if (CONSTANT_P (XEXP (addr
, 1)))
13548 addr
= XEXP (addr
, 0);
13552 if (GET_CODE (addr
) == REG
&& REGNO (addr
) != 0)
13558 rs6000_fatal_bad_address (op
)
13561 fatal_insn ("bad address", op
);
13567 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
13568 reference and a constant. */
13571 symbolic_operand (op
)
13574 switch (GET_CODE (op
))
13581 return (GET_CODE (op
) == SYMBOL_REF
||
13582 (GET_CODE (XEXP (op
, 0)) == SYMBOL_REF
13583 || GET_CODE (XEXP (op
, 0)) == LABEL_REF
)
13584 && GET_CODE (XEXP (op
, 1)) == CONST_INT
);
13591 #ifdef RS6000_LONG_BRANCH
13593 static tree stub_list
= 0;
13595 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
13596 procedure calls to the linked list. */
13599 add_compiler_stub (label_name
, function_name
, line_number
)
13601 tree function_name
;
13604 tree stub
= build_tree_list (function_name
, label_name
);
13605 TREE_TYPE (stub
) = build_int_2 (line_number
, 0);
13606 TREE_CHAIN (stub
) = stub_list
;
13610 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
13611 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
13612 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
13614 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
13615 handling procedure calls from the linked list and initializes the
13619 output_compiler_stub ()
13622 char label_buf
[256];
13626 for (stub
= stub_list
; stub
; stub
= TREE_CHAIN (stub
))
13628 fprintf (asm_out_file
,
13629 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub
)));
13631 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
13632 if (write_symbols
== DBX_DEBUG
|| write_symbols
== XCOFF_DEBUG
)
13633 fprintf (asm_out_file
, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub
));
13634 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
13636 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub
))[0] == '*')
13638 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub
))+1);
13641 label_buf
[0] = '_';
13642 strcpy (label_buf
+1,
13643 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub
)));
13646 strcpy (tmp_buf
, "lis r12,hi16(");
13647 strcat (tmp_buf
, label_buf
);
13648 strcat (tmp_buf
, ")\n\tori r12,r12,lo16(");
13649 strcat (tmp_buf
, label_buf
);
13650 strcat (tmp_buf
, ")\n\tmtctr r12\n\tbctr");
13651 output_asm_insn (tmp_buf
, 0);
13653 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
13654 if (write_symbols
== DBX_DEBUG
|| write_symbols
== XCOFF_DEBUG
)
13655 fprintf(asm_out_file
, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub
));
13656 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
13662 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
13663 already there or not. */
13666 no_previous_def (function_name
)
13667 tree function_name
;
13670 for (stub
= stub_list
; stub
; stub
= TREE_CHAIN (stub
))
13671 if (function_name
== STUB_FUNCTION_NAME (stub
))
13676 /* GET_PREV_LABEL gets the label name from the previous definition of
13680 get_prev_label (function_name
)
13681 tree function_name
;
13684 for (stub
= stub_list
; stub
; stub
= TREE_CHAIN (stub
))
13685 if (function_name
== STUB_FUNCTION_NAME (stub
))
13686 return STUB_LABEL_NAME (stub
);
13690 /* INSN is either a function call or a millicode call. It may have an
13691 unconditional jump in its delay slot.
13693 CALL_DEST is the routine we are calling. */
13696 output_call (insn
, call_dest
, operand_number
)
13699 int operand_number
;
13701 static char buf
[256];
13702 if (GET_CODE (call_dest
) == SYMBOL_REF
&& TARGET_LONG_BRANCH
&& !flag_pic
)
13705 tree funname
= get_identifier (XSTR (call_dest
, 0));
13707 if (no_previous_def (funname
))
13709 int line_number
= 0;
13710 rtx label_rtx
= gen_label_rtx ();
13711 char *label_buf
, temp_buf
[256];
13712 ASM_GENERATE_INTERNAL_LABEL (temp_buf
, "L",
13713 CODE_LABEL_NUMBER (label_rtx
));
13714 label_buf
= temp_buf
[0] == '*' ? temp_buf
+ 1 : temp_buf
;
13715 labelname
= get_identifier (label_buf
);
13716 for (; insn
&& GET_CODE (insn
) != NOTE
; insn
= PREV_INSN (insn
));
13718 line_number
= NOTE_LINE_NUMBER (insn
);
13719 add_compiler_stub (labelname
, funname
, line_number
);
13722 labelname
= get_prev_label (funname
);
13724 sprintf (buf
, "jbsr %%z%d,%.246s",
13725 operand_number
, IDENTIFIER_POINTER (labelname
));
13730 sprintf (buf
, "bl %%z%d", operand_number
);
13735 #endif /* RS6000_LONG_BRANCH */
13737 #define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N) \
13739 const char *const symbol_ = (SYMBOL); \
13740 char *buffer_ = (BUF); \
13741 if (symbol_[0] == '"') \
13743 sprintf(buffer_, "\"L%d$%s", (N), symbol_+1); \
13745 else if (name_needs_quotes(symbol_)) \
13747 sprintf(buffer_, "\"L%d$%s\"", (N), symbol_); \
13751 sprintf(buffer_, "L%d$%s", (N), symbol_); \
13756 /* Generate PIC and indirect symbol stubs. */
13759 machopic_output_stub (file
, symb
, stub
)
13761 const char *symb
, *stub
;
13763 unsigned int length
;
13764 char *symbol_name
, *lazy_ptr_name
;
13765 char *local_label_0
;
13766 static int label
= 0;
13768 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
13769 symb
= (*targetm
.strip_name_encoding
) (symb
);
13773 length
= strlen (symb
);
13774 symbol_name
= alloca (length
+ 32);
13775 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name
, symb
, length
);
13777 lazy_ptr_name
= alloca (length
+ 32);
13778 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name
, symb
, length
);
13780 local_label_0
= alloca (length
+ 32);
13781 GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0
, symb
, length
, 0);
13784 machopic_picsymbol_stub1_section ();
13786 machopic_symbol_stub1_section ();
13787 fprintf (file
, "\t.align 2\n");
13789 fprintf (file
, "%s:\n", stub
);
13790 fprintf (file
, "\t.indirect_symbol %s\n", symbol_name
);
13794 fprintf (file
, "\tmflr r0\n");
13795 fprintf (file
, "\tbcl 20,31,%s\n", local_label_0
);
13796 fprintf (file
, "%s:\n\tmflr r11\n", local_label_0
);
13797 fprintf (file
, "\taddis r11,r11,ha16(%s-%s)\n",
13798 lazy_ptr_name
, local_label_0
);
13799 fprintf (file
, "\tmtlr r0\n");
13800 fprintf (file
, "\tlwzu r12,lo16(%s-%s)(r11)\n",
13801 lazy_ptr_name
, local_label_0
);
13802 fprintf (file
, "\tmtctr r12\n");
13803 fprintf (file
, "\tbctr\n");
13807 fprintf (file
, "\tlis r11,ha16(%s)\n", lazy_ptr_name
);
13808 fprintf (file
, "\tlwzu r12,lo16(%s)(r11)\n", lazy_ptr_name
);
13809 fprintf (file
, "\tmtctr r12\n");
13810 fprintf (file
, "\tbctr\n");
13813 machopic_lazy_symbol_ptr_section ();
13814 fprintf (file
, "%s:\n", lazy_ptr_name
);
13815 fprintf (file
, "\t.indirect_symbol %s\n", symbol_name
);
13816 fprintf (file
, "\t.long dyld_stub_binding_helper\n");
13819 /* Legitimize PIC addresses. If the address is already
13820 position-independent, we return ORIG. Newly generated
13821 position-independent addresses go into a reg. This is REG if non
13822 zero, otherwise we allocate register(s) as necessary. */
13824 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
13827 rs6000_machopic_legitimize_pic_address (orig
, mode
, reg
)
13829 enum machine_mode mode
;
13834 if (reg
== NULL
&& ! reload_in_progress
&& ! reload_completed
)
13835 reg
= gen_reg_rtx (Pmode
);
13837 if (GET_CODE (orig
) == CONST
)
13839 if (GET_CODE (XEXP (orig
, 0)) == PLUS
13840 && XEXP (XEXP (orig
, 0), 0) == pic_offset_table_rtx
)
13843 if (GET_CODE (XEXP (orig
, 0)) == PLUS
)
13846 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig
, 0), 0),
13849 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig
, 0), 1),
13855 if (GET_CODE (offset
) == CONST_INT
)
13857 if (SMALL_INT (offset
))
13858 return plus_constant (base
, INTVAL (offset
));
13859 else if (! reload_in_progress
&& ! reload_completed
)
13860 offset
= force_reg (Pmode
, offset
);
13863 rtx mem
= force_const_mem (Pmode
, orig
);
13864 return machopic_legitimize_pic_address (mem
, Pmode
, reg
);
13867 return gen_rtx (PLUS
, Pmode
, base
, offset
);
13870 /* Fall back on generic machopic code. */
13871 return machopic_legitimize_pic_address (orig
, mode
, reg
);
13874 /* This is just a placeholder to make linking work without having to
13875 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
13876 ever needed for Darwin (not too likely!) this would have to get a
13877 real definition. */
13884 #endif /* TARGET_MACHO */
13887 static unsigned int
13888 rs6000_elf_section_type_flags (decl
, name
, reloc
)
13894 = default_section_type_flags_1 (decl
, name
, reloc
,
13895 flag_pic
|| DEFAULT_ABI
== ABI_AIX
);
13897 if (TARGET_RELOCATABLE
)
13898 flags
|= SECTION_WRITE
;
13903 /* Record an element in the table of global constructors. SYMBOL is
13904 a SYMBOL_REF of the function to be called; PRIORITY is a number
13905 between 0 and MAX_INIT_PRIORITY.
13907 This differs from default_named_section_asm_out_constructor in
13908 that we have special handling for -mrelocatable. */
13911 rs6000_elf_asm_out_constructor (symbol
, priority
)
13915 const char *section
= ".ctors";
13918 if (priority
!= DEFAULT_INIT_PRIORITY
)
13920 sprintf (buf
, ".ctors.%.5u",
13921 /* Invert the numbering so the linker puts us in the proper
13922 order; constructors are run from right to left, and the
13923 linker sorts in increasing order. */
13924 MAX_INIT_PRIORITY
- priority
);
13928 named_section_flags (section
, SECTION_WRITE
);
13929 assemble_align (POINTER_SIZE
);
13931 if (TARGET_RELOCATABLE
)
13933 fputs ("\t.long (", asm_out_file
);
13934 output_addr_const (asm_out_file
, symbol
);
13935 fputs (")@fixup\n", asm_out_file
);
13938 assemble_integer (symbol
, POINTER_SIZE
/ BITS_PER_UNIT
, POINTER_SIZE
, 1);
13942 rs6000_elf_asm_out_destructor (symbol
, priority
)
13946 const char *section
= ".dtors";
13949 if (priority
!= DEFAULT_INIT_PRIORITY
)
13951 sprintf (buf
, ".dtors.%.5u",
13952 /* Invert the numbering so the linker puts us in the proper
13953 order; constructors are run from right to left, and the
13954 linker sorts in increasing order. */
13955 MAX_INIT_PRIORITY
- priority
);
13959 named_section_flags (section
, SECTION_WRITE
);
13960 assemble_align (POINTER_SIZE
);
13962 if (TARGET_RELOCATABLE
)
13964 fputs ("\t.long (", asm_out_file
);
13965 output_addr_const (asm_out_file
, symbol
);
13966 fputs (")@fixup\n", asm_out_file
);
13969 assemble_integer (symbol
, POINTER_SIZE
/ BITS_PER_UNIT
, POINTER_SIZE
, 1);
13975 rs6000_xcoff_asm_globalize_label (stream
, name
)
13979 fputs (GLOBAL_ASM_OP
, stream
);
13980 RS6000_OUTPUT_BASENAME (stream
, name
);
13981 putc ('\n', stream
);
13985 rs6000_xcoff_asm_named_section (name
, flags
)
13987 unsigned int flags
;
13990 static const char * const suffix
[3] = { "PR", "RO", "RW" };
13992 if (flags
& SECTION_CODE
)
13994 else if (flags
& SECTION_WRITE
)
13999 fprintf (asm_out_file
, "\t.csect %s%s[%s],%u\n",
14000 (flags
& SECTION_CODE
) ? "." : "",
14001 name
, suffix
[smclass
], flags
& SECTION_ENTSIZE
);
14005 rs6000_xcoff_select_section (decl
, reloc
, align
)
14008 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
;
14010 if (decl_readonly_section_1 (decl
, reloc
, 1))
14012 if (TREE_PUBLIC (decl
))
14013 read_only_data_section ();
14015 read_only_private_data_section ();
14019 if (TREE_PUBLIC (decl
))
14022 private_data_section ();
14027 rs6000_xcoff_unique_section (decl
, reloc
)
14029 int reloc ATTRIBUTE_UNUSED
;
14033 /* Use select_section for private and uninitialized data. */
14034 if (!TREE_PUBLIC (decl
)
14035 || DECL_COMMON (decl
)
14036 || DECL_INITIAL (decl
) == NULL_TREE
14037 || DECL_INITIAL (decl
) == error_mark_node
14038 || (flag_zero_initialized_in_bss
14039 && initializer_zerop (DECL_INITIAL (decl
))))
14042 name
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
));
14043 name
= (*targetm
.strip_name_encoding
) (name
);
14044 DECL_SECTION_NAME (decl
) = build_string (strlen (name
), name
);
14047 /* Select section for constant in constant pool.
14049 On RS/6000, all constants are in the private read-only data area.
14050 However, if this is being placed in the TOC it must be output as a
14054 rs6000_xcoff_select_rtx_section (mode
, x
, align
)
14055 enum machine_mode mode
;
14057 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
;
14059 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x
, mode
))
14062 read_only_private_data_section ();
14065 /* Remove any trailing [DS] or the like from the symbol name. */
14067 static const char *
14068 rs6000_xcoff_strip_name_encoding (name
)
14074 len
= strlen (name
);
14075 if (name
[len
- 1] == ']')
14076 return ggc_alloc_string (name
, len
- 4);
14081 /* Section attributes. AIX is always PIC. */
14083 static unsigned int
14084 rs6000_xcoff_section_type_flags (decl
, name
, reloc
)
14089 unsigned int align
;
14090 unsigned int flags
= default_section_type_flags_1 (decl
, name
, reloc
, 1);
14092 /* Align to at least UNIT size. */
14093 if (flags
& SECTION_CODE
)
14094 align
= MIN_UNITS_PER_WORD
;
14096 /* Increase alignment of large objects if not already stricter. */
14097 align
= MAX ((DECL_ALIGN (decl
) / BITS_PER_UNIT
),
14098 int_size_in_bytes (TREE_TYPE (decl
)) > MIN_UNITS_PER_WORD
14099 ? UNITS_PER_FP_WORD
: MIN_UNITS_PER_WORD
);
14101 return flags
| (exact_log2 (align
) & SECTION_ENTSIZE
);
14103 #endif /* TARGET_XCOFF */
14106 /* Cross-module name binding. Darwin does not support overriding
14107 functions at dynamic-link time. */
14110 rs6000_binds_local_p (decl
)
14113 return default_binds_local_p_1 (decl
, 0);
14117 /* Compute a (partial) cost for rtx X. Return true if the complete
14118 cost has been computed, and false if subexpressions should be
14119 scanned. In either case, *TOTAL contains the cost result. */
14122 rs6000_rtx_costs (x
, code
, outer_code
, total
)
14124 int code
, outer_code ATTRIBUTE_UNUSED
;
14129 /* On the RS/6000, if it is valid in the insn, it is free.
14130 So this always returns 0. */
14141 *total
= ((GET_CODE (XEXP (x
, 1)) == CONST_INT
14142 && ((unsigned HOST_WIDE_INT
) (INTVAL (XEXP (x
, 1))
14143 + 0x8000) >= 0x10000)
14144 && ((INTVAL (XEXP (x
, 1)) & 0xffff) != 0))
14145 ? COSTS_N_INSNS (2)
14146 : COSTS_N_INSNS (1));
14152 *total
= ((GET_CODE (XEXP (x
, 1)) == CONST_INT
14153 && (INTVAL (XEXP (x
, 1)) & (~ (HOST_WIDE_INT
) 0xffff)) != 0
14154 && ((INTVAL (XEXP (x
, 1)) & 0xffff) != 0))
14155 ? COSTS_N_INSNS (2)
14156 : COSTS_N_INSNS (1));
14162 *total
= COSTS_N_INSNS (2);
14165 switch (rs6000_cpu
)
14167 case PROCESSOR_RIOS1
:
14168 case PROCESSOR_PPC405
:
14169 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
14170 ? COSTS_N_INSNS (5)
14171 : (INTVAL (XEXP (x
, 1)) >= -256
14172 && INTVAL (XEXP (x
, 1)) <= 255)
14173 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
14176 case PROCESSOR_PPC440
:
14177 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
14178 ? COSTS_N_INSNS (3)
14179 : COSTS_N_INSNS (2));
14182 case PROCESSOR_RS64A
:
14183 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
14184 ? GET_MODE (XEXP (x
, 1)) != DImode
14185 ? COSTS_N_INSNS (20) : COSTS_N_INSNS (34)
14186 : (INTVAL (XEXP (x
, 1)) >= -256
14187 && INTVAL (XEXP (x
, 1)) <= 255)
14188 ? COSTS_N_INSNS (8) : COSTS_N_INSNS (12));
14191 case PROCESSOR_RIOS2
:
14192 case PROCESSOR_MPCCORE
:
14193 case PROCESSOR_PPC604e
:
14194 *total
= COSTS_N_INSNS (2);
14197 case PROCESSOR_PPC601
:
14198 *total
= COSTS_N_INSNS (5);
14201 case PROCESSOR_PPC603
:
14202 case PROCESSOR_PPC7400
:
14203 case PROCESSOR_PPC750
:
14204 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
14205 ? COSTS_N_INSNS (5)
14206 : (INTVAL (XEXP (x
, 1)) >= -256
14207 && INTVAL (XEXP (x
, 1)) <= 255)
14208 ? COSTS_N_INSNS (2) : COSTS_N_INSNS (3));
14211 case PROCESSOR_PPC7450
:
14212 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
14213 ? COSTS_N_INSNS (4)
14214 : COSTS_N_INSNS (3));
14217 case PROCESSOR_PPC403
:
14218 case PROCESSOR_PPC604
:
14219 case PROCESSOR_PPC8540
:
14220 *total
= COSTS_N_INSNS (4);
14223 case PROCESSOR_PPC620
:
14224 case PROCESSOR_PPC630
:
14225 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
14226 ? GET_MODE (XEXP (x
, 1)) != DImode
14227 ? COSTS_N_INSNS (5) : COSTS_N_INSNS (7)
14228 : (INTVAL (XEXP (x
, 1)) >= -256
14229 && INTVAL (XEXP (x
, 1)) <= 255)
14230 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
14233 case PROCESSOR_POWER4
:
14234 *total
= (GET_CODE (XEXP (x
, 1)) != CONST_INT
14235 ? GET_MODE (XEXP (x
, 1)) != DImode
14236 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4)
14237 : COSTS_N_INSNS (2));
14246 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
14247 && exact_log2 (INTVAL (XEXP (x
, 1))) >= 0)
14249 *total
= COSTS_N_INSNS (2);
14256 switch (rs6000_cpu
)
14258 case PROCESSOR_RIOS1
:
14259 *total
= COSTS_N_INSNS (19);
14262 case PROCESSOR_RIOS2
:
14263 *total
= COSTS_N_INSNS (13);
14266 case PROCESSOR_RS64A
:
14267 *total
= (GET_MODE (XEXP (x
, 1)) != DImode
14268 ? COSTS_N_INSNS (65)
14269 : COSTS_N_INSNS (67));
14272 case PROCESSOR_MPCCORE
:
14273 *total
= COSTS_N_INSNS (6);
14276 case PROCESSOR_PPC403
:
14277 *total
= COSTS_N_INSNS (33);
14280 case PROCESSOR_PPC405
:
14281 *total
= COSTS_N_INSNS (35);
14284 case PROCESSOR_PPC440
:
14285 *total
= COSTS_N_INSNS (34);
14288 case PROCESSOR_PPC601
:
14289 *total
= COSTS_N_INSNS (36);
14292 case PROCESSOR_PPC603
:
14293 *total
= COSTS_N_INSNS (37);
14296 case PROCESSOR_PPC604
:
14297 case PROCESSOR_PPC604e
:
14298 *total
= COSTS_N_INSNS (20);
14301 case PROCESSOR_PPC620
:
14302 case PROCESSOR_PPC630
:
14303 *total
= (GET_MODE (XEXP (x
, 1)) != DImode
14304 ? COSTS_N_INSNS (21)
14305 : COSTS_N_INSNS (37));
14308 case PROCESSOR_PPC750
:
14309 case PROCESSOR_PPC8540
:
14310 case PROCESSOR_PPC7400
:
14311 *total
= COSTS_N_INSNS (19);
14314 case PROCESSOR_PPC7450
:
14315 *total
= COSTS_N_INSNS (23);
14318 case PROCESSOR_POWER4
:
14319 *total
= (GET_MODE (XEXP (x
, 1)) != DImode
14320 ? COSTS_N_INSNS (18)
14321 : COSTS_N_INSNS (34));
14329 *total
= COSTS_N_INSNS (4);
14333 /* MEM should be slightly more expensive than (plus (reg) (const)) */
14342 /* A C expression returning the cost of moving data from a register of class
14343 CLASS1 to one of CLASS2. */
14346 rs6000_register_move_cost (mode
, from
, to
)
14347 enum machine_mode mode
;
14348 enum reg_class from
, to
;
14350 /* Moves from/to GENERAL_REGS. */
14351 if (reg_classes_intersect_p (to
, GENERAL_REGS
)
14352 || reg_classes_intersect_p (from
, GENERAL_REGS
))
14354 if (! reg_classes_intersect_p (to
, GENERAL_REGS
))
14357 if (from
== FLOAT_REGS
|| from
== ALTIVEC_REGS
)
14358 return (rs6000_memory_move_cost (mode
, from
, 0)
14359 + rs6000_memory_move_cost (mode
, GENERAL_REGS
, 0));
14361 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift...*/
14362 else if (from
== CR_REGS
)
14366 /* A move will cost one instruction per GPR moved. */
14367 return 2 * HARD_REGNO_NREGS (0, mode
);
14370 /* Moving between two similar registers is just one instruction. */
14371 else if (reg_classes_intersect_p (to
, from
))
14372 return mode
== TFmode
? 4 : 2;
14374 /* Everything else has to go through GENERAL_REGS. */
14376 return (rs6000_register_move_cost (mode
, GENERAL_REGS
, to
)
14377 + rs6000_register_move_cost (mode
, from
, GENERAL_REGS
));
14380 /* A C expressions returning the cost of moving data of MODE from a register to
14384 rs6000_memory_move_cost (mode
, class, in
)
14385 enum machine_mode mode
;
14386 enum reg_class
class;
14387 int in ATTRIBUTE_UNUSED
;
14389 if (reg_classes_intersect_p (class, GENERAL_REGS
))
14390 return 4 * HARD_REGNO_NREGS (0, mode
);
14391 else if (reg_classes_intersect_p (class, FLOAT_REGS
))
14392 return 4 * HARD_REGNO_NREGS (32, mode
);
14393 else if (reg_classes_intersect_p (class, ALTIVEC_REGS
))
14394 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO
, mode
);
14396 return 4 + rs6000_register_move_cost (mode
, class, GENERAL_REGS
);
14399 /* Define how to find the value returned by a function.
14400 VALTYPE is the data type of the value (as a tree).
14401 If the precise function being called is known, FUNC is its FUNCTION_DECL;
14402 otherwise, FUNC is 0.
14404 On the SPE, both FPs and vectors are returned in r3.
14406 On RS/6000 an integer value is in r3 and a floating-point value is in
14407 fp1, unless -msoft-float. */
14410 rs6000_function_value (tree valtype
, tree func ATTRIBUTE_UNUSED
)
14412 enum machine_mode mode
;
14413 unsigned int regno
= GP_ARG_RETURN
;
14415 if ((INTEGRAL_TYPE_P (valtype
)
14416 && TYPE_PRECISION (valtype
) < BITS_PER_WORD
)
14417 || POINTER_TYPE_P (valtype
))
14420 mode
= TYPE_MODE (valtype
);
14422 if (TREE_CODE (valtype
) == REAL_TYPE
)
14424 if (TARGET_HARD_FLOAT
&& TARGET_FPRS
)
14425 regno
= FP_ARG_RETURN
;
14426 else if (TARGET_SPE_ABI
&& !TARGET_FPRS
)
14427 regno
= GP_ARG_RETURN
;
14429 else if (TARGET_ALTIVEC
&& TREE_CODE (valtype
) == VECTOR_TYPE
)
14430 regno
= ALTIVEC_ARG_RETURN
;
14432 regno
= GP_ARG_RETURN
;
14434 return gen_rtx_REG (mode
, regno
);
14437 /* Return true if TYPE is of type __ev64_opaque__. */
14440 is_ev64_opaque_type (type
)
14444 && (type
== opaque_V2SI_type_node
14445 || type
== opaque_V2SF_type_node
14446 || type
== opaque_p_V2SI_type_node
14447 || (TREE_CODE (type
) == VECTOR_TYPE
14448 && TYPE_NAME (type
)
14449 && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
14450 && DECL_NAME (TYPE_NAME (type
))
14451 && strcmp (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type
))),
14452 "__ev64_opaque__") == 0)));
14456 rs6000_dwarf_register_span (reg
)
14461 if (!TARGET_SPE
|| !SPE_VECTOR_MODE (GET_MODE (reg
)))
14464 regno
= REGNO (reg
);
14466 /* The duality of the SPE register size wreaks all kinds of havoc.
14467 This is a way of distinguishing r0 in 32-bits from r0 in
14470 gen_rtx_PARALLEL (VOIDmode
,
14473 gen_rtx_REG (SImode
, regno
+ 1200),
14474 gen_rtx_REG (SImode
, regno
))
14476 gen_rtx_REG (SImode
, regno
),
14477 gen_rtx_REG (SImode
, regno
+ 1200)));
14480 #include "gt-rs6000.h"