* targhooks.c: New file.
[official-gcc.git] / gcc / config / rs6000 / rs6000.c
blob378170ca79d2a9e8a9b69d3444cbd098923bcda7
/* Subroutines used for code generation on IBM RS/6000.
   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003 Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 2, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING.  If not, write to the
   Free Software Foundation, 59 Temple Place - Suite 330, Boston,
   MA 02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-attr.h"
#include "flags.h"
#include "recog.h"
#include "obstack.h"
#include "tree.h"
#include "expr.h"
#include "optabs.h"
#include "except.h"
#include "function.h"
#include "output.h"
#include "basic-block.h"
#include "integrate.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"
#include "langhooks.h"
#include "reload.h"
#include "cfglayout.h"
#if TARGET_XCOFF
#include "xcoffout.h"  /* get declarations of xcoff_*_section_name */
#endif
/* Subtargets that supply a definition override this default.  */
#ifndef TARGET_NO_PROTOTYPE
#define TARGET_NO_PROTOTYPE 0
#endif

/* True when N is a signed 5-bit splat constant and the vector
   element pattern (X in mode Y) repeats uniformly.  */
#define EASY_VECTOR_15(n, x, y) ((n) >= -16 && (n) <= 15 \
				 && easy_vector_same (x, y))

/* True when N is an even value in [0x10, 0x1e]: such a splat can be
   materialized as a 5-bit splat added to itself.  */
#define EASY_VECTOR_15_ADD_SELF(n, x, y) ((n) >= 0x10 && (n) <= 0x1e \
					  && !((n) & 1)		    \
					  && easy_vector_same (x, y))

/* NOTE: classic double-evaluation macros; do not pass expressions
   with side effects.  */
#define min(A,B)	((A) < (B) ? (A) : (B))
#define max(A,B)	((A) > (B) ? (A) : (B))
72 /* Target cpu type */
74 enum processor_type rs6000_cpu;
75 struct rs6000_cpu_select rs6000_select[3] =
77 /* switch name, tune arch */
78 { (const char *)0, "--with-cpu=", 1, 1 },
79 { (const char *)0, "-mcpu=", 1, 1 },
80 { (const char *)0, "-mtune=", 1, 0 },
83 /* Size of long double */
84 const char *rs6000_long_double_size_string;
85 int rs6000_long_double_type_size;
87 /* Whether -mabi=altivec has appeared */
88 int rs6000_altivec_abi;
90 /* Whether VRSAVE instructions should be generated. */
91 int rs6000_altivec_vrsave;
93 /* String from -mvrsave= option. */
94 const char *rs6000_altivec_vrsave_string;
96 /* Nonzero if we want SPE ABI extensions. */
97 int rs6000_spe_abi;
99 /* Whether isel instructions should be generated. */
100 int rs6000_isel;
102 /* Whether SPE simd instructions should be generated. */
103 int rs6000_spe;
105 /* Nonzero if floating point operations are done in the GPRs. */
106 int rs6000_float_gprs = 0;
108 /* String from -mfloat-gprs=. */
109 const char *rs6000_float_gprs_string;
111 /* String from -misel=. */
112 const char *rs6000_isel_string;
114 /* String from -mspe=. */
115 const char *rs6000_spe_string;
117 /* Set to nonzero once AIX common-mode calls have been defined. */
118 static GTY(()) int common_mode_defined;
120 /* Save information from a "cmpxx" operation until the branch or scc is
121 emitted. */
122 rtx rs6000_compare_op0, rs6000_compare_op1;
123 int rs6000_compare_fp_p;
125 /* Label number of label created for -mrelocatable, to call to so we can
126 get the address of the GOT section */
127 int rs6000_pic_labelno;
129 #ifdef USING_ELFOS_H
130 /* Which abi to adhere to */
131 const char *rs6000_abi_name;
133 /* Semantics of the small data area */
134 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
136 /* Which small data model to use */
137 const char *rs6000_sdata_name = (char *)0;
139 /* Counter for labels which are to be placed in .fixup. */
140 int fixuplabelno = 0;
141 #endif
143 /* Bit size of immediate TLS offsets and string from which it is decoded. */
144 int rs6000_tls_size = 32;
145 const char *rs6000_tls_size_string;
147 /* ABI enumeration available for subtarget to use. */
148 enum rs6000_abi rs6000_current_abi;
150 /* ABI string from -mabi= option. */
151 const char *rs6000_abi_string;
153 /* Debug flags */
154 const char *rs6000_debug_name;
155 int rs6000_debug_stack; /* debug stack applications */
156 int rs6000_debug_arg; /* debug argument handling */
158 /* Opaque types. */
159 static GTY(()) tree opaque_V2SI_type_node;
160 static GTY(()) tree opaque_V2SF_type_node;
161 static GTY(()) tree opaque_p_V2SI_type_node;
163 const char *rs6000_traceback_name;
164 static enum {
165 traceback_default = 0,
166 traceback_none,
167 traceback_part,
168 traceback_full
169 } rs6000_traceback;
171 /* Flag to say the TOC is initialized */
172 int toc_initialized;
173 char toc_label_name[10];
175 /* Alias set for saves and restores from the rs6000 stack. */
176 static int rs6000_sr_alias_set;
178 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
179 The only place that looks at this is rs6000_set_default_type_attributes;
180 everywhere else should rely on the presence or absence of a longcall
181 attribute on the function declaration. */
182 int rs6000_default_long_calls;
183 const char *rs6000_longcall_switch;
185 /* Control alignment for fields within structures. */
186 /* String from -malign-XXXXX. */
187 const char *rs6000_alignment_string;
188 int rs6000_alignment_flags;
190 struct builtin_description
192 /* mask is not const because we're going to alter it below. This
193 nonsense will go away when we rewrite the -march infrastructure
194 to give us more target flag bits. */
195 unsigned int mask;
196 const enum insn_code icode;
197 const char *const name;
198 const enum rs6000_builtins code;
201 static bool rs6000_function_ok_for_sibcall PARAMS ((tree, tree));
202 static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
203 static void validate_condition_mode
204 PARAMS ((enum rtx_code, enum machine_mode));
205 static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
206 static void rs6000_maybe_dead PARAMS ((rtx));
207 static void rs6000_emit_stack_tie PARAMS ((void));
208 static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
209 static rtx spe_synthesize_frame_save PARAMS ((rtx));
210 static bool spe_func_has_64bit_regs_p PARAMS ((void));
211 static void emit_frame_save PARAMS ((rtx, rtx, enum machine_mode,
212 unsigned int, int, int));
213 static rtx gen_frame_mem_offset PARAMS ((enum machine_mode, rtx, int));
214 static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
215 static unsigned rs6000_hash_constant PARAMS ((rtx));
216 static unsigned toc_hash_function PARAMS ((const void *));
217 static int toc_hash_eq PARAMS ((const void *, const void *));
218 static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
219 static bool constant_pool_expr_p PARAMS ((rtx));
220 static bool toc_relative_expr_p PARAMS ((rtx));
221 static bool legitimate_small_data_p PARAMS ((enum machine_mode, rtx));
222 static bool legitimate_offset_address_p PARAMS ((enum machine_mode, rtx, int));
223 static bool legitimate_indexed_address_p PARAMS ((rtx, int));
224 static bool legitimate_indirect_address_p PARAMS ((rtx, int));
225 static bool legitimate_lo_sum_address_p PARAMS ((enum machine_mode, rtx, int));
226 static struct machine_function * rs6000_init_machine_status PARAMS ((void));
227 static bool rs6000_assemble_integer PARAMS ((rtx, unsigned int, int));
228 #ifdef HAVE_GAS_HIDDEN
229 static void rs6000_assemble_visibility PARAMS ((tree, int));
230 #endif
231 static int rs6000_ra_ever_killed PARAMS ((void));
232 static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
233 extern const struct attribute_spec rs6000_attribute_table[];
234 static void rs6000_set_default_type_attributes PARAMS ((tree));
235 static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
236 static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
237 static void rs6000_output_mi_thunk PARAMS ((FILE *, tree, HOST_WIDE_INT,
238 HOST_WIDE_INT, tree));
239 static rtx rs6000_emit_set_long_const PARAMS ((rtx,
240 HOST_WIDE_INT, HOST_WIDE_INT));
241 static void rs6000_file_start PARAMS ((void));
242 #if TARGET_ELF
243 static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
244 int));
245 static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
246 static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
247 static void rs6000_elf_select_section PARAMS ((tree, int,
248 unsigned HOST_WIDE_INT));
249 static void rs6000_elf_unique_section PARAMS ((tree, int));
250 static void rs6000_elf_select_rtx_section PARAMS ((enum machine_mode, rtx,
251 unsigned HOST_WIDE_INT));
252 static void rs6000_elf_encode_section_info PARAMS ((tree, rtx, int))
253 ATTRIBUTE_UNUSED;
254 static bool rs6000_elf_in_small_data_p PARAMS ((tree));
255 #endif
256 #if TARGET_XCOFF
257 static void rs6000_xcoff_asm_globalize_label PARAMS ((FILE *, const char *));
258 static void rs6000_xcoff_asm_named_section PARAMS ((const char *, unsigned int));
259 static void rs6000_xcoff_select_section PARAMS ((tree, int,
260 unsigned HOST_WIDE_INT));
261 static void rs6000_xcoff_unique_section PARAMS ((tree, int));
262 static void rs6000_xcoff_select_rtx_section PARAMS ((enum machine_mode, rtx,
263 unsigned HOST_WIDE_INT));
264 static const char * rs6000_xcoff_strip_name_encoding PARAMS ((const char *));
265 static unsigned int rs6000_xcoff_section_type_flags PARAMS ((tree, const char *, int));
266 static void rs6000_xcoff_file_start PARAMS ((void));
267 static void rs6000_xcoff_file_end PARAMS ((void));
268 #endif
269 #if TARGET_MACHO
270 static bool rs6000_binds_local_p PARAMS ((tree));
271 #endif
272 static int rs6000_use_dfa_pipeline_interface PARAMS ((void));
273 static int rs6000_variable_issue PARAMS ((FILE *, int, rtx, int));
274 static bool rs6000_rtx_costs PARAMS ((rtx, int, int, int *));
275 static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
276 static int rs6000_adjust_priority PARAMS ((rtx, int));
277 static int rs6000_issue_rate PARAMS ((void));
278 static int rs6000_use_sched_lookahead PARAMS ((void));
280 static void rs6000_init_builtins PARAMS ((void));
281 static rtx rs6000_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
282 static rtx rs6000_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
283 static rtx rs6000_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
284 static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
285 static void altivec_init_builtins PARAMS ((void));
286 static void rs6000_common_init_builtins PARAMS ((void));
288 static void enable_mask_for_builtins PARAMS ((struct builtin_description *,
289 int, enum rs6000_builtins,
290 enum rs6000_builtins));
291 static void spe_init_builtins PARAMS ((void));
292 static rtx spe_expand_builtin PARAMS ((tree, rtx, bool *));
293 static rtx spe_expand_predicate_builtin PARAMS ((enum insn_code, tree, rtx));
294 static rtx spe_expand_evsel_builtin PARAMS ((enum insn_code, tree, rtx));
295 static int rs6000_emit_int_cmove PARAMS ((rtx, rtx, rtx, rtx));
297 static rtx altivec_expand_builtin PARAMS ((tree, rtx, bool *));
298 static rtx altivec_expand_ld_builtin PARAMS ((tree, rtx, bool *));
299 static rtx altivec_expand_st_builtin PARAMS ((tree, rtx, bool *));
300 static rtx altivec_expand_dst_builtin PARAMS ((tree, rtx, bool *));
301 static rtx altivec_expand_abs_builtin PARAMS ((enum insn_code, tree, rtx));
302 static rtx altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx));
303 static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree));
304 static void rs6000_parse_abi_options PARAMS ((void));
305 static void rs6000_parse_alignment_option PARAMS ((void));
306 static void rs6000_parse_tls_size_option PARAMS ((void));
307 static void rs6000_parse_yes_no_option (const char *, const char *, int *);
308 static int first_altivec_reg_to_save PARAMS ((void));
309 static unsigned int compute_vrsave_mask PARAMS ((void));
310 static void is_altivec_return_reg PARAMS ((rtx, void *));
311 static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
312 int easy_vector_constant PARAMS ((rtx, enum machine_mode));
313 static int easy_vector_same PARAMS ((rtx, enum machine_mode));
314 static bool is_ev64_opaque_type PARAMS ((tree));
315 static rtx rs6000_dwarf_register_span PARAMS ((rtx));
316 static rtx rs6000_legitimize_tls_address PARAMS ((rtx, enum tls_model));
317 static rtx rs6000_tls_get_addr PARAMS ((void));
318 static rtx rs6000_got_sym PARAMS ((void));
319 static inline int rs6000_tls_symbol_ref_1 PARAMS ((rtx *, void *));
320 static const char *rs6000_get_some_local_dynamic_name PARAMS ((void));
321 static int rs6000_get_some_local_dynamic_name_1 PARAMS ((rtx *, void *));
322 static rtx rs6000_complex_function_value (enum machine_mode);
323 static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *, enum machine_mode, tree);
325 /* Hash table stuff for keeping track of TOC entries. */
327 struct toc_hash_struct GTY(())
329 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
330 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
331 rtx key;
332 enum machine_mode key_mode;
333 int labelno;
336 static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
/* Default register names.  Order: 32 GPRs, 32 FPRs, mq/lr/ctr/ap,
   8 CR fields, xer, 32 AltiVec VRs, vrsave/vscr, SPE accumulator and
   status register.  Overwritten by alt_reg_names when -mregnames.  */
char rs6000_reg_names[][8] =
{
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
     "mq", "lr", "ctr","ap",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "xer",
      /* AltiVec registers.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6", "7",
      "8",  "9",  "10", "11", "12", "13", "14", "15",
      "16", "17", "18", "19", "20", "21", "22", "23",
      "24", "25", "26", "27", "28", "29", "30", "31",
      "vrsave", "vscr",
      /* SPE registers.  */
      "spe_acc", "spefscr"
};
#ifdef TARGET_REGNAMES
/* Alternate (assembler-style) register names, copied over
   rs6000_reg_names when -mregnames is in effect.  Same layout as
   rs6000_reg_names.  */
static const char alt_reg_names[][8] =
{
   "%r0",   "%r1",  "%r2",  "%r3",  "%r4",  "%r5",  "%r6",  "%r7",
   "%r8",   "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
  "%r16",  "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
  "%r24",  "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
   "%f0",   "%f1",  "%f2",  "%f3",  "%f4",  "%f5",  "%f6",  "%f7",
   "%f8",   "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
  "%f16",  "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
  "%f24",  "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
    "mq",    "lr",  "ctr",   "ap",
  "%cr0",  "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
   "xer",
  /* AltiVec registers.  */
   "%v0",  "%v1",  "%v2",  "%v3",  "%v4",  "%v5",  "%v6", "%v7",
   "%v8",  "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
  "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
  "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
  "vrsave", "vscr",
  /* SPE registers.  */
  "spe_acc", "spefscr"
};
#endif
/* Subtargets without strict-alignment support leave the mask zero.  */
#ifndef MASK_STRICT_ALIGN
#define MASK_STRICT_ALIGN 0
#endif
#ifndef TARGET_PROFILE_KERNEL
#define TARGET_PROFILE_KERNEL 0
#endif

/* The VRSAVE bitmask puts bit %v0 as the most significant bit.  */
#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))

/* Return 1 for a symbol ref for a thread-local storage symbol.  */
#define RS6000_SYMBOL_REF_TLS_P(RTX) \
  (GET_CODE (RTX) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (RTX) != 0)
401 /* Initialize the GCC target structure. */
402 #undef TARGET_ATTRIBUTE_TABLE
403 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
404 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
405 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
407 #undef TARGET_ASM_ALIGNED_DI_OP
408 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
410 /* Default unaligned ops are only provided for ELF. Find the ops needed
411 for non-ELF systems. */
412 #ifndef OBJECT_FORMAT_ELF
413 #if TARGET_XCOFF
414 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
415 64-bit targets. */
416 #undef TARGET_ASM_UNALIGNED_HI_OP
417 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
418 #undef TARGET_ASM_UNALIGNED_SI_OP
419 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
420 #undef TARGET_ASM_UNALIGNED_DI_OP
421 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
422 #else
423 /* For Darwin. */
424 #undef TARGET_ASM_UNALIGNED_HI_OP
425 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
426 #undef TARGET_ASM_UNALIGNED_SI_OP
427 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
428 #endif
429 #endif
431 /* This hook deals with fixups for relocatable code and DI-mode objects
432 in 64-bit code. */
433 #undef TARGET_ASM_INTEGER
434 #define TARGET_ASM_INTEGER rs6000_assemble_integer
436 #ifdef HAVE_GAS_HIDDEN
437 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
438 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
439 #endif
441 #undef TARGET_HAVE_TLS
442 #define TARGET_HAVE_TLS HAVE_AS_TLS
444 #undef TARGET_CANNOT_FORCE_CONST_MEM
445 #define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
447 #undef TARGET_ASM_FUNCTION_PROLOGUE
448 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
449 #undef TARGET_ASM_FUNCTION_EPILOGUE
450 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
452 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
453 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE rs6000_use_dfa_pipeline_interface
454 #undef TARGET_SCHED_VARIABLE_ISSUE
455 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
457 #undef TARGET_SCHED_ISSUE_RATE
458 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
459 #undef TARGET_SCHED_ADJUST_COST
460 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
461 #undef TARGET_SCHED_ADJUST_PRIORITY
462 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
464 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
465 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
467 #undef TARGET_INIT_BUILTINS
468 #define TARGET_INIT_BUILTINS rs6000_init_builtins
470 #undef TARGET_EXPAND_BUILTIN
471 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
473 #if TARGET_MACHO
474 #undef TARGET_BINDS_LOCAL_P
475 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
476 #endif
478 #undef TARGET_ASM_OUTPUT_MI_THUNK
479 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
481 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
482 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
484 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
485 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
487 #undef TARGET_RTX_COSTS
488 #define TARGET_RTX_COSTS rs6000_rtx_costs
489 #undef TARGET_ADDRESS_COST
490 #define TARGET_ADDRESS_COST hook_int_rtx_0
492 #undef TARGET_VECTOR_OPAQUE_P
493 #define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type
495 #undef TARGET_DWARF_REGISTER_SPAN
496 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
498 struct gcc_target targetm = TARGET_INITIALIZER;
500 /* Override command line options. Mostly we process the processor
501 type and sometimes adjust other TARGET_ options. */
503 void
504 rs6000_override_options (default_cpu)
505 const char *default_cpu;
507 size_t i, j;
508 struct rs6000_cpu_select *ptr;
510 /* Simplify the entries below by making a mask for any POWER
511 variant and any PowerPC variant. */
513 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
514 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
515 | MASK_PPC_GFXOPT | MASK_POWERPC64)
516 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
518 static struct ptt
520 const char *const name; /* Canonical processor name. */
521 const enum processor_type processor; /* Processor type enum value. */
522 const int target_enable; /* Target flags to enable. */
523 const int target_disable; /* Target flags to disable. */
524 } const processor_target_table[]
525 = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
526 POWER_MASKS | POWERPC_MASKS},
527 {"power", PROCESSOR_POWER,
528 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
529 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
530 {"power2", PROCESSOR_POWER,
531 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
532 POWERPC_MASKS | MASK_NEW_MNEMONICS},
533 {"power3", PROCESSOR_PPC630,
534 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
535 POWER_MASKS},
536 {"power4", PROCESSOR_POWER4,
537 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
538 POWER_MASKS},
539 {"powerpc", PROCESSOR_POWERPC,
540 MASK_POWERPC | MASK_NEW_MNEMONICS,
541 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
542 {"powerpc64", PROCESSOR_POWERPC64,
543 MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
544 POWER_MASKS | POWERPC_OPT_MASKS},
545 {"rios", PROCESSOR_RIOS1,
546 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
547 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
548 {"rios1", PROCESSOR_RIOS1,
549 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
550 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
551 {"rsc", PROCESSOR_PPC601,
552 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
553 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
554 {"rsc1", PROCESSOR_PPC601,
555 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
556 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
557 {"rios2", PROCESSOR_RIOS2,
558 MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
559 POWERPC_MASKS | MASK_NEW_MNEMONICS},
560 {"rs64a", PROCESSOR_RS64A,
561 MASK_POWERPC | MASK_NEW_MNEMONICS,
562 POWER_MASKS | POWERPC_OPT_MASKS},
563 {"401", PROCESSOR_PPC403,
564 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
565 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
566 {"403", PROCESSOR_PPC403,
567 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
568 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
569 {"405", PROCESSOR_PPC405,
570 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
571 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
572 {"405fp", PROCESSOR_PPC405,
573 MASK_POWERPC | MASK_NEW_MNEMONICS,
574 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
575 {"440", PROCESSOR_PPC440,
576 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
577 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
578 {"440fp", PROCESSOR_PPC440,
579 MASK_POWERPC | MASK_NEW_MNEMONICS,
580 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
581 {"505", PROCESSOR_MPCCORE,
582 MASK_POWERPC | MASK_NEW_MNEMONICS,
583 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
584 {"601", PROCESSOR_PPC601,
585 MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
586 MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
587 {"602", PROCESSOR_PPC603,
588 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
589 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
590 {"603", PROCESSOR_PPC603,
591 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
592 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
593 {"603e", PROCESSOR_PPC603,
594 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
595 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
596 {"ec603e", PROCESSOR_PPC603,
597 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
598 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
599 {"604", PROCESSOR_PPC604,
600 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
601 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
602 {"604e", PROCESSOR_PPC604e,
603 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
604 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
605 {"620", PROCESSOR_PPC620,
606 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
607 POWER_MASKS},
608 {"630", PROCESSOR_PPC630,
609 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
610 POWER_MASKS},
611 {"740", PROCESSOR_PPC750,
612 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
613 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
614 {"750", PROCESSOR_PPC750,
615 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
616 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
617 {"7400", PROCESSOR_PPC7400,
618 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
619 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
620 {"7450", PROCESSOR_PPC7450,
621 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
622 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
623 {"8540", PROCESSOR_PPC8540,
624 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
625 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
626 {"801", PROCESSOR_MPCCORE,
627 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
628 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
629 {"821", PROCESSOR_MPCCORE,
630 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
631 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
632 {"823", PROCESSOR_MPCCORE,
633 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
634 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
635 {"860", PROCESSOR_MPCCORE,
636 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
637 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
638 {"970", PROCESSOR_POWER4,
639 MASK_POWERPC | POWERPC_OPT_MASKS | MASK_NEW_MNEMONICS,
640 POWER_MASKS}};
642 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
644 /* Save current -mmultiple/-mno-multiple status. */
645 int multiple = TARGET_MULTIPLE;
646 /* Save current -mstring/-mno-string status. */
647 int string = TARGET_STRING;
649 /* Identify the processor type. */
650 rs6000_select[0].string = default_cpu;
651 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
653 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
655 ptr = &rs6000_select[i];
656 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
658 for (j = 0; j < ptt_size; j++)
659 if (! strcmp (ptr->string, processor_target_table[j].name))
661 if (ptr->set_tune_p)
662 rs6000_cpu = processor_target_table[j].processor;
664 if (ptr->set_arch_p)
666 target_flags |= processor_target_table[j].target_enable;
667 target_flags &= ~processor_target_table[j].target_disable;
669 break;
672 if (j == ptt_size)
673 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
677 if (TARGET_E500)
678 rs6000_isel = 1;
680 /* If we are optimizing big endian systems for space, use the load/store
681 multiple and string instructions. */
682 if (BYTES_BIG_ENDIAN && optimize_size)
683 target_flags |= MASK_MULTIPLE | MASK_STRING;
685 /* If -mmultiple or -mno-multiple was explicitly used, don't
686 override with the processor default */
687 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
688 target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;
690 /* If -mstring or -mno-string was explicitly used, don't override
691 with the processor default. */
692 if ((target_flags_explicit & MASK_STRING) != 0)
693 target_flags = (target_flags & ~MASK_STRING) | string;
695 /* Don't allow -mmultiple or -mstring on little endian systems
696 unless the cpu is a 750, because the hardware doesn't support the
697 instructions used in little endian mode, and causes an alignment
698 trap. The 750 does not cause an alignment trap (except when the
699 target is unaligned). */
701 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
703 if (TARGET_MULTIPLE)
705 target_flags &= ~MASK_MULTIPLE;
706 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
707 warning ("-mmultiple is not supported on little endian systems");
710 if (TARGET_STRING)
712 target_flags &= ~MASK_STRING;
713 if ((target_flags_explicit & MASK_STRING) != 0)
714 warning ("-mstring is not supported on little endian systems");
718 /* Set debug flags */
719 if (rs6000_debug_name)
721 if (! strcmp (rs6000_debug_name, "all"))
722 rs6000_debug_stack = rs6000_debug_arg = 1;
723 else if (! strcmp (rs6000_debug_name, "stack"))
724 rs6000_debug_stack = 1;
725 else if (! strcmp (rs6000_debug_name, "arg"))
726 rs6000_debug_arg = 1;
727 else
728 error ("unknown -mdebug-%s switch", rs6000_debug_name);
731 if (rs6000_traceback_name)
733 if (! strncmp (rs6000_traceback_name, "full", 4))
734 rs6000_traceback = traceback_full;
735 else if (! strncmp (rs6000_traceback_name, "part", 4))
736 rs6000_traceback = traceback_part;
737 else if (! strncmp (rs6000_traceback_name, "no", 2))
738 rs6000_traceback = traceback_none;
739 else
740 error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
741 rs6000_traceback_name);
744 /* Set size of long double */
745 rs6000_long_double_type_size = 64;
746 if (rs6000_long_double_size_string)
748 char *tail;
749 int size = strtol (rs6000_long_double_size_string, &tail, 10);
750 if (*tail != '\0' || (size != 64 && size != 128))
751 error ("Unknown switch -mlong-double-%s",
752 rs6000_long_double_size_string);
753 else
754 rs6000_long_double_type_size = size;
757 /* Handle -mabi= options. */
758 rs6000_parse_abi_options ();
760 /* Handle -malign-XXXXX option. */
761 rs6000_parse_alignment_option ();
763 /* Handle generic -mFOO=YES/NO options. */
764 rs6000_parse_yes_no_option ("vrsave", rs6000_altivec_vrsave_string,
765 &rs6000_altivec_vrsave);
766 rs6000_parse_yes_no_option ("isel", rs6000_isel_string,
767 &rs6000_isel);
768 rs6000_parse_yes_no_option ("spe", rs6000_spe_string, &rs6000_spe);
769 rs6000_parse_yes_no_option ("float-gprs", rs6000_float_gprs_string,
770 &rs6000_float_gprs);
772 /* Handle -mtls-size option. */
773 rs6000_parse_tls_size_option ();
775 #ifdef SUBTARGET_OVERRIDE_OPTIONS
776 SUBTARGET_OVERRIDE_OPTIONS;
777 #endif
778 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
779 SUBSUBTARGET_OVERRIDE_OPTIONS;
780 #endif
782 if (TARGET_E500)
784 /* The e500 does not have string instructions, and we set
785 MASK_STRING above when optimizing for size. */
786 if ((target_flags & MASK_STRING) != 0)
787 target_flags = target_flags & ~MASK_STRING;
789 /* No SPE means 64-bit long doubles, even if an E500. */
790 if (rs6000_spe_string != 0
791 && !strcmp (rs6000_spe_string, "no"))
792 rs6000_long_double_type_size = 64;
794 else if (rs6000_select[1].string != NULL)
796 /* For the powerpc-eabispe configuration, we set all these by
797 default, so let's unset them if we manually set another
798 CPU that is not the E500. */
799 if (rs6000_abi_string == 0)
800 rs6000_spe_abi = 0;
801 if (rs6000_spe_string == 0)
802 rs6000_spe = 0;
803 if (rs6000_float_gprs_string == 0)
804 rs6000_float_gprs = 0;
805 if (rs6000_isel_string == 0)
806 rs6000_isel = 0;
807 if (rs6000_long_double_size_string == 0)
808 rs6000_long_double_type_size = 64;
811 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
812 using TARGET_OPTIONS to handle a toggle switch, but we're out of
813 bits in target_flags so TARGET_SWITCHES cannot be used.
814 Assumption here is that rs6000_longcall_switch points into the
815 text of the complete option, rather than being a copy, so we can
816 scan back for the presence or absence of the no- modifier. */
817 if (rs6000_longcall_switch)
819 const char *base = rs6000_longcall_switch;
820 while (base[-1] != 'm') base--;
822 if (*rs6000_longcall_switch != '\0')
823 error ("invalid option `%s'", base);
824 rs6000_default_long_calls = (base[0] != 'n');
827 #ifdef TARGET_REGNAMES
828 /* If the user desires alternate register names, copy in the
829 alternate names now. */
830 if (TARGET_REGNAMES)
831 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
832 #endif
834 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
835 If -maix-struct-return or -msvr4-struct-return was explicitly
836 used, don't override with the ABI default. */
837 if ((target_flags_explicit & MASK_AIX_STRUCT_RET) == 0)
839 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
840 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
841 else
842 target_flags |= MASK_AIX_STRUCT_RET;
845 if (TARGET_LONG_DOUBLE_128
846 && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
847 real_format_for_mode[TFmode - QFmode] = &ibm_extended_format;
849 /* Allocate an alias set for register saves & restores from stack. */
850 rs6000_sr_alias_set = new_alias_set ();
852 if (TARGET_TOC)
853 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
855 /* We can only guarantee the availability of DI pseudo-ops when
856 assembling for 64-bit targets. */
857 if (!TARGET_64BIT)
859 targetm.asm_out.aligned_op.di = NULL;
860 targetm.asm_out.unaligned_op.di = NULL;
863 /* Set maximum branch target alignment at two instructions, eight bytes. */
864 align_jumps_max_skip = 8;
865 align_loops_max_skip = 8;
867 /* Arrange to save and restore machine status around nested functions. */
868 init_machine_status = rs6000_init_machine_status;
/* Handle generic options of the form -mfoo=yes/no.
   NAME is the option name.
   VALUE is the option value.
   FLAG is the pointer to the flag where to store a 1 or 0, depending on
   whether the option value is 'yes' or 'no' respectively.  */
static void
rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
{
  /* A null VALUE means the option was not given; leave *FLAG untouched.  */
  if (value == 0)
    return;

  if (strcmp (value, "yes") == 0)
    *flag = 1;
  else if (strcmp (value, "no") == 0)
    *flag = 0;
  else
    error ("unknown -m%s= option specified: '%s'", name, value);
}
889 /* Handle -mabi= options. */
890 static void
891 rs6000_parse_abi_options ()
893 if (rs6000_abi_string == 0)
894 return;
895 else if (! strcmp (rs6000_abi_string, "altivec"))
896 rs6000_altivec_abi = 1;
897 else if (! strcmp (rs6000_abi_string, "no-altivec"))
898 rs6000_altivec_abi = 0;
899 else if (! strcmp (rs6000_abi_string, "spe"))
901 rs6000_spe_abi = 1;
902 if (!TARGET_SPE_ABI)
903 error ("not configured for ABI: '%s'", rs6000_abi_string);
906 else if (! strcmp (rs6000_abi_string, "no-spe"))
907 rs6000_spe_abi = 0;
908 else
909 error ("unknown ABI specified: '%s'", rs6000_abi_string);
912 /* Handle -malign-XXXXXX options. */
913 static void
914 rs6000_parse_alignment_option ()
916 if (rs6000_alignment_string == 0
917 || ! strcmp (rs6000_alignment_string, "power"))
918 rs6000_alignment_flags = MASK_ALIGN_POWER;
919 else if (! strcmp (rs6000_alignment_string, "natural"))
920 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
921 else
922 error ("unknown -malign-XXXXX option specified: '%s'",
923 rs6000_alignment_string);
926 /* Validate and record the size specified with the -mtls-size option. */
928 static void
929 rs6000_parse_tls_size_option ()
931 if (rs6000_tls_size_string == 0)
932 return;
933 else if (strcmp (rs6000_tls_size_string, "16") == 0)
934 rs6000_tls_size = 16;
935 else if (strcmp (rs6000_tls_size_string, "32") == 0)
936 rs6000_tls_size = 32;
937 else if (strcmp (rs6000_tls_size_string, "64") == 0)
938 rs6000_tls_size = 64;
939 else
940 error ("bad value `%s' for -mtls-size switch", rs6000_tls_size_string);
/* Implement OPTIMIZATION_OPTIONS.  Currently performs no
   rs6000-specific adjustments for the given optimization LEVEL or
   the -Os flag SIZE.  */
void
optimization_options (level, size)
     int level ATTRIBUTE_UNUSED;
     int size ATTRIBUTE_UNUSED;
{
}
/* Do anything needed at the start of the asm file.  Emits the standard
   file prologue, then (under -fverbose-asm) an assembler comment
   listing the rs6000/powerpc options in effect.  */

static void
rs6000_file_start ()
{
  size_t i;
  char buffer[80];
  const char *start = buffer;
  struct rs6000_cpu_select *ptr;
  const char *default_cpu = TARGET_CPU_DEFAULT;
  FILE *file = asm_out_file;

  default_file_start ();

#ifdef TARGET_BI_ARCH
  /* On bi-arch targets, if the word size was flipped from the default,
     the configured default CPU name no longer applies.  */
  if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
    default_cpu = 0;
#endif

  if (flag_verbose_asm)
    {
      sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
      rs6000_select[0].string = default_cpu;

      /* START holds the header comment until the first option is
	 printed, after which it becomes the empty string.  */
      for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
	{
	  ptr = &rs6000_select[i];
	  if (ptr->string != (char *)0 && ptr->string[0] != '\0')
	    {
	      fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
	      start = "";
	    }
	}

#ifdef USING_ELFOS_H
      switch (rs6000_sdata)
	{
	case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
	case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
	case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
	case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
	}

      if (rs6000_sdata && g_switch_value)
	{
	  fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
		   g_switch_value);
	  start = "";
	}
#endif

      /* If anything was printed, START is now "" and the line needs to
	 be terminated.  */
      if (*start == '\0')
	putc ('\n', file);
    }
}
1006 /* Return nonzero if this function is known to have a null epilogue. */
1009 direct_return ()
1011 if (reload_completed)
1013 rs6000_stack_t *info = rs6000_stack_info ();
1015 if (info->first_gp_reg_save == 32
1016 && info->first_fp_reg_save == 64
1017 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
1018 && ! info->lr_save_p
1019 && ! info->cr_save_p
1020 && info->vrsave_mask == 0
1021 && ! info->push_p)
1022 return 1;
1025 return 0;
1028 /* Returns 1 always. */
1031 any_operand (op, mode)
1032 rtx op ATTRIBUTE_UNUSED;
1033 enum machine_mode mode ATTRIBUTE_UNUSED;
1035 return 1;
1038 /* Returns 1 if op is the count register. */
1040 count_register_operand (op, mode)
1041 rtx op;
1042 enum machine_mode mode ATTRIBUTE_UNUSED;
1044 if (GET_CODE (op) != REG)
1045 return 0;
1047 if (REGNO (op) == COUNT_REGISTER_REGNUM)
1048 return 1;
1050 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
1051 return 1;
1053 return 0;
1056 /* Returns 1 if op is an altivec register. */
1058 altivec_register_operand (op, mode)
1059 rtx op;
1060 enum machine_mode mode ATTRIBUTE_UNUSED;
1063 return (register_operand (op, mode)
1064 && (GET_CODE (op) != REG
1065 || REGNO (op) > FIRST_PSEUDO_REGISTER
1066 || ALTIVEC_REGNO_P (REGNO (op))));
1070 xer_operand (op, mode)
1071 rtx op;
1072 enum machine_mode mode ATTRIBUTE_UNUSED;
1074 if (GET_CODE (op) != REG)
1075 return 0;
1077 if (XER_REGNO_P (REGNO (op)))
1078 return 1;
1080 return 0;
1083 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
1084 by such constants completes more quickly. */
1087 s8bit_cint_operand (op, mode)
1088 rtx op;
1089 enum machine_mode mode ATTRIBUTE_UNUSED;
1091 return ( GET_CODE (op) == CONST_INT
1092 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
1095 /* Return 1 if OP is a constant that can fit in a D field. */
1098 short_cint_operand (op, mode)
1099 rtx op;
1100 enum machine_mode mode ATTRIBUTE_UNUSED;
1102 return (GET_CODE (op) == CONST_INT
1103 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
1106 /* Similar for an unsigned D field. */
1109 u_short_cint_operand (op, mode)
1110 rtx op;
1111 enum machine_mode mode ATTRIBUTE_UNUSED;
1113 return (GET_CODE (op) == CONST_INT
1114 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
1117 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
1120 non_short_cint_operand (op, mode)
1121 rtx op;
1122 enum machine_mode mode ATTRIBUTE_UNUSED;
1124 return (GET_CODE (op) == CONST_INT
1125 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
1128 /* Returns 1 if OP is a CONST_INT that is a positive value
1129 and an exact power of 2. */
1132 exact_log2_cint_operand (op, mode)
1133 rtx op;
1134 enum machine_mode mode ATTRIBUTE_UNUSED;
1136 return (GET_CODE (op) == CONST_INT
1137 && INTVAL (op) > 0
1138 && exact_log2 (INTVAL (op)) >= 0);
1141 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
1142 ctr, or lr). */
1145 gpc_reg_operand (op, mode)
1146 rtx op;
1147 enum machine_mode mode;
1149 return (register_operand (op, mode)
1150 && (GET_CODE (op) != REG
1151 || (REGNO (op) >= ARG_POINTER_REGNUM
1152 && !XER_REGNO_P (REGNO (op)))
1153 || REGNO (op) < MQ_REGNO));
1156 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1157 CR field. */
1160 cc_reg_operand (op, mode)
1161 rtx op;
1162 enum machine_mode mode;
1164 return (register_operand (op, mode)
1165 && (GET_CODE (op) != REG
1166 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1167 || CR_REGNO_P (REGNO (op))));
1170 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1171 CR field that isn't CR0. */
1174 cc_reg_not_cr0_operand (op, mode)
1175 rtx op;
1176 enum machine_mode mode;
1178 return (register_operand (op, mode)
1179 && (GET_CODE (op) != REG
1180 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1181 || CR_REGNO_NOT_CR0_P (REGNO (op))));
1184 /* Returns 1 if OP is either a constant integer valid for a D-field or
1185 a non-special register. If a register, it must be in the proper
1186 mode unless MODE is VOIDmode. */
1189 reg_or_short_operand (op, mode)
1190 rtx op;
1191 enum machine_mode mode;
1193 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1196 /* Similar, except check if the negation of the constant would be
1197 valid for a D-field. */
1200 reg_or_neg_short_operand (op, mode)
1201 rtx op;
1202 enum machine_mode mode;
1204 if (GET_CODE (op) == CONST_INT)
1205 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
1207 return gpc_reg_operand (op, mode);
1210 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1211 a non-special register. If a register, it must be in the proper
1212 mode unless MODE is VOIDmode. */
1215 reg_or_aligned_short_operand (op, mode)
1216 rtx op;
1217 enum machine_mode mode;
1219 if (gpc_reg_operand (op, mode))
1220 return 1;
1221 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1222 return 1;
1224 return 0;
1228 /* Return 1 if the operand is either a register or an integer whose
1229 high-order 16 bits are zero. */
1232 reg_or_u_short_operand (op, mode)
1233 rtx op;
1234 enum machine_mode mode;
1236 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1239 /* Return 1 is the operand is either a non-special register or ANY
1240 constant integer. */
1243 reg_or_cint_operand (op, mode)
1244 rtx op;
1245 enum machine_mode mode;
1247 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
/* Return 1 if the operand is either a non-special register or ANY
   32-bit signed constant integer.  */

int
reg_or_arith_cint_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
#if HOST_BITS_PER_WIDE_INT != 32
	      /* On a wider host, require the value to sign-extend from
		 32 bits, i.e. lie in [-2^31, 2^31).  */
	      && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
		  < (unsigned HOST_WIDE_INT) 0x100000000ll)
#endif
	      ));
}
/* Return 1 if the operand is either a non-special register or a 32-bit
   signed constant integer valid for 64-bit addition.  */

int
reg_or_add_cint64_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
#if HOST_BITS_PER_WIDE_INT == 32
	      /* NOTE(review): only the upper bound is tested here; the
		 lower bound is presumably implied by the 32-bit host
		 representation -- confirm.  */
	      && INTVAL (op) < 0x7fff8000
#else
	      /* Accept values in [-0x80008000, 0x7fff8000), presumably
		 those splittable into a pair of 16-bit add immediates --
		 confirm against the adddi3 patterns.  */
	      && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
		  < 0x100000000ll)
#endif
	      ));
}
/* Return 1 if the operand is either a non-special register or a 32-bit
   signed constant integer valid for 64-bit subtraction.  */

int
reg_or_sub_cint64_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
#if HOST_BITS_PER_WIDE_INT == 32
	      /* NOTE(review): negating INTVAL overflows (undefined
		 behavior) for the most negative HOST_WIDE_INT --
		 confirm such values cannot reach this predicate.  */
	      && (- INTVAL (op)) < 0x7fff8000
#else
	      /* The negated value must satisfy the same range test as
		 reg_or_add_cint64_operand.  */
	      && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
		  < 0x100000000ll)
#endif
	      ));
}
/* Return 1 if the operand is either a non-special register or ANY
   32-bit unsigned constant integer.  */

int
reg_or_logical_cint_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    {
      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
	{
	  /* A CONST_INT wider than the host word only arises for
	     modes of more than 32 bits.  */
	  if (GET_MODE_BITSIZE (mode) <= 32)
	    abort ();

	  /* A negative value here would have non-zero bits beyond the
	     low 32, so it is not a 32-bit unsigned constant.  */
	  if (INTVAL (op) < 0)
	    return 0;
	}

      /* No bits above bit 31 may be set within MODE.  */
      return ((INTVAL (op) & GET_MODE_MASK (mode)
	       & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
    }
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      /* Integer CONST_DOUBLEs only occur when the mode exceeds the
	 host word, and only DImode is expected here.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	  || mode != DImode)
	abort ();

      return CONST_DOUBLE_HIGH (op) == 0;
    }
  else
    return gpc_reg_operand (op, mode);
}
1339 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1342 got_operand (op, mode)
1343 rtx op;
1344 enum machine_mode mode ATTRIBUTE_UNUSED;
1346 return (GET_CODE (op) == SYMBOL_REF
1347 || GET_CODE (op) == CONST
1348 || GET_CODE (op) == LABEL_REF);
1351 /* Return 1 if the operand is a simple references that can be loaded via
1352 the GOT (labels involving addition aren't allowed). */
1355 got_no_const_operand (op, mode)
1356 rtx op;
1357 enum machine_mode mode ATTRIBUTE_UNUSED;
1359 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
/* Return the number of instructions it takes to form a constant in an
   integer register.  */

static int
num_insns_constant_wide (value)
     HOST_WIDE_INT value;
{
  /* signed constant loadable with {cal|addi} */
  if (CONST_OK_FOR_LETTER_P (value, 'I'))
    return 1;

  /* constant loadable with {cau|addis} */
  else if (CONST_OK_FOR_LETTER_P (value, 'L'))
    return 1;

#if HOST_BITS_PER_WIDE_INT == 64
  else if (TARGET_POWERPC64)
    {
      /* LOW is the value sign-extended from its low 32 bits; HIGH is
	 the remaining bits shifted down by 31 (not 32) so that a value
	 that merely sign-extends from 32 bits yields HIGH of 0 or -1.  */
      HOST_WIDE_INT low  = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
      HOST_WIDE_INT high = value >> 31;

      if (high == 0 || high == -1)
	return 2;

      high >>= 1;

      if (low == 0)
	/* Form HIGH, plus one more insn.  */
	return num_insns_constant_wide (high) + 1;
      else
	/* Form both halves, plus one insn to combine them.  */
	return (num_insns_constant_wide (high)
		+ num_insns_constant_wide (low) + 1);
    }
#endif

  else
    return 2;
}
/* Return the number of instructions it takes to form the integer or
   floating constant OP (of mode MODE) in a register.  */

int
num_insns_constant (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    {
#if HOST_BITS_PER_WIDE_INT == 64
      /* A 64-bit value that does not sign-extend from 32 bits but is a
	 valid mask64_operand can be formed in 2 insns.  */
      if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
	  && mask64_operand (op, mode))
	return 2;
      else
#endif
	return num_insns_constant_wide (INTVAL (op));
    }

  else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
    {
      /* Single float: cost of loading its 32-bit image.  */
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);
      return num_insns_constant_wide ((HOST_WIDE_INT) l);
    }

  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      HOST_WIDE_INT low;
      HOST_WIDE_INT high;
      long l[2];
      REAL_VALUE_TYPE rv;
      int endian = (WORDS_BIG_ENDIAN == 0);

      /* Integer CONST_DOUBLE: take the two words directly; otherwise
	 convert the real value to its target double image.  */
      if (mode == VOIDmode || mode == DImode)
	{
	  high = CONST_DOUBLE_HIGH (op);
	  low = CONST_DOUBLE_LOW (op);
	}
      else
	{
	  REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
	  REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
	  high = l[endian];
	  low = l[1 - endian];
	}

      if (TARGET_32BIT)
	/* Two registers: each word is formed separately.  */
	return (num_insns_constant_wide (low)
		+ num_insns_constant_wide (high));

      else
	{
	  /* Value that sign-extends from the low word.  */
	  if (high == 0 && low >= 0)
	    return num_insns_constant_wide (low);

	  else if (high == -1 && low < 0)
	    return num_insns_constant_wide (low);

	  else if (mask64_operand (op, mode))
	    return 2;

	  else if (low == 0)
	    return num_insns_constant_wide (high) + 1;

	  else
	    return (num_insns_constant_wide (high)
		    + num_insns_constant_wide (low) + 1);
	}
    }

  else
    abort ();
}
/* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
   register with one instruction per word.  We only do this if we can
   safely read CONST_DOUBLE_{LOW,HIGH}.  */

int
easy_fp_constant (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) != CONST_DOUBLE
      || GET_MODE (op) != mode
      || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
    return 0;

  /* Consider all constants with -msoft-float to be easy.  */
  if ((TARGET_SOFT_FLOAT || !TARGET_FPRS)
      && mode != DImode)
    return 1;

  /* If we are using V.4 style PIC, consider all constants to be hard.  */
  if (flag_pic && DEFAULT_ABI == ABI_V4)
    return 0;

#ifdef TARGET_RELOCATABLE
  /* Similarly if we are using -mrelocatable, consider all constants
     to be hard.  */
  if (TARGET_RELOCATABLE)
    return 0;
#endif

  if (mode == TFmode)
    {
      long k[4];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      /* Easy iff each of the four words loads in a single insn.  */
      return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[2]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[3]) == 1);
    }

  else if (mode == DFmode)
    {
      long k[2];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
    }

  else if (mode == SFmode)
    {
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      return num_insns_constant_wide (l) == 1;
    }

  else if (mode == DImode)
    return ((TARGET_POWERPC64
	     && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
	    || (num_insns_constant (op, DImode) <= 2));

  else if (mode == SImode)
    return 1;
  else
    abort ();
}
1553 /* Return nonzero if all elements of a vector have the same value. */
1555 static int
1556 easy_vector_same (op, mode)
1557 rtx op;
1558 enum machine_mode mode ATTRIBUTE_UNUSED;
1560 int units, i, cst;
1562 units = CONST_VECTOR_NUNITS (op);
1564 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1565 for (i = 1; i < units; ++i)
1566 if (INTVAL (CONST_VECTOR_ELT (op, i)) != cst)
1567 break;
1568 if (i == units)
1569 return 1;
1570 return 0;
/* Return 1 if the operand is a CONST_VECTOR that can be put into a
   register without using memory.  */

int
easy_vector_constant (op, mode)
     rtx op;
     enum machine_mode mode;
{
  int cst, cst2;

  if (GET_CODE (op) != CONST_VECTOR
      || (!TARGET_ALTIVEC
	  && !TARGET_SPE))
    return 0;

  /* The zero vector is easy whenever the mode matches the enabled
     vector unit.  */
  if (zero_constant (op, mode)
      && ((TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
	  || (TARGET_SPE && SPE_VECTOR_MODE (mode))))
    return 1;

  if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT)
    return 0;

  if (TARGET_SPE && mode == V1DImode)
    return 0;

  cst  = INTVAL (CONST_VECTOR_ELT (op, 0));
  cst2 = INTVAL (CONST_VECTOR_ELT (op, 1));

  /* Limit SPE vectors to 15 bits signed.  These we can generate with:
       li r0, CONSTANT1
       evmergelo r0, r0, r0
       li r0, CONSTANT2

     I don't know how efficient it would be to allow bigger constants,
     considering we'll have an extra 'ori' for every 'li'.  I doubt 5
     instructions is better than a 64-bit memory load, but I don't
     have the e500 timing specs.  */
  if (TARGET_SPE && mode == V2SImode
      && cst >= -0x7fff && cst <= 0x7fff
      && cst2 >= -0x7fff && cst2 <= 0x7fff)
    return 1;

  if (TARGET_ALTIVEC && EASY_VECTOR_15 (cst, op, mode))
    return 1;

  if (TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode))
    return 1;

  return 0;
}
1625 /* Same as easy_vector_constant but only for EASY_VECTOR_15_ADD_SELF. */
1628 easy_vector_constant_add_self (op, mode)
1629 rtx op;
1630 enum machine_mode mode;
1632 int cst;
1634 if (!easy_vector_constant (op, mode))
1635 return 0;
1637 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1639 return TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode);
/* Return the assembler template for moving the easy vector constant
   OPERANDS[1] into register OPERANDS[0].  May rewrite OPERANDS[1]
   (and OPERANDS[2] for SPE) with the scalar immediates to emit.  */

const char *
output_vec_const_move (operands)
     rtx *operands;
{
  int cst, cst2;
  enum machine_mode mode;
  rtx dest, vec;

  dest = operands[0];
  vec = operands[1];

  /* First two elements of the constant vector; for AltiVec splats all
     elements are equal, for SPE V2SI the two may differ.  */
  cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
  cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
  mode = GET_MODE (dest);

  if (TARGET_ALTIVEC)
    {
      if (zero_constant (vec, mode))
	return "vxor %0,%0,%0";
      else if (EASY_VECTOR_15 (cst, vec, mode))
	{
	  /* Splat the 5-bit signed immediate at the element width.  */
	  operands[1] = GEN_INT (cst);
	  switch (mode)
	    {
	    case V4SImode:
	      return "vspltisw %0,%1";
	    case V8HImode:
	      return "vspltish %0,%1";
	    case V16QImode:
	      return "vspltisb %0,%1";
	    default:
	      abort ();
	    }
	}
      else if (EASY_VECTOR_15_ADD_SELF (cst, vec, mode))
	/* Needs a splat plus an add; let a splitter handle it.  */
	return "#";
      else
	abort ();
    }

  if (TARGET_SPE)
    {
      /* Vector constant 0 is handled as a splitter of V2SI, and in the
	 pattern of V1DI, V4HI, and V2SF.

	 FIXME: We should probably return # and add post reload
	 splitters for these, but this way is so easy ;-).  */
      operands[1] = GEN_INT (cst);
      operands[2] = GEN_INT (cst2);
      if (cst == cst2)
	return "li %0,%1\n\tevmergelo %0,%0,%0";
      else
	return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
    }

  abort ();
}
1701 /* Return 1 if the operand is the constant 0. This works for scalars
1702 as well as vectors. */
1704 zero_constant (op, mode)
1705 rtx op;
1706 enum machine_mode mode;
1708 return op == CONST0_RTX (mode);
1711 /* Return 1 if the operand is 0.0. */
1713 zero_fp_constant (op, mode)
1714 rtx op;
1715 enum machine_mode mode;
1717 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1720 /* Return 1 if the operand is in volatile memory. Note that during
1721 the RTL generation phase, memory_operand does not return TRUE for
1722 volatile memory references. So this function allows us to
1723 recognize volatile references where its safe. */
1726 volatile_mem_operand (op, mode)
1727 rtx op;
1728 enum machine_mode mode;
1730 if (GET_CODE (op) != MEM)
1731 return 0;
1733 if (!MEM_VOLATILE_P (op))
1734 return 0;
1736 if (mode != GET_MODE (op))
1737 return 0;
1739 if (reload_completed)
1740 return memory_operand (op, mode);
1742 if (reload_in_progress)
1743 return strict_memory_address_p (mode, XEXP (op, 0));
1745 return memory_address_p (mode, XEXP (op, 0));
1748 /* Return 1 if the operand is an offsettable memory operand. */
1751 offsettable_mem_operand (op, mode)
1752 rtx op;
1753 enum machine_mode mode;
1755 return ((GET_CODE (op) == MEM)
1756 && offsettable_address_p (reload_completed || reload_in_progress,
1757 mode, XEXP (op, 0)));
1760 /* Return 1 if the operand is either an easy FP constant (see above) or
1761 memory. */
1764 mem_or_easy_const_operand (op, mode)
1765 rtx op;
1766 enum machine_mode mode;
1768 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1771 /* Return 1 if the operand is either a non-special register or an item
1772 that can be used as the operand of a `mode' add insn. */
1775 add_operand (op, mode)
1776 rtx op;
1777 enum machine_mode mode;
1779 if (GET_CODE (op) == CONST_INT)
1780 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1781 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1783 return gpc_reg_operand (op, mode);
1786 /* Return 1 if OP is a constant but not a valid add_operand. */
1789 non_add_cint_operand (op, mode)
1790 rtx op;
1791 enum machine_mode mode ATTRIBUTE_UNUSED;
1793 return (GET_CODE (op) == CONST_INT
1794 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1795 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
/* Return 1 if the operand is a non-special register or a constant that
   can be used as the operand of an OR or XOR insn on the RS/6000.  */

int
logical_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  HOST_WIDE_INT opl, oph;

  if (gpc_reg_operand (op, mode))
    return 1;

  if (GET_CODE (op) == CONST_INT)
    {
      opl = INTVAL (op) & GET_MODE_MASK (mode);

#if HOST_BITS_PER_WIDE_INT <= 32
      /* On a narrow host a negative low word implies set bits beyond
	 the host word for a wider mode -- not representable here.  */
      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
	return 0;
#endif
    }
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	abort ();

      opl = CONST_DOUBLE_LOW (op);
      oph = CONST_DOUBLE_HIGH (op);
      if (oph != 0)
	return 0;
    }
  else
    return 0;

  /* The constant must fit entirely in either the low or the high
     16 bits (the unshifted and shifted immediate forms).  */
  return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
	  || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
}
1837 /* Return 1 if C is a constant that is not a logical operand (as
1838 above), but could be split into one. */
1841 non_logical_cint_operand (op, mode)
1842 rtx op;
1843 enum machine_mode mode;
1845 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1846 && ! logical_operand (op, mode)
1847 && reg_or_logical_cint_operand (op, mode));
/* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
   RS/6000.  It is if there are no more than two 1->0 or 0->1 transitions.
   Reject all ones and all zeros, since these should have been optimized
   away and confuse the making of MB and ME.  */

int
mask_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT c, lsb;

  if (GET_CODE (op) != CONST_INT)
    return 0;

  c = INTVAL (op);

  /* Fail in 64-bit mode if the mask wraps around because the upper
     32-bits of the mask will all be 1s, contrary to GCC's internal view.  */
  if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
    return 0;

  /* We don't change the number of transitions by inverting,
     so make sure we start with the LS bit zero.  */
  if (c & 1)
    c = ~c;

  /* Reject all zeros or all ones.  */
  if (c == 0)
    return 0;

  /* Find the first transition.  */
  lsb = c & -c;

  /* Invert to look for a second transition.  */
  c = ~c;

  /* Erase first transition.  */
  c &= -lsb;

  /* Find the second transition (if any).  */
  lsb = c & -c;

  /* Match if all the bits above are 1's (or c is zero).  */
  return c == -lsb;
}
/* Return 1 for the PowerPC64 rlwinm corner case: a mask whose 32-bit
   image wraps around, i.e. bits 0 and 31 are both set.  */

int
mask_operand_wrap (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT c, lsb;

  if (GET_CODE (op) != CONST_INT)
    return 0;

  c = INTVAL (op);

  /* Only the wrap-around case is handled here (see mask_operand for
     the normal case).  */
  if ((c & 0x80000001) != 0x80000001)
    return 0;

  /* Work on the complement, whose LS bit is known zero; reject the
     all-ones original (complement zero).  */
  c = ~c;
  if (c == 0)
    return 0;

  /* Find the first transition.  */
  lsb = c & -c;
  /* Invert to look for a second transition.  */
  c = ~c;
  /* Erase the first transition.  */
  c &= -lsb;
  /* Find the second transition (if any) and match if all the bits
     above it are 1's.  */
  lsb = c & -c;
  return c == -lsb;
}
/* Return 1 if the operand is a constant that is a PowerPC64 mask.
   It is if there are no more than one 1->0 or 0->1 transitions.
   Reject all zeros, since zero should have been optimized away and
   confuses the making of MB and ME.  */

int
mask64_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (GET_CODE (op) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb;

      c = INTVAL (op);

      /* Reject all zeros.  */
      if (c == 0)
	return 0;

      /* We don't change the number of transitions by inverting,
	 so make sure we start with the LS bit zero.  */
      if (c & 1)
	c = ~c;

      /* Find the transition, and check that all bits above are 1's.  */
      lsb = c & -c;

      /* Match if all the bits above are 1's (or c is zero).  */
      return c == -lsb;
    }
  return 0;
}
/* Like mask64_operand, but allow up to three transitions.  This
   predicate is used by insn patterns that generate two rldicl or
   rldicr machine insns.  */

int
mask64_2_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (GET_CODE (op) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb;

      c = INTVAL (op);

      /* Disallow all zeros.  */
      if (c == 0)
	return 0;

      /* We don't change the number of transitions by inverting,
	 so make sure we start with the LS bit zero.  */
      if (c & 1)
	c = ~c;

      /* Find the first transition.  */
      lsb = c & -c;

      /* Invert to look for a second transition.  */
      c = ~c;

      /* Erase first transition.  */
      c &= -lsb;

      /* Find the second transition.  */
      lsb = c & -c;

      /* Invert to look for a third transition.  */
      c = ~c;

      /* Erase second transition.  */
      c &= -lsb;

      /* Find the third transition (if any).  */
      lsb = c & -c;

      /* Match if all the bits above are 1's (or c is zero).  */
      return c == -lsb;
    }
  return 0;
}
/* Generates shifts and masks for a pair of rldicl or rldicr insns to
   implement ANDing by the mask IN.  The four results are stored in
   OUT[0..3]: rotate count, first mask, counter-rotate count, second
   mask.  Only usable when the host can hold 64-bit masks.  */
void
build_mask64_2_operands (in, out)
     rtx in;
     rtx *out;
{
#if HOST_BITS_PER_WIDE_INT >= 64
  unsigned HOST_WIDE_INT c, lsb, m1, m2;
  int shift;

  if (GET_CODE (in) != CONST_INT)
    abort ();

  c = INTVAL (in);
  if (c & 1)
    {
      /* Assume c initially something like 0x00fff000000fffff.  The idea
	 is to rotate the word so that the middle ^^^^^^ group of zeros
	 is at the MS end and can be cleared with an rldicl mask.  We then
	 rotate back and clear off the MS        ^^ group of zeros with a
	 second rldicl.  */
      c = ~c;			/*   c == 0xff000ffffff00000 */
      lsb = c & -c;		/* lsb == 0x0000000000100000 */
      m1 = -lsb;		/*  m1 == 0xfffffffffff00000 */
      c = ~c;			/*   c == 0x00fff000000fffff */
      c &= -lsb;		/*   c == 0x00fff00000000000 */
      lsb = c & -c;		/* lsb == 0x0000100000000000 */
      c = ~c;			/*   c == 0xff000fffffffffff */
      c &= -lsb;		/*   c == 0xff00000000000000 */
      shift = 0;
      while ((lsb >>= 1) != 0)
	shift++;		/* shift == 44 on exit from loop */
      m1 <<= 64 - shift;	/*  m1 == 0xffffff0000000000 */
      m1 = ~m1;			/*  m1 == 0x000000ffffffffff */
      m2 = ~c;			/*  m2 == 0x00ffffffffffffff */
    }
  else
    {
      /* Assume c initially something like 0xff000f0000000000.  The idea
	 is to rotate the word so that the     ^^^  middle group of zeros
	 is at the LS end and can be cleared with an rldicr mask.  We then
	 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
	 a second rldicr.  */
      lsb = c & -c;		/* lsb == 0x0000010000000000 */
      m2 = -lsb;		/*  m2 == 0xffffff0000000000 */
      c = ~c;			/*   c == 0x00fff0ffffffffff */
      c &= -lsb;		/*   c == 0x00fff00000000000 */
      lsb = c & -c;		/* lsb == 0x0000100000000000 */
      c = ~c;			/*   c == 0xff000fffffffffff */
      c &= -lsb;		/*   c == 0xff00000000000000 */
      shift = 0;
      while ((lsb >>= 1) != 0)
	shift++;		/* shift == 44 on exit from loop */
      m1 = ~c;			/*  m1 == 0x00ffffffffffffff */
      m1 >>= shift;		/*  m1 == 0x0000000000000fff */
      m1 = ~m1;			/*  m1 == 0xfffffffffffff000 */
    }

  /* Note that when we only have two 0->1 and 1->0 transitions, one of the
     masks will be all 1's.  We are guaranteed more than one transition.  */
  out[0] = GEN_INT (64 - shift);
  out[1] = GEN_INT (m1);
  out[2] = GEN_INT (shift);
  out[3] = GEN_INT (m2);
#else
  (void)in;
  (void)out;
  abort ();
#endif
}
2082 /* Return 1 if the operand is either a non-special register or a constant
2083 that can be used as the operand of a PowerPC64 logical AND insn. */
2086 and64_operand (op, mode)
2087 rtx op;
2088 enum machine_mode mode;
2090 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2091 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
2093 return (logical_operand (op, mode) || mask64_operand (op, mode));
2096 /* Like the above, but also match constants that can be implemented
2097 with two rldicl or rldicr insns. */
2100 and64_2_operand (op, mode)
2101 rtx op;
2102 enum machine_mode mode;
2104 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2105 return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
2107 return logical_operand (op, mode) || mask64_2_operand (op, mode);
2110 /* Return 1 if the operand is either a non-special register or a
2111 constant that can be used as the operand of an RS/6000 logical AND insn. */
2114 and_operand (op, mode)
2115 rtx op;
2116 enum machine_mode mode;
2118 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2119 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
2121 return (logical_operand (op, mode) || mask_operand (op, mode));
2124 /* Return 1 if the operand is a general register or memory operand. */
2127 reg_or_mem_operand (op, mode)
2128 rtx op;
2129 enum machine_mode mode;
2131 return (gpc_reg_operand (op, mode)
2132 || memory_operand (op, mode)
2133 || volatile_mem_operand (op, mode));
2136 /* Return 1 if the operand is a general register or memory operand without
2137    pre_inc or pre_dec which produces invalid form of PowerPC lwa
2138    instruction.  */
2141 lwa_operand (op, mode)
2142     rtx op;
2143     enum machine_mode mode;
2145   rtx inner = op;
/* After reload a SUBREG of a hard reg/MEM may remain; look through it
   so the address test below sees the real MEM.  */
2147   if (reload_completed && GET_CODE (inner) == SUBREG)
2148     inner = SUBREG_REG (inner);
/* lwa is a DS-form instruction: no pre-increment/decrement addressing,
   and any constant displacement must be a multiple of 4.  */
2150   return gpc_reg_operand (inner, mode)
2151     || (memory_operand (inner, mode)
2152 	&& GET_CODE (XEXP (inner, 0)) != PRE_INC
2153 	&& GET_CODE (XEXP (inner, 0)) != PRE_DEC
2154 	&& (GET_CODE (XEXP (inner, 0)) != PLUS
2155 	    || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
2156 	    || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
2159 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF.  */
2162 symbol_ref_operand (op, mode)
2163     rtx op;
2164     enum machine_mode mode;
2166   if (mode != VOIDmode && GET_MODE (op) != mode)
2167     return 0;
/* Under the AIX ABI only function symbols are valid here; data symbols
   must go through the TOC.  */
2169   return (GET_CODE (op) == SYMBOL_REF
2170 	  && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op)));
2173 /* Return 1 if the operand, used inside a MEM, is a valid first argument
2174    to CALL.  This is a SYMBOL_REF, a pseudo-register, LR or CTR.  */
2177 call_operand (op, mode)
2178     rtx op;
2179     enum machine_mode mode;
2181   if (mode != VOIDmode && GET_MODE (op) != mode)
2182     return 0;
/* Indirect calls may only go through LR or CTR among the hard regs;
   any pseudo is allowed before reload decides.  */
2184   return (GET_CODE (op) == SYMBOL_REF
2185 	  || (GET_CODE (op) == REG
2186 	      && (REGNO (op) == LINK_REGISTER_REGNUM
2187 		  || REGNO (op) == COUNT_REGISTER_REGNUM
2188 		  || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2191 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
2192    this file.  */
2195 current_file_function_operand (op, mode)
2196     rtx op;
2197     enum machine_mode mode ATTRIBUTE_UNUSED;
/* Either the symbol binds locally, or it is the current function itself
   (a self-recursive call is always "in this file").  */
2199   return (GET_CODE (op) == SYMBOL_REF
2200 	  && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op))
2201 	  && (SYMBOL_REF_LOCAL_P (op)
2202 	      || (op == XEXP (DECL_RTL (current_function_decl), 0))));
2205 /* Return 1 if this operand is a valid input for a move insn.  */
2208 input_operand (op, mode)
2209     rtx op;
2210     enum machine_mode mode;
/* The tests below are ordered from cheapest/most common to rarest;
   each accepted class returns immediately.  */
2212   /* Memory is always valid.  */
2213   if (memory_operand (op, mode))
2214     return 1;
2216   /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary.  */
2217   if (GET_CODE (op) == CONSTANT_P_RTX)
2218     return 1;
2220   /* For floating-point, easy constants are valid.  */
2221   if (GET_MODE_CLASS (mode) == MODE_FLOAT
2222       && CONSTANT_P (op)
2223       && easy_fp_constant (op, mode))
2224     return 1;
2226   /* Allow any integer constant.  */
2227   if (GET_MODE_CLASS (mode) == MODE_INT
2228       && (GET_CODE (op) == CONST_INT
2229 	  || GET_CODE (op) == CONST_DOUBLE))
2230     return 1;
2232   /* Allow easy vector constants.  */
2233   if (GET_CODE (op) == CONST_VECTOR
2234       && easy_vector_constant (op, mode))
2235     return 1;
2237   /* For floating-point or multi-word mode, the only remaining valid type
2238      is a register.  */
2239   if (GET_MODE_CLASS (mode) == MODE_FLOAT
2240       || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2241     return register_operand (op, mode);
2243   /* The only cases left are integral modes one word or smaller (we
2244      do not get called for MODE_CC values).  These can be in any
2245      register.  */
2246   if (register_operand (op, mode))
2247     return 1;
2249   /* A SYMBOL_REF referring to the TOC is valid.  */
2250   if (legitimate_constant_pool_address_p (op))
2251     return 1;
2253   /* A constant pool expression (relative to the TOC) is valid */
2254   if (toc_relative_expr_p (op))
2255     return 1;
2257   /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
2258      to be valid.  */
2259   if (DEFAULT_ABI == ABI_V4
2260       && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
2261       && small_data_operand (op, Pmode))
2262     return 1;
2264   return 0;
2267 /* Return 1 for an operand in small memory on V.4/eabi.  */
2270 small_data_operand (op, mode)
2271     rtx op ATTRIBUTE_UNUSED;
2272     enum machine_mode mode ATTRIBUTE_UNUSED;
/* Only meaningful for ELF targets using the V.4 ABI with an sdata
   model that places data in the small-data sections.  */
2274 #if TARGET_ELF
2275   rtx sym_ref;
2277   if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
2278     return 0;
2280   if (DEFAULT_ABI != ABI_V4)
2281     return 0;
2283   if (GET_CODE (op) == SYMBOL_REF)
2284     sym_ref = op;
/* Otherwise require the canonical (const (plus SYMBOL_REF CONST_INT))
   shape; anything else cannot be a small-data reference.  */
2286   else if (GET_CODE (op) != CONST
2287 	   || GET_CODE (XEXP (op, 0)) != PLUS
2288 	   || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
2289 	   || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
2290     return 0;
2292   else
2294       rtx sum = XEXP (op, 0);
2295       HOST_WIDE_INT summand;
2297       /* We have to be careful here, because it is the referenced address
2298          that must be 32k from _SDA_BASE_, not just the symbol.  */
2299       summand = INTVAL (XEXP (sum, 1));
2300       if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
2301 	return 0;
2303       sym_ref = XEXP (sum, 0);
2306   return SYMBOL_REF_SMALL_P (sym_ref);
2307 #else
2308   return 0;
2309 #endif
2312 /* Return 1 for all valid move insn operand combination involving altivec
2313    vectors in gprs.  */
2316 altivec_in_gprs_p (rtx op0, rtx op1)
/* True when either side of the move is allocated to a general-purpose
   register; such vector moves need special (multi-insn) handling.  */
2318   if (REG_P (op0) && REGNO_REG_CLASS (REGNO (op0)) == GENERAL_REGS)
2319     return 1;
2321   if (REG_P (op1) && REGNO_REG_CLASS (REGNO (op1)) == GENERAL_REGS)
2322     return 1;
2323   return 0;
2327 /* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address.  */
/* Recursively walk OP, recording in *HAVE_SYM whether a constant-pool
   SYMBOL_REF was seen and in *HAVE_TOC whether the TOC label was seen.
   Returns nonzero iff every leaf of OP is a valid component of a
   TOC-relative constant-pool expression.  */
2329 static int
2330 constant_pool_expr_1 (op, have_sym, have_toc)
2331     rtx op;
2332     int *have_sym;
2333     int *have_toc;
2335   switch (GET_CODE(op))
2337     case SYMBOL_REF:
/* TLS symbols are never constant-pool addressable.  */
2338       if (RS6000_SYMBOL_REF_TLS_P (op))
2339 	return 0;
2340       else if (CONSTANT_POOL_ADDRESS_P (op))
2342 	  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2344 	      *have_sym = 1;
2345 	      return 1;
2347 	  else
2348 	    return 0;
2350       else if (! strcmp (XSTR (op, 0), toc_label_name))
2352 	  *have_toc = 1;
2353 	  return 1;
2355       else
2356 	return 0;
2357     case PLUS:
2358     case MINUS:
2359       return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2360 	      && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
2361     case CONST:
2362       return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
2363     case CONST_INT:
2364       return 1;
2365     default:
2366       return 0;
/* Return true iff OP is a valid constant-pool expression that actually
   references a constant-pool symbol (see constant_pool_expr_1).  */
2370 static bool
2371 constant_pool_expr_p (op)
2372     rtx op;
2374   int have_sym = 0;
2375   int have_toc = 0;
2376   return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
/* Return true iff OP is a valid constant-pool expression that references
   the TOC base label (see constant_pool_expr_1).  */
2379 static bool
2380 toc_relative_expr_p (op)
2381     rtx op;
2383   int have_sym = 0;
2384   int have_toc = 0;
2385   return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2388 /* SPE offset addressing is limited to 5-bits worth of double words.  */
2389 #define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
/* Return true iff X has the shape (plus TOC-reg constant-pool-expr),
   i.e. a TOC-relative reference to a constant-pool entry.  Under
   -mminimal-toc any base register is accepted since the TOC pointer
   may have been copied.  */
2391 bool
2392 legitimate_constant_pool_address_p (x)
2393     rtx x;
2395   return (TARGET_TOC
2396 	  && GET_CODE (x) == PLUS
2397 	  && GET_CODE (XEXP (x, 0)) == REG
2398 	  && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
2399 	  && constant_pool_expr_p (XEXP (x, 1)));
/* Return true iff X is a small-data reference usable as an address:
   V.4 ABI, non-PIC, non-TOC only.  */
2402 static bool
2403 legitimate_small_data_p (mode, x)
2404     enum machine_mode mode;
2405     rtx x;
2407   return (DEFAULT_ABI == ABI_V4
2408 	  && !flag_pic && !TARGET_TOC
2409 	  && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
2410 	  && small_data_operand (x, mode));
/* Return true iff X is a valid (reg + const_int) address for MODE.
   The offset must fit in the signed 16-bit D field, with enough
   headroom (EXTRA) that every word of a multi-word access is still
   reachable, and must satisfy per-mode alignment restrictions.  */
2413 static bool
2414 legitimate_offset_address_p (mode, x, strict)
2415     enum machine_mode mode;
2416     rtx x;
2417     int strict;
2419   unsigned HOST_WIDE_INT offset, extra;
2421   if (GET_CODE (x) != PLUS)
2422     return false;
2423   if (GET_CODE (XEXP (x, 0)) != REG)
2424     return false;
2425   if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2426     return false;
2427   if (GET_CODE (XEXP (x, 1)) != CONST_INT)
2428     return false;
2430   offset = INTVAL (XEXP (x, 1));
2431   extra = 0;
2432   switch (mode)
2434     case V16QImode:
2435     case V8HImode:
2436     case V4SFmode:
2437     case V4SImode:
2438       /* AltiVec vector modes.  Only reg+reg addressing is valid here,
2439 	 which leaves the only valid constant offset of zero, which by
2440 	 canonicalization rules is also invalid.  */
2441       return false;
2443     case V4HImode:
2444     case V2SImode:
2445     case V1DImode:
2446     case V2SFmode:
2447       /* SPE vector modes.  */
2448       return SPE_CONST_OFFSET_OK (offset);
2450     case DFmode:
2451     case DImode:
/* 32-bit: accessed as two words, so the offset needs 4 bytes of
   headroom.  64-bit: ld/std are DS-form, offset must be 4-aligned.  */
2452       if (TARGET_32BIT)
2453 	extra = 4;
2454       else if (offset & 3)
2455 	return false;
2456       break;
2458     case TFmode:
2459     case TImode:
2460       if (TARGET_32BIT)
2461 	extra = 12;
2462       else if (offset & 3)
2463 	return false;
2464       else
2465 	extra = 8;
2466       break;
2468     default:
2469       break;
/* First clause guards against unsigned wraparound of offset + extra.  */
2472   return (offset + extra >= offset) && (offset + extra + 0x8000 < 0x10000);
/* Return true iff X is a valid (reg + reg) indexed address; either
   operand may serve as the base with the other as index.  */
2475 static bool
2476 legitimate_indexed_address_p (x, strict)
2477     rtx x;
2478     int strict;
2480   rtx op0, op1;
2482   if (GET_CODE (x) != PLUS)
2483     return false;
2484   op0 = XEXP (x, 0);
2485   op1 = XEXP (x, 1);
2487   if (!REG_P (op0) || !REG_P (op1))
2488     return false;
2490   return ((INT_REG_OK_FOR_BASE_P (op0, strict)
2491 	   && INT_REG_OK_FOR_INDEX_P (op1, strict))
2492 	  || (INT_REG_OK_FOR_BASE_P (op1, strict)
2493 	      && INT_REG_OK_FOR_INDEX_P (op0, strict)));
/* Return true iff X is a plain register valid as a base, i.e. a
   register-indirect address.  */
2496 static inline bool
2497 legitimate_indirect_address_p (x, strict)
2498     rtx x;
2499     int strict;
2501   return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
/* Return true iff X is a valid (lo_sum reg constant) address for MODE.
   Only supported on ELF targets; the constant is the low part of an
   address whose high part was loaded separately.  */
2504 static bool
2505 legitimate_lo_sum_address_p (mode, x, strict)
2506     enum machine_mode mode;
2507     rtx x;
2508     int strict;
2510   if (GET_CODE (x) != LO_SUM)
2511     return false;
2512   if (GET_CODE (XEXP (x, 0)) != REG)
2513     return false;
2514   if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2515     return false;
2516   x = XEXP (x, 1);
2518   if (TARGET_ELF)
/* LO_SUM addressing conflicts with PIC (non-AIX) and with TOC-based
   addressing; vector and wide modes cannot use it either.  */
2520       if (DEFAULT_ABI != ABI_AIX && flag_pic)
2521 	return false;
2522       if (TARGET_TOC)
2523 	return false;
2524       if (GET_MODE_NUNITS (mode) != 1)
2525 	return false;
2526       if (GET_MODE_BITSIZE (mode) > 32
2527 	  && !(TARGET_HARD_FLOAT && TARGET_FPRS && mode == DFmode))
2528 	return false;
2530       return CONSTANT_P (x);
2533   return false;
2537 /* Try machine-dependent ways of modifying an illegitimate address
2538    to be legitimate.  If we find one, return the new, valid address.
2539    This is used from only one place: `memory_address' in explow.c.
2541    OLDX is the address as it was before break_out_memory_refs was
2542    called.  In some cases it is useful to look at this to decide what
2543    needs to be done.
2545    MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2547    It is always safe for this function to do nothing.  It exists to
2548    recognize opportunities to optimize the output.
2550    On RS/6000, first check for the sum of a register with a constant
2551    integer that is out of range.  If so, generate code to add the
2552    constant with the low-order 16 bits masked to the register and force
2553    this result into another register (this can be done with `cau').
2554    Then generate an address of REG+(CONST&0xffff), allowing for the
2555    possibility of bit 16 being a one.
2557    Then check for the sum of a register and something not constant, try to
2558    load the other things into a register and return the sum.  */
2561 rs6000_legitimize_address (x, oldx, mode)
2562     rtx x;
2563     rtx oldx ATTRIBUTE_UNUSED;
2564     enum machine_mode mode;
/* TLS symbols always need their own sequence, regardless of form.  */
2566   if (GET_CODE (x) == SYMBOL_REF)
2568       enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
2569       if (model != 0)
2570 	return rs6000_legitimize_tls_address (x, model);
/* reg + out-of-range constant: split into a sign-adjusted high part
   added into a register, plus a 16-bit low part left in the address.  */
2573   if (GET_CODE (x) == PLUS
2574       && GET_CODE (XEXP (x, 0)) == REG
2575       && GET_CODE (XEXP (x, 1)) == CONST_INT
2576       && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
2578       HOST_WIDE_INT high_int, low_int;
2579       rtx sum;
2580       low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2581       high_int = INTVAL (XEXP (x, 1)) - low_int;
2582       sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
2583 					 GEN_INT (high_int)), 0);
2584       return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
/* reg + non-constant: force the addend into a register so the result
   is a valid indexed address — but only for modes that allow indexed
   addressing (see rs6000_legitimate_address for the same conditions).  */
2586   else if (GET_CODE (x) == PLUS
2587 	   && GET_CODE (XEXP (x, 0)) == REG
2588 	   && GET_CODE (XEXP (x, 1)) != CONST_INT
2589 	   && GET_MODE_NUNITS (mode) == 1
2590 	   && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2591 	       || TARGET_POWERPC64
2592 	       || (mode != DFmode && mode != TFmode))
2593 	   && (TARGET_POWERPC64 || mode != DImode)
2594 	   && mode != TImode)
2596       return gen_rtx_PLUS (Pmode, XEXP (x, 0),
2597 			   force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
2599   else if (ALTIVEC_VECTOR_MODE (mode))
2601       rtx reg;
2603       /* Make sure both operands are registers.  */
2604       if (GET_CODE (x) == PLUS)
2605 	return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
2606 			     force_reg (Pmode, XEXP (x, 1)));
2608       reg = force_reg (Pmode, x);
2609       return reg;
2611   else if (SPE_VECTOR_MODE (mode))
2613       /* We accept [reg + reg] and [reg + OFFSET].  */
2615       if (GET_CODE (x) == PLUS)
2617 	  rtx op1 = XEXP (x, 0);
2618 	  rtx op2 = XEXP (x, 1);
2620 	  op1 = force_reg (Pmode, op1);
2622 	  if (GET_CODE (op2) != REG
2623 	      && (GET_CODE (op2) != CONST_INT
2624 		  || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
2625 	    op2 = force_reg (Pmode, op2);
2627 	  return gen_rtx_PLUS (Pmode, op1, op2);
2630       return force_reg (Pmode, x);
/* 32-bit ELF without TOC, non-PIC: synthesize the address as
   lis (gen_elf_high) + lo_sum.  */
2632   else if (TARGET_ELF
2633 	   && TARGET_32BIT
2634 	   && TARGET_NO_TOC
2635 	   && ! flag_pic
2636 	   && GET_CODE (x) != CONST_INT
2637 	   && GET_CODE (x) != CONST_DOUBLE
2638 	   && CONSTANT_P (x)
2639 	   && GET_MODE_NUNITS (mode) == 1
2640 	   && (GET_MODE_BITSIZE (mode) <= 32
2641 	       || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
2643       rtx reg = gen_reg_rtx (Pmode);
2644       emit_insn (gen_elf_high (reg, (x)));
2645       return gen_rtx_LO_SUM (Pmode, reg, (x));
/* Same high/lo_sum trick for Darwin without TOC or PIC.  */
2647   else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
2648 	   && ! flag_pic
2649 #if TARGET_MACHO
2650 	   && ! MACHO_DYNAMIC_NO_PIC_P
2651 #endif
2652 	   && GET_CODE (x) != CONST_INT
2653 	   && GET_CODE (x) != CONST_DOUBLE
2654 	   && CONSTANT_P (x)
2655 	   && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
2656 	   && mode != DImode
2657 	   && mode != TImode)
2659       rtx reg = gen_reg_rtx (Pmode);
2660       emit_insn (gen_macho_high (reg, (x)));
2661       return gen_rtx_LO_SUM (Pmode, reg, (x));
2663   else if (TARGET_TOC
2664 	   && constant_pool_expr_p (x)
2665 	   && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
2667       return create_TOC_reference (x);
2669   else
2670     return NULL_RTX;
2673 /* Construct the SYMBOL_REF for the tls_get_addr function.  */
/* Cached across calls; GTY-marked so the GC does not collect it.  */
2675 static GTY(()) rtx rs6000_tls_symbol;
2676 static rtx
2677 rs6000_tls_get_addr ()
2679   if (!rs6000_tls_symbol)
2680     rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
2682   return rs6000_tls_symbol;
2685 /* Construct the SYMBOL_REF for TLS GOT references. */
2687 static GTY(()) rtx rs6000_got_symbol;
2688 static rtx
2689 rs6000_got_sym ()
2691 if (!rs6000_got_symbol)
2693 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2694 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
2695 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
2698 return rs6000_got_symbol;
2701 /* ADDR contains a thread-local SYMBOL_REF.  Generate code to compute
2702    this (thread-local) address.  */
/* Dispatches on the TLS model: local-exec uses the thread pointer
   (r13 on 64-bit, r2 on 32-bit) directly; the remaining models go
   through the GOT/TOC and, for the dynamic models, a call to
   __tls_get_addr.  Returns the register holding the final address.  */
2704 static rtx
2705 rs6000_legitimize_tls_address (addr, model)
2706     rtx addr;
2707     enum tls_model model;
2709   rtx dest, insn;
2711   dest = gen_reg_rtx (Pmode);
/* Local-exec with a 16-bit offset: a single tprel add.  */
2712   if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
2714       rtx tlsreg;
2716       if (TARGET_64BIT)
2718 	  tlsreg = gen_rtx_REG (Pmode, 13);
2719 	  insn = gen_tls_tprel_64 (dest, tlsreg, addr);
2721       else
2723 	  tlsreg = gen_rtx_REG (Pmode, 2);
2724 	  insn = gen_tls_tprel_32 (dest, tlsreg, addr);
2726       emit_insn (insn);
/* Local-exec with a 32-bit offset: high-adjusted then low part.  */
2728   else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
2730       rtx tlsreg, tmp;
2732       tmp = gen_reg_rtx (Pmode);
2733       if (TARGET_64BIT)
2735 	  tlsreg = gen_rtx_REG (Pmode, 13);
2736 	  insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
2738       else
2740 	  tlsreg = gen_rtx_REG (Pmode, 2);
2741 	  insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
2743       emit_insn (insn);
2744       if (TARGET_64BIT)
2745 	insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
2746       else
2747 	insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
2748       emit_insn (insn);
2750   else
2752       rtx r3, got, tga, tmp1, tmp2, eqv;
/* First materialize the GOT (32-bit) or TOC (64-bit) pointer.  */
2754       if (TARGET_64BIT)
2755 	got = gen_rtx_REG (Pmode, TOC_REGISTER);
2756       else
2758 	  if (flag_pic == 1)
2759 	    got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
2760 	  else
2762 	      rtx gsym = rs6000_got_sym ();
2763 	      got = gen_reg_rtx (Pmode);
2764 	      if (flag_pic == 0)
2765 		rs6000_emit_move (got, gsym, Pmode);
2766 	      else
/* PIC without a GOT register: compute the GOT address via a
   PC-relative label load; the whole sequence is bracketed with
   REG_LIBCALL/REG_RETVAL notes so it can be treated as one unit.  */
2768 		  char buf[30];
2769 		  static int tls_got_labelno = 0;
2770 		  rtx tempLR, lab, tmp3, mem;
2771 		  rtx first, last;
2773 		  ASM_GENERATE_INTERNAL_LABEL (buf, "LTLS", tls_got_labelno++);
2774 		  lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
2775 		  tempLR = gen_reg_rtx (Pmode);
2776 		  tmp1 = gen_reg_rtx (Pmode);
2777 		  tmp2 = gen_reg_rtx (Pmode);
2778 		  tmp3 = gen_reg_rtx (Pmode);
2779 		  mem = gen_rtx_MEM (Pmode, tmp1);
2780 		  RTX_UNCHANGING_P (mem) = 1;
2782 		  first = emit_insn (gen_load_toc_v4_PIC_1b (tempLR, lab,
2783 							     gsym));
2784 		  emit_move_insn (tmp1, tempLR);
2785 		  emit_move_insn (tmp2, mem);
2786 		  emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
2787 		  last = emit_move_insn (got, tmp3);
2788 		  REG_NOTES (last) = gen_rtx_EXPR_LIST (REG_EQUAL, gsym,
2789 							REG_NOTES (last));
2790 		  REG_NOTES (first) = gen_rtx_INSN_LIST (REG_LIBCALL, last,
2791 							 REG_NOTES (first));
2792 		  REG_NOTES (last) = gen_rtx_INSN_LIST (REG_RETVAL, first,
2793 							REG_NOTES (last));
/* Global-dynamic: call __tls_get_addr with the GD argument in r3.  */
2798       if (model == TLS_MODEL_GLOBAL_DYNAMIC)
2800 	  r3 = gen_rtx_REG (Pmode, 3);
2801 	  if (TARGET_64BIT)
2802 	    insn = gen_tls_gd_64 (r3, got, addr);
2803 	  else
2804 	    insn = gen_tls_gd_32 (r3, got, addr);
2805 	  start_sequence ();
2806 	  emit_insn (insn);
2807 	  tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
2808 	  insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
2809 	  insn = emit_call_insn (insn);
2810 	  CONST_OR_PURE_CALL_P (insn) = 1;
2811 	  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
2812 	  insn = get_insns ();
2813 	  end_sequence ();
2814 	  emit_libcall_block (insn, dest, r3, addr);
/* Local-dynamic: one __tls_get_addr call for the module base, then
   add the per-symbol dtprel offset (16-bit, 32-bit, or via GOT).  */
2816       else if (model == TLS_MODEL_LOCAL_DYNAMIC)
2818 	  r3 = gen_rtx_REG (Pmode, 3);
2819 	  if (TARGET_64BIT)
2820 	    insn = gen_tls_ld_64 (r3, got);
2821 	  else
2822 	    insn = gen_tls_ld_32 (r3, got);
2823 	  start_sequence ();
2824 	  emit_insn (insn);
2825 	  tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
2826 	  insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
2827 	  insn = emit_call_insn (insn);
2828 	  CONST_OR_PURE_CALL_P (insn) = 1;
2829 	  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
2830 	  insn = get_insns ();
2831 	  end_sequence ();
2832 	  tmp1 = gen_reg_rtx (Pmode);
2833 	  eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
2834 				UNSPEC_TLSLD);
2835 	  emit_libcall_block (insn, tmp1, r3, eqv);
2836 	  if (rs6000_tls_size == 16)
2838 	      if (TARGET_64BIT)
2839 		insn = gen_tls_dtprel_64 (dest, tmp1, addr);
2840 	      else
2841 		insn = gen_tls_dtprel_32 (dest, tmp1, addr);
2843 	  else if (rs6000_tls_size == 32)
2845 	      tmp2 = gen_reg_rtx (Pmode);
2846 	      if (TARGET_64BIT)
2847 		insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
2848 	      else
2849 		insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
2850 	      emit_insn (insn);
2851 	      if (TARGET_64BIT)
2852 		insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
2853 	      else
2854 		insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
2856 	  else
2858 	      tmp2 = gen_reg_rtx (Pmode);
2859 	      if (TARGET_64BIT)
2860 		insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
2861 	      else
2862 		insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
2863 	      emit_insn (insn);
2864 	      insn = gen_rtx_SET (Pmode, dest,
2865 				  gen_rtx_PLUS (Pmode, tmp2, tmp1));
2867 	  emit_insn (insn);
2869       else
2871 	  /* IE, or 64 bit offset LE.  */
2872 	  tmp2 = gen_reg_rtx (Pmode);
2873 	  if (TARGET_64BIT)
2874 	    insn = gen_tls_got_tprel_64 (tmp2, got, addr);
2875 	  else
2876 	    insn = gen_tls_got_tprel_32 (tmp2, got, addr);
2877 	  emit_insn (insn);
2878 	  if (TARGET_64BIT)
2879 	    insn = gen_tls_tls_64 (dest, tmp2, addr);
2880 	  else
2881 	    insn = gen_tls_tls_32 (dest, tmp2, addr);
2882 	  emit_insn (insn);
2886   return dest;
2889 /* Return 1 if X is a SYMBOL_REF for a TLS symbol.  This is used in
2890    instruction definitions.  */
2893 rs6000_tls_symbol_ref (x, mode)
2894     rtx x;
2895     enum machine_mode mode ATTRIBUTE_UNUSED;
2897   return RS6000_SYMBOL_REF_TLS_P (x);
2900 /* Return 1 if X contains a thread-local symbol.  */
/* Walks every sub-rtx of X with rs6000_tls_symbol_ref_1.  */
2902 bool
2903 rs6000_tls_referenced_p (x)
2904     rtx x;
2906   return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
2909 /* Return 1 if *X is a thread-local symbol.  This is the same as
2910    rs6000_tls_symbol_ref except for the type of the unused argument.  */
/* Signature matches the for_each_rtx callback contract.  */
2912 static inline int
2913 rs6000_tls_symbol_ref_1 (x, data)
2914     rtx *x;
2915     void *data ATTRIBUTE_UNUSED;
2917   return RS6000_SYMBOL_REF_TLS_P (*x);
2920 /* The convention appears to be to define this wherever it is used.
2921    With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
2922    is now used here.  */
2923 #ifndef REG_MODE_OK_FOR_BASE_P
2924 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
2925 #endif
2927 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS.  Returns a value to
2928    replace the input X, or the original X if no replacement is called for.
2929    The output parameter *WIN is 1 if the calling macro should goto WIN,
2930    0 if it should not.
2932    For RS/6000, we wish to handle large displacements off a base
2933    register by splitting the addend across an addi/addis and the mem insn.
2934    This cuts number of extra insns needed from 3 to 1.
2936    On Darwin, we use this to generate code for floating point constants.
2937    A movsf_low is generated so we wind up with 2 instructions rather than 3.
2938    The Darwin code is inside #if TARGET_MACHO because only then is
2939    machopic_function_base_name() defined.  */
2941 rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win)
2942     rtx x;
2943     enum machine_mode mode;
2944     int opnum;
2945     int type;
2946     int ind_levels ATTRIBUTE_UNUSED;
2947     int *win;
2949   /* We must recognize output that we have already generated ourselves.  */
2950   if (GET_CODE (x) == PLUS
2951       && GET_CODE (XEXP (x, 0)) == PLUS
2952       && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
2953       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2954       && GET_CODE (XEXP (x, 1)) == CONST_INT)
2956       push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2957 		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
2958 		   opnum, (enum reload_type)type);
2959       *win = 1;
2960       return x;
2963 #if TARGET_MACHO
2964   if (DEFAULT_ABI == ABI_DARWIN && flag_pic
2965       && GET_CODE (x) == LO_SUM
2966       && GET_CODE (XEXP (x, 0)) == PLUS
2967       && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
2968       && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
2969       && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
2970       && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
2971       && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
2972       && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
2973       && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
2975       /* Result of previous invocation of this function on Darwin
2976 	 floating point constant.  */
2977       push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2978 		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
2979 		   opnum, (enum reload_type)type);
2980       *win = 1;
2981       return x;
2983 #endif
/* The main case: reg + large displacement.  Vector modes are excluded
   because their addressing forms cannot take the split displacement.  */
2984   if (GET_CODE (x) == PLUS
2985       && GET_CODE (XEXP (x, 0)) == REG
2986       && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2987       && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
2988       && GET_CODE (XEXP (x, 1)) == CONST_INT
2989       && !SPE_VECTOR_MODE (mode)
2990       && !ALTIVEC_VECTOR_MODE (mode))
2992       HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
2993       HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
2994       HOST_WIDE_INT high
2995 	= (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
2997       /* Check for 32-bit overflow.  */
2998       if (high + low != val)
3000 	  *win = 0;
3001 	  return x;
3004       /* Reload the high part into a base reg; leave the low part
3005 	 in the mem directly.  */
3007       x = gen_rtx_PLUS (GET_MODE (x),
3008 			gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
3009 				      GEN_INT (high)),
3010 			GEN_INT (low));
3012       push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3013 		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3014 		   opnum, (enum reload_type)type);
3015       *win = 1;
3016       return x;
3018 #if TARGET_MACHO
3019   if (GET_CODE (x) == SYMBOL_REF
3020       && DEFAULT_ABI == ABI_DARWIN
3021       && !ALTIVEC_VECTOR_MODE (mode)
3022       && flag_pic)
3024       /* Darwin load of floating point constant.  */
3025       rtx offset = gen_rtx (CONST, Pmode,
3026 		    gen_rtx (MINUS, Pmode, x,
3027 		    gen_rtx (SYMBOL_REF, Pmode,
3028 			machopic_function_base_name ())));
3029       x = gen_rtx (LO_SUM, GET_MODE (x),
3030 	      gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
3031 		  gen_rtx (HIGH, Pmode, offset)), offset);
3032       push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3033 		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3034 		   opnum, (enum reload_type)type);
3035       *win = 1;
3036       return x;
3038   if (GET_CODE (x) == SYMBOL_REF
3039       && DEFAULT_ABI == ABI_DARWIN
3040       && !ALTIVEC_VECTOR_MODE (mode)
3041       && MACHO_DYNAMIC_NO_PIC_P)
3043       /* Darwin load of floating point constant.  */
3044       x = gen_rtx (LO_SUM, GET_MODE (x),
3045 	      gen_rtx (HIGH, Pmode, x), x);
3046       push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3047 		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3048 		   opnum, (enum reload_type)type);
3049       *win = 1;
3050       return x;
3052 #endif
/* A constant-pool expression under TOC needs no reload at all; just
   rewrite it as a TOC reference.  */
3053   if (TARGET_TOC
3054       && constant_pool_expr_p (x)
3055       && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
3057       (x) = create_TOC_reference (x);
3058       *win = 1;
3059       return x;
3061   *win = 0;
3062   return x;
3065 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
3066    that is a valid memory address for an instruction.
3067    The MODE argument is the machine mode for the MEM expression
3068    that wants to use this address.
3070    On the RS/6000, there are four valid addresses: a SYMBOL_REF that
3071    refers to a constant pool entry of an address (or the sum of it
3072    plus a constant), a short (16-bit signed) constant plus a register,
3073    the sum of two registers, or a register indirect, possibly with an
3074    auto-increment.  For DFmode and DImode with a constant plus register,
3075    we must ensure that both words are addressable or PowerPC64 with offset
3076    word aligned.
3078    For modes spanning multiple registers (DFmode in 32-bit GPRs,
3079    32-bit DImode, TImode), indexed addressing cannot be used because
3080    adjacent memory cells are accessed by adding word-sized offsets
3081    during assembly output.  */
3083 rs6000_legitimate_address (mode, x, reg_ok_strict)
3084     enum machine_mode mode;
3085     rtx x;
3086     int reg_ok_strict;
/* Each legitimate_*_p helper checks one address form; accept the
   first that matches.  */
3088   if (RS6000_SYMBOL_REF_TLS_P (x))
3089     return 0;
3090   if (legitimate_indirect_address_p (x, reg_ok_strict))
3091     return 1;
3092   if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
3093       && !ALTIVEC_VECTOR_MODE (mode)
3094       && !SPE_VECTOR_MODE (mode)
3095       && TARGET_UPDATE
3096       && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
3097     return 1;
3098   if (legitimate_small_data_p (mode, x))
3099     return 1;
3100   if (legitimate_constant_pool_address_p (x))
3101     return 1;
3102   /* If not REG_OK_STRICT (before reload) let pass any stack offset.  */
3103   if (! reg_ok_strict
3104       && GET_CODE (x) == PLUS
3105       && GET_CODE (XEXP (x, 0)) == REG
3106       && XEXP (x, 0) == virtual_stack_vars_rtx
3107       && GET_CODE (XEXP (x, 1)) == CONST_INT)
3108     return 1;
3109   if (legitimate_offset_address_p (mode, x, reg_ok_strict))
3110     return 1;
/* Indexed addressing is excluded for the multi-register modes
   described in the header comment above.  */
3111   if (mode != TImode
3112       && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3113 	  || TARGET_POWERPC64
3114 	  || (mode != DFmode && mode != TFmode))
3115       && (TARGET_POWERPC64 || mode != DImode)
3116       && legitimate_indexed_address_p (x, reg_ok_strict))
3117     return 1;
3118   if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
3119     return 1;
3120   return 0;
3123 /* Go to LABEL if ADDR (a legitimate address expression)
3124    has an effect that depends on the machine mode it is used for.
3126    On the RS/6000 this is true of all integral offsets (since AltiVec
3127    modes don't allow them) or is a pre-increment or decrement.
3129    ??? Except that due to conceptual problems in offsettable_address_p
3130    we can't really report the problems of integral offsets.  So leave
3131    this assuming that the adjustable offset must be valid for the
3132    sub-words of a TFmode operand, which is what we had before.  */
3134 bool
3135 rs6000_mode_dependent_address (addr)
3136     rtx addr;
3138   switch (GET_CODE (addr))
3140     case PLUS:
3141       if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
/* 12 = largest sub-word offset of a TFmode access; dependent if the
   adjusted offset would leave the signed 16-bit range.  */
3143 	  unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
3144 	  return val + 12 + 0x8000 >= 0x10000;
3146       break;
3148     case LO_SUM:
3149       return true;
3151     case PRE_INC:
3152     case PRE_DEC:
3153       return TARGET_UPDATE;
3155     default:
3156       break;
3159   return false;
3162 /* Try to output insns to set TARGET equal to the constant C if it can
3163    be done in less than N insns.  Do all computations in MODE.
3164    Returns the place where the output has been placed if it can be
3165    done and the insns have been emitted.  If it would take more than N
3166    insns, zero is returned and no insns are emitted.  */
3169 rs6000_emit_set_const (dest, mode, source, n)
3170     rtx dest, source;
3171     enum machine_mode mode;
3172     int n ATTRIBUTE_UNUSED;
3174   rtx result, insn, set;
3175   HOST_WIDE_INT c0, c1;
/* QI/HI constants always fit a single move.  */
3177   if (mode == QImode || mode == HImode)
3179       if (dest == NULL)
3180 	dest = gen_reg_rtx (mode);
3181       emit_insn (gen_rtx_SET (VOIDmode, dest, source));
3182       return dest;
/* SImode: build the value as (high 16 bits) | (low 16 bits).  */
3184   else if (mode == SImode)
3186       result = no_new_pseudos ? dest : gen_reg_rtx (SImode);
3188       emit_insn (gen_rtx_SET (VOIDmode, result,
3189 			      GEN_INT (INTVAL (source)
3190 				       & (~ (HOST_WIDE_INT) 0xffff))));
3191       emit_insn (gen_rtx_SET (VOIDmode, dest,
3192 			      gen_rtx_IOR (SImode, result,
3193 					   GEN_INT (INTVAL (source) & 0xffff))));
3194       result = dest;
/* DImode: split SOURCE into two HOST_WIDE_INT halves and defer to
   rs6000_emit_set_long_const.  */
3196   else if (mode == DImode)
3198       if (GET_CODE (source) == CONST_INT)
3200 	  c0 = INTVAL (source);
3201 	  c1 = -(c0 < 0);
3203       else if (GET_CODE (source) == CONST_DOUBLE)
3205 #if HOST_BITS_PER_WIDE_INT >= 64
3206 	  c0 = CONST_DOUBLE_LOW (source);
3207 	  c1 = -(c0 < 0);
3208 #else
3209 	  c0 = CONST_DOUBLE_LOW (source);
3210 	  c1 = CONST_DOUBLE_HIGH (source);
3211 #endif
3213       else
3214 	abort ();
3216       result = rs6000_emit_set_long_const (dest, c0, c1);
3218   else
3219     abort ();
/* Attach a REG_EQUAL note so later passes know the final value.  */
3221   insn = get_last_insn ();
3222   set = single_set (insn);
3223   if (! CONSTANT_P (SET_SRC (set)))
3224     set_unique_reg_note (insn, REG_EQUAL, source);
3226   return result;
3229 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
3230 fall back to a straight forward decomposition. We do this to avoid
3231 exponential run times encountered when looking for longer sequences
3232 with rs6000_emit_set_const. */
3233 static rtx
3234 rs6000_emit_set_long_const (dest, c1, c2)
3235 rtx dest;
3236 HOST_WIDE_INT c1, c2;
3238 if (!TARGET_POWERPC64)
3240 rtx operand1, operand2;
3242 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
3243 DImode);
3244 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
3245 DImode);
3246 emit_move_insn (operand1, GEN_INT (c1));
3247 emit_move_insn (operand2, GEN_INT (c2));
3249 else
3251 HOST_WIDE_INT ud1, ud2, ud3, ud4;
3253 ud1 = c1 & 0xffff;
3254 ud2 = (c1 & 0xffff0000) >> 16;
3255 #if HOST_BITS_PER_WIDE_INT >= 64
3256 c2 = c1 >> 32;
3257 #endif
3258 ud3 = c2 & 0xffff;
3259 ud4 = (c2 & 0xffff0000) >> 16;
3261 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
3262 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
3264 if (ud1 & 0x8000)
3265 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
3266 else
3267 emit_move_insn (dest, GEN_INT (ud1));
3270 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
3271 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
3273 if (ud2 & 0x8000)
3274 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
3275 - 0x80000000));
3276 else
3277 emit_move_insn (dest, GEN_INT (ud2 << 16));
3278 if (ud1 != 0)
3279 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3281 else if ((ud4 == 0xffff && (ud3 & 0x8000))
3282 || (ud4 == 0 && ! (ud3 & 0x8000)))
3284 if (ud3 & 0x8000)
3285 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
3286 - 0x80000000));
3287 else
3288 emit_move_insn (dest, GEN_INT (ud3 << 16));
3290 if (ud2 != 0)
3291 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
3292 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
3293 if (ud1 != 0)
3294 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3296 else
3298 if (ud4 & 0x8000)
3299 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
3300 - 0x80000000));
3301 else
3302 emit_move_insn (dest, GEN_INT (ud4 << 16));
3304 if (ud3 != 0)
3305 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
3307 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
3308 if (ud2 != 0)
3309 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
3310 GEN_INT (ud2 << 16)));
3311 if (ud1 != 0)
3312 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3315 return dest;
3318 /* Emit a move from SOURCE to DEST in mode MODE. */
3319 void
3320 rs6000_emit_move (dest, source, mode)
3321 rtx dest;
3322 rtx source;
3323 enum machine_mode mode;
3325 rtx operands[2];
3326 operands[0] = dest;
3327 operands[1] = source;
3329 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
3330 if (GET_CODE (operands[1]) == CONST_DOUBLE
3331 && ! FLOAT_MODE_P (mode)
3332 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3334 /* FIXME. This should never happen. */
3335 /* Since it seems that it does, do the safe thing and convert
3336 to a CONST_INT. */
3337 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
3339 if (GET_CODE (operands[1]) == CONST_DOUBLE
3340 && ! FLOAT_MODE_P (mode)
3341 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
3342 && CONST_DOUBLE_LOW (operands[1]) >= 0)
3343 || (CONST_DOUBLE_HIGH (operands[1]) == -1
3344 && CONST_DOUBLE_LOW (operands[1]) < 0)))
3345 abort ();
3347 /* Check if GCC is setting up a block move that will end up using FP
3348 registers as temporaries. We must make sure this is acceptable. */
3349 if (GET_CODE (operands[0]) == MEM
3350 && GET_CODE (operands[1]) == MEM
3351 && mode == DImode
3352 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
3353 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
3354 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
3355 ? 32 : MEM_ALIGN (operands[0])))
3356 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
3357 ? 32
3358 : MEM_ALIGN (operands[1]))))
3359 && ! MEM_VOLATILE_P (operands [0])
3360 && ! MEM_VOLATILE_P (operands [1]))
3362 emit_move_insn (adjust_address (operands[0], SImode, 0),
3363 adjust_address (operands[1], SImode, 0));
3364 emit_move_insn (adjust_address (operands[0], SImode, 4),
3365 adjust_address (operands[1], SImode, 4));
3366 return;
3369 if (!no_new_pseudos)
3371 if (GET_CODE (operands[1]) == MEM && optimize > 0
3372 && (mode == QImode || mode == HImode || mode == SImode)
3373 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
3375 rtx reg = gen_reg_rtx (word_mode);
3377 emit_insn (gen_rtx_SET (word_mode, reg,
3378 gen_rtx_ZERO_EXTEND (word_mode,
3379 operands[1])));
3380 operands[1] = gen_lowpart (mode, reg);
3382 if (GET_CODE (operands[0]) != REG)
3383 operands[1] = force_reg (mode, operands[1]);
3386 if (mode == SFmode && ! TARGET_POWERPC
3387 && TARGET_HARD_FLOAT && TARGET_FPRS
3388 && GET_CODE (operands[0]) == MEM)
3390 int regnum;
3392 if (reload_in_progress || reload_completed)
3393 regnum = true_regnum (operands[1]);
3394 else if (GET_CODE (operands[1]) == REG)
3395 regnum = REGNO (operands[1]);
3396 else
3397 regnum = -1;
3399 /* If operands[1] is a register, on POWER it may have
3400 double-precision data in it, so truncate it to single
3401 precision. */
3402 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
3404 rtx newreg;
3405 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
3406 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
3407 operands[1] = newreg;
3411 /* Recognize the case where operand[1] is a reference to thread-local
3412 data and load its address to a register. */
3413 if (GET_CODE (operands[1]) == SYMBOL_REF)
3415 enum tls_model model = SYMBOL_REF_TLS_MODEL (operands[1]);
3416 if (model != 0)
3417 operands[1] = rs6000_legitimize_tls_address (operands[1], model);
3420 /* Handle the case where reload calls us with an invalid address. */
3421 if (reload_in_progress && mode == Pmode
3422 && (! general_operand (operands[1], mode)
3423 || ! nonimmediate_operand (operands[0], mode)))
3424 goto emit_set;
3426 /* Handle the case of CONSTANT_P_RTX. */
3427 if (GET_CODE (operands[1]) == CONSTANT_P_RTX)
3428 goto emit_set;
3430 /* FIXME: In the long term, this switch statement should go away
3431 and be replaced by a sequence of tests based on things like
3432 mode == Pmode. */
3433 switch (mode)
3435 case HImode:
3436 case QImode:
3437 if (CONSTANT_P (operands[1])
3438 && GET_CODE (operands[1]) != CONST_INT)
3439 operands[1] = force_const_mem (mode, operands[1]);
3440 break;
3442 case TFmode:
3443 case DFmode:
3444 case SFmode:
3445 if (CONSTANT_P (operands[1])
3446 && ! easy_fp_constant (operands[1], mode))
3447 operands[1] = force_const_mem (mode, operands[1]);
3448 break;
3450 case V16QImode:
3451 case V8HImode:
3452 case V4SFmode:
3453 case V4SImode:
3454 case V4HImode:
3455 case V2SFmode:
3456 case V2SImode:
3457 case V1DImode:
3458 if (CONSTANT_P (operands[1])
3459 && !easy_vector_constant (operands[1], mode))
3460 operands[1] = force_const_mem (mode, operands[1]);
3461 break;
3463 case SImode:
3464 case DImode:
3465 /* Use default pattern for address of ELF small data */
3466 if (TARGET_ELF
3467 && mode == Pmode
3468 && DEFAULT_ABI == ABI_V4
3469 && (GET_CODE (operands[1]) == SYMBOL_REF
3470 || GET_CODE (operands[1]) == CONST)
3471 && small_data_operand (operands[1], mode))
3473 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3474 return;
3477 if (DEFAULT_ABI == ABI_V4
3478 && mode == Pmode && mode == SImode
3479 && flag_pic == 1 && got_operand (operands[1], mode))
3481 emit_insn (gen_movsi_got (operands[0], operands[1]));
3482 return;
3485 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
3486 && TARGET_NO_TOC
3487 && ! flag_pic
3488 && mode == Pmode
3489 && CONSTANT_P (operands[1])
3490 && GET_CODE (operands[1]) != HIGH
3491 && GET_CODE (operands[1]) != CONST_INT)
3493 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
3495 /* If this is a function address on -mcall-aixdesc,
3496 convert it to the address of the descriptor. */
3497 if (DEFAULT_ABI == ABI_AIX
3498 && GET_CODE (operands[1]) == SYMBOL_REF
3499 && XSTR (operands[1], 0)[0] == '.')
3501 const char *name = XSTR (operands[1], 0);
3502 rtx new_ref;
3503 while (*name == '.')
3504 name++;
3505 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
3506 CONSTANT_POOL_ADDRESS_P (new_ref)
3507 = CONSTANT_POOL_ADDRESS_P (operands[1]);
3508 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
3509 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
3510 SYMBOL_REF_DECL (new_ref) = SYMBOL_REF_DECL (operands[1]);
3511 operands[1] = new_ref;
3514 if (DEFAULT_ABI == ABI_DARWIN)
3516 #if TARGET_MACHO
3517 if (MACHO_DYNAMIC_NO_PIC_P)
3519 /* Take care of any required data indirection. */
3520 operands[1] = rs6000_machopic_legitimize_pic_address (
3521 operands[1], mode, operands[0]);
3522 if (operands[0] != operands[1])
3523 emit_insn (gen_rtx_SET (VOIDmode,
3524 operands[0], operands[1]));
3525 return;
3527 #endif
3528 emit_insn (gen_macho_high (target, operands[1]));
3529 emit_insn (gen_macho_low (operands[0], target, operands[1]));
3530 return;
3533 emit_insn (gen_elf_high (target, operands[1]));
3534 emit_insn (gen_elf_low (operands[0], target, operands[1]));
3535 return;
3538 /* If this is a SYMBOL_REF that refers to a constant pool entry,
3539 and we have put it in the TOC, we just need to make a TOC-relative
3540 reference to it. */
3541 if (TARGET_TOC
3542 && GET_CODE (operands[1]) == SYMBOL_REF
3543 && constant_pool_expr_p (operands[1])
3544 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
3545 get_pool_mode (operands[1])))
3547 operands[1] = create_TOC_reference (operands[1]);
3549 else if (mode == Pmode
3550 && CONSTANT_P (operands[1])
3551 && ((GET_CODE (operands[1]) != CONST_INT
3552 && ! easy_fp_constant (operands[1], mode))
3553 || (GET_CODE (operands[1]) == CONST_INT
3554 && num_insns_constant (operands[1], mode) > 2)
3555 || (GET_CODE (operands[0]) == REG
3556 && FP_REGNO_P (REGNO (operands[0]))))
3557 && GET_CODE (operands[1]) != HIGH
3558 && ! legitimate_constant_pool_address_p (operands[1])
3559 && ! toc_relative_expr_p (operands[1]))
3561 /* Emit a USE operation so that the constant isn't deleted if
3562 expensive optimizations are turned on because nobody
3563 references it. This should only be done for operands that
3564 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
3565 This should not be done for operands that contain LABEL_REFs.
3566 For now, we just handle the obvious case. */
3567 if (GET_CODE (operands[1]) != LABEL_REF)
3568 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
3570 #if TARGET_MACHO
3571 /* Darwin uses a special PIC legitimizer. */
3572 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
3574 operands[1] =
3575 rs6000_machopic_legitimize_pic_address (operands[1], mode,
3576 operands[0]);
3577 if (operands[0] != operands[1])
3578 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3579 return;
3581 #endif
3583 /* If we are to limit the number of things we put in the TOC and
3584 this is a symbol plus a constant we can add in one insn,
3585 just put the symbol in the TOC and add the constant. Don't do
3586 this if reload is in progress. */
3587 if (GET_CODE (operands[1]) == CONST
3588 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
3589 && GET_CODE (XEXP (operands[1], 0)) == PLUS
3590 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
3591 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
3592 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
3593 && ! side_effects_p (operands[0]))
3595 rtx sym =
3596 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
3597 rtx other = XEXP (XEXP (operands[1], 0), 1);
3599 sym = force_reg (mode, sym);
3600 if (mode == SImode)
3601 emit_insn (gen_addsi3 (operands[0], sym, other));
3602 else
3603 emit_insn (gen_adddi3 (operands[0], sym, other));
3604 return;
3607 operands[1] = force_const_mem (mode, operands[1]);
3609 if (TARGET_TOC
3610 && constant_pool_expr_p (XEXP (operands[1], 0))
3611 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
3612 get_pool_constant (XEXP (operands[1], 0)),
3613 get_pool_mode (XEXP (operands[1], 0))))
3615 operands[1]
3616 = gen_rtx_MEM (mode,
3617 create_TOC_reference (XEXP (operands[1], 0)));
3618 set_mem_alias_set (operands[1], get_TOC_alias_set ());
3619 RTX_UNCHANGING_P (operands[1]) = 1;
3622 break;
3624 case TImode:
3625 if (GET_CODE (operands[0]) == MEM
3626 && GET_CODE (XEXP (operands[0], 0)) != REG
3627 && ! reload_in_progress)
3628 operands[0]
3629 = replace_equiv_address (operands[0],
3630 copy_addr_to_reg (XEXP (operands[0], 0)));
3632 if (GET_CODE (operands[1]) == MEM
3633 && GET_CODE (XEXP (operands[1], 0)) != REG
3634 && ! reload_in_progress)
3635 operands[1]
3636 = replace_equiv_address (operands[1],
3637 copy_addr_to_reg (XEXP (operands[1], 0)));
3638 if (TARGET_POWER)
3640 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3641 gen_rtvec (2,
3642 gen_rtx_SET (VOIDmode,
3643 operands[0], operands[1]),
3644 gen_rtx_CLOBBER (VOIDmode,
3645 gen_rtx_SCRATCH (SImode)))));
3646 return;
3648 break;
3650 default:
3651 abort ();
3654 /* Above, we may have called force_const_mem which may have returned
3655 an invalid address. If we can, fix this up; otherwise, reload will
3656 have to deal with it. */
3657 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
3658 operands[1] = validize_mem (operands[1]);
3660 emit_set:
3661 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3664 /* Initialize a variable CUM of type CUMULATIVE_ARGS
3665 for a call to a function whose data type is FNTYPE.
3666 For a library call, FNTYPE is 0.
3668 For incoming args we set the number of arguments in the prototype large
3669 so we never return a PARALLEL. */
3671 void
3672 init_cumulative_args (cum, fntype, libname, incoming, libcall)
3673 CUMULATIVE_ARGS *cum;
3674 tree fntype;
3675 rtx libname ATTRIBUTE_UNUSED;
3676 int incoming;
3677 int libcall;
3679 static CUMULATIVE_ARGS zero_cumulative;
3681 *cum = zero_cumulative;
3682 cum->words = 0;
3683 cum->fregno = FP_ARG_MIN_REG;
3684 cum->vregno = ALTIVEC_ARG_MIN_REG;
3685 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
3686 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
3687 ? CALL_LIBCALL : CALL_NORMAL);
3688 cum->sysv_gregno = GP_ARG_MIN_REG;
3689 cum->stdarg = fntype
3690 && (TYPE_ARG_TYPES (fntype) != 0
3691 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3692 != void_type_node));
3694 if (incoming)
3695 cum->nargs_prototype = 1000; /* don't return a PARALLEL */
3697 else if (cum->prototype)
3698 cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
3699 + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
3700 || RETURN_IN_MEMORY (TREE_TYPE (fntype))));
3702 else
3703 cum->nargs_prototype = 0;
3705 /* Check for a longcall attribute. */
3706 if (fntype
3707 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
3708 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
3709 cum->call_cookie = CALL_LONG;
3711 if (TARGET_DEBUG_ARG)
3713 fprintf (stderr, "\ninit_cumulative_args:");
3714 if (fntype)
3716 tree ret_type = TREE_TYPE (fntype);
3717 fprintf (stderr, " ret code = %s,",
3718 tree_code_name[ (int)TREE_CODE (ret_type) ]);
3721 if (cum->call_cookie & CALL_LONG)
3722 fprintf (stderr, " longcall,");
3724 fprintf (stderr, " proto = %d, nargs = %d\n",
3725 cum->prototype, cum->nargs_prototype);
3729 /* If defined, a C expression which determines whether, and in which
3730 direction, to pad out an argument with extra space. The value
3731 should be of type `enum direction': either `upward' to pad above
3732 the argument, `downward' to pad below, or `none' to inhibit
3733 padding.
3735 For the AIX ABI structs are always stored left shifted in their
3736 argument slot. */
3738 enum direction
3739 function_arg_padding (mode, type)
3740 enum machine_mode mode;
3741 tree type;
3743 #ifndef AGGREGATE_PADDING_FIXED
3744 #define AGGREGATE_PADDING_FIXED 0
3745 #endif
3746 #ifndef AGGREGATES_PAD_UPWARD_ALWAYS
3747 #define AGGREGATES_PAD_UPWARD_ALWAYS 0
3748 #endif
3750 if (!AGGREGATE_PADDING_FIXED)
3752 /* GCC used to pass structures of the same size as integer types as
3753 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
3754 ie. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
3755 passed padded downward, except that -mstrict-align further
3756 muddied the water in that multi-component structures of 2 and 4
3757 bytes in size were passed padded upward.
3759 The following arranges for best compatibility with previous
3760 versions of gcc, but removes the -mstrict-align dependency. */
3761 if (BYTES_BIG_ENDIAN)
3763 HOST_WIDE_INT size = 0;
3765 if (mode == BLKmode)
3767 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
3768 size = int_size_in_bytes (type);
3770 else
3771 size = GET_MODE_SIZE (mode);
3773 if (size == 1 || size == 2 || size == 4)
3774 return downward;
3776 return upward;
3779 if (AGGREGATES_PAD_UPWARD_ALWAYS)
3781 if (type != 0 && AGGREGATE_TYPE_P (type))
3782 return upward;
3785 /* This is the default definition. */
3786 return (! BYTES_BIG_ENDIAN
3787 ? upward
3788 : ((mode == BLKmode
3789 ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
3790 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
3791 : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
3792 ? downward : upward));
3795 /* If defined, a C expression that gives the alignment boundary, in bits,
3796 of an argument with the specified mode and type. If it is not defined,
3797 PARM_BOUNDARY is used for all arguments.
3799 V.4 wants long longs to be double word aligned. */
3802 function_arg_boundary (mode, type)
3803 enum machine_mode mode;
3804 tree type ATTRIBUTE_UNUSED;
3806 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
3807 return 64;
3808 else if (SPE_VECTOR_MODE (mode))
3809 return 64;
3810 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3811 return 128;
3812 else
3813 return PARM_BOUNDARY;
3816 /* Update the data in CUM to advance over an argument
3817 of mode MODE and data type TYPE.
3818 (TYPE is null for libcalls where that information may not be available.) */
3820 void
3821 function_arg_advance (cum, mode, type, named)
3822 CUMULATIVE_ARGS *cum;
3823 enum machine_mode mode;
3824 tree type;
3825 int named;
3827 cum->nargs_prototype--;
3829 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3831 if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
3832 cum->vregno++;
3833 else
3834 cum->words += RS6000_ARG_SIZE (mode, type);
3836 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
3837 && !cum->stdarg
3838 && cum->sysv_gregno <= GP_ARG_MAX_REG)
3839 cum->sysv_gregno++;
3840 else if (DEFAULT_ABI == ABI_V4)
3842 if (TARGET_HARD_FLOAT && TARGET_FPRS
3843 && (mode == SFmode || mode == DFmode))
3845 if (cum->fregno <= FP_ARG_V4_MAX_REG)
3846 cum->fregno++;
3847 else
3849 if (mode == DFmode)
3850 cum->words += cum->words & 1;
3851 cum->words += RS6000_ARG_SIZE (mode, type);
3854 else
3856 int n_words;
3857 int gregno = cum->sysv_gregno;
3859 /* Aggregates and IEEE quad get passed by reference. */
3860 if ((type && AGGREGATE_TYPE_P (type))
3861 || mode == TFmode)
3862 n_words = 1;
3863 else
3864 n_words = RS6000_ARG_SIZE (mode, type);
3866 /* Long long and SPE vectors are put in odd registers. */
3867 if (n_words == 2 && (gregno & 1) == 0)
3868 gregno += 1;
3870 /* Long long and SPE vectors are not split between registers
3871 and stack. */
3872 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
3874 /* Long long is aligned on the stack. */
3875 if (n_words == 2)
3876 cum->words += cum->words & 1;
3877 cum->words += n_words;
3880 /* Note: continuing to accumulate gregno past when we've started
3881 spilling to the stack indicates the fact that we've started
3882 spilling to the stack to expand_builtin_saveregs. */
3883 cum->sysv_gregno = gregno + n_words;
3886 if (TARGET_DEBUG_ARG)
3888 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
3889 cum->words, cum->fregno);
3890 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
3891 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
3892 fprintf (stderr, "mode = %4s, named = %d\n",
3893 GET_MODE_NAME (mode), named);
3896 else
3898 int align = (TARGET_32BIT && (cum->words & 1) != 0
3899 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
3901 cum->words += align + RS6000_ARG_SIZE (mode, type);
3903 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3904 && TARGET_HARD_FLOAT && TARGET_FPRS)
3905 cum->fregno += (mode == TFmode ? 2 : 1);
3907 if (TARGET_DEBUG_ARG)
3909 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
3910 cum->words, cum->fregno);
3911 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
3912 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
3913 fprintf (stderr, "named = %d, align = %d\n", named, align);
3918 /* Determine where to put a SIMD argument on the SPE. */
3919 static rtx
3920 rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type)
3922 if (cum->stdarg)
3924 int gregno = cum->sysv_gregno;
3925 int n_words = RS6000_ARG_SIZE (mode, type);
3927 /* SPE vectors are put in odd registers. */
3928 if (n_words == 2 && (gregno & 1) == 0)
3929 gregno += 1;
3931 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
3933 rtx r1, r2;
3934 enum machine_mode m = SImode;
3936 r1 = gen_rtx_REG (m, gregno);
3937 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
3938 r2 = gen_rtx_REG (m, gregno + 1);
3939 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
3940 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
3942 else
3943 return NULL;
3945 else
3947 if (cum->sysv_gregno <= GP_ARG_MAX_REG)
3948 return gen_rtx_REG (mode, cum->sysv_gregno);
3949 else
3950 return NULL;
3954 /* Determine where to put an argument to a function.
3955 Value is zero to push the argument on the stack,
3956 or a hard register in which to store the argument.
3958 MODE is the argument's machine mode.
3959 TYPE is the data type of the argument (as a tree).
3960 This is null for libcalls where that information may
3961 not be available.
3962 CUM is a variable of type CUMULATIVE_ARGS which gives info about
3963 the preceding args and about the function being called.
3964 NAMED is nonzero if this argument is a named parameter
3965 (otherwise it is an extra parameter matching an ellipsis).
3967 On RS/6000 the first eight words of non-FP are normally in registers
3968 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
3969 Under V.4, the first 8 FP args are in registers.
3971 If this is floating-point and no prototype is specified, we use
3972 both an FP and integer register (or possibly FP reg and stack). Library
3973 functions (when CALL_LIBCALL is set) always have the proper types for args,
3974 so we can pass the FP value just in one register. emit_library_function
3975 doesn't support PARALLEL anyway. */
3977 struct rtx_def *
3978 function_arg (cum, mode, type, named)
3979 CUMULATIVE_ARGS *cum;
3980 enum machine_mode mode;
3981 tree type;
3982 int named;
3984 enum rs6000_abi abi = DEFAULT_ABI;
3986 /* Return a marker to indicate whether CR1 needs to set or clear the
3987 bit that V.4 uses to say fp args were passed in registers.
3988 Assume that we don't need the marker for software floating point,
3989 or compiler generated library calls. */
3990 if (mode == VOIDmode)
3992 if (abi == ABI_V4
3993 && cum->nargs_prototype < 0
3994 && (cum->call_cookie & CALL_LIBCALL) == 0
3995 && (cum->prototype || TARGET_NO_PROTOTYPE))
3997 /* For the SPE, we need to crxor CR6 always. */
3998 if (TARGET_SPE_ABI)
3999 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
4000 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
4001 return GEN_INT (cum->call_cookie
4002 | ((cum->fregno == FP_ARG_MIN_REG)
4003 ? CALL_V4_SET_FP_ARGS
4004 : CALL_V4_CLEAR_FP_ARGS));
4007 return GEN_INT (cum->call_cookie);
4010 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4012 if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
4013 return gen_rtx_REG (mode, cum->vregno);
4014 else
4015 return NULL;
4017 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode))
4018 return rs6000_spe_function_arg (cum, mode, type);
4019 else if (abi == ABI_V4)
4021 if (TARGET_HARD_FLOAT && TARGET_FPRS
4022 && (mode == SFmode || mode == DFmode))
4024 if (cum->fregno <= FP_ARG_V4_MAX_REG)
4025 return gen_rtx_REG (mode, cum->fregno);
4026 else
4027 return NULL;
4029 else
4031 int n_words;
4032 int gregno = cum->sysv_gregno;
4034 /* Aggregates and IEEE quad get passed by reference. */
4035 if ((type && AGGREGATE_TYPE_P (type))
4036 || mode == TFmode)
4037 n_words = 1;
4038 else
4039 n_words = RS6000_ARG_SIZE (mode, type);
4041 /* Long long and SPE vectors are put in odd registers. */
4042 if (n_words == 2 && (gregno & 1) == 0)
4043 gregno += 1;
4045 /* Long long do not split between registers and stack. */
4046 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
4047 return gen_rtx_REG (mode, gregno);
4048 else
4049 return NULL;
4052 else
4054 int align = (TARGET_32BIT && (cum->words & 1) != 0
4055 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
4056 int align_words = cum->words + align;
4058 if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4059 return NULL_RTX;
4061 if (USE_FP_FOR_ARG_P (*cum, mode, type))
4063 if (! type
4064 || ((cum->nargs_prototype > 0)
4065 /* IBM AIX extended its linkage convention definition always
4066 to require FP args after register save area hole on the
4067 stack. */
4068 && (DEFAULT_ABI != ABI_AIX
4069 || ! TARGET_XL_CALL
4070 || (align_words < GP_ARG_NUM_REG))))
4071 return gen_rtx_REG (mode, cum->fregno);
4073 return gen_rtx_PARALLEL (mode,
4074 gen_rtvec (2,
4075 gen_rtx_EXPR_LIST (VOIDmode,
4076 ((align_words >= GP_ARG_NUM_REG)
4077 ? NULL_RTX
4078 : (align_words
4079 + RS6000_ARG_SIZE (mode, type)
4080 > GP_ARG_NUM_REG
4081 /* If this is partially on the stack, then
4082 we only include the portion actually
4083 in registers here. */
4084 ? gen_rtx_REG (SImode,
4085 GP_ARG_MIN_REG + align_words)
4086 : gen_rtx_REG (mode,
4087 GP_ARG_MIN_REG + align_words))),
4088 const0_rtx),
4089 gen_rtx_EXPR_LIST (VOIDmode,
4090 gen_rtx_REG (mode, cum->fregno),
4091 const0_rtx)));
4093 else if (align_words < GP_ARG_NUM_REG)
4094 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4095 else
4096 return NULL_RTX;
4100 /* For an arg passed partly in registers and partly in memory,
4101 this is the number of registers used.
4102 For args passed entirely in registers or entirely in memory, zero. */
4105 function_arg_partial_nregs (cum, mode, type, named)
4106 CUMULATIVE_ARGS *cum;
4107 enum machine_mode mode;
4108 tree type;
4109 int named ATTRIBUTE_UNUSED;
4111 if (DEFAULT_ABI == ABI_V4)
4112 return 0;
4114 if (USE_FP_FOR_ARG_P (*cum, mode, type)
4115 || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
4117 if (cum->nargs_prototype >= 0)
4118 return 0;
4121 if (cum->words < GP_ARG_NUM_REG
4122 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
4124 int ret = GP_ARG_NUM_REG - cum->words;
4125 if (ret && TARGET_DEBUG_ARG)
4126 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
4128 return ret;
4131 return 0;
4134 /* A C expression that indicates when an argument must be passed by
4135 reference. If nonzero for an argument, a copy of that argument is
4136 made in memory and a pointer to the argument is passed instead of
4137 the argument itself. The pointer is passed in whatever way is
4138 appropriate for passing a pointer to that type.
4140 Under V.4, structures and unions are passed by reference.
4142 As an extension to all ABIs, variable sized types are passed by
4143 reference. */
4146 function_arg_pass_by_reference (cum, mode, type, named)
4147 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
4148 enum machine_mode mode ATTRIBUTE_UNUSED;
4149 tree type;
4150 int named ATTRIBUTE_UNUSED;
4152 if (DEFAULT_ABI == ABI_V4
4153 && ((type && AGGREGATE_TYPE_P (type))
4154 || mode == TFmode))
4156 if (TARGET_DEBUG_ARG)
4157 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
4159 return 1;
4161 return type && int_size_in_bytes (type) <= 0;
4164 /* Perform any needed actions needed for a function that is receiving a
4165 variable number of arguments.
4167 CUM is as above.
4169 MODE and TYPE are the mode and type of the current parameter.
4171 PRETEND_SIZE is a variable that should be set to the amount of stack
4172 that must be pushed by the prolog to pretend that our caller pushed
4175 Normally, this macro will push all remaining incoming registers on the
4176 stack and set PRETEND_SIZE to the length of the registers pushed. */
4178 void
4179 setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
4180 CUMULATIVE_ARGS *cum;
4181 enum machine_mode mode;
4182 tree type;
4183 int *pretend_size ATTRIBUTE_UNUSED;
4184 int no_rtl;
4187 CUMULATIVE_ARGS next_cum;
4188 int reg_size = TARGET_32BIT ? 4 : 8;
4189 rtx save_area = NULL_RTX, mem;
4190 int first_reg_offset, set;
4191 tree fntype;
4192 int stdarg_p;
4194 fntype = TREE_TYPE (current_function_decl);
4195 stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
4196 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4197 != void_type_node));
4199 /* For varargs, we do not want to skip the dummy va_dcl argument.
4200 For stdargs, we do want to skip the last named argument. */
4201 next_cum = *cum;
4202 if (stdarg_p)
4203 function_arg_advance (&next_cum, mode, type, 1);
4205 if (DEFAULT_ABI == ABI_V4)
4207 /* Indicate to allocate space on the stack for varargs save area. */
4208 cfun->machine->sysv_varargs_p = 1;
4209 if (! no_rtl)
4210 save_area = plus_constant (virtual_stack_vars_rtx,
4211 - RS6000_VARARGS_SIZE);
4213 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
4215 else
4217 first_reg_offset = next_cum.words;
4218 save_area = virtual_incoming_args_rtx;
4219 cfun->machine->sysv_varargs_p = 0;
4221 if (MUST_PASS_IN_STACK (mode, type))
4222 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
4225 set = get_varargs_alias_set ();
4226 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
4228 mem = gen_rtx_MEM (BLKmode,
4229 plus_constant (save_area,
4230 first_reg_offset * reg_size)),
4231 set_mem_alias_set (mem, set);
4232 set_mem_align (mem, BITS_PER_WORD);
4234 move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
4235 GP_ARG_NUM_REG - first_reg_offset);
4238 /* Save FP registers if needed. */
4239 if (DEFAULT_ABI == ABI_V4
4240 && TARGET_HARD_FLOAT && TARGET_FPRS
4241 && ! no_rtl
4242 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
4244 int fregno = next_cum.fregno;
4245 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4246 rtx lab = gen_label_rtx ();
4247 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
4249 emit_jump_insn (gen_rtx_SET (VOIDmode,
4250 pc_rtx,
4251 gen_rtx_IF_THEN_ELSE (VOIDmode,
4252 gen_rtx_NE (VOIDmode, cr1,
4253 const0_rtx),
4254 gen_rtx_LABEL_REF (VOIDmode, lab),
4255 pc_rtx)));
4257 while (fregno <= FP_ARG_V4_MAX_REG)
4259 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
4260 set_mem_alias_set (mem, set);
4261 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
4262 fregno++;
4263 off += 8;
4266 emit_label (lab);
4270 /* Create the va_list data type. */
4272 tree
4273 rs6000_build_va_list ()
4275 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
4277 /* For AIX, prefer 'char *' because that's what the system
4278 header files like. */
4279 if (DEFAULT_ABI != ABI_V4)
4280 return build_pointer_type (char_type_node);
4282 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
4283 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
4285 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
4286 unsigned_char_type_node);
4287 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
4288 unsigned_char_type_node);
4289 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
4290 every user file. */
4291 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
4292 short_unsigned_type_node);
4293 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
4294 ptr_type_node);
4295 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
4296 ptr_type_node);
4298 DECL_FIELD_CONTEXT (f_gpr) = record;
4299 DECL_FIELD_CONTEXT (f_fpr) = record;
4300 DECL_FIELD_CONTEXT (f_res) = record;
4301 DECL_FIELD_CONTEXT (f_ovf) = record;
4302 DECL_FIELD_CONTEXT (f_sav) = record;
4304 TREE_CHAIN (record) = type_decl;
4305 TYPE_NAME (record) = type_decl;
4306 TYPE_FIELDS (record) = f_gpr;
4307 TREE_CHAIN (f_gpr) = f_fpr;
4308 TREE_CHAIN (f_fpr) = f_res;
4309 TREE_CHAIN (f_res) = f_ovf;
4310 TREE_CHAIN (f_ovf) = f_sav;
4312 layout_type (record);
4314 /* The correct type is an array type of one element. */
4315 return build_array_type (record, build_index_type (size_zero_node));
/* Implement va_start.  */

/* Initialize the va_list object designated by VALIST for the current
   function.  NEXTARG is the rtx for the first anonymous argument; it is
   only consumed by the generic fallback.  For the SVR4 (V4) ABI the
   va_list is the record built by rs6000_build_builtin_va_list above
   (gpr count, fpr count, reserved pad, overflow_arg_area pointer,
   reg_save_area pointer), and each field is filled in here by emitting
   MODIFY_EXPR trees through expand_expr.  */

void
rs6000_va_start (valist, nextarg)
     tree valist;
     rtx nextarg;
  HOST_WIDE_INT words, n_gpr, n_fpr;
  tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, t;

  /* Only SVR4 needs something special; every other ABI uses the
     generic single-pointer va_list.  */
  if (DEFAULT_ABI != ABI_V4)
      std_expand_builtin_va_start (valist, nextarg);
      return;

  /* Walk the FIELD_DECL chain of the va_list record, in the order the
     fields were chained in rs6000_build_builtin_va_list.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_res = TREE_CHAIN (f_fpr);
  f_ovf = TREE_CHAIN (f_res);
  f_sav = TREE_CHAIN (f_ovf);

  /* The va_list type is an array of one record, so VALIST arrives as a
     pointer; dereference it and form lvalues for the fields we set.
     (f_res is the reserved pad and is never written.)  */
  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  /* Count number of gp and fp argument registers used.  */
  words = current_function_args_info.words;
  n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
  n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;

  if (TARGET_DEBUG_ARG)
    fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
	     HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
	     words, n_gpr, n_fpr);

  /* Store the register counts consumed by the named arguments.  */
  t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the overflow area: the incoming-args pointer advanced past the
     words already used by named stack arguments.  */
  t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
  if (words != 0)
    t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
	       build_int_2 (words * UNITS_PER_WORD, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the register save area, RS6000_VARARGS_SIZE bytes below the
     frame pointer (build_int_2's second arg -1 sign-extends the
     negative offset to the full HOST_WIDE_INT).  */
  t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
  t = build (PLUS_EXPR, TREE_TYPE (sav), t,
	     build_int_2 (-RS6000_VARARGS_SIZE, -1));
  t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* Implement va_arg.  */

/* Expand a va_arg fetch of TYPE from the va_list lvalue VALIST and
   return an rtx holding the address where the argument can be read.
   For the V4 ABI this emits a runtime branch: arguments still in the
   register save area are addressed there, otherwise the overflow
   (stack) area is used.  */
rs6000_va_arg (valist, type)
     tree valist, type;
  tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, reg, t, u;
  int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
  rtx lab_false, lab_over, addr_rtx, r;

  if (DEFAULT_ABI != ABI_V4)
      /* Variable sized types are passed by reference.  */
      if (int_size_in_bytes (type) <= 0)
	  u = build_pointer_type (type);

	  /* Args grow upward: post-increment the va_list pointer by one
	     pointer's worth, then dereference it as a TYPE *.  */
	  t = build (POSTINCREMENT_EXPR, TREE_TYPE (valist), valist,
		     build_int_2 (POINTER_SIZE / BITS_PER_UNIT, 0));
	  TREE_SIDE_EFFECTS (t) = 1;

	  t = build1 (NOP_EXPR, build_pointer_type (u), t);
	  TREE_SIDE_EFFECTS (t) = 1;

	  t = build1 (INDIRECT_REF, u, t);
	  TREE_SIDE_EFFECTS (t) = 1;

	  return expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL);
      else
	return std_expand_builtin_va_arg (valist, type);

  /* V4 ABI: locate the five fields of the va_list record (see
     rs6000_va_start above).  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_res = TREE_CHAIN (f_fpr);
  f_ovf = TREE_CHAIN (f_res);
  f_sav = TREE_CHAIN (f_ovf);

  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  /* Size in bytes and in whole words (rounded up).  */
  size = int_size_in_bytes (type);
  rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
      /* Aggregates and long doubles are passed by reference, so what we
	 fetch from the va_list is a single pointer-sized word.  */
      indirect_p = 1;
      reg = gpr;
      n_reg = 1;
      sav_ofs = 0;
      sav_scale = 4;
      size = UNITS_PER_WORD;
      rsize = 1;
  else if (FLOAT_TYPE_P (type) && TARGET_HARD_FLOAT && TARGET_FPRS)
      /* FP args go in FP registers, if present.  The FPR part of the
	 save area starts after the 8 GPR slots (8 * 4 bytes); each FPR
	 slot is 8 bytes, hence sav_scale = 8.  */
      indirect_p = 0;
      reg = fpr;
      n_reg = 1;
      sav_ofs = 8*4;
      sav_scale = 8;
  else
      /* Otherwise into GP registers, one per word of the value.  */
      indirect_p = 0;
      reg = gpr;
      n_reg = rsize;
      sav_ofs = 0;
      sav_scale = 4;

  /* Pull the value out of the saved registers ...  */

  lab_false = gen_label_rtx ();
  lab_over = gen_label_rtx ();
  addr_rtx = gen_reg_rtx (Pmode);

  /* AltiVec vectors never go in registers.  */
  if (!TARGET_ALTIVEC || TREE_CODE (type) != VECTOR_TYPE)
      /* Branch to lab_false when the register counter shows the value
	 no longer fits in the 8 argument registers.  The counter is
	 volatile because it is updated on both paths.  */
      TREE_THIS_VOLATILE (reg) = 1;
      emit_cmp_and_jump_insns
	(expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
	 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
	 lab_false);

      /* Long long is aligned in the registers: round the counter up to
	 a multiple of n_reg before use.  */
      if (n_reg > 1)
	  u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
		     build_int_2 (n_reg - 1, 0));
	  u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
	  u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
	  TREE_SIDE_EFFECTS (u) = 1;
	  expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);

      if (sav_ofs)
	t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
      else
	t = sav;

      /* addr = sav + sav_ofs + reg * sav_scale, post-incrementing the
	 register counter by the number of registers consumed.  */
      u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
		 build_int_2 (n_reg, 0));
      TREE_SIDE_EFFECTS (u) = 1;

      u = build1 (CONVERT_EXPR, integer_type_node, u);
      TREE_SIDE_EFFECTS (u) = 1;

      u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
      TREE_SIDE_EFFECTS (u) = 1;

      t = build (PLUS_EXPR, ptr_type_node, t, u);
      TREE_SIDE_EFFECTS (t) = 1;

      r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
      if (r != addr_rtx)
	emit_move_insn (addr_rtx, r);

      emit_jump_insn (gen_jump (lab_over));
      emit_barrier ();

  emit_label (lab_false);

  /* ... otherwise out of the overflow area.  */

  /* Make sure we don't find reg 7 for the next int arg.

     All AltiVec vectors go in the overflow area.  So in the AltiVec
     case we need to get the vectors from the overflow area, but
     remember where the GPRs and FPRs are.  */
  if (n_reg > 1 && (TREE_CODE (type) != VECTOR_TYPE
		    || !TARGET_ALTIVEC))
      t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Care for on-stack alignment if needed.  */
  if (rsize <= 1)
    t = ovf;
  else
      int align;

      /* AltiVec vectors are 16 byte aligned.  */
      if (TARGET_ALTIVEC && TREE_CODE (type) == VECTOR_TYPE)
	align = 15;
      else
	align = 7;

      /* Round ovf up: (ovf + align) & ~align.  */
      t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
      t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));

  /* save_expr so the aligned address is computed once, both as the
     result and as the base for the updated overflow pointer.  */
  t = save_expr (t);

  r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
  if (r != addr_rtx)
    emit_move_insn (addr_rtx, r);

  /* Advance the overflow pointer past this argument.  */
  t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (lab_over);

  /* For by-reference arguments, what we computed is the address of a
     pointer; load it to obtain the argument's real address.  */
  if (indirect_p)
      r = gen_rtx_MEM (Pmode, addr_rtx);
      set_mem_alias_set (r, get_varargs_alias_set ());
      emit_move_insn (addr_rtx, r);

  return addr_rtx;
/* Builtins.  */

/* Register builtin NAME with function type TYPE and rs6000 builtin
   enum CODE as a machine-dependent builtin (BUILT_IN_MD), but only
   when all flag bits in MASK are enabled in target_flags.  NOTE: a
   zero MASK never matches, so entries with mask 0 (the SPE tables
   below) are registered elsewhere, not through this macro.  */
#define def_builtin(MASK, NAME, TYPE, CODE)			\
  do {								\
    if ((MASK) & target_flags)					\
      builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,	\
			NULL, NULL_TREE);			\
  } while (0)
/* Simple ternary operations: VECd = foo (VECa, VECb, VECc).  */

/* Each entry maps a target_flags mask and an insn_code pattern to the
   builtin's source-level name and its rs6000_builtins enum value.  */
static const struct builtin_description bdesc_3arg[] =
  { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
/* DST operations: void foo (void *, const int, const char).  */

/* AltiVec data-stream touch builtins (cache prefetch hints).  */
static const struct builtin_description bdesc_dst[] =
  { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
4620 /* Simple binary operations: VECc = foo (VECa, VECb). */
4622 static struct builtin_description bdesc_2arg[] =
4624 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
4625 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
4626 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
4627 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
4628 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
4629 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
4630 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
4631 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
4632 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
4633 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
4634 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
4635 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
4636 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
4637 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
4638 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
4639 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
4640 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
4641 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
4642 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
4643 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
4644 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
4645 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
4646 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
4647 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
4648 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
4649 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
4650 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
4651 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
4652 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
4653 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
4654 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
4655 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
4656 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
4657 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
4658 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
4659 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
4660 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
4661 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
4662 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
4663 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
4664 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
4665 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
4666 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
4667 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
4668 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
4669 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
4670 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
4671 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
4672 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
4673 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
4674 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
4675 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
4676 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
4677 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
4678 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
4679 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
4680 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
4681 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
4682 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
4683 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
4684 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
4685 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
4686 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
4687 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
4688 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
4689 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
4690 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
4691 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
4692 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
4693 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
4694 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
4695 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
4696 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
4697 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
4698 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
4699 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
4700 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
4701 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
4702 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
4703 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
4704 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
4705 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
4706 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
4707 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
4708 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
4709 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
4710 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
4711 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
4712 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
4713 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
4714 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
4715 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
4716 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
4717 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
4718 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
4719 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
4720 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
4721 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
4722 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
4723 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
4724 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
4725 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
4726 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
4727 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
4728 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
4729 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
4730 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
4731 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
4732 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
4733 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
4734 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
4735 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
4736 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
4738 /* Place holder, leave as first spe builtin. */
4739 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
4740 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
4741 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
4742 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
4743 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
4744 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
4745 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
4746 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
4747 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
4748 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
4749 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
4750 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
4751 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
4752 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
4753 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
4754 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
4755 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
4756 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
4757 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
4758 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
4759 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
4760 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
4761 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
4762 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
4763 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
4764 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
4765 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
4766 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
4767 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
4768 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
4769 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
4770 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
4771 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
4772 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
4773 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
4774 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
4775 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
4776 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
4777 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
4778 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
4779 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
4780 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
4781 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
4782 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
4783 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
4784 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
4785 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
4786 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
4787 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
4788 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
4789 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
4790 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
4791 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
4792 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
4793 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
4794 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
4795 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
4796 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
4797 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
4798 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
4799 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
4800 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
4801 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
4802 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
4803 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
4804 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
4805 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
4806 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
4807 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
4808 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
4809 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
4810 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
4811 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
4812 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
4813 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
4814 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
4815 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
4816 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
4817 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
4818 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
4819 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
4820 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
4821 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
4822 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
4823 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
4824 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
4825 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
4826 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
4827 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
4828 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
4829 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
4830 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
4831 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
4832 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
4833 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
4834 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
4835 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
4836 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
4837 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
4838 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
4839 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
4840 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
4841 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
4842 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
4843 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
4844 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
4845 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
4846 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
4847 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
4849 /* SPE binary operations expecting a 5-bit unsigned literal. */
4850 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
4852 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
4853 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
4854 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
4855 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
4856 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
4857 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
4858 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
4859 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
4860 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
4861 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
4862 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
4863 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
4864 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
4865 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
4866 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
4867 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
4868 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
4869 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
4870 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
4871 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
4872 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
4873 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
4874 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
4875 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
4876 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
4877 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
4879 /* Place-holder. Leave as last binary SPE builtin. */
4880 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
4883 /* AltiVec predicates. */
4885 struct builtin_description_predicates
4887 const unsigned int mask;
4888 const enum insn_code icode;
4889 const char *opcode;
4890 const char *const name;
4891 const enum rs6000_builtins code;
4894 static const struct builtin_description_predicates bdesc_altivec_preds[] =
4896 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
4897 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
4898 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
4899 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
4900 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
4901 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
4902 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
4903 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
4904 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
4905 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
4906 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
4907 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
4908 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
4911 /* SPE predicates. */
4912 static struct builtin_description bdesc_spe_predicates[] =
4914 /* Place-holder. Leave as first. */
4915 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
4916 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
4917 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
4918 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
4919 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
4920 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
4921 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
4922 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
4923 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
4924 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
4925 /* Place-holder. Leave as last. */
4926 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
4929 /* SPE evsel predicates. */
4930 static struct builtin_description bdesc_spe_evsel[] =
4932 /* Place-holder. Leave as first. */
4933 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
4934 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
4935 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
4936 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
4937 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
4938 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
4939 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
4940 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
4941 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
4942 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
4943 /* Place-holder. Leave as last. */
4944 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
4947 /* ABS* operations. */
4949 static const struct builtin_description bdesc_abs[] =
4951 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
4952 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
4953 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
4954 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
4955 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
4956 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
4957 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
4960 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
4961 foo (VECa). */
4963 static struct builtin_description bdesc_1arg[] =
4965 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
4966 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
4967 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
4968 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
4969 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
4970 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
4971 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
4972 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
4973 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
4974 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
4975 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
4976 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
4977 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
4978 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
4979 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
4980 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
4981 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
4983 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
4984 end with SPE_BUILTIN_EVSUBFUSIAAW. */
4985 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
4986 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
4987 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
4988 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
4989 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
4990 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
4991 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
4992 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
4993 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
4994 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
4995 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
4996 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
4997 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
4998 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
4999 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
5000 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
5001 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
5002 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
5003 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
5004 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
5005 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
5006 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
5007 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
5008 { 0, CODE_FOR_spe_evneg, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
5009 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
5010 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
5011 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
5012 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
5014 /* Place-holder. Leave as last unary SPE builtin. */
5015 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
5018 static rtx
5019 rs6000_expand_unop_builtin (icode, arglist, target)
5020 enum insn_code icode;
5021 tree arglist;
5022 rtx target;
5024 rtx pat;
5025 tree arg0 = TREE_VALUE (arglist);
5026 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5027 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5028 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5030 if (icode == CODE_FOR_nothing)
5031 /* Builtin not supported on this processor. */
5032 return 0;
5034 /* If we got invalid arguments bail out before generating bad rtl. */
5035 if (arg0 == error_mark_node)
5036 return const0_rtx;
5038 if (icode == CODE_FOR_altivec_vspltisb
5039 || icode == CODE_FOR_altivec_vspltish
5040 || icode == CODE_FOR_altivec_vspltisw
5041 || icode == CODE_FOR_spe_evsplatfi
5042 || icode == CODE_FOR_spe_evsplati)
5044 /* Only allow 5-bit *signed* literals. */
5045 if (GET_CODE (op0) != CONST_INT
5046 || INTVAL (op0) > 0x1f
5047 || INTVAL (op0) < -0x1f)
5049 error ("argument 1 must be a 5-bit signed literal");
5050 return const0_rtx;
5054 if (target == 0
5055 || GET_MODE (target) != tmode
5056 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5057 target = gen_reg_rtx (tmode);
5059 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5060 op0 = copy_to_mode_reg (mode0, op0);
5062 pat = GEN_FCN (icode) (target, op0);
5063 if (! pat)
5064 return 0;
5065 emit_insn (pat);
5067 return target;
5070 static rtx
5071 altivec_expand_abs_builtin (icode, arglist, target)
5072 enum insn_code icode;
5073 tree arglist;
5074 rtx target;
5076 rtx pat, scratch1, scratch2;
5077 tree arg0 = TREE_VALUE (arglist);
5078 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5079 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5080 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5082 /* If we have invalid arguments, bail out before generating bad rtl. */
5083 if (arg0 == error_mark_node)
5084 return const0_rtx;
5086 if (target == 0
5087 || GET_MODE (target) != tmode
5088 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5089 target = gen_reg_rtx (tmode);
5091 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5092 op0 = copy_to_mode_reg (mode0, op0);
5094 scratch1 = gen_reg_rtx (mode0);
5095 scratch2 = gen_reg_rtx (mode0);
5097 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
5098 if (! pat)
5099 return 0;
5100 emit_insn (pat);
5102 return target;
5105 static rtx
5106 rs6000_expand_binop_builtin (icode, arglist, target)
5107 enum insn_code icode;
5108 tree arglist;
5109 rtx target;
5111 rtx pat;
5112 tree arg0 = TREE_VALUE (arglist);
5113 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5114 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5115 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5116 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5117 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5118 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5120 if (icode == CODE_FOR_nothing)
5121 /* Builtin not supported on this processor. */
5122 return 0;
5124 /* If we got invalid arguments bail out before generating bad rtl. */
5125 if (arg0 == error_mark_node || arg1 == error_mark_node)
5126 return const0_rtx;
5128 if (icode == CODE_FOR_altivec_vcfux
5129 || icode == CODE_FOR_altivec_vcfsx
5130 || icode == CODE_FOR_altivec_vctsxs
5131 || icode == CODE_FOR_altivec_vctuxs
5132 || icode == CODE_FOR_altivec_vspltb
5133 || icode == CODE_FOR_altivec_vsplth
5134 || icode == CODE_FOR_altivec_vspltw
5135 || icode == CODE_FOR_spe_evaddiw
5136 || icode == CODE_FOR_spe_evldd
5137 || icode == CODE_FOR_spe_evldh
5138 || icode == CODE_FOR_spe_evldw
5139 || icode == CODE_FOR_spe_evlhhesplat
5140 || icode == CODE_FOR_spe_evlhhossplat
5141 || icode == CODE_FOR_spe_evlhhousplat
5142 || icode == CODE_FOR_spe_evlwhe
5143 || icode == CODE_FOR_spe_evlwhos
5144 || icode == CODE_FOR_spe_evlwhou
5145 || icode == CODE_FOR_spe_evlwhsplat
5146 || icode == CODE_FOR_spe_evlwwsplat
5147 || icode == CODE_FOR_spe_evrlwi
5148 || icode == CODE_FOR_spe_evslwi
5149 || icode == CODE_FOR_spe_evsrwis
5150 || icode == CODE_FOR_spe_evsubifw
5151 || icode == CODE_FOR_spe_evsrwiu)
5153 /* Only allow 5-bit unsigned literals. */
5154 if (TREE_CODE (arg1) != INTEGER_CST
5155 || TREE_INT_CST_LOW (arg1) & ~0x1f)
5157 error ("argument 2 must be a 5-bit unsigned literal");
5158 return const0_rtx;
5162 if (target == 0
5163 || GET_MODE (target) != tmode
5164 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5165 target = gen_reg_rtx (tmode);
5167 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5168 op0 = copy_to_mode_reg (mode0, op0);
5169 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5170 op1 = copy_to_mode_reg (mode1, op1);
5172 pat = GEN_FCN (icode) (target, op0, op1);
5173 if (! pat)
5174 return 0;
5175 emit_insn (pat);
5177 return target;
5180 static rtx
5181 altivec_expand_predicate_builtin (icode, opcode, arglist, target)
5182 enum insn_code icode;
5183 const char *opcode;
5184 tree arglist;
5185 rtx target;
5187 rtx pat, scratch;
5188 tree cr6_form = TREE_VALUE (arglist);
5189 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
5190 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5191 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5192 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5193 enum machine_mode tmode = SImode;
5194 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5195 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5196 int cr6_form_int;
5198 if (TREE_CODE (cr6_form) != INTEGER_CST)
5200 error ("argument 1 of __builtin_altivec_predicate must be a constant");
5201 return const0_rtx;
5203 else
5204 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
5206 if (mode0 != mode1)
5207 abort ();
5209 /* If we have invalid arguments, bail out before generating bad rtl. */
5210 if (arg0 == error_mark_node || arg1 == error_mark_node)
5211 return const0_rtx;
5213 if (target == 0
5214 || GET_MODE (target) != tmode
5215 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5216 target = gen_reg_rtx (tmode);
5218 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5219 op0 = copy_to_mode_reg (mode0, op0);
5220 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5221 op1 = copy_to_mode_reg (mode1, op1);
5223 scratch = gen_reg_rtx (mode0);
5225 pat = GEN_FCN (icode) (scratch, op0, op1,
5226 gen_rtx (SYMBOL_REF, Pmode, opcode));
5227 if (! pat)
5228 return 0;
5229 emit_insn (pat);
5231 /* The vec_any* and vec_all* predicates use the same opcodes for two
5232 different operations, but the bits in CR6 will be different
5233 depending on what information we want. So we have to play tricks
5234 with CR6 to get the right bits out.
5236 If you think this is disgusting, look at the specs for the
5237 AltiVec predicates. */
5239 switch (cr6_form_int)
5241 case 0:
5242 emit_insn (gen_cr6_test_for_zero (target));
5243 break;
5244 case 1:
5245 emit_insn (gen_cr6_test_for_zero_reverse (target));
5246 break;
5247 case 2:
5248 emit_insn (gen_cr6_test_for_lt (target));
5249 break;
5250 case 3:
5251 emit_insn (gen_cr6_test_for_lt_reverse (target));
5252 break;
5253 default:
5254 error ("argument 1 of __builtin_altivec_predicate is out of range");
5255 break;
5258 return target;
5261 static rtx
5262 altivec_expand_stv_builtin (icode, arglist)
5263 enum insn_code icode;
5264 tree arglist;
5266 tree arg0 = TREE_VALUE (arglist);
5267 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5268 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5269 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5270 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5271 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5272 rtx pat;
5273 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
5274 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
5275 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
5277 /* Invalid arguments. Bail before doing anything stoopid! */
5278 if (arg0 == error_mark_node
5279 || arg1 == error_mark_node
5280 || arg2 == error_mark_node)
5281 return const0_rtx;
5283 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
5284 op0 = copy_to_mode_reg (mode2, op0);
5285 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
5286 op1 = copy_to_mode_reg (mode0, op1);
5287 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
5288 op2 = copy_to_mode_reg (mode1, op2);
5290 pat = GEN_FCN (icode) (op1, op2, op0);
5291 if (pat)
5292 emit_insn (pat);
5293 return NULL_RTX;
5296 static rtx
5297 rs6000_expand_ternop_builtin (icode, arglist, target)
5298 enum insn_code icode;
5299 tree arglist;
5300 rtx target;
5302 rtx pat;
5303 tree arg0 = TREE_VALUE (arglist);
5304 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5305 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5306 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5307 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5308 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5309 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5310 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5311 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5312 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
5314 if (icode == CODE_FOR_nothing)
5315 /* Builtin not supported on this processor. */
5316 return 0;
5318 /* If we got invalid arguments bail out before generating bad rtl. */
5319 if (arg0 == error_mark_node
5320 || arg1 == error_mark_node
5321 || arg2 == error_mark_node)
5322 return const0_rtx;
5324 if (icode == CODE_FOR_altivec_vsldoi_4sf
5325 || icode == CODE_FOR_altivec_vsldoi_4si
5326 || icode == CODE_FOR_altivec_vsldoi_8hi
5327 || icode == CODE_FOR_altivec_vsldoi_16qi)
5329 /* Only allow 4-bit unsigned literals. */
5330 if (TREE_CODE (arg2) != INTEGER_CST
5331 || TREE_INT_CST_LOW (arg2) & ~0xf)
5333 error ("argument 3 must be a 4-bit unsigned literal");
5334 return const0_rtx;
5338 if (target == 0
5339 || GET_MODE (target) != tmode
5340 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5341 target = gen_reg_rtx (tmode);
5343 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5344 op0 = copy_to_mode_reg (mode0, op0);
5345 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5346 op1 = copy_to_mode_reg (mode1, op1);
5347 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
5348 op2 = copy_to_mode_reg (mode2, op2);
5350 pat = GEN_FCN (icode) (target, op0, op1, op2);
5351 if (! pat)
5352 return 0;
5353 emit_insn (pat);
5355 return target;
5358 /* Expand the lvx builtins. */
5359 static rtx
5360 altivec_expand_ld_builtin (exp, target, expandedp)
5361 tree exp;
5362 rtx target;
5363 bool *expandedp;
5365 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5366 tree arglist = TREE_OPERAND (exp, 1);
5367 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5368 tree arg0;
5369 enum machine_mode tmode, mode0;
5370 rtx pat, op0;
5371 enum insn_code icode;
5373 switch (fcode)
5375 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
5376 icode = CODE_FOR_altivec_lvx_16qi;
5377 break;
5378 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
5379 icode = CODE_FOR_altivec_lvx_8hi;
5380 break;
5381 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
5382 icode = CODE_FOR_altivec_lvx_4si;
5383 break;
5384 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
5385 icode = CODE_FOR_altivec_lvx_4sf;
5386 break;
5387 default:
5388 *expandedp = false;
5389 return NULL_RTX;
5392 *expandedp = true;
5394 arg0 = TREE_VALUE (arglist);
5395 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5396 tmode = insn_data[icode].operand[0].mode;
5397 mode0 = insn_data[icode].operand[1].mode;
5399 if (target == 0
5400 || GET_MODE (target) != tmode
5401 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5402 target = gen_reg_rtx (tmode);
5404 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5405 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
5407 pat = GEN_FCN (icode) (target, op0);
5408 if (! pat)
5409 return 0;
5410 emit_insn (pat);
5411 return target;
5414 /* Expand the stvx builtins. */
5415 static rtx
5416 altivec_expand_st_builtin (exp, target, expandedp)
5417 tree exp;
5418 rtx target ATTRIBUTE_UNUSED;
5419 bool *expandedp;
5421 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5422 tree arglist = TREE_OPERAND (exp, 1);
5423 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5424 tree arg0, arg1;
5425 enum machine_mode mode0, mode1;
5426 rtx pat, op0, op1;
5427 enum insn_code icode;
5429 switch (fcode)
5431 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
5432 icode = CODE_FOR_altivec_stvx_16qi;
5433 break;
5434 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
5435 icode = CODE_FOR_altivec_stvx_8hi;
5436 break;
5437 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
5438 icode = CODE_FOR_altivec_stvx_4si;
5439 break;
5440 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
5441 icode = CODE_FOR_altivec_stvx_4sf;
5442 break;
5443 default:
5444 *expandedp = false;
5445 return NULL_RTX;
5448 arg0 = TREE_VALUE (arglist);
5449 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5450 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5451 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5452 mode0 = insn_data[icode].operand[0].mode;
5453 mode1 = insn_data[icode].operand[1].mode;
5455 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5456 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
5457 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
5458 op1 = copy_to_mode_reg (mode1, op1);
5460 pat = GEN_FCN (icode) (op0, op1);
5461 if (pat)
5462 emit_insn (pat);
5464 *expandedp = true;
5465 return NULL_RTX;
5468 /* Expand the dst builtins. */
5469 static rtx
5470 altivec_expand_dst_builtin (exp, target, expandedp)
5471 tree exp;
5472 rtx target ATTRIBUTE_UNUSED;
5473 bool *expandedp;
5475 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5476 tree arglist = TREE_OPERAND (exp, 1);
5477 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5478 tree arg0, arg1, arg2;
5479 enum machine_mode mode0, mode1, mode2;
5480 rtx pat, op0, op1, op2;
5481 struct builtin_description *d;
5482 size_t i;
5484 *expandedp = false;
5486 /* Handle DST variants. */
5487 d = (struct builtin_description *) bdesc_dst;
5488 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
5489 if (d->code == fcode)
5491 arg0 = TREE_VALUE (arglist);
5492 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5493 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5494 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5495 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5496 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5497 mode0 = insn_data[d->icode].operand[0].mode;
5498 mode1 = insn_data[d->icode].operand[1].mode;
5499 mode2 = insn_data[d->icode].operand[2].mode;
5501 /* Invalid arguments, bail out before generating bad rtl. */
5502 if (arg0 == error_mark_node
5503 || arg1 == error_mark_node
5504 || arg2 == error_mark_node)
5505 return const0_rtx;
5507 if (TREE_CODE (arg2) != INTEGER_CST
5508 || TREE_INT_CST_LOW (arg2) & ~0x3)
5510 error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
5511 return const0_rtx;
5514 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
5515 op0 = copy_to_mode_reg (mode0, op0);
5516 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
5517 op1 = copy_to_mode_reg (mode1, op1);
5519 pat = GEN_FCN (d->icode) (op0, op1, op2);
5520 if (pat != 0)
5521 emit_insn (pat);
5523 *expandedp = true;
5524 return NULL_RTX;
5527 return NULL_RTX;
5530 /* Expand the builtin in EXP and store the result in TARGET. Store
5531 true in *EXPANDEDP if we found a builtin to expand. */
5532 static rtx
5533 altivec_expand_builtin (exp, target, expandedp)
5534 tree exp;
5535 rtx target;
5536 bool *expandedp;
5538 struct builtin_description *d;
5539 struct builtin_description_predicates *dp;
5540 size_t i;
5541 enum insn_code icode;
5542 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5543 tree arglist = TREE_OPERAND (exp, 1);
5544 tree arg0;
5545 rtx op0, pat;
5546 enum machine_mode tmode, mode0;
5547 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5549 target = altivec_expand_ld_builtin (exp, target, expandedp);
5550 if (*expandedp)
5551 return target;
5553 target = altivec_expand_st_builtin (exp, target, expandedp);
5554 if (*expandedp)
5555 return target;
5557 target = altivec_expand_dst_builtin (exp, target, expandedp);
5558 if (*expandedp)
5559 return target;
5561 *expandedp = true;
5563 switch (fcode)
5565 case ALTIVEC_BUILTIN_STVX:
5566 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
5567 case ALTIVEC_BUILTIN_STVEBX:
5568 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
5569 case ALTIVEC_BUILTIN_STVEHX:
5570 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
5571 case ALTIVEC_BUILTIN_STVEWX:
5572 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
5573 case ALTIVEC_BUILTIN_STVXL:
5574 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
5576 case ALTIVEC_BUILTIN_MFVSCR:
5577 icode = CODE_FOR_altivec_mfvscr;
5578 tmode = insn_data[icode].operand[0].mode;
5580 if (target == 0
5581 || GET_MODE (target) != tmode
5582 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5583 target = gen_reg_rtx (tmode);
5585 pat = GEN_FCN (icode) (target);
5586 if (! pat)
5587 return 0;
5588 emit_insn (pat);
5589 return target;
5591 case ALTIVEC_BUILTIN_MTVSCR:
5592 icode = CODE_FOR_altivec_mtvscr;
5593 arg0 = TREE_VALUE (arglist);
5594 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5595 mode0 = insn_data[icode].operand[0].mode;
5597 /* If we got invalid arguments bail out before generating bad rtl. */
5598 if (arg0 == error_mark_node)
5599 return const0_rtx;
5601 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5602 op0 = copy_to_mode_reg (mode0, op0);
5604 pat = GEN_FCN (icode) (op0);
5605 if (pat)
5606 emit_insn (pat);
5607 return NULL_RTX;
5609 case ALTIVEC_BUILTIN_DSSALL:
5610 emit_insn (gen_altivec_dssall ());
5611 return NULL_RTX;
5613 case ALTIVEC_BUILTIN_DSS:
5614 icode = CODE_FOR_altivec_dss;
5615 arg0 = TREE_VALUE (arglist);
5616 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5617 mode0 = insn_data[icode].operand[0].mode;
5619 /* If we got invalid arguments bail out before generating bad rtl. */
5620 if (arg0 == error_mark_node)
5621 return const0_rtx;
5623 if (TREE_CODE (arg0) != INTEGER_CST
5624 || TREE_INT_CST_LOW (arg0) & ~0x3)
5626 error ("argument to dss must be a 2-bit unsigned literal");
5627 return const0_rtx;
5630 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5631 op0 = copy_to_mode_reg (mode0, op0);
5633 emit_insn (gen_altivec_dss (op0));
5634 return NULL_RTX;
5637 /* Expand abs* operations. */
5638 d = (struct builtin_description *) bdesc_abs;
5639 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
5640 if (d->code == fcode)
5641 return altivec_expand_abs_builtin (d->icode, arglist, target);
5643 /* Expand the AltiVec predicates. */
5644 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
5645 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
5646 if (dp->code == fcode)
5647 return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
5649 /* LV* are funky. We initialized them differently. */
5650 switch (fcode)
5652 case ALTIVEC_BUILTIN_LVSL:
5653 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsl,
5654 arglist, target);
5655 case ALTIVEC_BUILTIN_LVSR:
5656 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsr,
5657 arglist, target);
5658 case ALTIVEC_BUILTIN_LVEBX:
5659 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvebx,
5660 arglist, target);
5661 case ALTIVEC_BUILTIN_LVEHX:
5662 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvehx,
5663 arglist, target);
5664 case ALTIVEC_BUILTIN_LVEWX:
5665 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvewx,
5666 arglist, target);
5667 case ALTIVEC_BUILTIN_LVXL:
5668 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvxl,
5669 arglist, target);
5670 case ALTIVEC_BUILTIN_LVX:
5671 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvx,
5672 arglist, target);
5673 default:
5674 break;
5675 /* Fall through. */
5678 *expandedp = false;
5679 return NULL_RTX;
5682 /* Binops that need to be initialized manually, but can be expanded
5683 automagically by rs6000_expand_binop_builtin. */
5684 static struct builtin_description bdesc_2arg_spe[] =
5686 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
5687 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
5688 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
5689 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
5690 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
5691 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
5692 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
5693 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
5694 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
5695 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
5696 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
5697 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
5698 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
5699 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
5700 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
5701 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
5702 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
5703 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
5704 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
5705 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
5706 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
5707 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
5710 /* Expand the builtin in EXP and store the result in TARGET. Store
5711 true in *EXPANDEDP if we found a builtin to expand.
5713 This expands the SPE builtins that are not simple unary and binary
5714 operations. */
5715 static rtx
5716 spe_expand_builtin (exp, target, expandedp)
5717 tree exp;
5718 rtx target;
5719 bool *expandedp;
5721 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5722 tree arglist = TREE_OPERAND (exp, 1);
5723 tree arg1, arg0;
5724 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5725 enum insn_code icode;
5726 enum machine_mode tmode, mode0;
5727 rtx pat, op0;
5728 struct builtin_description *d;
5729 size_t i;
5731 *expandedp = true;
5733 /* Syntax check for a 5-bit unsigned immediate. */
5734 switch (fcode)
5736 case SPE_BUILTIN_EVSTDD:
5737 case SPE_BUILTIN_EVSTDH:
5738 case SPE_BUILTIN_EVSTDW:
5739 case SPE_BUILTIN_EVSTWHE:
5740 case SPE_BUILTIN_EVSTWHO:
5741 case SPE_BUILTIN_EVSTWWE:
5742 case SPE_BUILTIN_EVSTWWO:
5743 arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5744 if (TREE_CODE (arg1) != INTEGER_CST
5745 || TREE_INT_CST_LOW (arg1) & ~0x1f)
5747 error ("argument 2 must be a 5-bit unsigned literal");
5748 return const0_rtx;
5750 break;
5751 default:
5752 break;
5755 /* The evsplat*i instructions are not quite generic. */
5756 switch (fcode)
5758 case SPE_BUILTIN_EVSPLATFI:
5759 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
5760 arglist, target);
5761 case SPE_BUILTIN_EVSPLATI:
5762 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
5763 arglist, target);
5764 default:
5765 break;
5768 d = (struct builtin_description *) bdesc_2arg_spe;
5769 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
5770 if (d->code == fcode)
5771 return rs6000_expand_binop_builtin (d->icode, arglist, target);
5773 d = (struct builtin_description *) bdesc_spe_predicates;
5774 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
5775 if (d->code == fcode)
5776 return spe_expand_predicate_builtin (d->icode, arglist, target);
5778 d = (struct builtin_description *) bdesc_spe_evsel;
5779 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
5780 if (d->code == fcode)
5781 return spe_expand_evsel_builtin (d->icode, arglist, target);
5783 switch (fcode)
5785 case SPE_BUILTIN_EVSTDDX:
5786 return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
5787 case SPE_BUILTIN_EVSTDHX:
5788 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
5789 case SPE_BUILTIN_EVSTDWX:
5790 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
5791 case SPE_BUILTIN_EVSTWHEX:
5792 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
5793 case SPE_BUILTIN_EVSTWHOX:
5794 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
5795 case SPE_BUILTIN_EVSTWWEX:
5796 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
5797 case SPE_BUILTIN_EVSTWWOX:
5798 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
5799 case SPE_BUILTIN_EVSTDD:
5800 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
5801 case SPE_BUILTIN_EVSTDH:
5802 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
5803 case SPE_BUILTIN_EVSTDW:
5804 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
5805 case SPE_BUILTIN_EVSTWHE:
5806 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
5807 case SPE_BUILTIN_EVSTWHO:
5808 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
5809 case SPE_BUILTIN_EVSTWWE:
5810 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
5811 case SPE_BUILTIN_EVSTWWO:
5812 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
5813 case SPE_BUILTIN_MFSPEFSCR:
5814 icode = CODE_FOR_spe_mfspefscr;
5815 tmode = insn_data[icode].operand[0].mode;
5817 if (target == 0
5818 || GET_MODE (target) != tmode
5819 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5820 target = gen_reg_rtx (tmode);
5822 pat = GEN_FCN (icode) (target);
5823 if (! pat)
5824 return 0;
5825 emit_insn (pat);
5826 return target;
5827 case SPE_BUILTIN_MTSPEFSCR:
5828 icode = CODE_FOR_spe_mtspefscr;
5829 arg0 = TREE_VALUE (arglist);
5830 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5831 mode0 = insn_data[icode].operand[0].mode;
5833 if (arg0 == error_mark_node)
5834 return const0_rtx;
5836 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5837 op0 = copy_to_mode_reg (mode0, op0);
5839 pat = GEN_FCN (icode) (op0);
5840 if (pat)
5841 emit_insn (pat);
5842 return NULL_RTX;
5843 default:
5844 break;
5847 *expandedp = false;
5848 return NULL_RTX;
5851 static rtx
5852 spe_expand_predicate_builtin (icode, arglist, target)
5853 enum insn_code icode;
5854 tree arglist;
5855 rtx target;
5857 rtx pat, scratch, tmp;
5858 tree form = TREE_VALUE (arglist);
5859 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
5860 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5861 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5862 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5863 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5864 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5865 int form_int;
5866 enum rtx_code code;
5868 if (TREE_CODE (form) != INTEGER_CST)
5870 error ("argument 1 of __builtin_spe_predicate must be a constant");
5871 return const0_rtx;
5873 else
5874 form_int = TREE_INT_CST_LOW (form);
5876 if (mode0 != mode1)
5877 abort ();
5879 if (arg0 == error_mark_node || arg1 == error_mark_node)
5880 return const0_rtx;
5882 if (target == 0
5883 || GET_MODE (target) != SImode
5884 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
5885 target = gen_reg_rtx (SImode);
5887 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5888 op0 = copy_to_mode_reg (mode0, op0);
5889 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5890 op1 = copy_to_mode_reg (mode1, op1);
5892 scratch = gen_reg_rtx (CCmode);
5894 pat = GEN_FCN (icode) (scratch, op0, op1);
5895 if (! pat)
5896 return const0_rtx;
5897 emit_insn (pat);
5899 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
5900 _lower_. We use one compare, but look in different bits of the
5901 CR for each variant.
5903 There are 2 elements in each SPE simd type (upper/lower). The CR
5904 bits are set as follows:
5906 BIT0 | BIT 1 | BIT 2 | BIT 3
5907 U | L | (U | L) | (U & L)
5909 So, for an "all" relationship, BIT 3 would be set.
5910 For an "any" relationship, BIT 2 would be set. Etc.
5912 Following traditional nomenclature, these bits map to:
5914 BIT0 | BIT 1 | BIT 2 | BIT 3
5915 LT | GT | EQ | OV
5917 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
5920 switch (form_int)
5922 /* All variant. OV bit. */
5923 case 0:
5924 /* We need to get to the OV bit, which is the ORDERED bit. We
5925 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
5926 that's ugly and will trigger a validate_condition_mode abort.
5927 So let's just use another pattern. */
5928 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
5929 return target;
5930 /* Any variant. EQ bit. */
5931 case 1:
5932 code = EQ;
5933 break;
5934 /* Upper variant. LT bit. */
5935 case 2:
5936 code = LT;
5937 break;
5938 /* Lower variant. GT bit. */
5939 case 3:
5940 code = GT;
5941 break;
5942 default:
5943 error ("argument 1 of __builtin_spe_predicate is out of range");
5944 return const0_rtx;
5947 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
5948 emit_move_insn (target, tmp);
5950 return target;
5953 /* The evsel builtins look like this:
5955 e = __builtin_spe_evsel_OP (a, b, c, d);
5957 and work like this:
5959 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
5960 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
5963 static rtx
5964 spe_expand_evsel_builtin (icode, arglist, target)
5965 enum insn_code icode;
5966 tree arglist;
5967 rtx target;
5969 rtx pat, scratch;
5970 tree arg0 = TREE_VALUE (arglist);
5971 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5972 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5973 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
5974 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5975 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5976 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5977 rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
5978 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5979 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5981 if (mode0 != mode1)
5982 abort ();
5984 if (arg0 == error_mark_node || arg1 == error_mark_node
5985 || arg2 == error_mark_node || arg3 == error_mark_node)
5986 return const0_rtx;
5988 if (target == 0
5989 || GET_MODE (target) != mode0
5990 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
5991 target = gen_reg_rtx (mode0);
5993 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5994 op0 = copy_to_mode_reg (mode0, op0);
5995 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
5996 op1 = copy_to_mode_reg (mode0, op1);
5997 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
5998 op2 = copy_to_mode_reg (mode0, op2);
5999 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
6000 op3 = copy_to_mode_reg (mode0, op3);
6002 /* Generate the compare. */
6003 scratch = gen_reg_rtx (CCmode);
6004 pat = GEN_FCN (icode) (scratch, op0, op1);
6005 if (! pat)
6006 return const0_rtx;
6007 emit_insn (pat);
6009 if (mode0 == V2SImode)
6010 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
6011 else
6012 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
6014 return target;
6017 /* Expand an expression EXP that calls a built-in function,
6018 with result going to TARGET if that's convenient
6019 (and in mode MODE if that's convenient).
6020 SUBTARGET may be used as the target for computing one of EXP's operands.
6021 IGNORE is nonzero if the value is to be ignored. */
6023 static rtx
6024 rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
6025 tree exp;
6026 rtx target;
6027 rtx subtarget ATTRIBUTE_UNUSED;
6028 enum machine_mode mode ATTRIBUTE_UNUSED;
6029 int ignore ATTRIBUTE_UNUSED;
6031 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6032 tree arglist = TREE_OPERAND (exp, 1);
6033 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6034 struct builtin_description *d;
6035 size_t i;
6036 rtx ret;
6037 bool success;
6039 if (TARGET_ALTIVEC)
6041 ret = altivec_expand_builtin (exp, target, &success);
6043 if (success)
6044 return ret;
6046 if (TARGET_SPE)
6048 ret = spe_expand_builtin (exp, target, &success);
6050 if (success)
6051 return ret;
6054 if (TARGET_ALTIVEC || TARGET_SPE)
6056 /* Handle simple unary operations. */
6057 d = (struct builtin_description *) bdesc_1arg;
6058 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6059 if (d->code == fcode)
6060 return rs6000_expand_unop_builtin (d->icode, arglist, target);
6062 /* Handle simple binary operations. */
6063 d = (struct builtin_description *) bdesc_2arg;
6064 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
6065 if (d->code == fcode)
6066 return rs6000_expand_binop_builtin (d->icode, arglist, target);
6068 /* Handle simple ternary operations. */
6069 d = (struct builtin_description *) bdesc_3arg;
6070 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
6071 if (d->code == fcode)
6072 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
6075 abort ();
6076 return NULL_RTX;
6079 static void
6080 rs6000_init_builtins ()
6082 opaque_V2SI_type_node = copy_node (V2SI_type_node);
6083 opaque_V2SF_type_node = copy_node (V2SF_type_node);
6084 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
6086 if (TARGET_SPE)
6087 spe_init_builtins ();
6088 if (TARGET_ALTIVEC)
6089 altivec_init_builtins ();
6090 if (TARGET_ALTIVEC || TARGET_SPE)
6091 rs6000_common_init_builtins ();
6094 /* Search through a set of builtins and enable the mask bits.
6095 DESC is an array of builtins.
6096 SIZE is the total number of builtins.
6097 START is the builtin enum at which to start.
6098 END is the builtin enum at which to end. */
6099 static void
6100 enable_mask_for_builtins (desc, size, start, end)
6101 struct builtin_description *desc;
6102 int size;
6103 enum rs6000_builtins start, end;
6105 int i;
6107 for (i = 0; i < size; ++i)
6108 if (desc[i].code == start)
6109 break;
6111 if (i == size)
6112 return;
6114 for (; i < size; ++i)
6116 /* Flip all the bits on. */
6117 desc[i].mask = target_flags;
6118 if (desc[i].code == end)
6119 break;
6123 static void
6124 spe_init_builtins ()
6126 tree endlink = void_list_node;
6127 tree puint_type_node = build_pointer_type (unsigned_type_node);
6128 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
6129 struct builtin_description *d;
6130 size_t i;
6132 tree v2si_ftype_4_v2si
6133 = build_function_type
6134 (opaque_V2SI_type_node,
6135 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6136 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6137 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6138 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6139 endlink)))));
6141 tree v2sf_ftype_4_v2sf
6142 = build_function_type
6143 (opaque_V2SF_type_node,
6144 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6145 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6146 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6147 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6148 endlink)))));
6150 tree int_ftype_int_v2si_v2si
6151 = build_function_type
6152 (integer_type_node,
6153 tree_cons (NULL_TREE, integer_type_node,
6154 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6155 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6156 endlink))));
6158 tree int_ftype_int_v2sf_v2sf
6159 = build_function_type
6160 (integer_type_node,
6161 tree_cons (NULL_TREE, integer_type_node,
6162 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6163 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6164 endlink))));
6166 tree void_ftype_v2si_puint_int
6167 = build_function_type (void_type_node,
6168 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6169 tree_cons (NULL_TREE, puint_type_node,
6170 tree_cons (NULL_TREE,
6171 integer_type_node,
6172 endlink))));
6174 tree void_ftype_v2si_puint_char
6175 = build_function_type (void_type_node,
6176 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6177 tree_cons (NULL_TREE, puint_type_node,
6178 tree_cons (NULL_TREE,
6179 char_type_node,
6180 endlink))));
6182 tree void_ftype_v2si_pv2si_int
6183 = build_function_type (void_type_node,
6184 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6185 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6186 tree_cons (NULL_TREE,
6187 integer_type_node,
6188 endlink))));
6190 tree void_ftype_v2si_pv2si_char
6191 = build_function_type (void_type_node,
6192 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6193 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6194 tree_cons (NULL_TREE,
6195 char_type_node,
6196 endlink))));
6198 tree void_ftype_int
6199 = build_function_type (void_type_node,
6200 tree_cons (NULL_TREE, integer_type_node, endlink));
6202 tree int_ftype_void
6203 = build_function_type (integer_type_node, endlink);
6205 tree v2si_ftype_pv2si_int
6206 = build_function_type (opaque_V2SI_type_node,
6207 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6208 tree_cons (NULL_TREE, integer_type_node,
6209 endlink)));
6211 tree v2si_ftype_puint_int
6212 = build_function_type (opaque_V2SI_type_node,
6213 tree_cons (NULL_TREE, puint_type_node,
6214 tree_cons (NULL_TREE, integer_type_node,
6215 endlink)));
6217 tree v2si_ftype_pushort_int
6218 = build_function_type (opaque_V2SI_type_node,
6219 tree_cons (NULL_TREE, pushort_type_node,
6220 tree_cons (NULL_TREE, integer_type_node,
6221 endlink)));
6223 tree v2si_ftype_signed_char
6224 = build_function_type (opaque_V2SI_type_node,
6225 tree_cons (NULL_TREE, signed_char_type_node,
6226 endlink));
6228 /* The initialization of the simple binary and unary builtins is
6229 done in rs6000_common_init_builtins, but we have to enable the
6230 mask bits here manually because we have run out of `target_flags'
6231 bits. We really need to redesign this mask business. */
6233 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
6234 ARRAY_SIZE (bdesc_2arg),
6235 SPE_BUILTIN_EVADDW,
6236 SPE_BUILTIN_EVXOR);
6237 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
6238 ARRAY_SIZE (bdesc_1arg),
6239 SPE_BUILTIN_EVABS,
6240 SPE_BUILTIN_EVSUBFUSIAAW);
6241 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
6242 ARRAY_SIZE (bdesc_spe_predicates),
6243 SPE_BUILTIN_EVCMPEQ,
6244 SPE_BUILTIN_EVFSTSTLT);
6245 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
6246 ARRAY_SIZE (bdesc_spe_evsel),
6247 SPE_BUILTIN_EVSEL_CMPGTS,
6248 SPE_BUILTIN_EVSEL_FSTSTEQ);
6250 (*lang_hooks.decls.pushdecl)
6251 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
6252 opaque_V2SI_type_node));
6254 /* Initialize irregular SPE builtins. */
6256 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
6257 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
6258 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
6259 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
6260 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
6261 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
6262 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
6263 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
6264 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
6265 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
6266 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
6267 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
6268 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
6269 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
6270 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
6271 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
6272 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
6273 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
6275 /* Loads. */
6276 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
6277 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
6278 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
6279 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
6280 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
6281 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
6282 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
6283 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
6284 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
6285 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
6286 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
6287 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
6288 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
6289 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
6290 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
6291 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
6292 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
6293 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
6294 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
6295 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
6296 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
6297 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
6299 /* Predicates. */
6300 d = (struct builtin_description *) bdesc_spe_predicates;
6301 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
6303 tree type;
6305 switch (insn_data[d->icode].operand[1].mode)
6307 case V2SImode:
6308 type = int_ftype_int_v2si_v2si;
6309 break;
6310 case V2SFmode:
6311 type = int_ftype_int_v2sf_v2sf;
6312 break;
6313 default:
6314 abort ();
6317 def_builtin (d->mask, d->name, type, d->code);
6320 /* Evsel predicates. */
6321 d = (struct builtin_description *) bdesc_spe_evsel;
6322 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
6324 tree type;
6326 switch (insn_data[d->icode].operand[1].mode)
6328 case V2SImode:
6329 type = v2si_ftype_4_v2si;
6330 break;
6331 case V2SFmode:
6332 type = v2sf_ftype_4_v2sf;
6333 break;
6334 default:
6335 abort ();
6338 def_builtin (d->mask, d->name, type, d->code);
6342 static void
6343 altivec_init_builtins ()
6345 struct builtin_description *d;
6346 struct builtin_description_predicates *dp;
6347 size_t i;
6348 tree pfloat_type_node = build_pointer_type (float_type_node);
6349 tree pint_type_node = build_pointer_type (integer_type_node);
6350 tree pshort_type_node = build_pointer_type (short_integer_type_node);
6351 tree pchar_type_node = build_pointer_type (char_type_node);
6353 tree pvoid_type_node = build_pointer_type (void_type_node);
6355 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
6356 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
6357 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
6358 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
6360 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
6362 tree int_ftype_int_v4si_v4si
6363 = build_function_type_list (integer_type_node,
6364 integer_type_node, V4SI_type_node,
6365 V4SI_type_node, NULL_TREE);
6366 tree v4sf_ftype_pcfloat
6367 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
6368 tree void_ftype_pfloat_v4sf
6369 = build_function_type_list (void_type_node,
6370 pfloat_type_node, V4SF_type_node, NULL_TREE);
6371 tree v4si_ftype_pcint
6372 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
6373 tree void_ftype_pint_v4si
6374 = build_function_type_list (void_type_node,
6375 pint_type_node, V4SI_type_node, NULL_TREE);
6376 tree v8hi_ftype_pcshort
6377 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
6378 tree void_ftype_pshort_v8hi
6379 = build_function_type_list (void_type_node,
6380 pshort_type_node, V8HI_type_node, NULL_TREE);
6381 tree v16qi_ftype_pcchar
6382 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
6383 tree void_ftype_pchar_v16qi
6384 = build_function_type_list (void_type_node,
6385 pchar_type_node, V16QI_type_node, NULL_TREE);
6386 tree void_ftype_v4si
6387 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
6388 tree v8hi_ftype_void
6389 = build_function_type (V8HI_type_node, void_list_node);
6390 tree void_ftype_void
6391 = build_function_type (void_type_node, void_list_node);
6392 tree void_ftype_qi
6393 = build_function_type_list (void_type_node, char_type_node, NULL_TREE);
6395 tree v16qi_ftype_int_pcvoid
6396 = build_function_type_list (V16QI_type_node,
6397 integer_type_node, pcvoid_type_node, NULL_TREE);
6398 tree v8hi_ftype_int_pcvoid
6399 = build_function_type_list (V8HI_type_node,
6400 integer_type_node, pcvoid_type_node, NULL_TREE);
6401 tree v4si_ftype_int_pcvoid
6402 = build_function_type_list (V4SI_type_node,
6403 integer_type_node, pcvoid_type_node, NULL_TREE);
6405 tree void_ftype_v4si_int_pvoid
6406 = build_function_type_list (void_type_node,
6407 V4SI_type_node, integer_type_node,
6408 pvoid_type_node, NULL_TREE);
6409 tree void_ftype_v16qi_int_pvoid
6410 = build_function_type_list (void_type_node,
6411 V16QI_type_node, integer_type_node,
6412 pvoid_type_node, NULL_TREE);
6413 tree void_ftype_v8hi_int_pvoid
6414 = build_function_type_list (void_type_node,
6415 V8HI_type_node, integer_type_node,
6416 pvoid_type_node, NULL_TREE);
6417 tree int_ftype_int_v8hi_v8hi
6418 = build_function_type_list (integer_type_node,
6419 integer_type_node, V8HI_type_node,
6420 V8HI_type_node, NULL_TREE);
6421 tree int_ftype_int_v16qi_v16qi
6422 = build_function_type_list (integer_type_node,
6423 integer_type_node, V16QI_type_node,
6424 V16QI_type_node, NULL_TREE);
6425 tree int_ftype_int_v4sf_v4sf
6426 = build_function_type_list (integer_type_node,
6427 integer_type_node, V4SF_type_node,
6428 V4SF_type_node, NULL_TREE);
6429 tree v4si_ftype_v4si
6430 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
6431 tree v8hi_ftype_v8hi
6432 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
6433 tree v16qi_ftype_v16qi
6434 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
6435 tree v4sf_ftype_v4sf
6436 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
6437 tree void_ftype_pcvoid_int_char
6438 = build_function_type_list (void_type_node,
6439 pcvoid_type_node, integer_type_node,
6440 char_type_node, NULL_TREE);
6442 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
6443 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
6444 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
6445 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
6446 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
6447 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
6448 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
6449 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
6450 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
6451 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
6452 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
6453 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
6454 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
6455 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
6456 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
6457 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
6458 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
6459 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
6460 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
6461 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
6462 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVSL);
6463 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVSR);
6464 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEBX);
6465 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEHX);
6466 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEWX);
6467 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVXL);
6468 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVX);
6469 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
6470 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
6471 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
6472 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
6473 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);
6475 /* Add the DST variants. */
6476 d = (struct builtin_description *) bdesc_dst;
6477 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
6478 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_char, d->code);
6480 /* Initialize the predicates. */
6481 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
6482 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
6484 enum machine_mode mode1;
6485 tree type;
6487 mode1 = insn_data[dp->icode].operand[1].mode;
6489 switch (mode1)
6491 case V4SImode:
6492 type = int_ftype_int_v4si_v4si;
6493 break;
6494 case V8HImode:
6495 type = int_ftype_int_v8hi_v8hi;
6496 break;
6497 case V16QImode:
6498 type = int_ftype_int_v16qi_v16qi;
6499 break;
6500 case V4SFmode:
6501 type = int_ftype_int_v4sf_v4sf;
6502 break;
6503 default:
6504 abort ();
6507 def_builtin (dp->mask, dp->name, type, dp->code);
6510 /* Initialize the abs* operators. */
6511 d = (struct builtin_description *) bdesc_abs;
6512 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
6514 enum machine_mode mode0;
6515 tree type;
6517 mode0 = insn_data[d->icode].operand[0].mode;
6519 switch (mode0)
6521 case V4SImode:
6522 type = v4si_ftype_v4si;
6523 break;
6524 case V8HImode:
6525 type = v8hi_ftype_v8hi;
6526 break;
6527 case V16QImode:
6528 type = v16qi_ftype_v16qi;
6529 break;
6530 case V4SFmode:
6531 type = v4sf_ftype_v4sf;
6532 break;
6533 default:
6534 abort ();
6537 def_builtin (d->mask, d->name, type, d->code);
6541 static void
6542 rs6000_common_init_builtins ()
6544 struct builtin_description *d;
6545 size_t i;
6547 tree v4sf_ftype_v4sf_v4sf_v16qi
6548 = build_function_type_list (V4SF_type_node,
6549 V4SF_type_node, V4SF_type_node,
6550 V16QI_type_node, NULL_TREE);
6551 tree v4si_ftype_v4si_v4si_v16qi
6552 = build_function_type_list (V4SI_type_node,
6553 V4SI_type_node, V4SI_type_node,
6554 V16QI_type_node, NULL_TREE);
6555 tree v8hi_ftype_v8hi_v8hi_v16qi
6556 = build_function_type_list (V8HI_type_node,
6557 V8HI_type_node, V8HI_type_node,
6558 V16QI_type_node, NULL_TREE);
6559 tree v16qi_ftype_v16qi_v16qi_v16qi
6560 = build_function_type_list (V16QI_type_node,
6561 V16QI_type_node, V16QI_type_node,
6562 V16QI_type_node, NULL_TREE);
6563 tree v4si_ftype_char
6564 = build_function_type_list (V4SI_type_node, char_type_node, NULL_TREE);
6565 tree v8hi_ftype_char
6566 = build_function_type_list (V8HI_type_node, char_type_node, NULL_TREE);
6567 tree v16qi_ftype_char
6568 = build_function_type_list (V16QI_type_node, char_type_node, NULL_TREE);
6569 tree v8hi_ftype_v16qi
6570 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
6571 tree v4sf_ftype_v4sf
6572 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
6574 tree v2si_ftype_v2si_v2si
6575 = build_function_type_list (opaque_V2SI_type_node,
6576 opaque_V2SI_type_node,
6577 opaque_V2SI_type_node, NULL_TREE);
6579 tree v2sf_ftype_v2sf_v2sf
6580 = build_function_type_list (opaque_V2SF_type_node,
6581 opaque_V2SF_type_node,
6582 opaque_V2SF_type_node, NULL_TREE);
6584 tree v2si_ftype_int_int
6585 = build_function_type_list (opaque_V2SI_type_node,
6586 integer_type_node, integer_type_node,
6587 NULL_TREE);
6589 tree v2si_ftype_v2si
6590 = build_function_type_list (opaque_V2SI_type_node,
6591 opaque_V2SI_type_node, NULL_TREE);
6593 tree v2sf_ftype_v2sf
6594 = build_function_type_list (opaque_V2SF_type_node,
6595 opaque_V2SF_type_node, NULL_TREE);
6597 tree v2sf_ftype_v2si
6598 = build_function_type_list (opaque_V2SF_type_node,
6599 opaque_V2SI_type_node, NULL_TREE);
6601 tree v2si_ftype_v2sf
6602 = build_function_type_list (opaque_V2SI_type_node,
6603 opaque_V2SF_type_node, NULL_TREE);
6605 tree v2si_ftype_v2si_char
6606 = build_function_type_list (opaque_V2SI_type_node,
6607 opaque_V2SI_type_node,
6608 char_type_node, NULL_TREE);
6610 tree v2si_ftype_int_char
6611 = build_function_type_list (opaque_V2SI_type_node,
6612 integer_type_node, char_type_node, NULL_TREE);
6614 tree v2si_ftype_char
6615 = build_function_type_list (opaque_V2SI_type_node,
6616 char_type_node, NULL_TREE);
6618 tree int_ftype_int_int
6619 = build_function_type_list (integer_type_node,
6620 integer_type_node, integer_type_node,
6621 NULL_TREE);
6623 tree v4si_ftype_v4si_v4si
6624 = build_function_type_list (V4SI_type_node,
6625 V4SI_type_node, V4SI_type_node, NULL_TREE);
6626 tree v4sf_ftype_v4si_char
6627 = build_function_type_list (V4SF_type_node,
6628 V4SI_type_node, char_type_node, NULL_TREE);
6629 tree v4si_ftype_v4sf_char
6630 = build_function_type_list (V4SI_type_node,
6631 V4SF_type_node, char_type_node, NULL_TREE);
6632 tree v4si_ftype_v4si_char
6633 = build_function_type_list (V4SI_type_node,
6634 V4SI_type_node, char_type_node, NULL_TREE);
6635 tree v8hi_ftype_v8hi_char
6636 = build_function_type_list (V8HI_type_node,
6637 V8HI_type_node, char_type_node, NULL_TREE);
6638 tree v16qi_ftype_v16qi_char
6639 = build_function_type_list (V16QI_type_node,
6640 V16QI_type_node, char_type_node, NULL_TREE);
6641 tree v16qi_ftype_v16qi_v16qi_char
6642 = build_function_type_list (V16QI_type_node,
6643 V16QI_type_node, V16QI_type_node,
6644 char_type_node, NULL_TREE);
6645 tree v8hi_ftype_v8hi_v8hi_char
6646 = build_function_type_list (V8HI_type_node,
6647 V8HI_type_node, V8HI_type_node,
6648 char_type_node, NULL_TREE);
6649 tree v4si_ftype_v4si_v4si_char
6650 = build_function_type_list (V4SI_type_node,
6651 V4SI_type_node, V4SI_type_node,
6652 char_type_node, NULL_TREE);
6653 tree v4sf_ftype_v4sf_v4sf_char
6654 = build_function_type_list (V4SF_type_node,
6655 V4SF_type_node, V4SF_type_node,
6656 char_type_node, NULL_TREE);
6657 tree v4sf_ftype_v4sf_v4sf
6658 = build_function_type_list (V4SF_type_node,
6659 V4SF_type_node, V4SF_type_node, NULL_TREE);
6660 tree v4sf_ftype_v4sf_v4sf_v4si
6661 = build_function_type_list (V4SF_type_node,
6662 V4SF_type_node, V4SF_type_node,
6663 V4SI_type_node, NULL_TREE);
6664 tree v4sf_ftype_v4sf_v4sf_v4sf
6665 = build_function_type_list (V4SF_type_node,
6666 V4SF_type_node, V4SF_type_node,
6667 V4SF_type_node, NULL_TREE);
6668 tree v4si_ftype_v4si_v4si_v4si
6669 = build_function_type_list (V4SI_type_node,
6670 V4SI_type_node, V4SI_type_node,
6671 V4SI_type_node, NULL_TREE);
6672 tree v8hi_ftype_v8hi_v8hi
6673 = build_function_type_list (V8HI_type_node,
6674 V8HI_type_node, V8HI_type_node, NULL_TREE);
6675 tree v8hi_ftype_v8hi_v8hi_v8hi
6676 = build_function_type_list (V8HI_type_node,
6677 V8HI_type_node, V8HI_type_node,
6678 V8HI_type_node, NULL_TREE);
6679 tree v4si_ftype_v8hi_v8hi_v4si
6680 = build_function_type_list (V4SI_type_node,
6681 V8HI_type_node, V8HI_type_node,
6682 V4SI_type_node, NULL_TREE);
6683 tree v4si_ftype_v16qi_v16qi_v4si
6684 = build_function_type_list (V4SI_type_node,
6685 V16QI_type_node, V16QI_type_node,
6686 V4SI_type_node, NULL_TREE);
6687 tree v16qi_ftype_v16qi_v16qi
6688 = build_function_type_list (V16QI_type_node,
6689 V16QI_type_node, V16QI_type_node, NULL_TREE);
6690 tree v4si_ftype_v4sf_v4sf
6691 = build_function_type_list (V4SI_type_node,
6692 V4SF_type_node, V4SF_type_node, NULL_TREE);
6693 tree v8hi_ftype_v16qi_v16qi
6694 = build_function_type_list (V8HI_type_node,
6695 V16QI_type_node, V16QI_type_node, NULL_TREE);
6696 tree v4si_ftype_v8hi_v8hi
6697 = build_function_type_list (V4SI_type_node,
6698 V8HI_type_node, V8HI_type_node, NULL_TREE);
6699 tree v8hi_ftype_v4si_v4si
6700 = build_function_type_list (V8HI_type_node,
6701 V4SI_type_node, V4SI_type_node, NULL_TREE);
6702 tree v16qi_ftype_v8hi_v8hi
6703 = build_function_type_list (V16QI_type_node,
6704 V8HI_type_node, V8HI_type_node, NULL_TREE);
6705 tree v4si_ftype_v16qi_v4si
6706 = build_function_type_list (V4SI_type_node,
6707 V16QI_type_node, V4SI_type_node, NULL_TREE);
6708 tree v4si_ftype_v16qi_v16qi
6709 = build_function_type_list (V4SI_type_node,
6710 V16QI_type_node, V16QI_type_node, NULL_TREE);
6711 tree v4si_ftype_v8hi_v4si
6712 = build_function_type_list (V4SI_type_node,
6713 V8HI_type_node, V4SI_type_node, NULL_TREE);
6714 tree v4si_ftype_v8hi
6715 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
6716 tree int_ftype_v4si_v4si
6717 = build_function_type_list (integer_type_node,
6718 V4SI_type_node, V4SI_type_node, NULL_TREE);
6719 tree int_ftype_v4sf_v4sf
6720 = build_function_type_list (integer_type_node,
6721 V4SF_type_node, V4SF_type_node, NULL_TREE);
6722 tree int_ftype_v16qi_v16qi
6723 = build_function_type_list (integer_type_node,
6724 V16QI_type_node, V16QI_type_node, NULL_TREE);
6725 tree int_ftype_v8hi_v8hi
6726 = build_function_type_list (integer_type_node,
6727 V8HI_type_node, V8HI_type_node, NULL_TREE);
6729 /* Add the simple ternary operators. */
6730 d = (struct builtin_description *) bdesc_3arg;
6731 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
6734 enum machine_mode mode0, mode1, mode2, mode3;
6735 tree type;
6737 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6738 continue;
6740 mode0 = insn_data[d->icode].operand[0].mode;
6741 mode1 = insn_data[d->icode].operand[1].mode;
6742 mode2 = insn_data[d->icode].operand[2].mode;
6743 mode3 = insn_data[d->icode].operand[3].mode;
6745 /* When all four are of the same mode. */
6746 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
6748 switch (mode0)
6750 case V4SImode:
6751 type = v4si_ftype_v4si_v4si_v4si;
6752 break;
6753 case V4SFmode:
6754 type = v4sf_ftype_v4sf_v4sf_v4sf;
6755 break;
6756 case V8HImode:
6757 type = v8hi_ftype_v8hi_v8hi_v8hi;
6758 break;
6759 case V16QImode:
6760 type = v16qi_ftype_v16qi_v16qi_v16qi;
6761 break;
6762 default:
6763 abort();
6766 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
6768 switch (mode0)
6770 case V4SImode:
6771 type = v4si_ftype_v4si_v4si_v16qi;
6772 break;
6773 case V4SFmode:
6774 type = v4sf_ftype_v4sf_v4sf_v16qi;
6775 break;
6776 case V8HImode:
6777 type = v8hi_ftype_v8hi_v8hi_v16qi;
6778 break;
6779 case V16QImode:
6780 type = v16qi_ftype_v16qi_v16qi_v16qi;
6781 break;
6782 default:
6783 abort();
6786 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
6787 && mode3 == V4SImode)
6788 type = v4si_ftype_v16qi_v16qi_v4si;
6789 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
6790 && mode3 == V4SImode)
6791 type = v4si_ftype_v8hi_v8hi_v4si;
6792 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
6793 && mode3 == V4SImode)
6794 type = v4sf_ftype_v4sf_v4sf_v4si;
6796 /* vchar, vchar, vchar, 4 bit literal. */
6797 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
6798 && mode3 == QImode)
6799 type = v16qi_ftype_v16qi_v16qi_char;
6801 /* vshort, vshort, vshort, 4 bit literal. */
6802 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
6803 && mode3 == QImode)
6804 type = v8hi_ftype_v8hi_v8hi_char;
6806 /* vint, vint, vint, 4 bit literal. */
6807 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
6808 && mode3 == QImode)
6809 type = v4si_ftype_v4si_v4si_char;
6811 /* vfloat, vfloat, vfloat, 4 bit literal. */
6812 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
6813 && mode3 == QImode)
6814 type = v4sf_ftype_v4sf_v4sf_char;
6816 else
6817 abort ();
6819 def_builtin (d->mask, d->name, type, d->code);
6822 /* Add the simple binary operators. */
6823 d = (struct builtin_description *) bdesc_2arg;
6824 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
6826 enum machine_mode mode0, mode1, mode2;
6827 tree type;
6829 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6830 continue;
6832 mode0 = insn_data[d->icode].operand[0].mode;
6833 mode1 = insn_data[d->icode].operand[1].mode;
6834 mode2 = insn_data[d->icode].operand[2].mode;
6836 /* When all three operands are of the same mode. */
6837 if (mode0 == mode1 && mode1 == mode2)
6839 switch (mode0)
6841 case V4SFmode:
6842 type = v4sf_ftype_v4sf_v4sf;
6843 break;
6844 case V4SImode:
6845 type = v4si_ftype_v4si_v4si;
6846 break;
6847 case V16QImode:
6848 type = v16qi_ftype_v16qi_v16qi;
6849 break;
6850 case V8HImode:
6851 type = v8hi_ftype_v8hi_v8hi;
6852 break;
6853 case V2SImode:
6854 type = v2si_ftype_v2si_v2si;
6855 break;
6856 case V2SFmode:
6857 type = v2sf_ftype_v2sf_v2sf;
6858 break;
6859 case SImode:
6860 type = int_ftype_int_int;
6861 break;
6862 default:
6863 abort ();
6867 /* A few other combos we really don't want to do manually. */
6869 /* vint, vfloat, vfloat. */
6870 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
6871 type = v4si_ftype_v4sf_v4sf;
6873 /* vshort, vchar, vchar. */
6874 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
6875 type = v8hi_ftype_v16qi_v16qi;
6877 /* vint, vshort, vshort. */
6878 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
6879 type = v4si_ftype_v8hi_v8hi;
6881 /* vshort, vint, vint. */
6882 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
6883 type = v8hi_ftype_v4si_v4si;
6885 /* vchar, vshort, vshort. */
6886 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
6887 type = v16qi_ftype_v8hi_v8hi;
6889 /* vint, vchar, vint. */
6890 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
6891 type = v4si_ftype_v16qi_v4si;
6893 /* vint, vchar, vchar. */
6894 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
6895 type = v4si_ftype_v16qi_v16qi;
6897 /* vint, vshort, vint. */
6898 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
6899 type = v4si_ftype_v8hi_v4si;
6901 /* vint, vint, 5 bit literal. */
6902 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
6903 type = v4si_ftype_v4si_char;
6905 /* vshort, vshort, 5 bit literal. */
6906 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
6907 type = v8hi_ftype_v8hi_char;
6909 /* vchar, vchar, 5 bit literal. */
6910 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
6911 type = v16qi_ftype_v16qi_char;
6913 /* vfloat, vint, 5 bit literal. */
6914 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
6915 type = v4sf_ftype_v4si_char;
6917 /* vint, vfloat, 5 bit literal. */
6918 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
6919 type = v4si_ftype_v4sf_char;
6921 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
6922 type = v2si_ftype_int_int;
6924 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
6925 type = v2si_ftype_v2si_char;
6927 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
6928 type = v2si_ftype_int_char;
6930 /* int, x, x. */
6931 else if (mode0 == SImode)
6933 switch (mode1)
6935 case V4SImode:
6936 type = int_ftype_v4si_v4si;
6937 break;
6938 case V4SFmode:
6939 type = int_ftype_v4sf_v4sf;
6940 break;
6941 case V16QImode:
6942 type = int_ftype_v16qi_v16qi;
6943 break;
6944 case V8HImode:
6945 type = int_ftype_v8hi_v8hi;
6946 break;
6947 default:
6948 abort ();
6952 else
6953 abort ();
6955 def_builtin (d->mask, d->name, type, d->code);
6958 /* Add the simple unary operators. */
6959 d = (struct builtin_description *) bdesc_1arg;
6960 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6962 enum machine_mode mode0, mode1;
6963 tree type;
6965 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6966 continue;
6968 mode0 = insn_data[d->icode].operand[0].mode;
6969 mode1 = insn_data[d->icode].operand[1].mode;
6971 if (mode0 == V4SImode && mode1 == QImode)
6972 type = v4si_ftype_char;
6973 else if (mode0 == V8HImode && mode1 == QImode)
6974 type = v8hi_ftype_char;
6975 else if (mode0 == V16QImode && mode1 == QImode)
6976 type = v16qi_ftype_char;
6977 else if (mode0 == V4SFmode && mode1 == V4SFmode)
6978 type = v4sf_ftype_v4sf;
6979 else if (mode0 == V8HImode && mode1 == V16QImode)
6980 type = v8hi_ftype_v16qi;
6981 else if (mode0 == V4SImode && mode1 == V8HImode)
6982 type = v4si_ftype_v8hi;
6983 else if (mode0 == V2SImode && mode1 == V2SImode)
6984 type = v2si_ftype_v2si;
6985 else if (mode0 == V2SFmode && mode1 == V2SFmode)
6986 type = v2sf_ftype_v2sf;
6987 else if (mode0 == V2SFmode && mode1 == V2SImode)
6988 type = v2sf_ftype_v2si;
6989 else if (mode0 == V2SImode && mode1 == V2SFmode)
6990 type = v2si_ftype_v2sf;
6991 else if (mode0 == V2SImode && mode1 == QImode)
6992 type = v2si_ftype_char;
6993 else
6994 abort ();
6996 def_builtin (d->mask, d->name, type, d->code);
7001 /* Expand a block move operation, and return 1 if successful. Return 0
7002 if we should let the compiler generate normal code.
7004 operands[0] is the destination
7005 operands[1] is the source
7006 operands[2] is the length
7007 operands[3] is the alignment */
7009 #define MAX_MOVE_REG 4
7012 expand_block_move (operands)
7013 rtx operands[];
7015 rtx orig_dest = operands[0];
7016 rtx orig_src = operands[1];
7017 rtx bytes_rtx = operands[2];
7018 rtx align_rtx = operands[3];
7019 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
7020 int align;
7021 int bytes;
7022 int offset;
7023 int move_bytes;
7024 rtx stores[MAX_MOVE_REG];
7025 int num_reg = 0;
7027 /* If this is not a fixed size move, just call memcpy */
7028 if (! constp)
7029 return 0;
7031 /* If this is not a fixed size alignment, abort */
7032 if (GET_CODE (align_rtx) != CONST_INT)
7033 abort ();
7034 align = INTVAL (align_rtx);
7036 /* Anything to move? */
7037 bytes = INTVAL (bytes_rtx);
7038 if (bytes <= 0)
7039 return 1;
7041 /* store_one_arg depends on expand_block_move to handle at least the size of
7042 reg_parm_stack_space. */
7043 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
7044 return 0;
7046 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
7048 union {
7049 rtx (*movstrsi) PARAMS ((rtx, rtx, rtx, rtx));
7050 rtx (*mov) PARAMS ((rtx, rtx));
7051 } gen_func;
7052 enum machine_mode mode = BLKmode;
7053 rtx src, dest;
7055 if (TARGET_STRING
7056 && bytes > 24 /* move up to 32 bytes at a time */
7057 && ! fixed_regs[5]
7058 && ! fixed_regs[6]
7059 && ! fixed_regs[7]
7060 && ! fixed_regs[8]
7061 && ! fixed_regs[9]
7062 && ! fixed_regs[10]
7063 && ! fixed_regs[11]
7064 && ! fixed_regs[12])
7066 move_bytes = (bytes > 32) ? 32 : bytes;
7067 gen_func.movstrsi = gen_movstrsi_8reg;
7069 else if (TARGET_STRING
7070 && bytes > 16 /* move up to 24 bytes at a time */
7071 && ! fixed_regs[5]
7072 && ! fixed_regs[6]
7073 && ! fixed_regs[7]
7074 && ! fixed_regs[8]
7075 && ! fixed_regs[9]
7076 && ! fixed_regs[10])
7078 move_bytes = (bytes > 24) ? 24 : bytes;
7079 gen_func.movstrsi = gen_movstrsi_6reg;
7081 else if (TARGET_STRING
7082 && bytes > 8 /* move up to 16 bytes at a time */
7083 && ! fixed_regs[5]
7084 && ! fixed_regs[6]
7085 && ! fixed_regs[7]
7086 && ! fixed_regs[8])
7088 move_bytes = (bytes > 16) ? 16 : bytes;
7089 gen_func.movstrsi = gen_movstrsi_4reg;
7091 else if (bytes >= 8 && TARGET_POWERPC64
7092 /* 64-bit loads and stores require word-aligned
7093 displacements. */
7094 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
7096 move_bytes = 8;
7097 mode = DImode;
7098 gen_func.mov = gen_movdi;
7100 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
7101 { /* move up to 8 bytes at a time */
7102 move_bytes = (bytes > 8) ? 8 : bytes;
7103 gen_func.movstrsi = gen_movstrsi_2reg;
7105 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
7106 { /* move 4 bytes */
7107 move_bytes = 4;
7108 mode = SImode;
7109 gen_func.mov = gen_movsi;
7111 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
7112 { /* move 2 bytes */
7113 move_bytes = 2;
7114 mode = HImode;
7115 gen_func.mov = gen_movhi;
7117 else if (TARGET_STRING && bytes > 1)
7118 { /* move up to 4 bytes at a time */
7119 move_bytes = (bytes > 4) ? 4 : bytes;
7120 gen_func.movstrsi = gen_movstrsi_1reg;
7122 else /* move 1 byte at a time */
7124 move_bytes = 1;
7125 mode = QImode;
7126 gen_func.mov = gen_movqi;
7129 src = adjust_address (orig_src, mode, offset);
7130 dest = adjust_address (orig_dest, mode, offset);
7132 if (mode != BLKmode)
7134 rtx tmp_reg = gen_reg_rtx (mode);
7136 emit_insn ((*gen_func.mov) (tmp_reg, src));
7137 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
7140 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
7142 int i;
7143 for (i = 0; i < num_reg; i++)
7144 emit_insn (stores[i]);
7145 num_reg = 0;
7148 if (mode == BLKmode)
7150 /* Move the address into scratch registers. The movstrsi
7151 patterns require zero offset. */
7152 if (!REG_P (XEXP (src, 0)))
7154 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
7155 src = replace_equiv_address (src, src_reg);
7157 set_mem_size (src, GEN_INT (move_bytes));
7159 if (!REG_P (XEXP (dest, 0)))
7161 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
7162 dest = replace_equiv_address (dest, dest_reg);
7164 set_mem_size (dest, GEN_INT (move_bytes));
7166 emit_insn ((*gen_func.movstrsi) (dest, src,
7167 GEN_INT (move_bytes & 31),
7168 align_rtx));
7172 return 1;
7176 /* Return 1 if OP is a load multiple operation. It is known to be a
7177 PARALLEL and the first section will be tested. */
7180 load_multiple_operation (op, mode)
7181 rtx op;
7182 enum machine_mode mode ATTRIBUTE_UNUSED;
7184 int count = XVECLEN (op, 0);
7185 unsigned int dest_regno;
7186 rtx src_addr;
7187 int i;
7189 /* Perform a quick check so we don't blow up below. */
7190 if (count <= 1
7191 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7192 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7193 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
7194 return 0;
7196 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7197 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
7199 for (i = 1; i < count; i++)
7201 rtx elt = XVECEXP (op, 0, i);
7203 if (GET_CODE (elt) != SET
7204 || GET_CODE (SET_DEST (elt)) != REG
7205 || GET_MODE (SET_DEST (elt)) != SImode
7206 || REGNO (SET_DEST (elt)) != dest_regno + i
7207 || GET_CODE (SET_SRC (elt)) != MEM
7208 || GET_MODE (SET_SRC (elt)) != SImode
7209 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
7210 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
7211 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
7212 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
7213 return 0;
7216 return 1;
7219 /* Similar, but tests for store multiple. Here, the second vector element
7220 is a CLOBBER. It will be tested later. */
7223 store_multiple_operation (op, mode)
7224 rtx op;
7225 enum machine_mode mode ATTRIBUTE_UNUSED;
7227 int count = XVECLEN (op, 0) - 1;
7228 unsigned int src_regno;
7229 rtx dest_addr;
7230 int i;
7232 /* Perform a quick check so we don't blow up below. */
7233 if (count <= 1
7234 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7235 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
7236 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
7237 return 0;
7239 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7240 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
7242 for (i = 1; i < count; i++)
7244 rtx elt = XVECEXP (op, 0, i + 1);
7246 if (GET_CODE (elt) != SET
7247 || GET_CODE (SET_SRC (elt)) != REG
7248 || GET_MODE (SET_SRC (elt)) != SImode
7249 || REGNO (SET_SRC (elt)) != src_regno + i
7250 || GET_CODE (SET_DEST (elt)) != MEM
7251 || GET_MODE (SET_DEST (elt)) != SImode
7252 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
7253 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
7254 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
7255 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
7256 return 0;
7259 return 1;
7262 /* Return a string to perform a load_multiple operation.
7263 operands[0] is the vector.
7264 operands[1] is the source address.
7265 operands[2] is the first destination register. */
7267 const char *
7268 rs6000_output_load_multiple (operands)
7269 rtx operands[3];
7271 /* We have to handle the case where the pseudo used to contain the address
7272 is assigned to one of the output registers. */
7273 int i, j;
7274 int words = XVECLEN (operands[0], 0);
7275 rtx xop[10];
7277 if (XVECLEN (operands[0], 0) == 1)
7278 return "{l|lwz} %2,0(%1)";
7280 for (i = 0; i < words; i++)
7281 if (refers_to_regno_p (REGNO (operands[2]) + i,
7282 REGNO (operands[2]) + i + 1, operands[1], 0))
7284 if (i == words-1)
7286 xop[0] = GEN_INT (4 * (words-1));
7287 xop[1] = operands[1];
7288 xop[2] = operands[2];
7289 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
7290 return "";
7292 else if (i == 0)
7294 xop[0] = GEN_INT (4 * (words-1));
7295 xop[1] = operands[1];
7296 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
7297 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
7298 return "";
7300 else
7302 for (j = 0; j < words; j++)
7303 if (j != i)
7305 xop[0] = GEN_INT (j * 4);
7306 xop[1] = operands[1];
7307 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
7308 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
7310 xop[0] = GEN_INT (i * 4);
7311 xop[1] = operands[1];
7312 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
7313 return "";
7317 return "{lsi|lswi} %2,%1,%N0";
7320 /* Return 1 for a parallel vrsave operation. */
7323 vrsave_operation (op, mode)
7324 rtx op;
7325 enum machine_mode mode ATTRIBUTE_UNUSED;
7327 int count = XVECLEN (op, 0);
7328 unsigned int dest_regno, src_regno;
7329 int i;
7331 if (count <= 1
7332 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7333 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7334 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
7335 return 0;
7337 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7338 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7340 if (dest_regno != VRSAVE_REGNO
7341 && src_regno != VRSAVE_REGNO)
7342 return 0;
7344 for (i = 1; i < count; i++)
7346 rtx elt = XVECEXP (op, 0, i);
7348 if (GET_CODE (elt) != CLOBBER
7349 && GET_CODE (elt) != SET)
7350 return 0;
7353 return 1;
7356 /* Return 1 for an PARALLEL suitable for mfcr. */
7359 mfcr_operation (op, mode)
7360 rtx op;
7361 enum machine_mode mode ATTRIBUTE_UNUSED;
7363 int count = XVECLEN (op, 0);
7364 int i;
7366 /* Perform a quick check so we don't blow up below. */
7367 if (count < 1
7368 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7369 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
7370 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
7371 return 0;
7373 for (i = 0; i < count; i++)
7375 rtx exp = XVECEXP (op, 0, i);
7376 rtx unspec;
7377 int maskval;
7378 rtx src_reg;
7380 src_reg = XVECEXP (SET_SRC (exp), 0, 0);
7382 if (GET_CODE (src_reg) != REG
7383 || GET_MODE (src_reg) != CCmode
7384 || ! CR_REGNO_P (REGNO (src_reg)))
7385 return 0;
7387 if (GET_CODE (exp) != SET
7388 || GET_CODE (SET_DEST (exp)) != REG
7389 || GET_MODE (SET_DEST (exp)) != SImode
7390 || ! INT_REGNO_P (REGNO (SET_DEST (exp))))
7391 return 0;
7392 unspec = SET_SRC (exp);
7393 maskval = 1 << (MAX_CR_REGNO - REGNO (src_reg));
7395 if (GET_CODE (unspec) != UNSPEC
7396 || XINT (unspec, 1) != UNSPEC_MOVESI_FROM_CR
7397 || XVECLEN (unspec, 0) != 2
7398 || XVECEXP (unspec, 0, 0) != src_reg
7399 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
7400 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
7401 return 0;
7403 return 1;
7406 /* Return 1 for an PARALLEL suitable for mtcrf. */
7409 mtcrf_operation (op, mode)
7410 rtx op;
7411 enum machine_mode mode ATTRIBUTE_UNUSED;
7413 int count = XVECLEN (op, 0);
7414 int i;
7415 rtx src_reg;
7417 /* Perform a quick check so we don't blow up below. */
7418 if (count < 1
7419 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7420 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
7421 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
7422 return 0;
7423 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
7425 if (GET_CODE (src_reg) != REG
7426 || GET_MODE (src_reg) != SImode
7427 || ! INT_REGNO_P (REGNO (src_reg)))
7428 return 0;
7430 for (i = 0; i < count; i++)
7432 rtx exp = XVECEXP (op, 0, i);
7433 rtx unspec;
7434 int maskval;
7436 if (GET_CODE (exp) != SET
7437 || GET_CODE (SET_DEST (exp)) != REG
7438 || GET_MODE (SET_DEST (exp)) != CCmode
7439 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
7440 return 0;
7441 unspec = SET_SRC (exp);
7442 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
7444 if (GET_CODE (unspec) != UNSPEC
7445 || XINT (unspec, 1) != UNSPEC_MOVESI_TO_CR
7446 || XVECLEN (unspec, 0) != 2
7447 || XVECEXP (unspec, 0, 0) != src_reg
7448 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
7449 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
7450 return 0;
7452 return 1;
7455 /* Return 1 for an PARALLEL suitable for lmw. */
7458 lmw_operation (op, mode)
7459 rtx op;
7460 enum machine_mode mode ATTRIBUTE_UNUSED;
7462 int count = XVECLEN (op, 0);
7463 unsigned int dest_regno;
7464 rtx src_addr;
7465 unsigned int base_regno;
7466 HOST_WIDE_INT offset;
7467 int i;
7469 /* Perform a quick check so we don't blow up below. */
7470 if (count <= 1
7471 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7472 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7473 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
7474 return 0;
7476 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7477 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
7479 if (dest_regno > 31
7480 || count != 32 - (int) dest_regno)
7481 return 0;
7483 if (legitimate_indirect_address_p (src_addr, 0))
7485 offset = 0;
7486 base_regno = REGNO (src_addr);
7487 if (base_regno == 0)
7488 return 0;
7490 else if (legitimate_offset_address_p (SImode, src_addr, 0))
7492 offset = INTVAL (XEXP (src_addr, 1));
7493 base_regno = REGNO (XEXP (src_addr, 0));
7495 else
7496 return 0;
7498 for (i = 0; i < count; i++)
7500 rtx elt = XVECEXP (op, 0, i);
7501 rtx newaddr;
7502 rtx addr_reg;
7503 HOST_WIDE_INT newoffset;
7505 if (GET_CODE (elt) != SET
7506 || GET_CODE (SET_DEST (elt)) != REG
7507 || GET_MODE (SET_DEST (elt)) != SImode
7508 || REGNO (SET_DEST (elt)) != dest_regno + i
7509 || GET_CODE (SET_SRC (elt)) != MEM
7510 || GET_MODE (SET_SRC (elt)) != SImode)
7511 return 0;
7512 newaddr = XEXP (SET_SRC (elt), 0);
7513 if (legitimate_indirect_address_p (newaddr, 0))
7515 newoffset = 0;
7516 addr_reg = newaddr;
7518 else if (legitimate_offset_address_p (SImode, newaddr, 0))
7520 addr_reg = XEXP (newaddr, 0);
7521 newoffset = INTVAL (XEXP (newaddr, 1));
7523 else
7524 return 0;
7525 if (REGNO (addr_reg) != base_regno
7526 || newoffset != offset + 4 * i)
7527 return 0;
7530 return 1;
7533 /* Return 1 for an PARALLEL suitable for stmw. */
7536 stmw_operation (op, mode)
7537 rtx op;
7538 enum machine_mode mode ATTRIBUTE_UNUSED;
7540 int count = XVECLEN (op, 0);
7541 unsigned int src_regno;
7542 rtx dest_addr;
7543 unsigned int base_regno;
7544 HOST_WIDE_INT offset;
7545 int i;
7547 /* Perform a quick check so we don't blow up below. */
7548 if (count <= 1
7549 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7550 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
7551 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
7552 return 0;
7554 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7555 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
7557 if (src_regno > 31
7558 || count != 32 - (int) src_regno)
7559 return 0;
7561 if (legitimate_indirect_address_p (dest_addr, 0))
7563 offset = 0;
7564 base_regno = REGNO (dest_addr);
7565 if (base_regno == 0)
7566 return 0;
7568 else if (legitimate_offset_address_p (SImode, dest_addr, 0))
7570 offset = INTVAL (XEXP (dest_addr, 1));
7571 base_regno = REGNO (XEXP (dest_addr, 0));
7573 else
7574 return 0;
7576 for (i = 0; i < count; i++)
7578 rtx elt = XVECEXP (op, 0, i);
7579 rtx newaddr;
7580 rtx addr_reg;
7581 HOST_WIDE_INT newoffset;
7583 if (GET_CODE (elt) != SET
7584 || GET_CODE (SET_SRC (elt)) != REG
7585 || GET_MODE (SET_SRC (elt)) != SImode
7586 || REGNO (SET_SRC (elt)) != src_regno + i
7587 || GET_CODE (SET_DEST (elt)) != MEM
7588 || GET_MODE (SET_DEST (elt)) != SImode)
7589 return 0;
7590 newaddr = XEXP (SET_DEST (elt), 0);
7591 if (legitimate_indirect_address_p (newaddr, 0))
7593 newoffset = 0;
7594 addr_reg = newaddr;
7596 else if (legitimate_offset_address_p (SImode, newaddr, 0))
7598 addr_reg = XEXP (newaddr, 0);
7599 newoffset = INTVAL (XEXP (newaddr, 1));
7601 else
7602 return 0;
7603 if (REGNO (addr_reg) != base_regno
7604 || newoffset != offset + 4 * i)
7605 return 0;
7608 return 1;
7611 /* A validation routine: say whether CODE, a condition code, and MODE
7612 match. The other alternatives either don't make sense or should
7613 never be generated. */
7615 static void
7616 validate_condition_mode (code, mode)
7617 enum rtx_code code;
7618 enum machine_mode mode;
7620 if (GET_RTX_CLASS (code) != '<'
7621 || GET_MODE_CLASS (mode) != MODE_CC)
7622 abort ();
7624 /* These don't make sense. */
7625 if ((code == GT || code == LT || code == GE || code == LE)
7626 && mode == CCUNSmode)
7627 abort ();
7629 if ((code == GTU || code == LTU || code == GEU || code == LEU)
7630 && mode != CCUNSmode)
7631 abort ();
7633 if (mode != CCFPmode
7634 && (code == ORDERED || code == UNORDERED
7635 || code == UNEQ || code == LTGT
7636 || code == UNGT || code == UNLT
7637 || code == UNGE || code == UNLE))
7638 abort ();
7640 /* These should never be generated except for
7641 flag_finite_math_only. */
7642 if (mode == CCFPmode
7643 && ! flag_finite_math_only
7644 && (code == LE || code == GE
7645 || code == UNEQ || code == LTGT
7646 || code == UNGT || code == UNLT))
7647 abort ();
7649 /* These are invalid; the information is not there. */
7650 if (mode == CCEQmode
7651 && code != EQ && code != NE)
7652 abort ();
7655 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
7656 We only check the opcode against the mode of the CC value here. */
7659 branch_comparison_operator (op, mode)
7660 rtx op;
7661 enum machine_mode mode ATTRIBUTE_UNUSED;
7663 enum rtx_code code = GET_CODE (op);
7664 enum machine_mode cc_mode;
7666 if (GET_RTX_CLASS (code) != '<')
7667 return 0;
7669 cc_mode = GET_MODE (XEXP (op, 0));
7670 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
7671 return 0;
7673 validate_condition_mode (code, cc_mode);
7675 return 1;
7678 /* Return 1 if OP is a comparison operation that is valid for a branch
7679 insn and which is true if the corresponding bit in the CC register
7680 is set. */
7683 branch_positive_comparison_operator (op, mode)
7684 rtx op;
7685 enum machine_mode mode;
7687 enum rtx_code code;
7689 if (! branch_comparison_operator (op, mode))
7690 return 0;
7692 code = GET_CODE (op);
7693 return (code == EQ || code == LT || code == GT
7694 || (TARGET_E500 && TARGET_HARD_FLOAT && !TARGET_FPRS && code == NE)
7695 || code == LTU || code == GTU
7696 || code == UNORDERED);
7699 /* Return 1 if OP is a comparison operation that is valid for an scc
7700 insn: it must be a positive comparison. */
7703 scc_comparison_operator (op, mode)
7704 rtx op;
7705 enum machine_mode mode;
7707 return branch_positive_comparison_operator (op, mode);
7711 trap_comparison_operator (op, mode)
7712 rtx op;
7713 enum machine_mode mode;
7715 if (mode != VOIDmode && mode != GET_MODE (op))
7716 return 0;
7717 return GET_RTX_CLASS (GET_CODE (op)) == '<';
7721 boolean_operator (op, mode)
7722 rtx op;
7723 enum machine_mode mode ATTRIBUTE_UNUSED;
7725 enum rtx_code code = GET_CODE (op);
7726 return (code == AND || code == IOR || code == XOR);
7730 boolean_or_operator (op, mode)
7731 rtx op;
7732 enum machine_mode mode ATTRIBUTE_UNUSED;
7734 enum rtx_code code = GET_CODE (op);
7735 return (code == IOR || code == XOR);
7739 min_max_operator (op, mode)
7740 rtx op;
7741 enum machine_mode mode ATTRIBUTE_UNUSED;
7743 enum rtx_code code = GET_CODE (op);
7744 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
7747 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
7748 mask required to convert the result of a rotate insn into a shift
7749 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
7752 includes_lshift_p (shiftop, andop)
7753 rtx shiftop;
7754 rtx andop;
7756 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
7758 shift_mask <<= INTVAL (shiftop);
7760 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
7763 /* Similar, but for right shift. */
7766 includes_rshift_p (shiftop, andop)
7767 rtx shiftop;
7768 rtx andop;
7770 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
7772 shift_mask >>= INTVAL (shiftop);
7774 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
7777 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
7778    to perform a left shift.  It must have exactly SHIFTOP least
7779    significant 0's, then one or more 1's, then zero or more 0's.  */
/* Relies throughout on (x & -x) isolating the least significant set
   bit of x.  The CONST_DOUBLE arm handles 64-bit masks on hosts whose
   HOST_WIDE_INT is only 32 bits, by examining low and high words.  */
7782 includes_rldic_lshift_p (shiftop, andop)
7783      rtx shiftop;
7784      rtx andop;
7786   if (GET_CODE (andop) == CONST_INT)
7788       HOST_WIDE_INT c, lsb, shift_mask;
7790       c = INTVAL (andop);
/* All-zero and all-one masks can never have the 0..01..10..0 shape.  */
7791       if (c == 0 || c == ~0)
7792 	return 0;
7794       shift_mask = ~0;
7795       shift_mask <<= INTVAL (shiftop);
7797       /* Find the least significant one bit.  */
7798       lsb = c & -c;
7800       /* It must coincide with the LSB of the shift mask.  */
7801       if (-lsb != shift_mask)
7802 	return 0;
7804       /* Invert to look for the next transition (if any).  */
7805       c = ~c;
7807       /* Remove the low group of ones (originally low group of zeros).  */
7808       c &= -lsb;
7810       /* Again find the lsb, and check we have all 1's above.  */
7811       lsb = c & -c;
7812       return c == -lsb;
7814   else if (GET_CODE (andop) == CONST_DOUBLE
7815 	   && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
7817       HOST_WIDE_INT low, high, lsb;
7818       HOST_WIDE_INT shift_mask_low, shift_mask_high;
7820       low = CONST_DOUBLE_LOW (andop);
7821       if (HOST_BITS_PER_WIDE_INT < 64)
7822 	high = CONST_DOUBLE_HIGH (andop);
/* Reject all-zero and all-one 64-bit masks, as in the CONST_INT arm.  */
7824       if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
7825 	  || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
7826 	return 0;
/* Mask lives entirely in the high word: the first transition must be
   above bit 32, so the shift count must be at least 32 as well.  */
7828       if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
7830 	  shift_mask_high = ~0;
7831 	  if (INTVAL (shiftop) > 32)
7832 	    shift_mask_high <<= INTVAL (shiftop) - 32;
7834 	  lsb = high & -high;
7836 	  if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
7837 	    return 0;
7839 	  high = ~high;
7840 	  high &= -lsb;
7842 	  lsb = high & -high;
7843 	  return high == -lsb;
/* Otherwise the low transition is in the low word.  */
7846       shift_mask_low = ~0;
7847       shift_mask_low <<= INTVAL (shiftop);
7849       lsb = low & -low;
7851       if (-lsb != shift_mask_low)
7852 	return 0;
/* Invert both words and strip the low run of ones to hunt for the
   second transition, which may spill into the high word.  */
7854       if (HOST_BITS_PER_WIDE_INT < 64)
7855 	high = ~high;
7856       low = ~low;
7857       low &= -lsb;
7859       if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
7861 	  lsb = high & -high;
7862 	  return high == -lsb;
7865       lsb = low & -low;
7866       return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
7868   else
7869     return 0;
7872 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
7873    to perform a left shift.  It must have SHIFTOP or more least
7874    significant 0's, with the remainder of the word 1's.  */
/* i.e. the mask looks like 1...10...0 with at least SHIFTOP trailing
   zeros; (c & -c) isolates the least significant set bit.  */
7877 includes_rldicr_lshift_p (shiftop, andop)
7878      rtx shiftop;
7879      rtx andop;
7881   if (GET_CODE (andop) == CONST_INT)
7883       HOST_WIDE_INT c, lsb, shift_mask;
7885       shift_mask = ~0;
7886       shift_mask <<= INTVAL (shiftop);
7887       c = INTVAL (andop);
7889       /* Find the least significant one bit.  */
7890       lsb = c & -c;
7892       /* It must be covered by the shift mask.
7893 	 This test also rejects c == 0.  */
7894       if ((lsb & shift_mask) == 0)
7895 	return 0;
7897       /* Check we have all 1's above the transition, and reject all 1's.  */
7898       return c == -lsb && lsb != 1;
7900   else if (GET_CODE (andop) == CONST_DOUBLE
7901 	   && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
7903       HOST_WIDE_INT low, lsb, shift_mask_low;
7905       low = CONST_DOUBLE_LOW (andop);
/* On 32-bit hosts the mask spans two words; check the high word
   explicitly.  */
7907       if (HOST_BITS_PER_WIDE_INT < 64)
7909 	  HOST_WIDE_INT high, shift_mask_high;
7911 	  high = CONST_DOUBLE_HIGH (andop);
/* Low word all zero: the single transition must lie in the high word.  */
7913 	  if (low == 0)
7915 	      shift_mask_high = ~0;
7916 	      if (INTVAL (shiftop) > 32)
7917 		shift_mask_high <<= INTVAL (shiftop) - 32;
7919 	      lsb = high & -high;
7921 	      if ((lsb & shift_mask_high) == 0)
7922 		return 0;
7924 	      return high == -lsb;
/* Otherwise the high word must be all ones for a 1...10...0 mask.  */
7926 	  if (high != ~0)
7927 	    return 0;
7930       shift_mask_low = ~0;
7931       shift_mask_low <<= INTVAL (shiftop);
7933       lsb = low & -low;
7935       if ((lsb & shift_mask_low) == 0)
7936 	return 0;
7938       return low == -lsb && lsb != 1;
7940   else
7941     return 0;
7944 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
7945 for lfq and stfq insns.
7947 Note reg1 and reg2 *must* be hard registers. To be sure we will
7948 abort if we are passed pseudo registers. */
7951 registers_ok_for_quad_peep (reg1, reg2)
7952 rtx reg1, reg2;
7954 /* We might have been passed a SUBREG. */
7955 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
7956 return 0;
7958 return (REGNO (reg1) == REGNO (reg2) - 1);
7961 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
7962 addr1 and addr2 must be in consecutive memory locations
7963 (addr2 == addr1 + 8). */
7966 addrs_ok_for_quad_peep (addr1, addr2)
7967 rtx addr1;
7968 rtx addr2;
7970 unsigned int reg1;
7971 int offset1;
7973 /* Extract an offset (if used) from the first addr. */
7974 if (GET_CODE (addr1) == PLUS)
7976 /* If not a REG, return zero. */
7977 if (GET_CODE (XEXP (addr1, 0)) != REG)
7978 return 0;
7979 else
7981 reg1 = REGNO (XEXP (addr1, 0));
7982 /* The offset must be constant! */
7983 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
7984 return 0;
7985 offset1 = INTVAL (XEXP (addr1, 1));
7988 else if (GET_CODE (addr1) != REG)
7989 return 0;
7990 else
7992 reg1 = REGNO (addr1);
7993 /* This was a simple (mem (reg)) expression. Offset is 0. */
7994 offset1 = 0;
7997 /* Make sure the second address is a (mem (plus (reg) (const_int)))
7998 or if it is (mem (reg)) then make sure that offset1 is -8 and the same
7999 register as addr1. */
8000 if (offset1 == -8 && GET_CODE (addr2) == REG && reg1 == REGNO (addr2))
8001 return 1;
8002 if (GET_CODE (addr2) != PLUS)
8003 return 0;
8005 if (GET_CODE (XEXP (addr2, 0)) != REG
8006 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
8007 return 0;
8009 if (reg1 != REGNO (XEXP (addr2, 0)))
8010 return 0;
8012 /* The offset for the second addr must be 8 more than the first addr. */
8013 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
8014 return 0;
8016 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
8017 instructions. */
8018 return 1;
8021 /* Return the register class of a scratch register needed to copy IN into
8022    or out of a register in CLASS in MODE.  If it can be done directly,
8023    NO_REGS is returned.  */
8025 enum reg_class
8026 secondary_reload_class (class, mode, in)
8027      enum reg_class class;
8028      enum machine_mode mode ATTRIBUTE_UNUSED;
8029      rtx in;
8031   int regno;
8033   if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
8034 #if TARGET_MACHO
8035 		     && MACHOPIC_INDIRECT
8036 #endif
8039       /* We cannot copy a symbolic operand directly into anything
8040 	 other than BASE_REGS for TARGET_ELF.  So indicate that a
8041 	 register from BASE_REGS is needed as an intermediate
8042 	 register.
8044 	 On Darwin, pic addresses require a load from memory, which
8045 	 needs a base register.  */
8046       if (class != BASE_REGS
8047 	  && (GET_CODE (in) == SYMBOL_REF
8048 	      || GET_CODE (in) == HIGH
8049 	      || GET_CODE (in) == LABEL_REF
8050 	      || GET_CODE (in) == CONST))
8051 	return BASE_REGS;
/* Resolve IN to a hard register number, or -1 when it is not a register
   (or is a pseudo with no hard register assigned yet).  */
8054   if (GET_CODE (in) == REG)
8056       regno = REGNO (in);
8057       if (regno >= FIRST_PSEUDO_REGISTER)
8059 	  regno = true_regnum (in);
8060 	  if (regno >= FIRST_PSEUDO_REGISTER)
8061 	    regno = -1;
8064   else if (GET_CODE (in) == SUBREG)
8066       regno = true_regnum (in);
8067       if (regno >= FIRST_PSEUDO_REGISTER)
8068 	regno = -1;
8070   else
8071     regno = -1;
8073   /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
8074      into anything.  */
8075   if (class == GENERAL_REGS || class == BASE_REGS
8076       || (regno >= 0 && INT_REGNO_P (regno)))
8077     return NO_REGS;
8079   /* Constants, memory, and FP registers can go into FP registers.  */
8080   if ((regno == -1 || FP_REGNO_P (regno))
8081       && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
8082     return NO_REGS;
8084   /* Memory, and AltiVec registers can go into AltiVec registers.  */
8085   if ((regno == -1 || ALTIVEC_REGNO_P (regno))
8086       && class == ALTIVEC_REGS)
8087     return NO_REGS;
8089   /* We can copy among the CR registers.  */
8090   if ((class == CR_REGS || class == CR0_REGS)
8091       && regno >= 0 && CR_REGNO_P (regno))
8092     return NO_REGS;
8094   /* Otherwise, we need GENERAL_REGS.  */
8095   return GENERAL_REGS;
8098 /* Given a comparison operation, return the bit number in CCR to test.  We
8099    know this is a valid comparison.
8101    SCC_P is 1 if this is for an scc.  That means that %D will have been
8102    used instead of %C, so the bits will be in different places.
8104    Return -1 if OP isn't a valid comparison for some reason.  */
8107 ccr_bit (op, scc_p)
8108      rtx op;
8109      int scc_p;
8111   enum rtx_code code = GET_CODE (op);
8112   enum machine_mode cc_mode;
8113   int cc_regnum;
8114   int base_bit;
8115   rtx reg;
8117   if (GET_RTX_CLASS (code) != '<')
8118     return -1;
8120   reg = XEXP (op, 0);
/* The comparison must be against a hard CR register.  */
8122   if (GET_CODE (reg) != REG
8123       || ! CR_REGNO_P (REGNO (reg)))
8124     abort ();
8126   cc_mode = GET_MODE (reg);
8127   cc_regnum = REGNO (reg);
/* Each CR field is four bits wide; base_bit indexes its first bit.  */
8128   base_bit = 4 * (cc_regnum - CR0_REGNO);
8130   validate_condition_mode (code, cc_mode);
8132   /* When generating a sCOND operation, only positive conditions are
8133      allowed.  */
8134   if (scc_p && code != EQ && code != GT && code != LT && code != UNORDERED
8135       && code != GTU && code != LTU)
8136     abort ();
8138   switch (code)
/* For E500 soft-CCFP compares, EQ/NE results land in bit base_bit + 1
   rather than the usual EQ bit.  */
8140     case NE:
8141       if (TARGET_E500 && !TARGET_FPRS
8142 	  && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
8143 	return base_bit + 1;
8144       return scc_p ? base_bit + 3 : base_bit + 2;
8145     case EQ:
8146       if (TARGET_E500 && !TARGET_FPRS
8147 	  && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
8148 	return base_bit + 1;
8149       return base_bit + 2;
8150     case GT:  case GTU:  case UNLE:
8151       return base_bit + 1;
8152     case LT:  case LTU:  case UNGE:
8153       return base_bit;
8154     case ORDERED:  case UNORDERED:
8155       return base_bit + 3;
8157     case GE:  case GEU:
8158       /* If scc, we will have done a cror to put the bit in the
8159 	 unordered position.  So test that bit.  For integer, this is ! LT
8160 	 unless this is an scc insn.  */
8161       return scc_p ? base_bit + 3 : base_bit;
8163     case LE: case LEU:
8164       return scc_p ? base_bit + 3 : base_bit + 1;
8166     default:
8167       abort ();
8171 /* Return the GOT register. */
8173 struct rtx_def *
8174 rs6000_got_register (value)
8175 rtx value ATTRIBUTE_UNUSED;
8177 /* The second flow pass currently (June 1999) can't update
8178 regs_ever_live without disturbing other parts of the compiler, so
8179 update it here to make the prolog/epilogue code happy. */
8180 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
8181 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
8183 current_function_uses_pic_offset_table = 1;
8185 return pic_offset_table_rtx;
8188 /* Function to init struct machine_function.
8189 This will be called, via a pointer variable,
8190 from push_function_context. */
8192 static struct machine_function *
8193 rs6000_init_machine_status ()
8195 return ggc_alloc_cleared (sizeof (machine_function));
8198 /* These macros test for integers and extract the low-order bits.  */
/* INT_P: true for a CONST_INT, or for a mode-less CONST_DOUBLE (an
   integer wider than a HOST_WIDE_INT).  */
8199 #define INT_P(X)  \
8200 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE)	\
8201  && GET_MODE (X) == VOIDmode)
/* INT_LOWPART: the low-order HOST_WIDE_INT of such a constant.  */
8203 #define INT_LOWPART(X) \
8204   (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
8207 extract_MB (op)
8208 rtx op;
8210 int i;
8211 unsigned long val = INT_LOWPART (op);
8213 /* If the high bit is zero, the value is the first 1 bit we find
8214 from the left. */
8215 if ((val & 0x80000000) == 0)
8217 if ((val & 0xffffffff) == 0)
8218 abort ();
8220 i = 1;
8221 while (((val <<= 1) & 0x80000000) == 0)
8222 ++i;
8223 return i;
8226 /* If the high bit is set and the low bit is not, or the mask is all
8227 1's, the value is zero. */
8228 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
8229 return 0;
8231 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
8232 from the right. */
8233 i = 31;
8234 while (((val >>= 1) & 1) != 0)
8235 --i;
8237 return i;
8241 extract_ME (op)
8242 rtx op;
8244 int i;
8245 unsigned long val = INT_LOWPART (op);
8247 /* If the low bit is zero, the value is the first 1 bit we find from
8248 the right. */
8249 if ((val & 1) == 0)
8251 if ((val & 0xffffffff) == 0)
8252 abort ();
8254 i = 30;
8255 while (((val >>= 1) & 1) == 0)
8256 --i;
8258 return i;
8261 /* If the low bit is set and the high bit is not, or the mask is all
8262 1's, the value is 31. */
8263 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
8264 return 31;
8266 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
8267 from the left. */
8268 i = 0;
8269 while (((val <<= 1) & 0x80000000) != 0)
8270 ++i;
8272 return i;
8275 /* Locate some local-dynamic symbol still in use by this function
8276 so that we can print its name in some tls_ld pattern. */
8278 static const char *
8279 rs6000_get_some_local_dynamic_name ()
8281 rtx insn;
8283 if (cfun->machine->some_ld_name)
8284 return cfun->machine->some_ld_name;
8286 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
8287 if (INSN_P (insn)
8288 && for_each_rtx (&PATTERN (insn),
8289 rs6000_get_some_local_dynamic_name_1, 0))
8290 return cfun->machine->some_ld_name;
8292 abort ();
8295 /* Helper function for rs6000_get_some_local_dynamic_name. */
8297 static int
8298 rs6000_get_some_local_dynamic_name_1 (px, data)
8299 rtx *px;
8300 void *data ATTRIBUTE_UNUSED;
8302 rtx x = *px;
8304 if (GET_CODE (x) == SYMBOL_REF)
8306 const char *str = XSTR (x, 0);
8307 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
8309 cfun->machine->some_ld_name = str;
8310 return 1;
8314 return 0;
8317 /* Print an operand.  Recognize special options, documented below.  */
8319 #if TARGET_ELF
/* On ELF the small-data relocation and base register depend on the
   -msdata model: EABI uses sda21 against r0, otherwise sdarel against
   r13.  Other targets always use the sda21/r0 pair.  */
8320 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
8321 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
8322 #else
8323 #define SMALL_DATA_RELOC "sda21"
8324 #define SMALL_DATA_REG 0
8325 #endif
/* Output operand X to FILE, interpreting the single-letter modifier
   CODE ('%<code>' in an output template).  Each case below documents
   its own modifier; CODE 0 is the plain operand.  Invalid combinations
   go through output_operand_lossage or abort.  */
8327 void
8328 print_operand (file, x, code)
8329     FILE *file;
8330     rtx x;
8331     int code;
8333   int i;
8334   HOST_WIDE_INT val;
8335   unsigned HOST_WIDE_INT uval;
8337   switch (code)
8339     case '.':
8340       /* Write out an instruction after the call which may be replaced
8341 	 with glue code by the loader.  This depends on the AIX version.  */
8342       asm_fprintf (file, RS6000_CALL_GLUE);
8343       return;
8345       /* %a is output_address.  */
8347     case 'A':
8348       /* If X is a constant integer whose low-order 5 bits are zero,
8349 	 write 'l'.  Otherwise, write 'r'.  This is a kludge to fix a bug
8350 	 in the AIX assembler where "sri" with a zero shift count
8351 	 writes a trash instruction.  */
8352       if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
8353 	putc ('l', file);
8354       else
8355 	putc ('r', file);
8356       return;
8358     case 'b':
8359       /* If constant, low-order 16 bits of constant, unsigned.
8360 	 Otherwise, write normally.  */
8361       if (INT_P (x))
8362 	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
8363       else
8364 	print_operand (file, x, 0);
8365       return;
8367     case 'B':
8368       /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
8369 	 for 64-bit mask direction.  */
8370       putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
8371       return;
8373       /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
8374 	 output_operand.  */
8376     case 'E':
8377       /* X is a CR register.  Print the number of the EQ bit of the CR */
8378       if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8379 	output_operand_lossage ("invalid %%E value");
8380       else
8381 	fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
8382       return;
8384     case 'f':
8385       /* X is a CR register.  Print the shift count needed to move it
8386 	 to the high-order four bits.  */
8387       if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8388 	output_operand_lossage ("invalid %%f value");
8389       else
8390 	fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
8391       return;
8393     case 'F':
8394       /* Similar, but print the count for the rotate in the opposite
8395 	 direction.  */
8396       if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8397 	output_operand_lossage ("invalid %%F value");
8398       else
8399 	fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
8400       return;
8402     case 'G':
8403       /* X is a constant integer.  If it is negative, print "m",
8404 	 otherwise print "z".  This is to make an aze or ame insn.  */
8405       if (GET_CODE (x) != CONST_INT)
8406 	output_operand_lossage ("invalid %%G value");
8407       else if (INTVAL (x) >= 0)
8408 	putc ('z', file);
8409       else
8410 	putc ('m', file);
8411       return;
8413     case 'h':
8414       /* If constant, output low-order five bits.  Otherwise, write
8415 	 normally.  */
8416       if (INT_P (x))
8417 	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
8418       else
8419 	print_operand (file, x, 0);
8420       return;
8422     case 'H':
8423       /* If constant, output low-order six bits.  Otherwise, write
8424 	 normally.  */
8425       if (INT_P (x))
8426 	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
8427       else
8428 	print_operand (file, x, 0);
8429       return;
8431     case 'I':
8432       /* Print `i' if this is a constant, else nothing.  */
8433       if (INT_P (x))
8434 	putc ('i', file);
8435       return;
8437     case 'j':
8438       /* Write the bit number in CCR for jump.  */
8439       i = ccr_bit (x, 0);
8440       if (i == -1)
8441 	output_operand_lossage ("invalid %%j code");
8442       else
8443 	fprintf (file, "%d", i);
8444       return;
8446     case 'J':
8447       /* Similar, but add one for shift count in rlinm for scc and pass
8448 	 scc flag to `ccr_bit'.  */
8449       i = ccr_bit (x, 1);
8450       if (i == -1)
8451 	output_operand_lossage ("invalid %%J code");
8452       else
8453 	/* If we want bit 31, write a shift count of zero, not 32.  */
8454 	fprintf (file, "%d", i == 31 ? 0 : i + 1);
8455       return;
8457     case 'k':
8458       /* X must be a constant.  Write the 1's complement of the
8459 	 constant.  */
8460       if (! INT_P (x))
8461 	output_operand_lossage ("invalid %%k value");
8462       else
8463 	fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
8464       return;
8466     case 'K':
8467       /* X must be a symbolic constant on ELF.  Write an
8468 	 expression suitable for an 'addi' that adds in the low 16
8469 	 bits of the MEM.  */
8470       if (GET_CODE (x) != CONST)
8472 	  print_operand_address (file, x)
8473 	  fputs ("@l", file);
8475       else
8477 	  if (GET_CODE (XEXP (x, 0)) != PLUS
8478 	      || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
8479 		  && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
8480 	      || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
8481 	    output_operand_lossage ("invalid %%K value");
8482 	  print_operand_address (file, XEXP (XEXP (x, 0), 0));
8483 	  fputs ("@l", file);
8484 	  /* For GNU as, there must be a non-alphanumeric character
8485 	     between 'l' and the number.  The '-' is added by
8486 	     print_operand() already.  */
8487 	  if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
8488 	    fputs ("+", file);
8489 	  print_operand (file, XEXP (XEXP (x, 0), 1), 0);
8491       return;
8493       /* %l is output_asm_label.  */
8495     case 'L':
8496       /* Write second word of DImode or DFmode reference.  Works on register
8497 	 or non-indexed memory only.  */
8498       if (GET_CODE (x) == REG)
8499 	fprintf (file, "%s", reg_names[REGNO (x) + 1]);
8500       else if (GET_CODE (x) == MEM)
8502 	  /* Handle possible auto-increment.  Since it is pre-increment and
8503 	     we have already done it, we can just use an offset of word.  */
8504 	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
8505 	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
8506 	    output_address (plus_constant (XEXP (XEXP (x, 0), 0),
8507 					   UNITS_PER_WORD));
8508 	  else
8509 	    output_address (XEXP (adjust_address_nv (x, SImode,
8510 						     UNITS_PER_WORD),
8511 				  0));
8513 	  if (small_data_operand (x, GET_MODE (x)))
8514 	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8515 		     reg_names[SMALL_DATA_REG]);
8517       return;
8519     case 'm':
8520       /* MB value for a mask operand.  */
8521       if (! mask_operand (x, SImode))
8522 	output_operand_lossage ("invalid %%m value");
8524       fprintf (file, "%d", extract_MB (x));
8525       return;
8527     case 'M':
8528       /* ME value for a mask operand.  */
8529       if (! mask_operand (x, SImode))
8530 	output_operand_lossage ("invalid %%M value");
8532       fprintf (file, "%d", extract_ME (x));
8533       return;
8535       /* %n outputs the negative of its operand.  */
8537     case 'N':
8538       /* Write the number of elements in the vector times 4.  */
8539       if (GET_CODE (x) != PARALLEL)
8540 	output_operand_lossage ("invalid %%N value");
8541       else
8542 	fprintf (file, "%d", XVECLEN (x, 0) * 4);
8543       return;
8545     case 'O':
8546       /* Similar, but subtract 1 first.  */
8547       if (GET_CODE (x) != PARALLEL)
8548 	output_operand_lossage ("invalid %%O value");
8549       else
8550 	fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
8551       return;
8553     case 'p':
8554       /* X is a CONST_INT that is a power of two.  Output the logarithm.  */
8555       if (! INT_P (x)
8556 	  || INT_LOWPART (x) < 0
8557 	  || (i = exact_log2 (INT_LOWPART (x))) < 0)
8558 	output_operand_lossage ("invalid %%p value");
8559       else
8560 	fprintf (file, "%d", i);
8561       return;
8563     case 'P':
8564       /* The operand must be an indirect memory reference.  The result
8565 	 is the register number.  */
8566       if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
8567 	  || REGNO (XEXP (x, 0)) >= 32)
8568 	output_operand_lossage ("invalid %%P value")
8569       else
8570 	fprintf (file, "%d", REGNO (XEXP (x, 0)));
8571       return;
8573     case 'q':
8574       /* This outputs the logical code corresponding to a boolean
8575 	 expression.  The expression may have one or both operands
8576 	 negated (if one, only the first one).  For condition register
8577 	 logical operations, it will also treat the negated
8578 	 CR codes as NOTs, but not handle NOTs of them.  */
8580 	const char *const *t = 0;
8581 	const char *s;
8582 	enum rtx_code code = GET_CODE (x);
8583 	static const char * const tbl[3][3] = {
8584 	  { "and", "andc", "nor" },
8585 	  { "or", "orc", "nand" },
8586 	  { "xor", "eqv", "xor" } };
8588 	if (code == AND)
8589 	  t = tbl[0];
8590 	else if (code == IOR)
8591 	  t = tbl[1];
8592 	else if (code == XOR)
8593 	  t = tbl[2];
8594 	else
8595 	  output_operand_lossage ("invalid %%q value");
8597 	if (GET_CODE (XEXP (x, 0)) != NOT)
8598 	  s = t[0];
8599 	else
8601 	    if (GET_CODE (XEXP (x, 1)) == NOT)
8602 	      s = t[2];
8603 	    else
8604 	      s = t[1];
8607 	fputs (s, file);
8609       return;
/* NOTE(review): in this extract the FALLTHRU marker sits between the
   `if' and its `else'; presumably %Q emits "," only when TARGET_MFCRF
   and otherwise returns without printing -- confirm against the
   upstream revision, since brace-only lines were lost here.  */
8611     case 'Q':
8612       if (TARGET_MFCRF)
8613 	fputc (',',file);
8614         /* FALLTHRU */
8615       else
8616 	return;
8618     case 'R':
8619       /* X is a CR register.  Print the mask for `mtcrf'.  */
8620       if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8621 	output_operand_lossage ("invalid %%R value");
8622       else
8623 	fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
8624       return;
8626     case 's':
8627       /* Low 5 bits of 32 - value */
8628       if (! INT_P (x))
8629 	output_operand_lossage ("invalid %%s value");
8630       else
8631 	fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
8632       return;
8634     case 'S':
8635       /* PowerPC64 mask position.  All 0's is excluded.
8636 	 CONST_INT 32-bit mask is considered sign-extended so any
8637 	 transition must occur within the CONST_INT, not on the boundary.  */
8638       if (! mask64_operand (x, DImode))
8639 	output_operand_lossage ("invalid %%S value");
8641       uval = INT_LOWPART (x);
/* A mask with the low bit set is "clear left": count down from bit 64;
   otherwise invert and count down from bit 63 ("clear right").  */
8643       if (uval & 1)	/* Clear Left */
8645 	#if HOST_BITS_PER_WIDE_INT > 64
8646 	  uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
8647 	#endif
8648 	  i = 64;
8650       else		/* Clear Right */
8652 	  uval = ~uval;
8653 	#if HOST_BITS_PER_WIDE_INT > 64
8654 	  uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
8655 	#endif
8656 	  i = 63;
8658       while (uval != 0)
8659 	--i, uval >>= 1;
8660       if (i < 0)
8661 	abort ();
8662       fprintf (file, "%d", i);
8663       return;
8665     case 't':
8666       /* Like 'J' but get to the OVERFLOW/UNORDERED bit.  */
8667       if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
8668 	abort ();
8670       /* Bit 3 is OV bit.  */
8671       i = 4 * (REGNO (x) - CR0_REGNO) + 3;
8673       /* If we want bit 31, write a shift count of zero, not 32.  */
8674       fprintf (file, "%d", i == 31 ? 0 : i + 1);
8675       return;
8677     case 'T':
8678       /* Print the symbolic name of a branch target register.  */
8679       if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
8680 				  && REGNO (x) != COUNT_REGISTER_REGNUM))
8681 	output_operand_lossage ("invalid %%T value");
8682       else if (REGNO (x) == LINK_REGISTER_REGNUM)
8683 	fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
8684       else
8685 	fputs ("ctr", file);
8686       return;
8688     case 'u':
8689       /* High-order 16 bits of constant for use in unsigned operand.  */
8690       if (! INT_P (x))
8691 	output_operand_lossage ("invalid %%u value");
8692       else
8693 	fprintf (file, HOST_WIDE_INT_PRINT_HEX,
8694 		 (INT_LOWPART (x) >> 16) & 0xffff);
8695       return;
8697     case 'v':
8698       /* High-order 16 bits of constant for use in signed operand.  */
8699       if (! INT_P (x))
8700 	output_operand_lossage ("invalid %%v value");
8701       else
8702 	fprintf (file, HOST_WIDE_INT_PRINT_HEX,
8703 		 (INT_LOWPART (x) >> 16) & 0xffff);
8704       return;
8706     case 'U':
8707       /* Print `u' if this has an auto-increment or auto-decrement.  */
8708       if (GET_CODE (x) == MEM
8709 	  && (GET_CODE (XEXP (x, 0)) == PRE_INC
8710 	      || GET_CODE (XEXP (x, 0)) == PRE_DEC))
8711 	putc ('u', file);
8712       return;
8714     case 'V':
8715       /* Print the trap code for this operand.  */
8716       switch (GET_CODE (x))
8718 	case EQ:
8719 	  fputs ("eq", file);   /* 4 */
8720 	  break;
8721 	case NE:
8722 	  fputs ("ne", file);   /* 24 */
8723 	  break;
8724 	case LT:
8725 	  fputs ("lt", file);   /* 16 */
8726 	  break;
8727 	case LE:
8728 	  fputs ("le", file);   /* 20 */
8729 	  break;
8730 	case GT:
8731 	  fputs ("gt", file);   /* 8 */
8732 	  break;
8733 	case GE:
8734 	  fputs ("ge", file);   /* 12 */
8735 	  break;
8736 	case LTU:
8737 	  fputs ("llt", file);  /* 2 */
8738 	  break;
8739 	case LEU:
8740 	  fputs ("lle", file);  /* 6 */
8741 	  break;
8742 	case GTU:
8743 	  fputs ("lgt", file);  /* 1 */
8744 	  break;
8745 	case GEU:
8746 	  fputs ("lge", file);  /* 5 */
8747 	  break;
8748 	default:
8749 	  abort ();
8751       break;
8753     case 'w':
8754       /* If constant, low-order 16 bits of constant, signed.  Otherwise, write
8755 	 normally.  */
8756       if (INT_P (x))
8757 	fprintf (file, HOST_WIDE_INT_PRINT_DEC,
8758 		 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
8759       else
8760 	print_operand (file, x, 0);
8761       return;
8763     case 'W':
8764       /* MB value for a PowerPC64 rldic operand.  */
8765       val = (GET_CODE (x) == CONST_INT
8766 	     ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
/* Scan from the sign bit for the first set bit; i is its position.  */
8768       if (val < 0)
8769 	i = -1;
8770       else
8771 	for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
8772 	  if ((val <<= 1) < 0)
8773 	    break;
8775 #if HOST_BITS_PER_WIDE_INT == 32
8776       if (GET_CODE (x) == CONST_INT && i >= 0)
8777 	i += 32;  /* zero-extend high-part was all 0's */
8778       else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
8780 	  val = CONST_DOUBLE_LOW (x);
8782 	  if (val == 0)
8783 	    abort ();
8784 	  else if (val < 0)
8785 	    --i;
8786 	  else
8787 	    for ( ; i < 64; i++)
8788 	      if ((val <<= 1) < 0)
8789 		break;
8791 #endif
8793       fprintf (file, "%d", i + 1);
8794       return;
8796     case 'X':
8797       if (GET_CODE (x) == MEM
8798 	  && legitimate_indexed_address_p (XEXP (x, 0), 0))
8799 	putc ('x', file);
8800       return;
8802     case 'Y':
8803       /* Like 'L', for third word of TImode  */
8804       if (GET_CODE (x) == REG)
8805 	fprintf (file, "%s", reg_names[REGNO (x) + 2]);
8806       else if (GET_CODE (x) == MEM)
8808 	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
8809 	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
8810 	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
8811 	  else
8812 	    output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
8813 	  if (small_data_operand (x, GET_MODE (x)))
8814 	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8815 		     reg_names[SMALL_DATA_REG]);
8817       return;
8819     case 'z':
8820       /* X is a SYMBOL_REF.  Write out the name preceded by a
8821 	 period and without any trailing data in brackets.  Used for function
8822 	 names.  If we are configured for System V (or the embedded ABI) on
8823 	 the PowerPC, do not emit the period, since those systems do not use
8824 	 TOCs and the like.  */
8825       if (GET_CODE (x) != SYMBOL_REF)
8826 	abort ();
8828       if (XSTR (x, 0)[0] != '.')
8830 	  switch (DEFAULT_ABI)
8832 	    default:
8833 	      abort ();
8835 	    case ABI_AIX:
8836 	      putc ('.', file);
8837 	      break;
8839 	    case ABI_V4:
8840 	    case ABI_DARWIN:
8841 	      break;
8844       if (TARGET_AIX)
8845 	RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
8846       else
8847 	assemble_name (file, XSTR (x, 0));
8848       return;
8850     case 'Z':
8851       /* Like 'L', for last word of TImode.  */
8852       if (GET_CODE (x) == REG)
8853 	fprintf (file, "%s", reg_names[REGNO (x) + 3]);
8854       else if (GET_CODE (x) == MEM)
8856 	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
8857 	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
8858 	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
8859 	  else
8860 	    output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
8861 	  if (small_data_operand (x, GET_MODE (x)))
8862 	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8863 		     reg_names[SMALL_DATA_REG]);
8865       return;
8867       /* Print AltiVec or SPE memory operand.  */
8868     case 'y':
8870 	rtx tmp;
8872 	if (GET_CODE (x) != MEM)
8873 	  abort ();
8875 	tmp = XEXP (x, 0);
/* E500 (SPE) memory operands print as displacement(base); AltiVec
   operands fall through to the reg,reg form below.  */
8877 	if (TARGET_E500)
8879 	    /* Handle [reg].  */
8880 	    if (GET_CODE (tmp) == REG)
8882 		fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
8883 		break;
8885 	    /* Handle [reg+UIMM].  */
8886 	    else if (GET_CODE (tmp) == PLUS &&
8887 		     GET_CODE (XEXP (tmp, 1)) == CONST_INT)
8889 		int x;
8891 		if (GET_CODE (XEXP (tmp, 0)) != REG)
8892 		  abort ();
8894 		x = INTVAL (XEXP (tmp, 1));
8895 		fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
8896 		break;
8899 	    /* Fall through.  Must be [reg+reg].  */
8901 	if (GET_CODE (tmp) == REG)
8902 	  fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
8903 	else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
8905 	    if (REGNO (XEXP (tmp, 0)) == 0)
8906 	      fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
8907 		       reg_names[ REGNO (XEXP (tmp, 0)) ]);
8908 	    else
8909 	      fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
8910 		       reg_names[ REGNO (XEXP (tmp, 1)) ]);
8912 	else
8913 	  abort ();
8914 	break;
8917     case 0:
8918       if (GET_CODE (x) == REG)
8919 	fprintf (file, "%s", reg_names[REGNO (x)]);
8920       else if (GET_CODE (x) == MEM)
8922 	  /* We need to handle PRE_INC and PRE_DEC here, since we need to
8923 	     know the width from the mode.  */
8924 	  if (GET_CODE (XEXP (x, 0)) == PRE_INC)
8925 	    fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
8926 		     reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
8927 	  else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
8928 	    fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
8929 		     reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
8930 	  else
8931 	    output_address (XEXP (x, 0));
8933       else
8934 	output_addr_const (file, x);
8935       return;
8937     case '&':
8938       assemble_name (file, rs6000_get_some_local_dynamic_name ());
8939       return;
8941     default:
8942       output_operand_lossage ("invalid %%xn code");
8946 /* Print the address of an operand. */
8948 void
8949 print_operand_address (file, x)
8950 FILE *file;
8951 rtx x;
8953 if (GET_CODE (x) == REG)
8954 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
8955 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
8956 || GET_CODE (x) == LABEL_REF)
8958 output_addr_const (file, x);
8959 if (small_data_operand (x, GET_MODE (x)))
8960 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8961 reg_names[SMALL_DATA_REG]);
8962 else if (TARGET_TOC)
8963 abort ();
8965 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
8967 if (REGNO (XEXP (x, 0)) == 0)
8968 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
8969 reg_names[ REGNO (XEXP (x, 0)) ]);
8970 else
8971 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
8972 reg_names[ REGNO (XEXP (x, 1)) ]);
8974 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
8975 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
8976 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
8977 #if TARGET_ELF
8978 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
8979 && CONSTANT_P (XEXP (x, 1)))
8981 output_addr_const (file, XEXP (x, 1));
8982 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
8984 #endif
8985 #if TARGET_MACHO
8986 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
8987 && CONSTANT_P (XEXP (x, 1)))
8989 fprintf (file, "lo16(");
8990 output_addr_const (file, XEXP (x, 1));
8991 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
8993 #endif
8994 else if (legitimate_constant_pool_address_p (x))
8996 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
8998 rtx contains_minus = XEXP (x, 1);
8999 rtx minus, symref;
9000 const char *name;
9002 /* Find the (minus (sym) (toc)) buried in X, and temporarily
9003 turn it into (sym) for output_addr_const. */
9004 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
9005 contains_minus = XEXP (contains_minus, 0);
9007 minus = XEXP (contains_minus, 0);
9008 symref = XEXP (minus, 0);
9009 XEXP (contains_minus, 0) = symref;
9010 if (TARGET_ELF)
9012 char *newname;
9014 name = XSTR (symref, 0);
9015 newname = alloca (strlen (name) + sizeof ("@toc"));
9016 strcpy (newname, name);
9017 strcat (newname, "@toc");
9018 XSTR (symref, 0) = newname;
9020 output_addr_const (file, XEXP (x, 1));
9021 if (TARGET_ELF)
9022 XSTR (symref, 0) = name;
9023 XEXP (contains_minus, 0) = minus;
9025 else
9026 output_addr_const (file, XEXP (x, 1));
9028 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
9030 else
9031 abort ();
9034 /* Target hook for assembling integer objects. The PowerPC version has
9035 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
9036 is defined. It also needs to handle DI-mode objects on 64-bit
9037 targets. */
9039 static bool
9040 rs6000_assemble_integer (x, size, aligned_p)
9041 rtx x;
9042 unsigned int size;
9043 int aligned_p;
9045 #ifdef RELOCATABLE_NEEDS_FIXUP
9046 /* Special handling for SI values. */
9047 if (size == 4 && aligned_p)
9049 extern int in_toc_section PARAMS ((void));
9050 static int recurse = 0;
9052 /* For -mrelocatable, we mark all addresses that need to be fixed up
9053 in the .fixup section. */
9054 if (TARGET_RELOCATABLE
9055 && !in_toc_section ()
9056 && !in_text_section ()
9057 && !recurse
9058 && GET_CODE (x) != CONST_INT
9059 && GET_CODE (x) != CONST_DOUBLE
9060 && CONSTANT_P (x))
9062 char buf[256];
9064 recurse = 1;
9065 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
9066 fixuplabelno++;
9067 ASM_OUTPUT_LABEL (asm_out_file, buf);
9068 fprintf (asm_out_file, "\t.long\t(");
9069 output_addr_const (asm_out_file, x);
9070 fprintf (asm_out_file, ")@fixup\n");
9071 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
9072 ASM_OUTPUT_ALIGN (asm_out_file, 2);
9073 fprintf (asm_out_file, "\t.long\t");
9074 assemble_name (asm_out_file, buf);
9075 fprintf (asm_out_file, "\n\t.previous\n");
9076 recurse = 0;
9077 return true;
9079 /* Remove initial .'s to turn a -mcall-aixdesc function
9080 address into the address of the descriptor, not the function
9081 itself. */
9082 else if (GET_CODE (x) == SYMBOL_REF
9083 && XSTR (x, 0)[0] == '.'
9084 && DEFAULT_ABI == ABI_AIX)
9086 const char *name = XSTR (x, 0);
9087 while (*name == '.')
9088 name++;
9090 fprintf (asm_out_file, "\t.long\t%s\n", name);
9091 return true;
9094 #endif /* RELOCATABLE_NEEDS_FIXUP */
9095 return default_assemble_integer (x, size, aligned_p);
9098 #ifdef HAVE_GAS_HIDDEN
9099 /* Emit an assembler directive to set symbol visibility for DECL to
9100 VISIBILITY_TYPE. */
9102 static void
9103 rs6000_assemble_visibility (decl, vis)
9104 tree decl;
9105 int vis;
9107 /* Functions need to have their entry point symbol visibility set as
9108 well as their descriptor symbol visibility. */
9109 if (DEFAULT_ABI == ABI_AIX && TREE_CODE (decl) == FUNCTION_DECL)
9111 static const char * const visibility_types[] = {
9112 NULL, "internal", "hidden", "protected"
9115 const char *name, *type;
9117 name = ((* targetm.strip_name_encoding)
9118 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
9119 type = visibility_types[vis];
9121 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
9122 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
9124 else
9125 default_assemble_visibility (decl, vis);
9127 #endif
9129 enum rtx_code
9130 rs6000_reverse_condition (mode, code)
9131 enum machine_mode mode;
9132 enum rtx_code code;
9134 /* Reversal of FP compares takes care -- an ordered compare
9135 becomes an unordered compare and vice versa. */
9136 if (mode == CCFPmode
9137 && (!flag_finite_math_only
9138 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
9139 || code == UNEQ || code == LTGT))
9140 return reverse_condition_maybe_unordered (code);
9141 else
9142 return reverse_condition (code);
9145 /* Generate a compare for CODE. Return a brand-new rtx that
9146 represents the result of the compare. */
9148 static rtx
9149 rs6000_generate_compare (code)
9150 enum rtx_code code;
9152 enum machine_mode comp_mode;
9153 rtx compare_result;
9155 if (rs6000_compare_fp_p)
9156 comp_mode = CCFPmode;
9157 else if (code == GTU || code == LTU
9158 || code == GEU || code == LEU)
9159 comp_mode = CCUNSmode;
9160 else
9161 comp_mode = CCmode;
9163 /* First, the compare. */
9164 compare_result = gen_reg_rtx (comp_mode);
9166 /* SPE FP compare instructions on the GPRs. Yuck! */
9167 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT)
9168 && rs6000_compare_fp_p)
9170 rtx cmp, or1, or2, or_result, compare_result2;
9172 switch (code)
9174 case EQ:
9175 case UNEQ:
9176 case NE:
9177 case LTGT:
9178 cmp = flag_finite_math_only
9179 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
9180 rs6000_compare_op1)
9181 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
9182 rs6000_compare_op1);
9183 break;
9184 case GT:
9185 case GTU:
9186 case UNGT:
9187 case UNGE:
9188 case GE:
9189 case GEU:
9190 cmp = flag_finite_math_only
9191 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
9192 rs6000_compare_op1)
9193 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
9194 rs6000_compare_op1);
9195 break;
9196 case LT:
9197 case LTU:
9198 case UNLT:
9199 case UNLE:
9200 case LE:
9201 case LEU:
9202 cmp = flag_finite_math_only
9203 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
9204 rs6000_compare_op1)
9205 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
9206 rs6000_compare_op1);
9207 break;
9208 default:
9209 abort ();
9212 /* Synthesize LE and GE from LT/GT || EQ. */
9213 if (code == LE || code == GE || code == LEU || code == GEU)
9215 /* Synthesize GE/LE frome GT/LT || EQ. */
9217 emit_insn (cmp);
9219 switch (code)
9221 case LE: code = LT; break;
9222 case GE: code = GT; break;
9223 case LEU: code = LT; break;
9224 case GEU: code = GT; break;
9225 default: abort ();
9228 or1 = gen_reg_rtx (SImode);
9229 or2 = gen_reg_rtx (SImode);
9230 or_result = gen_reg_rtx (CCEQmode);
9231 compare_result2 = gen_reg_rtx (CCFPmode);
9233 /* Do the EQ. */
9234 cmp = flag_finite_math_only
9235 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
9236 rs6000_compare_op1)
9237 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
9238 rs6000_compare_op1);
9239 emit_insn (cmp);
9241 /* The MC8540 FP compare instructions set the CR bits
9242 differently than other PPC compare instructions. For
9243 that matter, there is no generic test instruction, but a
9244 testgt, testlt, and testeq. For a true condition, bit 2
9245 is set (x1xx) in the CR. Following the traditional CR
9246 values:
9248 LT GT EQ OV
9249 bit3 bit2 bit1 bit0
9251 ... bit 2 would be a GT CR alias, so later on we
9252 look in the GT bits for the branch instructions.
9253 However, we must be careful to emit correct RTL in
9254 the meantime, so optimizations don't get confused. */
9256 or1 = gen_rtx (NE, SImode, compare_result, const0_rtx);
9257 or2 = gen_rtx (NE, SImode, compare_result2, const0_rtx);
9259 /* OR them together. */
9260 cmp = gen_rtx_SET (VOIDmode, or_result,
9261 gen_rtx_COMPARE (CCEQmode,
9262 gen_rtx_IOR (SImode, or1, or2),
9263 const_true_rtx));
9264 compare_result = or_result;
9265 code = EQ;
9267 else
9269 /* We only care about 1 bit (x1xx), so map everything to NE to
9270 maintain rtl sanity. We'll get to the right bit (x1xx) at
9271 code output time. */
9272 if (code == NE || code == LTGT)
9273 /* Do the inverse here because we have no cmpne
9274 instruction. We use the cmpeq instruction and expect
9275 to get a 0 instead. */
9276 code = EQ;
9277 else
9278 code = NE;
9281 emit_insn (cmp);
9283 else
9284 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
9285 gen_rtx_COMPARE (comp_mode,
9286 rs6000_compare_op0,
9287 rs6000_compare_op1)));
9289 /* Some kinds of FP comparisons need an OR operation;
9290 under flag_finite_math_only we don't bother. */
9291 if (rs6000_compare_fp_p
9292 && ! flag_finite_math_only
9293 && ! (TARGET_HARD_FLOAT && TARGET_E500 && !TARGET_FPRS)
9294 && (code == LE || code == GE
9295 || code == UNEQ || code == LTGT
9296 || code == UNGT || code == UNLT))
9298 enum rtx_code or1, or2;
9299 rtx or1_rtx, or2_rtx, compare2_rtx;
9300 rtx or_result = gen_reg_rtx (CCEQmode);
9302 switch (code)
9304 case LE: or1 = LT; or2 = EQ; break;
9305 case GE: or1 = GT; or2 = EQ; break;
9306 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
9307 case LTGT: or1 = LT; or2 = GT; break;
9308 case UNGT: or1 = UNORDERED; or2 = GT; break;
9309 case UNLT: or1 = UNORDERED; or2 = LT; break;
9310 default: abort ();
9312 validate_condition_mode (or1, comp_mode);
9313 validate_condition_mode (or2, comp_mode);
9314 or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
9315 or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
9316 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
9317 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
9318 const_true_rtx);
9319 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
9321 compare_result = or_result;
9322 code = EQ;
9325 validate_condition_mode (code, GET_MODE (compare_result));
9327 return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
9331 /* Emit the RTL for an sCOND pattern. */
9333 void
9334 rs6000_emit_sCOND (code, result)
9335 enum rtx_code code;
9336 rtx result;
9338 rtx condition_rtx;
9339 enum machine_mode op_mode;
9340 enum rtx_code cond_code;
9342 condition_rtx = rs6000_generate_compare (code);
9343 cond_code = GET_CODE (condition_rtx);
9345 if (cond_code == NE
9346 || cond_code == GE || cond_code == LE
9347 || cond_code == GEU || cond_code == LEU
9348 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
9350 rtx not_result = gen_reg_rtx (CCEQmode);
9351 rtx not_op, rev_cond_rtx;
9352 enum machine_mode cc_mode;
9354 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
9356 rev_cond_rtx = gen_rtx (rs6000_reverse_condition (cc_mode, cond_code),
9357 SImode, XEXP (condition_rtx, 0), const0_rtx);
9358 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
9359 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
9360 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
9363 op_mode = GET_MODE (rs6000_compare_op0);
9364 if (op_mode == VOIDmode)
9365 op_mode = GET_MODE (rs6000_compare_op1);
9367 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
9369 PUT_MODE (condition_rtx, DImode);
9370 convert_move (result, condition_rtx, 0);
9372 else
9374 PUT_MODE (condition_rtx, SImode);
9375 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
9379 /* Emit a branch of kind CODE to location LOC. */
9381 void
9382 rs6000_emit_cbranch (code, loc)
9383 enum rtx_code code;
9384 rtx loc;
9386 rtx condition_rtx, loc_ref;
9388 condition_rtx = rs6000_generate_compare (code);
9389 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
9390 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
9391 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
9392 loc_ref, pc_rtx)));
9395 /* Return the string to output a conditional branch to LABEL, which is
9396 the operand number of the label, or -1 if the branch is really a
9397 conditional return.
9399 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
9400 condition code register and its mode specifies what kind of
9401 comparison we made.
9403 REVERSED is nonzero if we should reverse the sense of the comparison.
9405 INSN is the insn. */
9407 char *
9408 output_cbranch (op, label, reversed, insn)
9409 rtx op;
9410 const char * label;
9411 int reversed;
9412 rtx insn;
9414 static char string[64];
9415 enum rtx_code code = GET_CODE (op);
9416 rtx cc_reg = XEXP (op, 0);
9417 enum machine_mode mode = GET_MODE (cc_reg);
9418 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
9419 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
9420 int really_reversed = reversed ^ need_longbranch;
9421 char *s = string;
9422 const char *ccode;
9423 const char *pred;
9424 rtx note;
9426 validate_condition_mode (code, mode);
9428 /* Work out which way this really branches. We could use
9429 reverse_condition_maybe_unordered here always but this
9430 makes the resulting assembler clearer. */
9431 if (really_reversed)
9433 /* Reversal of FP compares takes care -- an ordered compare
9434 becomes an unordered compare and vice versa. */
9435 if (mode == CCFPmode)
9436 code = reverse_condition_maybe_unordered (code);
9437 else
9438 code = reverse_condition (code);
9441 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
9443 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
9444 to the GT bit. */
9445 if (code == EQ)
9446 /* Opposite of GT. */
9447 code = UNLE;
9448 else if (code == NE)
9449 code = GT;
9450 else
9451 abort ();
9454 switch (code)
9456 /* Not all of these are actually distinct opcodes, but
9457 we distinguish them for clarity of the resulting assembler. */
9458 case NE: case LTGT:
9459 ccode = "ne"; break;
9460 case EQ: case UNEQ:
9461 ccode = "eq"; break;
9462 case GE: case GEU:
9463 ccode = "ge"; break;
9464 case GT: case GTU: case UNGT:
9465 ccode = "gt"; break;
9466 case LE: case LEU:
9467 ccode = "le"; break;
9468 case LT: case LTU: case UNLT:
9469 ccode = "lt"; break;
9470 case UNORDERED: ccode = "un"; break;
9471 case ORDERED: ccode = "nu"; break;
9472 case UNGE: ccode = "nl"; break;
9473 case UNLE: ccode = "ng"; break;
9474 default:
9475 abort ();
9478 /* Maybe we have a guess as to how likely the branch is.
9479 The old mnemonics don't have a way to specify this information. */
9480 pred = "";
9481 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
9482 if (note != NULL_RTX)
9484 /* PROB is the difference from 50%. */
9485 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
9486 bool always_hint = rs6000_cpu != PROCESSOR_POWER4;
9488 /* Only hint for highly probable/improbable branches on newer
9489 cpus as static prediction overrides processor dynamic
9490 prediction. For older cpus we may as well always hint, but
9491 assume not taken for branches that are very close to 50% as a
9492 mispredicted taken branch is more expensive than a
9493 mispredicted not-taken branch. */
9494 if (always_hint
9495 || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
9497 if (abs (prob) > REG_BR_PROB_BASE / 20
9498 && ((prob > 0) ^ need_longbranch))
9499 pred = "+";
9500 else
9501 pred = "-";
9505 if (label == NULL)
9506 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
9507 else
9508 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
9510 /* We need to escape any '%' characters in the reg_names string.
9511 Assume they'd only be the first character... */
9512 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
9513 *s++ = '%';
9514 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
9516 if (label != NULL)
9518 /* If the branch distance was too far, we may have to use an
9519 unconditional branch to go the distance. */
9520 if (need_longbranch)
9521 s += sprintf (s, ",$+8\n\tb %s", label);
9522 else
9523 s += sprintf (s, ",%s", label);
9526 return string;
9529 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
9530 operands of the last comparison is nonzero/true, FALSE_COND if it
9531 is zero/false. Return 0 if the hardware has no such operation. */
9534 rs6000_emit_cmove (dest, op, true_cond, false_cond)
9535 rtx dest;
9536 rtx op;
9537 rtx true_cond;
9538 rtx false_cond;
9540 enum rtx_code code = GET_CODE (op);
9541 rtx op0 = rs6000_compare_op0;
9542 rtx op1 = rs6000_compare_op1;
9543 REAL_VALUE_TYPE c1;
9544 enum machine_mode compare_mode = GET_MODE (op0);
9545 enum machine_mode result_mode = GET_MODE (dest);
9546 rtx temp;
9548 /* These modes should always match. */
9549 if (GET_MODE (op1) != compare_mode
9550 /* In the isel case however, we can use a compare immediate, so
9551 op1 may be a small constant. */
9552 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
9553 return 0;
9554 if (GET_MODE (true_cond) != result_mode)
9555 return 0;
9556 if (GET_MODE (false_cond) != result_mode)
9557 return 0;
9559 /* First, work out if the hardware can do this at all, or
9560 if it's too slow... */
9561 if (! rs6000_compare_fp_p)
9563 if (TARGET_ISEL)
9564 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
9565 return 0;
9568 /* Eliminate half of the comparisons by switching operands, this
9569 makes the remaining code simpler. */
9570 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
9571 || code == LTGT || code == LT || code == UNLE)
9573 code = reverse_condition_maybe_unordered (code);
9574 temp = true_cond;
9575 true_cond = false_cond;
9576 false_cond = temp;
9579 /* UNEQ and LTGT take four instructions for a comparison with zero,
9580 it'll probably be faster to use a branch here too. */
9581 if (code == UNEQ && HONOR_NANS (compare_mode))
9582 return 0;
9584 if (GET_CODE (op1) == CONST_DOUBLE)
9585 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
9587 /* We're going to try to implement comparisons by performing
9588 a subtract, then comparing against zero. Unfortunately,
9589 Inf - Inf is NaN which is not zero, and so if we don't
9590 know that the operand is finite and the comparison
9591 would treat EQ different to UNORDERED, we can't do it. */
9592 if (HONOR_INFINITIES (compare_mode)
9593 && code != GT && code != UNGE
9594 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
9595 /* Constructs of the form (a OP b ? a : b) are safe. */
9596 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
9597 || (! rtx_equal_p (op0, true_cond)
9598 && ! rtx_equal_p (op1, true_cond))))
9599 return 0;
9600 /* At this point we know we can use fsel. */
9602 /* Reduce the comparison to a comparison against zero. */
9603 temp = gen_reg_rtx (compare_mode);
9604 emit_insn (gen_rtx_SET (VOIDmode, temp,
9605 gen_rtx_MINUS (compare_mode, op0, op1)));
9606 op0 = temp;
9607 op1 = CONST0_RTX (compare_mode);
9609 /* If we don't care about NaNs we can reduce some of the comparisons
9610 down to faster ones. */
9611 if (! HONOR_NANS (compare_mode))
9612 switch (code)
9614 case GT:
9615 code = LE;
9616 temp = true_cond;
9617 true_cond = false_cond;
9618 false_cond = temp;
9619 break;
9620 case UNGE:
9621 code = GE;
9622 break;
9623 case UNEQ:
9624 code = EQ;
9625 break;
9626 default:
9627 break;
9630 /* Now, reduce everything down to a GE. */
9631 switch (code)
9633 case GE:
9634 break;
9636 case LE:
9637 temp = gen_reg_rtx (compare_mode);
9638 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
9639 op0 = temp;
9640 break;
9642 case ORDERED:
9643 temp = gen_reg_rtx (compare_mode);
9644 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
9645 op0 = temp;
9646 break;
9648 case EQ:
9649 temp = gen_reg_rtx (compare_mode);
9650 emit_insn (gen_rtx_SET (VOIDmode, temp,
9651 gen_rtx_NEG (compare_mode,
9652 gen_rtx_ABS (compare_mode, op0))));
9653 op0 = temp;
9654 break;
9656 case UNGE:
9657 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
9658 temp = gen_reg_rtx (result_mode);
9659 emit_insn (gen_rtx_SET (VOIDmode, temp,
9660 gen_rtx_IF_THEN_ELSE (result_mode,
9661 gen_rtx_GE (VOIDmode,
9662 op0, op1),
9663 true_cond, false_cond)));
9664 false_cond = true_cond;
9665 true_cond = temp;
9667 temp = gen_reg_rtx (compare_mode);
9668 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
9669 op0 = temp;
9670 break;
9672 case GT:
9673 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
9674 temp = gen_reg_rtx (result_mode);
9675 emit_insn (gen_rtx_SET (VOIDmode, temp,
9676 gen_rtx_IF_THEN_ELSE (result_mode,
9677 gen_rtx_GE (VOIDmode,
9678 op0, op1),
9679 true_cond, false_cond)));
9680 true_cond = false_cond;
9681 false_cond = temp;
9683 temp = gen_reg_rtx (compare_mode);
9684 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
9685 op0 = temp;
9686 break;
9688 default:
9689 abort ();
9692 emit_insn (gen_rtx_SET (VOIDmode, dest,
9693 gen_rtx_IF_THEN_ELSE (result_mode,
9694 gen_rtx_GE (VOIDmode,
9695 op0, op1),
9696 true_cond, false_cond)));
9697 return 1;
9700 /* Same as above, but for ints (isel). */
9702 static int
9703 rs6000_emit_int_cmove (dest, op, true_cond, false_cond)
9704 rtx dest;
9705 rtx op;
9706 rtx true_cond;
9707 rtx false_cond;
9709 rtx condition_rtx, cr;
9711 /* All isel implementations thus far are 32-bits. */
9712 if (GET_MODE (rs6000_compare_op0) != SImode)
9713 return 0;
9715 /* We still have to do the compare, because isel doesn't do a
9716 compare, it just looks at the CRx bits set by a previous compare
9717 instruction. */
9718 condition_rtx = rs6000_generate_compare (GET_CODE (op));
9719 cr = XEXP (condition_rtx, 0);
9721 if (GET_MODE (cr) == CCmode)
9722 emit_insn (gen_isel_signed (dest, condition_rtx,
9723 true_cond, false_cond, cr));
9724 else
9725 emit_insn (gen_isel_unsigned (dest, condition_rtx,
9726 true_cond, false_cond, cr));
9728 return 1;
9731 const char *
9732 output_isel (operands)
9733 rtx *operands;
9735 enum rtx_code code;
9737 code = GET_CODE (operands[1]);
9738 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
9740 PUT_CODE (operands[1], reverse_condition (code));
9741 return "isel %0,%3,%2,%j1";
9743 else
9744 return "isel %0,%2,%3,%j1";
9747 void
9748 rs6000_emit_minmax (dest, code, op0, op1)
9749 rtx dest;
9750 enum rtx_code code;
9751 rtx op0;
9752 rtx op1;
9754 enum machine_mode mode = GET_MODE (op0);
9755 enum rtx_code c;
9756 rtx target;
9758 if (code == SMAX || code == SMIN)
9759 c = GE;
9760 else
9761 c = GEU;
9763 if (code == SMAX || code == UMAX)
9764 target = emit_conditional_move (dest, c, op0, op1, mode,
9765 op0, op1, mode, 0);
9766 else
9767 target = emit_conditional_move (dest, c, op0, op1, mode,
9768 op1, op0, mode, 0);
9769 if (target == NULL_RTX)
9770 abort ();
9771 if (target != dest)
9772 emit_move_insn (dest, target);
9775 /* Called by altivec splitter.
9776 Input:
9777 operands[0] : Destination of move
9778 operands[1] : Source of move
9779 noperands : Size of operands vector
9780 Output:
9781 operands[2-5] ([2-3] in 64 bit) : Destination slots
9782 operands[6-9] ([4-5] in 64 bit) : Source slots
9784 Splits the move of operands[1] to operands[0].
9785 This is done, if GPRs are one of the operands. In this case
9786 a sequence of simple move insns has to be issued. The sequence of these
9787 move insns has to be done in correct order to avoid early clobber of the
9788 base register or destructive overlap of registers.
9791 void
9792 rs6000_split_altivec_in_gprs (rtx *operands)
9794 int nregs, reg, i, j;
9795 enum machine_mode mode;
9797 /* Calculate number to move (2/4 for 32/64 bit mode). */
9799 reg = REG_P (operands[0]) ? REGNO (operands[0]) : REGNO (operands[1]);
9800 mode = GET_MODE (operands[0]);
9801 nregs = HARD_REGNO_NREGS (reg, mode);
9803 if (REG_P (operands[1])
9804 && REG_P (operands[0])
9805 && (REGNO (operands[1]) < REGNO (operands[0])))
9807 /* Move register range backwards, if we have destructive overlap. */
9809 j = nregs;
9810 for (i = 0; i < nregs; i++)
9812 j--;
9813 operands[i + 2] = operand_subword (operands[0], j, 0, mode);
9814 operands[i + 2 + nregs] =
9815 operand_subword (operands[1], j, 0, mode);
9818 else
9820 j = -1;
9822 if (GET_CODE (operands[1]) == MEM)
9824 rtx breg;
9825 /* We have offsettable addresses only. If we use one of the
9826 registers to address memory, we have change that register last. */
9827 breg = GET_CODE (XEXP (operands[1], 0)) == PLUS ?
9828 XEXP (XEXP (operands[1], 0), 0) :
9829 XEXP (operands[1], 0);
9831 if (REGNO (breg) >= REGNO (operands[0])
9832 && REGNO (breg) < REGNO (operands[0]) + nregs)
9833 j = REGNO (breg) - REGNO (operands[0]);
9836 for (i = 0; i < nregs; i++)
9838 /* Calculate index to next subword. */
9839 j++;
9840 if (j == nregs)
9841 j = 0;
9843 operands[i + 2] = operand_subword (operands[0], j, 0, mode);
9844 operands[i + 2 + nregs] =
9845 operand_subword (operands[1], j, 0, mode);
9852 /* This page contains routines that are used to determine what the
9853 function prologue and epilogue code will do and write them out. */
9855 /* Return the first fixed-point register that is required to be
9856 saved. 32 if none. */
9859 first_reg_to_save ()
9861 int first_reg;
9863 /* Find lowest numbered live register. */
9864 for (first_reg = 13; first_reg <= 31; first_reg++)
9865 if (regs_ever_live[first_reg]
9866 && (! call_used_regs[first_reg]
9867 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
9868 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
9869 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
9870 break;
9872 #if TARGET_MACHO
9873 if (flag_pic
9874 && current_function_uses_pic_offset_table
9875 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
9876 return RS6000_PIC_OFFSET_TABLE_REGNUM;
9877 #endif
9879 return first_reg;
9882 /* Similar, for FP regs. */
9885 first_fp_reg_to_save ()
9887 int first_reg;
9889 /* Find lowest numbered live register. */
9890 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
9891 if (regs_ever_live[first_reg])
9892 break;
9894 return first_reg;
9897 /* Similar, for AltiVec regs. */
9899 static int
9900 first_altivec_reg_to_save ()
9902 int i;
9904 /* Stack frame remains as is unless we are in AltiVec ABI. */
9905 if (! TARGET_ALTIVEC_ABI)
9906 return LAST_ALTIVEC_REGNO + 1;
9908 /* Find lowest numbered live register. */
9909 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
9910 if (regs_ever_live[i])
9911 break;
9913 return i;
9916 /* Return a 32-bit mask of the AltiVec registers we need to set in
9917 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
9918 the 32-bit word is 0. */
9920 static unsigned int
9921 compute_vrsave_mask ()
9923 unsigned int i, mask = 0;
9925 /* First, find out if we use _any_ altivec registers. */
9926 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
9927 if (regs_ever_live[i])
9928 mask |= ALTIVEC_REG_BIT (i);
9930 if (mask == 0)
9931 return mask;
9933 /* Next, remove the argument registers from the set. These must
9934 be in the VRSAVE mask set by the caller, so we don't need to add
9935 them in again. More importantly, the mask we compute here is
9936 used to generate CLOBBERs in the set_vrsave insn, and we do not
9937 wish the argument registers to die. */
9938 for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
9939 mask &= ~ALTIVEC_REG_BIT (i);
9941 /* Similarly, remove the return value from the set. */
9943 bool yes = false;
9944 diddle_return_value (is_altivec_return_reg, &yes);
9945 if (yes)
9946 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
9949 return mask;
9952 static void
9953 is_altivec_return_reg (reg, xyes)
9954 rtx reg;
9955 void *xyes;
9957 bool *yes = (bool *) xyes;
9958 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
9959 *yes = true;
9963 /* Calculate the stack information for the current function. This is
9964 complicated by having two separate calling sequences, the AIX calling
9965 sequence and the V.4 calling sequence.
9967 AIX (and Darwin/Mac OS X) stack frames look like:
9968 32-bit 64-bit
9969 SP----> +---------------------------------------+
9970 | back chain to caller | 0 0
9971 +---------------------------------------+
9972 | saved CR | 4 8 (8-11)
9973 +---------------------------------------+
9974 | saved LR | 8 16
9975 +---------------------------------------+
9976 | reserved for compilers | 12 24
9977 +---------------------------------------+
9978 | reserved for binders | 16 32
9979 +---------------------------------------+
9980 | saved TOC pointer | 20 40
9981 +---------------------------------------+
9982 | Parameter save area (P) | 24 48
9983 +---------------------------------------+
9984 | Alloca space (A) | 24+P etc.
9985 +---------------------------------------+
9986 | Local variable space (L) | 24+P+A
9987 +---------------------------------------+
9988 | Float/int conversion temporary (X) | 24+P+A+L
9989 +---------------------------------------+
9990 | Save area for AltiVec registers (W) | 24+P+A+L+X
9991 +---------------------------------------+
9992 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
9993 +---------------------------------------+
9994 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
9995 +---------------------------------------+
 9996         | Save area for GP registers (G)       | 24+P+A+L+X+W+Y+Z
 9997         +---------------------------------------+
 9998         | Save area for FP registers (F)        | 24+P+A+L+X+W+Y+Z+G
9999 +---------------------------------------+
10000 old SP->| back chain to caller's caller |
10001 +---------------------------------------+
10003 The required alignment for AIX configurations is two words (i.e., 8
10004 or 16 bytes).
10007 V.4 stack frames look like:
10009 SP----> +---------------------------------------+
10010 | back chain to caller | 0
10011 +---------------------------------------+
10012 | caller's saved LR | 4
10013 +---------------------------------------+
10014 | Parameter save area (P) | 8
10015 +---------------------------------------+
10016 | Alloca space (A) | 8+P
10017 +---------------------------------------+
10018 | Varargs save area (V) | 8+P+A
10019 +---------------------------------------+
10020 | Local variable space (L) | 8+P+A+V
10021 +---------------------------------------+
10022 | Float/int conversion temporary (X) | 8+P+A+V+L
10023 +---------------------------------------+
10024 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
10025 +---------------------------------------+
10026 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
10027 +---------------------------------------+
10028 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
10029 +---------------------------------------+
10030 | SPE: area for 64-bit GP registers |
10031 +---------------------------------------+
10032 | SPE alignment padding |
10033 +---------------------------------------+
10034 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
10035 +---------------------------------------+
10036 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
10037 +---------------------------------------+
10038 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
10039 +---------------------------------------+
10040 old SP->| back chain to caller's caller |
10041 +---------------------------------------+
10043 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
10044 given. (But note below and in sysv4.h that we require only 8 and
10045 may round up the size of our stack frame anyways. The historical
10046 reason is early versions of powerpc-linux which didn't properly
10047 align the stack at program startup. A happy side-effect is that
10048 -mno-eabi libraries can be used with -meabi programs.)
10050 The EABI configuration defaults to the V.4 layout. However,
10051 the stack alignment requirements may differ. If -mno-eabi is not
10052 given, the required stack alignment is 8 bytes; if -mno-eabi is
10053 given, the required alignment is 16 bytes. (But see V.4 comment
10054 above.) */
10056 #ifndef ABI_STACK_BOUNDARY
10057 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
10058 #endif
10060 rs6000_stack_t *
10061 rs6000_stack_info ()
10063 static rs6000_stack_t info, zero_info;
10064 rs6000_stack_t *info_ptr = &info;
10065 int reg_size = TARGET_POWERPC64 ? 8 : 4;
10066 int ehrd_size;
10067 int total_raw_size;
10069 /* Zero all fields portably. */
10070 info = zero_info;
10072 if (TARGET_SPE)
10074 /* Cache value so we don't rescan instruction chain over and over. */
10075 if (cfun->machine->insn_chain_scanned_p == 0)
10077 cfun->machine->insn_chain_scanned_p = 1;
10078 info_ptr->spe_64bit_regs_used = (int) spe_func_has_64bit_regs_p ();
10082 /* Select which calling sequence. */
10083 info_ptr->abi = DEFAULT_ABI;
10085 /* Calculate which registers need to be saved & save area size. */
10086 info_ptr->first_gp_reg_save = first_reg_to_save ();
10087 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
10088 even if it currently looks like we won't. */
10089 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
10090 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
10091 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
10092 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
10093 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
10094 else
10095 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
10097 /* For the SPE, we have an additional upper 32-bits on each GPR.
10098 Ideally we should save the entire 64-bits only when the upper
10099 half is used in SIMD instructions. Since we only record
10100 registers live (not the size they are used in), this proves
10101 difficult because we'd have to traverse the instruction chain at
10102 the right time, taking reload into account. This is a real pain,
10103 so we opt to save the GPRs in 64-bits always if but one register
10104 gets used in 64-bits. Otherwise, all the registers in the frame
10105 get saved in 32-bits.
10107 So... since when we save all GPRs (except the SP) in 64-bits, the
10108 traditional GP save area will be empty. */
10109 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
10110 info_ptr->gp_size = 0;
10112 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
10113 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
10115 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
10116 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
10117 - info_ptr->first_altivec_reg_save);
10119 /* Does this function call anything? */
10120 info_ptr->calls_p = (! current_function_is_leaf
10121 || cfun->machine->ra_needs_full_frame);
10123 /* Determine if we need to save the link register. */
10124 if (rs6000_ra_ever_killed ()
10125 || (DEFAULT_ABI == ABI_AIX
10126 && current_function_profile
10127 && !TARGET_PROFILE_KERNEL)
10128 #ifdef TARGET_RELOCATABLE
10129 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
10130 #endif
10131 || (info_ptr->first_fp_reg_save != 64
10132 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
10133 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
10134 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
10135 || (DEFAULT_ABI == ABI_DARWIN
10136 && flag_pic
10137 && current_function_uses_pic_offset_table)
10138 || info_ptr->calls_p)
10140 info_ptr->lr_save_p = 1;
10141 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
10144 /* Determine if we need to save the condition code registers. */
10145 if (regs_ever_live[CR2_REGNO]
10146 || regs_ever_live[CR3_REGNO]
10147 || regs_ever_live[CR4_REGNO])
10149 info_ptr->cr_save_p = 1;
10150 if (DEFAULT_ABI == ABI_V4)
10151 info_ptr->cr_size = reg_size;
10154 /* If the current function calls __builtin_eh_return, then we need
10155 to allocate stack space for registers that will hold data for
10156 the exception handler. */
10157 if (current_function_calls_eh_return)
10159 unsigned int i;
10160 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
10161 continue;
10163 /* SPE saves EH registers in 64-bits. */
10164 ehrd_size = i * (TARGET_SPE_ABI
10165 && info_ptr->spe_64bit_regs_used != 0
10166 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
10168 else
10169 ehrd_size = 0;
10171 /* Determine various sizes. */
10172 info_ptr->reg_size = reg_size;
10173 info_ptr->fixed_size = RS6000_SAVE_AREA;
10174 info_ptr->varargs_size = RS6000_VARARGS_AREA;
10175 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
10176 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
10179 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
10180 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
10181 else
10182 info_ptr->spe_gp_size = 0;
10184 if (TARGET_ALTIVEC_ABI && TARGET_ALTIVEC_VRSAVE)
10186 info_ptr->vrsave_mask = compute_vrsave_mask ();
10187 info_ptr->vrsave_size = info_ptr->vrsave_mask ? 4 : 0;
10189 else
10191 info_ptr->vrsave_mask = 0;
10192 info_ptr->vrsave_size = 0;
10195 /* Calculate the offsets. */
10196 switch (DEFAULT_ABI)
10198 case ABI_NONE:
10199 default:
10200 abort ();
10202 case ABI_AIX:
10203 case ABI_DARWIN:
10204 info_ptr->fp_save_offset = - info_ptr->fp_size;
10205 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
10207 if (TARGET_ALTIVEC_ABI)
10209 info_ptr->vrsave_save_offset
10210 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
10212 /* Align stack so vector save area is on a quadword boundary. */
10213 if (info_ptr->altivec_size != 0)
10214 info_ptr->altivec_padding_size
10215 = 16 - (-info_ptr->vrsave_save_offset % 16);
10216 else
10217 info_ptr->altivec_padding_size = 0;
10219 info_ptr->altivec_save_offset
10220 = info_ptr->vrsave_save_offset
10221 - info_ptr->altivec_padding_size
10222 - info_ptr->altivec_size;
10224 /* Adjust for AltiVec case. */
10225 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
10227 else
10228 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
10229 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
10230 info_ptr->lr_save_offset = 2*reg_size;
10231 break;
10233 case ABI_V4:
10234 info_ptr->fp_save_offset = - info_ptr->fp_size;
10235 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
10236 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
10238 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
10240 /* Align stack so SPE GPR save area is aligned on a
10241 double-word boundary. */
10242 if (info_ptr->spe_gp_size != 0)
10243 info_ptr->spe_padding_size
10244 = 8 - (-info_ptr->cr_save_offset % 8);
10245 else
10246 info_ptr->spe_padding_size = 0;
10248 info_ptr->spe_gp_save_offset
10249 = info_ptr->cr_save_offset
10250 - info_ptr->spe_padding_size
10251 - info_ptr->spe_gp_size;
10253 /* Adjust for SPE case. */
10254 info_ptr->toc_save_offset
10255 = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
10257 else if (TARGET_ALTIVEC_ABI)
10259 info_ptr->vrsave_save_offset
10260 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
10262 /* Align stack so vector save area is on a quadword boundary. */
10263 if (info_ptr->altivec_size != 0)
10264 info_ptr->altivec_padding_size
10265 = 16 - (-info_ptr->vrsave_save_offset % 16);
10266 else
10267 info_ptr->altivec_padding_size = 0;
10269 info_ptr->altivec_save_offset
10270 = info_ptr->vrsave_save_offset
10271 - info_ptr->altivec_padding_size
10272 - info_ptr->altivec_size;
10274 /* Adjust for AltiVec case. */
10275 info_ptr->toc_save_offset
10276 = info_ptr->altivec_save_offset - info_ptr->toc_size;
10278 else
10279 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
10280 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
10281 info_ptr->lr_save_offset = reg_size;
10282 break;
10285 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
10286 + info_ptr->gp_size
10287 + info_ptr->altivec_size
10288 + info_ptr->altivec_padding_size
10289 + info_ptr->spe_gp_size
10290 + info_ptr->spe_padding_size
10291 + ehrd_size
10292 + info_ptr->cr_size
10293 + info_ptr->lr_size
10294 + info_ptr->vrsave_size
10295 + info_ptr->toc_size,
10296 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
10297 ? 16 : 8);
10299 total_raw_size = (info_ptr->vars_size
10300 + info_ptr->parm_size
10301 + info_ptr->save_size
10302 + info_ptr->varargs_size
10303 + info_ptr->fixed_size);
10305 info_ptr->total_size =
10306 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
10308 /* Determine if we need to allocate any stack frame:
10310 For AIX we need to push the stack if a frame pointer is needed
10311 (because the stack might be dynamically adjusted), if we are
10312 debugging, if we make calls, or if the sum of fp_save, gp_save,
10313 and local variables are more than the space needed to save all
10314 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
10315 + 18*8 = 288 (GPR13 reserved).
10317 For V.4 we don't have the stack cushion that AIX uses, but assume
10318 that the debugger can handle stackless frames. */
10320 if (info_ptr->calls_p)
10321 info_ptr->push_p = 1;
10323 else if (DEFAULT_ABI == ABI_V4)
10324 info_ptr->push_p = total_raw_size > info_ptr->fixed_size;
10326 else if (frame_pointer_needed)
10327 info_ptr->push_p = 1;
10329 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
10330 info_ptr->push_p = 1;
10332 else
10333 info_ptr->push_p
10334 = total_raw_size - info_ptr->fixed_size > (TARGET_32BIT ? 220 : 288);
10336 /* Zero offsets if we're not saving those registers. */
10337 if (info_ptr->fp_size == 0)
10338 info_ptr->fp_save_offset = 0;
10340 if (info_ptr->gp_size == 0)
10341 info_ptr->gp_save_offset = 0;
10343 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
10344 info_ptr->altivec_save_offset = 0;
10346 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
10347 info_ptr->vrsave_save_offset = 0;
10349 if (! TARGET_SPE_ABI
10350 || info_ptr->spe_64bit_regs_used == 0
10351 || info_ptr->spe_gp_size == 0)
10352 info_ptr->spe_gp_save_offset = 0;
10354 if (! info_ptr->lr_save_p)
10355 info_ptr->lr_save_offset = 0;
10357 if (! info_ptr->cr_save_p)
10358 info_ptr->cr_save_offset = 0;
10360 if (! info_ptr->toc_save_p)
10361 info_ptr->toc_save_offset = 0;
10363 return info_ptr;
10366 /* Return true if the current function uses any GPRs in 64-bit SIMD
10367 mode. */
10369 static bool
10370 spe_func_has_64bit_regs_p ()
10372 rtx insns, insn;
10374 /* Functions that save and restore all the call-saved registers will
10375 need to save/restore the registers in 64-bits. */
10376 if (current_function_calls_eh_return
10377 || current_function_calls_setjmp
10378 || current_function_has_nonlocal_goto)
10379 return true;
10381 insns = get_insns ();
10383 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
10385 if (INSN_P (insn))
10387 rtx i;
10389 i = PATTERN (insn);
10390 if (GET_CODE (i) == SET
10391 && SPE_VECTOR_MODE (GET_MODE (SET_SRC (i))))
10392 return true;
10396 return false;
10399 void
10400 debug_stack_info (info)
10401 rs6000_stack_t *info;
10403 const char *abi_string;
10405 if (! info)
10406 info = rs6000_stack_info ();
10408 fprintf (stderr, "\nStack information for function %s:\n",
10409 ((current_function_decl && DECL_NAME (current_function_decl))
10410 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
10411 : "<unknown>"));
10413 switch (info->abi)
10415 default: abi_string = "Unknown"; break;
10416 case ABI_NONE: abi_string = "NONE"; break;
10417 case ABI_AIX: abi_string = "AIX"; break;
10418 case ABI_DARWIN: abi_string = "Darwin"; break;
10419 case ABI_V4: abi_string = "V.4"; break;
10422 fprintf (stderr, "\tABI = %5s\n", abi_string);
10424 if (TARGET_ALTIVEC_ABI)
10425 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
10427 if (TARGET_SPE_ABI)
10428 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
10430 if (info->first_gp_reg_save != 32)
10431 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
10433 if (info->first_fp_reg_save != 64)
10434 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
10436 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
10437 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
10438 info->first_altivec_reg_save);
10440 if (info->lr_save_p)
10441 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
10443 if (info->cr_save_p)
10444 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
10446 if (info->toc_save_p)
10447 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
10449 if (info->vrsave_mask)
10450 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
10452 if (info->push_p)
10453 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
10455 if (info->calls_p)
10456 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
10458 if (info->gp_save_offset)
10459 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
10461 if (info->fp_save_offset)
10462 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
10464 if (info->altivec_save_offset)
10465 fprintf (stderr, "\taltivec_save_offset = %5d\n",
10466 info->altivec_save_offset);
10468 if (info->spe_gp_save_offset)
10469 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
10470 info->spe_gp_save_offset);
10472 if (info->vrsave_save_offset)
10473 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
10474 info->vrsave_save_offset);
10476 if (info->lr_save_offset)
10477 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
10479 if (info->cr_save_offset)
10480 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
10482 if (info->toc_save_offset)
10483 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
10485 if (info->varargs_save_offset)
10486 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
10488 if (info->total_size)
10489 fprintf (stderr, "\ttotal_size = %5d\n", info->total_size);
10491 if (info->varargs_size)
10492 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
10494 if (info->vars_size)
10495 fprintf (stderr, "\tvars_size = %5d\n", info->vars_size);
10497 if (info->parm_size)
10498 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
10500 if (info->fixed_size)
10501 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
10503 if (info->gp_size)
10504 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
10506 if (info->spe_gp_size)
10507 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
10509 if (info->fp_size)
10510 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
10512 if (info->altivec_size)
10513 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
10515 if (info->vrsave_size)
10516 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
10518 if (info->altivec_padding_size)
10519 fprintf (stderr, "\taltivec_padding_size= %5d\n",
10520 info->altivec_padding_size);
10522 if (info->spe_padding_size)
10523 fprintf (stderr, "\tspe_padding_size = %5d\n",
10524 info->spe_padding_size);
10526 if (info->lr_size)
10527 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
10529 if (info->cr_size)
10530 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
10532 if (info->toc_size)
10533 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
10535 if (info->save_size)
10536 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
10538 if (info->reg_size != 4)
10539 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
10541 fprintf (stderr, "\n");
10545 rs6000_return_addr (count, frame)
10546 int count;
10547 rtx frame;
10549 /* Currently we don't optimize very well between prolog and body
10550 code and for PIC code the code can be actually quite bad, so
10551 don't try to be too clever here. */
10552 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
10554 cfun->machine->ra_needs_full_frame = 1;
10556 return
10557 gen_rtx_MEM
10558 (Pmode,
10559 memory_address
10560 (Pmode,
10561 plus_constant (copy_to_reg
10562 (gen_rtx_MEM (Pmode,
10563 memory_address (Pmode, frame))),
10564 RETURN_ADDRESS_OFFSET)));
10567 cfun->machine->ra_need_lr = 1;
10568 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
10571 /* Say whether a function is a candidate for sibcall handling or not.
10572 We do not allow indirect calls to be optimized into sibling calls.
10573 Also, we can't do it if there are any vector parameters; there's
10574 nowhere to put the VRsave code so it works; note that functions with
10575 vector parameters are required to have a prototype, so the argument
10576 type info must be available here. (The tail recursion case can work
10577 with vector parameters, but there's no way to distinguish here.) */
10578 static bool
10579 rs6000_function_ok_for_sibcall (decl, exp)
10580 tree decl;
10581 tree exp ATTRIBUTE_UNUSED;
10583 tree type;
10584 if (decl)
10586 if (TARGET_ALTIVEC_VRSAVE)
10588 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
10589 type; type = TREE_CHAIN (type))
10591 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
10592 return false;
10595 if (DEFAULT_ABI == ABI_DARWIN
10596 || (*targetm.binds_local_p) (decl))
10598 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
10600 if (!lookup_attribute ("longcall", attr_list)
10601 || lookup_attribute ("shortcall", attr_list))
10602 return true;
10605 return false;
10608 static int
10609 rs6000_ra_ever_killed ()
10611 rtx top;
10612 rtx reg;
10613 rtx insn;
10615 /* Irritatingly, there are two kinds of thunks -- those created with
10616 TARGET_ASM_OUTPUT_MI_THUNK and those with DECL_THUNK_P that go
10617 through the regular part of the compiler. This is a very hacky
10618 way to tell them apart. */
10619 if (current_function_is_thunk && !no_new_pseudos)
10620 return 0;
10622 /* regs_ever_live has LR marked as used if any sibcalls are present,
10623 but this should not force saving and restoring in the
10624 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
10625 clobbers LR, so that is inappropriate. */
10627 /* Also, the prologue can generate a store into LR that
10628 doesn't really count, like this:
10630 move LR->R0
10631 bcl to set PIC register
10632 move LR->R31
10633 move R0->LR
10635 When we're called from the epilogue, we need to avoid counting
10636 this as a store. */
10638 push_topmost_sequence ();
10639 top = get_insns ();
10640 pop_topmost_sequence ();
10641 reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
10643 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
10645 if (INSN_P (insn))
10647 if (FIND_REG_INC_NOTE (insn, reg))
10648 return 1;
10649 else if (GET_CODE (insn) == CALL_INSN
10650 && !SIBLING_CALL_P (insn))
10651 return 1;
10652 else if (set_of (reg, insn) != NULL_RTX
10653 && !prologue_epilogue_contains (insn))
10654 return 1;
10657 return 0;
10660 /* Add a REG_MAYBE_DEAD note to the insn. */
10661 static void
10662 rs6000_maybe_dead (insn)
10663 rtx insn;
10665 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
10666 const0_rtx,
10667 REG_NOTES (insn));
10670 /* Emit instructions needed to load the TOC register.
10671 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
10672 a constant pool; or for SVR4 -fpic. */
10674 void
10675 rs6000_emit_load_toc_table (fromprolog)
10676 int fromprolog;
10678 rtx dest, insn;
10679 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
10681 if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
10683 rtx temp = (fromprolog
10684 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
10685 : gen_reg_rtx (Pmode));
10686 insn = emit_insn (gen_load_toc_v4_pic_si (temp));
10687 if (fromprolog)
10688 rs6000_maybe_dead (insn);
10689 insn = emit_move_insn (dest, temp);
10690 if (fromprolog)
10691 rs6000_maybe_dead (insn);
10693 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
10695 char buf[30];
10696 rtx tempLR = (fromprolog
10697 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
10698 : gen_reg_rtx (Pmode));
10699 rtx temp0 = (fromprolog
10700 ? gen_rtx_REG (Pmode, 0)
10701 : gen_reg_rtx (Pmode));
10702 rtx symF;
10704 /* possibly create the toc section */
10705 if (! toc_initialized)
10707 toc_section ();
10708 function_section (current_function_decl);
10711 if (fromprolog)
10713 rtx symL;
10715 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
10716 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
10718 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
10719 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
10721 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
10722 symF)));
10723 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
10724 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
10725 symL,
10726 symF)));
10728 else
10730 rtx tocsym;
10731 static int reload_toc_labelno = 0;
10733 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
10735 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
10736 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
10738 emit_insn (gen_load_toc_v4_PIC_1b (tempLR, symF, tocsym));
10739 emit_move_insn (dest, tempLR);
10740 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
10742 insn = emit_insn (gen_addsi3 (dest, temp0, dest));
10743 if (fromprolog)
10744 rs6000_maybe_dead (insn);
10746 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
10748 /* This is for AIX code running in non-PIC ELF32. */
10749 char buf[30];
10750 rtx realsym;
10751 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
10752 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
10754 insn = emit_insn (gen_elf_high (dest, realsym));
10755 if (fromprolog)
10756 rs6000_maybe_dead (insn);
10757 insn = emit_insn (gen_elf_low (dest, dest, realsym));
10758 if (fromprolog)
10759 rs6000_maybe_dead (insn);
10761 else if (DEFAULT_ABI == ABI_AIX)
10763 if (TARGET_32BIT)
10764 insn = emit_insn (gen_load_toc_aix_si (dest));
10765 else
10766 insn = emit_insn (gen_load_toc_aix_di (dest));
10767 if (fromprolog)
10768 rs6000_maybe_dead (insn);
10770 else
10771 abort ();
10774 int
10775 get_TOC_alias_set ()
10777 static int set = -1;
10778 if (set == -1)
10779 set = new_alias_set ();
10780 return set;
10783 /* This returns nonzero if the current function uses the TOC. This is
10784 determined by the presence of (unspec ... UNSPEC_TOC) or
10785 use (unspec ... UNSPEC_TOC), which are generated by the various
10786 load_toc_* patterns. */
10789 uses_TOC ()
10791 rtx insn;
10793 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
10794 if (INSN_P (insn))
10796 rtx pat = PATTERN (insn);
10797 int i;
10799 if (GET_CODE (pat) == PARALLEL)
10800 for (i = 0; i < XVECLEN (pat, 0); i++)
10802 rtx sub = XVECEXP (pat, 0, i);
10803 if (GET_CODE (sub) == USE)
10805 sub = XEXP (sub, 0);
10806 if (GET_CODE (sub) == UNSPEC
10807 && XINT (sub, 1) == UNSPEC_TOC)
10808 return 1;
10812 return 0;
10816 create_TOC_reference (symbol)
10817 rtx symbol;
10819 return gen_rtx_PLUS (Pmode,
10820 gen_rtx_REG (Pmode, TOC_REGISTER),
10821 gen_rtx_CONST (Pmode,
10822 gen_rtx_MINUS (Pmode, symbol,
10823 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
10826 /* If _Unwind_* has been called from within the same module,
10827 toc register is not guaranteed to be saved to 40(1) on function
10828 entry. Save it there in that case. */
10830 void
10831 rs6000_aix_emit_builtin_unwind_init ()
10833 rtx mem;
10834 rtx stack_top = gen_reg_rtx (Pmode);
10835 rtx opcode_addr = gen_reg_rtx (Pmode);
10836 rtx opcode = gen_reg_rtx (SImode);
10837 rtx tocompare = gen_reg_rtx (SImode);
10838 rtx no_toc_save_needed = gen_label_rtx ();
10840 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
10841 emit_move_insn (stack_top, mem);
10843 mem = gen_rtx_MEM (Pmode,
10844 gen_rtx_PLUS (Pmode, stack_top,
10845 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
10846 emit_move_insn (opcode_addr, mem);
10847 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
10848 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
10849 : 0xE8410028, SImode));
10851 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
10852 SImode, NULL_RTX, NULL_RTX,
10853 no_toc_save_needed);
10855 mem = gen_rtx_MEM (Pmode,
10856 gen_rtx_PLUS (Pmode, stack_top,
10857 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
10858 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
10859 emit_label (no_toc_save_needed);
10862 /* This ties together stack memory (MEM with an alias set of
10863 rs6000_sr_alias_set) and the change to the stack pointer. */
10865 static void
10866 rs6000_emit_stack_tie ()
10868 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
10870 set_mem_alias_set (mem, rs6000_sr_alias_set);
10871 emit_insn (gen_stack_tie (mem));
10874 /* Emit the correct code for allocating stack space, as insns.
10875 If COPY_R12, make sure a copy of the old frame is left in r12.
10876 The generated code may use hard register 0 as a temporary. */
10878 static void
10879 rs6000_emit_allocate_stack (size, copy_r12)
10880 HOST_WIDE_INT size;
10881 int copy_r12;
10883 rtx insn;
10884 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
10885 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
10886 rtx todec = GEN_INT (-size);
10888 if (current_function_limit_stack)
10890 if (REG_P (stack_limit_rtx)
10891 && REGNO (stack_limit_rtx) > 1
10892 && REGNO (stack_limit_rtx) <= 31)
10894 emit_insn (TARGET_32BIT
10895 ? gen_addsi3 (tmp_reg,
10896 stack_limit_rtx,
10897 GEN_INT (size))
10898 : gen_adddi3 (tmp_reg,
10899 stack_limit_rtx,
10900 GEN_INT (size)));
10902 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
10903 const0_rtx));
10905 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
10906 && TARGET_32BIT
10907 && DEFAULT_ABI == ABI_V4)
10909 rtx toload = gen_rtx_CONST (VOIDmode,
10910 gen_rtx_PLUS (Pmode,
10911 stack_limit_rtx,
10912 GEN_INT (size)));
10914 emit_insn (gen_elf_high (tmp_reg, toload));
10915 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
10916 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
10917 const0_rtx));
10919 else
10920 warning ("stack limit expression is not supported");
10923 if (copy_r12 || ! TARGET_UPDATE)
10924 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
10926 if (TARGET_UPDATE)
10928 if (size > 32767)
10930 /* Need a note here so that try_split doesn't get confused. */
10931 if (get_last_insn() == NULL_RTX)
10932 emit_note (NOTE_INSN_DELETED);
10933 insn = emit_move_insn (tmp_reg, todec);
10934 try_split (PATTERN (insn), insn, 0);
10935 todec = tmp_reg;
10938 insn = emit_insn (TARGET_32BIT
10939 ? gen_movsi_update (stack_reg, stack_reg,
10940 todec, stack_reg)
10941 : gen_movdi_update (stack_reg, stack_reg,
10942 todec, stack_reg));
10944 else
10946 insn = emit_insn (TARGET_32BIT
10947 ? gen_addsi3 (stack_reg, stack_reg, todec)
10948 : gen_adddi3 (stack_reg, stack_reg, todec));
10949 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
10950 gen_rtx_REG (Pmode, 12));
10953 RTX_FRAME_RELATED_P (insn) = 1;
10954 REG_NOTES (insn) =
10955 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
10956 gen_rtx_SET (VOIDmode, stack_reg,
10957 gen_rtx_PLUS (Pmode, stack_reg,
10958 GEN_INT (-size))),
10959 REG_NOTES (insn));
10962 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
10963 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
10964 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
10965 deduce these equivalences by itself so it wasn't necessary to hold
10966 its hand so much. */
10968 static void
10969 rs6000_frame_related (insn, reg, val, reg2, rreg)
10970 rtx insn;
10971 rtx reg;
10972 HOST_WIDE_INT val;
10973 rtx reg2;
10974 rtx rreg;
10976 rtx real, temp;
10978 /* copy_rtx will not make unique copies of registers, so we need to
10979 ensure we don't have unwanted sharing here. */
10980 if (reg == reg2)
10981 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
10983 if (reg == rreg)
10984 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
10986 real = copy_rtx (PATTERN (insn));
10988 if (reg2 != NULL_RTX)
10989 real = replace_rtx (real, reg2, rreg);
10991 real = replace_rtx (real, reg,
10992 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
10993 STACK_POINTER_REGNUM),
10994 GEN_INT (val)));
10996 /* We expect that 'real' is either a SET or a PARALLEL containing
10997 SETs (and possibly other stuff). In a PARALLEL, all the SETs
10998 are important so they all have to be marked RTX_FRAME_RELATED_P. */
11000 if (GET_CODE (real) == SET)
11002 rtx set = real;
11004 temp = simplify_rtx (SET_SRC (set));
11005 if (temp)
11006 SET_SRC (set) = temp;
11007 temp = simplify_rtx (SET_DEST (set));
11008 if (temp)
11009 SET_DEST (set) = temp;
11010 if (GET_CODE (SET_DEST (set)) == MEM)
11012 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
11013 if (temp)
11014 XEXP (SET_DEST (set), 0) = temp;
11017 else if (GET_CODE (real) == PARALLEL)
11019 int i;
11020 for (i = 0; i < XVECLEN (real, 0); i++)
11021 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
11023 rtx set = XVECEXP (real, 0, i);
11025 temp = simplify_rtx (SET_SRC (set));
11026 if (temp)
11027 SET_SRC (set) = temp;
11028 temp = simplify_rtx (SET_DEST (set));
11029 if (temp)
11030 SET_DEST (set) = temp;
11031 if (GET_CODE (SET_DEST (set)) == MEM)
11033 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
11034 if (temp)
11035 XEXP (SET_DEST (set), 0) = temp;
11037 RTX_FRAME_RELATED_P (set) = 1;
11040 else
11041 abort ();
11043 if (TARGET_SPE)
11044 real = spe_synthesize_frame_save (real);
11046 RTX_FRAME_RELATED_P (insn) = 1;
11047 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
11048 real,
11049 REG_NOTES (insn));
/* Given an SPE frame note, return a PARALLEL of SETs with the
   original note, plus a synthetic register save.  */

static rtx
spe_synthesize_frame_save (real)
     rtx real;
{
  rtx synth, offset, reg, real2;

  /* Only 64-bit (V2SImode) SPE saves need the synthetic twin; anything
     else is returned unchanged.  */
  if (GET_CODE (real) != SET
      || GET_MODE (SET_SRC (real)) != V2SImode)
    return real;

  /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
     frame related note.  The parallel contains a set of the register
     being saved, and another set to a synthetic register (n+1200).
     This is so we can differentiate between 64-bit and 32-bit saves.
     Words cannot describe this nastiness.  */

  /* The caller must hand us a (set (mem (plus reg const)) (reg)); anything
     else means the prologue emitted a shape we don't know how to split.  */
  if (GET_CODE (SET_DEST (real)) != MEM
      || GET_CODE (XEXP (SET_DEST (real), 0)) != PLUS
      || GET_CODE (SET_SRC (real)) != REG)
    abort ();

  /* Transform:
       (set (mem (plus (reg x) (const y)))
            (reg z))
     into:
       (set (mem (plus (reg x) (const y+4)))
            (reg z+1200))
  */

  /* real2 describes the 32-bit half saved as a normal GPR; narrow both
     the memory destination and the source register to SImode.  */
  real2 = copy_rtx (real);
  PUT_MODE (SET_DEST (real2), SImode);
  reg = SET_SRC (real2);
  real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
  /* Snapshot before the endian-dependent offset tweak below, so synth
     starts from the un-adjusted address.  */
  synth = copy_rtx (real2);

  if (BYTES_BIG_ENDIAN)
    {
      offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
      real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
    }

  reg = SET_SRC (synth);

  /* Rewrite the source of the synthetic save as register n+1200 so the
     unwinder can tell 64-bit saves from 32-bit ones.  */
  synth = replace_rtx (synth, reg,
		       gen_rtx_REG (SImode, REGNO (reg) + 1200));

  /* On little-endian the synthetic (high) half lives 4 bytes up; on
     big-endian it is real2 that was bumped above.  */
  offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
  synth = replace_rtx (synth, offset,
		       GEN_INT (INTVAL (offset)
				+ (BYTES_BIG_ENDIAN ? 0 : 4)));

  RTX_FRAME_RELATED_P (synth) = 1;
  RTX_FRAME_RELATED_P (real2) = 1;
  /* Keep the two halves in ascending-address order in the PARALLEL.  */
  if (BYTES_BIG_ENDIAN)
    real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
  else
    real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));

  return real;
}
11116 /* Returns an insn that has a vrsave set operation with the
11117 appropriate CLOBBERs. */
11119 static rtx
11120 generate_set_vrsave (reg, info, epiloguep)
11121 rtx reg;
11122 rs6000_stack_t *info;
11123 int epiloguep;
11125 int nclobs, i;
11126 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
11127 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
11129 clobs[0]
11130 = gen_rtx_SET (VOIDmode,
11131 vrsave,
11132 gen_rtx_UNSPEC_VOLATILE (SImode,
11133 gen_rtvec (2, reg, vrsave),
11134 30));
11136 nclobs = 1;
11138 /* We need to clobber the registers in the mask so the scheduler
11139 does not move sets to VRSAVE before sets of AltiVec registers.
11141 However, if the function receives nonlocal gotos, reload will set
11142 all call saved registers live. We will end up with:
11144 (set (reg 999) (mem))
11145 (parallel [ (set (reg vrsave) (unspec blah))
11146 (clobber (reg 999))])
11148 The clobber will cause the store into reg 999 to be dead, and
11149 flow will attempt to delete an epilogue insn. In this case, we
11150 need an unspec use/set of the register. */
11152 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
11153 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
11155 if (!epiloguep || call_used_regs [i])
11156 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
11157 gen_rtx_REG (V4SImode, i));
11158 else
11160 rtx reg = gen_rtx_REG (V4SImode, i);
11162 clobs[nclobs++]
11163 = gen_rtx_SET (VOIDmode,
11164 reg,
11165 gen_rtx_UNSPEC (V4SImode,
11166 gen_rtvec (1, reg), 27));
11170 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
11172 for (i = 0; i < nclobs; ++i)
11173 XVECEXP (insn, 0, i) = clobs[i];
11175 return insn;
/* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
   Save REGNO into [FRAME_REG + OFFSET] in mode MODE.

   FRAME_REG is the register the store address is actually built from
   (possibly a temporary), FRAME_PTR is the register used when
   rewriting the insn for the frame-related note, and TOTAL_SIZE is
   the full frame size, both forwarded to rs6000_frame_related.  */

static void
emit_frame_save (frame_reg, frame_ptr, mode, regno, offset, total_size)
     rtx frame_reg;
     rtx frame_ptr;
     enum machine_mode mode;
     unsigned int regno;
     int offset;
     int total_size;
{
  rtx reg, offset_rtx, insn, mem, addr, int_rtx;
  rtx replacea, replaceb;

  int_rtx = GEN_INT (offset);

  /* Some cases that need register indexed addressing.  */
  if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
      || (TARGET_SPE_ABI
	  && SPE_VECTOR_MODE (mode)
	  && !SPE_CONST_OFFSET_OK (offset)))
    {
      /* Whomever calls us must make sure r11 is available in the
	 flow path of instructions in the prologue.  */
      offset_rtx = gen_rtx_REG (Pmode, 11);
      emit_move_insn (offset_rtx, int_rtx);

      /* In the unwind note, show the constant offset in place of the
	 scratch register r11.  */
      replacea = offset_rtx;
      replaceb = int_rtx;
    }
  else
    {
      /* Constant offset is fine as-is; no rewriting needed.  */
      offset_rtx = int_rtx;
      replacea = NULL_RTX;
      replaceb = NULL_RTX;
    }

  reg = gen_rtx_REG (mode, regno);
  addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
  mem = gen_rtx_MEM (mode, addr);
  set_mem_alias_set (mem, rs6000_sr_alias_set);

  insn = emit_move_insn (mem, reg);

  rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
}
11226 /* Emit an offset memory reference suitable for a frame store, while
11227 converting to a valid addressing mode. */
11229 static rtx
11230 gen_frame_mem_offset (mode, reg, offset)
11231 enum machine_mode mode;
11232 rtx reg;
11233 int offset;
11235 rtx int_rtx, offset_rtx;
11237 int_rtx = GEN_INT (offset);
11239 if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
11241 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
11242 emit_move_insn (offset_rtx, int_rtx);
11244 else
11245 offset_rtx = int_rtx;
11247 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
/* Emit function prologue as insns.

   Allocates the stack frame (before or after register saves depending
   on the ABI), saves AltiVec/SPE/FP/GP registers, VRSAVE, LR and CR as
   dictated by rs6000_stack_info, attaches RTX_FRAME_RELATED_P notes
   for unwinding, sets up the frame pointer and the PIC/TOC register.  */

void
rs6000_emit_prologue ()
{
  rs6000_stack_t *info = rs6000_stack_info ();
  enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
  int reg_size = TARGET_POWERPC64 ? 8 : 4;
  rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
  rtx frame_reg_rtx = sp_reg_rtx;
  rtx cr_save_rtx = NULL;
  rtx insn;
  int saving_FPRs_inline;
  int using_store_multiple;
  HOST_WIDE_INT sp_offset = 0;

  /* SPE saves GPRs as 64-bit V2SImode quantities.  */
  if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
    {
      reg_mode = V2SImode;
      reg_size = 8;
    }

  using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
			  && (!TARGET_SPE_ABI
			      || info->spe_64bit_regs_used == 0)
			  && info->first_gp_reg_save < 31);
  saving_FPRs_inline = (info->first_fp_reg_save == 64
			|| FP_SAVE_INLINE (info->first_fp_reg_save)
			|| current_function_calls_eh_return
			|| cfun->machine->ra_need_lr);

  /* For V.4, update stack before we do any saving and set back pointer.  */
  if (info->push_p
      && (DEFAULT_ABI == ABI_V4
	  || current_function_calls_eh_return))
    {
      /* Small frames are addressed off the new sp with a constant
	 offset; large frames keep the old sp in r12.  */
      if (info->total_size < 32767)
	sp_offset = info->total_size;
      else
	frame_reg_rtx = frame_ptr_rtx;
      rs6000_emit_allocate_stack (info->total_size,
				  (frame_reg_rtx != sp_reg_rtx
				   && (info->cr_save_p
				       || info->lr_save_p
				       || info->first_fp_reg_save < 64
				       || info->first_gp_reg_save < 32
				       )));
      if (frame_reg_rtx != sp_reg_rtx)
	rs6000_emit_stack_tie ();
    }

  /* Save AltiVec registers if needed.  */
  if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
    {
      int i;

      /* There should be a non inline version of this, for when we
	 are saving lots of vector registers.  */
      for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
	if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
	  {
	    rtx areg, savereg, mem;
	    int offset;

	    offset = info->altivec_save_offset + sp_offset
	      + 16 * (i - info->first_altivec_reg_save);

	    savereg = gen_rtx_REG (V4SImode, i);

	    /* r0 holds the offset for [reg+reg] addressing below.  */
	    areg = gen_rtx_REG (Pmode, 0);
	    emit_move_insn (areg, GEN_INT (offset));

	    /* AltiVec addressing mode is [reg+reg].  */
	    mem = gen_rtx_MEM (V4SImode,
			       gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));

	    set_mem_alias_set (mem, rs6000_sr_alias_set);

	    insn = emit_move_insn (mem, savereg);

	    /* The note shows the constant offset in place of r0.  */
	    rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
				  areg, GEN_INT (offset));
	  }
    }

  /* VRSAVE is a bit vector representing which AltiVec registers
     are used.  The OS uses this to determine which vector
     registers to save on a context switch.  We need to save
     VRSAVE on the stack frame, add whatever AltiVec registers we
     used in this function, and do the corresponding magic in the
     epilogue.  */

  if (TARGET_ALTIVEC && info->vrsave_mask != 0)
    {
      rtx reg, mem, vrsave;
      int offset;

      /* Get VRSAVE onto a GPR.  */
      reg = gen_rtx_REG (SImode, 12);
      vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
      if (TARGET_MACHO)
	emit_insn (gen_get_vrsave_internal (reg));
      else
	emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));

      /* Save VRSAVE.  */
      offset = info->vrsave_save_offset + sp_offset;
      /* NOTE(review): the `mem' lvalue below was dropped by the blob
	 extraction; restored here — confirm against upstream.  */
      mem
	= gen_rtx_MEM (SImode,
		       gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
      set_mem_alias_set (mem, rs6000_sr_alias_set);
      insn = emit_move_insn (mem, reg);

      /* Include the registers in the mask.  */
      emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));

      insn = emit_insn (generate_set_vrsave (reg, info, 0));
    }

  /* If we use the link register, get it into r0.  */
  if (info->lr_save_p)
    emit_move_insn (gen_rtx_REG (Pmode, 0),
		    gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));

  /* If we need to save CR, put it into r12.  */
  if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
    {
      cr_save_rtx = gen_rtx_REG (SImode, 12);
      emit_insn (gen_movesi_from_cr (cr_save_rtx));
    }

  /* Do any required saving of fpr's.  If only one or two to save, do
     it ourselves.  Otherwise, call function.  */
  if (saving_FPRs_inline)
    {
      int i;
      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	if ((regs_ever_live[info->first_fp_reg_save+i]
	     && ! call_used_regs[info->first_fp_reg_save+i]))
	  emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
			   info->first_fp_reg_save + i,
			   info->fp_save_offset + sp_offset + 8 * i,
			   info->total_size);
    }
  else if (info->first_fp_reg_save != 64)
    {
      /* Out-of-line save: call the _savefN millicode routine; the
	 PARALLEL records the stores it performs plus the LR clobber.  */
      int i;
      char rname[30];
      const char *alloc_rname;
      rtvec p;
      p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);

      RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
					  gen_rtx_REG (Pmode,
						       LINK_REGISTER_REGNUM));
      sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
	       info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
      alloc_rname = ggc_strdup (rname);
      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
				      gen_rtx_SYMBOL_REF (Pmode,
							  alloc_rname));
      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	{
	  rtx addr, reg, mem;
	  reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->fp_save_offset
					+ sp_offset + 8*i));
	  mem = gen_rtx_MEM (DFmode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
	}
      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    NULL_RTX, NULL_RTX);
    }

  /* Save GPRs.  This is done as a PARALLEL if we are using
     the store-multiple instructions.  */
  if (using_store_multiple)
    {
      rtvec p;
      int i;
      p = rtvec_alloc (32 - info->first_gp_reg_save);
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	{
	  rtx addr, reg, mem;
	  reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->gp_save_offset
					+ sp_offset
					+ reg_size * i));
	  mem = gen_rtx_MEM (reg_mode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
	}
      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    NULL_RTX, NULL_RTX);
    }
  else
    {
      int i;
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	/* Also save the PIC register when PIC code needs it, even if it
	   is nominally call-used.  */
	if ((regs_ever_live[info->first_gp_reg_save+i]
	     && ! call_used_regs[info->first_gp_reg_save+i])
	    || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
		&& ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
		    || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
	  {
	    rtx addr, reg, mem;
	    reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);

	    if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
	      {
		int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
		rtx b;

		/* SPE stores cannot encode large constant offsets;
		   spill the offset to the fixed scratch register.  */
		if (!SPE_CONST_OFFSET_OK (offset))
		  {
		    b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
		    emit_move_insn (b, GEN_INT (offset));
		  }
		else
		  b = GEN_INT (offset);

		addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
		mem = gen_rtx_MEM (V2SImode, addr);
		set_mem_alias_set (mem, rs6000_sr_alias_set);
		insn = emit_move_insn (mem, reg);

		if (GET_CODE (b) == CONST_INT)
		  rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
					NULL_RTX, NULL_RTX);
		else
		  rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
					b, GEN_INT (offset));
	      }
	    else
	      {
		addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				     GEN_INT (info->gp_save_offset
					      + sp_offset
					      + reg_size * i));
		mem = gen_rtx_MEM (reg_mode, addr);
		set_mem_alias_set (mem, rs6000_sr_alias_set);

		insn = emit_move_insn (mem, reg);
		rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
				      NULL_RTX, NULL_RTX);
	      }
	  }
    }

  /* ??? There's no need to emit actual instructions here, but it's the
     easiest way to get the frame unwind information emitted.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i, regno;

      /* In AIX ABI we need to pretend we save r2 here.  */
      if (TARGET_AIX)
	{
	  rtx addr, reg, mem;

	  reg = gen_rtx_REG (reg_mode, 2);
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (sp_offset + 5 * reg_size));
	  mem = gen_rtx_MEM (reg_mode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  insn = emit_move_insn (mem, reg);
	  rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
				NULL_RTX, NULL_RTX);
	  /* Replace the store with a blockage: only the unwind note was
	     wanted, not the actual instruction.  */
	  PATTERN (insn) = gen_blockage ();
	}

      /* Save the EH data registers to known frame slots.  */
      for (i = 0; ; ++i)
	{
	  regno = EH_RETURN_DATA_REGNO (i);
	  if (regno == INVALID_REGNUM)
	    break;

	  emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
			   info->ehrd_offset + sp_offset
			   + reg_size * (int) i,
			   info->total_size);
	}
    }

  /* Save lr if we used it.  */
  if (info->lr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->lr_save_offset + sp_offset));
      rtx reg = gen_rtx_REG (Pmode, 0);
      rtx mem = gen_rtx_MEM (Pmode, addr);
      /* This should not be of rs6000_sr_alias_set, because of
	 __builtin_return_address.  */

      insn = emit_move_insn (mem, reg);
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
    }

  /* Save CR if we use any that must be preserved.  */
  if (info->cr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->cr_save_offset + sp_offset));
      rtx mem = gen_rtx_MEM (SImode, addr);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      /* If r12 was used to hold the original sp, copy cr into r0 now
	 that it's free.  */
      if (REGNO (frame_reg_rtx) == 12)
	{
	  cr_save_rtx = gen_rtx_REG (SImode, 0);
	  emit_insn (gen_movesi_from_cr (cr_save_rtx));
	}
      insn = emit_move_insn (mem, cr_save_rtx);

      /* Now, there's no way that dwarf2out_frame_debug_expr is going
	 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
	 But that's OK.  All we have to do is specify that _one_ condition
	 code register is saved in this stack slot.  The thrower's epilogue
	 will then restore all the call-saved registers.
	 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux.  */
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
    }

  /* Update stack and set back pointer unless this is V.4,
     for which it was done previously.  */
  if (info->push_p
      && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
    rs6000_emit_allocate_stack (info->total_size, FALSE);

  /* Set frame pointer, if needed.  */
  if (frame_pointer_needed)
    {
      insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
			     sp_reg_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up.  */
  if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
      || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
	  && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
    {
      /* If emit_load_toc_table will use the link register, we need to save
	 it.  We use R12 for this purpose because emit_load_toc_table
	 can use register 0.  This allows us to use a plain 'blr' to return
	 from the procedure more often.  */
      int save_LR_around_toc_setup = (TARGET_ELF
				      && DEFAULT_ABI != ABI_AIX
				      && flag_pic
				      && ! info->lr_save_p
				      && EXIT_BLOCK_PTR->pred != NULL);
      if (save_LR_around_toc_setup)
	{
	  rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
	  rs6000_maybe_dead (emit_move_insn (frame_ptr_rtx, lr));
	  rs6000_emit_load_toc_table (TRUE);
	  rs6000_maybe_dead (emit_move_insn (lr, frame_ptr_rtx));
	}
      else
	rs6000_emit_load_toc_table (TRUE);
    }

#if TARGET_MACHO
  /* Darwin PIC: materialize the picbase in LR and copy it into the
     PIC offset table register.  */
  if (DEFAULT_ABI == ABI_DARWIN
      && flag_pic && current_function_uses_pic_offset_table)
    {
      rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
      const char *picbase = machopic_function_base_name ();
      rtx src = gen_rtx_SYMBOL_REF (Pmode, picbase);

      rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest, src)));

      rs6000_maybe_dead (
	emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
			gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
    }
#endif
}
/* Write function prologue.

   Emits .extern directives for the out-of-line FP save/restore
   routines and the AIX common-mode routines, and, on targets without
   a prologue insn pattern (! HAVE_prologue), generates and prints the
   prologue RTL directly via final().  */

static void
rs6000_output_function_prologue (file, size)
     FILE *file;
     HOST_WIDE_INT size ATTRIBUTE_UNUSED;
{
  rs6000_stack_t *info = rs6000_stack_info ();

  if (TARGET_DEBUG_STACK)
    debug_stack_info (info);

  /* Write .extern for any function we will call to save and restore
     fp values.  */
  if (info->first_fp_reg_save < 64
      && !FP_SAVE_INLINE (info->first_fp_reg_save))
    fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
	     SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
	     RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
	     RESTORE_FP_SUFFIX);

  /* Write .extern for AIX common mode routines, if needed.  */
  if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
    {
      fputs ("\t.extern __mulh\n", file);
      fputs ("\t.extern __mull\n", file);
      fputs ("\t.extern __divss\n", file);
      fputs ("\t.extern __divus\n", file);
      fputs ("\t.extern __quoss\n", file);
      fputs ("\t.extern __quous\n", file);
      /* Only emit the externs once per translation unit.  */
      common_mode_defined = 1;
    }

  if (! HAVE_prologue)
    {
      start_sequence ();

      /* A NOTE_INSN_DELETED is supposed to be at the start and end of
	 the "toplevel" insn chain.  */
      emit_note (NOTE_INSN_DELETED);
      rs6000_emit_prologue ();
      emit_note (NOTE_INSN_DELETED);

      /* Expand INSN_ADDRESSES so final() doesn't crash.  */
      {
	rtx insn;
	unsigned addr = 0;
	for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
	  {
	    INSN_ADDRESSES_NEW (insn, addr);
	    addr += 4;
	  }
      }

      if (TARGET_DEBUG_STACK)
	debug_rtx_list (get_insns (), 100);
      final (get_insns (), file, FALSE, FALSE);
      end_sequence ();
    }

  /* Advance the label counter used for PIC base labels.  */
  rs6000_pic_labelno++;
}
/* Emit function epilogue as insns.

   At present, dwarf2out_frame_debug_expr doesn't understand
   register restores, so we don't bother setting RTX_FRAME_RELATED_P
   anywhere in the epilogue.  Most of the insns below would in any case
   need special notes to explain where r11 is in relation to the stack.

   SIBCALL is nonzero when this epilogue precedes a sibling call, in
   which case no return insn is emitted.  */

void
rs6000_emit_epilogue (sibcall)
     int sibcall;
{
  rs6000_stack_t *info;
  int restoring_FPRs_inline;
  int using_load_multiple;
  int using_mfcr_multiple;
  int use_backchain_to_restore_sp;
  int sp_offset = 0;
  rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
  rtx frame_reg_rtx = sp_reg_rtx;
  enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
  int reg_size = TARGET_POWERPC64 ? 8 : 4;
  int i;

  info = rs6000_stack_info ();

  /* SPE restores GPRs as 64-bit V2SImode quantities.  */
  if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
    {
      reg_mode = V2SImode;
      reg_size = 8;
    }

  using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
			 && (!TARGET_SPE_ABI
			     || info->spe_64bit_regs_used == 0)
			 && info->first_gp_reg_save < 31);
  restoring_FPRs_inline = (sibcall
			   || current_function_calls_eh_return
			   || info->first_fp_reg_save == 64
			   || FP_SAVE_INLINE (info->first_fp_reg_save));
  use_backchain_to_restore_sp = (frame_pointer_needed
				 || current_function_calls_alloca
				 || info->total_size > 32767);
  /* mfcr with a mask is cheap on these processors (or when optimizing
     for size); otherwise one move per CR field is used.  */
  using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
			 || rs6000_cpu == PROCESSOR_PPC603
			 || rs6000_cpu == PROCESSOR_PPC750
			 || optimize_size);

  /* If we have a frame pointer, a call to alloca, or a large stack
     frame, restore the old stack pointer using the backchain.  Otherwise,
     we know what size to update it with.  */
  if (use_backchain_to_restore_sp)
    {
      /* Under V.4, don't reset the stack pointer until after we're done
	 loading the saved registers.  */
      if (DEFAULT_ABI == ABI_V4)
	frame_reg_rtx = gen_rtx_REG (Pmode, 11);

      emit_move_insn (frame_reg_rtx,
		      gen_rtx_MEM (Pmode, sp_reg_rtx));
    }
  else if (info->push_p)
    {
      if (DEFAULT_ABI == ABI_V4
	  || current_function_calls_eh_return)
	/* Defer the sp update; address saves via a constant offset.  */
	sp_offset = info->total_size;
      else
	{
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (info->total_size))
		     : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (info->total_size)));
	}
    }

  /* Restore AltiVec registers if needed.  */
  if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
    {
      int i;

      for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
	if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
	  {
	    rtx addr, areg, mem;

	    /* r0 holds the offset for [reg+reg] addressing.  */
	    areg = gen_rtx_REG (Pmode, 0);
	    emit_move_insn
	      (areg, GEN_INT (info->altivec_save_offset
			      + sp_offset
			      + 16 * (i - info->first_altivec_reg_save)));

	    /* AltiVec addressing mode is [reg+reg].  */
	    addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
	    mem = gen_rtx_MEM (V4SImode, addr);
	    set_mem_alias_set (mem, rs6000_sr_alias_set);

	    emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
	  }
    }

  /* Restore VRSAVE if needed.  */
  if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
    {
      rtx addr, mem, reg;

      addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			   GEN_INT (info->vrsave_save_offset + sp_offset));
      mem = gen_rtx_MEM (SImode, addr);
      set_mem_alias_set (mem, rs6000_sr_alias_set);
      reg = gen_rtx_REG (SImode, 12);
      emit_move_insn (reg, mem);

      emit_insn (generate_set_vrsave (reg, info, 1));
    }

  /* Get the old lr if we saved it.  */
  if (info->lr_save_p)
    {
      rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
				      info->lr_save_offset + sp_offset);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
    }

  /* Get the old cr if we saved it.  */
  if (info->cr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->cr_save_offset + sp_offset));
      rtx mem = gen_rtx_MEM (SImode, addr);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      emit_move_insn (gen_rtx_REG (SImode, 12), mem);
    }

  /* Set LR here to try to overlap restores below.  */
  if (info->lr_save_p)
    emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
		    gen_rtx_REG (Pmode, 0));

  /* Load exception handler data registers, if needed.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i, regno;

      /* Matches the pretend-save of r2 done in the prologue.  */
      if (TARGET_AIX)
	{
	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				   GEN_INT (sp_offset + 5 * reg_size));
	  rtx mem = gen_rtx_MEM (reg_mode, addr);

	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
	}

      for (i = 0; ; ++i)
	{
	  rtx mem;

	  regno = EH_RETURN_DATA_REGNO (i);
	  if (regno == INVALID_REGNUM)
	    break;

	  mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
				      info->ehrd_offset + sp_offset
				      + reg_size * (int) i);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
	}
    }

  /* Restore GPRs.  This is done as a PARALLEL if we are using
     the load-multiple instructions.  */
  if (using_load_multiple)
    {
      rtvec p;
      p = rtvec_alloc (32 - info->first_gp_reg_save);
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	{
	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				   GEN_INT (info->gp_save_offset
					    + sp_offset
					    + reg_size * i));
	  rtx mem = gen_rtx_MEM (reg_mode, addr);

	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i) =
	    gen_rtx_SET (VOIDmode,
			 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
			 mem);
	}
      emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
    }
  else
    for (i = 0; i < 32 - info->first_gp_reg_save; i++)
      /* Also restore the PIC register when PIC code saved it, even if
	 it is nominally call-used.  */
      if ((regs_ever_live[info->first_gp_reg_save+i]
	   && ! call_used_regs[info->first_gp_reg_save+i])
	  || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
	      && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
		  || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
	{
	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				   GEN_INT (info->gp_save_offset
					    + sp_offset
					    + reg_size * i));
	  rtx mem = gen_rtx_MEM (reg_mode, addr);

	  /* Restore 64-bit quantities for SPE.  */
	  if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
	    {
	      int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
	      rtx b;

	      /* SPE loads cannot encode large constant offsets; spill
		 the offset to the fixed scratch register.  */
	      if (!SPE_CONST_OFFSET_OK (offset))
		{
		  b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
		  emit_move_insn (b, GEN_INT (offset));
		}
	      else
		b = GEN_INT (offset);

	      addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
	      mem = gen_rtx_MEM (V2SImode, addr);
	    }

	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (reg_mode,
				       info->first_gp_reg_save + i), mem);
	}

  /* Restore fpr's if we need to do it without calling a function.  */
  if (restoring_FPRs_inline)
    for (i = 0; i < 64 - info->first_fp_reg_save; i++)
      if ((regs_ever_live[info->first_fp_reg_save+i]
	   && ! call_used_regs[info->first_fp_reg_save+i]))
	{
	  rtx addr, mem;
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->fp_save_offset
					+ sp_offset
					+ 8 * i));
	  mem = gen_rtx_MEM (DFmode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (DFmode,
				       info->first_fp_reg_save + i),
			  mem);
	}

  /* If we saved cr, restore it here.  Just those that were used.  */
  if (info->cr_save_p)
    {
      rtx r12_rtx = gen_rtx_REG (SImode, 12);
      int count = 0;

      if (using_mfcr_multiple)
	{
	  for (i = 0; i < 8; i++)
	    if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	      count++;
	  /* cr_save_p implied at least one live call-saved CR field.  */
	  if (count == 0)
	    abort ();
	}

      if (using_mfcr_multiple && count > 1)
	{
	  /* One PARALLEL restores all live CR fields from r12.  */
	  rtvec p;
	  int ndx;

	  p = rtvec_alloc (count);

	  ndx = 0;
	  for (i = 0; i < 8; i++)
	    if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	      {
		rtvec r = rtvec_alloc (2);
		RTVEC_ELT (r, 0) = r12_rtx;
		RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
		RTVEC_ELT (p, ndx) =
		  gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
			       gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
		ndx++;
	      }
	  emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
	  if (ndx != count)
	    abort ();
	}
      else
	/* Otherwise, one mtcrf per live field.  */
	for (i = 0; i < 8; i++)
	  if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	    {
	      emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
							   CR0_REGNO+i),
					      r12_rtx));
	    }
    }

  /* If this is V.4, unwind the stack pointer after all of the loads
     have been done.  We need to emit a block here so that sched
     doesn't decide to move the sp change before the register restores
     (which may not have any obvious dependency on the stack).  This
     doesn't hurt performance, because there is no scheduling that can
     be done after this point.  */
  if (DEFAULT_ABI == ABI_V4
      || current_function_calls_eh_return)
    {
      if (frame_reg_rtx != sp_reg_rtx)
	rs6000_emit_stack_tie ();

      if (use_backchain_to_restore_sp)
	{
	  emit_move_insn (sp_reg_rtx, frame_reg_rtx);
	}
      else if (sp_offset != 0)
	{
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (sp_offset))
		     : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (sp_offset)));
	}
    }

  /* Apply the dynamic stack adjustment computed by the EH machinery.  */
  if (current_function_calls_eh_return)
    {
      rtx sa = EH_RETURN_STACKADJ_RTX;
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
		 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
    }

  if (!sibcall)
    {
      rtvec p;
      if (! restoring_FPRs_inline)
	p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
      else
	p = rtvec_alloc (2);

      RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
				      gen_rtx_REG (Pmode,
						   LINK_REGISTER_REGNUM));

      /* If we have to restore more than two FP registers, branch to the
	 restore function.  It will return to our caller.  */
      if (! restoring_FPRs_inline)
	{
	  int i;
	  char rname[30];
	  const char *alloc_rname;

	  sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
		   info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
	  alloc_rname = ggc_strdup (rname);
	  RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
					  gen_rtx_SYMBOL_REF (Pmode,
							      alloc_rname));

	  /* Record the loads the out-of-line routine performs.  */
	  for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	    {
	      rtx addr, mem;
	      addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
				   GEN_INT (info->fp_save_offset + 8*i));
	      mem = gen_rtx_MEM (DFmode, addr);
	      set_mem_alias_set (mem, rs6000_sr_alias_set);

	      RTVEC_ELT (p, i+3) =
		gen_rtx_SET (VOIDmode,
			     gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
			     mem);
	    }
	}

      emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
    }
}
12091 /* Write function epilogue. */
12093 static void
12094 rs6000_output_function_epilogue (file, size)
12095 FILE *file;
12096 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
12098 rs6000_stack_t *info = rs6000_stack_info ();
12100 if (! HAVE_epilogue)
12102 rtx insn = get_last_insn ();
12103 /* If the last insn was a BARRIER, we don't have to write anything except
12104 the trace table. */
12105 if (GET_CODE (insn) == NOTE)
12106 insn = prev_nonnote_insn (insn);
12107 if (insn == 0 || GET_CODE (insn) != BARRIER)
12109 /* This is slightly ugly, but at least we don't have two
12110 copies of the epilogue-emitting code. */
12111 start_sequence ();
12113 /* A NOTE_INSN_DELETED is supposed to be at the start
12114 and end of the "toplevel" insn chain. */
12115 emit_note (NOTE_INSN_DELETED);
12116 rs6000_emit_epilogue (FALSE);
12117 emit_note (NOTE_INSN_DELETED);
12119 /* Expand INSN_ADDRESSES so final() doesn't crash. */
12121 rtx insn;
12122 unsigned addr = 0;
12123 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
12125 INSN_ADDRESSES_NEW (insn, addr);
12126 addr += 4;
12130 if (TARGET_DEBUG_STACK)
12131 debug_rtx_list (get_insns (), 100);
12132 final (get_insns (), file, FALSE, FALSE);
12133 end_sequence ();
12137 #if TARGET_OBJECT_FORMAT == OBJECT_MACHO
12138 /* Mach-O doesn't support labels at the end of objects, so if
12139 it looks like we might want one, insert a NOP. */
12141 rtx insn = get_last_insn ();
12142 while (insn
12143 && NOTE_P (insn)
12144 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED_LABEL)
12145 insn = PREV_INSN (insn);
12146 if (insn
12147 && (LABEL_P (insn)
12148 || (NOTE_P (insn)
12149 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)))
12150 fputs ("\tnop\n", file);
12152 #endif
12154 /* Output a traceback table here. See /usr/include/sys/debug.h for info
12155 on its format.
12157 We don't output a traceback table if -finhibit-size-directive was
12158 used. The documentation for -finhibit-size-directive reads
12159 ``don't output a @code{.size} assembler directive, or anything
12160 else that would cause trouble if the function is split in the
12161 middle, and the two halves are placed at locations far apart in
12162 memory.'' The traceback table has this property, since it
12163 includes the offset from the start of the function to the
12164 traceback table itself.
12166 System V.4 Powerpc's (and the embedded ABI derived from it) use a
12167 different traceback table. */
12168 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
12169 && rs6000_traceback != traceback_none)
12171 const char *fname = NULL;
12172 const char *language_string = lang_hooks.name;
12173 int fixed_parms = 0, float_parms = 0, parm_info = 0;
12174 int i;
12175 int optional_tbtab;
12177 if (rs6000_traceback == traceback_full)
12178 optional_tbtab = 1;
12179 else if (rs6000_traceback == traceback_part)
12180 optional_tbtab = 0;
12181 else
12182 optional_tbtab = !optimize_size && !TARGET_ELF;
12184 if (optional_tbtab)
12186 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
12187 while (*fname == '.') /* V.4 encodes . in the name */
12188 fname++;
12190 /* Need label immediately before tbtab, so we can compute
12191 its offset from the function start. */
12192 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
12193 ASM_OUTPUT_LABEL (file, fname);
12196 /* The .tbtab pseudo-op can only be used for the first eight
12197 expressions, since it can't handle the possibly variable
12198 length fields that follow. However, if you omit the optional
12199 fields, the assembler outputs zeros for all optional fields
12200 anyway, giving each variable-length field its minimum length
12201 (as defined in sys/debug.h). Thus we cannot use the .tbtab
12202 pseudo-op at all. */
12204 /* An all-zero word flags the start of the tbtab, for debuggers
12205 that have to find it by searching forward from the entry
12206 point or from the current pc. */
12207 fputs ("\t.long 0\n", file);
12209 /* Tbtab format type. Use format type 0. */
12210 fputs ("\t.byte 0,", file);
12212 /* Language type. Unfortunately, there doesn't seem to be any
12213 official way to get this info, so we use language_string. C
12214 is 0. C++ is 9. No number defined for Obj-C, so use the
12215 value for C for now. There is no official value for Java,
12216 although IBM appears to be using 13. There is no official value
12217 for Chill, so we've chosen 44 pseudo-randomly. */
12218 if (! strcmp (language_string, "GNU C")
12219 || ! strcmp (language_string, "GNU Objective-C"))
12220 i = 0;
12221 else if (! strcmp (language_string, "GNU F77"))
12222 i = 1;
12223 else if (! strcmp (language_string, "GNU Ada"))
12224 i = 3;
12225 else if (! strcmp (language_string, "GNU Pascal"))
12226 i = 2;
12227 else if (! strcmp (language_string, "GNU C++"))
12228 i = 9;
12229 else if (! strcmp (language_string, "GNU Java"))
12230 i = 13;
12231 else if (! strcmp (language_string, "GNU CHILL"))
12232 i = 44;
12233 else
12234 abort ();
12235 fprintf (file, "%d,", i);
12237 /* 8 single bit fields: global linkage (not set for C extern linkage,
12238 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
12239 from start of procedure stored in tbtab, internal function, function
12240 has controlled storage, function has no toc, function uses fp,
12241 function logs/aborts fp operations. */
12242 /* Assume that fp operations are used if any fp reg must be saved. */
12243 fprintf (file, "%d,",
12244 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
12246 /* 6 bitfields: function is interrupt handler, name present in
12247 proc table, function calls alloca, on condition directives
12248 (controls stack walks, 3 bits), saves condition reg, saves
12249 link reg. */
12250 /* The `function calls alloca' bit seems to be set whenever reg 31 is
12251 set up as a frame pointer, even when there is no alloca call. */
12252 fprintf (file, "%d,",
12253 ((optional_tbtab << 6)
12254 | ((optional_tbtab & frame_pointer_needed) << 5)
12255 | (info->cr_save_p << 1)
12256 | (info->lr_save_p)));
12258 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
12259 (6 bits). */
12260 fprintf (file, "%d,",
12261 (info->push_p << 7) | (64 - info->first_fp_reg_save));
12263 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
12264 fprintf (file, "%d,", (32 - first_reg_to_save ()));
12266 if (optional_tbtab)
12268 /* Compute the parameter info from the function decl argument
12269 list. */
12270 tree decl;
12271 int next_parm_info_bit = 31;
12273 for (decl = DECL_ARGUMENTS (current_function_decl);
12274 decl; decl = TREE_CHAIN (decl))
12276 rtx parameter = DECL_INCOMING_RTL (decl);
12277 enum machine_mode mode = GET_MODE (parameter);
12279 if (GET_CODE (parameter) == REG)
12281 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
12283 int bits;
12285 float_parms++;
12287 if (mode == SFmode)
12288 bits = 0x2;
12289 else if (mode == DFmode || mode == TFmode)
12290 bits = 0x3;
12291 else
12292 abort ();
12294 /* If only one bit will fit, don't or in this entry. */
12295 if (next_parm_info_bit > 0)
12296 parm_info |= (bits << (next_parm_info_bit - 1));
12297 next_parm_info_bit -= 2;
12299 else
12301 fixed_parms += ((GET_MODE_SIZE (mode)
12302 + (UNITS_PER_WORD - 1))
12303 / UNITS_PER_WORD);
12304 next_parm_info_bit -= 1;
12310 /* Number of fixed point parameters. */
12311 /* This is actually the number of words of fixed point parameters; thus
12312 an 8 byte struct counts as 2; and thus the maximum value is 8. */
12313 fprintf (file, "%d,", fixed_parms);
12315 /* 2 bitfields: number of floating point parameters (7 bits), parameters
12316 all on stack. */
12317 /* This is actually the number of fp registers that hold parameters;
12318 and thus the maximum value is 13. */
12319 /* Set parameters on stack bit if parameters are not in their original
12320 registers, regardless of whether they are on the stack? Xlc
12321 seems to set the bit when not optimizing. */
12322 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
12324 if (! optional_tbtab)
12325 return;
12327 /* Optional fields follow. Some are variable length. */
12329 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
12330 11 double float. */
12331 /* There is an entry for each parameter in a register, in the order that
12332 they occur in the parameter list. Any intervening arguments on the
12333 stack are ignored. If the list overflows a long (max possible length
12334 34 bits) then completely leave off all elements that don't fit. */
12335 /* Only emit this long if there was at least one parameter. */
12336 if (fixed_parms || float_parms)
12337 fprintf (file, "\t.long %d\n", parm_info);
12339 /* Offset from start of code to tb table. */
12340 fputs ("\t.long ", file);
12341 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
12342 #if TARGET_AIX
12343 RS6000_OUTPUT_BASENAME (file, fname);
12344 #else
12345 assemble_name (file, fname);
12346 #endif
12347 fputs ("-.", file);
12348 #if TARGET_AIX
12349 RS6000_OUTPUT_BASENAME (file, fname);
12350 #else
12351 assemble_name (file, fname);
12352 #endif
12353 putc ('\n', file);
12355 /* Interrupt handler mask. */
12356 /* Omit this long, since we never set the interrupt handler bit
12357 above. */
12359 /* Number of CTL (controlled storage) anchors. */
12360 /* Omit this long, since the has_ctl bit is never set above. */
12362 /* Displacement into stack of each CTL anchor. */
12363 /* Omit this list of longs, because there are no CTL anchors. */
12365 /* Length of function name. */
12366 if (*fname == '*')
12367 ++fname;
12368 fprintf (file, "\t.short %d\n", (int) strlen (fname));
12370 /* Function name. */
12371 assemble_string (fname, strlen (fname));
12373 /* Register for alloca automatic storage; this is always reg 31.
12374 Only emit this if the alloca bit was set above. */
12375 if (frame_pointer_needed)
12376 fputs ("\t.byte 31\n", file);
12378 fputs ("\t.align 2\n", file);
12382 /* A C compound statement that outputs the assembler code for a thunk
12383 function, used to implement C++ virtual function calls with
12384 multiple inheritance. The thunk acts as a wrapper around a virtual
12385 function, adjusting the implicit object parameter before handing
12386 control off to the real function.
12388 First, emit code to add the integer DELTA to the location that
12389 contains the incoming first argument. Assume that this argument
12390 contains a pointer, and is the one used to pass the `this' pointer
12391 in C++. This is the incoming argument *before* the function
12392 prologue, e.g. `%o0' on a sparc. The addition must preserve the
12393 values of all other incoming arguments.
12395 After the addition, emit code to jump to FUNCTION, which is a
12396 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
12397 not touch the return address. Hence returning from FUNCTION will
12398 return to whoever called the current `thunk'.
12400 The effect must be as if FUNCTION had been called directly with the
12401 adjusted first argument. This macro is responsible for emitting
12402 all of the code for a thunk function; output_function_prologue()
12403 and output_function_epilogue() are not invoked.
12405 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
12406 been extracted from it.) It might possibly be useful on some
12407 targets, but probably not.
12409 If you do not define this macro, the target-independent code in the
12410 C++ frontend will generate a less efficient heavyweight thunk that
12411 calls FUNCTION instead of jumping to it. The generic approach does
12412 not support varargs. */
/* Emit the body of a thunk: adjust the incoming `this' pointer by
   DELTA (and, when VCALL_OFFSET is nonzero, by a value loaded from the
   vtable), then tail-call FUNCTION.  See the block comment above for
   the full contract.  THUNK_FNDECL is unused here.  */
static void
rs6000_output_mi_thunk (file, thunk_fndecl, delta, vcall_offset, function)
     FILE *file;
     tree thunk_fndecl ATTRIBUTE_UNUSED;
     HOST_WIDE_INT delta;
     HOST_WIDE_INT vcall_offset;
     tree function;
  rtx this, insn, funexp;

  /* Generate RTL as if we were already past reload, with no new
     pseudos allowed: no register allocation runs afterward.  */
  reload_completed = 1;
  epilogue_completed = 1;
  no_new_pseudos = 1;

  /* Mark the end of the (empty) prologue.  */
  emit_note (NOTE_INSN_PROLOGUE_END);

  /* Find the "this" pointer.  If the function returns a structure,
     the structure return pointer is in r3, so "this" arrives in r4.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
    this = gen_rtx_REG (Pmode, 4);
  else
    this = gen_rtx_REG (Pmode, 3);

  /* Apply the constant offset, if required.  */
  if (delta)
      rtx delta_rtx = GEN_INT (delta);
      emit_insn (TARGET_32BIT
                 ? gen_addsi3 (this, this, delta_rtx)
                 : gen_adddi3 (this, this, delta_rtx));

  /* Apply the offset from the vtable, if required.  */
  if (vcall_offset)
      rtx vcall_offset_rtx = GEN_INT (vcall_offset);
      rtx tmp = gen_rtx_REG (Pmode, 12);

      /* Load the vtable pointer from *this into r12.  */
      emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
      /* If the offset does not fit a signed 16-bit displacement, add
         it to the vtable pointer first; otherwise fold it into the
         load's address.  */
      if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
          emit_insn (TARGET_32BIT
                     ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
                     : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
          emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
      else
          rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);

          emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
      /* Add the loaded vtable offset to "this".  */
      emit_insn (TARGET_32BIT
                 ? gen_addsi3 (this, this, tmp)
                 : gen_adddi3 (this, this, tmp));

  /* Generate a tail call to the target function.  */
  if (!TREE_USED (function))
      assemble_external (function);
      TREE_USED (function) = 1;
  funexp = XEXP (DECL_RTL (function), 0);
  funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);

#if TARGET_MACHO
  if (MACHOPIC_INDIRECT)
    funexp = machopic_indirect_call_target (funexp);
#endif

  /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
     generate sibcall RTL explicitly to avoid constraint abort.  */
  insn = emit_call_insn (
           gen_rtx_PARALLEL (VOIDmode,
                             gen_rtvec (4,
                                        gen_rtx_CALL (VOIDmode,
                                                      funexp, const0_rtx),
                                        gen_rtx_USE (VOIDmode, const0_rtx),
                                        gen_rtx_USE (VOIDmode,
                                                     gen_rtx_REG (SImode,
                                                                  LINK_REGISTER_REGNUM)),
                                        gen_rtx_RETURN (VOIDmode))));
  SIBLING_CALL_P (insn) = 1;
  emit_barrier ();

  /* Run just enough of rest_of_compilation to get the insns emitted.
     There's not really enough bulk here to make other passes such as
     instruction scheduling worth while.  Note that use_thunk calls
     assemble_start_function and assemble_end_function.  */
  insn = get_insns ();
  insn_locators_initialize ();
  shorten_branches (insn);
  final_start_function (insn, file, 1);
  final (insn, file, 1, 0);
  final_end_function ();

  /* Restore normal compilation state for the functions that follow.  */
  reload_completed = 0;
  epilogue_completed = 0;
  no_new_pseudos = 0;
12517 /* A quick summary of the various types of 'constant-pool tables'
12518 under PowerPC:
12520 Target Flags Name One table per
12521 AIX (none) AIX TOC object file
12522 AIX -mfull-toc AIX TOC object file
12523 AIX -mminimal-toc AIX minimal TOC translation unit
12524 SVR4/EABI (none) SVR4 SDATA object file
12525 SVR4/EABI -fpic SVR4 pic object file
12526 SVR4/EABI -fPIC SVR4 PIC translation unit
12527 SVR4/EABI -mrelocatable EABI TOC function
12528 SVR4/EABI -maix AIX TOC object file
12529 SVR4/EABI -maix -mminimal-toc
12530 AIX minimal TOC translation unit
12532 Name Reg. Set by entries contains:
12533 made by addrs? fp? sum?
12535 AIX TOC 2 crt0 as Y option option
12536 AIX minimal TOC 30 prolog gcc Y Y option
12537 SVR4 SDATA 13 crt0 gcc N Y N
12538 SVR4 pic 30 prolog ld Y not yet N
12539 SVR4 PIC 30 prolog gcc Y option option
12540 EABI TOC 30 prolog gcc Y option option
12544 /* Hash functions for the hash table. */
/* Compute a hash value for the constant rtx K, used as the hash for
   TOC hash-table entries.  The hash mixes K's rtx code and machine
   mode, then folds in each operand according to K's format string.  */
static unsigned
rs6000_hash_constant (k)
     rtx k;
  enum rtx_code code = GET_CODE (k);
  enum machine_mode mode = GET_MODE (k);
  unsigned result = (code << 3) ^ mode;
  const char *format;
  int flen, fidx;

  format = GET_RTX_FORMAT (code);
  flen = strlen (format);
  fidx = 0;

  switch (code)
    case LABEL_REF:
      /* Hash label references by the UID of the referenced insn.  */
      return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));

    case CONST_DOUBLE:
      /* A nonzero mode means a floating-point constant: hash its
         REAL_VALUE.  A VOIDmode CONST_DOUBLE is a wide integer; hash
         only its two value words via the loop below.  */
      if (mode != VOIDmode)
        return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
      flen = 2;
      break;

    case CODE_LABEL:
      /* Start at operand 3, skipping the leading fields (these chain
         the label into the insn stream rather than identifying its
         value).  */
      fidx = 3;
      break;

    default:
      break;

  /* Fold each remaining operand into the hash, dispatching on its
     format character.  */
  for (; fidx < flen; fidx++)
    switch (format[fidx])
      case 's':
          unsigned i, len;
          const char *str = XSTR (k, fidx);
          len = strlen (str);
          result = result * 613 + len;
          for (i = 0; i < len; i++)
            result = result * 613 + (unsigned) str[i];
          break;
      case 'u':
      case 'e':
        /* Sub-expressions are hashed recursively.  */
        result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
        break;
      case 'i':
      case 'n':
        result = result * 613 + (unsigned) XINT (k, fidx);
        break;
      case 'w':
        if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
          result = result * 613 + (unsigned) XWINT (k, fidx);
        else
            /* HOST_WIDE_INT is wider than unsigned: fold it in
               unsigned-sized chunks.  */
            size_t i;
            for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
              result = result * 613 + (unsigned) (XWINT (k, fidx)
                                                  >> CHAR_BIT * i);
        break;
      case '0':
        /* Field contains nothing hashable.  */
        break;
      default:
        abort ();

  return result;
12620 static unsigned
12621 toc_hash_function (hash_entry)
12622 const void * hash_entry;
12624 const struct toc_hash_struct *thc =
12625 (const struct toc_hash_struct *) hash_entry;
12626 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
12629 /* Compare H1 and H2 for equivalence. */
12631 static int
12632 toc_hash_eq (h1, h2)
12633 const void * h1;
12634 const void * h2;
12636 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
12637 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
12639 if (((const struct toc_hash_struct *) h1)->key_mode
12640 != ((const struct toc_hash_struct *) h2)->key_mode)
12641 return 0;
12643 return rtx_equal_p (r1, r2);
12646 /* These are the names given by the C++ front-end to vtables, and
12647 vtable-like objects. Ideally, this logic should not be here;
12648 instead, there should be some programmatic way of inquiring as
12649 to whether or not an object is a vtable. */
/* Nonzero if NAME names a vtable or vtable-like object.  Fixed to use
   the macro argument NAME (parenthesized) instead of silently relying
   on a variable called `name' existing at every expansion site, which
   made the macro fragile and its parameter meaningless.  */
#define VTABLE_NAME_P(NAME) \
  (strncmp ("_vt.", (NAME), strlen ("_vt.")) == 0 \
   || strncmp ("_ZTV", (NAME), strlen ("_ZTV")) == 0 \
   || strncmp ("_ZTT", (NAME), strlen ("_ZTT")) == 0 \
   || strncmp ("_ZTC", (NAME), strlen ("_ZTC")) == 0)
/* Output to FILE the assembler name for the SYMBOL_REF X, bypassing
   section-based references for vtable symbols (see comment below).  */
void
rs6000_output_symbol_ref (file, x)
     FILE *file;
     rtx x;
  /* Currently C++ toc references to vtables can be emitted before it
     is decided whether the vtable is public or private.  If this is
     the case, then the linker will eventually complain that there is
     a reference to an unknown section.  Thus, for vtables only,
     we emit the TOC reference to reference the symbol and not the
     section.  */
  const char *name = XSTR (x, 0);

  if (VTABLE_NAME_P (name))
      /* Print the bare symbol name.  */
      RS6000_OUTPUT_BASENAME (file, name);
  else
    assemble_name (file, name);
12678 /* Output a TOC entry. We derive the entry name from what is being
12679 written. */
/* Output to FILE the TOC entry for constant X, which has been given
   TOC label number LABELNO and has machine mode MODE.  The entry name
   is derived from what is being written.  Duplicate entries are
   merged via a .set alias when the linker cannot merge them itself.  */
void
output_toc (file, x, labelno, mode)
     FILE *file;
     rtx x;
     int labelno;
     enum machine_mode mode;
  char buf[256];
  const char *name = buf;
  const char *real_name;
  rtx base = x;
  int offset = 0;

  if (TARGET_NO_TOC)
    abort ();

  /* When the linker won't eliminate them, don't output duplicate
     TOC entries (this happens on AIX if there is any kind of TOC,
     and on SVR4 under -fPIC or -mrelocatable).  Don't do this for
     CODE_LABELs.  */
  if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
      struct toc_hash_struct *h;
      void * * found;

      /* Create toc_hash_table.  This can't be done at OVERRIDE_OPTIONS
         time because GGC is not initialized at that point.  */
      if (toc_hash_table == NULL)
        toc_hash_table = htab_create_ggc (1021, toc_hash_function,
                                          toc_hash_eq, NULL);

      h = ggc_alloc (sizeof (*h));
      h->key = x;
      h->key_mode = mode;
      h->labelno = labelno;

      found = htab_find_slot (toc_hash_table, h, 1);
      if (*found == NULL)
        *found = h;
      else  /* This is indeed a duplicate.
               Set this label equal to that label.  */
          fputs ("\t.set ", file);
          ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
          fprintf (file, "%d,", labelno);
          ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
          fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
                                   found)->labelno));
          return;

  /* If we're going to put a double constant in the TOC, make sure it's
     aligned properly when strict alignment is on.  */
  if (GET_CODE (x) == CONST_DOUBLE
      && STRICT_ALIGNMENT
      && GET_MODE_BITSIZE (mode) >= 64
      && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
    ASM_OUTPUT_ALIGN (file, 3);

  (*targetm.asm_out.internal_label) (file, "LC", labelno);

  /* Handle FP constants specially.  Note that if we have a minimal
     TOC, things we put here aren't actually in the TOC, so we can allow
     FP constants.  */
  if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
      /* 128-bit long double: four target words k[0..3].  */
      REAL_VALUE_TYPE rv;
      long k[4];

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      if (TARGET_64BIT)
          /* Emitted as two 64-bit values.  */
          if (TARGET_MINIMAL_TOC)
            fputs (DOUBLE_INT_ASM_OP, file);
          else
            fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
                     k[0] & 0xffffffff, k[1] & 0xffffffff,
                     k[2] & 0xffffffff, k[3] & 0xffffffff);
          fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
                   k[0] & 0xffffffff, k[1] & 0xffffffff,
                   k[2] & 0xffffffff, k[3] & 0xffffffff);
          return;
      else
          /* Emitted as four 32-bit values.  */
          if (TARGET_MINIMAL_TOC)
            fputs ("\t.long ", file);
          else
            fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
                     k[0] & 0xffffffff, k[1] & 0xffffffff,
                     k[2] & 0xffffffff, k[3] & 0xffffffff);
          fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
                   k[0] & 0xffffffff, k[1] & 0xffffffff,
                   k[2] & 0xffffffff, k[3] & 0xffffffff);
          return;
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
      /* 64-bit double: two target words k[0..1].  */
      REAL_VALUE_TYPE rv;
      long k[2];

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      if (TARGET_64BIT)
          if (TARGET_MINIMAL_TOC)
            fputs (DOUBLE_INT_ASM_OP, file);
          else
            fprintf (file, "\t.tc FD_%lx_%lx[TC],",
                     k[0] & 0xffffffff, k[1] & 0xffffffff);
          fprintf (file, "0x%lx%08lx\n",
                   k[0] & 0xffffffff, k[1] & 0xffffffff);
          return;
      else
          if (TARGET_MINIMAL_TOC)
            fputs ("\t.long ", file);
          else
            fprintf (file, "\t.tc FD_%lx_%lx[TC],",
                     k[0] & 0xffffffff, k[1] & 0xffffffff);
          fprintf (file, "0x%lx,0x%lx\n",
                   k[0] & 0xffffffff, k[1] & 0xffffffff);
          return;
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
      /* 32-bit float: one target word, padded to 64 bits on 64-bit
         targets.  */
      REAL_VALUE_TYPE rv;
      long l;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      if (TARGET_64BIT)
          if (TARGET_MINIMAL_TOC)
            fputs (DOUBLE_INT_ASM_OP, file);
          else
            fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
          fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
          return;
      else
          if (TARGET_MINIMAL_TOC)
            fputs ("\t.long ", file);
          else
            fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
          fprintf (file, "0x%lx\n", l & 0xffffffff);
          return;
  else if (GET_MODE (x) == VOIDmode
           && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
      /* Integer constants: split into low/high 32-bit halves.  */
      unsigned HOST_WIDE_INT low;
      HOST_WIDE_INT high;

      if (GET_CODE (x) == CONST_DOUBLE)
          low = CONST_DOUBLE_LOW (x);
          high = CONST_DOUBLE_HIGH (x);
      else
#if HOST_BITS_PER_WIDE_INT == 32
          low = INTVAL (x);
          high = (low & 0x80000000) ? ~0 : 0;
#else
          low = INTVAL (x) & 0xffffffff;
          high = (HOST_WIDE_INT) INTVAL (x) >> 32;
#endif

      /* TOC entries are always Pmode-sized, but since this
         is a bigendian machine then if we're putting smaller
         integer constants in the TOC we have to pad them.
         (This is still a win over putting the constants in
         a separate constant pool, because then we'd have
         to have both a TOC entry _and_ the actual constant.)

         For a 32-bit target, CONST_INT values are loaded and shifted
         entirely within `low' and can be stored in one TOC entry.  */

      if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
        abort ();/* It would be easy to make this work, but it doesn't now. */

      if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
          /* Left-justify the value within the Pmode-sized entry.  */
#if HOST_BITS_PER_WIDE_INT == 32
          lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
                         POINTER_SIZE, &low, &high, 0);
#else
          low |= high << 32;
          low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
          high = (HOST_WIDE_INT) low >> 32;
          low &= 0xffffffff;
#endif

      if (TARGET_64BIT)
          if (TARGET_MINIMAL_TOC)
            fputs (DOUBLE_INT_ASM_OP, file);
          else
            fprintf (file, "\t.tc ID_%lx_%lx[TC],",
                     (long) high & 0xffffffff, (long) low & 0xffffffff);
          fprintf (file, "0x%lx%08lx\n",
                   (long) high & 0xffffffff, (long) low & 0xffffffff);
          return;
      else
          if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
              /* Value wider than a pointer: two words.  */
              if (TARGET_MINIMAL_TOC)
                fputs ("\t.long ", file);
              else
                fprintf (file, "\t.tc ID_%lx_%lx[TC],",
                         (long) high & 0xffffffff, (long) low & 0xffffffff);
              fprintf (file, "0x%lx,0x%lx\n",
                       (long) high & 0xffffffff, (long) low & 0xffffffff);
          else
              if (TARGET_MINIMAL_TOC)
                fputs ("\t.long ", file);
              else
                fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
              fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
          return;

  /* Everything else is a symbol, label, or symbol/label plus constant
     offset (wrapped in CONST).  */
  if (GET_CODE (x) == CONST)
      if (GET_CODE (XEXP (x, 0)) != PLUS)
        abort ();

      base = XEXP (XEXP (x, 0), 0);
      offset = INTVAL (XEXP (XEXP (x, 0), 1));

  if (GET_CODE (base) == SYMBOL_REF)
    name = XSTR (base, 0);
  else if (GET_CODE (base) == LABEL_REF)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
  else if (GET_CODE (base) == CODE_LABEL)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
  else
    abort ();

  real_name = (*targetm.strip_name_encoding) (name);
  if (TARGET_MINIMAL_TOC)
    fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
  else
      fprintf (file, "\t.tc %s", real_name);

      /* Encode a negative offset as .N<off> and a positive one as
         .P<off> so each (symbol, offset) pair gets a distinct entry
         name.  */
      if (offset < 0)
        fprintf (file, ".N%d", - offset);
      else if (offset)
        fprintf (file, ".P%d", offset);
      fputs ("[TC],", file);

  /* Currently C++ toc references to vtables can be emitted before it
     is decided whether the vtable is public or private.  If this is
     the case, then the linker will eventually complain that there is
     a TOC reference to an unknown section.  Thus, for vtables only,
     we emit the TOC reference to reference the symbol and not the
     section.  */
  if (VTABLE_NAME_P (name))
      RS6000_OUTPUT_BASENAME (file, name);
      if (offset < 0)
        fprintf (file, "%d", offset);
      else if (offset > 0)
        fprintf (file, "+%d", offset);
  else
    output_addr_const (file, x);
  putc ('\n', file);
/* Write the N bytes starting at P to FILE as assembler .byte
   pseudo-ops: printable runs go inside a quoted string, any other
   byte is emitted as a decimal operand.

   The RS/6000 assembler truncates very long strings, so quoted runs
   are artificially broken every 512 characters.  */
void
output_ascii (file, p, n)
     FILE *file;
     const char *p;
     int n;
{
  /* Prefix to emit before the next printable character (NULL while a
     quoted string is open), prefix for the next non-printable byte,
     and the text that closes whatever is currently open.  */
  const char *open_string = "\t.byte \"";
  const char *open_decimal = "\t.byte ";
  const char *pending_close = NULL;
  int quoted_len = 0;
  int i;

  for (i = 0; i < n; i++)
    {
      char ch = *p++;

      if (ch >= ' ' && ch < 0177)
        {
          /* Printable: open (or continue) a quoted string.  */
          if (open_string)
            fputs (open_string, file);
          putc (ch, file);

          /* The assembler wants a doubled quote for a literal one.  */
          if (ch == '"')
            {
              putc (ch, file);
              ++quoted_len;
            }

          open_string = NULL;
          open_decimal = "\"\n\t.byte ";
          pending_close = "\"\n";
          ++quoted_len;

          /* Break overly long strings; the assembler truncates them.  */
          if (quoted_len >= 512)
            {
              fputs (pending_close, file);
              open_string = "\t.byte \"";
              open_decimal = "\t.byte ";
              pending_close = NULL;
              quoted_len = 0;
            }
        }
      else
        {
          /* Non-printable: emit the byte as a decimal operand.  */
          if (open_decimal)
            fputs (open_decimal, file);
          fprintf (file, "%d", ch);

          open_string = "\n\t.byte \"";
          open_decimal = ", ";
          pending_close = "\n";
          quoted_len = 0;
        }
    }

  /* Close any open quoted string, then end the line.  */
  if (pending_close)
    fputs (pending_close, file);
}
/* Generate into *BUF a unique section name for FILENAME and section
   type SECTION_DESC, in the same manner as xlc: an underscore, then
   the basename of FILENAME (non-alphanumeric characters dropped) with
   its last period replaced by SECTION_DESC; if FILENAME contains no
   period, SECTION_DESC is appended instead.

   SECTION_DESC can be any string, as long as it differs for each
   possible section type.  The result is xmalloc'd; the caller owns
   and must free it.  */
void
rs6000_gen_section_name (buf, filename, section_desc)
     char **buf;
     const char *filename;
     const char *section_desc;
{
  const char *scan;
  const char *base_name = filename;
  const char *last_dot = 0;
  char *out;
  int size;

  /* Find the basename and the last period anywhere in FILENAME.  */
  for (scan = filename; *scan; scan++)
    {
      if (*scan == '/')
        base_name = scan + 1;
      else if (*scan == '.')
        last_dot = scan;
    }

  size = strlen (base_name) + strlen (section_desc) + 2;
  *buf = (char *) xmalloc (size);

  out = *buf;
  *out++ = '_';

  for (scan = base_name; *scan; scan++)
    {
      if (scan == last_dot)
        {
          /* Replace the final period with SECTION_DESC and stop.  */
          strcpy (out, section_desc);
          out += strlen (section_desc);
          break;
        }
      else if (ISALNUM (*scan))
        *out++ = *scan;
    }

  if (last_dot == 0)
    strcpy (out, section_desc);  /* No period at all: append.  */
  else
    *out = '\0';
}
/* Emit RTL to call the profiling routine (RS6000_MCOUNT) for the
   function whose counter label number is LABELNO.  Does nothing under
   -mprofile-kernel (handled in output_function_profiler instead).  */
void
output_profile_hook (labelno)
     int labelno ATTRIBUTE_UNUSED;
  if (TARGET_PROFILE_KERNEL)
    return;

  if (DEFAULT_ABI == ABI_AIX)
#ifndef NO_PROFILE_COUNTERS
# define NO_PROFILE_COUNTERS 0
#endif
      if (NO_PROFILE_COUNTERS)
        emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
      else
          char buf[30];
          const char *label_name;
          rtx fun;

          /* Pass the address of the LP<labelno> counter label to the
             profiling routine.  */
          ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
          label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
          fun = gen_rtx_SYMBOL_REF (Pmode, label_name);

          emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
                             fun, Pmode);
  else if (DEFAULT_ABI == ABI_DARWIN)
      const char *mcount_name = RS6000_MCOUNT;
      int caller_addr_regno = LINK_REGISTER_REGNUM;

      /* Be conservative and always set this, at least for now.  */
      current_function_uses_pic_offset_table = 1;

#if TARGET_MACHO
      /* For PIC code, set up a stub and collect the caller's address
         from r0, which is where the prologue puts it.  */
      if (MACHOPIC_INDIRECT)
          mcount_name = machopic_stub_name (mcount_name);
          if (current_function_uses_pic_offset_table)
            caller_addr_regno = 0;
#endif
      /* Pass the caller's address register to the profiling routine.  */
      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
                         0, VOIDmode, 1,
                         gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
/* Write function profiler code to FILE for counter label LABELNO.
   For the V4 ABI this emits the whole mcount calling sequence inline
   (saving LR and loading the counter label's address, PIC-aware);
   for AIX/Darwin the work is normally done by output_profile_hook,
   except under -mprofile-kernel where a 64-bit sequence is emitted
   here.  */
void
output_function_profiler (file, labelno)
     FILE *file;
     int labelno;
  char buf[100];
  /* Stack offset at which LR is saved: 8 by default, 4 for V4.  */
  int save_lr = 8;

  switch (DEFAULT_ABI)
    default:
      abort ();

    case ABI_V4:
      save_lr = 4;
      if (!TARGET_32BIT)
          warning ("no profiling of 64-bit code for this ABI");
          return;
      ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
      fprintf (file, "\tmflr %s\n", reg_names[0]);
      if (flag_pic == 1)
          /* Small-model PIC: fetch the counter address via the GOT.  */
          fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
          asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
                       reg_names[0], save_lr, reg_names[1]);
          asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
          asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
          assemble_name (file, buf);
          asm_fprintf (file, "@got(%s)\n", reg_names[12]);
      else if (flag_pic > 1)
          asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
                       reg_names[0], save_lr, reg_names[1]);
          /* Now, we need to get the address of the label.  */
          fputs ("\tbl 1f\n\t.long ", file);
          assemble_name (file, buf);
          fputs ("-.\n1:", file);
          asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
          asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
                       reg_names[0], reg_names[11]);
          asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
                       reg_names[0], reg_names[0], reg_names[11]);
      else
          /* Non-PIC: build the counter address with lis/la.  */
          asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
          assemble_name (file, buf);
          fputs ("@ha\n", file);
          asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
                       reg_names[0], save_lr, reg_names[1]);
          asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
          assemble_name (file, buf);
          asm_fprintf (file, "@l(%s)\n", reg_names[12]);

      /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH.  */
      fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
      break;

    case ABI_AIX:
    case ABI_DARWIN:
      if (!TARGET_PROFILE_KERNEL)
          /* Don't do anything, done in output_profile_hook ().  */
      else
          if (TARGET_32BIT)
            abort ();

          asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
          asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);

          if (current_function_needs_context)
              /* Preserve the static chain register around the call.  */
              asm_fprintf (file, "\tstd %s,24(%s)\n",
                           reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
              fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
              asm_fprintf (file, "\tld %s,24(%s)\n",
                           reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
          else
            fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
      break;
/* Target hook: nonzero says this back end describes its schedulers
   with the DFA pipeline interface.  Always true for rs6000.  */
static int
rs6000_use_dfa_pipeline_interface ()
{
  const int uses_dfa = 1;

  return uses_dfa;
}
13257 /* Power4 load update and store update instructions are cracked into a
13258 load or store and an integer insn which are executed in the same cycle.
13259 Branches have their own dispatch slot which does not count against the
13260 GCC issue rate, but it changes the program flow so there are no other
13261 instructions to issue in this cycle. */
13263 static int
13264 rs6000_variable_issue (stream, verbose, insn, more)
13265 FILE *stream ATTRIBUTE_UNUSED;
13266 int verbose ATTRIBUTE_UNUSED;
13267 rtx insn;
13268 int more;
13270 if (GET_CODE (PATTERN (insn)) == USE
13271 || GET_CODE (PATTERN (insn)) == CLOBBER)
13272 return more;
13274 if (rs6000_cpu == PROCESSOR_POWER4)
13276 enum attr_type type = get_attr_type (insn);
13277 if (type == TYPE_LOAD_EXT_U || type == TYPE_LOAD_EXT_UX
13278 || type == TYPE_LOAD_UX || type == TYPE_STORE_UX
13279 || type == TYPE_MFCR)
13280 return 0;
13281 else if (type == TYPE_LOAD_U || type == TYPE_STORE_U
13282 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
13283 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
13284 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
13285 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
13286 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
13287 || type == TYPE_IDIV || type == TYPE_LDIV
13288 || type == TYPE_INSERT_WORD)
13289 return more > 2 ? more - 2 : 0;
13292 return more - 1;
13295 /* Adjust the cost of a scheduling dependency. Return the new cost of
13296 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
13298 static int
13299 rs6000_adjust_cost (insn, link, dep_insn, cost)
13300 rtx insn;
13301 rtx link;
13302 rtx dep_insn ATTRIBUTE_UNUSED;
13303 int cost;
13305 if (! recog_memoized (insn))
13306 return 0;
13308 if (REG_NOTE_KIND (link) != 0)
13309 return 0;
13311 if (REG_NOTE_KIND (link) == 0)
13313 /* Data dependency; DEP_INSN writes a register that INSN reads
13314 some cycles later. */
13315 switch (get_attr_type (insn))
13317 case TYPE_JMPREG:
13318 /* Tell the first scheduling pass about the latency between
13319 a mtctr and bctr (and mtlr and br/blr). The first
13320 scheduling pass will not know about this latency since
13321 the mtctr instruction, which has the latency associated
13322 to it, will be generated by reload. */
13323 return TARGET_POWER ? 5 : 4;
13324 case TYPE_BRANCH:
13325 /* Leave some extra cycles between a compare and its
13326 dependent branch, to inhibit expensive mispredicts. */
13327 if ((rs6000_cpu_attr == CPU_PPC603
13328 || rs6000_cpu_attr == CPU_PPC604
13329 || rs6000_cpu_attr == CPU_PPC604E
13330 || rs6000_cpu_attr == CPU_PPC620
13331 || rs6000_cpu_attr == CPU_PPC630
13332 || rs6000_cpu_attr == CPU_PPC750
13333 || rs6000_cpu_attr == CPU_PPC7400
13334 || rs6000_cpu_attr == CPU_PPC7450
13335 || rs6000_cpu_attr == CPU_POWER4)
13336 && recog_memoized (dep_insn)
13337 && (INSN_CODE (dep_insn) >= 0)
13338 && (get_attr_type (dep_insn) == TYPE_CMP
13339 || get_attr_type (dep_insn) == TYPE_COMPARE
13340 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
13341 || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
13342 || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
13343 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
13344 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
13345 || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
13346 return cost + 2;
13347 default:
13348 break;
13350 /* Fall out to return default cost. */
13353 return cost;
/* A C statement (sans semicolon) to update the integer scheduling
   priority INSN_PRIORITY (INSN).  Reduce the priority to execute the
   INSN earlier, increase the priority to execute INSN later.  Do not
   define this macro if you do not need to adjust the scheduling
   priorities of insns.  */

static int
rs6000_adjust_priority (insn, priority)
     rtx insn ATTRIBUTE_UNUSED;
     int priority;
{
  /* On machines (like the 750) which have asymmetric integer units,
     where one integer unit can do multiply and divides and the other
     can't, reduce the priority of multiply/divide so it is scheduled
     before other integer operations.  */

  /* NOTE(review): the adjustment below is compiled out, so this hook
     currently returns PRIORITY unchanged.  The fprintf to stderr is
     debugging output and would need removal before re-enabling.  */
#if 0
  if (! INSN_P (insn))
    return priority;

  if (GET_CODE (PATTERN (insn)) == USE)
    return priority;

  switch (rs6000_cpu_attr) {
  case CPU_PPC750:
    switch (get_attr_type (insn))
      {
      default:
	break;

      case TYPE_IMUL:
      case TYPE_IDIV:
	fprintf (stderr, "priority was %#x (%d) before adjustment\n",
		 priority, priority);
	if (priority >= 0 && priority < 0x01000000)
	  priority >>= 3;
	break;
      }
  }
#endif

  return priority;
}
13400 /* Return how many instructions the machine can issue per cycle. */
13402 static int
13403 rs6000_issue_rate ()
13405 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
13406 if (!reload_completed)
13407 return 1;
13409 switch (rs6000_cpu_attr) {
13410 case CPU_RIOS1: /* ? */
13411 case CPU_RS64A:
13412 case CPU_PPC601: /* ? */
13413 case CPU_PPC7450:
13414 return 3;
13415 case CPU_PPC440:
13416 case CPU_PPC603:
13417 case CPU_PPC750:
13418 case CPU_PPC7400:
13419 case CPU_PPC8540:
13420 return 2;
13421 case CPU_RIOS2:
13422 case CPU_PPC604:
13423 case CPU_PPC604E:
13424 case CPU_PPC620:
13425 case CPU_PPC630:
13426 case CPU_POWER4:
13427 return 4;
13428 default:
13429 return 1;
13433 /* Return how many instructions to look ahead for better insn
13434 scheduling. */
13436 static int
13437 rs6000_use_sched_lookahead ()
13439 if (rs6000_cpu_attr == CPU_PPC8540)
13440 return 4;
13441 return 0;
13445 /* Length in units of the trampoline for entering a nested function. */
13448 rs6000_trampoline_size ()
13450 int ret = 0;
13452 switch (DEFAULT_ABI)
13454 default:
13455 abort ();
13457 case ABI_AIX:
13458 ret = (TARGET_32BIT) ? 12 : 24;
13459 break;
13461 case ABI_DARWIN:
13462 case ABI_V4:
13463 ret = (TARGET_32BIT) ? 40 : 48;
13464 break;
13467 return ret;
/* Emit RTL insns to initialize the variable parts of a trampoline.
   FNADDR is an RTX for the address of the function's pure code.
   CXT is an RTX for the static chain value for the function.  */

void
rs6000_initialize_trampoline (addr, fnaddr, cxt)
     rtx addr;
     rtx fnaddr;
     rtx cxt;
{
  enum machine_mode pmode = Pmode;
  /* Width of one saved pointer/register slot.  */
  int regsize = (TARGET_32BIT) ? 4 : 8;
  rtx ctx_reg = force_reg (pmode, cxt);

  switch (DEFAULT_ABI)
    {
    default:
      abort ();

/* Macros to shorten the code expansions below.  */
#define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
#define MEM_PLUS(addr,offset) \
  gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))

    /* Under AIX, just build the 3 word function descriptor */
    case ABI_AIX:
      {
	rtx fn_reg = gen_reg_rtx (pmode);
	rtx toc_reg = gen_reg_rtx (pmode);
	/* Copy { code address, TOC pointer } from FNADDR's descriptor,
	   then store them plus the static chain into ADDR's slots.  */
	emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
	emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
	emit_move_insn (MEM_DEREF (addr), fn_reg);
	emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
	emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
      }
      break;

    /* Under V.4/eabi/darwin, __trampoline_setup does the real work.  */
    case ABI_DARWIN:
    case ABI_V4:
      emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
			 FALSE, VOIDmode, 4,
			 addr, pmode,
			 GEN_INT (rs6000_trampoline_size ()), SImode,
			 fnaddr, pmode,
			 ctx_reg, pmode);
      break;
    }

  return;
}
/* Table of valid machine attributes.  */

const struct attribute_spec rs6000_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
  /* "shortcall" shares the same validation handler; presumably it is
     the inverse of "longcall" -- verify against the call expanders.  */
  { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
  { NULL, 0, 0, false, false, false, NULL }
};
/* Handle a "longcall" or "shortcall" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
     tree *node;
     tree name;
     tree args ATTRIBUTE_UNUSED;
     int flags ATTRIBUTE_UNUSED;
     bool *no_add_attrs;
{
  /* Accept function types, plus FIELD_DECLs and TYPE_DECLs --
     NOTE(review): the latter two presumably let the attribute reach
     function-pointer members and typedefs; confirm that intent.  */
  if (TREE_CODE (*node) != FUNCTION_TYPE
      && TREE_CODE (*node) != FIELD_DECL
      && TREE_CODE (*node) != TYPE_DECL)
    {
      warning ("`%s' attribute only applies to functions",
	       IDENTIFIER_POINTER (name));
      /* Reject the attribute; caller will not attach it.  */
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
13556 /* Set longcall attributes on all functions declared when
13557 rs6000_default_long_calls is true. */
13558 static void
13559 rs6000_set_default_type_attributes (type)
13560 tree type;
13562 if (rs6000_default_long_calls
13563 && (TREE_CODE (type) == FUNCTION_TYPE
13564 || TREE_CODE (type) == METHOD_TYPE))
13565 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
13566 NULL_TREE,
13567 TYPE_ATTRIBUTES (type));
13570 /* Return a reference suitable for calling a function with the
13571 longcall attribute. */
13573 struct rtx_def *
13574 rs6000_longcall_ref (call_ref)
13575 rtx call_ref;
13577 const char *call_name;
13578 tree node;
13580 if (GET_CODE (call_ref) != SYMBOL_REF)
13581 return call_ref;
13583 /* System V adds '.' to the internal name, so skip them. */
13584 call_name = XSTR (call_ref, 0);
13585 if (*call_name == '.')
13587 while (*call_name == '.')
13588 call_name++;
13590 node = get_identifier (call_name);
13591 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
13594 return force_reg (Pmode, call_ref);
13597 #ifdef USING_ELFOS_H
13599 /* A C statement or statements to switch to the appropriate section
13600 for output of RTX in mode MODE. You can assume that RTX is some
13601 kind of constant in RTL. The argument MODE is redundant except in
13602 the case of a `const_int' rtx. Select the section by calling
13603 `text_section' or one of the alternatives for other sections.
13605 Do not define this macro if you put all constants in the read-only
13606 data section. */
13608 static void
13609 rs6000_elf_select_rtx_section (mode, x, align)
13610 enum machine_mode mode;
13611 rtx x;
13612 unsigned HOST_WIDE_INT align;
13614 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
13615 toc_section ();
13616 else
13617 default_elf_select_rtx_section (mode, x, align);
13620 /* A C statement or statements to switch to the appropriate
13621 section for output of DECL. DECL is either a `VAR_DECL' node
13622 or a constant of some sort. RELOC indicates whether forming
13623 the initial value of DECL requires link-time relocations. */
13625 static void
13626 rs6000_elf_select_section (decl, reloc, align)
13627 tree decl;
13628 int reloc;
13629 unsigned HOST_WIDE_INT align;
13631 /* Pretend that we're always building for a shared library when
13632 ABI_AIX, because otherwise we end up with dynamic relocations
13633 in read-only sections. This happens for function pointers,
13634 references to vtables in typeinfo, and probably other cases. */
13635 default_elf_select_section_1 (decl, reloc, align,
13636 flag_pic || DEFAULT_ABI == ABI_AIX);
13639 /* A C statement to build up a unique section name, expressed as a
13640 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
13641 RELOC indicates whether the initial value of EXP requires
13642 link-time relocations. If you do not define this macro, GCC will use
13643 the symbol name prefixed by `.' as the section name. Note - this
13644 macro can now be called for uninitialized data items as well as
13645 initialized data and functions. */
13647 static void
13648 rs6000_elf_unique_section (decl, reloc)
13649 tree decl;
13650 int reloc;
13652 /* As above, pretend that we're always building for a shared library
13653 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
13654 default_unique_section_1 (decl, reloc,
13655 flag_pic || DEFAULT_ABI == ABI_AIX);
/* For a SYMBOL_REF, set generic flags and then perform some
   target-specific processing.

   When the AIX ABI is requested on a non-AIX system, replace the
   function name with the real name (with a leading .) rather than the
   function descriptor name.  This saves a lot of overriding code to
   read the prefixes.  */

static void
rs6000_elf_encode_section_info (decl, rtl, first)
     tree decl;
     rtx rtl;
     int first;
{
  default_encode_section_info (decl, rtl, first);

  if (first
      && TREE_CODE (decl) == FUNCTION_DECL
      && !TARGET_AIX
      && DEFAULT_ABI == ABI_AIX)
    {
      rtx sym_ref = XEXP (rtl, 0);
      size_t len = strlen (XSTR (sym_ref, 0));
      /* Build ".<name>" in a stack buffer, then copy it into
	 GC-managed storage -- the alloca'd buffer dies with this
	 frame, but the SYMBOL_REF string must outlive it.  */
      char *str = alloca (len + 2);
      str[0] = '.';
      memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
      XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
    }
}
13688 static bool
13689 rs6000_elf_in_small_data_p (decl)
13690 tree decl;
13692 if (rs6000_sdata == SDATA_NONE)
13693 return false;
13695 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
13697 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
13698 if (strcmp (section, ".sdata") == 0
13699 || strcmp (section, ".sdata2") == 0
13700 || strcmp (section, ".sbss") == 0
13701 || strcmp (section, ".sbss2") == 0
13702 || strcmp (section, ".PPC.EMB.sdata0") == 0
13703 || strcmp (section, ".PPC.EMB.sbss0") == 0)
13704 return true;
13706 else
13708 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
13710 if (size > 0
13711 && (unsigned HOST_WIDE_INT) size <= g_switch_value
13712 /* If it's not public, and we're not going to reference it there,
13713 there's no need to put it in the small data section. */
13714 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
13715 return true;
13718 return false;
13721 #endif /* USING_ELFOS_H */
/* Return a REG that occurs in ADDR with coefficient 1.
   ADDR can be effectively incremented by incrementing REG.

   r0 is special and we must not select it as an address
   register by this routine since our caller will try to
   increment the returned register via an "la" instruction.  */

struct rtx_def *
find_addr_reg (addr)
     rtx addr;
{
  /* Walk down PLUS chains, each step following the operand that can
     still contain a usable (non-r0) base register.  */
  while (GET_CODE (addr) == PLUS)
    {
      if (GET_CODE (XEXP (addr, 0)) == REG
	  && REGNO (XEXP (addr, 0)) != 0)
	addr = XEXP (addr, 0);
      else if (GET_CODE (XEXP (addr, 1)) == REG
	       && REGNO (XEXP (addr, 1)) != 0)
	addr = XEXP (addr, 1);
      else if (CONSTANT_P (XEXP (addr, 0)))
	addr = XEXP (addr, 1);
      else if (CONSTANT_P (XEXP (addr, 1)))
	addr = XEXP (addr, 0);
      else
	abort ();
    }
  if (GET_CODE (addr) == REG && REGNO (addr) != 0)
    return addr;
  /* No usable base register found -- the address is malformed for
     this routine's contract.  */
  abort ();
}
/* Report an invalid address OP via fatal_insn; does not return.  */

void
rs6000_fatal_bad_address (op)
     rtx op;
{
  fatal_insn ("bad address", op);
}
13762 #if TARGET_MACHO
13764 #if 0
/* Returns 1 if OP is either a symbol reference or a sum of a symbol
   reference and a constant.  (Currently compiled out by the enclosing
   #if 0; kept for reference.)  */

int
symbolic_operand (op)
     rtx op;
{
  switch (GET_CODE (op))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return 1;
    case CONST:
      op = XEXP (op, 0);
      /* NOTE(review): relies on && binding tighter than || -- the
	 expression reads as SYM || ((SYM-or-LABEL) && CONST_INT),
	 i.e. a bare symbol or symbol+constant sum; confirm before
	 re-enabling this code.  */
      return (GET_CODE (op) == SYMBOL_REF ||
	      (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
	       || GET_CODE (XEXP (op, 0)) == LABEL_REF)
	      && GET_CODE (XEXP (op, 1)) == CONST_INT);
    default:
      return 0;
    }
}
13787 #endif
13789 #ifdef RS6000_LONG_BRANCH
/* Head of the chain of compiler-generated long-branch stubs awaiting
   output; one tree node per stub.  */
static tree stub_list = 0;
13793 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
13794 procedure calls to the linked list. */
13796 void
13797 add_compiler_stub (label_name, function_name, line_number)
13798 tree label_name;
13799 tree function_name;
13800 int line_number;
13802 tree stub = build_tree_list (function_name, label_name);
13803 TREE_TYPE (stub) = build_int_2 (line_number, 0);
13804 TREE_CHAIN (stub) = stub_list;
13805 stub_list = stub;
/* Accessors for the stub_list nodes built by add_compiler_stub:
   label in TREE_VALUE, function name in TREE_PURPOSE, and the source
   line number stored as the TREE_TYPE integer.  */
#define STUB_LABEL_NAME(STUB)     TREE_VALUE (STUB)
#define STUB_FUNCTION_NAME(STUB)  TREE_PURPOSE (STUB)
#define STUB_LINE_NUMBER(STUB)    TREE_INT_CST_LOW (TREE_TYPE (STUB))
/* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
   handling procedure calls from the linked list and initializes the
   linked list.  */

void
output_compiler_stub ()
{
  char tmp_buf[256];
  char label_buf[256];
  tree stub;

  /* Stubs are only emitted for non-PIC code.  */
  if (!flag_pic)
    for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
      {
	fprintf (asm_out_file,
		 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));

#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
	/* Emit a line-number stab so the stub is attributed to the
	   call's source line.  */
	if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
	  fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */

	/* A leading '*' means "already assembler-ready"; otherwise the
	   user-level name gets the usual '_' prefix.  */
	if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
	  strcpy (label_buf,
		  IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
	else
	  {
	    label_buf[0] = '_';
	    strcpy (label_buf+1,
		    IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
	  }

	/* Load the target address into r12 and branch through CTR.  */
	strcpy (tmp_buf, "lis r12,hi16(");
	strcat (tmp_buf, label_buf);
	strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
	strcat (tmp_buf, label_buf);
	strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
	output_asm_insn (tmp_buf, 0);

#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
	if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
	  fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
      }

  /* Reset for the next function.  */
  stub_list = 0;
}
13860 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
13861 already there or not. */
13864 no_previous_def (function_name)
13865 tree function_name;
13867 tree stub;
13868 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
13869 if (function_name == STUB_FUNCTION_NAME (stub))
13870 return 0;
13871 return 1;
13874 /* GET_PREV_LABEL gets the label name from the previous definition of
13875 the function. */
13877 tree
13878 get_prev_label (function_name)
13879 tree function_name;
13881 tree stub;
13882 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
13883 if (function_name == STUB_FUNCTION_NAME (stub))
13884 return STUB_LABEL_NAME (stub);
13885 return 0;
/* INSN is either a function call or a millicode call.  It may have an
   unconditional jump in its delay slot.

   CALL_DEST is the routine we are calling.  */

char *
output_call (insn, call_dest, operand_number)
     rtx insn;
     rtx call_dest;
     int operand_number;
{
  static char buf[256];
  /* Long-branch, non-PIC calls to a known symbol go through a
     compiler-generated stub; everything else is a plain bl.  */
  if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
    {
      tree labelname;
      tree funname = get_identifier (XSTR (call_dest, 0));

      if (no_previous_def (funname))
	{
	  /* First call to this function: invent a stub label and
	     record it on stub_list for later output.  */
	  int line_number = 0;
	  rtx label_rtx = gen_label_rtx ();
	  char *label_buf, temp_buf[256];
	  ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
				       CODE_LABEL_NUMBER (label_rtx));
	  label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
	  labelname = get_identifier (label_buf);
	  /* Scan backwards for the nearest NOTE to pick up a source
	     line number for debug output.  */
	  for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
	  if (insn)
	    line_number = NOTE_LINE_NUMBER (insn);
	  add_compiler_stub (labelname, funname, line_number);
	}
      else
	labelname = get_prev_label (funname);

      /* %.246s caps the label so "jbsr ..." always fits in buf.  */
      sprintf (buf, "jbsr %%z%d,%.246s",
	       operand_number, IDENTIFIER_POINTER (labelname));
      return buf;
    }
  else
    {
      sprintf (buf, "bl %%z%d", operand_number);
      return buf;
    }
}
/* Build in BUF a local label of the form "L<N>$<SYMBOL>", preserving
   or adding assembler quoting as needed.  LENGTH is accepted for
   symmetry with the other GEN_*_FOR_SYMBOL macros but is unused.  */
#define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N)		\
  do {								\
    const char *const symbol_ = (SYMBOL);			\
    char *buffer_ = (BUF);					\
    if (symbol_[0] == '"')					\
      {								\
	/* Already quoted: splice the label inside the quote.  */ \
	sprintf(buffer_, "\"L%d$%s", (N), symbol_+1);		\
      }								\
    else if (name_needs_quotes(symbol_))			\
      {								\
	sprintf(buffer_, "\"L%d$%s\"", (N), symbol_);		\
      }								\
    else							\
      {								\
	sprintf(buffer_, "L%d$%s", (N), symbol_);		\
      }								\
  } while (0)
/* Generate PIC and indirect symbol stubs.  */

void
machopic_output_stub (file, symb, stub)
     FILE *file;
     const char *symb, *stub;
{
  unsigned int length;
  char *symbol_name, *lazy_ptr_name;
  char *local_label_0;
  /* Monotonic counter so each stub's local label is unique.  */
  static int label = 0;

  /* Lose our funky encoding stuff so it doesn't contaminate the stub.  */
  symb = (*targetm.strip_name_encoding) (symb);

  label += 1;

  length = strlen (symb);
  symbol_name = alloca (length + 32);
  GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);

  lazy_ptr_name = alloca (length + 32);
  GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);

  local_label_0 = alloca (length + 32);
  GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);

  if (flag_pic == 2)
    machopic_picsymbol_stub1_section ();
  else
    machopic_symbol_stub1_section ();
  fprintf (file, "\t.align 2\n");

  fprintf (file, "%s:\n", stub);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);

  if (flag_pic == 2)
    {
      /* Fully PIC stub: compute the lazy pointer's address relative to
	 the bcl-obtained PC in r11, then jump through CTR.  */
      fprintf (file, "\tmflr r0\n");
      fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
      fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
      fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtlr r0\n");
      fprintf (file, "\tlwzu r12,lo16(%s-%s)(r11)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtctr r12\n");
      fprintf (file, "\tbctr\n");
    }
  else
    {
      /* Non-PIC stub: load the lazy pointer by absolute address.  */
      fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
      fprintf (file, "\tlwzu r12,lo16(%s)(r11)\n", lazy_ptr_name);
      fprintf (file, "\tmtctr r12\n");
      fprintf (file, "\tbctr\n");
    }

  /* Emit the lazy pointer itself, initialized to the dyld binder.  */
  machopic_lazy_symbol_ptr_section ();
  fprintf (file, "%s:\n", lazy_ptr_name);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
  fprintf (file, "\t.long dyld_stub_binding_helper\n");
}
/* Legitimize PIC addresses.  If the address is already
   position-independent, we return ORIG.  Newly generated
   position-independent addresses go into a reg.  This is REG if non
   zero, otherwise we allocate register(s) as necessary.  */

/* True iff X fits in a signed 16-bit displacement.  */
#define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)

rtx
rs6000_machopic_legitimize_pic_address (orig, mode, reg)
     rtx orig;
     enum machine_mode mode;
     rtx reg;
{
  rtx base, offset;

  if (reg == NULL && ! reload_in_progress && ! reload_completed)
    reg = gen_reg_rtx (Pmode);

  if (GET_CODE (orig) == CONST)
    {
      /* Already a PIC-register-relative sum: nothing to do.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
	return orig;

      if (GET_CODE (XEXP (orig, 0)) == PLUS)
	{
	  /* Legitimize the two halves of the sum independently.  */
	  base =
	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
						    Pmode, reg);
	  offset =
	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
						    Pmode, reg);
	}
      else
	abort ();

      if (GET_CODE (offset) == CONST_INT)
	{
	  if (SMALL_INT (offset))
	    return plus_constant (base, INTVAL (offset));
	  else if (! reload_in_progress && ! reload_completed)
	    offset = force_reg (Pmode, offset);
	  else
	    {
	      /* During/after reload no new registers may be created;
		 fall back to a constant-pool reference.  */
	      rtx mem = force_const_mem (Pmode, orig);
	      return machopic_legitimize_pic_address (mem, Pmode, reg);
	    }
	}
      return gen_rtx (PLUS, Pmode, base, offset);
    }

  /* Fall back on generic machopic code.  */
  return machopic_legitimize_pic_address (orig, mode, reg);
}
/* This is just a placeholder to make linking work without having to
   add this to the generic Darwin EXTRA_SECTIONS.  If -mcall-aix is
   ever needed for Darwin (not too likely!) this would have to get a
   real definition.  */

void
toc_section ()
{
}
14082 #endif /* TARGET_MACHO */
14084 #if TARGET_ELF
14085 static unsigned int
14086 rs6000_elf_section_type_flags (decl, name, reloc)
14087 tree decl;
14088 const char *name;
14089 int reloc;
14091 unsigned int flags
14092 = default_section_type_flags_1 (decl, name, reloc,
14093 flag_pic || DEFAULT_ABI == ABI_AIX);
14095 if (TARGET_RELOCATABLE)
14096 flags |= SECTION_WRITE;
14098 return flags;
14101 /* Record an element in the table of global constructors. SYMBOL is
14102 a SYMBOL_REF of the function to be called; PRIORITY is a number
14103 between 0 and MAX_INIT_PRIORITY.
14105 This differs from default_named_section_asm_out_constructor in
14106 that we have special handling for -mrelocatable. */
14108 static void
14109 rs6000_elf_asm_out_constructor (symbol, priority)
14110 rtx symbol;
14111 int priority;
14113 const char *section = ".ctors";
14114 char buf[16];
14116 if (priority != DEFAULT_INIT_PRIORITY)
14118 sprintf (buf, ".ctors.%.5u",
14119 /* Invert the numbering so the linker puts us in the proper
14120 order; constructors are run from right to left, and the
14121 linker sorts in increasing order. */
14122 MAX_INIT_PRIORITY - priority);
14123 section = buf;
14126 named_section_flags (section, SECTION_WRITE);
14127 assemble_align (POINTER_SIZE);
14129 if (TARGET_RELOCATABLE)
14131 fputs ("\t.long (", asm_out_file);
14132 output_addr_const (asm_out_file, symbol);
14133 fputs (")@fixup\n", asm_out_file);
14135 else
14136 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
14139 static void
14140 rs6000_elf_asm_out_destructor (symbol, priority)
14141 rtx symbol;
14142 int priority;
14144 const char *section = ".dtors";
14145 char buf[16];
14147 if (priority != DEFAULT_INIT_PRIORITY)
14149 sprintf (buf, ".dtors.%.5u",
14150 /* Invert the numbering so the linker puts us in the proper
14151 order; constructors are run from right to left, and the
14152 linker sorts in increasing order. */
14153 MAX_INIT_PRIORITY - priority);
14154 section = buf;
14157 named_section_flags (section, SECTION_WRITE);
14158 assemble_align (POINTER_SIZE);
14160 if (TARGET_RELOCATABLE)
14162 fputs ("\t.long (", asm_out_file);
14163 output_addr_const (asm_out_file, symbol);
14164 fputs (")@fixup\n", asm_out_file);
14166 else
14167 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
/* Emit the assembler prologue that declares function NAME for DECL:
   on 64-bit, the .opd function descriptor; optionally the TOC fixup
   words for -mrelocatable; then the type directive and label.  */

void
rs6000_elf_declare_function_name (file, name, decl)
     FILE *file;
     const char *name;
     tree decl;
{
  if (TARGET_64BIT)
    {
      /* Emit the three-word function descriptor in .opd: code address,
	 TOC base, and environment word (part of the 24-byte size).  */
      fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
      ASM_OUTPUT_LABEL (file, name);
      fputs (DOUBLE_INT_ASM_OP, file);
      putc ('.', file);
      assemble_name (file, name);
      fputs (",.TOC.@tocbase,0\n\t.previous\n\t.size\t", file);
      assemble_name (file, name);
      fputs (",24\n\t.type\t.", file);
      assemble_name (file, name);
      fputs (",@function\n", file);
      if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
	{
	  fputs ("\t.globl\t.", file);
	  assemble_name (file, name);
	  putc ('\n', file);
	}
      ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
      /* The code entry point is ".<name>".  */
      putc ('.', file);
      ASM_OUTPUT_LABEL (file, name);
      return;
    }

  if (TARGET_RELOCATABLE
      && (get_pool_size () != 0 || current_function_profile)
      && uses_TOC())
    {
      char buf[256];

      (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);

      /* Emit the word LCTOC1 - LCF<n> used to locate the TOC.  */
      ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
      fprintf (file, "\t.long ");
      assemble_name (file, buf);
      putc ('-', file);
      ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
      assemble_name (file, buf);
      putc ('\n', file);
    }

  ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
  ASM_DECLARE_RESULT (file, DECL_RESULT (decl));

  if (DEFAULT_ABI == ABI_AIX)
    {
      const char *desc_name, *orig_name;

      /* The descriptor name is the symbol without leading dots.  */
      orig_name = (*targetm.strip_name_encoding) (name);
      desc_name = orig_name;
      while (*desc_name == '.')
	desc_name++;

      if (TREE_PUBLIC (decl))
	fprintf (file, "\t.globl %s\n", desc_name);

      fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
      fprintf (file, "%s:\n", desc_name);
      fprintf (file, "\t.long %s\n", orig_name);
      fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
      /* NOTE(review): this inner DEFAULT_ABI == ABI_AIX test is always
	 true inside the enclosing if; likely a leftover from a broader
	 condition.  */
      if (DEFAULT_ABI == ABI_AIX)
	fputs ("\t.long 0\n", file);
      fprintf (file, "\t.previous\n");
    }
  ASM_OUTPUT_LABEL (file, name);
}
14242 #endif
14244 #if TARGET_XCOFF
/* Globalize NAME for the XCOFF assembler: emit ".globl" followed by
   the base name (encoding prefixes stripped by the BASENAME macro).  */

static void
rs6000_xcoff_asm_globalize_label (stream, name)
     FILE *stream;
     const char *name;
{
  fputs (GLOBAL_ASM_OP, stream);
  RS6000_OUTPUT_BASENAME (stream, name);
  putc ('\n', stream);
}
14255 static void
14256 rs6000_xcoff_asm_named_section (name, flags)
14257 const char *name;
14258 unsigned int flags;
14260 int smclass;
14261 static const char * const suffix[3] = { "PR", "RO", "RW" };
14263 if (flags & SECTION_CODE)
14264 smclass = 0;
14265 else if (flags & SECTION_WRITE)
14266 smclass = 2;
14267 else
14268 smclass = 1;
14270 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
14271 (flags & SECTION_CODE) ? "." : "",
14272 name, suffix[smclass], flags & SECTION_ENTSIZE);
14275 static void
14276 rs6000_xcoff_select_section (decl, reloc, align)
14277 tree decl;
14278 int reloc;
14279 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
14281 if (decl_readonly_section_1 (decl, reloc, 1))
14283 if (TREE_PUBLIC (decl))
14284 read_only_data_section ();
14285 else
14286 read_only_private_data_section ();
14288 else
14290 if (TREE_PUBLIC (decl))
14291 data_section ();
14292 else
14293 private_data_section ();
14297 static void
14298 rs6000_xcoff_unique_section (decl, reloc)
14299 tree decl;
14300 int reloc ATTRIBUTE_UNUSED;
14302 const char *name;
14304 /* Use select_section for private and uninitialized data. */
14305 if (!TREE_PUBLIC (decl)
14306 || DECL_COMMON (decl)
14307 || DECL_INITIAL (decl) == NULL_TREE
14308 || DECL_INITIAL (decl) == error_mark_node
14309 || (flag_zero_initialized_in_bss
14310 && initializer_zerop (DECL_INITIAL (decl))))
14311 return;
14313 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
14314 name = (*targetm.strip_name_encoding) (name);
14315 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
14318 /* Select section for constant in constant pool.
14320 On RS/6000, all constants are in the private read-only data area.
14321 However, if this is being placed in the TOC it must be output as a
14322 toc entry. */
14324 static void
14325 rs6000_xcoff_select_rtx_section (mode, x, align)
14326 enum machine_mode mode;
14327 rtx x;
14328 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
14330 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
14331 toc_section ();
14332 else
14333 read_only_private_data_section ();
/* Remove any trailing [DS] or the like from the symbol name.  */

static const char *
rs6000_xcoff_strip_name_encoding (name)
     const char *name;
{
  size_t len;

  if (*name == '*')
    name++;

  len = strlen (name);

  /* A name ending in ']' carries a four-character "[XX]" suffix;
     return a fresh GC string without it.  */
  if (name[len - 1] == ']')
    return ggc_alloc_string (name, len - 4);

  return name;
}
/* Section attributes.  AIX is always PIC.  */

static unsigned int
rs6000_xcoff_section_type_flags (decl, name, reloc)
     tree decl;
     const char *name;
     int reloc;
{
  unsigned int align;
  unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);

  /* Align to at least UNIT size.  */
  if (flags & SECTION_CODE)
    align = MIN_UNITS_PER_WORD;
  else
    /* Increase alignment of large objects if not already stricter.
       NOTE(review): DECL is dereferenced unconditionally here; confirm
       callers never request a data section by name only (NULL decl).  */
    align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
		 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
		 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);

  /* Encode log2 of the alignment in the entsize field.  */
  return flags | (exact_log2 (align) & SECTION_ENTSIZE);
}
/* Output at beginning of assembler file.

   Initialize the section names for the RS/6000 at this point.

   Specify filename, including full path, to assembler.

   We want to go into the TOC section so at least one .toc will be emitted.
   Also, in order to output proper .bs/.es pairs, we need at least one static
   [RW] section emitted.

   Finally, declare mcount when profiling to make the assembler happy.  */

static void
rs6000_xcoff_file_start ()
{
  /* Derive per-file section names from the input file name.  */
  rs6000_gen_section_name (&xcoff_bss_section_name,
			   main_input_filename, ".bss_");
  rs6000_gen_section_name (&xcoff_private_data_section_name,
			   main_input_filename, ".rw_");
  rs6000_gen_section_name (&xcoff_read_only_section_name,
			   main_input_filename, ".ro_");

  fputs ("\t.file\t", asm_out_file);
  output_quoted_string (asm_out_file, main_input_filename);
  fputc ('\n', asm_out_file);
  /* Touch the sections noted in the comment above.  */
  toc_section ();
  if (write_symbols != NO_DEBUG)
    private_data_section ();
  text_section ();
  if (profile_flag)
    fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
  rs6000_file_start ();
}
14409 /* Output at end of assembler file.
14410 On the RS/6000, referencing data should automatically pull in text. */
14412 static void
14413 rs6000_xcoff_file_end ()
14415 text_section ();
14416 fputs ("_section_.text:\n", asm_out_file);
14417 data_section ();
14418 fputs (TARGET_32BIT
14419 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
14420 asm_out_file);
14422 #endif /* TARGET_XCOFF */
14424 #if TARGET_MACHO
14425 /* Cross-module name binding. Darwin does not support overriding
14426 functions at dynamic-link time. */
14428 static bool
14429 rs6000_binds_local_p (decl)
14430 tree decl;
14432 return default_binds_local_p_1 (decl, 0);
14434 #endif
14436 /* Compute a (partial) cost for rtx X. Return true if the complete
14437 cost has been computed, and false if subexpressions should be
14438 scanned. In either case, *TOTAL contains the cost result. */
14440 static bool
14441 rs6000_rtx_costs (x, code, outer_code, total)
14442 rtx x;
14443 int code, outer_code ATTRIBUTE_UNUSED;
14444 int *total;
14446 switch (code)
14448 /* On the RS/6000, if it is valid in the insn, it is free.
14449 So this always returns 0. */
14450 case CONST_INT:
14451 case CONST:
14452 case LABEL_REF:
14453 case SYMBOL_REF:
14454 case CONST_DOUBLE:
14455 case HIGH:
14456 *total = 0;
14457 return true;
14459 case PLUS:
14460 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
14461 && ((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1))
14462 + 0x8000) >= 0x10000)
14463 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
14464 ? COSTS_N_INSNS (2)
14465 : COSTS_N_INSNS (1));
14466 return true;
14468 case AND:
14469 case IOR:
14470 case XOR:
14471 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
14472 && (INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff)) != 0
14473 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
14474 ? COSTS_N_INSNS (2)
14475 : COSTS_N_INSNS (1));
14476 return true;
14478 case MULT:
14479 if (optimize_size)
14481 *total = COSTS_N_INSNS (2);
14482 return true;
14484 switch (rs6000_cpu)
14486 case PROCESSOR_RIOS1:
14487 case PROCESSOR_PPC405:
14488 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14489 ? COSTS_N_INSNS (5)
14490 : (INTVAL (XEXP (x, 1)) >= -256
14491 && INTVAL (XEXP (x, 1)) <= 255)
14492 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
14493 return true;
14495 case PROCESSOR_PPC440:
14496 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14497 ? COSTS_N_INSNS (3)
14498 : COSTS_N_INSNS (2));
14499 return true;
14501 case PROCESSOR_RS64A:
14502 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14503 ? GET_MODE (XEXP (x, 1)) != DImode
14504 ? COSTS_N_INSNS (20) : COSTS_N_INSNS (34)
14505 : (INTVAL (XEXP (x, 1)) >= -256
14506 && INTVAL (XEXP (x, 1)) <= 255)
14507 ? COSTS_N_INSNS (8) : COSTS_N_INSNS (12));
14508 return true;
14510 case PROCESSOR_RIOS2:
14511 case PROCESSOR_MPCCORE:
14512 case PROCESSOR_PPC604e:
14513 *total = COSTS_N_INSNS (2);
14514 return true;
14516 case PROCESSOR_PPC601:
14517 *total = COSTS_N_INSNS (5);
14518 return true;
14520 case PROCESSOR_PPC603:
14521 case PROCESSOR_PPC7400:
14522 case PROCESSOR_PPC750:
14523 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14524 ? COSTS_N_INSNS (5)
14525 : (INTVAL (XEXP (x, 1)) >= -256
14526 && INTVAL (XEXP (x, 1)) <= 255)
14527 ? COSTS_N_INSNS (2) : COSTS_N_INSNS (3));
14528 return true;
14530 case PROCESSOR_PPC7450:
14531 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14532 ? COSTS_N_INSNS (4)
14533 : COSTS_N_INSNS (3));
14534 return true;
14536 case PROCESSOR_PPC403:
14537 case PROCESSOR_PPC604:
14538 case PROCESSOR_PPC8540:
14539 *total = COSTS_N_INSNS (4);
14540 return true;
14542 case PROCESSOR_PPC620:
14543 case PROCESSOR_PPC630:
14544 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14545 ? GET_MODE (XEXP (x, 1)) != DImode
14546 ? COSTS_N_INSNS (5) : COSTS_N_INSNS (7)
14547 : (INTVAL (XEXP (x, 1)) >= -256
14548 && INTVAL (XEXP (x, 1)) <= 255)
14549 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
14550 return true;
14552 case PROCESSOR_POWER4:
14553 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14554 ? GET_MODE (XEXP (x, 1)) != DImode
14555 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4)
14556 : COSTS_N_INSNS (2));
14557 return true;
14559 default:
14560 abort ();
14563 case DIV:
14564 case MOD:
14565 if (GET_CODE (XEXP (x, 1)) == CONST_INT
14566 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
14568 *total = COSTS_N_INSNS (2);
14569 return true;
14571 /* FALLTHRU */
14573 case UDIV:
14574 case UMOD:
14575 switch (rs6000_cpu)
14577 case PROCESSOR_RIOS1:
14578 *total = COSTS_N_INSNS (19);
14579 return true;
14581 case PROCESSOR_RIOS2:
14582 *total = COSTS_N_INSNS (13);
14583 return true;
14585 case PROCESSOR_RS64A:
14586 *total = (GET_MODE (XEXP (x, 1)) != DImode
14587 ? COSTS_N_INSNS (65)
14588 : COSTS_N_INSNS (67));
14589 return true;
14591 case PROCESSOR_MPCCORE:
14592 *total = COSTS_N_INSNS (6);
14593 return true;
14595 case PROCESSOR_PPC403:
14596 *total = COSTS_N_INSNS (33);
14597 return true;
14599 case PROCESSOR_PPC405:
14600 *total = COSTS_N_INSNS (35);
14601 return true;
14603 case PROCESSOR_PPC440:
14604 *total = COSTS_N_INSNS (34);
14605 return true;
14607 case PROCESSOR_PPC601:
14608 *total = COSTS_N_INSNS (36);
14609 return true;
14611 case PROCESSOR_PPC603:
14612 *total = COSTS_N_INSNS (37);
14613 return true;
14615 case PROCESSOR_PPC604:
14616 case PROCESSOR_PPC604e:
14617 *total = COSTS_N_INSNS (20);
14618 return true;
14620 case PROCESSOR_PPC620:
14621 case PROCESSOR_PPC630:
14622 *total = (GET_MODE (XEXP (x, 1)) != DImode
14623 ? COSTS_N_INSNS (21)
14624 : COSTS_N_INSNS (37));
14625 return true;
14627 case PROCESSOR_PPC750:
14628 case PROCESSOR_PPC8540:
14629 case PROCESSOR_PPC7400:
14630 *total = COSTS_N_INSNS (19);
14631 return true;
14633 case PROCESSOR_PPC7450:
14634 *total = COSTS_N_INSNS (23);
14635 return true;
14637 case PROCESSOR_POWER4:
14638 *total = (GET_MODE (XEXP (x, 1)) != DImode
14639 ? COSTS_N_INSNS (18)
14640 : COSTS_N_INSNS (34));
14641 return true;
14643 default:
14644 abort ();
14647 case FFS:
14648 *total = COSTS_N_INSNS (4);
14649 return true;
14651 case MEM:
14652 /* MEM should be slightly more expensive than (plus (reg) (const)) */
14653 *total = 5;
14654 return true;
14656 default:
14657 return false;
14661 /* A C expression returning the cost of moving data from a register of class
14662 CLASS1 to one of CLASS2. */
14665 rs6000_register_move_cost (mode, from, to)
14666 enum machine_mode mode;
14667 enum reg_class from, to;
14669 /* Moves from/to GENERAL_REGS. */
14670 if (reg_classes_intersect_p (to, GENERAL_REGS)
14671 || reg_classes_intersect_p (from, GENERAL_REGS))
14673 if (! reg_classes_intersect_p (to, GENERAL_REGS))
14674 from = to;
14676 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
14677 return (rs6000_memory_move_cost (mode, from, 0)
14678 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
14680 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift...*/
14681 else if (from == CR_REGS)
14682 return 4;
14684 else
14685 /* A move will cost one instruction per GPR moved. */
14686 return 2 * HARD_REGNO_NREGS (0, mode);
14689 /* Moving between two similar registers is just one instruction. */
14690 else if (reg_classes_intersect_p (to, from))
14691 return mode == TFmode ? 4 : 2;
14693 /* Everything else has to go through GENERAL_REGS. */
14694 else
14695 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
14696 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
14699 /* A C expressions returning the cost of moving data of MODE from a register to
14700 or from memory. */
14703 rs6000_memory_move_cost (mode, class, in)
14704 enum machine_mode mode;
14705 enum reg_class class;
14706 int in ATTRIBUTE_UNUSED;
14708 if (reg_classes_intersect_p (class, GENERAL_REGS))
14709 return 4 * HARD_REGNO_NREGS (0, mode);
14710 else if (reg_classes_intersect_p (class, FLOAT_REGS))
14711 return 4 * HARD_REGNO_NREGS (32, mode);
14712 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
14713 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO, mode);
14714 else
14715 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
14718 /* Return an RTX representing where to find the function value of a
14719 function returning MODE. */
14720 static rtx
14721 rs6000_complex_function_value (enum machine_mode mode)
14723 unsigned int regno;
14724 rtx r1, r2;
14725 enum machine_mode inner = GET_MODE_INNER (mode);
14727 if (FLOAT_MODE_P (mode))
14728 regno = FP_ARG_RETURN;
14729 else
14731 regno = GP_ARG_RETURN;
14733 /* 32-bit is OK since it'll go in r3/r4. */
14734 if (TARGET_32BIT
14735 && GET_MODE_BITSIZE (inner) >= 32)
14736 return gen_rtx_REG (mode, regno);
14739 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
14740 const0_rtx);
14741 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
14742 GEN_INT (GET_MODE_UNIT_SIZE (inner)));
14743 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
14746 /* Define how to find the value returned by a function.
14747 VALTYPE is the data type of the value (as a tree).
14748 If the precise function being called is known, FUNC is its FUNCTION_DECL;
14749 otherwise, FUNC is 0.
14751 On the SPE, both FPs and vectors are returned in r3.
14753 On RS/6000 an integer value is in r3 and a floating-point value is in
14754 fp1, unless -msoft-float. */
14757 rs6000_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
14759 enum machine_mode mode;
14760 unsigned int regno;
14762 if ((INTEGRAL_TYPE_P (valtype)
14763 && TYPE_PRECISION (valtype) < BITS_PER_WORD)
14764 || POINTER_TYPE_P (valtype))
14765 mode = word_mode;
14766 else
14767 mode = TYPE_MODE (valtype);
14769 if (TREE_CODE (valtype) == REAL_TYPE && TARGET_HARD_FLOAT && TARGET_FPRS)
14770 regno = FP_ARG_RETURN;
14771 else if (TREE_CODE (valtype) == COMPLEX_TYPE
14772 && TARGET_HARD_FLOAT
14773 && SPLIT_COMPLEX_ARGS)
14774 return rs6000_complex_function_value (mode);
14775 else if (TREE_CODE (valtype) == VECTOR_TYPE && TARGET_ALTIVEC)
14776 regno = ALTIVEC_ARG_RETURN;
14777 else
14778 regno = GP_ARG_RETURN;
14780 return gen_rtx_REG (mode, regno);
14783 /* Define how to find the value returned by a library function
14784 assuming the value has mode MODE. */
14786 rs6000_libcall_value (enum machine_mode mode)
14788 unsigned int regno;
14790 if (GET_MODE_CLASS (mode) == MODE_FLOAT
14791 && TARGET_HARD_FLOAT && TARGET_FPRS)
14792 regno = FP_ARG_RETURN;
14793 else if (ALTIVEC_VECTOR_MODE (mode))
14794 regno = ALTIVEC_ARG_RETURN;
14795 else if (COMPLEX_MODE_P (mode) && SPLIT_COMPLEX_ARGS)
14796 return rs6000_complex_function_value (mode);
14797 else
14798 regno = GP_ARG_RETURN;
14800 return gen_rtx_REG (mode, regno);
14803 /* Return true if TYPE is of type __ev64_opaque__. */
14805 static bool
14806 is_ev64_opaque_type (type)
14807 tree type;
14809 return (TARGET_SPE
14810 && (type == opaque_V2SI_type_node
14811 || type == opaque_V2SF_type_node
14812 || type == opaque_p_V2SI_type_node));
14815 static rtx
14816 rs6000_dwarf_register_span (reg)
14817 rtx reg;
14819 unsigned regno;
14821 if (!TARGET_SPE || !SPE_VECTOR_MODE (GET_MODE (reg)))
14822 return NULL_RTX;
14824 regno = REGNO (reg);
14826 /* The duality of the SPE register size wreaks all kinds of havoc.
14827 This is a way of distinguishing r0 in 32-bits from r0 in
14828 64-bits. */
14829 return
14830 gen_rtx_PARALLEL (VOIDmode,
14831 BYTES_BIG_ENDIAN
14832 ? gen_rtvec (2,
14833 gen_rtx_REG (SImode, regno + 1200),
14834 gen_rtx_REG (SImode, regno))
14835 : gen_rtvec (2,
14836 gen_rtx_REG (SImode, regno),
14837 gen_rtx_REG (SImode, regno + 1200)));
14840 #include "gt-rs6000.h"