* config/i386/uwin.h: Remove SUBTARGET_PROLOGUE.
[official-gcc.git] / gcc / config / rs6000 / rs6000.c
blob2b9a76124e3f3b571831972ea38d47e39cb5ecc7
1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 2, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the
20 Free Software Foundation, 59 Temple Place - Suite 330, Boston,
21 MA 02111-1307, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "rtl.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "real.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
34 #include "flags.h"
35 #include "recog.h"
36 #include "obstack.h"
37 #include "tree.h"
38 #include "expr.h"
39 #include "optabs.h"
40 #include "except.h"
41 #include "function.h"
42 #include "output.h"
43 #include "basic-block.h"
44 #include "integrate.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "hashtab.h"
48 #include "tm_p.h"
49 #include "target.h"
50 #include "target-def.h"
51 #include "langhooks.h"
52 #include "reload.h"
54 #ifndef TARGET_NO_PROTOTYPE
55 #define TARGET_NO_PROTOTYPE 0
56 #endif
58 #define EASY_VECTOR_15(n, x, y) ((n) >= -16 && (n) <= 15 \
59 && easy_vector_same (x, y))
61 #define EASY_VECTOR_15_ADD_SELF(n, x, y) ((n) >= 0x10 && (n) <= 0x1e \
62 && !((n) & 1) \
63 && easy_vector_same (x, y))
65 #define min(A,B) ((A) < (B) ? (A) : (B))
66 #define max(A,B) ((A) > (B) ? (A) : (B))
68 /* Target cpu type */
70 enum processor_type rs6000_cpu;
71 struct rs6000_cpu_select rs6000_select[3] =
73 /* switch name, tune arch */
74 { (const char *)0, "--with-cpu=", 1, 1 },
75 { (const char *)0, "-mcpu=", 1, 1 },
76 { (const char *)0, "-mtune=", 1, 0 },
79 /* Size of long double */
80 const char *rs6000_long_double_size_string;
81 int rs6000_long_double_type_size;
83 /* Whether -mabi=altivec has appeared */
84 int rs6000_altivec_abi;
86 /* Whether VRSAVE instructions should be generated. */
87 int rs6000_altivec_vrsave;
89 /* String from -mvrsave= option. */
90 const char *rs6000_altivec_vrsave_string;
92 /* Nonzero if we want SPE ABI extensions. */
93 int rs6000_spe_abi;
95 /* Whether isel instructions should be generated. */
96 int rs6000_isel;
98 /* Whether SPE simd instructions should be generated. */
99 int rs6000_spe;
101 /* Nonzero if floating point operations are done in the GPRs. */
102 int rs6000_float_gprs = 0;
104 /* String from -mfloat-gprs=. */
105 const char *rs6000_float_gprs_string;
107 /* String from -misel=. */
108 const char *rs6000_isel_string;
110 /* String from -mspe=. */
111 const char *rs6000_spe_string;
113 /* Set to nonzero once AIX common-mode calls have been defined. */
114 static GTY(()) int common_mode_defined;
116 /* Save information from a "cmpxx" operation until the branch or scc is
117 emitted. */
118 rtx rs6000_compare_op0, rs6000_compare_op1;
119 int rs6000_compare_fp_p;
121 /* Label number of label created for -mrelocatable, to call to so we can
122 get the address of the GOT section */
123 int rs6000_pic_labelno;
125 #ifdef USING_ELFOS_H
126 /* Which abi to adhere to */
127 const char *rs6000_abi_name = RS6000_ABI_NAME;
129 /* Semantics of the small data area */
130 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
132 /* Which small data model to use */
133 const char *rs6000_sdata_name = (char *)0;
135 /* Counter for labels which are to be placed in .fixup. */
136 int fixuplabelno = 0;
137 #endif
139 /* Bit size of immediate TLS offsets and string from which it is decoded. */
140 int rs6000_tls_size = 32;
141 const char *rs6000_tls_size_string;
143 /* ABI enumeration available for subtarget to use. */
144 enum rs6000_abi rs6000_current_abi;
146 /* ABI string from -mabi= option. */
147 const char *rs6000_abi_string;
149 /* Debug flags */
150 const char *rs6000_debug_name;
151 int rs6000_debug_stack; /* debug stack applications */
152 int rs6000_debug_arg; /* debug argument handling */
154 /* Opaque types. */
155 static GTY(()) tree opaque_V2SI_type_node;
156 static GTY(()) tree opaque_V2SF_type_node;
157 static GTY(()) tree opaque_p_V2SI_type_node;
159 const char *rs6000_traceback_name;
160 static enum {
161 traceback_default = 0,
162 traceback_none,
163 traceback_part,
164 traceback_full
165 } rs6000_traceback;
167 /* Flag to say the TOC is initialized */
168 int toc_initialized;
169 char toc_label_name[10];
171 /* Alias set for saves and restores from the rs6000 stack. */
172 static int rs6000_sr_alias_set;
174 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
175 The only place that looks at this is rs6000_set_default_type_attributes;
176 everywhere else should rely on the presence or absence of a longcall
177 attribute on the function declaration. */
178 int rs6000_default_long_calls;
179 const char *rs6000_longcall_switch;
181 /* Control alignment for fields within structures. */
182 /* String from -malign-XXXXX. */
183 const char *rs6000_alignment_string;
184 int rs6000_alignment_flags;
186 struct builtin_description
188 /* mask is not const because we're going to alter it below. This
189 nonsense will go away when we rewrite the -march infrastructure
190 to give us more target flag bits. */
191 unsigned int mask;
192 const enum insn_code icode;
193 const char *const name;
194 const enum rs6000_builtins code;
197 static bool rs6000_function_ok_for_sibcall PARAMS ((tree, tree));
198 static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
199 static void validate_condition_mode
200 PARAMS ((enum rtx_code, enum machine_mode));
201 static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
202 static void rs6000_maybe_dead PARAMS ((rtx));
203 static void rs6000_emit_stack_tie PARAMS ((void));
204 static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
205 static rtx spe_synthesize_frame_save PARAMS ((rtx));
206 static bool spe_func_has_64bit_regs_p PARAMS ((void));
207 static void emit_frame_save PARAMS ((rtx, rtx, enum machine_mode,
208 unsigned int, int, int));
209 static rtx gen_frame_mem_offset PARAMS ((enum machine_mode, rtx, int));
210 static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
211 static unsigned rs6000_hash_constant PARAMS ((rtx));
212 static unsigned toc_hash_function PARAMS ((const void *));
213 static int toc_hash_eq PARAMS ((const void *, const void *));
214 static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
215 static bool constant_pool_expr_p PARAMS ((rtx));
216 static bool toc_relative_expr_p PARAMS ((rtx));
217 static bool legitimate_small_data_p PARAMS ((enum machine_mode, rtx));
218 static bool legitimate_offset_address_p PARAMS ((enum machine_mode, rtx, int));
219 static bool legitimate_indexed_address_p PARAMS ((rtx, int));
220 static bool legitimate_indirect_address_p PARAMS ((rtx, int));
221 static bool legitimate_lo_sum_address_p PARAMS ((enum machine_mode, rtx, int));
222 static struct machine_function * rs6000_init_machine_status PARAMS ((void));
223 static bool rs6000_assemble_integer PARAMS ((rtx, unsigned int, int));
224 #ifdef HAVE_GAS_HIDDEN
225 static void rs6000_assemble_visibility PARAMS ((tree, int));
226 #endif
227 static int rs6000_ra_ever_killed PARAMS ((void));
228 static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
229 extern const struct attribute_spec rs6000_attribute_table[];
230 static void rs6000_set_default_type_attributes PARAMS ((tree));
231 static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
232 static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
233 static void rs6000_output_mi_thunk PARAMS ((FILE *, tree, HOST_WIDE_INT,
234 HOST_WIDE_INT, tree));
235 static rtx rs6000_emit_set_long_const PARAMS ((rtx,
236 HOST_WIDE_INT, HOST_WIDE_INT));
237 #if TARGET_ELF
238 static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
239 int));
240 static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
241 static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
242 static void rs6000_elf_select_section PARAMS ((tree, int,
243 unsigned HOST_WIDE_INT));
244 static void rs6000_elf_unique_section PARAMS ((tree, int));
245 static void rs6000_elf_select_rtx_section PARAMS ((enum machine_mode, rtx,
246 unsigned HOST_WIDE_INT));
247 static void rs6000_elf_encode_section_info PARAMS ((tree, rtx, int))
248 ATTRIBUTE_UNUSED;
249 static bool rs6000_elf_in_small_data_p PARAMS ((tree));
250 #endif
251 #if TARGET_XCOFF
252 static void rs6000_xcoff_asm_globalize_label PARAMS ((FILE *, const char *));
253 static void rs6000_xcoff_asm_named_section PARAMS ((const char *, unsigned int));
254 static void rs6000_xcoff_select_section PARAMS ((tree, int,
255 unsigned HOST_WIDE_INT));
256 static void rs6000_xcoff_unique_section PARAMS ((tree, int));
257 static void rs6000_xcoff_select_rtx_section PARAMS ((enum machine_mode, rtx,
258 unsigned HOST_WIDE_INT));
259 static const char * rs6000_xcoff_strip_name_encoding PARAMS ((const char *));
260 static unsigned int rs6000_xcoff_section_type_flags PARAMS ((tree, const char *, int));
261 #endif
262 #if TARGET_MACHO
263 static bool rs6000_binds_local_p PARAMS ((tree));
264 #endif
265 static int rs6000_use_dfa_pipeline_interface PARAMS ((void));
266 static int rs6000_variable_issue PARAMS ((FILE *, int, rtx, int));
267 static bool rs6000_rtx_costs PARAMS ((rtx, int, int, int *));
268 static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
269 static int rs6000_adjust_priority PARAMS ((rtx, int));
270 static int rs6000_issue_rate PARAMS ((void));
271 static int rs6000_use_sched_lookahead PARAMS ((void));
273 static void rs6000_init_builtins PARAMS ((void));
274 static rtx rs6000_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
275 static rtx rs6000_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
276 static rtx rs6000_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
277 static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
278 static void altivec_init_builtins PARAMS ((void));
279 static void rs6000_common_init_builtins PARAMS ((void));
281 static void enable_mask_for_builtins PARAMS ((struct builtin_description *,
282 int, enum rs6000_builtins,
283 enum rs6000_builtins));
284 static void spe_init_builtins PARAMS ((void));
285 static rtx spe_expand_builtin PARAMS ((tree, rtx, bool *));
286 static rtx spe_expand_predicate_builtin PARAMS ((enum insn_code, tree, rtx));
287 static rtx spe_expand_evsel_builtin PARAMS ((enum insn_code, tree, rtx));
288 static int rs6000_emit_int_cmove PARAMS ((rtx, rtx, rtx, rtx));
290 static rtx altivec_expand_builtin PARAMS ((tree, rtx, bool *));
291 static rtx altivec_expand_ld_builtin PARAMS ((tree, rtx, bool *));
292 static rtx altivec_expand_st_builtin PARAMS ((tree, rtx, bool *));
293 static rtx altivec_expand_dst_builtin PARAMS ((tree, rtx, bool *));
294 static rtx altivec_expand_abs_builtin PARAMS ((enum insn_code, tree, rtx));
295 static rtx altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx));
296 static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree));
297 static void rs6000_parse_abi_options PARAMS ((void));
298 static void rs6000_parse_alignment_option PARAMS ((void));
299 static void rs6000_parse_tls_size_option PARAMS ((void));
300 static void rs6000_parse_yes_no_option (const char *, const char *, int *);
301 static int first_altivec_reg_to_save PARAMS ((void));
302 static unsigned int compute_vrsave_mask PARAMS ((void));
303 static void is_altivec_return_reg PARAMS ((rtx, void *));
304 static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
305 int easy_vector_constant PARAMS ((rtx, enum machine_mode));
306 static int easy_vector_same PARAMS ((rtx, enum machine_mode));
307 static bool is_ev64_opaque_type PARAMS ((tree));
308 static rtx rs6000_dwarf_register_span PARAMS ((rtx));
309 static rtx rs6000_legitimize_tls_address PARAMS ((rtx, enum tls_model));
310 static rtx rs6000_tls_get_addr PARAMS ((void));
311 static rtx rs6000_got_sym PARAMS ((void));
312 static inline int rs6000_tls_symbol_ref_1 PARAMS ((rtx *, void *));
313 static const char *rs6000_get_some_local_dynamic_name PARAMS ((void));
314 static int rs6000_get_some_local_dynamic_name_1 PARAMS ((rtx *, void *));
315 static rtx rs6000_complex_function_value (enum machine_mode);
317 /* Hash table stuff for keeping track of TOC entries. */
319 struct toc_hash_struct GTY(())
321 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
322 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
323 rtx key;
324 enum machine_mode key_mode;
325 int labelno;
328 static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
330 /* Default register names. */
331 char rs6000_reg_names[][8] =
333 "0", "1", "2", "3", "4", "5", "6", "7",
334 "8", "9", "10", "11", "12", "13", "14", "15",
335 "16", "17", "18", "19", "20", "21", "22", "23",
336 "24", "25", "26", "27", "28", "29", "30", "31",
337 "0", "1", "2", "3", "4", "5", "6", "7",
338 "8", "9", "10", "11", "12", "13", "14", "15",
339 "16", "17", "18", "19", "20", "21", "22", "23",
340 "24", "25", "26", "27", "28", "29", "30", "31",
341 "mq", "lr", "ctr","ap",
342 "0", "1", "2", "3", "4", "5", "6", "7",
343 "xer",
344 /* AltiVec registers. */
345 "0", "1", "2", "3", "4", "5", "6", "7",
346 "8", "9", "10", "11", "12", "13", "14", "15",
347 "16", "17", "18", "19", "20", "21", "22", "23",
348 "24", "25", "26", "27", "28", "29", "30", "31",
349 "vrsave", "vscr",
350 /* SPE registers. */
351 "spe_acc", "spefscr"
354 #ifdef TARGET_REGNAMES
355 static const char alt_reg_names[][8] =
357 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
358 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
359 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
360 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
361 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
362 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
363 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
364 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
365 "mq", "lr", "ctr", "ap",
366 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
367 "xer",
368 /* AltiVec registers. */
369 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
370 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
371 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
372 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
373 "vrsave", "vscr",
374 /* SPE registers. */
375 "spe_acc", "spefscr"
377 #endif
379 #ifndef MASK_STRICT_ALIGN
380 #define MASK_STRICT_ALIGN 0
381 #endif
382 #ifndef TARGET_PROFILE_KERNEL
383 #define TARGET_PROFILE_KERNEL 0
384 #endif
386 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
387 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
389 /* Return 1 for a symbol ref for a thread-local storage symbol. */
390 #define RS6000_SYMBOL_REF_TLS_P(RTX) \
391 (GET_CODE (RTX) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (RTX) != 0)
393 /* Initialize the GCC target structure. */
394 #undef TARGET_ATTRIBUTE_TABLE
395 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
396 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
397 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
399 #undef TARGET_ASM_ALIGNED_DI_OP
400 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
402 /* Default unaligned ops are only provided for ELF. Find the ops needed
403 for non-ELF systems. */
404 #ifndef OBJECT_FORMAT_ELF
405 #if TARGET_XCOFF
406 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
407 64-bit targets. */
408 #undef TARGET_ASM_UNALIGNED_HI_OP
409 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
410 #undef TARGET_ASM_UNALIGNED_SI_OP
411 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
412 #undef TARGET_ASM_UNALIGNED_DI_OP
413 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
414 #else
415 /* For Darwin. */
416 #undef TARGET_ASM_UNALIGNED_HI_OP
417 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
418 #undef TARGET_ASM_UNALIGNED_SI_OP
419 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
420 #endif
421 #endif
423 /* This hook deals with fixups for relocatable code and DI-mode objects
424 in 64-bit code. */
425 #undef TARGET_ASM_INTEGER
426 #define TARGET_ASM_INTEGER rs6000_assemble_integer
428 #ifdef HAVE_GAS_HIDDEN
429 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
430 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
431 #endif
433 #undef TARGET_HAVE_TLS
434 #define TARGET_HAVE_TLS HAVE_AS_TLS
436 #undef TARGET_CANNOT_FORCE_CONST_MEM
437 #define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
439 #undef TARGET_ASM_FUNCTION_PROLOGUE
440 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
441 #undef TARGET_ASM_FUNCTION_EPILOGUE
442 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
444 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
445 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE rs6000_use_dfa_pipeline_interface
446 #undef TARGET_SCHED_VARIABLE_ISSUE
447 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
449 #undef TARGET_SCHED_ISSUE_RATE
450 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
451 #undef TARGET_SCHED_ADJUST_COST
452 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
453 #undef TARGET_SCHED_ADJUST_PRIORITY
454 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
456 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
457 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
459 #undef TARGET_INIT_BUILTINS
460 #define TARGET_INIT_BUILTINS rs6000_init_builtins
462 #undef TARGET_EXPAND_BUILTIN
463 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
465 #if TARGET_MACHO
466 #undef TARGET_BINDS_LOCAL_P
467 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
468 #endif
470 #undef TARGET_ASM_OUTPUT_MI_THUNK
471 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
473 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
474 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
476 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
477 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
479 #undef TARGET_RTX_COSTS
480 #define TARGET_RTX_COSTS rs6000_rtx_costs
481 #undef TARGET_ADDRESS_COST
482 #define TARGET_ADDRESS_COST hook_int_rtx_0
484 #undef TARGET_VECTOR_OPAQUE_P
485 #define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type
487 #undef TARGET_DWARF_REGISTER_SPAN
488 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
490 struct gcc_target targetm = TARGET_INITIALIZER;
/* Override command line options.  Mostly we process the processor
   type and sometimes adjust other TARGET_ options.

   DEFAULT_CPU is the cpu name configured into the compiler (may be
   null); it seeds rs6000_select[0] before the -mcpu=/-mtune= entries
   are examined.  */

void
rs6000_override_options (default_cpu)
     const char *default_cpu;
{
  size_t i, j;
  struct rs6000_cpu_select *ptr;

  /* Simplify the entries below by making a mask for any POWER
     variant and any PowerPC variant.  */

#define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
#define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
		       | MASK_PPC_GFXOPT | MASK_POWERPC64)
#define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)

  /* Table mapping each canonical -mcpu=/-mtune= name to its processor
     enum value and the target_flags bits it turns on and off.  */
  static struct ptt
    {
      const char *const name;		/* Canonical processor name.  */
      const enum processor_type processor; /* Processor type enum value.  */
      const int target_enable;	/* Target flags to enable.  */
      const int target_disable;	/* Target flags to disable.  */
    } const processor_target_table[]
      = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_MASKS},
	 {"power", PROCESSOR_POWER,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"power2", PROCESSOR_POWER,
	    MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
	    POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"power3", PROCESSOR_PPC630,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS},
	 {"power4", PROCESSOR_POWER4,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS},
	 {"powerpc", PROCESSOR_POWERPC,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"powerpc64", PROCESSOR_POWERPC64,
	    MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS},
	 {"rios", PROCESSOR_RIOS1,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rios1", PROCESSOR_RIOS1,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rsc", PROCESSOR_PPC601,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rsc1", PROCESSOR_PPC601,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rios2", PROCESSOR_RIOS2,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
	    POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rs64a", PROCESSOR_RS64A,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS},
	 {"401", PROCESSOR_PPC403,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"403", PROCESSOR_PPC403,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"405", PROCESSOR_PPC405,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"405fp", PROCESSOR_PPC405,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"440", PROCESSOR_PPC440,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"440fp", PROCESSOR_PPC440,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"505", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"601", PROCESSOR_PPC601,
	    MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"602", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"603", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"603e", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"ec603e", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"604", PROCESSOR_PPC604,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"604e", PROCESSOR_PPC604e,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"620", PROCESSOR_PPC620,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS},
	 {"630", PROCESSOR_PPC630,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS},
	 {"740", PROCESSOR_PPC750,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"750", PROCESSOR_PPC750,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"7400", PROCESSOR_PPC7400,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"7450", PROCESSOR_PPC7450,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"8540", PROCESSOR_PPC8540,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"801", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"821", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"823", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"860", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64}};

  const size_t ptt_size = ARRAY_SIZE (processor_target_table);

  /* Save current -mmultiple/-mno-multiple status.  */
  int multiple = TARGET_MULTIPLE;
  /* Save current -mstring/-mno-string status.  */
  int string = TARGET_STRING;

  /* Identify the processor type.  */
  rs6000_select[0].string = default_cpu;
  rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;

  /* Walk the configured-default, -mcpu=, and -mtune= entries; each set
     entry must name a row of processor_target_table.  */
  for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
    {
      ptr = &rs6000_select[i];
      if (ptr->string != (char *)0 && ptr->string[0] != '\0')
	{
	  for (j = 0; j < ptt_size; j++)
	    if (! strcmp (ptr->string, processor_target_table[j].name))
	      {
		if (ptr->set_tune_p)
		  rs6000_cpu = processor_target_table[j].processor;

		if (ptr->set_arch_p)
		  {
		    target_flags |= processor_target_table[j].target_enable;
		    target_flags &= ~processor_target_table[j].target_disable;
		  }
		break;
	      }

	  if (j == ptt_size)
	    error ("bad value (%s) for %s switch", ptr->string, ptr->name);
	}
    }

  /* The e500 always has isel available.  */
  if (TARGET_E500)
    rs6000_isel = 1;

  /* If we are optimizing big endian systems for space, use the load/store
     multiple and string instructions.  */
  if (BYTES_BIG_ENDIAN && optimize_size)
    target_flags |= MASK_MULTIPLE | MASK_STRING;

  /* If -mmultiple or -mno-multiple was explicitly used, don't
     override with the processor default */
  if ((target_flags_explicit & MASK_MULTIPLE) != 0)
    target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;

  /* If -mstring or -mno-string was explicitly used, don't override
     with the processor default.  */
  if ((target_flags_explicit & MASK_STRING) != 0)
    target_flags = (target_flags & ~MASK_STRING) | string;

  /* Don't allow -mmultiple or -mstring on little endian systems
     unless the cpu is a 750, because the hardware doesn't support the
     instructions used in little endian mode, and causes an alignment
     trap.  The 750 does not cause an alignment trap (except when the
     target is unaligned).  */

  if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
    {
      if (TARGET_MULTIPLE)
	{
	  target_flags &= ~MASK_MULTIPLE;
	  if ((target_flags_explicit & MASK_MULTIPLE) != 0)
	    warning ("-mmultiple is not supported on little endian systems");
	}

      if (TARGET_STRING)
	{
	  target_flags &= ~MASK_STRING;
	  if ((target_flags_explicit & MASK_STRING) != 0)
	    warning ("-mstring is not supported on little endian systems");
	}
    }

  /* Set debug flags from the -mdebug= string.  */
  if (rs6000_debug_name)
    {
      if (! strcmp (rs6000_debug_name, "all"))
	rs6000_debug_stack = rs6000_debug_arg = 1;
      else if (! strcmp (rs6000_debug_name, "stack"))
	rs6000_debug_stack = 1;
      else if (! strcmp (rs6000_debug_name, "arg"))
	rs6000_debug_arg = 1;
      else
	error ("unknown -mdebug-%s switch", rs6000_debug_name);
    }

  /* Decode the -mtraceback= string; only a prefix is compared, so
     e.g. "partial" matches "part" and "none" matches "no".  */
  if (rs6000_traceback_name)
    {
      if (! strncmp (rs6000_traceback_name, "full", 4))
	rs6000_traceback = traceback_full;
      else if (! strncmp (rs6000_traceback_name, "part", 4))
	rs6000_traceback = traceback_part;
      else if (! strncmp (rs6000_traceback_name, "no", 2))
	rs6000_traceback = traceback_none;
      else
	error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
	       rs6000_traceback_name);
    }

  /* Set size of long double; only 64 and 128 bits are accepted.  */
  rs6000_long_double_type_size = 64;
  if (rs6000_long_double_size_string)
    {
      char *tail;
      int size = strtol (rs6000_long_double_size_string, &tail, 10);
      if (*tail != '\0' || (size != 64 && size != 128))
	error ("Unknown switch -mlong-double-%s",
	       rs6000_long_double_size_string);
      else
	rs6000_long_double_type_size = size;
    }

  /* Handle -mabi= options.  */
  rs6000_parse_abi_options ();

  /* Handle -malign-XXXXX option.  */
  rs6000_parse_alignment_option ();

  /* Handle generic -mFOO=YES/NO options.  */
  rs6000_parse_yes_no_option ("vrsave", rs6000_altivec_vrsave_string,
			      &rs6000_altivec_vrsave);
  rs6000_parse_yes_no_option ("isel", rs6000_isel_string,
			      &rs6000_isel);
  rs6000_parse_yes_no_option ("spe", rs6000_spe_string, &rs6000_spe);
  rs6000_parse_yes_no_option ("float-gprs", rs6000_float_gprs_string,
			      &rs6000_float_gprs);

  /* Handle -mtls-size option.  */
  rs6000_parse_tls_size_option ();

#ifdef SUBTARGET_OVERRIDE_OPTIONS
  SUBTARGET_OVERRIDE_OPTIONS;
#endif
#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
  SUBSUBTARGET_OVERRIDE_OPTIONS;
#endif

  if (TARGET_E500)
    {
      /* The e500 does not have string instructions, and we set
	 MASK_STRING above when optimizing for size.  */
      if ((target_flags & MASK_STRING) != 0)
	target_flags = target_flags & ~MASK_STRING;

      /* No SPE means 64-bit long doubles, even if an E500.  */
      if (rs6000_spe_string != 0
	  && !strcmp (rs6000_spe_string, "no"))
	rs6000_long_double_type_size = 64;
    }
  else if (rs6000_select[1].string != NULL)
    {
      /* For the powerpc-eabispe configuration, we set all these by
	 default, so let's unset them if we manually set another
	 CPU that is not the E500.  */
      if (rs6000_abi_string == 0)
	rs6000_spe_abi = 0;
      if (rs6000_spe_string == 0)
	rs6000_spe = 0;
      if (rs6000_float_gprs_string == 0)
	rs6000_float_gprs = 0;
      if (rs6000_isel_string == 0)
	rs6000_isel = 0;
      if (rs6000_long_double_size_string == 0)
	rs6000_long_double_type_size = 64;
    }

  /* Handle -m(no-)longcall option.  This is a bit of a cheap hack,
     using TARGET_OPTIONS to handle a toggle switch, but we're out of
     bits in target_flags so TARGET_SWITCHES cannot be used.
     Assumption here is that rs6000_longcall_switch points into the
     text of the complete option, rather than being a copy, so we can
     scan back for the presence or absence of the no- modifier.  */
  if (rs6000_longcall_switch)
    {
      const char *base = rs6000_longcall_switch;
      while (base[-1] != 'm') base--;

      if (*rs6000_longcall_switch != '\0')
	error ("invalid option `%s'", base);
      rs6000_default_long_calls = (base[0] != 'n');
    }

#ifdef TARGET_REGNAMES
  /* If the user desires alternate register names, copy in the
     alternate names now.  */
  if (TARGET_REGNAMES)
    memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
#endif

  /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
     If -maix-struct-return or -msvr4-struct-return was explicitly
     used, don't override with the ABI default.  */
  if ((target_flags_explicit & MASK_AIX_STRUCT_RET) == 0)
    {
      if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
	target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
      else
	target_flags |= MASK_AIX_STRUCT_RET;
    }

  if (TARGET_LONG_DOUBLE_128
      && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
    real_format_for_mode[TFmode - QFmode] = &ibm_extended_format;

  /* Allocate an alias set for register saves & restores from stack.  */
  rs6000_sr_alias_set = new_alias_set ();

  if (TARGET_TOC)
    ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);

  /* We can only guarantee the availability of DI pseudo-ops when
     assembling for 64-bit targets.  */
  if (!TARGET_64BIT)
    {
      targetm.asm_out.aligned_op.di = NULL;
      targetm.asm_out.unaligned_op.di = NULL;
    }

  /* Set maximum branch target alignment at two instructions, eight bytes.  */
  align_jumps_max_skip = 8;
  align_loops_max_skip = 8;

  /* Arrange to save and restore machine status around nested functions.  */
  init_machine_status = rs6000_init_machine_status;
}
/* Handle generic options of the form -mfoo=yes/no.
   NAME is the option name, VALUE its value.  Store 1 through FLAG for
   "yes", 0 for "no"; complain about anything else.  A null VALUE means
   the option was not given, and FLAG is left untouched.  */
static void
rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
{
  if (value == 0)
    return;

  if (!strcmp (value, "yes"))
    *flag = 1;
  else if (!strcmp (value, "no"))
    *flag = 0;
  else
    error ("unknown -m%s= option specified: '%s'", name, value);
}
878 /* Handle -mabi= options. */
879 static void
880 rs6000_parse_abi_options ()
882 if (rs6000_abi_string == 0)
883 return;
884 else if (! strcmp (rs6000_abi_string, "altivec"))
885 rs6000_altivec_abi = 1;
886 else if (! strcmp (rs6000_abi_string, "no-altivec"))
887 rs6000_altivec_abi = 0;
888 else if (! strcmp (rs6000_abi_string, "spe"))
890 rs6000_spe_abi = 1;
891 if (!TARGET_SPE_ABI)
892 error ("not configured for ABI: '%s'", rs6000_abi_string);
895 else if (! strcmp (rs6000_abi_string, "no-spe"))
896 rs6000_spe_abi = 0;
897 else
898 error ("unknown ABI specified: '%s'", rs6000_abi_string);
901 /* Handle -malign-XXXXXX options. */
902 static void
903 rs6000_parse_alignment_option ()
905 if (rs6000_alignment_string == 0
906 || ! strcmp (rs6000_alignment_string, "power"))
907 rs6000_alignment_flags = MASK_ALIGN_POWER;
908 else if (! strcmp (rs6000_alignment_string, "natural"))
909 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
910 else
911 error ("unknown -malign-XXXXX option specified: '%s'",
912 rs6000_alignment_string);
915 /* Validate and record the size specified with the -mtls-size option. */
917 static void
918 rs6000_parse_tls_size_option ()
920 if (rs6000_tls_size_string == 0)
921 return;
922 else if (strcmp (rs6000_tls_size_string, "16") == 0)
923 rs6000_tls_size = 16;
924 else if (strcmp (rs6000_tls_size_string, "32") == 0)
925 rs6000_tls_size = 32;
926 else if (strcmp (rs6000_tls_size_string, "64") == 0)
927 rs6000_tls_size = 64;
928 else
929 error ("bad value `%s' for -mtls-size switch", rs6000_tls_size_string);
932 void
933 optimization_options (level, size)
934 int level ATTRIBUTE_UNUSED;
935 int size ATTRIBUTE_UNUSED;
939 /* Do anything needed at the start of the asm file. */
941 void
942 rs6000_file_start (file, default_cpu)
943 FILE *file;
944 const char *default_cpu;
946 size_t i;
947 char buffer[80];
948 const char *start = buffer;
949 struct rs6000_cpu_select *ptr;
951 if (flag_verbose_asm)
953 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
954 rs6000_select[0].string = default_cpu;
956 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
958 ptr = &rs6000_select[i];
959 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
961 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
962 start = "";
966 #ifdef USING_ELFOS_H
967 switch (rs6000_sdata)
969 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
970 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
971 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
972 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
975 if (rs6000_sdata && g_switch_value)
977 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
978 g_switch_value);
979 start = "";
981 #endif
983 if (*start == '\0')
984 putc ('\n', file);
988 /* Return nonzero if this function is known to have a null epilogue. */
991 direct_return ()
993 if (reload_completed)
995 rs6000_stack_t *info = rs6000_stack_info ();
997 if (info->first_gp_reg_save == 32
998 && info->first_fp_reg_save == 64
999 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
1000 && ! info->lr_save_p
1001 && ! info->cr_save_p
1002 && info->vrsave_mask == 0
1003 && ! info->push_p)
1004 return 1;
1007 return 0;
1010 /* Returns 1 always. */
1013 any_operand (op, mode)
1014 rtx op ATTRIBUTE_UNUSED;
1015 enum machine_mode mode ATTRIBUTE_UNUSED;
1017 return 1;
1020 /* Returns 1 if op is the count register. */
1022 count_register_operand (op, mode)
1023 rtx op;
1024 enum machine_mode mode ATTRIBUTE_UNUSED;
1026 if (GET_CODE (op) != REG)
1027 return 0;
1029 if (REGNO (op) == COUNT_REGISTER_REGNUM)
1030 return 1;
1032 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
1033 return 1;
1035 return 0;
1038 /* Returns 1 if op is an altivec register. */
1040 altivec_register_operand (op, mode)
1041 rtx op;
1042 enum machine_mode mode ATTRIBUTE_UNUSED;
1045 return (register_operand (op, mode)
1046 && (GET_CODE (op) != REG
1047 || REGNO (op) > FIRST_PSEUDO_REGISTER
1048 || ALTIVEC_REGNO_P (REGNO (op))));
1052 xer_operand (op, mode)
1053 rtx op;
1054 enum machine_mode mode ATTRIBUTE_UNUSED;
1056 if (GET_CODE (op) != REG)
1057 return 0;
1059 if (XER_REGNO_P (REGNO (op)))
1060 return 1;
1062 return 0;
1065 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
1066 by such constants completes more quickly. */
1069 s8bit_cint_operand (op, mode)
1070 rtx op;
1071 enum machine_mode mode ATTRIBUTE_UNUSED;
1073 return ( GET_CODE (op) == CONST_INT
1074 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
1077 /* Return 1 if OP is a constant that can fit in a D field. */
1080 short_cint_operand (op, mode)
1081 rtx op;
1082 enum machine_mode mode ATTRIBUTE_UNUSED;
1084 return (GET_CODE (op) == CONST_INT
1085 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
1088 /* Similar for an unsigned D field. */
1091 u_short_cint_operand (op, mode)
1092 rtx op;
1093 enum machine_mode mode ATTRIBUTE_UNUSED;
1095 return (GET_CODE (op) == CONST_INT
1096 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
1099 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
1102 non_short_cint_operand (op, mode)
1103 rtx op;
1104 enum machine_mode mode ATTRIBUTE_UNUSED;
1106 return (GET_CODE (op) == CONST_INT
1107 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
1110 /* Returns 1 if OP is a CONST_INT that is a positive value
1111 and an exact power of 2. */
1114 exact_log2_cint_operand (op, mode)
1115 rtx op;
1116 enum machine_mode mode ATTRIBUTE_UNUSED;
1118 return (GET_CODE (op) == CONST_INT
1119 && INTVAL (op) > 0
1120 && exact_log2 (INTVAL (op)) >= 0);
1123 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
1124 ctr, or lr). */
1127 gpc_reg_operand (op, mode)
1128 rtx op;
1129 enum machine_mode mode;
1131 return (register_operand (op, mode)
1132 && (GET_CODE (op) != REG
1133 || (REGNO (op) >= ARG_POINTER_REGNUM
1134 && !XER_REGNO_P (REGNO (op)))
1135 || REGNO (op) < MQ_REGNO));
1138 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1139 CR field. */
1142 cc_reg_operand (op, mode)
1143 rtx op;
1144 enum machine_mode mode;
1146 return (register_operand (op, mode)
1147 && (GET_CODE (op) != REG
1148 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1149 || CR_REGNO_P (REGNO (op))));
1152 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1153 CR field that isn't CR0. */
1156 cc_reg_not_cr0_operand (op, mode)
1157 rtx op;
1158 enum machine_mode mode;
1160 return (register_operand (op, mode)
1161 && (GET_CODE (op) != REG
1162 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1163 || CR_REGNO_NOT_CR0_P (REGNO (op))));
1166 /* Returns 1 if OP is either a constant integer valid for a D-field or
1167 a non-special register. If a register, it must be in the proper
1168 mode unless MODE is VOIDmode. */
1171 reg_or_short_operand (op, mode)
1172 rtx op;
1173 enum machine_mode mode;
1175 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1178 /* Similar, except check if the negation of the constant would be
1179 valid for a D-field. */
1182 reg_or_neg_short_operand (op, mode)
1183 rtx op;
1184 enum machine_mode mode;
1186 if (GET_CODE (op) == CONST_INT)
1187 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
1189 return gpc_reg_operand (op, mode);
1192 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1193 a non-special register. If a register, it must be in the proper
1194 mode unless MODE is VOIDmode. */
1197 reg_or_aligned_short_operand (op, mode)
1198 rtx op;
1199 enum machine_mode mode;
1201 if (gpc_reg_operand (op, mode))
1202 return 1;
1203 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1204 return 1;
1206 return 0;
1210 /* Return 1 if the operand is either a register or an integer whose
1211 high-order 16 bits are zero. */
1214 reg_or_u_short_operand (op, mode)
1215 rtx op;
1216 enum machine_mode mode;
1218 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1221 /* Return 1 is the operand is either a non-special register or ANY
1222 constant integer. */
1225 reg_or_cint_operand (op, mode)
1226 rtx op;
1227 enum machine_mode mode;
1229 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
1232 /* Return 1 is the operand is either a non-special register or ANY
1233 32-bit signed constant integer. */
1236 reg_or_arith_cint_operand (op, mode)
1237 rtx op;
1238 enum machine_mode mode;
1240 return (gpc_reg_operand (op, mode)
1241 || (GET_CODE (op) == CONST_INT
1242 #if HOST_BITS_PER_WIDE_INT != 32
1243 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
1244 < (unsigned HOST_WIDE_INT) 0x100000000ll)
1245 #endif
1249 /* Return 1 is the operand is either a non-special register or a 32-bit
1250 signed constant integer valid for 64-bit addition. */
1253 reg_or_add_cint64_operand (op, mode)
1254 rtx op;
1255 enum machine_mode mode;
1257 return (gpc_reg_operand (op, mode)
1258 || (GET_CODE (op) == CONST_INT
1259 #if HOST_BITS_PER_WIDE_INT == 32
1260 && INTVAL (op) < 0x7fff8000
1261 #else
1262 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
1263 < 0x100000000ll)
1264 #endif
1268 /* Return 1 is the operand is either a non-special register or a 32-bit
1269 signed constant integer valid for 64-bit subtraction. */
1272 reg_or_sub_cint64_operand (op, mode)
1273 rtx op;
1274 enum machine_mode mode;
1276 return (gpc_reg_operand (op, mode)
1277 || (GET_CODE (op) == CONST_INT
1278 #if HOST_BITS_PER_WIDE_INT == 32
1279 && (- INTVAL (op)) < 0x7fff8000
1280 #else
1281 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
1282 < 0x100000000ll)
1283 #endif
1287 /* Return 1 is the operand is either a non-special register or ANY
1288 32-bit unsigned constant integer. */
1291 reg_or_logical_cint_operand (op, mode)
1292 rtx op;
1293 enum machine_mode mode;
1295 if (GET_CODE (op) == CONST_INT)
1297 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
1299 if (GET_MODE_BITSIZE (mode) <= 32)
1300 abort ();
1302 if (INTVAL (op) < 0)
1303 return 0;
1306 return ((INTVAL (op) & GET_MODE_MASK (mode)
1307 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
1309 else if (GET_CODE (op) == CONST_DOUBLE)
1311 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
1312 || mode != DImode)
1313 abort ();
1315 return CONST_DOUBLE_HIGH (op) == 0;
1317 else
1318 return gpc_reg_operand (op, mode);
1321 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1324 got_operand (op, mode)
1325 rtx op;
1326 enum machine_mode mode ATTRIBUTE_UNUSED;
1328 return (GET_CODE (op) == SYMBOL_REF
1329 || GET_CODE (op) == CONST
1330 || GET_CODE (op) == LABEL_REF);
1333 /* Return 1 if the operand is a simple references that can be loaded via
1334 the GOT (labels involving addition aren't allowed). */
1337 got_no_const_operand (op, mode)
1338 rtx op;
1339 enum machine_mode mode ATTRIBUTE_UNUSED;
1341 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1344 /* Return the number of instructions it takes to form a constant in an
1345 integer register. */
1347 static int
1348 num_insns_constant_wide (value)
1349 HOST_WIDE_INT value;
1351 /* signed constant loadable with {cal|addi} */
1352 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1353 return 1;
1355 /* constant loadable with {cau|addis} */
1356 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1357 return 1;
1359 #if HOST_BITS_PER_WIDE_INT == 64
1360 else if (TARGET_POWERPC64)
1362 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
1363 HOST_WIDE_INT high = value >> 31;
1365 if (high == 0 || high == -1)
1366 return 2;
1368 high >>= 1;
1370 if (low == 0)
1371 return num_insns_constant_wide (high) + 1;
1372 else
1373 return (num_insns_constant_wide (high)
1374 + num_insns_constant_wide (low) + 1);
1376 #endif
1378 else
1379 return 2;
1383 num_insns_constant (op, mode)
1384 rtx op;
1385 enum machine_mode mode;
1387 if (GET_CODE (op) == CONST_INT)
1389 #if HOST_BITS_PER_WIDE_INT == 64
1390 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1391 && mask64_operand (op, mode))
1392 return 2;
1393 else
1394 #endif
1395 return num_insns_constant_wide (INTVAL (op));
1398 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
1400 long l;
1401 REAL_VALUE_TYPE rv;
1403 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1404 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1405 return num_insns_constant_wide ((HOST_WIDE_INT) l);
1408 else if (GET_CODE (op) == CONST_DOUBLE)
1410 HOST_WIDE_INT low;
1411 HOST_WIDE_INT high;
1412 long l[2];
1413 REAL_VALUE_TYPE rv;
1414 int endian = (WORDS_BIG_ENDIAN == 0);
1416 if (mode == VOIDmode || mode == DImode)
1418 high = CONST_DOUBLE_HIGH (op);
1419 low = CONST_DOUBLE_LOW (op);
1421 else
1423 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1424 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
1425 high = l[endian];
1426 low = l[1 - endian];
1429 if (TARGET_32BIT)
1430 return (num_insns_constant_wide (low)
1431 + num_insns_constant_wide (high));
1433 else
1435 if (high == 0 && low >= 0)
1436 return num_insns_constant_wide (low);
1438 else if (high == -1 && low < 0)
1439 return num_insns_constant_wide (low);
1441 else if (mask64_operand (op, mode))
1442 return 2;
1444 else if (low == 0)
1445 return num_insns_constant_wide (high) + 1;
1447 else
1448 return (num_insns_constant_wide (high)
1449 + num_insns_constant_wide (low) + 1);
1453 else
1454 abort ();
1457 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1458 register with one instruction per word. We only do this if we can
1459 safely read CONST_DOUBLE_{LOW,HIGH}. */
1462 easy_fp_constant (op, mode)
1463 rtx op;
1464 enum machine_mode mode;
1466 if (GET_CODE (op) != CONST_DOUBLE
1467 || GET_MODE (op) != mode
1468 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
1469 return 0;
1471 /* Consider all constants with -msoft-float to be easy. */
1472 if ((TARGET_SOFT_FLOAT || !TARGET_FPRS)
1473 && mode != DImode)
1474 return 1;
1476 /* If we are using V.4 style PIC, consider all constants to be hard. */
1477 if (flag_pic && DEFAULT_ABI == ABI_V4)
1478 return 0;
1480 #ifdef TARGET_RELOCATABLE
1481 /* Similarly if we are using -mrelocatable, consider all constants
1482 to be hard. */
1483 if (TARGET_RELOCATABLE)
1484 return 0;
1485 #endif
1487 if (mode == TFmode)
1489 long k[4];
1490 REAL_VALUE_TYPE rv;
1492 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1493 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
1495 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1496 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1
1497 && num_insns_constant_wide ((HOST_WIDE_INT) k[2]) == 1
1498 && num_insns_constant_wide ((HOST_WIDE_INT) k[3]) == 1);
1501 else if (mode == DFmode)
1503 long k[2];
1504 REAL_VALUE_TYPE rv;
1506 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1507 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1509 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1510 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
1513 else if (mode == SFmode)
1515 long l;
1516 REAL_VALUE_TYPE rv;
1518 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1519 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1521 return num_insns_constant_wide (l) == 1;
1524 else if (mode == DImode)
1525 return ((TARGET_POWERPC64
1526 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
1527 || (num_insns_constant (op, DImode) <= 2));
1529 else if (mode == SImode)
1530 return 1;
1531 else
1532 abort ();
1535 /* Return non zero if all elements of a vector have the same value. */
1537 static int
1538 easy_vector_same (op, mode)
1539 rtx op;
1540 enum machine_mode mode ATTRIBUTE_UNUSED;
1542 int units, i, cst;
1544 units = CONST_VECTOR_NUNITS (op);
1546 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1547 for (i = 1; i < units; ++i)
1548 if (INTVAL (CONST_VECTOR_ELT (op, i)) != cst)
1549 break;
1550 if (i == units)
1551 return 1;
1552 return 0;
1555 /* Return 1 if the operand is a CONST_INT and can be put into a
1556 register without using memory. */
1559 easy_vector_constant (op, mode)
1560 rtx op;
1561 enum machine_mode mode;
1563 int cst, cst2;
1565 if (GET_CODE (op) != CONST_VECTOR
1566 || (!TARGET_ALTIVEC
1567 && !TARGET_SPE))
1568 return 0;
1570 if (zero_constant (op, mode)
1571 && ((TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
1572 || (TARGET_SPE && SPE_VECTOR_MODE (mode))))
1573 return 1;
1575 if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT)
1576 return 0;
1578 if (TARGET_SPE && mode == V1DImode)
1579 return 0;
1581 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1582 cst2 = INTVAL (CONST_VECTOR_ELT (op, 1));
1584 /* Limit SPE vectors to 15 bits signed. These we can generate with:
1585 li r0, CONSTANT1
1586 evmergelo r0, r0, r0
1587 li r0, CONSTANT2
1589 I don't know how efficient it would be to allow bigger constants,
1590 considering we'll have an extra 'ori' for every 'li'. I doubt 5
1591 instructions is better than a 64-bit memory load, but I don't
1592 have the e500 timing specs. */
1593 if (TARGET_SPE && mode == V2SImode
1594 && cst >= -0x7fff && cst <= 0x7fff
1595 && cst2 >= -0x7fff && cst2 <= 0x7fff)
1596 return 1;
1598 if (TARGET_ALTIVEC && EASY_VECTOR_15 (cst, op, mode))
1599 return 1;
1601 if (TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode))
1602 return 1;
1604 return 0;
1607 /* Same as easy_vector_constant but only for EASY_VECTOR_15_ADD_SELF. */
1610 easy_vector_constant_add_self (op, mode)
1611 rtx op;
1612 enum machine_mode mode;
1614 int cst;
1616 if (!easy_vector_constant (op, mode))
1617 return 0;
1619 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1621 return TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode);
1624 const char *
1625 output_vec_const_move (operands)
1626 rtx *operands;
1628 int cst, cst2;
1629 enum machine_mode mode;
1630 rtx dest, vec;
1632 dest = operands[0];
1633 vec = operands[1];
1635 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
1636 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
1637 mode = GET_MODE (dest);
1639 if (TARGET_ALTIVEC)
1641 if (zero_constant (vec, mode))
1642 return "vxor %0,%0,%0";
1643 else if (EASY_VECTOR_15 (cst, vec, mode))
1645 operands[1] = GEN_INT (cst);
1646 switch (mode)
1648 case V4SImode:
1649 return "vspltisw %0,%1";
1650 case V8HImode:
1651 return "vspltish %0,%1";
1652 case V16QImode:
1653 return "vspltisb %0,%1";
1654 default:
1655 abort ();
1658 else if (EASY_VECTOR_15_ADD_SELF (cst, vec, mode))
1659 return "#";
1660 else
1661 abort ();
1664 if (TARGET_SPE)
1666 /* Vector constant 0 is handled as a splitter of V2SI, and in the
1667 pattern of V1DI, V4HI, and V2SF.
1669 FIXME: We should probabl return # and add post reload
1670 splitters for these, but this way is so easy ;-).
1672 operands[1] = GEN_INT (cst);
1673 operands[2] = GEN_INT (cst2);
1674 if (cst == cst2)
1675 return "li %0,%1\n\tevmergelo %0,%0,%0";
1676 else
1677 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
1680 abort ();
1683 /* Return 1 if the operand is the constant 0. This works for scalars
1684 as well as vectors. */
1686 zero_constant (op, mode)
1687 rtx op;
1688 enum machine_mode mode;
1690 return op == CONST0_RTX (mode);
1693 /* Return 1 if the operand is 0.0. */
1695 zero_fp_constant (op, mode)
1696 rtx op;
1697 enum machine_mode mode;
1699 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1702 /* Return 1 if the operand is in volatile memory. Note that during
1703 the RTL generation phase, memory_operand does not return TRUE for
1704 volatile memory references. So this function allows us to
1705 recognize volatile references where its safe. */
1708 volatile_mem_operand (op, mode)
1709 rtx op;
1710 enum machine_mode mode;
1712 if (GET_CODE (op) != MEM)
1713 return 0;
1715 if (!MEM_VOLATILE_P (op))
1716 return 0;
1718 if (mode != GET_MODE (op))
1719 return 0;
1721 if (reload_completed)
1722 return memory_operand (op, mode);
1724 if (reload_in_progress)
1725 return strict_memory_address_p (mode, XEXP (op, 0));
1727 return memory_address_p (mode, XEXP (op, 0));
1730 /* Return 1 if the operand is an offsettable memory operand. */
1733 offsettable_mem_operand (op, mode)
1734 rtx op;
1735 enum machine_mode mode;
1737 return ((GET_CODE (op) == MEM)
1738 && offsettable_address_p (reload_completed || reload_in_progress,
1739 mode, XEXP (op, 0)));
1742 /* Return 1 if the operand is either an easy FP constant (see above) or
1743 memory. */
1746 mem_or_easy_const_operand (op, mode)
1747 rtx op;
1748 enum machine_mode mode;
1750 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1753 /* Return 1 if the operand is either a non-special register or an item
1754 that can be used as the operand of a `mode' add insn. */
1757 add_operand (op, mode)
1758 rtx op;
1759 enum machine_mode mode;
1761 if (GET_CODE (op) == CONST_INT)
1762 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1763 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1765 return gpc_reg_operand (op, mode);
1768 /* Return 1 if OP is a constant but not a valid add_operand. */
1771 non_add_cint_operand (op, mode)
1772 rtx op;
1773 enum machine_mode mode ATTRIBUTE_UNUSED;
1775 return (GET_CODE (op) == CONST_INT
1776 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1777 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1780 /* Return 1 if the operand is a non-special register or a constant that
1781 can be used as the operand of an OR or XOR insn on the RS/6000. */
1784 logical_operand (op, mode)
1785 rtx op;
1786 enum machine_mode mode;
1788 HOST_WIDE_INT opl, oph;
1790 if (gpc_reg_operand (op, mode))
1791 return 1;
1793 if (GET_CODE (op) == CONST_INT)
1795 opl = INTVAL (op) & GET_MODE_MASK (mode);
1797 #if HOST_BITS_PER_WIDE_INT <= 32
1798 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
1799 return 0;
1800 #endif
1802 else if (GET_CODE (op) == CONST_DOUBLE)
1804 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1805 abort ();
1807 opl = CONST_DOUBLE_LOW (op);
1808 oph = CONST_DOUBLE_HIGH (op);
1809 if (oph != 0)
1810 return 0;
1812 else
1813 return 0;
1815 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
1816 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
1819 /* Return 1 if C is a constant that is not a logical operand (as
1820 above), but could be split into one. */
1823 non_logical_cint_operand (op, mode)
1824 rtx op;
1825 enum machine_mode mode;
1827 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1828 && ! logical_operand (op, mode)
1829 && reg_or_logical_cint_operand (op, mode));
1832 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1833 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1834 Reject all ones and all zeros, since these should have been optimized
1835 away and confuse the making of MB and ME. */
1838 mask_operand (op, mode)
1839 rtx op;
1840 enum machine_mode mode ATTRIBUTE_UNUSED;
1842 HOST_WIDE_INT c, lsb;
1844 if (GET_CODE (op) != CONST_INT)
1845 return 0;
1847 c = INTVAL (op);
1849 /* Fail in 64-bit mode if the mask wraps around because the upper
1850 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
1851 if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
1852 return 0;
1854 /* We don't change the number of transitions by inverting,
1855 so make sure we start with the LS bit zero. */
1856 if (c & 1)
1857 c = ~c;
1859 /* Reject all zeros or all ones. */
1860 if (c == 0)
1861 return 0;
1863 /* Find the first transition. */
1864 lsb = c & -c;
1866 /* Invert to look for a second transition. */
1867 c = ~c;
1869 /* Erase first transition. */
1870 c &= -lsb;
1872 /* Find the second transition (if any). */
1873 lsb = c & -c;
1875 /* Match if all the bits above are 1's (or c is zero). */
1876 return c == -lsb;
1879 /* Return 1 for the PowerPC64 rlwinm corner case. */
1882 mask_operand_wrap (op, mode)
1883 rtx op;
1884 enum machine_mode mode ATTRIBUTE_UNUSED;
1886 HOST_WIDE_INT c, lsb;
1888 if (GET_CODE (op) != CONST_INT)
1889 return 0;
1891 c = INTVAL (op);
1893 if ((c & 0x80000001) != 0x80000001)
1894 return 0;
1896 c = ~c;
1897 if (c == 0)
1898 return 0;
1900 lsb = c & -c;
1901 c = ~c;
1902 c &= -lsb;
1903 lsb = c & -c;
1904 return c == -lsb;
1907 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
1908 It is if there are no more than one 1->0 or 0->1 transitions.
1909 Reject all zeros, since zero should have been optimized away and
1910 confuses the making of MB and ME. */
1913 mask64_operand (op, mode)
1914 rtx op;
1915 enum machine_mode mode ATTRIBUTE_UNUSED;
1917 if (GET_CODE (op) == CONST_INT)
1919 HOST_WIDE_INT c, lsb;
1921 c = INTVAL (op);
1923 /* Reject all zeros. */
1924 if (c == 0)
1925 return 0;
1927 /* We don't change the number of transitions by inverting,
1928 so make sure we start with the LS bit zero. */
1929 if (c & 1)
1930 c = ~c;
1932 /* Find the transition, and check that all bits above are 1's. */
1933 lsb = c & -c;
1935 /* Match if all the bits above are 1's (or c is zero). */
1936 return c == -lsb;
1938 return 0;
1941 /* Like mask64_operand, but allow up to three transitions. This
1942 predicate is used by insn patterns that generate two rldicl or
1943 rldicr machine insns. */
1946 mask64_2_operand (op, mode)
1947 rtx op;
1948 enum machine_mode mode ATTRIBUTE_UNUSED;
1950 if (GET_CODE (op) == CONST_INT)
1952 HOST_WIDE_INT c, lsb;
1954 c = INTVAL (op);
1956 /* Disallow all zeros. */
1957 if (c == 0)
1958 return 0;
1960 /* We don't change the number of transitions by inverting,
1961 so make sure we start with the LS bit zero. */
1962 if (c & 1)
1963 c = ~c;
1965 /* Find the first transition. */
1966 lsb = c & -c;
1968 /* Invert to look for a second transition. */
1969 c = ~c;
1971 /* Erase first transition. */
1972 c &= -lsb;
1974 /* Find the second transition. */
1975 lsb = c & -c;
1977 /* Invert to look for a third transition. */
1978 c = ~c;
1980 /* Erase second transition. */
1981 c &= -lsb;
1983 /* Find the third transition (if any). */
1984 lsb = c & -c;
1986 /* Match if all the bits above are 1's (or c is zero). */
1987 return c == -lsb;
1989 return 0;
1992 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
1993 implement ANDing by the mask IN. */
1994 void
1995 build_mask64_2_operands (in, out)
1996 rtx in;
1997 rtx *out;
1999 #if HOST_BITS_PER_WIDE_INT >= 64
2000 unsigned HOST_WIDE_INT c, lsb, m1, m2;
2001 int shift;
2003 if (GET_CODE (in) != CONST_INT)
2004 abort ();
2006 c = INTVAL (in);
2007 if (c & 1)
2009 /* Assume c initially something like 0x00fff000000fffff. The idea
2010 is to rotate the word so that the middle ^^^^^^ group of zeros
2011 is at the MS end and can be cleared with an rldicl mask. We then
2012 rotate back and clear off the MS ^^ group of zeros with a
2013 second rldicl. */
2014 c = ~c; /* c == 0xff000ffffff00000 */
2015 lsb = c & -c; /* lsb == 0x0000000000100000 */
2016 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
2017 c = ~c; /* c == 0x00fff000000fffff */
2018 c &= -lsb; /* c == 0x00fff00000000000 */
2019 lsb = c & -c; /* lsb == 0x0000100000000000 */
2020 c = ~c; /* c == 0xff000fffffffffff */
2021 c &= -lsb; /* c == 0xff00000000000000 */
2022 shift = 0;
2023 while ((lsb >>= 1) != 0)
2024 shift++; /* shift == 44 on exit from loop */
2025 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
2026 m1 = ~m1; /* m1 == 0x000000ffffffffff */
2027 m2 = ~c; /* m2 == 0x00ffffffffffffff */
2029 else
2031 /* Assume c initially something like 0xff000f0000000000. The idea
2032 is to rotate the word so that the ^^^ middle group of zeros
2033 is at the LS end and can be cleared with an rldicr mask. We then
2034 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
2035 a second rldicr. */
2036 lsb = c & -c; /* lsb == 0x0000010000000000 */
2037 m2 = -lsb; /* m2 == 0xffffff0000000000 */
2038 c = ~c; /* c == 0x00fff0ffffffffff */
2039 c &= -lsb; /* c == 0x00fff00000000000 */
2040 lsb = c & -c; /* lsb == 0x0000100000000000 */
2041 c = ~c; /* c == 0xff000fffffffffff */
2042 c &= -lsb; /* c == 0xff00000000000000 */
2043 shift = 0;
2044 while ((lsb >>= 1) != 0)
2045 shift++; /* shift == 44 on exit from loop */
2046 m1 = ~c; /* m1 == 0x00ffffffffffffff */
2047 m1 >>= shift; /* m1 == 0x0000000000000fff */
2048 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
2051 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
2052 masks will be all 1's. We are guaranteed more than one transition. */
2053 out[0] = GEN_INT (64 - shift);
2054 out[1] = GEN_INT (m1);
2055 out[2] = GEN_INT (shift);
2056 out[3] = GEN_INT (m2);
2057 #else
2058 (void)in;
2059 (void)out;
2060 abort ();
2061 #endif
2064 /* Return 1 if the operand is either a non-special register or a constant
2065 that can be used as the operand of a PowerPC64 logical AND insn. */
2068 and64_operand (op, mode)
2069 rtx op;
2070 enum machine_mode mode;
2072 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2073 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
2075 return (logical_operand (op, mode) || mask64_operand (op, mode));
2078 /* Like the above, but also match constants that can be implemented
2079 with two rldicl or rldicr insns. */
2082 and64_2_operand (op, mode)
2083 rtx op;
2084 enum machine_mode mode;
2086 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2087 return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
2089 return logical_operand (op, mode) || mask64_2_operand (op, mode);
2092 /* Return 1 if the operand is either a non-special register or a
2093 constant that can be used as the operand of an RS/6000 logical AND insn. */
2096 and_operand (op, mode)
2097 rtx op;
2098 enum machine_mode mode;
2100 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2101 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
2103 return (logical_operand (op, mode) || mask_operand (op, mode));
2106 /* Return 1 if the operand is a general register or memory operand. */
2109 reg_or_mem_operand (op, mode)
2110 rtx op;
2111 enum machine_mode mode;
2113 return (gpc_reg_operand (op, mode)
2114 || memory_operand (op, mode)
2115 || volatile_mem_operand (op, mode));
2118 /* Return 1 if the operand is a general register or memory operand without
2119 pre_inc or pre_dec which produces invalid form of PowerPC lwa
2120 instruction. */
2123 lwa_operand (op, mode)
2124 rtx op;
2125 enum machine_mode mode;
2127 rtx inner = op;
2129 if (reload_completed && GET_CODE (inner) == SUBREG)
2130 inner = SUBREG_REG (inner);
2132 return gpc_reg_operand (inner, mode)
2133 || (memory_operand (inner, mode)
2134 && GET_CODE (XEXP (inner, 0)) != PRE_INC
2135 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
2136 && (GET_CODE (XEXP (inner, 0)) != PLUS
2137 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
2138 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
2141 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
2144 symbol_ref_operand (op, mode)
2145 rtx op;
2146 enum machine_mode mode;
2148 if (mode != VOIDmode && GET_MODE (op) != mode)
2149 return 0;
2151 return (GET_CODE (op) == SYMBOL_REF
2152 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op)));
2155 /* Return 1 if the operand, used inside a MEM, is a valid first argument
2156 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
2159 call_operand (op, mode)
2160 rtx op;
2161 enum machine_mode mode;
2163 if (mode != VOIDmode && GET_MODE (op) != mode)
2164 return 0;
2166 return (GET_CODE (op) == SYMBOL_REF
2167 || (GET_CODE (op) == REG
2168 && (REGNO (op) == LINK_REGISTER_REGNUM
2169 || REGNO (op) == COUNT_REGISTER_REGNUM
2170 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2173 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
2174 this file. */
2177 current_file_function_operand (op, mode)
2178 rtx op;
2179 enum machine_mode mode ATTRIBUTE_UNUSED;
2181 return (GET_CODE (op) == SYMBOL_REF
2182 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op))
2183 && (SYMBOL_REF_LOCAL_P (op)
2184 || (op == XEXP (DECL_RTL (current_function_decl), 0))));
2187 /* Return 1 if this operand is a valid input for a move insn. */
2190 input_operand (op, mode)
2191 rtx op;
2192 enum machine_mode mode;
2194 /* Memory is always valid. */
2195 if (memory_operand (op, mode))
2196 return 1;
2198 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
2199 if (GET_CODE (op) == CONSTANT_P_RTX)
2200 return 1;
2202 /* For floating-point, easy constants are valid. */
2203 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2204 && CONSTANT_P (op)
2205 && easy_fp_constant (op, mode))
2206 return 1;
2208 /* Allow any integer constant. */
2209 if (GET_MODE_CLASS (mode) == MODE_INT
2210 && (GET_CODE (op) == CONST_INT
2211 || GET_CODE (op) == CONST_DOUBLE))
2212 return 1;
2214 /* Allow easy vector constants. */
2215 if (GET_CODE (op) == CONST_VECTOR
2216 && easy_vector_constant (op, mode))
2217 return 1;
2219 /* For floating-point or multi-word mode, the only remaining valid type
2220 is a register. */
2221 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2222 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2223 return register_operand (op, mode);
2225 /* The only cases left are integral modes one word or smaller (we
2226 do not get called for MODE_CC values). These can be in any
2227 register. */
2228 if (register_operand (op, mode))
2229 return 1;
2231 /* A SYMBOL_REF referring to the TOC is valid. */
2232 if (legitimate_constant_pool_address_p (op))
2233 return 1;
2235 /* A constant pool expression (relative to the TOC) is valid */
2236 if (toc_relative_expr_p (op))
2237 return 1;
2239 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
2240 to be valid. */
2241 if (DEFAULT_ABI == ABI_V4
2242 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
2243 && small_data_operand (op, Pmode))
2244 return 1;
2246 return 0;
2249 /* Return 1 for an operand in small memory on V.4/eabi. */
2252 small_data_operand (op, mode)
2253 rtx op ATTRIBUTE_UNUSED;
2254 enum machine_mode mode ATTRIBUTE_UNUSED;
2256 #if TARGET_ELF
2257 rtx sym_ref;
2259 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
2260 return 0;
2262 if (DEFAULT_ABI != ABI_V4)
2263 return 0;
2265 if (GET_CODE (op) == SYMBOL_REF)
2266 sym_ref = op;
2268 else if (GET_CODE (op) != CONST
2269 || GET_CODE (XEXP (op, 0)) != PLUS
2270 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
2271 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
2272 return 0;
2274 else
2276 rtx sum = XEXP (op, 0);
2277 HOST_WIDE_INT summand;
2279 /* We have to be careful here, because it is the referenced address
2280 that must be 32k from _SDA_BASE_, not just the symbol. */
2281 summand = INTVAL (XEXP (sum, 1));
2282 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
2283 return 0;
2285 sym_ref = XEXP (sum, 0);
2288 return SYMBOL_REF_SMALL_P (sym_ref);
2289 #else
2290 return 0;
2291 #endif
2294 /* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
2296 static int
2297 constant_pool_expr_1 (op, have_sym, have_toc)
2298 rtx op;
2299 int *have_sym;
2300 int *have_toc;
2302 switch (GET_CODE(op))
2304 case SYMBOL_REF:
2305 if (RS6000_SYMBOL_REF_TLS_P (op))
2306 return 0;
2307 else if (CONSTANT_POOL_ADDRESS_P (op))
2309 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2311 *have_sym = 1;
2312 return 1;
2314 else
2315 return 0;
2317 else if (! strcmp (XSTR (op, 0), toc_label_name))
2319 *have_toc = 1;
2320 return 1;
2322 else
2323 return 0;
2324 case PLUS:
2325 case MINUS:
2326 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2327 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
2328 case CONST:
2329 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
2330 case CONST_INT:
2331 return 1;
2332 default:
2333 return 0;
2337 static bool
2338 constant_pool_expr_p (op)
2339 rtx op;
2341 int have_sym = 0;
2342 int have_toc = 0;
2343 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2346 static bool
2347 toc_relative_expr_p (op)
2348 rtx op;
2350 int have_sym = 0;
2351 int have_toc = 0;
2352 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2355 /* SPE offset addressing is limited to 5-bits worth of double words. */
2356 #define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
2358 bool
2359 legitimate_constant_pool_address_p (x)
2360 rtx x;
2362 return (TARGET_TOC
2363 && GET_CODE (x) == PLUS
2364 && GET_CODE (XEXP (x, 0)) == REG
2365 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
2366 && constant_pool_expr_p (XEXP (x, 1)));
2369 static bool
2370 legitimate_small_data_p (mode, x)
2371 enum machine_mode mode;
2372 rtx x;
2374 return (DEFAULT_ABI == ABI_V4
2375 && !flag_pic && !TARGET_TOC
2376 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
2377 && small_data_operand (x, mode));
2380 static bool
2381 legitimate_offset_address_p (mode, x, strict)
2382 enum machine_mode mode;
2383 rtx x;
2384 int strict;
2386 unsigned HOST_WIDE_INT offset, extra;
2388 if (GET_CODE (x) != PLUS)
2389 return false;
2390 if (GET_CODE (XEXP (x, 0)) != REG)
2391 return false;
2392 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2393 return false;
2394 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
2395 return false;
2397 offset = INTVAL (XEXP (x, 1));
2398 extra = 0;
2399 switch (mode)
2401 case V16QImode:
2402 case V8HImode:
2403 case V4SFmode:
2404 case V4SImode:
2405 /* AltiVec vector modes. Only reg+reg addressing is valid here,
2406 which leaves the only valid constant offset of zero, which by
2407 canonicalization rules is also invalid. */
2408 return false;
2410 case V4HImode:
2411 case V2SImode:
2412 case V1DImode:
2413 case V2SFmode:
2414 /* SPE vector modes. */
2415 return SPE_CONST_OFFSET_OK (offset);
2417 case DFmode:
2418 case DImode:
2419 if (TARGET_32BIT)
2420 extra = 4;
2421 else if (offset & 3)
2422 return false;
2423 break;
2425 case TFmode:
2426 case TImode:
2427 if (TARGET_32BIT)
2428 extra = 12;
2429 else if (offset & 3)
2430 return false;
2431 else
2432 extra = 8;
2433 break;
2435 default:
2436 break;
2439 return (offset + extra >= offset) && (offset + extra + 0x8000 < 0x10000);
2442 static bool
2443 legitimate_indexed_address_p (x, strict)
2444 rtx x;
2445 int strict;
2447 rtx op0, op1;
2449 if (GET_CODE (x) != PLUS)
2450 return false;
2451 op0 = XEXP (x, 0);
2452 op1 = XEXP (x, 1);
2454 if (!REG_P (op0) || !REG_P (op1))
2455 return false;
2457 return ((INT_REG_OK_FOR_BASE_P (op0, strict)
2458 && INT_REG_OK_FOR_INDEX_P (op1, strict))
2459 || (INT_REG_OK_FOR_BASE_P (op1, strict)
2460 && INT_REG_OK_FOR_INDEX_P (op0, strict)));
2463 static inline bool
2464 legitimate_indirect_address_p (x, strict)
2465 rtx x;
2466 int strict;
2468 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
2471 static bool
2472 legitimate_lo_sum_address_p (mode, x, strict)
2473 enum machine_mode mode;
2474 rtx x;
2475 int strict;
2477 if (GET_CODE (x) != LO_SUM)
2478 return false;
2479 if (GET_CODE (XEXP (x, 0)) != REG)
2480 return false;
2481 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2482 return false;
2483 x = XEXP (x, 1);
2485 if (TARGET_ELF)
2487 if (DEFAULT_ABI != ABI_AIX && flag_pic)
2488 return false;
2489 if (TARGET_TOC)
2490 return false;
2491 if (GET_MODE_NUNITS (mode) != 1)
2492 return false;
2493 if (GET_MODE_BITSIZE (mode) > 32
2494 && !(TARGET_HARD_FLOAT && TARGET_FPRS && mode == DFmode))
2495 return false;
2497 return CONSTANT_P (x);
2500 return false;
2504 /* Try machine-dependent ways of modifying an illegitimate address
2505 to be legitimate. If we find one, return the new, valid address.
2506 This is used from only one place: `memory_address' in explow.c.
2508 OLDX is the address as it was before break_out_memory_refs was
2509 called. In some cases it is useful to look at this to decide what
2510 needs to be done.
2512 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2514 It is always safe for this function to do nothing. It exists to
2515 recognize opportunities to optimize the output.
2517 On RS/6000, first check for the sum of a register with a constant
2518 integer that is out of range. If so, generate code to add the
2519 constant with the low-order 16 bits masked to the register and force
2520 this result into another register (this can be done with `cau').
2521 Then generate an address of REG+(CONST&0xffff), allowing for the
2522 possibility of bit 16 being a one.
2524 Then check for the sum of a register and something not constant, try to
2525 load the other things into a register and return the sum. */
2528 rs6000_legitimize_address (x, oldx, mode)
2529 rtx x;
2530 rtx oldx ATTRIBUTE_UNUSED;
2531 enum machine_mode mode;
2533 if (GET_CODE (x) == SYMBOL_REF)
2535 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
2536 if (model != 0)
2537 return rs6000_legitimize_tls_address (x, model);
2540 if (GET_CODE (x) == PLUS
2541 && GET_CODE (XEXP (x, 0)) == REG
2542 && GET_CODE (XEXP (x, 1)) == CONST_INT
2543 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
2545 HOST_WIDE_INT high_int, low_int;
2546 rtx sum;
2547 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2548 high_int = INTVAL (XEXP (x, 1)) - low_int;
2549 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
2550 GEN_INT (high_int)), 0);
2551 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
2553 else if (GET_CODE (x) == PLUS
2554 && GET_CODE (XEXP (x, 0)) == REG
2555 && GET_CODE (XEXP (x, 1)) != CONST_INT
2556 && GET_MODE_NUNITS (mode) == 1
2557 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2558 || TARGET_POWERPC64
2559 || (mode != DFmode && mode != TFmode))
2560 && (TARGET_POWERPC64 || mode != DImode)
2561 && mode != TImode)
2563 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
2564 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
2566 else if (ALTIVEC_VECTOR_MODE (mode))
2568 rtx reg;
2570 /* Make sure both operands are registers. */
2571 if (GET_CODE (x) == PLUS)
2572 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
2573 force_reg (Pmode, XEXP (x, 1)));
2575 reg = force_reg (Pmode, x);
2576 return reg;
2578 else if (SPE_VECTOR_MODE (mode))
2580 /* We accept [reg + reg] and [reg + OFFSET]. */
2582 if (GET_CODE (x) == PLUS)
2584 rtx op1 = XEXP (x, 0);
2585 rtx op2 = XEXP (x, 1);
2587 op1 = force_reg (Pmode, op1);
2589 if (GET_CODE (op2) != REG
2590 && (GET_CODE (op2) != CONST_INT
2591 || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
2592 op2 = force_reg (Pmode, op2);
2594 return gen_rtx_PLUS (Pmode, op1, op2);
2597 return force_reg (Pmode, x);
2599 else if (TARGET_ELF
2600 && TARGET_32BIT
2601 && TARGET_NO_TOC
2602 && ! flag_pic
2603 && GET_CODE (x) != CONST_INT
2604 && GET_CODE (x) != CONST_DOUBLE
2605 && CONSTANT_P (x)
2606 && GET_MODE_NUNITS (mode) == 1
2607 && (GET_MODE_BITSIZE (mode) <= 32
2608 || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
2610 rtx reg = gen_reg_rtx (Pmode);
2611 emit_insn (gen_elf_high (reg, (x)));
2612 return gen_rtx_LO_SUM (Pmode, reg, (x));
2614 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
2615 && ! flag_pic
2616 #if TARGET_MACHO
2617 && ! MACHO_DYNAMIC_NO_PIC_P
2618 #endif
2619 && GET_CODE (x) != CONST_INT
2620 && GET_CODE (x) != CONST_DOUBLE
2621 && CONSTANT_P (x)
2622 && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
2623 && mode != DImode
2624 && mode != TImode)
2626 rtx reg = gen_reg_rtx (Pmode);
2627 emit_insn (gen_macho_high (reg, (x)));
2628 return gen_rtx_LO_SUM (Pmode, reg, (x));
2630 else if (TARGET_TOC
2631 && constant_pool_expr_p (x)
2632 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
2634 return create_TOC_reference (x);
2636 else
2637 return NULL_RTX;
2640 /* Construct the SYMBOL_REF for the tls_get_addr function. */
2642 static GTY(()) rtx rs6000_tls_symbol;
2643 static rtx
2644 rs6000_tls_get_addr ()
2646 if (!rs6000_tls_symbol)
2647 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
2649 return rs6000_tls_symbol;
2652 /* Construct the SYMBOL_REF for TLS GOT references. */
2654 static GTY(()) rtx rs6000_got_symbol;
2655 static rtx
2656 rs6000_got_sym ()
2658 if (!rs6000_got_symbol)
2660 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2661 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
2662 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
2665 return rs6000_got_symbol;
2668 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
2669 this (thread-local) address. */
2671 static rtx
2672 rs6000_legitimize_tls_address (addr, model)
2673 rtx addr;
2674 enum tls_model model;
2676 rtx dest, insn;
2678 dest = gen_reg_rtx (Pmode);
2679 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
2681 rtx tlsreg;
2683 if (TARGET_64BIT)
2685 tlsreg = gen_rtx_REG (Pmode, 13);
2686 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
2688 else
2690 tlsreg = gen_rtx_REG (Pmode, 2);
2691 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
2693 emit_insn (insn);
2695 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
2697 rtx tlsreg, tmp;
2699 tmp = gen_reg_rtx (Pmode);
2700 if (TARGET_64BIT)
2702 tlsreg = gen_rtx_REG (Pmode, 13);
2703 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
2705 else
2707 tlsreg = gen_rtx_REG (Pmode, 2);
2708 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
2710 emit_insn (insn);
2711 if (TARGET_64BIT)
2712 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
2713 else
2714 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
2715 emit_insn (insn);
2717 else
2719 rtx r3, got, tga, tmp1, tmp2, eqv;
2721 if (TARGET_64BIT)
2722 got = gen_rtx_REG (Pmode, TOC_REGISTER);
2723 else
2725 if (flag_pic == 1)
2726 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
2727 else
2729 rtx gsym = rs6000_got_sym ();
2730 got = gen_reg_rtx (Pmode);
2731 if (flag_pic == 0)
2732 rs6000_emit_move (got, gsym, Pmode);
2733 else
2735 char buf[30];
2736 static int tls_got_labelno = 0;
2737 rtx tempLR, lab, tmp3, mem;
2738 rtx first, last;
2740 ASM_GENERATE_INTERNAL_LABEL (buf, "LTLS", tls_got_labelno++);
2741 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
2742 tempLR = gen_reg_rtx (Pmode);
2743 tmp1 = gen_reg_rtx (Pmode);
2744 tmp2 = gen_reg_rtx (Pmode);
2745 tmp3 = gen_reg_rtx (Pmode);
2746 mem = gen_rtx_MEM (Pmode, tmp1);
2747 RTX_UNCHANGING_P (mem) = 1;
2749 first = emit_insn (gen_load_toc_v4_PIC_1b (tempLR, lab,
2750 gsym));
2751 emit_move_insn (tmp1, tempLR);
2752 emit_move_insn (tmp2, mem);
2753 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
2754 last = emit_move_insn (got, tmp3);
2755 REG_NOTES (last) = gen_rtx_EXPR_LIST (REG_EQUAL, gsym,
2756 REG_NOTES (last));
2757 REG_NOTES (first) = gen_rtx_INSN_LIST (REG_LIBCALL, last,
2758 REG_NOTES (first));
2759 REG_NOTES (last) = gen_rtx_INSN_LIST (REG_RETVAL, first,
2760 REG_NOTES (last));
2765 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
2767 r3 = gen_rtx_REG (Pmode, 3);
2768 if (TARGET_64BIT)
2769 insn = gen_tls_gd_64 (r3, got, addr);
2770 else
2771 insn = gen_tls_gd_32 (r3, got, addr);
2772 start_sequence ();
2773 emit_insn (insn);
2774 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
2775 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
2776 insn = emit_call_insn (insn);
2777 CONST_OR_PURE_CALL_P (insn) = 1;
2778 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
2779 insn = get_insns ();
2780 end_sequence ();
2781 emit_libcall_block (insn, dest, r3, addr);
2783 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
2785 r3 = gen_rtx_REG (Pmode, 3);
2786 if (TARGET_64BIT)
2787 insn = gen_tls_ld_64 (r3, got);
2788 else
2789 insn = gen_tls_ld_32 (r3, got);
2790 start_sequence ();
2791 emit_insn (insn);
2792 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
2793 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
2794 insn = emit_call_insn (insn);
2795 CONST_OR_PURE_CALL_P (insn) = 1;
2796 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
2797 insn = get_insns ();
2798 end_sequence ();
2799 tmp1 = gen_reg_rtx (Pmode);
2800 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
2801 UNSPEC_TLSLD);
2802 emit_libcall_block (insn, tmp1, r3, eqv);
2803 if (rs6000_tls_size == 16)
2805 if (TARGET_64BIT)
2806 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
2807 else
2808 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
2810 else if (rs6000_tls_size == 32)
2812 tmp2 = gen_reg_rtx (Pmode);
2813 if (TARGET_64BIT)
2814 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
2815 else
2816 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
2817 emit_insn (insn);
2818 if (TARGET_64BIT)
2819 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
2820 else
2821 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
2823 else
2825 tmp2 = gen_reg_rtx (Pmode);
2826 if (TARGET_64BIT)
2827 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
2828 else
2829 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
2830 emit_insn (insn);
2831 insn = gen_rtx_SET (Pmode, dest,
2832 gen_rtx_PLUS (Pmode, tmp2, tmp1));
2834 emit_insn (insn);
2836 else
2838 /* IE, or 64 bit offset LE. */
2839 tmp2 = gen_reg_rtx (Pmode);
2840 if (TARGET_64BIT)
2841 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
2842 else
2843 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
2844 emit_insn (insn);
2845 if (TARGET_64BIT)
2846 insn = gen_tls_tls_64 (dest, tmp2, addr);
2847 else
2848 insn = gen_tls_tls_32 (dest, tmp2, addr);
2849 emit_insn (insn);
2853 return dest;
2856 /* Return 1 if X is a SYMBOL_REF for a TLS symbol. This is used in
2857 instruction definitions. */
2860 rs6000_tls_symbol_ref (x, mode)
2861 rtx x;
2862 enum machine_mode mode ATTRIBUTE_UNUSED;
2864 return RS6000_SYMBOL_REF_TLS_P (x);
2867 /* Return 1 if X contains a thread-local symbol. */
2869 bool
2870 rs6000_tls_referenced_p (x)
2871 rtx x;
2873 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
2876 /* Return 1 if *X is a thread-local symbol. This is the same as
2877 rs6000_tls_symbol_ref except for the type of the unused argument. */
2879 static inline int
2880 rs6000_tls_symbol_ref_1 (x, data)
2881 rtx *x;
2882 void *data ATTRIBUTE_UNUSED;
2884 return RS6000_SYMBOL_REF_TLS_P (*x);
2887 /* The convention appears to be to define this wherever it is used.
2888 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
2889 is now used here. */
2890 #ifndef REG_MODE_OK_FOR_BASE_P
2891 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
2892 #endif
2894 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
2895 replace the input X, or the original X if no replacement is called for.
2896 The output parameter *WIN is 1 if the calling macro should goto WIN,
2897 0 if it should not.
2899 For RS/6000, we wish to handle large displacements off a base
2900 register by splitting the addend across an addiu/addis and the mem insn.
2901 This cuts number of extra insns needed from 3 to 1.
2903 On Darwin, we use this to generate code for floating point constants.
2904 A movsf_low is generated so we wind up with 2 instructions rather than 3.
2905 The Darwin code is inside #if TARGET_MACHO because only then is
2906 machopic_function_base_name() defined. */
2908 rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win)
2909 rtx x;
2910 enum machine_mode mode;
2911 int opnum;
2912 int type;
2913 int ind_levels ATTRIBUTE_UNUSED;
2914 int *win;
2916 /* We must recognize output that we have already generated ourselves. */
2917 if (GET_CODE (x) == PLUS
2918 && GET_CODE (XEXP (x, 0)) == PLUS
2919 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
2920 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2921 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2923 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2924 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
2925 opnum, (enum reload_type)type);
2926 *win = 1;
2927 return x;
2930 #if TARGET_MACHO
2931 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
2932 && GET_CODE (x) == LO_SUM
2933 && GET_CODE (XEXP (x, 0)) == PLUS
2934 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
2935 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
2936 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
2937 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
2938 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
2939 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
2940 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
2942 /* Result of previous invocation of this function on Darwin
2943 floating point constant. */
2944 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2945 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
2946 opnum, (enum reload_type)type);
2947 *win = 1;
2948 return x;
2950 #endif
2951 if (GET_CODE (x) == PLUS
2952 && GET_CODE (XEXP (x, 0)) == REG
2953 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2954 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
2955 && GET_CODE (XEXP (x, 1)) == CONST_INT
2956 && !SPE_VECTOR_MODE (mode)
2957 && !ALTIVEC_VECTOR_MODE (mode))
2959 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
2960 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
2961 HOST_WIDE_INT high
2962 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
2964 /* Check for 32-bit overflow. */
2965 if (high + low != val)
2967 *win = 0;
2968 return x;
2971 /* Reload the high part into a base reg; leave the low part
2972 in the mem directly. */
2974 x = gen_rtx_PLUS (GET_MODE (x),
2975 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
2976 GEN_INT (high)),
2977 GEN_INT (low));
2979 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2980 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
2981 opnum, (enum reload_type)type);
2982 *win = 1;
2983 return x;
2985 #if TARGET_MACHO
2986 if (GET_CODE (x) == SYMBOL_REF
2987 && DEFAULT_ABI == ABI_DARWIN
2988 && !ALTIVEC_VECTOR_MODE (mode)
2989 && flag_pic)
2991 /* Darwin load of floating point constant. */
2992 rtx offset = gen_rtx (CONST, Pmode,
2993 gen_rtx (MINUS, Pmode, x,
2994 gen_rtx (SYMBOL_REF, Pmode,
2995 machopic_function_base_name ())));
2996 x = gen_rtx (LO_SUM, GET_MODE (x),
2997 gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
2998 gen_rtx (HIGH, Pmode, offset)), offset);
2999 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3000 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3001 opnum, (enum reload_type)type);
3002 *win = 1;
3003 return x;
3005 if (GET_CODE (x) == SYMBOL_REF
3006 && DEFAULT_ABI == ABI_DARWIN
3007 && !ALTIVEC_VECTOR_MODE (mode)
3008 && MACHO_DYNAMIC_NO_PIC_P)
3010 /* Darwin load of floating point constant. */
3011 x = gen_rtx (LO_SUM, GET_MODE (x),
3012 gen_rtx (HIGH, Pmode, x), x);
3013 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3014 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3015 opnum, (enum reload_type)type);
3016 *win = 1;
3017 return x;
3019 #endif
3020 if (TARGET_TOC
3021 && constant_pool_expr_p (x)
3022 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
3024 (x) = create_TOC_reference (x);
3025 *win = 1;
3026 return x;
3028 *win = 0;
3029 return x;
3032 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
3033 that is a valid memory address for an instruction.
3034 The MODE argument is the machine mode for the MEM expression
3035 that wants to use this address.
3037 On the RS/6000, there are four valid address: a SYMBOL_REF that
3038 refers to a constant pool entry of an address (or the sum of it
3039 plus a constant), a short (16-bit signed) constant plus a register,
3040 the sum of two registers, or a register indirect, possibly with an
3041 auto-increment. For DFmode and DImode with a constant plus register,
3042 we must ensure that both words are addressable or PowerPC64 with offset
3043 word aligned.
3045 For modes spanning multiple registers (DFmode in 32-bit GPRs,
3046 32-bit DImode, TImode), indexed addressing cannot be used because
3047 adjacent memory cells are accessed by adding word-sized offsets
3048 during assembly output. */
3050 rs6000_legitimate_address (mode, x, reg_ok_strict)
3051 enum machine_mode mode;
3052 rtx x;
3053 int reg_ok_strict;
3055 if (RS6000_SYMBOL_REF_TLS_P (x))
3056 return 0;
3057 if (legitimate_indirect_address_p (x, reg_ok_strict))
3058 return 1;
3059 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
3060 && !ALTIVEC_VECTOR_MODE (mode)
3061 && !SPE_VECTOR_MODE (mode)
3062 && TARGET_UPDATE
3063 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
3064 return 1;
3065 if (legitimate_small_data_p (mode, x))
3066 return 1;
3067 if (legitimate_constant_pool_address_p (x))
3068 return 1;
3069 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
3070 if (! reg_ok_strict
3071 && GET_CODE (x) == PLUS
3072 && GET_CODE (XEXP (x, 0)) == REG
3073 && XEXP (x, 0) == virtual_stack_vars_rtx
3074 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3075 return 1;
3076 if (legitimate_offset_address_p (mode, x, reg_ok_strict))
3077 return 1;
3078 if (mode != TImode
3079 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3080 || TARGET_POWERPC64
3081 || (mode != DFmode && mode != TFmode))
3082 && (TARGET_POWERPC64 || mode != DImode)
3083 && legitimate_indexed_address_p (x, reg_ok_strict))
3084 return 1;
3085 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
3086 return 1;
3087 return 0;
3090 /* Go to LABEL if ADDR (a legitimate address expression)
3091 has an effect that depends on the machine mode it is used for.
3093 On the RS/6000 this is true of all integral offsets (since AltiVec
3094 modes don't allow them) or is a pre-increment or decrement.
3096 ??? Except that due to conceptual problems in offsettable_address_p
3097 we can't really report the problems of integral offsets. So leave
3098 this assuming that the adjustable offset must be valid for the
3099 sub-words of a TFmode operand, which is what we had before. */
3101 bool
3102 rs6000_mode_dependent_address (addr)
3103 rtx addr;
3105 switch (GET_CODE (addr))
3107 case PLUS:
3108 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
3110 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
3111 return val + 12 + 0x8000 >= 0x10000;
3113 break;
3115 case LO_SUM:
3116 return true;
3118 case PRE_INC:
3119 case PRE_DEC:
3120 return TARGET_UPDATE;
3122 default:
3123 break;
3126 return false;
3129 /* Try to output insns to set TARGET equal to the constant C if it can
3130 be done in less than N insns. Do all computations in MODE.
3131 Returns the place where the output has been placed if it can be
3132 done and the insns have been emitted. If it would take more than N
3133 insns, zero is returned and no insns and emitted. */
3136 rs6000_emit_set_const (dest, mode, source, n)
3137 rtx dest, source;
3138 enum machine_mode mode;
3139 int n ATTRIBUTE_UNUSED;
3141 rtx result, insn, set;
3142 HOST_WIDE_INT c0, c1;
3144 if (mode == QImode || mode == HImode)
3146 if (dest == NULL)
3147 dest = gen_reg_rtx (mode);
3148 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
3149 return dest;
3151 else if (mode == SImode)
3153 result = no_new_pseudos ? dest : gen_reg_rtx (SImode);
3155 emit_insn (gen_rtx_SET (VOIDmode, result,
3156 GEN_INT (INTVAL (source)
3157 & (~ (HOST_WIDE_INT) 0xffff))));
3158 emit_insn (gen_rtx_SET (VOIDmode, dest,
3159 gen_rtx_IOR (SImode, result,
3160 GEN_INT (INTVAL (source) & 0xffff))));
3161 result = dest;
3163 else if (mode == DImode)
3165 if (GET_CODE (source) == CONST_INT)
3167 c0 = INTVAL (source);
3168 c1 = -(c0 < 0);
3170 else if (GET_CODE (source) == CONST_DOUBLE)
3172 #if HOST_BITS_PER_WIDE_INT >= 64
3173 c0 = CONST_DOUBLE_LOW (source);
3174 c1 = -(c0 < 0);
3175 #else
3176 c0 = CONST_DOUBLE_LOW (source);
3177 c1 = CONST_DOUBLE_HIGH (source);
3178 #endif
3180 else
3181 abort ();
3183 result = rs6000_emit_set_long_const (dest, c0, c1);
3185 else
3186 abort ();
3188 insn = get_last_insn ();
3189 set = single_set (insn);
3190 if (! CONSTANT_P (SET_SRC (set)))
3191 set_unique_reg_note (insn, REG_EQUAL, source);
3193 return result;
3196 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
3197 fall back to a straight forward decomposition. We do this to avoid
3198 exponential run times encountered when looking for longer sequences
3199 with rs6000_emit_set_const. */
3200 static rtx
3201 rs6000_emit_set_long_const (dest, c1, c2)
3202 rtx dest;
3203 HOST_WIDE_INT c1, c2;
3205 if (!TARGET_POWERPC64)
3207 rtx operand1, operand2;
3209 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
3210 DImode);
3211 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
3212 DImode);
3213 emit_move_insn (operand1, GEN_INT (c1));
3214 emit_move_insn (operand2, GEN_INT (c2));
3216 else
3218 HOST_WIDE_INT ud1, ud2, ud3, ud4;
3220 ud1 = c1 & 0xffff;
3221 ud2 = (c1 & 0xffff0000) >> 16;
3222 #if HOST_BITS_PER_WIDE_INT >= 64
3223 c2 = c1 >> 32;
3224 #endif
3225 ud3 = c2 & 0xffff;
3226 ud4 = (c2 & 0xffff0000) >> 16;
3228 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
3229 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
3231 if (ud1 & 0x8000)
3232 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
3233 else
3234 emit_move_insn (dest, GEN_INT (ud1));
3237 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
3238 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
3240 if (ud2 & 0x8000)
3241 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
3242 - 0x80000000));
3243 else
3244 emit_move_insn (dest, GEN_INT (ud2 << 16));
3245 if (ud1 != 0)
3246 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3248 else if ((ud4 == 0xffff && (ud3 & 0x8000))
3249 || (ud4 == 0 && ! (ud3 & 0x8000)))
3251 if (ud3 & 0x8000)
3252 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
3253 - 0x80000000));
3254 else
3255 emit_move_insn (dest, GEN_INT (ud3 << 16));
3257 if (ud2 != 0)
3258 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
3259 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
3260 if (ud1 != 0)
3261 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3263 else
3265 if (ud4 & 0x8000)
3266 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
3267 - 0x80000000));
3268 else
3269 emit_move_insn (dest, GEN_INT (ud4 << 16));
3271 if (ud3 != 0)
3272 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
3274 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
3275 if (ud2 != 0)
3276 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
3277 GEN_INT (ud2 << 16)));
3278 if (ud1 != 0)
3279 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3282 return dest;
3285 /* Emit a move from SOURCE to DEST in mode MODE. */
/* Expands the move to RTL, legitimizing constant and symbolic source
   operands per the active ABI (TOC references, ELF high/low pairs,
   Mach-O PIC, TLS addresses, constant-pool spills) before falling
   through to a plain SET at the emit_set label.  */
3286 void
3287 rs6000_emit_move (dest, source, mode)
3288 rtx dest;
3289 rtx source;
3290 enum machine_mode mode;
3292 rtx operands[2];
3293 operands[0] = dest;
3294 operands[1] = source;
3296 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
3297 if (GET_CODE (operands[1]) == CONST_DOUBLE
3298 && ! FLOAT_MODE_P (mode)
3299 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3301 /* FIXME. This should never happen. */
3302 /* Since it seems that it does, do the safe thing and convert
3303 to a CONST_INT. */
3304 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
3306 if (GET_CODE (operands[1]) == CONST_DOUBLE
3307 && ! FLOAT_MODE_P (mode)
3308 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
3309 && CONST_DOUBLE_LOW (operands[1]) >= 0)
3310 || (CONST_DOUBLE_HIGH (operands[1]) == -1
3311 && CONST_DOUBLE_LOW (operands[1]) < 0)))
3312 abort ();
3314 /* Check if GCC is setting up a block move that will end up using FP
3315 registers as temporaries. We must make sure this is acceptable. */
/* A mem-to-mem DImode copy with slow doubleword but fast word access
   is split here into two SImode moves to keep it in integer regs.  */
3316 if (GET_CODE (operands[0]) == MEM
3317 && GET_CODE (operands[1]) == MEM
3318 && mode == DImode
3319 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
3320 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
3321 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
3322 ? 32 : MEM_ALIGN (operands[0])))
3323 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
3324 ? 32
3325 : MEM_ALIGN (operands[1]))))
3326 && ! MEM_VOLATILE_P (operands [0])
3327 && ! MEM_VOLATILE_P (operands [1]))
3329 emit_move_insn (adjust_address (operands[0], SImode, 0),
3330 adjust_address (operands[1], SImode, 0));
3331 emit_move_insn (adjust_address (operands[0], SImode, 4),
3332 adjust_address (operands[1], SImode, 4));
3333 return;
/* Pre-reload only: widen narrow loads to a zero-extended word load,
   which avoids a later extend insn; then take the low part.  */
3336 if (!no_new_pseudos)
3338 if (GET_CODE (operands[1]) == MEM && optimize > 0
3339 && (mode == QImode || mode == HImode || mode == SImode)
3340 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
3342 rtx reg = gen_reg_rtx (word_mode);
3344 emit_insn (gen_rtx_SET (word_mode, reg,
3345 gen_rtx_ZERO_EXTEND (word_mode,
3346 operands[1])));
3347 operands[1] = gen_lowpart (mode, reg);
3349 if (GET_CODE (operands[0]) != REG)
3350 operands[1] = force_reg (mode, operands[1]);
3353 if (mode == SFmode && ! TARGET_POWERPC
3354 && TARGET_HARD_FLOAT && TARGET_FPRS
3355 && GET_CODE (operands[0]) == MEM)
3357 int regnum;
3359 if (reload_in_progress || reload_completed)
3360 regnum = true_regnum (operands[1]);
3361 else if (GET_CODE (operands[1]) == REG)
3362 regnum = REGNO (operands[1]);
3363 else
3364 regnum = -1;
3366 /* If operands[1] is a register, on POWER it may have
3367 double-precision data in it, so truncate it to single
3368 precision. */
3369 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
3371 rtx newreg;
3372 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
3373 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
3374 operands[1] = newreg;
3378 /* Recognize the case where operand[1] is a reference to thread-local
3379 data and load its address to a register. */
3380 if (GET_CODE (operands[1]) == SYMBOL_REF)
3382 enum tls_model model = SYMBOL_REF_TLS_MODEL (operands[1]);
3383 if (model != 0)
3384 operands[1] = rs6000_legitimize_tls_address (operands[1], model);
3387 /* Handle the case where reload calls us with an invalid address. */
3388 if (reload_in_progress && mode == Pmode
3389 && (! general_operand (operands[1], mode)
3390 || ! nonimmediate_operand (operands[0], mode)))
3391 goto emit_set;
3393 /* Handle the case of CONSTANT_P_RTX. */
3394 if (GET_CODE (operands[1]) == CONSTANT_P_RTX)
3395 goto emit_set;
3397 /* FIXME: In the long term, this switch statement should go away
3398 and be replaced by a sequence of tests based on things like
3399 mode == Pmode. */
3400 switch (mode)
3402 case HImode:
3403 case QImode:
3404 if (CONSTANT_P (operands[1])
3405 && GET_CODE (operands[1]) != CONST_INT)
3406 operands[1] = force_const_mem (mode, operands[1]);
3407 break;
3409 case TFmode:
3410 case DFmode:
3411 case SFmode:
3412 if (CONSTANT_P (operands[1])
3413 && ! easy_fp_constant (operands[1], mode))
3414 operands[1] = force_const_mem (mode, operands[1]);
3415 break;
3417 case V16QImode:
3418 case V8HImode:
3419 case V4SFmode:
3420 case V4SImode:
3421 case V4HImode:
3422 case V2SFmode:
3423 case V2SImode:
3424 case V1DImode:
3425 if (CONSTANT_P (operands[1])
3426 && !easy_vector_constant (operands[1], mode))
3427 operands[1] = force_const_mem (mode, operands[1]);
3428 break;
3430 case SImode:
3431 case DImode:
3432 /* Use default pattern for address of ELF small data */
3433 if (TARGET_ELF
3434 && mode == Pmode
3435 && DEFAULT_ABI == ABI_V4
3436 && (GET_CODE (operands[1]) == SYMBOL_REF
3437 || GET_CODE (operands[1]) == CONST)
3438 && small_data_operand (operands[1], mode))
3440 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3441 return;
/* mode == Pmode && mode == SImode together restrict this GOT path
   to 32-bit targets.  */
3444 if (DEFAULT_ABI == ABI_V4
3445 && mode == Pmode && mode == SImode
3446 && flag_pic == 1 && got_operand (operands[1], mode))
3448 emit_insn (gen_movsi_got (operands[0], operands[1]));
3449 return;
3452 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
3453 && TARGET_NO_TOC
3454 && ! flag_pic
3455 && mode == Pmode
3456 && CONSTANT_P (operands[1])
3457 && GET_CODE (operands[1]) != HIGH
3458 && GET_CODE (operands[1]) != CONST_INT)
3460 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
3462 /* If this is a function address on -mcall-aixdesc,
3463 convert it to the address of the descriptor. */
3464 if (DEFAULT_ABI == ABI_AIX
3465 && GET_CODE (operands[1]) == SYMBOL_REF
3466 && XSTR (operands[1], 0)[0] == '.')
3468 const char *name = XSTR (operands[1], 0);
3469 rtx new_ref;
3470 while (*name == '.')
3471 name++;
3472 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
3473 CONSTANT_POOL_ADDRESS_P (new_ref)
3474 = CONSTANT_POOL_ADDRESS_P (operands[1]);
3475 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
3476 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
3477 SYMBOL_REF_DECL (new_ref) = SYMBOL_REF_DECL (operands[1]);
3478 operands[1] = new_ref;
3481 if (DEFAULT_ABI == ABI_DARWIN)
3483 #if TARGET_MACHO
3484 if (MACHO_DYNAMIC_NO_PIC_P)
3486 /* Take care of any required data indirection. */
3487 operands[1] = rs6000_machopic_legitimize_pic_address (
3488 operands[1], mode, operands[0]);
3489 if (operands[0] != operands[1])
3490 emit_insn (gen_rtx_SET (VOIDmode,
3491 operands[0], operands[1]));
3492 return;
3494 #endif
3495 emit_insn (gen_macho_high (target, operands[1]));
3496 emit_insn (gen_macho_low (operands[0], target, operands[1]));
3497 return;
3500 emit_insn (gen_elf_high (target, operands[1]));
3501 emit_insn (gen_elf_low (operands[0], target, operands[1]));
3502 return;
3505 /* If this is a SYMBOL_REF that refers to a constant pool entry,
3506 and we have put it in the TOC, we just need to make a TOC-relative
3507 reference to it. */
3508 if (TARGET_TOC
3509 && GET_CODE (operands[1]) == SYMBOL_REF
3510 && constant_pool_expr_p (operands[1])
3511 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
3512 get_pool_mode (operands[1])))
3514 operands[1] = create_TOC_reference (operands[1]);
3516 else if (mode == Pmode
3517 && CONSTANT_P (operands[1])
3518 && ((GET_CODE (operands[1]) != CONST_INT
3519 && ! easy_fp_constant (operands[1], mode))
3520 || (GET_CODE (operands[1]) == CONST_INT
3521 && num_insns_constant (operands[1], mode) > 2)
3522 || (GET_CODE (operands[0]) == REG
3523 && FP_REGNO_P (REGNO (operands[0]))))
3524 && GET_CODE (operands[1]) != HIGH
3525 && ! legitimate_constant_pool_address_p (operands[1])
3526 && ! toc_relative_expr_p (operands[1]))
3528 /* Emit a USE operation so that the constant isn't deleted if
3529 expensive optimizations are turned on because nobody
3530 references it. This should only be done for operands that
3531 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
3532 This should not be done for operands that contain LABEL_REFs.
3533 For now, we just handle the obvious case. */
3534 if (GET_CODE (operands[1]) != LABEL_REF)
3535 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
3537 #if TARGET_MACHO
3538 /* Darwin uses a special PIC legitimizer. */
3539 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
3541 operands[1] =
3542 rs6000_machopic_legitimize_pic_address (operands[1], mode,
3543 operands[0]);
3544 if (operands[0] != operands[1])
3545 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3546 return;
3548 #endif
3550 /* If we are to limit the number of things we put in the TOC and
3551 this is a symbol plus a constant we can add in one insn,
3552 just put the symbol in the TOC and add the constant. Don't do
3553 this if reload is in progress. */
3554 if (GET_CODE (operands[1]) == CONST
3555 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
3556 && GET_CODE (XEXP (operands[1], 0)) == PLUS
3557 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
3558 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
3559 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
3560 && ! side_effects_p (operands[0]))
3562 rtx sym =
3563 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
3564 rtx other = XEXP (XEXP (operands[1], 0), 1);
3566 sym = force_reg (mode, sym);
3567 if (mode == SImode)
3568 emit_insn (gen_addsi3 (operands[0], sym, other));
3569 else
3570 emit_insn (gen_adddi3 (operands[0], sym, other));
3571 return;
3574 operands[1] = force_const_mem (mode, operands[1]);
3576 if (TARGET_TOC
3577 && constant_pool_expr_p (XEXP (operands[1], 0))
3578 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
3579 get_pool_constant (XEXP (operands[1], 0)),
3580 get_pool_mode (XEXP (operands[1], 0))))
3582 operands[1]
3583 = gen_rtx_MEM (mode,
3584 create_TOC_reference (XEXP (operands[1], 0)));
3585 set_mem_alias_set (operands[1], get_TOC_alias_set ());
3586 RTX_UNCHANGING_P (operands[1]) = 1;
3589 break;
3591 case TImode:
/* TImode mem operands must have register addresses; force them
   into a register before reload grabs the address.  */
3592 if (GET_CODE (operands[0]) == MEM
3593 && GET_CODE (XEXP (operands[0], 0)) != REG
3594 && ! reload_in_progress)
3595 operands[0]
3596 = replace_equiv_address (operands[0],
3597 copy_addr_to_reg (XEXP (operands[0], 0)));
3599 if (GET_CODE (operands[1]) == MEM
3600 && GET_CODE (XEXP (operands[1], 0)) != REG
3601 && ! reload_in_progress)
3602 operands[1]
3603 = replace_equiv_address (operands[1],
3604 copy_addr_to_reg (XEXP (operands[1], 0)));
3605 if (TARGET_POWER)
3607 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3608 gen_rtvec (2,
3609 gen_rtx_SET (VOIDmode,
3610 operands[0], operands[1]),
3611 gen_rtx_CLOBBER (VOIDmode,
3612 gen_rtx_SCRATCH (SImode)))));
3613 return;
3615 break;
3617 default:
3618 abort ();
3621 /* Above, we may have called force_const_mem which may have returned
3622 an invalid address. If we can, fix this up; otherwise, reload will
3623 have to deal with it. */
3624 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
3625 operands[1] = validize_mem (operands[1]);
/* Common exit: by this point both operands are legitimate for a SET.  */
3627 emit_set:
3628 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3631 /* Initialize a variable CUM of type CUMULATIVE_ARGS
3632 for a call to a function whose data type is FNTYPE.
3633 For a library call, FNTYPE is 0.
3635 For incoming args we set the number of arguments in the prototype large
3636 so we never return a PARALLEL. */
3638 void
3639 init_cumulative_args (cum, fntype, libname, incoming)
3640 CUMULATIVE_ARGS *cum;
3641 tree fntype;
3642 rtx libname ATTRIBUTE_UNUSED;
3643 int incoming;
3645 static CUMULATIVE_ARGS zero_cumulative;
/* Zero the whole structure first; the explicit stores below then set
   the non-zero starting register numbers (cum->words = 0 is redundant
   after the copy but keeps the default visible).  */
3647 *cum = zero_cumulative;
3648 cum->words = 0;
3649 cum->fregno = FP_ARG_MIN_REG;
3650 cum->vregno = ALTIVEC_ARG_MIN_REG;
3651 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
3652 cum->call_cookie = CALL_NORMAL;
3653 cum->sysv_gregno = GP_ARG_MIN_REG;
3655 if (incoming)
3656 cum->nargs_prototype = 1000; /* don't return a PARALLEL */
3658 else if (cum->prototype)
/* - 1 drops the trailing void entry of a prototyped arg list; the
   added term counts one extra slot when the value is returned in
   memory (presumably the hidden return pointer — see RETURN_IN_MEMORY).  */
3659 cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
3660 + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
3661 || RETURN_IN_MEMORY (TREE_TYPE (fntype))));
3663 else
3664 cum->nargs_prototype = 0;
3666 cum->orig_nargs = cum->nargs_prototype;
3668 /* Check for a longcall attribute. */
/* "shortcall" overrides "longcall" when both are present.  */
3669 if (fntype
3670 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
3671 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
3672 cum->call_cookie = CALL_LONG;
3674 if (TARGET_DEBUG_ARG)
3676 fprintf (stderr, "\ninit_cumulative_args:");
3677 if (fntype)
3679 tree ret_type = TREE_TYPE (fntype);
3680 fprintf (stderr, " ret code = %s,",
3681 tree_code_name[ (int)TREE_CODE (ret_type) ]);
3684 if (cum->call_cookie & CALL_LONG)
3685 fprintf (stderr, " longcall,");
3687 fprintf (stderr, " proto = %d, nargs = %d\n",
3688 cum->prototype, cum->nargs_prototype);
3692 /* If defined, a C expression which determines whether, and in which
3693 direction, to pad out an argument with extra space. The value
3694 should be of type `enum direction': either `upward' to pad above
3695 the argument, `downward' to pad below, or `none' to inhibit
3696 padding.
3698 For the AIX ABI structs are always stored left shifted in their
3699 argument slot. */
3701 enum direction
3702 function_arg_padding (mode, type)
3703 enum machine_mode mode;
3704 tree type;
3706 if (type != 0 && AGGREGATE_TYPE_P (type))
3707 return upward;
3709 /* This is the default definition. */
3710 return (! BYTES_BIG_ENDIAN
3711 ? upward
3712 : ((mode == BLKmode
3713 ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
3714 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
3715 : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
3716 ? downward : upward));
3719 /* If defined, a C expression that gives the alignment boundary, in bits,
3720 of an argument with the specified mode and type. If it is not defined,
3721 PARM_BOUNDARY is used for all arguments.
3723 V.4 wants long longs to be double word aligned. */
3726 function_arg_boundary (mode, type)
3727 enum machine_mode mode;
3728 tree type ATTRIBUTE_UNUSED;
3730 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
3731 return 64;
3732 else if (SPE_VECTOR_MODE (mode))
3733 return 64;
3734 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3735 return 128;
3736 else
3737 return PARM_BOUNDARY;
3740 /* Update the data in CUM to advance over an argument
3741 of mode MODE and data type TYPE.
3742 (TYPE is null for libcalls where that information may not be available.) */
/* Mirrors the register-selection logic of function_arg below; the two
   must stay in sync for the same (mode, type, named) inputs.  */
3744 void
3745 function_arg_advance (cum, mode, type, named)
3746 CUMULATIVE_ARGS *cum;
3747 enum machine_mode mode;
3748 tree type;
3749 int named;
/* May go negative; function_arg uses a negative count to detect
   unprototyped calls.  */
3751 cum->nargs_prototype--;
3753 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3755 if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
3756 cum->vregno++;
3757 else
3758 cum->words += RS6000_ARG_SIZE (mode, type);
3760 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
3761 && named && cum->sysv_gregno <= GP_ARG_MAX_REG)
3762 cum->sysv_gregno++;
3763 else if (DEFAULT_ABI == ABI_V4)
3765 if (TARGET_HARD_FLOAT && TARGET_FPRS
3766 && (mode == SFmode || mode == DFmode))
3768 if (cum->fregno <= FP_ARG_V4_MAX_REG)
3769 cum->fregno++;
3770 else
/* Spilled FP arg: DFmode is doubleword-aligned on the stack,
   so burn a pad word when cum->words is odd.  */
3772 if (mode == DFmode)
3773 cum->words += cum->words & 1;
3774 cum->words += RS6000_ARG_SIZE (mode, type);
3777 else
3779 int n_words;
3780 int gregno = cum->sysv_gregno;
3782 /* Aggregates and IEEE quad get passed by reference. */
3783 if ((type && AGGREGATE_TYPE_P (type))
3784 || mode == TFmode)
3785 n_words = 1;
3786 else
3787 n_words = RS6000_ARG_SIZE (mode, type);
3789 /* Long long and SPE vectors are put in odd registers. */
3790 if (n_words == 2 && (gregno & 1) == 0)
3791 gregno += 1;
3793 /* Long long and SPE vectors are not split between registers
3794 and stack. */
3795 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
3797 /* Long long is aligned on the stack. */
3798 if (n_words == 2)
3799 cum->words += cum->words & 1;
3800 cum->words += n_words;
3803 /* Note: continuing to accumulate gregno past when we've started
3804 spilling to the stack indicates the fact that we've started
3805 spilling to the stack to expand_builtin_saveregs. */
3806 cum->sysv_gregno = gregno + n_words;
3809 if (TARGET_DEBUG_ARG)
3811 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
3812 cum->words, cum->fregno);
3813 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
3814 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
3815 fprintf (stderr, "mode = %4s, named = %d\n",
3816 GET_MODE_NAME (mode), named);
/* AIX/Darwin path: words advance linearly; FP args also consume GP
   "shadow" slots via RS6000_ARG_SIZE.  */
3819 else
3821 int align = (TARGET_32BIT && (cum->words & 1) != 0
3822 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
3824 cum->words += align + RS6000_ARG_SIZE (mode, type);
3826 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3827 && TARGET_HARD_FLOAT && TARGET_FPRS)
3828 cum->fregno += (mode == TFmode ? 2 : 1);
3830 if (TARGET_DEBUG_ARG)
3832 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
3833 cum->words, cum->fregno);
3834 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
3835 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
3836 fprintf (stderr, "named = %d, align = %d\n", named, align);
3841 /* Determine where to put an argument to a function.
3842 Value is zero to push the argument on the stack,
3843 or a hard register in which to store the argument.
3845 MODE is the argument's machine mode.
3846 TYPE is the data type of the argument (as a tree).
3847 This is null for libcalls where that information may
3848 not be available.
3849 CUM is a variable of type CUMULATIVE_ARGS which gives info about
3850 the preceding args and about the function being called.
3851 NAMED is nonzero if this argument is a named parameter
3852 (otherwise it is an extra parameter matching an ellipsis).
3854 On RS/6000 the first eight words of non-FP are normally in registers
3855 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
3856 Under V.4, the first 8 FP args are in registers.
3858 If this is floating-point and no prototype is specified, we use
3859 both an FP and integer register (or possibly FP reg and stack). Library
3860 functions (when TYPE is zero) always have the proper types for args,
3861 so we can pass the FP value just in one register. emit_library_function
3862 doesn't support PARALLEL anyway. */
3864 struct rtx_def *
3865 function_arg (cum, mode, type, named)
3866 CUMULATIVE_ARGS *cum;
3867 enum machine_mode mode;
3868 tree type;
3869 int named;
3871 enum rs6000_abi abi = DEFAULT_ABI;
3873 /* Return a marker to indicate whether CR1 needs to set or clear the
3874 bit that V.4 uses to say fp args were passed in registers.
3875 Assume that we don't need the marker for software floating point,
3876 or compiler generated library calls. */
/* VOIDmode is the end-of-arguments sentinel: the return value here is
   a call-cookie CONST_INT, not an argument location.  */
3877 if (mode == VOIDmode)
3879 if (abi == ABI_V4
3880 && cum->nargs_prototype < 0
3881 && type && (cum->prototype || TARGET_NO_PROTOTYPE))
3883 /* For the SPE, we need to crxor CR6 always. */
3884 if (TARGET_SPE_ABI)
3885 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
3886 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
3887 return GEN_INT (cum->call_cookie
3888 | ((cum->fregno == FP_ARG_MIN_REG)
3889 ? CALL_V4_SET_FP_ARGS
3890 : CALL_V4_CLEAR_FP_ARGS));
3893 return GEN_INT (cum->call_cookie);
3896 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3898 if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
3899 return gen_rtx_REG (mode, cum->vregno);
3900 else
3901 return NULL;
3903 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode) && named)
3905 if (cum->sysv_gregno <= GP_ARG_MAX_REG)
3906 return gen_rtx_REG (mode, cum->sysv_gregno);
3907 else
3908 return NULL;
3910 else if (abi == ABI_V4)
3912 if (TARGET_HARD_FLOAT && TARGET_FPRS
3913 && (mode == SFmode || mode == DFmode))
3915 if (cum->fregno <= FP_ARG_V4_MAX_REG)
3916 return gen_rtx_REG (mode, cum->fregno);
3917 else
3918 return NULL;
3920 else
3922 int n_words;
3923 int gregno = cum->sysv_gregno;
/* This register-selection logic must match what
   function_arg_advance records for the same argument.  */
3925 /* Aggregates and IEEE quad get passed by reference. */
3926 if ((type && AGGREGATE_TYPE_P (type))
3927 || mode == TFmode)
3928 n_words = 1;
3929 else
3930 n_words = RS6000_ARG_SIZE (mode, type);
3932 /* Long long and SPE vectors are put in odd registers. */
3933 if (n_words == 2 && (gregno & 1) == 0)
3934 gregno += 1;
3936 /* Long long and SPE vectors are not split between registers
3937 and stack. */
3938 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
3940 /* SPE vectors in ... get split into 2 registers. */
3941 if (TARGET_SPE && TARGET_SPE_ABI
3942 && SPE_VECTOR_MODE (mode) && !named)
3944 rtx r1, r2;
3945 enum machine_mode m = SImode;
3947 r1 = gen_rtx_REG (m, gregno);
3948 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
3949 r2 = gen_rtx_REG (m, gregno + 1);
3950 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
3951 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
3953 return gen_rtx_REG (mode, gregno);
3955 else
3956 return NULL;
/* AIX/Darwin path.  */
3959 else
3961 int align = (TARGET_32BIT && (cum->words & 1) != 0
3962 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
3963 int align_words = cum->words + align;
3965 if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3966 return NULL_RTX;
3968 if (USE_FP_FOR_ARG_P (*cum, mode, type))
3970 if (! type
3971 || ((cum->nargs_prototype > 0)
3972 /* IBM AIX extended its linkage convention definition always
3973 to require FP args after register save area hole on the
3974 stack. */
3975 && (DEFAULT_ABI != ABI_AIX
3976 || ! TARGET_XL_CALL
3977 || (align_words < GP_ARG_NUM_REG))))
3978 return gen_rtx_REG (mode, cum->fregno);
/* Unprototyped FP arg: pass in both the FP register and the
   corresponding GP register(s) (or stack) via a PARALLEL, since
   the callee's expectations are unknown.  */
3980 return gen_rtx_PARALLEL (mode,
3981 gen_rtvec (2,
3982 gen_rtx_EXPR_LIST (VOIDmode,
3983 ((align_words >= GP_ARG_NUM_REG)
3984 ? NULL_RTX
3985 : (align_words
3986 + RS6000_ARG_SIZE (mode, type)
3987 > GP_ARG_NUM_REG
3988 /* If this is partially on the stack, then
3989 we only include the portion actually
3990 in registers here. */
3991 ? gen_rtx_REG (SImode,
3992 GP_ARG_MIN_REG + align_words)
3993 : gen_rtx_REG (mode,
3994 GP_ARG_MIN_REG + align_words))),
3995 const0_rtx),
3996 gen_rtx_EXPR_LIST (VOIDmode,
3997 gen_rtx_REG (mode, cum->fregno),
3998 const0_rtx)));
4000 else if (align_words < GP_ARG_NUM_REG)
4001 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4002 else
4003 return NULL_RTX;
4007 /* For an arg passed partly in registers and partly in memory,
4008 this is the number of registers used.
4009 For args passed entirely in registers or entirely in memory, zero. */
4012 function_arg_partial_nregs (cum, mode, type, named)
4013 CUMULATIVE_ARGS *cum;
4014 enum machine_mode mode;
4015 tree type;
4016 int named ATTRIBUTE_UNUSED;
4018 if (DEFAULT_ABI == ABI_V4)
4019 return 0;
4021 if (USE_FP_FOR_ARG_P (*cum, mode, type)
4022 || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
4024 if (cum->nargs_prototype >= 0)
4025 return 0;
4028 if (cum->words < GP_ARG_NUM_REG
4029 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
4031 int ret = GP_ARG_NUM_REG - cum->words;
4032 if (ret && TARGET_DEBUG_ARG)
4033 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
4035 return ret;
4038 return 0;
4041 /* A C expression that indicates when an argument must be passed by
4042 reference. If nonzero for an argument, a copy of that argument is
4043 made in memory and a pointer to the argument is passed instead of
4044 the argument itself. The pointer is passed in whatever way is
4045 appropriate for passing a pointer to that type.
4047 Under V.4, structures and unions are passed by reference.
4049 As an extension to all ABIs, variable sized types are passed by
4050 reference. */
4053 function_arg_pass_by_reference (cum, mode, type, named)
4054 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
4055 enum machine_mode mode ATTRIBUTE_UNUSED;
4056 tree type;
4057 int named ATTRIBUTE_UNUSED;
4059 if (DEFAULT_ABI == ABI_V4
4060 && ((type && AGGREGATE_TYPE_P (type))
4061 || mode == TFmode))
4063 if (TARGET_DEBUG_ARG)
4064 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
4066 return 1;
4068 return type && int_size_in_bytes (type) <= 0;
4071 /* Perform any needed actions needed for a function that is receiving a
4072 variable number of arguments.
4074 CUM is as above.
4076 MODE and TYPE are the mode and type of the current parameter.
4078 PRETEND_SIZE is a variable that should be set to the amount of stack
4079 that must be pushed by the prolog to pretend that our caller pushed
4082 Normally, this macro will push all remaining incoming registers on the
4083 stack and set PRETEND_SIZE to the length of the registers pushed. */
4085 void
4086 setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
4087 CUMULATIVE_ARGS *cum;
4088 enum machine_mode mode;
4089 tree type;
4090 int *pretend_size ATTRIBUTE_UNUSED;
4091 int no_rtl;
4094 CUMULATIVE_ARGS next_cum;
4095 int reg_size = TARGET_32BIT ? 4 : 8;
4096 rtx save_area = NULL_RTX, mem;
4097 int first_reg_offset, set;
4098 tree fntype;
4099 int stdarg_p;
4101 fntype = TREE_TYPE (current_function_decl);
/* stdarg_p: prototype exists and does not end in void, i.e. a real
   `...' stdarg function rather than old-style varargs.  */
4102 stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
4103 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4104 != void_type_node));
4106 /* For varargs, we do not want to skip the dummy va_dcl argument.
4107 For stdargs, we do want to skip the last named argument. */
4108 next_cum = *cum;
4109 if (stdarg_p)
4110 function_arg_advance (&next_cum, mode, type, 1);
4112 if (DEFAULT_ABI == ABI_V4)
4114 /* Indicate to allocate space on the stack for varargs save area. */
4115 cfun->machine->sysv_varargs_p = 1;
4116 if (! no_rtl)
4117 save_area = plus_constant (virtual_stack_vars_rtx,
4118 - RS6000_VARARGS_SIZE);
4120 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
4122 else
4124 first_reg_offset = next_cum.words;
4125 save_area = virtual_incoming_args_rtx;
4126 cfun->machine->sysv_varargs_p = 0;
4128 if (MUST_PASS_IN_STACK (mode, type))
4129 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
4132 set = get_varargs_alias_set ();
4133 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
/* NOTE(review): the trailing ',' here is the comma operator chaining
   set_mem_alias_set into the assignment statement — it behaves the
   same as ';' but reads like a typo for one.  */
4135 mem = gen_rtx_MEM (BLKmode,
4136 plus_constant (save_area,
4137 first_reg_offset * reg_size)),
4138 set_mem_alias_set (mem, set);
4139 set_mem_align (mem, BITS_PER_WORD);
/* Spill every GP argument register not consumed by named args.  */
4141 move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
4142 GP_ARG_NUM_REG - first_reg_offset);
4145 /* Save FP registers if needed. */
4146 if (DEFAULT_ABI == ABI_V4
4147 && TARGET_HARD_FLOAT && TARGET_FPRS
4148 && ! no_rtl
4149 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
4151 int fregno = next_cum.fregno;
4152 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4153 rtx lab = gen_label_rtx ();
4154 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
/* Branch over the FP saves when CR1 says no FP args were passed in
   registers (the V.4 call-cookie protocol).  */
4156 emit_jump_insn (gen_rtx_SET (VOIDmode,
4157 pc_rtx,
4158 gen_rtx_IF_THEN_ELSE (VOIDmode,
4159 gen_rtx_NE (VOIDmode, cr1,
4160 const0_rtx),
4161 gen_rtx_LABEL_REF (VOIDmode, lab),
4162 pc_rtx)));
4164 while (fregno <= FP_ARG_V4_MAX_REG)
4166 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
4167 set_mem_alias_set (mem, set);
4168 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
4169 fregno++;
4170 off += 8;
4173 emit_label (lab);
4177 /* Create the va_list data type. */
/* For V.4 this is the 4-field __va_list_tag record (gpr/fpr counters
   plus overflow and register-save-area pointers); every other ABI uses
   a plain `char *'.  The TREE_CHAIN links below fix the field order,
   which rs6000_va_start and rs6000_va_arg walk positionally.  */
4179 tree
4180 rs6000_build_va_list ()
4182 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
4184 /* For AIX, prefer 'char *' because that's what the system
4185 header files like. */
4186 if (DEFAULT_ABI != ABI_V4)
4187 return build_pointer_type (char_type_node);
4189 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
4190 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
4192 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
4193 unsigned_char_type_node);
4194 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
4195 unsigned_char_type_node);
4196 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
4197 ptr_type_node);
4198 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
4199 ptr_type_node);
4201 DECL_FIELD_CONTEXT (f_gpr) = record;
4202 DECL_FIELD_CONTEXT (f_fpr) = record;
4203 DECL_FIELD_CONTEXT (f_ovf) = record;
4204 DECL_FIELD_CONTEXT (f_sav) = record;
4206 TREE_CHAIN (record) = type_decl;
4207 TYPE_NAME (record) = type_decl;
4208 TYPE_FIELDS (record) = f_gpr;
4209 TREE_CHAIN (f_gpr) = f_fpr;
4210 TREE_CHAIN (f_fpr) = f_ovf;
4211 TREE_CHAIN (f_ovf) = f_sav;
/* layout_type must run after all fields are chained in.  */
4213 layout_type (record);
4215 /* The correct type is an array type of one element. */
4216 return build_array_type (record, build_index_type (size_zero_node));
4219 /* Implement va_start. */
/* Fills in the V.4 __va_list_tag: gpr/fpr counters from the incoming
   CUMULATIVE_ARGS, the overflow area past the named stack args, and
   the register save area laid out by setup_incoming_varargs.  Field
   order must match rs6000_build_va_list.  */
4221 void
4222 rs6000_va_start (valist, nextarg)
4223 tree valist;
4224 rtx nextarg;
4226 HOST_WIDE_INT words, n_gpr, n_fpr;
4227 tree f_gpr, f_fpr, f_ovf, f_sav;
4228 tree gpr, fpr, ovf, sav, t;
4230 /* Only SVR4 needs something special. */
4231 if (DEFAULT_ABI != ABI_V4)
4233 std_expand_builtin_va_start (valist, nextarg);
4234 return;
4237 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
4238 f_fpr = TREE_CHAIN (f_gpr);
4239 f_ovf = TREE_CHAIN (f_fpr);
4240 f_sav = TREE_CHAIN (f_ovf);
/* va_list is an array of one record; strip to the record itself.  */
4242 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
4243 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
4244 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
4245 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
4246 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
4248 /* Count number of gp and fp argument registers used. */
4249 words = current_function_args_info.words;
4250 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
4251 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
4253 if (TARGET_DEBUG_ARG)
4254 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
4255 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
4256 words, n_gpr, n_fpr);
4258 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
4259 TREE_SIDE_EFFECTS (t) = 1;
4260 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4262 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
4263 TREE_SIDE_EFFECTS (t) = 1;
4264 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4266 /* Find the overflow area. */
4267 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
4268 if (words != 0)
4269 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
4270 build_int_2 (words * UNITS_PER_WORD, 0));
4271 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
4272 TREE_SIDE_EFFECTS (t) = 1;
4273 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4275 /* Find the register save area. */
/* Same -RS6000_VARARGS_SIZE offset that setup_incoming_varargs used
   for its save_area; the two must agree.  */
4276 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
4277 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
4278 build_int_2 (-RS6000_VARARGS_SIZE, -1));
4279 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
4280 TREE_SIDE_EFFECTS (t) = 1;
4281 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* Implement va_arg.  Emit RTL computing the address from which the next
   variable argument of TYPE can be loaded, consuming it from either the
   register save area or the overflow (stack) area of the V4 va_list
   referenced by VALIST.  Returns that address as an rtx.  */

rtx
rs6000_va_arg (valist, type)
     tree valist, type;
{
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, reg, t, u;
  int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
  rtx lab_false, lab_over, addr_rtx, r;

  /* Only the V4 ABI uses the split register/overflow va_list record;
     all other ABIs use the generic single-pointer scheme.  */
  if (DEFAULT_ABI != ABI_V4)
    {
      /* Variable sized types are passed by reference.  */
      if (int_size_in_bytes (type) <= 0)
	{
	  u = build_pointer_type (type);

	  /* Args grow upward.  Bump the va_list pointer past the
	     pointer-sized slot holding the reference.  */
	  t = build (POSTINCREMENT_EXPR, TREE_TYPE (valist), valist,
		     build_int_2 (POINTER_SIZE / BITS_PER_UNIT, 0));
	  TREE_SIDE_EFFECTS (t) = 1;

	  t = build1 (NOP_EXPR, build_pointer_type (u), t);
	  TREE_SIDE_EFFECTS (t) = 1;

	  t = build1 (INDIRECT_REF, u, t);
	  TREE_SIDE_EFFECTS (t) = 1;

	  return expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL);
	}
      else
	return std_expand_builtin_va_arg (valist, type);
    }

  /* Pick apart the va_list record: the gpr/fpr use counters, the
     overflow area pointer and the register save area pointer.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_ovf = TREE_CHAIN (f_fpr);
  f_sav = TREE_CHAIN (f_ovf);

  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  /* Size of the argument in bytes and in words.  */
  size = int_size_in_bytes (type);
  rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
    {
      /* Aggregates and long doubles are passed by reference; what we
	 fetch from the register/stack slot is a pointer to the value.  */
      indirect_p = 1;
      reg = gpr;
      n_reg = 1;
      sav_ofs = 0;
      sav_scale = 4;
      size = UNITS_PER_WORD;
      rsize = 1;
    }
  else if (FLOAT_TYPE_P (type) && TARGET_HARD_FLOAT && TARGET_FPRS)
    {
      /* FP args go in FP registers, if present.  The FPR slots follow
	 the eight 4-byte GPR slots in the register save area, and each
	 FPR slot is 8 bytes wide.  */
      indirect_p = 0;
      reg = fpr;
      n_reg = 1;
      sav_ofs = 8*4;
      sav_scale = 8;
    }
  else
    {
      /* Otherwise into GP registers.  */
      indirect_p = 0;
      reg = gpr;
      n_reg = rsize;
      sav_ofs = 0;
      sav_scale = 4;
    }

  /* Pull the value out of the saved registers ...  */

  lab_false = gen_label_rtx ();
  lab_over = gen_label_rtx ();
  addr_rtx = gen_reg_rtx (Pmode);

  /* AltiVec vectors never go in registers.  */
  if (!TARGET_ALTIVEC || TREE_CODE (type) != VECTOR_TYPE)
    {
      TREE_THIS_VOLATILE (reg) = 1;
      /* Branch to the overflow path when fewer than n_reg of the
	 eight argument registers remain unconsumed.  */
      emit_cmp_and_jump_insns
	(expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
	 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
	 lab_false);

      /* Long long is aligned in the registers.  Round the counter up
	 to an even register pair before using it.  */
      if (n_reg > 1)
	{
	  u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
		     build_int_2 (n_reg - 1, 0));
	  u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
	  u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
	  TREE_SIDE_EFFECTS (u) = 1;
	  expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
	}

      if (sav_ofs)
	t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
      else
	t = sav;

      /* addr = sav + sav_ofs + (reg counter, post-incremented by
	 n_reg) * sav_scale.  */
      u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
		 build_int_2 (n_reg, 0));
      TREE_SIDE_EFFECTS (u) = 1;

      u = build1 (CONVERT_EXPR, integer_type_node, u);
      TREE_SIDE_EFFECTS (u) = 1;

      u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
      TREE_SIDE_EFFECTS (u) = 1;

      t = build (PLUS_EXPR, ptr_type_node, t, u);
      TREE_SIDE_EFFECTS (t) = 1;

      r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
      if (r != addr_rtx)
	emit_move_insn (addr_rtx, r);

      emit_jump_insn (gen_jump (lab_over));
      emit_barrier ();
    }

  emit_label (lab_false);

  /* ... otherwise out of the overflow area.  */

  /* Make sure we don't find reg 7 for the next int arg.

     All AltiVec vectors go in the overflow area.  So in the AltiVec
     case we need to get the vectors from the overflow area, but
     remember where the GPRs and FPRs are.  */
  if (n_reg > 1 && (TREE_CODE (type) != VECTOR_TYPE
		    || !TARGET_ALTIVEC))
    {
      /* 8 marks the register counter as exhausted.  */
      t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }

  /* Care for on-stack alignment if needed.  */
  if (rsize <= 1)
    t = ovf;
  else
    {
      int align;

      /* AltiVec vectors are 16 byte aligned.  */
      if (TARGET_ALTIVEC && TREE_CODE (type) == VECTOR_TYPE)
	align = 15;
      else
	align = 7;

      /* Round the overflow pointer up: (ovf + align) & ~align.  */
      t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
      t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
    }
  t = save_expr (t);

  r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
  if (r != addr_rtx)
    emit_move_insn (addr_rtx, r);

  /* Advance the overflow pointer past the argument.  */
  t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (lab_over);

  if (indirect_p)
    {
      /* The slot held a pointer to the real value; load through it.  */
      r = gen_rtx_MEM (Pmode, addr_rtx);
      set_mem_alias_set (r, get_varargs_alias_set ());
      emit_move_insn (addr_rtx, r);
    }

  return addr_rtx;
}
/* Builtins.  */

/* Register builtin function NAME with type TYPE and function code CODE
   as a machine-dependent (BUILT_IN_MD) builtin, but only when at least
   one of the target flag bits in MASK is currently enabled.  */
#define def_builtin(MASK, NAME, TYPE, CODE)			\
  do {								\
    if ((MASK) & target_flags)					\
      builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,	\
			NULL, NULL_TREE);			\
  } while (0)
/* Simple ternary operations: VECd = foo (VECa, VECb, VECc).
   Each entry: enabling target_flags mask, insn pattern, user-visible
   builtin name, rs6000 builtin function code.  */

static const struct builtin_description bdesc_3arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
};
/* DST operations: void foo (void *, const int, const char).
   AltiVec data-stream touch builtins (cache prefetch hints).  */

static const struct builtin_description bdesc_dst[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
};
/* Simple binary operations: VECc = foo (VECa, VECb).
   Each entry: enabling target_flags mask, insn pattern, user-visible
   builtin name, rs6000 builtin function code.  SPE entries carry a
   zero mask.  The place-holder comments mark sentinel entries that
   other code uses to delimit sub-ranges of this table, so keep them
   first/last as noted.
   NOTE(review): unlike bdesc_3arg/bdesc_dst this table is not
   const-qualified -- confirm nothing writes to it before adding
   const.  */

static struct builtin_description bdesc_2arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
  { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
  { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
  { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
  { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
  { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
  { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
  { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
  { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
  { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
  { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
  { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
  { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
  { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
  { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
  { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
  { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
  { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
  { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
  { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
  { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
  { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
  { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
  { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
  { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
  { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
  { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
  { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },

  /* Place holder, leave as first spe builtin.  */
  { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
  { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
  { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
  { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
  { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
  { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
  { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
  { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
  { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
  { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
  { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
  { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
  { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
  { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
  { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
  { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
  { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
  { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
  { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
  { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
  { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
  { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
  { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
  { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
  { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
  { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
  { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
  { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
  { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
  { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
  { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
  { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
  { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
  { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
  { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
  { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
  { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
  { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
  { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
  { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
  { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
  { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
  { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
  { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
  { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
  { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
  { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
  { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
  { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
  { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
  { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
  { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
  { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
  { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
  { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
  { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
  { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
  { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
  { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
  { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
  { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
  { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
  { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
  { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
  { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
  { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
  { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
  { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
  { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
  { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
  { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
  { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
  { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
  { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
  { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
  { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
  { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
  { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
  { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
  { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
  { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
  { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
  { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
  { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
  { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
  { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
  { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
  { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
  { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
  { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
  { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
  { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
  { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
  { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
  { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
  { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
  { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
  { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
  { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
  { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
  { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
  { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
  { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
  { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
  { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
  { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
  { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
  { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
  { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },

  /* SPE binary operations expecting a 5-bit unsigned literal.  */
  { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },

  { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
  { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
  { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
  { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
  { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
  { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
  { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
  { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
  { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
  { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
  { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
  { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
  { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
  { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
  { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
  { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
  { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
  { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
  { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
  { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
  { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
  { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
  { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
  { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
  { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
  { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },

  /* Place-holder.  Leave as last binary SPE builtin.  */
  { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
};
/* AltiVec predicates.  */

/* Description of an AltiVec predicate builtin: like a plain
   builtin_description, plus the name of the compare instruction the
   predicate is built from.  */
struct builtin_description_predicates
{
  const unsigned int mask;		/* target_flags bits that must be on.  */
  const enum insn_code icode;		/* insn pattern used to expand it.  */
  const char *opcode;			/* compare insn name, e.g. "*vcmpbfp.";
					   presumably looked up when expanding
					   the predicate -- confirm at use site.  */
  const char *const name;		/* user-visible builtin name.  */
  const enum rs6000_builtins code;	/* builtin function code.  */
};
/* AltiVec compare-predicate builtins (__builtin_altivec_*_p), one per
   vector compare instruction, keyed by element mode.  */
static const struct builtin_description_predicates bdesc_altivec_preds[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
};
4810 /* SPE predicates. */
/* Matched by function code in spe_expand_builtin and expanded via
   spe_expand_predicate_builtin.  The first/last entries are referenced
   as range markers elsewhere, hence the place-holder comments.  */
4811 static struct builtin_description bdesc_spe_predicates[] =
4813 /* Place-holder. Leave as first. */
4814 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
4815 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
4816 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
4817 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
4818 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
4819 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
4820 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
4821 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
4822 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
4823 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
4824 /* Place-holder. Leave as last. */
4825 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
4828 /* SPE evsel predicates. */
/* Matched by function code in spe_expand_builtin and expanded via
   spe_expand_evsel_builtin.  As above, first/last entries double as
   range markers.  */
4829 static struct builtin_description bdesc_spe_evsel[] =
4831 /* Place-holder. Leave as first. */
4832 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
4833 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
4834 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
4835 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
4836 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
4837 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
4838 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
4839 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
4840 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
4841 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
4842 /* Place-holder. Leave as last. */
4843 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
4846 /* ABS* operations. */
/* AltiVec absolute value builtins.  Matched by function code in
   altivec_expand_builtin and expanded by altivec_expand_abs_builtin,
   whose patterns additionally require two scratch registers.  */
4848 static const struct builtin_description bdesc_abs[] =
4850 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
4851 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
4852 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
4853 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
4854 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
4855 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
4856 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
4859 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
4860 foo (VECa). */
/* All of these are expanded by rs6000_expand_unop_builtin, which also
   enforces the 5-bit signed literal restriction on the vspltis* and
   evsplat* entries.  NOTE: the SPE entries must remain a contiguous
   range (see the in-table comments below), since the range
   SPE_BUILTIN_EVABS..SPE_BUILTIN_EVSUBFUSIAAW is relied upon
   elsewhere.  */
4862 static struct builtin_description bdesc_1arg[] =
4864 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
4865 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
4866 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
4867 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
4868 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
4869 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
4870 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
4871 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
4872 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
4873 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
4874 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
4875 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
4876 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
4877 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
4878 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
4879 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
4880 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
4882 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
4883 end with SPE_BUILTIN_EVSUBFUSIAAW. */
4884 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
4885 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
4886 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
4887 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
4888 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
4889 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
4890 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
4891 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
4892 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
4893 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
4894 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
4895 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
4896 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
4897 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
4898 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
4899 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
4900 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
4901 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
4902 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
4903 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
4904 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
4905 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
4906 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
4907 { 0, CODE_FOR_spe_evneg, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
4908 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
4909 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
4910 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
4911 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
4912 { 0, CODE_FOR_spe_evsplatfi, "__builtin_spe_evsplatfi", SPE_BUILTIN_EVSPLATFI },
4913 { 0, CODE_FOR_spe_evsplati, "__builtin_spe_evsplati", SPE_BUILTIN_EVSPLATI },
4915 /* Place-holder. Leave as last unary SPE builtin. */
4916 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
4919 static rtx
4920 rs6000_expand_unop_builtin (icode, arglist, target)
4921 enum insn_code icode;
4922 tree arglist;
4923 rtx target;
4925 rtx pat;
4926 tree arg0 = TREE_VALUE (arglist);
4927 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4928 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4929 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4931 if (icode == CODE_FOR_nothing)
4932 /* Builtin not supported on this processor. */
4933 return 0;
4935 /* If we got invalid arguments bail out before generating bad rtl. */
4936 if (arg0 == error_mark_node)
4937 return const0_rtx;
4939 if (icode == CODE_FOR_altivec_vspltisb
4940 || icode == CODE_FOR_altivec_vspltish
4941 || icode == CODE_FOR_altivec_vspltisw
4942 || icode == CODE_FOR_spe_evsplatfi
4943 || icode == CODE_FOR_spe_evsplati)
4945 /* Only allow 5-bit *signed* literals. */
4946 if (GET_CODE (op0) != CONST_INT
4947 || INTVAL (op0) > 0x1f
4948 || INTVAL (op0) < -0x1f)
4950 error ("argument 1 must be a 5-bit signed literal");
4951 return const0_rtx;
4955 if (target == 0
4956 || GET_MODE (target) != tmode
4957 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4958 target = gen_reg_rtx (tmode);
4960 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4961 op0 = copy_to_mode_reg (mode0, op0);
4963 pat = GEN_FCN (icode) (target, op0);
4964 if (! pat)
4965 return 0;
4966 emit_insn (pat);
4968 return target;
4971 static rtx
4972 altivec_expand_abs_builtin (icode, arglist, target)
4973 enum insn_code icode;
4974 tree arglist;
4975 rtx target;
4977 rtx pat, scratch1, scratch2;
4978 tree arg0 = TREE_VALUE (arglist);
4979 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4980 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4981 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4983 /* If we have invalid arguments, bail out before generating bad rtl. */
4984 if (arg0 == error_mark_node)
4985 return const0_rtx;
4987 if (target == 0
4988 || GET_MODE (target) != tmode
4989 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4990 target = gen_reg_rtx (tmode);
4992 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4993 op0 = copy_to_mode_reg (mode0, op0);
4995 scratch1 = gen_reg_rtx (mode0);
4996 scratch2 = gen_reg_rtx (mode0);
4998 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
4999 if (! pat)
5000 return 0;
5001 emit_insn (pat);
5003 return target;
/* Expand a binary builtin: TARGET = ICODE (ARG0, ARG1).  Returns the
   rtx holding the result, 0 if the builtin is unsupported on this
   processor or the pattern fails to expand, or const0_rtx after
   diagnosing an invalid argument.  */
5006 static rtx
5007 rs6000_expand_binop_builtin (icode, arglist, target)
5008 enum insn_code icode;
5009 tree arglist;
5010 rtx target;
5012 rtx pat;
5013 tree arg0 = TREE_VALUE (arglist);
5014 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5015 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5016 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5017 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5018 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5019 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5021 if (icode == CODE_FOR_nothing)
5022 /* Builtin not supported on this processor. */
5023 return 0;
5025 /* If we got invalid arguments bail out before generating bad rtl. */
5026 if (arg0 == error_mark_node || arg1 == error_mark_node)
5027 return const0_rtx;
/* These insns all take a 5-bit unsigned literal as their second
   operand (a shift count, splat position, or memory offset), so the
   front-end value is validated here before expansion.  */
5029 if (icode == CODE_FOR_altivec_vcfux
5030 || icode == CODE_FOR_altivec_vcfsx
5031 || icode == CODE_FOR_altivec_vctsxs
5032 || icode == CODE_FOR_altivec_vctuxs
5033 || icode == CODE_FOR_altivec_vspltb
5034 || icode == CODE_FOR_altivec_vsplth
5035 || icode == CODE_FOR_altivec_vspltw
5036 || icode == CODE_FOR_spe_evaddiw
5037 || icode == CODE_FOR_spe_evldd
5038 || icode == CODE_FOR_spe_evldh
5039 || icode == CODE_FOR_spe_evldw
5040 || icode == CODE_FOR_spe_evlhhesplat
5041 || icode == CODE_FOR_spe_evlhhossplat
5042 || icode == CODE_FOR_spe_evlhhousplat
5043 || icode == CODE_FOR_spe_evlwhe
5044 || icode == CODE_FOR_spe_evlwhos
5045 || icode == CODE_FOR_spe_evlwhou
5046 || icode == CODE_FOR_spe_evlwhsplat
5047 || icode == CODE_FOR_spe_evlwwsplat
5048 || icode == CODE_FOR_spe_evrlwi
5049 || icode == CODE_FOR_spe_evslwi
5050 || icode == CODE_FOR_spe_evsrwis
5051 || icode == CODE_FOR_spe_evsubifw
5052 || icode == CODE_FOR_spe_evsrwiu)
5054 /* Only allow 5-bit unsigned literals. */
5055 if (TREE_CODE (arg1) != INTEGER_CST
5056 || TREE_INT_CST_LOW (arg1) & ~0x1f)
5058 error ("argument 2 must be a 5-bit unsigned literal");
5059 return const0_rtx;
/* Make sure TARGET is a register the result pattern accepts.  */
5063 if (target == 0
5064 || GET_MODE (target) != tmode
5065 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5066 target = gen_reg_rtx (tmode);
5068 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5069 op0 = copy_to_mode_reg (mode0, op0);
5070 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5071 op1 = copy_to_mode_reg (mode1, op1);
5073 pat = GEN_FCN (icode) (target, op0, op1);
5074 if (! pat)
5075 return 0;
5076 emit_insn (pat);
5078 return target;
/* Expand an AltiVec predicate builtin (vec_all_*/vec_any_*).  The
   argument list is (cr6_form, v1, v2): CR6_FORM is a literal 0..3
   selecting which CR6 bit/sense to extract, and V1/V2 are the vectors
   to compare.  OPCODE is the dot-form compare mnemonic from
   bdesc_altivec_preds, passed to the pattern as a SYMBOL_REF.
   Returns an SImode rtx holding the 0/1 predicate result.  */
5081 static rtx
5082 altivec_expand_predicate_builtin (icode, opcode, arglist, target)
5083 enum insn_code icode;
5084 const char *opcode;
5085 tree arglist;
5086 rtx target;
5088 rtx pat, scratch;
5089 tree cr6_form = TREE_VALUE (arglist);
5090 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
5091 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5092 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5093 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5094 enum machine_mode tmode = SImode;
5095 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5096 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5097 int cr6_form_int;
5099 if (TREE_CODE (cr6_form) != INTEGER_CST)
5101 error ("argument 1 of __builtin_altivec_predicate must be a constant");
5102 return const0_rtx;
5104 else
5105 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
/* Both compare operands must have the same vector mode.  */
5107 if (mode0 != mode1)
5108 abort ();
5110 /* If we have invalid arguments, bail out before generating bad rtl. */
5111 if (arg0 == error_mark_node || arg1 == error_mark_node)
5112 return const0_rtx;
5114 if (target == 0
5115 || GET_MODE (target) != tmode
5116 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5117 target = gen_reg_rtx (tmode);
5119 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5120 op0 = copy_to_mode_reg (mode0, op0);
5121 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5122 op1 = copy_to_mode_reg (mode1, op1);
/* The compare's vector result is not needed, only its CR6 side
   effect; it lands in a throwaway register.  */
5124 scratch = gen_reg_rtx (mode0);
5126 pat = GEN_FCN (icode) (scratch, op0, op1,
5127 gen_rtx (SYMBOL_REF, Pmode, opcode));
5128 if (! pat)
5129 return 0;
5130 emit_insn (pat);
5132 /* The vec_any* and vec_all* predicates use the same opcodes for two
5133 different operations, but the bits in CR6 will be different
5134 depending on what information we want. So we have to play tricks
5135 with CR6 to get the right bits out.
5137 If you think this is disgusting, look at the specs for the
5138 AltiVec predicates. */
5140 switch (cr6_form_int)
5142 case 0:
5143 emit_insn (gen_cr6_test_for_zero (target));
5144 break;
5145 case 1:
5146 emit_insn (gen_cr6_test_for_zero_reverse (target));
5147 break;
5148 case 2:
5149 emit_insn (gen_cr6_test_for_lt (target));
5150 break;
5151 case 3:
5152 emit_insn (gen_cr6_test_for_lt_reverse (target));
5153 break;
5154 default:
5155 error ("argument 1 of __builtin_altivec_predicate is out of range");
5156 break;
5159 return target;
5162 static rtx
5163 altivec_expand_stv_builtin (icode, arglist)
5164 enum insn_code icode;
5165 tree arglist;
5167 tree arg0 = TREE_VALUE (arglist);
5168 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5169 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5170 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5171 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5172 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5173 rtx pat;
5174 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
5175 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
5176 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
5178 /* Invalid arguments. Bail before doing anything stoopid! */
5179 if (arg0 == error_mark_node
5180 || arg1 == error_mark_node
5181 || arg2 == error_mark_node)
5182 return const0_rtx;
5184 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
5185 op0 = copy_to_mode_reg (mode2, op0);
5186 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
5187 op1 = copy_to_mode_reg (mode0, op1);
5188 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
5189 op2 = copy_to_mode_reg (mode1, op2);
5191 pat = GEN_FCN (icode) (op1, op2, op0);
5192 if (pat)
5193 emit_insn (pat);
5194 return NULL_RTX;
5197 static rtx
5198 rs6000_expand_ternop_builtin (icode, arglist, target)
5199 enum insn_code icode;
5200 tree arglist;
5201 rtx target;
5203 rtx pat;
5204 tree arg0 = TREE_VALUE (arglist);
5205 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5206 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5207 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5208 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5209 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5210 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5211 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5212 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5213 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
5215 if (icode == CODE_FOR_nothing)
5216 /* Builtin not supported on this processor. */
5217 return 0;
5219 /* If we got invalid arguments bail out before generating bad rtl. */
5220 if (arg0 == error_mark_node
5221 || arg1 == error_mark_node
5222 || arg2 == error_mark_node)
5223 return const0_rtx;
5225 if (icode == CODE_FOR_altivec_vsldoi_4sf
5226 || icode == CODE_FOR_altivec_vsldoi_4si
5227 || icode == CODE_FOR_altivec_vsldoi_8hi
5228 || icode == CODE_FOR_altivec_vsldoi_16qi)
5230 /* Only allow 4-bit unsigned literals. */
5231 if (TREE_CODE (arg2) != INTEGER_CST
5232 || TREE_INT_CST_LOW (arg2) & ~0xf)
5234 error ("argument 3 must be a 4-bit unsigned literal");
5235 return const0_rtx;
5239 if (target == 0
5240 || GET_MODE (target) != tmode
5241 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5242 target = gen_reg_rtx (tmode);
5244 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5245 op0 = copy_to_mode_reg (mode0, op0);
5246 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5247 op1 = copy_to_mode_reg (mode1, op1);
5248 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
5249 op2 = copy_to_mode_reg (mode2, op2);
5251 pat = GEN_FCN (icode) (target, op0, op1, op2);
5252 if (! pat)
5253 return 0;
5254 emit_insn (pat);
5256 return target;
5259 /* Expand the lvx builtins. */
5260 static rtx
5261 altivec_expand_ld_builtin (exp, target, expandedp)
5262 tree exp;
5263 rtx target;
5264 bool *expandedp;
5266 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5267 tree arglist = TREE_OPERAND (exp, 1);
5268 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5269 tree arg0;
5270 enum machine_mode tmode, mode0;
5271 rtx pat, op0;
5272 enum insn_code icode;
5274 switch (fcode)
5276 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
5277 icode = CODE_FOR_altivec_lvx_16qi;
5278 break;
5279 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
5280 icode = CODE_FOR_altivec_lvx_8hi;
5281 break;
5282 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
5283 icode = CODE_FOR_altivec_lvx_4si;
5284 break;
5285 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
5286 icode = CODE_FOR_altivec_lvx_4sf;
5287 break;
5288 default:
5289 *expandedp = false;
5290 return NULL_RTX;
5293 *expandedp = true;
5295 arg0 = TREE_VALUE (arglist);
5296 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5297 tmode = insn_data[icode].operand[0].mode;
5298 mode0 = insn_data[icode].operand[1].mode;
5300 if (target == 0
5301 || GET_MODE (target) != tmode
5302 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5303 target = gen_reg_rtx (tmode);
5305 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5306 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
5308 pat = GEN_FCN (icode) (target, op0);
5309 if (! pat)
5310 return 0;
5311 emit_insn (pat);
5312 return target;
5315 /* Expand the stvx builtins. */
5316 static rtx
5317 altivec_expand_st_builtin (exp, target, expandedp)
5318 tree exp;
5319 rtx target ATTRIBUTE_UNUSED;
5320 bool *expandedp;
5322 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5323 tree arglist = TREE_OPERAND (exp, 1);
5324 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5325 tree arg0, arg1;
5326 enum machine_mode mode0, mode1;
5327 rtx pat, op0, op1;
5328 enum insn_code icode;
5330 switch (fcode)
5332 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
5333 icode = CODE_FOR_altivec_stvx_16qi;
5334 break;
5335 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
5336 icode = CODE_FOR_altivec_stvx_8hi;
5337 break;
5338 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
5339 icode = CODE_FOR_altivec_stvx_4si;
5340 break;
5341 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
5342 icode = CODE_FOR_altivec_stvx_4sf;
5343 break;
5344 default:
5345 *expandedp = false;
5346 return NULL_RTX;
5349 arg0 = TREE_VALUE (arglist);
5350 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5351 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5352 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5353 mode0 = insn_data[icode].operand[0].mode;
5354 mode1 = insn_data[icode].operand[1].mode;
5356 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5357 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
5358 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
5359 op1 = copy_to_mode_reg (mode1, op1);
5361 pat = GEN_FCN (icode) (op0, op1);
5362 if (pat)
5363 emit_insn (pat);
5365 *expandedp = true;
5366 return NULL_RTX;
5369 /* Expand the dst builtins. */
/* Looks the function code up in bdesc_dst; on a match emits the data
   stream touch insn, sets *EXPANDEDP and returns NULL_RTX (dst has no
   value).  The third argument must be a 2-bit literal stream selector
   and is passed to the pattern unchecked by any operand predicate.  */
5370 static rtx
5371 altivec_expand_dst_builtin (exp, target, expandedp)
5372 tree exp;
5373 rtx target ATTRIBUTE_UNUSED;
5374 bool *expandedp;
5376 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5377 tree arglist = TREE_OPERAND (exp, 1);
5378 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5379 tree arg0, arg1, arg2;
5380 enum machine_mode mode0, mode1, mode2;
5381 rtx pat, op0, op1, op2;
5382 struct builtin_description *d;
5383 size_t i;
5385 *expandedp = false;
5387 /* Handle DST variants. */
5388 d = (struct builtin_description *) bdesc_dst;
5389 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
5390 if (d->code == fcode)
5392 arg0 = TREE_VALUE (arglist);
5393 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5394 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5395 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5396 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5397 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5398 mode0 = insn_data[d->icode].operand[0].mode;
5399 mode1 = insn_data[d->icode].operand[1].mode;
5400 mode2 = insn_data[d->icode].operand[2].mode;
5402 /* Invalid arguments, bail out before generating bad rtl. */
5403 if (arg0 == error_mark_node
5404 || arg1 == error_mark_node
5405 || arg2 == error_mark_node)
5406 return const0_rtx;
5408 if (TREE_CODE (arg2) != INTEGER_CST
5409 || TREE_INT_CST_LOW (arg2) & ~0x3)
5411 error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
5412 return const0_rtx;
5415 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
5416 op0 = copy_to_mode_reg (mode0, op0);
5417 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
5418 op1 = copy_to_mode_reg (mode1, op1);
/* op2 is the validated 2-bit literal; it goes in directly.  */
5420 pat = GEN_FCN (d->icode) (op0, op1, op2);
5421 if (pat != 0)
5422 emit_insn (pat);
5424 *expandedp = true;
5425 return NULL_RTX;
/* Function code didn't match any dst builtin; *expandedp stays false.  */
5428 return NULL_RTX;
5431 /* Expand the builtin in EXP and store the result in TARGET. Store
5432 true in *EXPANDEDP if we found a builtin to expand. */
/* Top-level AltiVec expander: first delegates to the specialized
   ld/st/dst expanders, then handles the remaining special cases
   (stores, VSCR access, data-stream insns), then the abs and
   predicate tables, and finally the lv* loads.  */
5433 static rtx
5434 altivec_expand_builtin (exp, target, expandedp)
5435 tree exp;
5436 rtx target;
5437 bool *expandedp;
5439 struct builtin_description *d;
5440 struct builtin_description_predicates *dp;
5441 size_t i;
5442 enum insn_code icode;
5443 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5444 tree arglist = TREE_OPERAND (exp, 1);
5445 tree arg0;
5446 rtx op0, pat;
5447 enum machine_mode tmode, mode0;
5448 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
/* Each helper sets *expandedp itself; stop at the first match.  */
5450 target = altivec_expand_ld_builtin (exp, target, expandedp);
5451 if (*expandedp)
5452 return target;
5454 target = altivec_expand_st_builtin (exp, target, expandedp);
5455 if (*expandedp)
5456 return target;
5458 target = altivec_expand_dst_builtin (exp, target, expandedp);
5459 if (*expandedp)
5460 return target;
/* Assume success from here on; reset to false at the end if FCODE
   turns out not to be ours.  */
5462 *expandedp = true;
5464 switch (fcode)
5466 case ALTIVEC_BUILTIN_STVX:
5467 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
5468 case ALTIVEC_BUILTIN_STVEBX:
5469 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
5470 case ALTIVEC_BUILTIN_STVEHX:
5471 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
5472 case ALTIVEC_BUILTIN_STVEWX:
5473 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
5474 case ALTIVEC_BUILTIN_STVXL:
5475 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
5477 case ALTIVEC_BUILTIN_MFVSCR:
5478 icode = CODE_FOR_altivec_mfvscr;
5479 tmode = insn_data[icode].operand[0].mode;
5481 if (target == 0
5482 || GET_MODE (target) != tmode
5483 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5484 target = gen_reg_rtx (tmode);
5486 pat = GEN_FCN (icode) (target);
5487 if (! pat)
5488 return 0;
5489 emit_insn (pat);
5490 return target;
5492 case ALTIVEC_BUILTIN_MTVSCR:
5493 icode = CODE_FOR_altivec_mtvscr;
5494 arg0 = TREE_VALUE (arglist);
5495 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5496 mode0 = insn_data[icode].operand[0].mode;
5498 /* If we got invalid arguments bail out before generating bad rtl. */
5499 if (arg0 == error_mark_node)
5500 return const0_rtx;
5502 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5503 op0 = copy_to_mode_reg (mode0, op0);
5505 pat = GEN_FCN (icode) (op0);
5506 if (pat)
5507 emit_insn (pat);
5508 return NULL_RTX;
5510 case ALTIVEC_BUILTIN_DSSALL:
5511 emit_insn (gen_altivec_dssall ());
5512 return NULL_RTX;
5514 case ALTIVEC_BUILTIN_DSS:
5515 icode = CODE_FOR_altivec_dss;
5516 arg0 = TREE_VALUE (arglist);
5517 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5518 mode0 = insn_data[icode].operand[0].mode;
5520 /* If we got invalid arguments bail out before generating bad rtl. */
5521 if (arg0 == error_mark_node)
5522 return const0_rtx;
/* dss takes a 2-bit literal selecting which data stream to stop.  */
5524 if (TREE_CODE (arg0) != INTEGER_CST
5525 || TREE_INT_CST_LOW (arg0) & ~0x3)
5527 error ("argument to dss must be a 2-bit unsigned literal");
5528 return const0_rtx;
5531 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5532 op0 = copy_to_mode_reg (mode0, op0);
5534 emit_insn (gen_altivec_dss (op0));
5535 return NULL_RTX;
5538 /* Expand abs* operations. */
5539 d = (struct builtin_description *) bdesc_abs;
5540 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
5541 if (d->code == fcode)
5542 return altivec_expand_abs_builtin (d->icode, arglist, target);
5544 /* Expand the AltiVec predicates. */
5545 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
5546 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
5547 if (dp->code == fcode)
5548 return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
5550 /* LV* are funky. We initialized them differently. */
5551 switch (fcode)
5553 case ALTIVEC_BUILTIN_LVSL:
5554 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsl,
5555 arglist, target);
5556 case ALTIVEC_BUILTIN_LVSR:
5557 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsr,
5558 arglist, target);
5559 case ALTIVEC_BUILTIN_LVEBX:
5560 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvebx,
5561 arglist, target);
5562 case ALTIVEC_BUILTIN_LVEHX:
5563 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvehx,
5564 arglist, target);
5565 case ALTIVEC_BUILTIN_LVEWX:
5566 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvewx,
5567 arglist, target);
5568 case ALTIVEC_BUILTIN_LVXL:
5569 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvxl,
5570 arglist, target);
5571 case ALTIVEC_BUILTIN_LVX:
5572 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvx,
5573 arglist, target);
5574 default:
5575 break;
5576 /* Fall through. */
/* Not an AltiVec builtin we know how to expand; undo the optimistic
   *expandedp set above so the caller keeps looking.  */
5579 *expandedp = false;
5580 return NULL_RTX;
5583 /* Binops that need to be initialized manually, but can be expanded
5584 automagically by rs6000_expand_binop_builtin. */
/* Matched by function code in spe_expand_builtin; the non-X (literal
   offset) variants in this table are also on rs6000_expand_binop_builtin's
   list of icodes requiring a 5-bit unsigned literal second operand.  */
5585 static struct builtin_description bdesc_2arg_spe[] =
5587 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
5588 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
5589 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
5590 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
5591 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
5592 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
5593 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
5594 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
5595 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
5596 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
5597 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
5598 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
5599 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
5600 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
5601 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
5602 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
5603 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
5604 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
5605 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
5606 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
5607 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
5608 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
5611 /* Expand the builtin in EXP and store the result in TARGET. Store
5612 true in *EXPANDEDP if we found a builtin to expand.
5614 This expands the SPE builtins that are not simple unary and binary
5615 operations. */
5616 static rtx
5617 spe_expand_builtin (exp, target, expandedp)
5618 tree exp;
5619 rtx target;
5620 bool *expandedp;
5622 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5623 tree arglist = TREE_OPERAND (exp, 1);
5624 tree arg1, arg0;
5625 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5626 enum insn_code icode;
5627 enum machine_mode tmode, mode0;
5628 rtx pat, op0;
5629 struct builtin_description *d;
5630 size_t i;
5632 *expandedp = true;
5634 /* Syntax check for a 5-bit unsigned immediate. */
5635 switch (fcode)
5637 case SPE_BUILTIN_EVSTDD:
5638 case SPE_BUILTIN_EVSTDH:
5639 case SPE_BUILTIN_EVSTDW:
5640 case SPE_BUILTIN_EVSTWHE:
5641 case SPE_BUILTIN_EVSTWHO:
5642 case SPE_BUILTIN_EVSTWWE:
5643 case SPE_BUILTIN_EVSTWWO:
5644 arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5645 if (TREE_CODE (arg1) != INTEGER_CST
5646 || TREE_INT_CST_LOW (arg1) & ~0x1f)
5648 error ("argument 2 must be a 5-bit unsigned literal");
5649 return const0_rtx;
5651 break;
5652 default:
5653 break;
5656 d = (struct builtin_description *) bdesc_2arg_spe;
5657 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
5658 if (d->code == fcode)
5659 return rs6000_expand_binop_builtin (d->icode, arglist, target);
5661 d = (struct builtin_description *) bdesc_spe_predicates;
5662 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
5663 if (d->code == fcode)
5664 return spe_expand_predicate_builtin (d->icode, arglist, target);
5666 d = (struct builtin_description *) bdesc_spe_evsel;
5667 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
5668 if (d->code == fcode)
5669 return spe_expand_evsel_builtin (d->icode, arglist, target);
5671 switch (fcode)
5673 case SPE_BUILTIN_EVSTDDX:
5674 return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
5675 case SPE_BUILTIN_EVSTDHX:
5676 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
5677 case SPE_BUILTIN_EVSTDWX:
5678 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
5679 case SPE_BUILTIN_EVSTWHEX:
5680 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
5681 case SPE_BUILTIN_EVSTWHOX:
5682 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
5683 case SPE_BUILTIN_EVSTWWEX:
5684 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
5685 case SPE_BUILTIN_EVSTWWOX:
5686 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
5687 case SPE_BUILTIN_EVSTDD:
5688 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
5689 case SPE_BUILTIN_EVSTDH:
5690 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
5691 case SPE_BUILTIN_EVSTDW:
5692 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
5693 case SPE_BUILTIN_EVSTWHE:
5694 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
5695 case SPE_BUILTIN_EVSTWHO:
5696 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
5697 case SPE_BUILTIN_EVSTWWE:
5698 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
5699 case SPE_BUILTIN_EVSTWWO:
5700 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
5701 case SPE_BUILTIN_MFSPEFSCR:
5702 icode = CODE_FOR_spe_mfspefscr;
5703 tmode = insn_data[icode].operand[0].mode;
5705 if (target == 0
5706 || GET_MODE (target) != tmode
5707 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5708 target = gen_reg_rtx (tmode);
5710 pat = GEN_FCN (icode) (target);
5711 if (! pat)
5712 return 0;
5713 emit_insn (pat);
5714 return target;
5715 case SPE_BUILTIN_MTSPEFSCR:
5716 icode = CODE_FOR_spe_mtspefscr;
5717 arg0 = TREE_VALUE (arglist);
5718 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5719 mode0 = insn_data[icode].operand[0].mode;
5721 if (arg0 == error_mark_node)
5722 return const0_rtx;
5724 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5725 op0 = copy_to_mode_reg (mode0, op0);
5727 pat = GEN_FCN (icode) (op0);
5728 if (pat)
5729 emit_insn (pat);
5730 return NULL_RTX;
5731 default:
5732 break;
5735 *expandedp = false;
5736 return NULL_RTX;
5739 static rtx
5740 spe_expand_predicate_builtin (icode, arglist, target)
5741 enum insn_code icode;
5742 tree arglist;
5743 rtx target;
5745 rtx pat, scratch, tmp;
5746 tree form = TREE_VALUE (arglist);
5747 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
5748 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5749 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5750 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5751 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5752 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5753 int form_int;
5754 enum rtx_code code;
5756 if (TREE_CODE (form) != INTEGER_CST)
5758 error ("argument 1 of __builtin_spe_predicate must be a constant");
5759 return const0_rtx;
5761 else
5762 form_int = TREE_INT_CST_LOW (form);
5764 if (mode0 != mode1)
5765 abort ();
5767 if (arg0 == error_mark_node || arg1 == error_mark_node)
5768 return const0_rtx;
5770 if (target == 0
5771 || GET_MODE (target) != SImode
5772 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
5773 target = gen_reg_rtx (SImode);
5775 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5776 op0 = copy_to_mode_reg (mode0, op0);
5777 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5778 op1 = copy_to_mode_reg (mode1, op1);
5780 scratch = gen_reg_rtx (CCmode);
5782 pat = GEN_FCN (icode) (scratch, op0, op1);
5783 if (! pat)
5784 return const0_rtx;
5785 emit_insn (pat);
5787 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
5788 _lower_. We use one compare, but look in different bits of the
5789 CR for each variant.
5791 There are 2 elements in each SPE simd type (upper/lower). The CR
5792 bits are set as follows:
5794 BIT0 | BIT 1 | BIT 2 | BIT 3
5795 U | L | (U | L) | (U & L)
5797 So, for an "all" relationship, BIT 3 would be set.
5798 For an "any" relationship, BIT 2 would be set. Etc.
5800 Following traditional nomenclature, these bits map to:
5802 BIT0 | BIT 1 | BIT 2 | BIT 3
5803 LT | GT | EQ | OV
5805 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
5808 switch (form_int)
5810 /* All variant. OV bit. */
5811 case 0:
5812 /* We need to get to the OV bit, which is the ORDERED bit. We
5813 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
5814 that's ugly and will trigger a validate_condition_mode abort.
5815 So let's just use another pattern. */
5816 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
5817 return target;
5818 /* Any variant. EQ bit. */
5819 case 1:
5820 code = EQ;
5821 break;
5822 /* Upper variant. LT bit. */
5823 case 2:
5824 code = LT;
5825 break;
5826 /* Lower variant. GT bit. */
5827 case 3:
5828 code = GT;
5829 break;
5830 default:
5831 error ("argument 1 of __builtin_spe_predicate is out of range");
5832 return const0_rtx;
5835 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
5836 emit_move_insn (target, tmp);
5838 return target;
5841 /* The evsel builtins look like this:
5843 e = __builtin_spe_evsel_OP (a, b, c, d);
5845 and work like this:
5847 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
5848 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
5851 static rtx
5852 spe_expand_evsel_builtin (icode, arglist, target)
5853 enum insn_code icode;
5854 tree arglist;
5855 rtx target;
5857 rtx pat, scratch;
5858 tree arg0 = TREE_VALUE (arglist);
5859 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5860 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5861 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
5862 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5863 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5864 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5865 rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
5866 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5867 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5869 if (mode0 != mode1)
5870 abort ();
5872 if (arg0 == error_mark_node || arg1 == error_mark_node
5873 || arg2 == error_mark_node || arg3 == error_mark_node)
5874 return const0_rtx;
5876 if (target == 0
5877 || GET_MODE (target) != mode0
5878 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
5879 target = gen_reg_rtx (mode0);
5881 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5882 op0 = copy_to_mode_reg (mode0, op0);
5883 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
5884 op1 = copy_to_mode_reg (mode0, op1);
5885 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
5886 op2 = copy_to_mode_reg (mode0, op2);
5887 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
5888 op3 = copy_to_mode_reg (mode0, op3);
5890 /* Generate the compare. */
5891 scratch = gen_reg_rtx (CCmode);
5892 pat = GEN_FCN (icode) (scratch, op0, op1);
5893 if (! pat)
5894 return const0_rtx;
5895 emit_insn (pat);
5897 if (mode0 == V2SImode)
5898 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
5899 else
5900 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
5902 return target;
5905 /* Expand an expression EXP that calls a built-in function,
5906 with result going to TARGET if that's convenient
5907 (and in mode MODE if that's convenient).
5908 SUBTARGET may be used as the target for computing one of EXP's operands.
5909 IGNORE is nonzero if the value is to be ignored. */
5911 static rtx
5912 rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
5913 tree exp;
5914 rtx target;
5915 rtx subtarget ATTRIBUTE_UNUSED;
5916 enum machine_mode mode ATTRIBUTE_UNUSED;
5917 int ignore ATTRIBUTE_UNUSED;
5919 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5920 tree arglist = TREE_OPERAND (exp, 1);
5921 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5922 struct builtin_description *d;
5923 size_t i;
5924 rtx ret;
5925 bool success;
5927 if (TARGET_ALTIVEC)
5929 ret = altivec_expand_builtin (exp, target, &success);
5931 if (success)
5932 return ret;
5934 if (TARGET_SPE)
5936 ret = spe_expand_builtin (exp, target, &success);
5938 if (success)
5939 return ret;
5942 if (TARGET_ALTIVEC || TARGET_SPE)
5944 /* Handle simple unary operations. */
5945 d = (struct builtin_description *) bdesc_1arg;
5946 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
5947 if (d->code == fcode)
5948 return rs6000_expand_unop_builtin (d->icode, arglist, target);
5950 /* Handle simple binary operations. */
5951 d = (struct builtin_description *) bdesc_2arg;
5952 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
5953 if (d->code == fcode)
5954 return rs6000_expand_binop_builtin (d->icode, arglist, target);
5956 /* Handle simple ternary operations. */
5957 d = (struct builtin_description *) bdesc_3arg;
5958 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
5959 if (d->code == fcode)
5960 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
5963 abort ();
5964 return NULL_RTX;
5967 static void
5968 rs6000_init_builtins ()
5970 opaque_V2SI_type_node = copy_node (V2SI_type_node);
5971 opaque_V2SF_type_node = copy_node (V2SF_type_node);
5972 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
5974 if (TARGET_SPE)
5975 spe_init_builtins ();
5976 if (TARGET_ALTIVEC)
5977 altivec_init_builtins ();
5978 if (TARGET_ALTIVEC || TARGET_SPE)
5979 rs6000_common_init_builtins ();
5982 /* Search through a set of builtins and enable the mask bits.
5983 DESC is an array of builtins.
5984 SIZE is the total number of builtins.
5985 START is the builtin enum at which to start.
5986 END is the builtin enum at which to end. */
5987 static void
5988 enable_mask_for_builtins (desc, size, start, end)
5989 struct builtin_description *desc;
5990 int size;
5991 enum rs6000_builtins start, end;
5993 int i;
5995 for (i = 0; i < size; ++i)
5996 if (desc[i].code == start)
5997 break;
5999 if (i == size)
6000 return;
6002 for (; i < size; ++i)
6004 /* Flip all the bits on. */
6005 desc[i].mask = target_flags;
6006 if (desc[i].code == end)
6007 break;
6011 static void
6012 spe_init_builtins ()
6014 tree endlink = void_list_node;
6015 tree puint_type_node = build_pointer_type (unsigned_type_node);
6016 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
6017 struct builtin_description *d;
6018 size_t i;
6020 tree v2si_ftype_4_v2si
6021 = build_function_type
6022 (opaque_V2SI_type_node,
6023 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6024 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6025 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6026 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6027 endlink)))));
6029 tree v2sf_ftype_4_v2sf
6030 = build_function_type
6031 (opaque_V2SF_type_node,
6032 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6033 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6034 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6035 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6036 endlink)))));
6038 tree int_ftype_int_v2si_v2si
6039 = build_function_type
6040 (integer_type_node,
6041 tree_cons (NULL_TREE, integer_type_node,
6042 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6043 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6044 endlink))));
6046 tree int_ftype_int_v2sf_v2sf
6047 = build_function_type
6048 (integer_type_node,
6049 tree_cons (NULL_TREE, integer_type_node,
6050 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6051 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6052 endlink))));
6054 tree void_ftype_v2si_puint_int
6055 = build_function_type (void_type_node,
6056 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6057 tree_cons (NULL_TREE, puint_type_node,
6058 tree_cons (NULL_TREE,
6059 integer_type_node,
6060 endlink))));
6062 tree void_ftype_v2si_puint_char
6063 = build_function_type (void_type_node,
6064 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6065 tree_cons (NULL_TREE, puint_type_node,
6066 tree_cons (NULL_TREE,
6067 char_type_node,
6068 endlink))));
6070 tree void_ftype_v2si_pv2si_int
6071 = build_function_type (void_type_node,
6072 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6073 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6074 tree_cons (NULL_TREE,
6075 integer_type_node,
6076 endlink))));
6078 tree void_ftype_v2si_pv2si_char
6079 = build_function_type (void_type_node,
6080 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6081 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6082 tree_cons (NULL_TREE,
6083 char_type_node,
6084 endlink))));
6086 tree void_ftype_int
6087 = build_function_type (void_type_node,
6088 tree_cons (NULL_TREE, integer_type_node, endlink));
6090 tree int_ftype_void
6091 = build_function_type (integer_type_node, endlink);
6093 tree v2si_ftype_pv2si_int
6094 = build_function_type (opaque_V2SI_type_node,
6095 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6096 tree_cons (NULL_TREE, integer_type_node,
6097 endlink)));
6099 tree v2si_ftype_puint_int
6100 = build_function_type (opaque_V2SI_type_node,
6101 tree_cons (NULL_TREE, puint_type_node,
6102 tree_cons (NULL_TREE, integer_type_node,
6103 endlink)));
6105 tree v2si_ftype_pushort_int
6106 = build_function_type (opaque_V2SI_type_node,
6107 tree_cons (NULL_TREE, pushort_type_node,
6108 tree_cons (NULL_TREE, integer_type_node,
6109 endlink)));
6111 /* The initialization of the simple binary and unary builtins is
6112 done in rs6000_common_init_builtins, but we have to enable the
6113 mask bits here manually because we have run out of `target_flags'
6114 bits. We really need to redesign this mask business. */
6116 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
6117 ARRAY_SIZE (bdesc_2arg),
6118 SPE_BUILTIN_EVADDW,
6119 SPE_BUILTIN_EVXOR);
6120 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
6121 ARRAY_SIZE (bdesc_1arg),
6122 SPE_BUILTIN_EVABS,
6123 SPE_BUILTIN_EVSUBFUSIAAW);
6124 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
6125 ARRAY_SIZE (bdesc_spe_predicates),
6126 SPE_BUILTIN_EVCMPEQ,
6127 SPE_BUILTIN_EVFSTSTLT);
6128 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
6129 ARRAY_SIZE (bdesc_spe_evsel),
6130 SPE_BUILTIN_EVSEL_CMPGTS,
6131 SPE_BUILTIN_EVSEL_FSTSTEQ);
6133 /* Initialize irregular SPE builtins. */
6135 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
6136 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
6137 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
6138 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
6139 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
6140 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
6141 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
6142 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
6143 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
6144 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
6145 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
6146 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
6147 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
6148 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
6149 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
6150 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
6152 /* Loads. */
6153 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
6154 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
6155 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
6156 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
6157 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
6158 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
6159 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
6160 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
6161 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
6162 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
6163 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
6164 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
6165 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
6166 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
6167 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
6168 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
6169 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
6170 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
6171 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
6172 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
6173 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
6174 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
6176 /* Predicates. */
6177 d = (struct builtin_description *) bdesc_spe_predicates;
6178 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
6180 tree type;
6182 switch (insn_data[d->icode].operand[1].mode)
6184 case V2SImode:
6185 type = int_ftype_int_v2si_v2si;
6186 break;
6187 case V2SFmode:
6188 type = int_ftype_int_v2sf_v2sf;
6189 break;
6190 default:
6191 abort ();
6194 def_builtin (d->mask, d->name, type, d->code);
6197 /* Evsel predicates. */
6198 d = (struct builtin_description *) bdesc_spe_evsel;
6199 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
6201 tree type;
6203 switch (insn_data[d->icode].operand[1].mode)
6205 case V2SImode:
6206 type = v2si_ftype_4_v2si;
6207 break;
6208 case V2SFmode:
6209 type = v2sf_ftype_4_v2sf;
6210 break;
6211 default:
6212 abort ();
6215 def_builtin (d->mask, d->name, type, d->code);
6219 static void
6220 altivec_init_builtins ()
6222 struct builtin_description *d;
6223 struct builtin_description_predicates *dp;
6224 size_t i;
6225 tree pfloat_type_node = build_pointer_type (float_type_node);
6226 tree pint_type_node = build_pointer_type (integer_type_node);
6227 tree pshort_type_node = build_pointer_type (short_integer_type_node);
6228 tree pchar_type_node = build_pointer_type (char_type_node);
6230 tree pvoid_type_node = build_pointer_type (void_type_node);
6232 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
6233 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
6234 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
6235 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
6237 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
6239 tree int_ftype_int_v4si_v4si
6240 = build_function_type_list (integer_type_node,
6241 integer_type_node, V4SI_type_node,
6242 V4SI_type_node, NULL_TREE);
6243 tree v4sf_ftype_pcfloat
6244 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
6245 tree void_ftype_pfloat_v4sf
6246 = build_function_type_list (void_type_node,
6247 pfloat_type_node, V4SF_type_node, NULL_TREE);
6248 tree v4si_ftype_pcint
6249 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
6250 tree void_ftype_pint_v4si
6251 = build_function_type_list (void_type_node,
6252 pint_type_node, V4SI_type_node, NULL_TREE);
6253 tree v8hi_ftype_pcshort
6254 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
6255 tree void_ftype_pshort_v8hi
6256 = build_function_type_list (void_type_node,
6257 pshort_type_node, V8HI_type_node, NULL_TREE);
6258 tree v16qi_ftype_pcchar
6259 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
6260 tree void_ftype_pchar_v16qi
6261 = build_function_type_list (void_type_node,
6262 pchar_type_node, V16QI_type_node, NULL_TREE);
6263 tree void_ftype_v4si
6264 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
6265 tree v8hi_ftype_void
6266 = build_function_type (V8HI_type_node, void_list_node);
6267 tree void_ftype_void
6268 = build_function_type (void_type_node, void_list_node);
6269 tree void_ftype_qi
6270 = build_function_type_list (void_type_node, char_type_node, NULL_TREE);
6272 tree v16qi_ftype_int_pcvoid
6273 = build_function_type_list (V16QI_type_node,
6274 integer_type_node, pcvoid_type_node, NULL_TREE);
6275 tree v8hi_ftype_int_pcvoid
6276 = build_function_type_list (V8HI_type_node,
6277 integer_type_node, pcvoid_type_node, NULL_TREE);
6278 tree v4si_ftype_int_pcvoid
6279 = build_function_type_list (V4SI_type_node,
6280 integer_type_node, pcvoid_type_node, NULL_TREE);
6282 tree void_ftype_v4si_int_pvoid
6283 = build_function_type_list (void_type_node,
6284 V4SI_type_node, integer_type_node,
6285 pvoid_type_node, NULL_TREE);
6286 tree void_ftype_v16qi_int_pvoid
6287 = build_function_type_list (void_type_node,
6288 V16QI_type_node, integer_type_node,
6289 pvoid_type_node, NULL_TREE);
6290 tree void_ftype_v8hi_int_pvoid
6291 = build_function_type_list (void_type_node,
6292 V8HI_type_node, integer_type_node,
6293 pvoid_type_node, NULL_TREE);
6294 tree int_ftype_int_v8hi_v8hi
6295 = build_function_type_list (integer_type_node,
6296 integer_type_node, V8HI_type_node,
6297 V8HI_type_node, NULL_TREE);
6298 tree int_ftype_int_v16qi_v16qi
6299 = build_function_type_list (integer_type_node,
6300 integer_type_node, V16QI_type_node,
6301 V16QI_type_node, NULL_TREE);
6302 tree int_ftype_int_v4sf_v4sf
6303 = build_function_type_list (integer_type_node,
6304 integer_type_node, V4SF_type_node,
6305 V4SF_type_node, NULL_TREE);
6306 tree v4si_ftype_v4si
6307 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
6308 tree v8hi_ftype_v8hi
6309 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
6310 tree v16qi_ftype_v16qi
6311 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
6312 tree v4sf_ftype_v4sf
6313 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
6314 tree void_ftype_pcvoid_int_char
6315 = build_function_type_list (void_type_node,
6316 pcvoid_type_node, integer_type_node,
6317 char_type_node, NULL_TREE);
6319 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
6320 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
6321 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
6322 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
6323 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
6324 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
6325 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
6326 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
6327 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
6328 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
6329 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
6330 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
6331 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
6332 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
6333 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
6334 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
6335 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
6336 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
6337 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
6338 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
6339 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVSL);
6340 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVSR);
6341 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEBX);
6342 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEHX);
6343 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEWX);
6344 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVXL);
6345 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVX);
6346 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
6347 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
6348 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
6349 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
6350 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);
6352 /* Add the DST variants. */
6353 d = (struct builtin_description *) bdesc_dst;
6354 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
6355 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_char, d->code);
6357 /* Initialize the predicates. */
6358 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
6359 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
6361 enum machine_mode mode1;
6362 tree type;
6364 mode1 = insn_data[dp->icode].operand[1].mode;
6366 switch (mode1)
6368 case V4SImode:
6369 type = int_ftype_int_v4si_v4si;
6370 break;
6371 case V8HImode:
6372 type = int_ftype_int_v8hi_v8hi;
6373 break;
6374 case V16QImode:
6375 type = int_ftype_int_v16qi_v16qi;
6376 break;
6377 case V4SFmode:
6378 type = int_ftype_int_v4sf_v4sf;
6379 break;
6380 default:
6381 abort ();
6384 def_builtin (dp->mask, dp->name, type, dp->code);
6387 /* Initialize the abs* operators. */
6388 d = (struct builtin_description *) bdesc_abs;
6389 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
6391 enum machine_mode mode0;
6392 tree type;
6394 mode0 = insn_data[d->icode].operand[0].mode;
6396 switch (mode0)
6398 case V4SImode:
6399 type = v4si_ftype_v4si;
6400 break;
6401 case V8HImode:
6402 type = v8hi_ftype_v8hi;
6403 break;
6404 case V16QImode:
6405 type = v16qi_ftype_v16qi;
6406 break;
6407 case V4SFmode:
6408 type = v4sf_ftype_v4sf;
6409 break;
6410 default:
6411 abort ();
6414 def_builtin (d->mask, d->name, type, d->code);
6418 static void
6419 rs6000_common_init_builtins ()
6421 struct builtin_description *d;
6422 size_t i;
6424 tree v4sf_ftype_v4sf_v4sf_v16qi
6425 = build_function_type_list (V4SF_type_node,
6426 V4SF_type_node, V4SF_type_node,
6427 V16QI_type_node, NULL_TREE);
6428 tree v4si_ftype_v4si_v4si_v16qi
6429 = build_function_type_list (V4SI_type_node,
6430 V4SI_type_node, V4SI_type_node,
6431 V16QI_type_node, NULL_TREE);
6432 tree v8hi_ftype_v8hi_v8hi_v16qi
6433 = build_function_type_list (V8HI_type_node,
6434 V8HI_type_node, V8HI_type_node,
6435 V16QI_type_node, NULL_TREE);
6436 tree v16qi_ftype_v16qi_v16qi_v16qi
6437 = build_function_type_list (V16QI_type_node,
6438 V16QI_type_node, V16QI_type_node,
6439 V16QI_type_node, NULL_TREE);
6440 tree v4si_ftype_char
6441 = build_function_type_list (V4SI_type_node, char_type_node, NULL_TREE);
6442 tree v8hi_ftype_char
6443 = build_function_type_list (V8HI_type_node, char_type_node, NULL_TREE);
6444 tree v16qi_ftype_char
6445 = build_function_type_list (V16QI_type_node, char_type_node, NULL_TREE);
6446 tree v8hi_ftype_v16qi
6447 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
6448 tree v4sf_ftype_v4sf
6449 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
6451 tree v2si_ftype_v2si_v2si
6452 = build_function_type_list (opaque_V2SI_type_node,
6453 opaque_V2SI_type_node,
6454 opaque_V2SI_type_node, NULL_TREE);
6456 tree v2sf_ftype_v2sf_v2sf
6457 = build_function_type_list (opaque_V2SF_type_node,
6458 opaque_V2SF_type_node,
6459 opaque_V2SF_type_node, NULL_TREE);
6461 tree v2si_ftype_int_int
6462 = build_function_type_list (opaque_V2SI_type_node,
6463 integer_type_node, integer_type_node,
6464 NULL_TREE);
6466 tree v2si_ftype_v2si
6467 = build_function_type_list (opaque_V2SI_type_node,
6468 opaque_V2SI_type_node, NULL_TREE);
6470 tree v2sf_ftype_v2sf
6471 = build_function_type_list (opaque_V2SF_type_node,
6472 opaque_V2SF_type_node, NULL_TREE);
6474 tree v2sf_ftype_v2si
6475 = build_function_type_list (opaque_V2SF_type_node,
6476 opaque_V2SI_type_node, NULL_TREE);
6478 tree v2si_ftype_v2sf
6479 = build_function_type_list (opaque_V2SI_type_node,
6480 opaque_V2SF_type_node, NULL_TREE);
6482 tree v2si_ftype_v2si_char
6483 = build_function_type_list (opaque_V2SI_type_node,
6484 opaque_V2SI_type_node,
6485 char_type_node, NULL_TREE);
6487 tree v2si_ftype_int_char
6488 = build_function_type_list (opaque_V2SI_type_node,
6489 integer_type_node, char_type_node, NULL_TREE);
6491 tree v2si_ftype_char
6492 = build_function_type_list (opaque_V2SI_type_node,
6493 char_type_node, NULL_TREE);
6495 tree int_ftype_int_int
6496 = build_function_type_list (integer_type_node,
6497 integer_type_node, integer_type_node,
6498 NULL_TREE);
6500 tree v4si_ftype_v4si_v4si
6501 = build_function_type_list (V4SI_type_node,
6502 V4SI_type_node, V4SI_type_node, NULL_TREE);
6503 tree v4sf_ftype_v4si_char
6504 = build_function_type_list (V4SF_type_node,
6505 V4SI_type_node, char_type_node, NULL_TREE);
6506 tree v4si_ftype_v4sf_char
6507 = build_function_type_list (V4SI_type_node,
6508 V4SF_type_node, char_type_node, NULL_TREE);
6509 tree v4si_ftype_v4si_char
6510 = build_function_type_list (V4SI_type_node,
6511 V4SI_type_node, char_type_node, NULL_TREE);
6512 tree v8hi_ftype_v8hi_char
6513 = build_function_type_list (V8HI_type_node,
6514 V8HI_type_node, char_type_node, NULL_TREE);
6515 tree v16qi_ftype_v16qi_char
6516 = build_function_type_list (V16QI_type_node,
6517 V16QI_type_node, char_type_node, NULL_TREE);
6518 tree v16qi_ftype_v16qi_v16qi_char
6519 = build_function_type_list (V16QI_type_node,
6520 V16QI_type_node, V16QI_type_node,
6521 char_type_node, NULL_TREE);
6522 tree v8hi_ftype_v8hi_v8hi_char
6523 = build_function_type_list (V8HI_type_node,
6524 V8HI_type_node, V8HI_type_node,
6525 char_type_node, NULL_TREE);
6526 tree v4si_ftype_v4si_v4si_char
6527 = build_function_type_list (V4SI_type_node,
6528 V4SI_type_node, V4SI_type_node,
6529 char_type_node, NULL_TREE);
6530 tree v4sf_ftype_v4sf_v4sf_char
6531 = build_function_type_list (V4SF_type_node,
6532 V4SF_type_node, V4SF_type_node,
6533 char_type_node, NULL_TREE);
6534 tree v4sf_ftype_v4sf_v4sf
6535 = build_function_type_list (V4SF_type_node,
6536 V4SF_type_node, V4SF_type_node, NULL_TREE);
6537 tree v4sf_ftype_v4sf_v4sf_v4si
6538 = build_function_type_list (V4SF_type_node,
6539 V4SF_type_node, V4SF_type_node,
6540 V4SI_type_node, NULL_TREE);
6541 tree v4sf_ftype_v4sf_v4sf_v4sf
6542 = build_function_type_list (V4SF_type_node,
6543 V4SF_type_node, V4SF_type_node,
6544 V4SF_type_node, NULL_TREE);
6545 tree v4si_ftype_v4si_v4si_v4si
6546 = build_function_type_list (V4SI_type_node,
6547 V4SI_type_node, V4SI_type_node,
6548 V4SI_type_node, NULL_TREE);
6549 tree v8hi_ftype_v8hi_v8hi
6550 = build_function_type_list (V8HI_type_node,
6551 V8HI_type_node, V8HI_type_node, NULL_TREE);
6552 tree v8hi_ftype_v8hi_v8hi_v8hi
6553 = build_function_type_list (V8HI_type_node,
6554 V8HI_type_node, V8HI_type_node,
6555 V8HI_type_node, NULL_TREE);
6556 tree v4si_ftype_v8hi_v8hi_v4si
6557 = build_function_type_list (V4SI_type_node,
6558 V8HI_type_node, V8HI_type_node,
6559 V4SI_type_node, NULL_TREE);
6560 tree v4si_ftype_v16qi_v16qi_v4si
6561 = build_function_type_list (V4SI_type_node,
6562 V16QI_type_node, V16QI_type_node,
6563 V4SI_type_node, NULL_TREE);
6564 tree v16qi_ftype_v16qi_v16qi
6565 = build_function_type_list (V16QI_type_node,
6566 V16QI_type_node, V16QI_type_node, NULL_TREE);
6567 tree v4si_ftype_v4sf_v4sf
6568 = build_function_type_list (V4SI_type_node,
6569 V4SF_type_node, V4SF_type_node, NULL_TREE);
6570 tree v8hi_ftype_v16qi_v16qi
6571 = build_function_type_list (V8HI_type_node,
6572 V16QI_type_node, V16QI_type_node, NULL_TREE);
6573 tree v4si_ftype_v8hi_v8hi
6574 = build_function_type_list (V4SI_type_node,
6575 V8HI_type_node, V8HI_type_node, NULL_TREE);
6576 tree v8hi_ftype_v4si_v4si
6577 = build_function_type_list (V8HI_type_node,
6578 V4SI_type_node, V4SI_type_node, NULL_TREE);
6579 tree v16qi_ftype_v8hi_v8hi
6580 = build_function_type_list (V16QI_type_node,
6581 V8HI_type_node, V8HI_type_node, NULL_TREE);
6582 tree v4si_ftype_v16qi_v4si
6583 = build_function_type_list (V4SI_type_node,
6584 V16QI_type_node, V4SI_type_node, NULL_TREE);
6585 tree v4si_ftype_v16qi_v16qi
6586 = build_function_type_list (V4SI_type_node,
6587 V16QI_type_node, V16QI_type_node, NULL_TREE);
6588 tree v4si_ftype_v8hi_v4si
6589 = build_function_type_list (V4SI_type_node,
6590 V8HI_type_node, V4SI_type_node, NULL_TREE);
6591 tree v4si_ftype_v8hi
6592 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
6593 tree int_ftype_v4si_v4si
6594 = build_function_type_list (integer_type_node,
6595 V4SI_type_node, V4SI_type_node, NULL_TREE);
6596 tree int_ftype_v4sf_v4sf
6597 = build_function_type_list (integer_type_node,
6598 V4SF_type_node, V4SF_type_node, NULL_TREE);
6599 tree int_ftype_v16qi_v16qi
6600 = build_function_type_list (integer_type_node,
6601 V16QI_type_node, V16QI_type_node, NULL_TREE);
6602 tree int_ftype_v8hi_v8hi
6603 = build_function_type_list (integer_type_node,
6604 V8HI_type_node, V8HI_type_node, NULL_TREE);
6606 /* Add the simple ternary operators. */
6607 d = (struct builtin_description *) bdesc_3arg;
6608 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
6611 enum machine_mode mode0, mode1, mode2, mode3;
6612 tree type;
6614 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6615 continue;
6617 mode0 = insn_data[d->icode].operand[0].mode;
6618 mode1 = insn_data[d->icode].operand[1].mode;
6619 mode2 = insn_data[d->icode].operand[2].mode;
6620 mode3 = insn_data[d->icode].operand[3].mode;
6622 /* When all four are of the same mode. */
6623 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
6625 switch (mode0)
6627 case V4SImode:
6628 type = v4si_ftype_v4si_v4si_v4si;
6629 break;
6630 case V4SFmode:
6631 type = v4sf_ftype_v4sf_v4sf_v4sf;
6632 break;
6633 case V8HImode:
6634 type = v8hi_ftype_v8hi_v8hi_v8hi;
6635 break;
6636 case V16QImode:
6637 type = v16qi_ftype_v16qi_v16qi_v16qi;
6638 break;
6639 default:
6640 abort();
6643 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
6645 switch (mode0)
6647 case V4SImode:
6648 type = v4si_ftype_v4si_v4si_v16qi;
6649 break;
6650 case V4SFmode:
6651 type = v4sf_ftype_v4sf_v4sf_v16qi;
6652 break;
6653 case V8HImode:
6654 type = v8hi_ftype_v8hi_v8hi_v16qi;
6655 break;
6656 case V16QImode:
6657 type = v16qi_ftype_v16qi_v16qi_v16qi;
6658 break;
6659 default:
6660 abort();
6663 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
6664 && mode3 == V4SImode)
6665 type = v4si_ftype_v16qi_v16qi_v4si;
6666 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
6667 && mode3 == V4SImode)
6668 type = v4si_ftype_v8hi_v8hi_v4si;
6669 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
6670 && mode3 == V4SImode)
6671 type = v4sf_ftype_v4sf_v4sf_v4si;
6673 /* vchar, vchar, vchar, 4 bit literal. */
6674 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
6675 && mode3 == QImode)
6676 type = v16qi_ftype_v16qi_v16qi_char;
6678 /* vshort, vshort, vshort, 4 bit literal. */
6679 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
6680 && mode3 == QImode)
6681 type = v8hi_ftype_v8hi_v8hi_char;
6683 /* vint, vint, vint, 4 bit literal. */
6684 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
6685 && mode3 == QImode)
6686 type = v4si_ftype_v4si_v4si_char;
6688 /* vfloat, vfloat, vfloat, 4 bit literal. */
6689 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
6690 && mode3 == QImode)
6691 type = v4sf_ftype_v4sf_v4sf_char;
6693 else
6694 abort ();
6696 def_builtin (d->mask, d->name, type, d->code);
6699 /* Add the simple binary operators. */
6700 d = (struct builtin_description *) bdesc_2arg;
6701 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
6703 enum machine_mode mode0, mode1, mode2;
6704 tree type;
6706 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6707 continue;
6709 mode0 = insn_data[d->icode].operand[0].mode;
6710 mode1 = insn_data[d->icode].operand[1].mode;
6711 mode2 = insn_data[d->icode].operand[2].mode;
6713 /* When all three operands are of the same mode. */
6714 if (mode0 == mode1 && mode1 == mode2)
6716 switch (mode0)
6718 case V4SFmode:
6719 type = v4sf_ftype_v4sf_v4sf;
6720 break;
6721 case V4SImode:
6722 type = v4si_ftype_v4si_v4si;
6723 break;
6724 case V16QImode:
6725 type = v16qi_ftype_v16qi_v16qi;
6726 break;
6727 case V8HImode:
6728 type = v8hi_ftype_v8hi_v8hi;
6729 break;
6730 case V2SImode:
6731 type = v2si_ftype_v2si_v2si;
6732 break;
6733 case V2SFmode:
6734 type = v2sf_ftype_v2sf_v2sf;
6735 break;
6736 case SImode:
6737 type = int_ftype_int_int;
6738 break;
6739 default:
6740 abort ();
6744 /* A few other combos we really don't want to do manually. */
6746 /* vint, vfloat, vfloat. */
6747 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
6748 type = v4si_ftype_v4sf_v4sf;
6750 /* vshort, vchar, vchar. */
6751 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
6752 type = v8hi_ftype_v16qi_v16qi;
6754 /* vint, vshort, vshort. */
6755 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
6756 type = v4si_ftype_v8hi_v8hi;
6758 /* vshort, vint, vint. */
6759 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
6760 type = v8hi_ftype_v4si_v4si;
6762 /* vchar, vshort, vshort. */
6763 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
6764 type = v16qi_ftype_v8hi_v8hi;
6766 /* vint, vchar, vint. */
6767 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
6768 type = v4si_ftype_v16qi_v4si;
6770 /* vint, vchar, vchar. */
6771 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
6772 type = v4si_ftype_v16qi_v16qi;
6774 /* vint, vshort, vint. */
6775 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
6776 type = v4si_ftype_v8hi_v4si;
6778 /* vint, vint, 5 bit literal. */
6779 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
6780 type = v4si_ftype_v4si_char;
6782 /* vshort, vshort, 5 bit literal. */
6783 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
6784 type = v8hi_ftype_v8hi_char;
6786 /* vchar, vchar, 5 bit literal. */
6787 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
6788 type = v16qi_ftype_v16qi_char;
6790 /* vfloat, vint, 5 bit literal. */
6791 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
6792 type = v4sf_ftype_v4si_char;
6794 /* vint, vfloat, 5 bit literal. */
6795 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
6796 type = v4si_ftype_v4sf_char;
6798 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
6799 type = v2si_ftype_int_int;
6801 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
6802 type = v2si_ftype_v2si_char;
6804 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
6805 type = v2si_ftype_int_char;
6807 /* int, x, x. */
6808 else if (mode0 == SImode)
6810 switch (mode1)
6812 case V4SImode:
6813 type = int_ftype_v4si_v4si;
6814 break;
6815 case V4SFmode:
6816 type = int_ftype_v4sf_v4sf;
6817 break;
6818 case V16QImode:
6819 type = int_ftype_v16qi_v16qi;
6820 break;
6821 case V8HImode:
6822 type = int_ftype_v8hi_v8hi;
6823 break;
6824 default:
6825 abort ();
6829 else
6830 abort ();
6832 def_builtin (d->mask, d->name, type, d->code);
6835 /* Add the simple unary operators. */
6836 d = (struct builtin_description *) bdesc_1arg;
6837 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6839 enum machine_mode mode0, mode1;
6840 tree type;
6842 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6843 continue;
6845 mode0 = insn_data[d->icode].operand[0].mode;
6846 mode1 = insn_data[d->icode].operand[1].mode;
6848 if (mode0 == V4SImode && mode1 == QImode)
6849 type = v4si_ftype_char;
6850 else if (mode0 == V8HImode && mode1 == QImode)
6851 type = v8hi_ftype_char;
6852 else if (mode0 == V16QImode && mode1 == QImode)
6853 type = v16qi_ftype_char;
6854 else if (mode0 == V4SFmode && mode1 == V4SFmode)
6855 type = v4sf_ftype_v4sf;
6856 else if (mode0 == V8HImode && mode1 == V16QImode)
6857 type = v8hi_ftype_v16qi;
6858 else if (mode0 == V4SImode && mode1 == V8HImode)
6859 type = v4si_ftype_v8hi;
6860 else if (mode0 == V2SImode && mode1 == V2SImode)
6861 type = v2si_ftype_v2si;
6862 else if (mode0 == V2SFmode && mode1 == V2SFmode)
6863 type = v2sf_ftype_v2sf;
6864 else if (mode0 == V2SFmode && mode1 == V2SImode)
6865 type = v2sf_ftype_v2si;
6866 else if (mode0 == V2SImode && mode1 == V2SFmode)
6867 type = v2si_ftype_v2sf;
6868 else if (mode0 == V2SImode && mode1 == QImode)
6869 type = v2si_ftype_char;
6870 else
6871 abort ();
6873 def_builtin (d->mask, d->name, type, d->code);
6878 /* Expand a block move operation, and return 1 if successful. Return 0
6879 if we should let the compiler generate normal code.
6881 operands[0] is the destination
6882 operands[1] is the source
6883 operands[2] is the length
6884 operands[3] is the alignment */
6886 #define MAX_MOVE_REG 4
6889 expand_block_move (operands)
6890 rtx operands[];
6892 rtx orig_dest = operands[0];
6893 rtx orig_src = operands[1];
6894 rtx bytes_rtx = operands[2];
6895 rtx align_rtx = operands[3];
6896 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
6897 int align;
6898 int bytes;
6899 int offset;
6900 int move_bytes;
6901 rtx stores[MAX_MOVE_REG];
6902 int num_reg = 0;
6904 /* If this is not a fixed size move, just call memcpy */
6905 if (! constp)
6906 return 0;
6908 /* If this is not a fixed size alignment, abort */
6909 if (GET_CODE (align_rtx) != CONST_INT)
6910 abort ();
6911 align = INTVAL (align_rtx);
6913 /* Anything to move? */
6914 bytes = INTVAL (bytes_rtx);
6915 if (bytes <= 0)
6916 return 1;
6918 /* store_one_arg depends on expand_block_move to handle at least the size of
6919 reg_parm_stack_space. */
6920 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
6921 return 0;
6923 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
6925 union {
6926 rtx (*movstrsi) PARAMS ((rtx, rtx, rtx, rtx));
6927 rtx (*mov) PARAMS ((rtx, rtx));
6928 } gen_func;
6929 enum machine_mode mode = BLKmode;
6930 rtx src, dest;
6932 if (TARGET_STRING
6933 && bytes > 24 /* move up to 32 bytes at a time */
6934 && ! fixed_regs[5]
6935 && ! fixed_regs[6]
6936 && ! fixed_regs[7]
6937 && ! fixed_regs[8]
6938 && ! fixed_regs[9]
6939 && ! fixed_regs[10]
6940 && ! fixed_regs[11]
6941 && ! fixed_regs[12])
6943 move_bytes = (bytes > 32) ? 32 : bytes;
6944 gen_func.movstrsi = gen_movstrsi_8reg;
6946 else if (TARGET_STRING
6947 && bytes > 16 /* move up to 24 bytes at a time */
6948 && ! fixed_regs[5]
6949 && ! fixed_regs[6]
6950 && ! fixed_regs[7]
6951 && ! fixed_regs[8]
6952 && ! fixed_regs[9]
6953 && ! fixed_regs[10])
6955 move_bytes = (bytes > 24) ? 24 : bytes;
6956 gen_func.movstrsi = gen_movstrsi_6reg;
6958 else if (TARGET_STRING
6959 && bytes > 8 /* move up to 16 bytes at a time */
6960 && ! fixed_regs[5]
6961 && ! fixed_regs[6]
6962 && ! fixed_regs[7]
6963 && ! fixed_regs[8])
6965 move_bytes = (bytes > 16) ? 16 : bytes;
6966 gen_func.movstrsi = gen_movstrsi_4reg;
6968 else if (bytes >= 8 && TARGET_POWERPC64
6969 /* 64-bit loads and stores require word-aligned
6970 displacements. */
6971 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
6973 move_bytes = 8;
6974 mode = DImode;
6975 gen_func.mov = gen_movdi;
6977 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
6978 { /* move up to 8 bytes at a time */
6979 move_bytes = (bytes > 8) ? 8 : bytes;
6980 gen_func.movstrsi = gen_movstrsi_2reg;
6982 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
6983 { /* move 4 bytes */
6984 move_bytes = 4;
6985 mode = SImode;
6986 gen_func.mov = gen_movsi;
6988 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
6989 { /* move 2 bytes */
6990 move_bytes = 2;
6991 mode = HImode;
6992 gen_func.mov = gen_movhi;
6994 else if (TARGET_STRING && bytes > 1)
6995 { /* move up to 4 bytes at a time */
6996 move_bytes = (bytes > 4) ? 4 : bytes;
6997 gen_func.movstrsi = gen_movstrsi_1reg;
6999 else /* move 1 byte at a time */
7001 move_bytes = 1;
7002 mode = QImode;
7003 gen_func.mov = gen_movqi;
7006 src = adjust_address (orig_src, mode, offset);
7007 dest = adjust_address (orig_dest, mode, offset);
7009 if (mode != BLKmode)
7011 rtx tmp_reg = gen_reg_rtx (mode);
7013 emit_insn ((*gen_func.mov) (tmp_reg, src));
7014 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
7017 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
7019 int i;
7020 for (i = 0; i < num_reg; i++)
7021 emit_insn (stores[i]);
7022 num_reg = 0;
7025 if (mode == BLKmode)
7027 /* Move the address into scratch registers. The movstrsi
7028 patterns require zero offset. */
7029 if (!REG_P (XEXP (src, 0)))
7031 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
7032 src = replace_equiv_address (src, src_reg);
7034 set_mem_size (src, GEN_INT (move_bytes));
7036 if (!REG_P (XEXP (dest, 0)))
7038 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
7039 dest = replace_equiv_address (dest, dest_reg);
7041 set_mem_size (dest, GEN_INT (move_bytes));
7043 emit_insn ((*gen_func.movstrsi) (dest, src,
7044 GEN_INT (move_bytes & 31),
7045 align_rtx));
7049 return 1;
7053 /* Return 1 if OP is a load multiple operation. It is known to be a
7054 PARALLEL and the first section will be tested. */
7057 load_multiple_operation (op, mode)
7058 rtx op;
7059 enum machine_mode mode ATTRIBUTE_UNUSED;
7061 int count = XVECLEN (op, 0);
7062 unsigned int dest_regno;
7063 rtx src_addr;
7064 int i;
7066 /* Perform a quick check so we don't blow up below. */
7067 if (count <= 1
7068 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7069 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7070 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
7071 return 0;
7073 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7074 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
7076 for (i = 1; i < count; i++)
7078 rtx elt = XVECEXP (op, 0, i);
7080 if (GET_CODE (elt) != SET
7081 || GET_CODE (SET_DEST (elt)) != REG
7082 || GET_MODE (SET_DEST (elt)) != SImode
7083 || REGNO (SET_DEST (elt)) != dest_regno + i
7084 || GET_CODE (SET_SRC (elt)) != MEM
7085 || GET_MODE (SET_SRC (elt)) != SImode
7086 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
7087 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
7088 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
7089 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
7090 return 0;
7093 return 1;
7096 /* Similar, but tests for store multiple. Here, the second vector element
7097 is a CLOBBER. It will be tested later. */
7100 store_multiple_operation (op, mode)
7101 rtx op;
7102 enum machine_mode mode ATTRIBUTE_UNUSED;
7104 int count = XVECLEN (op, 0) - 1;
7105 unsigned int src_regno;
7106 rtx dest_addr;
7107 int i;
7109 /* Perform a quick check so we don't blow up below. */
7110 if (count <= 1
7111 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7112 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
7113 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
7114 return 0;
7116 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7117 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
7119 for (i = 1; i < count; i++)
7121 rtx elt = XVECEXP (op, 0, i + 1);
7123 if (GET_CODE (elt) != SET
7124 || GET_CODE (SET_SRC (elt)) != REG
7125 || GET_MODE (SET_SRC (elt)) != SImode
7126 || REGNO (SET_SRC (elt)) != src_regno + i
7127 || GET_CODE (SET_DEST (elt)) != MEM
7128 || GET_MODE (SET_DEST (elt)) != SImode
7129 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
7130 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
7131 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
7132 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
7133 return 0;
7136 return 1;
7139 /* Return a string to perform a load_multiple operation.
7140 operands[0] is the vector.
7141 operands[1] is the source address.
7142 operands[2] is the first destination register. */
7144 const char *
7145 rs6000_output_load_multiple (operands)
7146 rtx operands[3];
7148 /* We have to handle the case where the pseudo used to contain the address
7149 is assigned to one of the output registers. */
7150 int i, j;
7151 int words = XVECLEN (operands[0], 0);
7152 rtx xop[10];
7154 if (XVECLEN (operands[0], 0) == 1)
7155 return "{l|lwz} %2,0(%1)";
7157 for (i = 0; i < words; i++)
7158 if (refers_to_regno_p (REGNO (operands[2]) + i,
7159 REGNO (operands[2]) + i + 1, operands[1], 0))
7161 if (i == words-1)
7163 xop[0] = GEN_INT (4 * (words-1));
7164 xop[1] = operands[1];
7165 xop[2] = operands[2];
7166 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
7167 return "";
7169 else if (i == 0)
7171 xop[0] = GEN_INT (4 * (words-1));
7172 xop[1] = operands[1];
7173 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
7174 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
7175 return "";
7177 else
7179 for (j = 0; j < words; j++)
7180 if (j != i)
7182 xop[0] = GEN_INT (j * 4);
7183 xop[1] = operands[1];
7184 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
7185 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
7187 xop[0] = GEN_INT (i * 4);
7188 xop[1] = operands[1];
7189 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
7190 return "";
7194 return "{lsi|lswi} %2,%1,%N0";
7197 /* Return 1 for a parallel vrsave operation. */
7200 vrsave_operation (op, mode)
7201 rtx op;
7202 enum machine_mode mode ATTRIBUTE_UNUSED;
7204 int count = XVECLEN (op, 0);
7205 unsigned int dest_regno, src_regno;
7206 int i;
7208 if (count <= 1
7209 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7210 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7211 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
7212 return 0;
7214 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7215 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7217 if (dest_regno != VRSAVE_REGNO
7218 && src_regno != VRSAVE_REGNO)
7219 return 0;
7221 for (i = 1; i < count; i++)
7223 rtx elt = XVECEXP (op, 0, i);
7225 if (GET_CODE (elt) != CLOBBER
7226 && GET_CODE (elt) != SET)
7227 return 0;
7230 return 1;
7233 /* Return 1 for an PARALLEL suitable for mtcrf. */
7236 mtcrf_operation (op, mode)
7237 rtx op;
7238 enum machine_mode mode ATTRIBUTE_UNUSED;
7240 int count = XVECLEN (op, 0);
7241 int i;
7242 rtx src_reg;
7244 /* Perform a quick check so we don't blow up below. */
7245 if (count < 1
7246 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7247 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
7248 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
7249 return 0;
7250 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
7252 if (GET_CODE (src_reg) != REG
7253 || GET_MODE (src_reg) != SImode
7254 || ! INT_REGNO_P (REGNO (src_reg)))
7255 return 0;
7257 for (i = 0; i < count; i++)
7259 rtx exp = XVECEXP (op, 0, i);
7260 rtx unspec;
7261 int maskval;
7263 if (GET_CODE (exp) != SET
7264 || GET_CODE (SET_DEST (exp)) != REG
7265 || GET_MODE (SET_DEST (exp)) != CCmode
7266 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
7267 return 0;
7268 unspec = SET_SRC (exp);
7269 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
7271 if (GET_CODE (unspec) != UNSPEC
7272 || XINT (unspec, 1) != UNSPEC_MOVESI_TO_CR
7273 || XVECLEN (unspec, 0) != 2
7274 || XVECEXP (unspec, 0, 0) != src_reg
7275 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
7276 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
7277 return 0;
7279 return 1;
7282 /* Return 1 for an PARALLEL suitable for lmw. */
7285 lmw_operation (op, mode)
7286 rtx op;
7287 enum machine_mode mode ATTRIBUTE_UNUSED;
7289 int count = XVECLEN (op, 0);
7290 unsigned int dest_regno;
7291 rtx src_addr;
7292 unsigned int base_regno;
7293 HOST_WIDE_INT offset;
7294 int i;
7296 /* Perform a quick check so we don't blow up below. */
7297 if (count <= 1
7298 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7299 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7300 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
7301 return 0;
7303 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7304 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
7306 if (dest_regno > 31
7307 || count != 32 - (int) dest_regno)
7308 return 0;
7310 if (legitimate_indirect_address_p (src_addr, 0))
7312 offset = 0;
7313 base_regno = REGNO (src_addr);
7314 if (base_regno == 0)
7315 return 0;
7317 else if (legitimate_offset_address_p (SImode, src_addr, 0))
7319 offset = INTVAL (XEXP (src_addr, 1));
7320 base_regno = REGNO (XEXP (src_addr, 0));
7322 else
7323 return 0;
7325 for (i = 0; i < count; i++)
7327 rtx elt = XVECEXP (op, 0, i);
7328 rtx newaddr;
7329 rtx addr_reg;
7330 HOST_WIDE_INT newoffset;
7332 if (GET_CODE (elt) != SET
7333 || GET_CODE (SET_DEST (elt)) != REG
7334 || GET_MODE (SET_DEST (elt)) != SImode
7335 || REGNO (SET_DEST (elt)) != dest_regno + i
7336 || GET_CODE (SET_SRC (elt)) != MEM
7337 || GET_MODE (SET_SRC (elt)) != SImode)
7338 return 0;
7339 newaddr = XEXP (SET_SRC (elt), 0);
7340 if (legitimate_indirect_address_p (newaddr, 0))
7342 newoffset = 0;
7343 addr_reg = newaddr;
7345 else if (legitimate_offset_address_p (SImode, newaddr, 0))
7347 addr_reg = XEXP (newaddr, 0);
7348 newoffset = INTVAL (XEXP (newaddr, 1));
7350 else
7351 return 0;
7352 if (REGNO (addr_reg) != base_regno
7353 || newoffset != offset + 4 * i)
7354 return 0;
7357 return 1;
7360 /* Return 1 for an PARALLEL suitable for stmw. */
7363 stmw_operation (op, mode)
7364 rtx op;
7365 enum machine_mode mode ATTRIBUTE_UNUSED;
7367 int count = XVECLEN (op, 0);
7368 unsigned int src_regno;
7369 rtx dest_addr;
7370 unsigned int base_regno;
7371 HOST_WIDE_INT offset;
7372 int i;
7374 /* Perform a quick check so we don't blow up below. */
7375 if (count <= 1
7376 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7377 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
7378 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
7379 return 0;
7381 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7382 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
7384 if (src_regno > 31
7385 || count != 32 - (int) src_regno)
7386 return 0;
7388 if (legitimate_indirect_address_p (dest_addr, 0))
7390 offset = 0;
7391 base_regno = REGNO (dest_addr);
7392 if (base_regno == 0)
7393 return 0;
7395 else if (legitimate_offset_address_p (SImode, dest_addr, 0))
7397 offset = INTVAL (XEXP (dest_addr, 1));
7398 base_regno = REGNO (XEXP (dest_addr, 0));
7400 else
7401 return 0;
7403 for (i = 0; i < count; i++)
7405 rtx elt = XVECEXP (op, 0, i);
7406 rtx newaddr;
7407 rtx addr_reg;
7408 HOST_WIDE_INT newoffset;
7410 if (GET_CODE (elt) != SET
7411 || GET_CODE (SET_SRC (elt)) != REG
7412 || GET_MODE (SET_SRC (elt)) != SImode
7413 || REGNO (SET_SRC (elt)) != src_regno + i
7414 || GET_CODE (SET_DEST (elt)) != MEM
7415 || GET_MODE (SET_DEST (elt)) != SImode)
7416 return 0;
7417 newaddr = XEXP (SET_DEST (elt), 0);
7418 if (legitimate_indirect_address_p (newaddr, 0))
7420 newoffset = 0;
7421 addr_reg = newaddr;
7423 else if (legitimate_offset_address_p (SImode, newaddr, 0))
7425 addr_reg = XEXP (newaddr, 0);
7426 newoffset = INTVAL (XEXP (newaddr, 1));
7428 else
7429 return 0;
7430 if (REGNO (addr_reg) != base_regno
7431 || newoffset != offset + 4 * i)
7432 return 0;
7435 return 1;
7438 /* A validation routine: say whether CODE, a condition code, and MODE
7439 match. The other alternatives either don't make sense or should
7440 never be generated. */
7442 static void
7443 validate_condition_mode (code, mode)
7444 enum rtx_code code;
7445 enum machine_mode mode;
7447 if (GET_RTX_CLASS (code) != '<'
7448 || GET_MODE_CLASS (mode) != MODE_CC)
7449 abort ();
7451 /* These don't make sense. */
7452 if ((code == GT || code == LT || code == GE || code == LE)
7453 && mode == CCUNSmode)
7454 abort ();
7456 if ((code == GTU || code == LTU || code == GEU || code == LEU)
7457 && mode != CCUNSmode)
7458 abort ();
7460 if (mode != CCFPmode
7461 && (code == ORDERED || code == UNORDERED
7462 || code == UNEQ || code == LTGT
7463 || code == UNGT || code == UNLT
7464 || code == UNGE || code == UNLE))
7465 abort ();
7467 /* These should never be generated except for
7468 flag_finite_math_only. */
7469 if (mode == CCFPmode
7470 && ! flag_finite_math_only
7471 && (code == LE || code == GE
7472 || code == UNEQ || code == LTGT
7473 || code == UNGT || code == UNLT))
7474 abort ();
7476 /* These are invalid; the information is not there. */
7477 if (mode == CCEQmode
7478 && code != EQ && code != NE)
7479 abort ();
7482 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
7483 We only check the opcode against the mode of the CC value here. */
7486 branch_comparison_operator (op, mode)
7487 rtx op;
7488 enum machine_mode mode ATTRIBUTE_UNUSED;
7490 enum rtx_code code = GET_CODE (op);
7491 enum machine_mode cc_mode;
7493 if (GET_RTX_CLASS (code) != '<')
7494 return 0;
7496 cc_mode = GET_MODE (XEXP (op, 0));
7497 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
7498 return 0;
7500 validate_condition_mode (code, cc_mode);
7502 return 1;
7505 /* Return 1 if OP is a comparison operation that is valid for a branch
7506 insn and which is true if the corresponding bit in the CC register
7507 is set. */
7510 branch_positive_comparison_operator (op, mode)
7511 rtx op;
7512 enum machine_mode mode;
7514 enum rtx_code code;
7516 if (! branch_comparison_operator (op, mode))
7517 return 0;
7519 code = GET_CODE (op);
7520 return (code == EQ || code == LT || code == GT
7521 || (TARGET_E500 && TARGET_HARD_FLOAT && !TARGET_FPRS && code == NE)
7522 || code == LTU || code == GTU
7523 || code == UNORDERED);
7526 /* Return 1 if OP is a comparison operation that is valid for an scc
7527 insn: it must be a positive comparison. */
7530 scc_comparison_operator (op, mode)
7531 rtx op;
7532 enum machine_mode mode;
7534 return branch_positive_comparison_operator (op, mode);
7538 trap_comparison_operator (op, mode)
7539 rtx op;
7540 enum machine_mode mode;
7542 if (mode != VOIDmode && mode != GET_MODE (op))
7543 return 0;
7544 return GET_RTX_CLASS (GET_CODE (op)) == '<';
7548 boolean_operator (op, mode)
7549 rtx op;
7550 enum machine_mode mode ATTRIBUTE_UNUSED;
7552 enum rtx_code code = GET_CODE (op);
7553 return (code == AND || code == IOR || code == XOR);
7557 boolean_or_operator (op, mode)
7558 rtx op;
7559 enum machine_mode mode ATTRIBUTE_UNUSED;
7561 enum rtx_code code = GET_CODE (op);
7562 return (code == IOR || code == XOR);
7566 min_max_operator (op, mode)
7567 rtx op;
7568 enum machine_mode mode ATTRIBUTE_UNUSED;
7570 enum rtx_code code = GET_CODE (op);
7571 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
7574 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
7575 mask required to convert the result of a rotate insn into a shift
7576 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
7579 includes_lshift_p (shiftop, andop)
7580 rtx shiftop;
7581 rtx andop;
7583 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
7585 shift_mask <<= INTVAL (shiftop);
7587 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
7590 /* Similar, but for right shift. */
7593 includes_rshift_p (shiftop, andop)
7594 rtx shiftop;
7595 rtx andop;
7597 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
7599 shift_mask >>= INTVAL (shiftop);
7601 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
7604 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
7605 to perform a left shift. It must have exactly SHIFTOP least
7606 significant 0's, then one or more 1's, then zero or more 0's. */
7609 includes_rldic_lshift_p (shiftop, andop)
7610 rtx shiftop;
7611 rtx andop;
7613 if (GET_CODE (andop) == CONST_INT)
7615 HOST_WIDE_INT c, lsb, shift_mask;
7617 c = INTVAL (andop);
7618 if (c == 0 || c == ~0)
7619 return 0;
7621 shift_mask = ~0;
7622 shift_mask <<= INTVAL (shiftop);
7624 /* Find the least significant one bit. */
7625 lsb = c & -c;
7627 /* It must coincide with the LSB of the shift mask. */
7628 if (-lsb != shift_mask)
7629 return 0;
7631 /* Invert to look for the next transition (if any). */
7632 c = ~c;
7634 /* Remove the low group of ones (originally low group of zeros). */
7635 c &= -lsb;
7637 /* Again find the lsb, and check we have all 1's above. */
7638 lsb = c & -c;
7639 return c == -lsb;
7641 else if (GET_CODE (andop) == CONST_DOUBLE
7642 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
7644 HOST_WIDE_INT low, high, lsb;
7645 HOST_WIDE_INT shift_mask_low, shift_mask_high;
7647 low = CONST_DOUBLE_LOW (andop);
7648 if (HOST_BITS_PER_WIDE_INT < 64)
7649 high = CONST_DOUBLE_HIGH (andop);
7651 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
7652 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
7653 return 0;
7655 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
7657 shift_mask_high = ~0;
7658 if (INTVAL (shiftop) > 32)
7659 shift_mask_high <<= INTVAL (shiftop) - 32;
7661 lsb = high & -high;
7663 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
7664 return 0;
7666 high = ~high;
7667 high &= -lsb;
7669 lsb = high & -high;
7670 return high == -lsb;
7673 shift_mask_low = ~0;
7674 shift_mask_low <<= INTVAL (shiftop);
7676 lsb = low & -low;
7678 if (-lsb != shift_mask_low)
7679 return 0;
7681 if (HOST_BITS_PER_WIDE_INT < 64)
7682 high = ~high;
7683 low = ~low;
7684 low &= -lsb;
7686 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
7688 lsb = high & -high;
7689 return high == -lsb;
7692 lsb = low & -low;
7693 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
7695 else
7696 return 0;
7699 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
7700 to perform a left shift. It must have SHIFTOP or more least
7701 signifigant 0's, with the remainder of the word 1's. */
7704 includes_rldicr_lshift_p (shiftop, andop)
7705 rtx shiftop;
7706 rtx andop;
7708 if (GET_CODE (andop) == CONST_INT)
7710 HOST_WIDE_INT c, lsb, shift_mask;
7712 shift_mask = ~0;
7713 shift_mask <<= INTVAL (shiftop);
7714 c = INTVAL (andop);
7716 /* Find the least signifigant one bit. */
7717 lsb = c & -c;
7719 /* It must be covered by the shift mask.
7720 This test also rejects c == 0. */
7721 if ((lsb & shift_mask) == 0)
7722 return 0;
7724 /* Check we have all 1's above the transition, and reject all 1's. */
7725 return c == -lsb && lsb != 1;
7727 else if (GET_CODE (andop) == CONST_DOUBLE
7728 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
7730 HOST_WIDE_INT low, lsb, shift_mask_low;
7732 low = CONST_DOUBLE_LOW (andop);
7734 if (HOST_BITS_PER_WIDE_INT < 64)
7736 HOST_WIDE_INT high, shift_mask_high;
7738 high = CONST_DOUBLE_HIGH (andop);
7740 if (low == 0)
7742 shift_mask_high = ~0;
7743 if (INTVAL (shiftop) > 32)
7744 shift_mask_high <<= INTVAL (shiftop) - 32;
7746 lsb = high & -high;
7748 if ((lsb & shift_mask_high) == 0)
7749 return 0;
7751 return high == -lsb;
7753 if (high != ~0)
7754 return 0;
7757 shift_mask_low = ~0;
7758 shift_mask_low <<= INTVAL (shiftop);
7760 lsb = low & -low;
7762 if ((lsb & shift_mask_low) == 0)
7763 return 0;
7765 return low == -lsb && lsb != 1;
7767 else
7768 return 0;
7771 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
7772 for lfq and stfq insns.
7774 Note reg1 and reg2 *must* be hard registers. To be sure we will
7775 abort if we are passed pseudo registers. */
7778 registers_ok_for_quad_peep (reg1, reg2)
7779 rtx reg1, reg2;
7781 /* We might have been passed a SUBREG. */
7782 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
7783 return 0;
7785 return (REGNO (reg1) == REGNO (reg2) - 1);
7788 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
7789 addr1 and addr2 must be in consecutive memory locations
7790 (addr2 == addr1 + 8). */
7793 addrs_ok_for_quad_peep (addr1, addr2)
7794 rtx addr1;
7795 rtx addr2;
7797 unsigned int reg1;
7798 int offset1;
7800 /* Extract an offset (if used) from the first addr. */
7801 if (GET_CODE (addr1) == PLUS)
7803 /* If not a REG, return zero. */
7804 if (GET_CODE (XEXP (addr1, 0)) != REG)
7805 return 0;
7806 else
7808 reg1 = REGNO (XEXP (addr1, 0));
7809 /* The offset must be constant! */
7810 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
7811 return 0;
7812 offset1 = INTVAL (XEXP (addr1, 1));
7815 else if (GET_CODE (addr1) != REG)
7816 return 0;
7817 else
7819 reg1 = REGNO (addr1);
7820 /* This was a simple (mem (reg)) expression. Offset is 0. */
7821 offset1 = 0;
7824 /* Make sure the second address is a (mem (plus (reg) (const_int)))
7825 or if it is (mem (reg)) then make sure that offset1 is -8 and the same
7826 register as addr1. */
7827 if (offset1 == -8 && GET_CODE (addr2) == REG && reg1 == REGNO (addr2))
7828 return 1;
7829 if (GET_CODE (addr2) != PLUS)
7830 return 0;
7832 if (GET_CODE (XEXP (addr2, 0)) != REG
7833 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
7834 return 0;
7836 if (reg1 != REGNO (XEXP (addr2, 0)))
7837 return 0;
7839 /* The offset for the second addr must be 8 more than the first addr. */
7840 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
7841 return 0;
7843 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
7844 instructions. */
7845 return 1;
/* Return the register class of a scratch register needed to copy IN into
   or out of a register in CLASS in MODE.  If it can be done directly,
   NO_REGS is returned.  */

enum reg_class
secondary_reload_class (class, mode, in)
     enum reg_class class;
     enum machine_mode mode ATTRIBUTE_UNUSED;
     rtx in;
{
  int regno;

  /* The Darwin leg of this condition is only active when MACHOPIC_INDIRECT
     holds (guarded at compile time by TARGET_MACHO).  */
  if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
#if TARGET_MACHO
		     && MACHOPIC_INDIRECT
#endif
		     ))
    {
      /* We cannot copy a symbolic operand directly into anything
	 other than BASE_REGS for TARGET_ELF.  So indicate that a
	 register from BASE_REGS is needed as an intermediate
	 register.

	 On Darwin, pic addresses require a load from memory, which
	 needs a base register.  */
      if (class != BASE_REGS
	  && (GET_CODE (in) == SYMBOL_REF
	      || GET_CODE (in) == HIGH
	      || GET_CODE (in) == LABEL_REF
	      || GET_CODE (in) == CONST))
	return BASE_REGS;
    }

  /* Reduce IN to a hard register number, or -1 when it is not (or does
     not resolve to) a hard register.  */
  if (GET_CODE (in) == REG)
    {
      regno = REGNO (in);
      if (regno >= FIRST_PSEUDO_REGISTER)
	{
	  regno = true_regnum (in);
	  if (regno >= FIRST_PSEUDO_REGISTER)
	    regno = -1;
	}
    }
  else if (GET_CODE (in) == SUBREG)
    {
      regno = true_regnum (in);
      if (regno >= FIRST_PSEUDO_REGISTER)
	regno = -1;
    }
  else
    regno = -1;

  /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
     into anything.  */
  if (class == GENERAL_REGS || class == BASE_REGS
      || (regno >= 0 && INT_REGNO_P (regno)))
    return NO_REGS;

  /* Constants, memory, and FP registers can go into FP registers.  */
  if ((regno == -1 || FP_REGNO_P (regno))
      && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
    return NO_REGS;

  /* Memory, and AltiVec registers can go into AltiVec registers.  */
  if ((regno == -1 || ALTIVEC_REGNO_P (regno))
      && class == ALTIVEC_REGS)
    return NO_REGS;

  /* We can copy among the CR registers.  */
  if ((class == CR_REGS || class == CR0_REGS)
      && regno >= 0 && CR_REGNO_P (regno))
    return NO_REGS;

  /* Otherwise, we need GENERAL_REGS.  */
  return GENERAL_REGS;
}
/* Given a comparison operation, return the bit number in CCR to test.  We
   know this is a valid comparison.

   SCC_P is 1 if this is for an scc.  That means that %D will have been
   used instead of %C, so the bits will be in different places.

   Return -1 if OP isn't a valid comparison for some reason.  */

int
ccr_bit (op, scc_p)
     rtx op;
     int scc_p;
{
  enum rtx_code code = GET_CODE (op);
  enum machine_mode cc_mode;
  int cc_regnum;
  int base_bit;
  rtx reg;

  if (GET_RTX_CLASS (code) != '<')
    return -1;

  reg = XEXP (op, 0);

  /* The first operand must be a condition register.  */
  if (GET_CODE (reg) != REG
      || ! CR_REGNO_P (REGNO (reg)))
    abort ();

  cc_mode = GET_MODE (reg);
  cc_regnum = REGNO (reg);
  /* Each CR field occupies 4 bits of the CCR; compute the first bit of
     this field.  */
  base_bit = 4 * (cc_regnum - CR0_REGNO);

  validate_condition_mode (code, cc_mode);

  /* When generating a sCOND operation, only positive conditions are
     allowed.  */
  if (scc_p && code != EQ && code != GT && code != LT && code != UNORDERED
      && code != GTU && code != LTU)
    abort ();

  switch (code)
    {
    case NE:
      /* E500 FP compares set the GT bit (base_bit + 1) instead.  */
      if (TARGET_E500 && !TARGET_FPRS
	  && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
	return base_bit + 1;
      return scc_p ? base_bit + 3 : base_bit + 2;
    case EQ:
      if (TARGET_E500 && !TARGET_FPRS
	  && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
	return base_bit + 1;
      return base_bit + 2;
    case GT:  case GTU:  case UNLE:
      return base_bit + 1;
    case LT:  case LTU:  case UNGE:
      return base_bit;
    case ORDERED:  case UNORDERED:
      return base_bit + 3;

    case GE:  case GEU:
      /* If scc, we will have done a cror to put the bit in the
	 unordered position.  So test that bit.  For integer, this is ! LT
	 unless this is an scc insn.  */
      return scc_p ? base_bit + 3 : base_bit;

    case LE: case LEU:
      return scc_p ? base_bit + 3 : base_bit + 1;

    default:
      abort ();
    }
}
7998 /* Return the GOT register. */
8000 struct rtx_def *
8001 rs6000_got_register (value)
8002 rtx value ATTRIBUTE_UNUSED;
8004 /* The second flow pass currently (June 1999) can't update
8005 regs_ever_live without disturbing other parts of the compiler, so
8006 update it here to make the prolog/epilogue code happy. */
8007 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
8008 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
8010 current_function_uses_pic_offset_table = 1;
8012 return pic_offset_table_rtx;
8015 /* Function to init struct machine_function.
8016 This will be called, via a pointer variable,
8017 from push_function_context. */
8019 static struct machine_function *
8020 rs6000_init_machine_status ()
8022 return ggc_alloc_cleared (sizeof (machine_function));
/* These macros test for integers and extract the low-order bits.
   INT_P accepts a CONST_INT or a VOIDmode CONST_DOUBLE (the form used
   for integer constants wider than HOST_WIDE_INT).  */
#define INT_P(X)  \
((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE)	\
 && GET_MODE (X) == VOIDmode)

/* The low-order HOST_WIDE_INT's worth of bits of constant X.  */
#define INT_LOWPART(X) \
  (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
8034 extract_MB (op)
8035 rtx op;
8037 int i;
8038 unsigned long val = INT_LOWPART (op);
8040 /* If the high bit is zero, the value is the first 1 bit we find
8041 from the left. */
8042 if ((val & 0x80000000) == 0)
8044 if ((val & 0xffffffff) == 0)
8045 abort ();
8047 i = 1;
8048 while (((val <<= 1) & 0x80000000) == 0)
8049 ++i;
8050 return i;
8053 /* If the high bit is set and the low bit is not, or the mask is all
8054 1's, the value is zero. */
8055 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
8056 return 0;
8058 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
8059 from the right. */
8060 i = 31;
8061 while (((val >>= 1) & 1) != 0)
8062 --i;
8064 return i;
8068 extract_ME (op)
8069 rtx op;
8071 int i;
8072 unsigned long val = INT_LOWPART (op);
8074 /* If the low bit is zero, the value is the first 1 bit we find from
8075 the right. */
8076 if ((val & 1) == 0)
8078 if ((val & 0xffffffff) == 0)
8079 abort ();
8081 i = 30;
8082 while (((val >>= 1) & 1) == 0)
8083 --i;
8085 return i;
8088 /* If the low bit is set and the high bit is not, or the mask is all
8089 1's, the value is 31. */
8090 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
8091 return 31;
8093 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
8094 from the left. */
8095 i = 0;
8096 while (((val <<= 1) & 0x80000000) != 0)
8097 ++i;
8099 return i;
8102 /* Locate some local-dynamic symbol still in use by this function
8103 so that we can print its name in some tls_ld pattern. */
8105 static const char *
8106 rs6000_get_some_local_dynamic_name ()
8108 rtx insn;
8110 if (cfun->machine->some_ld_name)
8111 return cfun->machine->some_ld_name;
8113 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
8114 if (INSN_P (insn)
8115 && for_each_rtx (&PATTERN (insn),
8116 rs6000_get_some_local_dynamic_name_1, 0))
8117 return cfun->machine->some_ld_name;
8119 abort ();
8122 /* Helper function for rs6000_get_some_local_dynamic_name. */
8124 static int
8125 rs6000_get_some_local_dynamic_name_1 (px, data)
8126 rtx *px;
8127 void *data ATTRIBUTE_UNUSED;
8129 rtx x = *px;
8131 if (GET_CODE (x) == SYMBOL_REF)
8133 const char *str = XSTR (x, 0);
8134 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
8136 cfun->machine->some_ld_name = str;
8137 return 1;
8141 return 0;
/* Print an operand.  Recognize special options, documented below.  */

/* Relocation suffix and register number used when annotating
   small-data references; for ELF these depend on the -msdata
   setting (EABI vs. System V style).  */
#if TARGET_ELF
#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
#else
#define SMALL_DATA_RELOC "sda21"
#define SMALL_DATA_REG 0
#endif
/* Output operand X (an rtx) to FILE, applying the modifier given by
   CODE (one of the case labels below; 0 means no modifier).  Invalid
   combinations are reported via output_operand_lossage or abort.  */

void
print_operand (file, x, code)
     FILE *file;
     rtx x;
     int code;
{
  int i;
  HOST_WIDE_INT val;		/* scratch for signed bit scans (%W)  */
  unsigned HOST_WIDE_INT uval;	/* scratch for unsigned masks (%S)  */

  switch (code)
    {
    case '.':
      /* Write out an instruction after the call which may be replaced
	 with glue code by the loader.  This depends on the AIX version.  */
      asm_fprintf (file, RS6000_CALL_GLUE);
      return;

      /* %a is output_address.  */

    case 'A':
      /* If X is a constant integer whose low-order 5 bits are zero,
	 write 'l'.  Otherwise, write 'r'.  This is a kludge to fix a bug
	 in the AIX assembler where "sri" with a zero shift count
	 writes a trash instruction.  */
      if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
	putc ('l', file);
      else
	putc ('r', file);
      return;

    case 'b':
      /* If constant, low-order 16 bits of constant, unsigned.
	 Otherwise, write normally.  */
      if (INT_P (x))
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
      else
	print_operand (file, x, 0);
      return;

    case 'B':
      /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
	 for 64-bit mask direction.  */
      putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
      return;

      /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
	 output_operand.  */

    case 'E':
      /* X is a CR register.  Print the number of the EQ bit of the CR */
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
	output_operand_lossage ("invalid %%E value");
      else
	fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
      return;

    case 'f':
      /* X is a CR register.  Print the shift count needed to move it
	 to the high-order four bits.  */
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
	output_operand_lossage ("invalid %%f value");
      else
	fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
      return;

    case 'F':
      /* Similar, but print the count for the rotate in the opposite
	 direction.  */
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
	output_operand_lossage ("invalid %%F value");
      else
	fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
      return;

    case 'G':
      /* X is a constant integer.  If it is negative, print "m",
	 otherwise print "z".  This is to make an aze or ame insn.  */
      if (GET_CODE (x) != CONST_INT)
	output_operand_lossage ("invalid %%G value");
      else if (INTVAL (x) >= 0)
	putc ('z', file);
      else
	putc ('m', file);
      return;

    case 'h':
      /* If constant, output low-order five bits.  Otherwise, write
	 normally.  */
      if (INT_P (x))
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
      else
	print_operand (file, x, 0);
      return;

    case 'H':
      /* If constant, output low-order six bits.  Otherwise, write
	 normally.  */
      if (INT_P (x))
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
      else
	print_operand (file, x, 0);
      return;

    case 'I':
      /* Print `i' if this is a constant, else nothing.  */
      if (INT_P (x))
	putc ('i', file);
      return;

    case 'j':
      /* Write the bit number in CCR for jump.  */
      i = ccr_bit (x, 0);
      if (i == -1)
	output_operand_lossage ("invalid %%j code");
      else
	fprintf (file, "%d", i);
      return;

    case 'J':
      /* Similar, but add one for shift count in rlinm for scc and pass
	 scc flag to `ccr_bit'.  */
      i = ccr_bit (x, 1);
      if (i == -1)
	output_operand_lossage ("invalid %%J code");
      else
	/* If we want bit 31, write a shift count of zero, not 32.  */
	fprintf (file, "%d", i == 31 ? 0 : i + 1);
      return;

    case 'k':
      /* X must be a constant.  Write the 1's complement of the
	 constant.  */
      if (! INT_P (x))
	output_operand_lossage ("invalid %%k value");
      else
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
      return;

    case 'K':
      /* X must be a symbolic constant on ELF.  Write an
	 expression suitable for an 'addi' that adds in the low 16
	 bits of the MEM.  */
      if (GET_CODE (x) != CONST)
	{
	  print_operand_address (file, x);
	  fputs ("@l", file);
	}
      else
	{
	  if (GET_CODE (XEXP (x, 0)) != PLUS
	      || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
		  && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
	      || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
	    output_operand_lossage ("invalid %%K value");
	  print_operand_address (file, XEXP (XEXP (x, 0), 0));
	  fputs ("@l", file);
	  /* For GNU as, there must be a non-alphanumeric character
	     between 'l' and the number.  The '-' is added by
	     print_operand() already.  */
	  if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
	    fputs ("+", file);
	  print_operand (file, XEXP (XEXP (x, 0), 1), 0);
	}
      return;

      /* %l is output_asm_label.  */

    case 'L':
      /* Write second word of DImode or DFmode reference.  Works on register
	 or non-indexed memory only.  */
      if (GET_CODE (x) == REG)
	fprintf (file, "%s", reg_names[REGNO (x) + 1]);
      else if (GET_CODE (x) == MEM)
	{
	  /* Handle possible auto-increment.  Since it is pre-increment and
	     we have already done it, we can just use an offset of word.  */
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    output_address (plus_constant (XEXP (XEXP (x, 0), 0),
					   UNITS_PER_WORD));
	  else
	    output_address (XEXP (adjust_address_nv (x, SImode,
						     UNITS_PER_WORD),
				  0));

	  if (small_data_operand (x, GET_MODE (x)))
	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		     reg_names[SMALL_DATA_REG]);
	}
      return;

    case 'm':
      /* MB value for a mask operand.  */
      if (! mask_operand (x, SImode))
	output_operand_lossage ("invalid %%m value");

      fprintf (file, "%d", extract_MB (x));
      return;

    case 'M':
      /* ME value for a mask operand.  */
      if (! mask_operand (x, SImode))
	output_operand_lossage ("invalid %%M value");

      fprintf (file, "%d", extract_ME (x));
      return;

      /* %n outputs the negative of its operand.  */

    case 'N':
      /* Write the number of elements in the vector times 4.  */
      if (GET_CODE (x) != PARALLEL)
	output_operand_lossage ("invalid %%N value");
      else
	fprintf (file, "%d", XVECLEN (x, 0) * 4);
      return;

    case 'O':
      /* Similar, but subtract 1 first.  */
      if (GET_CODE (x) != PARALLEL)
	output_operand_lossage ("invalid %%O value");
      else
	fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
      return;

    case 'p':
      /* X is a CONST_INT that is a power of two.  Output the logarithm.  */
      if (! INT_P (x)
	  || INT_LOWPART (x) < 0
	  || (i = exact_log2 (INT_LOWPART (x))) < 0)
	output_operand_lossage ("invalid %%p value");
      else
	fprintf (file, "%d", i);
      return;

    case 'P':
      /* The operand must be an indirect memory reference.  The result
	 is the register number.  */
      if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
	  || REGNO (XEXP (x, 0)) >= 32)
	output_operand_lossage ("invalid %%P value");
      else
	fprintf (file, "%d", REGNO (XEXP (x, 0)));
      return;

    case 'q':
      /* This outputs the logical code corresponding to a boolean
	 expression.  The expression may have one or both operands
	 negated (if one, only the first one).  For condition register
	 logical operations, it will also treat the negated
	 CR codes as NOTs, but not handle NOTs of them.  */
      {
	const char *const *t = 0;
	const char *s;
	enum rtx_code code = GET_CODE (x);
	/* Rows: AND/IOR/XOR; columns: plain, first operand negated,
	   both operands negated.  */
	static const char * const tbl[3][3] = {
	  { "and", "andc", "nor" },
	  { "or", "orc", "nand" },
	  { "xor", "eqv", "xor" } };

	if (code == AND)
	  t = tbl[0];
	else if (code == IOR)
	  t = tbl[1];
	else if (code == XOR)
	  t = tbl[2];
	else
	  output_operand_lossage ("invalid %%q value");

	if (GET_CODE (XEXP (x, 0)) != NOT)
	  s = t[0];
	else
	  {
	    if (GET_CODE (XEXP (x, 1)) == NOT)
	      s = t[2];
	    else
	      s = t[1];
	  }

	fputs (s, file);
      }
      return;

    case 'R':
      /* X is a CR register.  Print the mask for `mtcrf'.  */
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
	output_operand_lossage ("invalid %%R value");
      else
	fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
      return;

    case 's':
      /* Low 5 bits of 32 - value */
      if (! INT_P (x))
	output_operand_lossage ("invalid %%s value");
      else
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
      return;

    case 'S':
      /* PowerPC64 mask position.  All 0's is excluded.
	 CONST_INT 32-bit mask is considered sign-extended so any
	 transition must occur within the CONST_INT, not on the boundary.  */
      if (! mask64_operand (x, DImode))
	output_operand_lossage ("invalid %%S value");

      uval = INT_LOWPART (x);

      if (uval & 1)	/* Clear Left */
	{
#if HOST_BITS_PER_WIDE_INT > 64
	  uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
#endif
	  i = 64;
	}
      else		/* Clear Right */
	{
	  uval = ~uval;
#if HOST_BITS_PER_WIDE_INT > 64
	  uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
#endif
	  i = 63;
	}
      while (uval != 0)
	--i, uval >>= 1;
      if (i < 0)
	abort ();
      fprintf (file, "%d", i);
      return;

    case 't':
      /* Like 'J' but get to the OVERFLOW/UNORDERED bit.  */
      if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
	abort ();

      /* Bit 3 is OV bit.  */
      i = 4 * (REGNO (x) - CR0_REGNO) + 3;

      /* If we want bit 31, write a shift count of zero, not 32.  */
      fprintf (file, "%d", i == 31 ? 0 : i + 1);
      return;

    case 'T':
      /* Print the symbolic name of a branch target register.  */
      if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
				  && REGNO (x) != COUNT_REGISTER_REGNUM))
	output_operand_lossage ("invalid %%T value");
      else if (REGNO (x) == LINK_REGISTER_REGNUM)
	fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
      else
	fputs ("ctr", file);
      return;

    case 'u':
      /* High-order 16 bits of constant for use in unsigned operand.  */
      if (! INT_P (x))
	output_operand_lossage ("invalid %%u value");
      else
	fprintf (file, HOST_WIDE_INT_PRINT_HEX,
		 (INT_LOWPART (x) >> 16) & 0xffff);
      return;

    case 'v':
      /* High-order 16 bits of constant for use in signed operand.  */
      if (! INT_P (x))
	output_operand_lossage ("invalid %%v value");
      else
	fprintf (file, HOST_WIDE_INT_PRINT_HEX,
		 (INT_LOWPART (x) >> 16) & 0xffff);
      return;

    case 'U':
      /* Print `u' if this has an auto-increment or auto-decrement.  */
      if (GET_CODE (x) == MEM
	  && (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC))
	putc ('u', file);
      return;

    case 'V':
      /* Print the trap code for this operand.  */
      switch (GET_CODE (x))
	{
	case EQ:
	  fputs ("eq", file);   /* 4 */
	  break;
	case NE:
	  fputs ("ne", file);   /* 24 */
	  break;
	case LT:
	  fputs ("lt", file);   /* 16 */
	  break;
	case LE:
	  fputs ("le", file);   /* 20 */
	  break;
	case GT:
	  fputs ("gt", file);   /* 8 */
	  break;
	case GE:
	  fputs ("ge", file);   /* 12 */
	  break;
	case LTU:
	  fputs ("llt", file);  /* 2 */
	  break;
	case LEU:
	  fputs ("lle", file);  /* 6 */
	  break;
	case GTU:
	  fputs ("lgt", file);  /* 1 */
	  break;
	case GEU:
	  fputs ("lge", file);  /* 5 */
	  break;
	default:
	  abort ();
	}
      break;

    case 'w':
      /* If constant, low-order 16 bits of constant, signed.  Otherwise, write
	 normally.  */
      if (INT_P (x))
	fprintf (file, HOST_WIDE_INT_PRINT_DEC,
		 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
      else
	print_operand (file, x, 0);
      return;

    case 'W':
      /* MB value for a PowerPC64 rldic operand.  */
      val = (GET_CODE (x) == CONST_INT
	     ? INTVAL (x) : CONST_DOUBLE_HIGH (x));

      if (val < 0)
	i = -1;
      else
	for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
	  if ((val <<= 1) < 0)
	    break;

#if HOST_BITS_PER_WIDE_INT == 32
      if (GET_CODE (x) == CONST_INT && i >= 0)
	i += 32;  /* zero-extend high-part was all 0's */
      else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
	{
	  val = CONST_DOUBLE_LOW (x);

	  if (val == 0)
	    abort ();
	  else if (val < 0)
	    --i;
	  else
	    for ( ; i < 64; i++)
	      if ((val <<= 1) < 0)
		break;
	}
#endif

      fprintf (file, "%d", i + 1);
      return;

    case 'X':
      /* Print 'x' for an indexed (reg+reg) memory operand.  */
      if (GET_CODE (x) == MEM
	  && legitimate_indexed_address_p (XEXP (x, 0), 0))
	putc ('x', file);
      return;

    case 'Y':
      /* Like 'L', for third word of TImode  */
      if (GET_CODE (x) == REG)
	fprintf (file, "%s", reg_names[REGNO (x) + 2]);
      else if (GET_CODE (x) == MEM)
	{
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
	  else
	    output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
	  if (small_data_operand (x, GET_MODE (x)))
	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		     reg_names[SMALL_DATA_REG]);
	}
      return;

    case 'z':
      /* X is a SYMBOL_REF.  Write out the name preceded by a
	 period and without any trailing data in brackets.  Used for function
	 names.  If we are configured for System V (or the embedded ABI) on
	 the PowerPC, do not emit the period, since those systems do not use
	 TOCs and the like.  */
      if (GET_CODE (x) != SYMBOL_REF)
	abort ();

      if (XSTR (x, 0)[0] != '.')
	{
	  switch (DEFAULT_ABI)
	    {
	    default:
	      abort ();

	    case ABI_AIX:
	      putc ('.', file);
	      break;

	    case ABI_V4:
	    case ABI_DARWIN:
	      break;
	    }
	}
#if TARGET_AIX
      RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
#else
      assemble_name (file, XSTR (x, 0));
#endif
      return;

    case 'Z':
      /* Like 'L', for last word of TImode.  */
      if (GET_CODE (x) == REG)
	fprintf (file, "%s", reg_names[REGNO (x) + 3]);
      else if (GET_CODE (x) == MEM)
	{
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
	  else
	    output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
	  if (small_data_operand (x, GET_MODE (x)))
	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		     reg_names[SMALL_DATA_REG]);
	}
      return;

      /* Print AltiVec or SPE memory operand.  */
    case 'y':
      {
	rtx tmp;

	if (GET_CODE (x) != MEM)
	  abort ();

	tmp = XEXP (x, 0);

	if (TARGET_E500)
	  {
	    /* Handle [reg].  */
	    if (GET_CODE (tmp) == REG)
	      {
		fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
		break;
	      }
	    /* Handle [reg+UIMM].  */
	    else if (GET_CODE (tmp) == PLUS &&
		     GET_CODE (XEXP (tmp, 1)) == CONST_INT)
	      {
		int x;

		if (GET_CODE (XEXP (tmp, 0)) != REG)
		  abort ();

		x = INTVAL (XEXP (tmp, 1));
		fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
		break;
	      }

	    /* Fall through.  Must be [reg+reg].  */
	  }
	if (GET_CODE (tmp) == REG)
	  fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
	else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
	  {
	    if (REGNO (XEXP (tmp, 0)) == 0)
	      fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
		       reg_names[ REGNO (XEXP (tmp, 0)) ]);
	    else
	      fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
		       reg_names[ REGNO (XEXP (tmp, 1)) ]);
	  }
	else
	  abort ();
	break;
      }

    case 0:
      if (GET_CODE (x) == REG)
	fprintf (file, "%s", reg_names[REGNO (x)]);
      else if (GET_CODE (x) == MEM)
	{
	  /* We need to handle PRE_INC and PRE_DEC here, since we need to
	     know the width from the mode.  */
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC)
	    fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
		     reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
	  else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
		     reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
	  else
	    output_address (XEXP (x, 0));
	}
      else
	output_addr_const (file, x);
      return;

    case '&':
      /* Name of a local-dynamic TLS symbol used in this function.  */
      assemble_name (file, rs6000_get_some_local_dynamic_name ());
      return;

    default:
      output_operand_lossage ("invalid %%xn code");
    }
}
/* Print the address of an operand.  */

void
print_operand_address (file, x)
     FILE *file;
     rtx x;
{
  if (GET_CODE (x) == REG)
    fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
  else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
	   || GET_CODE (x) == LABEL_REF)
    {
      output_addr_const (file, x);
      if (small_data_operand (x, GET_MODE (x)))
	fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		 reg_names[SMALL_DATA_REG]);
      /* A bare symbolic address is never valid under -mtoc.  */
      else if (TARGET_TOC)
	abort ();
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
    {
      /* Indexed (reg+reg) address; r0 must not be printed first since
	 it would read as the literal value zero there.  */
      if (REGNO (XEXP (x, 0)) == 0)
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
		 reg_names[ REGNO (XEXP (x, 0)) ]);
      else
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
		 reg_names[ REGNO (XEXP (x, 1)) ]);
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
    fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
	     INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
#if TARGET_ELF
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
	   && CONSTANT_P (XEXP (x, 1)))
    {
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
#if TARGET_MACHO
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
	   && CONSTANT_P (XEXP (x, 1)))
    {
      fprintf (file, "lo16(");
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
  else if (legitimate_constant_pool_address_p (x))
    {
      if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
	{
	  rtx contains_minus = XEXP (x, 1);
	  rtx minus, symref;
	  const char *name;

	  /* Find the (minus (sym) (toc)) buried in X, and temporarily
	     turn it into (sym) for output_addr_const.  */
	  while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
	    contains_minus = XEXP (contains_minus, 0);

	  minus = XEXP (contains_minus, 0);
	  symref = XEXP (minus, 0);
	  XEXP (contains_minus, 0) = symref;
	  if (TARGET_ELF)
	    {
	      /* Temporarily append "@toc" to the symbol's name; the
		 original name is restored below.  */
	      char *newname;

	      name = XSTR (symref, 0);
	      newname = alloca (strlen (name) + sizeof ("@toc"));
	      strcpy (newname, name);
	      strcat (newname, "@toc");
	      XSTR (symref, 0) = newname;
	    }
	  output_addr_const (file, XEXP (x, 1));
	  if (TARGET_ELF)
	    XSTR (symref, 0) = name;
	  /* Undo the in-place edit of the RTL made above.  */
	  XEXP (contains_minus, 0) = minus;
	}
      else
	output_addr_const (file, XEXP (x, 1));

      fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
    }
  else
    abort ();
}
/* Target hook for assembling integer objects.  The PowerPC version has
   to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
   is defined.  It also needs to handle DI-mode objects on 64-bit
   targets.  */

static bool
rs6000_assemble_integer (x, size, aligned_p)
     rtx x;
     unsigned int size;
     int aligned_p;
{
#ifdef RELOCATABLE_NEEDS_FIXUP
  /* Special handling for SI values.  */
  if (size == 4 && aligned_p)
    {
      extern int in_toc_section PARAMS ((void));
      /* Guard against re-entering this block from the recursive
	 output_addr_const call below.  */
      static int recurse = 0;

      /* For -mrelocatable, we mark all addresses that need to be fixed up
	 in the .fixup section.  */
      if (TARGET_RELOCATABLE
	  && !in_toc_section ()
	  && !in_text_section ()
	  && !recurse
	  && GET_CODE (x) != CONST_INT
	  && GET_CODE (x) != CONST_DOUBLE
	  && CONSTANT_P (x))
	{
	  char buf[256];

	  recurse = 1;
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
	  fixuplabelno++;
	  ASM_OUTPUT_LABEL (asm_out_file, buf);
	  fprintf (asm_out_file, "\t.long\t(");
	  output_addr_const (asm_out_file, x);
	  fprintf (asm_out_file, ")@fixup\n");
	  fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
	  ASM_OUTPUT_ALIGN (asm_out_file, 2);
	  fprintf (asm_out_file, "\t.long\t");
	  assemble_name (asm_out_file, buf);
	  fprintf (asm_out_file, "\n\t.previous\n");
	  recurse = 0;
	  return true;
	}
      /* Remove initial .'s to turn a -mcall-aixdesc function
	 address into the address of the descriptor, not the function
	 itself.  */
      else if (GET_CODE (x) == SYMBOL_REF
	       && XSTR (x, 0)[0] == '.'
	       && DEFAULT_ABI == ABI_AIX)
	{
	  const char *name = XSTR (x, 0);
	  while (*name == '.')
	    name++;

	  fprintf (asm_out_file, "\t.long\t%s\n", name);
	  return true;
	}
    }
#endif /* RELOCATABLE_NEEDS_FIXUP */
  return default_assemble_integer (x, size, aligned_p);
}
#ifdef HAVE_GAS_HIDDEN
/* Emit an assembler directive to set symbol visibility for DECL to
   VISIBILITY_TYPE.  */

static void
rs6000_assemble_visibility (decl, vis)
     tree decl;
     int vis;
{
  static const char * const directive[] = {
    NULL, "internal", "hidden", "protected"
  };
  const char *asm_name;

  /* Anything that is not an AIX-ABI function gets the generic
     treatment.  */
  if (DEFAULT_ABI != ABI_AIX || TREE_CODE (decl) != FUNCTION_DECL)
    {
      default_assemble_visibility (decl, vis);
      return;
    }

  /* Functions need to have their entry point symbol visibility set as
     well as their descriptor symbol visibility.  */
  asm_name = ((* targetm.strip_name_encoding)
	      (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));

  fprintf (asm_out_file, "\t.%s\t%s\n", directive[vis], asm_name);
  fprintf (asm_out_file, "\t.%s\t.%s\n", directive[vis], asm_name);
}
#endif
8950 enum rtx_code
8951 rs6000_reverse_condition (mode, code)
8952 enum machine_mode mode;
8953 enum rtx_code code;
8955 /* Reversal of FP compares takes care -- an ordered compare
8956 becomes an unordered compare and vice versa. */
8957 if (mode == CCFPmode
8958 && (!flag_finite_math_only
8959 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
8960 || code == UNEQ || code == LTGT))
8961 return reverse_condition_maybe_unordered (code);
8962 else
8963 return reverse_condition (code);
8966 /* Generate a compare for CODE. Return a brand-new rtx that
8967 represents the result of the compare. */
8969 static rtx
8970 rs6000_generate_compare (code)
8971 enum rtx_code code;
8973 enum machine_mode comp_mode;
8974 rtx compare_result;
8976 if (rs6000_compare_fp_p)
8977 comp_mode = CCFPmode;
8978 else if (code == GTU || code == LTU
8979 || code == GEU || code == LEU)
8980 comp_mode = CCUNSmode;
8981 else
8982 comp_mode = CCmode;
8984 /* First, the compare. */
8985 compare_result = gen_reg_rtx (comp_mode);
8987 /* SPE FP compare instructions on the GPRs. Yuck! */
8988 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT)
8989 && rs6000_compare_fp_p)
8991 rtx cmp, or1, or2, or_result, compare_result2;
8993 switch (code)
8995 case EQ:
8996 case UNEQ:
8997 case NE:
8998 case LTGT:
8999 cmp = flag_finite_math_only
9000 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
9001 rs6000_compare_op1)
9002 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
9003 rs6000_compare_op1);
9004 break;
9005 case GT:
9006 case GTU:
9007 case UNGT:
9008 case UNGE:
9009 case GE:
9010 case GEU:
9011 cmp = flag_finite_math_only
9012 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
9013 rs6000_compare_op1)
9014 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
9015 rs6000_compare_op1);
9016 break;
9017 case LT:
9018 case LTU:
9019 case UNLT:
9020 case UNLE:
9021 case LE:
9022 case LEU:
9023 cmp = flag_finite_math_only
9024 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
9025 rs6000_compare_op1)
9026 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
9027 rs6000_compare_op1);
9028 break;
9029 default:
9030 abort ();
9033 /* Synthesize LE and GE from LT/GT || EQ. */
9034 if (code == LE || code == GE || code == LEU || code == GEU)
9036 /* Synthesize GE/LE frome GT/LT || EQ. */
9038 emit_insn (cmp);
9040 switch (code)
9042 case LE: code = LT; break;
9043 case GE: code = GT; break;
9044 case LEU: code = LT; break;
9045 case GEU: code = GT; break;
9046 default: abort ();
9049 or1 = gen_reg_rtx (SImode);
9050 or2 = gen_reg_rtx (SImode);
9051 or_result = gen_reg_rtx (CCEQmode);
9052 compare_result2 = gen_reg_rtx (CCFPmode);
9054 /* Do the EQ. */
9055 cmp = flag_finite_math_only
9056 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
9057 rs6000_compare_op1)
9058 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
9059 rs6000_compare_op1);
9060 emit_insn (cmp);
9062 /* The MC8540 FP compare instructions set the CR bits
9063 differently than other PPC compare instructions. For
9064 that matter, there is no generic test instruction, but a
9065 testgt, testlt, and testeq. For a true condition, bit 2
9066 is set (x1xx) in the CR. Following the traditional CR
9067 values:
9069 LT GT EQ OV
9070 bit3 bit2 bit1 bit0
9072 ... bit 2 would be a GT CR alias, so later on we
9073 look in the GT bits for the branch instructions.
9074 However, we must be careful to emit correct RTL in
9075 the meantime, so optimizations don't get confused. */
9077 or1 = gen_rtx (NE, SImode, compare_result, const0_rtx);
9078 or2 = gen_rtx (NE, SImode, compare_result2, const0_rtx);
9080 /* OR them together. */
9081 cmp = gen_rtx_SET (VOIDmode, or_result,
9082 gen_rtx_COMPARE (CCEQmode,
9083 gen_rtx_IOR (SImode, or1, or2),
9084 const_true_rtx));
9085 compare_result = or_result;
9086 code = EQ;
9088 else
9090 /* We only care about 1 bit (x1xx), so map everything to NE to
9091 maintain rtl sanity. We'll get to the right bit (x1xx) at
9092 code output time. */
9093 if (code == NE || code == LTGT)
9094 /* Do the inverse here because we have no cmpne
9095 instruction. We use the cmpeq instruction and expect
9096 to get a 0 instead. */
9097 code = EQ;
9098 else
9099 code = NE;
9102 emit_insn (cmp);
9104 else
9105 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
9106 gen_rtx_COMPARE (comp_mode,
9107 rs6000_compare_op0,
9108 rs6000_compare_op1)));
9110 /* Some kinds of FP comparisons need an OR operation;
9111 under flag_finite_math_only we don't bother. */
9112 if (rs6000_compare_fp_p
9113 && ! flag_finite_math_only
9114 && ! (TARGET_HARD_FLOAT && TARGET_E500 && !TARGET_FPRS)
9115 && (code == LE || code == GE
9116 || code == UNEQ || code == LTGT
9117 || code == UNGT || code == UNLT))
9119 enum rtx_code or1, or2;
9120 rtx or1_rtx, or2_rtx, compare2_rtx;
9121 rtx or_result = gen_reg_rtx (CCEQmode);
9123 switch (code)
9125 case LE: or1 = LT; or2 = EQ; break;
9126 case GE: or1 = GT; or2 = EQ; break;
9127 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
9128 case LTGT: or1 = LT; or2 = GT; break;
9129 case UNGT: or1 = UNORDERED; or2 = GT; break;
9130 case UNLT: or1 = UNORDERED; or2 = LT; break;
9131 default: abort ();
9133 validate_condition_mode (or1, comp_mode);
9134 validate_condition_mode (or2, comp_mode);
9135 or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
9136 or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
9137 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
9138 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
9139 const_true_rtx);
9140 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
9142 compare_result = or_result;
9143 code = EQ;
9146 validate_condition_mode (code, GET_MODE (compare_result));
9148 return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
9152 /* Emit the RTL for an sCOND pattern. */
9154 void
9155 rs6000_emit_sCOND (code, result)
9156 enum rtx_code code;
9157 rtx result;
9159 rtx condition_rtx;
9160 enum machine_mode op_mode;
9161 enum rtx_code cond_code;
9163 condition_rtx = rs6000_generate_compare (code);
9164 cond_code = GET_CODE (condition_rtx);
9166 if (cond_code == NE
9167 || cond_code == GE || cond_code == LE
9168 || cond_code == GEU || cond_code == LEU
9169 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
9171 rtx not_result = gen_reg_rtx (CCEQmode);
9172 rtx not_op, rev_cond_rtx;
9173 enum machine_mode cc_mode;
9175 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
9177 rev_cond_rtx = gen_rtx (rs6000_reverse_condition (cc_mode, cond_code),
9178 SImode, XEXP (condition_rtx, 0), const0_rtx);
9179 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
9180 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
9181 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
9184 op_mode = GET_MODE (rs6000_compare_op0);
9185 if (op_mode == VOIDmode)
9186 op_mode = GET_MODE (rs6000_compare_op1);
9188 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
9190 PUT_MODE (condition_rtx, DImode);
9191 convert_move (result, condition_rtx, 0);
9193 else
9195 PUT_MODE (condition_rtx, SImode);
9196 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
9200 /* Emit a branch of kind CODE to location LOC. */
9202 void
9203 rs6000_emit_cbranch (code, loc)
9204 enum rtx_code code;
9205 rtx loc;
9207 rtx condition_rtx, loc_ref;
9209 condition_rtx = rs6000_generate_compare (code);
9210 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
9211 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
9212 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
9213 loc_ref, pc_rtx)));
9216 /* Return the string to output a conditional branch to LABEL, which is
9217 the operand number of the label, or -1 if the branch is really a
9218 conditional return.
9220 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
9221 condition code register and its mode specifies what kind of
9222 comparison we made.
9224 REVERSED is nonzero if we should reverse the sense of the comparison.
9226 INSN is the insn. */
9228 char *
9229 output_cbranch (op, label, reversed, insn)
9230 rtx op;
9231 const char * label;
9232 int reversed;
9233 rtx insn;
9235 static char string[64];
9236 enum rtx_code code = GET_CODE (op);
9237 rtx cc_reg = XEXP (op, 0);
9238 enum machine_mode mode = GET_MODE (cc_reg);
9239 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
9240 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
9241 int really_reversed = reversed ^ need_longbranch;
9242 char *s = string;
9243 const char *ccode;
9244 const char *pred;
9245 rtx note;
9247 validate_condition_mode (code, mode);
9249 /* Work out which way this really branches. We could use
9250 reverse_condition_maybe_unordered here always but this
9251 makes the resulting assembler clearer. */
9252 if (really_reversed)
9254 /* Reversal of FP compares takes care -- an ordered compare
9255 becomes an unordered compare and vice versa. */
9256 if (mode == CCFPmode)
9257 code = reverse_condition_maybe_unordered (code);
9258 else
9259 code = reverse_condition (code);
9262 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
9264 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
9265 to the GT bit. */
9266 if (code == EQ)
9267 /* Opposite of GT. */
9268 code = UNLE;
9269 else if (code == NE)
9270 code = GT;
9271 else
9272 abort ();
9275 switch (code)
9277 /* Not all of these are actually distinct opcodes, but
9278 we distinguish them for clarity of the resulting assembler. */
9279 case NE: case LTGT:
9280 ccode = "ne"; break;
9281 case EQ: case UNEQ:
9282 ccode = "eq"; break;
9283 case GE: case GEU:
9284 ccode = "ge"; break;
9285 case GT: case GTU: case UNGT:
9286 ccode = "gt"; break;
9287 case LE: case LEU:
9288 ccode = "le"; break;
9289 case LT: case LTU: case UNLT:
9290 ccode = "lt"; break;
9291 case UNORDERED: ccode = "un"; break;
9292 case ORDERED: ccode = "nu"; break;
9293 case UNGE: ccode = "nl"; break;
9294 case UNLE: ccode = "ng"; break;
9295 default:
9296 abort ();
9299 /* Maybe we have a guess as to how likely the branch is.
9300 The old mnemonics don't have a way to specify this information. */
9301 pred = "";
9302 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
9303 if (note != NULL_RTX)
9305 /* PROB is the difference from 50%. */
9306 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
9307 bool always_hint = rs6000_cpu != PROCESSOR_POWER4;
9309 /* Only hint for highly probable/improbable branches on newer
9310 cpus as static prediction overrides processor dynamic
9311 prediction. For older cpus we may as well always hint, but
9312 assume not taken for branches that are very close to 50% as a
9313 mispredicted taken branch is more expensive than a
9314 mispredicted not-taken branch. */
9315 if (always_hint
9316 || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
9318 if (abs (prob) > REG_BR_PROB_BASE / 20
9319 && ((prob > 0) ^ need_longbranch))
9320 pred = "+";
9321 else
9322 pred = "-";
9326 if (label == NULL)
9327 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
9328 else
9329 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
9331 /* We need to escape any '%' characters in the reg_names string.
9332 Assume they'd only be the first character... */
9333 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
9334 *s++ = '%';
9335 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
9337 if (label != NULL)
9339 /* If the branch distance was too far, we may have to use an
9340 unconditional branch to go the distance. */
9341 if (need_longbranch)
9342 s += sprintf (s, ",$+8\n\tb %s", label);
9343 else
9344 s += sprintf (s, ",%s", label);
9347 return string;
9350 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
9351 operands of the last comparison is nonzero/true, FALSE_COND if it
9352 is zero/false. Return 0 if the hardware has no such operation. */
9355 rs6000_emit_cmove (dest, op, true_cond, false_cond)
9356 rtx dest;
9357 rtx op;
9358 rtx true_cond;
9359 rtx false_cond;
9361 enum rtx_code code = GET_CODE (op);
9362 rtx op0 = rs6000_compare_op0;
9363 rtx op1 = rs6000_compare_op1;
9364 REAL_VALUE_TYPE c1;
9365 enum machine_mode compare_mode = GET_MODE (op0);
9366 enum machine_mode result_mode = GET_MODE (dest);
9367 rtx temp;
9369 /* These modes should always match. */
9370 if (GET_MODE (op1) != compare_mode
9371 /* In the isel case however, we can use a compare immediate, so
9372 op1 may be a small constant. */
9373 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
9374 return 0;
9375 if (GET_MODE (true_cond) != result_mode)
9376 return 0;
9377 if (GET_MODE (false_cond) != result_mode)
9378 return 0;
9380 /* First, work out if the hardware can do this at all, or
9381 if it's too slow... */
9382 if (! rs6000_compare_fp_p)
9384 if (TARGET_ISEL)
9385 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
9386 return 0;
9389 /* Eliminate half of the comparisons by switching operands, this
9390 makes the remaining code simpler. */
9391 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
9392 || code == LTGT || code == LT || code == UNLE)
9394 code = reverse_condition_maybe_unordered (code);
9395 temp = true_cond;
9396 true_cond = false_cond;
9397 false_cond = temp;
9400 /* UNEQ and LTGT take four instructions for a comparison with zero,
9401 it'll probably be faster to use a branch here too. */
9402 if (code == UNEQ && HONOR_NANS (compare_mode))
9403 return 0;
9405 if (GET_CODE (op1) == CONST_DOUBLE)
9406 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
9408 /* We're going to try to implement comparisons by performing
9409 a subtract, then comparing against zero. Unfortunately,
9410 Inf - Inf is NaN which is not zero, and so if we don't
9411 know that the operand is finite and the comparison
9412 would treat EQ different to UNORDERED, we can't do it. */
9413 if (HONOR_INFINITIES (compare_mode)
9414 && code != GT && code != UNGE
9415 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
9416 /* Constructs of the form (a OP b ? a : b) are safe. */
9417 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
9418 || (! rtx_equal_p (op0, true_cond)
9419 && ! rtx_equal_p (op1, true_cond))))
9420 return 0;
9421 /* At this point we know we can use fsel. */
9423 /* Reduce the comparison to a comparison against zero. */
9424 temp = gen_reg_rtx (compare_mode);
9425 emit_insn (gen_rtx_SET (VOIDmode, temp,
9426 gen_rtx_MINUS (compare_mode, op0, op1)));
9427 op0 = temp;
9428 op1 = CONST0_RTX (compare_mode);
9430 /* If we don't care about NaNs we can reduce some of the comparisons
9431 down to faster ones. */
9432 if (! HONOR_NANS (compare_mode))
9433 switch (code)
9435 case GT:
9436 code = LE;
9437 temp = true_cond;
9438 true_cond = false_cond;
9439 false_cond = temp;
9440 break;
9441 case UNGE:
9442 code = GE;
9443 break;
9444 case UNEQ:
9445 code = EQ;
9446 break;
9447 default:
9448 break;
9451 /* Now, reduce everything down to a GE. */
9452 switch (code)
9454 case GE:
9455 break;
9457 case LE:
9458 temp = gen_reg_rtx (compare_mode);
9459 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
9460 op0 = temp;
9461 break;
9463 case ORDERED:
9464 temp = gen_reg_rtx (compare_mode);
9465 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
9466 op0 = temp;
9467 break;
9469 case EQ:
9470 temp = gen_reg_rtx (compare_mode);
9471 emit_insn (gen_rtx_SET (VOIDmode, temp,
9472 gen_rtx_NEG (compare_mode,
9473 gen_rtx_ABS (compare_mode, op0))));
9474 op0 = temp;
9475 break;
9477 case UNGE:
9478 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
9479 temp = gen_reg_rtx (result_mode);
9480 emit_insn (gen_rtx_SET (VOIDmode, temp,
9481 gen_rtx_IF_THEN_ELSE (result_mode,
9482 gen_rtx_GE (VOIDmode,
9483 op0, op1),
9484 true_cond, false_cond)));
9485 false_cond = true_cond;
9486 true_cond = temp;
9488 temp = gen_reg_rtx (compare_mode);
9489 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
9490 op0 = temp;
9491 break;
9493 case GT:
9494 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
9495 temp = gen_reg_rtx (result_mode);
9496 emit_insn (gen_rtx_SET (VOIDmode, temp,
9497 gen_rtx_IF_THEN_ELSE (result_mode,
9498 gen_rtx_GE (VOIDmode,
9499 op0, op1),
9500 true_cond, false_cond)));
9501 true_cond = false_cond;
9502 false_cond = temp;
9504 temp = gen_reg_rtx (compare_mode);
9505 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
9506 op0 = temp;
9507 break;
9509 default:
9510 abort ();
9513 emit_insn (gen_rtx_SET (VOIDmode, dest,
9514 gen_rtx_IF_THEN_ELSE (result_mode,
9515 gen_rtx_GE (VOIDmode,
9516 op0, op1),
9517 true_cond, false_cond)));
9518 return 1;
9521 /* Same as above, but for ints (isel). */
9523 static int
9524 rs6000_emit_int_cmove (dest, op, true_cond, false_cond)
9525 rtx dest;
9526 rtx op;
9527 rtx true_cond;
9528 rtx false_cond;
9530 rtx condition_rtx, cr;
9532 /* All isel implementations thus far are 32-bits. */
9533 if (GET_MODE (rs6000_compare_op0) != SImode)
9534 return 0;
9536 /* We still have to do the compare, because isel doesn't do a
9537 compare, it just looks at the CRx bits set by a previous compare
9538 instruction. */
9539 condition_rtx = rs6000_generate_compare (GET_CODE (op));
9540 cr = XEXP (condition_rtx, 0);
9542 if (GET_MODE (cr) == CCmode)
9543 emit_insn (gen_isel_signed (dest, condition_rtx,
9544 true_cond, false_cond, cr));
9545 else
9546 emit_insn (gen_isel_unsigned (dest, condition_rtx,
9547 true_cond, false_cond, cr));
9549 return 1;
9552 const char *
9553 output_isel (operands)
9554 rtx *operands;
9556 enum rtx_code code;
9558 code = GET_CODE (operands[1]);
9559 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
9561 PUT_CODE (operands[1], reverse_condition (code));
9562 return "isel %0,%3,%2,%j1";
9564 else
9565 return "isel %0,%2,%3,%j1";
9568 void
9569 rs6000_emit_minmax (dest, code, op0, op1)
9570 rtx dest;
9571 enum rtx_code code;
9572 rtx op0;
9573 rtx op1;
9575 enum machine_mode mode = GET_MODE (op0);
9576 enum rtx_code c;
9577 rtx target;
9579 if (code == SMAX || code == SMIN)
9580 c = GE;
9581 else
9582 c = GEU;
9584 if (code == SMAX || code == UMAX)
9585 target = emit_conditional_move (dest, c, op0, op1, mode,
9586 op0, op1, mode, 0);
9587 else
9588 target = emit_conditional_move (dest, c, op0, op1, mode,
9589 op1, op0, mode, 0);
9590 if (target == NULL_RTX)
9591 abort ();
9592 if (target != dest)
9593 emit_move_insn (dest, target);
9596 /* This page contains routines that are used to determine what the
9597 function prologue and epilogue code will do and write them out. */
9599 /* Return the first fixed-point register that is required to be
9600 saved. 32 if none. */
9603 first_reg_to_save ()
9605 int first_reg;
9607 /* Find lowest numbered live register. */
9608 for (first_reg = 13; first_reg <= 31; first_reg++)
9609 if (regs_ever_live[first_reg]
9610 && (! call_used_regs[first_reg]
9611 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
9612 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
9613 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
9614 break;
9616 #if TARGET_MACHO
9617 if (flag_pic
9618 && current_function_uses_pic_offset_table
9619 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
9620 return RS6000_PIC_OFFSET_TABLE_REGNUM;
9621 #endif
9623 return first_reg;
9626 /* Similar, for FP regs. */
9629 first_fp_reg_to_save ()
9631 int first_reg;
9633 /* Find lowest numbered live register. */
9634 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
9635 if (regs_ever_live[first_reg])
9636 break;
9638 return first_reg;
9641 /* Similar, for AltiVec regs. */
9643 static int
9644 first_altivec_reg_to_save ()
9646 int i;
9648 /* Stack frame remains as is unless we are in AltiVec ABI. */
9649 if (! TARGET_ALTIVEC_ABI)
9650 return LAST_ALTIVEC_REGNO + 1;
9652 /* Find lowest numbered live register. */
9653 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
9654 if (regs_ever_live[i])
9655 break;
9657 return i;
9660 /* Return a 32-bit mask of the AltiVec registers we need to set in
9661 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
9662 the 32-bit word is 0. */
9664 static unsigned int
9665 compute_vrsave_mask ()
9667 unsigned int i, mask = 0;
9669 /* First, find out if we use _any_ altivec registers. */
9670 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
9671 if (regs_ever_live[i])
9672 mask |= ALTIVEC_REG_BIT (i);
9674 if (mask == 0)
9675 return mask;
9677 /* Next, remove the argument registers from the set. These must
9678 be in the VRSAVE mask set by the caller, so we don't need to add
9679 them in again. More importantly, the mask we compute here is
9680 used to generate CLOBBERs in the set_vrsave insn, and we do not
9681 wish the argument registers to die. */
9682 for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
9683 mask &= ~ALTIVEC_REG_BIT (i);
9685 /* Similarly, remove the return value from the set. */
9687 bool yes = false;
9688 diddle_return_value (is_altivec_return_reg, &yes);
9689 if (yes)
9690 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
9693 return mask;
9696 static void
9697 is_altivec_return_reg (reg, xyes)
9698 rtx reg;
9699 void *xyes;
9701 bool *yes = (bool *) xyes;
9702 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
9703 *yes = true;
9707 /* Calculate the stack information for the current function. This is
9708 complicated by having two separate calling sequences, the AIX calling
9709 sequence and the V.4 calling sequence.
9711 AIX (and Darwin/Mac OS X) stack frames look like:
9712 32-bit 64-bit
9713 SP----> +---------------------------------------+
9714 | back chain to caller | 0 0
9715 +---------------------------------------+
9716 | saved CR | 4 8 (8-11)
9717 +---------------------------------------+
9718 | saved LR | 8 16
9719 +---------------------------------------+
9720 | reserved for compilers | 12 24
9721 +---------------------------------------+
9722 | reserved for binders | 16 32
9723 +---------------------------------------+
9724 | saved TOC pointer | 20 40
9725 +---------------------------------------+
9726 | Parameter save area (P) | 24 48
9727 +---------------------------------------+
9728 | Alloca space (A) | 24+P etc.
9729 +---------------------------------------+
9730 | Local variable space (L) | 24+P+A
9731 +---------------------------------------+
9732 | Float/int conversion temporary (X) | 24+P+A+L
9733 +---------------------------------------+
9734 | Save area for AltiVec registers (W) | 24+P+A+L+X
9735 +---------------------------------------+
9736 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
9737 +---------------------------------------+
9738 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
9739 +---------------------------------------+
9740 	| Save area for GP registers (G)	| 24+P+A+L+X+W+Y+Z
9741 	+---------------------------------------+
9742 	| Save area for FP registers (F)	| 24+P+A+L+X+W+Y+Z+G
9743 +---------------------------------------+
9744 old SP->| back chain to caller's caller |
9745 +---------------------------------------+
9747 The required alignment for AIX configurations is two words (i.e., 8
9748 or 16 bytes).
9751 V.4 stack frames look like:
9753 SP----> +---------------------------------------+
9754 | back chain to caller | 0
9755 +---------------------------------------+
9756 | caller's saved LR | 4
9757 +---------------------------------------+
9758 | Parameter save area (P) | 8
9759 +---------------------------------------+
9760 | Alloca space (A) | 8+P
9761 +---------------------------------------+
9762 | Varargs save area (V) | 8+P+A
9763 +---------------------------------------+
9764 | Local variable space (L) | 8+P+A+V
9765 +---------------------------------------+
9766 | Float/int conversion temporary (X) | 8+P+A+V+L
9767 +---------------------------------------+
9768 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
9769 +---------------------------------------+
9770 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
9771 +---------------------------------------+
9772 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
9773 +---------------------------------------+
9774 | SPE: area for 64-bit GP registers |
9775 +---------------------------------------+
9776 | SPE alignment padding |
9777 +---------------------------------------+
9778 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
9779 +---------------------------------------+
9780 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
9781 +---------------------------------------+
9782 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
9783 +---------------------------------------+
9784 old SP->| back chain to caller's caller |
9785 +---------------------------------------+
9787 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
9788 given. (But note below and in sysv4.h that we require only 8 and
9789 may round up the size of our stack frame anyways. The historical
9790 reason is early versions of powerpc-linux which didn't properly
9791 align the stack at program startup. A happy side-effect is that
9792 -mno-eabi libraries can be used with -meabi programs.)
9794 The EABI configuration defaults to the V.4 layout. However,
9795 the stack alignment requirements may differ. If -mno-eabi is not
9796 given, the required stack alignment is 8 bytes; if -mno-eabi is
9797 given, the required alignment is 16 bytes. (But see V.4 comment
9798 above.) */
9800 #ifndef ABI_STACK_BOUNDARY
9801 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
9802 #endif
9804 rs6000_stack_t *
9805 rs6000_stack_info ()
9807 static rs6000_stack_t info, zero_info;
9808 rs6000_stack_t *info_ptr = &info;
9809 int reg_size = TARGET_POWERPC64 ? 8 : 4;
9810 int ehrd_size;
9811 int total_raw_size;
9813 /* Zero all fields portably. */
9814 info = zero_info;
9816 if (TARGET_SPE)
9818 /* Cache value so we don't rescan instruction chain over and over. */
9819 if (cfun->machine->insn_chain_scanned_p == 0)
9821 cfun->machine->insn_chain_scanned_p = 1;
9822 info_ptr->spe_64bit_regs_used = (int) spe_func_has_64bit_regs_p ();
9826 /* Select which calling sequence. */
9827 info_ptr->abi = DEFAULT_ABI;
9829 /* Calculate which registers need to be saved & save area size. */
9830 info_ptr->first_gp_reg_save = first_reg_to_save ();
9831 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
9832 even if it currently looks like we won't. */
9833 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
9834 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
9835 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
9836 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
9837 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
9838 else
9839 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
9841 /* For the SPE, we have an additional upper 32-bits on each GPR.
9842 Ideally we should save the entire 64-bits only when the upper
9843 half is used in SIMD instructions. Since we only record
9844 registers live (not the size they are used in), this proves
9845 difficult because we'd have to traverse the instruction chain at
9846 the right time, taking reload into account. This is a real pain,
9847 so we opt to save the GPRs in 64-bits always if but one register
9848 gets used in 64-bits. Otherwise, all the registers in the frame
9849 get saved in 32-bits.
9851 So... since when we save all GPRs (except the SP) in 64-bits, the
9852 traditional GP save area will be empty. */
9853 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
9854 info_ptr->gp_size = 0;
9856 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
9857 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
9859 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
9860 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
9861 - info_ptr->first_altivec_reg_save);
9863 /* Does this function call anything? */
9864 info_ptr->calls_p = (! current_function_is_leaf
9865 || cfun->machine->ra_needs_full_frame);
9867 /* Determine if we need to save the link register. */
9868 if (rs6000_ra_ever_killed ()
9869 || (DEFAULT_ABI == ABI_AIX
9870 && current_function_profile
9871 && !TARGET_PROFILE_KERNEL)
9872 #ifdef TARGET_RELOCATABLE
9873 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
9874 #endif
9875 || (info_ptr->first_fp_reg_save != 64
9876 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
9877 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
9878 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
9879 || (DEFAULT_ABI == ABI_DARWIN
9880 && flag_pic
9881 && current_function_uses_pic_offset_table)
9882 || info_ptr->calls_p)
9884 info_ptr->lr_save_p = 1;
9885 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
9888 /* Determine if we need to save the condition code registers. */
9889 if (regs_ever_live[CR2_REGNO]
9890 || regs_ever_live[CR3_REGNO]
9891 || regs_ever_live[CR4_REGNO])
9893 info_ptr->cr_save_p = 1;
9894 if (DEFAULT_ABI == ABI_V4)
9895 info_ptr->cr_size = reg_size;
9898 /* If the current function calls __builtin_eh_return, then we need
9899 to allocate stack space for registers that will hold data for
9900 the exception handler. */
9901 if (current_function_calls_eh_return)
9903 unsigned int i;
9904 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
9905 continue;
9907 /* SPE saves EH registers in 64-bits. */
9908 ehrd_size = i * (TARGET_SPE_ABI
9909 && info_ptr->spe_64bit_regs_used != 0
9910 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
9912 else
9913 ehrd_size = 0;
9915 /* Determine various sizes. */
9916 info_ptr->reg_size = reg_size;
9917 info_ptr->fixed_size = RS6000_SAVE_AREA;
9918 info_ptr->varargs_size = RS6000_VARARGS_AREA;
9919 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
9920 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
9923 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
9924 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
9925 else
9926 info_ptr->spe_gp_size = 0;
9928 if (TARGET_ALTIVEC_ABI && TARGET_ALTIVEC_VRSAVE)
9930 info_ptr->vrsave_mask = compute_vrsave_mask ();
9931 info_ptr->vrsave_size = info_ptr->vrsave_mask ? 4 : 0;
9933 else
9935 info_ptr->vrsave_mask = 0;
9936 info_ptr->vrsave_size = 0;
9939 /* Calculate the offsets. */
9940 switch (DEFAULT_ABI)
9942 case ABI_NONE:
9943 default:
9944 abort ();
9946 case ABI_AIX:
9947 case ABI_DARWIN:
9948 info_ptr->fp_save_offset = - info_ptr->fp_size;
9949 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
9951 if (TARGET_ALTIVEC_ABI)
9953 info_ptr->vrsave_save_offset
9954 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
9956 /* Align stack so vector save area is on a quadword boundary. */
9957 if (info_ptr->altivec_size != 0)
9958 info_ptr->altivec_padding_size
9959 = 16 - (-info_ptr->vrsave_save_offset % 16);
9960 else
9961 info_ptr->altivec_padding_size = 0;
9963 info_ptr->altivec_save_offset
9964 = info_ptr->vrsave_save_offset
9965 - info_ptr->altivec_padding_size
9966 - info_ptr->altivec_size;
9968 /* Adjust for AltiVec case. */
9969 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
9971 else
9972 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
9973 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
9974 info_ptr->lr_save_offset = 2*reg_size;
9975 break;
9977 case ABI_V4:
9978 info_ptr->fp_save_offset = - info_ptr->fp_size;
9979 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
9980 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
9982 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
9984 /* Align stack so SPE GPR save area is aligned on a
9985 double-word boundary. */
9986 if (info_ptr->spe_gp_size != 0)
9987 info_ptr->spe_padding_size
9988 = 8 - (-info_ptr->cr_save_offset % 8);
9989 else
9990 info_ptr->spe_padding_size = 0;
9992 info_ptr->spe_gp_save_offset
9993 = info_ptr->cr_save_offset
9994 - info_ptr->spe_padding_size
9995 - info_ptr->spe_gp_size;
9997 /* Adjust for SPE case. */
9998 info_ptr->toc_save_offset
9999 = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
10001 else if (TARGET_ALTIVEC_ABI)
10003 info_ptr->vrsave_save_offset
10004 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
10006 /* Align stack so vector save area is on a quadword boundary. */
10007 if (info_ptr->altivec_size != 0)
10008 info_ptr->altivec_padding_size
10009 = 16 - (-info_ptr->vrsave_save_offset % 16);
10010 else
10011 info_ptr->altivec_padding_size = 0;
10013 info_ptr->altivec_save_offset
10014 = info_ptr->vrsave_save_offset
10015 - info_ptr->altivec_padding_size
10016 - info_ptr->altivec_size;
10018 /* Adjust for AltiVec case. */
10019 info_ptr->toc_save_offset
10020 = info_ptr->altivec_save_offset - info_ptr->toc_size;
10022 else
10023 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
10024 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
10025 info_ptr->lr_save_offset = reg_size;
10026 break;
10029 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
10030 + info_ptr->gp_size
10031 + info_ptr->altivec_size
10032 + info_ptr->altivec_padding_size
10033 + info_ptr->spe_gp_size
10034 + info_ptr->spe_padding_size
10035 + ehrd_size
10036 + info_ptr->cr_size
10037 + info_ptr->lr_size
10038 + info_ptr->vrsave_size
10039 + info_ptr->toc_size,
10040 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
10041 ? 16 : 8);
10043 total_raw_size = (info_ptr->vars_size
10044 + info_ptr->parm_size
10045 + info_ptr->save_size
10046 + info_ptr->varargs_size
10047 + info_ptr->fixed_size);
10049 info_ptr->total_size =
10050 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
10052 /* Determine if we need to allocate any stack frame:
10054 For AIX we need to push the stack if a frame pointer is needed
10055 (because the stack might be dynamically adjusted), if we are
10056 debugging, if we make calls, or if the sum of fp_save, gp_save,
10057 and local variables are more than the space needed to save all
10058 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
10059 + 18*8 = 288 (GPR13 reserved).
10061 For V.4 we don't have the stack cushion that AIX uses, but assume
10062 that the debugger can handle stackless frames. */
10064 if (info_ptr->calls_p)
10065 info_ptr->push_p = 1;
10067 else if (DEFAULT_ABI == ABI_V4)
10068 info_ptr->push_p = total_raw_size > info_ptr->fixed_size;
10070 else if (frame_pointer_needed)
10071 info_ptr->push_p = 1;
10073 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
10074 info_ptr->push_p = 1;
10076 else
10077 info_ptr->push_p
10078 = total_raw_size - info_ptr->fixed_size > (TARGET_32BIT ? 220 : 288);
10080 /* Zero offsets if we're not saving those registers. */
10081 if (info_ptr->fp_size == 0)
10082 info_ptr->fp_save_offset = 0;
10084 if (info_ptr->gp_size == 0)
10085 info_ptr->gp_save_offset = 0;
10087 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
10088 info_ptr->altivec_save_offset = 0;
10090 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
10091 info_ptr->vrsave_save_offset = 0;
10093 if (! TARGET_SPE_ABI
10094 || info_ptr->spe_64bit_regs_used == 0
10095 || info_ptr->spe_gp_size == 0)
10096 info_ptr->spe_gp_save_offset = 0;
10098 if (! info_ptr->lr_save_p)
10099 info_ptr->lr_save_offset = 0;
10101 if (! info_ptr->cr_save_p)
10102 info_ptr->cr_save_offset = 0;
10104 if (! info_ptr->toc_save_p)
10105 info_ptr->toc_save_offset = 0;
10107 return info_ptr;
10110 /* Return true if the current function uses any GPRs in 64-bit SIMD
10111 mode. */
10113 static bool
10114 spe_func_has_64bit_regs_p ()
10116 rtx insns, insn;
10118 /* Functions that save and restore all the call-saved registers will
10119 need to save/restore the registers in 64-bits. */
10120 if (current_function_calls_eh_return
10121 || current_function_calls_setjmp
10122 || current_function_has_nonlocal_goto)
10123 return true;
10125 insns = get_insns ();
10127 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
10129 if (INSN_P (insn))
10131 rtx i;
10133 i = PATTERN (insn);
10134 if (GET_CODE (i) == SET
10135 && SPE_VECTOR_MODE (GET_MODE (SET_SRC (i))))
10136 return true;
10140 return false;
/* Debugging aid: dump the frame-layout structure INFO to stderr, one
   labelled line per field, skipping fields that are zero or at their
   "nothing saved" defaults.  If INFO is null, the current function's
   layout is computed via rs6000_stack_info first.  */
10143 void
10144 debug_stack_info (info)
10145 rs6000_stack_t *info;
10147 const char *abi_string;
10149 if (! info)
10150 info = rs6000_stack_info ();
10152 fprintf (stderr, "\nStack information for function %s:\n",
10153 ((current_function_decl && DECL_NAME (current_function_decl))
10154 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
10155 : "<unknown>"));
/* Translate the ABI enumerator into a human-readable tag.  */
10157 switch (info->abi)
10159 default: abi_string = "Unknown"; break;
10160 case ABI_NONE: abi_string = "NONE"; break;
10161 case ABI_AIX: abi_string = "AIX"; break;
10162 case ABI_DARWIN: abi_string = "Darwin"; break;
10163 case ABI_V4: abi_string = "V.4"; break;
10166 fprintf (stderr, "\tABI = %5s\n", abi_string);
10168 if (TARGET_ALTIVEC_ABI)
10169 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
10171 if (TARGET_SPE_ABI)
10172 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
/* 32 / 64 are the "no GPR / no FPR saved" sentinels; print only when
   some register is actually being saved.  */
10174 if (info->first_gp_reg_save != 32)
10175 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
10177 if (info->first_fp_reg_save != 64)
10178 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
10180 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
10181 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
10182 info->first_altivec_reg_save);
10184 if (info->lr_save_p)
10185 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
10187 if (info->cr_save_p)
10188 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
10190 if (info->toc_save_p)
10191 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
10193 if (info->vrsave_mask)
10194 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
10196 if (info->push_p)
10197 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
10199 if (info->calls_p)
10200 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
/* Offsets of each save area, relative to the frame.  */
10202 if (info->gp_save_offset)
10203 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
10205 if (info->fp_save_offset)
10206 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
10208 if (info->altivec_save_offset)
10209 fprintf (stderr, "\taltivec_save_offset = %5d\n",
10210 info->altivec_save_offset);
10212 if (info->spe_gp_save_offset)
10213 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
10214 info->spe_gp_save_offset);
10216 if (info->vrsave_save_offset)
10217 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
10218 info->vrsave_save_offset);
10220 if (info->lr_save_offset)
10221 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
10223 if (info->cr_save_offset)
10224 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
10226 if (info->toc_save_offset)
10227 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
10229 if (info->varargs_save_offset)
10230 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
/* Sizes of the various frame components.  */
10232 if (info->total_size)
10233 fprintf (stderr, "\ttotal_size = %5d\n", info->total_size);
10235 if (info->varargs_size)
10236 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
10238 if (info->vars_size)
10239 fprintf (stderr, "\tvars_size = %5d\n", info->vars_size);
10241 if (info->parm_size)
10242 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
10244 if (info->fixed_size)
10245 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
10247 if (info->gp_size)
10248 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
10250 if (info->spe_gp_size)
10251 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
10253 if (info->fp_size)
10254 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
10256 if (info->altivec_size)
10257 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
10259 if (info->vrsave_size)
10260 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
10262 if (info->altivec_padding_size)
10263 fprintf (stderr, "\taltivec_padding_size= %5d\n",
10264 info->altivec_padding_size);
10266 if (info->spe_padding_size)
10267 fprintf (stderr, "\tspe_padding_size = %5d\n",
10268 info->spe_padding_size);
10270 if (info->lr_size)
10271 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
10273 if (info->cr_size)
10274 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
10276 if (info->toc_size)
10277 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
10279 if (info->save_size)
10280 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
10282 if (info->reg_size != 4)
10283 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
10285 fprintf (stderr, "\n");
/* Return the RTX giving the return address COUNT frames back from the
   current one; FRAME is that frame's frame pointer.  For COUNT == 0
   in the non-PIC AIX case, use the link register's value on entry;
   otherwise load the address from the back chain at
   RETURN_ADDRESS_OFFSET.  (NOTE(review): the `rtx' return-type line
   sits just above this view.)  */
10289 rs6000_return_addr (count, frame)
10290 int count;
10291 rtx frame;
10293 /* Currently we don't optimize very well between prolog and body
10294 code and for PIC code the code can be actually quite bad, so
10295 don't try to be too clever here. */
10296 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
/* Force the full frame to be laid out, then walk the back chain:
   load the saved stack pointer from FRAME and read the slot at
   RETURN_ADDRESS_OFFSET from it.  */
10298 cfun->machine->ra_needs_full_frame = 1;
10300 return
10301 gen_rtx_MEM
10302 (Pmode,
10303 memory_address
10304 (Pmode,
10305 plus_constant (copy_to_reg
10306 (gen_rtx_MEM (Pmode,
10307 memory_address (Pmode, frame))),
10308 RETURN_ADDRESS_OFFSET)));
/* Simple case: the entry value of the link register.  */
10311 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
10314 /* Say whether a function is a candidate for sibcall handling or not.
10315 We do not allow indirect calls to be optimized into sibling calls.
10316 Also, we can't do it if there are any vector parameters; there's
10317 nowhere to put the VRsave code so it works; note that functions with
10318 vector parameters are required to have a prototype, so the argument
10319 type info must be available here. (The tail recursion case can work
10320 with vector parameters, but there's no way to distinguish here.) */
10321 static bool
10322 rs6000_function_ok_for_sibcall (decl, exp)
10323 tree decl;
10324 tree exp ATTRIBUTE_UNUSED;
10326 tree type;
/* DECL is null for indirect calls, which are never sibcalled (see the
   comment above this function).  */
10327 if (decl)
/* Per the comment above: with VRSAVE there is nowhere to put the
   VRsave code, so refuse when any declared argument is a vector.  */
10329 if (TARGET_ALTIVEC_VRSAVE)
10331 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
10332 type; type = TREE_CHAIN (type))
10334 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
10335 return false;
/* Allow the sibcall only for locally-binding targets (or on Darwin),
   and not when the call is forced through a longcall sequence --
   unless "shortcall" overrides "longcall".  */
10338 if (DEFAULT_ABI == ABI_DARWIN
10339 || (*targetm.binds_local_p) (decl))
10341 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
10343 if (!lookup_attribute ("longcall", attr_list)
10344 || lookup_attribute ("shortcall", attr_list))
10345 return true;
10348 return false;
/* Return 1 if the link register must be saved: i.e. some insn outside
   the prologue/epilogue stores into LR, has a REG_INC note for it, or
   is a non-sibling call.  Return 0 otherwise (including for MI
   thunks).  */
10351 static int
10352 rs6000_ra_ever_killed ()
10354 rtx top;
10355 rtx reg;
10356 rtx insn;
10358 /* Irritatingly, there are two kinds of thunks -- those created with
10359 TARGET_ASM_OUTPUT_MI_THUNK and those with DECL_THUNK_P that go
10360 through the regular part of the compiler. This is a very hacky
10361 way to tell them apart. */
10362 if (current_function_is_thunk && !no_new_pseudos)
10363 return 0;
10365 /* regs_ever_live has LR marked as used if any sibcalls are present,
10366 but this should not force saving and restoring in the
10367 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
10368 clobbers LR, so that is inappropriate. */
10370 /* Also, the prologue can generate a store into LR that
10371 doesn't really count, like this:
10373 move LR->R0
10374 bcl to set PIC register
10375 move LR->R31
10376 move R0->LR
10378 When we're called from the epilogue, we need to avoid counting
10379 this as a store. */
/* Grab the outermost insn sequence even if we are currently emitting
   inside a nested one.  */
10381 push_topmost_sequence ();
10382 top = get_insns ();
10383 pop_topmost_sequence ();
10384 reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
10386 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
10388 if (INSN_P (insn))
10390 if (FIND_REG_INC_NOTE (insn, reg))
10391 return 1;
10392 else if (GET_CODE (insn) == CALL_INSN
10393 && !SIBLING_CALL_P (insn))
10394 return 1;
/* Stores into LR emitted by the prologue/epilogue themselves do not
   count (see the comment above).  */
10395 else if (set_of (reg, insn) != NULL_RTX
10396 && !prologue_epilogue_contains (insn))
10397 return 1;
10400 return 0;
10403 /* Add a REG_MAYBE_DEAD note to the insn. */
10404 static void
10405 rs6000_maybe_dead (insn)
10406 rtx insn;
10408 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
10409 const0_rtx,
10410 REG_NOTES (insn));
10413 /* Emit instructions needed to load the TOC register.
10414 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
10415 a constant pool; or for SVR4 -fpic. */
10417 void
10418 rs6000_emit_load_toc_table (fromprolog)
10419 int fromprolog;
10421 rtx dest, insn;
/* DEST is the TOC/GOT pointer register being initialized.  When
   FROMPROLOG is set, every insn emitted here gets a REG_MAYBE_DEAD
   note so flow can delete it if the TOC is never used.  */
10422 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
/* SVR4 small-model PIC (-fpic): one load_toc_v4_pic insn.  */
10424 if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
10426 rtx temp = (fromprolog
10427 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
10428 : gen_reg_rtx (Pmode));
10429 insn = emit_insn (gen_load_toc_v4_pic_si (temp));
10430 if (fromprolog)
10431 rs6000_maybe_dead (insn);
10432 insn = emit_move_insn (dest, temp);
10433 if (fromprolog)
10434 rs6000_maybe_dead (insn);
/* SVR4 large-model PIC (-fPIC): compute the GOT address via a pair of
   local labels (or, outside the prologue, via the TOC symbol).  */
10436 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
10438 char buf[30];
10439 rtx tempLR = (fromprolog
10440 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
10441 : gen_reg_rtx (Pmode));
10442 rtx temp0 = (fromprolog
10443 ? gen_rtx_REG (Pmode, 0)
10444 : gen_reg_rtx (Pmode));
10445 rtx symF;
10447 /* possibly create the toc section */
10448 if (! toc_initialized)
10450 toc_section ();
10451 function_section (current_function_decl);
10454 if (fromprolog)
10456 rtx symL;
10458 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
10459 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
10461 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
10462 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
10464 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
10465 symF)));
10466 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
10467 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
10468 symL,
10469 symF)));
10471 else
10473 rtx tocsym;
/* Not in the prologue: use a fresh "LCG" label each time.  */
10474 static int reload_toc_labelno = 0;
10476 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
10478 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
10479 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
10481 emit_insn (gen_load_toc_v4_PIC_1b (tempLR, symF, tocsym));
10482 emit_move_insn (dest, tempLR);
10483 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
10485 insn = emit_insn (gen_addsi3 (dest, temp0, dest));
10486 if (fromprolog)
10487 rs6000_maybe_dead (insn);
10489 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
10491 /* This is for AIX code running in non-PIC ELF32. */
10492 char buf[30];
10493 rtx realsym;
10494 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
10495 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
/* Materialize the TOC address as high/low halves.  */
10497 insn = emit_insn (gen_elf_high (dest, realsym));
10498 if (fromprolog)
10499 rs6000_maybe_dead (insn);
10500 insn = emit_insn (gen_elf_low (dest, dest, realsym));
10501 if (fromprolog)
10502 rs6000_maybe_dead (insn);
/* AIX (and 64-bit) ABI: reload the TOC pointer from the stack slot.  */
10504 else if (DEFAULT_ABI == ABI_AIX)
10506 if (TARGET_32BIT)
10507 insn = emit_insn (gen_load_toc_aix_si (dest));
10508 else
10509 insn = emit_insn (gen_load_toc_aix_di (dest));
10510 if (fromprolog)
10511 rs6000_maybe_dead (insn);
10513 else
10514 abort ();
int
get_TOC_alias_set ()
{
  /* Lazily allocate a single alias set, shared by all TOC references.  */
  static int set = -1;

  if (set == -1)
    set = new_alias_set ();

  return set;
}
10526 /* This returns nonzero if the current function uses the TOC. This is
10527 determined by the presence of (unspec ... UNSPEC_TOC) or
10528 use (unspec ... UNSPEC_TOC), which are generated by the various
10529 load_toc_* patterns. */
10532 uses_TOC ()
10534 rtx insn;
10536 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
10537 if (INSN_P (insn))
10539 rtx pat = PATTERN (insn);
10540 int i;
10542 if (GET_CODE (pat) == PARALLEL)
10543 for (i = 0; i < XVECLEN (pat, 0); i++)
10545 rtx sub = XVECEXP (pat, 0, i);
10546 if (GET_CODE (sub) == USE)
10548 sub = XEXP (sub, 0);
10549 if (GET_CODE (sub) == UNSPEC
10550 && XINT (sub, 1) == UNSPEC_TOC)
10551 return 1;
10555 return 0;
10559 create_TOC_reference (symbol)
10560 rtx symbol;
10562 return gen_rtx_PLUS (Pmode,
10563 gen_rtx_REG (Pmode, TOC_REGISTER),
10564 gen_rtx_CONST (Pmode,
10565 gen_rtx_MINUS (Pmode, symbol,
10566 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
10569 #if TARGET_AIX
10570 /* __throw will restore its own return address to be the same as the
10571 return address of the function that the throw is being made to.
10572 This is unfortunate, because we want to check the original
10573 return address to see if we need to restore the TOC.
10574 So we have to squirrel it away here.
10575 This is used only in compiling __throw and __rethrow.
10577 Most of this code should be removed by CSE. */
10578 static rtx insn_after_throw;
10580 /* This does the saving... */
10581 void
10582 rs6000_aix_emit_builtin_unwind_init ()
10584 rtx mem;
10585 rtx stack_top = gen_reg_rtx (Pmode);
10586 rtx opcode_addr = gen_reg_rtx (Pmode);
/* Save, into INSN_AFTER_THROW, the instruction found at the caller's
   return address (see the comment above this function: __throw will
   later inspect it to decide whether the TOC must be restored).  */
10588 insn_after_throw = gen_reg_rtx (SImode);
/* Load the caller's frame address (the back chain).  */
10590 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
10591 emit_move_insn (stack_top, mem);
/* The saved return address lives two words up the caller's frame;
   fetch the instruction it points at.  */
10593 mem = gen_rtx_MEM (Pmode,
10594 gen_rtx_PLUS (Pmode, stack_top,
10595 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
10596 emit_move_insn (opcode_addr, mem);
10597 emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
10600 /* Emit insns to _restore_ the TOC register, at runtime (specifically
10601 in _eh.o). Only used on AIX.
10603 The idea is that on AIX, function calls look like this:
10604 bl somefunction-trampoline
10605 lwz r2,20(sp)
10607 and later,
10608 somefunction-trampoline:
10609 stw r2,20(sp)
10610 ... load function address in the count register ...
10611 bctr
10612 or like this, if the linker determines that this is not a cross-module call
10613 and so the TOC need not be restored:
10614 bl somefunction
10616 or like this, if the compiler could determine that this is not a
10617 cross-module call:
10618 bl somefunction
10619 now, the tricky bit here is that register 2 is saved and restored
10620 by the _linker_, so we can't readily generate debugging information
10621 for it. So we need to go back up the call chain looking at the
10622 insns at return addresses to see which calls saved the TOC register
10623 and so see where it gets restored from.
10625 Oh, and all this gets done in RTL inside the eh_epilogue pattern,
10626 just before the actual epilogue.
10628 On the bright side, this incurs no space or time overhead unless an
10629 exception is thrown, except for the extra code in libgcc.a.
10631 The parameter STACKSIZE is a register containing (at runtime)
10632 the amount to be popped off the stack in addition to the stack frame
10633 of this routine (which will be __throw or __rethrow, and so is
10634 guaranteed to have a stack frame). */
10636 void
10637 rs6000_emit_eh_toc_restore (stacksize)
10638 rtx stacksize;
10640 rtx top_of_stack;
10641 rtx bottom_of_stack = gen_reg_rtx (Pmode);
10642 rtx tocompare = gen_reg_rtx (SImode);
10643 rtx opcode = gen_reg_rtx (SImode);
10644 rtx opcode_addr = gen_reg_rtx (Pmode);
10645 rtx mem;
10646 rtx loop_start = gen_label_rtx ();
10647 rtx no_toc_restore_needed = gen_label_rtx ();
10648 rtx loop_exit = gen_label_rtx ();
/* Start at this function's frame (the back chain slot).  */
10650 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
10651 set_mem_alias_set (mem, rs6000_sr_alias_set);
10652 emit_move_insn (bottom_of_stack, mem);
/* The walk stops once we have popped STACKSIZE bytes of frames.  */
10654 top_of_stack = expand_binop (Pmode, add_optab,
10655 bottom_of_stack, stacksize,
10656 NULL_RTX, 1, OPTAB_WIDEN);
/* The TOC-reload opcode emitted after cross-module calls (cf. the
   "lwz r2,20(sp)" in the comment above; the 64-bit constant is
   presumably the analogous ld form -- NOTE(review): verify).  */
10658 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
10659 : 0xE8410028, SImode));
/* INSN_AFTER_THROW was set up by
   rs6000_aix_emit_builtin_unwind_init.  */
10661 if (insn_after_throw == NULL_RTX)
10662 abort ();
10663 emit_move_insn (opcode, insn_after_throw);
10665 emit_note (NULL, NOTE_INSN_LOOP_BEG);
10666 emit_label (loop_start);
/* If the insn after the call was the TOC reload, the call saved the
   TOC: restore r2 from this frame.  */
10668 do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
10669 SImode, NULL_RTX, NULL_RTX,
10670 no_toc_restore_needed);
10672 mem = gen_rtx_MEM (Pmode,
10673 gen_rtx_PLUS (Pmode, bottom_of_stack,
10674 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
10675 emit_move_insn (gen_rtx_REG (Pmode, 2), mem);
10677 emit_label (no_toc_restore_needed);
10678 do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
10679 Pmode, NULL_RTX, NULL_RTX,
10680 loop_exit);
/* Move one frame up the back chain, and fetch the instruction at
   that frame's return address for the next iteration.  */
10682 mem = gen_rtx_MEM (Pmode, bottom_of_stack);
10683 set_mem_alias_set (mem, rs6000_sr_alias_set);
10684 emit_move_insn (bottom_of_stack, mem);
10686 mem = gen_rtx_MEM (Pmode,
10687 gen_rtx_PLUS (Pmode, bottom_of_stack,
10688 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
10689 emit_move_insn (opcode_addr, mem);
10690 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
10692 emit_note (NULL, NOTE_INSN_LOOP_CONT);
10693 emit_jump (loop_start);
10694 emit_note (NULL, NOTE_INSN_LOOP_END);
10695 emit_label (loop_exit);
10697 #endif /* TARGET_AIX */
10699 /* This ties together stack memory (MEM with an alias set of
10700 rs6000_sr_alias_set) and the change to the stack pointer. */
10702 static void
10703 rs6000_emit_stack_tie ()
10705 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
10707 set_mem_alias_set (mem, rs6000_sr_alias_set);
10708 emit_insn (gen_stack_tie (mem));
10711 /* Emit the correct code for allocating stack space, as insns.
10712 If COPY_R12, make sure a copy of the old frame is left in r12.
10713 The generated code may use hard register 0 as a temporary. */
10715 static void
10716 rs6000_emit_allocate_stack (size, copy_r12)
10717 HOST_WIDE_INT size;
10718 int copy_r12;
10720 rtx insn;
10721 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
10722 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
10723 rtx todec = GEN_INT (-size);
/* -fstack-limit support: trap if the new stack pointer would drop
   below the configured limit.  */
10725 if (current_function_limit_stack)
/* Limit held in a fixed register (r2..r31): compare against
   limit + SIZE.  */
10727 if (REG_P (stack_limit_rtx)
10728 && REGNO (stack_limit_rtx) > 1
10729 && REGNO (stack_limit_rtx) <= 31)
10731 emit_insn (TARGET_32BIT
10732 ? gen_addsi3 (tmp_reg,
10733 stack_limit_rtx,
10734 GEN_INT (size))
10735 : gen_adddi3 (tmp_reg,
10736 stack_limit_rtx,
10737 GEN_INT (size)));
10739 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
10740 const0_rtx));
/* Limit given as a symbol (32-bit SVR4 only): materialize
   symbol + SIZE via high/low halves, then trap-compare.  */
10742 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
10743 && TARGET_32BIT
10744 && DEFAULT_ABI == ABI_V4)
10746 rtx toload = gen_rtx_CONST (VOIDmode,
10747 gen_rtx_PLUS (Pmode,
10748 stack_limit_rtx,
10749 GEN_INT (size)));
10751 emit_insn (gen_elf_high (tmp_reg, toload));
10752 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
10753 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
10754 const0_rtx));
10756 else
10757 warning ("stack limit expression is not supported");
/* Keep the old stack pointer in r12: either because the caller asked
   for it, or because the no-update path below stores it as the back
   chain from r12.  */
10760 if (copy_r12 || ! TARGET_UPDATE)
10761 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
/* With store-with-update, decrement the SP and store the back chain
   in one insn.  Decrements beyond the 16-bit immediate range go
   through r0.  */
10763 if (TARGET_UPDATE)
10765 if (size > 32767)
10767 /* Need a note here so that try_split doesn't get confused. */
10768 if (get_last_insn() == NULL_RTX)
10769 emit_note (0, NOTE_INSN_DELETED);
10770 insn = emit_move_insn (tmp_reg, todec);
10771 try_split (PATTERN (insn), insn, 0);
10772 todec = tmp_reg;
10775 insn = emit_insn (TARGET_32BIT
10776 ? gen_movsi_update (stack_reg, stack_reg,
10777 todec, stack_reg)
10778 : gen_movdi_update (stack_reg, stack_reg,
10779 todec, stack_reg));
10781 else
/* No update forms: adjust the SP, then store the saved SP (r12) as
   the back chain word.  */
10783 insn = emit_insn (TARGET_32BIT
10784 ? gen_addsi3 (stack_reg, stack_reg, todec)
10785 : gen_adddi3 (stack_reg, stack_reg, todec));
10786 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
10787 gen_rtx_REG (Pmode, 12));
/* Record the canonical sp = sp - size for the unwinder.  */
10790 RTX_FRAME_RELATED_P (insn) = 1;
10791 REG_NOTES (insn) =
10792 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
10793 gen_rtx_SET (VOIDmode, stack_reg,
10794 gen_rtx_PLUS (Pmode, stack_reg,
10795 GEN_INT (-size))),
10796 REG_NOTES (insn));
10799 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
10800 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
10801 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
10802 deduce these equivalences by itself so it wasn't necessary to hold
10803 its hand so much. */
10805 static void
10806 rs6000_frame_related (insn, reg, val, reg2, rreg)
10807 rtx insn;
10808 rtx reg;
10809 HOST_WIDE_INT val;
10810 rtx reg2;
10811 rtx rreg;
10813 rtx real, temp;
10815 /* copy_rtx will not make unique copies of registers, so we need to
10816 ensure we don't have unwanted sharing here. */
10817 if (reg == reg2)
10818 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
10820 if (reg == rreg)
10821 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
/* Build REAL: INSN's pattern with REG2 -> RREG (if given) and
   REG -> (plus sp VAL), then simplified.  This becomes the
   REG_FRAME_RELATED_EXPR the unwinder reads.  */
10823 real = copy_rtx (PATTERN (insn));
10825 if (reg2 != NULL_RTX)
10826 real = replace_rtx (real, reg2, rreg);
10828 real = replace_rtx (real, reg,
10829 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
10830 STACK_POINTER_REGNUM),
10831 GEN_INT (val)));
10833 /* We expect that 'real' is either a SET or a PARALLEL containing
10834 SETs (and possibly other stuff). In a PARALLEL, all the SETs
10835 are important so they all have to be marked RTX_FRAME_RELATED_P. */
10837 if (GET_CODE (real) == SET)
10839 rtx set = real;
/* Fold the substituted (plus sp VAL) subexpressions to constants
   where possible, on src, dest, and any MEM address.  */
10841 temp = simplify_rtx (SET_SRC (set));
10842 if (temp)
10843 SET_SRC (set) = temp;
10844 temp = simplify_rtx (SET_DEST (set));
10845 if (temp)
10846 SET_DEST (set) = temp;
10847 if (GET_CODE (SET_DEST (set)) == MEM)
10849 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
10850 if (temp)
10851 XEXP (SET_DEST (set), 0) = temp;
10854 else if (GET_CODE (real) == PARALLEL)
10856 int i;
10857 for (i = 0; i < XVECLEN (real, 0); i++)
10858 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
10860 rtx set = XVECEXP (real, 0, i);
10862 temp = simplify_rtx (SET_SRC (set));
10863 if (temp)
10864 SET_SRC (set) = temp;
10865 temp = simplify_rtx (SET_DEST (set));
10866 if (temp)
10867 SET_DEST (set) = temp;
10868 if (GET_CODE (SET_DEST (set)) == MEM)
10870 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
10871 if (temp)
10872 XEXP (SET_DEST (set), 0) = temp;
10874 RTX_FRAME_RELATED_P (set) = 1;
10877 else
10878 abort ();
/* SPE 64-bit saves need an extra synthetic-register SET; see
   spe_synthesize_frame_save.  */
10880 if (TARGET_SPE)
10881 real = spe_synthesize_frame_save (real);
10883 RTX_FRAME_RELATED_P (insn) = 1;
10884 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
10885 real,
10886 REG_NOTES (insn));
10889 /* Given an SPE frame note, return a PARALLEL of SETs with the
10890 original note, plus a synthetic register save. */
10892 static rtx
10893 spe_synthesize_frame_save (real)
10894 rtx real;
10896 rtx synth, offset, reg, real2;
/* Only 64-bit (V2SImode) register saves need the synthetic note;
   anything else passes through untouched.  */
10898 if (GET_CODE (real) != SET
10899 || GET_MODE (SET_SRC (real)) != V2SImode)
10900 return real;
10902 /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
10903 frame related note. The parallel contains a set of the register
10904 being saved, and another set to a synthetic register (n+1200).
10905 This is so we can differentiate between 64-bit and 32-bit saves.
10906 Words cannot describe this nastiness. */
10908 if (GET_CODE (SET_DEST (real)) != MEM
10909 || GET_CODE (XEXP (SET_DEST (real), 0)) != PLUS
10910 || GET_CODE (SET_SRC (real)) != REG)
10911 abort ();
10913 /* Transform:
10914 (set (mem (plus (reg x) (const y)))
10915 (reg z))
10916 into:
10917 (set (mem (plus (reg x) (const y+4)))
10918 (reg z+1200))
/* REAL2 is the save recast as an SImode word store.  */
10921 real2 = copy_rtx (real);
10922 PUT_MODE (SET_DEST (real2), SImode);
10923 reg = SET_SRC (real2);
10924 real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
10925 synth = copy_rtx (real2);
/* On big-endian, the real register's low word is the second word of
   the slot, so bump REAL2's offset by 4.  */
10927 if (BYTES_BIG_ENDIAN)
10929 offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
10930 real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
/* SYNTH stores the synthetic register (regno + 1200) into the other
   word of the slot.  */
10933 reg = SET_SRC (synth);
10935 synth = replace_rtx (synth, reg,
10936 gen_rtx_REG (SImode, REGNO (reg) + 1200));
10938 offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
10939 synth = replace_rtx (synth, offset,
10940 GEN_INT (INTVAL (offset)
10941 + (BYTES_BIG_ENDIAN ? 0 : 4)));
10943 RTX_FRAME_RELATED_P (synth) = 1;
10944 RTX_FRAME_RELATED_P (real2) = 1;
10945 if (BYTES_BIG_ENDIAN)
10946 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
10947 else
10948 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));
10950 return real;
10953 /* Returns an insn that has a vrsave set operation with the
10954 appropriate CLOBBERs. */
10956 static rtx
10957 generate_set_vrsave (reg, info, epiloguep)
10958 rtx reg;
10959 rs6000_stack_t *info;
10960 int epiloguep;
10962 int nclobs, i;
10963 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
10964 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
10966 clobs[0]
10967 = gen_rtx_SET (VOIDmode,
10968 vrsave,
10969 gen_rtx_UNSPEC_VOLATILE (SImode,
10970 gen_rtvec (2, reg, vrsave),
10971 30));
10973 nclobs = 1;
10975 /* We need to clobber the registers in the mask so the scheduler
10976 does not move sets to VRSAVE before sets of AltiVec registers.
10978 However, if the function receives nonlocal gotos, reload will set
10979 all call saved registers live. We will end up with:
10981 (set (reg 999) (mem))
10982 (parallel [ (set (reg vrsave) (unspec blah))
10983 (clobber (reg 999))])
10985 The clobber will cause the store into reg 999 to be dead, and
10986 flow will attempt to delete an epilogue insn. In this case, we
10987 need an unspec use/set of the register. */
10989 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
10990 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
10992 if (!epiloguep || call_used_regs [i])
10993 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
10994 gen_rtx_REG (V4SImode, i));
10995 else
10997 rtx reg = gen_rtx_REG (V4SImode, i);
10999 clobs[nclobs++]
11000 = gen_rtx_SET (VOIDmode,
11001 reg,
11002 gen_rtx_UNSPEC (V4SImode,
11003 gen_rtvec (1, reg), 27));
11007 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
11009 for (i = 0; i < nclobs; ++i)
11010 XVECEXP (insn, 0, i) = clobs[i];
11012 return insn;
11015 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
11016 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
11018 static void
11019 emit_frame_save (frame_reg, frame_ptr, mode, regno, offset, total_size)
11020 rtx frame_reg;
11021 rtx frame_ptr;
11022 enum machine_mode mode;
11023 unsigned int regno;
11024 int offset;
11025 int total_size;
11027 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
11028 rtx replacea, replaceb;
11030 int_rtx = GEN_INT (offset);
11032 /* Some cases that need register indexed addressing. */
/* AltiVec stores, and SPE vector stores whose offset does not fit the
   SPE immediate field, must use reg+reg addressing: load the offset
   into r11 and remember the pair so the frame note can substitute the
   constant back in.  */
11033 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
11034 || (TARGET_SPE_ABI
11035 && SPE_VECTOR_MODE (mode)
11036 && !SPE_CONST_OFFSET_OK (offset)))
11038 /* Whomever calls us must make sure r11 is available in the
11039 flow path of instructions in the prologue. */
11040 offset_rtx = gen_rtx_REG (Pmode, 11);
11041 emit_move_insn (offset_rtx, int_rtx);
11043 replacea = offset_rtx;
11044 replaceb = int_rtx;
11046 else
11048 offset_rtx = int_rtx;
11049 replacea = NULL_RTX;
11050 replaceb = NULL_RTX;
/* Emit the store [FRAME_REG + OFFSET] = REGNO, tag it with the stack
   alias set, and attach the unwinder's frame-related note.  */
11053 reg = gen_rtx_REG (mode, regno);
11054 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
11055 mem = gen_rtx_MEM (mode, addr);
11056 set_mem_alias_set (mem, rs6000_sr_alias_set);
11058 insn = emit_move_insn (mem, reg);
11060 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
11063 /* Emit an offset memory reference suitable for a frame store, while
11064 converting to a valid addressing mode. */
11066 static rtx
11067 gen_frame_mem_offset (mode, reg, offset)
11068 enum machine_mode mode;
11069 rtx reg;
11070 int offset;
11072 rtx int_rtx, offset_rtx;
11074 int_rtx = GEN_INT (offset);
11076 if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
11078 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
11079 emit_move_insn (offset_rtx, int_rtx);
11081 else
11082 offset_rtx = int_rtx;
11084 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
11087 /* Emit function prologue as insns. */
/* Allocates the stack frame and saves the call-saved state described by
   rs6000_stack_info (): AltiVec registers, VRSAVE, FPRs (inline or via
   an out-of-line _savefN routine), GPRs (possibly with store-multiple),
   EH data registers, LR and CR; then sets up the frame pointer and the
   TOC/PIC register if needed.  r0, r11 and r12 are used as scratch;
   frame_ptr_rtx (r12) is also the register against which
   RTX_FRAME_RELATED unwind notes are expressed.  */
11089 void
11090 rs6000_emit_prologue ()
11092 rs6000_stack_t *info = rs6000_stack_info ();
11093 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
11094 int reg_size = TARGET_POWERPC64 ? 8 : 4;
11095 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
11096 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
11097 rtx frame_reg_rtx = sp_reg_rtx;
11098 rtx cr_save_rtx = NULL;
11099 rtx insn;
11100 int saving_FPRs_inline;
11101 int using_store_multiple;
11102 HOST_WIDE_INT sp_offset = 0;
/* Under the SPE ABI with 64-bit registers in use, GPR saves are done in
   V2SImode so both halves of each 64-bit register are preserved.  */
11104 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11106 reg_mode = V2SImode;
11107 reg_size = 8;
11110 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
11111 && (!TARGET_SPE_ABI
11112 || info->spe_64bit_regs_used == 0)
11113 && info->first_gp_reg_save < 31);
11114 saving_FPRs_inline = (info->first_fp_reg_save == 64
11115 || FP_SAVE_INLINE (info->first_fp_reg_save));
11117 /* For V.4, update stack before we do any saving and set back pointer. */
11118 if (info->push_p && DEFAULT_ABI == ABI_V4)
/* Small frames are addressed from the new sp via SP_OFFSET; large ones
   keep the old sp in r12 (frame_ptr_rtx) and save relative to that.  */
11120 if (info->total_size < 32767)
11121 sp_offset = info->total_size;
11122 else
11123 frame_reg_rtx = frame_ptr_rtx;
11124 rs6000_emit_allocate_stack (info->total_size,
11125 (frame_reg_rtx != sp_reg_rtx
11126 && (info->cr_save_p
11127 || info->lr_save_p
11128 || info->first_fp_reg_save < 64
11129 || info->first_gp_reg_save < 32
11130 )));
11131 if (frame_reg_rtx != sp_reg_rtx)
11132 rs6000_emit_stack_tie ();
11135 /* Save AltiVec registers if needed. */
11136 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
11138 int i;
11140 /* There should be a non inline version of this, for when we
11141 are saving lots of vector registers. */
11142 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
11143 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
11145 rtx areg, savereg, mem;
11146 int offset;
11148 offset = info->altivec_save_offset + sp_offset
11149 + 16 * (i - info->first_altivec_reg_save);
11151 savereg = gen_rtx_REG (V4SImode, i);
11153 areg = gen_rtx_REG (Pmode, 0);
11154 emit_move_insn (areg, GEN_INT (offset));
11156 /* AltiVec addressing mode is [reg+reg]. */
11157 mem = gen_rtx_MEM (V4SImode,
11158 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
11160 set_mem_alias_set (mem, rs6000_sr_alias_set);
11162 insn = emit_move_insn (mem, savereg);
11164 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11165 areg, GEN_INT (offset));
11169 /* VRSAVE is a bit vector representing which AltiVec registers
11170 are used. The OS uses this to determine which vector
11171 registers to save on a context switch. We need to save
11172 VRSAVE on the stack frame, add whatever AltiVec registers we
11173 used in this function, and do the corresponding magic in the
11174 epilogue. */
11176 if (TARGET_ALTIVEC && info->vrsave_mask != 0)
11178 rtx reg, mem, vrsave;
11179 int offset;
11181 /* Get VRSAVE onto a GPR. */
11182 reg = gen_rtx_REG (SImode, 12);
11183 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
11184 if (TARGET_MACHO)
11185 emit_insn (gen_get_vrsave_internal (reg));
11186 else
11187 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
11189 /* Save VRSAVE. */
11190 offset = info->vrsave_save_offset + sp_offset;
/* NOTE(review): the left-hand side of this assignment (presumably
   "mem") appears to have been lost in extraction — verify against the
   original source.  */
11192 = gen_rtx_MEM (SImode,
11193 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
11194 set_mem_alias_set (mem, rs6000_sr_alias_set);
11195 insn = emit_move_insn (mem, reg);
11197 /* Include the registers in the mask. */
11198 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
11200 insn = emit_insn (generate_set_vrsave (reg, info, 0));
11203 /* If we use the link register, get it into r0. */
11204 if (info->lr_save_p)
11205 emit_move_insn (gen_rtx_REG (Pmode, 0),
11206 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM))
11208 /* If we need to save CR, put it into r12. */
/* Only if r12 isn't already busy holding the old stack pointer;
   otherwise CR is copied to r0 later, once r12's value is stored.  */
11209 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
11211 cr_save_rtx = gen_rtx_REG (SImode, 12);
11212 emit_insn (gen_movesi_from_cr (cr_save_rtx));
11215 /* Do any required saving of fpr's. If only one or two to save, do
11216 it ourselves. Otherwise, call function. */
11217 if (saving_FPRs_inline)
11219 int i;
11220 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11221 if ((regs_ever_live[info->first_fp_reg_save+i]
11222 && ! call_used_regs[info->first_fp_reg_save+i]))
11223 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
11224 info->first_fp_reg_save + i,
11225 info->fp_save_offset + sp_offset + 8 * i,
11226 info->total_size);
11228 else if (info->first_fp_reg_save != 64)
/* Out-of-line save: emit a call to the _savefN millicode routine as a
   PARALLEL of the FPR stores plus a clobber of LR.  */
11230 int i;
11231 char rname[30];
11232 const char *alloc_rname;
11233 rtvec p;
11234 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
11236 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
11237 gen_rtx_REG (Pmode,
11238 LINK_REGISTER_REGNUM));
11239 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
11240 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
11241 alloc_rname = ggc_strdup (rname);
11242 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
11243 gen_rtx_SYMBOL_REF (Pmode,
11244 alloc_rname));
11245 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11247 rtx addr, reg, mem;
11248 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
11249 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11250 GEN_INT (info->fp_save_offset
11251 + sp_offset + 8*i));
11252 mem = gen_rtx_MEM (DFmode, addr);
11253 set_mem_alias_set (mem, rs6000_sr_alias_set);
11255 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
11257 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11258 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11259 NULL_RTX, NULL_RTX);
11262 /* Save GPRs. This is done as a PARALLEL if we are using
11263 the store-multiple instructions. */
11264 if (using_store_multiple)
11266 rtvec p;
11267 int i;
11268 p = rtvec_alloc (32 - info->first_gp_reg_save);
11269 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11271 rtx addr, reg, mem;
11272 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
11273 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11274 GEN_INT (info->gp_save_offset
11275 + sp_offset
11276 + reg_size * i));
11277 mem = gen_rtx_MEM (reg_mode, addr);
11278 set_mem_alias_set (mem, rs6000_sr_alias_set);
11280 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
11282 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11283 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11284 NULL_RTX, NULL_RTX);
11286 else
/* Save GPRs one at a time; the PIC register is saved even if
   call-used when PIC code needs it restored after calls.  */
11288 int i;
11289 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11290 if ((regs_ever_live[info->first_gp_reg_save+i]
11291 && ! call_used_regs[info->first_gp_reg_save+i])
11292 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
11293 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
11294 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
11296 rtx addr, reg, mem;
11297 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
11299 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11301 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
11302 rtx b;
11304 if (!SPE_CONST_OFFSET_OK (offset))
11306 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
11307 emit_move_insn (b, GEN_INT (offset));
11309 else
11310 b = GEN_INT (offset);
11312 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
11313 mem = gen_rtx_MEM (V2SImode, addr);
11314 set_mem_alias_set (mem, rs6000_sr_alias_set);
11315 insn = emit_move_insn (mem, reg);
11317 if (GET_CODE (b) == CONST_INT)
11318 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11319 NULL_RTX, NULL_RTX);
11320 else
11321 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11322 b, GEN_INT (offset));
11324 else
11326 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11327 GEN_INT (info->gp_save_offset
11328 + sp_offset
11329 + reg_size * i));
11330 mem = gen_rtx_MEM (reg_mode, addr);
11331 set_mem_alias_set (mem, rs6000_sr_alias_set);
11333 insn = emit_move_insn (mem, reg);
11334 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11335 NULL_RTX, NULL_RTX);
11340 /* ??? There's no need to emit actual instructions here, but it's the
11341 easiest way to get the frame unwind information emitted. */
11342 if (current_function_calls_eh_return)
11344 unsigned int i, regno;
11346 for (i = 0; ; ++i)
11348 regno = EH_RETURN_DATA_REGNO (i);
11349 if (regno == INVALID_REGNUM)
11350 break;
11352 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
11353 info->ehrd_offset + sp_offset
11354 + reg_size * (int) i,
11355 info->total_size);
11359 /* Save lr if we used it. */
11360 if (info->lr_save_p)
11362 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11363 GEN_INT (info->lr_save_offset + sp_offset));
11364 rtx reg = gen_rtx_REG (Pmode, 0);
11365 rtx mem = gen_rtx_MEM (Pmode, addr);
11366 /* This should not be of rs6000_sr_alias_set, because of
11367 __builtin_return_address. */
11369 insn = emit_move_insn (mem, reg);
11370 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11371 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
11374 /* Save CR if we use any that must be preserved. */
11375 if (info->cr_save_p)
11377 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11378 GEN_INT (info->cr_save_offset + sp_offset));
11379 rtx mem = gen_rtx_MEM (SImode, addr);
11381 set_mem_alias_set (mem, rs6000_sr_alias_set);
11383 /* If r12 was used to hold the original sp, copy cr into r0 now
11384 that it's free. */
11385 if (REGNO (frame_reg_rtx) == 12)
11387 cr_save_rtx = gen_rtx_REG (SImode, 0);
11388 emit_insn (gen_movesi_from_cr (cr_save_rtx));
11390 insn = emit_move_insn (mem, cr_save_rtx);
11392 /* Now, there's no way that dwarf2out_frame_debug_expr is going
11393 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
11394 But that's OK. All we have to do is specify that _one_ condition
11395 code register is saved in this stack slot. The thrower's epilogue
11396 will then restore all the call-saved registers.
11397 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
11398 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11399 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
11402 /* Update stack and set back pointer unless this is V.4,
11403 for which it was done previously. */
11404 if (info->push_p && DEFAULT_ABI != ABI_V4)
11405 rs6000_emit_allocate_stack (info->total_size, FALSE);
11407 /* Set frame pointer, if needed. */
11408 if (frame_pointer_needed)
11410 insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
11411 sp_reg_rtx);
11412 RTX_FRAME_RELATED_P (insn) = 1;
11415 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
11416 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
11417 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
11418 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
11420 /* If emit_load_toc_table will use the link register, we need to save
11421 it. We use R12 for this purpose because emit_load_toc_table
11422 can use register 0. This allows us to use a plain 'blr' to return
11423 from the procedure more often. */
11424 int save_LR_around_toc_setup = (TARGET_ELF
11425 && DEFAULT_ABI != ABI_AIX
11426 && flag_pic
11427 && ! info->lr_save_p
11428 && EXIT_BLOCK_PTR->pred != NULL);
11429 if (save_LR_around_toc_setup)
11431 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
11432 rs6000_maybe_dead (emit_move_insn (frame_ptr_rtx, lr));
11433 rs6000_emit_load_toc_table (TRUE);
11434 rs6000_maybe_dead (emit_move_insn (lr, frame_ptr_rtx));
11436 else
11437 rs6000_emit_load_toc_table (TRUE);
11440 #if TARGET_MACHO
/* Darwin PIC: materialize the picbase into LR via a local branch-and-link
   and copy it into the PIC offset table register.  */
11441 if (DEFAULT_ABI == ABI_DARWIN
11442 && flag_pic && current_function_uses_pic_offset_table)
11444 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
11445 const char *picbase = machopic_function_base_name ();
11446 rtx src = gen_rtx_SYMBOL_REF (Pmode, picbase);
11448 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest, src)));
11450 rs6000_maybe_dead (
11451 emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
11452 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
11454 #endif
11457 /* Write function prologue. */
/* TARGET_ASM_FUNCTION_PROLOGUE hook.  Emits assembler-level prologue
   boilerplate (.extern directives for out-of-line FP save/restore
   routines and AIX common-mode libcalls), and — when the target has no
   "prologue" RTL expander — generates and prints the prologue insns
   directly via rs6000_emit_prologue.  SIZE is unused.  */
11459 static void
11460 rs6000_output_function_prologue (file, size)
11461 FILE *file;
11462 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
11464 rs6000_stack_t *info = rs6000_stack_info ();
11466 if (TARGET_DEBUG_STACK)
11467 debug_stack_info (info);
11469 /* Write .extern for any function we will call to save and restore
11470 fp values. */
11471 if (info->first_fp_reg_save < 64
11472 && !FP_SAVE_INLINE (info->first_fp_reg_save))
11473 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
11474 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
11475 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
11476 RESTORE_FP_SUFFIX);
11478 /* Write .extern for AIX common mode routines, if needed. */
/* common_mode_defined is a file-scope latch so the .externs are only
   emitted once per translation unit.  */
11479 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
11481 fputs ("\t.extern __mulh\n", file);
11482 fputs ("\t.extern __mull\n", file);
11483 fputs ("\t.extern __divss\n", file);
11484 fputs ("\t.extern __divus\n", file);
11485 fputs ("\t.extern __quoss\n", file);
11486 fputs ("\t.extern __quous\n", file);
11487 common_mode_defined = 1;
11490 if (! HAVE_prologue)
11492 start_sequence ();
11494 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
11495 the "toplevel" insn chain. */
11496 emit_note (0, NOTE_INSN_DELETED);
11497 rs6000_emit_prologue ();
11498 emit_note (0, NOTE_INSN_DELETED);
11500 /* Expand INSN_ADDRESSES so final() doesn't crash. */
/* Each insn is assumed to be 4 bytes; only relative ordering matters
   here, not exact addresses.  */
11502 rtx insn;
11503 unsigned addr = 0;
11504 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
11506 INSN_ADDRESSES_NEW (insn, addr);
11507 addr += 4;
11511 if (TARGET_DEBUG_STACK)
11512 debug_rtx_list (get_insns (), 100);
11513 final (get_insns (), file, FALSE, FALSE);
11514 end_sequence ();
/* Bump the per-function PIC label counter used by the TOC/PIC setup.  */
11517 rs6000_pic_labelno++;
11520 /* Emit function epilogue as insns.
11522 At present, dwarf2out_frame_debug_expr doesn't understand
11523 register restores, so we don't bother setting RTX_FRAME_RELATED_P
11524 anywhere in the epilogue. Most of the insns below would in any case
11525 need special notes to explain where r11 is in relation to the stack. */
/* SIBCALL is nonzero when this epilogue precedes a sibling call; in that
   case no return insn is emitted (the sibcall itself transfers control)
   and FPRs must be restored inline.  Mirrors rs6000_emit_prologue:
   restores AltiVec regs, VRSAVE, LR, CR, EH data regs, GPRs (possibly
   load-multiple), FPRs (inline or via the out-of-line _restfN routine),
   then unwinds the stack pointer and returns.  */
11527 void
11528 rs6000_emit_epilogue (sibcall)
11529 int sibcall;
11531 rs6000_stack_t *info;
11532 int restoring_FPRs_inline;
11533 int using_load_multiple;
11534 int using_mfcr_multiple;
11535 int use_backchain_to_restore_sp;
11536 int sp_offset = 0;
11537 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
11538 rtx frame_reg_rtx = sp_reg_rtx;
11539 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
11540 int reg_size = TARGET_POWERPC64 ? 8 : 4;
11541 int i;
11543 info = rs6000_stack_info ();
11545 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11547 reg_mode = V2SImode;
11548 reg_size = 8;
11551 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
11552 && (!TARGET_SPE_ABI
11553 || info->spe_64bit_regs_used == 0)
11554 && info->first_gp_reg_save < 31);
11555 restoring_FPRs_inline = (sibcall
11556 || current_function_calls_eh_return
11557 || info->first_fp_reg_save == 64
11558 || FP_SAVE_INLINE (info->first_fp_reg_save));
11559 use_backchain_to_restore_sp = (frame_pointer_needed
11560 || current_function_calls_alloca
11561 || info->total_size > 32767);
/* On these processors mtcrf of multiple fields at once is profitable
   (or we are optimizing for size).  */
11562 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
11563 || rs6000_cpu == PROCESSOR_PPC603
11564 || rs6000_cpu == PROCESSOR_PPC750
11565 || optimize_size);
11567 /* If we have a frame pointer, a call to alloca, or a large stack
11568 frame, restore the old stack pointer using the backchain. Otherwise,
11569 we know what size to update it with. */
11570 if (use_backchain_to_restore_sp)
11572 /* Under V.4, don't reset the stack pointer until after we're done
11573 loading the saved registers. */
11574 if (DEFAULT_ABI == ABI_V4)
11575 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
11577 emit_move_insn (frame_reg_rtx,
11578 gen_rtx_MEM (Pmode, sp_reg_rtx));
11581 else if (info->push_p)
11583 if (DEFAULT_ABI == ABI_V4)
11584 sp_offset = info->total_size;
11585 else
11587 emit_insn (TARGET_32BIT
11588 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
11589 GEN_INT (info->total_size))
11590 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
11591 GEN_INT (info->total_size)));
11595 /* Restore AltiVec registers if needed. */
11596 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
11598 int i;
11600 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
11601 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
11603 rtx addr, areg, mem;
11605 areg = gen_rtx_REG (Pmode, 0);
11606 emit_move_insn
11607 (areg, GEN_INT (info->altivec_save_offset
11608 + sp_offset
11609 + 16 * (i - info->first_altivec_reg_save)));
11611 /* AltiVec addressing mode is [reg+reg]. */
11612 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
11613 mem = gen_rtx_MEM (V4SImode, addr);
11614 set_mem_alias_set (mem, rs6000_sr_alias_set);
11616 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
11620 /* Restore VRSAVE if needed. */
11621 if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
11623 rtx addr, mem, reg;
11625 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11626 GEN_INT (info->vrsave_save_offset + sp_offset));
11627 mem = gen_rtx_MEM (SImode, addr);
11628 set_mem_alias_set (mem, rs6000_sr_alias_set);
11629 reg = gen_rtx_REG (SImode, 12);
11630 emit_move_insn (reg, mem);
11632 emit_insn (generate_set_vrsave (reg, info, 1));
11635 /* Get the old lr if we saved it. */
11636 if (info->lr_save_p)
11638 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
11639 info->lr_save_offset + sp_offset);
11641 set_mem_alias_set (mem, rs6000_sr_alias_set);
11643 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
11646 /* Get the old cr if we saved it. */
11647 if (info->cr_save_p)
11649 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11650 GEN_INT (info->cr_save_offset + sp_offset));
11651 rtx mem = gen_rtx_MEM (SImode, addr);
11653 set_mem_alias_set (mem, rs6000_sr_alias_set);
11655 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
11658 /* Set LR here to try to overlap restores below. */
11659 if (info->lr_save_p)
11660 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
11661 gen_rtx_REG (Pmode, 0));
11663 /* Load exception handler data registers, if needed. */
11664 if (current_function_calls_eh_return)
11666 unsigned int i, regno;
11668 for (i = 0; ; ++i)
11670 rtx mem;
11672 regno = EH_RETURN_DATA_REGNO (i);
11673 if (regno == INVALID_REGNUM)
11674 break;
11676 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
11677 info->ehrd_offset + sp_offset
11678 + reg_size * (int) i);
11679 set_mem_alias_set (mem, rs6000_sr_alias_set);
11681 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
11685 /* Restore GPRs. This is done as a PARALLEL if we are using
11686 the load-multiple instructions. */
11687 if (using_load_multiple)
11689 rtvec p;
11690 p = rtvec_alloc (32 - info->first_gp_reg_save);
11691 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11693 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11694 GEN_INT (info->gp_save_offset
11695 + sp_offset
11696 + reg_size * i));
11697 rtx mem = gen_rtx_MEM (reg_mode, addr);
11699 set_mem_alias_set (mem, rs6000_sr_alias_set);
11701 RTVEC_ELT (p, i) =
11702 gen_rtx_SET (VOIDmode,
11703 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
11704 mem);
11706 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11708 else
/* Restore one at a time; condition mirrors the prologue's save
   condition, including the PIC register special case.  */
11709 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11710 if ((regs_ever_live[info->first_gp_reg_save+i]
11711 && ! call_used_regs[info->first_gp_reg_save+i])
11712 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
11713 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
11714 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
11716 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11717 GEN_INT (info->gp_save_offset
11718 + sp_offset
11719 + reg_size * i));
11720 rtx mem = gen_rtx_MEM (reg_mode, addr);
11722 /* Restore 64-bit quantities for SPE. */
11723 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11725 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
11726 rtx b;
11728 if (!SPE_CONST_OFFSET_OK (offset))
11730 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
11731 emit_move_insn (b, GEN_INT (offset));
11733 else
11734 b = GEN_INT (offset);
11736 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
11737 mem = gen_rtx_MEM (V2SImode, addr);
11740 set_mem_alias_set (mem, rs6000_sr_alias_set);
11742 emit_move_insn (gen_rtx_REG (reg_mode,
11743 info->first_gp_reg_save + i), mem);
11746 /* Restore fpr's if we need to do it without calling a function. */
11747 if (restoring_FPRs_inline)
11748 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11749 if ((regs_ever_live[info->first_fp_reg_save+i]
11750 && ! call_used_regs[info->first_fp_reg_save+i]))
11752 rtx addr, mem;
11753 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11754 GEN_INT (info->fp_save_offset
11755 + sp_offset
11756 + 8 * i));
11757 mem = gen_rtx_MEM (DFmode, addr);
11758 set_mem_alias_set (mem, rs6000_sr_alias_set);
11760 emit_move_insn (gen_rtx_REG (DFmode,
11761 info->first_fp_reg_save + i),
11762 mem);
11765 /* If we saved cr, restore it here. Just those that were used. */
11766 if (info->cr_save_p)
11768 rtx r12_rtx = gen_rtx_REG (SImode, 12);
11769 int count = 0;
11771 if (using_mfcr_multiple)
11773 for (i = 0; i < 8; i++)
11774 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
11775 count++;
11776 if (count == 0)
11777 abort ();
11780 if (using_mfcr_multiple && count > 1)
/* One mtcrf restoring all live CR fields in a single PARALLEL; each
   element moves one field from r12 with a field-select mask.  */
11782 rtvec p;
11783 int ndx;
11785 p = rtvec_alloc (count);
11787 ndx = 0;
11788 for (i = 0; i < 8; i++)
11789 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
11791 rtvec r = rtvec_alloc (2);
11792 RTVEC_ELT (r, 0) = r12_rtx;
11793 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
11794 RTVEC_ELT (p, ndx) =
11795 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
11796 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
11797 ndx++;
11799 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11800 if (ndx != count)
11801 abort ();
11803 else
11804 for (i = 0; i < 8; i++)
11805 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
11807 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
11808 CR0_REGNO+i),
11809 r12_rtx));
11813 /* If this is V.4, unwind the stack pointer after all of the loads
11814 have been done. We need to emit a block here so that sched
11815 doesn't decide to move the sp change before the register restores
11816 (which may not have any obvious dependency on the stack). This
11817 doesn't hurt performance, because there is no scheduling that can
11818 be done after this point. */
11819 if (DEFAULT_ABI == ABI_V4)
11821 if (frame_reg_rtx != sp_reg_rtx)
11822 rs6000_emit_stack_tie ();
11824 if (use_backchain_to_restore_sp)
11826 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
11828 else if (sp_offset != 0)
11830 emit_insn (TARGET_32BIT
11831 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
11832 GEN_INT (sp_offset))
11833 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
11834 GEN_INT (sp_offset)));
/* For eh_return, add in the stack adjustment the landing pad computed.  */
11838 if (current_function_calls_eh_return)
11840 rtx sa = EH_RETURN_STACKADJ_RTX;
11841 emit_insn (TARGET_32BIT
11842 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
11843 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
11846 if (!sibcall)
11848 rtvec p;
11849 if (! restoring_FPRs_inline)
11850 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
11851 else
11852 p = rtvec_alloc (2);
11854 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
11855 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
11856 gen_rtx_REG (Pmode,
11857 LINK_REGISTER_REGNUM));
11859 /* If we have to restore more than two FP registers, branch to the
11860 restore function. It will return to our caller. */
11861 if (! restoring_FPRs_inline)
11863 int i;
11864 char rname[30];
11865 const char *alloc_rname;
11867 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
11868 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
11869 alloc_rname = ggc_strdup (rname);
11870 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
11871 gen_rtx_SYMBOL_REF (Pmode,
11872 alloc_rname));
11874 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11876 rtx addr, mem;
11877 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
11878 GEN_INT (info->fp_save_offset + 8*i));
11879 mem = gen_rtx_MEM (DFmode, addr);
11880 set_mem_alias_set (mem, rs6000_sr_alias_set);
11882 RTVEC_ELT (p, i+3) =
11883 gen_rtx_SET (VOIDmode,
11884 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
11885 mem);
11889 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
11893 /* Write function epilogue. */
/* TARGET_ASM_FUNCTION_EPILOGUE hook.  When the target has no "epilogue"
   RTL expander, generates and prints the epilogue insns directly via
   rs6000_emit_epilogue; then, for the AIX ABI, emits the traceback
   table that follows the function body.  SIZE is unused.  */
11895 static void
11896 rs6000_output_function_epilogue (file, size)
11897 FILE *file;
11898 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
11900 rs6000_stack_t *info = rs6000_stack_info ();
11902 if (! HAVE_epilogue)
11904 rtx insn = get_last_insn ();
11905 /* If the last insn was a BARRIER, we don't have to write anything except
11906 the trace table. */
11907 if (GET_CODE (insn) == NOTE)
11908 insn = prev_nonnote_insn (insn);
11909 if (insn == 0 || GET_CODE (insn) != BARRIER)
11911 /* This is slightly ugly, but at least we don't have two
11912 copies of the epilogue-emitting code. */
11913 start_sequence ();
11915 /* A NOTE_INSN_DELETED is supposed to be at the start
11916 and end of the "toplevel" insn chain. */
11917 emit_note (0, NOTE_INSN_DELETED);
11918 rs6000_emit_epilogue (FALSE);
11919 emit_note (0, NOTE_INSN_DELETED);
11921 /* Expand INSN_ADDRESSES so final() doesn't crash. */
/* Addresses are synthetic (4 bytes per insn); only ordering matters.  */
11923 rtx insn;
11924 unsigned addr = 0;
11925 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
11927 INSN_ADDRESSES_NEW (insn, addr);
11928 addr += 4;
11932 if (TARGET_DEBUG_STACK)
11933 debug_rtx_list (get_insns (), 100);
11934 final (get_insns (), file, FALSE, FALSE);
11935 end_sequence ();
11939 /* Output a traceback table here. See /usr/include/sys/debug.h for info
11940 on its format.
11942 We don't output a traceback table if -finhibit-size-directive was
11943 used. The documentation for -finhibit-size-directive reads
11944 ``don't output a @code{.size} assembler directive, or anything
11945 else that would cause trouble if the function is split in the
11946 middle, and the two halves are placed at locations far apart in
11947 memory.'' The traceback table has this property, since it
11948 includes the offset from the start of the function to the
11949 traceback table itself.
11951 System V.4 Powerpc's (and the embedded ABI derived from it) use a
11952 different traceback table. */
11953 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
11954 && rs6000_traceback != traceback_none)
11956 const char *fname = NULL;
11957 const char *language_string = lang_hooks.name;
11958 int fixed_parms = 0, float_parms = 0, parm_info = 0;
11959 int i;
11960 int optional_tbtab;
/* OPTIONAL_TBTAB selects between the short (mandatory fields only)
   and full traceback table, per -mtraceback= or heuristics.  */
11962 if (rs6000_traceback == traceback_full)
11963 optional_tbtab = 1;
11964 else if (rs6000_traceback == traceback_part)
11965 optional_tbtab = 0;
11966 else
11967 optional_tbtab = !optimize_size && !TARGET_ELF;
11969 if (optional_tbtab)
11971 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
11972 while (*fname == '.') /* V.4 encodes . in the name */
11973 fname++;
11975 /* Need label immediately before tbtab, so we can compute
11976 its offset from the function start. */
11977 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
11978 ASM_OUTPUT_LABEL (file, fname);
11981 /* The .tbtab pseudo-op can only be used for the first eight
11982 expressions, since it can't handle the possibly variable
11983 length fields that follow. However, if you omit the optional
11984 fields, the assembler outputs zeros for all optional fields
11985 anyways, giving each variable length field is minimum length
11986 (as defined in sys/debug.h). Thus we can not use the .tbtab
11987 pseudo-op at all. */
11989 /* An all-zero word flags the start of the tbtab, for debuggers
11990 that have to find it by searching forward from the entry
11991 point or from the current pc. */
11992 fputs ("\t.long 0\n", file);
11994 /* Tbtab format type. Use format type 0. */
11995 fputs ("\t.byte 0,", file);
11997 /* Language type. Unfortunately, there doesn't seem to be any
11998 official way to get this info, so we use language_string. C
11999 is 0. C++ is 9. No number defined for Obj-C, so use the
12000 value for C for now. There is no official value for Java,
12001 although IBM appears to be using 13. There is no official value
12002 for Chill, so we've chosen 44 pseudo-randomly. */
12003 if (! strcmp (language_string, "GNU C")
12004 || ! strcmp (language_string, "GNU Objective-C"))
12005 i = 0;
12006 else if (! strcmp (language_string, "GNU F77"))
12007 i = 1;
12008 else if (! strcmp (language_string, "GNU Ada"))
12009 i = 3;
12010 else if (! strcmp (language_string, "GNU Pascal"))
12011 i = 2;
12012 else if (! strcmp (language_string, "GNU C++"))
12013 i = 9;
12014 else if (! strcmp (language_string, "GNU Java"))
12015 i = 13;
12016 else if (! strcmp (language_string, "GNU CHILL"))
12017 i = 44;
12018 else
12019 abort ();
12020 fprintf (file, "%d,", i);
12022 /* 8 single bit fields: global linkage (not set for C extern linkage,
12023 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
12024 from start of procedure stored in tbtab, internal function, function
12025 has controlled storage, function has no toc, function uses fp,
12026 function logs/aborts fp operations. */
12027 /* Assume that fp operations are used if any fp reg must be saved. */
12028 fprintf (file, "%d,",
12029 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
12031 /* 6 bitfields: function is interrupt handler, name present in
12032 proc table, function calls alloca, on condition directives
12033 (controls stack walks, 3 bits), saves condition reg, saves
12034 link reg. */
12035 /* The `function calls alloca' bit seems to be set whenever reg 31 is
12036 set up as a frame pointer, even when there is no alloca call. */
12037 fprintf (file, "%d,",
12038 ((optional_tbtab << 6)
12039 | ((optional_tbtab & frame_pointer_needed) << 5)
12040 | (info->cr_save_p << 1)
12041 | (info->lr_save_p)));
12043 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
12044 (6 bits). */
12045 fprintf (file, "%d,",
12046 (info->push_p << 7) | (64 - info->first_fp_reg_save));
12048 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
12049 fprintf (file, "%d,", (32 - first_reg_to_save ()));
12051 if (optional_tbtab)
12053 /* Compute the parameter info from the function decl argument
12054 list. */
12055 tree decl;
12056 int next_parm_info_bit = 31;
12058 for (decl = DECL_ARGUMENTS (current_function_decl);
12059 decl; decl = TREE_CHAIN (decl))
12061 rtx parameter = DECL_INCOMING_RTL (decl);
12062 enum machine_mode mode = GET_MODE (parameter);
12064 if (GET_CODE (parameter) == REG)
12066 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
12068 int bits;
12070 float_parms++;
/* Two-bit codes from sys/debug.h: 10 = single float, 11 = double.  */
12072 if (mode == SFmode)
12073 bits = 0x2;
12074 else if (mode == DFmode || mode == TFmode)
12075 bits = 0x3;
12076 else
12077 abort ();
12079 /* If only one bit will fit, don't or in this entry. */
12080 if (next_parm_info_bit > 0)
12081 parm_info |= (bits << (next_parm_info_bit - 1));
12082 next_parm_info_bit -= 2;
12084 else
12086 fixed_parms += ((GET_MODE_SIZE (mode)
12087 + (UNITS_PER_WORD - 1))
12088 / UNITS_PER_WORD);
12089 next_parm_info_bit -= 1;
12095 /* Number of fixed point parameters. */
12096 /* This is actually the number of words of fixed point parameters; thus
12097 an 8 byte struct counts as 2; and thus the maximum value is 8. */
12098 fprintf (file, "%d,", fixed_parms);
12100 /* 2 bitfields: number of floating point parameters (7 bits), parameters
12101 all on stack. */
12102 /* This is actually the number of fp registers that hold parameters;
12103 and thus the maximum value is 13. */
12104 /* Set parameters on stack bit if parameters are not in their original
12105 registers, regardless of whether they are on the stack? Xlc
12106 seems to set the bit when not optimizing. */
12107 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
12109 if (! optional_tbtab)
12110 return;
12112 /* Optional fields follow. Some are variable length. */
12114 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
12115 11 double float. */
12116 /* There is an entry for each parameter in a register, in the order that
12117 they occur in the parameter list. Any intervening arguments on the
12118 stack are ignored. If the list overflows a long (max possible length
12119 34 bits) then completely leave off all elements that don't fit. */
12120 /* Only emit this long if there was at least one parameter. */
12121 if (fixed_parms || float_parms)
12122 fprintf (file, "\t.long %d\n", parm_info);
12124 /* Offset from start of code to tb table. */
12125 fputs ("\t.long ", file);
12126 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
12127 #if TARGET_AIX
12128 RS6000_OUTPUT_BASENAME (file, fname);
12129 #else
12130 assemble_name (file, fname);
12131 #endif
12132 fputs ("-.", file);
12133 #if TARGET_AIX
12134 RS6000_OUTPUT_BASENAME (file, fname);
12135 #else
12136 assemble_name (file, fname);
12137 #endif
12138 putc ('\n', file);
12140 /* Interrupt handler mask. */
12141 /* Omit this long, since we never set the interrupt handler bit
12142 above. */
12144 /* Number of CTL (controlled storage) anchors. */
12145 /* Omit this long, since the has_ctl bit is never set above. */
12147 /* Displacement into stack of each CTL anchor. */
12148 /* Omit this list of longs, because there are no CTL anchors. */
12150 /* Length of function name. */
12151 if (*fname == '*')
12152 ++fname;
12153 fprintf (file, "\t.short %d\n", (int) strlen (fname));
12155 /* Function name. */
12156 assemble_string (fname, strlen (fname));
12158 /* Register for alloca automatic storage; this is always reg 31.
12159 Only emit this if the alloca bit was set above. */
12160 if (frame_pointer_needed)
12161 fputs ("\t.byte 31\n", file);
12163 fputs ("\t.align 2\n", file);
12167 /* A C compound statement that outputs the assembler code for a thunk
12168 function, used to implement C++ virtual function calls with
12169 multiple inheritance. The thunk acts as a wrapper around a virtual
12170 function, adjusting the implicit object parameter before handing
12171 control off to the real function.
12173 First, emit code to add the integer DELTA to the location that
12174 contains the incoming first argument. Assume that this argument
12175 contains a pointer, and is the one used to pass the `this' pointer
12176 in C++. This is the incoming argument *before* the function
12177 prologue, e.g. `%o0' on a sparc. The addition must preserve the
12178 values of all other incoming arguments.
12180 After the addition, emit code to jump to FUNCTION, which is a
12181 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
12182 not touch the return address. Hence returning from FUNCTION will
12183 return to whoever called the current `thunk'.
12185 The effect must be as if FUNCTION had been called directly with the
12186 adjusted first argument. This macro is responsible for emitting
12187 all of the code for a thunk function; output_function_prologue()
12188 and output_function_epilogue() are not invoked.
12190 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
12191 been extracted from it.) It might possibly be useful on some
12192 targets, but probably not.
12194 If you do not define this macro, the target-independent code in the
12195 C++ frontend will generate a less efficient heavyweight thunk that
12196 calls FUNCTION instead of jumping to it. The generic approach does
12197 not support varargs. */
/* Output assembly for a C++ MI thunk: add DELTA (and optionally a
   vtable-indirected VCALL_OFFSET) to the incoming `this' pointer,
   then tail-call FUNCTION.  See the block comment above for the full
   contract.  Insns are generated and immediately run through final().  */

static void
rs6000_output_mi_thunk (file, thunk_fndecl, delta, vcall_offset, function)
     FILE *file;
     tree thunk_fndecl ATTRIBUTE_UNUSED;
     HOST_WIDE_INT delta;
     HOST_WIDE_INT vcall_offset;
     tree function;
{
  /* `this' is a legal identifier: this file is compiled as C.  */
  rtx this, insn, funexp;

  /* Pretend register allocation has run so hard registers can be used
     directly and no new pseudos are created below.  Undone at the end.  */
  reload_completed = 1;
  no_new_pseudos = 1;

  /* Mark the end of the (empty) prologue.  */
  emit_note (NULL, NOTE_INSN_PROLOGUE_END);

  /* Find the "this" pointer.  If the function returns a structure,
     the structure return pointer is in r3 and "this" moves to r4.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function))))
    this = gen_rtx_REG (Pmode, 4);
  else
    this = gen_rtx_REG (Pmode, 3);

  /* Apply the constant offset, if required.  */
  if (delta)
    {
      rtx delta_rtx = GEN_INT (delta);
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (this, this, delta_rtx)
		 : gen_adddi3 (this, this, delta_rtx));
    }

  /* Apply the offset from the vtable, if required:
     this += *(*this + vcall_offset).  r12 serves as scratch.  */
  if (vcall_offset)
    {
      rtx vcall_offset_rtx = GEN_INT (vcall_offset);
      rtx tmp = gen_rtx_REG (Pmode, 12);

      emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
		 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
      emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (this, this, tmp)
		 : gen_adddi3 (this, this, tmp));
    }

  /* Generate a tail call to the target function.  */
  if (!TREE_USED (function))
    {
      assemble_external (function);
      TREE_USED (function) = 1;
    }
  funexp = XEXP (DECL_RTL (function), 0);
  /* NOTE(review): clearing SYMBOL_FLAG_LOCAL presumably forces the
     non-local call form for the sibcall -- confirm against callers.  */
  SYMBOL_REF_FLAGS (funexp) &= ~SYMBOL_FLAG_LOCAL;
  funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);

#if TARGET_MACHO
  if (MACHOPIC_INDIRECT)
    funexp = machopic_indirect_call_target (funexp);
#endif

  /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
     generate sibcall RTL explicitly to avoid constraint abort.  */
  insn = emit_call_insn (
	   gen_rtx_PARALLEL (VOIDmode,
			     gen_rtvec (4,
					gen_rtx_CALL (VOIDmode,
						      funexp, const0_rtx),
					gen_rtx_USE (VOIDmode, const0_rtx),
					gen_rtx_USE (VOIDmode,
						     gen_rtx_REG (SImode,
								  LINK_REGISTER_REGNUM)),
					gen_rtx_RETURN (VOIDmode))));
  SIBLING_CALL_P (insn) = 1;
  emit_barrier ();

  /* Run just enough of rest_of_compilation to get the insns emitted.
     There's not really enough bulk here to make other passes such as
     instruction scheduling worth while.  Note that use_thunk calls
     assemble_start_function and assemble_end_function.  */
  insn = get_insns ();
  shorten_branches (insn);
  final_start_function (insn, file, 1);
  final (insn, file, 1, 0);
  final_end_function ();

  /* Restore the compiler-state flags faked above.  */
  reload_completed = 0;
  no_new_pseudos = 0;
}
12291 /* A quick summary of the various types of 'constant-pool tables'
12292 under PowerPC:
12294 Target Flags Name One table per
12295 AIX (none) AIX TOC object file
12296 AIX -mfull-toc AIX TOC object file
12297 AIX -mminimal-toc AIX minimal TOC translation unit
12298 SVR4/EABI (none) SVR4 SDATA object file
12299 SVR4/EABI -fpic SVR4 pic object file
12300 SVR4/EABI -fPIC SVR4 PIC translation unit
12301 SVR4/EABI -mrelocatable EABI TOC function
12302 SVR4/EABI -maix AIX TOC object file
12303 SVR4/EABI -maix -mminimal-toc
12304 AIX minimal TOC translation unit
12306 Name Reg. Set by entries contains:
12307 made by addrs? fp? sum?
12309 AIX TOC 2 crt0 as Y option option
12310 AIX minimal TOC 30 prolog gcc Y Y option
12311 SVR4 SDATA 13 crt0 gcc N Y N
12312 SVR4 pic 30 prolog ld Y not yet N
12313 SVR4 PIC 30 prolog gcc Y option option
12314 EABI TOC 30 prolog gcc Y option option
12318 /* Hash functions for the hash table. */
12320 static unsigned
12321 rs6000_hash_constant (k)
12322 rtx k;
12324 enum rtx_code code = GET_CODE (k);
12325 enum machine_mode mode = GET_MODE (k);
12326 unsigned result = (code << 3) ^ mode;
12327 const char *format;
12328 int flen, fidx;
12330 format = GET_RTX_FORMAT (code);
12331 flen = strlen (format);
12332 fidx = 0;
12334 switch (code)
12336 case LABEL_REF:
12337 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
12339 case CONST_DOUBLE:
12340 if (mode != VOIDmode)
12341 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
12342 flen = 2;
12343 break;
12345 case CODE_LABEL:
12346 fidx = 3;
12347 break;
12349 default:
12350 break;
12353 for (; fidx < flen; fidx++)
12354 switch (format[fidx])
12356 case 's':
12358 unsigned i, len;
12359 const char *str = XSTR (k, fidx);
12360 len = strlen (str);
12361 result = result * 613 + len;
12362 for (i = 0; i < len; i++)
12363 result = result * 613 + (unsigned) str[i];
12364 break;
12366 case 'u':
12367 case 'e':
12368 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
12369 break;
12370 case 'i':
12371 case 'n':
12372 result = result * 613 + (unsigned) XINT (k, fidx);
12373 break;
12374 case 'w':
12375 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
12376 result = result * 613 + (unsigned) XWINT (k, fidx);
12377 else
12379 size_t i;
12380 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
12381 result = result * 613 + (unsigned) (XWINT (k, fidx)
12382 >> CHAR_BIT * i);
12384 break;
12385 case '0':
12386 break;
12387 default:
12388 abort ();
12391 return result;
12394 static unsigned
12395 toc_hash_function (hash_entry)
12396 const void * hash_entry;
12398 const struct toc_hash_struct *thc =
12399 (const struct toc_hash_struct *) hash_entry;
12400 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
12403 /* Compare H1 and H2 for equivalence. */
12405 static int
12406 toc_hash_eq (h1, h2)
12407 const void * h1;
12408 const void * h2;
12410 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
12411 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
12413 if (((const struct toc_hash_struct *) h1)->key_mode
12414 != ((const struct toc_hash_struct *) h2)->key_mode)
12415 return 0;
12417 return rtx_equal_p (r1, r2);
/* These are the names given by the C++ front-end to vtables, and
   vtable-like objects.  Ideally, this logic should not be here;
   instead, there should be some programmatic way of inquiring as
   to whether or not an object is a vtable.

   "_vt." is the old g++ mangling; "_ZTV", "_ZTT" and "_ZTC" are the
   vtable, VTT and construction-vtable prefixes of the IA-64 C++ ABI
   mangling.  The macro previously tested a variable literally spelled
   `name' instead of its argument NAME, which only worked because every
   call site happened to use a local of that exact name; it now uses
   its parameter properly (and parenthesizes it).  */

#define VTABLE_NAME_P(NAME)				\
  (strncmp ("_vt.", (NAME), strlen ("_vt.")) == 0	\
   || strncmp ("_ZTV", (NAME), strlen ("_ZTV")) == 0	\
   || strncmp ("_ZTT", (NAME), strlen ("_ZTT")) == 0	\
   || strncmp ("_ZTC", (NAME), strlen ("_ZTC")) == 0)
/* Output the assembler name for the SYMBOL_REF X to FILE, using the
   basename form for vtable symbols and the plain assembler name
   otherwise.  */

void
rs6000_output_symbol_ref (file, x)
     FILE *file;
     rtx x;
{
  /* Currently C++ toc references to vtables can be emitted before it
     is decided whether the vtable is public or private.  If this is
     the case, then the linker will eventually complain that there is
     a reference to an unknown section.  Thus, for vtables only,
     we emit the TOC reference to reference the symbol and not the
     section.  */
  const char *name = XSTR (x, 0);

  /* NOTE(review): VTABLE_NAME_P has historically referenced the
     caller's variable spelled `name' directly, so keep this local's
     exact name unless the macro is known to be parameterized.  */
  if (VTABLE_NAME_P (name))
    {
      RS6000_OUTPUT_BASENAME (file, name);
    }
  else
    assemble_name (file, name);
}
/* Output a TOC entry.  We derive the entry name from what is being
   written.

   FILE is the assembler output stream; X is the value the TOC entry
   holds; LABELNO numbers the LC.. label for this entry; MODE is the
   machine mode X will be accessed in.  Duplicate entries are folded
   into a single label via toc_hash_table when the linker cannot do it
   for us.  */

void
output_toc (file, x, labelno, mode)
     FILE *file;
     rtx x;
     int labelno;
     enum machine_mode mode;
{
  char buf[256];
  const char *name = buf;
  const char *real_name;
  rtx base = x;
  int offset = 0;

  if (TARGET_NO_TOC)
    abort ();

  /* When the linker won't eliminate them, don't output duplicate
     TOC entries (this happens on AIX if there is any kind of TOC,
     and on SVR4 under -fPIC or -mrelocatable).  Don't do this for
     CODE_LABELs.  */
  if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
    {
      struct toc_hash_struct *h;
      void * * found;

      /* Create toc_hash_table.  This can't be done at OVERRIDE_OPTIONS
	 time because GGC is not initialised at that point.  */
      if (toc_hash_table == NULL)
	toc_hash_table = htab_create_ggc (1021, toc_hash_function,
					  toc_hash_eq, NULL);

      h = ggc_alloc (sizeof (*h));
      h->key = x;
      h->key_mode = mode;
      h->labelno = labelno;

      found = htab_find_slot (toc_hash_table, h, 1);
      if (*found == NULL)
	*found = h;
      else  /* This is indeed a duplicate.
	       Set this label equal to that label.  */
	{
	  fputs ("\t.set ", file);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d,", labelno);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
				   found)->labelno));
	  return;
	}
    }

  /* If we're going to put a double constant in the TOC, make sure it's
     aligned properly when strict alignment is on.  */
  if (GET_CODE (x) == CONST_DOUBLE
      && STRICT_ALIGNMENT
      && GET_MODE_BITSIZE (mode) >= 64
      && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
    ASM_OUTPUT_ALIGN (file, 3);
  }

  (*targetm.asm_out.internal_label) (file, "LC", labelno);

  /* Handle FP constants specially.  Note that if we have a minimal
     TOC, things we put here aren't actually in the TOC, so we can allow
     FP constants.  */
  if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
    {
      /* 128-bit long double: four 32-bit words of target image.  */
      REAL_VALUE_TYPE rv;
      long k[4];

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff,
		     k[2] & 0xffffffff, k[3] & 0xffffffff);
	  fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff,
		   k[2] & 0xffffffff, k[3] & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff,
		     k[2] & 0xffffffff, k[3] & 0xffffffff);
	  fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff,
		   k[2] & 0xffffffff, k[3] & 0xffffffff);
	  return;
	}
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
    {
      /* 64-bit double: two 32-bit words of target image.  */
      REAL_VALUE_TYPE rv;
      long k[2];

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff);
	  fprintf (file, "0x%lx%08lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff);
	  fprintf (file, "0x%lx,0x%lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff);
	  return;
	}
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
    {
      /* 32-bit float: one word of target image (zero-extended on
	 64-bit hosts' TOC slots).  */
      REAL_VALUE_TYPE rv;
      long l;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
	  fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
	  fprintf (file, "0x%lx\n", l & 0xffffffff);
	  return;
	}
    }
  else if (GET_MODE (x) == VOIDmode
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      unsigned HOST_WIDE_INT low;
      HOST_WIDE_INT high;

      /* Split the constant into its low and high 32-bit halves,
	 sign-extending a CONST_INT into HIGH.  */
      if (GET_CODE (x) == CONST_DOUBLE)
	{
	  low = CONST_DOUBLE_LOW (x);
	  high = CONST_DOUBLE_HIGH (x);
	}
      else
#if HOST_BITS_PER_WIDE_INT == 32
	{
	  low = INTVAL (x);
	  high = (low & 0x80000000) ? ~0 : 0;
	}
#else
	{
	  low = INTVAL (x) & 0xffffffff;
	  high = (HOST_WIDE_INT) INTVAL (x) >> 32;
	}
#endif

      /* TOC entries are always Pmode-sized, but since this
	 is a bigendian machine then if we're putting smaller
	 integer constants in the TOC we have to pad them.
	 (This is still a win over putting the constants in
	 a separate constant pool, because then we'd have
	 to have both a TOC entry _and_ the actual constant.)

	 For a 32-bit target, CONST_INT values are loaded and shifted
	 entirely within `low' and can be stored in one TOC entry.  */

      if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
	abort ();/* It would be easy to make this work, but it doesn't now.  */

      if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
	{
	  /* Left-justify the value within the Pmode-sized slot.  */
#if HOST_BITS_PER_WIDE_INT == 32
	  lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
			 POINTER_SIZE, &low, &high, 0);
#else
	  low |= high << 32;
	  low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
	  high = (HOST_WIDE_INT) low >> 32;
	  low &= 0xffffffff;
#endif
	}

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc ID_%lx_%lx[TC],",
		     (long) high & 0xffffffff, (long) low & 0xffffffff);
	  fprintf (file, "0x%lx%08lx\n",
		   (long) high & 0xffffffff, (long) low & 0xffffffff);
	  return;
	}
      else
	{
	  if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
	    {
	      /* Value needs both words.  */
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc ID_%lx_%lx[TC],",
			 (long) high & 0xffffffff, (long) low & 0xffffffff);
	      fprintf (file, "0x%lx,0x%lx\n",
		       (long) high & 0xffffffff, (long) low & 0xffffffff);
	    }
	  else
	    {
	      /* Value fits one word.  */
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
	      fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
	    }
	  return;
	}
    }

  /* Anything else is a symbol or label, possibly wrapped in a CONST
     with a PLUS offset.  */
  if (GET_CODE (x) == CONST)
    {
      if (GET_CODE (XEXP (x, 0)) != PLUS)
	abort ();

      base = XEXP (XEXP (x, 0), 0);
      offset = INTVAL (XEXP (XEXP (x, 0), 1));
    }

  if (GET_CODE (base) == SYMBOL_REF)
    name = XSTR (base, 0);
  else if (GET_CODE (base) == LABEL_REF)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
  else if (GET_CODE (base) == CODE_LABEL)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
  else
    abort ();

  real_name = (*targetm.strip_name_encoding) (name);
  if (TARGET_MINIMAL_TOC)
    fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
  else
    {
      /* Encode a nonzero offset into the entry name: .Nx for negative,
	 .Px for positive offsets.  */
      fprintf (file, "\t.tc %s", real_name);

      if (offset < 0)
	fprintf (file, ".N%d", - offset);
      else if (offset)
	fprintf (file, ".P%d", offset);

      fputs ("[TC],", file);
    }

  /* Currently C++ toc references to vtables can be emitted before it
     is decided whether the vtable is public or private.  If this is
     the case, then the linker will eventually complain that there is
     a TOC reference to an unknown section.  Thus, for vtables only,
     we emit the TOC reference to reference the symbol and not the
     section.  */
  if (VTABLE_NAME_P (name))
    {
      RS6000_OUTPUT_BASENAME (file, name);
      if (offset < 0)
	fprintf (file, "%d", offset);
      else if (offset > 0)
	fprintf (file, "+%d", offset);
    }
  else
    output_addr_const (file, x);
  putc ('\n', file);
}
/* Output an assembler pseudo-op to write an ASCII string of N characters
   starting at P to FILE.

   On the RS/6000, we have to do this using the .byte operation and
   write out special characters outside the quoted string.
   Also, the assembler is broken; very long strings are truncated,
   so we must artificially break them up early.  */

void
output_ascii (file, p, n)
     FILE *file;
     const char *p;
     int n;
{
  char c;
  int i, count_string;
  /* FOR_STRING is the prefix emitted before the next printable char
     (opens a quoted string); FOR_DECIMAL before the next non-printable
     char (decimal .byte form); TO_CLOSE is what terminates the line
     currently being built, or NULL if nothing is pending.  */
  const char *for_string = "\t.byte \"";
  const char *for_decimal = "\t.byte ";
  const char *to_close = NULL;

  count_string = 0;
  for (i = 0; i < n; i++)
    {
      c = *p++;
      /* Printable ASCII goes inside a quoted string ...  */
      if (c >= ' ' && c < 0177)
	{
	  if (for_string)
	    fputs (for_string, file);
	  putc (c, file);

	  /* Write two quotes to get one.  */
	  if (c == '"')
	    {
	      putc (c, file);
	      ++count_string;
	    }

	  for_string = NULL;
	  for_decimal = "\"\n\t.byte ";
	  to_close = "\"\n";
	  ++count_string;

	  /* Break very long quoted strings up early; the assembler
	     truncates them otherwise (see comment above).  */
	  if (count_string >= 512)
	    {
	      fputs (to_close, file);

	      for_string = "\t.byte \"";
	      for_decimal = "\t.byte ";
	      to_close = NULL;
	      count_string = 0;
	    }
	}
      /* ... everything else is emitted as a decimal .byte value.  */
      else
	{
	  if (for_decimal)
	    fputs (for_decimal, file);
	  fprintf (file, "%d", c);

	  for_string = "\n\t.byte \"";
	  for_decimal = ", ";
	  to_close = "\n";
	  count_string = 0;
	}
    }

  /* Now close the string if we have written one.  Then end the line.  */
  if (to_close)
    fputs (to_close, file);
}
/* Generate a unique section name for FILENAME for a section type
   represented by SECTION_DESC.  Output goes into BUF.

   SECTION_DESC can be any string, as long as it is different for each
   possible section type.

   We name the section in the same manner as xlc.  The name begins with an
   underscore followed by the filename (after stripping any leading directory
   names) with the last period replaced by the string SECTION_DESC.  If
   FILENAME does not contain a period, SECTION_DESC is appended to the end of
   the name.  */

void
rs6000_gen_section_name (buf, filename, section_desc)
     char **buf;
     const char *filename;
     const char *section_desc;
{
  const char *q, *after_last_slash, *last_period = 0;
  char *p;
  int len;

  /* Find the start of the basename, and the last period within the
     basename.  LAST_PERIOD is reset at every slash so that a period in
     a directory component (e.g. "dir.x/foo") cannot be mistaken for an
     extension; previously such a path silently dropped SECTION_DESC
     from the generated name.  */
  after_last_slash = filename;
  for (q = filename; *q; q++)
    {
      if (*q == '/')
	{
	  after_last_slash = q + 1;
	  last_period = 0;
	}
      else if (*q == '.')
	last_period = q;
    }

  /* Worst case: '_' + basename + SECTION_DESC + NUL.  */
  len = strlen (after_last_slash) + strlen (section_desc) + 2;
  *buf = (char *) xmalloc (len);

  p = *buf;
  *p++ = '_';

  for (q = after_last_slash; *q; q++)
    {
      if (q == last_period)
	{
	  /* Replace the final period (and any suffix) with
	     SECTION_DESC.  */
	  strcpy (p, section_desc);
	  p += strlen (section_desc);
	  break;
	}

      else if (ISALNUM (*q))
	/* Drop characters that are not valid in a section name.  */
	*p++ = *q;
    }

  /* No period in the basename: append SECTION_DESC instead.  */
  if (last_period == 0)
    strcpy (p, section_desc);
  else
    *p = '\0';
}
/* Emit profile function.  Called while emitting RTL; LABELNO numbers
   the per-call-site "LP" counter label used by the AIX counter-based
   variant.  */

void
output_profile_hook (labelno)
     int labelno ATTRIBUTE_UNUSED;
{
  /* Kernel profiling is handled textually in output_function_profiler
     instead (see the TARGET_PROFILE_KERNEL case there).  */
  if (TARGET_PROFILE_KERNEL)
    return;

  if (DEFAULT_ABI == ABI_AIX)
    {
#ifdef NO_PROFILE_COUNTERS
      emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
#else
      /* Pass mcount the address of the counter label for this site.  */
      char buf[30];
      const char *label_name;
      rtx fun;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
      label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
      fun = gen_rtx_SYMBOL_REF (Pmode, label_name);

      emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
			 fun, Pmode);
#endif
    }
  else if (DEFAULT_ABI == ABI_DARWIN)
    {
      const char *mcount_name = RS6000_MCOUNT;
      int caller_addr_regno = LINK_REGISTER_REGNUM;

      /* Be conservative and always set this, at least for now.  */
      current_function_uses_pic_offset_table = 1;

#if TARGET_MACHO
      /* For PIC code, set up a stub and collect the caller's address
	 from r0, which is where the prologue puts it.  */
      if (MACHOPIC_INDIRECT)
	{
	  mcount_name = machopic_stub_name (mcount_name);
	  if (current_function_uses_pic_offset_table)
	    caller_addr_regno = 0;
	}
#endif
      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
			 0, VOIDmode, 1,
			 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
    }
}
/* Write function profiler code.  Emits the textual prologue-time call
   to mcount for the current ABI; LABELNO numbers the "LP" counter
   label referenced by the V.4 variants.  */

void
output_function_profiler (file, labelno)
  FILE *file;
  int labelno;
{
  char buf[100];
  /* Stack offset at which the saved LR is stored; ABI_V4 overrides it.  */
  int save_lr = 8;

  switch (DEFAULT_ABI)
    {
    default:
      abort ();

    case ABI_V4:
      save_lr = 4;
      if (!TARGET_32BIT)
	{
	  warning ("no profiling of 64-bit code for this ABI");
	  return;
	}
      ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
      fprintf (file, "\tmflr %s\n", reg_names[0]);
      if (flag_pic == 1)
	{
	  /* -fpic: load the counter label address from the GOT.  */
	  fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
	  asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@got(%s)\n", reg_names[12]);
	}
      else if (flag_pic > 1)
	{
	  /* -fPIC: compute the label address from an inline
	     PC-relative word.  */
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  /* Now, we need to get the address of the label.  */
	  fputs ("\tbl 1f\n\t.long ", file);
	  assemble_name (file, buf);
	  fputs ("-.\n1:", file);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
	  asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
		       reg_names[0], reg_names[11]);
	  asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
		       reg_names[0], reg_names[0], reg_names[11]);
	}
      else
	{
	  /* Non-PIC: materialize the label address directly.  */
	  asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
	  assemble_name (file, buf);
	  fputs ("@ha\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@l(%s)\n", reg_names[12]);
	}

      /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH.  */
      fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
      break;

    case ABI_AIX:
    case ABI_DARWIN:
      if (!TARGET_PROFILE_KERNEL)
	{
	  /* Don't do anything, done in output_profile_hook ().  */
	}
      else
	{
	  /* Kernel profiling: 64-bit only.  Save LR, preserve the
	     static chain around the mcount call if it is live.  */
	  if (TARGET_32BIT)
	    abort ();

	  asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
	  asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);

	  if (current_function_needs_context)
	    {
	      asm_fprintf (file, "\tstd %s,24(%s)\n",
			   reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
	      fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
	      asm_fprintf (file, "\tld %s,24(%s)\n",
			   reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
	    }
	  else
	    fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
	}
      break;
    }
}
/* Scheduler hook: always use the DFA-based pipeline hazard
   description on this target.  */

static int
rs6000_use_dfa_pipeline_interface ()
{
  return 1;
}
13027 /* Power4 load update and store update instructions are cracked into a
13028 load or store and an integer insn which are executed in the same cycle.
13029 Branches have their own dispatch slot which does not count against the
13030 GCC issue rate, but it changes the program flow so there are no other
13031 instructions to issue in this cycle. */
13033 static int
13034 rs6000_variable_issue (stream, verbose, insn, more)
13035 FILE *stream ATTRIBUTE_UNUSED;
13036 int verbose ATTRIBUTE_UNUSED;
13037 rtx insn;
13038 int more;
13040 if (GET_CODE (PATTERN (insn)) == USE
13041 || GET_CODE (PATTERN (insn)) == CLOBBER)
13042 return more;
13044 if (rs6000_cpu == PROCESSOR_POWER4)
13046 enum attr_type type = get_attr_type (insn);
13047 if (type == TYPE_LOAD_EXT_U || type == TYPE_LOAD_EXT_UX
13048 || type == TYPE_LOAD_UX || type == TYPE_STORE_UX)
13049 return 0;
13050 else if (type == TYPE_LOAD_U || type == TYPE_STORE_U
13051 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
13052 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
13053 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
13054 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
13055 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
13056 || type == TYPE_IDIV || type == TYPE_LDIV)
13057 return more > 2 ? more - 2 : 0;
13060 return more - 1;
13063 /* Adjust the cost of a scheduling dependency. Return the new cost of
13064 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
13066 static int
13067 rs6000_adjust_cost (insn, link, dep_insn, cost)
13068 rtx insn;
13069 rtx link;
13070 rtx dep_insn ATTRIBUTE_UNUSED;
13071 int cost;
13073 if (! recog_memoized (insn))
13074 return 0;
13076 if (REG_NOTE_KIND (link) != 0)
13077 return 0;
13079 if (REG_NOTE_KIND (link) == 0)
13081 /* Data dependency; DEP_INSN writes a register that INSN reads
13082 some cycles later. */
13083 switch (get_attr_type (insn))
13085 case TYPE_JMPREG:
13086 /* Tell the first scheduling pass about the latency between
13087 a mtctr and bctr (and mtlr and br/blr). The first
13088 scheduling pass will not know about this latency since
13089 the mtctr instruction, which has the latency associated
13090 to it, will be generated by reload. */
13091 return TARGET_POWER ? 5 : 4;
13092 case TYPE_BRANCH:
13093 /* Leave some extra cycles between a compare and its
13094 dependent branch, to inhibit expensive mispredicts. */
13095 if ((rs6000_cpu_attr == CPU_PPC603
13096 || rs6000_cpu_attr == CPU_PPC604
13097 || rs6000_cpu_attr == CPU_PPC604E
13098 || rs6000_cpu_attr == CPU_PPC620
13099 || rs6000_cpu_attr == CPU_PPC630
13100 || rs6000_cpu_attr == CPU_PPC750
13101 || rs6000_cpu_attr == CPU_PPC7400
13102 || rs6000_cpu_attr == CPU_PPC7450
13103 || rs6000_cpu_attr == CPU_POWER4)
13104 && recog_memoized (dep_insn)
13105 && (INSN_CODE (dep_insn) >= 0)
13106 && (get_attr_type (dep_insn) == TYPE_CMP
13107 || get_attr_type (dep_insn) == TYPE_COMPARE
13108 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
13109 || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
13110 || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
13111 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
13112 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
13113 || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
13114 return cost + 2;
13115 default:
13116 break;
13118 /* Fall out to return default cost. */
13121 return cost;
/* A C statement (sans semicolon) to update the integer scheduling
   priority INSN_PRIORITY (INSN).  Reduce the priority to execute the
   INSN earlier, increase the priority to execute INSN later.  Do not
   define this macro if you do not need to adjust the scheduling
   priorities of insns.  */

static int
rs6000_adjust_priority (insn, priority)
     rtx insn ATTRIBUTE_UNUSED;
     int priority;
{
  /* On machines (like the 750) which have asymmetric integer units,
     where one integer unit can do multiply and divides and the other
     can't, reduce the priority of multiply/divide so it is scheduled
     before other integer operations.  */

  /* Disabled experiment (note the debugging fprintf); kept for
     reference.  */
#if 0
  if (! INSN_P (insn))
    return priority;

  if (GET_CODE (PATTERN (insn)) == USE)
    return priority;

  switch (rs6000_cpu_attr) {
  case CPU_PPC750:
    switch (get_attr_type (insn))
      {
      default:
	break;

      case TYPE_IMUL:
      case TYPE_IDIV:
	fprintf (stderr, "priority was %#x (%d) before adjustment\n",
		 priority, priority);
	if (priority >= 0 && priority < 0x01000000)
	  priority >>= 3;
	break;
      }
  }
#endif

  /* Currently a no-op: the incoming priority is returned unchanged.  */
  return priority;
}
13168 /* Return how many instructions the machine can issue per cycle. */
13170 static int
13171 rs6000_issue_rate ()
13173 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
13174 if (!reload_completed)
13175 return 1;
13177 switch (rs6000_cpu_attr) {
13178 case CPU_RIOS1: /* ? */
13179 case CPU_RS64A:
13180 case CPU_PPC601: /* ? */
13181 case CPU_PPC7450:
13182 return 3;
13183 case CPU_PPC440:
13184 case CPU_PPC603:
13185 case CPU_PPC750:
13186 case CPU_PPC7400:
13187 case CPU_PPC8540:
13188 return 2;
13189 case CPU_RIOS2:
13190 case CPU_PPC604:
13191 case CPU_PPC604E:
13192 case CPU_PPC620:
13193 case CPU_PPC630:
13194 case CPU_POWER4:
13195 return 4;
13196 default:
13197 return 1;
13201 /* Return how many instructions to look ahead for better insn
13202 scheduling. */
13204 static int
13205 rs6000_use_sched_lookahead ()
13207 if (rs6000_cpu_attr == CPU_PPC8540)
13208 return 4;
13209 return 0;
13213 /* Length in units of the trampoline for entering a nested function. */
13216 rs6000_trampoline_size ()
13218 int ret = 0;
13220 switch (DEFAULT_ABI)
13222 default:
13223 abort ();
13225 case ABI_AIX:
13226 ret = (TARGET_32BIT) ? 12 : 24;
13227 break;
13229 case ABI_DARWIN:
13230 case ABI_V4:
13231 ret = (TARGET_32BIT) ? 40 : 48;
13232 break;
13235 return ret;
/* Emit RTL insns to initialize the variable parts of a trampoline.
   FNADDR is an RTX for the address of the function's pure code.
   CXT is an RTX for the static chain value for the function.
   ADDR is the address of the trampoline storage itself.  */

void
rs6000_initialize_trampoline (addr, fnaddr, cxt)
     rtx addr;
     rtx fnaddr;
     rtx cxt;
{
  enum machine_mode pmode = Pmode;
  /* Width of one pointer slot in the descriptor.  */
  int regsize = (TARGET_32BIT) ? 4 : 8;
  rtx ctx_reg = force_reg (pmode, cxt);

  switch (DEFAULT_ABI)
    {
    default:
      abort ();

/* Macros to shorten the code expansions below.  */
#define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
#define MEM_PLUS(addr,offset) \
  gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))

    /* Under AIX, just build the 3 word function descriptor */
    case ABI_AIX:
      {
	/* Copy the entry point and TOC pointer out of FNADDR's own
	   descriptor, then store the static chain in the third slot.  */
	rtx fn_reg = gen_reg_rtx (pmode);
	rtx toc_reg = gen_reg_rtx (pmode);
	emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
	emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
	emit_move_insn (MEM_DEREF (addr), fn_reg);
	emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
	emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
      }
      break;

    /* Under V.4/eabi/darwin, __trampoline_setup does the real work.  */
    case ABI_DARWIN:
    case ABI_V4:
      emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
			 FALSE, VOIDmode, 4,
			 addr, pmode,
			 GEN_INT (rs6000_trampoline_size ()), SImode,
			 fnaddr, pmode,
			 ctx_reg, pmode);
      break;
    }

  return;
}
13291 /* Table of valid machine attributes. */
13293 const struct attribute_spec rs6000_attribute_table[] =
13295 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
13296 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
13297 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
13298 { NULL, 0, 0, false, false, false, NULL }
13301 /* Handle a "longcall" or "shortcall" attribute; arguments as in
13302 struct attribute_spec.handler. */
13304 static tree
13305 rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
13306 tree *node;
13307 tree name;
13308 tree args ATTRIBUTE_UNUSED;
13309 int flags ATTRIBUTE_UNUSED;
13310 bool *no_add_attrs;
13312 if (TREE_CODE (*node) != FUNCTION_TYPE
13313 && TREE_CODE (*node) != FIELD_DECL
13314 && TREE_CODE (*node) != TYPE_DECL)
13316 warning ("`%s' attribute only applies to functions",
13317 IDENTIFIER_POINTER (name));
13318 *no_add_attrs = true;
13321 return NULL_TREE;
13324 /* Set longcall attributes on all functions declared when
13325 rs6000_default_long_calls is true. */
13326 static void
13327 rs6000_set_default_type_attributes (type)
13328 tree type;
13330 if (rs6000_default_long_calls
13331 && (TREE_CODE (type) == FUNCTION_TYPE
13332 || TREE_CODE (type) == METHOD_TYPE))
13333 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
13334 NULL_TREE,
13335 TYPE_ATTRIBUTES (type));
13338 /* Return a reference suitable for calling a function with the
13339 longcall attribute. */
13341 struct rtx_def *
13342 rs6000_longcall_ref (call_ref)
13343 rtx call_ref;
13345 const char *call_name;
13346 tree node;
13348 if (GET_CODE (call_ref) != SYMBOL_REF)
13349 return call_ref;
13351 /* System V adds '.' to the internal name, so skip them. */
13352 call_name = XSTR (call_ref, 0);
13353 if (*call_name == '.')
13355 while (*call_name == '.')
13356 call_name++;
13358 node = get_identifier (call_name);
13359 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
13362 return force_reg (Pmode, call_ref);
13365 #ifdef USING_ELFOS_H
13367 /* A C statement or statements to switch to the appropriate section
13368 for output of RTX in mode MODE. You can assume that RTX is some
13369 kind of constant in RTL. The argument MODE is redundant except in
13370 the case of a `const_int' rtx. Select the section by calling
13371 `text_section' or one of the alternatives for other sections.
13373 Do not define this macro if you put all constants in the read-only
13374 data section. */
13376 static void
13377 rs6000_elf_select_rtx_section (mode, x, align)
13378 enum machine_mode mode;
13379 rtx x;
13380 unsigned HOST_WIDE_INT align;
13382 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
13383 toc_section ();
13384 else
13385 default_elf_select_rtx_section (mode, x, align);
13388 /* A C statement or statements to switch to the appropriate
13389 section for output of DECL. DECL is either a `VAR_DECL' node
13390 or a constant of some sort. RELOC indicates whether forming
13391 the initial value of DECL requires link-time relocations. */
13393 static void
13394 rs6000_elf_select_section (decl, reloc, align)
13395 tree decl;
13396 int reloc;
13397 unsigned HOST_WIDE_INT align;
13399 /* Pretend that we're always building for a shared library when
13400 ABI_AIX, because otherwise we end up with dynamic relocations
13401 in read-only sections. This happens for function pointers,
13402 references to vtables in typeinfo, and probably other cases. */
13403 default_elf_select_section_1 (decl, reloc, align,
13404 flag_pic || DEFAULT_ABI == ABI_AIX);
13407 /* A C statement to build up a unique section name, expressed as a
13408 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
13409 RELOC indicates whether the initial value of EXP requires
13410 link-time relocations. If you do not define this macro, GCC will use
13411 the symbol name prefixed by `.' as the section name. Note - this
13412 macro can now be called for uninitialized data items as well as
13413 initialized data and functions. */
13415 static void
13416 rs6000_elf_unique_section (decl, reloc)
13417 tree decl;
13418 int reloc;
13420 /* As above, pretend that we're always building for a shared library
13421 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
13422 default_unique_section_1 (decl, reloc,
13423 flag_pic || DEFAULT_ABI == ABI_AIX);
13426 /* For a SYMBOL_REF, set generic flags and then perform some
13427 target-specific processing.
13429 When the AIX ABI is requested on a non-AIX system, replace the
13430 function name with the real name (with a leading .) rather than the
13431 function descriptor name. This saves a lot of overriding code to
13432 read the prefixes. */
13434 static void
13435 rs6000_elf_encode_section_info (decl, rtl, first)
13436 tree decl;
13437 rtx rtl;
13438 int first;
13440 default_encode_section_info (decl, rtl, first);
13442 if (first
13443 && TREE_CODE (decl) == FUNCTION_DECL
13444 && !TARGET_AIX
13445 && DEFAULT_ABI == ABI_AIX)
13447 rtx sym_ref = XEXP (rtl, 0);
13448 size_t len = strlen (XSTR (sym_ref, 0));
13449 char *str = alloca (len + 2);
13450 str[0] = '.';
13451 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
13452 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
13456 static bool
13457 rs6000_elf_in_small_data_p (decl)
13458 tree decl;
13460 if (rs6000_sdata == SDATA_NONE)
13461 return false;
13463 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
13465 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
13466 if (strcmp (section, ".sdata") == 0
13467 || strcmp (section, ".sdata2") == 0
13468 || strcmp (section, ".sbss") == 0
13469 || strcmp (section, ".sbss2") == 0
13470 || strcmp (section, ".PPC.EMB.sdata0") == 0
13471 || strcmp (section, ".PPC.EMB.sbss0") == 0)
13472 return true;
13474 else
13476 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
13478 if (size > 0
13479 && (unsigned HOST_WIDE_INT) size <= g_switch_value
13480 /* If it's not public, and we're not going to reference it there,
13481 there's no need to put it in the small data section. */
13482 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
13483 return true;
13486 return false;
13489 #endif /* USING_ELFOS_H */
13492 /* Return a REG that occurs in ADDR with coefficient 1.
13493 ADDR can be effectively incremented by incrementing REG.
13495 r0 is special and we must not select it as an address
13496 register by this routine since our caller will try to
13497 increment the returned register via an "la" instruction. */
13499 struct rtx_def *
13500 find_addr_reg (addr)
13501 rtx addr;
13503 while (GET_CODE (addr) == PLUS)
13505 if (GET_CODE (XEXP (addr, 0)) == REG
13506 && REGNO (XEXP (addr, 0)) != 0)
13507 addr = XEXP (addr, 0);
13508 else if (GET_CODE (XEXP (addr, 1)) == REG
13509 && REGNO (XEXP (addr, 1)) != 0)
13510 addr = XEXP (addr, 1);
13511 else if (CONSTANT_P (XEXP (addr, 0)))
13512 addr = XEXP (addr, 1);
13513 else if (CONSTANT_P (XEXP (addr, 1)))
13514 addr = XEXP (addr, 0);
13515 else
13516 abort ();
13518 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
13519 return addr;
13520 abort ();
13523 void
13524 rs6000_fatal_bad_address (op)
13525 rtx op;
13527 fatal_insn ("bad address", op);
13530 #if TARGET_MACHO
#if 0
/* Returns 1 if OP is either a symbol reference or a sum of a symbol
   reference and a constant.  (Currently unused; kept under #if 0.)  */

int
symbolic_operand (op)
     rtx op;
{
  switch (GET_CODE (op))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return 1;
    case CONST:
      op = XEXP (op, 0);
      return (GET_CODE (op) == SYMBOL_REF ||
	      (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
	       || GET_CODE (XEXP (op, 0)) == LABEL_REF)
	      && GET_CODE (XEXP (op, 1)) == CONST_INT);
    default:
      return 0;
    }
}
#endif
13557 #ifdef RS6000_LONG_BRANCH
13559 static tree stub_list = 0;
13561 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
13562 procedure calls to the linked list. */
13564 void
13565 add_compiler_stub (label_name, function_name, line_number)
13566 tree label_name;
13567 tree function_name;
13568 int line_number;
13570 tree stub = build_tree_list (function_name, label_name);
13571 TREE_TYPE (stub) = build_int_2 (line_number, 0);
13572 TREE_CHAIN (stub) = stub_list;
13573 stub_list = stub;
13576 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
13577 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
13578 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
13580 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
13581 handling procedure calls from the linked list and initializes the
13582 linked list. */
13584 void
13585 output_compiler_stub ()
13587 char tmp_buf[256];
13588 char label_buf[256];
13589 tree stub;
13591 if (!flag_pic)
13592 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
13594 fprintf (asm_out_file,
13595 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));
13597 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
13598 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
13599 fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
13600 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
13602 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
13603 strcpy (label_buf,
13604 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
13605 else
13607 label_buf[0] = '_';
13608 strcpy (label_buf+1,
13609 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
13612 strcpy (tmp_buf, "lis r12,hi16(");
13613 strcat (tmp_buf, label_buf);
13614 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
13615 strcat (tmp_buf, label_buf);
13616 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
13617 output_asm_insn (tmp_buf, 0);
13619 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
13620 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
13621 fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
13622 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
13625 stub_list = 0;
13628 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
13629 already there or not. */
13632 no_previous_def (function_name)
13633 tree function_name;
13635 tree stub;
13636 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
13637 if (function_name == STUB_FUNCTION_NAME (stub))
13638 return 0;
13639 return 1;
13642 /* GET_PREV_LABEL gets the label name from the previous definition of
13643 the function. */
13645 tree
13646 get_prev_label (function_name)
13647 tree function_name;
13649 tree stub;
13650 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
13651 if (function_name == STUB_FUNCTION_NAME (stub))
13652 return STUB_LABEL_NAME (stub);
13653 return 0;
13656 /* INSN is either a function call or a millicode call. It may have an
13657 unconditional jump in its delay slot.
13659 CALL_DEST is the routine we are calling. */
13661 char *
13662 output_call (insn, call_dest, operand_number)
13663 rtx insn;
13664 rtx call_dest;
13665 int operand_number;
13667 static char buf[256];
13668 if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
13670 tree labelname;
13671 tree funname = get_identifier (XSTR (call_dest, 0));
13673 if (no_previous_def (funname))
13675 int line_number = 0;
13676 rtx label_rtx = gen_label_rtx ();
13677 char *label_buf, temp_buf[256];
13678 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
13679 CODE_LABEL_NUMBER (label_rtx));
13680 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
13681 labelname = get_identifier (label_buf);
13682 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
13683 if (insn)
13684 line_number = NOTE_LINE_NUMBER (insn);
13685 add_compiler_stub (labelname, funname, line_number);
13687 else
13688 labelname = get_prev_label (funname);
13690 sprintf (buf, "jbsr %%z%d,%.246s",
13691 operand_number, IDENTIFIER_POINTER (labelname));
13692 return buf;
13694 else
13696 sprintf (buf, "bl %%z%d", operand_number);
13697 return buf;
13701 #endif /* RS6000_LONG_BRANCH */
/* Build a local label "L<N>$<symbol>" into BUF, preserving or adding
   quoting as the symbol requires.  */
#define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N)		\
  do {								\
    const char *const symbol_ = (SYMBOL);			\
    char *buffer_ = (BUF);					\
    if (symbol_[0] == '"')					\
      {								\
	sprintf(buffer_, "\"L%d$%s", (N), symbol_+1);		\
      }								\
    else if (name_needs_quotes(symbol_))			\
      {								\
	sprintf(buffer_, "\"L%d$%s\"", (N), symbol_);		\
      }								\
    else							\
      {								\
	sprintf(buffer_, "L%d$%s", (N), symbol_);		\
      }								\
  } while (0)
13722 /* Generate PIC and indirect symbol stubs. */
13724 void
13725 machopic_output_stub (file, symb, stub)
13726 FILE *file;
13727 const char *symb, *stub;
13729 unsigned int length;
13730 char *symbol_name, *lazy_ptr_name;
13731 char *local_label_0;
13732 static int label = 0;
13734 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
13735 symb = (*targetm.strip_name_encoding) (symb);
13737 label += 1;
13739 length = strlen (symb);
13740 symbol_name = alloca (length + 32);
13741 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
13743 lazy_ptr_name = alloca (length + 32);
13744 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
13746 local_label_0 = alloca (length + 32);
13747 GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);
13749 if (flag_pic == 2)
13750 machopic_picsymbol_stub1_section ();
13751 else
13752 machopic_symbol_stub1_section ();
13753 fprintf (file, "\t.align 2\n");
13755 fprintf (file, "%s:\n", stub);
13756 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
13758 if (flag_pic == 2)
13760 fprintf (file, "\tmflr r0\n");
13761 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
13762 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
13763 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
13764 lazy_ptr_name, local_label_0);
13765 fprintf (file, "\tmtlr r0\n");
13766 fprintf (file, "\tlwzu r12,lo16(%s-%s)(r11)\n",
13767 lazy_ptr_name, local_label_0);
13768 fprintf (file, "\tmtctr r12\n");
13769 fprintf (file, "\tbctr\n");
13771 else
13773 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
13774 fprintf (file, "\tlwzu r12,lo16(%s)(r11)\n", lazy_ptr_name);
13775 fprintf (file, "\tmtctr r12\n");
13776 fprintf (file, "\tbctr\n");
13779 machopic_lazy_symbol_ptr_section ();
13780 fprintf (file, "%s:\n", lazy_ptr_name);
13781 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
13782 fprintf (file, "\t.long dyld_stub_binding_helper\n");
13785 /* Legitimize PIC addresses. If the address is already
13786 position-independent, we return ORIG. Newly generated
13787 position-independent addresses go into a reg. This is REG if non
13788 zero, otherwise we allocate register(s) as necessary. */
13790 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
13793 rs6000_machopic_legitimize_pic_address (orig, mode, reg)
13794 rtx orig;
13795 enum machine_mode mode;
13796 rtx reg;
13798 rtx base, offset;
13800 if (reg == NULL && ! reload_in_progress && ! reload_completed)
13801 reg = gen_reg_rtx (Pmode);
13803 if (GET_CODE (orig) == CONST)
13805 if (GET_CODE (XEXP (orig, 0)) == PLUS
13806 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
13807 return orig;
13809 if (GET_CODE (XEXP (orig, 0)) == PLUS)
13811 base =
13812 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
13813 Pmode, reg);
13814 offset =
13815 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
13816 Pmode, reg);
13818 else
13819 abort ();
13821 if (GET_CODE (offset) == CONST_INT)
13823 if (SMALL_INT (offset))
13824 return plus_constant (base, INTVAL (offset));
13825 else if (! reload_in_progress && ! reload_completed)
13826 offset = force_reg (Pmode, offset);
13827 else
13829 rtx mem = force_const_mem (Pmode, orig);
13830 return machopic_legitimize_pic_address (mem, Pmode, reg);
13833 return gen_rtx (PLUS, Pmode, base, offset);
13836 /* Fall back on generic machopic code. */
13837 return machopic_legitimize_pic_address (orig, mode, reg);
/* This is just a placeholder to make linking work without having to
   add this to the generic Darwin EXTRA_SECTIONS.  If -mcall-aix is
   ever needed for Darwin (not too likely!) this would have to get a
   real definition.  */

void
toc_section ()
{
}
13850 #endif /* TARGET_MACHO */
13852 #if TARGET_ELF
13853 static unsigned int
13854 rs6000_elf_section_type_flags (decl, name, reloc)
13855 tree decl;
13856 const char *name;
13857 int reloc;
13859 unsigned int flags
13860 = default_section_type_flags_1 (decl, name, reloc,
13861 flag_pic || DEFAULT_ABI == ABI_AIX);
13863 if (TARGET_RELOCATABLE)
13864 flags |= SECTION_WRITE;
13866 return flags;
13869 /* Record an element in the table of global constructors. SYMBOL is
13870 a SYMBOL_REF of the function to be called; PRIORITY is a number
13871 between 0 and MAX_INIT_PRIORITY.
13873 This differs from default_named_section_asm_out_constructor in
13874 that we have special handling for -mrelocatable. */
13876 static void
13877 rs6000_elf_asm_out_constructor (symbol, priority)
13878 rtx symbol;
13879 int priority;
13881 const char *section = ".ctors";
13882 char buf[16];
13884 if (priority != DEFAULT_INIT_PRIORITY)
13886 sprintf (buf, ".ctors.%.5u",
13887 /* Invert the numbering so the linker puts us in the proper
13888 order; constructors are run from right to left, and the
13889 linker sorts in increasing order. */
13890 MAX_INIT_PRIORITY - priority);
13891 section = buf;
13894 named_section_flags (section, SECTION_WRITE);
13895 assemble_align (POINTER_SIZE);
13897 if (TARGET_RELOCATABLE)
13899 fputs ("\t.long (", asm_out_file);
13900 output_addr_const (asm_out_file, symbol);
13901 fputs (")@fixup\n", asm_out_file);
13903 else
13904 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
13907 static void
13908 rs6000_elf_asm_out_destructor (symbol, priority)
13909 rtx symbol;
13910 int priority;
13912 const char *section = ".dtors";
13913 char buf[16];
13915 if (priority != DEFAULT_INIT_PRIORITY)
13917 sprintf (buf, ".dtors.%.5u",
13918 /* Invert the numbering so the linker puts us in the proper
13919 order; constructors are run from right to left, and the
13920 linker sorts in increasing order. */
13921 MAX_INIT_PRIORITY - priority);
13922 section = buf;
13925 named_section_flags (section, SECTION_WRITE);
13926 assemble_align (POINTER_SIZE);
13928 if (TARGET_RELOCATABLE)
13930 fputs ("\t.long (", asm_out_file);
13931 output_addr_const (asm_out_file, symbol);
13932 fputs (")@fixup\n", asm_out_file);
13934 else
13935 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
13937 #endif
13939 #if TARGET_XCOFF
13940 static void
13941 rs6000_xcoff_asm_globalize_label (stream, name)
13942 FILE *stream;
13943 const char *name;
13945 fputs (GLOBAL_ASM_OP, stream);
13946 RS6000_OUTPUT_BASENAME (stream, name);
13947 putc ('\n', stream);
13950 static void
13951 rs6000_xcoff_asm_named_section (name, flags)
13952 const char *name;
13953 unsigned int flags;
13955 int smclass;
13956 static const char * const suffix[3] = { "PR", "RO", "RW" };
13958 if (flags & SECTION_CODE)
13959 smclass = 0;
13960 else if (flags & SECTION_WRITE)
13961 smclass = 2;
13962 else
13963 smclass = 1;
13965 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
13966 (flags & SECTION_CODE) ? "." : "",
13967 name, suffix[smclass], flags & SECTION_ENTSIZE);
13970 static void
13971 rs6000_xcoff_select_section (decl, reloc, align)
13972 tree decl;
13973 int reloc;
13974 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
13976 if (decl_readonly_section_1 (decl, reloc, 1))
13978 if (TREE_PUBLIC (decl))
13979 read_only_data_section ();
13980 else
13981 read_only_private_data_section ();
13983 else
13985 if (TREE_PUBLIC (decl))
13986 data_section ();
13987 else
13988 private_data_section ();
13992 static void
13993 rs6000_xcoff_unique_section (decl, reloc)
13994 tree decl;
13995 int reloc ATTRIBUTE_UNUSED;
13997 const char *name;
13999 /* Use select_section for private and uninitialized data. */
14000 if (!TREE_PUBLIC (decl)
14001 || DECL_COMMON (decl)
14002 || DECL_INITIAL (decl) == NULL_TREE
14003 || DECL_INITIAL (decl) == error_mark_node
14004 || (flag_zero_initialized_in_bss
14005 && initializer_zerop (DECL_INITIAL (decl))))
14006 return;
14008 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
14009 name = (*targetm.strip_name_encoding) (name);
14010 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
14013 /* Select section for constant in constant pool.
14015 On RS/6000, all constants are in the private read-only data area.
14016 However, if this is being placed in the TOC it must be output as a
14017 toc entry. */
14019 static void
14020 rs6000_xcoff_select_rtx_section (mode, x, align)
14021 enum machine_mode mode;
14022 rtx x;
14023 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
14025 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
14026 toc_section ();
14027 else
14028 read_only_private_data_section ();
/* Remove any trailing [DS] or the like from the symbol name.  Also
   drops a leading '*' (the "verbatim name" marker).  */

static const char *
rs6000_xcoff_strip_name_encoding (name)
     const char *name;
{
  size_t len;
  if (*name == '*')
    name++;
  len = strlen (name);
  /* XCOFF mapping-class suffixes are exactly four chars: "[XX]".  */
  if (name[len - 1] == ']')
    return ggc_alloc_string (name, len - 4);
  else
    return name;
}
14047 /* Section attributes. AIX is always PIC. */
14049 static unsigned int
14050 rs6000_xcoff_section_type_flags (decl, name, reloc)
14051 tree decl;
14052 const char *name;
14053 int reloc;
14055 unsigned int align;
14056 unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);
14058 /* Align to at least UNIT size. */
14059 if (flags & SECTION_CODE)
14060 align = MIN_UNITS_PER_WORD;
14061 else
14062 /* Increase alignment of large objects if not already stricter. */
14063 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
14064 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
14065 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
14067 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
14069 #endif /* TARGET_XCOFF */
14071 #if TARGET_MACHO
14072 /* Cross-module name binding. Darwin does not support overriding
14073 functions at dynamic-link time. */
14075 static bool
14076 rs6000_binds_local_p (decl)
14077 tree decl;
14079 return default_binds_local_p_1 (decl, 0);
14081 #endif
14083 /* Compute a (partial) cost for rtx X. Return true if the complete
14084 cost has been computed, and false if subexpressions should be
14085 scanned. In either case, *TOTAL contains the cost result. */
14087 static bool
14088 rs6000_rtx_costs (x, code, outer_code, total)
14089 rtx x;
14090 int code, outer_code ATTRIBUTE_UNUSED;
14091 int *total;
14093 switch (code)
14095 /* On the RS/6000, if it is valid in the insn, it is free.
14096 So this always returns 0. */
14097 case CONST_INT:
14098 case CONST:
14099 case LABEL_REF:
14100 case SYMBOL_REF:
14101 case CONST_DOUBLE:
14102 case HIGH:
14103 *total = 0;
14104 return true;
14106 case PLUS:
14107 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
14108 && ((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1))
14109 + 0x8000) >= 0x10000)
14110 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
14111 ? COSTS_N_INSNS (2)
14112 : COSTS_N_INSNS (1));
14113 return true;
14115 case AND:
14116 case IOR:
14117 case XOR:
14118 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
14119 && (INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff)) != 0
14120 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
14121 ? COSTS_N_INSNS (2)
14122 : COSTS_N_INSNS (1));
14123 return true;
14125 case MULT:
14126 if (optimize_size)
14128 *total = COSTS_N_INSNS (2);
14129 return true;
14131 switch (rs6000_cpu)
14133 case PROCESSOR_RIOS1:
14134 case PROCESSOR_PPC405:
14135 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14136 ? COSTS_N_INSNS (5)
14137 : (INTVAL (XEXP (x, 1)) >= -256
14138 && INTVAL (XEXP (x, 1)) <= 255)
14139 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
14140 return true;
14142 case PROCESSOR_PPC440:
14143 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14144 ? COSTS_N_INSNS (3)
14145 : COSTS_N_INSNS (2));
14146 return true;
14148 case PROCESSOR_RS64A:
14149 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14150 ? GET_MODE (XEXP (x, 1)) != DImode
14151 ? COSTS_N_INSNS (20) : COSTS_N_INSNS (34)
14152 : (INTVAL (XEXP (x, 1)) >= -256
14153 && INTVAL (XEXP (x, 1)) <= 255)
14154 ? COSTS_N_INSNS (8) : COSTS_N_INSNS (12));
14155 return true;
14157 case PROCESSOR_RIOS2:
14158 case PROCESSOR_MPCCORE:
14159 case PROCESSOR_PPC604e:
14160 *total = COSTS_N_INSNS (2);
14161 return true;
14163 case PROCESSOR_PPC601:
14164 *total = COSTS_N_INSNS (5);
14165 return true;
14167 case PROCESSOR_PPC603:
14168 case PROCESSOR_PPC7400:
14169 case PROCESSOR_PPC750:
14170 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14171 ? COSTS_N_INSNS (5)
14172 : (INTVAL (XEXP (x, 1)) >= -256
14173 && INTVAL (XEXP (x, 1)) <= 255)
14174 ? COSTS_N_INSNS (2) : COSTS_N_INSNS (3));
14175 return true;
14177 case PROCESSOR_PPC7450:
14178 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14179 ? COSTS_N_INSNS (4)
14180 : COSTS_N_INSNS (3));
14181 return true;
14183 case PROCESSOR_PPC403:
14184 case PROCESSOR_PPC604:
14185 case PROCESSOR_PPC8540:
14186 *total = COSTS_N_INSNS (4);
14187 return true;
14189 case PROCESSOR_PPC620:
14190 case PROCESSOR_PPC630:
14191 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14192 ? GET_MODE (XEXP (x, 1)) != DImode
14193 ? COSTS_N_INSNS (5) : COSTS_N_INSNS (7)
14194 : (INTVAL (XEXP (x, 1)) >= -256
14195 && INTVAL (XEXP (x, 1)) <= 255)
14196 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
14197 return true;
14199 case PROCESSOR_POWER4:
14200 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14201 ? GET_MODE (XEXP (x, 1)) != DImode
14202 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4)
14203 : COSTS_N_INSNS (2));
14204 return true;
14206 default:
14207 abort ();
14210 case DIV:
14211 case MOD:
14212 if (GET_CODE (XEXP (x, 1)) == CONST_INT
14213 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
14215 *total = COSTS_N_INSNS (2);
14216 return true;
14218 /* FALLTHRU */
14220 case UDIV:
14221 case UMOD:
14222 switch (rs6000_cpu)
14224 case PROCESSOR_RIOS1:
14225 *total = COSTS_N_INSNS (19);
14226 return true;
14228 case PROCESSOR_RIOS2:
14229 *total = COSTS_N_INSNS (13);
14230 return true;
14232 case PROCESSOR_RS64A:
14233 *total = (GET_MODE (XEXP (x, 1)) != DImode
14234 ? COSTS_N_INSNS (65)
14235 : COSTS_N_INSNS (67));
14236 return true;
14238 case PROCESSOR_MPCCORE:
14239 *total = COSTS_N_INSNS (6);
14240 return true;
14242 case PROCESSOR_PPC403:
14243 *total = COSTS_N_INSNS (33);
14244 return true;
14246 case PROCESSOR_PPC405:
14247 *total = COSTS_N_INSNS (35);
14248 return true;
14250 case PROCESSOR_PPC440:
14251 *total = COSTS_N_INSNS (34);
14252 return true;
14254 case PROCESSOR_PPC601:
14255 *total = COSTS_N_INSNS (36);
14256 return true;
14258 case PROCESSOR_PPC603:
14259 *total = COSTS_N_INSNS (37);
14260 return true;
14262 case PROCESSOR_PPC604:
14263 case PROCESSOR_PPC604e:
14264 *total = COSTS_N_INSNS (20);
14265 return true;
14267 case PROCESSOR_PPC620:
14268 case PROCESSOR_PPC630:
14269 *total = (GET_MODE (XEXP (x, 1)) != DImode
14270 ? COSTS_N_INSNS (21)
14271 : COSTS_N_INSNS (37));
14272 return true;
14274 case PROCESSOR_PPC750:
14275 case PROCESSOR_PPC8540:
14276 case PROCESSOR_PPC7400:
14277 *total = COSTS_N_INSNS (19);
14278 return true;
14280 case PROCESSOR_PPC7450:
14281 *total = COSTS_N_INSNS (23);
14282 return true;
14284 case PROCESSOR_POWER4:
14285 *total = (GET_MODE (XEXP (x, 1)) != DImode
14286 ? COSTS_N_INSNS (18)
14287 : COSTS_N_INSNS (34));
14288 return true;
14290 default:
14291 abort ();
14294 case FFS:
14295 *total = COSTS_N_INSNS (4);
14296 return true;
14298 case MEM:
14299 /* MEM should be slightly more expensive than (plus (reg) (const)) */
14300 *total = 5;
14301 return true;
14303 default:
14304 return false;
14308 /* A C expression returning the cost of moving data from a register of class
14309 CLASS1 to one of CLASS2. */
14312 rs6000_register_move_cost (mode, from, to)
14313 enum machine_mode mode;
14314 enum reg_class from, to;
14316 /* Moves from/to GENERAL_REGS. */
14317 if (reg_classes_intersect_p (to, GENERAL_REGS)
14318 || reg_classes_intersect_p (from, GENERAL_REGS))
14320 if (! reg_classes_intersect_p (to, GENERAL_REGS))
14321 from = to;
14323 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
14324 return (rs6000_memory_move_cost (mode, from, 0)
14325 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
14327 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift...*/
14328 else if (from == CR_REGS)
14329 return 4;
14331 else
14332 /* A move will cost one instruction per GPR moved. */
14333 return 2 * HARD_REGNO_NREGS (0, mode);
14336 /* Moving between two similar registers is just one instruction. */
14337 else if (reg_classes_intersect_p (to, from))
14338 return mode == TFmode ? 4 : 2;
14340 /* Everything else has to go through GENERAL_REGS. */
14341 else
14342 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
14343 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
14346 /* A C expressions returning the cost of moving data of MODE from a register to
14347 or from memory. */
14350 rs6000_memory_move_cost (mode, class, in)
14351 enum machine_mode mode;
14352 enum reg_class class;
14353 int in ATTRIBUTE_UNUSED;
14355 if (reg_classes_intersect_p (class, GENERAL_REGS))
14356 return 4 * HARD_REGNO_NREGS (0, mode);
14357 else if (reg_classes_intersect_p (class, FLOAT_REGS))
14358 return 4 * HARD_REGNO_NREGS (32, mode);
14359 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
14360 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO, mode);
14361 else
14362 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
14365 /* Return an RTX representing where to find the function value of a
14366 function returning MODE. */
14367 static rtx
14368 rs6000_complex_function_value (enum machine_mode mode)
14370 unsigned int regno;
14371 rtx r1, r2;
14372 enum machine_mode inner = GET_MODE_INNER (mode);
14374 if (FLOAT_MODE_P (mode))
14375 regno = FP_ARG_RETURN;
14376 else
14378 regno = GP_ARG_RETURN;
14380 /* 32-bit is OK since it'll go in r3/r4. */
14381 if (TARGET_32BIT)
14382 return gen_rtx_REG (mode, regno);
14385 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
14386 const0_rtx);
14387 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
14388 GEN_INT (GET_MODE_UNIT_SIZE (inner)));
14389 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
14392 /* Define how to find the value returned by a function.
14393 VALTYPE is the data type of the value (as a tree).
14394 If the precise function being called is known, FUNC is its FUNCTION_DECL;
14395 otherwise, FUNC is 0.
14397 On the SPE, both FPs and vectors are returned in r3.
14399 On RS/6000 an integer value is in r3 and a floating-point value is in
14400 fp1, unless -msoft-float. */
14403 rs6000_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
14405 enum machine_mode mode;
14406 unsigned int regno;
14408 if ((INTEGRAL_TYPE_P (valtype)
14409 && TYPE_PRECISION (valtype) < BITS_PER_WORD)
14410 || POINTER_TYPE_P (valtype))
14411 mode = word_mode;
14412 else
14413 mode = TYPE_MODE (valtype);
14415 if (TREE_CODE (valtype) == REAL_TYPE && TARGET_HARD_FLOAT && TARGET_FPRS)
14416 regno = FP_ARG_RETURN;
14417 else if (TREE_CODE (valtype) == COMPLEX_TYPE
14418 && TARGET_HARD_FLOAT
14419 && SPLIT_COMPLEX_ARGS)
14420 return rs6000_complex_function_value (mode);
14421 else if (TREE_CODE (valtype) == VECTOR_TYPE && TARGET_ALTIVEC)
14422 regno = ALTIVEC_ARG_RETURN;
14423 else
14424 regno = GP_ARG_RETURN;
14426 return gen_rtx_REG (mode, regno);
14429 /* Define how to find the value returned by a library function
14430 assuming the value has mode MODE. */
14432 rs6000_libcall_value (enum machine_mode mode)
14434 unsigned int regno;
14436 if (GET_MODE_CLASS (mode) == MODE_FLOAT
14437 && TARGET_HARD_FLOAT && TARGET_FPRS)
14438 regno = FP_ARG_RETURN;
14439 else if (ALTIVEC_VECTOR_MODE (mode))
14440 regno = ALTIVEC_ARG_RETURN;
14441 else if (COMPLEX_MODE_P (mode) && SPLIT_COMPLEX_ARGS)
14442 return rs6000_complex_function_value (mode);
14443 else
14444 regno = GP_ARG_RETURN;
14446 return gen_rtx_REG (mode, regno);
14449 /* Return true if TYPE is of type __ev64_opaque__. */
14451 static bool
14452 is_ev64_opaque_type (type)
14453 tree type;
14455 return (TARGET_SPE
14456 && (type == opaque_V2SI_type_node
14457 || type == opaque_V2SF_type_node
14458 || type == opaque_p_V2SI_type_node
14459 || (TREE_CODE (type) == VECTOR_TYPE
14460 && TYPE_NAME (type)
14461 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
14462 && DECL_NAME (TYPE_NAME (type))
14463 && strcmp (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type))),
14464 "__ev64_opaque__") == 0)));
14467 static rtx
14468 rs6000_dwarf_register_span (reg)
14469 rtx reg;
14471 unsigned regno;
14473 if (!TARGET_SPE || !SPE_VECTOR_MODE (GET_MODE (reg)))
14474 return NULL_RTX;
14476 regno = REGNO (reg);
14478 /* The duality of the SPE register size wreaks all kinds of havoc.
14479 This is a way of distinguishing r0 in 32-bits from r0 in
14480 64-bits. */
14481 return
14482 gen_rtx_PARALLEL (VOIDmode,
14483 BYTES_BIG_ENDIAN
14484 ? gen_rtvec (2,
14485 gen_rtx_REG (SImode, regno + 1200),
14486 gen_rtx_REG (SImode, regno))
14487 : gen_rtvec (2,
14488 gen_rtx_REG (SImode, regno),
14489 gen_rtx_REG (SImode, regno + 1200)));
14492 #include "gt-rs6000.h"