2003-05-15 Aldy Hernandez <aldyh@redhat.com>
[official-gcc.git] / gcc / config / rs6000 / rs6000.c
blobe451f20c0eeda1f9356edee7f8be98bc707e1735
1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 2, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the
20 Free Software Foundation, 59 Temple Place - Suite 330, Boston,
21 MA 02111-1307, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "rtl.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "real.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
34 #include "flags.h"
35 #include "recog.h"
36 #include "obstack.h"
37 #include "tree.h"
38 #include "expr.h"
39 #include "optabs.h"
40 #include "except.h"
41 #include "function.h"
42 #include "output.h"
43 #include "basic-block.h"
44 #include "integrate.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "hashtab.h"
48 #include "tm_p.h"
49 #include "target.h"
50 #include "target-def.h"
51 #include "langhooks.h"
52 #include "reload.h"
#ifndef TARGET_NO_PROTOTYPE
#define TARGET_NO_PROTOTYPE 0
#endif

/* Nonzero if N is a constant in [-16, 15] and X is a vector whose
   elements are all the same (checked by easy_vector_same) -- i.e. a
   vector constant loadable with a single vspltis* instruction.  */
#define EASY_VECTOR_15(n, x, y) ((n) >= -16 && (n) <= 15 \
				 && easy_vector_same (x, y))

/* Like EASY_VECTOR_15, but for the even values 0x10..0x1e, which can
   be synthesized as a vspltis* of n/2 followed by an add of the
   register to itself.  */
#define EASY_VECTOR_15_ADD_SELF(n, x, y) ((n) >= 0x10 && (n) <= 0x1e \
					  && !((n) & 1) \
					  && easy_vector_same (x, y))

/* NOTE: classic double-evaluation macros; only use with side-effect-free
   arguments.  */
#define min(A,B)	((A) < (B) ? (A) : (B))
#define max(A,B)	((A) > (B) ? (A) : (B))
68 /* Target cpu type */
70 enum processor_type rs6000_cpu;
71 struct rs6000_cpu_select rs6000_select[3] =
73 /* switch name, tune arch */
74 { (const char *)0, "--with-cpu=", 1, 1 },
75 { (const char *)0, "-mcpu=", 1, 1 },
76 { (const char *)0, "-mtune=", 1, 0 },
79 /* Size of long double */
80 const char *rs6000_long_double_size_string;
81 int rs6000_long_double_type_size;
83 /* Whether -mabi=altivec has appeared */
84 int rs6000_altivec_abi;
86 /* Whether VRSAVE instructions should be generated. */
87 int rs6000_altivec_vrsave;
89 /* String from -mvrsave= option. */
90 const char *rs6000_altivec_vrsave_string;
92 /* Nonzero if we want SPE ABI extensions. */
93 int rs6000_spe_abi;
95 /* Whether isel instructions should be generated. */
96 int rs6000_isel;
98 /* Whether SPE simd instructions should be generated. */
99 int rs6000_spe;
101 /* Nonzero if floating point operations are done in the GPRs. */
102 int rs6000_float_gprs = 0;
104 /* String from -mfloat-gprs=. */
105 const char *rs6000_float_gprs_string;
107 /* String from -misel=. */
108 const char *rs6000_isel_string;
110 /* String from -mspe=. */
111 const char *rs6000_spe_string;
113 /* Set to nonzero once AIX common-mode calls have been defined. */
114 static GTY(()) int common_mode_defined;
116 /* Save information from a "cmpxx" operation until the branch or scc is
117 emitted. */
118 rtx rs6000_compare_op0, rs6000_compare_op1;
119 int rs6000_compare_fp_p;
121 /* Label number of label created for -mrelocatable, to call to so we can
122 get the address of the GOT section */
123 int rs6000_pic_labelno;
125 #ifdef USING_ELFOS_H
126 /* Which abi to adhere to */
127 const char *rs6000_abi_name = RS6000_ABI_NAME;
129 /* Semantics of the small data area */
130 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
132 /* Which small data model to use */
133 const char *rs6000_sdata_name = (char *)0;
135 /* Counter for labels which are to be placed in .fixup. */
136 int fixuplabelno = 0;
137 #endif
139 /* Bit size of immediate TLS offsets and string from which it is decoded. */
140 int rs6000_tls_size = 32;
141 const char *rs6000_tls_size_string;
143 /* ABI enumeration available for subtarget to use. */
144 enum rs6000_abi rs6000_current_abi;
146 /* ABI string from -mabi= option. */
147 const char *rs6000_abi_string;
149 /* Debug flags */
150 const char *rs6000_debug_name;
151 int rs6000_debug_stack; /* debug stack applications */
152 int rs6000_debug_arg; /* debug argument handling */
154 /* Opaque types. */
155 static GTY(()) tree opaque_V2SI_type_node;
156 static GTY(()) tree opaque_V2SF_type_node;
157 static GTY(()) tree opaque_p_V2SI_type_node;
159 const char *rs6000_traceback_name;
160 static enum {
161 traceback_default = 0,
162 traceback_none,
163 traceback_part,
164 traceback_full
165 } rs6000_traceback;
167 /* Flag to say the TOC is initialized */
168 int toc_initialized;
169 char toc_label_name[10];
171 /* Alias set for saves and restores from the rs6000 stack. */
172 static int rs6000_sr_alias_set;
174 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
175 The only place that looks at this is rs6000_set_default_type_attributes;
176 everywhere else should rely on the presence or absence of a longcall
177 attribute on the function declaration. */
178 int rs6000_default_long_calls;
179 const char *rs6000_longcall_switch;
181 struct builtin_description
183 /* mask is not const because we're going to alter it below. This
184 nonsense will go away when we rewrite the -march infrastructure
185 to give us more target flag bits. */
186 unsigned int mask;
187 const enum insn_code icode;
188 const char *const name;
189 const enum rs6000_builtins code;
192 static bool rs6000_function_ok_for_sibcall PARAMS ((tree, tree));
193 static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
194 static void validate_condition_mode
195 PARAMS ((enum rtx_code, enum machine_mode));
196 static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
197 static void rs6000_maybe_dead PARAMS ((rtx));
198 static void rs6000_emit_stack_tie PARAMS ((void));
199 static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
200 static rtx spe_synthesize_frame_save PARAMS ((rtx));
201 static bool spe_func_has_64bit_regs_p PARAMS ((void));
202 static void emit_frame_save PARAMS ((rtx, rtx, enum machine_mode,
203 unsigned int, int, int));
204 static rtx gen_frame_mem_offset PARAMS ((enum machine_mode, rtx, int));
205 static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
206 static unsigned rs6000_hash_constant PARAMS ((rtx));
207 static unsigned toc_hash_function PARAMS ((const void *));
208 static int toc_hash_eq PARAMS ((const void *, const void *));
209 static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
210 static bool constant_pool_expr_p PARAMS ((rtx));
211 static bool toc_relative_expr_p PARAMS ((rtx));
212 static bool legitimate_small_data_p PARAMS ((enum machine_mode, rtx));
213 static bool legitimate_offset_address_p PARAMS ((enum machine_mode, rtx, int));
214 static bool legitimate_indexed_address_p PARAMS ((rtx, int));
215 static bool legitimate_indirect_address_p PARAMS ((rtx, int));
216 static bool legitimate_lo_sum_address_p PARAMS ((enum machine_mode, rtx, int));
217 static struct machine_function * rs6000_init_machine_status PARAMS ((void));
218 static bool rs6000_assemble_integer PARAMS ((rtx, unsigned int, int));
219 #ifdef HAVE_GAS_HIDDEN
220 static void rs6000_assemble_visibility PARAMS ((tree, int));
221 #endif
222 static int rs6000_ra_ever_killed PARAMS ((void));
223 static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
224 extern const struct attribute_spec rs6000_attribute_table[];
225 static void rs6000_set_default_type_attributes PARAMS ((tree));
226 static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
227 static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
228 static void rs6000_output_mi_thunk PARAMS ((FILE *, tree, HOST_WIDE_INT,
229 HOST_WIDE_INT, tree));
230 static rtx rs6000_emit_set_long_const PARAMS ((rtx,
231 HOST_WIDE_INT, HOST_WIDE_INT));
232 #if TARGET_ELF
233 static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
234 int));
235 static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
236 static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
237 static void rs6000_elf_select_section PARAMS ((tree, int,
238 unsigned HOST_WIDE_INT));
239 static void rs6000_elf_unique_section PARAMS ((tree, int));
240 static void rs6000_elf_select_rtx_section PARAMS ((enum machine_mode, rtx,
241 unsigned HOST_WIDE_INT));
242 static void rs6000_elf_encode_section_info PARAMS ((tree, rtx, int))
243 ATTRIBUTE_UNUSED;
244 static bool rs6000_elf_in_small_data_p PARAMS ((tree));
245 #endif
246 #if TARGET_XCOFF
247 static void rs6000_xcoff_asm_globalize_label PARAMS ((FILE *, const char *));
248 static void rs6000_xcoff_asm_named_section PARAMS ((const char *, unsigned int));
249 static void rs6000_xcoff_select_section PARAMS ((tree, int,
250 unsigned HOST_WIDE_INT));
251 static void rs6000_xcoff_unique_section PARAMS ((tree, int));
252 static void rs6000_xcoff_select_rtx_section PARAMS ((enum machine_mode, rtx,
253 unsigned HOST_WIDE_INT));
254 static const char * rs6000_xcoff_strip_name_encoding PARAMS ((const char *));
255 static unsigned int rs6000_xcoff_section_type_flags PARAMS ((tree, const char *, int));
256 #endif
257 #if TARGET_MACHO
258 static bool rs6000_binds_local_p PARAMS ((tree));
259 #endif
260 static int rs6000_use_dfa_pipeline_interface PARAMS ((void));
261 static int rs6000_variable_issue PARAMS ((FILE *, int, rtx, int));
262 static bool rs6000_rtx_costs PARAMS ((rtx, int, int, int *));
263 static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
264 static int rs6000_adjust_priority PARAMS ((rtx, int));
265 static int rs6000_issue_rate PARAMS ((void));
266 static int rs6000_use_sched_lookahead PARAMS ((void));
268 static void rs6000_init_builtins PARAMS ((void));
269 static rtx rs6000_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
270 static rtx rs6000_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
271 static rtx rs6000_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
272 static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
273 static void altivec_init_builtins PARAMS ((void));
274 static void rs6000_common_init_builtins PARAMS ((void));
276 static void enable_mask_for_builtins PARAMS ((struct builtin_description *,
277 int, enum rs6000_builtins,
278 enum rs6000_builtins));
279 static void spe_init_builtins PARAMS ((void));
280 static rtx spe_expand_builtin PARAMS ((tree, rtx, bool *));
281 static rtx spe_expand_predicate_builtin PARAMS ((enum insn_code, tree, rtx));
282 static rtx spe_expand_evsel_builtin PARAMS ((enum insn_code, tree, rtx));
283 static int rs6000_emit_int_cmove PARAMS ((rtx, rtx, rtx, rtx));
285 static rtx altivec_expand_builtin PARAMS ((tree, rtx, bool *));
286 static rtx altivec_expand_ld_builtin PARAMS ((tree, rtx, bool *));
287 static rtx altivec_expand_st_builtin PARAMS ((tree, rtx, bool *));
288 static rtx altivec_expand_dst_builtin PARAMS ((tree, rtx, bool *));
289 static rtx altivec_expand_abs_builtin PARAMS ((enum insn_code, tree, rtx));
290 static rtx altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx));
291 static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree));
292 static void rs6000_parse_abi_options PARAMS ((void));
293 static void rs6000_parse_tls_size_option PARAMS ((void));
294 static void rs6000_parse_yes_no_option (const char *, const char *, int *);
295 static int first_altivec_reg_to_save PARAMS ((void));
296 static unsigned int compute_vrsave_mask PARAMS ((void));
297 static void is_altivec_return_reg PARAMS ((rtx, void *));
298 static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
299 int easy_vector_constant PARAMS ((rtx, enum machine_mode));
300 static int easy_vector_same PARAMS ((rtx, enum machine_mode));
301 static bool is_ev64_opaque_type PARAMS ((tree));
302 static rtx rs6000_dwarf_register_span PARAMS ((rtx));
303 static rtx rs6000_legitimize_tls_address PARAMS ((rtx, enum tls_model));
304 static rtx rs6000_tls_get_addr PARAMS ((void));
305 static rtx rs6000_got_sym PARAMS ((void));
306 static inline int rs6000_tls_symbol_ref_1 PARAMS ((rtx *, void *));
307 static const char *rs6000_get_some_local_dynamic_name PARAMS ((void));
308 static int rs6000_get_some_local_dynamic_name_1 PARAMS ((rtx *, void *));
310 /* Hash table stuff for keeping track of TOC entries. */
312 struct toc_hash_struct GTY(())
314 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
315 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
316 rtx key;
317 enum machine_mode key_mode;
318 int labelno;
321 static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
/* Default register names.  Order: 32 GPRs, 32 FPRs, mq/lr/ctr/ap,
   8 CR fields, xer, 32 AltiVec registers, vrsave/vscr, and the two
   SPE registers -- 113 entries total, each at most 7 chars + NUL.  */
char rs6000_reg_names[][8] =
{
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
     "mq", "lr", "ctr","ap",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "xer",
      /* AltiVec registers.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
     "vrsave", "vscr",
      /* SPE registers.  */
      "spe_acc", "spefscr"
};

#ifdef TARGET_REGNAMES
/* Alternate (%-prefixed) register names, copied over rs6000_reg_names
   when -mregnames is in effect; must stay index-for-index parallel to
   the table above.  */
static const char alt_reg_names[][8] =
{
   "%r0",   "%r1",  "%r2",  "%r3",  "%r4",  "%r5",  "%r6",  "%r7",
   "%r8",   "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
  "%r16",  "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
  "%r24",  "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
   "%f0",   "%f1",  "%f2",  "%f3",  "%f4",  "%f5",  "%f6",  "%f7",
   "%f8",   "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
  "%f16",  "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
  "%f24",  "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
    "mq",    "lr",  "ctr",   "ap",
  "%cr0",  "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
   "xer",
  /* AltiVec registers.  */
   "%v0",  "%v1",  "%v2",  "%v3",  "%v4",  "%v5",  "%v6", "%v7",
   "%v8",  "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
  "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
  "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
  "vrsave", "vscr",
  /* SPE registers.  */
  "spe_acc", "spefscr"
};
#endif
372 #ifndef MASK_STRICT_ALIGN
373 #define MASK_STRICT_ALIGN 0
374 #endif
375 #ifndef TARGET_PROFILE_KERNEL
376 #define TARGET_PROFILE_KERNEL 0
377 #endif
379 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
380 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
382 /* Return 1 for a symbol ref for a thread-local storage symbol. */
383 #define RS6000_SYMBOL_REF_TLS_P(RTX) \
384 (GET_CODE (RTX) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (RTX) != 0)
386 /* Initialize the GCC target structure. */
387 #undef TARGET_ATTRIBUTE_TABLE
388 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
389 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
390 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
392 #undef TARGET_ASM_ALIGNED_DI_OP
393 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
395 /* Default unaligned ops are only provided for ELF. Find the ops needed
396 for non-ELF systems. */
397 #ifndef OBJECT_FORMAT_ELF
398 #if TARGET_XCOFF
399 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
400 64-bit targets. */
401 #undef TARGET_ASM_UNALIGNED_HI_OP
402 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
403 #undef TARGET_ASM_UNALIGNED_SI_OP
404 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
405 #undef TARGET_ASM_UNALIGNED_DI_OP
406 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
407 #else
408 /* For Darwin. */
409 #undef TARGET_ASM_UNALIGNED_HI_OP
410 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
411 #undef TARGET_ASM_UNALIGNED_SI_OP
412 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
413 #endif
414 #endif
416 /* This hook deals with fixups for relocatable code and DI-mode objects
417 in 64-bit code. */
418 #undef TARGET_ASM_INTEGER
419 #define TARGET_ASM_INTEGER rs6000_assemble_integer
421 #ifdef HAVE_GAS_HIDDEN
422 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
423 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
424 #endif
426 #undef TARGET_HAVE_TLS
427 #define TARGET_HAVE_TLS HAVE_AS_TLS
429 #undef TARGET_CANNOT_FORCE_CONST_MEM
430 #define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
432 #undef TARGET_ASM_FUNCTION_PROLOGUE
433 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
434 #undef TARGET_ASM_FUNCTION_EPILOGUE
435 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
437 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
438 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE rs6000_use_dfa_pipeline_interface
439 #undef TARGET_SCHED_VARIABLE_ISSUE
440 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
442 #undef TARGET_SCHED_ISSUE_RATE
443 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
444 #undef TARGET_SCHED_ADJUST_COST
445 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
446 #undef TARGET_SCHED_ADJUST_PRIORITY
447 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
449 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
450 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
452 #undef TARGET_INIT_BUILTINS
453 #define TARGET_INIT_BUILTINS rs6000_init_builtins
455 #undef TARGET_EXPAND_BUILTIN
456 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
458 #if TARGET_MACHO
459 #undef TARGET_BINDS_LOCAL_P
460 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
461 #endif
463 #undef TARGET_ASM_OUTPUT_MI_THUNK
464 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
466 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
467 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
469 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
470 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
472 #undef TARGET_RTX_COSTS
473 #define TARGET_RTX_COSTS rs6000_rtx_costs
474 #undef TARGET_ADDRESS_COST
475 #define TARGET_ADDRESS_COST hook_int_rtx_0
477 #undef TARGET_VECTOR_OPAQUE_P
478 #define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type
480 #undef TARGET_DWARF_REGISTER_SPAN
481 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
483 struct gcc_target targetm = TARGET_INITIALIZER;
485 /* Override command line options. Mostly we process the processor
486 type and sometimes adjust other TARGET_ options. */
488 void
489 rs6000_override_options (default_cpu)
490 const char *default_cpu;
492 size_t i, j;
493 struct rs6000_cpu_select *ptr;
495 /* Simplify the entries below by making a mask for any POWER
496 variant and any PowerPC variant. */
498 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
499 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
500 | MASK_PPC_GFXOPT | MASK_POWERPC64)
501 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
503 static struct ptt
505 const char *const name; /* Canonical processor name. */
506 const enum processor_type processor; /* Processor type enum value. */
507 const int target_enable; /* Target flags to enable. */
508 const int target_disable; /* Target flags to disable. */
509 } const processor_target_table[]
510 = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
511 POWER_MASKS | POWERPC_MASKS},
512 {"power", PROCESSOR_POWER,
513 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
514 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
515 {"power2", PROCESSOR_POWER,
516 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
517 POWERPC_MASKS | MASK_NEW_MNEMONICS},
518 {"power3", PROCESSOR_PPC630,
519 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
520 POWER_MASKS},
521 {"power4", PROCESSOR_POWER4,
522 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
523 POWER_MASKS},
524 {"powerpc", PROCESSOR_POWERPC,
525 MASK_POWERPC | MASK_NEW_MNEMONICS,
526 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
527 {"powerpc64", PROCESSOR_POWERPC64,
528 MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
529 POWER_MASKS | POWERPC_OPT_MASKS},
530 {"rios", PROCESSOR_RIOS1,
531 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
532 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
533 {"rios1", PROCESSOR_RIOS1,
534 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
535 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
536 {"rsc", PROCESSOR_PPC601,
537 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
538 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
539 {"rsc1", PROCESSOR_PPC601,
540 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
541 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
542 {"rios2", PROCESSOR_RIOS2,
543 MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
544 POWERPC_MASKS | MASK_NEW_MNEMONICS},
545 {"rs64a", PROCESSOR_RS64A,
546 MASK_POWERPC | MASK_NEW_MNEMONICS,
547 POWER_MASKS | POWERPC_OPT_MASKS},
548 {"401", PROCESSOR_PPC403,
549 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
550 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
551 {"403", PROCESSOR_PPC403,
552 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
553 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
554 {"405", PROCESSOR_PPC405,
555 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
556 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
557 {"405fp", PROCESSOR_PPC405,
558 MASK_POWERPC | MASK_NEW_MNEMONICS,
559 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
560 {"440", PROCESSOR_PPC440,
561 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
562 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
563 {"440fp", PROCESSOR_PPC440,
564 MASK_POWERPC | MASK_NEW_MNEMONICS,
565 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
566 {"505", PROCESSOR_MPCCORE,
567 MASK_POWERPC | MASK_NEW_MNEMONICS,
568 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
569 {"601", PROCESSOR_PPC601,
570 MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
571 MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
572 {"602", PROCESSOR_PPC603,
573 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
574 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
575 {"603", PROCESSOR_PPC603,
576 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
577 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
578 {"603e", PROCESSOR_PPC603,
579 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
580 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
581 {"ec603e", PROCESSOR_PPC603,
582 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
583 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
584 {"604", PROCESSOR_PPC604,
585 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
586 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
587 {"604e", PROCESSOR_PPC604e,
588 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
589 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
590 {"620", PROCESSOR_PPC620,
591 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
592 POWER_MASKS},
593 {"630", PROCESSOR_PPC630,
594 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
595 POWER_MASKS},
596 {"740", PROCESSOR_PPC750,
597 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
598 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
599 {"750", PROCESSOR_PPC750,
600 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
601 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
602 {"7400", PROCESSOR_PPC7400,
603 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
604 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
605 {"7450", PROCESSOR_PPC7450,
606 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
607 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
608 {"8540", PROCESSOR_PPC8540,
609 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
610 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
611 {"801", PROCESSOR_MPCCORE,
612 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
613 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
614 {"821", PROCESSOR_MPCCORE,
615 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
616 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
617 {"823", PROCESSOR_MPCCORE,
618 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
619 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
620 {"860", PROCESSOR_MPCCORE,
621 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
622 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64}};
624 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
626 /* Save current -mmultiple/-mno-multiple status. */
627 int multiple = TARGET_MULTIPLE;
628 /* Save current -mstring/-mno-string status. */
629 int string = TARGET_STRING;
631 /* Identify the processor type. */
632 rs6000_select[0].string = default_cpu;
633 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
635 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
637 ptr = &rs6000_select[i];
638 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
640 for (j = 0; j < ptt_size; j++)
641 if (! strcmp (ptr->string, processor_target_table[j].name))
643 if (ptr->set_tune_p)
644 rs6000_cpu = processor_target_table[j].processor;
646 if (ptr->set_arch_p)
648 target_flags |= processor_target_table[j].target_enable;
649 target_flags &= ~processor_target_table[j].target_disable;
651 break;
654 if (j == ptt_size)
655 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
659 if (TARGET_E500)
660 rs6000_isel = 1;
662 /* If we are optimizing big endian systems for space, use the load/store
663 multiple and string instructions. */
664 if (BYTES_BIG_ENDIAN && optimize_size)
665 target_flags |= MASK_MULTIPLE | MASK_STRING;
667 /* If -mmultiple or -mno-multiple was explicitly used, don't
668 override with the processor default */
669 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
670 target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;
672 /* If -mstring or -mno-string was explicitly used, don't override
673 with the processor default. */
674 if ((target_flags_explicit & MASK_STRING) != 0)
675 target_flags = (target_flags & ~MASK_STRING) | string;
677 /* Don't allow -mmultiple or -mstring on little endian systems
678 unless the cpu is a 750, because the hardware doesn't support the
679 instructions used in little endian mode, and causes an alignment
680 trap. The 750 does not cause an alignment trap (except when the
681 target is unaligned). */
683 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
685 if (TARGET_MULTIPLE)
687 target_flags &= ~MASK_MULTIPLE;
688 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
689 warning ("-mmultiple is not supported on little endian systems");
692 if (TARGET_STRING)
694 target_flags &= ~MASK_STRING;
695 if ((target_flags_explicit & MASK_STRING) != 0)
696 warning ("-mstring is not supported on little endian systems");
700 /* Set debug flags */
701 if (rs6000_debug_name)
703 if (! strcmp (rs6000_debug_name, "all"))
704 rs6000_debug_stack = rs6000_debug_arg = 1;
705 else if (! strcmp (rs6000_debug_name, "stack"))
706 rs6000_debug_stack = 1;
707 else if (! strcmp (rs6000_debug_name, "arg"))
708 rs6000_debug_arg = 1;
709 else
710 error ("unknown -mdebug-%s switch", rs6000_debug_name);
713 if (rs6000_traceback_name)
715 if (! strncmp (rs6000_traceback_name, "full", 4))
716 rs6000_traceback = traceback_full;
717 else if (! strncmp (rs6000_traceback_name, "part", 4))
718 rs6000_traceback = traceback_part;
719 else if (! strncmp (rs6000_traceback_name, "no", 2))
720 rs6000_traceback = traceback_none;
721 else
722 error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
723 rs6000_traceback_name);
726 /* Set size of long double */
727 rs6000_long_double_type_size = 64;
728 if (rs6000_long_double_size_string)
730 char *tail;
731 int size = strtol (rs6000_long_double_size_string, &tail, 10);
732 if (*tail != '\0' || (size != 64 && size != 128))
733 error ("Unknown switch -mlong-double-%s",
734 rs6000_long_double_size_string);
735 else
736 rs6000_long_double_type_size = size;
739 /* Handle -mabi= options. */
740 rs6000_parse_abi_options ();
742 /* Handle generic -mFOO=YES/NO options. */
743 rs6000_parse_yes_no_option ("vrsave", rs6000_altivec_vrsave_string,
744 &rs6000_altivec_vrsave);
745 rs6000_parse_yes_no_option ("isel", rs6000_isel_string,
746 &rs6000_isel);
747 rs6000_parse_yes_no_option ("spe", rs6000_spe_string, &rs6000_spe);
748 rs6000_parse_yes_no_option ("float-gprs", rs6000_float_gprs_string,
749 &rs6000_float_gprs);
751 /* Handle -mtls-size option. */
752 rs6000_parse_tls_size_option ();
754 #ifdef SUBTARGET_OVERRIDE_OPTIONS
755 SUBTARGET_OVERRIDE_OPTIONS;
756 #endif
757 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
758 SUBSUBTARGET_OVERRIDE_OPTIONS;
759 #endif
761 if (TARGET_E500)
763 /* The e500 does not have string instructions, and we set
764 MASK_STRING above when optimizing for size. */
765 if ((target_flags & MASK_STRING) != 0)
766 target_flags = target_flags & ~MASK_STRING;
768 /* No SPE means 64-bit long doubles, even if an E500. */
769 if (rs6000_spe_string != 0
770 && !strcmp (rs6000_spe_string, "no"))
771 rs6000_long_double_type_size = 64;
773 else if (rs6000_select[1].string != NULL)
775 /* For the powerpc-eabispe configuration, we set all these by
776 default, so let's unset them if we manually set another
777 CPU that is not the E500. */
778 if (rs6000_abi_string == 0)
779 rs6000_spe_abi = 0;
780 if (rs6000_spe_string == 0)
781 rs6000_spe = 0;
782 if (rs6000_float_gprs_string == 0)
783 rs6000_float_gprs = 0;
784 if (rs6000_isel_string == 0)
785 rs6000_isel = 0;
786 if (rs6000_long_double_size_string == 0)
787 rs6000_long_double_type_size = 64;
790 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
791 using TARGET_OPTIONS to handle a toggle switch, but we're out of
792 bits in target_flags so TARGET_SWITCHES cannot be used.
793 Assumption here is that rs6000_longcall_switch points into the
794 text of the complete option, rather than being a copy, so we can
795 scan back for the presence or absence of the no- modifier. */
796 if (rs6000_longcall_switch)
798 const char *base = rs6000_longcall_switch;
799 while (base[-1] != 'm') base--;
801 if (*rs6000_longcall_switch != '\0')
802 error ("invalid option `%s'", base);
803 rs6000_default_long_calls = (base[0] != 'n');
806 #ifdef TARGET_REGNAMES
807 /* If the user desires alternate register names, copy in the
808 alternate names now. */
809 if (TARGET_REGNAMES)
810 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
811 #endif
813 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
814 If -maix-struct-return or -msvr4-struct-return was explicitly
815 used, don't override with the ABI default. */
816 if ((target_flags_explicit & MASK_AIX_STRUCT_RET) == 0)
818 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
819 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
820 else
821 target_flags |= MASK_AIX_STRUCT_RET;
824 if (TARGET_LONG_DOUBLE_128
825 && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
826 real_format_for_mode[TFmode - QFmode] = &ibm_extended_format;
828 /* Allocate an alias set for register saves & restores from stack. */
829 rs6000_sr_alias_set = new_alias_set ();
831 if (TARGET_TOC)
832 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
834 /* We can only guarantee the availability of DI pseudo-ops when
835 assembling for 64-bit targets. */
836 if (!TARGET_64BIT)
838 targetm.asm_out.aligned_op.di = NULL;
839 targetm.asm_out.unaligned_op.di = NULL;
842 /* Set maximum branch target alignment at two instructions, eight bytes. */
843 align_jumps_max_skip = 8;
844 align_loops_max_skip = 8;
846 /* Arrange to save and restore machine status around nested functions. */
847 init_machine_status = rs6000_init_machine_status;
/* Handle generic options of the form -mfoo=yes/no.
   NAME is the option name (used only in diagnostics).
   VALUE is the option value.
   FLAG points to the int that is set to 1 for "yes" and 0 for "no";
   a null VALUE leaves *FLAG untouched.  */
static void
rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
{
  if (value == 0)
    return;

  if (strcmp (value, "yes") == 0)
    *flag = 1;
  else if (strcmp (value, "no") == 0)
    *flag = 0;
  else
    error ("unknown -m%s= option specified: '%s'", name, value);
}
868 /* Handle -mabi= options. */
869 static void
870 rs6000_parse_abi_options ()
872 if (rs6000_abi_string == 0)
873 return;
874 else if (! strcmp (rs6000_abi_string, "altivec"))
875 rs6000_altivec_abi = 1;
876 else if (! strcmp (rs6000_abi_string, "no-altivec"))
877 rs6000_altivec_abi = 0;
878 else if (! strcmp (rs6000_abi_string, "spe"))
880 rs6000_spe_abi = 1;
881 if (!TARGET_SPE_ABI)
882 error ("not configured for ABI: '%s'", rs6000_abi_string);
885 else if (! strcmp (rs6000_abi_string, "no-spe"))
886 rs6000_spe_abi = 0;
887 else
888 error ("unknown ABI specified: '%s'", rs6000_abi_string);
891 /* Validate and record the size specified with the -mtls-size option. */
893 static void
894 rs6000_parse_tls_size_option ()
896 if (rs6000_tls_size_string == 0)
897 return;
898 else if (strcmp (rs6000_tls_size_string, "16") == 0)
899 rs6000_tls_size = 16;
900 else if (strcmp (rs6000_tls_size_string, "32") == 0)
901 rs6000_tls_size = 32;
902 else if (strcmp (rs6000_tls_size_string, "64") == 0)
903 rs6000_tls_size = 64;
904 else
905 error ("bad value `%s' for -mtls-size switch", rs6000_tls_size_string);
908 void
909 optimization_options (level, size)
910 int level ATTRIBUTE_UNUSED;
911 int size ATTRIBUTE_UNUSED;
 915 /* Do anything needed at the start of the asm file.  With -fverbose-asm,
     emit an assembler comment listing the cpu/tune selections and (on ELF
     targets) the -msdata and -G settings in effect.  FILE is the asm
     output stream; DEFAULT_CPU names the configured default processor. */
 917 void
 918 rs6000_file_start (file, default_cpu)
 919 FILE *file;
 920 const char *default_cpu;
 922 size_t i;
 923 char buffer[80];
 924 const char *start = buffer;
 925 struct rs6000_cpu_select *ptr;
 927 if (flag_verbose_asm)
     /* START points at the comment header until something has been
	printed; thereafter it is reset to "" so the header appears
	at most once. */
 929 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
 930 rs6000_select[0].string = default_cpu;
 932 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
 934 ptr = &rs6000_select[i];
 935 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
 937 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
 938 start = "";
 942 #ifdef USING_ELFOS_H
 943 switch (rs6000_sdata)
 945 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
 946 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
 947 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
 948 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
     /* Also report the small-data threshold when small data is in use. */
 951 if (rs6000_sdata && g_switch_value)
 953 fprintf (file, "%s -G %d", start, g_switch_value);
 954 start = "";
 956 #endif
     /* *START == '\0' means at least one option was printed above, so
	terminate the comment line. */
 958 if (*start == '\0')
 959 putc ('\n', file);
963 /* Return nonzero if this function is known to have a null epilogue. */
966 direct_return ()
968 if (reload_completed)
970 rs6000_stack_t *info = rs6000_stack_info ();
972 if (info->first_gp_reg_save == 32
973 && info->first_fp_reg_save == 64
974 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
975 && ! info->lr_save_p
976 && ! info->cr_save_p
977 && info->vrsave_mask == 0
978 && ! info->push_p)
979 return 1;
982 return 0;
985 /* Returns 1 always. */
988 any_operand (op, mode)
989 rtx op ATTRIBUTE_UNUSED;
990 enum machine_mode mode ATTRIBUTE_UNUSED;
992 return 1;
995 /* Returns 1 if op is the count register. */
997 count_register_operand (op, mode)
998 rtx op;
999 enum machine_mode mode ATTRIBUTE_UNUSED;
1001 if (GET_CODE (op) != REG)
1002 return 0;
1004 if (REGNO (op) == COUNT_REGISTER_REGNUM)
1005 return 1;
1007 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
1008 return 1;
1010 return 0;
1013 /* Returns 1 if op is an altivec register. */
1015 altivec_register_operand (op, mode)
1016 rtx op;
1017 enum machine_mode mode ATTRIBUTE_UNUSED;
1020 return (register_operand (op, mode)
1021 && (GET_CODE (op) != REG
1022 || REGNO (op) > FIRST_PSEUDO_REGISTER
1023 || ALTIVEC_REGNO_P (REGNO (op))));
1027 xer_operand (op, mode)
1028 rtx op;
1029 enum machine_mode mode ATTRIBUTE_UNUSED;
1031 if (GET_CODE (op) != REG)
1032 return 0;
1034 if (XER_REGNO_P (REGNO (op)))
1035 return 1;
1037 return 0;
1040 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
1041 by such constants completes more quickly. */
1044 s8bit_cint_operand (op, mode)
1045 rtx op;
1046 enum machine_mode mode ATTRIBUTE_UNUSED;
1048 return ( GET_CODE (op) == CONST_INT
1049 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
1052 /* Return 1 if OP is a constant that can fit in a D field. */
1055 short_cint_operand (op, mode)
1056 rtx op;
1057 enum machine_mode mode ATTRIBUTE_UNUSED;
1059 return (GET_CODE (op) == CONST_INT
1060 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
1063 /* Similar for an unsigned D field. */
1066 u_short_cint_operand (op, mode)
1067 rtx op;
1068 enum machine_mode mode ATTRIBUTE_UNUSED;
1070 return (GET_CODE (op) == CONST_INT
1071 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
1074 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
1077 non_short_cint_operand (op, mode)
1078 rtx op;
1079 enum machine_mode mode ATTRIBUTE_UNUSED;
1081 return (GET_CODE (op) == CONST_INT
1082 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
1085 /* Returns 1 if OP is a CONST_INT that is a positive value
1086 and an exact power of 2. */
1089 exact_log2_cint_operand (op, mode)
1090 rtx op;
1091 enum machine_mode mode ATTRIBUTE_UNUSED;
1093 return (GET_CODE (op) == CONST_INT
1094 && INTVAL (op) > 0
1095 && exact_log2 (INTVAL (op)) >= 0);
 1098 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
 1099 ctr, or lr). */
 1102 gpc_reg_operand (op, mode)
 1103 rtx op;
 1104 enum machine_mode mode;
     /* Accept anything register_operand does, unless it is a REG whose
	number falls in the special range: hard registers below MQ_REGNO
	(GPRs and FPRs) are fine, as is anything from ARG_POINTER_REGNUM
	upward provided it is not an XER register. */
 1106 return (register_operand (op, mode)
 1107 && (GET_CODE (op) != REG
 1108 || (REGNO (op) >= ARG_POINTER_REGNUM
 1109 && !XER_REGNO_P (REGNO (op)))
 1110 || REGNO (op) < MQ_REGNO));
1113 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1114 CR field. */
1117 cc_reg_operand (op, mode)
1118 rtx op;
1119 enum machine_mode mode;
1121 return (register_operand (op, mode)
1122 && (GET_CODE (op) != REG
1123 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1124 || CR_REGNO_P (REGNO (op))));
1127 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1128 CR field that isn't CR0. */
1131 cc_reg_not_cr0_operand (op, mode)
1132 rtx op;
1133 enum machine_mode mode;
1135 return (register_operand (op, mode)
1136 && (GET_CODE (op) != REG
1137 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1138 || CR_REGNO_NOT_CR0_P (REGNO (op))));
1141 /* Returns 1 if OP is either a constant integer valid for a D-field or
1142 a non-special register. If a register, it must be in the proper
1143 mode unless MODE is VOIDmode. */
1146 reg_or_short_operand (op, mode)
1147 rtx op;
1148 enum machine_mode mode;
1150 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1153 /* Similar, except check if the negation of the constant would be
1154 valid for a D-field. */
1157 reg_or_neg_short_operand (op, mode)
1158 rtx op;
1159 enum machine_mode mode;
1161 if (GET_CODE (op) == CONST_INT)
1162 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
1164 return gpc_reg_operand (op, mode);
1167 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1168 a non-special register. If a register, it must be in the proper
1169 mode unless MODE is VOIDmode. */
1172 reg_or_aligned_short_operand (op, mode)
1173 rtx op;
1174 enum machine_mode mode;
1176 if (gpc_reg_operand (op, mode))
1177 return 1;
1178 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1179 return 1;
1181 return 0;
1185 /* Return 1 if the operand is either a register or an integer whose
1186 high-order 16 bits are zero. */
1189 reg_or_u_short_operand (op, mode)
1190 rtx op;
1191 enum machine_mode mode;
1193 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1196 /* Return 1 is the operand is either a non-special register or ANY
1197 constant integer. */
1200 reg_or_cint_operand (op, mode)
1201 rtx op;
1202 enum machine_mode mode;
1204 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
 1207 /* Return 1 if the operand is either a non-special register or ANY
 1208 32-bit signed constant integer. */
 1211 reg_or_arith_cint_operand (op, mode)
 1212 rtx op;
 1213 enum machine_mode mode;
 1215 return (gpc_reg_operand (op, mode)
 1216 || (GET_CODE (op) == CONST_INT
     /* On hosts with wide HOST_WIDE_INT, check the value biased by
	2^31 fits in 32 bits, i.e. -2^31 <= INTVAL <= 2^31-1.  On
	32-bit hosts every CONST_INT trivially qualifies, so no test
	is emitted. */
 1217 #if HOST_BITS_PER_WIDE_INT != 32
 1218 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
 1219 < (unsigned HOST_WIDE_INT) 0x100000000ll)
 1220 #endif
 1224 /* Return 1 if the operand is either a non-special register or a 32-bit
 1225 signed constant integer valid for 64-bit addition. */
 1228 reg_or_add_cint64_operand (op, mode)
 1229 rtx op;
 1230 enum machine_mode mode;
 1232 return (gpc_reg_operand (op, mode)
 1233 || (GET_CODE (op) == CONST_INT
     /* The bound 0x7fff8000 is the largest value reachable by an
	addis (shifted 16-bit) plus addi (signed 16-bit) pair without
	wrapping.  On 64-bit hosts the biased compare additionally
	enforces the lower bound. */
 1234 #if HOST_BITS_PER_WIDE_INT == 32
 1235 && INTVAL (op) < 0x7fff8000
 1236 #else
 1237 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
 1238 < 0x100000000ll)
 1239 #endif
 1243 /* Return 1 if the operand is either a non-special register or a 32-bit
 1244 signed constant integer valid for 64-bit subtraction. */
 1247 reg_or_sub_cint64_operand (op, mode)
 1248 rtx op;
 1249 enum machine_mode mode;
     /* Same bounds as reg_or_add_cint64_operand, but applied to the
	negated value, since subtraction is implemented by adding the
	negation. */
 1251 return (gpc_reg_operand (op, mode)
 1252 || (GET_CODE (op) == CONST_INT
 1253 #if HOST_BITS_PER_WIDE_INT == 32
 1254 && (- INTVAL (op)) < 0x7fff8000
 1255 #else
 1256 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
 1257 < 0x100000000ll)
 1258 #endif
 1262 /* Return 1 if the operand is either a non-special register or ANY
 1263 32-bit unsigned constant integer. */
 1266 reg_or_logical_cint_operand (op, mode)
 1267 rtx op;
 1268 enum machine_mode mode;
 1270 if (GET_CODE (op) == CONST_INT)
     /* When MODE is wider than a host word a CONST_INT is only valid
	if non-negative (its high half is implicit sign extension). */
 1272 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
 1274 if (GET_MODE_BITSIZE (mode) <= 32)
 1275 abort ();
 1277 if (INTVAL (op) < 0)
 1278 return 0;
     /* Otherwise require that no bits above the low 32 survive after
	masking to MODE's width. */
 1281 return ((INTVAL (op) & GET_MODE_MASK (mode)
 1282 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
 1284 else if (GET_CODE (op) == CONST_DOUBLE)
     /* A CONST_DOUBLE integer only arises for DImode values wider than
	the host word; it qualifies when its high word is zero. */
 1286 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
 1287 || mode != DImode)
 1288 abort ();
 1290 return CONST_DOUBLE_HIGH (op) == 0;
 1292 else
 1293 return gpc_reg_operand (op, mode);
1296 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1299 got_operand (op, mode)
1300 rtx op;
1301 enum machine_mode mode ATTRIBUTE_UNUSED;
1303 return (GET_CODE (op) == SYMBOL_REF
1304 || GET_CODE (op) == CONST
1305 || GET_CODE (op) == LABEL_REF);
1308 /* Return 1 if the operand is a simple references that can be loaded via
1309 the GOT (labels involving addition aren't allowed). */
1312 got_no_const_operand (op, mode)
1313 rtx op;
1314 enum machine_mode mode ATTRIBUTE_UNUSED;
1316 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
 1319 /* Return the number of instructions it takes to form the constant
 1320 VALUE in an integer register. */
 1322 static int
 1323 num_insns_constant_wide (value)
 1324 HOST_WIDE_INT value;
 1326 /* signed constant loadable with {cal|addi} */
 1327 if (CONST_OK_FOR_LETTER_P (value, 'I'))
 1328 return 1;
 1330 /* constant loadable with {cau|addis} */
 1331 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
 1332 return 1;
 1334 #if HOST_BITS_PER_WIDE_INT == 64
 1335 else if (TARGET_POWERPC64)
     /* Split into a sign-extended low 32-bit half and the remaining
	high bits.  HIGH is first shifted by only 31 so that bit 31 is
	included in the all-zeros/all-ones test below: if bits 63..31
	are uniform, the value is a sign-extended 32-bit constant. */
 1337 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
 1338 HOST_WIDE_INT high = value >> 31;
 1340 if (high == 0 || high == -1)
 1341 return 2;
     /* Complete the shift to 32 for the recursive cost of HIGH; the
	extra 1 below accounts for the rldicr/oris-style combine step. */
 1343 high >>= 1;
 1345 if (low == 0)
 1346 return num_insns_constant_wide (high) + 1;
 1347 else
 1348 return (num_insns_constant_wide (high)
 1349 + num_insns_constant_wide (low) + 1);
 1351 #endif
 1353 else
 1354 return 2;
     /* Return the number of instructions needed to load constant OP of
	mode MODE into an integer register.  OP may be a CONST_INT, an
	SFmode CONST_DOUBLE, or an integer/DFmode CONST_DOUBLE. */
 1358 num_insns_constant (op, mode)
 1359 rtx op;
 1360 enum machine_mode mode;
 1362 if (GET_CODE (op) == CONST_INT)
 1364 #if HOST_BITS_PER_WIDE_INT == 64
     /* A 64-bit value that is not a sign-extended 32-bit value but IS
	a valid mask can be built with li/lis plus one rotate. */
 1365 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
 1366 && mask64_operand (op, mode))
 1367 return 2;
 1368 else
 1369 #endif
 1370 return num_insns_constant_wide (INTVAL (op));
 1373 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
     /* Cost of the 32-bit image of the single-precision value. */
 1375 long l;
 1376 REAL_VALUE_TYPE rv;
 1378 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
 1379 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
 1380 return num_insns_constant_wide ((HOST_WIDE_INT) l);
 1383 else if (GET_CODE (op) == CONST_DOUBLE)
 1385 HOST_WIDE_INT low;
 1386 HOST_WIDE_INT high;
 1387 long l[2];
 1388 REAL_VALUE_TYPE rv;
 1389 int endian = (WORDS_BIG_ENDIAN == 0);
     /* Integer CONST_DOUBLEs carry their halves directly; floating
	ones are converted to the target double image first. */
 1391 if (mode == VOIDmode || mode == DImode)
 1393 high = CONST_DOUBLE_HIGH (op);
 1394 low = CONST_DOUBLE_LOW (op);
 1396 else
 1398 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
 1399 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
 1400 high = l[endian];
 1401 low = l[1 - endian];
     /* 32-bit: each half is loaded separately.  64-bit: the halves are
	combined, costing an extra insn unless one half is trivial. */
 1404 if (TARGET_32BIT)
 1405 return (num_insns_constant_wide (low)
 1406 + num_insns_constant_wide (high));
 1408 else
 1410 if (high == 0 && low >= 0)
 1411 return num_insns_constant_wide (low);
 1413 else if (high == -1 && low < 0)
 1414 return num_insns_constant_wide (low);
 1416 else if (mask64_operand (op, mode))
 1417 return 2;
 1419 else if (low == 0)
 1420 return num_insns_constant_wide (high) + 1;
 1422 else
 1423 return (num_insns_constant_wide (high)
 1424 + num_insns_constant_wide (low) + 1);
 1428 else
 1429 abort ();
 1432 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
 1433 register with one instruction per word. We only do this if we can
 1434 safely read CONST_DOUBLE_{LOW,HIGH}. */
 1437 easy_fp_constant (op, mode)
 1438 rtx op;
 1439 enum machine_mode mode;
 1441 if (GET_CODE (op) != CONST_DOUBLE
 1442 || GET_MODE (op) != mode
 1443 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
 1444 return 0;
 1446 /* Consider all constants with -msoft-float to be easy. */
 1447 if ((TARGET_SOFT_FLOAT || !TARGET_FPRS)
 1448 && mode != DImode)
 1449 return 1;
 1451 /* If we are using V.4 style PIC, consider all constants to be hard. */
 1452 if (flag_pic && DEFAULT_ABI == ABI_V4)
 1453 return 0;
 1455 #ifdef TARGET_RELOCATABLE
 1456 /* Similarly if we are using -mrelocatable, consider all constants
 1457 to be hard. */
 1458 if (TARGET_RELOCATABLE)
 1459 return 0;
 1460 #endif
     /* For each float width, the constant is easy exactly when every
	32-bit word of its target image loads in a single insn. */
 1462 if (mode == TFmode)
 1464 long k[4];
 1465 REAL_VALUE_TYPE rv;
 1467 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
 1468 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
 1470 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
 1471 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1
 1472 && num_insns_constant_wide ((HOST_WIDE_INT) k[2]) == 1
 1473 && num_insns_constant_wide ((HOST_WIDE_INT) k[3]) == 1);
 1476 else if (mode == DFmode)
 1478 long k[2];
 1479 REAL_VALUE_TYPE rv;
 1481 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
 1482 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
 1484 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
 1485 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
 1488 else if (mode == SFmode)
 1490 long l;
 1491 REAL_VALUE_TYPE rv;
 1493 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
 1494 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
 1496 return num_insns_constant_wide (l) == 1;
     /* DImode CONST_DOUBLE: easy when the low word is zero on 64-bit,
	or when the whole value loads in at most two insns. */
 1499 else if (mode == DImode)
 1500 return ((TARGET_POWERPC64
 1501 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
 1502 || (num_insns_constant (op, DImode) <= 2));
 1504 else if (mode == SImode)
 1505 return 1;
 1506 else
 1507 abort ();
1510 /* Return non zero if all elements of a vector have the same value. */
1512 static int
1513 easy_vector_same (op, mode)
1514 rtx op;
1515 enum machine_mode mode ATTRIBUTE_UNUSED;
1517 int units, i, cst;
1519 units = CONST_VECTOR_NUNITS (op);
1521 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1522 for (i = 1; i < units; ++i)
1523 if (INTVAL (CONST_VECTOR_ELT (op, i)) != cst)
1524 break;
1525 if (i == units)
1526 return 1;
1527 return 0;
 1530 /* Return 1 if the operand is a CONST_VECTOR that can be put into a
 1531 register without using memory. */
 1534 easy_vector_constant (op, mode)
 1535 rtx op;
 1536 enum machine_mode mode;
 1538 int cst, cst2;
 1540 if (GET_CODE (op) != CONST_VECTOR
 1541 || (!TARGET_ALTIVEC
 1542 && !TARGET_SPE))
 1543 return 0;
     /* The all-zeros vector is always easy on either unit. */
 1545 if (zero_constant (op, mode)
 1546 && ((TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
 1547 || (TARGET_SPE && SPE_VECTOR_MODE (mode))))
 1548 return 1;
 1550 if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT)
 1551 return 0;
 1553 if (TARGET_SPE && mode == V1DImode)
 1554 return 0;
 1556 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
 1557 cst2 = INTVAL (CONST_VECTOR_ELT (op, 1));
 1559 /* Limit SPE vectors to 15 bits signed. These we can generate with:
 1560 li r0, CONSTANT1
 1561 evmergelo r0, r0, r0
 1562 li r0, CONSTANT2
 1564 I don't know how efficient it would be to allow bigger constants,
 1565 considering we'll have an extra 'ori' for every 'li'. I doubt 5
 1566 instructions is better than a 64-bit memory load, but I don't
 1567 have the e500 timing specs. */
 1568 if (TARGET_SPE && mode == V2SImode
 1569 && cst >= -0x7fff && cst <= 0x7fff
 1570 && cst2 >= -0x7fff && cst2 <= 0x7fff)
 1571 return 1;
     /* AltiVec: splat-immediate range (-16..15), or a value that is
	twice such a splat (synthesized as splat + add-self). */
 1573 if (TARGET_ALTIVEC && EASY_VECTOR_15 (cst, op, mode))
 1574 return 1;
 1576 if (TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode))
 1577 return 1;
 1579 return 0;
1582 /* Same as easy_vector_constant but only for EASY_VECTOR_15_ADD_SELF. */
1585 easy_vector_constant_add_self (op, mode)
1586 rtx op;
1587 enum machine_mode mode;
1589 int cst;
1591 if (!easy_vector_constant (op, mode))
1592 return 0;
1594 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1596 return TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode);
     /* Return the assembler template for moving the easy vector
	constant OPERANDS[1] into register OPERANDS[0].  May rewrite
	operands[1] (and, for SPE, operands[2]) with the splat value. */
 1599 const char *
 1600 output_vec_const_move (operands)
 1601 rtx *operands;
 1603 int cst, cst2;
 1604 enum machine_mode mode;
 1605 rtx dest, vec;
 1607 dest = operands[0];
 1608 vec = operands[1];
 1610 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
 1611 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
 1612 mode = GET_MODE (dest);
 1614 if (TARGET_ALTIVEC)
 1616 if (zero_constant (vec, mode))
 1617 return "vxor %0,%0,%0";
 1618 else if (EASY_VECTOR_15 (cst, vec, mode))
     /* Splat immediate: pick the splat insn matching the element width. */
 1620 operands[1] = GEN_INT (cst);
 1621 switch (mode)
 1623 case V4SImode:
 1624 return "vspltisw %0,%1";
 1625 case V8HImode:
 1626 return "vspltish %0,%1";
 1627 case V16QImode:
 1628 return "vspltisb %0,%1";
 1629 default:
 1630 abort ();
     /* The add-self case is handled by a splitter, so emit "#". */
 1633 else if (EASY_VECTOR_15_ADD_SELF (cst, vec, mode))
 1634 return "#";
 1635 else
 1636 abort ();
 1639 if (TARGET_SPE)
 1641 /* Vector constant 0 is handled as a splitter of V2SI, and in the
 1642 pattern of V1DI, V4HI, and V2SF.
 1644 FIXME: We should probably return # and add post reload
 1645 splitters for these, but this way is so easy ;-).  */
 1647 operands[1] = GEN_INT (cst);
 1648 operands[2] = GEN_INT (cst2);
 1649 if (cst == cst2)
 1650 return "li %0,%1\n\tevmergelo %0,%0,%0";
 1651 else
 1652 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
     /* Neither AltiVec nor SPE: caller should not have gotten here. */
 1655 abort ();
1658 /* Return 1 if the operand is the constant 0. This works for scalars
1659 as well as vectors. */
1661 zero_constant (op, mode)
1662 rtx op;
1663 enum machine_mode mode;
1665 return op == CONST0_RTX (mode);
1668 /* Return 1 if the operand is 0.0. */
1670 zero_fp_constant (op, mode)
1671 rtx op;
1672 enum machine_mode mode;
1674 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
 1677 /* Return 1 if the operand is in volatile memory. Note that during
 1678 the RTL generation phase, memory_operand does not return TRUE for
 1679 volatile memory references. So this function allows us to
 1680 recognize volatile references where its safe. */
 1683 volatile_mem_operand (op, mode)
 1684 rtx op;
 1685 enum machine_mode mode;
 1687 if (GET_CODE (op) != MEM)
 1688 return 0;
 1690 if (!MEM_VOLATILE_P (op))
 1691 return 0;
 1693 if (mode != GET_MODE (op))
 1694 return 0;
     /* Address validity depends on the compilation phase: fully strict
	after reload, strict during reload, lenient before it. */
 1696 if (reload_completed)
 1697 return memory_operand (op, mode);
 1699 if (reload_in_progress)
 1700 return strict_memory_address_p (mode, XEXP (op, 0));
 1702 return memory_address_p (mode, XEXP (op, 0));
1705 /* Return 1 if the operand is an offsettable memory operand. */
1708 offsettable_mem_operand (op, mode)
1709 rtx op;
1710 enum machine_mode mode;
1712 return ((GET_CODE (op) == MEM)
1713 && offsettable_address_p (reload_completed || reload_in_progress,
1714 mode, XEXP (op, 0)));
1717 /* Return 1 if the operand is either an easy FP constant (see above) or
1718 memory. */
1721 mem_or_easy_const_operand (op, mode)
1722 rtx op;
1723 enum machine_mode mode;
1725 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1728 /* Return 1 if the operand is either a non-special register or an item
1729 that can be used as the operand of a `mode' add insn. */
1732 add_operand (op, mode)
1733 rtx op;
1734 enum machine_mode mode;
1736 if (GET_CODE (op) == CONST_INT)
1737 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1738 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1740 return gpc_reg_operand (op, mode);
1743 /* Return 1 if OP is a constant but not a valid add_operand. */
1746 non_add_cint_operand (op, mode)
1747 rtx op;
1748 enum machine_mode mode ATTRIBUTE_UNUSED;
1750 return (GET_CODE (op) == CONST_INT
1751 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1752 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
 1755 /* Return 1 if the operand is a non-special register or a constant that
 1756 can be used as the operand of an OR or XOR insn on the RS/6000. */
 1759 logical_operand (op, mode)
 1760 rtx op;
 1761 enum machine_mode mode;
     /* OPL holds the usable low bits of the constant; OPH is only set
	(and required to be zero) in the CONST_DOUBLE case. */
 1763 HOST_WIDE_INT opl, oph;
 1765 if (gpc_reg_operand (op, mode))
 1766 return 1;
 1768 if (GET_CODE (op) == CONST_INT)
 1770 opl = INTVAL (op) & GET_MODE_MASK (mode);
     /* On narrow hosts a negative CONST_INT in a wide mode has implicit
	set high bits, so it cannot be a 16-bit logical immediate. */
 1772 #if HOST_BITS_PER_WIDE_INT <= 32
 1773 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
 1774 return 0;
 1775 #endif
 1777 else if (GET_CODE (op) == CONST_DOUBLE)
 1779 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
 1780 abort ();
 1782 opl = CONST_DOUBLE_LOW (op);
 1783 oph = CONST_DOUBLE_HIGH (op);
 1784 if (oph != 0)
 1785 return 0;
 1787 else
 1788 return 0;
     /* Valid if the constant fits entirely in the low 16 bits (ori/xori)
	or entirely in bits 16..31 (oris/xoris). */
 1790 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
 1791 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
1794 /* Return 1 if C is a constant that is not a logical operand (as
1795 above), but could be split into one. */
1798 non_logical_cint_operand (op, mode)
1799 rtx op;
1800 enum machine_mode mode;
1802 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1803 && ! logical_operand (op, mode)
1804 && reg_or_logical_cint_operand (op, mode));
 1807 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
 1808 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
 1809 Reject all ones and all zeros, since these should have been optimized
 1810 away and confuse the making of MB and ME.
     (MB/ME are the mask-begin/mask-end fields of rlwinm-family insns.) */
 1813 mask_operand (op, mode)
 1814 rtx op;
 1815 enum machine_mode mode ATTRIBUTE_UNUSED;
 1817 HOST_WIDE_INT c, lsb;
 1819 if (GET_CODE (op) != CONST_INT)
 1820 return 0;
 1822 c = INTVAL (op);
 1824 /* Fail in 64-bit mode if the mask wraps around because the upper
 1825 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
 1826 if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
 1827 return 0;
 1829 /* We don't change the number of transitions by inverting,
 1830 so make sure we start with the LS bit zero. */
 1831 if (c & 1)
 1832 c = ~c;
 1834 /* Reject all zeros or all ones. */
 1835 if (c == 0)
 1836 return 0;
 1838 /* Find the first transition. */
 1839 lsb = c & -c;
 1841 /* Invert to look for a second transition. */
 1842 c = ~c;
 1844 /* Erase first transition. */
 1845 c &= -lsb;
 1847 /* Find the second transition (if any). */
 1848 lsb = c & -c;
 1850 /* Match if all the bits above are 1's (or c is zero). */
 1851 return c == -lsb;
 1854 /* Return 1 for the PowerPC64 rlwinm corner case: a wrapping 32-bit
     mask (bit 31 and bit 0 both set), which mask_operand rejects.
     The transition-counting below mirrors mask_operand. */
 1857 mask_operand_wrap (op, mode)
 1858 rtx op;
 1859 enum machine_mode mode ATTRIBUTE_UNUSED;
 1861 HOST_WIDE_INT c, lsb;
 1863 if (GET_CODE (op) != CONST_INT)
 1864 return 0;
 1866 c = INTVAL (op);
     /* Only the wrapping case (MSB and LSB of the 32-bit mask set)
	belongs here. */
 1868 if ((c & 0x80000001) != 0x80000001)
 1869 return 0;
     /* C's low bit is known set, so invert to start from a zero LSB;
	reject the all-ones original (now all-zeros). */
 1871 c = ~c;
 1872 if (c == 0)
 1873 return 0;
     /* Find and erase the first transition, then require at most one
	more: C must collapse to a single run of high 1's. */
 1875 lsb = c & -c;
 1876 c = ~c;
 1877 c &= -lsb;
 1878 lsb = c & -c;
 1879 return c == -lsb;
 1882 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
 1883 It is if there are no more than one 1->0 or 0->1 transitions.
 1884 Reject all zeros, since zero should have been optimized away and
 1885 confuses the making of MB and ME. */
 1888 mask64_operand (op, mode)
 1889 rtx op;
 1890 enum machine_mode mode ATTRIBUTE_UNUSED;
 1892 if (GET_CODE (op) == CONST_INT)
 1894 HOST_WIDE_INT c, lsb;
 1896 c = INTVAL (op);
 1898 /* Reject all zeros. */
 1899 if (c == 0)
 1900 return 0;
 1902 /* We don't change the number of transitions by inverting,
 1903 so make sure we start with the LS bit zero. */
 1904 if (c & 1)
 1905 c = ~c;
 1907 /* Find the transition, and check that all bits above are 1's. */
 1908 lsb = c & -c;
 1910 /* Match if all the bits above are 1's (or c is zero). */
 1911 return c == -lsb;
     /* Anything other than CONST_INT is not a 64-bit mask. */
 1913 return 0;
 1916 /* Like mask64_operand, but allow up to three transitions. This
 1917 predicate is used by insn patterns that generate two rldicl or
 1918 rldicr machine insns. */
 1921 mask64_2_operand (op, mode)
 1922 rtx op;
 1923 enum machine_mode mode ATTRIBUTE_UNUSED;
 1925 if (GET_CODE (op) == CONST_INT)
 1927 HOST_WIDE_INT c, lsb;
 1929 c = INTVAL (op);
 1931 /* Disallow all zeros. */
 1932 if (c == 0)
 1933 return 0;
 1935 /* We don't change the number of transitions by inverting,
 1936 so make sure we start with the LS bit zero. */
 1937 if (c & 1)
 1938 c = ~c;
 1940 /* Find the first transition. */
 1941 lsb = c & -c;
 1943 /* Invert to look for a second transition. */
 1944 c = ~c;
 1946 /* Erase first transition. */
 1947 c &= -lsb;
 1949 /* Find the second transition. */
 1950 lsb = c & -c;
 1952 /* Invert to look for a third transition. */
 1953 c = ~c;
 1955 /* Erase second transition. */
 1956 c &= -lsb;
 1958 /* Find the third transition (if any). */
 1959 lsb = c & -c;
 1961 /* Match if all the bits above are 1's (or c is zero). */
 1962 return c == -lsb;
     /* Anything other than CONST_INT is not a mask. */
 1964 return 0;
 1967 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
 1968 implement ANDing by the mask IN (a CONST_INT accepted by
     mask64_2_operand).  On return OUT[0]/OUT[2] are the rotate counts
     and OUT[1]/OUT[3] the masks for the two insns.  Requires a 64-bit
     HOST_WIDE_INT host; aborts otherwise. */
 1969 void
 1970 build_mask64_2_operands (in, out)
 1971 rtx in;
 1972 rtx *out;
 1974 #if HOST_BITS_PER_WIDE_INT >= 64
 1975 unsigned HOST_WIDE_INT c, lsb, m1, m2;
 1976 int shift;
 1978 if (GET_CODE (in) != CONST_INT)
 1979 abort ();
 1981 c = INTVAL (in);
 1982 if (c & 1)
 1984 /* Assume c initially something like 0x00fff000000fffff. The idea
 1985 is to rotate the word so that the middle ^^^^^^ group of zeros
 1986 is at the MS end and can be cleared with an rldicl mask. We then
 1987 rotate back and clear off the MS ^^ group of zeros with a
 1988 second rldicl. */
 1989 c = ~c; /* c == 0xff000ffffff00000 */
 1990 lsb = c & -c; /* lsb == 0x0000000000100000 */
 1991 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
 1992 c = ~c; /* c == 0x00fff000000fffff */
 1993 c &= -lsb; /* c == 0x00fff00000000000 */
 1994 lsb = c & -c; /* lsb == 0x0000100000000000 */
 1995 c = ~c; /* c == 0xff000fffffffffff */
 1996 c &= -lsb; /* c == 0xff00000000000000 */
 1997 shift = 0;
 1998 while ((lsb >>= 1) != 0)
 1999 shift++; /* shift == 44 on exit from loop */
 2000 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
 2001 m1 = ~m1; /* m1 == 0x000000ffffffffff */
 2002 m2 = ~c; /* m2 == 0x00ffffffffffffff */
 2004 else
 2006 /* Assume c initially something like 0xff000f0000000000. The idea
 2007 is to rotate the word so that the ^^^ middle group of zeros
 2008 is at the LS end and can be cleared with an rldicr mask. We then
 2009 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
 2010 a second rldicr. */
 2011 lsb = c & -c; /* lsb == 0x0000010000000000 */
 2012 m2 = -lsb; /* m2 == 0xffffff0000000000 */
 2013 c = ~c; /* c == 0x00fff0ffffffffff */
 2014 c &= -lsb; /* c == 0x00fff00000000000 */
 2015 lsb = c & -c; /* lsb == 0x0000100000000000 */
 2016 c = ~c; /* c == 0xff000fffffffffff */
 2017 c &= -lsb; /* c == 0xff00000000000000 */
 2018 shift = 0;
 2019 while ((lsb >>= 1) != 0)
 2020 shift++; /* shift == 44 on exit from loop */
 2021 m1 = ~c; /* m1 == 0x00ffffffffffffff */
 2022 m1 >>= shift; /* m1 == 0x0000000000000fff */
 2023 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
 2026 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
 2027 masks will be all 1's. We are guaranteed more than one transition. */
 2028 out[0] = GEN_INT (64 - shift);
 2029 out[1] = GEN_INT (m1);
 2030 out[2] = GEN_INT (shift);
 2031 out[3] = GEN_INT (m2);
 2032 #else
 2033 (void)in;
 2034 (void)out;
 2035 abort ();
 2036 #endif
2039 /* Return 1 if the operand is either a non-special register or a constant
2040 that can be used as the operand of a PowerPC64 logical AND insn. */
2043 and64_operand (op, mode)
2044 rtx op;
2045 enum machine_mode mode;
2047 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2048 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
2050 return (logical_operand (op, mode) || mask64_operand (op, mode));
2053 /* Like the above, but also match constants that can be implemented
2054 with two rldicl or rldicr insns. */
2057 and64_2_operand (op, mode)
2058 rtx op;
2059 enum machine_mode mode;
2061 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2062 return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
2064 return logical_operand (op, mode) || mask64_2_operand (op, mode);
2067 /* Return 1 if the operand is either a non-special register or a
2068 constant that can be used as the operand of an RS/6000 logical AND insn. */
2071 and_operand (op, mode)
2072 rtx op;
2073 enum machine_mode mode;
2075 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2076 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
2078 return (logical_operand (op, mode) || mask_operand (op, mode));
2081 /* Return 1 if the operand is a general register or memory operand. */
2084 reg_or_mem_operand (op, mode)
2085 rtx op;
2086 enum machine_mode mode;
2088 return (gpc_reg_operand (op, mode)
2089 || memory_operand (op, mode)
2090 || volatile_mem_operand (op, mode));
 2093 /* Return 1 if the operand is a general register or memory operand without
 2094 pre_inc or pre_dec which produces invalid form of PowerPC lwa
 2095 instruction.
     (lwa is DS-form: its displacement must be a multiple of 4, and it
     has no update forms, hence the extra address checks below.) */
 2098 lwa_operand (op, mode)
 2099 rtx op;
 2100 enum machine_mode mode;
 2102 rtx inner = op;
     /* After reload, look through a SUBREG to the underlying reg/mem. */
 2104 if (reload_completed && GET_CODE (inner) == SUBREG)
 2105 inner = SUBREG_REG (inner);
 2107 return gpc_reg_operand (inner, mode)
 2108 || (memory_operand (inner, mode)
 2109 && GET_CODE (XEXP (inner, 0)) != PRE_INC
 2110 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
 2111 && (GET_CODE (XEXP (inner, 0)) != PLUS
 2112 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
 2113 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
2116 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
2119 symbol_ref_operand (op, mode)
2120 rtx op;
2121 enum machine_mode mode;
2123 if (mode != VOIDmode && GET_MODE (op) != mode)
2124 return 0;
2126 return (GET_CODE (op) == SYMBOL_REF
2127 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op)));
2130 /* Return 1 if the operand, used inside a MEM, is a valid first argument
2131 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
2134 call_operand (op, mode)
2135 rtx op;
2136 enum machine_mode mode;
2138 if (mode != VOIDmode && GET_MODE (op) != mode)
2139 return 0;
2141 return (GET_CODE (op) == SYMBOL_REF
2142 || (GET_CODE (op) == REG
2143 && (REGNO (op) == LINK_REGISTER_REGNUM
2144 || REGNO (op) == COUNT_REGISTER_REGNUM
2145 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2148 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
2149 this file. */
2152 current_file_function_operand (op, mode)
2153 rtx op;
2154 enum machine_mode mode ATTRIBUTE_UNUSED;
2156 return (GET_CODE (op) == SYMBOL_REF
2157 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op))
2158 && (SYMBOL_REF_LOCAL_P (op)
2159 || (op == XEXP (DECL_RTL (current_function_decl), 0))));
/* Return 1 if this operand is a valid input for a move insn of mode
   MODE.  The checks fall through from the most general operand class
   (memory) to the most specific (small-data references); the ORDER of
   the tests is significant, since later tests assume earlier ones
   have already rejected their cases.  */

int
input_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Memory is always valid.  */
  if (memory_operand (op, mode))
    return 1;

  /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary.  */
  if (GET_CODE (op) == CONSTANT_P_RTX)
    return 1;

  /* For floating-point, easy constants are valid.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && CONSTANT_P (op)
      && easy_fp_constant (op, mode))
    return 1;

  /* Allow any integer constant.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && (GET_CODE (op) == CONST_INT
	  || GET_CODE (op) == CONST_DOUBLE))
    return 1;

  /* Allow easy vector constants.  */
  if (GET_CODE (op) == CONST_VECTOR
      && easy_vector_constant (op, mode))
    return 1;

  /* For floating-point or multi-word mode, the only remaining valid type
     is a register.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    return register_operand (op, mode);

  /* The only cases left are integral modes one word or smaller (we
     do not get called for MODE_CC values).  These can be in any
     register.  */
  if (register_operand (op, mode))
    return 1;

  /* A SYMBOL_REF referring to the TOC is valid.  */
  if (legitimate_constant_pool_address_p (op))
    return 1;

  /* A constant pool expression (relative to the TOC) is valid.  */
  if (toc_relative_expr_p (op))
    return 1;

  /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
     to be valid.  */
  if (DEFAULT_ABI == ABI_V4
      && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
      && small_data_operand (op, Pmode))
    return 1;

  return 0;
}
/* Return 1 for an operand in small memory on V.4/eabi, i.e. a reference
   (SYMBOL_REF, or SYMBOL_REF plus CONST_INT) that will be placed within
   reach of the small-data base register.  Always 0 on non-ELF targets,
   where the parameters are unused.  */

int
small_data_operand (op, mode)
     rtx op ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
#if TARGET_ELF
  rtx sym_ref;

  /* Small data must be enabled and placed in a dedicated section.  */
  if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
    return 0;

  if (DEFAULT_ABI != ABI_V4)
    return 0;

  if (GET_CODE (op) == SYMBOL_REF)
    sym_ref = op;

  else if (GET_CODE (op) != CONST
	   || GET_CODE (XEXP (op, 0)) != PLUS
	   || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
	   || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
    return 0;

  else
    {
      rtx sum = XEXP (op, 0);
      HOST_WIDE_INT summand;

      /* We have to be careful here, because it is the referenced address
	 that must be 32k from _SDA_BASE_, not just the symbol.  */
      summand = INTVAL (XEXP (sum, 1));
      if (summand < 0 || summand > g_switch_value)
	return 0;

      sym_ref = XEXP (sum, 0);
    }

  return SYMBOL_REF_SMALL_P (sym_ref);
#else
  return 0;
#endif
}
/* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address.  */

/* Recursively classify OP.  Returns nonzero if OP is built entirely
   from constant-pool SYMBOL_REFs, the TOC base symbol, and integer
   constants combined with PLUS/MINUS/CONST.  On success *HAVE_SYM is
   set if a constant-pool symbol was seen and *HAVE_TOC if the TOC base
   label was seen; neither flag is ever cleared by this function.  */

static int
constant_pool_expr_1 (op, have_sym, have_toc)
    rtx op;
    int *have_sym;
    int *have_toc;
{
  switch (GET_CODE(op))
    {
    case SYMBOL_REF:
      /* TLS symbols never qualify.  */
      if (RS6000_SYMBOL_REF_TLS_P (op))
	return 0;
      else if (CONSTANT_POOL_ADDRESS_P (op))
	{
	  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
	    {
	      *have_sym = 1;
	      return 1;
	    }
	  else
	    return 0;
	}
      else if (! strcmp (XSTR (op, 0), toc_label_name))
	{
	  *have_toc = 1;
	  return 1;
	}
      else
	return 0;
    case PLUS:
    case MINUS:
      return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
	      && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
    case CONST:
      return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
    case CONST_INT:
      return 1;
    default:
      return 0;
    }
}
2312 static bool
2313 constant_pool_expr_p (op)
2314 rtx op;
2316 int have_sym = 0;
2317 int have_toc = 0;
2318 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2321 static bool
2322 toc_relative_expr_p (op)
2323 rtx op;
2325 int have_sym = 0;
2326 int have_toc = 0;
2327 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2330 /* SPE offset addressing is limited to 5-bits worth of double words. */
2331 #define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
2333 bool
2334 legitimate_constant_pool_address_p (x)
2335 rtx x;
2337 return (TARGET_TOC
2338 && GET_CODE (x) == PLUS
2339 && GET_CODE (XEXP (x, 0)) == REG
2340 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
2341 && constant_pool_expr_p (XEXP (x, 1)));
2344 static bool
2345 legitimate_small_data_p (mode, x)
2346 enum machine_mode mode;
2347 rtx x;
2349 return (DEFAULT_ABI == ABI_V4
2350 && !flag_pic && !TARGET_TOC
2351 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
2352 && small_data_operand (x, mode));
/* Return true if X is a (base-register + constant-offset) address valid
   for MODE.  STRICT selects strict base-register checking.

   The allowed offset depends on MODE: AltiVec modes take no constant
   offset at all, SPE vector modes take a small scaled offset, and
   multi-register modes must keep every accessed word within the signed
   16-bit displacement range (64-bit DS-form accesses additionally need
   a 4-byte-aligned offset).  */

static bool
legitimate_offset_address_p (mode, x, strict)
    enum machine_mode mode;
    rtx x;
    int strict;
{
  unsigned HOST_WIDE_INT offset, extra;

  if (GET_CODE (x) != PLUS)
    return false;
  if (GET_CODE (XEXP (x, 0)) != REG)
    return false;
  if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
    return false;
  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
    return false;

  offset = INTVAL (XEXP (x, 1));
  /* EXTRA is the displacement of the last word accessed beyond OFFSET.  */
  extra = 0;
  switch (mode)
    {
    case V16QImode:
    case V8HImode:
    case V4SFmode:
    case V4SImode:
      /* AltiVec vector modes.  Only reg+reg addressing is valid here,
	 which leaves the only valid constant offset of zero, which by
	 canonicalization rules is also invalid.  */
      return false;

    case V4HImode:
    case V2SImode:
    case V1DImode:
    case V2SFmode:
      /* SPE vector modes.  */
      return SPE_CONST_OFFSET_OK (offset);

    case DFmode:
    case DImode:
      /* Two words on 32-bit; a single DS-form access on 64-bit, whose
	 offset must be a multiple of 4.  */
      if (TARGET_32BIT)
	extra = 4;
      else if (offset & 3)
	return false;
      break;

    case TFmode:
    case TImode:
      /* Four words on 32-bit; two DS-form accesses on 64-bit.  */
      if (TARGET_32BIT)
	extra = 12;
      else if (offset & 3)
	return false;
      else
	extra = 8;
      break;

    default:
      break;
    }

  /* The first test guards against unsigned wrap-around of OFFSET+EXTRA;
     the second, evaluated in modular arithmetic, accepts exactly the
     signed 16-bit displacement range for the whole access.  */
  return (offset + extra >= offset) && (offset + extra + 0x8000 < 0x10000);
}
2417 static bool
2418 legitimate_indexed_address_p (x, strict)
2419 rtx x;
2420 int strict;
2422 rtx op0, op1;
2424 if (GET_CODE (x) != PLUS)
2425 return false;
2426 op0 = XEXP (x, 0);
2427 op1 = XEXP (x, 1);
2429 if (!REG_P (op0) || !REG_P (op1))
2430 return false;
2432 return ((INT_REG_OK_FOR_BASE_P (op0, strict)
2433 && INT_REG_OK_FOR_INDEX_P (op1, strict))
2434 || (INT_REG_OK_FOR_BASE_P (op1, strict)
2435 && INT_REG_OK_FOR_INDEX_P (op0, strict)));
2438 static inline bool
2439 legitimate_indirect_address_p (x, strict)
2440 rtx x;
2441 int strict;
2443 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
/* Return true if X is a (lo_sum base-reg constant) address valid for
   MODE.  STRICT selects strict base-register checking.  Such addresses
   are only produced on ELF targets without a TOC, and only for scalar
   modes that a single HIGH/LO_SUM pair can address.  */

static bool
legitimate_lo_sum_address_p (mode, x, strict)
    enum machine_mode mode;
    rtx x;
    int strict;
{
  if (GET_CODE (x) != LO_SUM)
    return false;
  if (GET_CODE (XEXP (x, 0)) != REG)
    return false;
  if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
    return false;
  /* From here on X is the summand, not the whole address.  */
  x = XEXP (x, 1);

  if (TARGET_ELF)
    {
      /* PIC (except AIX-style) and TOC addressing use other forms.  */
      if (DEFAULT_ABI != ABI_AIX && flag_pic)
	return false;
      if (TARGET_TOC)
	return false;
      /* Vector modes are excluded.  */
      if (GET_MODE_NUNITS (mode) != 1)
	return false;
      /* Wider-than-32-bit modes only when DFmode lives in FPRs.  */
      if (GET_MODE_BITSIZE (mode) > 32
	  && !(TARGET_HARD_FLOAT && TARGET_FPRS && mode == DFmode))
	return false;

      return CONSTANT_P (x);
    }

  return false;
}
/* Try machine-dependent ways of modifying an illegitimate address
   to be legitimate.  If we find one, return the new, valid address.
   This is used from only one place: `memory_address' in explow.c.

   OLDX is the address as it was before break_out_memory_refs was
   called.  In some cases it is useful to look at this to decide what
   needs to be done.

   MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.

   It is always safe for this function to do nothing.  It exists to
   recognize opportunities to optimize the output.

   On RS/6000, first check for the sum of a register with a constant
   integer that is out of range.  If so, generate code to add the
   constant with the low-order 16 bits masked to the register and force
   this result into another register (this can be done with `cau').
   Then generate an address of REG+(CONST&0xffff), allowing for the
   possibility of bit 16 being a one.

   Then check for the sum of a register and something not constant, try to
   load the other things into a register and return the sum.  */

rtx
rs6000_legitimize_address (x, oldx, mode)
    rtx x;
    rtx oldx ATTRIBUTE_UNUSED;
    enum machine_mode mode;
{
  /* Thread-local symbols take their own legitimization path.  */
  if (GET_CODE (x) == SYMBOL_REF)
    {
      enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
      if (model != 0)
	return rs6000_legitimize_tls_address (x, model);
    }

  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
    {
      /* Constant offset out of 16-bit signed range: split it into a
	 high part folded into the base register and a low part kept
	 as the displacement.  */
      HOST_WIDE_INT high_int, low_int;
      rtx sum;
      low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
      high_int = INTVAL (XEXP (x, 1)) - low_int;
      sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
					 GEN_INT (high_int)), 0);
      return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
    }
  else if (GET_CODE (x) == PLUS
	   && GET_CODE (XEXP (x, 0)) == REG
	   && GET_CODE (XEXP (x, 1)) != CONST_INT
	   && GET_MODE_NUNITS (mode) == 1
	   && ((TARGET_HARD_FLOAT && TARGET_FPRS)
	       || TARGET_POWERPC64
	       || (mode != DFmode && mode != TFmode))
	   && (TARGET_POWERPC64 || mode != DImode)
	   && mode != TImode)
    {
      /* reg+reg form: force the non-constant addend into a register.
	 Excluded are modes that cannot use indexed addressing (see
	 rs6000_legitimate_address).  */
      return gen_rtx_PLUS (Pmode, XEXP (x, 0),
			   force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
    }
  else if (ALTIVEC_VECTOR_MODE (mode))
    {
      rtx reg;

      /* Make sure both operands are registers.  */
      if (GET_CODE (x) == PLUS)
	return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
			     force_reg (Pmode, XEXP (x, 1)));

      reg = force_reg (Pmode, x);
      return reg;
    }
  else if (SPE_VECTOR_MODE (mode))
    {
      /* We accept [reg + reg] and [reg + OFFSET].  */

      if (GET_CODE (x) == PLUS)
	{
	  rtx op1 = XEXP (x, 0);
	  rtx op2 = XEXP (x, 1);

	  op1 = force_reg (Pmode, op1);

	  /* Offsets must fit the SPE encoding; otherwise go reg+reg.  */
	  if (GET_CODE (op2) != REG
	      && (GET_CODE (op2) != CONST_INT
		  || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
	    op2 = force_reg (Pmode, op2);

	  return gen_rtx_PLUS (Pmode, op1, op2);
	}

      return force_reg (Pmode, x);
    }
  else if (TARGET_ELF
	   && TARGET_32BIT
	   && TARGET_NO_TOC
	   && ! flag_pic
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && GET_MODE_NUNITS (mode) == 1
	   && (GET_MODE_BITSIZE (mode) <= 32
	       || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
    {
      /* No TOC available: materialize the constant address with a
	 HIGH/LO_SUM pair.  */
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_elf_high (reg, (x)));
      return gen_rtx_LO_SUM (Pmode, reg, (x));
    }
  else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
	   && ! flag_pic
#if TARGET_MACHO
	   && ! MACHO_DYNAMIC_NO_PIC_P
#endif
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
	   && mode != DImode
	   && mode != TImode)
    {
      /* Same HIGH/LO_SUM trick, Darwin flavor.  */
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_macho_high (reg, (x)));
      return gen_rtx_LO_SUM (Pmode, reg, (x));
    }
  else if (TARGET_TOC
	   && constant_pool_expr_p (x)
	   && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
    {
      return create_TOC_reference (x);
    }
  else
    return NULL_RTX;
}
2615 /* Construct the SYMBOL_REF for the tls_get_addr function. */
2617 static GTY(()) rtx rs6000_tls_symbol;
2618 static rtx
2619 rs6000_tls_get_addr ()
2621 if (!rs6000_tls_symbol)
2622 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
2624 return rs6000_tls_symbol;
2627 /* Construct the SYMBOL_REF for TLS GOT references. */
2629 static GTY(()) rtx rs6000_got_symbol;
2630 static rtx
2631 rs6000_got_sym ()
2633 if (!rs6000_got_symbol)
2635 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2636 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
2637 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
2640 return rs6000_got_symbol;
/* ADDR contains a thread-local SYMBOL_REF.  Generate code to compute
   this (thread-local) address according to MODEL, returning the
   register that holds the result.  Local-exec references go straight
   off the thread pointer (r13 on 64-bit, r2 on 32-bit); the dynamic
   models go through the GOT/TOC and, for GD/LD, a __tls_get_addr
   call wrapped as a libcall block.  */

static rtx
rs6000_legitimize_tls_address (addr, model)
    rtx addr;
    enum tls_model model;
{
  rtx dest, insn;

  dest = gen_reg_rtx (Pmode);
  if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
    {
      /* LE with 16-bit offsets: one add off the thread pointer.  */
      rtx tlsreg;

      if (TARGET_64BIT)
	{
	  tlsreg = gen_rtx_REG (Pmode, 13);
	  insn = gen_tls_tprel_64 (dest, tlsreg, addr);
	}
      else
	{
	  tlsreg = gen_rtx_REG (Pmode, 2);
	  insn = gen_tls_tprel_32 (dest, tlsreg, addr);
	}
      emit_insn (insn);
    }
  else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
    {
      /* LE with 32-bit offsets: high-adjusted part, then low part.  */
      rtx tlsreg, tmp;

      tmp = gen_reg_rtx (Pmode);
      if (TARGET_64BIT)
	{
	  tlsreg = gen_rtx_REG (Pmode, 13);
	  insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
	}
      else
	{
	  tlsreg = gen_rtx_REG (Pmode, 2);
	  insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
	}
      emit_insn (insn);
      if (TARGET_64BIT)
	insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
      else
	insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
      emit_insn (insn);
    }
  else
    {
      rtx r3, got, tga, tmp1, tmp2, eqv;

      /* All remaining models need the GOT pointer (the TOC register on
	 64-bit) in GOT.  */
      if (TARGET_64BIT)
	got = gen_rtx_REG (Pmode, TOC_REGISTER);
      else
	{
	  if (flag_pic == 1)
	    got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
	  else
	    {
	      rtx gsym = rs6000_got_sym ();
	      got = gen_reg_rtx (Pmode);
	      if (flag_pic == 0)
		rs6000_emit_move (got, gsym, Pmode);
	      else
		{
		  /* -fPIC: compute _GLOBAL_OFFSET_TABLE_ PC-relatively
		     via an internal label, and bracket the sequence
		     with REG_LIBCALL/REG_RETVAL notes so it is treated
		     as one unit.  */
		  char buf[30];
		  static int tls_got_labelno = 0;
		  rtx tempLR, lab, tmp3, mem;
		  rtx first, last;

		  ASM_GENERATE_INTERNAL_LABEL (buf, "LTLS", tls_got_labelno++);
		  lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
		  tempLR = gen_reg_rtx (Pmode);
		  tmp1 = gen_reg_rtx (Pmode);
		  tmp2 = gen_reg_rtx (Pmode);
		  tmp3 = gen_reg_rtx (Pmode);
		  mem = gen_rtx_MEM (Pmode, tmp1);
		  RTX_UNCHANGING_P (mem) = 1;

		  first = emit_insn (gen_load_toc_v4_PIC_1b (tempLR, lab,
							     gsym));
		  emit_move_insn (tmp1, tempLR);
		  emit_move_insn (tmp2, mem);
		  emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
		  last = emit_move_insn (got, tmp3);
		  REG_NOTES (last) = gen_rtx_EXPR_LIST (REG_EQUAL, gsym,
							REG_NOTES (last));
		  REG_NOTES (first) = gen_rtx_INSN_LIST (REG_LIBCALL, last,
							 REG_NOTES (first));
		  REG_NOTES (last) = gen_rtx_INSN_LIST (REG_RETVAL, first,
							REG_NOTES (last));
		}
	    }
	}

      if (model == TLS_MODEL_GLOBAL_DYNAMIC)
	{
	  /* GD: call __tls_get_addr with the GOT entry for ADDR;
	     the result arrives in r3.  */
	  r3 = gen_rtx_REG (Pmode, 3);
	  if (TARGET_64BIT)
	    insn = gen_tls_gd_64 (r3, got, addr);
	  else
	    insn = gen_tls_gd_32 (r3, got, addr);
	  start_sequence ();
	  emit_insn (insn);
	  tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
	  insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
	  insn = emit_call_insn (insn);
	  CONST_OR_PURE_CALL_P (insn) = 1;
	  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
	  insn = get_insns ();
	  end_sequence ();
	  emit_libcall_block (insn, dest, r3, addr);
	}
      else if (model == TLS_MODEL_LOCAL_DYNAMIC)
	{
	  /* LD: one __tls_get_addr call for the module base, then a
	     dtprel offset addition chosen by -mtls-size.  */
	  r3 = gen_rtx_REG (Pmode, 3);
	  if (TARGET_64BIT)
	    insn = gen_tls_ld_64 (r3, got);
	  else
	    insn = gen_tls_ld_32 (r3, got);
	  start_sequence ();
	  emit_insn (insn);
	  tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
	  insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
	  insn = emit_call_insn (insn);
	  CONST_OR_PURE_CALL_P (insn) = 1;
	  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
	  insn = get_insns ();
	  end_sequence ();
	  tmp1 = gen_reg_rtx (Pmode);
	  eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
				UNSPEC_TLSLD);
	  emit_libcall_block (insn, tmp1, r3, eqv);
	  if (rs6000_tls_size == 16)
	    {
	      if (TARGET_64BIT)
		insn = gen_tls_dtprel_64 (dest, tmp1, addr);
	      else
		insn = gen_tls_dtprel_32 (dest, tmp1, addr);
	    }
	  else if (rs6000_tls_size == 32)
	    {
	      tmp2 = gen_reg_rtx (Pmode);
	      if (TARGET_64BIT)
		insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
	      else
		insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
	      emit_insn (insn);
	      if (TARGET_64BIT)
		insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
	      else
		insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
	    }
	  else
	    {
	      /* Full-size dtprel offset loaded from the GOT.  */
	      tmp2 = gen_reg_rtx (Pmode);
	      if (TARGET_64BIT)
		insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
	      else
		insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
	      emit_insn (insn);
	      insn = gen_rtx_SET (Pmode, dest,
				  gen_rtx_PLUS (Pmode, tmp2, tmp1));
	    }
	  emit_insn (insn);
	}
      else
	{
	  /* IE, or 64 bit offset LE.  */
	  tmp2 = gen_reg_rtx (Pmode);
	  if (TARGET_64BIT)
	    insn = gen_tls_got_tprel_64 (tmp2, got, addr);
	  else
	    insn = gen_tls_got_tprel_32 (tmp2, got, addr);
	  emit_insn (insn);
	  if (TARGET_64BIT)
	    insn = gen_tls_tls_64 (dest, tmp2, addr);
	  else
	    insn = gen_tls_tls_32 (dest, tmp2, addr);
	  emit_insn (insn);
	}
    }

  return dest;
}
2831 /* Return 1 if X is a SYMBOL_REF for a TLS symbol. This is used in
2832 instruction definitions. */
2835 rs6000_tls_symbol_ref (x, mode)
2836 rtx x;
2837 enum machine_mode mode ATTRIBUTE_UNUSED;
2839 return RS6000_SYMBOL_REF_TLS_P (x);
2842 /* Return 1 if X contains a thread-local symbol. */
2844 bool
2845 rs6000_tls_referenced_p (x)
2846 rtx x;
2848 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
2851 /* Return 1 if *X is a thread-local symbol. This is the same as
2852 rs6000_tls_symbol_ref except for the type of the unused argument. */
2854 static inline int
2855 rs6000_tls_symbol_ref_1 (x, data)
2856 rtx *x;
2857 void *data ATTRIBUTE_UNUSED;
2859 return RS6000_SYMBOL_REF_TLS_P (*x);
2862 /* The convention appears to be to define this wherever it is used.
2863 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
2864 is now used here. */
2865 #ifndef REG_MODE_OK_FOR_BASE_P
2866 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
2867 #endif
/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS.  Returns a value to
   replace the input X, or the original X if no replacement is called for.
   The output parameter *WIN is 1 if the calling macro should goto WIN,
   0 if it should not.

   For RS/6000, we wish to handle large displacements off a base
   register by splitting the addend across an addiu/addis and the mem insn.
   This cuts number of extra insns needed from 3 to 1.

   On Darwin, we use this to generate code for floating point constants.
   A movsf_low is generated so we wind up with 2 instructions rather than 3.
   The Darwin code is inside #if TARGET_MACHO because only then is
   machopic_function_base_name() defined.  */

rtx
rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win)
    rtx x;
    enum machine_mode mode;
    int opnum;
    int type;
    int ind_levels ATTRIBUTE_UNUSED;
    int *win;
{
  /* We must recognize output that we have already generated ourselves.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      /* Re-reload the inner (reg+high) part into a base register.  */
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }

#if TARGET_MACHO
  if (DEFAULT_ABI == ABI_DARWIN && flag_pic
      && GET_CODE (x) == LO_SUM
      && GET_CODE (XEXP (x, 0)) == PLUS
      && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
      && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
      && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
    {
      /* Result of previous invocation of this function on Darwin
	 floating point constant.  */
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
      && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && !SPE_VECTOR_MODE (mode)
      && !ALTIVEC_VECTOR_MODE (mode))
    {
      /* Split VAL into a sign-extended 16-bit LOW and a HIGH part
	 reachable by addis.  */
      HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
      HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
      HOST_WIDE_INT high
	= (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;

      /* Check for 32-bit overflow.  */
      if (high + low != val)
	{
	  *win = 0;
	  return x;
	}

      /* Reload the high part into a base reg; leave the low part
	 in the mem directly.  */

      x = gen_rtx_PLUS (GET_MODE (x),
			gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
				      GEN_INT (high)),
			GEN_INT (low));

      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#if TARGET_MACHO
  if (GET_CODE (x) == SYMBOL_REF
      && DEFAULT_ABI == ABI_DARWIN
      && !ALTIVEC_VECTOR_MODE (mode)
      && flag_pic)
    {
      /* Darwin load of floating point constant.  */
      rtx offset = gen_rtx (CONST, Pmode,
		    gen_rtx (MINUS, Pmode, x,
			     gen_rtx (SYMBOL_REF, Pmode,
				      machopic_function_base_name ())));
      x = gen_rtx (LO_SUM, GET_MODE (x),
	    gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
		     gen_rtx (HIGH, Pmode, offset)), offset);
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
  if (GET_CODE (x) == SYMBOL_REF
      && DEFAULT_ABI == ABI_DARWIN
      && !ALTIVEC_VECTOR_MODE (mode)
      && MACHO_DYNAMIC_NO_PIC_P)
    {
      /* Darwin load of floating point constant.  */
      x = gen_rtx (LO_SUM, GET_MODE (x),
		   gen_rtx (HIGH, Pmode, x), x);
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif
  if (TARGET_TOC
      && constant_pool_expr_p (x)
      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
    {
      (x) = create_TOC_reference (x);
      *win = 1;
      return x;
    }
  *win = 0;
  return x;
}
/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
   that is a valid memory address for an instruction.
   The MODE argument is the machine mode for the MEM expression
   that wants to use this address.

   On the RS/6000, there are four valid address: a SYMBOL_REF that
   refers to a constant pool entry of an address (or the sum of it
   plus a constant), a short (16-bit signed) constant plus a register,
   the sum of two registers, or a register indirect, possibly with an
   auto-increment.  For DFmode and DImode with a constant plus register,
   we must ensure that both words are addressable or PowerPC64 with offset
   word aligned.

   For modes spanning multiple registers (DFmode in 32-bit GPRs,
   32-bit DImode, TImode), indexed addressing cannot be used because
   adjacent memory cells are accessed by adding word-sized offsets
   during assembly output.  */

int
rs6000_legitimate_address (mode, x, reg_ok_strict)
    enum machine_mode mode;
    rtx x;
    int reg_ok_strict;
{
  /* TLS references are never legitimate as-is; they must be
     legitimized first.  */
  if (RS6000_SYMBOL_REF_TLS_P (x))
    return 0;
  if (legitimate_indirect_address_p (x, reg_ok_strict))
    return 1;
  /* Pre-increment/decrement of a base register, where update-form
     instructions exist (not for vector modes).  */
  if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
      && !ALTIVEC_VECTOR_MODE (mode)
      && !SPE_VECTOR_MODE (mode)
      && TARGET_UPDATE
      && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
    return 1;
  if (legitimate_small_data_p (mode, x))
    return 1;
  if (legitimate_constant_pool_address_p (x))
    return 1;
  /* If not REG_OK_STRICT (before reload) let pass any stack offset.  */
  if (! reg_ok_strict
      && GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && XEXP (x, 0) == virtual_stack_vars_rtx
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return 1;
  if (legitimate_offset_address_p (mode, x, reg_ok_strict))
    return 1;
  /* Indexed (reg+reg) addressing, except for the multi-access modes
     described in the head comment.  */
  if (mode != TImode
      && ((TARGET_HARD_FLOAT && TARGET_FPRS)
	  || TARGET_POWERPC64
	  || (mode != DFmode && mode != TFmode))
      && (TARGET_POWERPC64 || mode != DImode)
      && legitimate_indexed_address_p (x, reg_ok_strict))
    return 1;
  if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
    return 1;
  return 0;
}
/* Go to LABEL if ADDR (a legitimate address expression)
   has an effect that depends on the machine mode it is used for.

   On the RS/6000 this is true of all integral offsets (since AltiVec
   modes don't allow them) or is a pre-increment or decrement.

   ??? Except that due to conceptual problems in offsettable_address_p
   we can't really report the problems of integral offsets.  So leave
   this assuming that the adjustable offset must be valid for the
   sub-words of a TFmode operand, which is what we had before.  */

bool
rs6000_mode_dependent_address (addr)
    rtx addr;
{
  switch (GET_CODE (addr))
    {
    case PLUS:
      if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
	{
	  /* 12 is the largest sub-word displacement added for a TFmode
	     access; the check (evaluated in unsigned modular
	     arithmetic) accepts exactly the offsets for which every
	     adjusted displacement stays within the signed 16-bit
	     range.  */
	  unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
	  return val + 12 + 0x8000 >= 0x10000;
	}
      break;

    case LO_SUM:
      return true;

    case PRE_INC:
    case PRE_DEC:
      /* Auto-update forms are only recognized when TARGET_UPDATE.  */
      return TARGET_UPDATE;

    default:
      break;
    }

  return false;
}
/* Try to output insns to set TARGET equal to the constant C if it can
   be done in less than N insns.  Do all computations in MODE.
   Returns the place where the output has been placed if it can be
   done and the insns have been emitted.  If it would take more than N
   insns, zero is returned and no insns and emitted.

   Note that the K&R parameter order is (dest, mode, source, n); only
   QImode, HImode, SImode and DImode are handled, anything else
   aborts.  */

rtx
rs6000_emit_set_const (dest, mode, source, n)
    rtx dest, source;
    enum machine_mode mode;
    int n ATTRIBUTE_UNUSED;
{
  rtx result, insn, set;
  HOST_WIDE_INT c0, c1;

  if (mode == QImode || mode == HImode)
    {
      /* Sub-word constants always fit in one move.  */
      if (dest == NULL)
	dest = gen_reg_rtx (mode);
      emit_insn (gen_rtx_SET (VOIDmode, dest, source));
      return dest;
    }
  else if (mode == SImode)
    {
      /* High halfword first (lis), then OR in the low halfword.  */
      result = no_new_pseudos ? dest : gen_reg_rtx (SImode);

      emit_insn (gen_rtx_SET (VOIDmode, result,
			      GEN_INT (INTVAL (source)
				       & (~ (HOST_WIDE_INT) 0xffff))));
      emit_insn (gen_rtx_SET (VOIDmode, dest,
			      gen_rtx_IOR (SImode, result,
					   GEN_INT (INTVAL (source) & 0xffff))));
      result = dest;
    }
  else if (mode == DImode)
    {
      /* Extract the 64-bit value as (low, high) host words.  */
      if (GET_CODE (source) == CONST_INT)
	{
	  c0 = INTVAL (source);
	  c1 = -(c0 < 0);
	}
      else if (GET_CODE (source) == CONST_DOUBLE)
	{
#if HOST_BITS_PER_WIDE_INT >= 64
	  c0 = CONST_DOUBLE_LOW (source);
	  c1 = -(c0 < 0);
#else
	  c0 = CONST_DOUBLE_LOW (source);
	  c1 = CONST_DOUBLE_HIGH (source);
#endif
	}
      else
	abort ();

      result = rs6000_emit_set_long_const (dest, c0, c1);
    }
  else
    abort ();

  /* Attach a REG_EQUAL note so later passes know the final value.  */
  insn = get_last_insn ();
  set = single_set (insn);
  if (! CONSTANT_P (SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, source);

  return result;
}
/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
   fall back to a straight forward decomposition.  We do this to avoid
   exponential run times encountered when looking for longer sequences
   with rs6000_emit_set_const.

   C1 is the low host word of the constant and C2 the high word (on a
   64-bit host C2 is recomputed from C1).  Returns DEST.  */
static rtx
rs6000_emit_set_long_const (dest, c1, c2)
     rtx dest;
     HOST_WIDE_INT c1, c2;
{
  if (!TARGET_POWERPC64)
    {
      /* 32-bit target: just move each 32-bit half into its subword.  */
      rtx operand1, operand2;

      operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
					DImode);
      operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
					DImode);
      emit_move_insn (operand1, GEN_INT (c1));
      emit_move_insn (operand2, GEN_INT (c2));
    }
  else
    {
      HOST_WIDE_INT ud1, ud2, ud3, ud4;

      /* Split the constant into four 16-bit chunks, ud1 lowest.  */
      ud1 = c1 & 0xffff;
      ud2 = (c1 & 0xffff0000) >> 16;
#if HOST_BITS_PER_WIDE_INT >= 64
      c2 = c1 >> 32;
#endif
      ud3 = c2 & 0xffff;
      ud4 = (c2 & 0xffff0000) >> 16;

      if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
	  || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
	{
	  /* Fits in a sign-extended 16-bit immediate: one li.  */
	  if (ud1 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) -  0x8000)));
	  else
	    emit_move_insn (dest, GEN_INT (ud1));
	}

      else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
	       || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
	{
	  /* Fits in a sign-extended 32-bit value: lis + optional ori.  */
	  if (ud2 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud2 << 16));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
      else if ((ud4 == 0xffff && (ud3 & 0x8000))
	       || (ud4 == 0 && ! (ud3 & 0x8000)))
	{
	  /* 48 significant bits: build the upper 32, shift left 16,
	     then OR in the low chunk.  */
	  if (ud3 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud3 << 16));

	  if (ud2 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
	  emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
      else
	{
	  /* Full 64 bits: build the upper 32, shift left 32, then OR
	     in the lower chunks.  */
	  if (ud4 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud4 << 16));

	  if (ud3 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));

	  emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
	  if (ud2 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
					       GEN_INT (ud2 << 16)));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
    }

  return dest;
}
3260 /* Emit a move from SOURCE to DEST in mode MODE. */
3261 void
3262 rs6000_emit_move (dest, source, mode)
3263 rtx dest;
3264 rtx source;
3265 enum machine_mode mode;
3267 rtx operands[2];
3268 operands[0] = dest;
3269 operands[1] = source;
     /* The helpers below repeatedly rewrite operands[1] (legitimizing
        constants, TLS references, etc.) before the final SET is emitted.  */
3271 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
3272 if (GET_CODE (operands[1]) == CONST_DOUBLE
3273 && ! FLOAT_MODE_P (mode)
3274 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3276 /* FIXME. This should never happen. */
3277 /* Since it seems that it does, do the safe thing and convert
3278 to a CONST_INT. */
3279 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
     /* A non-float CONST_DOUBLE whose value would fit a CONST_INT
        indicates a bug upstream; trap it here.  */
3281 if (GET_CODE (operands[1]) == CONST_DOUBLE
3282 && ! FLOAT_MODE_P (mode)
3283 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
3284 && CONST_DOUBLE_LOW (operands[1]) >= 0)
3285 || (CONST_DOUBLE_HIGH (operands[1]) == -1
3286 && CONST_DOUBLE_LOW (operands[1]) < 0)))
3287 abort ();
3289 /* Check if GCC is setting up a block move that will end up using FP
3290 registers as temporaries. We must make sure this is acceptable. */
3291 if (GET_CODE (operands[0]) == MEM
3292 && GET_CODE (operands[1]) == MEM
3293 && mode == DImode
3294 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
3295 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
3296 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
3297 ? 32 : MEM_ALIGN (operands[0])))
3298 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
3299 ? 32
3300 : MEM_ALIGN (operands[1]))))
3301 && ! MEM_VOLATILE_P (operands [0])
3302 && ! MEM_VOLATILE_P (operands [1]))
     /* Split the slow unaligned DImode mem-to-mem move into two SImode
        moves at offsets 0 and 4, which the check above proved are fast.  */
3304 emit_move_insn (adjust_address (operands[0], SImode, 0),
3305 adjust_address (operands[1], SImode, 0));
3306 emit_move_insn (adjust_address (operands[0], SImode, 4),
3307 adjust_address (operands[1], SImode, 4));
3308 return;
     /* While we may still create pseudos, widen narrow memory loads via
        zero-extension so later passes see full word_mode values.  */
3311 if (!no_new_pseudos)
3313 if (GET_CODE (operands[1]) == MEM && optimize > 0
3314 && (mode == QImode || mode == HImode || mode == SImode)
3315 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
3317 rtx reg = gen_reg_rtx (word_mode);
3319 emit_insn (gen_rtx_SET (word_mode, reg,
3320 gen_rtx_ZERO_EXTEND (word_mode,
3321 operands[1])));
3322 operands[1] = gen_lowpart (mode, reg);
3324 if (GET_CODE (operands[0]) != REG)
3325 operands[1] = force_reg (mode, operands[1]);
3328 if (mode == SFmode && ! TARGET_POWERPC
3329 && TARGET_HARD_FLOAT && TARGET_FPRS
3330 && GET_CODE (operands[0]) == MEM)
3332 int regnum;
3334 if (reload_in_progress || reload_completed)
3335 regnum = true_regnum (operands[1]);
3336 else if (GET_CODE (operands[1]) == REG)
3337 regnum = REGNO (operands[1]);
3338 else
3339 regnum = -1;
3341 /* If operands[1] is a register, on POWER it may have
3342 double-precision data in it, so truncate it to single
3343 precision. */
3344 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
3346 rtx newreg;
3347 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
3348 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
3349 operands[1] = newreg;
3353 /* Recognize the case where operand[1] is a reference to thread-local
3354 data and load its address to a register. */
3355 if (GET_CODE (operands[1]) == SYMBOL_REF)
3357 enum tls_model model = SYMBOL_REF_TLS_MODEL (operands[1]);
3358 if (model != 0)
3359 operands[1] = rs6000_legitimize_tls_address (operands[1], model);
3362 /* Handle the case where reload calls us with an invalid address. */
3363 if (reload_in_progress && mode == Pmode
3364 && (! general_operand (operands[1], mode)
3365 || ! nonimmediate_operand (operands[0], mode)))
3366 goto emit_set;
3368 /* Handle the case of CONSTANT_P_RTX. */
3369 if (GET_CODE (operands[1]) == CONSTANT_P_RTX)
3370 goto emit_set;
3372 /* FIXME: In the long term, this switch statement should go away
3373 and be replaced by a sequence of tests based on things like
3374 mode == Pmode. */
3375 switch (mode)
3377 case HImode:
3378 case QImode:
3379 if (CONSTANT_P (operands[1])
3380 && GET_CODE (operands[1]) != CONST_INT)
3381 operands[1] = force_const_mem (mode, operands[1]);
3382 break;
3384 case TFmode:
3385 case DFmode:
3386 case SFmode:
3387 if (CONSTANT_P (operands[1])
3388 && ! easy_fp_constant (operands[1], mode))
3389 operands[1] = force_const_mem (mode, operands[1]);
3390 break;
3392 case V16QImode:
3393 case V8HImode:
3394 case V4SFmode:
3395 case V4SImode:
3396 case V4HImode:
3397 case V2SFmode:
3398 case V2SImode:
3399 case V1DImode:
3400 if (CONSTANT_P (operands[1])
3401 && !easy_vector_constant (operands[1], mode))
3402 operands[1] = force_const_mem (mode, operands[1]);
3403 break;
3405 case SImode:
3406 case DImode:
3407 /* Use default pattern for address of ELF small data */
3408 if (TARGET_ELF
3409 && mode == Pmode
3410 && DEFAULT_ABI == ABI_V4
3411 && (GET_CODE (operands[1]) == SYMBOL_REF
3412 || GET_CODE (operands[1]) == CONST)
3413 && small_data_operand (operands[1], mode))
3415 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3416 return;
3419 if (DEFAULT_ABI == ABI_V4
3420 && mode == Pmode && mode == SImode
3421 && flag_pic == 1 && got_operand (operands[1], mode))
3423 emit_insn (gen_movsi_got (operands[0], operands[1]));
3424 return;
     /* Without a TOC and without PIC, synthesize a symbolic address
        with a high/low instruction pair (elf_high/elf_low or the
        Mach-O equivalents).  */
3427 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
3428 && TARGET_NO_TOC
3429 && ! flag_pic
3430 && mode == Pmode
3431 && CONSTANT_P (operands[1])
3432 && GET_CODE (operands[1]) != HIGH
3433 && GET_CODE (operands[1]) != CONST_INT)
3435 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
3437 /* If this is a function address on -mcall-aixdesc,
3438 convert it to the address of the descriptor. */
3439 if (DEFAULT_ABI == ABI_AIX
3440 && GET_CODE (operands[1]) == SYMBOL_REF
3441 && XSTR (operands[1], 0)[0] == '.')
3443 const char *name = XSTR (operands[1], 0);
3444 rtx new_ref;
3445 while (*name == '.')
3446 name++;
3447 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
3448 CONSTANT_POOL_ADDRESS_P (new_ref)
3449 = CONSTANT_POOL_ADDRESS_P (operands[1]);
3450 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
3451 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
3452 SYMBOL_REF_DECL (new_ref) = SYMBOL_REF_DECL (operands[1]);
3453 operands[1] = new_ref;
3456 if (DEFAULT_ABI == ABI_DARWIN)
3458 #if TARGET_MACHO
3459 if (MACHO_DYNAMIC_NO_PIC_P)
3461 /* Take care of any required data indirection. */
3462 operands[1] = rs6000_machopic_legitimize_pic_address (
3463 operands[1], mode, operands[0]);
3464 if (operands[0] != operands[1])
3465 emit_insn (gen_rtx_SET (VOIDmode,
3466 operands[0], operands[1]));
3467 return;
3469 #endif
3470 emit_insn (gen_macho_high (target, operands[1]));
3471 emit_insn (gen_macho_low (operands[0], target, operands[1]));
3472 return;
3475 emit_insn (gen_elf_high (target, operands[1]));
3476 emit_insn (gen_elf_low (operands[0], target, operands[1]));
3477 return;
3480 /* If this is a SYMBOL_REF that refers to a constant pool entry,
3481 and we have put it in the TOC, we just need to make a TOC-relative
3482 reference to it. */
3483 if (TARGET_TOC
3484 && GET_CODE (operands[1]) == SYMBOL_REF
3485 && constant_pool_expr_p (operands[1])
3486 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
3487 get_pool_mode (operands[1])))
3489 operands[1] = create_TOC_reference (operands[1]);
     /* Otherwise, a Pmode constant that cannot be loaded cheaply
        (or that targets an FP register) must be forced to memory.  */
3491 else if (mode == Pmode
3492 && CONSTANT_P (operands[1])
3493 && ((GET_CODE (operands[1]) != CONST_INT
3494 && ! easy_fp_constant (operands[1], mode))
3495 || (GET_CODE (operands[1]) == CONST_INT
3496 && num_insns_constant (operands[1], mode) > 2)
3497 || (GET_CODE (operands[0]) == REG
3498 && FP_REGNO_P (REGNO (operands[0]))))
3499 && GET_CODE (operands[1]) != HIGH
3500 && ! legitimate_constant_pool_address_p (operands[1])
3501 && ! toc_relative_expr_p (operands[1]))
3503 /* Emit a USE operation so that the constant isn't deleted if
3504 expensive optimizations are turned on because nobody
3505 references it. This should only be done for operands that
3506 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
3507 This should not be done for operands that contain LABEL_REFs.
3508 For now, we just handle the obvious case. */
3509 if (GET_CODE (operands[1]) != LABEL_REF)
3510 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
3512 #if TARGET_MACHO
3513 /* Darwin uses a special PIC legitimizer. */
3514 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
3516 operands[1] =
3517 rs6000_machopic_legitimize_pic_address (operands[1], mode,
3518 operands[0]);
3519 if (operands[0] != operands[1])
3520 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3521 return;
3523 #endif
3525 /* If we are to limit the number of things we put in the TOC and
3526 this is a symbol plus a constant we can add in one insn,
3527 just put the symbol in the TOC and add the constant. Don't do
3528 this if reload is in progress. */
3529 if (GET_CODE (operands[1]) == CONST
3530 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
3531 && GET_CODE (XEXP (operands[1], 0)) == PLUS
3532 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
3533 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
3534 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
3535 && ! side_effects_p (operands[0]))
3537 rtx sym =
3538 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
3539 rtx other = XEXP (XEXP (operands[1], 0), 1);
3541 sym = force_reg (mode, sym);
3542 if (mode == SImode)
3543 emit_insn (gen_addsi3 (operands[0], sym, other));
3544 else
3545 emit_insn (gen_adddi3 (operands[0], sym, other));
3546 return;
3549 operands[1] = force_const_mem (mode, operands[1]);
3551 if (TARGET_TOC
3552 && constant_pool_expr_p (XEXP (operands[1], 0))
3553 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
3554 get_pool_constant (XEXP (operands[1], 0)),
3555 get_pool_mode (XEXP (operands[1], 0))))
3557 operands[1]
3558 = gen_rtx_MEM (mode,
3559 create_TOC_reference (XEXP (operands[1], 0)));
3560 set_mem_alias_set (operands[1], get_TOC_alias_set ());
3561 RTX_UNCHANGING_P (operands[1]) = 1;
3564 break;
3566 case TImode:
     /* TImode moves want simple register addresses; rewrite any
        non-REG memory address through a fresh address register
        (not possible once reload has started).  */
3567 if (GET_CODE (operands[0]) == MEM
3568 && GET_CODE (XEXP (operands[0], 0)) != REG
3569 && ! reload_in_progress)
3570 operands[0]
3571 = replace_equiv_address (operands[0],
3572 copy_addr_to_reg (XEXP (operands[0], 0)));
3574 if (GET_CODE (operands[1]) == MEM
3575 && GET_CODE (XEXP (operands[1], 0)) != REG
3576 && ! reload_in_progress)
3577 operands[1]
3578 = replace_equiv_address (operands[1],
3579 copy_addr_to_reg (XEXP (operands[1], 0)));
3580 if (TARGET_POWER)
3582 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3583 gen_rtvec (2,
3584 gen_rtx_SET (VOIDmode,
3585 operands[0], operands[1]),
3586 gen_rtx_CLOBBER (VOIDmode,
3587 gen_rtx_SCRATCH (SImode)))));
3588 return;
3590 break;
3592 default:
3593 abort ();
3596 /* Above, we may have called force_const_mem which may have returned
3597 an invalid address. If we can, fix this up; otherwise, reload will
3598 have to deal with it. */
3599 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
3600 operands[1] = validize_mem (operands[1]);
3602 emit_set:
3603 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3606 /* Initialize a variable CUM of type CUMULATIVE_ARGS
3607 for a call to a function whose data type is FNTYPE.
3608 For a library call, FNTYPE is 0.
3610 For incoming args we set the number of arguments in the prototype large
3611 so we never return a PARALLEL. */
3613 void
3614 init_cumulative_args (cum, fntype, libname, incoming)
3615 CUMULATIVE_ARGS *cum;
3616 tree fntype;
3617 rtx libname ATTRIBUTE_UNUSED;
3618 int incoming;
3620 static CUMULATIVE_ARGS zero_cumulative;
     /* Start from a zeroed template, then seed the ABI starting registers
        (first FP, vector and GP argument registers).  */
3622 *cum = zero_cumulative;
3623 cum->words = 0;
3624 cum->fregno = FP_ARG_MIN_REG;
3625 cum->vregno = ALTIVEC_ARG_MIN_REG;
3626 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
3627 cum->call_cookie = CALL_NORMAL;
3628 cum->sysv_gregno = GP_ARG_MIN_REG;
3630 if (incoming)
3631 cum->nargs_prototype = 1000; /* don't return a PARALLEL */
     /* For a prototyped outgoing call, count the declared arguments;
        an aggregate (or RETURN_IN_MEMORY) result adds a hidden one.  */
3633 else if (cum->prototype)
3634 cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
3635 + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
3636 || RETURN_IN_MEMORY (TREE_TYPE (fntype))));
3638 else
3639 cum->nargs_prototype = 0;
3641 cum->orig_nargs = cum->nargs_prototype;
3643 /* Check for a longcall attribute. */
3644 if (fntype
3645 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
3646 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
3647 cum->call_cookie = CALL_LONG;
3649 if (TARGET_DEBUG_ARG)
3651 fprintf (stderr, "\ninit_cumulative_args:");
3652 if (fntype)
3654 tree ret_type = TREE_TYPE (fntype);
3655 fprintf (stderr, " ret code = %s,",
3656 tree_code_name[ (int)TREE_CODE (ret_type) ]);
3659 if (cum->call_cookie & CALL_LONG)
3660 fprintf (stderr, " longcall,");
3662 fprintf (stderr, " proto = %d, nargs = %d\n",
3663 cum->prototype, cum->nargs_prototype);
3667 /* If defined, a C expression which determines whether, and in which
3668 direction, to pad out an argument with extra space. The value
3669 should be of type `enum direction': either `upward' to pad above
3670 the argument, `downward' to pad below, or `none' to inhibit
3671 padding.
3673 For the AIX ABI structs are always stored left shifted in their
3674 argument slot. */
3676 enum direction
3677 function_arg_padding (mode, type)
3678 enum machine_mode mode;
3679 tree type;
     /* Aggregates are left-justified in their slot, i.e. padded upward.  */
3681 if (type != 0 && AGGREGATE_TYPE_P (type))
3682 return upward;
3684 /* This is the default definition. */
     /* Little-endian always pads upward; big-endian pads downward only
        when the argument is smaller than a parameter slot.  */
3685 return (! BYTES_BIG_ENDIAN
3686 ? upward
3687 : ((mode == BLKmode
3688 ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
3689 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
3690 : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
3691 ? downward : upward));
3694 /* If defined, a C expression that gives the alignment boundary, in bits,
3695 of an argument with the specified mode and type. If it is not defined,
3696 PARM_BOUNDARY is used for all arguments.
3698 V.4 wants long longs to be double word aligned. */
3701 function_arg_boundary (mode, type)
3702 enum machine_mode mode;
3703 tree type ATTRIBUTE_UNUSED;
     /* V.4 DImode/DFmode and SPE vectors: 64-bit alignment;
        AltiVec vectors under the AltiVec ABI: 128-bit;
        everything else falls back to PARM_BOUNDARY.  */
3705 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
3706 return 64;
3707 else if (SPE_VECTOR_MODE (mode))
3708 return 64;
3709 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3710 return 128;
3711 else
3712 return PARM_BOUNDARY;
3715 /* Update the data in CUM to advance over an argument
3716 of mode MODE and data type TYPE.
3717 (TYPE is null for libcalls where that information may not be available.) */
3719 void
3720 function_arg_advance (cum, mode, type, named)
3721 CUMULATIVE_ARGS *cum;
3722 enum machine_mode mode;
3723 tree type;
3724 int named;
3726 cum->nargs_prototype--;
     /* AltiVec vector arg: consume a vector register while one remains
        (and the arg is still within the prototype); otherwise it takes
        stack words.  */
3728 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3730 if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
3731 cum->vregno++;
3732 else
3733 cum->words += RS6000_ARG_SIZE (mode, type);
3735 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
3736 && named && cum->sysv_gregno <= GP_ARG_MAX_REG)
3737 cum->sysv_gregno++;
3738 else if (DEFAULT_ABI == ABI_V4)
3740 if (TARGET_HARD_FLOAT && TARGET_FPRS
3741 && (mode == SFmode || mode == DFmode))
3743 if (cum->fregno <= FP_ARG_V4_MAX_REG)
3744 cum->fregno++;
3745 else
3747 if (mode == DFmode)
3748 cum->words += cum->words & 1;
3749 cum->words += RS6000_ARG_SIZE (mode, type);
3752 else
3754 int n_words;
3755 int gregno = cum->sysv_gregno;
3757 /* Aggregates and IEEE quad get passed by reference. */
3758 if ((type && AGGREGATE_TYPE_P (type))
3759 || mode == TFmode)
3760 n_words = 1;
3761 else
3762 n_words = RS6000_ARG_SIZE (mode, type);
3764 /* Long long and SPE vectors are put in odd registers. */
3765 if (n_words == 2 && (gregno & 1) == 0)
3766 gregno += 1;
3768 /* Long long and SPE vectors are not split between registers
3769 and stack. */
3770 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
3772 /* Long long is aligned on the stack. */
3773 if (n_words == 2)
3774 cum->words += cum->words & 1;
3775 cum->words += n_words;
3778 /* Note: continuing to accumulate gregno past when we've started
3779 spilling to the stack indicates the fact that we've started
3780 spilling to the stack to expand_builtin_saveregs. */
3781 cum->sysv_gregno = gregno + n_words;
3784 if (TARGET_DEBUG_ARG)
3786 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
3787 cum->words, cum->fregno);
3788 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
3789 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
3790 fprintf (stderr, "mode = %4s, named = %d\n",
3791 GET_MODE_NAME (mode), named);
     /* AIX/Darwin path: doubleword-align when needed, then advance the
        word counter, and consume an FP register for float modes.  */
3794 else
3796 int align = (TARGET_32BIT && (cum->words & 1) != 0
3797 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
3799 cum->words += align + RS6000_ARG_SIZE (mode, type);
3801 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3802 && TARGET_HARD_FLOAT && TARGET_FPRS)
3803 cum->fregno += (mode == TFmode ? 2 : 1);
3805 if (TARGET_DEBUG_ARG)
3807 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
3808 cum->words, cum->fregno);
3809 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
3810 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
3811 fprintf (stderr, "named = %d, align = %d\n", named, align);
3816 /* Determine where to put an argument to a function.
3817 Value is zero to push the argument on the stack,
3818 or a hard register in which to store the argument.
3820 MODE is the argument's machine mode.
3821 TYPE is the data type of the argument (as a tree).
3822 This is null for libcalls where that information may
3823 not be available.
3824 CUM is a variable of type CUMULATIVE_ARGS which gives info about
3825 the preceding args and about the function being called.
3826 NAMED is nonzero if this argument is a named parameter
3827 (otherwise it is an extra parameter matching an ellipsis).
3829 On RS/6000 the first eight words of non-FP are normally in registers
3830 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
3831 Under V.4, the first 8 FP args are in registers.
3833 If this is floating-point and no prototype is specified, we use
3834 both an FP and integer register (or possibly FP reg and stack). Library
3835 functions (when TYPE is zero) always have the proper types for args,
3836 so we can pass the FP value just in one register. emit_library_function
3837 doesn't support PARALLEL anyway. */
3839 struct rtx_def *
3840 function_arg (cum, mode, type, named)
3841 CUMULATIVE_ARGS *cum;
3842 enum machine_mode mode;
3843 tree type;
3844 int named;
3846 enum rs6000_abi abi = DEFAULT_ABI;
3848 /* Return a marker to indicate whether CR1 needs to set or clear the
3849 bit that V.4 uses to say fp args were passed in registers.
3850 Assume that we don't need the marker for software floating point,
3851 or compiler generated library calls. */
3852 if (mode == VOIDmode)
3854 if (abi == ABI_V4
3855 && cum->nargs_prototype < 0
3856 && type && (cum->prototype || TARGET_NO_PROTOTYPE))
3858 /* For the SPE, we need to crxor CR6 always. */
3859 if (TARGET_SPE_ABI)
3860 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS)
3861 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
3862 return GEN_INT (cum->call_cookie
3863 | ((cum->fregno == FP_ARG_MIN_REG)
3864 ? CALL_V4_SET_FP_ARGS
3865 : CALL_V4_CLEAR_FP_ARGS));
3868 return GEN_INT (cum->call_cookie);
3871 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3873 if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
3874 return gen_rtx_REG (mode, cum->vregno);
3875 else
3876 return NULL;
3878 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode) && named)
3880 if (cum->sysv_gregno <= GP_ARG_MAX_REG)
3881 return gen_rtx_REG (mode, cum->sysv_gregno);
3882 else
3883 return NULL;
3885 else if (abi == ABI_V4)
3887 if (TARGET_HARD_FLOAT && TARGET_FPRS
3888 && (mode == SFmode || mode == DFmode))
3890 if (cum->fregno <= FP_ARG_V4_MAX_REG)
3891 return gen_rtx_REG (mode, cum->fregno)
3892 else
3893 return NULL;
3895 else
3897 int n_words;
3898 int gregno = cum->sysv_gregno;
3900 /* Aggregates and IEEE quad get passed by reference. */
3901 if ((type && AGGREGATE_TYPE_P (type))
3902 || mode == TFmode)
3903 n_words = 1;
3904 else
3905 n_words = RS6000_ARG_SIZE (mode, type);
3907 /* Long long and SPE vectors are put in odd registers. */
3908 if (n_words == 2 && (gregno & 1) == 0)
3909 gregno += 1;
3911 /* Long long and SPE vectors are not split between registers
3912 and stack. */
3913 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
3915 /* SPE vectors in ... get split into 2 registers. */
3916 if (TARGET_SPE && TARGET_SPE_ABI
3917 && SPE_VECTOR_MODE (mode) && !named)
3919 rtx r1, r2;
3920 enum machine_mode m = SImode;
3922 r1 = gen_rtx_REG (m, gregno);
3923 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
3924 r2 = gen_rtx_REG (m, gregno + 1);
3925 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
3926 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
3928 return gen_rtx_REG (mode, gregno);
3930 else
3931 return NULL;
3934 else
3936 int align = (TARGET_32BIT && (cum->words & 1) != 0
3937 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
3938 int align_words = cum->words + align;
     /* Variable-sized types never go in registers on this path.  */
3940 if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3941 return NULL_RTX;
3943 if (USE_FP_FOR_ARG_P (*cum, mode, type))
3945 if (! type
3946 || ((cum->nargs_prototype > 0)
3947 /* IBM AIX extended its linkage convention definition always
3948 to require FP args after register save area hole on the
3949 stack. */
3950 && (DEFAULT_ABI != ABI_AIX
3951 || ! TARGET_XL_CALL
3952 || (align_words < GP_ARG_NUM_REG))))
3953 return gen_rtx_REG (mode, cum->fregno);
     /* Unprototyped FP arg: pass in both GP register(s) (or partially,
        or not at all, when the GP registers are exhausted) and the FP
        register, as described in the header comment.  */
3955 return gen_rtx_PARALLEL (mode,
3956 gen_rtvec (2,
3957 gen_rtx_EXPR_LIST (VOIDmode,
3958 ((align_words >= GP_ARG_NUM_REG)
3959 ? NULL_RTX
3960 : (align_words
3961 + RS6000_ARG_SIZE (mode, type)
3962 > GP_ARG_NUM_REG
3963 /* If this is partially on the stack, then
3964 we only include the portion actually
3965 in registers here. */
3966 ? gen_rtx_REG (SImode,
3967 GP_ARG_MIN_REG + align_words)
3968 : gen_rtx_REG (mode,
3969 GP_ARG_MIN_REG + align_words))),
3970 const0_rtx),
3971 gen_rtx_EXPR_LIST (VOIDmode,
3972 gen_rtx_REG (mode, cum->fregno),
3973 const0_rtx)));
3975 else if (align_words < GP_ARG_NUM_REG)
3976 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
3977 else
3978 return NULL_RTX;
3982 /* For an arg passed partly in registers and partly in memory,
3983 this is the number of registers used.
3984 For args passed entirely in registers or entirely in memory, zero. */
3987 function_arg_partial_nregs (cum, mode, type, named)
3988 CUMULATIVE_ARGS *cum;
3989 enum machine_mode mode;
3990 tree type;
3991 int named ATTRIBUTE_UNUSED;
     /* V.4 never splits an argument between registers and stack.  */
3993 if (DEFAULT_ABI == ABI_V4)
3994 return 0;
3996 if (USE_FP_FOR_ARG_P (*cum, mode, type)
3997 || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
3999 if (cum->nargs_prototype >= 0)
4000 return 0;
     /* Arg straddles the GP register/stack boundary: return the number
        of words that land in registers.  */
4003 if (cum->words < GP_ARG_NUM_REG
4004 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
4006 int ret = GP_ARG_NUM_REG - cum->words;
4007 if (ret && TARGET_DEBUG_ARG)
4008 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
4010 return ret;
4013 return 0;
4016 /* A C expression that indicates when an argument must be passed by
4017 reference. If nonzero for an argument, a copy of that argument is
4018 made in memory and a pointer to the argument is passed instead of
4019 the argument itself. The pointer is passed in whatever way is
4020 appropriate for passing a pointer to that type.
4022 Under V.4, structures and unions are passed by reference.
4024 As an extension to all ABIs, variable sized types are passed by
4025 reference. */
4028 function_arg_pass_by_reference (cum, mode, type, named)
4029 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
4030 enum machine_mode mode ATTRIBUTE_UNUSED;
4031 tree type;
4032 int named ATTRIBUTE_UNUSED;
4034 if (DEFAULT_ABI == ABI_V4
4035 && ((type && AGGREGATE_TYPE_P (type))
4036 || mode == TFmode))
4038 if (TARGET_DEBUG_ARG)
4039 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
4041 return 1;
     /* Variable-sized types (int_size_in_bytes <= 0) go by reference
        on every ABI, per the header comment.  */
4043 return type && int_size_in_bytes (type) <= 0;
4046 /* Perform any needed actions needed for a function that is receiving a
4047 variable number of arguments.
4049 CUM is as above.
4051 MODE and TYPE are the mode and type of the current parameter.
4053 PRETEND_SIZE is a variable that should be set to the amount of stack
4054 that must be pushed by the prolog to pretend that our caller pushed
4057 Normally, this macro will push all remaining incoming registers on the
4058 stack and set PRETEND_SIZE to the length of the registers pushed. */
4060 void
4061 setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
4062 CUMULATIVE_ARGS *cum;
4063 enum machine_mode mode;
4064 tree type;
4065 int *pretend_size ATTRIBUTE_UNUSED;
4066 int no_rtl;
4069 CUMULATIVE_ARGS next_cum;
4070 int reg_size = TARGET_32BIT ? 4 : 8;
4071 rtx save_area = NULL_RTX, mem;
4072 int first_reg_offset, set;
4073 tree fntype;
4074 int stdarg_p;
4076 fntype = TREE_TYPE (current_function_decl);
4077 stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
4078 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4079 != void_type_node));
4081 /* For varargs, we do not want to skip the dummy va_dcl argument.
4082 For stdargs, we do want to skip the last named argument. */
4083 next_cum = *cum;
4084 if (stdarg_p)
4085 function_arg_advance (&next_cum, mode, type, 1);
4087 if (DEFAULT_ABI == ABI_V4)
4089 /* Indicate to allocate space on the stack for varargs save area. */
4090 cfun->machine->sysv_varargs_p = 1;
4091 if (! no_rtl)
4092 save_area = plus_constant (virtual_stack_vars_rtx,
4093 - RS6000_VARARGS_SIZE);
4095 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
     /* Non-V.4 ABIs save into the caller-provided incoming-args area.  */
4097 else
4099 first_reg_offset = next_cum.words;
4100 save_area = virtual_incoming_args_rtx;
4101 cfun->machine->sysv_varargs_p = 0;
4103 if (MUST_PASS_IN_STACK (mode, type))
4104 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
     /* Dump the remaining unnamed GP argument registers to the save area.  */
4107 set = get_varargs_alias_set ();
4108 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
4110 mem = gen_rtx_MEM (BLKmode,
4111 plus_constant (save_area,
4112 first_reg_offset * reg_size)),
4113 set_mem_alias_set (mem, set);
4114 set_mem_align (mem, BITS_PER_WORD);
4116 move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
4117 GP_ARG_NUM_REG - first_reg_offset);
4120 /* Save FP registers if needed. */
4121 if (DEFAULT_ABI == ABI_V4
4122 && TARGET_HARD_FLOAT && TARGET_FPRS
4123 && ! no_rtl
4124 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
4126 int fregno = next_cum.fregno;
4127 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4128 rtx lab = gen_label_rtx ();
4129 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
     /* Branch around the FP saves when CR1 is nonzero — the V.4 marker
        set by the caller (NOTE(review): polarity per the V.4 fp-args
        convention; confirm against the call-cookie code above).  */
4131 emit_jump_insn (gen_rtx_SET (VOIDmode,
4132 pc_rtx,
4133 gen_rtx_IF_THEN_ELSE (VOIDmode,
4134 gen_rtx_NE (VOIDmode, cr1,
4135 const0_rtx),
4136 gen_rtx_LABEL_REF (VOIDmode, lab),
4137 pc_rtx)));
4139 while (fregno <= FP_ARG_V4_MAX_REG)
4141 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
4142 set_mem_alias_set (mem, set);
4143 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
4144 fregno++;
4145 off += 8;
4148 emit_label (lab);
4152 /* Create the va_list data type. */
4154 tree
4155 rs6000_build_va_list ()
4157 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
4159 /* For AIX, prefer 'char *' because that's what the system
4160 header files like. */
4161 if (DEFAULT_ABI != ABI_V4)
4162 return build_pointer_type (char_type_node);
     /* V.4 va_list: a record with gpr/fpr counters (as unsigned chars)
        plus pointers to the overflow area and the register save area.  */
4164 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
4165 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
4167 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
4168 unsigned_char_type_node);
4169 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
4170 unsigned_char_type_node);
4171 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
4172 ptr_type_node);
4173 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
4174 ptr_type_node);
4176 DECL_FIELD_CONTEXT (f_gpr) = record;
4177 DECL_FIELD_CONTEXT (f_fpr) = record;
4178 DECL_FIELD_CONTEXT (f_ovf) = record;
4179 DECL_FIELD_CONTEXT (f_sav) = record;
4181 TREE_CHAIN (record) = type_decl;
4182 TYPE_NAME (record) = type_decl;
4183 TYPE_FIELDS (record) = f_gpr;
4184 TREE_CHAIN (f_gpr) = f_fpr;
4185 TREE_CHAIN (f_fpr) = f_ovf;
4186 TREE_CHAIN (f_ovf) = f_sav;
4188 layout_type (record);
4190 /* The correct type is an array type of one element. */
4191 return build_array_type (record, build_index_type (size_zero_node));
4194 /* Implement va_start. */
4196 void
4197 rs6000_va_start (valist, nextarg)
4198 tree valist;
4199 rtx nextarg;
4201 HOST_WIDE_INT words, n_gpr, n_fpr;
4202 tree f_gpr, f_fpr, f_ovf, f_sav;
4203 tree gpr, fpr, ovf, sav, t;
4205 /* Only SVR4 needs something special. */
4206 if (DEFAULT_ABI != ABI_V4)
4208 std_expand_builtin_va_start (valist, nextarg);
4209 return;
     /* Pull out the four fields of the __va_list_tag record built by
        rs6000_build_va_list, in declaration order.  */
4212 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
4213 f_fpr = TREE_CHAIN (f_gpr);
4214 f_ovf = TREE_CHAIN (f_fpr);
4215 f_sav = TREE_CHAIN (f_ovf);
4217 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
4218 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
4219 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
4220 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
4221 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
4223 /* Count number of gp and fp argument registers used. */
4224 words = current_function_args_info.words;
4225 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
4226 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
4228 if (TARGET_DEBUG_ARG)
4230 fputs ("va_start: words = ", stderr);
4231 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, words);
4232 fputs (", n_gpr = ", stderr);
4233 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_gpr);
4234 fputs (", n_fpr = ", stderr);
4235 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_fpr);
4236 putc ('\n', stderr);
     /* Store the used-register counts into the va_list object.  */
4239 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
4240 TREE_SIDE_EFFECTS (t) = 1;
4241 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4243 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
4244 TREE_SIDE_EFFECTS (t) = 1;
4245 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4247 /* Find the overflow area. */
4248 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
4249 if (words != 0)
4250 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
4251 build_int_2 (words * UNITS_PER_WORD, 0));
4252 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
4253 TREE_SIDE_EFFECTS (t) = 1;
4254 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4256 /* Find the register save area. */
4257 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
4258 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
4259 build_int_2 (-RS6000_VARARGS_SIZE, -1));
4260 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
4261 TREE_SIDE_EFFECTS (t) = 1;
4262 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4265 /* Implement va_arg. */
4268 rs6000_va_arg (valist, type)
4269 tree valist, type;
4271 tree f_gpr, f_fpr, f_ovf, f_sav;
4272 tree gpr, fpr, ovf, sav, reg, t, u;
4273 int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
4274 rtx lab_false, lab_over, addr_rtx, r;
4276 if (DEFAULT_ABI != ABI_V4)
4278 /* Variable sized types are passed by reference. */
4279 if (int_size_in_bytes (type) <= 0)
4281 u = build_pointer_type (type);
4283 /* Args grow upward. */
4284 t = build (POSTINCREMENT_EXPR, TREE_TYPE (valist), valist,
4285 build_int_2 (POINTER_SIZE / BITS_PER_UNIT, 0));
4286 TREE_SIDE_EFFECTS (t) = 1;
4288 t = build1 (NOP_EXPR, build_pointer_type (u), t);
4289 TREE_SIDE_EFFECTS (t) = 1;
4291 t = build1 (INDIRECT_REF, u, t);
4292 TREE_SIDE_EFFECTS (t) = 1;
4294 return expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4296 else
4297 return std_expand_builtin_va_arg (valist, type);
4300 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
4301 f_fpr = TREE_CHAIN (f_gpr);
4302 f_ovf = TREE_CHAIN (f_fpr);
4303 f_sav = TREE_CHAIN (f_ovf);
4305 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
4306 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
4307 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
4308 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
4309 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
4311 size = int_size_in_bytes (type);
4312 rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
4314 if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
4316 /* Aggregates and long doubles are passed by reference. */
4317 indirect_p = 1;
4318 reg = gpr;
4319 n_reg = 1;
4320 sav_ofs = 0;
4321 sav_scale = 4;
4322 size = UNITS_PER_WORD;
4323 rsize = 1;
4325 else if (FLOAT_TYPE_P (type) && TARGET_HARD_FLOAT && TARGET_FPRS)
4327 /* FP args go in FP registers, if present. */
4328 indirect_p = 0;
4329 reg = fpr;
4330 n_reg = 1;
4331 sav_ofs = 8*4;
4332 sav_scale = 8;
4334 else
4336 /* Otherwise into GP registers. */
4337 indirect_p = 0;
4338 reg = gpr;
4339 n_reg = rsize;
4340 sav_ofs = 0;
4341 sav_scale = 4;
4344 /* Pull the value out of the saved registers ... */
4346 lab_false = gen_label_rtx ();
4347 lab_over = gen_label_rtx ();
4348 addr_rtx = gen_reg_rtx (Pmode);
4350 /* AltiVec vectors never go in registers. */
4351 if (!TARGET_ALTIVEC || TREE_CODE (type) != VECTOR_TYPE)
4353 TREE_THIS_VOLATILE (reg) = 1;
4354 emit_cmp_and_jump_insns
4355 (expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
4356 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
4357 lab_false);
4359 /* Long long is aligned in the registers. */
4360 if (n_reg > 1)
4362 u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
4363 build_int_2 (n_reg - 1, 0));
4364 u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
4365 u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
4366 TREE_SIDE_EFFECTS (u) = 1;
4367 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
4370 if (sav_ofs)
4371 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
4372 else
4373 t = sav;
4375 u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
4376 build_int_2 (n_reg, 0));
4377 TREE_SIDE_EFFECTS (u) = 1;
4379 u = build1 (CONVERT_EXPR, integer_type_node, u);
4380 TREE_SIDE_EFFECTS (u) = 1;
4382 u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
4383 TREE_SIDE_EFFECTS (u) = 1;
4385 t = build (PLUS_EXPR, ptr_type_node, t, u);
4386 TREE_SIDE_EFFECTS (t) = 1;
4388 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
4389 if (r != addr_rtx)
4390 emit_move_insn (addr_rtx, r);
4392 emit_jump_insn (gen_jump (lab_over));
4393 emit_barrier ();
4396 emit_label (lab_false);
4398 /* ... otherwise out of the overflow area. */
4400 /* Make sure we don't find reg 7 for the next int arg.
4402 All AltiVec vectors go in the overflow area. So in the AltiVec
4403 case we need to get the vectors from the overflow area, but
4404 remember where the GPRs and FPRs are. */
4405 if (n_reg > 1 && (TREE_CODE (type) != VECTOR_TYPE
4406 || !TARGET_ALTIVEC))
4408 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
4409 TREE_SIDE_EFFECTS (t) = 1;
4410 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4413 /* Care for on-stack alignment if needed. */
4414 if (rsize <= 1)
4415 t = ovf;
4416 else
4418 int align;
4420 /* AltiVec vectors are 16 byte aligned. */
4421 if (TARGET_ALTIVEC && TREE_CODE (type) == VECTOR_TYPE)
4422 align = 15;
4423 else
4424 align = 7;
4426 t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
4427 t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
4429 t = save_expr (t);
4431 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
4432 if (r != addr_rtx)
4433 emit_move_insn (addr_rtx, r);
4435 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
4436 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
4437 TREE_SIDE_EFFECTS (t) = 1;
4438 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4440 emit_label (lab_over);
4442 if (indirect_p)
4444 r = gen_rtx_MEM (Pmode, addr_rtx);
4445 set_mem_alias_set (r, get_varargs_alias_set ());
4446 emit_move_insn (addr_rtx, r);
4449 return addr_rtx;
/* Builtins.  */

/* Register a machine-dependent builtin with the compiler, but only
   when the required target flag bits (MASK) are enabled; NAME, TYPE
   and CODE are forwarded to builtin_function, and the builtin is
   classified as BUILT_IN_MD.  */
#define def_builtin(MASK, NAME, TYPE, CODE)			\
  do {								\
    if ((MASK) & target_flags)					\
      builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,	\
			NULL, NULL_TREE);			\
  } while (0)
/* Simple ternary operations: VECd = foo (VECa, VECb, VECc).  */

/* Table of three-operand AltiVec builtins.  Each entry maps a builtin
   name to the insn pattern that implements it and the enum value used
   to dispatch it; all are gated on MASK_ALTIVEC.  */
static const struct builtin_description bdesc_3arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
};
/* DST operations: void foo (void *, const int, const char).  */

/* Table of AltiVec data-stream touch builtins (dst/dstt/dstst/dststt);
   these are void-returning cache hint operations.  */
static const struct builtin_description bdesc_dst[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
};
/* Simple binary operations: VECc = foo (VECa, VECb).  */

/* Table of two-operand vector builtins: AltiVec entries first (gated
   on MASK_ALTIVEC), then SPE entries (mask 0 -- SPE availability is
   decided elsewhere).  NOTE: the comments below mark placeholder
   entries whose positions are significant; do not reorder them.  */
static struct builtin_description bdesc_2arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
  { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
  { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
  { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
  { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
  { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
  { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
  { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
  { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
  { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
  { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
  { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
  { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
  { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
  { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
  { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
  { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
  { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
  { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
  { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
  { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
  { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
  { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
  { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
  { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
  { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
  { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
  { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },

  /* Place holder, leave as first spe builtin.  */
  { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
  { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
  { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
  { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
  { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
  { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
  { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
  { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
  { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
  { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
  { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
  { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
  { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
  { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
  { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
  { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
  { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
  { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
  { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
  { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
  { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
  { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
  { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
  { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
  { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
  { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
  { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
  { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
  { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
  { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
  { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
  { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
  { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
  { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
  { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
  { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
  { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
  { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
  { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
  { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
  { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
  { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
  { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
  { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
  { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
  { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
  { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
  { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
  { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
  { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
  { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
  { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
  { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
  { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
  { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
  { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
  { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
  { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
  { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
  { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
  { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
  { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
  { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
  { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
  { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
  { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
  { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
  { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
  { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
  { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
  { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
  { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
  { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
  { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
  { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
  { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
  { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
  { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
  { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
  { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
  { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
  { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
  { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
  { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
  { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
  { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
  { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
  { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
  { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
  { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
  { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
  { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
  { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
  { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
  { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
  { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
  { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
  { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
  { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
  { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
  { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
  { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
  { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
  { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
  { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
  { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
  { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
  { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
  { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },

  /* SPE binary operations expecting a 5-bit unsigned literal.  */
  { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },

  { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
  { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
  { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
  { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
  { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
  { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
  { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
  { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
  { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
  { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
  { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
  { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
  { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
  { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
  { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
  { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
  { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
  { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
  { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
  { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
  { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
  { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
  { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
  { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
  { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
  { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },

  /* Place-holder.  Leave as last binary SPE builtin.  */
  { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
};
/* AltiVec predicates.  */

/* Descriptor for an AltiVec predicate builtin.  The predicate insn
   patterns are shared per vector mode, so OPCODE additionally carries
   the specific "vcmp*." assembler mnemonic to emit; MASK gates the
   builtin on target_flags and CODE is its dispatch enum.  */
struct builtin_description_predicates
{
  const unsigned int mask;
  const enum insn_code icode;
  const char *opcode;
  const char *const name;
  const enum rs6000_builtins code;
};
/* Table of AltiVec vector-compare predicate builtins; each entry pairs
   a mode-generic predicate pattern with the exact recording-form
   compare mnemonic to use.  */
static const struct builtin_description_predicates bdesc_altivec_preds[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
};
/* SPE predicates.  */

/* Table of SPE compare-predicate builtins.  The first and last entries
   are used as range markers by the init/expand code; keep them in
   place.  */
static struct builtin_description bdesc_spe_predicates[] =
{
  /* Place-holder.  Leave as first.  */
  { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
  { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
  { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
  { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
  { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
  { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
  { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
  { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
  { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
  { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
  /* Place-holder.  Leave as last.  */
  { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
};
4809 /* SPE evsel predicates. */
4810 static struct builtin_description bdesc_spe_evsel[] =
4812 /* Place-holder. Leave as first. */
4813 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
4814 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
4815 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
4816 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
4817 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
4818 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
4819 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
4820 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
4821 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
4822 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
4823 /* Place-holder. Leave as last. */
4824 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
4827 /* ABS* operations. */
4829 static const struct builtin_description bdesc_abs[] =
4831 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
4832 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
4833 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
4834 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
4835 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
4836 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
4837 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
4840 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
4841 foo (VECa). */
4843 static struct builtin_description bdesc_1arg[] =
4845 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
4846 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
4847 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
4848 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
4849 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
4850 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
4851 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
4852 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
4853 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
4854 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
4855 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
4856 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
4857 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
4858 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
4859 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
4860 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
4861 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
4863 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
4864 end with SPE_BUILTIN_EVSUBFUSIAAW. */
4865 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
4866 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
4867 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
4868 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
4869 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
4870 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
4871 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
4872 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
4873 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
4874 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
4875 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
4876 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
4877 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
4878 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
4879 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
4880 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
4881 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
4882 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
4883 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
4884 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
4885 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
4886 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
4887 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
4888 { 0, CODE_FOR_spe_evneg, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
4889 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
4890 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
4891 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
4892 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
4893 { 0, CODE_FOR_spe_evsplatfi, "__builtin_spe_evsplatfi", SPE_BUILTIN_EVSPLATFI },
4894 { 0, CODE_FOR_spe_evsplati, "__builtin_spe_evsplati", SPE_BUILTIN_EVSPLATI },
4896 /* Place-holder. Leave as last unary SPE builtin. */
4897 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
4900 static rtx
4901 rs6000_expand_unop_builtin (icode, arglist, target)
4902 enum insn_code icode;
4903 tree arglist;
4904 rtx target;
4906 rtx pat;
4907 tree arg0 = TREE_VALUE (arglist);
4908 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4909 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4910 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4912 if (icode == CODE_FOR_nothing)
4913 /* Builtin not supported on this processor. */
4914 return 0;
4916 /* If we got invalid arguments bail out before generating bad rtl. */
4917 if (arg0 == error_mark_node)
4918 return const0_rtx;
4920 if (icode == CODE_FOR_altivec_vspltisb
4921 || icode == CODE_FOR_altivec_vspltish
4922 || icode == CODE_FOR_altivec_vspltisw
4923 || icode == CODE_FOR_spe_evsplatfi
4924 || icode == CODE_FOR_spe_evsplati)
4926 /* Only allow 5-bit *signed* literals. */
4927 if (GET_CODE (op0) != CONST_INT
4928 || INTVAL (op0) > 0x1f
4929 || INTVAL (op0) < -0x1f)
4931 error ("argument 1 must be a 5-bit signed literal");
4932 return const0_rtx;
4936 if (target == 0
4937 || GET_MODE (target) != tmode
4938 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4939 target = gen_reg_rtx (tmode);
4941 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4942 op0 = copy_to_mode_reg (mode0, op0);
4944 pat = GEN_FCN (icode) (target, op0);
4945 if (! pat)
4946 return 0;
4947 emit_insn (pat);
4949 return target;
4952 static rtx
4953 altivec_expand_abs_builtin (icode, arglist, target)
4954 enum insn_code icode;
4955 tree arglist;
4956 rtx target;
4958 rtx pat, scratch1, scratch2;
4959 tree arg0 = TREE_VALUE (arglist);
4960 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4961 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4962 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4964 /* If we have invalid arguments, bail out before generating bad rtl. */
4965 if (arg0 == error_mark_node)
4966 return const0_rtx;
4968 if (target == 0
4969 || GET_MODE (target) != tmode
4970 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4971 target = gen_reg_rtx (tmode);
4973 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4974 op0 = copy_to_mode_reg (mode0, op0);
4976 scratch1 = gen_reg_rtx (mode0);
4977 scratch2 = gen_reg_rtx (mode0);
4979 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
4980 if (! pat)
4981 return 0;
4982 emit_insn (pat);
4984 return target;
4987 static rtx
4988 rs6000_expand_binop_builtin (icode, arglist, target)
4989 enum insn_code icode;
4990 tree arglist;
4991 rtx target;
4993 rtx pat;
4994 tree arg0 = TREE_VALUE (arglist);
4995 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4996 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4997 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4998 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4999 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5000 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5002 if (icode == CODE_FOR_nothing)
5003 /* Builtin not supported on this processor. */
5004 return 0;
5006 /* If we got invalid arguments bail out before generating bad rtl. */
5007 if (arg0 == error_mark_node || arg1 == error_mark_node)
5008 return const0_rtx;
5010 if (icode == CODE_FOR_altivec_vcfux
5011 || icode == CODE_FOR_altivec_vcfsx
5012 || icode == CODE_FOR_altivec_vctsxs
5013 || icode == CODE_FOR_altivec_vctuxs
5014 || icode == CODE_FOR_altivec_vspltb
5015 || icode == CODE_FOR_altivec_vsplth
5016 || icode == CODE_FOR_altivec_vspltw
5017 || icode == CODE_FOR_spe_evaddiw
5018 || icode == CODE_FOR_spe_evldd
5019 || icode == CODE_FOR_spe_evldh
5020 || icode == CODE_FOR_spe_evldw
5021 || icode == CODE_FOR_spe_evlhhesplat
5022 || icode == CODE_FOR_spe_evlhhossplat
5023 || icode == CODE_FOR_spe_evlhhousplat
5024 || icode == CODE_FOR_spe_evlwhe
5025 || icode == CODE_FOR_spe_evlwhos
5026 || icode == CODE_FOR_spe_evlwhou
5027 || icode == CODE_FOR_spe_evlwhsplat
5028 || icode == CODE_FOR_spe_evlwwsplat
5029 || icode == CODE_FOR_spe_evrlwi
5030 || icode == CODE_FOR_spe_evslwi
5031 || icode == CODE_FOR_spe_evsrwis
5032 || icode == CODE_FOR_spe_evsubifw
5033 || icode == CODE_FOR_spe_evsrwiu)
5035 /* Only allow 5-bit unsigned literals. */
5036 if (TREE_CODE (arg1) != INTEGER_CST
5037 || TREE_INT_CST_LOW (arg1) & ~0x1f)
5039 error ("argument 2 must be a 5-bit unsigned literal");
5040 return const0_rtx;
5044 if (target == 0
5045 || GET_MODE (target) != tmode
5046 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5047 target = gen_reg_rtx (tmode);
5049 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5050 op0 = copy_to_mode_reg (mode0, op0);
5051 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5052 op1 = copy_to_mode_reg (mode1, op1);
5054 pat = GEN_FCN (icode) (target, op0, op1);
5055 if (! pat)
5056 return 0;
5057 emit_insn (pat);
5059 return target;
5062 static rtx
5063 altivec_expand_predicate_builtin (icode, opcode, arglist, target)
5064 enum insn_code icode;
5065 const char *opcode;
5066 tree arglist;
5067 rtx target;
5069 rtx pat, scratch;
5070 tree cr6_form = TREE_VALUE (arglist);
5071 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
5072 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5073 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5074 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5075 enum machine_mode tmode = SImode;
5076 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5077 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5078 int cr6_form_int;
5080 if (TREE_CODE (cr6_form) != INTEGER_CST)
5082 error ("argument 1 of __builtin_altivec_predicate must be a constant");
5083 return const0_rtx;
5085 else
5086 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
5088 if (mode0 != mode1)
5089 abort ();
5091 /* If we have invalid arguments, bail out before generating bad rtl. */
5092 if (arg0 == error_mark_node || arg1 == error_mark_node)
5093 return const0_rtx;
5095 if (target == 0
5096 || GET_MODE (target) != tmode
5097 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5098 target = gen_reg_rtx (tmode);
5100 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5101 op0 = copy_to_mode_reg (mode0, op0);
5102 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5103 op1 = copy_to_mode_reg (mode1, op1);
5105 scratch = gen_reg_rtx (mode0);
5107 pat = GEN_FCN (icode) (scratch, op0, op1,
5108 gen_rtx (SYMBOL_REF, Pmode, opcode));
5109 if (! pat)
5110 return 0;
5111 emit_insn (pat);
5113 /* The vec_any* and vec_all* predicates use the same opcodes for two
5114 different operations, but the bits in CR6 will be different
5115 depending on what information we want. So we have to play tricks
5116 with CR6 to get the right bits out.
5118 If you think this is disgusting, look at the specs for the
5119 AltiVec predicates. */
5121 switch (cr6_form_int)
5123 case 0:
5124 emit_insn (gen_cr6_test_for_zero (target));
5125 break;
5126 case 1:
5127 emit_insn (gen_cr6_test_for_zero_reverse (target));
5128 break;
5129 case 2:
5130 emit_insn (gen_cr6_test_for_lt (target));
5131 break;
5132 case 3:
5133 emit_insn (gen_cr6_test_for_lt_reverse (target));
5134 break;
5135 default:
5136 error ("argument 1 of __builtin_altivec_predicate is out of range");
5137 break;
5140 return target;
5143 static rtx
5144 altivec_expand_stv_builtin (icode, arglist)
5145 enum insn_code icode;
5146 tree arglist;
5148 tree arg0 = TREE_VALUE (arglist);
5149 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5150 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5151 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5152 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5153 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5154 rtx pat;
5155 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
5156 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
5157 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
5159 /* Invalid arguments. Bail before doing anything stoopid! */
5160 if (arg0 == error_mark_node
5161 || arg1 == error_mark_node
5162 || arg2 == error_mark_node)
5163 return const0_rtx;
5165 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
5166 op0 = copy_to_mode_reg (mode2, op0);
5167 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
5168 op1 = copy_to_mode_reg (mode0, op1);
5169 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
5170 op2 = copy_to_mode_reg (mode1, op2);
5172 pat = GEN_FCN (icode) (op1, op2, op0);
5173 if (pat)
5174 emit_insn (pat);
5175 return NULL_RTX;
5178 static rtx
5179 rs6000_expand_ternop_builtin (icode, arglist, target)
5180 enum insn_code icode;
5181 tree arglist;
5182 rtx target;
5184 rtx pat;
5185 tree arg0 = TREE_VALUE (arglist);
5186 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5187 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5188 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5189 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5190 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5191 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5192 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5193 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5194 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
5196 if (icode == CODE_FOR_nothing)
5197 /* Builtin not supported on this processor. */
5198 return 0;
5200 /* If we got invalid arguments bail out before generating bad rtl. */
5201 if (arg0 == error_mark_node
5202 || arg1 == error_mark_node
5203 || arg2 == error_mark_node)
5204 return const0_rtx;
5206 if (icode == CODE_FOR_altivec_vsldoi_4sf
5207 || icode == CODE_FOR_altivec_vsldoi_4si
5208 || icode == CODE_FOR_altivec_vsldoi_8hi
5209 || icode == CODE_FOR_altivec_vsldoi_16qi)
5211 /* Only allow 4-bit unsigned literals. */
5212 if (TREE_CODE (arg2) != INTEGER_CST
5213 || TREE_INT_CST_LOW (arg2) & ~0xf)
5215 error ("argument 3 must be a 4-bit unsigned literal");
5216 return const0_rtx;
5220 if (target == 0
5221 || GET_MODE (target) != tmode
5222 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5223 target = gen_reg_rtx (tmode);
5225 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5226 op0 = copy_to_mode_reg (mode0, op0);
5227 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5228 op1 = copy_to_mode_reg (mode1, op1);
5229 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
5230 op2 = copy_to_mode_reg (mode2, op2);
5232 pat = GEN_FCN (icode) (target, op0, op1, op2);
5233 if (! pat)
5234 return 0;
5235 emit_insn (pat);
5237 return target;
5240 /* Expand the lvx builtins. */
5241 static rtx
5242 altivec_expand_ld_builtin (exp, target, expandedp)
5243 tree exp;
5244 rtx target;
5245 bool *expandedp;
5247 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5248 tree arglist = TREE_OPERAND (exp, 1);
5249 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5250 tree arg0;
5251 enum machine_mode tmode, mode0;
5252 rtx pat, op0;
5253 enum insn_code icode;
5255 switch (fcode)
5257 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
5258 icode = CODE_FOR_altivec_lvx_16qi;
5259 break;
5260 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
5261 icode = CODE_FOR_altivec_lvx_8hi;
5262 break;
5263 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
5264 icode = CODE_FOR_altivec_lvx_4si;
5265 break;
5266 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
5267 icode = CODE_FOR_altivec_lvx_4sf;
5268 break;
5269 default:
5270 *expandedp = false;
5271 return NULL_RTX;
5274 *expandedp = true;
5276 arg0 = TREE_VALUE (arglist);
5277 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5278 tmode = insn_data[icode].operand[0].mode;
5279 mode0 = insn_data[icode].operand[1].mode;
5281 if (target == 0
5282 || GET_MODE (target) != tmode
5283 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5284 target = gen_reg_rtx (tmode);
5286 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5287 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
5289 pat = GEN_FCN (icode) (target, op0);
5290 if (! pat)
5291 return 0;
5292 emit_insn (pat);
5293 return target;
5296 /* Expand the stvx builtins. */
5297 static rtx
5298 altivec_expand_st_builtin (exp, target, expandedp)
5299 tree exp;
5300 rtx target ATTRIBUTE_UNUSED;
5301 bool *expandedp;
5303 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5304 tree arglist = TREE_OPERAND (exp, 1);
5305 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5306 tree arg0, arg1;
5307 enum machine_mode mode0, mode1;
5308 rtx pat, op0, op1;
5309 enum insn_code icode;
5311 switch (fcode)
5313 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
5314 icode = CODE_FOR_altivec_stvx_16qi;
5315 break;
5316 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
5317 icode = CODE_FOR_altivec_stvx_8hi;
5318 break;
5319 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
5320 icode = CODE_FOR_altivec_stvx_4si;
5321 break;
5322 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
5323 icode = CODE_FOR_altivec_stvx_4sf;
5324 break;
5325 default:
5326 *expandedp = false;
5327 return NULL_RTX;
5330 arg0 = TREE_VALUE (arglist);
5331 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5332 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5333 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5334 mode0 = insn_data[icode].operand[0].mode;
5335 mode1 = insn_data[icode].operand[1].mode;
5337 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5338 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
5339 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
5340 op1 = copy_to_mode_reg (mode1, op1);
5342 pat = GEN_FCN (icode) (op0, op1);
5343 if (pat)
5344 emit_insn (pat);
5346 *expandedp = true;
5347 return NULL_RTX;
5350 /* Expand the dst builtins. */
5351 static rtx
5352 altivec_expand_dst_builtin (exp, target, expandedp)
5353 tree exp;
5354 rtx target ATTRIBUTE_UNUSED;
5355 bool *expandedp;
5357 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5358 tree arglist = TREE_OPERAND (exp, 1);
5359 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5360 tree arg0, arg1, arg2;
5361 enum machine_mode mode0, mode1, mode2;
5362 rtx pat, op0, op1, op2;
5363 struct builtin_description *d;
5364 size_t i;
5366 *expandedp = false;
5368 /* Handle DST variants. */
5369 d = (struct builtin_description *) bdesc_dst;
5370 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
5371 if (d->code == fcode)
5373 arg0 = TREE_VALUE (arglist);
5374 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5375 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5376 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5377 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5378 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5379 mode0 = insn_data[d->icode].operand[0].mode;
5380 mode1 = insn_data[d->icode].operand[1].mode;
5381 mode2 = insn_data[d->icode].operand[2].mode;
5383 /* Invalid arguments, bail out before generating bad rtl. */
5384 if (arg0 == error_mark_node
5385 || arg1 == error_mark_node
5386 || arg2 == error_mark_node)
5387 return const0_rtx;
5389 if (TREE_CODE (arg2) != INTEGER_CST
5390 || TREE_INT_CST_LOW (arg2) & ~0x3)
5392 error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
5393 return const0_rtx;
5396 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
5397 op0 = copy_to_mode_reg (mode0, op0);
5398 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
5399 op1 = copy_to_mode_reg (mode1, op1);
5401 pat = GEN_FCN (d->icode) (op0, op1, op2);
5402 if (pat != 0)
5403 emit_insn (pat);
5405 *expandedp = true;
5406 return NULL_RTX;
5409 return NULL_RTX;
5412 /* Expand the builtin in EXP and store the result in TARGET. Store
5413 true in *EXPANDEDP if we found a builtin to expand. */
5414 static rtx
5415 altivec_expand_builtin (exp, target, expandedp)
5416 tree exp;
5417 rtx target;
5418 bool *expandedp;
5420 struct builtin_description *d;
5421 struct builtin_description_predicates *dp;
5422 size_t i;
5423 enum insn_code icode;
5424 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5425 tree arglist = TREE_OPERAND (exp, 1);
5426 tree arg0;
5427 rtx op0, pat;
5428 enum machine_mode tmode, mode0;
5429 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5431 target = altivec_expand_ld_builtin (exp, target, expandedp);
5432 if (*expandedp)
5433 return target;
5435 target = altivec_expand_st_builtin (exp, target, expandedp);
5436 if (*expandedp)
5437 return target;
5439 target = altivec_expand_dst_builtin (exp, target, expandedp);
5440 if (*expandedp)
5441 return target;
5443 *expandedp = true;
5445 switch (fcode)
5447 case ALTIVEC_BUILTIN_STVX:
5448 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
5449 case ALTIVEC_BUILTIN_STVEBX:
5450 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
5451 case ALTIVEC_BUILTIN_STVEHX:
5452 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
5453 case ALTIVEC_BUILTIN_STVEWX:
5454 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
5455 case ALTIVEC_BUILTIN_STVXL:
5456 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
5458 case ALTIVEC_BUILTIN_MFVSCR:
5459 icode = CODE_FOR_altivec_mfvscr;
5460 tmode = insn_data[icode].operand[0].mode;
5462 if (target == 0
5463 || GET_MODE (target) != tmode
5464 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5465 target = gen_reg_rtx (tmode);
5467 pat = GEN_FCN (icode) (target);
5468 if (! pat)
5469 return 0;
5470 emit_insn (pat);
5471 return target;
5473 case ALTIVEC_BUILTIN_MTVSCR:
5474 icode = CODE_FOR_altivec_mtvscr;
5475 arg0 = TREE_VALUE (arglist);
5476 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5477 mode0 = insn_data[icode].operand[0].mode;
5479 /* If we got invalid arguments bail out before generating bad rtl. */
5480 if (arg0 == error_mark_node)
5481 return const0_rtx;
5483 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5484 op0 = copy_to_mode_reg (mode0, op0);
5486 pat = GEN_FCN (icode) (op0);
5487 if (pat)
5488 emit_insn (pat);
5489 return NULL_RTX;
5491 case ALTIVEC_BUILTIN_DSSALL:
5492 emit_insn (gen_altivec_dssall ());
5493 return NULL_RTX;
5495 case ALTIVEC_BUILTIN_DSS:
5496 icode = CODE_FOR_altivec_dss;
5497 arg0 = TREE_VALUE (arglist);
5498 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5499 mode0 = insn_data[icode].operand[0].mode;
5501 /* If we got invalid arguments bail out before generating bad rtl. */
5502 if (arg0 == error_mark_node)
5503 return const0_rtx;
5505 if (TREE_CODE (arg0) != INTEGER_CST
5506 || TREE_INT_CST_LOW (arg0) & ~0x3)
5508 error ("argument to dss must be a 2-bit unsigned literal");
5509 return const0_rtx;
5512 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5513 op0 = copy_to_mode_reg (mode0, op0);
5515 emit_insn (gen_altivec_dss (op0));
5516 return NULL_RTX;
5519 /* Expand abs* operations. */
5520 d = (struct builtin_description *) bdesc_abs;
5521 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
5522 if (d->code == fcode)
5523 return altivec_expand_abs_builtin (d->icode, arglist, target);
5525 /* Expand the AltiVec predicates. */
5526 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
5527 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
5528 if (dp->code == fcode)
5529 return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
5531 /* LV* are funky. We initialized them differently. */
5532 switch (fcode)
5534 case ALTIVEC_BUILTIN_LVSL:
5535 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsl,
5536 arglist, target);
5537 case ALTIVEC_BUILTIN_LVSR:
5538 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsr,
5539 arglist, target);
5540 case ALTIVEC_BUILTIN_LVEBX:
5541 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvebx,
5542 arglist, target);
5543 case ALTIVEC_BUILTIN_LVEHX:
5544 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvehx,
5545 arglist, target);
5546 case ALTIVEC_BUILTIN_LVEWX:
5547 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvewx,
5548 arglist, target);
5549 case ALTIVEC_BUILTIN_LVXL:
5550 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvxl,
5551 arglist, target);
5552 case ALTIVEC_BUILTIN_LVX:
5553 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvx,
5554 arglist, target);
5555 default:
5556 break;
5557 /* Fall through. */
5560 *expandedp = false;
5561 return NULL_RTX;
5564 /* Binops that need to be initialized manually, but can be expanded
5565 automagically by rs6000_expand_binop_builtin. */
5566 static struct builtin_description bdesc_2arg_spe[] =
5568 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
5569 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
5570 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
5571 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
5572 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
5573 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
5574 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
5575 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
5576 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
5577 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
5578 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
5579 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
5580 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
5581 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
5582 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
5583 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
5584 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
5585 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
5586 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
5587 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
5588 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
5589 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
5592 /* Expand the builtin in EXP and store the result in TARGET. Store
5593 true in *EXPANDEDP if we found a builtin to expand.
5595 This expands the SPE builtins that are not simple unary and binary
5596 operations. */
5597 static rtx
5598 spe_expand_builtin (exp, target, expandedp)
5599 tree exp;
5600 rtx target;
5601 bool *expandedp;
5603 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5604 tree arglist = TREE_OPERAND (exp, 1);
5605 tree arg1, arg0;
5606 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5607 enum insn_code icode;
5608 enum machine_mode tmode, mode0;
5609 rtx pat, op0;
5610 struct builtin_description *d;
5611 size_t i;
5613 *expandedp = true;
5615 /* Syntax check for a 5-bit unsigned immediate. */
5616 switch (fcode)
5618 case SPE_BUILTIN_EVSTDD:
5619 case SPE_BUILTIN_EVSTDH:
5620 case SPE_BUILTIN_EVSTDW:
5621 case SPE_BUILTIN_EVSTWHE:
5622 case SPE_BUILTIN_EVSTWHO:
5623 case SPE_BUILTIN_EVSTWWE:
5624 case SPE_BUILTIN_EVSTWWO:
5625 arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5626 if (TREE_CODE (arg1) != INTEGER_CST
5627 || TREE_INT_CST_LOW (arg1) & ~0x1f)
5629 error ("argument 2 must be a 5-bit unsigned literal");
5630 return const0_rtx;
5632 break;
5633 default:
5634 break;
5637 d = (struct builtin_description *) bdesc_2arg_spe;
5638 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
5639 if (d->code == fcode)
5640 return rs6000_expand_binop_builtin (d->icode, arglist, target);
5642 d = (struct builtin_description *) bdesc_spe_predicates;
5643 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
5644 if (d->code == fcode)
5645 return spe_expand_predicate_builtin (d->icode, arglist, target);
5647 d = (struct builtin_description *) bdesc_spe_evsel;
5648 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
5649 if (d->code == fcode)
5650 return spe_expand_evsel_builtin (d->icode, arglist, target);
5652 switch (fcode)
5654 case SPE_BUILTIN_EVSTDDX:
5655 return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
5656 case SPE_BUILTIN_EVSTDHX:
5657 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
5658 case SPE_BUILTIN_EVSTDWX:
5659 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
5660 case SPE_BUILTIN_EVSTWHEX:
5661 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
5662 case SPE_BUILTIN_EVSTWHOX:
5663 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
5664 case SPE_BUILTIN_EVSTWWEX:
5665 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
5666 case SPE_BUILTIN_EVSTWWOX:
5667 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
5668 case SPE_BUILTIN_EVSTDD:
5669 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
5670 case SPE_BUILTIN_EVSTDH:
5671 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
5672 case SPE_BUILTIN_EVSTDW:
5673 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
5674 case SPE_BUILTIN_EVSTWHE:
5675 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
5676 case SPE_BUILTIN_EVSTWHO:
5677 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
5678 case SPE_BUILTIN_EVSTWWE:
5679 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
5680 case SPE_BUILTIN_EVSTWWO:
5681 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
5682 case SPE_BUILTIN_MFSPEFSCR:
5683 icode = CODE_FOR_spe_mfspefscr;
5684 tmode = insn_data[icode].operand[0].mode;
5686 if (target == 0
5687 || GET_MODE (target) != tmode
5688 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5689 target = gen_reg_rtx (tmode);
5691 pat = GEN_FCN (icode) (target);
5692 if (! pat)
5693 return 0;
5694 emit_insn (pat);
5695 return target;
5696 case SPE_BUILTIN_MTSPEFSCR:
5697 icode = CODE_FOR_spe_mtspefscr;
5698 arg0 = TREE_VALUE (arglist);
5699 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5700 mode0 = insn_data[icode].operand[0].mode;
5702 if (arg0 == error_mark_node)
5703 return const0_rtx;
5705 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5706 op0 = copy_to_mode_reg (mode0, op0);
5708 pat = GEN_FCN (icode) (op0);
5709 if (pat)
5710 emit_insn (pat);
5711 return NULL_RTX;
5712 default:
5713 break;
5716 *expandedp = false;
5717 return NULL_RTX;
5720 static rtx
5721 spe_expand_predicate_builtin (icode, arglist, target)
5722 enum insn_code icode;
5723 tree arglist;
5724 rtx target;
5726 rtx pat, scratch, tmp;
5727 tree form = TREE_VALUE (arglist);
5728 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
5729 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5730 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5731 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5732 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5733 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5734 int form_int;
5735 enum rtx_code code;
5737 if (TREE_CODE (form) != INTEGER_CST)
5739 error ("argument 1 of __builtin_spe_predicate must be a constant");
5740 return const0_rtx;
5742 else
5743 form_int = TREE_INT_CST_LOW (form);
5745 if (mode0 != mode1)
5746 abort ();
5748 if (arg0 == error_mark_node || arg1 == error_mark_node)
5749 return const0_rtx;
5751 if (target == 0
5752 || GET_MODE (target) != SImode
5753 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
5754 target = gen_reg_rtx (SImode);
5756 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5757 op0 = copy_to_mode_reg (mode0, op0);
5758 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5759 op1 = copy_to_mode_reg (mode1, op1);
5761 scratch = gen_reg_rtx (CCmode);
5763 pat = GEN_FCN (icode) (scratch, op0, op1);
5764 if (! pat)
5765 return const0_rtx;
5766 emit_insn (pat);
5768 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
5769 _lower_. We use one compare, but look in different bits of the
5770 CR for each variant.
5772 There are 2 elements in each SPE simd type (upper/lower). The CR
5773 bits are set as follows:
5775 BIT0 | BIT 1 | BIT 2 | BIT 3
5776 U | L | (U | L) | (U & L)
5778 So, for an "all" relationship, BIT 3 would be set.
5779 For an "any" relationship, BIT 2 would be set. Etc.
5781 Following traditional nomenclature, these bits map to:
5783 BIT0 | BIT 1 | BIT 2 | BIT 3
5784 LT | GT | EQ | OV
5786 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
5789 switch (form_int)
5791 /* All variant. OV bit. */
5792 case 0:
5793 /* We need to get to the OV bit, which is the ORDERED bit. We
5794 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
5795 that's ugly and will trigger a validate_condition_mode abort.
5796 So let's just use another pattern. */
5797 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
5798 return target;
5799 /* Any variant. EQ bit. */
5800 case 1:
5801 code = EQ;
5802 break;
5803 /* Upper variant. LT bit. */
5804 case 2:
5805 code = LT;
5806 break;
5807 /* Lower variant. GT bit. */
5808 case 3:
5809 code = GT;
5810 break;
5811 default:
5812 error ("argument 1 of __builtin_spe_predicate is out of range");
5813 return const0_rtx;
5816 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
5817 emit_move_insn (target, tmp);
5819 return target;
5822 /* The evsel builtins look like this:
5824 e = __builtin_spe_evsel_OP (a, b, c, d);
5826 and work like this:
5828 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
5829 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
/* Expand one evsel builtin.  ICODE is the comparison insn applied to
   the first two arguments; the compare result then drives an evsel
   between the last two.  TARGET is a suggested destination; returns
   the rtx holding the result, or const0_rtx on error.  */
5832 static rtx
5833 spe_expand_evsel_builtin (icode, arglist, target)
5834 enum insn_code icode;
5835 tree arglist;
5836 rtx target;
5838 rtx pat, scratch;
5839 tree arg0 = TREE_VALUE (arglist);
5840 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5841 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5842 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
5843 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5844 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5845 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5846 rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
5847 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5848 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
/* Both compare operands share one mode; mode0 is also used for the
   selected values below.  */
5850 if (mode0 != mode1)
5851 abort ();
5853 if (arg0 == error_mark_node || arg1 == error_mark_node
5854 || arg2 == error_mark_node || arg3 == error_mark_node)
5855 return const0_rtx;
5857 if (target == 0
5858 || GET_MODE (target) != mode0
5859 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
5860 target = gen_reg_rtx (mode0);
/* NOTE(review): op1/op2/op3 are all validated against operand[1]'s
   predicate even though op2/op3 feed the evsel insn rather than
   ICODE itself — presumably safe because the modes are asserted
   equal above; confirm against the insn patterns.  */
5862 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5863 op0 = copy_to_mode_reg (mode0, op0);
5864 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
5865 op1 = copy_to_mode_reg (mode0, op1);
5866 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
5867 op2 = copy_to_mode_reg (mode0, op2);
5868 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
5869 op3 = copy_to_mode_reg (mode0, op3);
5871 /* Generate the compare. */
5872 scratch = gen_reg_rtx (CCmode);
5873 pat = GEN_FCN (icode) (scratch, op0, op1);
5874 if (! pat)
5875 return const0_rtx;
5876 emit_insn (pat);
/* Select between op2 and op3 based on the compare; the _fs pattern
   is used for the float (V2SF) case.  */
5878 if (mode0 == V2SImode)
5879 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
5880 else
5881 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
5883 return target;
5886 /* Expand an expression EXP that calls a built-in function,
5887 with result going to TARGET if that's convenient
5888 (and in mode MODE if that's convenient).
5889 SUBTARGET may be used as the target for computing one of EXP's operands.
5890 IGNORE is nonzero if the value is to be ignored. */
5892 static rtx
5893 rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
5894 tree exp;
5895 rtx target;
5896 rtx subtarget ATTRIBUTE_UNUSED;
5897 enum machine_mode mode ATTRIBUTE_UNUSED;
5898 int ignore ATTRIBUTE_UNUSED;
5900 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5901 tree arglist = TREE_OPERAND (exp, 1);
5902 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5903 struct builtin_description *d;
5904 size_t i;
5905 rtx ret;
5906 bool success;
/* Try the target-specific expanders first; each reports via SUCCESS
   whether it recognized FCODE.  */
5908 if (TARGET_ALTIVEC)
5910 ret = altivec_expand_builtin (exp, target, &success);
5912 if (success)
5913 return ret;
5915 if (TARGET_SPE)
5917 ret = spe_expand_builtin (exp, target, &success);
5919 if (success)
5920 return ret;
/* Fall back to the tables of simple unary/binary/ternary builtins
   shared between AltiVec and SPE.  */
5923 if (TARGET_ALTIVEC || TARGET_SPE)
5925 /* Handle simple unary operations. */
5926 d = (struct builtin_description *) bdesc_1arg;
5927 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
5928 if (d->code == fcode)
5929 return rs6000_expand_unop_builtin (d->icode, arglist, target);
5931 /* Handle simple binary operations. */
5932 d = (struct builtin_description *) bdesc_2arg;
5933 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
5934 if (d->code == fcode)
5935 return rs6000_expand_binop_builtin (d->icode, arglist, target);
5937 /* Handle simple ternary operations. */
5938 d = (struct builtin_description *) bdesc_3arg;
5939 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
5940 if (d->code == fcode)
5941 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
/* An unrecognized FCODE here is a front-end/back-end mismatch.  The
   return after abort only silences the missing-return warning.  */
5944 abort ();
5945 return NULL_RTX;
5948 static void
5949 rs6000_init_builtins ()
5951 opaque_V2SI_type_node = copy_node (V2SI_type_node);
5952 opaque_V2SF_type_node = copy_node (V2SF_type_node);
5953 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
5955 if (TARGET_SPE)
5956 spe_init_builtins ();
5957 if (TARGET_ALTIVEC)
5958 altivec_init_builtins ();
5959 if (TARGET_ALTIVEC || TARGET_SPE)
5960 rs6000_common_init_builtins ();
5963 /* Search through a set of builtins and enable the mask bits.
5964 DESC is an array of builtins.
5965 SIZE is the total number of builtins.
5966 START is the builtin enum at which to start.
5967 END is the builtin enum at which to end. */
5968 static void
5969 enable_mask_for_builtins (desc, size, start, end)
5970 struct builtin_description *desc;
5971 int size;
5972 enum rs6000_builtins start, end;
5974 int i;
5976 for (i = 0; i < size; ++i)
5977 if (desc[i].code == start)
5978 break;
5980 if (i == size)
5981 return;
5983 for (; i < size; ++i)
5985 /* Flip all the bits on. */
5986 desc[i].mask = target_flags;
5987 if (desc[i].code == end)
5988 break;
/* Register the SPE builtins: build the function types, enable the
   mask bits for the shared tables, define the irregular load/store
   and SPEFSCR builtins, then the predicate and evsel builtins whose
   types depend on the underlying insn's operand mode.  */
5992 static void
5993 spe_init_builtins ()
5995 tree endlink = void_list_node;
5996 tree puint_type_node = build_pointer_type (unsigned_type_node);
5997 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
5998 struct builtin_description *d;
5999 size_t i;
6001 tree v2si_ftype_4_v2si
6002 = build_function_type
6003 (opaque_V2SI_type_node,
6004 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6005 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6006 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6007 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6008 endlink)))));
6010 tree v2sf_ftype_4_v2sf
6011 = build_function_type
6012 (opaque_V2SF_type_node,
6013 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6014 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6015 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6016 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6017 endlink)))));
6019 tree int_ftype_int_v2si_v2si
6020 = build_function_type
6021 (integer_type_node,
6022 tree_cons (NULL_TREE, integer_type_node,
6023 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6024 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6025 endlink))));
6027 tree int_ftype_int_v2sf_v2sf
6028 = build_function_type
6029 (integer_type_node,
6030 tree_cons (NULL_TREE, integer_type_node,
6031 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6032 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6033 endlink))));
6035 tree void_ftype_v2si_puint_int
6036 = build_function_type (void_type_node,
6037 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6038 tree_cons (NULL_TREE, puint_type_node,
6039 tree_cons (NULL_TREE,
6040 integer_type_node,
6041 endlink))));
6043 tree void_ftype_v2si_puint_char
6044 = build_function_type (void_type_node,
6045 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6046 tree_cons (NULL_TREE, puint_type_node,
6047 tree_cons (NULL_TREE,
6048 char_type_node,
6049 endlink))));
6051 tree void_ftype_v2si_pv2si_int
6052 = build_function_type (void_type_node,
6053 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6054 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6055 tree_cons (NULL_TREE,
6056 integer_type_node,
6057 endlink))));
6059 tree void_ftype_v2si_pv2si_char
6060 = build_function_type (void_type_node,
6061 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6062 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6063 tree_cons (NULL_TREE,
6064 char_type_node,
6065 endlink))));
6067 tree void_ftype_int
6068 = build_function_type (void_type_node,
6069 tree_cons (NULL_TREE, integer_type_node, endlink));
6071 tree int_ftype_void
6072 = build_function_type (integer_type_node, endlink);
6074 tree v2si_ftype_pv2si_int
6075 = build_function_type (opaque_V2SI_type_node,
6076 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6077 tree_cons (NULL_TREE, integer_type_node,
6078 endlink)));
6080 tree v2si_ftype_puint_int
6081 = build_function_type (opaque_V2SI_type_node,
6082 tree_cons (NULL_TREE, puint_type_node,
6083 tree_cons (NULL_TREE, integer_type_node,
6084 endlink)));
6086 tree v2si_ftype_pushort_int
6087 = build_function_type (opaque_V2SI_type_node,
6088 tree_cons (NULL_TREE, pushort_type_node,
6089 tree_cons (NULL_TREE, integer_type_node,
6090 endlink)));
6092 /* The initialization of the simple binary and unary builtins is
6093 done in rs6000_common_init_builtins, but we have to enable the
6094 mask bits here manually because we have run out of `target_flags'
6095 bits. We really need to redesign this mask business. */
6097 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
6098 ARRAY_SIZE (bdesc_2arg),
6099 SPE_BUILTIN_EVADDW,
6100 SPE_BUILTIN_EVXOR);
6101 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
6102 ARRAY_SIZE (bdesc_1arg),
6103 SPE_BUILTIN_EVABS,
6104 SPE_BUILTIN_EVSUBFUSIAAW);
6105 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
6106 ARRAY_SIZE (bdesc_spe_predicates),
6107 SPE_BUILTIN_EVCMPEQ,
6108 SPE_BUILTIN_EVFSTSTLT);
6109 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
6110 ARRAY_SIZE (bdesc_spe_evsel),
6111 SPE_BUILTIN_EVSEL_CMPGTS,
6112 SPE_BUILTIN_EVSEL_FSTSTEQ);
6114 /* Initialize irregular SPE builtins. */
6116 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
6117 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
6118 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
6119 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
6120 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
6121 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
6122 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
6123 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
6124 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
6125 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
6126 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
6127 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
6128 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
6129 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
6130 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
6131 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
6133 /* Loads. */
6134 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
6135 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
6136 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
6137 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
6138 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
6139 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
6140 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
6141 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
6142 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
6143 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
6144 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
6145 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
6146 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
6147 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
6148 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
6149 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
6150 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
6151 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
6152 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
6153 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
6154 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
6155 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
6157 /* Predicates. */
/* The predicate builtin's type is chosen from the mode of the
   compare insn's first input operand.  */
6158 d = (struct builtin_description *) bdesc_spe_predicates;
6159 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
6161 tree type;
6163 switch (insn_data[d->icode].operand[1].mode)
6165 case V2SImode:
6166 type = int_ftype_int_v2si_v2si;
6167 break;
6168 case V2SFmode:
6169 type = int_ftype_int_v2sf_v2sf;
6170 break;
6171 default:
6172 abort ();
6175 def_builtin (d->mask, d->name, type, d->code);
6178 /* Evsel predicates. */
6179 d = (struct builtin_description *) bdesc_spe_evsel;
6180 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
6182 tree type;
6184 switch (insn_data[d->icode].operand[1].mode)
6186 case V2SImode:
6187 type = v2si_ftype_4_v2si;
6188 break;
6189 case V2SFmode:
6190 type = v2sf_ftype_4_v2sf;
6191 break;
6192 default:
6193 abort ();
6196 def_builtin (d->mask, d->name, type, d->code);
/* Register the AltiVec builtins: pointer and function types, the
   internal load/store builtins, VSCR access, data-stream builtins,
   the lvx/stvx family, the DST variants, and the predicate and abs
   builtins whose types follow the insn operand modes.  */
6200 static void
6201 altivec_init_builtins ()
6203 struct builtin_description *d;
6204 struct builtin_description_predicates *dp;
6205 size_t i;
6206 tree pfloat_type_node = build_pointer_type (float_type_node);
6207 tree pint_type_node = build_pointer_type (integer_type_node);
6208 tree pshort_type_node = build_pointer_type (short_integer_type_node);
6209 tree pchar_type_node = build_pointer_type (char_type_node);
6211 tree pvoid_type_node = build_pointer_type (void_type_node);
6213 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
6214 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
6215 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
6216 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
6218 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
6220 tree int_ftype_int_v4si_v4si
6221 = build_function_type_list (integer_type_node,
6222 integer_type_node, V4SI_type_node,
6223 V4SI_type_node, NULL_TREE);
6224 tree v4sf_ftype_pcfloat
6225 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
6226 tree void_ftype_pfloat_v4sf
6227 = build_function_type_list (void_type_node,
6228 pfloat_type_node, V4SF_type_node, NULL_TREE);
6229 tree v4si_ftype_pcint
6230 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
6231 tree void_ftype_pint_v4si
6232 = build_function_type_list (void_type_node,
6233 pint_type_node, V4SI_type_node, NULL_TREE);
6234 tree v8hi_ftype_pcshort
6235 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
6236 tree void_ftype_pshort_v8hi
6237 = build_function_type_list (void_type_node,
6238 pshort_type_node, V8HI_type_node, NULL_TREE);
6239 tree v16qi_ftype_pcchar
6240 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
6241 tree void_ftype_pchar_v16qi
6242 = build_function_type_list (void_type_node,
6243 pchar_type_node, V16QI_type_node, NULL_TREE);
6244 tree void_ftype_v4si
6245 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
6246 tree v8hi_ftype_void
6247 = build_function_type (V8HI_type_node, void_list_node);
6248 tree void_ftype_void
6249 = build_function_type (void_type_node, void_list_node);
6250 tree void_ftype_qi
6251 = build_function_type_list (void_type_node, char_type_node, NULL_TREE);
6253 tree v16qi_ftype_int_pcvoid
6254 = build_function_type_list (V16QI_type_node,
6255 integer_type_node, pcvoid_type_node, NULL_TREE);
6256 tree v8hi_ftype_int_pcvoid
6257 = build_function_type_list (V8HI_type_node,
6258 integer_type_node, pcvoid_type_node, NULL_TREE);
6259 tree v4si_ftype_int_pcvoid
6260 = build_function_type_list (V4SI_type_node,
6261 integer_type_node, pcvoid_type_node, NULL_TREE);
6263 tree void_ftype_v4si_int_pvoid
6264 = build_function_type_list (void_type_node,
6265 V4SI_type_node, integer_type_node,
6266 pvoid_type_node, NULL_TREE);
6267 tree void_ftype_v16qi_int_pvoid
6268 = build_function_type_list (void_type_node,
6269 V16QI_type_node, integer_type_node,
6270 pvoid_type_node, NULL_TREE);
6271 tree void_ftype_v8hi_int_pvoid
6272 = build_function_type_list (void_type_node,
6273 V8HI_type_node, integer_type_node,
6274 pvoid_type_node, NULL_TREE);
6275 tree int_ftype_int_v8hi_v8hi
6276 = build_function_type_list (integer_type_node,
6277 integer_type_node, V8HI_type_node,
6278 V8HI_type_node, NULL_TREE);
6279 tree int_ftype_int_v16qi_v16qi
6280 = build_function_type_list (integer_type_node,
6281 integer_type_node, V16QI_type_node,
6282 V16QI_type_node, NULL_TREE);
6283 tree int_ftype_int_v4sf_v4sf
6284 = build_function_type_list (integer_type_node,
6285 integer_type_node, V4SF_type_node,
6286 V4SF_type_node, NULL_TREE);
6287 tree v4si_ftype_v4si
6288 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
6289 tree v8hi_ftype_v8hi
6290 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
6291 tree v16qi_ftype_v16qi
6292 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
6293 tree v4sf_ftype_v4sf
6294 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
6295 tree void_ftype_pcvoid_int_char
6296 = build_function_type_list (void_type_node,
6297 pcvoid_type_node, integer_type_node,
6298 char_type_node, NULL_TREE);
/* Internal load/store builtins used by the vector intrinsics.  */
6300 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
6301 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
6302 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
6303 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
6304 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
6305 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
6306 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
6307 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
6308 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
6309 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
6310 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
6311 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
6312 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
6313 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
6314 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
6315 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
6316 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
6317 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
6318 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
6319 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
6320 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVSL);
6321 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVSR);
6322 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEBX);
6323 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEHX);
6324 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEWX);
6325 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVXL);
6326 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVX);
6327 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
6328 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
6329 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
6330 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
6331 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);
6333 /* Add the DST variants. */
6334 d = (struct builtin_description *) bdesc_dst;
6335 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
6336 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_char, d->code);
6338 /* Initialize the predicates. */
/* Each predicate builtin's type follows the mode of the compare
   insn's first input operand.  */
6339 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
6340 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
6342 enum machine_mode mode1;
6343 tree type;
6345 mode1 = insn_data[dp->icode].operand[1].mode;
6347 switch (mode1)
6349 case V4SImode:
6350 type = int_ftype_int_v4si_v4si;
6351 break;
6352 case V8HImode:
6353 type = int_ftype_int_v8hi_v8hi;
6354 break;
6355 case V16QImode:
6356 type = int_ftype_int_v16qi_v16qi;
6357 break;
6358 case V4SFmode:
6359 type = int_ftype_int_v4sf_v4sf;
6360 break;
6361 default:
6362 abort ();
6365 def_builtin (dp->mask, dp->name, type, dp->code);
6368 /* Initialize the abs* operators. */
/* The abs builtins are unary; their type follows the mode of the
   insn's output operand.  */
6369 d = (struct builtin_description *) bdesc_abs;
6370 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
6372 enum machine_mode mode0;
6373 tree type;
6375 mode0 = insn_data[d->icode].operand[0].mode;
6377 switch (mode0)
6379 case V4SImode:
6380 type = v4si_ftype_v4si;
6381 break;
6382 case V8HImode:
6383 type = v8hi_ftype_v8hi;
6384 break;
6385 case V16QImode:
6386 type = v16qi_ftype_v16qi;
6387 break;
6388 case V4SFmode:
6389 type = v4sf_ftype_v4sf;
6390 break;
6391 default:
6392 abort ();
6395 def_builtin (d->mask, d->name, type, d->code);
6399 static void
6400 rs6000_common_init_builtins ()
6402 struct builtin_description *d;
6403 size_t i;
6405 tree v4sf_ftype_v4sf_v4sf_v16qi
6406 = build_function_type_list (V4SF_type_node,
6407 V4SF_type_node, V4SF_type_node,
6408 V16QI_type_node, NULL_TREE);
6409 tree v4si_ftype_v4si_v4si_v16qi
6410 = build_function_type_list (V4SI_type_node,
6411 V4SI_type_node, V4SI_type_node,
6412 V16QI_type_node, NULL_TREE);
6413 tree v8hi_ftype_v8hi_v8hi_v16qi
6414 = build_function_type_list (V8HI_type_node,
6415 V8HI_type_node, V8HI_type_node,
6416 V16QI_type_node, NULL_TREE);
6417 tree v16qi_ftype_v16qi_v16qi_v16qi
6418 = build_function_type_list (V16QI_type_node,
6419 V16QI_type_node, V16QI_type_node,
6420 V16QI_type_node, NULL_TREE);
6421 tree v4si_ftype_char
6422 = build_function_type_list (V4SI_type_node, char_type_node, NULL_TREE);
6423 tree v8hi_ftype_char
6424 = build_function_type_list (V8HI_type_node, char_type_node, NULL_TREE);
6425 tree v16qi_ftype_char
6426 = build_function_type_list (V16QI_type_node, char_type_node, NULL_TREE);
6427 tree v8hi_ftype_v16qi
6428 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
6429 tree v4sf_ftype_v4sf
6430 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
6432 tree v2si_ftype_v2si_v2si
6433 = build_function_type_list (opaque_V2SI_type_node,
6434 opaque_V2SI_type_node,
6435 opaque_V2SI_type_node, NULL_TREE);
6437 tree v2sf_ftype_v2sf_v2sf
6438 = build_function_type_list (opaque_V2SF_type_node,
6439 opaque_V2SF_type_node,
6440 opaque_V2SF_type_node, NULL_TREE);
6442 tree v2si_ftype_int_int
6443 = build_function_type_list (opaque_V2SI_type_node,
6444 integer_type_node, integer_type_node,
6445 NULL_TREE);
6447 tree v2si_ftype_v2si
6448 = build_function_type_list (opaque_V2SI_type_node,
6449 opaque_V2SI_type_node, NULL_TREE);
6451 tree v2sf_ftype_v2sf
6452 = build_function_type_list (opaque_V2SF_type_node,
6453 opaque_V2SF_type_node, NULL_TREE);
6455 tree v2sf_ftype_v2si
6456 = build_function_type_list (opaque_V2SF_type_node,
6457 opaque_V2SI_type_node, NULL_TREE);
6459 tree v2si_ftype_v2sf
6460 = build_function_type_list (opaque_V2SI_type_node,
6461 opaque_V2SF_type_node, NULL_TREE);
6463 tree v2si_ftype_v2si_char
6464 = build_function_type_list (opaque_V2SI_type_node,
6465 opaque_V2SI_type_node,
6466 char_type_node, NULL_TREE);
6468 tree v2si_ftype_int_char
6469 = build_function_type_list (opaque_V2SI_type_node,
6470 integer_type_node, char_type_node, NULL_TREE);
6472 tree v2si_ftype_char
6473 = build_function_type_list (opaque_V2SI_type_node,
6474 char_type_node, NULL_TREE);
6476 tree int_ftype_int_int
6477 = build_function_type_list (integer_type_node,
6478 integer_type_node, integer_type_node,
6479 NULL_TREE);
6481 tree v4si_ftype_v4si_v4si
6482 = build_function_type_list (V4SI_type_node,
6483 V4SI_type_node, V4SI_type_node, NULL_TREE);
6484 tree v4sf_ftype_v4si_char
6485 = build_function_type_list (V4SF_type_node,
6486 V4SI_type_node, char_type_node, NULL_TREE);
6487 tree v4si_ftype_v4sf_char
6488 = build_function_type_list (V4SI_type_node,
6489 V4SF_type_node, char_type_node, NULL_TREE);
6490 tree v4si_ftype_v4si_char
6491 = build_function_type_list (V4SI_type_node,
6492 V4SI_type_node, char_type_node, NULL_TREE);
6493 tree v8hi_ftype_v8hi_char
6494 = build_function_type_list (V8HI_type_node,
6495 V8HI_type_node, char_type_node, NULL_TREE);
6496 tree v16qi_ftype_v16qi_char
6497 = build_function_type_list (V16QI_type_node,
6498 V16QI_type_node, char_type_node, NULL_TREE);
6499 tree v16qi_ftype_v16qi_v16qi_char
6500 = build_function_type_list (V16QI_type_node,
6501 V16QI_type_node, V16QI_type_node,
6502 char_type_node, NULL_TREE);
6503 tree v8hi_ftype_v8hi_v8hi_char
6504 = build_function_type_list (V8HI_type_node,
6505 V8HI_type_node, V8HI_type_node,
6506 char_type_node, NULL_TREE);
6507 tree v4si_ftype_v4si_v4si_char
6508 = build_function_type_list (V4SI_type_node,
6509 V4SI_type_node, V4SI_type_node,
6510 char_type_node, NULL_TREE);
6511 tree v4sf_ftype_v4sf_v4sf_char
6512 = build_function_type_list (V4SF_type_node,
6513 V4SF_type_node, V4SF_type_node,
6514 char_type_node, NULL_TREE);
6515 tree v4sf_ftype_v4sf_v4sf
6516 = build_function_type_list (V4SF_type_node,
6517 V4SF_type_node, V4SF_type_node, NULL_TREE);
6518 tree v4sf_ftype_v4sf_v4sf_v4si
6519 = build_function_type_list (V4SF_type_node,
6520 V4SF_type_node, V4SF_type_node,
6521 V4SI_type_node, NULL_TREE);
6522 tree v4sf_ftype_v4sf_v4sf_v4sf
6523 = build_function_type_list (V4SF_type_node,
6524 V4SF_type_node, V4SF_type_node,
6525 V4SF_type_node, NULL_TREE);
6526 tree v4si_ftype_v4si_v4si_v4si
6527 = build_function_type_list (V4SI_type_node,
6528 V4SI_type_node, V4SI_type_node,
6529 V4SI_type_node, NULL_TREE);
6530 tree v8hi_ftype_v8hi_v8hi
6531 = build_function_type_list (V8HI_type_node,
6532 V8HI_type_node, V8HI_type_node, NULL_TREE);
6533 tree v8hi_ftype_v8hi_v8hi_v8hi
6534 = build_function_type_list (V8HI_type_node,
6535 V8HI_type_node, V8HI_type_node,
6536 V8HI_type_node, NULL_TREE);
6537 tree v4si_ftype_v8hi_v8hi_v4si
6538 = build_function_type_list (V4SI_type_node,
6539 V8HI_type_node, V8HI_type_node,
6540 V4SI_type_node, NULL_TREE);
6541 tree v4si_ftype_v16qi_v16qi_v4si
6542 = build_function_type_list (V4SI_type_node,
6543 V16QI_type_node, V16QI_type_node,
6544 V4SI_type_node, NULL_TREE);
6545 tree v16qi_ftype_v16qi_v16qi
6546 = build_function_type_list (V16QI_type_node,
6547 V16QI_type_node, V16QI_type_node, NULL_TREE);
6548 tree v4si_ftype_v4sf_v4sf
6549 = build_function_type_list (V4SI_type_node,
6550 V4SF_type_node, V4SF_type_node, NULL_TREE);
6551 tree v8hi_ftype_v16qi_v16qi
6552 = build_function_type_list (V8HI_type_node,
6553 V16QI_type_node, V16QI_type_node, NULL_TREE);
6554 tree v4si_ftype_v8hi_v8hi
6555 = build_function_type_list (V4SI_type_node,
6556 V8HI_type_node, V8HI_type_node, NULL_TREE);
6557 tree v8hi_ftype_v4si_v4si
6558 = build_function_type_list (V8HI_type_node,
6559 V4SI_type_node, V4SI_type_node, NULL_TREE);
6560 tree v16qi_ftype_v8hi_v8hi
6561 = build_function_type_list (V16QI_type_node,
6562 V8HI_type_node, V8HI_type_node, NULL_TREE);
6563 tree v4si_ftype_v16qi_v4si
6564 = build_function_type_list (V4SI_type_node,
6565 V16QI_type_node, V4SI_type_node, NULL_TREE);
6566 tree v4si_ftype_v16qi_v16qi
6567 = build_function_type_list (V4SI_type_node,
6568 V16QI_type_node, V16QI_type_node, NULL_TREE);
6569 tree v4si_ftype_v8hi_v4si
6570 = build_function_type_list (V4SI_type_node,
6571 V8HI_type_node, V4SI_type_node, NULL_TREE);
6572 tree v4si_ftype_v8hi
6573 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
6574 tree int_ftype_v4si_v4si
6575 = build_function_type_list (integer_type_node,
6576 V4SI_type_node, V4SI_type_node, NULL_TREE);
6577 tree int_ftype_v4sf_v4sf
6578 = build_function_type_list (integer_type_node,
6579 V4SF_type_node, V4SF_type_node, NULL_TREE);
6580 tree int_ftype_v16qi_v16qi
6581 = build_function_type_list (integer_type_node,
6582 V16QI_type_node, V16QI_type_node, NULL_TREE);
6583 tree int_ftype_v8hi_v8hi
6584 = build_function_type_list (integer_type_node,
6585 V8HI_type_node, V8HI_type_node, NULL_TREE);
6587 /* Add the simple ternary operators. */
6588 d = (struct builtin_description *) bdesc_3arg;
6589 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
6592 enum machine_mode mode0, mode1, mode2, mode3;
6593 tree type;
6595 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6596 continue;
6598 mode0 = insn_data[d->icode].operand[0].mode;
6599 mode1 = insn_data[d->icode].operand[1].mode;
6600 mode2 = insn_data[d->icode].operand[2].mode;
6601 mode3 = insn_data[d->icode].operand[3].mode;
6603 /* When all four are of the same mode. */
6604 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
6606 switch (mode0)
6608 case V4SImode:
6609 type = v4si_ftype_v4si_v4si_v4si;
6610 break;
6611 case V4SFmode:
6612 type = v4sf_ftype_v4sf_v4sf_v4sf;
6613 break;
6614 case V8HImode:
6615 type = v8hi_ftype_v8hi_v8hi_v8hi;
6616 break;
6617 case V16QImode:
6618 type = v16qi_ftype_v16qi_v16qi_v16qi;
6619 break;
6620 default:
6621 abort();
6624 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
6626 switch (mode0)
6628 case V4SImode:
6629 type = v4si_ftype_v4si_v4si_v16qi;
6630 break;
6631 case V4SFmode:
6632 type = v4sf_ftype_v4sf_v4sf_v16qi;
6633 break;
6634 case V8HImode:
6635 type = v8hi_ftype_v8hi_v8hi_v16qi;
6636 break;
6637 case V16QImode:
6638 type = v16qi_ftype_v16qi_v16qi_v16qi;
6639 break;
6640 default:
6641 abort();
6644 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
6645 && mode3 == V4SImode)
6646 type = v4si_ftype_v16qi_v16qi_v4si;
6647 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
6648 && mode3 == V4SImode)
6649 type = v4si_ftype_v8hi_v8hi_v4si;
6650 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
6651 && mode3 == V4SImode)
6652 type = v4sf_ftype_v4sf_v4sf_v4si;
6654 /* vchar, vchar, vchar, 4 bit literal. */
6655 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
6656 && mode3 == QImode)
6657 type = v16qi_ftype_v16qi_v16qi_char;
6659 /* vshort, vshort, vshort, 4 bit literal. */
6660 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
6661 && mode3 == QImode)
6662 type = v8hi_ftype_v8hi_v8hi_char;
6664 /* vint, vint, vint, 4 bit literal. */
6665 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
6666 && mode3 == QImode)
6667 type = v4si_ftype_v4si_v4si_char;
6669 /* vfloat, vfloat, vfloat, 4 bit literal. */
6670 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
6671 && mode3 == QImode)
6672 type = v4sf_ftype_v4sf_v4sf_char;
6674 else
6675 abort ();
6677 def_builtin (d->mask, d->name, type, d->code);
6680 /* Add the simple binary operators. */
6681 d = (struct builtin_description *) bdesc_2arg;
6682 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
6684 enum machine_mode mode0, mode1, mode2;
6685 tree type;
6687 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6688 continue;
6690 mode0 = insn_data[d->icode].operand[0].mode;
6691 mode1 = insn_data[d->icode].operand[1].mode;
6692 mode2 = insn_data[d->icode].operand[2].mode;
6694 /* When all three operands are of the same mode. */
6695 if (mode0 == mode1 && mode1 == mode2)
6697 switch (mode0)
6699 case V4SFmode:
6700 type = v4sf_ftype_v4sf_v4sf;
6701 break;
6702 case V4SImode:
6703 type = v4si_ftype_v4si_v4si;
6704 break;
6705 case V16QImode:
6706 type = v16qi_ftype_v16qi_v16qi;
6707 break;
6708 case V8HImode:
6709 type = v8hi_ftype_v8hi_v8hi;
6710 break;
6711 case V2SImode:
6712 type = v2si_ftype_v2si_v2si;
6713 break;
6714 case V2SFmode:
6715 type = v2sf_ftype_v2sf_v2sf;
6716 break;
6717 case SImode:
6718 type = int_ftype_int_int;
6719 break;
6720 default:
6721 abort ();
6725 /* A few other combos we really don't want to do manually. */
6727 /* vint, vfloat, vfloat. */
6728 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
6729 type = v4si_ftype_v4sf_v4sf;
6731 /* vshort, vchar, vchar. */
6732 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
6733 type = v8hi_ftype_v16qi_v16qi;
6735 /* vint, vshort, vshort. */
6736 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
6737 type = v4si_ftype_v8hi_v8hi;
6739 /* vshort, vint, vint. */
6740 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
6741 type = v8hi_ftype_v4si_v4si;
6743 /* vchar, vshort, vshort. */
6744 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
6745 type = v16qi_ftype_v8hi_v8hi;
6747 /* vint, vchar, vint. */
6748 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
6749 type = v4si_ftype_v16qi_v4si;
6751 /* vint, vchar, vchar. */
6752 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
6753 type = v4si_ftype_v16qi_v16qi;
6755 /* vint, vshort, vint. */
6756 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
6757 type = v4si_ftype_v8hi_v4si;
6759 /* vint, vint, 5 bit literal. */
6760 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
6761 type = v4si_ftype_v4si_char;
6763 /* vshort, vshort, 5 bit literal. */
6764 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
6765 type = v8hi_ftype_v8hi_char;
6767 /* vchar, vchar, 5 bit literal. */
6768 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
6769 type = v16qi_ftype_v16qi_char;
6771 /* vfloat, vint, 5 bit literal. */
6772 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
6773 type = v4sf_ftype_v4si_char;
6775 /* vint, vfloat, 5 bit literal. */
6776 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
6777 type = v4si_ftype_v4sf_char;
6779 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
6780 type = v2si_ftype_int_int;
6782 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
6783 type = v2si_ftype_v2si_char;
6785 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
6786 type = v2si_ftype_int_char;
6788 /* int, x, x. */
6789 else if (mode0 == SImode)
6791 switch (mode1)
6793 case V4SImode:
6794 type = int_ftype_v4si_v4si;
6795 break;
6796 case V4SFmode:
6797 type = int_ftype_v4sf_v4sf;
6798 break;
6799 case V16QImode:
6800 type = int_ftype_v16qi_v16qi;
6801 break;
6802 case V8HImode:
6803 type = int_ftype_v8hi_v8hi;
6804 break;
6805 default:
6806 abort ();
6810 else
6811 abort ();
6813 def_builtin (d->mask, d->name, type, d->code);
6816 /* Add the simple unary operators. */
6817 d = (struct builtin_description *) bdesc_1arg;
6818 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6820 enum machine_mode mode0, mode1;
6821 tree type;
6823 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6824 continue;
6826 mode0 = insn_data[d->icode].operand[0].mode;
6827 mode1 = insn_data[d->icode].operand[1].mode;
6829 if (mode0 == V4SImode && mode1 == QImode)
6830 type = v4si_ftype_char;
6831 else if (mode0 == V8HImode && mode1 == QImode)
6832 type = v8hi_ftype_char;
6833 else if (mode0 == V16QImode && mode1 == QImode)
6834 type = v16qi_ftype_char;
6835 else if (mode0 == V4SFmode && mode1 == V4SFmode)
6836 type = v4sf_ftype_v4sf;
6837 else if (mode0 == V8HImode && mode1 == V16QImode)
6838 type = v8hi_ftype_v16qi;
6839 else if (mode0 == V4SImode && mode1 == V8HImode)
6840 type = v4si_ftype_v8hi;
6841 else if (mode0 == V2SImode && mode1 == V2SImode)
6842 type = v2si_ftype_v2si;
6843 else if (mode0 == V2SFmode && mode1 == V2SFmode)
6844 type = v2sf_ftype_v2sf;
6845 else if (mode0 == V2SFmode && mode1 == V2SImode)
6846 type = v2sf_ftype_v2si;
6847 else if (mode0 == V2SImode && mode1 == V2SFmode)
6848 type = v2si_ftype_v2sf;
6849 else if (mode0 == V2SImode && mode1 == QImode)
6850 type = v2si_ftype_char;
6851 else
6852 abort ();
6854 def_builtin (d->mask, d->name, type, d->code);
6859 /* Expand a block move operation, and return 1 if successful. Return 0
6860 if we should let the compiler generate normal code.
6862 operands[0] is the destination
6863 operands[1] is the source
6864 operands[2] is the length
6865 operands[3] is the alignment */
6867 #define MAX_MOVE_REG 4
6870 expand_block_move (operands)
6871 rtx operands[];
6873 rtx orig_dest = operands[0];
6874 rtx orig_src = operands[1];
6875 rtx bytes_rtx = operands[2];
6876 rtx align_rtx = operands[3];
6877 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
6878 int align;
6879 int bytes;
6880 int offset;
6881 int move_bytes;
6883 /* If this is not a fixed size move, just call memcpy */
6884 if (! constp)
6885 return 0;
6887 /* If this is not a fixed size alignment, abort */
6888 if (GET_CODE (align_rtx) != CONST_INT)
6889 abort ();
6890 align = INTVAL (align_rtx);
6892 /* Anything to move? */
6893 bytes = INTVAL (bytes_rtx);
6894 if (bytes <= 0)
6895 return 1;
6897 /* store_one_arg depends on expand_block_move to handle at least the size of
6898 reg_parm_stack_space. */
6899 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
6900 return 0;
6902 if (TARGET_STRING) /* string instructions are available */
6904 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
6906 union {
6907 rtx (*movstrsi) PARAMS ((rtx, rtx, rtx, rtx));
6908 rtx (*mov) PARAMS ((rtx, rtx));
6909 } gen_func;
6910 enum machine_mode mode = BLKmode;
6911 rtx src, dest;
6913 if (bytes > 24 /* move up to 32 bytes at a time */
6914 && ! fixed_regs[5]
6915 && ! fixed_regs[6]
6916 && ! fixed_regs[7]
6917 && ! fixed_regs[8]
6918 && ! fixed_regs[9]
6919 && ! fixed_regs[10]
6920 && ! fixed_regs[11]
6921 && ! fixed_regs[12])
6923 move_bytes = (bytes > 32) ? 32 : bytes;
6924 gen_func.movstrsi = gen_movstrsi_8reg;
6926 else if (bytes > 16 /* move up to 24 bytes at a time */
6927 && ! fixed_regs[5]
6928 && ! fixed_regs[6]
6929 && ! fixed_regs[7]
6930 && ! fixed_regs[8]
6931 && ! fixed_regs[9]
6932 && ! fixed_regs[10])
6934 move_bytes = (bytes > 24) ? 24 : bytes;
6935 gen_func.movstrsi = gen_movstrsi_6reg;
6937 else if (bytes > 8 /* move up to 16 bytes at a time */
6938 && ! fixed_regs[5]
6939 && ! fixed_regs[6]
6940 && ! fixed_regs[7]
6941 && ! fixed_regs[8])
6943 move_bytes = (bytes > 16) ? 16 : bytes;
6944 gen_func.movstrsi = gen_movstrsi_4reg;
6946 else if (bytes >= 8 && TARGET_POWERPC64
6947 /* 64-bit loads and stores require word-aligned
6948 displacements. */
6949 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
6951 move_bytes = 8;
6952 mode = DImode;
6953 gen_func.mov = gen_movdi;
6955 else if (bytes > 4 && !TARGET_POWERPC64)
6956 { /* move up to 8 bytes at a time */
6957 move_bytes = (bytes > 8) ? 8 : bytes;
6958 gen_func.movstrsi = gen_movstrsi_2reg;
6960 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
6961 { /* move 4 bytes */
6962 move_bytes = 4;
6963 mode = SImode;
6964 gen_func.mov = gen_movsi;
6966 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
6967 { /* move 2 bytes */
6968 move_bytes = 2;
6969 mode = HImode;
6970 gen_func.mov = gen_movhi;
6972 else if (bytes == 1) /* move 1 byte */
6974 move_bytes = 1;
6975 mode = QImode;
6976 gen_func.mov = gen_movqi;
6978 else
6979 { /* move up to 4 bytes at a time */
6980 move_bytes = (bytes > 4) ? 4 : bytes;
6981 gen_func.movstrsi = gen_movstrsi_1reg;
6984 src = adjust_address (orig_src, mode, offset);
6985 dest = adjust_address (orig_dest, mode, offset);
6987 if (mode == BLKmode)
6989 /* Move the address into scratch registers. The movstrsi
6990 patterns require zero offset. */
6991 if (!REG_P (XEXP (src, 0)))
6993 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
6994 src = replace_equiv_address (src, src_reg);
6996 set_mem_size (src, GEN_INT (move_bytes));
6998 if (!REG_P (XEXP (dest, 0)))
7000 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
7001 dest = replace_equiv_address (dest, dest_reg);
7003 set_mem_size (dest, GEN_INT (move_bytes));
7005 emit_insn ((*gen_func.movstrsi) (dest, src,
7006 GEN_INT (move_bytes & 31),
7007 align_rtx));
7009 else
7011 rtx tmp_reg = gen_reg_rtx (mode);
7013 emit_insn ((*gen_func.mov) (tmp_reg, src));
7014 emit_insn ((*gen_func.mov) (dest, tmp_reg));
7019 else /* string instructions not available */
7021 rtx stores[MAX_MOVE_REG];
7022 int num_reg = 0;
7023 int i;
7025 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
7027 rtx (*gen_mov_func) PARAMS ((rtx, rtx));
7028 enum machine_mode mode;
7029 rtx src, dest, tmp_reg;
7031 /* Generate the appropriate load and store, saving the stores
7032 for later. */
7033 if (bytes >= 8 && TARGET_POWERPC64
7034 /* 64-bit loads and stores require word-aligned
7035 displacements. */
7036 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
7038 move_bytes = 8;
7039 mode = DImode;
7040 gen_mov_func = gen_movdi;
7042 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
7044 move_bytes = 4;
7045 mode = SImode;
7046 gen_mov_func = gen_movsi;
7048 else if (bytes >= 2 && (align >= 2 || ! STRICT_ALIGNMENT))
7050 move_bytes = 2;
7051 mode = HImode;
7052 gen_mov_func = gen_movhi;
7054 else
7056 move_bytes = 1;
7057 mode = QImode;
7058 gen_mov_func = gen_movqi;
7061 src = adjust_address (orig_src, mode, offset);
7062 dest = adjust_address (orig_dest, mode, offset);
7063 tmp_reg = gen_reg_rtx (mode);
7065 emit_insn ((*gen_mov_func) (tmp_reg, src));
7066 stores[num_reg++] = (*gen_mov_func) (dest, tmp_reg);
7068 if (num_reg >= MAX_MOVE_REG)
7070 for (i = 0; i < num_reg; i++)
7071 emit_insn (stores[i]);
7072 num_reg = 0;
7076 for (i = 0; i < num_reg; i++)
7077 emit_insn (stores[i]);
7080 return 1;
7084 /* Return 1 if OP is a load multiple operation. It is known to be a
7085 PARALLEL and the first section will be tested. */
7088 load_multiple_operation (op, mode)
7089 rtx op;
7090 enum machine_mode mode ATTRIBUTE_UNUSED;
7092 int count = XVECLEN (op, 0);
7093 unsigned int dest_regno;
7094 rtx src_addr;
7095 int i;
7097 /* Perform a quick check so we don't blow up below. */
7098 if (count <= 1
7099 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7100 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7101 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
7102 return 0;
7104 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7105 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
7107 for (i = 1; i < count; i++)
7109 rtx elt = XVECEXP (op, 0, i);
7111 if (GET_CODE (elt) != SET
7112 || GET_CODE (SET_DEST (elt)) != REG
7113 || GET_MODE (SET_DEST (elt)) != SImode
7114 || REGNO (SET_DEST (elt)) != dest_regno + i
7115 || GET_CODE (SET_SRC (elt)) != MEM
7116 || GET_MODE (SET_SRC (elt)) != SImode
7117 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
7118 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
7119 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
7120 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
7121 return 0;
7124 return 1;
7127 /* Similar, but tests for store multiple. Here, the second vector element
7128 is a CLOBBER. It will be tested later. */
7131 store_multiple_operation (op, mode)
7132 rtx op;
7133 enum machine_mode mode ATTRIBUTE_UNUSED;
7135 int count = XVECLEN (op, 0) - 1;
7136 unsigned int src_regno;
7137 rtx dest_addr;
7138 int i;
7140 /* Perform a quick check so we don't blow up below. */
7141 if (count <= 1
7142 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7143 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
7144 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
7145 return 0;
7147 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7148 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
7150 for (i = 1; i < count; i++)
7152 rtx elt = XVECEXP (op, 0, i + 1);
7154 if (GET_CODE (elt) != SET
7155 || GET_CODE (SET_SRC (elt)) != REG
7156 || GET_MODE (SET_SRC (elt)) != SImode
7157 || REGNO (SET_SRC (elt)) != src_regno + i
7158 || GET_CODE (SET_DEST (elt)) != MEM
7159 || GET_MODE (SET_DEST (elt)) != SImode
7160 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
7161 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
7162 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
7163 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
7164 return 0;
7167 return 1;
7170 /* Return a string to perform a load_multiple operation.
7171 operands[0] is the vector.
7172 operands[1] is the source address.
7173 operands[2] is the first destination register. */
7175 const char *
7176 rs6000_output_load_multiple (operands)
7177 rtx operands[3];
7179 /* We have to handle the case where the pseudo used to contain the address
7180 is assigned to one of the output registers. */
7181 int i, j;
7182 int words = XVECLEN (operands[0], 0);
7183 rtx xop[10];
7185 if (XVECLEN (operands[0], 0) == 1)
7186 return "{l|lwz} %2,0(%1)";
7188 for (i = 0; i < words; i++)
7189 if (refers_to_regno_p (REGNO (operands[2]) + i,
7190 REGNO (operands[2]) + i + 1, operands[1], 0))
7192 if (i == words-1)
7194 xop[0] = GEN_INT (4 * (words-1));
7195 xop[1] = operands[1];
7196 xop[2] = operands[2];
7197 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
7198 return "";
7200 else if (i == 0)
7202 xop[0] = GEN_INT (4 * (words-1));
7203 xop[1] = operands[1];
7204 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
7205 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
7206 return "";
7208 else
7210 for (j = 0; j < words; j++)
7211 if (j != i)
7213 xop[0] = GEN_INT (j * 4);
7214 xop[1] = operands[1];
7215 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
7216 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
7218 xop[0] = GEN_INT (i * 4);
7219 xop[1] = operands[1];
7220 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
7221 return "";
7225 return "{lsi|lswi} %2,%1,%N0";
7228 /* Return 1 for a parallel vrsave operation. */
7231 vrsave_operation (op, mode)
7232 rtx op;
7233 enum machine_mode mode ATTRIBUTE_UNUSED;
7235 int count = XVECLEN (op, 0);
7236 unsigned int dest_regno, src_regno;
7237 int i;
7239 if (count <= 1
7240 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7241 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7242 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
7243 return 0;
7245 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7246 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7248 if (dest_regno != VRSAVE_REGNO
7249 && src_regno != VRSAVE_REGNO)
7250 return 0;
7252 for (i = 1; i < count; i++)
7254 rtx elt = XVECEXP (op, 0, i);
7256 if (GET_CODE (elt) != CLOBBER
7257 && GET_CODE (elt) != SET)
7258 return 0;
7261 return 1;
7264 /* Return 1 for an PARALLEL suitable for mtcrf. */
7267 mtcrf_operation (op, mode)
7268 rtx op;
7269 enum machine_mode mode ATTRIBUTE_UNUSED;
7271 int count = XVECLEN (op, 0);
7272 int i;
7273 rtx src_reg;
7275 /* Perform a quick check so we don't blow up below. */
7276 if (count < 1
7277 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7278 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
7279 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
7280 return 0;
7281 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
7283 if (GET_CODE (src_reg) != REG
7284 || GET_MODE (src_reg) != SImode
7285 || ! INT_REGNO_P (REGNO (src_reg)))
7286 return 0;
7288 for (i = 0; i < count; i++)
7290 rtx exp = XVECEXP (op, 0, i);
7291 rtx unspec;
7292 int maskval;
7294 if (GET_CODE (exp) != SET
7295 || GET_CODE (SET_DEST (exp)) != REG
7296 || GET_MODE (SET_DEST (exp)) != CCmode
7297 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
7298 return 0;
7299 unspec = SET_SRC (exp);
7300 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
7302 if (GET_CODE (unspec) != UNSPEC
7303 || XINT (unspec, 1) != UNSPEC_MOVESI_TO_CR
7304 || XVECLEN (unspec, 0) != 2
7305 || XVECEXP (unspec, 0, 0) != src_reg
7306 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
7307 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
7308 return 0;
7310 return 1;
7313 /* Return 1 for an PARALLEL suitable for lmw. */
7316 lmw_operation (op, mode)
7317 rtx op;
7318 enum machine_mode mode ATTRIBUTE_UNUSED;
7320 int count = XVECLEN (op, 0);
7321 unsigned int dest_regno;
7322 rtx src_addr;
7323 unsigned int base_regno;
7324 HOST_WIDE_INT offset;
7325 int i;
7327 /* Perform a quick check so we don't blow up below. */
7328 if (count <= 1
7329 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7330 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7331 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
7332 return 0;
7334 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7335 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
7337 if (dest_regno > 31
7338 || count != 32 - (int) dest_regno)
7339 return 0;
7341 if (legitimate_indirect_address_p (src_addr, 0))
7343 offset = 0;
7344 base_regno = REGNO (src_addr);
7345 if (base_regno == 0)
7346 return 0;
7348 else if (legitimate_offset_address_p (SImode, src_addr, 0))
7350 offset = INTVAL (XEXP (src_addr, 1));
7351 base_regno = REGNO (XEXP (src_addr, 0));
7353 else
7354 return 0;
7356 for (i = 0; i < count; i++)
7358 rtx elt = XVECEXP (op, 0, i);
7359 rtx newaddr;
7360 rtx addr_reg;
7361 HOST_WIDE_INT newoffset;
7363 if (GET_CODE (elt) != SET
7364 || GET_CODE (SET_DEST (elt)) != REG
7365 || GET_MODE (SET_DEST (elt)) != SImode
7366 || REGNO (SET_DEST (elt)) != dest_regno + i
7367 || GET_CODE (SET_SRC (elt)) != MEM
7368 || GET_MODE (SET_SRC (elt)) != SImode)
7369 return 0;
7370 newaddr = XEXP (SET_SRC (elt), 0);
7371 if (legitimate_indirect_address_p (newaddr, 0))
7373 newoffset = 0;
7374 addr_reg = newaddr;
7376 else if (legitimate_offset_address_p (SImode, newaddr, 0))
7378 addr_reg = XEXP (newaddr, 0);
7379 newoffset = INTVAL (XEXP (newaddr, 1));
7381 else
7382 return 0;
7383 if (REGNO (addr_reg) != base_regno
7384 || newoffset != offset + 4 * i)
7385 return 0;
7388 return 1;
7391 /* Return 1 for an PARALLEL suitable for stmw. */
7394 stmw_operation (op, mode)
7395 rtx op;
7396 enum machine_mode mode ATTRIBUTE_UNUSED;
7398 int count = XVECLEN (op, 0);
7399 unsigned int src_regno;
7400 rtx dest_addr;
7401 unsigned int base_regno;
7402 HOST_WIDE_INT offset;
7403 int i;
7405 /* Perform a quick check so we don't blow up below. */
7406 if (count <= 1
7407 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7408 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
7409 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
7410 return 0;
7412 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7413 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
7415 if (src_regno > 31
7416 || count != 32 - (int) src_regno)
7417 return 0;
7419 if (legitimate_indirect_address_p (dest_addr, 0))
7421 offset = 0;
7422 base_regno = REGNO (dest_addr);
7423 if (base_regno == 0)
7424 return 0;
7426 else if (legitimate_offset_address_p (SImode, dest_addr, 0))
7428 offset = INTVAL (XEXP (dest_addr, 1));
7429 base_regno = REGNO (XEXP (dest_addr, 0));
7431 else
7432 return 0;
7434 for (i = 0; i < count; i++)
7436 rtx elt = XVECEXP (op, 0, i);
7437 rtx newaddr;
7438 rtx addr_reg;
7439 HOST_WIDE_INT newoffset;
7441 if (GET_CODE (elt) != SET
7442 || GET_CODE (SET_SRC (elt)) != REG
7443 || GET_MODE (SET_SRC (elt)) != SImode
7444 || REGNO (SET_SRC (elt)) != src_regno + i
7445 || GET_CODE (SET_DEST (elt)) != MEM
7446 || GET_MODE (SET_DEST (elt)) != SImode)
7447 return 0;
7448 newaddr = XEXP (SET_DEST (elt), 0);
7449 if (legitimate_indirect_address_p (newaddr, 0))
7451 newoffset = 0;
7452 addr_reg = newaddr;
7454 else if (legitimate_offset_address_p (SImode, newaddr, 0))
7456 addr_reg = XEXP (newaddr, 0);
7457 newoffset = INTVAL (XEXP (newaddr, 1));
7459 else
7460 return 0;
7461 if (REGNO (addr_reg) != base_regno
7462 || newoffset != offset + 4 * i)
7463 return 0;
7466 return 1;
7469 /* A validation routine: say whether CODE, a condition code, and MODE
7470 match. The other alternatives either don't make sense or should
7471 never be generated. */
7473 static void
7474 validate_condition_mode (code, mode)
7475 enum rtx_code code;
7476 enum machine_mode mode;
7478 if (GET_RTX_CLASS (code) != '<'
7479 || GET_MODE_CLASS (mode) != MODE_CC)
7480 abort ();
7482 /* These don't make sense. */
7483 if ((code == GT || code == LT || code == GE || code == LE)
7484 && mode == CCUNSmode)
7485 abort ();
7487 if ((code == GTU || code == LTU || code == GEU || code == LEU)
7488 && mode != CCUNSmode)
7489 abort ();
7491 if (mode != CCFPmode
7492 && (code == ORDERED || code == UNORDERED
7493 || code == UNEQ || code == LTGT
7494 || code == UNGT || code == UNLT
7495 || code == UNGE || code == UNLE))
7496 abort ();
7498 /* These should never be generated except for
7499 flag_finite_math_only. */
7500 if (mode == CCFPmode
7501 && ! flag_finite_math_only
7502 && (code == LE || code == GE
7503 || code == UNEQ || code == LTGT
7504 || code == UNGT || code == UNLT))
7505 abort ();
7507 /* These are invalid; the information is not there. */
7508 if (mode == CCEQmode
7509 && code != EQ && code != NE)
7510 abort ();
7513 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
7514 We only check the opcode against the mode of the CC value here. */
7517 branch_comparison_operator (op, mode)
7518 rtx op;
7519 enum machine_mode mode ATTRIBUTE_UNUSED;
7521 enum rtx_code code = GET_CODE (op);
7522 enum machine_mode cc_mode;
7524 if (GET_RTX_CLASS (code) != '<')
7525 return 0;
7527 cc_mode = GET_MODE (XEXP (op, 0));
7528 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
7529 return 0;
7531 validate_condition_mode (code, cc_mode);
7533 return 1;
7536 /* Return 1 if OP is a comparison operation that is valid for a branch
7537 insn and which is true if the corresponding bit in the CC register
7538 is set. */
7541 branch_positive_comparison_operator (op, mode)
7542 rtx op;
7543 enum machine_mode mode;
7545 enum rtx_code code;
7547 if (! branch_comparison_operator (op, mode))
7548 return 0;
7550 code = GET_CODE (op);
7551 return (code == EQ || code == LT || code == GT
7552 || (TARGET_E500 && TARGET_HARD_FLOAT && !TARGET_FPRS && code == NE)
7553 || code == LTU || code == GTU
7554 || code == UNORDERED);
7557 /* Return 1 if OP is a comparison operation that is valid for an scc
7558 insn: it must be a positive comparison. */
7561 scc_comparison_operator (op, mode)
7562 rtx op;
7563 enum machine_mode mode;
7565 return branch_positive_comparison_operator (op, mode);
7569 trap_comparison_operator (op, mode)
7570 rtx op;
7571 enum machine_mode mode;
7573 if (mode != VOIDmode && mode != GET_MODE (op))
7574 return 0;
7575 return GET_RTX_CLASS (GET_CODE (op)) == '<';
7579 boolean_operator (op, mode)
7580 rtx op;
7581 enum machine_mode mode ATTRIBUTE_UNUSED;
7583 enum rtx_code code = GET_CODE (op);
7584 return (code == AND || code == IOR || code == XOR);
7588 boolean_or_operator (op, mode)
7589 rtx op;
7590 enum machine_mode mode ATTRIBUTE_UNUSED;
7592 enum rtx_code code = GET_CODE (op);
7593 return (code == IOR || code == XOR);
7597 min_max_operator (op, mode)
7598 rtx op;
7599 enum machine_mode mode ATTRIBUTE_UNUSED;
7601 enum rtx_code code = GET_CODE (op);
7602 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
7605 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
7606 mask required to convert the result of a rotate insn into a shift
7607 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
7610 includes_lshift_p (shiftop, andop)
7611 rtx shiftop;
7612 rtx andop;
7614 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
7616 shift_mask <<= INTVAL (shiftop);
7618 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
7621 /* Similar, but for right shift. */
7624 includes_rshift_p (shiftop, andop)
7625 rtx shiftop;
7626 rtx andop;
7628 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
7630 shift_mask >>= INTVAL (shiftop);
7632 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
7635 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
7636 to perform a left shift. It must have exactly SHIFTOP least
7637 significant 0's, then one or more 1's, then zero or more 0's. */
7640 includes_rldic_lshift_p (shiftop, andop)
7641 rtx shiftop;
7642 rtx andop;
7644 if (GET_CODE (andop) == CONST_INT)
7646 HOST_WIDE_INT c, lsb, shift_mask;
7648 c = INTVAL (andop);
7649 if (c == 0 || c == ~0)
7650 return 0;
7652 shift_mask = ~0;
7653 shift_mask <<= INTVAL (shiftop);
7655 /* Find the least significant one bit. */
7656 lsb = c & -c;
7658 /* It must coincide with the LSB of the shift mask. */
7659 if (-lsb != shift_mask)
7660 return 0;
7662 /* Invert to look for the next transition (if any). */
7663 c = ~c;
7665 /* Remove the low group of ones (originally low group of zeros). */
7666 c &= -lsb;
7668 /* Again find the lsb, and check we have all 1's above. */
7669 lsb = c & -c;
7670 return c == -lsb;
7672 else if (GET_CODE (andop) == CONST_DOUBLE
7673 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
7675 HOST_WIDE_INT low, high, lsb;
7676 HOST_WIDE_INT shift_mask_low, shift_mask_high;
7678 low = CONST_DOUBLE_LOW (andop);
7679 if (HOST_BITS_PER_WIDE_INT < 64)
7680 high = CONST_DOUBLE_HIGH (andop);
7682 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
7683 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
7684 return 0;
7686 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
7688 shift_mask_high = ~0;
7689 if (INTVAL (shiftop) > 32)
7690 shift_mask_high <<= INTVAL (shiftop) - 32;
7692 lsb = high & -high;
7694 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
7695 return 0;
7697 high = ~high;
7698 high &= -lsb;
7700 lsb = high & -high;
7701 return high == -lsb;
7704 shift_mask_low = ~0;
7705 shift_mask_low <<= INTVAL (shiftop);
7707 lsb = low & -low;
7709 if (-lsb != shift_mask_low)
7710 return 0;
7712 if (HOST_BITS_PER_WIDE_INT < 64)
7713 high = ~high;
7714 low = ~low;
7715 low &= -lsb;
7717 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
7719 lsb = high & -high;
7720 return high == -lsb;
7723 lsb = low & -low;
7724 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
7726 else
7727 return 0;
7730 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
7731 to perform a left shift. It must have SHIFTOP or more least
7732 signifigant 0's, with the remainder of the word 1's. */
7735 includes_rldicr_lshift_p (shiftop, andop)
7736 rtx shiftop;
7737 rtx andop;
7739 if (GET_CODE (andop) == CONST_INT)
7741 HOST_WIDE_INT c, lsb, shift_mask;
7743 shift_mask = ~0;
7744 shift_mask <<= INTVAL (shiftop);
7745 c = INTVAL (andop);
7747 /* Find the least signifigant one bit. */
7748 lsb = c & -c;
7750 /* It must be covered by the shift mask.
7751 This test also rejects c == 0. */
7752 if ((lsb & shift_mask) == 0)
7753 return 0;
7755 /* Check we have all 1's above the transition, and reject all 1's. */
7756 return c == -lsb && lsb != 1;
7758 else if (GET_CODE (andop) == CONST_DOUBLE
7759 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
7761 HOST_WIDE_INT low, lsb, shift_mask_low;
7763 low = CONST_DOUBLE_LOW (andop);
7765 if (HOST_BITS_PER_WIDE_INT < 64)
7767 HOST_WIDE_INT high, shift_mask_high;
7769 high = CONST_DOUBLE_HIGH (andop);
7771 if (low == 0)
7773 shift_mask_high = ~0;
7774 if (INTVAL (shiftop) > 32)
7775 shift_mask_high <<= INTVAL (shiftop) - 32;
7777 lsb = high & -high;
7779 if ((lsb & shift_mask_high) == 0)
7780 return 0;
7782 return high == -lsb;
7784 if (high != ~0)
7785 return 0;
7788 shift_mask_low = ~0;
7789 shift_mask_low <<= INTVAL (shiftop);
7791 lsb = low & -low;
7793 if ((lsb & shift_mask_low) == 0)
7794 return 0;
7796 return low == -lsb && lsb != 1;
7798 else
7799 return 0;
7802 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
7803 for lfq and stfq insns.
7805 Note reg1 and reg2 *must* be hard registers. To be sure we will
7806 abort if we are passed pseudo registers. */
7809 registers_ok_for_quad_peep (reg1, reg2)
7810 rtx reg1, reg2;
7812 /* We might have been passed a SUBREG. */
7813 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
7814 return 0;
7816 return (REGNO (reg1) == REGNO (reg2) - 1);
7819 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
7820 addr1 and addr2 must be in consecutive memory locations
7821 (addr2 == addr1 + 8). */
7824 addrs_ok_for_quad_peep (addr1, addr2)
7825 rtx addr1;
7826 rtx addr2;
7828 unsigned int reg1;
7829 int offset1;
7831 /* Extract an offset (if used) from the first addr. */
7832 if (GET_CODE (addr1) == PLUS)
7834 /* If not a REG, return zero. */
7835 if (GET_CODE (XEXP (addr1, 0)) != REG)
7836 return 0;
7837 else
7839 reg1 = REGNO (XEXP (addr1, 0));
7840 /* The offset must be constant! */
7841 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
7842 return 0;
7843 offset1 = INTVAL (XEXP (addr1, 1));
7846 else if (GET_CODE (addr1) != REG)
7847 return 0;
7848 else
7850 reg1 = REGNO (addr1);
7851 /* This was a simple (mem (reg)) expression. Offset is 0. */
7852 offset1 = 0;
7855 /* Make sure the second address is a (mem (plus (reg) (const_int)))
7856 or if it is (mem (reg)) then make sure that offset1 is -8 and the same
7857 register as addr1. */
7858 if (offset1 == -8 && GET_CODE (addr2) == REG && reg1 == REGNO (addr2))
7859 return 1;
7860 if (GET_CODE (addr2) != PLUS)
7861 return 0;
7863 if (GET_CODE (XEXP (addr2, 0)) != REG
7864 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
7865 return 0;
7867 if (reg1 != REGNO (XEXP (addr2, 0)))
7868 return 0;
7870 /* The offset for the second addr must be 8 more than the first addr. */
7871 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
7872 return 0;
7874 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
7875 instructions. */
7876 return 1;
7879 /* Return the register class of a scratch register needed to copy IN into
7880 or out of a register in CLASS in MODE. If it can be done directly,
7881 NO_REGS is returned. */
7883 enum reg_class
7884 secondary_reload_class (class, mode, in)
7885 enum reg_class class;
7886 enum machine_mode mode ATTRIBUTE_UNUSED;
7887 rtx in;
7889 int regno;
7891 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
7892 #if TARGET_MACHO
7893 && MACHOPIC_INDIRECT
7894 #endif
7897 /* We cannot copy a symbolic operand directly into anything
7898 other than BASE_REGS for TARGET_ELF. So indicate that a
7899 register from BASE_REGS is needed as an intermediate
7900 register.
7902 On Darwin, pic addresses require a load from memory, which
7903 needs a base register. */
7904 if (class != BASE_REGS
7905 && (GET_CODE (in) == SYMBOL_REF
7906 || GET_CODE (in) == HIGH
7907 || GET_CODE (in) == LABEL_REF
7908 || GET_CODE (in) == CONST))
7909 return BASE_REGS;
7912 if (GET_CODE (in) == REG)
7914 regno = REGNO (in);
7915 if (regno >= FIRST_PSEUDO_REGISTER)
7917 regno = true_regnum (in);
7918 if (regno >= FIRST_PSEUDO_REGISTER)
7919 regno = -1;
7922 else if (GET_CODE (in) == SUBREG)
7924 regno = true_regnum (in);
7925 if (regno >= FIRST_PSEUDO_REGISTER)
7926 regno = -1;
7928 else
7929 regno = -1;
7931 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
7932 into anything. */
7933 if (class == GENERAL_REGS || class == BASE_REGS
7934 || (regno >= 0 && INT_REGNO_P (regno)))
7935 return NO_REGS;
7937 /* Constants, memory, and FP registers can go into FP registers. */
7938 if ((regno == -1 || FP_REGNO_P (regno))
7939 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
7940 return NO_REGS;
7942 /* Memory, and AltiVec registers can go into AltiVec registers. */
7943 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
7944 && class == ALTIVEC_REGS)
7945 return NO_REGS;
7947 /* We can copy among the CR registers. */
7948 if ((class == CR_REGS || class == CR0_REGS)
7949 && regno >= 0 && CR_REGNO_P (regno))
7950 return NO_REGS;
7952 /* Otherwise, we need GENERAL_REGS. */
7953 return GENERAL_REGS;
7956 /* Given a comparison operation, return the bit number in CCR to test. We
7957 know this is a valid comparison.
7959 SCC_P is 1 if this is for an scc. That means that %D will have been
7960 used instead of %C, so the bits will be in different places.
7962 Return -1 if OP isn't a valid comparison for some reason. */
7965 ccr_bit (op, scc_p)
7966 rtx op;
7967 int scc_p;
7969 enum rtx_code code = GET_CODE (op);
7970 enum machine_mode cc_mode;
7971 int cc_regnum;
7972 int base_bit;
7973 rtx reg;
7975 if (GET_RTX_CLASS (code) != '<')
7976 return -1;
7978 reg = XEXP (op, 0);
7980 if (GET_CODE (reg) != REG
7981 || ! CR_REGNO_P (REGNO (reg)))
7982 abort ();
7984 cc_mode = GET_MODE (reg);
7985 cc_regnum = REGNO (reg);
7986 base_bit = 4 * (cc_regnum - CR0_REGNO);
7988 validate_condition_mode (code, cc_mode);
7990 /* When generating a sCOND operation, only positive conditions are
7991 allowed. */
7992 if (scc_p && code != EQ && code != GT && code != LT && code != UNORDERED
7993 && code != GTU && code != LTU)
7994 abort ();
7996 switch (code)
7998 case NE:
7999 if (TARGET_E500 && !TARGET_FPRS
8000 && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
8001 return base_bit + 1;
8002 return scc_p ? base_bit + 3 : base_bit + 2;
8003 case EQ:
8004 if (TARGET_E500 && !TARGET_FPRS
8005 && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
8006 return base_bit + 1;
8007 return base_bit + 2;
8008 case GT: case GTU: case UNLE:
8009 return base_bit + 1;
8010 case LT: case LTU: case UNGE:
8011 return base_bit;
8012 case ORDERED: case UNORDERED:
8013 return base_bit + 3;
8015 case GE: case GEU:
8016 /* If scc, we will have done a cror to put the bit in the
8017 unordered position. So test that bit. For integer, this is ! LT
8018 unless this is an scc insn. */
8019 return scc_p ? base_bit + 3 : base_bit;
8021 case LE: case LEU:
8022 return scc_p ? base_bit + 3 : base_bit + 1;
8024 default:
8025 abort ();
8029 /* Return the GOT register. */
8031 struct rtx_def *
8032 rs6000_got_register (value)
8033 rtx value ATTRIBUTE_UNUSED;
8035 /* The second flow pass currently (June 1999) can't update
8036 regs_ever_live without disturbing other parts of the compiler, so
8037 update it here to make the prolog/epilogue code happy. */
8038 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
8039 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
8041 current_function_uses_pic_offset_table = 1;
8043 return pic_offset_table_rtx;
8046 /* Function to init struct machine_function.
8047 This will be called, via a pointer variable,
8048 from push_function_context. */
8050 static struct machine_function *
8051 rs6000_init_machine_status ()
8053 return ggc_alloc_cleared (sizeof (machine_function));
8056 /* These macros test for integers and extract the low-order bits. */
8057 #define INT_P(X) \
8058 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
8059 && GET_MODE (X) == VOIDmode)
8061 #define INT_LOWPART(X) \
8062 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
8065 extract_MB (op)
8066 rtx op;
8068 int i;
8069 unsigned long val = INT_LOWPART (op);
8071 /* If the high bit is zero, the value is the first 1 bit we find
8072 from the left. */
8073 if ((val & 0x80000000) == 0)
8075 if ((val & 0xffffffff) == 0)
8076 abort ();
8078 i = 1;
8079 while (((val <<= 1) & 0x80000000) == 0)
8080 ++i;
8081 return i;
8084 /* If the high bit is set and the low bit is not, or the mask is all
8085 1's, the value is zero. */
8086 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
8087 return 0;
8089 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
8090 from the right. */
8091 i = 31;
8092 while (((val >>= 1) & 1) != 0)
8093 --i;
8095 return i;
8099 extract_ME (op)
8100 rtx op;
8102 int i;
8103 unsigned long val = INT_LOWPART (op);
8105 /* If the low bit is zero, the value is the first 1 bit we find from
8106 the right. */
8107 if ((val & 1) == 0)
8109 if ((val & 0xffffffff) == 0)
8110 abort ();
8112 i = 30;
8113 while (((val >>= 1) & 1) == 0)
8114 --i;
8116 return i;
8119 /* If the low bit is set and the high bit is not, or the mask is all
8120 1's, the value is 31. */
8121 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
8122 return 31;
8124 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
8125 from the left. */
8126 i = 0;
8127 while (((val <<= 1) & 0x80000000) != 0)
8128 ++i;
8130 return i;
8133 /* Locate some local-dynamic symbol still in use by this function
8134 so that we can print its name in some tls_ld pattern. */
8136 static const char *
8137 rs6000_get_some_local_dynamic_name ()
8139 rtx insn;
8141 if (cfun->machine->some_ld_name)
8142 return cfun->machine->some_ld_name;
8144 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
8145 if (INSN_P (insn)
8146 && for_each_rtx (&PATTERN (insn),
8147 rs6000_get_some_local_dynamic_name_1, 0))
8148 return cfun->machine->some_ld_name;
8150 abort ();
8153 /* Helper function for rs6000_get_some_local_dynamic_name. */
8155 static int
8156 rs6000_get_some_local_dynamic_name_1 (px, data)
8157 rtx *px;
8158 void *data ATTRIBUTE_UNUSED;
8160 rtx x = *px;
8162 if (GET_CODE (x) == SYMBOL_REF)
8164 const char *str = XSTR (x, 0);
8165 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
8167 cfun->machine->some_ld_name = str;
8168 return 1;
8172 return 0;
8175 /* Print an operand. Recognize special options, documented below. */
8177 #if TARGET_ELF
8178 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
8179 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
8180 #else
8181 #define SMALL_DATA_RELOC "sda21"
8182 #define SMALL_DATA_REG 0
8183 #endif
8185 void
8186 print_operand (file, x, code)
8187 FILE *file;
8188 rtx x;
8189 int code;
8191 int i;
8192 HOST_WIDE_INT val;
8193 unsigned HOST_WIDE_INT uval;
8195 switch (code)
8197 case '.':
8198 /* Write out an instruction after the call which may be replaced
8199 with glue code by the loader. This depends on the AIX version. */
8200 asm_fprintf (file, RS6000_CALL_GLUE);
8201 return;
8203 /* %a is output_address. */
8205 case 'A':
8206 /* If X is a constant integer whose low-order 5 bits are zero,
8207 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
8208 in the AIX assembler where "sri" with a zero shift count
8209 writes a trash instruction. */
8210 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
8211 putc ('l', file);
8212 else
8213 putc ('r', file);
8214 return;
8216 case 'b':
8217 /* If constant, low-order 16 bits of constant, unsigned.
8218 Otherwise, write normally. */
8219 if (INT_P (x))
8220 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
8221 else
8222 print_operand (file, x, 0);
8223 return;
8225 case 'B':
8226 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
8227 for 64-bit mask direction. */
8228 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
8229 return;
8231 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
8232 output_operand. */
8234 case 'E':
8235 /* X is a CR register. Print the number of the EQ bit of the CR */
8236 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8237 output_operand_lossage ("invalid %%E value");
8238 else
8239 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
8240 return;
8242 case 'f':
8243 /* X is a CR register. Print the shift count needed to move it
8244 to the high-order four bits. */
8245 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8246 output_operand_lossage ("invalid %%f value");
8247 else
8248 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
8249 return;
8251 case 'F':
8252 /* Similar, but print the count for the rotate in the opposite
8253 direction. */
8254 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8255 output_operand_lossage ("invalid %%F value");
8256 else
8257 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
8258 return;
8260 case 'G':
8261 /* X is a constant integer. If it is negative, print "m",
8262 otherwise print "z". This is to make an aze or ame insn. */
8263 if (GET_CODE (x) != CONST_INT)
8264 output_operand_lossage ("invalid %%G value");
8265 else if (INTVAL (x) >= 0)
8266 putc ('z', file);
8267 else
8268 putc ('m', file);
8269 return;
8271 case 'h':
8272 /* If constant, output low-order five bits. Otherwise, write
8273 normally. */
8274 if (INT_P (x))
8275 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
8276 else
8277 print_operand (file, x, 0);
8278 return;
8280 case 'H':
8281 /* If constant, output low-order six bits. Otherwise, write
8282 normally. */
8283 if (INT_P (x))
8284 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
8285 else
8286 print_operand (file, x, 0);
8287 return;
8289 case 'I':
8290 /* Print `i' if this is a constant, else nothing. */
8291 if (INT_P (x))
8292 putc ('i', file);
8293 return;
8295 case 'j':
8296 /* Write the bit number in CCR for jump. */
8297 i = ccr_bit (x, 0);
8298 if (i == -1)
8299 output_operand_lossage ("invalid %%j code");
8300 else
8301 fprintf (file, "%d", i);
8302 return;
8304 case 'J':
8305 /* Similar, but add one for shift count in rlinm for scc and pass
8306 scc flag to `ccr_bit'. */
8307 i = ccr_bit (x, 1);
8308 if (i == -1)
8309 output_operand_lossage ("invalid %%J code");
8310 else
8311 /* If we want bit 31, write a shift count of zero, not 32. */
8312 fprintf (file, "%d", i == 31 ? 0 : i + 1);
8313 return;
8315 case 'k':
8316 /* X must be a constant. Write the 1's complement of the
8317 constant. */
8318 if (! INT_P (x))
8319 output_operand_lossage ("invalid %%k value");
8320 else
8321 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
8322 return;
8324 case 'K':
8325 /* X must be a symbolic constant on ELF. Write an
8326 expression suitable for an 'addi' that adds in the low 16
8327 bits of the MEM. */
8328 if (GET_CODE (x) != CONST)
8330 print_operand_address (file, x);
8331 fputs ("@l", file);
8333 else
8335 if (GET_CODE (XEXP (x, 0)) != PLUS
8336 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
8337 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
8338 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
8339 output_operand_lossage ("invalid %%K value");
8340 print_operand_address (file, XEXP (XEXP (x, 0), 0));
8341 fputs ("@l", file);
8342 /* For GNU as, there must be a non-alphanumeric character
8343 between 'l' and the number. The '-' is added by
8344 print_operand() already. */
8345 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
8346 fputs ("+", file);
8347 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
8349 return;
8351 /* %l is output_asm_label. */
8353 case 'L':
8354 /* Write second word of DImode or DFmode reference. Works on register
8355 or non-indexed memory only. */
8356 if (GET_CODE (x) == REG)
8357 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
8358 else if (GET_CODE (x) == MEM)
8360 /* Handle possible auto-increment. Since it is pre-increment and
8361 we have already done it, we can just use an offset of word. */
8362 if (GET_CODE (XEXP (x, 0)) == PRE_INC
8363 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
8364 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
8365 UNITS_PER_WORD));
8366 else
8367 output_address (XEXP (adjust_address_nv (x, SImode,
8368 UNITS_PER_WORD),
8369 0));
8371 if (small_data_operand (x, GET_MODE (x)))
8372 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8373 reg_names[SMALL_DATA_REG]);
8375 return;
8377 case 'm':
8378 /* MB value for a mask operand. */
8379 if (! mask_operand (x, SImode))
8380 output_operand_lossage ("invalid %%m value");
8382 fprintf (file, "%d", extract_MB (x));
8383 return;
8385 case 'M':
8386 /* ME value for a mask operand. */
8387 if (! mask_operand (x, SImode))
8388 output_operand_lossage ("invalid %%M value");
8390 fprintf (file, "%d", extract_ME (x));
8391 return;
8393 /* %n outputs the negative of its operand. */
8395 case 'N':
8396 /* Write the number of elements in the vector times 4. */
8397 if (GET_CODE (x) != PARALLEL)
8398 output_operand_lossage ("invalid %%N value");
8399 else
8400 fprintf (file, "%d", XVECLEN (x, 0) * 4);
8401 return;
8403 case 'O':
8404 /* Similar, but subtract 1 first. */
8405 if (GET_CODE (x) != PARALLEL)
8406 output_operand_lossage ("invalid %%O value");
8407 else
8408 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
8409 return;
8411 case 'p':
8412 /* X is a CONST_INT that is a power of two. Output the logarithm. */
8413 if (! INT_P (x)
8414 || INT_LOWPART (x) < 0
8415 || (i = exact_log2 (INT_LOWPART (x))) < 0)
8416 output_operand_lossage ("invalid %%p value");
8417 else
8418 fprintf (file, "%d", i);
8419 return;
8421 case 'P':
8422 /* The operand must be an indirect memory reference. The result
8423 is the register number. */
8424 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
8425 || REGNO (XEXP (x, 0)) >= 32)
8426 output_operand_lossage ("invalid %%P value");
8427 else
8428 fprintf (file, "%d", REGNO (XEXP (x, 0)));
8429 return;
8431 case 'q':
8432 /* This outputs the logical code corresponding to a boolean
8433 expression. The expression may have one or both operands
8434 negated (if one, only the first one). For condition register
8435 logical operations, it will also treat the negated
8436 CR codes as NOTs, but not handle NOTs of them. */
8438 const char *const *t = 0;
8439 const char *s;
8440 enum rtx_code code = GET_CODE (x);
8441 static const char * const tbl[3][3] = {
8442 { "and", "andc", "nor" },
8443 { "or", "orc", "nand" },
8444 { "xor", "eqv", "xor" } };
8446 if (code == AND)
8447 t = tbl[0];
8448 else if (code == IOR)
8449 t = tbl[1];
8450 else if (code == XOR)
8451 t = tbl[2];
8452 else
8453 output_operand_lossage ("invalid %%q value");
8455 if (GET_CODE (XEXP (x, 0)) != NOT)
8456 s = t[0];
8457 else
8459 if (GET_CODE (XEXP (x, 1)) == NOT)
8460 s = t[2];
8461 else
8462 s = t[1];
8465 fputs (s, file);
8467 return;
8469 case 'R':
8470 /* X is a CR register. Print the mask for `mtcrf'. */
8471 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8472 output_operand_lossage ("invalid %%R value");
8473 else
8474 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
8475 return;
8477 case 's':
8478 /* Low 5 bits of 32 - value */
8479 if (! INT_P (x))
8480 output_operand_lossage ("invalid %%s value");
8481 else
8482 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
8483 return;
8485 case 'S':
8486 /* PowerPC64 mask position. All 0's is excluded.
8487 CONST_INT 32-bit mask is considered sign-extended so any
8488 transition must occur within the CONST_INT, not on the boundary. */
8489 if (! mask64_operand (x, DImode))
8490 output_operand_lossage ("invalid %%S value");
8492 uval = INT_LOWPART (x);
8494 if (uval & 1) /* Clear Left */
8496 #if HOST_BITS_PER_WIDE_INT > 64
8497 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
8498 #endif
8499 i = 64;
8501 else /* Clear Right */
8503 uval = ~uval;
8504 #if HOST_BITS_PER_WIDE_INT > 64
8505 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
8506 #endif
8507 i = 63;
8509 while (uval != 0)
8510 --i, uval >>= 1;
8511 if (i < 0)
8512 abort ();
8513 fprintf (file, "%d", i);
8514 return;
8516 case 't':
8517 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
8518 if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
8519 abort ();
8521 /* Bit 3 is OV bit. */
8522 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
8524 /* If we want bit 31, write a shift count of zero, not 32. */
8525 fprintf (file, "%d", i == 31 ? 0 : i + 1);
8526 return;
8528 case 'T':
8529 /* Print the symbolic name of a branch target register. */
8530 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
8531 && REGNO (x) != COUNT_REGISTER_REGNUM))
8532 output_operand_lossage ("invalid %%T value");
8533 else if (REGNO (x) == LINK_REGISTER_REGNUM)
8534 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
8535 else
8536 fputs ("ctr", file);
8537 return;
8539 case 'u':
8540 /* High-order 16 bits of constant for use in unsigned operand. */
8541 if (! INT_P (x))
8542 output_operand_lossage ("invalid %%u value");
8543 else
8544 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
8545 (INT_LOWPART (x) >> 16) & 0xffff);
8546 return;
8548 case 'v':
8549 /* High-order 16 bits of constant for use in signed operand. */
8550 if (! INT_P (x))
8551 output_operand_lossage ("invalid %%v value");
8552 else
8553 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
8554 (INT_LOWPART (x) >> 16) & 0xffff);
8555 return;
8557 case 'U':
8558 /* Print `u' if this has an auto-increment or auto-decrement. */
8559 if (GET_CODE (x) == MEM
8560 && (GET_CODE (XEXP (x, 0)) == PRE_INC
8561 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
8562 putc ('u', file);
8563 return;
8565 case 'V':
8566 /* Print the trap code for this operand. */
8567 switch (GET_CODE (x))
8569 case EQ:
8570 fputs ("eq", file); /* 4 */
8571 break;
8572 case NE:
8573 fputs ("ne", file); /* 24 */
8574 break;
8575 case LT:
8576 fputs ("lt", file); /* 16 */
8577 break;
8578 case LE:
8579 fputs ("le", file); /* 20 */
8580 break;
8581 case GT:
8582 fputs ("gt", file); /* 8 */
8583 break;
8584 case GE:
8585 fputs ("ge", file); /* 12 */
8586 break;
8587 case LTU:
8588 fputs ("llt", file); /* 2 */
8589 break;
8590 case LEU:
8591 fputs ("lle", file); /* 6 */
8592 break;
8593 case GTU:
8594 fputs ("lgt", file); /* 1 */
8595 break;
8596 case GEU:
8597 fputs ("lge", file); /* 5 */
8598 break;
8599 default:
8600 abort ();
8602 break;
8604 case 'w':
8605 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
8606 normally. */
8607 if (INT_P (x))
8608 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
8609 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
8610 else
8611 print_operand (file, x, 0);
8612 return;
8614 case 'W':
8615 /* MB value for a PowerPC64 rldic operand. */
8616 val = (GET_CODE (x) == CONST_INT
8617 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
8619 if (val < 0)
8620 i = -1;
8621 else
8622 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
8623 if ((val <<= 1) < 0)
8624 break;
8626 #if HOST_BITS_PER_WIDE_INT == 32
8627 if (GET_CODE (x) == CONST_INT && i >= 0)
8628 i += 32; /* zero-extend high-part was all 0's */
8629 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
8631 val = CONST_DOUBLE_LOW (x);
8633 if (val == 0)
8634 abort ();
8635 else if (val < 0)
8636 --i;
8637 else
8638 for ( ; i < 64; i++)
8639 if ((val <<= 1) < 0)
8640 break;
8642 #endif
8644 fprintf (file, "%d", i + 1);
8645 return;
8647 case 'X':
8648 if (GET_CODE (x) == MEM
8649 && legitimate_indexed_address_p (XEXP (x, 0), 0))
8650 putc ('x', file);
8651 return;
8653 case 'Y':
8654 /* Like 'L', for third word of TImode */
8655 if (GET_CODE (x) == REG)
8656 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
8657 else if (GET_CODE (x) == MEM)
8659 if (GET_CODE (XEXP (x, 0)) == PRE_INC
8660 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
8661 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
8662 else
8663 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
8664 if (small_data_operand (x, GET_MODE (x)))
8665 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8666 reg_names[SMALL_DATA_REG]);
8668 return;
8670 case 'z':
8671 /* X is a SYMBOL_REF. Write out the name preceded by a
8672 period and without any trailing data in brackets. Used for function
8673 names. If we are configured for System V (or the embedded ABI) on
8674 the PowerPC, do not emit the period, since those systems do not use
8675 TOCs and the like. */
8676 if (GET_CODE (x) != SYMBOL_REF)
8677 abort ();
8679 if (XSTR (x, 0)[0] != '.')
8681 switch (DEFAULT_ABI)
8683 default:
8684 abort ();
8686 case ABI_AIX:
8687 putc ('.', file);
8688 break;
8690 case ABI_V4:
8691 case ABI_DARWIN:
8692 break;
8695 #if TARGET_AIX
8696 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
8697 #else
8698 assemble_name (file, XSTR (x, 0));
8699 #endif
8700 return;
8702 case 'Z':
8703 /* Like 'L', for last word of TImode. */
8704 if (GET_CODE (x) == REG)
8705 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
8706 else if (GET_CODE (x) == MEM)
8708 if (GET_CODE (XEXP (x, 0)) == PRE_INC
8709 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
8710 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
8711 else
8712 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
8713 if (small_data_operand (x, GET_MODE (x)))
8714 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8715 reg_names[SMALL_DATA_REG]);
8717 return;
8719 /* Print AltiVec or SPE memory operand. */
8720 case 'y':
8722 rtx tmp;
8724 if (GET_CODE (x) != MEM)
8725 abort ();
8727 tmp = XEXP (x, 0);
8729 if (TARGET_E500)
8731 /* Handle [reg]. */
8732 if (GET_CODE (tmp) == REG)
8734 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
8735 break;
8737 /* Handle [reg+UIMM]. */
8738 else if (GET_CODE (tmp) == PLUS &&
8739 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
8741 int x;
8743 if (GET_CODE (XEXP (tmp, 0)) != REG)
8744 abort ();
8746 x = INTVAL (XEXP (tmp, 1));
8747 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
8748 break;
8751 /* Fall through. Must be [reg+reg]. */
8753 if (GET_CODE (tmp) == REG)
8754 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
8755 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
8757 if (REGNO (XEXP (tmp, 0)) == 0)
8758 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
8759 reg_names[ REGNO (XEXP (tmp, 0)) ]);
8760 else
8761 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
8762 reg_names[ REGNO (XEXP (tmp, 1)) ]);
8764 else
8765 abort ();
8766 break;
8769 case 0:
8770 if (GET_CODE (x) == REG)
8771 fprintf (file, "%s", reg_names[REGNO (x)]);
8772 else if (GET_CODE (x) == MEM)
8774 /* We need to handle PRE_INC and PRE_DEC here, since we need to
8775 know the width from the mode. */
8776 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
8777 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
8778 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
8779 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
8780 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
8781 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
8782 else
8783 output_address (XEXP (x, 0));
8785 else
8786 output_addr_const (file, x);
8787 return;
8789 case '&':
8790 assemble_name (file, rs6000_get_some_local_dynamic_name ());
8791 return;
8793 default:
8794 output_operand_lossage ("invalid %%xn code");
8798 /* Print the address of an operand. */
8800 void
8801 print_operand_address (file, x)
8802 FILE *file;
8803 rtx x;
8805 if (GET_CODE (x) == REG)
8806 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
8807 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
8808 || GET_CODE (x) == LABEL_REF)
8810 output_addr_const (file, x);
8811 if (small_data_operand (x, GET_MODE (x)))
8812 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8813 reg_names[SMALL_DATA_REG]);
8814 else if (TARGET_TOC)
8815 abort ();
8817 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
8819 if (REGNO (XEXP (x, 0)) == 0)
8820 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
8821 reg_names[ REGNO (XEXP (x, 0)) ]);
8822 else
8823 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
8824 reg_names[ REGNO (XEXP (x, 1)) ]);
8826 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
8828 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (XEXP (x, 1)));
8829 fprintf (file, "(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
8831 #if TARGET_ELF
8832 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
8833 && CONSTANT_P (XEXP (x, 1)))
8835 output_addr_const (file, XEXP (x, 1));
8836 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
8838 #endif
8839 #if TARGET_MACHO
8840 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
8841 && CONSTANT_P (XEXP (x, 1)))
8843 fprintf (file, "lo16(");
8844 output_addr_const (file, XEXP (x, 1));
8845 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
8847 #endif
8848 else if (legitimate_constant_pool_address_p (x))
8850 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
8852 rtx contains_minus = XEXP (x, 1);
8853 rtx minus, symref;
8854 const char *name;
8856 /* Find the (minus (sym) (toc)) buried in X, and temporarily
8857 turn it into (sym) for output_addr_const. */
8858 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
8859 contains_minus = XEXP (contains_minus, 0);
8861 minus = XEXP (contains_minus, 0);
8862 symref = XEXP (minus, 0);
8863 XEXP (contains_minus, 0) = symref;
8864 if (TARGET_ELF)
8866 char *newname;
8868 name = XSTR (symref, 0);
8869 newname = alloca (strlen (name) + sizeof ("@toc"));
8870 strcpy (newname, name);
8871 strcat (newname, "@toc");
8872 XSTR (symref, 0) = newname;
8874 output_addr_const (file, XEXP (x, 1));
8875 if (TARGET_ELF)
8876 XSTR (symref, 0) = name;
8877 XEXP (contains_minus, 0) = minus;
8879 else
8880 output_addr_const (file, XEXP (x, 1));
8882 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
8884 else
8885 abort ();
/* Target hook for assembling integer objects.  The PowerPC version has
   to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
   is defined.  It also needs to handle DI-mode objects on 64-bit
   targets.

   X is the value to assemble, SIZE its size in bytes, and ALIGNED_P
   nonzero if the value is known to be properly aligned.  Returns true
   when the value was emitted here, false to fall back on the default
   hook.  */

static bool
rs6000_assemble_integer (x, size, aligned_p)
     rtx x;
     unsigned int size;
     int aligned_p;
{
#ifdef RELOCATABLE_NEEDS_FIXUP
  /* Special handling for SI values.  */
  if (size == 4 && aligned_p)
    {
      extern int in_toc_section PARAMS ((void));
      /* Re-entry guard: output_addr_const below may recurse back into
	 this hook -- presumably via the constant's sub-expressions;
	 TODO(review): confirm the exact recursion path.  */
      static int recurse = 0;

      /* For -mrelocatable, we mark all addresses that need to be fixed up
	 in the .fixup section.  Constant integers and doubles need no
	 runtime relocation, so they are excluded below.  */
      if (TARGET_RELOCATABLE
	  && !in_toc_section ()
	  && !in_text_section ()
	  && !recurse
	  && GET_CODE (x) != CONST_INT
	  && GET_CODE (x) != CONST_DOUBLE
	  && CONSTANT_P (x))
	{
	  char buf[256];

	  recurse = 1;
	  /* Emit an internal label at the word, then record that
	     label's address in the .fixup section so the startup code
	     can relocate the word.  */
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
	  fixuplabelno++;
	  ASM_OUTPUT_LABEL (asm_out_file, buf);
	  fprintf (asm_out_file, "\t.long\t(");
	  output_addr_const (asm_out_file, x);
	  fprintf (asm_out_file, ")@fixup\n");
	  fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
	  ASM_OUTPUT_ALIGN (asm_out_file, 2);
	  fprintf (asm_out_file, "\t.long\t");
	  assemble_name (asm_out_file, buf);
	  fprintf (asm_out_file, "\n\t.previous\n");
	  recurse = 0;
	  return true;
	}
      /* Remove initial .'s to turn a -mcall-aixdesc function
	 address into the address of the descriptor, not the function
	 itself.  */
      else if (GET_CODE (x) == SYMBOL_REF
	       && XSTR (x, 0)[0] == '.'
	       && DEFAULT_ABI == ABI_AIX)
	{
	  const char *name = XSTR (x, 0);
	  while (*name == '.')
	    name++;

	  fprintf (asm_out_file, "\t.long\t%s\n", name);
	  return true;
	}
    }
#endif /* RELOCATABLE_NEEDS_FIXUP */
  return default_assemble_integer (x, size, aligned_p);
}
#ifdef HAVE_GAS_HIDDEN
/* Emit an assembler directive to set symbol visibility for DECL to
   VISIBILITY_TYPE.  */

static void
rs6000_assemble_visibility (decl, vis)
     tree decl;
     int vis;
{
  /* Under the AIX ABI a function has both an entry point symbol
     (".name") and a descriptor symbol ("name"); the visibility
     directive must be emitted for each.  Everything else is handled
     by the generic hook.  */
  if (DEFAULT_ABI != ABI_AIX || TREE_CODE (decl) != FUNCTION_DECL)
    {
      default_assemble_visibility (decl, vis);
      return;
    }

  {
    static const char * const vis_directive[] = {
      NULL, "internal", "hidden", "protected"
    };

    const char *directive = vis_directive[vis];
    const char *sym_name
      = ((* targetm.strip_name_encoding)
	 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));

    fprintf (asm_out_file, "\t.%s\t%s\n", directive, sym_name);
    fprintf (asm_out_file, "\t.%s\t.%s\n", directive, sym_name);
  }
}
#endif
8983 enum rtx_code
8984 rs6000_reverse_condition (mode, code)
8985 enum machine_mode mode;
8986 enum rtx_code code;
8988 /* Reversal of FP compares takes care -- an ordered compare
8989 becomes an unordered compare and vice versa. */
8990 if (mode == CCFPmode
8991 && (!flag_finite_math_only
8992 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
8993 || code == UNEQ || code == LTGT))
8994 return reverse_condition_maybe_unordered (code);
8995 else
8996 return reverse_condition (code);
/* Generate a compare for CODE.  Return a brand-new rtx that
   represents the result of the compare.  The operands compared are
   taken from the globals rs6000_compare_op0/op1, and
   rs6000_compare_fp_p says whether this is a floating compare.  */

static rtx
rs6000_generate_compare (code)
     enum rtx_code code;
{
  enum machine_mode comp_mode;
  rtx compare_result;

  /* Pick the CC mode: FP compares use CCFPmode, unsigned integer
     compares CCUNSmode, everything else plain CCmode.  */
  if (rs6000_compare_fp_p)
    comp_mode = CCFPmode;
  else if (code == GTU || code == LTU
	   || code == GEU || code == LEU)
    comp_mode = CCUNSmode;
  else
    comp_mode = CCmode;

  /* First, the compare.  */
  compare_result = gen_reg_rtx (comp_mode);

  /* SPE FP compare instructions on the GPRs.  Yuck!  */
  if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT)
      && rs6000_compare_fp_p)
    {
      rtx cmp, or1, or2, or_result, compare_result2;

      /* SPE has only eq/gt/lt test instructions (tst* under
	 finite-math, cmp* otherwise), so every code is mapped onto
	 one of those three here and fixed up afterwards.  */
      switch (code)
	{
	case EQ:
	case UNEQ:
	case NE:
	case LTGT:
	  cmp = flag_finite_math_only
	    ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1)
	    : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1);
	  break;
	case GT:
	case GTU:
	case UNGT:
	case UNGE:
	case GE:
	case GEU:
	  cmp = flag_finite_math_only
	    ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1)
	    : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1);
	  break;
	case LT:
	case LTU:
	case UNLT:
	case UNLE:
	case LE:
	case LEU:
	  cmp = flag_finite_math_only
	    ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1)
	    : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1);
	  break;
	default:
	  abort ();
	}

      /* Synthesize LE and GE from LT/GT || EQ.  */
      if (code == LE || code == GE || code == LEU || code == GEU)
	{
	  /* Synthesize GE/LE from GT/LT || EQ.  */

	  emit_insn (cmp);

	  switch (code)
	    {
	    case LE: code = LT; break;
	    case GE: code = GT; break;
	    case LEU: code = LT; break;
	    case GEU: code = GT; break;
	    default: abort ();
	    }

	  /* NOTE(review): these two SImode pseudos are dead -- or1
	     and or2 are reassigned below before use.  Kept as-is.  */
	  or1 = gen_reg_rtx (SImode);
	  or2 = gen_reg_rtx (SImode);
	  or_result = gen_reg_rtx (CCEQmode);
	  compare_result2 = gen_reg_rtx (CCFPmode);

	  /* Do the EQ.  */
	  cmp = flag_finite_math_only
	    ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
			       rs6000_compare_op1)
	    : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
			       rs6000_compare_op1);
	  emit_insn (cmp);

	  /* The MC8540 FP compare instructions set the CR bits
	     differently than other PPC compare instructions.  For
	     that matter, there is no generic test instruction, but a
	     testgt, testlt, and testeq.  For a true condition, bit 2
	     is set (x1xx) in the CR.  Following the traditional CR
	     values:

	     LT    GT    EQ    OV
	     bit3  bit2  bit1  bit0

	     ... bit 2 would be a GT CR alias, so later on we
	     look in the GT bits for the branch instructions.
	     However, we must be careful to emit correct RTL in
	     the meantime, so optimizations don't get confused.  */

	  or1 = gen_rtx (NE, SImode, compare_result, const0_rtx);
	  or2 = gen_rtx (NE, SImode, compare_result2, const0_rtx);

	  /* OR them together.  */
	  cmp = gen_rtx_SET (VOIDmode, or_result,
			     gen_rtx_COMPARE (CCEQmode,
					      gen_rtx_IOR (SImode, or1, or2),
					      const_true_rtx));
	  compare_result = or_result;
	  code = EQ;
	}
      else
	{
	  /* We only care about 1 bit (x1xx), so map everything to NE to
	     maintain rtl sanity.  We'll get to the right bit (x1xx) at
	     code output time.  */
	  if (code == NE || code == LTGT)
	    /* Do the inverse here because we have no cmpne
	       instruction.  We use the cmpeq instruction and expect
	       to get a 0 instead.  */
	    code = EQ;
	  else
	    code = NE;
	}

      /* In the LE/GE path above CMP has been reassigned to the
	 OR-combining SET, so this emits that SET; otherwise it emits
	 the single test/compare chosen in the switch.  */
      emit_insn (cmp);
    }
  else
    emit_insn (gen_rtx_SET (VOIDmode, compare_result,
			    gen_rtx_COMPARE (comp_mode,
					     rs6000_compare_op0,
					     rs6000_compare_op1)));

  /* Some kinds of FP comparisons need an OR operation;
     under flag_finite_math_only we don't bother.  */
  if (rs6000_compare_fp_p
      && ! flag_finite_math_only
      && ! (TARGET_HARD_FLOAT && TARGET_E500 && !TARGET_FPRS)
      && (code == LE || code == GE
	  || code == UNEQ || code == LTGT
	  || code == UNGT || code == UNLT))
    {
      enum rtx_code or1, or2;
      rtx or1_rtx, or2_rtx, compare2_rtx;
      rtx or_result = gen_reg_rtx (CCEQmode);

      /* Decompose CODE into the OR of two simpler conditions on the
	 same CC register, then combine them into a CCEQ result.  */
      switch (code)
	{
	case LE: or1 = LT;  or2 = EQ;  break;
	case GE: or1 = GT;  or2 = EQ;  break;
	case UNEQ: or1 = UNORDERED;  or2 = EQ;  break;
	case LTGT: or1 = LT;  or2 = GT;  break;
	case UNGT: or1 = UNORDERED;  or2 = GT;  break;
	case UNLT: or1 = UNORDERED;  or2 = LT;  break;
	default: abort ();
	}
      validate_condition_mode (or1, comp_mode);
      validate_condition_mode (or2, comp_mode);
      or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
      or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
      compare2_rtx = gen_rtx_COMPARE (CCEQmode,
				      gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
				      const_true_rtx);
      emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));

      compare_result = or_result;
      code = EQ;
    }

  validate_condition_mode (code, GET_MODE (compare_result));

  return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
}
/* Emit the RTL for an sCOND pattern: store in RESULT the value (0/1)
   of comparison CODE applied to rs6000_compare_op0/op1.  */

void
rs6000_emit_sCOND (code, result)
     enum rtx_code code;
     rtx result;
{
  rtx condition_rtx;
  enum machine_mode op_mode;
  enum rtx_code cond_code;

  condition_rtx = rs6000_generate_compare (code);
  cond_code = GET_CODE (condition_rtx);

  /* These condition codes are rewritten as an EQ test against a
     freshly computed CCEQ register holding the reversed condition --
     presumably because they cannot be materialized directly by the
     scc patterns; TODO(review) confirm against the .md patterns.  */
  if (cond_code == NE
      || cond_code == GE || cond_code == LE
      || cond_code == GEU || cond_code == LEU
      || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
    {
      rtx not_result = gen_reg_rtx (CCEQmode);
      rtx not_op, rev_cond_rtx;
      enum machine_mode cc_mode;

      cc_mode = GET_MODE (XEXP (condition_rtx, 0));

      /* Compute the reversed condition into NOT_RESULT, then test
	 that for EQ with zero (i.e. "reversed condition is false").  */
      rev_cond_rtx = gen_rtx (rs6000_reverse_condition (cc_mode, cond_code),
			      SImode, XEXP (condition_rtx, 0), const0_rtx);
      not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
      emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
      condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
    }

  /* Determine the mode the original operands were compared in; op0
     may be VOIDmode (a constant), in which case use op1's mode.  */
  op_mode = GET_MODE (rs6000_compare_op0);
  if (op_mode == VOIDmode)
    op_mode = GET_MODE (rs6000_compare_op1);

  if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
    {
      /* The comparison result is produced in DImode on 64-bit;
	 convert_move narrows/extends it into RESULT's mode.  */
      PUT_MODE (condition_rtx, DImode);
      convert_move (result, condition_rtx, 0);
    }
  else
    {
      PUT_MODE (condition_rtx, SImode);
      emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
    }
}
9233 /* Emit a branch of kind CODE to location LOC. */
9235 void
9236 rs6000_emit_cbranch (code, loc)
9237 enum rtx_code code;
9238 rtx loc;
9240 rtx condition_rtx, loc_ref;
9242 condition_rtx = rs6000_generate_compare (code);
9243 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
9244 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
9245 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
9246 loc_ref, pc_rtx)));
/* Return the string to output a conditional branch to LABEL, which is
   the operand number of the label, or -1 if the branch is really a
   conditional return.

   OP is the conditional expression.  XEXP (OP, 0) is assumed to be a
   condition code register and its mode specifies what kind of
   comparison we made.

   REVERSED is nonzero if we should reverse the sense of the comparison.

   INSN is the insn.

   NOTE: returns a pointer to a static buffer, so the result is only
   valid until the next call and this function is not reentrant.  */

char *
output_cbranch (op, label, reversed, insn)
     rtx op;
     const char * label;
     int reversed;
     rtx insn;
{
  static char string[64];
  enum rtx_code code = GET_CODE (op);
  rtx cc_reg = XEXP (op, 0);
  enum machine_mode mode = GET_MODE (cc_reg);
  int cc_regno = REGNO (cc_reg) - CR0_REGNO;
  /* Length 8 means the branch target is out of range of a single
     conditional branch, so a long-branch sequence is needed.  */
  int need_longbranch = label != NULL && get_attr_length (insn) == 8;
  /* A long branch inverts the condition and jumps around an
     unconditional branch, so it also reverses the sense.  */
  int really_reversed = reversed ^ need_longbranch;
  char *s = string;
  const char *ccode;
  const char *pred;
  rtx note;

  validate_condition_mode (code, mode);

  /* Work out which way this really branches.  We could use
     reverse_condition_maybe_unordered here always but this
     makes the resulting assembler clearer.  */
  if (really_reversed)
    {
      /* Reversal of FP compares takes care -- an ordered compare
	 becomes an unordered compare and vice versa.  */
      if (mode == CCFPmode)
	code = reverse_condition_maybe_unordered (code);
      else
	code = reverse_condition (code);
    }

  if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
    {
      /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
	 to the GT bit.  */
      if (code == EQ)
	/* Opposite of GT.  */
	code = UNLE;
      else if (code == NE)
	code = GT;
      else
	abort ();
    }

  switch (code)
    {
      /* Not all of these are actually distinct opcodes, but
	 we distinguish them for clarity of the resulting assembler.  */
    case NE: case LTGT:
      ccode = "ne"; break;
    case EQ: case UNEQ:
      ccode = "eq"; break;
    case GE: case GEU:
      ccode = "ge"; break;
    case GT: case GTU: case UNGT:
      ccode = "gt"; break;
    case LE: case LEU:
      ccode = "le"; break;
    case LT: case LTU: case UNLT:
      ccode = "lt"; break;
    case UNORDERED: ccode = "un"; break;
    case ORDERED: ccode = "nu"; break;
    case UNGE: ccode = "nl"; break;
    case UNLE: ccode = "ng"; break;
    default:
      abort ();
    }

  /* Maybe we have a guess as to how likely the branch is.
     The old mnemonics don't have a way to specify this information.  */
  pred = "";
  note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
  if (note != NULL_RTX)
    {
      /* PROB is the difference from 50%.  */
      int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
      bool always_hint = rs6000_cpu != PROCESSOR_POWER4;

      /* Only hint for highly probable/improbable branches on newer
	 cpus as static prediction overrides processor dynamic
	 prediction.  For older cpus we may as well always hint, but
	 assume not taken for branches that are very close to 50% as a
	 mispredicted taken branch is more expensive than a
	 mispredicted not-taken branch.  */
      if (always_hint
	  || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
	{
	  /* The hint sense is flipped for a long branch since the
	     conditional branch then jumps AROUND the real target.  */
	  if (abs (prob) > REG_BR_PROB_BASE / 20
	      && ((prob > 0) ^ need_longbranch))
	    pred = "+";
	  else
	    pred = "-";
	}
    }

  /* {old|new} mnemonic pairs for -mold-mnemonics output selection;
     NULL label means a conditional return via the link register.  */
  if (label == NULL)
    s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
  else
    s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);

  /* We need to escape any '%' characters in the reg_names string.
     Assume they'd only be the first character...  */
  if (reg_names[cc_regno + CR0_REGNO][0] == '%')
    *s++ = '%';
  s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);

  if (label != NULL)
    {
      /* If the branch distance was too far, we may have to use an
	 unconditional branch to go the distance.  */
      if (need_longbranch)
	s += sprintf (s, ",$+8\n\tb %s", label);
      else
	s += sprintf (s, ",%s", label);
    }

  return string;
}
/* Emit a conditional move: move TRUE_COND to DEST if OP of the
   operands of the last comparison is nonzero/true, FALSE_COND if it
   is zero/false.  Return 0 if the hardware has no such operation.  */

int
rs6000_emit_cmove (dest, op, true_cond, false_cond)
     rtx dest;
     rtx op;
     rtx true_cond;
     rtx false_cond;
{
  enum rtx_code code = GET_CODE (op);
  rtx op0 = rs6000_compare_op0;
  rtx op1 = rs6000_compare_op1;
  REAL_VALUE_TYPE c1;
  enum machine_mode compare_mode = GET_MODE (op0);
  enum machine_mode result_mode = GET_MODE (dest);
  rtx temp;

  /* These modes should always match.  */
  if (GET_MODE (op1) != compare_mode
      /* In the isel case however, we can use a compare immediate, so
	 op1 may be a small constant.  */
      && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
    return 0;
  if (GET_MODE (true_cond) != result_mode)
    return 0;
  if (GET_MODE (false_cond) != result_mode)
    return 0;

  /* First, work out if the hardware can do this at all, or
     if it's too slow....  Integer cmove is only available via isel;
     everything past this point is the FP (fsel-style) path.  */
  if (! rs6000_compare_fp_p)
    {
      if (TARGET_ISEL)
	return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
      return 0;
    }

  /* Eliminate half of the comparisons by switching operands, this
     makes the remaining code simpler.  */
  if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
      || code == LTGT || code == LT || code == UNLE)
    {
      code = reverse_condition_maybe_unordered (code);
      temp = true_cond;
      true_cond = false_cond;
      false_cond = temp;
    }

  /* UNEQ and LTGT take four instructions for a comparison with zero,
     it'll probably be faster to use a branch here too.  */
  if (code == UNEQ && HONOR_NANS (compare_mode))
    return 0;

  if (GET_CODE (op1) == CONST_DOUBLE)
    REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);

  /* We're going to try to implement comparisons by performing
     a subtract, then comparing against zero.  Unfortunately,
     Inf - Inf is NaN which is not zero, and so if we don't
     know that the operand is finite and the comparison
     would treat EQ different to UNORDERED, we can't do it.  */
  if (HONOR_INFINITIES (compare_mode)
      && code != GT && code != UNGE
      && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
      /* Constructs of the form (a OP b ? a : b) are safe.  */
      && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
	  || (! rtx_equal_p (op0, true_cond)
	      && ! rtx_equal_p (op1, true_cond))))
    return 0;
  /* At this point we know we can use fsel.  */

  /* Reduce the comparison to a comparison against zero.  */
  temp = gen_reg_rtx (compare_mode);
  emit_insn (gen_rtx_SET (VOIDmode, temp,
			  gen_rtx_MINUS (compare_mode, op0, op1)));
  op0 = temp;
  op1 = CONST0_RTX (compare_mode);

  /* If we don't care about NaNs we can reduce some of the comparisons
     down to faster ones.  */
  if (! HONOR_NANS (compare_mode))
    switch (code)
      {
      case GT:
	/* Without NaNs, GT is the complement of LE: swap the arms.  */
	code = LE;
	temp = true_cond;
	true_cond = false_cond;
	false_cond = temp;
	break;
      case UNGE:
	code = GE;
	break;
      case UNEQ:
	code = EQ;
	break;
      default:
	break;
      }

  /* Now, reduce everything down to a GE (against zero), which is what
     the final IF_THEN_ELSE below tests.  */
  switch (code)
    {
    case GE:
      break;

    case LE:
      /* a LE 0 <-> -a GE 0.  */
      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
      op0 = temp;
      break;

    case ORDERED:
      /* |a| GE 0 is false only when a is NaN.  */
      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
      op0 = temp;
      break;

    case EQ:
      /* a EQ 0 <-> -|a| GE 0.  */
      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp,
			      gen_rtx_NEG (compare_mode,
					   gen_rtx_ABS (compare_mode, op0))));
      op0 = temp;
      break;

    case UNGE:
      /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
      temp = gen_reg_rtx (result_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp,
			      gen_rtx_IF_THEN_ELSE (result_mode,
						    gen_rtx_GE (VOIDmode,
								op0, op1),
						    true_cond, false_cond)));
      false_cond = true_cond;
      true_cond = temp;

      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
      op0 = temp;
      break;

    case GT:
      /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
      temp = gen_reg_rtx (result_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp,
			      gen_rtx_IF_THEN_ELSE (result_mode,
						    gen_rtx_GE (VOIDmode,
								op0, op1),
						    true_cond, false_cond)));
      true_cond = false_cond;
      false_cond = temp;

      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
      op0 = temp;
      break;

    default:
      abort ();
    }

  emit_insn (gen_rtx_SET (VOIDmode, dest,
			  gen_rtx_IF_THEN_ELSE (result_mode,
						gen_rtx_GE (VOIDmode,
							    op0, op1),
						true_cond, false_cond)));

  return 1;
}
9554 /* Same as above, but for ints (isel). */
9556 static int
9557 rs6000_emit_int_cmove (dest, op, true_cond, false_cond)
9558 rtx dest;
9559 rtx op;
9560 rtx true_cond;
9561 rtx false_cond;
9563 rtx condition_rtx, cr;
9565 /* All isel implementations thus far are 32-bits. */
9566 if (GET_MODE (rs6000_compare_op0) != SImode)
9567 return 0;
9569 /* We still have to do the compare, because isel doesn't do a
9570 compare, it just looks at the CRx bits set by a previous compare
9571 instruction. */
9572 condition_rtx = rs6000_generate_compare (GET_CODE (op));
9573 cr = XEXP (condition_rtx, 0);
9575 if (GET_MODE (cr) == CCmode)
9576 emit_insn (gen_isel_signed (dest, condition_rtx,
9577 true_cond, false_cond, cr));
9578 else
9579 emit_insn (gen_isel_unsigned (dest, condition_rtx,
9580 true_cond, false_cond, cr));
9582 return 1;
9585 const char *
9586 output_isel (operands)
9587 rtx *operands;
9589 enum rtx_code code;
9591 code = GET_CODE (operands[1]);
9592 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
9594 PUT_CODE (operands[1], reverse_condition (code));
9595 return "isel %0,%3,%2,%j1";
9597 else
9598 return "isel %0,%2,%3,%j1";
9601 void
9602 rs6000_emit_minmax (dest, code, op0, op1)
9603 rtx dest;
9604 enum rtx_code code;
9605 rtx op0;
9606 rtx op1;
9608 enum machine_mode mode = GET_MODE (op0);
9609 enum rtx_code c;
9610 rtx target;
9612 if (code == SMAX || code == SMIN)
9613 c = GE;
9614 else
9615 c = GEU;
9617 if (code == SMAX || code == UMAX)
9618 target = emit_conditional_move (dest, c, op0, op1, mode,
9619 op0, op1, mode, 0);
9620 else
9621 target = emit_conditional_move (dest, c, op0, op1, mode,
9622 op1, op0, mode, 0);
9623 if (target == NULL_RTX)
9624 abort ();
9625 if (target != dest)
9626 emit_move_insn (dest, target);
9629 /* This page contains routines that are used to determine what the
9630 function prologue and epilogue code will do and write them out. */
9632 /* Return the first fixed-point register that is required to be
9633 saved. 32 if none. */
9636 first_reg_to_save ()
9638 int first_reg;
9640 /* Find lowest numbered live register. */
9641 for (first_reg = 13; first_reg <= 31; first_reg++)
9642 if (regs_ever_live[first_reg]
9643 && (! call_used_regs[first_reg]
9644 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
9645 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
9646 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
9647 break;
9649 #if TARGET_MACHO
9650 if (flag_pic
9651 && current_function_uses_pic_offset_table
9652 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
9653 return RS6000_PIC_OFFSET_TABLE_REGNUM;
9654 #endif
9656 return first_reg;
9659 /* Similar, for FP regs. */
9662 first_fp_reg_to_save ()
9664 int first_reg;
9666 /* Find lowest numbered live register. */
9667 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
9668 if (regs_ever_live[first_reg])
9669 break;
9671 return first_reg;
9674 /* Similar, for AltiVec regs. */
9676 static int
9677 first_altivec_reg_to_save ()
9679 int i;
9681 /* Stack frame remains as is unless we are in AltiVec ABI. */
9682 if (! TARGET_ALTIVEC_ABI)
9683 return LAST_ALTIVEC_REGNO + 1;
9685 /* Find lowest numbered live register. */
9686 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
9687 if (regs_ever_live[i])
9688 break;
9690 return i;
9693 /* Return a 32-bit mask of the AltiVec registers we need to set in
9694 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
9695 the 32-bit word is 0. */
9697 static unsigned int
9698 compute_vrsave_mask ()
9700 unsigned int i, mask = 0;
9702 /* First, find out if we use _any_ altivec registers. */
9703 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
9704 if (regs_ever_live[i])
9705 mask |= ALTIVEC_REG_BIT (i);
9707 if (mask == 0)
9708 return mask;
9710 /* Next, remove the argument registers from the set. These must
9711 be in the VRSAVE mask set by the caller, so we don't need to add
9712 them in again. More importantly, the mask we compute here is
9713 used to generate CLOBBERs in the set_vrsave insn, and we do not
9714 wish the argument registers to die. */
9715 for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
9716 mask &= ~ALTIVEC_REG_BIT (i);
9718 /* Similarly, remove the return value from the set. */
9720 bool yes = false;
9721 diddle_return_value (is_altivec_return_reg, &yes);
9722 if (yes)
9723 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
9726 return mask;
9729 static void
9730 is_altivec_return_reg (reg, xyes)
9731 rtx reg;
9732 void *xyes;
9734 bool *yes = (bool *) xyes;
9735 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
9736 *yes = true;
9740 /* Calculate the stack information for the current function. This is
9741 complicated by having two separate calling sequences, the AIX calling
9742 sequence and the V.4 calling sequence.
9744 AIX (and Darwin/Mac OS X) stack frames look like:
9745 32-bit 64-bit
9746 SP----> +---------------------------------------+
9747 | back chain to caller | 0 0
9748 +---------------------------------------+
9749 | saved CR | 4 8 (8-11)
9750 +---------------------------------------+
9751 | saved LR | 8 16
9752 +---------------------------------------+
9753 | reserved for compilers | 12 24
9754 +---------------------------------------+
9755 | reserved for binders | 16 32
9756 +---------------------------------------+
9757 | saved TOC pointer | 20 40
9758 +---------------------------------------+
9759 | Parameter save area (P) | 24 48
9760 +---------------------------------------+
9761 | Alloca space (A) | 24+P etc.
9762 +---------------------------------------+
9763 | Local variable space (L) | 24+P+A
9764 +---------------------------------------+
9765 | Float/int conversion temporary (X) | 24+P+A+L
9766 +---------------------------------------+
9767 | Save area for AltiVec registers (W) | 24+P+A+L+X
9768 +---------------------------------------+
9769 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
9770 +---------------------------------------+
9771 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
9772 +---------------------------------------+
	| Save area for GP registers (G)	| 24+P+A+L+X+W+Y+Z
	+---------------------------------------+
	| Save area for FP registers (F)	| 24+P+A+L+X+W+Y+Z+G
9776 +---------------------------------------+
9777 old SP->| back chain to caller's caller |
9778 +---------------------------------------+
9780 The required alignment for AIX configurations is two words (i.e., 8
9781 or 16 bytes).
9784 V.4 stack frames look like:
9786 SP----> +---------------------------------------+
9787 | back chain to caller | 0
9788 +---------------------------------------+
9789 | caller's saved LR | 4
9790 +---------------------------------------+
9791 | Parameter save area (P) | 8
9792 +---------------------------------------+
9793 | Alloca space (A) | 8+P
9794 +---------------------------------------+
9795 | Varargs save area (V) | 8+P+A
9796 +---------------------------------------+
9797 | Local variable space (L) | 8+P+A+V
9798 +---------------------------------------+
9799 | Float/int conversion temporary (X) | 8+P+A+V+L
9800 +---------------------------------------+
9801 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
9802 +---------------------------------------+
9803 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
9804 +---------------------------------------+
9805 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
9806 +---------------------------------------+
9807 | SPE: area for 64-bit GP registers |
9808 +---------------------------------------+
9809 | SPE alignment padding |
9810 +---------------------------------------+
9811 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
9812 +---------------------------------------+
9813 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
9814 +---------------------------------------+
9815 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
9816 +---------------------------------------+
9817 old SP->| back chain to caller's caller |
9818 +---------------------------------------+
9820 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
9821 given. (But note below and in sysv4.h that we require only 8 and
9822 may round up the size of our stack frame anyways. The historical
9823 reason is early versions of powerpc-linux which didn't properly
9824 align the stack at program startup. A happy side-effect is that
9825 -mno-eabi libraries can be used with -meabi programs.)
9827 The EABI configuration defaults to the V.4 layout. However,
9828 the stack alignment requirements may differ. If -mno-eabi is not
9829 given, the required stack alignment is 8 bytes; if -mno-eabi is
9830 given, the required alignment is 16 bytes. (But see V.4 comment
9831 above.) */
9833 #ifndef ABI_STACK_BOUNDARY
9834 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
9835 #endif
9837 rs6000_stack_t *
9838 rs6000_stack_info ()
9840 static rs6000_stack_t info, zero_info;
9841 rs6000_stack_t *info_ptr = &info;
9842 int reg_size = TARGET_POWERPC64 ? 8 : 4;
9843 int ehrd_size;
9844 int total_raw_size;
9846 /* Zero all fields portably. */
9847 info = zero_info;
9849 if (TARGET_SPE)
9851 /* Cache value so we don't rescan instruction chain over and over. */
9852 if (cfun->machine->insn_chain_scanned_p == 0)
9854 cfun->machine->insn_chain_scanned_p = 1;
9855 info_ptr->spe_64bit_regs_used = (int) spe_func_has_64bit_regs_p ();
9859 /* Select which calling sequence. */
9860 info_ptr->abi = DEFAULT_ABI;
9862 /* Calculate which registers need to be saved & save area size. */
9863 info_ptr->first_gp_reg_save = first_reg_to_save ();
9864 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
9865 even if it currently looks like we won't. */
9866 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
9867 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
9868 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
9869 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
9870 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
9871 else
9872 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
9874 /* For the SPE, we have an additional upper 32-bits on each GPR.
9875 Ideally we should save the entire 64-bits only when the upper
9876 half is used in SIMD instructions. Since we only record
9877 registers live (not the size they are used in), this proves
9878 difficult because we'd have to traverse the instruction chain at
9879 the right time, taking reload into account. This is a real pain,
9880 so we opt to save the GPRs in 64-bits always if but one register
9881 gets used in 64-bits. Otherwise, all the registers in the frame
9882 get saved in 32-bits.
9884 So... since when we save all GPRs (except the SP) in 64-bits, the
9885 traditional GP save area will be empty. */
9886 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
9887 info_ptr->gp_size = 0;
9889 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
9890 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
9892 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
9893 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
9894 - info_ptr->first_altivec_reg_save);
9896 /* Does this function call anything? */
9897 info_ptr->calls_p = (! current_function_is_leaf
9898 || cfun->machine->ra_needs_full_frame);
9900 /* Determine if we need to save the link register. */
9901 if (rs6000_ra_ever_killed ()
9902 || (DEFAULT_ABI == ABI_AIX
9903 && current_function_profile
9904 && !TARGET_PROFILE_KERNEL)
9905 #ifdef TARGET_RELOCATABLE
9906 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
9907 #endif
9908 || (info_ptr->first_fp_reg_save != 64
9909 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
9910 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
9911 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
9912 || (DEFAULT_ABI == ABI_DARWIN
9913 && flag_pic
9914 && current_function_uses_pic_offset_table)
9915 || info_ptr->calls_p)
9917 info_ptr->lr_save_p = 1;
9918 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
9921 /* Determine if we need to save the condition code registers. */
9922 if (regs_ever_live[CR2_REGNO]
9923 || regs_ever_live[CR3_REGNO]
9924 || regs_ever_live[CR4_REGNO])
9926 info_ptr->cr_save_p = 1;
9927 if (DEFAULT_ABI == ABI_V4)
9928 info_ptr->cr_size = reg_size;
9931 /* If the current function calls __builtin_eh_return, then we need
9932 to allocate stack space for registers that will hold data for
9933 the exception handler. */
9934 if (current_function_calls_eh_return)
9936 unsigned int i;
9937 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
9938 continue;
9940 /* SPE saves EH registers in 64-bits. */
9941 ehrd_size = i * (TARGET_SPE_ABI
9942 && info_ptr->spe_64bit_regs_used != 0
9943 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
9945 else
9946 ehrd_size = 0;
9948 /* Determine various sizes. */
9949 info_ptr->reg_size = reg_size;
9950 info_ptr->fixed_size = RS6000_SAVE_AREA;
9951 info_ptr->varargs_size = RS6000_VARARGS_AREA;
9952 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
9953 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
9956 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
9957 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
9958 else
9959 info_ptr->spe_gp_size = 0;
9961 if (TARGET_ALTIVEC_ABI && TARGET_ALTIVEC_VRSAVE)
9963 info_ptr->vrsave_mask = compute_vrsave_mask ();
9964 info_ptr->vrsave_size = info_ptr->vrsave_mask ? 4 : 0;
9966 else
9968 info_ptr->vrsave_mask = 0;
9969 info_ptr->vrsave_size = 0;
9972 /* Calculate the offsets. */
9973 switch (DEFAULT_ABI)
9975 case ABI_NONE:
9976 default:
9977 abort ();
9979 case ABI_AIX:
9980 case ABI_DARWIN:
9981 info_ptr->fp_save_offset = - info_ptr->fp_size;
9982 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
9984 if (TARGET_ALTIVEC_ABI)
9986 info_ptr->vrsave_save_offset
9987 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
9989 /* Align stack so vector save area is on a quadword boundary. */
9990 if (info_ptr->altivec_size != 0)
9991 info_ptr->altivec_padding_size
9992 = 16 - (-info_ptr->vrsave_save_offset % 16);
9993 else
9994 info_ptr->altivec_padding_size = 0;
9996 info_ptr->altivec_save_offset
9997 = info_ptr->vrsave_save_offset
9998 - info_ptr->altivec_padding_size
9999 - info_ptr->altivec_size;
10001 /* Adjust for AltiVec case. */
10002 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
10004 else
10005 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
10006 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
10007 info_ptr->lr_save_offset = 2*reg_size;
10008 break;
10010 case ABI_V4:
10011 info_ptr->fp_save_offset = - info_ptr->fp_size;
10012 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
10013 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
10015 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
10017 /* Align stack so SPE GPR save area is aligned on a
10018 double-word boundary. */
10019 if (info_ptr->spe_gp_size != 0)
10020 info_ptr->spe_padding_size
10021 = 8 - (-info_ptr->cr_save_offset % 8);
10022 else
10023 info_ptr->spe_padding_size = 0;
10025 info_ptr->spe_gp_save_offset
10026 = info_ptr->cr_save_offset
10027 - info_ptr->spe_padding_size
10028 - info_ptr->spe_gp_size;
10030 /* Adjust for SPE case. */
10031 info_ptr->toc_save_offset
10032 = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
10034 else if (TARGET_ALTIVEC_ABI)
10036 info_ptr->vrsave_save_offset
10037 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
10039 /* Align stack so vector save area is on a quadword boundary. */
10040 if (info_ptr->altivec_size != 0)
10041 info_ptr->altivec_padding_size
10042 = 16 - (-info_ptr->vrsave_save_offset % 16);
10043 else
10044 info_ptr->altivec_padding_size = 0;
10046 info_ptr->altivec_save_offset
10047 = info_ptr->vrsave_save_offset
10048 - info_ptr->altivec_padding_size
10049 - info_ptr->altivec_size;
10051 /* Adjust for AltiVec case. */
10052 info_ptr->toc_save_offset
10053 = info_ptr->altivec_save_offset - info_ptr->toc_size;
10055 else
10056 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
10057 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
10058 info_ptr->lr_save_offset = reg_size;
10059 break;
10062 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
10063 + info_ptr->gp_size
10064 + info_ptr->altivec_size
10065 + info_ptr->altivec_padding_size
10066 + info_ptr->vrsave_size
10067 + info_ptr->spe_gp_size
10068 + info_ptr->spe_padding_size
10069 + ehrd_size
10070 + info_ptr->cr_size
10071 + info_ptr->lr_size
10072 + info_ptr->vrsave_size
10073 + info_ptr->toc_size,
10074 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
10075 ? 16 : 8);
10077 total_raw_size = (info_ptr->vars_size
10078 + info_ptr->parm_size
10079 + info_ptr->save_size
10080 + info_ptr->varargs_size
10081 + info_ptr->fixed_size);
10083 info_ptr->total_size =
10084 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
10086 /* Determine if we need to allocate any stack frame:
10088 For AIX we need to push the stack if a frame pointer is needed
10089 (because the stack might be dynamically adjusted), if we are
10090 debugging, if we make calls, or if the sum of fp_save, gp_save,
10091 and local variables are more than the space needed to save all
10092 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
10093 + 18*8 = 288 (GPR13 reserved).
10095 For V.4 we don't have the stack cushion that AIX uses, but assume
10096 that the debugger can handle stackless frames. */
10098 if (info_ptr->calls_p)
10099 info_ptr->push_p = 1;
10101 else if (DEFAULT_ABI == ABI_V4)
10102 info_ptr->push_p = total_raw_size > info_ptr->fixed_size;
10104 else if (frame_pointer_needed)
10105 info_ptr->push_p = 1;
10107 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
10108 info_ptr->push_p = 1;
10110 else
10111 info_ptr->push_p
10112 = total_raw_size - info_ptr->fixed_size > (TARGET_32BIT ? 220 : 288);
10114 /* Zero offsets if we're not saving those registers. */
10115 if (info_ptr->fp_size == 0)
10116 info_ptr->fp_save_offset = 0;
10118 if (info_ptr->gp_size == 0)
10119 info_ptr->gp_save_offset = 0;
10121 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
10122 info_ptr->altivec_save_offset = 0;
10124 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
10125 info_ptr->vrsave_save_offset = 0;
10127 if (! TARGET_SPE_ABI
10128 || info_ptr->spe_64bit_regs_used == 0
10129 || info_ptr->spe_gp_size == 0)
10130 info_ptr->spe_gp_save_offset = 0;
10132 if (! info_ptr->lr_save_p)
10133 info_ptr->lr_save_offset = 0;
10135 if (! info_ptr->cr_save_p)
10136 info_ptr->cr_save_offset = 0;
10138 if (! info_ptr->toc_save_p)
10139 info_ptr->toc_save_offset = 0;
10141 return info_ptr;
10144 /* Return true if the current function uses any GPRs in 64-bit SIMD
10145 mode. */
10147 static bool
10148 spe_func_has_64bit_regs_p ()
10150 rtx insns, insn;
10152 /* Functions that save and restore all the call-saved registers will
10153 need to save/restore the registers in 64-bits. */
10154 if (current_function_calls_eh_return
10155 || current_function_calls_setjmp
10156 || current_function_has_nonlocal_goto)
10157 return true;
10159 insns = get_insns ();
10161 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
10163 if (INSN_P (insn))
10165 rtx i;
10167 i = PATTERN (insn);
10168 if (GET_CODE (i) == SET
10169 && SPE_VECTOR_MODE (GET_MODE (SET_SRC (i))))
10170 return true;
10174 return false;
/* Dump the rs6000 stack-frame layout INFO to stderr for debugging.
   If INFO is null, compute and dump the layout of the current
   function instead.  Fields holding zero (or their "nothing saved"
   sentinel, e.g. 32 for first_gp_reg_save) are omitted.  */

void
debug_stack_info (info)
     rs6000_stack_t *info;
{
  const char *abi_string;

  /* Default to the current function's frame layout.  */
  if (! info)
    info = rs6000_stack_info ();

  fprintf (stderr, "\nStack information for function %s:\n",
	   ((current_function_decl && DECL_NAME (current_function_decl))
	    ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
	    : "<unknown>"));

  switch (info->abi)
    {
    default:		 abi_string = "Unknown";	break;
    case ABI_NONE:	 abi_string = "NONE";		break;
    case ABI_AIX:	 abi_string = "AIX";		break;
    case ABI_DARWIN:	 abi_string = "Darwin";		break;
    case ABI_V4:	 abi_string = "V.4";		break;
    }

  fprintf (stderr, "\tABI                 = %5s\n", abi_string);

  if (TARGET_ALTIVEC_ABI)
    fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");

  if (TARGET_SPE_ABI)
    fprintf (stderr, "\tSPE ABI extensions enabled.\n");

  /* First register saved in each class; the sentinels (32, 64,
     > LAST_ALTIVEC_REGNO) mean "none saved".  */
  if (info->first_gp_reg_save != 32)
    fprintf (stderr, "\tfirst_gp_reg_save   = %5d\n", info->first_gp_reg_save);

  if (info->first_fp_reg_save != 64)
    fprintf (stderr, "\tfirst_fp_reg_save   = %5d\n", info->first_fp_reg_save);

  if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
    fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
	     info->first_altivec_reg_save);

  /* Flags describing what must be saved / how the frame is built.  */
  if (info->lr_save_p)
    fprintf (stderr, "\tlr_save_p           = %5d\n", info->lr_save_p);

  if (info->cr_save_p)
    fprintf (stderr, "\tcr_save_p           = %5d\n", info->cr_save_p);

  if (info->toc_save_p)
    fprintf (stderr, "\ttoc_save_p          = %5d\n", info->toc_save_p);

  if (info->vrsave_mask)
    fprintf (stderr, "\tvrsave_mask         = 0x%x\n", info->vrsave_mask);

  if (info->push_p)
    fprintf (stderr, "\tpush_p              = %5d\n", info->push_p);

  if (info->calls_p)
    fprintf (stderr, "\tcalls_p             = %5d\n", info->calls_p);

  /* Save-area offsets (relative to the frame layout computed by
     rs6000_stack_info).  */
  if (info->gp_save_offset)
    fprintf (stderr, "\tgp_save_offset      = %5d\n", info->gp_save_offset);

  if (info->fp_save_offset)
    fprintf (stderr, "\tfp_save_offset      = %5d\n", info->fp_save_offset);

  if (info->altivec_save_offset)
    fprintf (stderr, "\taltivec_save_offset = %5d\n",
	     info->altivec_save_offset);

  if (info->spe_gp_save_offset)
    fprintf (stderr, "\tspe_gp_save_offset  = %5d\n",
	     info->spe_gp_save_offset);

  if (info->vrsave_save_offset)
    fprintf (stderr, "\tvrsave_save_offset  = %5d\n",
	     info->vrsave_save_offset);

  if (info->lr_save_offset)
    fprintf (stderr, "\tlr_save_offset      = %5d\n", info->lr_save_offset);

  if (info->cr_save_offset)
    fprintf (stderr, "\tcr_save_offset      = %5d\n", info->cr_save_offset);

  if (info->toc_save_offset)
    fprintf (stderr, "\ttoc_save_offset     = %5d\n", info->toc_save_offset);

  if (info->varargs_save_offset)
    fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);

  /* Sizes of the various frame components.  */
  if (info->total_size)
    fprintf (stderr, "\ttotal_size          = %5d\n", info->total_size);

  if (info->varargs_size)
    fprintf (stderr, "\tvarargs_size        = %5d\n", info->varargs_size);

  if (info->vars_size)
    fprintf (stderr, "\tvars_size           = %5d\n", info->vars_size);

  if (info->parm_size)
    fprintf (stderr, "\tparm_size           = %5d\n", info->parm_size);

  if (info->fixed_size)
    fprintf (stderr, "\tfixed_size          = %5d\n", info->fixed_size);

  if (info->gp_size)
    fprintf (stderr, "\tgp_size             = %5d\n", info->gp_size);

  if (info->spe_gp_size)
    fprintf (stderr, "\tspe_gp_size         = %5d\n", info->spe_gp_size);

  if (info->fp_size)
    fprintf (stderr, "\tfp_size             = %5d\n", info->fp_size);

  if (info->altivec_size)
    fprintf (stderr, "\taltivec_size        = %5d\n", info->altivec_size);

  if (info->vrsave_size)
    fprintf (stderr, "\tvrsave_size         = %5d\n", info->vrsave_size);

  if (info->altivec_padding_size)
    fprintf (stderr, "\taltivec_padding_size= %5d\n",
	     info->altivec_padding_size);

  if (info->spe_padding_size)
    fprintf (stderr, "\tspe_padding_size    = %5d\n",
	     info->spe_padding_size);

  if (info->lr_size)
    fprintf (stderr, "\tlr_size             = %5d\n", info->lr_size);

  if (info->cr_size)
    fprintf (stderr, "\tcr_size             = %5d\n", info->cr_size);

  if (info->toc_size)
    fprintf (stderr, "\ttoc_size            = %5d\n", info->toc_size);

  if (info->save_size)
    fprintf (stderr, "\tsave_size           = %5d\n", info->save_size);

  /* 4 is the 32-bit default; only report unusual register sizes.  */
  if (info->reg_size != 4)
    fprintf (stderr, "\treg_size            = %5d\n", info->reg_size);

  fprintf (stderr, "\n");
}
/* Return an rtx for the address that this function (or a frame COUNT
   levels up from it) will return to; FRAME is the frame pointer to
   start walking back from.  Backs __builtin_return_address.

   NOTE(review): the return-type line was lost in extraction; upstream
   declares this function as returning `rtx' -- confirm against
   rs6000-protos.h.  */
rtx
rs6000_return_addr (count, frame)
     int count;
     rtx frame;
{
  /* Currently we don't optimize very well between prolog and body
     code and for PIC code the code can be actually quite bad, so
     don't try to be too clever here.  */
  if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
    {
      /* Force the prologue to lay out a full frame so the back-chain
	 walk below is valid.  */
      cfun->machine->ra_needs_full_frame = 1;

      /* Follow the back chain one level, then load the saved LR at
	 RETURN_ADDRESS_OFFSET from that frame.  */
      return
	gen_rtx_MEM
	  (Pmode,
	   memory_address
	     (Pmode,
	      plus_constant (copy_to_reg
			       (gen_rtx_MEM (Pmode,
					     memory_address (Pmode, frame))),
			     RETURN_ADDRESS_OFFSET)));
    }

  /* count == 0, non-PIC (or AIX): the return address is simply the
     incoming value of the link register.  */
  return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
}
/* Say whether a function is a candidate for sibcall handling or not.
   We do not allow indirect calls to be optimized into sibling calls.
   Also, we can't do it if there are any vector parameters; there's
   nowhere to put the VRsave code so it works; note that functions with
   vector parameters are required to have a prototype, so the argument
   type info must be available here.  (The tail recursion case can work
   with vector parameters, but there's no way to distinguish here.)

   DECL is the called function's declaration (null for an indirect
   call, which is always rejected); EXP is unused.  Returns true only
   for locally-binding (or Darwin) callees without a "longcall"
   attribute.  */
static bool
rs6000_function_ok_for_sibcall (decl, exp)
     tree decl;
     tree exp ATTRIBUTE_UNUSED;
{
  tree type;
  if (decl)
    {
      /* Reject any callee taking a vector argument: the VRsave
	 bookkeeping cannot be placed around a sibcall.  */
      if (TARGET_ALTIVEC_VRSAVE)
	{
	  for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
	       type; type = TREE_CHAIN (type))
	    {
	      if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
		return false;
	    }
	}
      /* Allow the sibcall when the callee binds locally (or on
	 Darwin), unless it is explicitly marked "longcall" without a
	 countermanding "shortcall".  */
      if (DEFAULT_ABI == ABI_DARWIN
	  || (*targetm.binds_local_p) (decl))
	{
	  tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));

	  if (!lookup_attribute ("longcall", attr_list)
	      || lookup_attribute ("shortcall", attr_list))
	    return true;
	}
    }
  return false;
}
10385 static int
10386 rs6000_ra_ever_killed ()
10388 rtx top;
10389 rtx reg;
10390 rtx insn;
10392 /* Irritatingly, there are two kinds of thunks -- those created with
10393 TARGET_ASM_OUTPUT_MI_THUNK and those with DECL_THUNK_P that go
10394 through the regular part of the compiler. This is a very hacky
10395 way to tell them apart. */
10396 if (current_function_is_thunk && !no_new_pseudos)
10397 return 0;
10399 /* regs_ever_live has LR marked as used if any sibcalls are present,
10400 but this should not force saving and restoring in the
10401 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
10402 clobbers LR, so that is inappropriate. */
10404 /* Also, the prologue can generate a store into LR that
10405 doesn't really count, like this:
10407 move LR->R0
10408 bcl to set PIC register
10409 move LR->R31
10410 move R0->LR
10412 When we're called from the epilogue, we need to avoid counting
10413 this as a store. */
10415 push_topmost_sequence ();
10416 top = get_insns ();
10417 pop_topmost_sequence ();
10418 reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
10420 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
10422 if (INSN_P (insn))
10424 if (FIND_REG_INC_NOTE (insn, reg))
10425 return 1;
10426 else if (GET_CODE (insn) == CALL_INSN
10427 && !SIBLING_CALL_P (insn))
10428 return 1;
10429 else if (set_of (reg, insn) != NULL_RTX
10430 && !prologue_epilogue_contains (insn))
10431 return 1;
10434 return 0;
/* Add a REG_MAYBE_DEAD note to INSN, telling flow analysis that the
   value set by INSN is allowed to turn out to be unused.  */
static void
rs6000_maybe_dead (insn)
     rtx insn;
{
  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
					const0_rtx,
					REG_NOTES (insn));
}
/* Emit instructions needed to load the TOC register.
   This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
   a constant pool; or for SVR4 -fpic.

   FROMPROLOG is nonzero when this is called while emitting the
   prologue; in that case temporaries must be hard registers (LR, r0)
   rather than new pseudos, and every emitted insn is tagged
   REG_MAYBE_DEAD so flow may delete it if the TOC turns out to be
   unused.  */

void
rs6000_emit_load_toc_table (fromprolog)
     int fromprolog;
{
  rtx dest, insn;
  dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);

  if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
    {
      /* SVR4 -fpic: a single load_toc_v4_pic_si, then copy into the
	 PIC offset table register.  */
      rtx temp = (fromprolog
		  ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
		  : gen_reg_rtx (Pmode));
      insn = emit_insn (gen_load_toc_v4_pic_si (temp));
      if (fromprolog)
	rs6000_maybe_dead (insn);
      insn = emit_move_insn (dest, temp);
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
    {
      /* ELF -fPIC: materialize the TOC base via a label pair (in the
	 prologue) or a GOT-style reference (when reloading it later),
	 then add the computed difference into DEST.  */
      char buf[30];
      rtx tempLR = (fromprolog
		    ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
		    : gen_reg_rtx (Pmode));
      rtx temp0 = (fromprolog
		   ? gen_rtx_REG (Pmode, 0)
		   : gen_reg_rtx (Pmode));
      rtx symF;

      /* possibly create the toc section */
      if (! toc_initialized)
	{
	  toc_section ();
	  function_section (current_function_decl);
	}

      if (fromprolog)
	{
	  rtx symL;

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
	  symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
	  symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
							       symF)));
	  rs6000_maybe_dead (emit_move_insn (dest, tempLR));
	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
							       symL,
							       symF)));
	}
      else
	{
	  rtx tocsym;
	  /* Counter so each non-prologue reload gets a fresh label.  */
	  static int reload_toc_labelno = 0;

	  tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
	  symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  emit_insn (gen_load_toc_v4_PIC_1b (tempLR, symF, tocsym));
	  emit_move_insn (dest, tempLR);
	  emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
	}
      insn = emit_insn (gen_addsi3 (dest, temp0, dest));
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
    {
      /* This is for AIX code running in non-PIC ELF32.  */
      char buf[30];
      rtx realsym;
      ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
      realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

      insn = emit_insn (gen_elf_high (dest, realsym));
      if (fromprolog)
	rs6000_maybe_dead (insn);
      insn = emit_insn (gen_elf_low (dest, dest, realsym));
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  else if (DEFAULT_ABI == ABI_AIX)
    {
      /* AIX: the TOC pointer is in its dedicated stack slot; reload it
	 with the appropriate word-size pattern.  */
      if (TARGET_32BIT)
	insn = emit_insn (gen_load_toc_aix_si (dest));
      else
	insn = emit_insn (gen_load_toc_aix_di (dest));
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  else
    abort ();
}
/* Return the alias set used for TOC references, allocating it lazily
   on the first call.  */

int
get_TOC_alias_set ()
{
  static int toc_alias_set = -1;

  if (toc_alias_set < 0)
    toc_alias_set = new_alias_set ();

  return toc_alias_set;
}
10560 /* This retuns nonzero if the current function uses the TOC. This is
10561 determined by the presence of (unspec ... UNSPEC_TOC) or
10562 use (unspec ... UNSPEC_TOC), which are generated by the various
10563 load_toc_* patterns. */
10566 uses_TOC ()
10568 rtx insn;
10570 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
10571 if (INSN_P (insn))
10573 rtx pat = PATTERN (insn);
10574 int i;
10576 if (GET_CODE (pat) == PARALLEL)
10577 for (i = 0; i < XVECLEN (pat, 0); i++)
10579 rtx sub = XVECEXP (pat, 0, i);
10580 if (GET_CODE (sub) == USE)
10582 sub = XEXP (sub, 0);
10583 if (GET_CODE (sub) == UNSPEC
10584 && XINT (sub, 1) == UNSPEC_TOC)
10585 return 1;
10589 return 0;
/* Return an rtx expressing SYMBOL's address as a TOC-relative
   reference: (TOC register) + (SYMBOL - TOC base label).

   NOTE(review): the return-type line was lost in extraction; upstream
   declares this as returning `rtx' -- confirm against
   rs6000-protos.h.  */
rtx
create_TOC_reference (symbol)
     rtx symbol;
{
  return gen_rtx_PLUS (Pmode,
	   gen_rtx_REG (Pmode, TOC_REGISTER),
	     gen_rtx_CONST (Pmode,
	       gen_rtx_MINUS (Pmode, symbol,
		 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
}
10603 #if TARGET_AIX
10604 /* __throw will restore its own return address to be the same as the
10605 return address of the function that the throw is being made to.
10606 This is unfortunate, because we want to check the original
10607 return address to see if we need to restore the TOC.
10608 So we have to squirrel it away here.
10609 This is used only in compiling __throw and __rethrow.
10611 Most of this code should be removed by CSE. */
10612 static rtx insn_after_throw;
/* This does the saving...

   Squirrel away (into the pseudo insn_after_throw) the instruction
   found at this function's return address, by walking the back chain
   one level and loading the word the saved return address points at.
   Used together with rs6000_emit_eh_toc_restore below.  */
void
rs6000_aix_emit_builtin_unwind_init ()
{
  rtx mem;
  rtx stack_top = gen_reg_rtx (Pmode);
  rtx opcode_addr = gen_reg_rtx (Pmode);

  insn_after_throw = gen_reg_rtx (SImode);

  /* Load the back-chain word: the caller's stack pointer.  */
  mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
  emit_move_insn (stack_top, mem);

  /* The saved return address lives two words into the caller's frame
     (presumably the LR save slot -- see the TOC-restore comment
     below); load the 32-bit instruction it points at.  */
  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, stack_top,
				   GEN_INT (2 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (opcode_addr, mem);
  emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
}
/* Emit insns to _restore_ the TOC register, at runtime (specifically
   in _eh.o).  Only used on AIX.

   The idea is that on AIX, function calls look like this:
	bl  somefunction-trampoline
	lwz r2,20(sp)

   and later,
	somefunction-trampoline:
	stw r2,20(sp)
	 ... load function address in the count register ...
	bctr
   or like this, if the linker determines that this is not a cross-module call
   and so the TOC need not be restored:
	bl  somefunction
	nop
   or like this, if the compiler could determine that this is not a
   cross-module call:
	bl  somefunction
   now, the tricky bit here is that register 2 is saved and restored
   by the _linker_, so we can't readily generate debugging information
   for it.  So we need to go back up the call chain looking at the
   insns at return addresses to see which calls saved the TOC register
   and so see where it gets restored from.

   Oh, and all this gets done in RTL inside the eh_epilogue pattern,
   just before the actual epilogue.

   On the bright side, this incurs no space or time overhead unless an
   exception is thrown, except for the extra code in libgcc.a.

   The parameter STACKSIZE is a register containing (at runtime)
   the amount to be popped off the stack in addition to the stack frame
   of this routine (which will be __throw or __rethrow, and so is
   guaranteed to have a stack frame).  */

void
rs6000_emit_eh_toc_restore (stacksize)
     rtx stacksize;
{
  rtx top_of_stack;
  rtx bottom_of_stack = gen_reg_rtx (Pmode);
  rtx tocompare = gen_reg_rtx (SImode);
  rtx opcode = gen_reg_rtx (SImode);
  rtx opcode_addr = gen_reg_rtx (Pmode);
  rtx mem;
  rtx loop_start = gen_label_rtx ();
  rtx no_toc_restore_needed = gen_label_rtx ();
  rtx loop_exit = gen_label_rtx ();

  /* bottom_of_stack starts at the caller's frame (via the back
     chain); top_of_stack is STACKSIZE bytes above it -- the point at
     which the walk stops.  */
  mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
  set_mem_alias_set (mem, rs6000_sr_alias_set);
  emit_move_insn (bottom_of_stack, mem);

  top_of_stack = expand_binop (Pmode, add_optab,
			       bottom_of_stack, stacksize,
			       NULL_RTX, 1, OPTAB_WIDEN);

  /* The opcode we look for at each return address: the TOC reload
     insn, "lwz r2,20(r1)" on 32-bit (0x80410014); the 64-bit constant
     is presumably the "ld r2,40(r1)" encoding -- see the comment
     above for the 32-bit form.  */
  emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
					   : 0xE8410028, SImode));

  /* rs6000_aix_emit_builtin_unwind_init must have run first.  */
  if (insn_after_throw == NULL_RTX)
    abort ();
  emit_move_insn (opcode, insn_after_throw);

  emit_note (NULL, NOTE_INSN_LOOP_BEG);
  emit_label (loop_start);

  /* If the insn after this frame's call site is the TOC reload,
     restore r2 from this frame's TOC save slot.  */
  do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
			   SImode, NULL_RTX, NULL_RTX,
			   no_toc_restore_needed);

  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, bottom_of_stack,
				   GEN_INT (5 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (gen_rtx_REG (Pmode, 2), mem);

  emit_label (no_toc_restore_needed);
  /* Stop once the walk reaches top_of_stack.  */
  do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
			   Pmode, NULL_RTX, NULL_RTX,
			   loop_exit);

  /* Step one frame up the back chain, and fetch the instruction at
     that frame's saved return address for the next iteration.  */
  mem = gen_rtx_MEM (Pmode, bottom_of_stack);
  set_mem_alias_set (mem, rs6000_sr_alias_set);
  emit_move_insn (bottom_of_stack, mem);

  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, bottom_of_stack,
				   GEN_INT (2 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (opcode_addr, mem);
  emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));

  emit_note (NULL, NOTE_INSN_LOOP_CONT);
  emit_jump (loop_start);
  emit_note (NULL, NOTE_INSN_LOOP_END);
  emit_label (loop_exit);
}
10731 #endif /* TARGET_AIX */
/* This ties together stack memory (MEM with an alias set of
   rs6000_sr_alias_set) and the change to the stack pointer.

   Emits a stack_tie insn: a scheduling barrier that prevents stores
   to the save area from being moved across stack-pointer updates.  */

static void
rs6000_emit_stack_tie ()
{
  rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));

  set_mem_alias_set (mem, rs6000_sr_alias_set);
  emit_insn (gen_stack_tie (mem));
}
/* Emit the correct code for allocating stack space, as insns.
   If COPY_R12, make sure a copy of the old frame is left in r12.
   The generated code may use hard register 0 as a temporary.

   SIZE is the number of bytes to allocate.  Also emits the -fstack-limit
   check (a conditional trap) when requested, and attaches the
   REG_FRAME_RELATED_EXPR note describing the stack-pointer decrement
   for unwind info.  */

static void
rs6000_emit_allocate_stack (size, copy_r12)
     HOST_WIDE_INT size;
     int copy_r12;
{
  rtx insn;
  rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  rtx tmp_reg = gen_rtx_REG (Pmode, 0);
  rtx todec = GEN_INT (-size);

  if (current_function_limit_stack)
    {
      /* Stack limit in a fixed register (not r0/r1): compute
	 limit+size into r0 and trap if the new SP would go below.  */
      if (REG_P (stack_limit_rtx)
	  && REGNO (stack_limit_rtx) > 1
	  && REGNO (stack_limit_rtx) <= 31)
	{
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size))
		     : gen_adddi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size)));

	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      /* Stack limit given as a symbol (32-bit SVR4 only): build
	 symbol+size with lis/ori and trap the same way.  */
      else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
	       && TARGET_32BIT
	       && DEFAULT_ABI == ABI_V4)
	{
	  rtx toload = gen_rtx_CONST (VOIDmode,
				      gen_rtx_PLUS (Pmode,
						    stack_limit_rtx,
						    GEN_INT (size)));

	  emit_insn (gen_elf_high (tmp_reg, toload));
	  emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      else
	warning ("stack limit expression is not supported");
    }

  /* Without store-with-update, r12 is needed to rewrite the back
     chain below, so copy the old SP there in that case too.  */
  if (copy_r12 || ! TARGET_UPDATE)
    emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);

  if (TARGET_UPDATE)
    {
      /* -size does not fit in a 16-bit immediate: materialize it in
	 r0 first (split immediately so later passes see real insns).  */
      if (size > 32767)
	{
	  /* Need a note here so that try_split doesn't get confused.  */
	  if (get_last_insn() == NULL_RTX)
	    emit_note (0, NOTE_INSN_DELETED);
	  insn = emit_move_insn (tmp_reg, todec);
	  try_split (PATTERN (insn), insn, 0);
	  todec = tmp_reg;
	}

      /* stwu/stdu: decrement SP and store the back chain atomically.  */
      insn = emit_insn (TARGET_32BIT
			? gen_movsi_update (stack_reg, stack_reg,
					    todec, stack_reg)
			: gen_movdi_update (stack_reg, stack_reg,
					    todec, stack_reg));
    }
  else
    {
      /* No update forms: decrement SP, then store the old SP (saved
	 in r12 above) as the back chain.  */
      insn = emit_insn (TARGET_32BIT
			? gen_addsi3 (stack_reg, stack_reg, todec)
			: gen_adddi3 (stack_reg, stack_reg, todec));
      emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
		      gen_rtx_REG (Pmode, 12));
    }

  /* Describe the SP adjustment for the unwinder.  */
  RTX_FRAME_RELATED_P (insn) = 1;
  REG_NOTES (insn) =
    gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
		       gen_rtx_SET (VOIDmode, stack_reg,
				    gen_rtx_PLUS (Pmode, stack_reg,
						  GEN_INT (-size))),
		       REG_NOTES (insn));
}
10833 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
10834 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
10835 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
10836 deduce these equivalences by itself so it wasn't necessary to hold
10837 its hand so much. */
10839 static void
10840 rs6000_frame_related (insn, reg, val, reg2, rreg)
10841 rtx insn;
10842 rtx reg;
10843 HOST_WIDE_INT val;
10844 rtx reg2;
10845 rtx rreg;
10847 rtx real, temp;
10849 /* copy_rtx will not make unique copies of registers, so we need to
10850 ensure we don't have unwanted sharing here. */
10851 if (reg == reg2)
10852 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
10854 if (reg == rreg)
10855 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
10857 real = copy_rtx (PATTERN (insn));
10859 if (reg2 != NULL_RTX)
10860 real = replace_rtx (real, reg2, rreg);
10862 real = replace_rtx (real, reg,
10863 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
10864 STACK_POINTER_REGNUM),
10865 GEN_INT (val)));
10867 /* We expect that 'real' is either a SET or a PARALLEL containing
10868 SETs (and possibly other stuff). In a PARALLEL, all the SETs
10869 are important so they all have to be marked RTX_FRAME_RELATED_P. */
10871 if (GET_CODE (real) == SET)
10873 rtx set = real;
10875 temp = simplify_rtx (SET_SRC (set));
10876 if (temp)
10877 SET_SRC (set) = temp;
10878 temp = simplify_rtx (SET_DEST (set));
10879 if (temp)
10880 SET_DEST (set) = temp;
10881 if (GET_CODE (SET_DEST (set)) == MEM)
10883 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
10884 if (temp)
10885 XEXP (SET_DEST (set), 0) = temp;
10888 else if (GET_CODE (real) == PARALLEL)
10890 int i;
10891 for (i = 0; i < XVECLEN (real, 0); i++)
10892 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
10894 rtx set = XVECEXP (real, 0, i);
10896 temp = simplify_rtx (SET_SRC (set));
10897 if (temp)
10898 SET_SRC (set) = temp;
10899 temp = simplify_rtx (SET_DEST (set));
10900 if (temp)
10901 SET_DEST (set) = temp;
10902 if (GET_CODE (SET_DEST (set)) == MEM)
10904 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
10905 if (temp)
10906 XEXP (SET_DEST (set), 0) = temp;
10908 RTX_FRAME_RELATED_P (set) = 1;
10911 else
10912 abort ();
10914 if (TARGET_SPE)
10915 real = spe_synthesize_frame_save (real);
10917 RTX_FRAME_RELATED_P (insn) = 1;
10918 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
10919 real,
10920 REG_NOTES (insn));
/* Given an SPE frame note, return a PARALLEL of SETs with the
   original note, plus a synthetic register save.

   REAL is the candidate REG_FRAME_RELATED_EXPR note.  Anything that
   is not a V2SImode register store is returned unchanged.  */

static rtx
spe_synthesize_frame_save (real)
     rtx real;
{
  rtx synth, offset, reg, real2;

  /* Only 64-bit SPE register saves need the treatment.  */
  if (GET_CODE (real) != SET
      || GET_MODE (SET_SRC (real)) != V2SImode)
    return real;

  /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
     frame related note.  The parallel contains a set of the register
     being saved, and another set to a synthetic register (n+1200).
     This is so we can differentiate between 64-bit and 32-bit saves.
     Words cannot describe this nastiness.  */

  if (GET_CODE (SET_DEST (real)) != MEM
      || GET_CODE (XEXP (SET_DEST (real), 0)) != PLUS
      || GET_CODE (SET_SRC (real)) != REG)
    abort ();

  /* Transform:
       (set (mem (plus (reg x) (const y)))
	    (reg z))
     into:
       (set (mem (plus (reg x) (const y+4)))
	    (reg z+1200))
   */

  /* real2: the low (on big-endian: high-addressed) 32-bit half, as an
     SImode store of the original register number.  */
  real2 = copy_rtx (real);
  PUT_MODE (SET_DEST (real2), SImode);
  reg = SET_SRC (real2);
  real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
  synth = copy_rtx (real2);

  if (BYTES_BIG_ENDIAN)
    {
      offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
      real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
    }

  /* synth: the other half, stored as synthetic register n+1200 so the
     unwinder can recognize a 64-bit save.  */
  reg = SET_SRC (synth);

  synth = replace_rtx (synth, reg,
		       gen_rtx_REG (SImode, REGNO (reg) + 1200));

  offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
  synth = replace_rtx (synth, offset,
		       GEN_INT (INTVAL (offset)
				+ (BYTES_BIG_ENDIAN ? 0 : 4)));

  RTX_FRAME_RELATED_P (synth) = 1;
  RTX_FRAME_RELATED_P (real2) = 1;
  if (BYTES_BIG_ENDIAN)
    real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
  else
    real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));

  return real;
}
10987 /* Returns an insn that has a vrsave set operation with the
10988 appropriate CLOBBERs. */
10990 static rtx
10991 generate_set_vrsave (reg, info, epiloguep)
10992 rtx reg;
10993 rs6000_stack_t *info;
10994 int epiloguep;
10996 int nclobs, i;
10997 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
10998 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
11000 clobs[0]
11001 = gen_rtx_SET (VOIDmode,
11002 vrsave,
11003 gen_rtx_UNSPEC_VOLATILE (SImode,
11004 gen_rtvec (2, reg, vrsave),
11005 30));
11007 nclobs = 1;
11009 /* We need to clobber the registers in the mask so the scheduler
11010 does not move sets to VRSAVE before sets of AltiVec registers.
11012 However, if the function receives nonlocal gotos, reload will set
11013 all call saved registers live. We will end up with:
11015 (set (reg 999) (mem))
11016 (parallel [ (set (reg vrsave) (unspec blah))
11017 (clobber (reg 999))])
11019 The clobber will cause the store into reg 999 to be dead, and
11020 flow will attempt to delete an epilogue insn. In this case, we
11021 need an unspec use/set of the register. */
11023 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
11024 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
11026 if (!epiloguep || call_used_regs [i])
11027 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
11028 gen_rtx_REG (V4SImode, i));
11029 else
11031 rtx reg = gen_rtx_REG (V4SImode, i);
11033 clobs[nclobs++]
11034 = gen_rtx_SET (VOIDmode,
11035 reg,
11036 gen_rtx_UNSPEC (V4SImode,
11037 gen_rtvec (1, reg), 27));
11041 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
11043 for (i = 0; i < nclobs; ++i)
11044 XVECEXP (insn, 0, i) = clobs[i];
11046 return insn;
/* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
   Save REGNO into [FRAME_REG + OFFSET] in mode MODE.

   FRAME_PTR and TOTAL_SIZE are passed through to rs6000_frame_related
   so the note describes the save relative to the stack pointer.  */

static void
emit_frame_save (frame_reg, frame_ptr, mode, regno, offset, total_size)
     rtx frame_reg;
     rtx frame_ptr;
     enum machine_mode mode;
     unsigned int regno;
     int offset;
     int total_size;
{
  rtx reg, offset_rtx, insn, mem, addr, int_rtx;
  rtx replacea, replaceb;

  int_rtx = GEN_INT (offset);

  /* Some cases that need register indexed addressing.  */
  if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
      || (TARGET_SPE_ABI
	  && SPE_VECTOR_MODE (mode)
	  && !SPE_CONST_OFFSET_OK (offset)))
    {
      /* Whomever calls us must make sure r11 is available in the
	 flow path of instructions in the prologue.  */
      offset_rtx = gen_rtx_REG (Pmode, 11);
      emit_move_insn (offset_rtx, int_rtx);

      /* In the frame note, replace the register-held offset (a) by
	 the constant (b) so the unwinder sees a constant offset.  */
      replacea = offset_rtx;
      replaceb = int_rtx;
    }
  else
    {
      /* Constant offset fits directly in the address.  */
      offset_rtx = int_rtx;
      replacea = NULL_RTX;
      replaceb = NULL_RTX;
    }

  reg = gen_rtx_REG (mode, regno);
  addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
  mem = gen_rtx_MEM (mode, addr);
  set_mem_alias_set (mem, rs6000_sr_alias_set);

  insn = emit_move_insn (mem, reg);

  rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
}
11097 /* Emit an offset memory reference suitable for a frame store, while
11098 converting to a valid addressing mode. */
11100 static rtx
11101 gen_frame_mem_offset (mode, reg, offset)
11102 enum machine_mode mode;
11103 rtx reg;
11104 int offset;
11106 rtx int_rtx, offset_rtx;
11108 int_rtx = GEN_INT (offset);
11110 if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
11112 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
11113 emit_move_insn (offset_rtx, int_rtx);
11115 else
11116 offset_rtx = int_rtx;
11118 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
11121 /* Emit function prologue as insns. */
11123 void
11124 rs6000_emit_prologue ()
11126 rs6000_stack_t *info = rs6000_stack_info ();
11127 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
11128 int reg_size = TARGET_POWERPC64 ? 8 : 4;
11129 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
11130 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
11131 rtx frame_reg_rtx = sp_reg_rtx;
11132 rtx cr_save_rtx = NULL;
11133 rtx insn;
11134 int saving_FPRs_inline;
11135 int using_store_multiple;
11136 HOST_WIDE_INT sp_offset = 0;
11138 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11140 reg_mode = V2SImode;
11141 reg_size = 8;
11144 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
11145 && (!TARGET_SPE_ABI
11146 || info->spe_64bit_regs_used == 0)
11147 && info->first_gp_reg_save < 31);
11148 saving_FPRs_inline = (info->first_fp_reg_save == 64
11149 || FP_SAVE_INLINE (info->first_fp_reg_save));
11151 /* For V.4, update stack before we do any saving and set back pointer. */
11152 if (info->push_p && DEFAULT_ABI == ABI_V4)
11154 if (info->total_size < 32767)
11155 sp_offset = info->total_size;
11156 else
11157 frame_reg_rtx = frame_ptr_rtx;
11158 rs6000_emit_allocate_stack (info->total_size,
11159 (frame_reg_rtx != sp_reg_rtx
11160 && (info->cr_save_p
11161 || info->lr_save_p
11162 || info->first_fp_reg_save < 64
11163 || info->first_gp_reg_save < 32
11164 )));
11165 if (frame_reg_rtx != sp_reg_rtx)
11166 rs6000_emit_stack_tie ();
11169 /* Save AltiVec registers if needed. */
11170 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
11172 int i;
11174 /* There should be a non inline version of this, for when we
11175 are saving lots of vector registers. */
11176 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
11177 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
11179 rtx areg, savereg, mem;
11180 int offset;
11182 offset = info->altivec_save_offset + sp_offset
11183 + 16 * (i - info->first_altivec_reg_save);
11185 savereg = gen_rtx_REG (V4SImode, i);
11187 areg = gen_rtx_REG (Pmode, 0);
11188 emit_move_insn (areg, GEN_INT (offset));
11190 /* AltiVec addressing mode is [reg+reg]. */
11191 mem = gen_rtx_MEM (V4SImode,
11192 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
11194 set_mem_alias_set (mem, rs6000_sr_alias_set);
11196 insn = emit_move_insn (mem, savereg);
11198 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11199 areg, GEN_INT (offset));
11203 /* VRSAVE is a bit vector representing which AltiVec registers
11204 are used. The OS uses this to determine which vector
11205 registers to save on a context switch. We need to save
11206 VRSAVE on the stack frame, add whatever AltiVec registers we
11207 used in this function, and do the corresponding magic in the
11208 epilogue. */
11210 if (TARGET_ALTIVEC && info->vrsave_mask != 0)
11212 rtx reg, mem, vrsave;
11213 int offset;
11215 /* Get VRSAVE onto a GPR. */
11216 reg = gen_rtx_REG (SImode, 12);
11217 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
11218 if (TARGET_MACHO)
11219 emit_insn (gen_get_vrsave_internal (reg));
11220 else
11221 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
11223 /* Save VRSAVE. */
11224 offset = info->vrsave_save_offset + sp_offset;
11226 = gen_rtx_MEM (SImode,
11227 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
11228 set_mem_alias_set (mem, rs6000_sr_alias_set);
11229 insn = emit_move_insn (mem, reg);
11231 /* Include the registers in the mask. */
11232 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
11234 insn = emit_insn (generate_set_vrsave (reg, info, 0));
11237 /* If we use the link register, get it into r0. */
11238 if (info->lr_save_p)
11239 emit_move_insn (gen_rtx_REG (Pmode, 0),
11240 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
11242 /* If we need to save CR, put it into r12. */
11243 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
11245 cr_save_rtx = gen_rtx_REG (SImode, 12);
11246 emit_insn (gen_movesi_from_cr (cr_save_rtx));
11249 /* Do any required saving of fpr's. If only one or two to save, do
11250 it ourselves. Otherwise, call function. */
11251 if (saving_FPRs_inline)
11253 int i;
11254 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11255 if ((regs_ever_live[info->first_fp_reg_save+i]
11256 && ! call_used_regs[info->first_fp_reg_save+i]))
11257 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
11258 info->first_fp_reg_save + i,
11259 info->fp_save_offset + sp_offset + 8 * i,
11260 info->total_size);
11262 else if (info->first_fp_reg_save != 64)
11264 int i;
11265 char rname[30];
11266 const char *alloc_rname;
11267 rtvec p;
11268 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
11270 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
11271 gen_rtx_REG (Pmode,
11272 LINK_REGISTER_REGNUM));
11273 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
11274 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
11275 alloc_rname = ggc_strdup (rname);
11276 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
11277 gen_rtx_SYMBOL_REF (Pmode,
11278 alloc_rname));
11279 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11281 rtx addr, reg, mem;
11282 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
11283 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11284 GEN_INT (info->fp_save_offset
11285 + sp_offset + 8*i));
11286 mem = gen_rtx_MEM (DFmode, addr);
11287 set_mem_alias_set (mem, rs6000_sr_alias_set);
11289 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
11291 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11292 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11293 NULL_RTX, NULL_RTX);
11296 /* Save GPRs. This is done as a PARALLEL if we are using
11297 the store-multiple instructions. */
11298 if (using_store_multiple)
11300 rtvec p;
11301 int i;
11302 p = rtvec_alloc (32 - info->first_gp_reg_save);
11303 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11305 rtx addr, reg, mem;
11306 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
11307 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11308 GEN_INT (info->gp_save_offset
11309 + sp_offset
11310 + reg_size * i));
11311 mem = gen_rtx_MEM (reg_mode, addr);
11312 set_mem_alias_set (mem, rs6000_sr_alias_set);
11314 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
11316 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11317 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11318 NULL_RTX, NULL_RTX);
11320 else
11322 int i;
11323 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11324 if ((regs_ever_live[info->first_gp_reg_save+i]
11325 && ! call_used_regs[info->first_gp_reg_save+i])
11326 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
11327 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
11328 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
11330 rtx addr, reg, mem;
11331 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
11333 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11335 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
11336 rtx b;
11338 if (!SPE_CONST_OFFSET_OK (offset))
11340 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
11341 emit_move_insn (b, GEN_INT (offset));
11343 else
11344 b = GEN_INT (offset);
11346 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
11347 mem = gen_rtx_MEM (V2SImode, addr);
11348 set_mem_alias_set (mem, rs6000_sr_alias_set);
11349 insn = emit_move_insn (mem, reg);
11351 if (GET_CODE (b) == CONST_INT)
11352 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11353 NULL_RTX, NULL_RTX);
11354 else
11355 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11356 b, GEN_INT (offset));
11358 else
11360 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11361 GEN_INT (info->gp_save_offset
11362 + sp_offset
11363 + reg_size * i));
11364 mem = gen_rtx_MEM (reg_mode, addr);
11365 set_mem_alias_set (mem, rs6000_sr_alias_set);
11367 insn = emit_move_insn (mem, reg);
11368 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11369 NULL_RTX, NULL_RTX);
11374 /* ??? There's no need to emit actual instructions here, but it's the
11375 easiest way to get the frame unwind information emitted. */
11376 if (current_function_calls_eh_return)
11378 unsigned int i, regno;
11380 for (i = 0; ; ++i)
11382 regno = EH_RETURN_DATA_REGNO (i);
11383 if (regno == INVALID_REGNUM)
11384 break;
11386 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
11387 info->ehrd_offset + sp_offset
11388 + reg_size * (int) i,
11389 info->total_size);
11393 /* Save lr if we used it. */
11394 if (info->lr_save_p)
11396 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11397 GEN_INT (info->lr_save_offset + sp_offset));
11398 rtx reg = gen_rtx_REG (Pmode, 0);
11399 rtx mem = gen_rtx_MEM (Pmode, addr);
11400 /* This should not be of rs6000_sr_alias_set, because of
11401 __builtin_return_address. */
11403 insn = emit_move_insn (mem, reg);
11404 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11405 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
11408 /* Save CR if we use any that must be preserved. */
11409 if (info->cr_save_p)
11411 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11412 GEN_INT (info->cr_save_offset + sp_offset));
11413 rtx mem = gen_rtx_MEM (SImode, addr);
11415 set_mem_alias_set (mem, rs6000_sr_alias_set);
11417 /* If r12 was used to hold the original sp, copy cr into r0 now
11418 that it's free. */
11419 if (REGNO (frame_reg_rtx) == 12)
11421 cr_save_rtx = gen_rtx_REG (SImode, 0);
11422 emit_insn (gen_movesi_from_cr (cr_save_rtx));
11424 insn = emit_move_insn (mem, cr_save_rtx);
11426 /* Now, there's no way that dwarf2out_frame_debug_expr is going
11427 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
11428 But that's OK. All we have to do is specify that _one_ condition
11429 code register is saved in this stack slot. The thrower's epilogue
11430 will then restore all the call-saved registers.
11431 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
11432 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11433 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
11436 /* Update stack and set back pointer unless this is V.4,
11437 for which it was done previously. */
11438 if (info->push_p && DEFAULT_ABI != ABI_V4)
11439 rs6000_emit_allocate_stack (info->total_size, FALSE);
11441 /* Set frame pointer, if needed. */
11442 if (frame_pointer_needed)
11444 insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
11445 sp_reg_rtx);
11446 RTX_FRAME_RELATED_P (insn) = 1;
11449 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
11450 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
11451 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
11452 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
11454 /* If emit_load_toc_table will use the link register, we need to save
11455 it. We use R12 for this purpose because emit_load_toc_table
11456 can use register 0. This allows us to use a plain 'blr' to return
11457 from the procedure more often. */
11458 int save_LR_around_toc_setup = (TARGET_ELF
11459 && DEFAULT_ABI != ABI_AIX
11460 && flag_pic
11461 && ! info->lr_save_p
11462 && EXIT_BLOCK_PTR->pred != NULL);
11463 if (save_LR_around_toc_setup)
11465 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
11466 rs6000_maybe_dead (emit_move_insn (frame_ptr_rtx, lr));
11467 rs6000_emit_load_toc_table (TRUE);
11468 rs6000_maybe_dead (emit_move_insn (lr, frame_ptr_rtx));
11470 else
11471 rs6000_emit_load_toc_table (TRUE);
11474 #if TARGET_MACHO
11475 if (DEFAULT_ABI == ABI_DARWIN
11476 && flag_pic && current_function_uses_pic_offset_table)
11478 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
11479 const char *picbase = machopic_function_base_name ();
11480 rtx src = gen_rtx_SYMBOL_REF (Pmode, picbase);
11482 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest, src)));
11484 rs6000_maybe_dead (
11485 emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
11486 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
11488 #endif
11491 /* Write function prologue. */
11493 static void
11494 rs6000_output_function_prologue (file, size)
11495 FILE *file;
11496 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
11498 rs6000_stack_t *info = rs6000_stack_info ();
11500 if (TARGET_DEBUG_STACK)
11501 debug_stack_info (info);
11503 /* Write .extern for any function we will call to save and restore
11504 fp values. */
11505 if (info->first_fp_reg_save < 64
11506 && !FP_SAVE_INLINE (info->first_fp_reg_save))
11507 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
11508 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
11509 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
11510 RESTORE_FP_SUFFIX);
11512 /* Write .extern for AIX common mode routines, if needed. */
11513 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
11515 fputs ("\t.extern __mulh\n", file);
11516 fputs ("\t.extern __mull\n", file);
11517 fputs ("\t.extern __divss\n", file);
11518 fputs ("\t.extern __divus\n", file);
11519 fputs ("\t.extern __quoss\n", file);
11520 fputs ("\t.extern __quous\n", file);
11521 common_mode_defined = 1;
11524 if (! HAVE_prologue)
11526 start_sequence ();
11528 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
11529 the "toplevel" insn chain. */
11530 emit_note (0, NOTE_INSN_DELETED);
11531 rs6000_emit_prologue ();
11532 emit_note (0, NOTE_INSN_DELETED);
11534 /* Expand INSN_ADDRESSES so final() doesn't crash. */
11536 rtx insn;
11537 unsigned addr = 0;
11538 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
11540 INSN_ADDRESSES_NEW (insn, addr);
11541 addr += 4;
11545 if (TARGET_DEBUG_STACK)
11546 debug_rtx_list (get_insns (), 100);
11547 final (get_insns (), file, FALSE, FALSE);
11548 end_sequence ();
11551 rs6000_pic_labelno++;
11554 /* Emit function epilogue as insns.
11556 At present, dwarf2out_frame_debug_expr doesn't understand
11557 register restores, so we don't bother setting RTX_FRAME_RELATED_P
11558 anywhere in the epilogue. Most of the insns below would in any case
11559 need special notes to explain where r11 is in relation to the stack. */
11561 void
11562 rs6000_emit_epilogue (sibcall)
11563 int sibcall;
11565 rs6000_stack_t *info;
11566 int restoring_FPRs_inline;
11567 int using_load_multiple;
11568 int using_mfcr_multiple;
11569 int use_backchain_to_restore_sp;
11570 int sp_offset = 0;
11571 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
11572 rtx frame_reg_rtx = sp_reg_rtx;
11573 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
11574 int reg_size = TARGET_POWERPC64 ? 8 : 4;
11575 int i;
11577 info = rs6000_stack_info ();
11579 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11581 reg_mode = V2SImode;
11582 reg_size = 8;
11585 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
11586 && (!TARGET_SPE_ABI
11587 || info->spe_64bit_regs_used == 0)
11588 && info->first_gp_reg_save < 31);
11589 restoring_FPRs_inline = (sibcall
11590 || current_function_calls_eh_return
11591 || info->first_fp_reg_save == 64
11592 || FP_SAVE_INLINE (info->first_fp_reg_save));
11593 use_backchain_to_restore_sp = (frame_pointer_needed
11594 || current_function_calls_alloca
11595 || info->total_size > 32767);
11596 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
11597 || rs6000_cpu == PROCESSOR_PPC603
11598 || rs6000_cpu == PROCESSOR_PPC750
11599 || optimize_size);
11601 /* If we have a frame pointer, a call to alloca, or a large stack
11602 frame, restore the old stack pointer using the backchain. Otherwise,
11603 we know what size to update it with. */
11604 if (use_backchain_to_restore_sp)
11606 /* Under V.4, don't reset the stack pointer until after we're done
11607 loading the saved registers. */
11608 if (DEFAULT_ABI == ABI_V4)
11609 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
11611 emit_move_insn (frame_reg_rtx,
11612 gen_rtx_MEM (Pmode, sp_reg_rtx));
11615 else if (info->push_p)
11617 if (DEFAULT_ABI == ABI_V4)
11618 sp_offset = info->total_size;
11619 else
11621 emit_insn (TARGET_32BIT
11622 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
11623 GEN_INT (info->total_size))
11624 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
11625 GEN_INT (info->total_size)));
11629 /* Restore AltiVec registers if needed. */
11630 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
11632 int i;
11634 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
11635 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
11637 rtx addr, areg, mem;
11639 areg = gen_rtx_REG (Pmode, 0);
11640 emit_move_insn
11641 (areg, GEN_INT (info->altivec_save_offset
11642 + sp_offset
11643 + 16 * (i - info->first_altivec_reg_save)));
11645 /* AltiVec addressing mode is [reg+reg]. */
11646 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
11647 mem = gen_rtx_MEM (V4SImode, addr);
11648 set_mem_alias_set (mem, rs6000_sr_alias_set);
11650 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
11654 /* Restore VRSAVE if needed. */
11655 if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
11657 rtx addr, mem, reg;
11659 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11660 GEN_INT (info->vrsave_save_offset + sp_offset));
11661 mem = gen_rtx_MEM (SImode, addr);
11662 set_mem_alias_set (mem, rs6000_sr_alias_set);
11663 reg = gen_rtx_REG (SImode, 12);
11664 emit_move_insn (reg, mem);
11666 emit_insn (generate_set_vrsave (reg, info, 1));
11669 /* Get the old lr if we saved it. */
11670 if (info->lr_save_p)
11672 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
11673 info->lr_save_offset + sp_offset);
11675 set_mem_alias_set (mem, rs6000_sr_alias_set);
11677 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
11680 /* Get the old cr if we saved it. */
11681 if (info->cr_save_p)
11683 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11684 GEN_INT (info->cr_save_offset + sp_offset));
11685 rtx mem = gen_rtx_MEM (SImode, addr);
11687 set_mem_alias_set (mem, rs6000_sr_alias_set);
11689 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
11692 /* Set LR here to try to overlap restores below. */
11693 if (info->lr_save_p)
11694 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
11695 gen_rtx_REG (Pmode, 0));
11697 /* Load exception handler data registers, if needed. */
11698 if (current_function_calls_eh_return)
11700 unsigned int i, regno;
11702 for (i = 0; ; ++i)
11704 rtx mem;
11706 regno = EH_RETURN_DATA_REGNO (i);
11707 if (regno == INVALID_REGNUM)
11708 break;
11710 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
11711 info->ehrd_offset + sp_offset
11712 + reg_size * (int) i);
11713 set_mem_alias_set (mem, rs6000_sr_alias_set);
11715 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
11719 /* Restore GPRs. This is done as a PARALLEL if we are using
11720 the load-multiple instructions. */
11721 if (using_load_multiple)
11723 rtvec p;
11724 p = rtvec_alloc (32 - info->first_gp_reg_save);
11725 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11727 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11728 GEN_INT (info->gp_save_offset
11729 + sp_offset
11730 + reg_size * i));
11731 rtx mem = gen_rtx_MEM (reg_mode, addr);
11733 set_mem_alias_set (mem, rs6000_sr_alias_set);
11735 RTVEC_ELT (p, i) =
11736 gen_rtx_SET (VOIDmode,
11737 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
11738 mem);
11740 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11742 else
11743 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11744 if ((regs_ever_live[info->first_gp_reg_save+i]
11745 && ! call_used_regs[info->first_gp_reg_save+i])
11746 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
11747 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
11748 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
11750 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11751 GEN_INT (info->gp_save_offset
11752 + sp_offset
11753 + reg_size * i));
11754 rtx mem = gen_rtx_MEM (reg_mode, addr);
11756 /* Restore 64-bit quantities for SPE. */
11757 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11759 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
11760 rtx b;
11762 if (!SPE_CONST_OFFSET_OK (offset))
11764 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
11765 emit_move_insn (b, GEN_INT (offset));
11767 else
11768 b = GEN_INT (offset);
11770 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
11771 mem = gen_rtx_MEM (V2SImode, addr);
11774 set_mem_alias_set (mem, rs6000_sr_alias_set);
11776 emit_move_insn (gen_rtx_REG (reg_mode,
11777 info->first_gp_reg_save + i), mem);
11780 /* Restore fpr's if we need to do it without calling a function. */
11781 if (restoring_FPRs_inline)
11782 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11783 if ((regs_ever_live[info->first_fp_reg_save+i]
11784 && ! call_used_regs[info->first_fp_reg_save+i]))
11786 rtx addr, mem;
11787 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11788 GEN_INT (info->fp_save_offset
11789 + sp_offset
11790 + 8 * i));
11791 mem = gen_rtx_MEM (DFmode, addr);
11792 set_mem_alias_set (mem, rs6000_sr_alias_set);
11794 emit_move_insn (gen_rtx_REG (DFmode,
11795 info->first_fp_reg_save + i),
11796 mem);
11799 /* If we saved cr, restore it here. Just those that were used. */
11800 if (info->cr_save_p)
11802 rtx r12_rtx = gen_rtx_REG (SImode, 12);
11803 int count = 0;
11805 if (using_mfcr_multiple)
11807 for (i = 0; i < 8; i++)
11808 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
11809 count++;
11810 if (count == 0)
11811 abort ();
11814 if (using_mfcr_multiple && count > 1)
11816 rtvec p;
11817 int ndx;
11819 p = rtvec_alloc (count);
11821 ndx = 0;
11822 for (i = 0; i < 8; i++)
11823 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
11825 rtvec r = rtvec_alloc (2);
11826 RTVEC_ELT (r, 0) = r12_rtx;
11827 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
11828 RTVEC_ELT (p, ndx) =
11829 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
11830 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
11831 ndx++;
11833 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11834 if (ndx != count)
11835 abort ();
11837 else
11838 for (i = 0; i < 8; i++)
11839 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
11841 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
11842 CR0_REGNO+i),
11843 r12_rtx));
11847 /* If this is V.4, unwind the stack pointer after all of the loads
11848 have been done. We need to emit a block here so that sched
11849 doesn't decide to move the sp change before the register restores
11850 (which may not have any obvious dependency on the stack). This
11851 doesn't hurt performance, because there is no scheduling that can
11852 be done after this point. */
11853 if (DEFAULT_ABI == ABI_V4)
11855 if (frame_reg_rtx != sp_reg_rtx)
11856 rs6000_emit_stack_tie ();
11858 if (use_backchain_to_restore_sp)
11860 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
11862 else if (sp_offset != 0)
11864 emit_insn (TARGET_32BIT
11865 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
11866 GEN_INT (sp_offset))
11867 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
11868 GEN_INT (sp_offset)));
11872 if (current_function_calls_eh_return)
11874 rtx sa = EH_RETURN_STACKADJ_RTX;
11875 emit_insn (TARGET_32BIT
11876 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
11877 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
11880 if (!sibcall)
11882 rtvec p;
11883 if (! restoring_FPRs_inline)
11884 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
11885 else
11886 p = rtvec_alloc (2);
11888 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
11889 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
11890 gen_rtx_REG (Pmode,
11891 LINK_REGISTER_REGNUM));
11893 /* If we have to restore more than two FP registers, branch to the
11894 restore function. It will return to our caller. */
11895 if (! restoring_FPRs_inline)
11897 int i;
11898 char rname[30];
11899 const char *alloc_rname;
11901 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
11902 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
11903 alloc_rname = ggc_strdup (rname);
11904 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
11905 gen_rtx_SYMBOL_REF (Pmode,
11906 alloc_rname));
11908 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11910 rtx addr, mem;
11911 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
11912 GEN_INT (info->fp_save_offset + 8*i));
11913 mem = gen_rtx_MEM (DFmode, addr);
11914 set_mem_alias_set (mem, rs6000_sr_alias_set);
11916 RTVEC_ELT (p, i+3) =
11917 gen_rtx_SET (VOIDmode,
11918 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
11919 mem);
11923 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
11927 /* Write function epilogue. */
11929 static void
11930 rs6000_output_function_epilogue (file, size)
11931 FILE *file;
11932 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
11934 rs6000_stack_t *info = rs6000_stack_info ();
11936 if (! HAVE_epilogue)
11938 rtx insn = get_last_insn ();
11939 /* If the last insn was a BARRIER, we don't have to write anything except
11940 the trace table. */
11941 if (GET_CODE (insn) == NOTE)
11942 insn = prev_nonnote_insn (insn);
11943 if (insn == 0 || GET_CODE (insn) != BARRIER)
11945 /* This is slightly ugly, but at least we don't have two
11946 copies of the epilogue-emitting code. */
11947 start_sequence ();
11949 /* A NOTE_INSN_DELETED is supposed to be at the start
11950 and end of the "toplevel" insn chain. */
11951 emit_note (0, NOTE_INSN_DELETED);
11952 rs6000_emit_epilogue (FALSE);
11953 emit_note (0, NOTE_INSN_DELETED);
11955 /* Expand INSN_ADDRESSES so final() doesn't crash. */
11957 rtx insn;
11958 unsigned addr = 0;
11959 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
11961 INSN_ADDRESSES_NEW (insn, addr);
11962 addr += 4;
11966 if (TARGET_DEBUG_STACK)
11967 debug_rtx_list (get_insns (), 100);
11968 final (get_insns (), file, FALSE, FALSE);
11969 end_sequence ();
11973 /* Output a traceback table here. See /usr/include/sys/debug.h for info
11974 on its format.
11976 We don't output a traceback table if -finhibit-size-directive was
11977 used. The documentation for -finhibit-size-directive reads
11978 ``don't output a @code{.size} assembler directive, or anything
11979 else that would cause trouble if the function is split in the
11980 middle, and the two halves are placed at locations far apart in
11981 memory.'' The traceback table has this property, since it
11982 includes the offset from the start of the function to the
11983 traceback table itself.
11985 System V.4 Powerpc's (and the embedded ABI derived from it) use a
11986 different traceback table. */
11987 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
11988 && rs6000_traceback != traceback_none)
11990 const char *fname = NULL;
11991 const char *language_string = lang_hooks.name;
11992 int fixed_parms = 0, float_parms = 0, parm_info = 0;
11993 int i;
11994 int optional_tbtab;
11996 if (rs6000_traceback == traceback_full)
11997 optional_tbtab = 1;
11998 else if (rs6000_traceback == traceback_part)
11999 optional_tbtab = 0;
12000 else
12001 optional_tbtab = !optimize_size && !TARGET_ELF;
12003 if (optional_tbtab)
12005 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
12006 while (*fname == '.') /* V.4 encodes . in the name */
12007 fname++;
12009 /* Need label immediately before tbtab, so we can compute
12010 its offset from the function start. */
12011 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
12012 ASM_OUTPUT_LABEL (file, fname);
12015 /* The .tbtab pseudo-op can only be used for the first eight
12016 expressions, since it can't handle the possibly variable
12017 length fields that follow. However, if you omit the optional
12018 fields, the assembler outputs zeros for all optional fields
12019 anyways, giving each variable length field is minimum length
12020 (as defined in sys/debug.h). Thus we can not use the .tbtab
12021 pseudo-op at all. */
12023 /* An all-zero word flags the start of the tbtab, for debuggers
12024 that have to find it by searching forward from the entry
12025 point or from the current pc. */
12026 fputs ("\t.long 0\n", file);
12028 /* Tbtab format type. Use format type 0. */
12029 fputs ("\t.byte 0,", file);
12031 /* Language type. Unfortunately, there doesn't seem to be any
12032 official way to get this info, so we use language_string. C
12033 is 0. C++ is 9. No number defined for Obj-C, so use the
12034 value for C for now. There is no official value for Java,
12035 although IBM appears to be using 13. There is no official value
12036 for Chill, so we've chosen 44 pseudo-randomly. */
12037 if (! strcmp (language_string, "GNU C")
12038 || ! strcmp (language_string, "GNU Objective-C"))
12039 i = 0;
12040 else if (! strcmp (language_string, "GNU F77"))
12041 i = 1;
12042 else if (! strcmp (language_string, "GNU Ada"))
12043 i = 3;
12044 else if (! strcmp (language_string, "GNU Pascal"))
12045 i = 2;
12046 else if (! strcmp (language_string, "GNU C++"))
12047 i = 9;
12048 else if (! strcmp (language_string, "GNU Java"))
12049 i = 13;
12050 else if (! strcmp (language_string, "GNU CHILL"))
12051 i = 44;
12052 else
12053 abort ();
12054 fprintf (file, "%d,", i);
12056 /* 8 single bit fields: global linkage (not set for C extern linkage,
12057 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
12058 from start of procedure stored in tbtab, internal function, function
12059 has controlled storage, function has no toc, function uses fp,
12060 function logs/aborts fp operations. */
12061 /* Assume that fp operations are used if any fp reg must be saved. */
12062 fprintf (file, "%d,",
12063 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
12065 /* 6 bitfields: function is interrupt handler, name present in
12066 proc table, function calls alloca, on condition directives
12067 (controls stack walks, 3 bits), saves condition reg, saves
12068 link reg. */
12069 /* The `function calls alloca' bit seems to be set whenever reg 31 is
12070 set up as a frame pointer, even when there is no alloca call. */
12071 fprintf (file, "%d,",
12072 ((optional_tbtab << 6)
12073 | ((optional_tbtab & frame_pointer_needed) << 5)
12074 | (info->cr_save_p << 1)
12075 | (info->lr_save_p)));
12077 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
12078 (6 bits). */
12079 fprintf (file, "%d,",
12080 (info->push_p << 7) | (64 - info->first_fp_reg_save));
12082 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
12083 fprintf (file, "%d,", (32 - first_reg_to_save ()));
12085 if (optional_tbtab)
12087 /* Compute the parameter info from the function decl argument
12088 list. */
12089 tree decl;
12090 int next_parm_info_bit = 31;
12092 for (decl = DECL_ARGUMENTS (current_function_decl);
12093 decl; decl = TREE_CHAIN (decl))
12095 rtx parameter = DECL_INCOMING_RTL (decl);
12096 enum machine_mode mode = GET_MODE (parameter);
12098 if (GET_CODE (parameter) == REG)
12100 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
12102 int bits;
12104 float_parms++;
12106 if (mode == SFmode)
12107 bits = 0x2;
12108 else if (mode == DFmode || mode == TFmode)
12109 bits = 0x3;
12110 else
12111 abort ();
12113 /* If only one bit will fit, don't or in this entry. */
12114 if (next_parm_info_bit > 0)
12115 parm_info |= (bits << (next_parm_info_bit - 1));
12116 next_parm_info_bit -= 2;
12118 else
12120 fixed_parms += ((GET_MODE_SIZE (mode)
12121 + (UNITS_PER_WORD - 1))
12122 / UNITS_PER_WORD);
12123 next_parm_info_bit -= 1;
12129 /* Number of fixed point parameters. */
12130 /* This is actually the number of words of fixed point parameters; thus
12131 an 8 byte struct counts as 2; and thus the maximum value is 8. */
12132 fprintf (file, "%d,", fixed_parms);
12134 /* 2 bitfields: number of floating point parameters (7 bits), parameters
12135 all on stack. */
12136 /* This is actually the number of fp registers that hold parameters;
12137 and thus the maximum value is 13. */
12138 /* Set parameters on stack bit if parameters are not in their original
12139 registers, regardless of whether they are on the stack? Xlc
12140 seems to set the bit when not optimizing. */
12141 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
12143 if (! optional_tbtab)
12144 return;
12146 /* Optional fields follow. Some are variable length. */
12148 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
12149 11 double float. */
12150 /* There is an entry for each parameter in a register, in the order that
12151 they occur in the parameter list. Any intervening arguments on the
12152 stack are ignored. If the list overflows a long (max possible length
12153 34 bits) then completely leave off all elements that don't fit. */
12154 /* Only emit this long if there was at least one parameter. */
12155 if (fixed_parms || float_parms)
12156 fprintf (file, "\t.long %d\n", parm_info);
12158 /* Offset from start of code to tb table. */
12159 fputs ("\t.long ", file);
12160 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
12161 #if TARGET_AIX
12162 RS6000_OUTPUT_BASENAME (file, fname);
12163 #else
12164 assemble_name (file, fname);
12165 #endif
12166 fputs ("-.", file);
12167 #if TARGET_AIX
12168 RS6000_OUTPUT_BASENAME (file, fname);
12169 #else
12170 assemble_name (file, fname);
12171 #endif
12172 putc ('\n', file);
12174 /* Interrupt handler mask. */
12175 /* Omit this long, since we never set the interrupt handler bit
12176 above. */
12178 /* Number of CTL (controlled storage) anchors. */
12179 /* Omit this long, since the has_ctl bit is never set above. */
12181 /* Displacement into stack of each CTL anchor. */
12182 /* Omit this list of longs, because there are no CTL anchors. */
12184 /* Length of function name. */
12185 if (*fname == '*')
12186 ++fname;
12187 fprintf (file, "\t.short %d\n", (int) strlen (fname));
12189 /* Function name. */
12190 assemble_string (fname, strlen (fname));
12192 /* Register for alloca automatic storage; this is always reg 31.
12193 Only emit this if the alloca bit was set above. */
12194 if (frame_pointer_needed)
12195 fputs ("\t.byte 31\n", file);
12197 fputs ("\t.align 2\n", file);
12201 /* A C compound statement that outputs the assembler code for a thunk
12202 function, used to implement C++ virtual function calls with
12203 multiple inheritance. The thunk acts as a wrapper around a virtual
12204 function, adjusting the implicit object parameter before handing
12205 control off to the real function.
12207 First, emit code to add the integer DELTA to the location that
12208 contains the incoming first argument. Assume that this argument
12209 contains a pointer, and is the one used to pass the `this' pointer
12210 in C++. This is the incoming argument *before* the function
12211 prologue, e.g. `%o0' on a sparc. The addition must preserve the
12212 values of all other incoming arguments.
12214 After the addition, emit code to jump to FUNCTION, which is a
12215 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
12216 not touch the return address. Hence returning from FUNCTION will
12217 return to whoever called the current `thunk'.
12219 The effect must be as if FUNCTION had been called directly with the
12220 adjusted first argument. This macro is responsible for emitting
12221 all of the code for a thunk function; output_function_prologue()
12222 and output_function_epilogue() are not invoked.
12224 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
12225 been extracted from it.) It might possibly be useful on some
12226 targets, but probably not.
12228 If you do not define this macro, the target-independent code in the
12229 C++ frontend will generate a less efficient heavyweight thunk that
12230 calls FUNCTION instead of jumping to it. The generic approach does
12231 not support varargs. */
/* Implement TARGET_ASM_OUTPUT_MI_THUNK: emit the complete assembly for a
   C++ multiple-inheritance thunk.  Adjusts the incoming `this' pointer by
   DELTA (and optionally by a vtable-indirected VCALL_OFFSET), then tail-calls
   FUNCTION.  Runs a miniature rest_of_compilation: emits RTL, then feeds it
   straight to final ().  */
static void
rs6000_output_mi_thunk (file, thunk_fndecl, delta, vcall_offset, function)
     FILE *file;
     tree thunk_fndecl ATTRIBUTE_UNUSED;
     HOST_WIDE_INT delta;
     HOST_WIDE_INT vcall_offset;
     tree function;
{
  rtx this, insn, funexp;

  /* Pretend reload has run so the expanders below use hard registers
     directly; restored at the bottom.  */
  reload_completed = 1;
  no_new_pseudos = 1;

  /* Mark the end of the (empty) prologue.  */
  emit_note (NULL, NOTE_INSN_PROLOGUE_END);

  /* Find the "this" pointer.  If the function returns a structure,
     the structure return pointer is in r3, so "this" is in r4.
     NOTE(review): aggregate_value_p is called on the return type only;
     newer interfaces also pass the fndecl — confirm this matches the
     aggregate-return convention in use.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function))))
    this = gen_rtx_REG (Pmode, 4);
  else
    this = gen_rtx_REG (Pmode, 3);

  /* Apply the constant offset, if required.  */
  if (delta)
    {
      rtx delta_rtx = GEN_INT (delta);
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (this, this, delta_rtx)
		 : gen_adddi3 (this, this, delta_rtx));
    }

  /* Apply the offset from the vtable, if required:
     this += *(*this + vcall_offset).  r12 is used as scratch
     (volatile, not used for argument passing).  */
  if (vcall_offset)
    {
      rtx vcall_offset_rtx = GEN_INT (vcall_offset);
      rtx tmp = gen_rtx_REG (Pmode, 12);

      emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
		 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
      emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (this, this, tmp)
		 : gen_adddi3 (this, this, tmp));
    }

  /* Generate a tail call to the target function.  Mark the symbol used
     so assemble_external is only done once.  */
  if (!TREE_USED (function))
    {
      assemble_external (function);
      TREE_USED (function) = 1;
    }
  funexp = XEXP (DECL_RTL (function), 0);
  /* Clear the local flag: the target may be overridden in another
     translation unit, so a short local branch is not safe.  */
  SYMBOL_REF_FLAGS (funexp) &= ~SYMBOL_FLAG_LOCAL;
  funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);

#if TARGET_MACHO
  if (MACHOPIC_INDIRECT)
    funexp = machopic_indirect_call_target (funexp);
#endif

  /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
     generate sibcall RTL explicitly to avoid constraint abort.  */
  insn = emit_call_insn (
	   gen_rtx_PARALLEL (VOIDmode,
	     gen_rtvec (4,
			gen_rtx_CALL (VOIDmode,
				      funexp, const0_rtx),
			gen_rtx_USE (VOIDmode, const0_rtx),
			gen_rtx_USE (VOIDmode,
				     gen_rtx_REG (SImode,
						  LINK_REGISTER_REGNUM)),
			gen_rtx_RETURN (VOIDmode))));
  SIBLING_CALL_P (insn) = 1;
  emit_barrier ();

  /* Run just enough of rest_of_compilation to get the insns emitted.
     There's not really enough bulk here to make other passes such as
     instruction scheduling worth while.  Note that use_thunk calls
     assemble_start_function and assemble_end_function.  */
  insn = get_insns ();
  shorten_branches (insn);
  final_start_function (insn, file, 1);
  final (insn, file, 1, 0);
  final_end_function ();

  /* Undo the fake post-reload state set up at the top.  */
  reload_completed = 0;
  no_new_pseudos = 0;
}
12325 /* A quick summary of the various types of 'constant-pool tables'
12326 under PowerPC:
12328 Target Flags Name One table per
12329 AIX (none) AIX TOC object file
12330 AIX -mfull-toc AIX TOC object file
12331 AIX -mminimal-toc AIX minimal TOC translation unit
12332 SVR4/EABI (none) SVR4 SDATA object file
12333 SVR4/EABI -fpic SVR4 pic object file
12334 SVR4/EABI -fPIC SVR4 PIC translation unit
12335 SVR4/EABI -mrelocatable EABI TOC function
12336 SVR4/EABI -maix AIX TOC object file
12337 SVR4/EABI -maix -mminimal-toc
12338 AIX minimal TOC translation unit
12340 Name Reg. Set by entries contains:
12341 made by addrs? fp? sum?
12343 AIX TOC 2 crt0 as Y option option
12344 AIX minimal TOC 30 prolog gcc Y Y option
12345 SVR4 SDATA 13 crt0 gcc N Y N
12346 SVR4 pic 30 prolog ld Y not yet N
12347 SVR4 PIC 30 prolog gcc Y option option
12348 EABI TOC 30 prolog gcc Y option option
12352 /* Hash functions for the hash table. */
/* Compute a hash value for RTX K, used to detect duplicate TOC entries.
   Recursively folds K's operands into the accumulator according to the
   rtx format string, so structurally equal rtxes hash equal (the property
   toc_hash_eq relies on).  */
static unsigned
rs6000_hash_constant (k)
     rtx k;
{
  enum rtx_code code = GET_CODE (k);
  enum machine_mode mode = GET_MODE (k);
  /* Seed the hash with the code and mode so different kinds of rtx
     rarely collide.  */
  unsigned result = (code << 3) ^ mode;
  const char *format;
  int flen, fidx;

  format = GET_RTX_FORMAT (code);
  flen = strlen (format);
  fidx = 0;

  switch (code)
    {
    case LABEL_REF:
      /* Hash on the referenced insn's UID rather than recursing.  */
      return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));

    case CONST_DOUBLE:
      if (mode != VOIDmode)
	return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
      /* VOIDmode CONST_DOUBLE is an integer pair; hash only the two
	 word operands.  */
      flen = 2;
      break;

    case CODE_LABEL:
      /* Skip the chain/insn-link operands; start at operand 3.  */
      fidx = 3;
      break;

    default:
      break;
    }

  for (; fidx < flen; fidx++)
    switch (format[fidx])
      {
      case 's':
	{
	  unsigned i, len;
	  const char *str = XSTR (k, fidx);
	  len = strlen (str);
	  result = result * 613 + len;
	  for (i = 0; i < len; i++)
	    result = result * 613 + (unsigned) str[i];
	  break;
	}
      case 'u':
      case 'e':
	/* Sub-expression: recurse.  */
	result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
	break;
      case 'i':
      case 'n':
	result = result * 613 + (unsigned) XINT (k, fidx);
	break;
      case 'w':
	if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
	  result = result * 613 + (unsigned) XWINT (k, fidx);
	else
	  {
	    /* HOST_WIDE_INT is wider than unsigned: fold it in one
	       unsigned-sized chunk at a time.  */
	    size_t i;
	    for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
	      result = result * 613 + (unsigned) (XWINT (k, fidx)
						  >> CHAR_BIT * i);
	  }
	break;
      case '0':
	/* Unused operand slot: contributes nothing.  */
	break;
      default:
	abort ();
      }

  return result;
}
12428 static unsigned
12429 toc_hash_function (hash_entry)
12430 const void * hash_entry;
12432 const struct toc_hash_struct *thc =
12433 (const struct toc_hash_struct *) hash_entry;
12434 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
12437 /* Compare H1 and H2 for equivalence. */
12439 static int
12440 toc_hash_eq (h1, h2)
12441 const void * h1;
12442 const void * h2;
12444 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
12445 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
12447 if (((const struct toc_hash_struct *) h1)->key_mode
12448 != ((const struct toc_hash_struct *) h2)->key_mode)
12449 return 0;
12451 return rtx_equal_p (r1, r2);
12454 /* These are the names given by the C++ front-end to vtables, and
12455 vtable-like objects. Ideally, this logic should not be here;
12456 instead, there should be some programmatic way of inquiring as
12457 to whether or not an object is a vtable. */
12459 #define VTABLE_NAME_P(NAME) \
12460 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
12461 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
12462 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
12463 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
12465 void
12466 rs6000_output_symbol_ref (file, x)
12467 FILE *file;
12468 rtx x;
12470 /* Currently C++ toc references to vtables can be emitted before it
12471 is decided whether the vtable is public or private. If this is
12472 the case, then the linker will eventually complain that there is
12473 a reference to an unknown section. Thus, for vtables only,
12474 we emit the TOC reference to reference the symbol and not the
12475 section. */
12476 const char *name = XSTR (x, 0);
12478 if (VTABLE_NAME_P (name))
12480 RS6000_OUTPUT_BASENAME (file, name);
12482 else
12483 assemble_name (file, name);
12486 /* Output a TOC entry. We derive the entry name from what is being
12487 written. */
/* Output a TOC entry for constant X with label number LABELNO and mode MODE.
   We derive the entry name from what is being written.  Handles duplicate
   suppression (via the TOC hash table), FP constants of each mode, integer
   constants, and symbol/label references with optional offsets.  */
void
output_toc (file, x, labelno, mode)
     FILE *file;
     rtx x;
     int labelno;
     enum machine_mode mode;
{
  char buf[256];
  const char *name = buf;
  const char *real_name;
  rtx base = x;
  int offset = 0;

  if (TARGET_NO_TOC)
    abort ();

  /* When the linker won't eliminate them, don't output duplicate
     TOC entries (this happens on AIX if there is any kind of TOC,
     and on SVR4 under -fPIC or -mrelocatable).  Don't do this for
     CODE_LABELs.  */
  if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
    {
      struct toc_hash_struct *h;
      void * * found;

      /* Create toc_hash_table.  This can't be done at OVERRIDE_OPTIONS
	 time because GGC is not initialised at that point.  */
      if (toc_hash_table == NULL)
	toc_hash_table = htab_create_ggc (1021, toc_hash_function,
					  toc_hash_eq, NULL);

      h = ggc_alloc (sizeof (*h));
      h->key = x;
      h->key_mode = mode;
      h->labelno = labelno;

      found = htab_find_slot (toc_hash_table, h, 1);
      if (*found == NULL)
	*found = h;
      else  /* This is indeed a duplicate.
	       Set this label equal to that label.  */
	{
	  fputs ("\t.set ", file);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d,", labelno);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
				   found)->labelno));
	  return;
	}
    }

  /* If we're going to put a double constant in the TOC, make sure it's
     aligned properly when strict alignment is on.  */
  if (GET_CODE (x) == CONST_DOUBLE
      && STRICT_ALIGNMENT
      && GET_MODE_BITSIZE (mode) >= 64
      && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
    ASM_OUTPUT_ALIGN (file, 3);
  }

  (*targetm.asm_out.internal_label) (file, "LC", labelno);

  /* Handle FP constants specially.  Note that if we have a minimal
     TOC, things we put here aren't actually in the TOC, so we can allow
     FP constants.  */
  if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
    {
      REAL_VALUE_TYPE rv;
      long k[4];

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      if (TARGET_64BIT)
	{
	  /* 64-bit: two doubleword entries.  */
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff,
		     k[2] & 0xffffffff, k[3] & 0xffffffff);
	  fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff,
		   k[2] & 0xffffffff, k[3] & 0xffffffff);
	  return;
	}
      else
	{
	  /* 32-bit: four word entries.  */
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff,
		     k[2] & 0xffffffff, k[3] & 0xffffffff);
	  fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff,
		   k[2] & 0xffffffff, k[3] & 0xffffffff);
	  return;
	}
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
    {
      REAL_VALUE_TYPE rv;
      long k[2];

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff);
	  fprintf (file, "0x%lx%08lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff);
	  fprintf (file, "0x%lx,0x%lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff);
	  return;
	}
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
    {
      REAL_VALUE_TYPE rv;
      long l;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      if (TARGET_64BIT)
	{
	  /* Single float is stored in the high half of the doubleword.  */
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
	  fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
	  fprintf (file, "0x%lx\n", l & 0xffffffff);
	  return;
	}
    }
  else if (GET_MODE (x) == VOIDmode
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      unsigned HOST_WIDE_INT low;
      HOST_WIDE_INT high;

      /* Split the constant into a low word and a (sign-carrying)
	 high word, independent of the host's HOST_WIDE_INT width.  */
      if (GET_CODE (x) == CONST_DOUBLE)
	{
	  low = CONST_DOUBLE_LOW (x);
	  high = CONST_DOUBLE_HIGH (x);
	}
      else
#if HOST_BITS_PER_WIDE_INT == 32
	{
	  low = INTVAL (x);
	  high = (low & 0x80000000) ? ~0 : 0;
	}
#else
	{
	  low = INTVAL (x) & 0xffffffff;
	  high = (HOST_WIDE_INT) INTVAL (x) >> 32;
	}
#endif

      /* TOC entries are always Pmode-sized, but since this
	 is a bigendian machine then if we're putting smaller
	 integer constants in the TOC we have to pad them.
	 (This is still a win over putting the constants in
	 a separate constant pool, because then we'd have
	 to have both a TOC entry _and_ the actual constant.)

	 For a 32-bit target, CONST_INT values are loaded and shifted
	 entirely within `low' and can be stored in one TOC entry.  */

      if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
	abort ();/* It would be easy to make this work, but it doesn't now.  */

      if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
	{
#if HOST_BITS_PER_WIDE_INT == 32
	  lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
			 POINTER_SIZE, &low, &high, 0);
#else
	  low |= high << 32;
	  low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
	  high = (HOST_WIDE_INT) low >> 32;
	  low &= 0xffffffff;
#endif
	}

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc ID_%lx_%lx[TC],",
		     (long) high & 0xffffffff, (long) low & 0xffffffff);
	  fprintf (file, "0x%lx%08lx\n",
		   (long) high & 0xffffffff, (long) low & 0xffffffff);
	  return;
	}
      else
	{
	  if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
	    {
	      /* Mode wider than a pointer: emit two words.  */
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc ID_%lx_%lx[TC],",
			 (long) high & 0xffffffff, (long) low & 0xffffffff);
	      fprintf (file, "0x%lx,0x%lx\n",
		       (long) high & 0xffffffff, (long) low & 0xffffffff);
	    }
	  else
	    {
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
	      fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
	    }
	  return;
	}
    }

  /* Remaining cases: a symbol or label, possibly inside a CONST with
     a constant offset.  */
  if (GET_CODE (x) == CONST)
    {
      if (GET_CODE (XEXP (x, 0)) != PLUS)
	abort ();

      base = XEXP (XEXP (x, 0), 0);
      offset = INTVAL (XEXP (XEXP (x, 0), 1));
    }

  if (GET_CODE (base) == SYMBOL_REF)
    name = XSTR (base, 0);
  else if (GET_CODE (base) == LABEL_REF)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
  else if (GET_CODE (base) == CODE_LABEL)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
  else
    abort ();

  real_name = (*targetm.strip_name_encoding) (name);
  if (TARGET_MINIMAL_TOC)
    fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
  else
    {
      fprintf (file, "\t.tc %s", real_name);

      /* Encode the offset into the TOC entry name (.N for negative,
	 .P for positive).  */
      if (offset < 0)
	fprintf (file, ".N%d", - offset);
      else if (offset)
	fprintf (file, ".P%d", offset);

      fputs ("[TC],", file);
    }

  /* Currently C++ toc references to vtables can be emitted before it
     is decided whether the vtable is public or private.  If this is
     the case, then the linker will eventually complain that there is
     a TOC reference to an unknown section.  Thus, for vtables only,
     we emit the TOC reference to reference the symbol and not the
     section.  */
  if (VTABLE_NAME_P (name))
    {
      RS6000_OUTPUT_BASENAME (file, name);
      if (offset < 0)
	fprintf (file, "%d", offset);
      else if (offset > 0)
	fprintf (file, "+%d", offset);
    }
  else
    output_addr_const (file, x);
  putc ('\n', file);
}
12785 /* Output an assembler pseudo-op to write an ASCII string of N characters
12786 starting at P to FILE.
12788 On the RS/6000, we have to do this using the .byte operation and
12789 write out special characters outside the quoted string.
12790 Also, the assembler is broken; very long strings are truncated,
12791 so we must artificially break them up early. */
/* Output an assembler pseudo-op to write an ASCII string of N characters
   starting at P to FILE.  Printable characters are batched into quoted
   .byte "..." strings; everything else is emitted as a decimal .byte.
   Strings are broken at 512 characters because the AIX assembler
   truncates very long strings (see the comment above).  */
void
output_ascii (file, p, n)
     FILE *file;
     const char *p;
     int n;
{
  char c;
  int i, count_string;
  /* These three pointers encode the emitter state:
     for_string  - prefix to emit before the next quoted character
		   (NULL while inside an open quoted string);
     for_decimal - prefix to emit before the next decimal byte;
     to_close    - text needed to close the currently open string
		   (NULL when no string is open).  */
  const char *for_string = "\t.byte \"";
  const char *for_decimal = "\t.byte ";
  const char *to_close = NULL;

  count_string = 0;
  for (i = 0; i < n; i++)
    {
      c = *p++;
      /* Printable ASCII (space through '~') goes inside a quoted string.  */
      if (c >= ' ' && c < 0177)
	{
	  if (for_string)
	    fputs (for_string, file);
	  putc (c, file);

	  /* Write two quotes to get one.  */
	  if (c == '"')
	    {
	      putc (c, file);
	      ++count_string;
	    }

	  for_string = NULL;
	  for_decimal = "\"\n\t.byte ";
	  to_close = "\"\n";
	  ++count_string;

	  /* Break overly long strings (assembler limitation).  */
	  if (count_string >= 512)
	    {
	      fputs (to_close, file);

	      for_string = "\t.byte \"";
	      for_decimal = "\t.byte ";
	      to_close = NULL;
	      count_string = 0;
	    }
	}
      else
	{
	  /* Non-printable byte: emit as a decimal .byte value.  */
	  if (for_decimal)
	    fputs (for_decimal, file);
	  fprintf (file, "%d", c);

	  for_string = "\n\t.byte \"";
	  for_decimal = ", ";
	  to_close = "\n";
	  count_string = 0;
	}
    }

  /* Now close the string if we have written one.  Then end the line.  */
  if (to_close)
    fputs (to_close, file);
}
12855 /* Generate a unique section name for FILENAME for a section type
12856 represented by SECTION_DESC. Output goes into BUF.
12858 SECTION_DESC can be any string, as long as it is different for each
12859 possible section type.
12861 We name the section in the same manner as xlc. The name begins with an
12862 underscore followed by the filename (after stripping any leading directory
12863 names) with the last period replaced by the string SECTION_DESC. If
12864 FILENAME does not contain a period, SECTION_DESC is appended to the end of
12865 the name. */
/* Generate a unique section name for FILENAME, in the manner of xlc:
   '_' + basename with its last period replaced by SECTION_DESC (or with
   SECTION_DESC appended when there is no period).  The result is
   xmalloc'd into *BUF; only alphanumeric characters of the basename are
   kept.  Note: the last period is searched in the whole FILENAME, so a
   period occurring before the final slash suppresses SECTION_DESC
   entirely — this matches the historical behavior.  */
void
rs6000_gen_section_name (buf, filename, section_desc)
     char **buf;
     const char *filename;
     const char *section_desc;
{
  const char *scan;
  const char *base = filename;
  const char *dot = 0;
  char *out;
  int needed;

  /* One pass to find the basename start and the last '.'.  */
  for (scan = filename; *scan; scan++)
    {
      if (*scan == '/')
	base = scan + 1;
      else if (*scan == '.')
	dot = scan;
    }

  /* One byte for the leading '_' and one for the NUL terminator.  */
  needed = strlen (base) + strlen (section_desc) + 2;
  *buf = (char *) xmalloc (needed);

  out = *buf;
  *out++ = '_';

  for (scan = base; *scan; scan++)
    {
      if (scan == dot)
	{
	  /* Replace the final period with SECTION_DESC and stop;
	     anything following the period is dropped.  */
	  strcpy (out, section_desc);
	  out += strlen (section_desc);
	  break;
	}
      else if (ISALNUM (*scan))
	*out++ = *scan;
    }

  if (dot == 0)
    strcpy (out, section_desc);	/* No period anywhere: just append.  */
  else
    *out = '\0';
}
12911 /* Emit profile function. */
/* Emit profile function.  Emits the RTL call to the profiling routine
   (mcount) for the current function; LABELNO names the per-function
   counter label on AIX.  No-op when profiling the kernel (the kernel
   convention is handled in output_function_profiler instead).  */
void
output_profile_hook (labelno)
     int labelno ATTRIBUTE_UNUSED;
{
  if (TARGET_PROFILE_KERNEL)
    return;

  if (DEFAULT_ABI == ABI_AIX)
    {
#ifdef NO_PROFILE_COUNTERS
      /* No counter word: call mcount with no arguments.  */
      emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
#else
      /* Pass the address of the LPn counter label to mcount.  */
      char buf[30];
      const char *label_name;
      rtx fun;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
      label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
      fun = gen_rtx_SYMBOL_REF (Pmode, label_name);

      emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
			 fun, Pmode);
#endif
    }
  else if (DEFAULT_ABI == ABI_DARWIN)
    {
      const char *mcount_name = RS6000_MCOUNT;
      int caller_addr_regno = LINK_REGISTER_REGNUM;

      /* Be conservative and always set this, at least for now.  */
      current_function_uses_pic_offset_table = 1;

#if TARGET_MACHO
      /* For PIC code, set up a stub and collect the caller's address
	 from r0, which is where the prologue puts it.  */
      if (MACHOPIC_INDIRECT)
	{
	  mcount_name = machopic_stub_name (mcount_name);
	  if (current_function_uses_pic_offset_table)
	    caller_addr_regno = 0;
	}
#endif
      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
			 0, VOIDmode, 1,
			 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
    }
}
12961 /* Write function profiler code. */
/* Write function profiler code.  Emits the literal assembly that calls
   mcount for ABI_V4 (and for the kernel-profiling variant of
   ABI_AIX/ABI_DARWIN; the normal AIX/Darwin path is handled by
   output_profile_hook at RTL time instead).  LABELNO names the LPn
   counter label used by the V.4 PIC variants.  */
void
output_function_profiler (file, labelno)
  FILE *file;
  int labelno;
{
  char buf[100];
  /* Stack offset at which the saved LR is stored (8 by default,
     4 for the V.4 ABI).  */
  int save_lr = 8;

  switch (DEFAULT_ABI)
    {
    default:
      abort ();

    case ABI_V4:
      save_lr = 4;
      if (!TARGET_32BIT)
	{
	  warning ("no profiling of 64-bit code for this ABI");
	  return;
	}
      ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
      fprintf (file, "\tmflr %s\n", reg_names[0]);
      if (flag_pic == 1)
	{
	  /* -fpic: load the counter address from the GOT.  */
	  fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
	  asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@got(%s)\n", reg_names[12]);
	}
      else if (flag_pic > 1)
	{
	  /* -fPIC: compute the counter address pc-relatively.  */
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  /* Now, we need to get the address of the label.  */
	  fputs ("\tbl 1f\n\t.long ", file);
	  assemble_name (file, buf);
	  fputs ("-.\n1:", file);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
	  asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
		       reg_names[0], reg_names[11]);
	  asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
		       reg_names[0], reg_names[0], reg_names[11]);
	}
      else
	{
	  /* Non-PIC: build the counter address with lis/la.  */
	  asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
	  assemble_name (file, buf);
	  fputs ("@ha\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@l(%s)\n", reg_names[12]);
	}

      /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH.  */
      fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
      break;

    case ABI_AIX:
    case ABI_DARWIN:
      if (!TARGET_PROFILE_KERNEL)
	{
	  /* Don't do anything, done in output_profile_hook ().  */
	}
      else
	{
	  /* Kernel profiling is 64-bit only.  */
	  if (TARGET_32BIT)
	    abort ();

	  asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
	  asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);

	  if (current_function_needs_context)
	    {
	      /* Preserve the static chain register around the call.  */
	      asm_fprintf (file, "\tstd %s,24(%s)\n",
			   reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
	      fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
	      asm_fprintf (file, "\tld %s,24(%s)\n",
			   reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
	    }
	  else
	    fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
	}
      break;
    }
}
/* Scheduler hook: this back end always uses the DFA-based pipeline
   description, so unconditionally report true.  */
static int
rs6000_use_dfa_pipeline_interface ()
{
  return 1;
}
13061 /* Power4 load update and store update instructions are cracked into a
13062 load or store and an integer insn which are executed in the same cycle.
13063 Branches have their own dispatch slot which does not count against the
13064 GCC issue rate, but it changes the program flow so there are no other
13065 instructions to issue in this cycle. */
/* Scheduler hook: return how many more insns may be issued in the current
   cycle after issuing INSN, given that MORE slots remained before it.
   See the comment above: on Power4, load/store-update forms are cracked
   into two internal ops, and branches end the issue group.  */
static int
rs6000_variable_issue (stream, verbose, insn, more)
  FILE *stream ATTRIBUTE_UNUSED;
  int verbose ATTRIBUTE_UNUSED;
  rtx insn;
  int more;
{
  /* USE/CLOBBER markers consume no issue slot.  */
  if (GET_CODE (PATTERN (insn)) == USE
      || GET_CODE (PATTERN (insn)) == CLOBBER)
    return more;

  if (rs6000_cpu == PROCESSOR_POWER4)
    {
      enum attr_type type = get_attr_type (insn);
      /* These types end the dispatch group entirely.  */
      if (type == TYPE_LOAD_EXT_U || type == TYPE_LOAD_EXT_UX
	  || type == TYPE_LOAD_UX || type == TYPE_STORE_UX)
	return 0;
      /* These are cracked into two internal operations, so they use
	 two slots.  */
      else if (type == TYPE_LOAD_U || type == TYPE_STORE_U
	       || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
	       || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
	       || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
	       || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
	       || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
	       || type == TYPE_IDIV || type == TYPE_LDIV)
	return more > 2 ? more - 2 : 0;
    }

  /* Default: one slot consumed.  */
  return more - 1;
}
13097 /* Adjust the cost of a scheduling dependency. Return the new cost of
13098 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
13100 static int
13101 rs6000_adjust_cost (insn, link, dep_insn, cost)
13102 rtx insn;
13103 rtx link;
13104 rtx dep_insn ATTRIBUTE_UNUSED;
13105 int cost;
13107 if (! recog_memoized (insn))
13108 return 0;
13110 if (REG_NOTE_KIND (link) != 0)
13111 return 0;
13113 if (REG_NOTE_KIND (link) == 0)
13115 /* Data dependency; DEP_INSN writes a register that INSN reads
13116 some cycles later. */
13117 switch (get_attr_type (insn))
13119 case TYPE_JMPREG:
13120 /* Tell the first scheduling pass about the latency between
13121 a mtctr and bctr (and mtlr and br/blr). The first
13122 scheduling pass will not know about this latency since
13123 the mtctr instruction, which has the latency associated
13124 to it, will be generated by reload. */
13125 return TARGET_POWER ? 5 : 4;
13126 case TYPE_BRANCH:
13127 /* Leave some extra cycles between a compare and its
13128 dependent branch, to inhibit expensive mispredicts. */
13129 if ((rs6000_cpu_attr == CPU_PPC603
13130 || rs6000_cpu_attr == CPU_PPC604
13131 || rs6000_cpu_attr == CPU_PPC604E
13132 || rs6000_cpu_attr == CPU_PPC620
13133 || rs6000_cpu_attr == CPU_PPC630
13134 || rs6000_cpu_attr == CPU_PPC750
13135 || rs6000_cpu_attr == CPU_PPC7400
13136 || rs6000_cpu_attr == CPU_PPC7450
13137 || rs6000_cpu_attr == CPU_POWER4)
13138 && recog_memoized (dep_insn)
13139 && (INSN_CODE (dep_insn) >= 0)
13140 && (get_attr_type (dep_insn) == TYPE_CMP
13141 || get_attr_type (dep_insn) == TYPE_COMPARE
13142 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
13143 || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
13144 || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
13145 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
13146 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
13147 || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
13148 return cost + 2;
13149 default:
13150 break;
13152 /* Fall out to return default cost. */
13155 return cost;
13158 /* A C statement (sans semicolon) to update the integer scheduling
13159 priority INSN_PRIORITY (INSN). Reduce the priority to execute the
13160 INSN earlier, increase the priority to execute INSN later. Do not
13161 define this macro if you do not need to adjust the scheduling
13162 priorities of insns. */
/* Scheduler hook: adjust the scheduling priority of INSN.  Currently a
   no-op — the experimental PPC750 multiply/divide heuristic below is
   compiled out with #if 0 and kept only for reference.  */
static int
rs6000_adjust_priority (insn, priority)
     rtx insn ATTRIBUTE_UNUSED;
     int priority;
{
  /* On machines (like the 750) which have asymmetric integer units,
     where one integer unit can do multiply and divides and the other
     can't, reduce the priority of multiply/divide so it is scheduled
     before other integer operations.  */

#if 0
  if (! INSN_P (insn))
    return priority;

  if (GET_CODE (PATTERN (insn)) == USE)
    return priority;

  switch (rs6000_cpu_attr) {
  case CPU_PPC750:
    switch (get_attr_type (insn))
      {
      default:
	break;

      case TYPE_IMUL:
      case TYPE_IDIV:
	fprintf (stderr, "priority was %#x (%d) before adjustment\n",
		 priority, priority);
	if (priority >= 0 && priority < 0x01000000)
	  priority >>= 3;
	break;
      }
  }
#endif

  return priority;
}
13202 /* Return how many instructions the machine can issue per cycle. */
13204 static int
13205 rs6000_issue_rate ()
13207 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
13208 if (!reload_completed)
13209 return 1;
13211 switch (rs6000_cpu_attr) {
13212 case CPU_RIOS1: /* ? */
13213 case CPU_RS64A:
13214 case CPU_PPC601: /* ? */
13215 case CPU_PPC7450:
13216 return 3;
13217 case CPU_PPC440:
13218 case CPU_PPC603:
13219 case CPU_PPC750:
13220 case CPU_PPC7400:
13221 case CPU_PPC8540:
13222 return 2;
13223 case CPU_RIOS2:
13224 case CPU_PPC604:
13225 case CPU_PPC604E:
13226 case CPU_PPC620:
13227 case CPU_PPC630:
13228 case CPU_POWER4:
13229 return 4;
13230 default:
13231 return 1;
13235 /* Return how many instructions to look ahead for better insn
13236 scheduling. */
13238 static int
13239 rs6000_use_sched_lookahead ()
13241 if (rs6000_cpu_attr == CPU_PPC8540)
13242 return 4;
13243 return 0;
13247 /* Length in units of the trampoline for entering a nested function. */
13250 rs6000_trampoline_size ()
13252 int ret = 0;
13254 switch (DEFAULT_ABI)
13256 default:
13257 abort ();
13259 case ABI_AIX:
13260 ret = (TARGET_32BIT) ? 12 : 24;
13261 break;
13263 case ABI_DARWIN:
13264 case ABI_V4:
13265 ret = (TARGET_32BIT) ? 40 : 48;
13266 break;
13269 return ret;
13272 /* Emit RTL insns to initialize the variable parts of a trampoline.
13273 FNADDR is an RTX for the address of the function's pure code.
13274 CXT is an RTX for the static chain value for the function. */
13276 void
13277 rs6000_initialize_trampoline (addr, fnaddr, cxt)
13278 rtx addr;
13279 rtx fnaddr;
13280 rtx cxt;
13282 enum machine_mode pmode = Pmode;
13283 int regsize = (TARGET_32BIT) ? 4 : 8;
13284 rtx ctx_reg = force_reg (pmode, cxt);
13286 switch (DEFAULT_ABI)
13288 default:
13289 abort ();
13291 /* Macros to shorten the code expansions below. */
13292 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
13293 #define MEM_PLUS(addr,offset) \
13294 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
13296 /* Under AIX, just build the 3 word function descriptor */
13297 case ABI_AIX:
13299 rtx fn_reg = gen_reg_rtx (pmode);
13300 rtx toc_reg = gen_reg_rtx (pmode);
13301 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
13302 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
13303 emit_move_insn (MEM_DEREF (addr), fn_reg);
13304 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
13305 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
13307 break;
13309 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
13310 case ABI_DARWIN:
13311 case ABI_V4:
13312 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
13313 FALSE, VOIDmode, 4,
13314 addr, pmode,
13315 GEN_INT (rs6000_trampoline_size ()), SImode,
13316 fnaddr, pmode,
13317 ctx_reg, pmode);
13318 break;
13321 return;
13325 /* Table of valid machine attributes. */
13327 const struct attribute_spec rs6000_attribute_table[] =
13329 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
13330 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
13331 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
13332 { NULL, 0, 0, false, false, false, NULL }
13335 /* Handle a "longcall" or "shortcall" attribute; arguments as in
13336 struct attribute_spec.handler. */
13338 static tree
13339 rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
13340 tree *node;
13341 tree name;
13342 tree args ATTRIBUTE_UNUSED;
13343 int flags ATTRIBUTE_UNUSED;
13344 bool *no_add_attrs;
13346 if (TREE_CODE (*node) != FUNCTION_TYPE
13347 && TREE_CODE (*node) != FIELD_DECL
13348 && TREE_CODE (*node) != TYPE_DECL)
13350 warning ("`%s' attribute only applies to functions",
13351 IDENTIFIER_POINTER (name));
13352 *no_add_attrs = true;
13355 return NULL_TREE;
13358 /* Set longcall attributes on all functions declared when
13359 rs6000_default_long_calls is true. */
13360 static void
13361 rs6000_set_default_type_attributes (type)
13362 tree type;
13364 if (rs6000_default_long_calls
13365 && (TREE_CODE (type) == FUNCTION_TYPE
13366 || TREE_CODE (type) == METHOD_TYPE))
13367 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
13368 NULL_TREE,
13369 TYPE_ATTRIBUTES (type));
13372 /* Return a reference suitable for calling a function with the
13373 longcall attribute. */
13375 struct rtx_def *
13376 rs6000_longcall_ref (call_ref)
13377 rtx call_ref;
13379 const char *call_name;
13380 tree node;
13382 if (GET_CODE (call_ref) != SYMBOL_REF)
13383 return call_ref;
13385 /* System V adds '.' to the internal name, so skip them. */
13386 call_name = XSTR (call_ref, 0);
13387 if (*call_name == '.')
13389 while (*call_name == '.')
13390 call_name++;
13392 node = get_identifier (call_name);
13393 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
13396 return force_reg (Pmode, call_ref);
13399 #ifdef USING_ELFOS_H
13401 /* A C statement or statements to switch to the appropriate section
13402 for output of RTX in mode MODE. You can assume that RTX is some
13403 kind of constant in RTL. The argument MODE is redundant except in
13404 the case of a `const_int' rtx. Select the section by calling
13405 `text_section' or one of the alternatives for other sections.
13407 Do not define this macro if you put all constants in the read-only
13408 data section. */
13410 static void
13411 rs6000_elf_select_rtx_section (mode, x, align)
13412 enum machine_mode mode;
13413 rtx x;
13414 unsigned HOST_WIDE_INT align;
13416 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
13417 toc_section ();
13418 else
13419 default_elf_select_rtx_section (mode, x, align);
13422 /* A C statement or statements to switch to the appropriate
13423 section for output of DECL. DECL is either a `VAR_DECL' node
13424 or a constant of some sort. RELOC indicates whether forming
13425 the initial value of DECL requires link-time relocations. */
13427 static void
13428 rs6000_elf_select_section (decl, reloc, align)
13429 tree decl;
13430 int reloc;
13431 unsigned HOST_WIDE_INT align;
13433 /* Pretend that we're always building for a shared library when
13434 ABI_AIX, because otherwise we end up with dynamic relocations
13435 in read-only sections. This happens for function pointers,
13436 references to vtables in typeinfo, and probably other cases. */
13437 default_elf_select_section_1 (decl, reloc, align,
13438 flag_pic || DEFAULT_ABI == ABI_AIX);
13441 /* A C statement to build up a unique section name, expressed as a
13442 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
13443 RELOC indicates whether the initial value of EXP requires
13444 link-time relocations. If you do not define this macro, GCC will use
13445 the symbol name prefixed by `.' as the section name. Note - this
13446 macro can now be called for uninitialized data items as well as
13447 initialized data and functions. */
13449 static void
13450 rs6000_elf_unique_section (decl, reloc)
13451 tree decl;
13452 int reloc;
13454 /* As above, pretend that we're always building for a shared library
13455 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
13456 default_unique_section_1 (decl, reloc,
13457 flag_pic || DEFAULT_ABI == ABI_AIX);
13460 /* For a SYMBOL_REF, set generic flags and then perform some
13461 target-specific processing.
13463 When the AIX ABI is requested on a non-AIX system, replace the
13464 function name with the real name (with a leading .) rather than the
13465 function descriptor name. This saves a lot of overriding code to
13466 read the prefixes. */
13468 static void
13469 rs6000_elf_encode_section_info (decl, rtl, first)
13470 tree decl;
13471 rtx rtl;
13472 int first;
13474 default_encode_section_info (decl, rtl, first);
13476 if (first
13477 && TREE_CODE (decl) == FUNCTION_DECL
13478 && !TARGET_AIX
13479 && DEFAULT_ABI == ABI_AIX)
13481 rtx sym_ref = XEXP (rtl, 0);
13482 size_t len = strlen (XSTR (sym_ref, 0));
13483 char *str = alloca (len + 2);
13484 str[0] = '.';
13485 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
13486 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
13490 static bool
13491 rs6000_elf_in_small_data_p (decl)
13492 tree decl;
13494 if (rs6000_sdata == SDATA_NONE)
13495 return false;
13497 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
13499 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
13500 if (strcmp (section, ".sdata") == 0
13501 || strcmp (section, ".sdata2") == 0
13502 || strcmp (section, ".sbss") == 0
13503 || strcmp (section, ".sbss2") == 0
13504 || strcmp (section, ".PPC.EMB.sdata0") == 0
13505 || strcmp (section, ".PPC.EMB.sbss0") == 0)
13506 return true;
13508 else
13510 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
13512 if (size > 0
13513 && size <= g_switch_value
13514 /* If it's not public, and we're not going to reference it there,
13515 there's no need to put it in the small data section. */
13516 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
13517 return true;
13520 return false;
13523 #endif /* USING_ELFOS_H */
13526 /* Return a REG that occurs in ADDR with coefficient 1.
13527 ADDR can be effectively incremented by incrementing REG.
13529 r0 is special and we must not select it as an address
13530 register by this routine since our caller will try to
13531 increment the returned register via an "la" instruction. */
13533 struct rtx_def *
13534 find_addr_reg (addr)
13535 rtx addr;
13537 while (GET_CODE (addr) == PLUS)
13539 if (GET_CODE (XEXP (addr, 0)) == REG
13540 && REGNO (XEXP (addr, 0)) != 0)
13541 addr = XEXP (addr, 0);
13542 else if (GET_CODE (XEXP (addr, 1)) == REG
13543 && REGNO (XEXP (addr, 1)) != 0)
13544 addr = XEXP (addr, 1);
13545 else if (CONSTANT_P (XEXP (addr, 0)))
13546 addr = XEXP (addr, 1);
13547 else if (CONSTANT_P (XEXP (addr, 1)))
13548 addr = XEXP (addr, 0);
13549 else
13550 abort ();
13552 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
13553 return addr;
13554 abort ();
13557 void
13558 rs6000_fatal_bad_address (op)
13559 rtx op;
13561 fatal_insn ("bad address", op);
13564 #if TARGET_MACHO
#if 0
/* Returns 1 if OP is either a symbol reference or a sum of a symbol
   reference and a constant.  (Currently unused, hence disabled.)  */

int
symbolic_operand (op)
     rtx op;
{
  switch (GET_CODE (op))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return 1;

    case CONST:
      /* Accept (const (plus (symbol_ref|label_ref) (const_int))) or a
         bare (const (symbol_ref)).  Parentheses made explicit; same
         parse as the historical `||' / `&&' mix.  */
      op = XEXP (op, 0);
      return (GET_CODE (op) == SYMBOL_REF
              || ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
                   || GET_CODE (XEXP (op, 0)) == LABEL_REF)
                  && GET_CODE (XEXP (op, 1)) == CONST_INT));

    default:
      return 0;
    }
}
#endif
13591 #ifdef RS6000_LONG_BRANCH
13593 static tree stub_list = 0;
13595 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
13596 procedure calls to the linked list. */
13598 void
13599 add_compiler_stub (label_name, function_name, line_number)
13600 tree label_name;
13601 tree function_name;
13602 int line_number;
13604 tree stub = build_tree_list (function_name, label_name);
13605 TREE_TYPE (stub) = build_int_2 (line_number, 0);
13606 TREE_CHAIN (stub) = stub_list;
13607 stub_list = stub;
13610 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
13611 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
13612 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
13614 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
13615 handling procedure calls from the linked list and initializes the
13616 linked list. */
13618 void
13619 output_compiler_stub ()
13621 char tmp_buf[256];
13622 char label_buf[256];
13623 tree stub;
13625 if (!flag_pic)
13626 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
13628 fprintf (asm_out_file,
13629 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));
13631 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
13632 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
13633 fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
13634 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
13636 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
13637 strcpy (label_buf,
13638 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
13639 else
13641 label_buf[0] = '_';
13642 strcpy (label_buf+1,
13643 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
13646 strcpy (tmp_buf, "lis r12,hi16(");
13647 strcat (tmp_buf, label_buf);
13648 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
13649 strcat (tmp_buf, label_buf);
13650 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
13651 output_asm_insn (tmp_buf, 0);
13653 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
13654 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
13655 fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
13656 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
13659 stub_list = 0;
13662 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
13663 already there or not. */
13666 no_previous_def (function_name)
13667 tree function_name;
13669 tree stub;
13670 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
13671 if (function_name == STUB_FUNCTION_NAME (stub))
13672 return 0;
13673 return 1;
13676 /* GET_PREV_LABEL gets the label name from the previous definition of
13677 the function. */
13679 tree
13680 get_prev_label (function_name)
13681 tree function_name;
13683 tree stub;
13684 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
13685 if (function_name == STUB_FUNCTION_NAME (stub))
13686 return STUB_LABEL_NAME (stub);
13687 return 0;
13690 /* INSN is either a function call or a millicode call. It may have an
13691 unconditional jump in its delay slot.
13693 CALL_DEST is the routine we are calling. */
13695 char *
13696 output_call (insn, call_dest, operand_number)
13697 rtx insn;
13698 rtx call_dest;
13699 int operand_number;
13701 static char buf[256];
13702 if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
13704 tree labelname;
13705 tree funname = get_identifier (XSTR (call_dest, 0));
13707 if (no_previous_def (funname))
13709 int line_number = 0;
13710 rtx label_rtx = gen_label_rtx ();
13711 char *label_buf, temp_buf[256];
13712 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
13713 CODE_LABEL_NUMBER (label_rtx));
13714 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
13715 labelname = get_identifier (label_buf);
13716 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
13717 if (insn)
13718 line_number = NOTE_LINE_NUMBER (insn);
13719 add_compiler_stub (labelname, funname, line_number);
13721 else
13722 labelname = get_prev_label (funname);
13724 sprintf (buf, "jbsr %%z%d,%.246s",
13725 operand_number, IDENTIFIER_POINTER (labelname));
13726 return buf;
13728 else
13730 sprintf (buf, "bl %%z%d", operand_number);
13731 return buf;
13735 #endif /* RS6000_LONG_BRANCH */
/* Build a local label "L<N>$<SYMBOL>" into BUF, preserving/adding
   quoting when the symbol is or must be quoted.  LENGTH is unused but
   kept for parity with the sibling GEN_*_NAME_FOR_SYMBOL macros.  */
#define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF, SYMBOL, LENGTH, N)	\
  do								\
    {								\
      const char *const symbol_ = (SYMBOL);			\
      char *buffer_ = (BUF);					\
      if (symbol_[0] == '"')					\
        sprintf (buffer_, "\"L%d$%s", (N), symbol_ + 1);	\
      else if (name_needs_quotes (symbol_))			\
        sprintf (buffer_, "\"L%d$%s\"", (N), symbol_);		\
      else							\
        sprintf (buffer_, "L%d$%s", (N), symbol_);		\
    }								\
  while (0)
13756 /* Generate PIC and indirect symbol stubs. */
13758 void
13759 machopic_output_stub (file, symb, stub)
13760 FILE *file;
13761 const char *symb, *stub;
13763 unsigned int length;
13764 char *symbol_name, *lazy_ptr_name;
13765 char *local_label_0;
13766 static int label = 0;
13768 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
13769 symb = (*targetm.strip_name_encoding) (symb);
13771 label += 1;
13773 length = strlen (symb);
13774 symbol_name = alloca (length + 32);
13775 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
13777 lazy_ptr_name = alloca (length + 32);
13778 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
13780 local_label_0 = alloca (length + 32);
13781 GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);
13783 if (flag_pic == 2)
13784 machopic_picsymbol_stub1_section ();
13785 else
13786 machopic_symbol_stub1_section ();
13787 fprintf (file, "\t.align 2\n");
13789 fprintf (file, "%s:\n", stub);
13790 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
13792 if (flag_pic == 2)
13794 fprintf (file, "\tmflr r0\n");
13795 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
13796 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
13797 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
13798 lazy_ptr_name, local_label_0);
13799 fprintf (file, "\tmtlr r0\n");
13800 fprintf (file, "\tlwzu r12,lo16(%s-%s)(r11)\n",
13801 lazy_ptr_name, local_label_0);
13802 fprintf (file, "\tmtctr r12\n");
13803 fprintf (file, "\tbctr\n");
13805 else
13807 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
13808 fprintf (file, "\tlwzu r12,lo16(%s)(r11)\n", lazy_ptr_name);
13809 fprintf (file, "\tmtctr r12\n");
13810 fprintf (file, "\tbctr\n");
13813 machopic_lazy_symbol_ptr_section ();
13814 fprintf (file, "%s:\n", lazy_ptr_name);
13815 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
13816 fprintf (file, "\t.long dyld_stub_binding_helper\n");
13819 /* Legitimize PIC addresses. If the address is already
13820 position-independent, we return ORIG. Newly generated
13821 position-independent addresses go into a reg. This is REG if non
13822 zero, otherwise we allocate register(s) as necessary. */
13824 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
13827 rs6000_machopic_legitimize_pic_address (orig, mode, reg)
13828 rtx orig;
13829 enum machine_mode mode;
13830 rtx reg;
13832 rtx base, offset;
13834 if (reg == NULL && ! reload_in_progress && ! reload_completed)
13835 reg = gen_reg_rtx (Pmode);
13837 if (GET_CODE (orig) == CONST)
13839 if (GET_CODE (XEXP (orig, 0)) == PLUS
13840 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
13841 return orig;
13843 if (GET_CODE (XEXP (orig, 0)) == PLUS)
13845 base =
13846 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
13847 Pmode, reg);
13848 offset =
13849 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
13850 Pmode, reg);
13852 else
13853 abort ();
13855 if (GET_CODE (offset) == CONST_INT)
13857 if (SMALL_INT (offset))
13858 return plus_constant (base, INTVAL (offset));
13859 else if (! reload_in_progress && ! reload_completed)
13860 offset = force_reg (Pmode, offset);
13861 else
13863 rtx mem = force_const_mem (Pmode, orig);
13864 return machopic_legitimize_pic_address (mem, Pmode, reg);
13867 return gen_rtx (PLUS, Pmode, base, offset);
13870 /* Fall back on generic machopic code. */
13871 return machopic_legitimize_pic_address (orig, mode, reg);
/* This is just a placeholder to make linking work without having to
   add this to the generic Darwin EXTRA_SECTIONS.  If -mcall-aix is
   ever needed for Darwin (not too likely!) this would have to get a
   real definition.  */

void
toc_section ()
{
  /* Intentionally empty on Darwin.  */
}
13884 #endif /* TARGET_MACHO */
13886 #if TARGET_ELF
13887 static unsigned int
13888 rs6000_elf_section_type_flags (decl, name, reloc)
13889 tree decl;
13890 const char *name;
13891 int reloc;
13893 unsigned int flags
13894 = default_section_type_flags_1 (decl, name, reloc,
13895 flag_pic || DEFAULT_ABI == ABI_AIX);
13897 if (TARGET_RELOCATABLE)
13898 flags |= SECTION_WRITE;
13900 return flags;
13903 /* Record an element in the table of global constructors. SYMBOL is
13904 a SYMBOL_REF of the function to be called; PRIORITY is a number
13905 between 0 and MAX_INIT_PRIORITY.
13907 This differs from default_named_section_asm_out_constructor in
13908 that we have special handling for -mrelocatable. */
13910 static void
13911 rs6000_elf_asm_out_constructor (symbol, priority)
13912 rtx symbol;
13913 int priority;
13915 const char *section = ".ctors";
13916 char buf[16];
13918 if (priority != DEFAULT_INIT_PRIORITY)
13920 sprintf (buf, ".ctors.%.5u",
13921 /* Invert the numbering so the linker puts us in the proper
13922 order; constructors are run from right to left, and the
13923 linker sorts in increasing order. */
13924 MAX_INIT_PRIORITY - priority);
13925 section = buf;
13928 named_section_flags (section, SECTION_WRITE);
13929 assemble_align (POINTER_SIZE);
13931 if (TARGET_RELOCATABLE)
13933 fputs ("\t.long (", asm_out_file);
13934 output_addr_const (asm_out_file, symbol);
13935 fputs (")@fixup\n", asm_out_file);
13937 else
13938 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
13941 static void
13942 rs6000_elf_asm_out_destructor (symbol, priority)
13943 rtx symbol;
13944 int priority;
13946 const char *section = ".dtors";
13947 char buf[16];
13949 if (priority != DEFAULT_INIT_PRIORITY)
13951 sprintf (buf, ".dtors.%.5u",
13952 /* Invert the numbering so the linker puts us in the proper
13953 order; constructors are run from right to left, and the
13954 linker sorts in increasing order. */
13955 MAX_INIT_PRIORITY - priority);
13956 section = buf;
13959 named_section_flags (section, SECTION_WRITE);
13960 assemble_align (POINTER_SIZE);
13962 if (TARGET_RELOCATABLE)
13964 fputs ("\t.long (", asm_out_file);
13965 output_addr_const (asm_out_file, symbol);
13966 fputs (")@fixup\n", asm_out_file);
13968 else
13969 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
13971 #endif
13973 #if TARGET_XCOFF
13974 static void
13975 rs6000_xcoff_asm_globalize_label (stream, name)
13976 FILE *stream;
13977 const char *name;
13979 fputs (GLOBAL_ASM_OP, stream);
13980 RS6000_OUTPUT_BASENAME (stream, name);
13981 putc ('\n', stream);
13984 static void
13985 rs6000_xcoff_asm_named_section (name, flags)
13986 const char *name;
13987 unsigned int flags;
13989 int smclass;
13990 static const char * const suffix[3] = { "PR", "RO", "RW" };
13992 if (flags & SECTION_CODE)
13993 smclass = 0;
13994 else if (flags & SECTION_WRITE)
13995 smclass = 2;
13996 else
13997 smclass = 1;
13999 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
14000 (flags & SECTION_CODE) ? "." : "",
14001 name, suffix[smclass], flags & SECTION_ENTSIZE);
14004 static void
14005 rs6000_xcoff_select_section (decl, reloc, align)
14006 tree decl;
14007 int reloc;
14008 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
14010 if (decl_readonly_section_1 (decl, reloc, 1))
14012 if (TREE_PUBLIC (decl))
14013 read_only_data_section ();
14014 else
14015 read_only_private_data_section ();
14017 else
14019 if (TREE_PUBLIC (decl))
14020 data_section ();
14021 else
14022 private_data_section ();
14026 static void
14027 rs6000_xcoff_unique_section (decl, reloc)
14028 tree decl;
14029 int reloc ATTRIBUTE_UNUSED;
14031 const char *name;
14033 /* Use select_section for private and uninitialized data. */
14034 if (!TREE_PUBLIC (decl)
14035 || DECL_COMMON (decl)
14036 || DECL_INITIAL (decl) == NULL_TREE
14037 || DECL_INITIAL (decl) == error_mark_node
14038 || (flag_zero_initialized_in_bss
14039 && initializer_zerop (DECL_INITIAL (decl))))
14040 return;
14042 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
14043 name = (*targetm.strip_name_encoding) (name);
14044 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
14047 /* Select section for constant in constant pool.
14049 On RS/6000, all constants are in the private read-only data area.
14050 However, if this is being placed in the TOC it must be output as a
14051 toc entry. */
14053 static void
14054 rs6000_xcoff_select_rtx_section (mode, x, align)
14055 enum machine_mode mode;
14056 rtx x;
14057 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
14059 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
14060 toc_section ();
14061 else
14062 read_only_private_data_section ();
/* Remove any trailing [DS] or the like from the symbol name.  Strips
   a leading '*' first; returns the (possibly GC-allocated) stripped
   name.  */

static const char *
rs6000_xcoff_strip_name_encoding (name)
     const char *name;
{
  size_t len;

  if (*name == '*')
    name++;
  len = strlen (name);
  /* Require at least "[XX]" before stripping: with LEN == 0 the
     NAME[LEN - 1] test read out of bounds, and with LEN < 4 the
     LEN - 4 below wrapped around (size_t).  */
  if (len >= 4 && name[len - 1] == ']')
    return ggc_alloc_string (name, len - 4);
  else
    return name;
}
14081 /* Section attributes. AIX is always PIC. */
14083 static unsigned int
14084 rs6000_xcoff_section_type_flags (decl, name, reloc)
14085 tree decl;
14086 const char *name;
14087 int reloc;
14089 unsigned int align;
14090 unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);
14092 /* Align to at least UNIT size. */
14093 if (flags & SECTION_CODE)
14094 align = MIN_UNITS_PER_WORD;
14095 else
14096 /* Increase alignment of large objects if not already stricter. */
14097 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
14098 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
14099 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
14101 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
14103 #endif /* TARGET_XCOFF */
14105 #if TARGET_MACHO
14106 /* Cross-module name binding. Darwin does not support overriding
14107 functions at dynamic-link time. */
14109 static bool
14110 rs6000_binds_local_p (decl)
14111 tree decl;
14113 return default_binds_local_p_1 (decl, 0);
14115 #endif
14117 /* Compute a (partial) cost for rtx X. Return true if the complete
14118 cost has been computed, and false if subexpressions should be
14119 scanned. In either case, *TOTAL contains the cost result. */
14121 static bool
14122 rs6000_rtx_costs (x, code, outer_code, total)
14123 rtx x;
14124 int code, outer_code ATTRIBUTE_UNUSED;
14125 int *total;
14127 switch (code)
14129 /* On the RS/6000, if it is valid in the insn, it is free.
14130 So this always returns 0. */
14131 case CONST_INT:
14132 case CONST:
14133 case LABEL_REF:
14134 case SYMBOL_REF:
14135 case CONST_DOUBLE:
14136 case HIGH:
14137 *total = 0;
14138 return true;
14140 case PLUS:
14141 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
14142 && ((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1))
14143 + 0x8000) >= 0x10000)
14144 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
14145 ? COSTS_N_INSNS (2)
14146 : COSTS_N_INSNS (1));
14147 return true;
14149 case AND:
14150 case IOR:
14151 case XOR:
14152 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
14153 && (INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff)) != 0
14154 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
14155 ? COSTS_N_INSNS (2)
14156 : COSTS_N_INSNS (1));
14157 return true;
14159 case MULT:
14160 if (optimize_size)
14162 *total = COSTS_N_INSNS (2);
14163 return true;
14165 switch (rs6000_cpu)
14167 case PROCESSOR_RIOS1:
14168 case PROCESSOR_PPC405:
14169 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14170 ? COSTS_N_INSNS (5)
14171 : (INTVAL (XEXP (x, 1)) >= -256
14172 && INTVAL (XEXP (x, 1)) <= 255)
14173 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
14174 return true;
14176 case PROCESSOR_PPC440:
14177 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14178 ? COSTS_N_INSNS (3)
14179 : COSTS_N_INSNS (2));
14180 return true;
14182 case PROCESSOR_RS64A:
14183 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14184 ? GET_MODE (XEXP (x, 1)) != DImode
14185 ? COSTS_N_INSNS (20) : COSTS_N_INSNS (34)
14186 : (INTVAL (XEXP (x, 1)) >= -256
14187 && INTVAL (XEXP (x, 1)) <= 255)
14188 ? COSTS_N_INSNS (8) : COSTS_N_INSNS (12));
14189 return true;
14191 case PROCESSOR_RIOS2:
14192 case PROCESSOR_MPCCORE:
14193 case PROCESSOR_PPC604e:
14194 *total = COSTS_N_INSNS (2);
14195 return true;
14197 case PROCESSOR_PPC601:
14198 *total = COSTS_N_INSNS (5);
14199 return true;
14201 case PROCESSOR_PPC603:
14202 case PROCESSOR_PPC7400:
14203 case PROCESSOR_PPC750:
14204 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14205 ? COSTS_N_INSNS (5)
14206 : (INTVAL (XEXP (x, 1)) >= -256
14207 && INTVAL (XEXP (x, 1)) <= 255)
14208 ? COSTS_N_INSNS (2) : COSTS_N_INSNS (3));
14209 return true;
14211 case PROCESSOR_PPC7450:
14212 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14213 ? COSTS_N_INSNS (4)
14214 : COSTS_N_INSNS (3));
14215 return true;
14217 case PROCESSOR_PPC403:
14218 case PROCESSOR_PPC604:
14219 case PROCESSOR_PPC8540:
14220 *total = COSTS_N_INSNS (4);
14221 return true;
14223 case PROCESSOR_PPC620:
14224 case PROCESSOR_PPC630:
14225 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14226 ? GET_MODE (XEXP (x, 1)) != DImode
14227 ? COSTS_N_INSNS (5) : COSTS_N_INSNS (7)
14228 : (INTVAL (XEXP (x, 1)) >= -256
14229 && INTVAL (XEXP (x, 1)) <= 255)
14230 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
14231 return true;
14233 case PROCESSOR_POWER4:
14234 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14235 ? GET_MODE (XEXP (x, 1)) != DImode
14236 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4)
14237 : COSTS_N_INSNS (2));
14238 return true;
14240 default:
14241 abort ();
14244 case DIV:
14245 case MOD:
14246 if (GET_CODE (XEXP (x, 1)) == CONST_INT
14247 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
14249 *total = COSTS_N_INSNS (2);
14250 return true;
14252 /* FALLTHRU */
14254 case UDIV:
14255 case UMOD:
14256 switch (rs6000_cpu)
14258 case PROCESSOR_RIOS1:
14259 *total = COSTS_N_INSNS (19);
14260 return true;
14262 case PROCESSOR_RIOS2:
14263 *total = COSTS_N_INSNS (13);
14264 return true;
14266 case PROCESSOR_RS64A:
14267 *total = (GET_MODE (XEXP (x, 1)) != DImode
14268 ? COSTS_N_INSNS (65)
14269 : COSTS_N_INSNS (67));
14270 return true;
14272 case PROCESSOR_MPCCORE:
14273 *total = COSTS_N_INSNS (6);
14274 return true;
14276 case PROCESSOR_PPC403:
14277 *total = COSTS_N_INSNS (33);
14278 return true;
14280 case PROCESSOR_PPC405:
14281 *total = COSTS_N_INSNS (35);
14282 return true;
14284 case PROCESSOR_PPC440:
14285 *total = COSTS_N_INSNS (34);
14286 return true;
14288 case PROCESSOR_PPC601:
14289 *total = COSTS_N_INSNS (36);
14290 return true;
14292 case PROCESSOR_PPC603:
14293 *total = COSTS_N_INSNS (37);
14294 return true;
14296 case PROCESSOR_PPC604:
14297 case PROCESSOR_PPC604e:
14298 *total = COSTS_N_INSNS (20);
14299 return true;
14301 case PROCESSOR_PPC620:
14302 case PROCESSOR_PPC630:
14303 *total = (GET_MODE (XEXP (x, 1)) != DImode
14304 ? COSTS_N_INSNS (21)
14305 : COSTS_N_INSNS (37));
14306 return true;
14308 case PROCESSOR_PPC750:
14309 case PROCESSOR_PPC8540:
14310 case PROCESSOR_PPC7400:
14311 *total = COSTS_N_INSNS (19);
14312 return true;
14314 case PROCESSOR_PPC7450:
14315 *total = COSTS_N_INSNS (23);
14316 return true;
14318 case PROCESSOR_POWER4:
14319 *total = (GET_MODE (XEXP (x, 1)) != DImode
14320 ? COSTS_N_INSNS (18)
14321 : COSTS_N_INSNS (34));
14322 return true;
14324 default:
14325 abort ();
14328 case FFS:
14329 *total = COSTS_N_INSNS (4);
14330 return true;
14332 case MEM:
14333 /* MEM should be slightly more expensive than (plus (reg) (const)) */
14334 *total = 5;
14335 return true;
14337 default:
14338 return false;
14342 /* A C expression returning the cost of moving data from a register of class
14343 CLASS1 to one of CLASS2. */
14346 rs6000_register_move_cost (mode, from, to)
14347 enum machine_mode mode;
14348 enum reg_class from, to;
14350 /* Moves from/to GENERAL_REGS. */
14351 if (reg_classes_intersect_p (to, GENERAL_REGS)
14352 || reg_classes_intersect_p (from, GENERAL_REGS))
14354 if (! reg_classes_intersect_p (to, GENERAL_REGS))
14355 from = to;
14357 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
14358 return (rs6000_memory_move_cost (mode, from, 0)
14359 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
14361 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift...*/
14362 else if (from == CR_REGS)
14363 return 4;
14365 else
14366 /* A move will cost one instruction per GPR moved. */
14367 return 2 * HARD_REGNO_NREGS (0, mode);
14370 /* Moving between two similar registers is just one instruction. */
14371 else if (reg_classes_intersect_p (to, from))
14372 return mode == TFmode ? 4 : 2;
14374 /* Everything else has to go through GENERAL_REGS. */
14375 else
14376 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
14377 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
14380 /* A C expressions returning the cost of moving data of MODE from a register to
14381 or from memory. */
14384 rs6000_memory_move_cost (mode, class, in)
14385 enum machine_mode mode;
14386 enum reg_class class;
14387 int in ATTRIBUTE_UNUSED;
14389 if (reg_classes_intersect_p (class, GENERAL_REGS))
14390 return 4 * HARD_REGNO_NREGS (0, mode);
14391 else if (reg_classes_intersect_p (class, FLOAT_REGS))
14392 return 4 * HARD_REGNO_NREGS (32, mode);
14393 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
14394 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO, mode);
14395 else
14396 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
14399 /* Define how to find the value returned by a function.
14400 VALTYPE is the data type of the value (as a tree).
14401 If the precise function being called is known, FUNC is its FUNCTION_DECL;
14402 otherwise, FUNC is 0.
14404 On the SPE, both FPs and vectors are returned in r3.
14406 On RS/6000 an integer value is in r3 and a floating-point value is in
14407 fp1, unless -msoft-float. */
14410 rs6000_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
14412 enum machine_mode mode;
14413 unsigned int regno = GP_ARG_RETURN;
14415 if ((INTEGRAL_TYPE_P (valtype)
14416 && TYPE_PRECISION (valtype) < BITS_PER_WORD)
14417 || POINTER_TYPE_P (valtype))
14418 mode = word_mode;
14419 else
14420 mode = TYPE_MODE (valtype);
14422 if (TREE_CODE (valtype) == REAL_TYPE)
14424 if (TARGET_HARD_FLOAT && TARGET_FPRS)
14425 regno = FP_ARG_RETURN;
14426 else if (TARGET_SPE_ABI && !TARGET_FPRS)
14427 regno = GP_ARG_RETURN;
14429 else if (TARGET_ALTIVEC && TREE_CODE (valtype) == VECTOR_TYPE)
14430 regno = ALTIVEC_ARG_RETURN;
14431 else
14432 regno = GP_ARG_RETURN;
14434 return gen_rtx_REG (mode, regno);
14437 /* Return true if TYPE is of type __ev64_opaque__. */
14439 static bool
14440 is_ev64_opaque_type (type)
14441 tree type;
14443 return (TARGET_SPE
14444 && (type == opaque_V2SI_type_node
14445 || type == opaque_V2SF_type_node
14446 || type == opaque_p_V2SI_type_node
14447 || (TREE_CODE (type) == VECTOR_TYPE
14448 && TYPE_NAME (type)
14449 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
14450 && DECL_NAME (TYPE_NAME (type))
14451 && strcmp (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type))),
14452 "__ev64_opaque__") == 0)));
14455 static rtx
14456 rs6000_dwarf_register_span (reg)
14457 rtx reg;
14459 unsigned regno;
14461 if (!TARGET_SPE || !SPE_VECTOR_MODE (GET_MODE (reg)))
14462 return NULL_RTX;
14464 regno = REGNO (reg);
14466 /* The duality of the SPE register size wreaks all kinds of havoc.
14467 This is a way of distinguishing r0 in 32-bits from r0 in
14468 64-bits. */
14469 return
14470 gen_rtx_PARALLEL (VOIDmode,
14471 BYTES_BIG_ENDIAN
14472 ? gen_rtvec (2,
14473 gen_rtx_REG (SImode, regno + 1200),
14474 gen_rtx_REG (SImode, regno))
14475 : gen_rtvec (2,
14476 gen_rtx_REG (SImode, regno),
14477 gen_rtx_REG (SImode, regno + 1200)));
14480 #include "gt-rs6000.h"