PR target/14547
[official-gcc.git] / gcc / config / rs6000 / rs6000.c
blobd53c32eea9351bbd64e13577bd980d55893caf49
1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 2, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the
20 Free Software Foundation, 59 Temple Place - Suite 330, Boston,
21 MA 02111-1307, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "rtl.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "real.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
34 #include "flags.h"
35 #include "recog.h"
36 #include "obstack.h"
37 #include "tree.h"
38 #include "expr.h"
39 #include "optabs.h"
40 #include "except.h"
41 #include "function.h"
42 #include "output.h"
43 #include "basic-block.h"
44 #include "integrate.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "hashtab.h"
48 #include "tm_p.h"
49 #include "target.h"
50 #include "target-def.h"
51 #include "langhooks.h"
52 #include "reload.h"
53 #include "cfglayout.h"
54 #include "sched-int.h"
55 #if TARGET_XCOFF
56 #include "xcoffout.h" /* get declarations of xcoff_*_section_name */
57 #endif
59 #ifndef TARGET_NO_PROTOTYPE
60 #define TARGET_NO_PROTOTYPE 0
61 #endif
/* True iff the vector constant can be synthesized from a single splatted
   element: N must fit in a 5-bit signed immediate [-16, 15] and every
   element of X must be identical (checked by easy_vector_same) --
   presumably matching the vspltis* immediate range; confirm against
   the AltiVec splat-immediate instructions.  */
63 #define EASY_VECTOR_15(n, x, y) ((n) >= -16 && (n) <= 15 \
64 && easy_vector_same (x, y))
/* Like EASY_VECTOR_15, but for even N in [0x10, 0x1e]: the name suggests
   such a constant is built by splatting N/2 and adding the register to
   itself -- TODO confirm against the expander that consumes this.  */
66 #define EASY_VECTOR_15_ADD_SELF(n, x, y) ((n) >= 0x10 && (n) <= 0x1e \
67 && !((n) & 1) \
68 && easy_vector_same (x, y))
/* Classic function-like min/max macros.  NOTE: each argument is
   evaluated twice, so callers must not pass expressions with side
   effects (e.g. min (i++, n)).  */
70 #define min(A,B) ((A) < (B) ? (A) : (B))
71 #define max(A,B) ((A) > (B) ? (A) : (B))
73 /* Structure used to define the rs6000 stack */
74 typedef struct rs6000_stack {
75 int first_gp_reg_save; /* first callee saved GP register used */
76 int first_fp_reg_save; /* first callee saved FP register used */
77 int first_altivec_reg_save; /* first callee saved AltiVec register used */
78 int lr_save_p; /* true if the link reg needs to be saved */
79 int cr_save_p; /* true if the CR reg needs to be saved */
80 unsigned int vrsave_mask; /* mask of vec registers to save */
81 int toc_save_p; /* true if the TOC needs to be saved */
82 int push_p; /* true if we need to allocate stack space */
83 int calls_p; /* true if the function makes any calls */
84 enum rs6000_abi abi; /* which ABI to use */
85 int gp_save_offset; /* offset to save GP regs from initial SP */
86 int fp_save_offset; /* offset to save FP regs from initial SP */
87 int altivec_save_offset; /* offset to save AltiVec regs from initial SP */
88 int lr_save_offset; /* offset to save LR from initial SP */
89 int cr_save_offset; /* offset to save CR from initial SP */
90 int vrsave_save_offset; /* offset to save VRSAVE from initial SP */
91 int spe_gp_save_offset; /* offset to save spe 64-bit gprs */
92 int toc_save_offset; /* offset to save the TOC pointer */
93 int varargs_save_offset; /* offset to save the varargs registers */
94 int ehrd_offset; /* offset to EH return data */
95 int reg_size; /* register size (4 or 8) */
96 int varargs_size; /* size to hold V.4 args passed in regs */
97 HOST_WIDE_INT vars_size; /* variable save area size */
98 int parm_size; /* outgoing parameter size */
99 int save_size; /* save area size */
100 int fixed_size; /* fixed size of stack frame */
101 int gp_size; /* size of saved GP registers */
102 int fp_size; /* size of saved FP registers */
103 int altivec_size; /* size of saved AltiVec registers */
104 int cr_size; /* size to hold CR if not in save_size */
105 int lr_size; /* size to hold LR if not in save_size */
106 int vrsave_size; /* size to hold VRSAVE if not in save_size */
107 int altivec_padding_size; /* size of altivec alignment padding if
108 not in save_size */
109 int spe_gp_size; /* size of 64-bit GPR save size for SPE */
110 int spe_padding_size; /* SPE save-area alignment padding, by analogy
                             with altivec_padding_size -- confirm */
111 int toc_size; /* size to hold TOC if not in save_size */
112 HOST_WIDE_INT total_size; /* total bytes allocated for stack */
113 int spe_64bit_regs_used; /* nonzero if 64-bit SPE regs are in use;
                                see spe_func_has_64bit_regs_p */
114 } rs6000_stack_t;
116 /* Target cpu type */
118 enum processor_type rs6000_cpu;
119 struct rs6000_cpu_select rs6000_select[3] =
121 /* switch name, tune arch */
122 { (const char *)0, "--with-cpu=", 1, 1 },
123 { (const char *)0, "-mcpu=", 1, 1 },
124 { (const char *)0, "-mtune=", 1, 0 },
127 /* Support adjust_priority scheduler hook
128 and -mprioritize-restricted-insns= option. */
129 const char *rs6000_sched_restricted_insns_priority_str;
130 int rs6000_sched_restricted_insns_priority;
132 /* Support for -msched-costly-dep option. */
133 const char *rs6000_sched_costly_dep_str;
134 enum rs6000_dependence_cost rs6000_sched_costly_dep;
136 /* Support for -minsert-sched-nops option. */
137 const char *rs6000_sched_insert_nops_str;
138 enum rs6000_nop_insertion rs6000_sched_insert_nops;
140 /* Size of long double */
141 const char *rs6000_long_double_size_string;
142 int rs6000_long_double_type_size;
144 /* Whether -mabi=altivec has appeared */
145 int rs6000_altivec_abi;
147 /* Whether VRSAVE instructions should be generated. */
148 int rs6000_altivec_vrsave;
150 /* String from -mvrsave= option. */
151 const char *rs6000_altivec_vrsave_string;
153 /* Nonzero if we want SPE ABI extensions. */
154 int rs6000_spe_abi;
156 /* Whether isel instructions should be generated. */
157 int rs6000_isel;
159 /* Whether SPE simd instructions should be generated. */
160 int rs6000_spe;
162 /* Nonzero if floating point operations are done in the GPRs. */
163 int rs6000_float_gprs = 0;
165 /* String from -mfloat-gprs=. */
166 const char *rs6000_float_gprs_string;
168 /* String from -misel=. */
169 const char *rs6000_isel_string;
171 /* String from -mspe=. */
172 const char *rs6000_spe_string;
174 /* Set to nonzero once AIX common-mode calls have been defined. */
175 static GTY(()) int common_mode_defined;
177 /* Save information from a "cmpxx" operation until the branch or scc is
178 emitted. */
179 rtx rs6000_compare_op0, rs6000_compare_op1;
180 int rs6000_compare_fp_p;
182 /* Label number of label created for -mrelocatable, to call to so we can
183 get the address of the GOT section */
184 int rs6000_pic_labelno;
186 #ifdef USING_ELFOS_H
187 /* Which abi to adhere to */
188 const char *rs6000_abi_name;
190 /* Semantics of the small data area */
191 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
193 /* Which small data model to use */
194 const char *rs6000_sdata_name = (char *)0;
196 /* Counter for labels which are to be placed in .fixup. */
197 int fixuplabelno = 0;
198 #endif
200 /* Bit size of immediate TLS offsets and string from which it is decoded. */
201 int rs6000_tls_size = 32;
202 const char *rs6000_tls_size_string;
204 /* ABI enumeration available for subtarget to use. */
205 enum rs6000_abi rs6000_current_abi;
207 /* ABI string from -mabi= option. */
208 const char *rs6000_abi_string;
210 /* Debug flags */
211 const char *rs6000_debug_name;
212 int rs6000_debug_stack; /* debug stack applications */
213 int rs6000_debug_arg; /* debug argument handling */
215 /* Opaque types. */
216 static GTY(()) tree opaque_V2SI_type_node;
217 static GTY(()) tree opaque_V2SF_type_node;
218 static GTY(()) tree opaque_p_V2SI_type_node;
220 /* AltiVec requires a few more basic types in addition to the vector
221 types already defined in tree.c. */
222 static GTY(()) tree bool_char_type_node; /* __bool char */
223 static GTY(()) tree bool_short_type_node; /* __bool short */
224 static GTY(()) tree bool_int_type_node; /* __bool int */
225 static GTY(()) tree pixel_type_node; /* __pixel */
226 static GTY(()) tree bool_V16QI_type_node; /* __vector __bool char */
227 static GTY(()) tree bool_V8HI_type_node; /* __vector __bool short */
228 static GTY(()) tree bool_V4SI_type_node; /* __vector __bool int */
229 static GTY(()) tree pixel_V8HI_type_node; /* __vector __pixel */
231 int rs6000_warn_altivec_long = 1; /* On by default. */
232 const char *rs6000_warn_altivec_long_switch;
234 const char *rs6000_traceback_name;
/* How much traceback-table information to emit.  Decoded from
   rs6000_traceback_name ("full" / "part" / "no") in
   rs6000_override_options; defaults to traceback_default when no
   -mtraceback option was given.  */
235 static enum {
236 traceback_default = 0,
237 traceback_none,
238 traceback_part,
239 traceback_full
240 } rs6000_traceback;
242 /* Flag to say the TOC is initialized */
243 int toc_initialized;
244 char toc_label_name[10];
246 /* Alias set for saves and restores from the rs6000 stack. */
247 static GTY(()) int rs6000_sr_alias_set;
249 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
250 The only place that looks at this is rs6000_set_default_type_attributes;
251 everywhere else should rely on the presence or absence of a longcall
252 attribute on the function declaration. */
253 int rs6000_default_long_calls;
254 const char *rs6000_longcall_switch;
256 /* Control alignment for fields within structures. */
257 /* String from -malign-XXXXX. */
258 const char *rs6000_alignment_string;
259 int rs6000_alignment_flags;
/* Description of one target builtin: which target flags gate it, the
   insn used to expand it, its user-visible name, and its internal
   builtin code.  */
261 struct builtin_description
263 /* mask is not const because we're going to alter it below. This
264 nonsense will go away when we rewrite the -march infrastructure
265 to give us more target flag bits. */
266 unsigned int mask; /* target-flag mask gating this builtin;
                          see enable_mask_for_builtins */
267 const enum insn_code icode; /* insn code used to expand the builtin */
268 const char *const name; /* user-visible builtin name */
269 const enum rs6000_builtins code; /* internal rs6000 builtin code */
272 static bool rs6000_function_ok_for_sibcall (tree, tree);
273 static int num_insns_constant_wide (HOST_WIDE_INT);
274 static void validate_condition_mode (enum rtx_code, enum machine_mode);
275 static rtx rs6000_generate_compare (enum rtx_code);
276 static void rs6000_maybe_dead (rtx);
277 static void rs6000_emit_stack_tie (void);
278 static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
279 static rtx spe_synthesize_frame_save (rtx);
280 static bool spe_func_has_64bit_regs_p (void);
281 static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
282 int, HOST_WIDE_INT);
283 static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
284 static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
285 static unsigned rs6000_hash_constant (rtx);
286 static unsigned toc_hash_function (const void *);
287 static int toc_hash_eq (const void *, const void *);
288 static int constant_pool_expr_1 (rtx, int *, int *);
289 static bool constant_pool_expr_p (rtx);
290 static bool toc_relative_expr_p (rtx);
291 static bool legitimate_small_data_p (enum machine_mode, rtx);
292 static bool legitimate_offset_address_p (enum machine_mode, rtx, int);
293 static bool legitimate_indexed_address_p (rtx, int);
294 static bool legitimate_indirect_address_p (rtx, int);
295 static bool macho_lo_sum_memory_operand (rtx x, enum machine_mode mode);
296 static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
297 static struct machine_function * rs6000_init_machine_status (void);
298 static bool rs6000_assemble_integer (rtx, unsigned int, int);
299 #ifdef HAVE_GAS_HIDDEN
300 static void rs6000_assemble_visibility (tree, int);
301 #endif
302 static int rs6000_ra_ever_killed (void);
303 static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
304 static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
305 extern const struct attribute_spec rs6000_attribute_table[];
306 static void rs6000_set_default_type_attributes (tree);
307 static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
308 static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
309 static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
310 tree);
311 static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
312 static bool rs6000_return_in_memory (tree, tree);
313 static void rs6000_file_start (void);
314 #if TARGET_ELF
315 static unsigned int rs6000_elf_section_type_flags (tree, const char *, int);
316 static void rs6000_elf_asm_out_constructor (rtx, int);
317 static void rs6000_elf_asm_out_destructor (rtx, int);
318 static void rs6000_elf_select_section (tree, int, unsigned HOST_WIDE_INT);
319 static void rs6000_elf_unique_section (tree, int);
320 static void rs6000_elf_select_rtx_section (enum machine_mode, rtx,
321 unsigned HOST_WIDE_INT);
322 static void rs6000_elf_encode_section_info (tree, rtx, int)
323 ATTRIBUTE_UNUSED;
324 static bool rs6000_elf_in_small_data_p (tree);
325 #endif
326 #if TARGET_XCOFF
327 static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
328 static void rs6000_xcoff_asm_named_section (const char *, unsigned int);
329 static void rs6000_xcoff_select_section (tree, int, unsigned HOST_WIDE_INT);
330 static void rs6000_xcoff_unique_section (tree, int);
331 static void rs6000_xcoff_select_rtx_section (enum machine_mode, rtx,
332 unsigned HOST_WIDE_INT);
333 static const char * rs6000_xcoff_strip_name_encoding (const char *);
334 static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
335 static void rs6000_xcoff_file_start (void);
336 static void rs6000_xcoff_file_end (void);
337 #endif
338 #if TARGET_MACHO
339 static bool rs6000_binds_local_p (tree);
340 #endif
341 static int rs6000_use_dfa_pipeline_interface (void);
342 static int rs6000_variable_issue (FILE *, int, rtx, int);
343 static bool rs6000_rtx_costs (rtx, int, int, int *);
344 static int rs6000_adjust_cost (rtx, rtx, rtx, int);
345 static bool is_microcoded_insn (rtx);
346 static int is_dispatch_slot_restricted (rtx);
347 static bool is_cracked_insn (rtx);
348 static bool is_branch_slot_insn (rtx);
349 static int rs6000_adjust_priority (rtx, int);
350 static int rs6000_issue_rate (void);
351 static bool rs6000_is_costly_dependence (rtx, rtx, rtx, int, int);
352 static rtx get_next_active_insn (rtx, rtx);
353 static bool insn_terminates_group_p (rtx , enum group_termination);
354 static bool is_costly_group (rtx *, rtx);
355 static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
356 static int redefine_groups (FILE *, int, rtx, rtx);
357 static int pad_groups (FILE *, int, rtx, rtx);
358 static void rs6000_sched_finish (FILE *, int);
359 static int rs6000_use_sched_lookahead (void);
361 static void rs6000_init_builtins (void);
362 static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
363 static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
364 static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
365 static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
366 static void altivec_init_builtins (void);
367 static void rs6000_common_init_builtins (void);
368 static void rs6000_init_libfuncs (void);
370 static void enable_mask_for_builtins (struct builtin_description *, int,
371 enum rs6000_builtins,
372 enum rs6000_builtins);
373 static void spe_init_builtins (void);
374 static rtx spe_expand_builtin (tree, rtx, bool *);
375 static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
376 static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
377 static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
378 static rs6000_stack_t *rs6000_stack_info (void);
379 static void debug_stack_info (rs6000_stack_t *);
381 static rtx altivec_expand_builtin (tree, rtx, bool *);
382 static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
383 static rtx altivec_expand_st_builtin (tree, rtx, bool *);
384 static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
385 static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
386 static rtx altivec_expand_predicate_builtin (enum insn_code,
387 const char *, tree, rtx);
388 static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
389 static rtx altivec_expand_stv_builtin (enum insn_code, tree);
390 static void rs6000_parse_abi_options (void);
391 static void rs6000_parse_alignment_option (void);
392 static void rs6000_parse_tls_size_option (void);
393 static void rs6000_parse_yes_no_option (const char *, const char *, int *);
394 static int first_altivec_reg_to_save (void);
395 static unsigned int compute_vrsave_mask (void);
396 static void is_altivec_return_reg (rtx, void *);
397 static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
398 int easy_vector_constant (rtx, enum machine_mode);
399 static int easy_vector_same (rtx, enum machine_mode);
400 static bool is_ev64_opaque_type (tree);
401 static rtx rs6000_dwarf_register_span (rtx);
402 static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
403 static rtx rs6000_tls_get_addr (void);
404 static rtx rs6000_got_sym (void);
405 static inline int rs6000_tls_symbol_ref_1 (rtx *, void *);
406 static const char *rs6000_get_some_local_dynamic_name (void);
407 static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
408 static rtx rs6000_complex_function_value (enum machine_mode);
409 static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
410 enum machine_mode, tree);
411 static rtx rs6000_mixed_function_arg (CUMULATIVE_ARGS *,
412 enum machine_mode, tree, int);
413 static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
414 static void setup_incoming_varargs (CUMULATIVE_ARGS *,
415 enum machine_mode, tree,
416 int *, int);
417 #if TARGET_MACHO
418 static void macho_branch_islands (void);
419 static void add_compiler_branch_island (tree, tree, int);
420 static int no_previous_def (tree function_name);
421 static tree get_prev_label (tree function_name);
422 #endif
424 static tree rs6000_build_builtin_va_list (void);
426 /* Hash table stuff for keeping track of TOC entries. */
/* One entry of the TOC hash table (toc_hash_table below); used to avoid
   emitting duplicate TOC entries for identical constants.  */
428 struct toc_hash_struct GTY(())
430 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
431 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
432 rtx key;
433 enum machine_mode key_mode; /* machine mode of KEY */
434 int labelno; /* label number identifying this TOC entry
                    -- TODO confirm against the TOC output code */
437 static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
439 /* Default register names. */
440 char rs6000_reg_names[][8] =
442 "0", "1", "2", "3", "4", "5", "6", "7",
443 "8", "9", "10", "11", "12", "13", "14", "15",
444 "16", "17", "18", "19", "20", "21", "22", "23",
445 "24", "25", "26", "27", "28", "29", "30", "31",
446 "0", "1", "2", "3", "4", "5", "6", "7",
447 "8", "9", "10", "11", "12", "13", "14", "15",
448 "16", "17", "18", "19", "20", "21", "22", "23",
449 "24", "25", "26", "27", "28", "29", "30", "31",
450 "mq", "lr", "ctr","ap",
451 "0", "1", "2", "3", "4", "5", "6", "7",
452 "xer",
453 /* AltiVec registers. */
454 "0", "1", "2", "3", "4", "5", "6", "7",
455 "8", "9", "10", "11", "12", "13", "14", "15",
456 "16", "17", "18", "19", "20", "21", "22", "23",
457 "24", "25", "26", "27", "28", "29", "30", "31",
458 "vrsave", "vscr",
459 /* SPE registers. */
460 "spe_acc", "spefscr"
463 #ifdef TARGET_REGNAMES
464 static const char alt_reg_names[][8] =
466 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
467 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
468 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
469 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
470 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
471 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
472 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
473 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
474 "mq", "lr", "ctr", "ap",
475 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
476 "xer",
477 /* AltiVec registers. */
478 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
479 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
480 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
481 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
482 "vrsave", "vscr",
483 /* SPE registers. */
484 "spe_acc", "spefscr"
486 #endif
488 #ifndef MASK_STRICT_ALIGN
489 #define MASK_STRICT_ALIGN 0
490 #endif
491 #ifndef TARGET_PROFILE_KERNEL
492 #define TARGET_PROFILE_KERNEL 0
493 #endif
495 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
496 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
498 /* Return 1 for a symbol ref for a thread-local storage symbol. */
499 #define RS6000_SYMBOL_REF_TLS_P(RTX) \
500 (GET_CODE (RTX) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (RTX) != 0)
502 /* Initialize the GCC target structure. */
503 #undef TARGET_ATTRIBUTE_TABLE
504 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
505 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
506 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
508 #undef TARGET_ASM_ALIGNED_DI_OP
509 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
511 /* Default unaligned ops are only provided for ELF. Find the ops needed
512 for non-ELF systems. */
513 #ifndef OBJECT_FORMAT_ELF
514 #if TARGET_XCOFF
515 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
516 64-bit targets. */
517 #undef TARGET_ASM_UNALIGNED_HI_OP
518 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
519 #undef TARGET_ASM_UNALIGNED_SI_OP
520 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
521 #undef TARGET_ASM_UNALIGNED_DI_OP
522 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
523 #else
524 /* For Darwin. */
525 #undef TARGET_ASM_UNALIGNED_HI_OP
526 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
527 #undef TARGET_ASM_UNALIGNED_SI_OP
528 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
529 #endif
530 #endif
532 /* This hook deals with fixups for relocatable code and DI-mode objects
533 in 64-bit code. */
534 #undef TARGET_ASM_INTEGER
535 #define TARGET_ASM_INTEGER rs6000_assemble_integer
537 #ifdef HAVE_GAS_HIDDEN
538 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
539 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
540 #endif
542 #undef TARGET_HAVE_TLS
543 #define TARGET_HAVE_TLS HAVE_AS_TLS
545 #undef TARGET_CANNOT_FORCE_CONST_MEM
546 #define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
548 #undef TARGET_ASM_FUNCTION_PROLOGUE
549 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
550 #undef TARGET_ASM_FUNCTION_EPILOGUE
551 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
553 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
554 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE rs6000_use_dfa_pipeline_interface
555 #undef TARGET_SCHED_VARIABLE_ISSUE
556 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
558 #undef TARGET_SCHED_ISSUE_RATE
559 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
560 #undef TARGET_SCHED_ADJUST_COST
561 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
562 #undef TARGET_SCHED_ADJUST_PRIORITY
563 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
564 #undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
565 #define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
566 #undef TARGET_SCHED_FINISH
567 #define TARGET_SCHED_FINISH rs6000_sched_finish
569 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
570 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
572 #undef TARGET_INIT_BUILTINS
573 #define TARGET_INIT_BUILTINS rs6000_init_builtins
575 #undef TARGET_EXPAND_BUILTIN
576 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
578 #undef TARGET_INIT_LIBFUNCS
579 #define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs
581 #if TARGET_MACHO
582 #undef TARGET_BINDS_LOCAL_P
583 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
584 #endif
586 #undef TARGET_ASM_OUTPUT_MI_THUNK
587 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
589 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
590 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
592 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
593 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
595 #undef TARGET_RTX_COSTS
596 #define TARGET_RTX_COSTS rs6000_rtx_costs
597 #undef TARGET_ADDRESS_COST
598 #define TARGET_ADDRESS_COST hook_int_rtx_0
600 #undef TARGET_VECTOR_OPAQUE_P
601 #define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type
603 #undef TARGET_DWARF_REGISTER_SPAN
604 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
606 /* On rs6000, function arguments are promoted, as are function return
607 values. */
608 #undef TARGET_PROMOTE_FUNCTION_ARGS
609 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
610 #undef TARGET_PROMOTE_FUNCTION_RETURN
611 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
613 #undef TARGET_RETURN_IN_MEMORY
614 #define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory
616 #undef TARGET_SETUP_INCOMING_VARARGS
617 #define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
619 /* Always strict argument naming on rs6000. */
620 #undef TARGET_STRICT_ARGUMENT_NAMING
621 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
622 #undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
623 #define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
624 #undef TARGET_SPLIT_COMPLEX_ARG
625 #define TARGET_SPLIT_COMPLEX_ARG hook_bool_tree_true
627 #undef TARGET_BUILD_BUILTIN_VA_LIST
628 #define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list
630 struct gcc_target targetm = TARGET_INITIALIZER;
632 /* Override command line options. Mostly we process the processor
633 type and sometimes adjust other TARGET_ options. */
635 void
636 rs6000_override_options (const char *default_cpu)
638 size_t i, j;
639 struct rs6000_cpu_select *ptr;
640 int set_masks;
642 /* Simplifications for entries below. */
644 enum {
645 POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
646 POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
649 /* This table occasionally claims that a processor does not support
650 a particular feature even though it does, but the feature is slower
651 than the alternative. Thus, it shouldn't be relied on as a
652 complete description of the processor's support.
654 Please keep this list in order, and don't forget to update the
655 documentation in invoke.texi when adding a new processor or
656 flag. */
657 static struct ptt
659 const char *const name; /* Canonical processor name. */
660 const enum processor_type processor; /* Processor type enum value. */
661 const int target_enable; /* Target flags to enable. */
662 } const processor_target_table[]
663 = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
664 {"403", PROCESSOR_PPC403,
665 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
666 {"405", PROCESSOR_PPC405, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
667 {"405fp", PROCESSOR_PPC405, POWERPC_BASE_MASK},
668 {"440", PROCESSOR_PPC440, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
669 {"440fp", PROCESSOR_PPC440, POWERPC_BASE_MASK},
670 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
671 {"601", PROCESSOR_PPC601,
672 MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
673 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
674 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
675 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
676 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
677 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
678 {"620", PROCESSOR_PPC620,
679 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
680 {"630", PROCESSOR_PPC630,
681 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
682 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
683 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
684 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
685 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
686 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
687 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
688 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
689 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
690 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
691 {"970", PROCESSOR_POWER4,
692 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
693 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
694 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
695 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
696 {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
697 {"G5", PROCESSOR_POWER4,
698 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
699 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
700 {"power2", PROCESSOR_POWER,
701 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
702 {"power3", PROCESSOR_PPC630,
703 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
704 {"power4", PROCESSOR_POWER4,
705 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
706 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
707 {"powerpc64", PROCESSOR_POWERPC64,
708 POWERPC_BASE_MASK | MASK_POWERPC64},
709 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
710 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
711 {"rios2", PROCESSOR_RIOS2,
712 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
713 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
714 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
715 {"rs64a", PROCESSOR_RS64A, POWERPC_BASE_MASK | MASK_POWERPC64},
718 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
720 /* Some OSs don't support saving the high part of 64-bit registers on
721 context switch. Other OSs don't support saving Altivec registers.
722 On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
723 settings; if the user wants either, the user must explicitly specify
724 them and we won't interfere with the user's specification. */
726 enum {
727 POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
728 POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT
729 | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
730 | MASK_MFCRF)
732 set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
733 #ifdef OS_MISSING_POWERPC64
734 if (OS_MISSING_POWERPC64)
735 set_masks &= ~MASK_POWERPC64;
736 #endif
737 #ifdef OS_MISSING_ALTIVEC
738 if (OS_MISSING_ALTIVEC)
739 set_masks &= ~MASK_ALTIVEC;
740 #endif
742 /* Don't override these by the processor default if given explicitly. */
743 set_masks &= ~(target_flags_explicit
744 & (MASK_MULTIPLE | MASK_STRING | MASK_SOFT_FLOAT));
746 /* Identify the processor type. */
747 rs6000_select[0].string = default_cpu;
748 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
750 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
752 ptr = &rs6000_select[i];
753 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
755 for (j = 0; j < ptt_size; j++)
756 if (! strcmp (ptr->string, processor_target_table[j].name))
758 if (ptr->set_tune_p)
759 rs6000_cpu = processor_target_table[j].processor;
761 if (ptr->set_arch_p)
763 target_flags &= ~set_masks;
764 target_flags |= (processor_target_table[j].target_enable
765 & set_masks);
767 break;
770 if (j == ptt_size)
771 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
775 if (TARGET_E500)
776 rs6000_isel = 1;
778 /* If we are optimizing big endian systems for space, use the load/store
779 multiple and string instructions. */
780 if (BYTES_BIG_ENDIAN && optimize_size)
781 target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);
783 /* Don't allow -mmultiple or -mstring on little endian systems
784 unless the cpu is a 750, because the hardware doesn't support the
785 instructions used in little endian mode, and causes an alignment
786 trap. The 750 does not cause an alignment trap (except when the
787 target is unaligned). */
789 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
791 if (TARGET_MULTIPLE)
793 target_flags &= ~MASK_MULTIPLE;
794 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
795 warning ("-mmultiple is not supported on little endian systems");
798 if (TARGET_STRING)
800 target_flags &= ~MASK_STRING;
801 if ((target_flags_explicit & MASK_STRING) != 0)
802 warning ("-mstring is not supported on little endian systems");
806 /* Set debug flags */
807 if (rs6000_debug_name)
809 if (! strcmp (rs6000_debug_name, "all"))
810 rs6000_debug_stack = rs6000_debug_arg = 1;
811 else if (! strcmp (rs6000_debug_name, "stack"))
812 rs6000_debug_stack = 1;
813 else if (! strcmp (rs6000_debug_name, "arg"))
814 rs6000_debug_arg = 1;
815 else
816 error ("unknown -mdebug-%s switch", rs6000_debug_name);
819 if (rs6000_traceback_name)
821 if (! strncmp (rs6000_traceback_name, "full", 4))
822 rs6000_traceback = traceback_full;
823 else if (! strncmp (rs6000_traceback_name, "part", 4))
824 rs6000_traceback = traceback_part;
825 else if (! strncmp (rs6000_traceback_name, "no", 2))
826 rs6000_traceback = traceback_none;
827 else
828 error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
829 rs6000_traceback_name);
832 /* Set size of long double */
833 rs6000_long_double_type_size = 64;
834 if (rs6000_long_double_size_string)
836 char *tail;
837 int size = strtol (rs6000_long_double_size_string, &tail, 10);
838 if (*tail != '\0' || (size != 64 && size != 128))
839 error ("Unknown switch -mlong-double-%s",
840 rs6000_long_double_size_string);
841 else
842 rs6000_long_double_type_size = size;
845 /* Set Altivec ABI as default for powerpc64 linux. */
846 if (TARGET_ELF && TARGET_64BIT)
848 rs6000_altivec_abi = 1;
849 rs6000_altivec_vrsave = 1;
852 /* Handle -mabi= options. */
853 rs6000_parse_abi_options ();
855 /* Handle -malign-XXXXX option. */
856 rs6000_parse_alignment_option ();
858 /* Handle generic -mFOO=YES/NO options. */
859 rs6000_parse_yes_no_option ("vrsave", rs6000_altivec_vrsave_string,
860 &rs6000_altivec_vrsave);
861 rs6000_parse_yes_no_option ("isel", rs6000_isel_string,
862 &rs6000_isel);
863 rs6000_parse_yes_no_option ("spe", rs6000_spe_string, &rs6000_spe);
864 rs6000_parse_yes_no_option ("float-gprs", rs6000_float_gprs_string,
865 &rs6000_float_gprs);
867 /* Handle -mtls-size option. */
868 rs6000_parse_tls_size_option ();
870 #ifdef SUBTARGET_OVERRIDE_OPTIONS
871 SUBTARGET_OVERRIDE_OPTIONS;
872 #endif
873 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
874 SUBSUBTARGET_OVERRIDE_OPTIONS;
875 #endif
877 if (TARGET_E500)
879 /* The e500 does not have string instructions, and we set
880 MASK_STRING above when optimizing for size. */
881 if ((target_flags & MASK_STRING) != 0)
882 target_flags = target_flags & ~MASK_STRING;
884 /* No SPE means 64-bit long doubles, even if an E500. */
885 if (rs6000_spe_string != 0
886 && !strcmp (rs6000_spe_string, "no"))
887 rs6000_long_double_type_size = 64;
889 else if (rs6000_select[1].string != NULL)
891 /* For the powerpc-eabispe configuration, we set all these by
892 default, so let's unset them if we manually set another
893 CPU that is not the E500. */
894 if (rs6000_abi_string == 0)
895 rs6000_spe_abi = 0;
896 if (rs6000_spe_string == 0)
897 rs6000_spe = 0;
898 if (rs6000_float_gprs_string == 0)
899 rs6000_float_gprs = 0;
900 if (rs6000_isel_string == 0)
901 rs6000_isel = 0;
902 if (rs6000_long_double_size_string == 0)
903 rs6000_long_double_type_size = 64;
906 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
907 using TARGET_OPTIONS to handle a toggle switch, but we're out of
908 bits in target_flags so TARGET_SWITCHES cannot be used.
909 Assumption here is that rs6000_longcall_switch points into the
910 text of the complete option, rather than being a copy, so we can
911 scan back for the presence or absence of the no- modifier. */
912 if (rs6000_longcall_switch)
914 const char *base = rs6000_longcall_switch;
915 while (base[-1] != 'm') base--;
917 if (*rs6000_longcall_switch != '\0')
918 error ("invalid option `%s'", base);
919 rs6000_default_long_calls = (base[0] != 'n');
922 /* Handle -m(no-)warn-altivec-long similarly. */
923 if (rs6000_warn_altivec_long_switch)
925 const char *base = rs6000_warn_altivec_long_switch;
926 while (base[-1] != 'm') base--;
928 if (*rs6000_warn_altivec_long_switch != '\0')
929 error ("invalid option `%s'", base);
930 rs6000_warn_altivec_long = (base[0] != 'n');
933 /* Handle -mprioritize-restricted-insns option. */
934 rs6000_sched_restricted_insns_priority = DEFAULT_RESTRICTED_INSNS_PRIORITY;
935 if (rs6000_sched_restricted_insns_priority_str)
936 rs6000_sched_restricted_insns_priority =
937 atoi (rs6000_sched_restricted_insns_priority_str);
939 /* Handle -msched-costly-dep option. */
940 rs6000_sched_costly_dep = DEFAULT_SCHED_COSTLY_DEP;
941 if (rs6000_sched_costly_dep_str)
943 if (! strcmp (rs6000_sched_costly_dep_str, "no"))
944 rs6000_sched_costly_dep = no_dep_costly;
945 else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
946 rs6000_sched_costly_dep = all_deps_costly;
947 else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
948 rs6000_sched_costly_dep = true_store_to_load_dep_costly;
949 else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
950 rs6000_sched_costly_dep = store_to_load_dep_costly;
951 else
952 rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
955 /* Handle -minsert-sched-nops option. */
956 rs6000_sched_insert_nops = DEFAULT_SCHED_FINISH_NOP_INSERTION_SCHEME;
957 if (rs6000_sched_insert_nops_str)
959 if (! strcmp (rs6000_sched_insert_nops_str, "no"))
960 rs6000_sched_insert_nops = sched_finish_none;
961 else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
962 rs6000_sched_insert_nops = sched_finish_pad_groups;
963 else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
964 rs6000_sched_insert_nops = sched_finish_regroup_exact;
965 else
966 rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
969 #ifdef TARGET_REGNAMES
970 /* If the user desires alternate register names, copy in the
971 alternate names now. */
972 if (TARGET_REGNAMES)
973 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
974 #endif
976 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
977 If -maix-struct-return or -msvr4-struct-return was explicitly
978 used, don't override with the ABI default. */
979 if ((target_flags_explicit & MASK_AIX_STRUCT_RET) == 0)
981 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
982 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
983 else
984 target_flags |= MASK_AIX_STRUCT_RET;
987 if (TARGET_LONG_DOUBLE_128
988 && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
989 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
991 /* Allocate an alias set for register saves & restores from stack. */
992 rs6000_sr_alias_set = new_alias_set ();
994 if (TARGET_TOC)
995 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
997 /* We can only guarantee the availability of DI pseudo-ops when
998 assembling for 64-bit targets. */
999 if (!TARGET_64BIT)
1001 targetm.asm_out.aligned_op.di = NULL;
1002 targetm.asm_out.unaligned_op.di = NULL;
1005 /* Set maximum branch target alignment at two instructions, eight bytes. */
1006 align_jumps_max_skip = 8;
1007 align_loops_max_skip = 8;
1009 /* Arrange to save and restore machine status around nested functions. */
1010 init_machine_status = rs6000_init_machine_status;
1012 /* We should always be splitting complex arguments, but we can't break
1013 Linux and Darwin ABIs at the moment. For now, only AIX is fixed. */
1014 if (DEFAULT_ABI != ABI_AIX)
1015 targetm.calls.split_complex_arg = NULL;
/* Handle generic options of the form -mfoo=yes/no.
   NAME is the option name and VALUE the user-supplied value.  *FLAG
   receives 1 for "yes" and 0 for "no"; any other value is diagnosed.
   A null VALUE means the option was not given, and FLAG is left
   untouched.  */
static void
rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
{
  if (value == 0)
    return;

  if (strcmp (value, "yes") == 0)
    *flag = 1;
  else if (strcmp (value, "no") == 0)
    *flag = 0;
  else
    error ("unknown -m%s= option specified: '%s'", name, value);
}
1036 /* Handle -mabi= options. */
1037 static void
1038 rs6000_parse_abi_options (void)
1040 if (rs6000_abi_string == 0)
1041 return;
1042 else if (! strcmp (rs6000_abi_string, "altivec"))
1044 rs6000_altivec_abi = 1;
1045 rs6000_spe_abi = 0;
1047 else if (! strcmp (rs6000_abi_string, "no-altivec"))
1048 rs6000_altivec_abi = 0;
1049 else if (! strcmp (rs6000_abi_string, "spe"))
1051 rs6000_spe_abi = 1;
1052 rs6000_altivec_abi = 0;
1053 if (!TARGET_SPE_ABI)
1054 error ("not configured for ABI: '%s'", rs6000_abi_string);
1057 else if (! strcmp (rs6000_abi_string, "no-spe"))
1058 rs6000_spe_abi = 0;
1059 else
1060 error ("unknown ABI specified: '%s'", rs6000_abi_string);
1063 /* Handle -malign-XXXXXX options. */
1064 static void
1065 rs6000_parse_alignment_option (void)
1067 if (rs6000_alignment_string == 0)
1068 return;
1069 else if (! strcmp (rs6000_alignment_string, "power"))
1070 rs6000_alignment_flags = MASK_ALIGN_POWER;
1071 else if (! strcmp (rs6000_alignment_string, "natural"))
1072 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
1073 else
1074 error ("unknown -malign-XXXXX option specified: '%s'",
1075 rs6000_alignment_string);
1078 /* Validate and record the size specified with the -mtls-size option. */
1080 static void
1081 rs6000_parse_tls_size_option (void)
1083 if (rs6000_tls_size_string == 0)
1084 return;
1085 else if (strcmp (rs6000_tls_size_string, "16") == 0)
1086 rs6000_tls_size = 16;
1087 else if (strcmp (rs6000_tls_size_string, "32") == 0)
1088 rs6000_tls_size = 32;
1089 else if (strcmp (rs6000_tls_size_string, "64") == 0)
1090 rs6000_tls_size = 64;
1091 else
1092 error ("bad value `%s' for -mtls-size switch", rs6000_tls_size_string);
/* Hook run for -O level changes.  The rs6000 port currently needs no
   per-level optimization adjustments, so this is deliberately empty.  */
void
optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
{
}
/* Do anything needed at the start of the asm file.  With -fverbose-asm,
   emit an assembler comment listing the active rs6000/powerpc options
   (-mcpu/-mtune selections, -msdata mode, and -G threshold).  */
static void
rs6000_file_start (void)
{
  size_t i;
  char buffer[80];
  const char *start = buffer;	/* Reset to "" once something is printed.  */
  struct rs6000_cpu_select *ptr;
  const char *default_cpu = TARGET_CPU_DEFAULT;
  FILE *file = asm_out_file;

  default_file_start ();

#ifdef TARGET_BI_ARCH
  /* On bi-arch targets, suppress the default cpu name if the word size
     was switched away from the configured default.  */
  if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
    default_cpu = 0;
#endif

  if (flag_verbose_asm)
    {
      sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
      rs6000_select[0].string = default_cpu;

      for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
	{
	  ptr = &rs6000_select[i];
	  if (ptr->string != (char *)0 && ptr->string[0] != '\0')
	    {
	      fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
	      start = "";
	    }
	}

#ifdef USING_ELFOS_H
      switch (rs6000_sdata)
	{
	case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
	case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
	case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
	case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
	}

      if (rs6000_sdata && g_switch_value)
	{
	  fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
		   g_switch_value);
	  start = "";
	}
#endif

      /* START was cleared iff at least one option was printed; finish
	 the comment line in that case.  */
      if (*start == '\0')
	putc ('\n', file);
    }
}
/* Return nonzero if this function is known to have a null epilogue.
   Only meaningful after reload, when the stack frame layout is final.  */
int
direct_return (void)
{
  if (reload_completed)
    {
      rs6000_stack_t *info = rs6000_stack_info ();

      /* The epilogue is empty when no GPRs (>= r32 means none), FPRs
	 (>= 64 means none) or AltiVec registers need restoring, LR and
	 CR are not saved, no VRSAVE bits are in use, and no stack frame
	 was pushed.  */
      if (info->first_gp_reg_save == 32
	  && info->first_fp_reg_save == 64
	  && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
	  && ! info->lr_save_p
	  && ! info->cr_save_p
	  && info->vrsave_mask == 0
	  && ! info->push_p)
	return 1;
    }

  return 0;
}
/* Returns 1 always.  Predicate for operands that require no checking.  */
int
any_operand (rtx op ATTRIBUTE_UNUSED,
	     enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return 1;
}
1187 /* Returns 1 if op is the count register. */
1189 count_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1191 if (GET_CODE (op) != REG)
1192 return 0;
1194 if (REGNO (op) == COUNT_REGISTER_REGNUM)
1195 return 1;
1197 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
1198 return 1;
1200 return 0;
1203 /* Returns 1 if op is an altivec register. */
1205 altivec_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1208 return (register_operand (op, mode)
1209 && (GET_CODE (op) != REG
1210 || REGNO (op) > FIRST_PSEUDO_REGISTER
1211 || ALTIVEC_REGNO_P (REGNO (op))));
1215 xer_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1217 if (GET_CODE (op) != REG)
1218 return 0;
1220 if (XER_REGNO_P (REGNO (op)))
1221 return 1;
1223 return 0;
1226 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
1227 by such constants completes more quickly. */
1230 s8bit_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1232 return ( GET_CODE (op) == CONST_INT
1233 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
1236 /* Return 1 if OP is a constant that can fit in a D field. */
1239 short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1241 return (GET_CODE (op) == CONST_INT
1242 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
1245 /* Similar for an unsigned D field. */
1248 u_short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1250 return (GET_CODE (op) == CONST_INT
1251 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
1254 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
1257 non_short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1259 return (GET_CODE (op) == CONST_INT
1260 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
1263 /* Returns 1 if OP is a CONST_INT that is a positive value
1264 and an exact power of 2. */
1267 exact_log2_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1269 return (GET_CODE (op) == CONST_INT
1270 && INTVAL (op) > 0
1271 && exact_log2 (INTVAL (op)) >= 0);
1274 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
1275 ctr, or lr). */
1278 gpc_reg_operand (rtx op, enum machine_mode mode)
1280 return (register_operand (op, mode)
1281 && (GET_CODE (op) != REG
1282 || (REGNO (op) >= ARG_POINTER_REGNUM
1283 && !XER_REGNO_P (REGNO (op)))
1284 || REGNO (op) < MQ_REGNO));
1287 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1288 CR field. */
1291 cc_reg_operand (rtx op, enum machine_mode mode)
1293 return (register_operand (op, mode)
1294 && (GET_CODE (op) != REG
1295 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1296 || CR_REGNO_P (REGNO (op))));
1299 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1300 CR field that isn't CR0. */
1303 cc_reg_not_cr0_operand (rtx op, enum machine_mode mode)
1305 return (register_operand (op, mode)
1306 && (GET_CODE (op) != REG
1307 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1308 || CR_REGNO_NOT_CR0_P (REGNO (op))));
1311 /* Returns 1 if OP is either a constant integer valid for a D-field or
1312 a non-special register. If a register, it must be in the proper
1313 mode unless MODE is VOIDmode. */
1316 reg_or_short_operand (rtx op, enum machine_mode mode)
1318 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1321 /* Similar, except check if the negation of the constant would be
1322 valid for a D-field. */
1325 reg_or_neg_short_operand (rtx op, enum machine_mode mode)
1327 if (GET_CODE (op) == CONST_INT)
1328 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
1330 return gpc_reg_operand (op, mode);
1333 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1334 a non-special register. If a register, it must be in the proper
1335 mode unless MODE is VOIDmode. */
1338 reg_or_aligned_short_operand (rtx op, enum machine_mode mode)
1340 if (gpc_reg_operand (op, mode))
1341 return 1;
1342 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1343 return 1;
1345 return 0;
1349 /* Return 1 if the operand is either a register or an integer whose
1350 high-order 16 bits are zero. */
1353 reg_or_u_short_operand (rtx op, enum machine_mode mode)
1355 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1358 /* Return 1 is the operand is either a non-special register or ANY
1359 constant integer. */
1362 reg_or_cint_operand (rtx op, enum machine_mode mode)
1364 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
/* Return 1 if OP is either a non-special register or ANY 32-bit
   signed constant integer.  */
int
reg_or_arith_cint_operand (rtx op, enum machine_mode mode)
{
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
	      /* On hosts wider than 32 bits, also require the value to
		 fit in 32 signed bits; a 32-bit host cannot represent
		 anything wider, so no range check is needed there.  */
#if HOST_BITS_PER_WIDE_INT != 32
	      && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
		  < (unsigned HOST_WIDE_INT) 0x100000000ll)
#endif
	      ));
}
/* Return 1 if OP is either a non-special register or a 32-bit signed
   constant integer valid for 64-bit addition (i.e. loadable with an
   addis/addi pair).  */
int
reg_or_add_cint64_operand (rtx op, enum machine_mode mode)
{
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
	      /* The 0x8000 part of the bias accounts for addi
		 sign-extending the low 16 bits into the addis half.  */
#if HOST_BITS_PER_WIDE_INT == 32
	      && INTVAL (op) < 0x7fff8000
#else
	      && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
		  < 0x100000000ll)
#endif
	      ));
}
/* Return 1 if OP is either a non-special register or a 32-bit signed
   constant integer valid for 64-bit subtraction (its negation must be
   valid for 64-bit addition).  */
int
reg_or_sub_cint64_operand (rtx op, enum machine_mode mode)
{
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
	      /* Same range test as reg_or_add_cint64_operand, applied
		 to the negated value.  */
#if HOST_BITS_PER_WIDE_INT == 32
	      && (- INTVAL (op)) < 0x7fff8000
#else
	      && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
		  < 0x100000000ll)
#endif
	      ));
}
/* Return 1 if OP is either a non-special register or ANY 32-bit
   unsigned constant integer (usable with logical instructions).  */
int
reg_or_logical_cint_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) == CONST_INT)
    {
      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
	{
	  /* A mode wider than the host word only arises for 64-bit
	     modes on a 32-bit host.  */
	  if (GET_MODE_BITSIZE (mode) <= 32)
	    abort ();

	  /* A negative CONST_INT would sign-extend into the upper
	     half, so it is not a 32-bit unsigned value.  */
	  if (INTVAL (op) < 0)
	    return 0;
	}

      /* Accept only values whose bits above the low 32 are clear.  */
      return ((INTVAL (op) & GET_MODE_MASK (mode)
	       & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
    }
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      /* Integer CONST_DOUBLEs only occur when the value does not fit
	 in a host word, which requires DImode here.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	  || mode != DImode)
	abort ();

      return CONST_DOUBLE_HIGH (op) == 0;
    }
  else
    return gpc_reg_operand (op, mode);
}
1448 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1451 got_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1453 return (GET_CODE (op) == SYMBOL_REF
1454 || GET_CODE (op) == CONST
1455 || GET_CODE (op) == LABEL_REF);
1458 /* Return 1 if the operand is a simple references that can be loaded via
1459 the GOT (labels involving addition aren't allowed). */
1462 got_no_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1464 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
/* Return the number of instructions it takes to form the constant
   VALUE in an integer register.  */
static int
num_insns_constant_wide (HOST_WIDE_INT value)
{
  /* Signed 16-bit constant loadable with {cal|addi}.  */
  if (CONST_OK_FOR_LETTER_P (value, 'I'))
    return 1;

  /* Constant loadable with {cau|addis} (low 16 bits zero).  */
  else if (CONST_OK_FOR_LETTER_P (value, 'L'))
    return 1;

#if HOST_BITS_PER_WIDE_INT == 64
  else if (TARGET_POWERPC64)
    {
      /* Split into a sign-extended low 32-bit part and the rest.
	 The shift by 31 (not 32) lets the HIGH==0/-1 test below catch
	 values whose bit 31 agrees with the upper half.  */
      HOST_WIDE_INT low  = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
      HOST_WIDE_INT high = value >> 31;

      if (high == 0 || high == -1)
	return 2;

      high >>= 1;

      if (low == 0)
	/* Build HIGH, then shift it into place.  */
	return num_insns_constant_wide (high) + 1;
      else
	/* Build HIGH, shift, then OR in LOW.  */
	return (num_insns_constant_wide (high)
		+ num_insns_constant_wide (low) + 1);
    }
#endif

  else
    return 2;
}
/* Return the number of instructions needed to load constant OP of
   mode MODE into an integer register.  Handles CONST_INT and both
   integer and floating CONST_DOUBLE forms.  */
int
num_insns_constant (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) == CONST_INT)
    {
#if HOST_BITS_PER_WIDE_INT == 64
      /* A 64-bit value that is a rotated mask can be built with
	 li/lis plus one rldic*.  */
      if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
	  && mask64_operand (op, mode))
	return 2;
      else
#endif
	return num_insns_constant_wide (INTVAL (op));
    }

  else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
    {
      /* Single-precision float: count insns for its 32-bit image.  */
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);
      return num_insns_constant_wide ((HOST_WIDE_INT) l);
    }

  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      HOST_WIDE_INT low;
      HOST_WIDE_INT high;
      long l[2];
      REAL_VALUE_TYPE rv;
      int endian = (WORDS_BIG_ENDIAN == 0);

      /* Integer CONST_DOUBLE: read the halves directly; float: convert
	 to the target double image first.  */
      if (mode == VOIDmode || mode == DImode)
	{
	  high = CONST_DOUBLE_HIGH (op);
	  low = CONST_DOUBLE_LOW (op);
	}
      else
	{
	  REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
	  REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
	  high = l[endian];
	  low = l[1 - endian];
	}

      if (TARGET_32BIT)
	/* Two 32-bit registers: cost is the sum of the halves.  */
	return (num_insns_constant_wide (low)
		+ num_insns_constant_wide (high));

      else
	{
	  /* One 64-bit register.  If HIGH is just the sign extension
	     of LOW, only LOW needs to be materialized.  */
	  if (high == 0 && low >= 0)
	    return num_insns_constant_wide (low);

	  else if (high == -1 && low < 0)
	    return num_insns_constant_wide (low);

	  else if (mask64_operand (op, mode))
	    return 2;

	  else if (low == 0)
	    return num_insns_constant_wide (high) + 1;

	  else
	    return (num_insns_constant_wide (high)
		    + num_insns_constant_wide (low) + 1);
	}
    }

  else
    abort ();
}
/* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
   register with one instruction per word.  We only do this if we can
   safely read CONST_DOUBLE_{LOW,HIGH}.  */
int
easy_fp_constant (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) != CONST_DOUBLE
      || GET_MODE (op) != mode
      || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
    return 0;

  /* Consider all constants with -msoft-float to be easy.  */
  if ((TARGET_SOFT_FLOAT || !TARGET_FPRS)
      && mode != DImode)
    return 1;

  /* If we are using V.4 style PIC, consider all constants to be hard.  */
  if (flag_pic && DEFAULT_ABI == ABI_V4)
    return 0;

#ifdef TARGET_RELOCATABLE
  /* Similarly if we are using -mrelocatable, consider all constants
     to be hard.  */
  if (TARGET_RELOCATABLE)
    return 0;
#endif

  if (mode == TFmode)
    {
      long k[4];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      /* Easy only if every 32-bit word loads in a single insn.  */
      return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[2]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[3]) == 1);
    }

  else if (mode == DFmode)
    {
      long k[2];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
    }

  else if (mode == SFmode)
    {
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      return num_insns_constant_wide (l) == 1;
    }

  else if (mode == DImode)
    /* On 64-bit, a value with a zero low word is a simple lis/sldi;
       otherwise accept anything loadable in at most two insns.  */
    return ((TARGET_POWERPC64
	     && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
	    || (num_insns_constant (op, DImode) <= 2));

  else if (mode == SImode)
    return 1;
  else
    abort ();
}
1653 /* Return nonzero if all elements of a vector have the same value. */
1655 static int
1656 easy_vector_same (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1658 int units, i, cst;
1660 units = CONST_VECTOR_NUNITS (op);
1662 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1663 for (i = 1; i < units; ++i)
1664 if (INTVAL (CONST_VECTOR_ELT (op, i)) != cst)
1665 break;
1666 if (i == units)
1667 return 1;
1668 return 0;
/* Return 1 if the operand is a CONST_VECTOR that can be put into a
   register without using memory.  */
int
easy_vector_constant (rtx op, enum machine_mode mode)
{
  int cst, cst2;

  if (GET_CODE (op) != CONST_VECTOR
      || (!TARGET_ALTIVEC
	  && !TARGET_SPE))
    return 0;

  /* The all-zeros vector is easy for any supported vector mode.  */
  if (zero_constant (op, mode)
      && ((TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
	  || (TARGET_SPE && SPE_VECTOR_MODE (mode))))
    return 1;

  if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT)
    return 0;

  if (TARGET_SPE && mode == V1DImode)
    return 0;

  cst = INTVAL (CONST_VECTOR_ELT (op, 0));
  cst2 = INTVAL (CONST_VECTOR_ELT (op, 1));

  /* Limit SPE vectors to 15 bits signed.  These we can generate with:
       li r0, CONSTANT1
       evmergelo r0, r0, r0
       li r0, CONSTANT2

     I don't know how efficient it would be to allow bigger constants,
     considering we'll have an extra 'ori' for every 'li'.  I doubt 5
     instructions is better than a 64-bit memory load, but I don't
     have the e500 timing specs.  */
  if (TARGET_SPE && mode == V2SImode
      && cst >= -0x7fff && cst <= 0x7fff
      && cst2 >= -0x7fff && cst2 <= 0x7fff)
    return 1;

  if (TARGET_ALTIVEC)
    switch (mode)
      {
      case V4SImode:
	if (EASY_VECTOR_15 (cst, op, mode))
	  return 1;
	/* If both halfwords agree, retest the halfword as a splat.  */
	if ((cst & 0xffff) != ((cst >> 16) & 0xffff))
	  break;
	cst = cst >> 16;
	/* Fall through.  */
      case V8HImode:
	if (EASY_VECTOR_15 (cst, op, mode))
	  return 1;
	/* If both bytes agree, retest the byte as a splat.  */
	if ((cst & 0xff) != ((cst >> 8) & 0xff))
	  break;
	cst = cst >> 8;
	/* Fall through.  */
      case V16QImode:
	if (EASY_VECTOR_15 (cst, op, mode))
	  return 1;
      default:
	break;
      }

  /* Constants reachable as splat + vadd of itself.  */
  if (TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode))
    return 1;

  return 0;
}
1740 /* Same as easy_vector_constant but only for EASY_VECTOR_15_ADD_SELF. */
1743 easy_vector_constant_add_self (rtx op, enum machine_mode mode)
1745 int cst;
1747 if (!easy_vector_constant (op, mode))
1748 return 0;
1750 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1752 return TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode);
/* Emit the assembler template for moving an easy vector constant
   (operands[1]) into vector register operands[0].  Returns the
   template string; may rewrite operands[1]/operands[2] with the
   scalar splat values.  */
const char *
output_vec_const_move (rtx *operands)
{
  int cst, cst2;
  enum machine_mode mode;
  rtx dest, vec;

  dest = operands[0];
  vec = operands[1];

  cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
  cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
  mode = GET_MODE (dest);

  if (TARGET_ALTIVEC)
    {
      if (zero_constant (vec, mode))
	return "vxor %0,%0,%0";
      else if (EASY_VECTOR_15_ADD_SELF (cst, vec, mode))
	/* Needs two insns; leave it to the post-reload splitter.  */
	return "#";
      else if (easy_vector_constant (vec, mode))
	{
	  operands[1] = GEN_INT (cst);
	  /* Pick the narrowest splat that reproduces the constant,
	     shifting CST down at each fall-through.  */
	  switch (mode)
	    {
	    case V4SImode:
	      if (EASY_VECTOR_15 (cst, vec, mode))
		{
		  operands[1] = GEN_INT (cst);
		  return "vspltisw %0,%1";
		}
	      cst = cst >> 16;
	      /* Fall through.  */
	    case V8HImode:
	      if (EASY_VECTOR_15 (cst, vec, mode))
		{
		  operands[1] = GEN_INT (cst);
		  return "vspltish %0,%1";
		}
	      cst = cst >> 8;
	      /* Fall through.  */
	    case V16QImode:
	      if (EASY_VECTOR_15 (cst, vec, mode))
		{
		  operands[1] = GEN_INT (cst);
		  return "vspltisb %0,%1";
		}
	    default:
	      abort ();
	    }
	}
      else
	abort ();
    }

  if (TARGET_SPE)
    {
      /* Vector constant 0 is handled as a splitter of V2SI, and in the
	 pattern of V1DI, V4HI, and V2SF.

	 FIXME: We should probably return # and add post reload
	 splitters for these, but this way is so easy ;-).  */
      operands[1] = GEN_INT (cst);
      operands[2] = GEN_INT (cst2);
      if (cst == cst2)
	return "li %0,%1\n\tevmergelo %0,%0,%0";
      else
	return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
    }

  abort ();
}
/* Return 1 if the operand is the constant 0.  This works for scalars
   as well as vectors.  */
int
zero_constant (rtx op, enum machine_mode mode)
{
  return op == CONST0_RTX (mode);
}
1835 /* Return 1 if the operand is 0.0. */
1837 zero_fp_constant (rtx op, enum machine_mode mode)
1839 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
/* Return 1 if the operand is in volatile memory.  Note that during
   the RTL generation phase, memory_operand does not return TRUE for
   volatile memory references.  So this function allows us to
   recognize volatile references where it's safe.  */
int
volatile_mem_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) != MEM)
    return 0;

  if (!MEM_VOLATILE_P (op))
    return 0;

  if (mode != GET_MODE (op))
    return 0;

  /* After reload, hand off to the generic predicate.  */
  if (reload_completed)
    return memory_operand (op, mode);

  /* During reload only already-valid (strict) addresses count.  */
  if (reload_in_progress)
    return strict_memory_address_p (mode, XEXP (op, 0));

  return memory_address_p (mode, XEXP (op, 0));
}
1868 /* Return 1 if the operand is an offsettable memory operand. */
1871 offsettable_mem_operand (rtx op, enum machine_mode mode)
1873 return ((GET_CODE (op) == MEM)
1874 && offsettable_address_p (reload_completed || reload_in_progress,
1875 mode, XEXP (op, 0)));
1878 /* Return 1 if the operand is either an easy FP constant (see above) or
1879 memory. */
1882 mem_or_easy_const_operand (rtx op, enum machine_mode mode)
1884 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1887 /* Return 1 if the operand is either a non-special register or an item
1888 that can be used as the operand of a `mode' add insn. */
1891 add_operand (rtx op, enum machine_mode mode)
1893 if (GET_CODE (op) == CONST_INT)
1894 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1895 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1897 return gpc_reg_operand (op, mode);
1900 /* Return 1 if OP is a constant but not a valid add_operand. */
1903 non_add_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1905 return (GET_CODE (op) == CONST_INT
1906 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1907 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
/* Return 1 if the operand is a non-special register or a constant
   that can be used as the operand of an OR or XOR insn on the RS/6000
   (either half of the 32-bit value must be zero).  */
int
logical_operand (rtx op, enum machine_mode mode)
{
  HOST_WIDE_INT opl, oph;

  if (gpc_reg_operand (op, mode))
    return 1;

  if (GET_CODE (op) == CONST_INT)
    {
      opl = INTVAL (op) & GET_MODE_MASK (mode);

#if HOST_BITS_PER_WIDE_INT <= 32
      /* On a 32-bit host a negative value in a wider mode implies
	 nonzero high bits, which logical immediates cannot express.  */
      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
	return 0;
#endif
    }
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	abort ();

      opl = CONST_DOUBLE_LOW (op);
      oph = CONST_DOUBLE_HIGH (op);
      if (oph != 0)
	return 0;
    }
  else
    return 0;

  /* Fits ori/xori (low half) or oris/xoris (high half).  */
  return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
	  || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
}
1947 /* Return 1 if C is a constant that is not a logical operand (as
1948 above), but could be split into one. */
1951 non_logical_cint_operand (rtx op, enum machine_mode mode)
1953 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1954 && ! logical_operand (op, mode)
1955 && reg_or_logical_cint_operand (op, mode));
/* Return 1 if OP is a constant that can be encoded in a 32-bit mask
   on the RS/6000.  It is if there are no more than two 1->0 or 0->1
   transitions.  Reject all ones and all zeros, since these should
   have been optimized away and confuse the making of MB and ME.  */
int
mask_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  HOST_WIDE_INT c, lsb;

  if (GET_CODE (op) != CONST_INT)
    return 0;

  c = INTVAL (op);

  /* Fail in 64-bit mode if the mask wraps around because the upper
     32-bits of the mask will all be 1s, contrary to GCC's internal view.  */
  if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
    return 0;

  /* We don't change the number of transitions by inverting,
     so make sure we start with the LS bit zero.  */
  if (c & 1)
    c = ~c;

  /* Reject all zeros or all ones.  */
  if (c == 0)
    return 0;

  /* Find the first transition.  */
  lsb = c & -c;

  /* Invert to look for a second transition.  */
  c = ~c;

  /* Erase first transition.  */
  c &= -lsb;

  /* Find the second transition (if any).  */
  lsb = c & -c;

  /* Match if all the bits above are 1's (or c is zero).  */
  return c == -lsb;
}
2003 /* Return 1 for the PowerPC64 rlwinm corner case. */
2006 mask_operand_wrap (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2008 HOST_WIDE_INT c, lsb;
2010 if (GET_CODE (op) != CONST_INT)
2011 return 0;
2013 c = INTVAL (op);
2015 if ((c & 0x80000001) != 0x80000001)
2016 return 0;
2018 c = ~c;
2019 if (c == 0)
2020 return 0;
2022 lsb = c & -c;
2023 c = ~c;
2024 c &= -lsb;
2025 lsb = c & -c;
2026 return c == -lsb;
2029 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
2030 It is if there are no more than one 1->0 or 0->1 transitions.
2031 Reject all zeros, since zero should have been optimized away and
2032 confuses the making of MB and ME. */
2035 mask64_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2037 if (GET_CODE (op) == CONST_INT)
2039 HOST_WIDE_INT c, lsb;
2041 c = INTVAL (op);
2043 /* Reject all zeros. */
2044 if (c == 0)
2045 return 0;
2047 /* We don't change the number of transitions by inverting,
2048 so make sure we start with the LS bit zero. */
2049 if (c & 1)
2050 c = ~c;
2052 /* Find the transition, and check that all bits above are 1's. */
2053 lsb = c & -c;
2055 /* Match if all the bits above are 1's (or c is zero). */
2056 return c == -lsb;
2058 return 0;
2061 /* Like mask64_operand, but allow up to three transitions. This
2062 predicate is used by insn patterns that generate two rldicl or
2063 rldicr machine insns. */
2066 mask64_2_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2068 if (GET_CODE (op) == CONST_INT)
2070 HOST_WIDE_INT c, lsb;
2072 c = INTVAL (op);
2074 /* Disallow all zeros. */
2075 if (c == 0)
2076 return 0;
2078 /* We don't change the number of transitions by inverting,
2079 so make sure we start with the LS bit zero. */
2080 if (c & 1)
2081 c = ~c;
2083 /* Find the first transition. */
2084 lsb = c & -c;
2086 /* Invert to look for a second transition. */
2087 c = ~c;
2089 /* Erase first transition. */
2090 c &= -lsb;
2092 /* Find the second transition. */
2093 lsb = c & -c;
2095 /* Invert to look for a third transition. */
2096 c = ~c;
2098 /* Erase second transition. */
2099 c &= -lsb;
2101 /* Find the third transition (if any). */
2102 lsb = c & -c;
2104 /* Match if all the bits above are 1's (or c is zero). */
2105 return c == -lsb;
2107 return 0;
/* Generates shifts and masks for a pair of rldicl or rldicr insns to
   implement ANDing by the mask IN.  IN must be a CONST_INT accepted by
   mask64_2_operand.  On return OUT[0]/OUT[1] hold the rotate count and
   mask for the first insn and OUT[2]/OUT[3] those for the second.
   Only usable on a host with 64-bit HOST_WIDE_INT; otherwise aborts.  */
void
build_mask64_2_operands (rtx in, rtx *out)
{
#if HOST_BITS_PER_WIDE_INT >= 64
  unsigned HOST_WIDE_INT c, lsb, m1, m2;
  int shift;

  if (GET_CODE (in) != CONST_INT)
    abort ();

  c = INTVAL (in);
  if (c & 1)
    {
      /* Assume c initially something like 0x00fff000000fffff.  The idea
	 is to rotate the word so that the middle ^^^^^^ group of zeros
	 is at the MS end and can be cleared with an rldicl mask.  We then
	 rotate back and clear off the MS    ^^ group of zeros with a
	 second rldicl.  */
      c = ~c;			/* c == 0xff000ffffff00000 */
      lsb = c & -c;		/* lsb == 0x0000000000100000 */
      m1 = -lsb;		/*  m1 == 0xfffffffffff00000 */
      c = ~c;			/* c == 0x00fff000000fffff */
      c &= -lsb;		/* c == 0x00fff00000000000 */
      lsb = c & -c;		/* lsb == 0x0000100000000000 */
      c = ~c;			/* c == 0xff000fffffffffff */
      c &= -lsb;		/* c == 0xff00000000000000 */
      shift = 0;
      while ((lsb >>= 1) != 0)
	shift++;		/* shift == 44 on exit from loop */
      m1 <<= 64 - shift;	/* m1 == 0xffffff0000000000 */
      m1 = ~m1;			/* m1 == 0x000000ffffffffff */
      m2 = ~c;			/* m2 == 0x00ffffffffffffff */
    }
  else
    {
      /* Assume c initially something like 0xff000f0000000000.  The idea
	 is to rotate the word so that the     ^^^  middle group of zeros
	 is at the LS end and can be cleared with an rldicr mask.  We then
	 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
	 a second rldicr.  */
      lsb = c & -c;		/* lsb == 0x0000010000000000 */
      m2 = -lsb;		/*  m2 == 0xffffff0000000000 */
      c = ~c;			/* c == 0x00fff0ffffffffff */
      c &= -lsb;		/* c == 0x00fff00000000000 */
      lsb = c & -c;		/* lsb == 0x0000100000000000 */
      c = ~c;			/* c == 0xff000fffffffffff */
      c &= -lsb;		/* c == 0xff00000000000000 */
      shift = 0;
      while ((lsb >>= 1) != 0)
	shift++;		/* shift == 44 on exit from loop */
      m1 = ~c;			/* m1 == 0x00ffffffffffffff */
      m1 >>= shift;		/* m1 == 0x0000000000000fff */
      m1 = ~m1;			/* m1 == 0xfffffffffffff000 */
    }

  /* Note that when we only have two 0->1 and 1->0 transitions, one of the
     masks will be all 1's.  We are guaranteed more than one transition.  */
  out[0] = GEN_INT (64 - shift);
  out[1] = GEN_INT (m1);
  out[2] = GEN_INT (shift);
  out[3] = GEN_INT (m2);
#else
  /* Unreachable on 32-bit hosts: the insn patterns using this helper
     are only enabled for 64-bit targets built with a 64-bit HWI.  */
  (void)in;
  (void)out;
  abort ();
#endif
}
2180 /* Return 1 if the operand is either a non-special register or a constant
2181 that can be used as the operand of a PowerPC64 logical AND insn. */
2184 and64_operand (rtx op, enum machine_mode mode)
2186 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2187 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
2189 return (logical_operand (op, mode) || mask64_operand (op, mode));
2192 /* Like the above, but also match constants that can be implemented
2193 with two rldicl or rldicr insns. */
2196 and64_2_operand (rtx op, enum machine_mode mode)
2198 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2199 return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
2201 return logical_operand (op, mode) || mask64_2_operand (op, mode);
2204 /* Return 1 if the operand is either a non-special register or a
2205 constant that can be used as the operand of an RS/6000 logical AND insn. */
2208 and_operand (rtx op, enum machine_mode mode)
2210 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2211 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
2213 return (logical_operand (op, mode) || mask_operand (op, mode));
2216 /* Return 1 if the operand is a general register or memory operand. */
2219 reg_or_mem_operand (rtx op, enum machine_mode mode)
2221 return (gpc_reg_operand (op, mode)
2222 || memory_operand (op, mode)
2223 || macho_lo_sum_memory_operand (op, mode)
2224 || volatile_mem_operand (op, mode));
2227 /* Return 1 if the operand is a general register or memory operand without
2228 pre_inc or pre_dec which produces invalid form of PowerPC lwa
2229 instruction. */
2232 lwa_operand (rtx op, enum machine_mode mode)
2234 rtx inner = op;
2236 if (reload_completed && GET_CODE (inner) == SUBREG)
2237 inner = SUBREG_REG (inner);
2239 return gpc_reg_operand (inner, mode)
2240 || (memory_operand (inner, mode)
2241 && GET_CODE (XEXP (inner, 0)) != PRE_INC
2242 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
2243 && (GET_CODE (XEXP (inner, 0)) != PLUS
2244 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
2245 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
2248 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
2251 symbol_ref_operand (rtx op, enum machine_mode mode)
2253 if (mode != VOIDmode && GET_MODE (op) != mode)
2254 return 0;
2256 return (GET_CODE (op) == SYMBOL_REF
2257 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op)));
2260 /* Return 1 if the operand, used inside a MEM, is a valid first argument
2261 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
2264 call_operand (rtx op, enum machine_mode mode)
2266 if (mode != VOIDmode && GET_MODE (op) != mode)
2267 return 0;
2269 return (GET_CODE (op) == SYMBOL_REF
2270 || (GET_CODE (op) == REG
2271 && (REGNO (op) == LINK_REGISTER_REGNUM
2272 || REGNO (op) == COUNT_REGISTER_REGNUM
2273 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2276 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
2277 this file. */
2280 current_file_function_operand (rtx op,
2281 enum machine_mode mode ATTRIBUTE_UNUSED)
2283 return (GET_CODE (op) == SYMBOL_REF
2284 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op))
2285 && (SYMBOL_REF_LOCAL_P (op)
2286 || (op == XEXP (DECL_RTL (current_function_decl), 0))));
/* Return 1 if this operand is a valid input for a move insn.
   The checks are ordered from most to least common; each accepting
   branch returns immediately.  */

int
input_operand (rtx op, enum machine_mode mode)
{
  /* Memory is always valid.  */
  if (memory_operand (op, mode))
    return 1;

  /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary.  */
  if (GET_CODE (op) == CONSTANT_P_RTX)
    return 1;

  /* For floating-point, easy constants are valid.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && CONSTANT_P (op)
      && easy_fp_constant (op, mode))
    return 1;

  /* Allow any integer constant.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && (GET_CODE (op) == CONST_INT
	  || GET_CODE (op) == CONST_DOUBLE))
    return 1;

  /* Allow easy vector constants.  */
  if (GET_CODE (op) == CONST_VECTOR
      && easy_vector_constant (op, mode))
    return 1;

  /* For floating-point or multi-word mode, the only remaining valid type
     is a register.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    return register_operand (op, mode);

  /* The only cases left are integral modes one word or smaller (we
     do not get called for MODE_CC values).  These can be in any
     register.  */
  if (register_operand (op, mode))
    return 1;

  /* A SYMBOL_REF referring to the TOC is valid.  */
  if (legitimate_constant_pool_address_p (op))
    return 1;

  /* A constant pool expression (relative to the TOC) is valid.  */
  if (toc_relative_expr_p (op))
    return 1;

  /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
     to be valid.  */
  if (DEFAULT_ABI == ABI_V4
      && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
      && small_data_operand (op, Pmode))
    return 1;

  return 0;
}
/* Darwin, AIX increases natural record alignment to doubleword if the first
   field is an FP double while the FP fields remain word aligned.
   TYPE is the record type; COMPUTED and SPECIFIED are candidate
   alignments in bits.  Returns the alignment to use.  */

unsigned int
rs6000_special_round_type_align (tree type, int computed, int specified)
{
  tree field = TYPE_FIELDS (type);

  /* Skip over VAR_DECL entries (e.g. static members) so the test below
     looks at the first actual field of the record.  */
  while (field != NULL && TREE_CODE (field) == VAR_DECL)
    field = TREE_CHAIN (field);

  /* No real first field, or it is not a double: no special rounding.
     (The field == type check looks defensive -- TODO confirm when a
     field chain entry can equal the type itself.)  */
  if (field == NULL || field == type || DECL_MODE (field) != DFmode)
    return MAX (computed, specified);

  /* Leading double: raise the record alignment to 64 bits.  */
  return MAX (MAX (computed, specified), 64);
}
2369 /* Return 1 for an operand in small memory on V.4/eabi. */
2372 small_data_operand (rtx op ATTRIBUTE_UNUSED,
2373 enum machine_mode mode ATTRIBUTE_UNUSED)
2375 #if TARGET_ELF
2376 rtx sym_ref;
2378 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
2379 return 0;
2381 if (DEFAULT_ABI != ABI_V4)
2382 return 0;
2384 if (GET_CODE (op) == SYMBOL_REF)
2385 sym_ref = op;
2387 else if (GET_CODE (op) != CONST
2388 || GET_CODE (XEXP (op, 0)) != PLUS
2389 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
2390 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
2391 return 0;
2393 else
2395 rtx sum = XEXP (op, 0);
2396 HOST_WIDE_INT summand;
2398 /* We have to be careful here, because it is the referenced address
2399 that must be 32k from _SDA_BASE_, not just the symbol. */
2400 summand = INTVAL (XEXP (sum, 1));
2401 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
2402 return 0;
2404 sym_ref = XEXP (sum, 0);
2407 return SYMBOL_REF_SMALL_P (sym_ref);
2408 #else
2409 return 0;
2410 #endif
2413 /* Return true, if operand is a memory operand and has a
2414 displacement divisible by 4. */
2417 word_offset_memref_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2419 rtx addr;
2420 int off = 0;
2422 if (!memory_operand (op, mode))
2423 return 0;
2425 addr = XEXP (op, 0);
2426 if (GET_CODE (addr) == PLUS
2427 && GET_CODE (XEXP (addr, 0)) == REG
2428 && GET_CODE (XEXP (addr, 1)) == CONST_INT)
2429 off = INTVAL (XEXP (addr, 1));
2431 return (off % 4) == 0;
2434 /* Return true if either operand is a general purpose register. */
2436 bool
2437 gpr_or_gpr_p (rtx op0, rtx op1)
2439 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
2440 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
2444 /* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
2446 static int
2447 constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
2449 switch (GET_CODE(op))
2451 case SYMBOL_REF:
2452 if (RS6000_SYMBOL_REF_TLS_P (op))
2453 return 0;
2454 else if (CONSTANT_POOL_ADDRESS_P (op))
2456 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2458 *have_sym = 1;
2459 return 1;
2461 else
2462 return 0;
2464 else if (! strcmp (XSTR (op, 0), toc_label_name))
2466 *have_toc = 1;
2467 return 1;
2469 else
2470 return 0;
2471 case PLUS:
2472 case MINUS:
2473 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2474 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
2475 case CONST:
2476 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
2477 case CONST_INT:
2478 return 1;
2479 default:
2480 return 0;
2484 static bool
2485 constant_pool_expr_p (rtx op)
2487 int have_sym = 0;
2488 int have_toc = 0;
2489 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2492 static bool
2493 toc_relative_expr_p (rtx op)
2495 int have_sym = 0;
2496 int have_toc = 0;
2497 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2500 /* SPE offset addressing is limited to 5-bits worth of double words. */
2501 #define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
2503 bool
2504 legitimate_constant_pool_address_p (rtx x)
2506 return (TARGET_TOC
2507 && GET_CODE (x) == PLUS
2508 && GET_CODE (XEXP (x, 0)) == REG
2509 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
2510 && constant_pool_expr_p (XEXP (x, 1)));
2513 static bool
2514 legitimate_small_data_p (enum machine_mode mode, rtx x)
2516 return (DEFAULT_ABI == ABI_V4
2517 && !flag_pic && !TARGET_TOC
2518 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
2519 && small_data_operand (x, mode));
/* Return true if X is a valid reg+const_int address for MODE.
   STRICT selects strict register checking (after reload).  */

static bool
legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
{
  unsigned HOST_WIDE_INT offset, extra;

  if (GET_CODE (x) != PLUS)
    return false;
  if (GET_CODE (XEXP (x, 0)) != REG)
    return false;
  if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
    return false;
  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
    return false;

  offset = INTVAL (XEXP (x, 1));
  /* EXTRA is how far past OFFSET the last word of the access lies;
     it too must be reachable by a 16-bit displacement.  */
  extra = 0;
  switch (mode)
    {
    case V16QImode:
    case V8HImode:
    case V4SFmode:
    case V4SImode:
      /* AltiVec vector modes.  Only reg+reg addressing is valid here,
	 which leaves the only valid constant offset of zero, which by
	 canonicalization rules is also invalid.  */
      return false;

    case V4HImode:
    case V2SImode:
    case V1DImode:
    case V2SFmode:
      /* SPE vector modes.  */
      return SPE_CONST_OFFSET_OK (offset);

    case DFmode:
    case DImode:
      /* Multi-word in 32-bit GPRs: second word is at offset+4.
	 64-bit ld/std instead require a 4-byte aligned displacement.  */
      if (mode == DFmode || !TARGET_POWERPC64)
	extra = 4;
      else if (offset & 3)
	return false;
      break;

    case TFmode:
    case TImode:
      /* Quad-word: last word at offset+12 (32-bit), or offset+8 with
	 alignment required for 64-bit ld/std pairs.  */
      if (mode == TFmode || !TARGET_POWERPC64)
	extra = 12;
      else if (offset & 3)
	return false;
      else
	extra = 8;
      break;

    default:
      break;
    }

  /* Bias by 0x8000 so the signed 16-bit range [-0x8000, 0x7fff] maps
     onto [0, 0xffff]; both the first and last word accessed must fit.  */
  offset += 0x8000;
  return (offset < 0x10000) && (offset + extra < 0x10000);
}
2582 static bool
2583 legitimate_indexed_address_p (rtx x, int strict)
2585 rtx op0, op1;
2587 if (GET_CODE (x) != PLUS)
2588 return false;
2589 op0 = XEXP (x, 0);
2590 op1 = XEXP (x, 1);
2592 if (!REG_P (op0) || !REG_P (op1))
2593 return false;
2595 return ((INT_REG_OK_FOR_BASE_P (op0, strict)
2596 && INT_REG_OK_FOR_INDEX_P (op1, strict))
2597 || (INT_REG_OK_FOR_BASE_P (op1, strict)
2598 && INT_REG_OK_FOR_INDEX_P (op0, strict)));
2601 static inline bool
2602 legitimate_indirect_address_p (rtx x, int strict)
2604 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
2607 static bool
2608 macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
2610 if (!TARGET_MACHO || !flag_pic
2611 || mode != SImode || GET_CODE(x) != MEM)
2612 return false;
2613 x = XEXP (x, 0);
2615 if (GET_CODE (x) != LO_SUM)
2616 return false;
2617 if (GET_CODE (XEXP (x, 0)) != REG)
2618 return false;
2619 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
2620 return false;
2621 x = XEXP (x, 1);
2623 return CONSTANT_P (x);
2626 static bool
2627 legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
2629 if (GET_CODE (x) != LO_SUM)
2630 return false;
2631 if (GET_CODE (XEXP (x, 0)) != REG)
2632 return false;
2633 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2634 return false;
2635 x = XEXP (x, 1);
2637 if (TARGET_ELF || TARGET_MACHO)
2639 if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
2640 return false;
2641 if (TARGET_TOC)
2642 return false;
2643 if (GET_MODE_NUNITS (mode) != 1)
2644 return false;
2645 if (GET_MODE_BITSIZE (mode) > 32
2646 && !(TARGET_HARD_FLOAT && TARGET_FPRS && mode == DFmode))
2647 return false;
2649 return CONSTANT_P (x);
2652 return false;
/* Try machine-dependent ways of modifying an illegitimate address
   to be legitimate.  If we find one, return the new, valid address.
   This is used from only one place: `memory_address' in explow.c.

   OLDX is the address as it was before break_out_memory_refs was
   called.  In some cases it is useful to look at this to decide what
   needs to be done.

   MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.

   It is always safe for this function to do nothing.  It exists to
   recognize opportunities to optimize the output.

   On RS/6000, first check for the sum of a register with a constant
   integer that is out of range.  If so, generate code to add the
   constant with the low-order 16 bits masked to the register and force
   this result into another register (this can be done with `cau').
   Then generate an address of REG+(CONST&0xffff), allowing for the
   possibility of bit 16 being a one.

   Then check for the sum of a register and something not constant, try to
   load the other things into a register and return the sum.  */

rtx
rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
			   enum machine_mode mode)
{
  /* Thread-local symbols take a completely different path.  */
  if (GET_CODE (x) == SYMBOL_REF)
    {
      enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
      if (model != 0)
	return rs6000_legitimize_tls_address (x, model);
    }

  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
    {
      /* Constant offset does not fit in a signed 16-bit displacement:
	 add the high part to the base register, keep the sign-extended
	 low 16 bits as the displacement.  */
      HOST_WIDE_INT high_int, low_int;
      rtx sum;
      low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
      high_int = INTVAL (XEXP (x, 1)) - low_int;
      sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
					 GEN_INT (high_int)), 0);
      return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
    }
  else if (GET_CODE (x) == PLUS
	   && GET_CODE (XEXP (x, 0)) == REG
	   && GET_CODE (XEXP (x, 1)) != CONST_INT
	   && GET_MODE_NUNITS (mode) == 1
	   && ((TARGET_HARD_FLOAT && TARGET_FPRS)
	       || TARGET_POWERPC64
	       || (mode != DFmode && mode != TFmode))
	   && (TARGET_POWERPC64 || mode != DImode)
	   && mode != TImode)
    {
      /* reg + non-constant: force the addend into a register so the
	 result is a valid indexed (reg+reg) address.  */
      return gen_rtx_PLUS (Pmode, XEXP (x, 0),
			   force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
    }
  else if (ALTIVEC_VECTOR_MODE (mode))
    {
      rtx reg;

      /* Make sure both operands are registers.  */
      if (GET_CODE (x) == PLUS)
	return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
			     force_reg (Pmode, XEXP (x, 1)));

      reg = force_reg (Pmode, x);
      return reg;
    }
  else if (SPE_VECTOR_MODE (mode))
    {
      /* We accept [reg + reg] and [reg + OFFSET].  */

      if (GET_CODE (x) == PLUS)
	{
	  rtx op1 = XEXP (x, 0);
	  rtx op2 = XEXP (x, 1);

	  op1 = force_reg (Pmode, op1);

	  /* Offsets outside the 5-bit doubleword range also go into a
	     register.  */
	  if (GET_CODE (op2) != REG
	      && (GET_CODE (op2) != CONST_INT
		  || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
	    op2 = force_reg (Pmode, op2);

	  return gen_rtx_PLUS (Pmode, op1, op2);
	}

      return force_reg (Pmode, x);
    }
  else if (TARGET_ELF
	   && TARGET_32BIT
	   && TARGET_NO_TOC
	   && ! flag_pic
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && GET_MODE_NUNITS (mode) == 1
	   && (GET_MODE_BITSIZE (mode) <= 32
	       || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
    {
      /* 32-bit ELF without a TOC: materialize the high part of the
	 constant and address the rest with a LO_SUM.  */
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_elf_high (reg, x));
      return gen_rtx_LO_SUM (Pmode, reg, x);
    }
  else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
	   && ! flag_pic
#if TARGET_MACHO
	   && ! MACHO_DYNAMIC_NO_PIC_P
#endif
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
	   && mode != DImode
	   && mode != TImode)
    {
      /* Same high/lo_sum scheme for Darwin without a TOC.  */
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_macho_high (reg, x));
      return gen_rtx_LO_SUM (Pmode, reg, x);
    }
  else if (TARGET_TOC
	   && constant_pool_expr_p (x)
	   && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
    {
      return create_TOC_reference (x);
    }
  else
    return NULL_RTX;
}
2790 /* Construct the SYMBOL_REF for the tls_get_addr function. */
2792 static GTY(()) rtx rs6000_tls_symbol;
2793 static rtx
2794 rs6000_tls_get_addr (void)
2796 if (!rs6000_tls_symbol)
2797 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
2799 return rs6000_tls_symbol;
2802 /* Construct the SYMBOL_REF for TLS GOT references. */
2804 static GTY(()) rtx rs6000_got_symbol;
2805 static rtx
2806 rs6000_got_sym (void)
2808 if (!rs6000_got_symbol)
2810 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2811 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
2812 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
2815 return rs6000_got_symbol;
/* ADDR contains a thread-local SYMBOL_REF.  Generate code to compute
   this (thread-local) address according to MODEL and return the
   register holding the result.  */

static rtx
rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
{
  rtx dest, insn;

  dest = gen_reg_rtx (Pmode);
  if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
    {
      /* Local-exec with a 16-bit offset: a single tprel add off the
	 thread pointer (r13 on 64-bit, r2 on 32-bit).  */
      rtx tlsreg;

      if (TARGET_64BIT)
	{
	  tlsreg = gen_rtx_REG (Pmode, 13);
	  insn = gen_tls_tprel_64 (dest, tlsreg, addr);
	}
      else
	{
	  tlsreg = gen_rtx_REG (Pmode, 2);
	  insn = gen_tls_tprel_32 (dest, tlsreg, addr);
	}
      emit_insn (insn);
    }
  else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
    {
      /* Local-exec with a 32-bit offset: high-adjusted part first
	 (tprel_ha), then the low part (tprel_lo).  */
      rtx tlsreg, tmp;

      tmp = gen_reg_rtx (Pmode);
      if (TARGET_64BIT)
	{
	  tlsreg = gen_rtx_REG (Pmode, 13);
	  insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
	}
      else
	{
	  tlsreg = gen_rtx_REG (Pmode, 2);
	  insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
	}
      emit_insn (insn);
      if (TARGET_64BIT)
	insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
      else
	insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
      emit_insn (insn);
    }
  else
    {
      rtx r3, got, tga, tmp1, tmp2, eqv;

      /* The remaining models all need the GOT/TOC pointer first.  */
      if (TARGET_64BIT)
	got = gen_rtx_REG (Pmode, TOC_REGISTER);
      else
	{
	  if (flag_pic == 1)
	    got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
	  else
	    {
	      rtx gsym = rs6000_got_sym ();
	      got = gen_reg_rtx (Pmode);
	      if (flag_pic == 0)
		rs6000_emit_move (got, gsym, Pmode);
	      else
		{
		  char buf[30];
		  static int tls_got_labelno = 0;
		  rtx tempLR, lab, tmp3, mem;
		  rtx first, last;

		  /* Compute the GOT address as label + (GOT - label):
		     load the link register with the label address, add
		     the displacement fetched from memory, and wrap the
		     whole sequence as a libcall block (REG_LIBCALL /
		     REG_RETVAL notes) with a REG_EQUAL note for CSE.  */
		  ASM_GENERATE_INTERNAL_LABEL (buf, "LTLS", tls_got_labelno++);
		  lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
		  tempLR = gen_reg_rtx (Pmode);
		  tmp1 = gen_reg_rtx (Pmode);
		  tmp2 = gen_reg_rtx (Pmode);
		  tmp3 = gen_reg_rtx (Pmode);
		  mem = gen_rtx_MEM (Pmode, tmp1);
		  RTX_UNCHANGING_P (mem) = 1;

		  first = emit_insn (gen_load_toc_v4_PIC_1b (tempLR, lab,
							     gsym));
		  emit_move_insn (tmp1, tempLR);
		  emit_move_insn (tmp2, mem);
		  emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
		  last = emit_move_insn (got, tmp3);
		  REG_NOTES (last) = gen_rtx_EXPR_LIST (REG_EQUAL, gsym,
							REG_NOTES (last));
		  REG_NOTES (first) = gen_rtx_INSN_LIST (REG_LIBCALL, last,
							 REG_NOTES (first));
		  REG_NOTES (last) = gen_rtx_INSN_LIST (REG_RETVAL, first,
							REG_NOTES (last));
		}
	    }
	}

      if (model == TLS_MODEL_GLOBAL_DYNAMIC)
	{
	  /* Global-dynamic: set up r3 and call __tls_get_addr; the call
	     is marked const/pure and wrapped in a libcall block.  */
	  r3 = gen_rtx_REG (Pmode, 3);
	  if (TARGET_64BIT)
	    insn = gen_tls_gd_64 (r3, got, addr);
	  else
	    insn = gen_tls_gd_32 (r3, got, addr);
	  start_sequence ();
	  emit_insn (insn);
	  tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
	  insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
	  insn = emit_call_insn (insn);
	  CONST_OR_PURE_CALL_P (insn) = 1;
	  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
	  insn = get_insns ();
	  end_sequence ();
	  emit_libcall_block (insn, dest, r3, addr);
	}
      else if (model == TLS_MODEL_LOCAL_DYNAMIC)
	{
	  /* Local-dynamic: one __tls_get_addr call yields the module
	     base (tmp1); the variable is then at a dtprel offset from
	     it, materialized per rs6000_tls_size.  */
	  r3 = gen_rtx_REG (Pmode, 3);
	  if (TARGET_64BIT)
	    insn = gen_tls_ld_64 (r3, got);
	  else
	    insn = gen_tls_ld_32 (r3, got);
	  start_sequence ();
	  emit_insn (insn);
	  tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
	  insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
	  insn = emit_call_insn (insn);
	  CONST_OR_PURE_CALL_P (insn) = 1;
	  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
	  insn = get_insns ();
	  end_sequence ();
	  tmp1 = gen_reg_rtx (Pmode);
	  eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
				UNSPEC_TLSLD);
	  emit_libcall_block (insn, tmp1, r3, eqv);
	  if (rs6000_tls_size == 16)
	    {
	      if (TARGET_64BIT)
		insn = gen_tls_dtprel_64 (dest, tmp1, addr);
	      else
		insn = gen_tls_dtprel_32 (dest, tmp1, addr);
	    }
	  else if (rs6000_tls_size == 32)
	    {
	      tmp2 = gen_reg_rtx (Pmode);
	      if (TARGET_64BIT)
		insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
	      else
		insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
	      emit_insn (insn);
	      if (TARGET_64BIT)
		insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
	      else
		insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
	    }
	  else
	    {
	      /* Offset too large for an immediate: fetch it from the GOT.  */
	      tmp2 = gen_reg_rtx (Pmode);
	      if (TARGET_64BIT)
		insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
	      else
		insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
	      emit_insn (insn);
	      insn = gen_rtx_SET (Pmode, dest,
				  gen_rtx_PLUS (Pmode, tmp2, tmp1));
	    }
	  emit_insn (insn);
	}
      else
	{
	  /* IE, or 64 bit offset LE.  */
	  tmp2 = gen_reg_rtx (Pmode);
	  if (TARGET_64BIT)
	    insn = gen_tls_got_tprel_64 (tmp2, got, addr);
	  else
	    insn = gen_tls_got_tprel_32 (tmp2, got, addr);
	  emit_insn (insn);
	  if (TARGET_64BIT)
	    insn = gen_tls_tls_64 (dest, tmp2, addr);
	  else
	    insn = gen_tls_tls_32 (dest, tmp2, addr);
	  emit_insn (insn);
	}
    }

  return dest;
}
/* Return 1 if X is a SYMBOL_REF for a TLS symbol.  This is used in
   instruction definitions.  MODE is ignored.  */

int
rs6000_tls_symbol_ref (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return RS6000_SYMBOL_REF_TLS_P (x);
}
/* Return 1 if X contains a thread-local symbol.  */

bool
rs6000_tls_referenced_p (rtx x)
{
  /* Without TLS support no symbol can be thread-local.  */
  if (! TARGET_HAVE_TLS)
    return false;

  /* Walk every sub-rtx of X looking for a TLS SYMBOL_REF.  */
  return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
}
/* Return 1 if *X is a thread-local symbol.  This is the same as
   rs6000_tls_symbol_ref except for the type of the unused argument,
   matching the callback signature required by for_each_rtx.  */

static inline int
rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
{
  return RS6000_SYMBOL_REF_TLS_P (*x);
}
/* The convention appears to be to define this wherever it is used.
   With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
   is now used here.  */
#ifndef REG_MODE_OK_FOR_BASE_P
#define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
#endif

/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS.  Returns a value to
   replace the input X, or the original X if no replacement is called for.
   The output parameter *WIN is 1 if the calling macro should goto WIN,
   0 if it should not.

   For RS/6000, we wish to handle large displacements off a base
   register by splitting the addend across an addiu/addis and the mem insn.
   This cuts number of extra insns needed from 3 to 1.

   On Darwin, we use this to generate code for floating point constants.
   A movsf_low is generated so we wind up with 2 instructions rather than 3.
   The Darwin code is inside #if TARGET_MACHO because only then is
   machopic_function_base_name() defined.  */

rtx
rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
	int opnum, int type, int ind_levels ATTRIBUTE_UNUSED, int *win)
{
  /* We must recognize output that we have already generated ourselves:
     (plus (plus reg const) const), and reload the inner sum.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }

#if TARGET_MACHO
  if (DEFAULT_ABI == ABI_DARWIN && flag_pic
      && GET_CODE (x) == LO_SUM
      && GET_CODE (XEXP (x, 0)) == PLUS
      && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
      && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
      && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
    {
      /* Result of previous invocation of this function on Darwin
	 floating point constant.  */
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
      && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && !SPE_VECTOR_MODE (mode)
      && !ALTIVEC_VECTOR_MODE (mode))
    {
      /* Split VAL into a sign-extended 16-bit LOW and the remaining
	 HIGH part (itself sign-extended to 32 bits).  */
      HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
      HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
      HOST_WIDE_INT high
	= (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;

      /* Check for 32-bit overflow.  */
      if (high + low != val)
	{
	  *win = 0;
	  return x;
	}

      /* Reload the high part into a base reg; leave the low part
	 in the mem directly.  */

      x = gen_rtx_PLUS (GET_MODE (x),
			gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
				      GEN_INT (high)),
			GEN_INT (low));

      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#if TARGET_MACHO
  if (GET_CODE (x) == SYMBOL_REF
      && DEFAULT_ABI == ABI_DARWIN
      && !ALTIVEC_VECTOR_MODE (mode)
      && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
      /* Don't do this for TFmode, since the result isn't offsettable.  */
      && mode != TFmode)
    {
      if (flag_pic)
	{
	  /* PIC: address the symbol relative to the picbase via
	     HIGH/LO_SUM off the PIC offset table register.  */
	  rtx offset = gen_rtx_CONST (Pmode,
			 gen_rtx_MINUS (Pmode, x,
			   gen_rtx_SYMBOL_REF (Pmode,
			     machopic_function_base_name ())));
	  x = gen_rtx_LO_SUM (GET_MODE (x),
		gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
		  gen_rtx_HIGH (Pmode, offset)), offset);
	}
      else
	x = gen_rtx_LO_SUM (GET_MODE (x),
	      gen_rtx_HIGH (Pmode, x), x);

      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif
  if (TARGET_TOC
      && constant_pool_expr_p (x)
      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
    {
      (x) = create_TOC_reference (x);
      *win = 1;
      return x;
    }
  *win = 0;
  return x;
}
3167 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
3168 that is a valid memory address for an instruction.
3169 The MODE argument is the machine mode for the MEM expression
3170 that wants to use this address.
3172 On the RS/6000, there are four valid address: a SYMBOL_REF that
3173 refers to a constant pool entry of an address (or the sum of it
3174 plus a constant), a short (16-bit signed) constant plus a register,
3175 the sum of two registers, or a register indirect, possibly with an
3176 auto-increment. For DFmode and DImode with a constant plus register,
3177 we must ensure that both words are addressable or PowerPC64 with offset
3178 word aligned.
3180 For modes spanning multiple registers (DFmode in 32-bit GPRs,
3181 32-bit DImode, TImode), indexed addressing cannot be used because
3182 adjacent memory cells are accessed by adding word-sized offsets
3183 during assembly output. */
3185 rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
3187 if (RS6000_SYMBOL_REF_TLS_P (x))
3188 return 0;
3189 if (legitimate_indirect_address_p (x, reg_ok_strict))
3190 return 1;
3191 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
3192 && !ALTIVEC_VECTOR_MODE (mode)
3193 && !SPE_VECTOR_MODE (mode)
3194 && TARGET_UPDATE
3195 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
3196 return 1;
3197 if (legitimate_small_data_p (mode, x))
3198 return 1;
3199 if (legitimate_constant_pool_address_p (x))
3200 return 1;
3201 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
3202 if (! reg_ok_strict
3203 && GET_CODE (x) == PLUS
3204 && GET_CODE (XEXP (x, 0)) == REG
3205 && XEXP (x, 0) == virtual_stack_vars_rtx
3206 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3207 return 1;
3208 if (legitimate_offset_address_p (mode, x, reg_ok_strict))
3209 return 1;
3210 if (mode != TImode
3211 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3212 || TARGET_POWERPC64
3213 || (mode != DFmode && mode != TFmode))
3214 && (TARGET_POWERPC64 || mode != DImode)
3215 && legitimate_indexed_address_p (x, reg_ok_strict))
3216 return 1;
3217 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
3218 return 1;
3219 return 0;
3222 /* Go to LABEL if ADDR (a legitimate address expression)
3223 has an effect that depends on the machine mode it is used for.
3225 On the RS/6000 this is true of all integral offsets (since AltiVec
3226 modes don't allow them) or is a pre-increment or decrement.
3228 ??? Except that due to conceptual problems in offsettable_address_p
3229 we can't really report the problems of integral offsets. So leave
3230 this assuming that the adjustable offset must be valid for the
3231 sub-words of a TFmode operand, which is what we had before. */
3233 bool
3234 rs6000_mode_dependent_address (rtx addr)
3236 switch (GET_CODE (addr))
3238 case PLUS:
3239 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
3241 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
3242 return val + 12 + 0x8000 >= 0x10000;
3244 break;
3246 case LO_SUM:
3247 return true;
3249 case PRE_INC:
3250 case PRE_DEC:
3251 return TARGET_UPDATE;
3253 default:
3254 break;
3257 return false;
3260 /* Try to output insns to set TARGET equal to the constant C if it can
3261 be done in less than N insns. Do all computations in MODE.
3262 Returns the place where the output has been placed if it can be
3263 done and the insns have been emitted. If it would take more than N
3264 insns, zero is returned and no insns and emitted. */
3267 rs6000_emit_set_const (rtx dest, enum machine_mode mode,
3268 rtx source, int n ATTRIBUTE_UNUSED)
3270 rtx result, insn, set;
3271 HOST_WIDE_INT c0, c1;
3273 if (mode == QImode || mode == HImode)
3275 if (dest == NULL)
3276 dest = gen_reg_rtx (mode);
3277 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
3278 return dest;
3280 else if (mode == SImode)
3282 result = no_new_pseudos ? dest : gen_reg_rtx (SImode);
3284 emit_insn (gen_rtx_SET (VOIDmode, result,
3285 GEN_INT (INTVAL (source)
3286 & (~ (HOST_WIDE_INT) 0xffff))));
3287 emit_insn (gen_rtx_SET (VOIDmode, dest,
3288 gen_rtx_IOR (SImode, result,
3289 GEN_INT (INTVAL (source) & 0xffff))));
3290 result = dest;
3292 else if (mode == DImode)
3294 if (GET_CODE (source) == CONST_INT)
3296 c0 = INTVAL (source);
3297 c1 = -(c0 < 0);
3299 else if (GET_CODE (source) == CONST_DOUBLE)
3301 #if HOST_BITS_PER_WIDE_INT >= 64
3302 c0 = CONST_DOUBLE_LOW (source);
3303 c1 = -(c0 < 0);
3304 #else
3305 c0 = CONST_DOUBLE_LOW (source);
3306 c1 = CONST_DOUBLE_HIGH (source);
3307 #endif
3309 else
3310 abort ();
3312 result = rs6000_emit_set_long_const (dest, c0, c1);
3314 else
3315 abort ();
3317 insn = get_last_insn ();
3318 set = single_set (insn);
3319 if (! CONSTANT_P (SET_SRC (set)))
3320 set_unique_reg_note (insn, REG_EQUAL, source);
3322 return result;
3325 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
3326 fall back to a straight forward decomposition. We do this to avoid
3327 exponential run times encountered when looking for longer sequences
3328 with rs6000_emit_set_const. */
3329 static rtx
3330 rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
3332 if (!TARGET_POWERPC64)
3334 rtx operand1, operand2;
3336 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
3337 DImode);
3338 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
3339 DImode);
3340 emit_move_insn (operand1, GEN_INT (c1));
3341 emit_move_insn (operand2, GEN_INT (c2));
3343 else
3345 HOST_WIDE_INT ud1, ud2, ud3, ud4;
3347 ud1 = c1 & 0xffff;
3348 ud2 = (c1 & 0xffff0000) >> 16;
3349 #if HOST_BITS_PER_WIDE_INT >= 64
3350 c2 = c1 >> 32;
3351 #endif
3352 ud3 = c2 & 0xffff;
3353 ud4 = (c2 & 0xffff0000) >> 16;
3355 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
3356 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
3358 if (ud1 & 0x8000)
3359 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
3360 else
3361 emit_move_insn (dest, GEN_INT (ud1));
3364 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
3365 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
3367 if (ud2 & 0x8000)
3368 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
3369 - 0x80000000));
3370 else
3371 emit_move_insn (dest, GEN_INT (ud2 << 16));
3372 if (ud1 != 0)
3373 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3375 else if ((ud4 == 0xffff && (ud3 & 0x8000))
3376 || (ud4 == 0 && ! (ud3 & 0x8000)))
3378 if (ud3 & 0x8000)
3379 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
3380 - 0x80000000));
3381 else
3382 emit_move_insn (dest, GEN_INT (ud3 << 16));
3384 if (ud2 != 0)
3385 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
3386 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
3387 if (ud1 != 0)
3388 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3390 else
3392 if (ud4 & 0x8000)
3393 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
3394 - 0x80000000));
3395 else
3396 emit_move_insn (dest, GEN_INT (ud4 << 16));
3398 if (ud3 != 0)
3399 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
3401 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
3402 if (ud2 != 0)
3403 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
3404 GEN_INT (ud2 << 16)));
3405 if (ud1 != 0)
3406 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3409 return dest;
3412 /* Emit a move from SOURCE to DEST in mode MODE. */
3413 void
3414 rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
3416 rtx operands[2];
3417 operands[0] = dest;
3418 operands[1] = source;
3420 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
3421 if (GET_CODE (operands[1]) == CONST_DOUBLE
3422 && ! FLOAT_MODE_P (mode)
3423 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3425 /* FIXME. This should never happen. */
3426 /* Since it seems that it does, do the safe thing and convert
3427 to a CONST_INT. */
3428 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
3430 if (GET_CODE (operands[1]) == CONST_DOUBLE
3431 && ! FLOAT_MODE_P (mode)
3432 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
3433 && CONST_DOUBLE_LOW (operands[1]) >= 0)
3434 || (CONST_DOUBLE_HIGH (operands[1]) == -1
3435 && CONST_DOUBLE_LOW (operands[1]) < 0)))
3436 abort ();
3438 /* Check if GCC is setting up a block move that will end up using FP
3439 registers as temporaries. We must make sure this is acceptable. */
3440 if (GET_CODE (operands[0]) == MEM
3441 && GET_CODE (operands[1]) == MEM
3442 && mode == DImode
3443 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
3444 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
3445 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
3446 ? 32 : MEM_ALIGN (operands[0])))
3447 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
3448 ? 32
3449 : MEM_ALIGN (operands[1]))))
3450 && ! MEM_VOLATILE_P (operands [0])
3451 && ! MEM_VOLATILE_P (operands [1]))
3453 emit_move_insn (adjust_address (operands[0], SImode, 0),
3454 adjust_address (operands[1], SImode, 0));
3455 emit_move_insn (adjust_address (operands[0], SImode, 4),
3456 adjust_address (operands[1], SImode, 4));
3457 return;
3460 if (!no_new_pseudos)
3462 if (GET_CODE (operands[1]) == MEM && optimize > 0
3463 && (mode == QImode || mode == HImode || mode == SImode)
3464 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
3466 rtx reg = gen_reg_rtx (word_mode);
3468 emit_insn (gen_rtx_SET (word_mode, reg,
3469 gen_rtx_ZERO_EXTEND (word_mode,
3470 operands[1])));
3471 operands[1] = gen_lowpart (mode, reg);
3473 if (GET_CODE (operands[0]) != REG)
3474 operands[1] = force_reg (mode, operands[1]);
3477 if (mode == SFmode && ! TARGET_POWERPC
3478 && TARGET_HARD_FLOAT && TARGET_FPRS
3479 && GET_CODE (operands[0]) == MEM)
3481 int regnum;
3483 if (reload_in_progress || reload_completed)
3484 regnum = true_regnum (operands[1]);
3485 else if (GET_CODE (operands[1]) == REG)
3486 regnum = REGNO (operands[1]);
3487 else
3488 regnum = -1;
3490 /* If operands[1] is a register, on POWER it may have
3491 double-precision data in it, so truncate it to single
3492 precision. */
3493 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
3495 rtx newreg;
3496 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
3497 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
3498 operands[1] = newreg;
3502 /* Recognize the case where operand[1] is a reference to thread-local
3503 data and load its address to a register. */
3504 if (GET_CODE (operands[1]) == SYMBOL_REF)
3506 enum tls_model model = SYMBOL_REF_TLS_MODEL (operands[1]);
3507 if (model != 0)
3508 operands[1] = rs6000_legitimize_tls_address (operands[1], model);
3511 /* Handle the case where reload calls us with an invalid address. */
3512 if (reload_in_progress && mode == Pmode
3513 && (! general_operand (operands[1], mode)
3514 || ! nonimmediate_operand (operands[0], mode)))
3515 goto emit_set;
3517 /* Handle the case of CONSTANT_P_RTX. */
3518 if (GET_CODE (operands[1]) == CONSTANT_P_RTX)
3519 goto emit_set;
3521 /* 128-bit constant floating-point values on Darwin should really be
3522 loaded as two parts. */
3523 if ((DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
3524 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128
3525 && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
3527 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
3528 know how to get a DFmode SUBREG of a TFmode. */
3529 rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode, 0),
3530 simplify_gen_subreg (DImode, operands[1], mode, 0),
3531 DImode);
3532 rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode,
3533 GET_MODE_SIZE (DImode)),
3534 simplify_gen_subreg (DImode, operands[1], mode,
3535 GET_MODE_SIZE (DImode)),
3536 DImode);
3537 return;
3540 /* FIXME: In the long term, this switch statement should go away
3541 and be replaced by a sequence of tests based on things like
3542 mode == Pmode. */
3543 switch (mode)
3545 case HImode:
3546 case QImode:
3547 if (CONSTANT_P (operands[1])
3548 && GET_CODE (operands[1]) != CONST_INT)
3549 operands[1] = force_const_mem (mode, operands[1]);
3550 break;
3552 case TFmode:
3553 case DFmode:
3554 case SFmode:
3555 if (CONSTANT_P (operands[1])
3556 && ! easy_fp_constant (operands[1], mode))
3557 operands[1] = force_const_mem (mode, operands[1]);
3558 break;
3560 case V16QImode:
3561 case V8HImode:
3562 case V4SFmode:
3563 case V4SImode:
3564 case V4HImode:
3565 case V2SFmode:
3566 case V2SImode:
3567 case V1DImode:
3568 if (CONSTANT_P (operands[1])
3569 && !easy_vector_constant (operands[1], mode))
3570 operands[1] = force_const_mem (mode, operands[1]);
3571 break;
3573 case SImode:
3574 case DImode:
3575 /* Use default pattern for address of ELF small data */
3576 if (TARGET_ELF
3577 && mode == Pmode
3578 && DEFAULT_ABI == ABI_V4
3579 && (GET_CODE (operands[1]) == SYMBOL_REF
3580 || GET_CODE (operands[1]) == CONST)
3581 && small_data_operand (operands[1], mode))
3583 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3584 return;
3587 if (DEFAULT_ABI == ABI_V4
3588 && mode == Pmode && mode == SImode
3589 && flag_pic == 1 && got_operand (operands[1], mode))
3591 emit_insn (gen_movsi_got (operands[0], operands[1]));
3592 return;
3595 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
3596 && TARGET_NO_TOC
3597 && ! flag_pic
3598 && mode == Pmode
3599 && CONSTANT_P (operands[1])
3600 && GET_CODE (operands[1]) != HIGH
3601 && GET_CODE (operands[1]) != CONST_INT)
3603 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
3605 /* If this is a function address on -mcall-aixdesc,
3606 convert it to the address of the descriptor. */
3607 if (DEFAULT_ABI == ABI_AIX
3608 && GET_CODE (operands[1]) == SYMBOL_REF
3609 && XSTR (operands[1], 0)[0] == '.')
3611 const char *name = XSTR (operands[1], 0);
3612 rtx new_ref;
3613 while (*name == '.')
3614 name++;
3615 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
3616 CONSTANT_POOL_ADDRESS_P (new_ref)
3617 = CONSTANT_POOL_ADDRESS_P (operands[1]);
3618 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
3619 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
3620 SYMBOL_REF_DECL (new_ref) = SYMBOL_REF_DECL (operands[1]);
3621 operands[1] = new_ref;
3624 if (DEFAULT_ABI == ABI_DARWIN)
3626 #if TARGET_MACHO
3627 if (MACHO_DYNAMIC_NO_PIC_P)
3629 /* Take care of any required data indirection. */
3630 operands[1] = rs6000_machopic_legitimize_pic_address (
3631 operands[1], mode, operands[0]);
3632 if (operands[0] != operands[1])
3633 emit_insn (gen_rtx_SET (VOIDmode,
3634 operands[0], operands[1]));
3635 return;
3637 #endif
3638 emit_insn (gen_macho_high (target, operands[1]));
3639 emit_insn (gen_macho_low (operands[0], target, operands[1]));
3640 return;
3643 emit_insn (gen_elf_high (target, operands[1]));
3644 emit_insn (gen_elf_low (operands[0], target, operands[1]));
3645 return;
3648 /* If this is a SYMBOL_REF that refers to a constant pool entry,
3649 and we have put it in the TOC, we just need to make a TOC-relative
3650 reference to it. */
3651 if (TARGET_TOC
3652 && GET_CODE (operands[1]) == SYMBOL_REF
3653 && constant_pool_expr_p (operands[1])
3654 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
3655 get_pool_mode (operands[1])))
3657 operands[1] = create_TOC_reference (operands[1]);
3659 else if (mode == Pmode
3660 && CONSTANT_P (operands[1])
3661 && ((GET_CODE (operands[1]) != CONST_INT
3662 && ! easy_fp_constant (operands[1], mode))
3663 || (GET_CODE (operands[1]) == CONST_INT
3664 && num_insns_constant (operands[1], mode) > 2)
3665 || (GET_CODE (operands[0]) == REG
3666 && FP_REGNO_P (REGNO (operands[0]))))
3667 && GET_CODE (operands[1]) != HIGH
3668 && ! legitimate_constant_pool_address_p (operands[1])
3669 && ! toc_relative_expr_p (operands[1]))
3671 /* Emit a USE operation so that the constant isn't deleted if
3672 expensive optimizations are turned on because nobody
3673 references it. This should only be done for operands that
3674 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
3675 This should not be done for operands that contain LABEL_REFs.
3676 For now, we just handle the obvious case. */
3677 if (GET_CODE (operands[1]) != LABEL_REF)
3678 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
3680 #if TARGET_MACHO
3681 /* Darwin uses a special PIC legitimizer. */
3682 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
3684 operands[1] =
3685 rs6000_machopic_legitimize_pic_address (operands[1], mode,
3686 operands[0]);
3687 if (operands[0] != operands[1])
3688 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3689 return;
3691 #endif
3693 /* If we are to limit the number of things we put in the TOC and
3694 this is a symbol plus a constant we can add in one insn,
3695 just put the symbol in the TOC and add the constant. Don't do
3696 this if reload is in progress. */
3697 if (GET_CODE (operands[1]) == CONST
3698 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
3699 && GET_CODE (XEXP (operands[1], 0)) == PLUS
3700 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
3701 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
3702 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
3703 && ! side_effects_p (operands[0]))
3705 rtx sym =
3706 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
3707 rtx other = XEXP (XEXP (operands[1], 0), 1);
3709 sym = force_reg (mode, sym);
3710 if (mode == SImode)
3711 emit_insn (gen_addsi3 (operands[0], sym, other));
3712 else
3713 emit_insn (gen_adddi3 (operands[0], sym, other));
3714 return;
3717 operands[1] = force_const_mem (mode, operands[1]);
3719 if (TARGET_TOC
3720 && constant_pool_expr_p (XEXP (operands[1], 0))
3721 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
3722 get_pool_constant (XEXP (operands[1], 0)),
3723 get_pool_mode (XEXP (operands[1], 0))))
3725 operands[1]
3726 = gen_rtx_MEM (mode,
3727 create_TOC_reference (XEXP (operands[1], 0)));
3728 set_mem_alias_set (operands[1], get_TOC_alias_set ());
3729 RTX_UNCHANGING_P (operands[1]) = 1;
3732 break;
3734 case TImode:
3735 if (GET_CODE (operands[0]) == MEM
3736 && GET_CODE (XEXP (operands[0], 0)) != REG
3737 && ! reload_in_progress)
3738 operands[0]
3739 = replace_equiv_address (operands[0],
3740 copy_addr_to_reg (XEXP (operands[0], 0)));
3742 if (GET_CODE (operands[1]) == MEM
3743 && GET_CODE (XEXP (operands[1], 0)) != REG
3744 && ! reload_in_progress)
3745 operands[1]
3746 = replace_equiv_address (operands[1],
3747 copy_addr_to_reg (XEXP (operands[1], 0)));
3748 if (TARGET_POWER)
3750 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3751 gen_rtvec (2,
3752 gen_rtx_SET (VOIDmode,
3753 operands[0], operands[1]),
3754 gen_rtx_CLOBBER (VOIDmode,
3755 gen_rtx_SCRATCH (SImode)))));
3756 return;
3758 break;
3760 default:
3761 abort ();
3764 /* Above, we may have called force_const_mem which may have returned
3765 an invalid address. If we can, fix this up; otherwise, reload will
3766 have to deal with it. */
3767 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
3768 operands[1] = validize_mem (operands[1]);
3770 emit_set:
3771 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
/* Nonzero if we can use a floating-point register to pass this arg.  */
#define USE_FP_FOR_ARG_P(CUM,MODE,TYPE)		\
  (GET_MODE_CLASS (MODE) == MODE_FLOAT		\
   && (CUM)->fregno <= FP_ARG_MAX_REG		\
   && TARGET_HARD_FLOAT && TARGET_FPRS)

/* Nonzero if we can use an AltiVec register to pass this arg.  */
#define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED)	\
  (ALTIVEC_VECTOR_MODE (MODE)				\
   && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG		\
   && TARGET_ALTIVEC_ABI				\
   && (NAMED))
3787 /* Return a nonzero value to say to return the function value in
3788 memory, just as large structures are always returned. TYPE will be
3789 the data type of the value, and FNTYPE will be the type of the
3790 function doing the returning, or @code{NULL} for libcalls.
3792 The AIX ABI for the RS/6000 specifies that all structures are
3793 returned in memory. The Darwin ABI does the same. The SVR4 ABI
3794 specifies that structures <= 8 bytes are returned in r3/r4, but a
3795 draft put them in memory, and GCC used to implement the draft
3796 instead of the final standard. Therefore, TARGET_AIX_STRUCT_RET
3797 controls this instead of DEFAULT_ABI; V.4 targets needing backward
3798 compatibility can change DRAFT_V4_STRUCT_RET to override the
3799 default, and -m switches get the final word. See
3800 rs6000_override_options for more details.
3802 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
3803 long double support is enabled. These values are returned in memory.
3805 int_size_in_bytes returns -1 for variable size objects, which go in
3806 memory always. The cast to unsigned makes -1 > 8. */
3808 static bool
3809 rs6000_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
3811 if (AGGREGATE_TYPE_P (type)
3812 && (TARGET_AIX_STRUCT_RET
3813 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
3814 return true;
3815 if (DEFAULT_ABI == ABI_V4 && TYPE_MODE (type) == TFmode)
3816 return true;
3817 return false;
3820 /* Initialize a variable CUM of type CUMULATIVE_ARGS
3821 for a call to a function whose data type is FNTYPE.
3822 For a library call, FNTYPE is 0.
3824 For incoming args we set the number of arguments in the prototype large
3825 so we never return a PARALLEL. */
3827 void
3828 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
3829 rtx libname ATTRIBUTE_UNUSED, int incoming,
3830 int libcall, int n_named_args)
3832 static CUMULATIVE_ARGS zero_cumulative;
3834 *cum = zero_cumulative;
3835 cum->words = 0;
3836 cum->fregno = FP_ARG_MIN_REG;
3837 cum->vregno = ALTIVEC_ARG_MIN_REG;
3838 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
3839 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
3840 ? CALL_LIBCALL : CALL_NORMAL);
3841 cum->sysv_gregno = GP_ARG_MIN_REG;
3842 cum->stdarg = fntype
3843 && (TYPE_ARG_TYPES (fntype) != 0
3844 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3845 != void_type_node));
3847 cum->nargs_prototype = 0;
3848 if (incoming || cum->prototype)
3849 cum->nargs_prototype = n_named_args;
3851 /* Check for a longcall attribute. */
3852 if (fntype
3853 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
3854 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
3855 cum->call_cookie = CALL_LONG;
3857 if (TARGET_DEBUG_ARG)
3859 fprintf (stderr, "\ninit_cumulative_args:");
3860 if (fntype)
3862 tree ret_type = TREE_TYPE (fntype);
3863 fprintf (stderr, " ret code = %s,",
3864 tree_code_name[ (int)TREE_CODE (ret_type) ]);
3867 if (cum->call_cookie & CALL_LONG)
3868 fprintf (stderr, " longcall,");
3870 fprintf (stderr, " proto = %d, nargs = %d\n",
3871 cum->prototype, cum->nargs_prototype);
3874 if (fntype
3875 && !TARGET_ALTIVEC
3876 && TARGET_ALTIVEC_ABI
3877 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
3879 error ("Cannot return value in vector register because"
3880 " altivec instructions are disabled, use -maltivec"
3881 " to enable them.");
3885 /* If defined, a C expression which determines whether, and in which
3886 direction, to pad out an argument with extra space. The value
3887 should be of type `enum direction': either `upward' to pad above
3888 the argument, `downward' to pad below, or `none' to inhibit
3889 padding.
3891 For the AIX ABI structs are always stored left shifted in their
3892 argument slot. */
3894 enum direction
3895 function_arg_padding (enum machine_mode mode, tree type)
3897 #ifndef AGGREGATE_PADDING_FIXED
3898 #define AGGREGATE_PADDING_FIXED 0
3899 #endif
3900 #ifndef AGGREGATES_PAD_UPWARD_ALWAYS
3901 #define AGGREGATES_PAD_UPWARD_ALWAYS 0
3902 #endif
3904 if (!AGGREGATE_PADDING_FIXED)
3906 /* GCC used to pass structures of the same size as integer types as
3907 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
3908 ie. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
3909 passed padded downward, except that -mstrict-align further
3910 muddied the water in that multi-component structures of 2 and 4
3911 bytes in size were passed padded upward.
3913 The following arranges for best compatibility with previous
3914 versions of gcc, but removes the -mstrict-align dependency. */
3915 if (BYTES_BIG_ENDIAN)
3917 HOST_WIDE_INT size = 0;
3919 if (mode == BLKmode)
3921 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
3922 size = int_size_in_bytes (type);
3924 else
3925 size = GET_MODE_SIZE (mode);
3927 if (size == 1 || size == 2 || size == 4)
3928 return downward;
3930 return upward;
3933 if (AGGREGATES_PAD_UPWARD_ALWAYS)
3935 if (type != 0 && AGGREGATE_TYPE_P (type))
3936 return upward;
3939 /* Fall back to the default. */
3940 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
3943 /* If defined, a C expression that gives the alignment boundary, in bits,
3944 of an argument with the specified mode and type. If it is not defined,
3945 PARM_BOUNDARY is used for all arguments.
3947 V.4 wants long longs to be double word aligned. */
3950 function_arg_boundary (enum machine_mode mode, tree type ATTRIBUTE_UNUSED)
3952 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
3953 return 64;
3954 else if (SPE_VECTOR_MODE (mode))
3955 return 64;
3956 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3957 return 128;
3958 else
3959 return PARM_BOUNDARY;
3962 /* Update the data in CUM to advance over an argument
3963 of mode MODE and data type TYPE.
3964 (TYPE is null for libcalls where that information may not be available.) */
3966 void
3967 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
3968 tree type, int named)
3970 cum->nargs_prototype--;
3972 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3974 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
3976 cum->vregno++;
3977 if (!TARGET_ALTIVEC)
3978 error ("Cannot pass argument in vector register because"
3979 " altivec instructions are disabled, use -maltivec"
3980 " to enable them.");
3982 /* PowerPC64 Linux and AIX allocates GPRs for a vector argument
3983 even if it is going to be passed in a vector register.
3984 Darwin does the same for variable-argument functions. */
3985 if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
3986 || (cum->stdarg && DEFAULT_ABI != ABI_V4))
3988 int align;
3990 /* Vector parameters must be 16-byte aligned. This places
3991 them at 2 mod 4 in terms of words in 32-bit mode, since
3992 the parameter save area starts at offset 24 from the
3993 stack. In 64-bit mode, they just have to start on an
3994 even word, since the parameter save area is 16-byte
3995 aligned. Space for GPRs is reserved even if the argument
3996 will be passed in memory. */
3997 if (TARGET_32BIT)
3998 align = ((6 - (cum->words & 3)) & 3);
3999 else
4000 align = cum->words & 1;
4001 cum->words += align + RS6000_ARG_SIZE (mode, type);
4003 if (TARGET_DEBUG_ARG)
4005 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
4006 cum->words, align);
4007 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
4008 cum->nargs_prototype, cum->prototype,
4009 GET_MODE_NAME (mode));
4013 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
4014 && !cum->stdarg
4015 && cum->sysv_gregno <= GP_ARG_MAX_REG)
4016 cum->sysv_gregno++;
4017 else if (DEFAULT_ABI == ABI_V4)
4019 if (TARGET_HARD_FLOAT && TARGET_FPRS
4020 && (mode == SFmode || mode == DFmode))
4022 if (cum->fregno <= FP_ARG_V4_MAX_REG)
4023 cum->fregno++;
4024 else
4026 if (mode == DFmode)
4027 cum->words += cum->words & 1;
4028 cum->words += RS6000_ARG_SIZE (mode, type);
4031 else
4033 int n_words;
4034 int gregno = cum->sysv_gregno;
4036 /* Aggregates and IEEE quad get passed by reference. */
4037 if ((type && AGGREGATE_TYPE_P (type))
4038 || mode == TFmode)
4039 n_words = 1;
4040 else
4041 n_words = RS6000_ARG_SIZE (mode, type);
4043 /* Long long and SPE vectors are put in odd registers. */
4044 if (n_words == 2 && (gregno & 1) == 0)
4045 gregno += 1;
4047 /* Long long and SPE vectors are not split between registers
4048 and stack. */
4049 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
4051 /* Long long is aligned on the stack. */
4052 if (n_words == 2)
4053 cum->words += cum->words & 1;
4054 cum->words += n_words;
4057 /* Note: continuing to accumulate gregno past when we've started
4058 spilling to the stack indicates the fact that we've started
4059 spilling to the stack to expand_builtin_saveregs. */
4060 cum->sysv_gregno = gregno + n_words;
4063 if (TARGET_DEBUG_ARG)
4065 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4066 cum->words, cum->fregno);
4067 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
4068 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
4069 fprintf (stderr, "mode = %4s, named = %d\n",
4070 GET_MODE_NAME (mode), named);
4073 else
4075 int align = (TARGET_32BIT && (cum->words & 1) != 0
4076 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
4078 cum->words += align + RS6000_ARG_SIZE (mode, type);
4080 if (GET_MODE_CLASS (mode) == MODE_FLOAT
4081 && TARGET_HARD_FLOAT && TARGET_FPRS)
4082 cum->fregno += (mode == TFmode ? 2 : 1);
4084 if (TARGET_DEBUG_ARG)
4086 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4087 cum->words, cum->fregno);
4088 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
4089 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
4090 fprintf (stderr, "named = %d, align = %d\n", named, align);
4095 /* Determine where to put a SIMD argument on the SPE. */
4097 static rtx
4098 rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4099 tree type)
4101 if (cum->stdarg)
4103 int gregno = cum->sysv_gregno;
4104 int n_words = RS6000_ARG_SIZE (mode, type);
4106 /* SPE vectors are put in odd registers. */
4107 if (n_words == 2 && (gregno & 1) == 0)
4108 gregno += 1;
4110 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
4112 rtx r1, r2;
4113 enum machine_mode m = SImode;
4115 r1 = gen_rtx_REG (m, gregno);
4116 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
4117 r2 = gen_rtx_REG (m, gregno + 1);
4118 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
4119 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
4121 else
4122 return NULL_RTX;
4124 else
4126 if (cum->sysv_gregno <= GP_ARG_MAX_REG)
4127 return gen_rtx_REG (mode, cum->sysv_gregno);
4128 else
4129 return NULL_RTX;
4133 /* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
4135 static rtx
4136 rs6000_mixed_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4137 tree type, int align_words)
4139 if (mode == DFmode)
4141 /* -mpowerpc64 with 32bit ABI splits up a DFmode argument
4142 in vararg list into zero, one or two GPRs */
4143 if (align_words >= GP_ARG_NUM_REG)
4144 return gen_rtx_PARALLEL (DFmode,
4145 gen_rtvec (2,
4146 gen_rtx_EXPR_LIST (VOIDmode,
4147 NULL_RTX, const0_rtx),
4148 gen_rtx_EXPR_LIST (VOIDmode,
4149 gen_rtx_REG (mode,
4150 cum->fregno),
4151 const0_rtx)));
4152 else if (align_words + RS6000_ARG_SIZE (mode, type)
4153 > GP_ARG_NUM_REG)
4154 /* If this is partially on the stack, then we only
4155 include the portion actually in registers here. */
4156 return gen_rtx_PARALLEL (DFmode,
4157 gen_rtvec (2,
4158 gen_rtx_EXPR_LIST (VOIDmode,
4159 gen_rtx_REG (SImode,
4160 GP_ARG_MIN_REG
4161 + align_words),
4162 const0_rtx),
4163 gen_rtx_EXPR_LIST (VOIDmode,
4164 gen_rtx_REG (mode,
4165 cum->fregno),
4166 const0_rtx)));
4168 /* split a DFmode arg into two GPRs */
4169 return gen_rtx_PARALLEL (DFmode,
4170 gen_rtvec (3,
4171 gen_rtx_EXPR_LIST (VOIDmode,
4172 gen_rtx_REG (SImode,
4173 GP_ARG_MIN_REG
4174 + align_words),
4175 const0_rtx),
4176 gen_rtx_EXPR_LIST (VOIDmode,
4177 gen_rtx_REG (SImode,
4178 GP_ARG_MIN_REG
4179 + align_words + 1),
4180 GEN_INT (4)),
4181 gen_rtx_EXPR_LIST (VOIDmode,
4182 gen_rtx_REG (mode, cum->fregno),
4183 const0_rtx)));
4185 /* -mpowerpc64 with 32bit ABI splits up a DImode argument into one
4186 or two GPRs */
4187 else if (mode == DImode)
4189 if (align_words < GP_ARG_NUM_REG - 1)
4190 return gen_rtx_PARALLEL (DImode,
4191 gen_rtvec (2,
4192 gen_rtx_EXPR_LIST (VOIDmode,
4193 gen_rtx_REG (SImode,
4194 GP_ARG_MIN_REG
4195 + align_words),
4196 const0_rtx),
4197 gen_rtx_EXPR_LIST (VOIDmode,
4198 gen_rtx_REG (SImode,
4199 GP_ARG_MIN_REG
4200 + align_words + 1),
4201 GEN_INT (4))));
4202 else if (align_words == GP_ARG_NUM_REG - 1)
4203 return gen_rtx_PARALLEL (DImode,
4204 gen_rtvec (2,
4205 gen_rtx_EXPR_LIST (VOIDmode,
4206 NULL_RTX, const0_rtx),
4207 gen_rtx_EXPR_LIST (VOIDmode,
4208 gen_rtx_REG (SImode,
4209 GP_ARG_MIN_REG
4210 + align_words),
4211 const0_rtx)));
4213 else if (mode == BLKmode && align_words <= (GP_ARG_NUM_REG - 1))
4215 int k;
4216 int size = int_size_in_bytes (type);
4217 int no_units = ((size - 1) / 4) + 1;
4218 int max_no_words = GP_ARG_NUM_REG - align_words;
4219 int rtlvec_len = no_units < max_no_words ? no_units : max_no_words;
4220 rtx *rtlvec = (rtx *) alloca (rtlvec_len * sizeof (rtx));
4222 memset ((char *) rtlvec, 0, rtlvec_len * sizeof (rtx));
4224 for (k=0; k < rtlvec_len; k++)
4225 rtlvec[k] = gen_rtx_EXPR_LIST (VOIDmode,
4226 gen_rtx_REG (SImode,
4227 GP_ARG_MIN_REG
4228 + align_words + k),
4229 k == 0 ? const0_rtx : GEN_INT (k*4));
4231 return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (k, rtlvec));
4234 return NULL_RTX;
4237 /* Determine where to put an argument to a function.
4238 Value is zero to push the argument on the stack,
4239 or a hard register in which to store the argument.
4241 MODE is the argument's machine mode.
4242 TYPE is the data type of the argument (as a tree).
4243 This is null for libcalls where that information may
4244 not be available.
4245 CUM is a variable of type CUMULATIVE_ARGS which gives info about
4246 the preceding args and about the function being called.
4247 NAMED is nonzero if this argument is a named parameter
4248 (otherwise it is an extra parameter matching an ellipsis).
4250 On RS/6000 the first eight words of non-FP are normally in registers
4251 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
4252 Under V.4, the first 8 FP args are in registers.
4254 If this is floating-point and no prototype is specified, we use
4255 both an FP and integer register (or possibly FP reg and stack). Library
4256 functions (when CALL_LIBCALL is set) always have the proper types for args,
4257 so we can pass the FP value just in one register. emit_library_function
4258 doesn't support PARALLEL anyway. */
4260 struct rtx_def *
4261 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4262 tree type, int named)
4264 enum rs6000_abi abi = DEFAULT_ABI;
4266 /* Return a marker to indicate whether CR1 needs to set or clear the
4267 bit that V.4 uses to say fp args were passed in registers.
4268 Assume that we don't need the marker for software floating point,
4269 or compiler generated library calls. */
4270 if (mode == VOIDmode)
4272 if (abi == ABI_V4
4273 && cum->nargs_prototype < 0
4274 && (cum->call_cookie & CALL_LIBCALL) == 0
4275 && (cum->prototype || TARGET_NO_PROTOTYPE))
4277 /* For the SPE, we need to crxor CR6 always. */
4278 if (TARGET_SPE_ABI)
4279 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS)4
4280 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
4281 return GEN_INT (cum->call_cookie
4282 | ((cum->fregno == FP_ARG_MIN_REG)
4283 ? CALL_V4_SET_FP_ARGS
4284 : CALL_V4_CLEAR_FP_ARGS));
4287 return GEN_INT (cum->call_cookie);
/* AltiVec vector arguments.  */
4290 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
4291 if (TARGET_64BIT && ! cum->prototype)
4293 /* Vector parameters get passed in vector register
4294 and also in GPRs or memory, in absence of prototype. */
4295 int align_words;
4296 rtx slot;
4297 align_words = (cum->words + 1) & ~1;
4299 if (align_words >= GP_ARG_NUM_REG)
4301 slot = NULL_RTX;
4303 else
4305 slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4307 return gen_rtx_PARALLEL (mode,
4308 gen_rtvec (2,
4309 gen_rtx_EXPR_LIST (VOIDmode,
4310 slot, const0_rtx),
4311 gen_rtx_EXPR_LIST (VOIDmode,
4312 gen_rtx_REG (mode, cum->vregno),
4313 const0_rtx)));
4315 else
4316 return gen_rtx_REG (mode, cum->vregno);
4317 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4319 if (named || abi == ABI_V4)
4320 return NULL_RTX;
4321 else
4323 /* Vector parameters to varargs functions under AIX or Darwin
4324 get passed in memory and possibly also in GPRs. */
4325 int align, align_words;
4326 enum machine_mode part_mode = mode;
4328 /* Vector parameters must be 16-byte aligned. This places them at
4329 2 mod 4 in terms of words in 32-bit mode, since the parameter
4330 save area starts at offset 24 from the stack. In 64-bit mode,
4331 they just have to start on an even word, since the parameter
4332 save area is 16-byte aligned. */
4333 if (TARGET_32BIT)
4334 align = ((6 - (cum->words & 3)) & 3);
4335 else
4336 align = cum->words & 1;
4337 align_words = cum->words + align;
4339 /* Out of registers? Memory, then. */
4340 if (align_words >= GP_ARG_NUM_REG)
4341 return NULL_RTX;
4343 /* The vector value goes in GPRs. Only the part of the
4344 value in GPRs is reported here. */
4345 if (align_words + CLASS_MAX_NREGS (mode, GENERAL_REGS)
4346 > GP_ARG_NUM_REG)
4347 /* Fortunately, there are only two possibilities, the value
4348 is either wholly in GPRs or half in GPRs and half not. */
4349 part_mode = DImode;
4351 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
/* SPE vectors have their own placement rules.  */
4354 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode))
4355 return rs6000_spe_function_arg (cum, mode, type);
4356 else if (abi == ABI_V4)
4358 if (TARGET_HARD_FLOAT && TARGET_FPRS
4359 && (mode == SFmode || mode == DFmode))
4361 if (cum->fregno <= FP_ARG_V4_MAX_REG)
4362 return gen_rtx_REG (mode, cum->fregno);
4363 else
4364 return NULL_RTX;
4366 else
4368 int n_words;
4369 int gregno = cum->sysv_gregno;
4371 /* Aggregates and IEEE quad get passed by reference. */
4372 if ((type && AGGREGATE_TYPE_P (type))
4373 || mode == TFmode)
4374 n_words = 1;
4375 else
4376 n_words = RS6000_ARG_SIZE (mode, type);
4378 /* Long long and SPE vectors are put in odd registers. */
4379 if (n_words == 2 && (gregno & 1) == 0)
4380 gregno += 1;
4382 /* Long long do not split between registers and stack. */
4383 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
4384 return gen_rtx_REG (mode, gregno);
4385 else
4386 return NULL_RTX;
/* All remaining ABIs (not V.4): AIX/Darwin-style placement.  */
4389 else
4391 int align = (TARGET_32BIT && (cum->words & 1) != 0
4392 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
4393 int align_words = cum->words + align;
4395 if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4396 return NULL_RTX;
4398 if (TARGET_32BIT && TARGET_POWERPC64
4399 && (mode == DImode || mode == BLKmode))
4400 return rs6000_mixed_function_arg (cum, mode, type, align_words);
/* FP candidate: may be passed in an FPR alone, or (without a
   prototype) shadowed by GPRs/stack via the PARALLEL below.  */
4402 if (USE_FP_FOR_ARG_P (cum, mode, type))
4404 if (! type
4405 || ((cum->nargs_prototype > 0)
4406 /* IBM AIX extended its linkage convention definition always
4407 to require FP args after register save area hole on the
4408 stack. */
4409 && (DEFAULT_ABI != ABI_AIX
4410 || ! TARGET_XL_CALL
4411 || (align_words < GP_ARG_NUM_REG))))
4412 return gen_rtx_REG (mode, cum->fregno);
4414 if (TARGET_32BIT && TARGET_POWERPC64
4415 && mode == DFmode && cum->stdarg)
4416 return rs6000_mixed_function_arg (cum, mode, type, align_words);
4418 return gen_rtx_PARALLEL (mode,
4419 gen_rtvec (2,
4420 gen_rtx_EXPR_LIST (VOIDmode,
4421 ((align_words >= GP_ARG_NUM_REG)
4422 ? NULL_RTX
4423 : (align_words
4424 + RS6000_ARG_SIZE (mode, type)
4425 > GP_ARG_NUM_REG
4426 /* If this is partially on the stack, then
4427 we only include the portion actually
4428 in registers here. */
4429 ? gen_rtx_REG (Pmode,
4430 GP_ARG_MIN_REG + align_words)
4431 : gen_rtx_REG (mode,
4432 GP_ARG_MIN_REG + align_words))),
4433 const0_rtx),
4434 gen_rtx_EXPR_LIST (VOIDmode,
4435 gen_rtx_REG (mode, cum->fregno),
4436 const0_rtx)));
4438 else if (align_words < GP_ARG_NUM_REG)
4439 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4440 else
4441 return NULL_RTX;
4445 /* For an arg passed partly in registers and partly in memory,
4446 this is the number of registers used.
4447 For args passed entirely in registers or entirely in memory, zero. */
4450 function_arg_partial_nregs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4451 tree type, int named)
4453 if (DEFAULT_ABI == ABI_V4)
4454 return 0;
4456 if (USE_FP_FOR_ARG_P (cum, mode, type)
4457 || USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
4459 if (cum->nargs_prototype >= 0)
4460 return 0;
4463 if (cum->words < GP_ARG_NUM_REG
4464 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
4466 int ret = GP_ARG_NUM_REG - cum->words;
4467 if (ret && TARGET_DEBUG_ARG)
4468 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
4470 return ret;
4473 return 0;
4476 /* A C expression that indicates when an argument must be passed by
4477 reference. If nonzero for an argument, a copy of that argument is
4478 made in memory and a pointer to the argument is passed instead of
4479 the argument itself. The pointer is passed in whatever way is
4480 appropriate for passing a pointer to that type.
4482 Under V.4, structures and unions are passed by reference.
4484 As an extension to all ABIs, variable sized types are passed by
4485 reference. */
4488 function_arg_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
4489 enum machine_mode mode ATTRIBUTE_UNUSED,
4490 tree type, int named ATTRIBUTE_UNUSED)
4492 if (DEFAULT_ABI == ABI_V4
4493 && ((type && AGGREGATE_TYPE_P (type))
4494 || mode == TFmode))
4496 if (TARGET_DEBUG_ARG)
4497 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
4499 return 1;
4501 return type && int_size_in_bytes (type) < 0;
4504 static void
4505 rs6000_move_block_from_reg (int regno, rtx x, int nregs)
4507 int i;
4508 enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
4510 if (nregs == 0)
4511 return;
4513 for (i = 0; i < nregs; i++)
4515 rtx tem = adjust_address_nv (x, reg_mode, i*GET_MODE_SIZE(reg_mode));
4516 if (reload_completed)
4518 if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
4519 tem = NULL_RTX;
4520 else
4521 tem = simplify_gen_subreg (reg_mode, x, BLKmode,
4522 i * GET_MODE_SIZE(reg_mode));
4524 else
4525 tem = replace_equiv_address (tem, XEXP (tem, 0));
4527 if (tem == NULL_RTX)
4528 abort ();
4530 emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
4535 /* Perform any needed actions needed for a function that is receiving a
4536 variable number of arguments.
4538 CUM is as above.
4540 MODE and TYPE are the mode and type of the current parameter.
4542 PRETEND_SIZE is a variable that should be set to the amount of stack
4543 that must be pushed by the prolog to pretend that our caller pushed
4546 Normally, this macro will push all remaining incoming registers on the
4547 stack and set PRETEND_SIZE to the length of the registers pushed. */
4549 static void
4550 setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4551 tree type, int *pretend_size ATTRIBUTE_UNUSED, int no_rtl)
4553 CUMULATIVE_ARGS next_cum;
4554 int reg_size = TARGET_32BIT ? 4 : 8;
4555 rtx save_area = NULL_RTX, mem;
4556 int first_reg_offset, set;
4558 /* Skip the last named argument. */
4559 next_cum = *cum;
4560 function_arg_advance (&next_cum, mode, type, 1);
4562 if (DEFAULT_ABI == ABI_V4)
4564 /* Indicate to allocate space on the stack for varargs save area. */
4565 cfun->machine->sysv_varargs_p = 1;
4566 if (! no_rtl)
4567 save_area = plus_constant (virtual_stack_vars_rtx,
4568 - RS6000_VARARGS_SIZE);
4570 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
4572 else
4574 first_reg_offset = next_cum.words;
4575 save_area = virtual_incoming_args_rtx;
4576 cfun->machine->sysv_varargs_p = 0;
4578 if (MUST_PASS_IN_STACK (mode, type))
4579 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
/* Spill any remaining incoming GPRs to the save area.  */
4582 set = get_varargs_alias_set ();
4583 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
4585 mem = gen_rtx_MEM (BLKmode,
4586 plus_constant (save_area,
4587 first_reg_offset * reg_size)),
4588 set_mem_alias_set (mem, set);
4589 set_mem_align (mem, BITS_PER_WORD);
4591 rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
4592 GP_ARG_NUM_REG - first_reg_offset);
4595 /* Save FP registers if needed. */
4596 if (DEFAULT_ABI == ABI_V4
4597 && TARGET_HARD_FLOAT && TARGET_FPRS
4598 && ! no_rtl
4599 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
4601 int fregno = next_cum.fregno;
4602 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4603 rtx lab = gen_label_rtx ();
4604 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
/* Conditional branch over the FP stores below when CR1 tests NE.
   NOTE(review): branch direction depends on the CR1 bit convention
   set by callers (see CALL_V4_SET_FP_ARGS in function_arg) -- confirm.  */
4606 emit_jump_insn (gen_rtx_SET (VOIDmode,
4607 pc_rtx,
4608 gen_rtx_IF_THEN_ELSE (VOIDmode,
4609 gen_rtx_NE (VOIDmode, cr1,
4610 const0_rtx),
4611 gen_rtx_LABEL_REF (VOIDmode, lab),
4612 pc_rtx)));
/* Store each remaining FP argument register into the save area.  */
4614 while (fregno <= FP_ARG_V4_MAX_REG)
4616 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
4617 set_mem_alias_set (mem, set);
4618 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
4619 fregno++;
4620 off += 8;
4623 emit_label (lab);
4627 /* Create the va_list data type. */
4629 static tree
4630 rs6000_build_builtin_va_list (void)
4632 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
4634 /* For AIX, prefer 'char *' because that's what the system
4635 header files like. */
4636 if (DEFAULT_ABI != ABI_V4)
4637 return build_pointer_type (char_type_node);
4639 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
4640 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
4642 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
4643 unsigned_char_type_node);
4644 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
4645 unsigned_char_type_node);
4646 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
4647 every user file. */
4648 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
4649 short_unsigned_type_node);
4650 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
4651 ptr_type_node);
4652 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
4653 ptr_type_node);
4655 DECL_FIELD_CONTEXT (f_gpr) = record;
4656 DECL_FIELD_CONTEXT (f_fpr) = record;
4657 DECL_FIELD_CONTEXT (f_res) = record;
4658 DECL_FIELD_CONTEXT (f_ovf) = record;
4659 DECL_FIELD_CONTEXT (f_sav) = record;
4661 TREE_CHAIN (record) = type_decl;
4662 TYPE_NAME (record) = type_decl;
4663 TYPE_FIELDS (record) = f_gpr;
4664 TREE_CHAIN (f_gpr) = f_fpr;
4665 TREE_CHAIN (f_fpr) = f_res;
4666 TREE_CHAIN (f_res) = f_ovf;
4667 TREE_CHAIN (f_ovf) = f_sav;
4669 layout_type (record);
4671 /* The correct type is an array type of one element. */
4672 return build_array_type (record, build_index_type (size_zero_node));
4675 /* Implement va_start. */
4677 void
4678 rs6000_va_start (tree valist, rtx nextarg)
4680 HOST_WIDE_INT words, n_gpr, n_fpr;
4681 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
4682 tree gpr, fpr, ovf, sav, t;
4684 /* Only SVR4 needs something special. */
4685 if (DEFAULT_ABI != ABI_V4)
4687 std_expand_builtin_va_start (valist, nextarg);
4688 return;
/* Pull the __va_list_tag fields out of the record type built by
   rs6000_build_builtin_va_list (same order: gpr, fpr, reserved,
   overflow_arg_area, reg_save_area).  */
4691 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
4692 f_fpr = TREE_CHAIN (f_gpr);
4693 f_res = TREE_CHAIN (f_fpr);
4694 f_ovf = TREE_CHAIN (f_res);
4695 f_sav = TREE_CHAIN (f_ovf);
/* Deref the one-element array type to reach the record itself.  */
4697 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
4698 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
4699 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
4700 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
4701 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
4703 /* Count number of gp and fp argument registers used. */
4704 words = current_function_args_info.words;
4705 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
4706 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
4708 if (TARGET_DEBUG_ARG)
4709 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
4710 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
4711 words, n_gpr, n_fpr);
/* Store the register counters into the va_list object.  */
4713 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
4714 TREE_SIDE_EFFECTS (t) = 1;
4715 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4717 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
4718 TREE_SIDE_EFFECTS (t) = 1;
4719 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4721 /* Find the overflow area. */
4722 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
4723 if (words != 0)
4724 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
4725 build_int_2 (words * UNITS_PER_WORD, 0));
4726 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
4727 TREE_SIDE_EFFECTS (t) = 1;
4728 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4730 /* Find the register save area. */
4731 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
4732 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
4733 build_int_2 (-RS6000_VARARGS_SIZE, -1));
4734 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
4735 TREE_SIDE_EFFECTS (t) = 1;
4736 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4739 /* Implement va_arg. */
4742 rs6000_va_arg (tree valist, tree type)
4744 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
4745 tree gpr, fpr, ovf, sav, reg, t, u;
4746 int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
4747 rtx lab_false, lab_over, addr_rtx, r;
4749 if (DEFAULT_ABI != ABI_V4)
4751 /* Variable sized types are passed by reference. */
4752 if (int_size_in_bytes (type) < 0)
4754 u = build_pointer_type (type);
4756 /* Args grow upward. */
4757 t = build (POSTINCREMENT_EXPR, TREE_TYPE (valist), valist,
4758 build_int_2 (POINTER_SIZE / BITS_PER_UNIT, 0));
4759 TREE_SIDE_EFFECTS (t) = 1;
4761 t = build1 (NOP_EXPR, build_pointer_type (u), t);
4762 TREE_SIDE_EFFECTS (t) = 1;
4764 t = build1 (INDIRECT_REF, u, t);
4765 TREE_SIDE_EFFECTS (t) = 1;
4767 return expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4769 else
4770 return std_expand_builtin_va_arg (valist, type);
/* V.4: walk the __va_list_tag record (fields in the order built by
   rs6000_build_builtin_va_list).  */
4773 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
4774 f_fpr = TREE_CHAIN (f_gpr);
4775 f_res = TREE_CHAIN (f_fpr);
4776 f_ovf = TREE_CHAIN (f_res);
4777 f_sav = TREE_CHAIN (f_ovf);
4779 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
4780 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
4781 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
4782 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
4783 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
4785 size = int_size_in_bytes (type);
4786 rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
/* Classify the argument: which register file, how many registers,
   and where its save-area slots start.  */
4788 if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
4790 /* Aggregates and long doubles are passed by reference. */
4791 indirect_p = 1;
4792 reg = gpr;
4793 n_reg = 1;
4794 sav_ofs = 0;
4795 sav_scale = 4;
4796 size = UNITS_PER_WORD;
4797 rsize = 1;
4799 else if (FLOAT_TYPE_P (type) && TARGET_HARD_FLOAT && TARGET_FPRS)
4801 /* FP args go in FP registers, if present. */
4802 indirect_p = 0;
4803 reg = fpr;
4804 n_reg = 1;
4805 sav_ofs = 8*4;
4806 sav_scale = 8;
4808 else
4810 /* Otherwise into GP registers. */
4811 indirect_p = 0;
4812 reg = gpr;
4813 n_reg = rsize;
4814 sav_ofs = 0;
4815 sav_scale = 4;
4818 /* Pull the value out of the saved registers.... */
4820 lab_false = gen_label_rtx ();
4821 lab_over = gen_label_rtx ();
4822 addr_rtx = gen_reg_rtx (Pmode);
4824 /* AltiVec vectors never go in registers. */
4825 if (!TARGET_ALTIVEC || TREE_CODE (type) != VECTOR_TYPE)
4827 TREE_THIS_VOLATILE (reg) = 1;
4828 emit_cmp_and_jump_insns
4829 (expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
4830 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
4831 lab_false);
4833 /* Long long is aligned in the registers. */
4834 if (n_reg > 1)
4836 u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
4837 build_int_2 (n_reg - 1, 0));
4838 u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
4839 u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
4840 TREE_SIDE_EFFECTS (u) = 1;
4841 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* addr = sav + sav_ofs + reg++ * sav_scale.  */
4844 if (sav_ofs)
4845 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
4846 else
4847 t = sav;
4849 u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
4850 build_int_2 (n_reg, 0));
4851 TREE_SIDE_EFFECTS (u) = 1;
4853 u = build1 (CONVERT_EXPR, integer_type_node, u);
4854 TREE_SIDE_EFFECTS (u) = 1;
4856 u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
4857 TREE_SIDE_EFFECTS (u) = 1;
4859 t = build (PLUS_EXPR, ptr_type_node, t, u);
4860 TREE_SIDE_EFFECTS (t) = 1;
4862 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
4863 if (r != addr_rtx)
4864 emit_move_insn (addr_rtx, r);
4866 emit_jump_insn (gen_jump (lab_over));
4867 emit_barrier ();
4870 emit_label (lab_false);
4872 /* ... otherwise out of the overflow area. */
4874 /* Make sure we don't find reg 7 for the next int arg.
4876 All AltiVec vectors go in the overflow area. So in the AltiVec
4877 case we need to get the vectors from the overflow area, but
4878 remember where the GPRs and FPRs are. */
4879 if (n_reg > 1 && (TREE_CODE (type) != VECTOR_TYPE
4880 || !TARGET_ALTIVEC))
4882 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
4883 TREE_SIDE_EFFECTS (t) = 1;
4884 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4887 /* Care for on-stack alignment if needed. */
4888 if (rsize <= 1)
4889 t = ovf;
4890 else
4892 int align;
4894 /* AltiVec vectors are 16 byte aligned. */
4895 if (TARGET_ALTIVEC && TREE_CODE (type) == VECTOR_TYPE)
4896 align = 15;
4897 else
4898 align = 7;
4900 t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
4901 t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
4903 t = save_expr (t);
4905 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
4906 if (r != addr_rtx)
4907 emit_move_insn (addr_rtx, r);
/* Advance the overflow pointer past this argument.  */
4909 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
4910 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
4911 TREE_SIDE_EFFECTS (t) = 1;
4912 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4914 emit_label (lab_over);
4916 if (indirect_p)
/* By-reference args: the slot holds a pointer to the value; load it.  */
4918 r = gen_rtx_MEM (Pmode, addr_rtx);
4919 set_mem_alias_set (r, get_varargs_alias_set ());
4920 emit_move_insn (addr_rtx, r);
4923 return addr_rtx;
4926 /* Builtins. */
/* Register builtin NAME with the given TYPE and CODE, but only when
   the MASK bits are enabled in target_flags.  */
4928 #define def_builtin(MASK, NAME, TYPE, CODE) \
4929 do { \
4930 if ((MASK) & target_flags) \
4931 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
4932 NULL, NULL_TREE); \
4933 } while (0)
4935 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
/* Each entry: enable mask, insn code, builtin name, builtin enum code.  */
4937 static const struct builtin_description bdesc_3arg[] =
4939 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
4940 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
4941 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
4942 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
4943 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
4944 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
4945 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
4946 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
4947 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
4948 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
4949 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
4950 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
4951 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
4952 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
4953 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
4954 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
4955 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
4956 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
4957 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
4958 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
4959 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
4960 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
4961 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
4964 /* DST operations: void foo (void *, const int, const char). */
/* Each entry: enable mask, insn code, builtin name, builtin enum code.  */
4966 static const struct builtin_description bdesc_dst[] =
4968 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
4969 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
4970 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
4971 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
4974 /* Simple binary operations: VECc = foo (VECa, VECb). */
4976 static struct builtin_description bdesc_2arg[] =
4978 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
4979 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
4980 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
4981 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
4982 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
4983 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
4984 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
4985 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
4986 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
4987 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
4988 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
4989 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
4990 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
4991 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
4992 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
4993 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
4994 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
4995 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
4996 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
4997 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
4998 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
4999 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
5000 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
5001 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
5002 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
5003 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
5004 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
5005 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
5006 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
5007 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
5008 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
5009 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
5010 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
5011 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
5012 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
5013 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
5014 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
5015 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
5016 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
5017 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
5018 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
5019 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
5020 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
5021 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
5022 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
5023 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
5024 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
5025 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
5026 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
5027 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
5028 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
5029 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
5030 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
5031 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
5032 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
5033 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
5034 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
5035 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
5036 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
5037 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
5038 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
5039 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
5040 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
5041 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
5042 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
5043 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
5044 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
5045 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
5046 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
5047 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
5048 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
5049 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
5050 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
5051 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
5052 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
5053 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
5054 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
5055 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
5056 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
5057 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
5058 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
5059 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
5060 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
5061 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
5062 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
5063 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
5064 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
5065 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
5066 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
5067 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
5068 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
5069 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
5070 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
5071 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
5072 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
5073 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
5074 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
5075 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
5076 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
5077 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
5078 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
5079 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
5080 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
5081 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
5082 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
5083 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
5084 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
5085 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
5086 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
5087 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
5088 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
5089 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
5090 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
5092 /* Place holder, leave as first spe builtin. */
5093 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
5094 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
5095 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
5096 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
5097 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
5098 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
5099 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
5100 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
5101 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
5102 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
5103 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
5104 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
5105 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
5106 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
5107 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
5108 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
5109 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
5110 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
5111 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
5112 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
5113 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
5114 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
5115 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
5116 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
5117 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
5118 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
5119 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
5120 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
5121 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
5122 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
5123 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
5124 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
5125 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
5126 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
5127 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
5128 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
5129 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
5130 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
5131 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
5132 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
5133 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
5134 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
5135 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
5136 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
5137 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
5138 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
5139 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
5140 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
5141 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
5142 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
5143 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
5144 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
5145 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
5146 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
5147 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
5148 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
5149 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
5150 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
5151 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
5152 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
5153 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
5154 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
5155 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
5156 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
5157 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
5158 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
5159 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
5160 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
5161 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
5162 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
5163 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
5164 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
5165 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
5166 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
5167 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
5168 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
5169 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
5170 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
5171 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
5172 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
5173 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
5174 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
5175 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
5176 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
5177 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
5178 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
5179 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
5180 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
5181 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
5182 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
5183 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
5184 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
5185 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
5186 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
5187 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
5188 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
5189 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
5190 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
5191 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
5192 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
5193 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
5194 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
5195 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
5196 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
5197 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
5198 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
5199 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
5200 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
5201 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
5203 /* SPE binary operations expecting a 5-bit unsigned literal. */
5204 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
5206 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
5207 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
5208 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
5209 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
5210 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
5211 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
5212 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
5213 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
5214 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
5215 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
5216 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
5217 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
5218 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
5219 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
5220 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
5221 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
5222 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
5223 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
5224 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
5225 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
5226 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
5227 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
5228 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
5229 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
5230 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
5231 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
5233 /* Place-holder. Leave as last binary SPE builtin. */
5234 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
5237 /* AltiVec predicates. */
/* Descriptor for one AltiVec vector-compare predicate builtin.  Unlike
   the plain builtin_description entries, it also records the name of
   the "dot form" compare pattern, which
   altivec_expand_predicate_builtin hands to the predicate insn as a
   SYMBOL_REF.  */
5239 struct builtin_description_predicates
  /* Target flag bits that must be enabled for this builtin
     (MASK_ALTIVEC for all entries below).  */
5241   const unsigned int mask;
  /* Insn pattern used to emit the underlying comparison.  */
5242   const enum insn_code icode;
  /* Name of the CR6-setting compare insn, e.g. "*vcmpbfp.".  */
5243   const char *opcode;
  /* User-visible builtin name.  */
5244   const char *const name;
  /* Enumerator identifying the builtin.  */
5245   const enum rs6000_builtins code;
/* AltiVec predicate builtins (the vec_all_* / vec_any_* family).  Each
   entry pairs a predicate expander pattern with the "dot form" compare
   opcode string; altivec_expand_predicate_builtin emits the compare and
   then tests the resulting CR6 bits.  */
5248 static const struct builtin_description_predicates bdesc_altivec_preds[] =
5250 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
5251 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
5252 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
5253 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
5254 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
5255 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
5256 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
5257 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
5258 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
5259 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
5260 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
5261 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
5262 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
5265 /* SPE predicates. */
/* SPE vector-compare predicate builtins.  Mask 0 means no special
   target flag is required by this table itself.  The place-holder
   comments mark the first and last entries; presumably other code
   walks the SPE_BUILTIN_EVCMPEQ..SPE_BUILTIN_EVFSTSTLT range by enum
   value, so keep the ordering -- TODO confirm against the expander.  */
5266 static struct builtin_description bdesc_spe_predicates[] =
5268 /* Place-holder. Leave as first. */
5269 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
5270 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
5271 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
5272 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
5273 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
5274 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
5275 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
5276 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
5277 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
5278 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
5279 /* Place-holder. Leave as last. */
5280 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
5283 /* SPE evsel predicates. */
/* SPE evsel (conditional select) builtins.  Each entry reuses a
   compare pattern whose result then drives an evsel.  As with the
   predicate table, the first/last place-holders suggest the enum
   range SPE_BUILTIN_EVSEL_CMPGTS..SPE_BUILTIN_EVSEL_FSTSTEQ is walked
   elsewhere -- keep the ordering.  */
5284 static struct builtin_description bdesc_spe_evsel[] =
5286 /* Place-holder. Leave as first. */
5287 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
5288 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
5289 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
5290 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
5291 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
5292 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
5293 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
5294 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
5295 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
5296 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
5297 /* Place-holder. Leave as last. */
5298 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
5301 /* ABS* operations. */
/* AltiVec absolute-value builtins.  The plain abs_* entries use
   generic absM2 patterns; the abss_* entries are the saturating
   variants.  These patterns require two scratch vector registers, so
   they are expanded through altivec_expand_abs_builtin rather than
   the generic unop expander.  */
5303 static const struct builtin_description bdesc_abs[] =
5305 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
5306 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
5307 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
5308 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
5309 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
5310 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
5311 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
5314 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
5315 foo (VECa). */
/* One-argument builtins, expanded via rs6000_expand_unop_builtin.
   The AltiVec entries come first; the vspltis* entries take a 5-bit
   signed literal rather than a vector operand.  The SPE entries must
   stay contiguous between SPE_BUILTIN_EVABS and
   SPE_BUILTIN_EVSUBFUSIAAW (see the embedded comments), presumably
   because that enum range is iterated elsewhere -- keep ordering.  */
5317 static struct builtin_description bdesc_1arg[] =
5319 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
5320 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
5321 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
5322 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
5323 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
5324 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
5325 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
5326 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
5327 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
5328 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
5329 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
5330 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
5331 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
5332 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
5333 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
5334 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
5335 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
5337 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
5338 end with SPE_BUILTIN_EVSUBFUSIAAW. */
5339 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
5340 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
5341 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
5342 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
5343 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
5344 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
5345 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
5346 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
5347 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
5348 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
5349 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
5350 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
5351 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
5352 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
5353 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
5354 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
5355 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
5356 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
5357 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
5358 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
5359 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
5360 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
5361 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
5362 { 0, CODE_FOR_spe_evneg, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
5363 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
5364 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
5365 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
5366 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
5368 /* Place-holder. Leave as last unary SPE builtin. */
5369 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
5372 static rtx
5373 rs6000_expand_unop_builtin (enum insn_code icode, tree arglist, rtx target)
5375 rtx pat;
5376 tree arg0 = TREE_VALUE (arglist);
5377 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5378 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5379 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5381 if (icode == CODE_FOR_nothing)
5382 /* Builtin not supported on this processor. */
5383 return 0;
5385 /* If we got invalid arguments bail out before generating bad rtl. */
5386 if (arg0 == error_mark_node)
5387 return const0_rtx;
5389 if (icode == CODE_FOR_altivec_vspltisb
5390 || icode == CODE_FOR_altivec_vspltish
5391 || icode == CODE_FOR_altivec_vspltisw
5392 || icode == CODE_FOR_spe_evsplatfi
5393 || icode == CODE_FOR_spe_evsplati)
5395 /* Only allow 5-bit *signed* literals. */
5396 if (GET_CODE (op0) != CONST_INT
5397 || INTVAL (op0) > 0x1f
5398 || INTVAL (op0) < -0x1f)
5400 error ("argument 1 must be a 5-bit signed literal");
5401 return const0_rtx;
5405 if (target == 0
5406 || GET_MODE (target) != tmode
5407 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5408 target = gen_reg_rtx (tmode);
5410 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5411 op0 = copy_to_mode_reg (mode0, op0);
5413 pat = GEN_FCN (icode) (target, op0);
5414 if (! pat)
5415 return 0;
5416 emit_insn (pat);
5418 return target;
5421 static rtx
5422 altivec_expand_abs_builtin (enum insn_code icode, tree arglist, rtx target)
5424 rtx pat, scratch1, scratch2;
5425 tree arg0 = TREE_VALUE (arglist);
5426 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5427 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5428 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5430 /* If we have invalid arguments, bail out before generating bad rtl. */
5431 if (arg0 == error_mark_node)
5432 return const0_rtx;
5434 if (target == 0
5435 || GET_MODE (target) != tmode
5436 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5437 target = gen_reg_rtx (tmode);
5439 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5440 op0 = copy_to_mode_reg (mode0, op0);
5442 scratch1 = gen_reg_rtx (mode0);
5443 scratch2 = gen_reg_rtx (mode0);
5445 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
5446 if (! pat)
5447 return 0;
5448 emit_insn (pat);
5450 return target;
5453 static rtx
5454 rs6000_expand_binop_builtin (enum insn_code icode, tree arglist, rtx target)
5456 rtx pat;
5457 tree arg0 = TREE_VALUE (arglist);
5458 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5459 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5460 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5461 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5462 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5463 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5465 if (icode == CODE_FOR_nothing)
5466 /* Builtin not supported on this processor. */
5467 return 0;
5469 /* If we got invalid arguments bail out before generating bad rtl. */
5470 if (arg0 == error_mark_node || arg1 == error_mark_node)
5471 return const0_rtx;
5473 if (icode == CODE_FOR_altivec_vcfux
5474 || icode == CODE_FOR_altivec_vcfsx
5475 || icode == CODE_FOR_altivec_vctsxs
5476 || icode == CODE_FOR_altivec_vctuxs
5477 || icode == CODE_FOR_altivec_vspltb
5478 || icode == CODE_FOR_altivec_vsplth
5479 || icode == CODE_FOR_altivec_vspltw
5480 || icode == CODE_FOR_spe_evaddiw
5481 || icode == CODE_FOR_spe_evldd
5482 || icode == CODE_FOR_spe_evldh
5483 || icode == CODE_FOR_spe_evldw
5484 || icode == CODE_FOR_spe_evlhhesplat
5485 || icode == CODE_FOR_spe_evlhhossplat
5486 || icode == CODE_FOR_spe_evlhhousplat
5487 || icode == CODE_FOR_spe_evlwhe
5488 || icode == CODE_FOR_spe_evlwhos
5489 || icode == CODE_FOR_spe_evlwhou
5490 || icode == CODE_FOR_spe_evlwhsplat
5491 || icode == CODE_FOR_spe_evlwwsplat
5492 || icode == CODE_FOR_spe_evrlwi
5493 || icode == CODE_FOR_spe_evslwi
5494 || icode == CODE_FOR_spe_evsrwis
5495 || icode == CODE_FOR_spe_evsubifw
5496 || icode == CODE_FOR_spe_evsrwiu)
5498 /* Only allow 5-bit unsigned literals. */
5499 STRIP_NOPS (arg1);
5500 if (TREE_CODE (arg1) != INTEGER_CST
5501 || TREE_INT_CST_LOW (arg1) & ~0x1f)
5503 error ("argument 2 must be a 5-bit unsigned literal");
5504 return const0_rtx;
5508 if (target == 0
5509 || GET_MODE (target) != tmode
5510 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5511 target = gen_reg_rtx (tmode);
5513 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5514 op0 = copy_to_mode_reg (mode0, op0);
5515 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5516 op1 = copy_to_mode_reg (mode1, op1);
5518 pat = GEN_FCN (icode) (target, op0, op1);
5519 if (! pat)
5520 return 0;
5521 emit_insn (pat);
5523 return target;
/* Expand an AltiVec predicate builtin.  ICODE is the comparison insn,
   OPCODE the instruction mnemonic passed through as a SYMBOL_REF
   operand, ARGLIST the (cr6_form, arg0, arg1) argument list, and
   TARGET a suggested SImode result register.  Returns the result rtx,
   0 if no insn could be generated, or const0_rtx on user error.  */
static rtx
altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
				  tree arglist, rtx target)
{
  rtx pat, scratch;
  /* The first argument selects which CR6 bit holds the answer.  */
  tree cr6_form = TREE_VALUE (arglist);
  tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  enum machine_mode tmode = SImode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
  int cr6_form_int;

  if (TREE_CODE (cr6_form) != INTEGER_CST)
    {
      error ("argument 1 of __builtin_altivec_predicate must be a constant");
      return const0_rtx;
    }
  else
    cr6_form_int = TREE_INT_CST_LOW (cr6_form);

  /* Both vector inputs of the compare must share a mode.  */
  if (mode0 != mode1)
    abort ();

  /* If we have invalid arguments, bail out before generating bad rtl.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  if (target == 0
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  /* The vector compare result itself is discarded; only the CR6 bits
     it sets are read below.  */
  scratch = gen_reg_rtx (mode0);

  pat = GEN_FCN (icode) (scratch, op0, op1,
			 gen_rtx_SYMBOL_REF (Pmode, opcode));
  if (! pat)
    return 0;
  emit_insn (pat);

  /* The vec_any* and vec_all* predicates use the same opcodes for two
     different operations, but the bits in CR6 will be different
     depending on what information we want.  So we have to play tricks
     with CR6 to get the right bits out.

     If you think this is disgusting, look at the specs for the
     AltiVec predicates.  */
  switch (cr6_form_int)
    {
    case 0:
      emit_insn (gen_cr6_test_for_zero (target));
      break;
    case 1:
      emit_insn (gen_cr6_test_for_zero_reverse (target));
      break;
    case 2:
      emit_insn (gen_cr6_test_for_lt (target));
      break;
    case 3:
      emit_insn (gen_cr6_test_for_lt_reverse (target));
      break;
    default:
      error ("argument 1 of __builtin_altivec_predicate is out of range");
      break;
    }

  return target;
}
5604 static rtx
5605 altivec_expand_lv_builtin (enum insn_code icode, tree arglist, rtx target)
5607 rtx pat, addr;
5608 tree arg0 = TREE_VALUE (arglist);
5609 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5610 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5611 enum machine_mode mode0 = Pmode;
5612 enum machine_mode mode1 = Pmode;
5613 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5614 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5616 if (icode == CODE_FOR_nothing)
5617 /* Builtin not supported on this processor. */
5618 return 0;
5620 /* If we got invalid arguments bail out before generating bad rtl. */
5621 if (arg0 == error_mark_node || arg1 == error_mark_node)
5622 return const0_rtx;
5624 if (target == 0
5625 || GET_MODE (target) != tmode
5626 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5627 target = gen_reg_rtx (tmode);
5629 op1 = copy_to_mode_reg (mode1, op1);
5631 if (op0 == const0_rtx)
5633 addr = gen_rtx_MEM (tmode, op1);
5635 else
5637 op0 = copy_to_mode_reg (mode0, op0);
5638 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
5641 pat = GEN_FCN (icode) (target, addr);
5643 if (! pat)
5644 return 0;
5645 emit_insn (pat);
5647 return target;
5650 static rtx
5651 altivec_expand_stv_builtin (enum insn_code icode, tree arglist)
5653 tree arg0 = TREE_VALUE (arglist);
5654 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5655 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5656 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5657 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5658 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5659 rtx pat, addr;
5660 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5661 enum machine_mode mode1 = Pmode;
5662 enum machine_mode mode2 = Pmode;
5664 /* Invalid arguments. Bail before doing anything stoopid! */
5665 if (arg0 == error_mark_node
5666 || arg1 == error_mark_node
5667 || arg2 == error_mark_node)
5668 return const0_rtx;
5670 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
5671 op0 = copy_to_mode_reg (tmode, op0);
5673 op2 = copy_to_mode_reg (mode2, op2);
5675 if (op1 == const0_rtx)
5677 addr = gen_rtx_MEM (tmode, op2);
5679 else
5681 op1 = copy_to_mode_reg (mode1, op1);
5682 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
5685 pat = GEN_FCN (icode) (addr, op0);
5686 if (pat)
5687 emit_insn (pat);
5688 return NULL_RTX;
/* Expand a three-operand builtin.  ICODE names the insn pattern,
   ARGLIST holds the three arguments, and TARGET is a suggested result
   register.  Returns the result rtx, 0 if no insn could be generated,
   or const0_rtx on user error.  */
static rtx
rs6000_expand_ternop_builtin (enum insn_code icode, tree arglist, rtx target)
{
  rtx pat;
  tree arg0 = TREE_VALUE (arglist);
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
  enum machine_mode mode2 = insn_data[icode].operand[3].mode;

  if (icode == CODE_FOR_nothing)
    /* Builtin not supported on this processor.  */
    return 0;

  /* If we got invalid arguments bail out before generating bad rtl.  */
  if (arg0 == error_mark_node
      || arg1 == error_mark_node
      || arg2 == error_mark_node)
    return const0_rtx;

  /* vsldoi takes its shift count as an immediate field of the insn,
     so the third argument must be a compile-time literal.  */
  if (icode == CODE_FOR_altivec_vsldoi_4sf
      || icode == CODE_FOR_altivec_vsldoi_4si
      || icode == CODE_FOR_altivec_vsldoi_8hi
      || icode == CODE_FOR_altivec_vsldoi_16qi)
    {
      /* Only allow 4-bit unsigned literals.  */
      STRIP_NOPS (arg2);
      if (TREE_CODE (arg2) != INTEGER_CST
	  || TREE_INT_CST_LOW (arg2) & ~0xf)
	{
	  error ("argument 3 must be a 4-bit unsigned literal");
	  return const0_rtx;
	}
    }

  if (target == 0
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  /* Force each operand into a form its insn predicate accepts.  */
  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);
  if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
    op2 = copy_to_mode_reg (mode2, op2);

  pat = GEN_FCN (icode) (target, op0, op1, op2);
  if (! pat)
    return 0;
  emit_insn (pat);

  return target;
}
5751 /* Expand the lvx builtins. */
5752 static rtx
5753 altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
5755 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5756 tree arglist = TREE_OPERAND (exp, 1);
5757 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5758 tree arg0;
5759 enum machine_mode tmode, mode0;
5760 rtx pat, op0;
5761 enum insn_code icode;
5763 switch (fcode)
5765 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
5766 icode = CODE_FOR_altivec_lvx_16qi;
5767 break;
5768 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
5769 icode = CODE_FOR_altivec_lvx_8hi;
5770 break;
5771 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
5772 icode = CODE_FOR_altivec_lvx_4si;
5773 break;
5774 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
5775 icode = CODE_FOR_altivec_lvx_4sf;
5776 break;
5777 default:
5778 *expandedp = false;
5779 return NULL_RTX;
5782 *expandedp = true;
5784 arg0 = TREE_VALUE (arglist);
5785 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5786 tmode = insn_data[icode].operand[0].mode;
5787 mode0 = insn_data[icode].operand[1].mode;
5789 if (target == 0
5790 || GET_MODE (target) != tmode
5791 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5792 target = gen_reg_rtx (tmode);
5794 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5795 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
5797 pat = GEN_FCN (icode) (target, op0);
5798 if (! pat)
5799 return 0;
5800 emit_insn (pat);
5801 return target;
5804 /* Expand the stvx builtins. */
5805 static rtx
5806 altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
5807 bool *expandedp)
5809 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5810 tree arglist = TREE_OPERAND (exp, 1);
5811 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5812 tree arg0, arg1;
5813 enum machine_mode mode0, mode1;
5814 rtx pat, op0, op1;
5815 enum insn_code icode;
5817 switch (fcode)
5819 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
5820 icode = CODE_FOR_altivec_stvx_16qi;
5821 break;
5822 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
5823 icode = CODE_FOR_altivec_stvx_8hi;
5824 break;
5825 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
5826 icode = CODE_FOR_altivec_stvx_4si;
5827 break;
5828 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
5829 icode = CODE_FOR_altivec_stvx_4sf;
5830 break;
5831 default:
5832 *expandedp = false;
5833 return NULL_RTX;
5836 arg0 = TREE_VALUE (arglist);
5837 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5838 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5839 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5840 mode0 = insn_data[icode].operand[0].mode;
5841 mode1 = insn_data[icode].operand[1].mode;
5843 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5844 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
5845 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
5846 op1 = copy_to_mode_reg (mode1, op1);
5848 pat = GEN_FCN (icode) (op0, op1);
5849 if (pat)
5850 emit_insn (pat);
5852 *expandedp = true;
5853 return NULL_RTX;
/* Expand the dst builtins.  Scans bdesc_dst for the builtin named by
   EXP; if found, sets *EXPANDEDP and emits the data-stream-touch insn.
   Returns NULL_RTX normally, const0_rtx on user error.  */
static rtx
altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
			    bool *expandedp)
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arg0, arg1, arg2;
  enum machine_mode mode0, mode1, mode2;
  rtx pat, op0, op1, op2;
  struct builtin_description *d;
  size_t i;

  *expandedp = false;

  /* Handle DST variants.  */
  d = (struct builtin_description *) bdesc_dst;
  for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
    if (d->code == fcode)
      {
	arg0 = TREE_VALUE (arglist);
	arg1 = TREE_VALUE (TREE_CHAIN (arglist));
	arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
	op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
	op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
	op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
	mode0 = insn_data[d->icode].operand[0].mode;
	mode1 = insn_data[d->icode].operand[1].mode;
	mode2 = insn_data[d->icode].operand[2].mode;

	/* Invalid arguments, bail out before generating bad rtl.  */
	if (arg0 == error_mark_node
	    || arg1 == error_mark_node
	    || arg2 == error_mark_node)
	  return const0_rtx;

	/* The third argument is an immediate field of the insn, so it
	   must be a small literal constant.  */
	STRIP_NOPS (arg2);
	if (TREE_CODE (arg2) != INTEGER_CST
	    || TREE_INT_CST_LOW (arg2) & ~0x3)
	  {
	    error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
	    return const0_rtx;
	  }

	if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
	  op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
	if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
	  op1 = copy_to_mode_reg (mode1, op1);

	pat = GEN_FCN (d->icode) (op0, op1, op2);
	if (pat != 0)
	  emit_insn (pat);

	*expandedp = true;
	return NULL_RTX;
      }

  return NULL_RTX;
}
/* Expand the builtin in EXP and store the result in TARGET.  Store
   true in *EXPANDEDP if we found a builtin to expand.  */
static rtx
altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
{
  struct builtin_description *d;
  struct builtin_description_predicates *dp;
  size_t i;
  enum insn_code icode;
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg0;
  rtx op0, pat;
  enum machine_mode tmode, mode0;
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);

  /* Give the specialized load/store/dst expanders first crack; each
     reports through *EXPANDEDP whether it handled the builtin.  */
  target = altivec_expand_ld_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  target = altivec_expand_st_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  target = altivec_expand_dst_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  *expandedp = true;

  switch (fcode)
    {
    case ALTIVEC_BUILTIN_STVX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
    case ALTIVEC_BUILTIN_STVEBX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
    case ALTIVEC_BUILTIN_STVEHX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
    case ALTIVEC_BUILTIN_STVEWX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
    case ALTIVEC_BUILTIN_STVXL:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);

    case ALTIVEC_BUILTIN_MFVSCR:
      /* Read the vector status and control register.  */
      icode = CODE_FOR_altivec_mfvscr;
      tmode = insn_data[icode].operand[0].mode;

      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);

      pat = GEN_FCN (icode) (target);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;

    case ALTIVEC_BUILTIN_MTVSCR:
      /* Write the vector status and control register.  */
      icode = CODE_FOR_altivec_mtvscr;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      mode0 = insn_data[icode].operand[0].mode;

      /* If we got invalid arguments bail out before generating bad rtl.  */
      if (arg0 == error_mark_node)
	return const0_rtx;

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      pat = GEN_FCN (icode) (op0);
      if (pat)
	emit_insn (pat);
      return NULL_RTX;

    case ALTIVEC_BUILTIN_DSSALL:
      /* Stop all data streams; takes no operands.  */
      emit_insn (gen_altivec_dssall ());
      return NULL_RTX;

    case ALTIVEC_BUILTIN_DSS:
      icode = CODE_FOR_altivec_dss;
      arg0 = TREE_VALUE (arglist);
      STRIP_NOPS (arg0);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      mode0 = insn_data[icode].operand[0].mode;

      /* If we got invalid arguments bail out before generating bad rtl.  */
      if (arg0 == error_mark_node)
	return const0_rtx;

      /* The stream tag is an immediate field of the dss insn.  */
      if (TREE_CODE (arg0) != INTEGER_CST
	  || TREE_INT_CST_LOW (arg0) & ~0x3)
	{
	  error ("argument to dss must be a 2-bit unsigned literal");
	  return const0_rtx;
	}

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      emit_insn (gen_altivec_dss (op0));
      return NULL_RTX;

    case ALTIVEC_BUILTIN_COMPILETIME_ERROR:
      /* Strip casts/ADDR_EXPRs to reach the string constant naming the
	 offending intrinsic, then report it.  */
      arg0 = TREE_VALUE (arglist);
      while (TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == ADDR_EXPR)
	arg0 = TREE_OPERAND (arg0, 0);
      error ("invalid parameter combination for `%s' AltiVec intrinsic",
	     TREE_STRING_POINTER (arg0));

      return const0_rtx;
    }

  /* Expand abs* operations.  */
  d = (struct builtin_description *) bdesc_abs;
  for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
    if (d->code == fcode)
      return altivec_expand_abs_builtin (d->icode, arglist, target);

  /* Expand the AltiVec predicates.  */
  dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
  for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
    if (dp->code == fcode)
      return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);

  /* LV* are funky.  We initialized them differently.  */
  switch (fcode)
    {
    case ALTIVEC_BUILTIN_LVSL:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
					arglist, target);
    case ALTIVEC_BUILTIN_LVSR:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
					arglist, target);
    case ALTIVEC_BUILTIN_LVEBX:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
					arglist, target);
    case ALTIVEC_BUILTIN_LVEHX:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
					arglist, target);
    case ALTIVEC_BUILTIN_LVEWX:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
					arglist, target);
    case ALTIVEC_BUILTIN_LVXL:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
					arglist, target);
    case ALTIVEC_BUILTIN_LVX:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
					arglist, target);
    default:
      break;
      /* Fall through.  */
    }

  /* Nothing matched; tell the caller to keep looking.  */
  *expandedp = false;
  return NULL_RTX;
}
/* Binops that need to be initialized manually, but can be expanded
   automagically by rs6000_expand_binop_builtin.  Each entry is
   { mask, icode, builtin name, builtin code }; the mask starts as 0
   here — presumably filled in later (see enable_mask_for_builtins),
   TODO confirm against spe_init_builtins.  */
static struct builtin_description bdesc_2arg_spe[] =
{
  { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
  { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
  { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
  { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
  { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
  { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
  { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
  { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
  { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
  { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
  { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
  { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
  { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
  { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
  { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
  { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
  { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
  { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
  { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
  { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
  { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
  { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
};
/* Expand the builtin in EXP and store the result in TARGET.  Store
   true in *EXPANDEDP if we found a builtin to expand.

   This expands the SPE builtins that are not simple unary and binary
   operations.  */
static rtx
spe_expand_builtin (tree exp, rtx target, bool *expandedp)
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg1, arg0;
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  enum insn_code icode;
  enum machine_mode tmode, mode0;
  rtx pat, op0;
  struct builtin_description *d;
  size_t i;

  *expandedp = true;

  /* Syntax check for a 5-bit unsigned immediate.  */
  switch (fcode)
    {
    case SPE_BUILTIN_EVSTDD:
    case SPE_BUILTIN_EVSTDH:
    case SPE_BUILTIN_EVSTDW:
    case SPE_BUILTIN_EVSTWHE:
    case SPE_BUILTIN_EVSTWHO:
    case SPE_BUILTIN_EVSTWWE:
    case SPE_BUILTIN_EVSTWWO:
      /* The third argument (the offset) is an immediate field of the
	 store insn, so it must be a small literal.  */
      arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
      if (TREE_CODE (arg1) != INTEGER_CST
	  || TREE_INT_CST_LOW (arg1) & ~0x1f)
	{
	  error ("argument 2 must be a 5-bit unsigned literal");
	  return const0_rtx;
	}
      break;
    default:
      break;
    }

  /* The evsplat*i instructions are not quite generic.  */
  switch (fcode)
    {
    case SPE_BUILTIN_EVSPLATFI:
      return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
					 arglist, target);
    case SPE_BUILTIN_EVSPLATI:
      return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
					 arglist, target);
    default:
      break;
    }

  /* Manually-initialized binops; see bdesc_2arg_spe above.  */
  d = (struct builtin_description *) bdesc_2arg_spe;
  for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
    if (d->code == fcode)
      return rs6000_expand_binop_builtin (d->icode, arglist, target);

  d = (struct builtin_description *) bdesc_spe_predicates;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
    if (d->code == fcode)
      return spe_expand_predicate_builtin (d->icode, arglist, target);

  d = (struct builtin_description *) bdesc_spe_evsel;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
    if (d->code == fcode)
      return spe_expand_evsel_builtin (d->icode, arglist, target);

  switch (fcode)
    {
    case SPE_BUILTIN_EVSTDDX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
    case SPE_BUILTIN_EVSTDHX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
    case SPE_BUILTIN_EVSTDWX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
    case SPE_BUILTIN_EVSTWHEX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
    case SPE_BUILTIN_EVSTWHOX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
    case SPE_BUILTIN_EVSTWWEX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
    case SPE_BUILTIN_EVSTWWOX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
    case SPE_BUILTIN_EVSTDD:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
    case SPE_BUILTIN_EVSTDH:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
    case SPE_BUILTIN_EVSTDW:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
    case SPE_BUILTIN_EVSTWHE:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
    case SPE_BUILTIN_EVSTWHO:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
    case SPE_BUILTIN_EVSTWWE:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
    case SPE_BUILTIN_EVSTWWO:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
    case SPE_BUILTIN_MFSPEFSCR:
      /* Read the SPE status and control register.  */
      icode = CODE_FOR_spe_mfspefscr;
      tmode = insn_data[icode].operand[0].mode;

      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);

      pat = GEN_FCN (icode) (target);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;
    case SPE_BUILTIN_MTSPEFSCR:
      /* Write the SPE status and control register.  */
      icode = CODE_FOR_spe_mtspefscr;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      mode0 = insn_data[icode].operand[0].mode;

      if (arg0 == error_mark_node)
	return const0_rtx;

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      pat = GEN_FCN (icode) (op0);
      if (pat)
	emit_insn (pat);
      return NULL_RTX;
    default:
      break;
    }

  /* Nothing matched; tell the caller to keep looking.  */
  *expandedp = false;
  return NULL_RTX;
}
/* Expand an SPE predicate builtin.  ICODE is the compare insn and
   ARGLIST is (form, arg0, arg1), where FORM selects the all/any/
   upper/lower variant.  Returns the SImode result, or const0_rtx on
   error or when no insn could be generated.  */
static rtx
spe_expand_predicate_builtin (enum insn_code icode, tree arglist, rtx target)
{
  rtx pat, scratch, tmp;
  tree form = TREE_VALUE (arglist);
  tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
  int form_int;
  enum rtx_code code;

  if (TREE_CODE (form) != INTEGER_CST)
    {
      error ("argument 1 of __builtin_spe_predicate must be a constant");
      return const0_rtx;
    }
  else
    form_int = TREE_INT_CST_LOW (form);

  /* Both compare inputs must share a mode.  */
  if (mode0 != mode1)
    abort ();

  /* If we have invalid arguments, bail out before generating bad rtl.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  if (target == 0
      || GET_MODE (target) != SImode
      || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
    target = gen_reg_rtx (SImode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  /* The compare deposits its result in a condition register.  */
  scratch = gen_reg_rtx (CCmode);

  pat = GEN_FCN (icode) (scratch, op0, op1);
  if (! pat)
    return const0_rtx;
  emit_insn (pat);

  /* There are 4 variants for each predicate: _any_, _all_, _upper_,
     _lower_.  We use one compare, but look in different bits of the
     CR for each variant.

     There are 2 elements in each SPE simd type (upper/lower).  The CR
     bits are set as follows:

     BIT0  | BIT 1  | BIT 2   | BIT 3
     U     |   L    | (U | L) | (U & L)

     So, for an "all" relationship, BIT 3 would be set.
     For an "any" relationship, BIT 2 would be set.  Etc.

     Following traditional nomenclature, these bits map to:

     BIT0  | BIT 1  | BIT 2   | BIT 3
     LT    | GT     | EQ      | OV

     Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
  */

  switch (form_int)
    {
      /* All variant.  OV bit.  */
    case 0:
      /* We need to get to the OV bit, which is the ORDERED bit.  We
	 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
	 that's ugly and will trigger a validate_condition_mode abort.
	 So let's just use another pattern.  */
      emit_insn (gen_move_from_CR_ov_bit (target, scratch));
      return target;
      /* Any variant.  EQ bit.  */
    case 1:
      code = EQ;
      break;
      /* Upper variant.  LT bit.  */
    case 2:
      code = LT;
      break;
      /* Lower variant.  GT bit.  */
    case 3:
      code = GT;
      break;
    default:
      error ("argument 1 of __builtin_spe_predicate is out of range");
      return const0_rtx;
    }

  /* Materialize the selected CR bit as an SImode comparison result.  */
  tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
  emit_move_insn (target, tmp);

  return target;
}
/* The evsel builtins look like this:

     e = __builtin_spe_evsel_OP (a, b, c, d);

   and work like this:

     e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
     e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
*/
static rtx
spe_expand_evsel_builtin (enum insn_code icode, tree arglist, rtx target)
{
  rtx pat, scratch;
  tree arg0 = TREE_VALUE (arglist);
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
  rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;

  /* The compare insn requires both inputs in the same mode.  */
  if (mode0 != mode1)
    abort ();

  /* If we have invalid arguments, bail out before generating bad rtl.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node
      || arg2 == error_mark_node || arg3 == error_mark_node)
    return const0_rtx;

  if (target == 0
      || GET_MODE (target) != mode0
      || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
    target = gen_reg_rtx (mode0);

  /* NOTE(review): operand[1]'s predicate is reused for all four
     operands below; all four share MODE0 (== MODE1), so this looks
     deliberate — confirm against the SPE compare/evsel patterns.  */
  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode0, op1);
  if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
    op2 = copy_to_mode_reg (mode0, op2);
  if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
    op3 = copy_to_mode_reg (mode0, op3);

  /* Generate the compare.  */
  scratch = gen_reg_rtx (CCmode);
  pat = GEN_FCN (icode) (scratch, op0, op1);
  if (! pat)
    return const0_rtx;
  emit_insn (pat);

  /* Select between OP2 and OP3 on the compare result; V2SImode uses
     the integer evsel pattern, otherwise the float variant.  */
  if (mode0 == V2SImode)
    emit_insn (gen_spe_evsel (target, op2, op3, scratch));
  else
    emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));

  return target;
}
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */
static rtx
rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
		       enum machine_mode mode ATTRIBUTE_UNUSED,
		       int ignore ATTRIBUTE_UNUSED)
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  struct builtin_description *d;
  size_t i;
  rtx ret;
  bool success;

  /* Let the target-specific expanders handle the non-generic
     builtins first.  */
  if (TARGET_ALTIVEC)
    {
      ret = altivec_expand_builtin (exp, target, &success);

      if (success)
	return ret;
    }
  if (TARGET_SPE)
    {
      ret = spe_expand_builtin (exp, target, &success);

      if (success)
	return ret;
    }

  if (TARGET_ALTIVEC || TARGET_SPE)
    {
      /* Handle simple unary operations.  */
      d = (struct builtin_description *) bdesc_1arg;
      for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
	if (d->code == fcode)
	  return rs6000_expand_unop_builtin (d->icode, arglist, target);

      /* Handle simple binary operations.  */
      d = (struct builtin_description *) bdesc_2arg;
      for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
	if (d->code == fcode)
	  return rs6000_expand_binop_builtin (d->icode, arglist, target);

      /* Handle simple ternary operations.  */
      d = (struct builtin_description *) bdesc_3arg;
      for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
	if (d->code == fcode)
	  return rs6000_expand_ternop_builtin (d->icode, arglist, target);
    }

  /* We only get here for a builtin no expander recognized.  */
  abort ();
  return NULL_RTX;
}
/* Create the rs6000 builtin types and declarations, then register the
   SPE/AltiVec builtin functions appropriate for the current target.  */
static void
rs6000_init_builtins (void)
{
  /* Opaque SPE vector types; copies so they stay distinct from the
     generic V2SI/V2SF type nodes.  */
  opaque_V2SI_type_node = copy_node (V2SI_type_node);
  opaque_V2SF_type_node = copy_node (V2SF_type_node);
  opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);

  /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
     types, especially in C++ land.  Similarly, 'vector pixel' is distinct from
     'vector unsigned short'.  */

  bool_char_type_node = copy_node (unsigned_intQI_type_node);
  TYPE_MAIN_VARIANT (bool_char_type_node) = bool_char_type_node;
  bool_short_type_node = copy_node (unsigned_intHI_type_node);
  TYPE_MAIN_VARIANT (bool_short_type_node) = bool_short_type_node;
  bool_int_type_node = copy_node (unsigned_intSI_type_node);
  TYPE_MAIN_VARIANT (bool_int_type_node) = bool_int_type_node;
  pixel_type_node = copy_node (unsigned_intHI_type_node);
  TYPE_MAIN_VARIANT (pixel_type_node) = pixel_type_node;

  /* Make the element types visible to the front end under their
     AltiVec keyword spellings.  */
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__bool char"),
					    bool_char_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__bool short"),
					    bool_short_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__bool int"),
					    bool_int_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__pixel"),
					    pixel_type_node));

  bool_V16QI_type_node = make_vector (V16QImode, bool_char_type_node, 1);
  bool_V8HI_type_node = make_vector (V8HImode, bool_short_type_node, 1);
  bool_V4SI_type_node = make_vector (V4SImode, bool_int_type_node, 1);
  pixel_V8HI_type_node = make_vector (V8HImode, pixel_type_node, 1);

  /* Likewise the full vector types.  */
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector unsigned char"),
					    unsigned_V16QI_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector signed char"),
					    V16QI_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector __bool char"),
					    bool_V16QI_type_node));

  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector unsigned short"),
					    unsigned_V8HI_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector signed short"),
					    V8HI_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector __bool short"),
					    bool_V8HI_type_node));

  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector unsigned int"),
					    unsigned_V4SI_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector signed int"),
					    V4SI_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector __bool int"),
					    bool_V4SI_type_node));

  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector float"),
					    V4SF_type_node));
  (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
					    get_identifier ("__vector __pixel"),
					    pixel_V8HI_type_node));

  /* Register the builtin functions for whichever vector extensions
     this target provides.  */
  if (TARGET_SPE)
    spe_init_builtins ();
  if (TARGET_ALTIVEC)
    altivec_init_builtins ();
  if (TARGET_ALTIVEC || TARGET_SPE)
    rs6000_common_init_builtins ();
}
6544 /* Search through a set of builtins and enable the mask bits.
6545 DESC is an array of builtins.
6546 SIZE is the total number of builtins.
6547 START is the builtin enum at which to start.
6548 END is the builtin enum at which to end. */
6549 static void
6550 enable_mask_for_builtins (struct builtin_description *desc, int size,
6551 enum rs6000_builtins start,
6552 enum rs6000_builtins end)
6554 int i;
6556 for (i = 0; i < size; ++i)
6557 if (desc[i].code == start)
6558 break;
6560 if (i == size)
6561 return;
6563 for (; i < size; ++i)
6565 /* Flip all the bits on. */
6566 desc[i].mask = target_flags;
6567 if (desc[i].code == end)
6568 break;
/* Build and register the SPE-specific builtin functions: the irregular
   loads/stores, the SPEFSCR accessors, the splat builtins, and the
   predicate/evsel variants.  The simple unary and binary SPE builtins
   are created in rs6000_common_init_builtins; here we only enable
   their mask bits (see the comment below).  */
6572 static void
6573 spe_init_builtins (void)
6575   tree endlink = void_list_node;
6576   tree puint_type_node = build_pointer_type (unsigned_type_node);
6577   tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
6578   struct builtin_description *d;
6579   size_t i;
/* Function-type tree nodes shared by the registrations below.  The
   naming encodes result and argument types, e.g. v2si_ftype_puint_int
   is "opaque V2SI result from (unsigned int *, int)".  */
6581   tree v2si_ftype_4_v2si
6582     = build_function_type
6583     (opaque_V2SI_type_node,
6584      tree_cons (NULL_TREE, opaque_V2SI_type_node,
6585 		tree_cons (NULL_TREE, opaque_V2SI_type_node,
6586 			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
6587 				      tree_cons (NULL_TREE, opaque_V2SI_type_node,
6588 						 endlink)))));
6590   tree v2sf_ftype_4_v2sf
6591     = build_function_type
6592     (opaque_V2SF_type_node,
6593      tree_cons (NULL_TREE, opaque_V2SF_type_node,
6594 		tree_cons (NULL_TREE, opaque_V2SF_type_node,
6595 			   tree_cons (NULL_TREE, opaque_V2SF_type_node,
6596 				      tree_cons (NULL_TREE, opaque_V2SF_type_node,
6597 						 endlink)))));
6599   tree int_ftype_int_v2si_v2si
6600     = build_function_type
6601     (integer_type_node,
6602      tree_cons (NULL_TREE, integer_type_node,
6603 		tree_cons (NULL_TREE, opaque_V2SI_type_node,
6604 			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
6605 				      endlink))));
6607   tree int_ftype_int_v2sf_v2sf
6608     = build_function_type
6609     (integer_type_node,
6610      tree_cons (NULL_TREE, integer_type_node,
6611 		tree_cons (NULL_TREE, opaque_V2SF_type_node,
6612 			   tree_cons (NULL_TREE, opaque_V2SF_type_node,
6613 				      endlink))));
6615   tree void_ftype_v2si_puint_int
6616     = build_function_type (void_type_node,
6617 			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
6618 				      tree_cons (NULL_TREE, puint_type_node,
6619 						 tree_cons (NULL_TREE,
6620 							    integer_type_node,
6621 							    endlink))));
6623   tree void_ftype_v2si_puint_char
6624     = build_function_type (void_type_node,
6625 			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
6626 				      tree_cons (NULL_TREE, puint_type_node,
6627 						 tree_cons (NULL_TREE,
6628 							    char_type_node,
6629 							    endlink))));
6631   tree void_ftype_v2si_pv2si_int
6632     = build_function_type (void_type_node,
6633 			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
6634 				      tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6635 						 tree_cons (NULL_TREE,
6636 							    integer_type_node,
6637 							    endlink))));
6639   tree void_ftype_v2si_pv2si_char
6640     = build_function_type (void_type_node,
6641 			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
6642 				      tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6643 						 tree_cons (NULL_TREE,
6644 							    char_type_node,
6645 							    endlink))));
6647   tree void_ftype_int
6648     = build_function_type (void_type_node,
6649 			   tree_cons (NULL_TREE, integer_type_node, endlink));
6651   tree int_ftype_void
6652     = build_function_type (integer_type_node, endlink);
6654   tree v2si_ftype_pv2si_int
6655     = build_function_type (opaque_V2SI_type_node,
6656 			   tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6657 				      tree_cons (NULL_TREE, integer_type_node,
6658 						 endlink)));
6660   tree v2si_ftype_puint_int
6661     = build_function_type (opaque_V2SI_type_node,
6662 			   tree_cons (NULL_TREE, puint_type_node,
6663 				      tree_cons (NULL_TREE, integer_type_node,
6664 						 endlink)));
6666   tree v2si_ftype_pushort_int
6667     = build_function_type (opaque_V2SI_type_node,
6668 			   tree_cons (NULL_TREE, pushort_type_node,
6669 				      tree_cons (NULL_TREE, integer_type_node,
6670 						 endlink)));
6672   tree v2si_ftype_signed_char
6673     = build_function_type (opaque_V2SI_type_node,
6674 			   tree_cons (NULL_TREE, signed_char_type_node,
6675 				      endlink));
6677   /* The initialization of the simple binary and unary builtins is
6678      done in rs6000_common_init_builtins, but we have to enable the
6679      mask bits here manually because we have run out of `target_flags'
6680      bits.  We really need to redesign this mask business.  */
/* Each call below enables the range of SPE builtins, inclusive,
   between the two enum codes given.  */
6682   enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
6683 			    ARRAY_SIZE (bdesc_2arg),
6684 			    SPE_BUILTIN_EVADDW,
6685 			    SPE_BUILTIN_EVXOR);
6686   enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
6687 			    ARRAY_SIZE (bdesc_1arg),
6688 			    SPE_BUILTIN_EVABS,
6689 			    SPE_BUILTIN_EVSUBFUSIAAW);
6690   enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
6691 			    ARRAY_SIZE (bdesc_spe_predicates),
6692 			    SPE_BUILTIN_EVCMPEQ,
6693 			    SPE_BUILTIN_EVFSTSTLT);
6694   enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
6695 			    ARRAY_SIZE (bdesc_spe_evsel),
6696 			    SPE_BUILTIN_EVSEL_CMPGTS,
6697 			    SPE_BUILTIN_EVSEL_FSTSTEQ);
/* Expose the opaque 64-bit vector type under its source-level name.  */
6699   (*lang_hooks.decls.pushdecl)
6700     (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
6701 		 opaque_V2SI_type_node));
6703   /* Initialize irregular SPE builtins.  */
6705   def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
6706   def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
6707   def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
6708   def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
6709   def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
6710   def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
6711   def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
6712   def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
6713   def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
6714   def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
6715   def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
6716   def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
6717   def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
6718   def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
6719   def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
6720   def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
6721   def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
6722   def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
6724   /* Loads.  */
6725   def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
6726   def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
6727   def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
6728   def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
6729   def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
6730   def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
6731   def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
6732   def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
6733   def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
6734   def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
6735   def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
6736   def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
6737   def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
6738   def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
6739   def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
6740   def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
6741   def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
6742   def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
6743   def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
6744   def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
6745   def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
6746   def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
6748   /* Predicates.  */
6749   d = (struct builtin_description *) bdesc_spe_predicates;
6750   for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
6752       tree type;
/* Pick the predicate signature from the mode of the insn's first
   input operand (operand 1); anything else is a table bug.  */
6754       switch (insn_data[d->icode].operand[1].mode)
6756 	case V2SImode:
6757 	  type = int_ftype_int_v2si_v2si;
6758 	  break;
6759 	case V2SFmode:
6760 	  type = int_ftype_int_v2sf_v2sf;
6761 	  break;
6762 	default:
6763 	  abort ();
6766       def_builtin (d->mask, d->name, type, d->code);
6769   /* Evsel predicates.  */
6770   d = (struct builtin_description *) bdesc_spe_evsel;
6771   for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
6773       tree type;
/* Same scheme as above: operand 1's mode chooses the 4-argument
   evsel signature.  */
6775       switch (insn_data[d->icode].operand[1].mode)
6777 	case V2SImode:
6778 	  type = v2si_ftype_4_v2si;
6779 	  break;
6780 	case V2SFmode:
6781 	  type = v2sf_ftype_4_v2sf;
6782 	  break;
6783 	default:
6784 	  abort ();
6787       def_builtin (d->mask, d->name, type, d->code);
/* Build and register the AltiVec-specific builtin functions: the
   internal load/store helpers, VSCR access, data-stream controls,
   the lv*/stv* loads and stores, the DST variants, the vector
   comparison predicates, and the abs* operators.  */
6791 static void
6792 altivec_init_builtins (void)
6794   struct builtin_description *d;
6795   struct builtin_description_predicates *dp;
6796   size_t i;
/* Pointer types used in the builtin signatures; the "pc" variants
   point to const-qualified element types.  */
6797   tree pfloat_type_node = build_pointer_type (float_type_node);
6798   tree pint_type_node = build_pointer_type (integer_type_node);
6799   tree pshort_type_node = build_pointer_type (short_integer_type_node);
6800   tree pchar_type_node = build_pointer_type (char_type_node);
6802   tree pvoid_type_node = build_pointer_type (void_type_node);
6804   tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
6805   tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
6806   tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
6807   tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
6809   tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
/* Function-type trees for the registrations below; names encode the
   result type followed by the argument types.  */
6811   tree int_ftype_int_v4si_v4si
6812     = build_function_type_list (integer_type_node,
6813 				integer_type_node, V4SI_type_node,
6814 				V4SI_type_node, NULL_TREE);
6815   tree v4sf_ftype_pcfloat
6816     = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
6817   tree void_ftype_pfloat_v4sf
6818     = build_function_type_list (void_type_node,
6819 				pfloat_type_node, V4SF_type_node, NULL_TREE);
6820   tree v4si_ftype_pcint
6821     = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
6822   tree void_ftype_pint_v4si
6823     = build_function_type_list (void_type_node,
6824 				pint_type_node, V4SI_type_node, NULL_TREE);
6825   tree v8hi_ftype_pcshort
6826     = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
6827   tree void_ftype_pshort_v8hi
6828     = build_function_type_list (void_type_node,
6829 				pshort_type_node, V8HI_type_node, NULL_TREE);
6830   tree v16qi_ftype_pcchar
6831     = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
6832   tree void_ftype_pchar_v16qi
6833     = build_function_type_list (void_type_node,
6834 				pchar_type_node, V16QI_type_node, NULL_TREE);
6835   tree void_ftype_v4si
6836     = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
6837   tree v8hi_ftype_void
6838     = build_function_type (V8HI_type_node, void_list_node);
6839   tree void_ftype_void
6840     = build_function_type (void_type_node, void_list_node);
6841   tree void_ftype_qi
6842     = build_function_type_list (void_type_node, char_type_node, NULL_TREE);
6844   tree v16qi_ftype_long_pcvoid
6845     = build_function_type_list (V16QI_type_node,
6846 				long_integer_type_node, pcvoid_type_node, NULL_TREE);
6847   tree v8hi_ftype_long_pcvoid
6848     = build_function_type_list (V8HI_type_node,
6849 				long_integer_type_node, pcvoid_type_node, NULL_TREE);
6850   tree v4si_ftype_long_pcvoid
6851     = build_function_type_list (V4SI_type_node,
6852 				long_integer_type_node, pcvoid_type_node, NULL_TREE);
6854   tree void_ftype_v4si_long_pvoid
6855     = build_function_type_list (void_type_node,
6856 				V4SI_type_node, long_integer_type_node,
6857 				pvoid_type_node, NULL_TREE);
6858   tree void_ftype_v16qi_long_pvoid
6859     = build_function_type_list (void_type_node,
6860 				V16QI_type_node, long_integer_type_node,
6861 				pvoid_type_node, NULL_TREE);
6862   tree void_ftype_v8hi_long_pvoid
6863     = build_function_type_list (void_type_node,
6864 				V8HI_type_node, long_integer_type_node,
6865 				pvoid_type_node, NULL_TREE);
6866   tree int_ftype_int_v8hi_v8hi
6867     = build_function_type_list (integer_type_node,
6868 				integer_type_node, V8HI_type_node,
6869 				V8HI_type_node, NULL_TREE);
6870   tree int_ftype_int_v16qi_v16qi
6871     = build_function_type_list (integer_type_node,
6872 				integer_type_node, V16QI_type_node,
6873 				V16QI_type_node, NULL_TREE);
6874   tree int_ftype_int_v4sf_v4sf
6875     = build_function_type_list (integer_type_node,
6876 				integer_type_node, V4SF_type_node,
6877 				V4SF_type_node, NULL_TREE);
6878   tree v4si_ftype_v4si
6879     = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
6880   tree v8hi_ftype_v8hi
6881     = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
6882   tree v16qi_ftype_v16qi
6883     = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
6884   tree v4sf_ftype_v4sf
6885     = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
6886   tree void_ftype_pcvoid_int_int
6887     = build_function_type_list (void_type_node,
6888 				pcvoid_type_node, integer_type_node,
6889 				integer_type_node, NULL_TREE);
6890   tree int_ftype_pcchar
6891     = build_function_type_list (integer_type_node,
6892 				pcchar_type_node, NULL_TREE);
/* Register the irregular AltiVec builtins, each gated on MASK_ALTIVEC.  */
6894   def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
6895 	       ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
6896   def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
6897 	       ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
6898   def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
6899 	       ALTIVEC_BUILTIN_LD_INTERNAL_4si);
6900   def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
6901 	       ALTIVEC_BUILTIN_ST_INTERNAL_4si);
6902   def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
6903 	       ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
6904   def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
6905 	       ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
6906   def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
6907 	       ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
6908   def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
6909 	       ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
6910   def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
6911   def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
6912   def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
6913   def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
6914   def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
6915   def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
6916   def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
6917   def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
6918   def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
6919   def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
6920   def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
6921   def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
6922   def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
6923   def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
6924   def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
6925   def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
6927   /* See altivec.h for usage of "__builtin_altivec_compiletime_error".  */
6928   def_builtin (MASK_ALTIVEC, "__builtin_altivec_compiletime_error", int_ftype_pcchar,
6929 	       ALTIVEC_BUILTIN_COMPILETIME_ERROR);
6931   /* Add the DST variants.  */
6932   d = (struct builtin_description *) bdesc_dst;
6933   for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
6934     def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);
6936   /* Initialize the predicates.  */
6937   dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
6938   for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
6940       enum machine_mode mode1;
6941       tree type;
6943       mode1 = insn_data[dp->icode].operand[1].mode;
/* The predicate's signature is chosen from the mode of the insn's
   first input operand; an unexpected mode is a table bug.  */
6945       switch (mode1)
6947 	case V4SImode:
6948 	  type = int_ftype_int_v4si_v4si;
6949 	  break;
6950 	case V8HImode:
6951 	  type = int_ftype_int_v8hi_v8hi;
6952 	  break;
6953 	case V16QImode:
6954 	  type = int_ftype_int_v16qi_v16qi;
6955 	  break;
6956 	case V4SFmode:
6957 	  type = int_ftype_int_v4sf_v4sf;
6958 	  break;
6959 	default:
6960 	  abort ();
6963       def_builtin (dp->mask, dp->name, type, dp->code);
6966   /* Initialize the abs* operators.  */
6967   d = (struct builtin_description *) bdesc_abs;
6968   for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
6970       enum machine_mode mode0;
6971       tree type;
6973       mode0 = insn_data[d->icode].operand[0].mode;
/* abs* is unary with matching input/result types, so the result
   operand's mode (operand 0) picks the signature.  */
6975       switch (mode0)
6977 	case V4SImode:
6978 	  type = v4si_ftype_v4si;
6979 	  break;
6980 	case V8HImode:
6981 	  type = v8hi_ftype_v8hi;
6982 	  break;
6983 	case V16QImode:
6984 	  type = v16qi_ftype_v16qi;
6985 	  break;
6986 	case V4SFmode:
6987 	  type = v4sf_ftype_v4sf;
6988 	  break;
6989 	default:
6990 	  abort ();
6993       def_builtin (d->mask, d->name, type, d->code);
6997 static void
6998 rs6000_common_init_builtins (void)
7000 struct builtin_description *d;
7001 size_t i;
7003 tree v4sf_ftype_v4sf_v4sf_v16qi
7004 = build_function_type_list (V4SF_type_node,
7005 V4SF_type_node, V4SF_type_node,
7006 V16QI_type_node, NULL_TREE);
7007 tree v4si_ftype_v4si_v4si_v16qi
7008 = build_function_type_list (V4SI_type_node,
7009 V4SI_type_node, V4SI_type_node,
7010 V16QI_type_node, NULL_TREE);
7011 tree v8hi_ftype_v8hi_v8hi_v16qi
7012 = build_function_type_list (V8HI_type_node,
7013 V8HI_type_node, V8HI_type_node,
7014 V16QI_type_node, NULL_TREE);
7015 tree v16qi_ftype_v16qi_v16qi_v16qi
7016 = build_function_type_list (V16QI_type_node,
7017 V16QI_type_node, V16QI_type_node,
7018 V16QI_type_node, NULL_TREE);
7019 tree v4si_ftype_int
7020 = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
7021 tree v8hi_ftype_int
7022 = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
7023 tree v16qi_ftype_int
7024 = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
7025 tree v8hi_ftype_v16qi
7026 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
7027 tree v4sf_ftype_v4sf
7028 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
7030 tree v2si_ftype_v2si_v2si
7031 = build_function_type_list (opaque_V2SI_type_node,
7032 opaque_V2SI_type_node,
7033 opaque_V2SI_type_node, NULL_TREE);
7035 tree v2sf_ftype_v2sf_v2sf
7036 = build_function_type_list (opaque_V2SF_type_node,
7037 opaque_V2SF_type_node,
7038 opaque_V2SF_type_node, NULL_TREE);
7040 tree v2si_ftype_int_int
7041 = build_function_type_list (opaque_V2SI_type_node,
7042 integer_type_node, integer_type_node,
7043 NULL_TREE);
7045 tree v2si_ftype_v2si
7046 = build_function_type_list (opaque_V2SI_type_node,
7047 opaque_V2SI_type_node, NULL_TREE);
7049 tree v2sf_ftype_v2sf
7050 = build_function_type_list (opaque_V2SF_type_node,
7051 opaque_V2SF_type_node, NULL_TREE);
7053 tree v2sf_ftype_v2si
7054 = build_function_type_list (opaque_V2SF_type_node,
7055 opaque_V2SI_type_node, NULL_TREE);
7057 tree v2si_ftype_v2sf
7058 = build_function_type_list (opaque_V2SI_type_node,
7059 opaque_V2SF_type_node, NULL_TREE);
7061 tree v2si_ftype_v2si_char
7062 = build_function_type_list (opaque_V2SI_type_node,
7063 opaque_V2SI_type_node,
7064 char_type_node, NULL_TREE);
7066 tree v2si_ftype_int_char
7067 = build_function_type_list (opaque_V2SI_type_node,
7068 integer_type_node, char_type_node, NULL_TREE);
7070 tree v2si_ftype_char
7071 = build_function_type_list (opaque_V2SI_type_node,
7072 char_type_node, NULL_TREE);
7074 tree int_ftype_int_int
7075 = build_function_type_list (integer_type_node,
7076 integer_type_node, integer_type_node,
7077 NULL_TREE);
7079 tree v4si_ftype_v4si_v4si
7080 = build_function_type_list (V4SI_type_node,
7081 V4SI_type_node, V4SI_type_node, NULL_TREE);
7082 tree v4sf_ftype_v4si_int
7083 = build_function_type_list (V4SF_type_node,
7084 V4SI_type_node, integer_type_node, NULL_TREE);
7085 tree v4si_ftype_v4sf_int
7086 = build_function_type_list (V4SI_type_node,
7087 V4SF_type_node, integer_type_node, NULL_TREE);
7088 tree v4si_ftype_v4si_int
7089 = build_function_type_list (V4SI_type_node,
7090 V4SI_type_node, integer_type_node, NULL_TREE);
7091 tree v8hi_ftype_v8hi_int
7092 = build_function_type_list (V8HI_type_node,
7093 V8HI_type_node, integer_type_node, NULL_TREE);
7094 tree v16qi_ftype_v16qi_int
7095 = build_function_type_list (V16QI_type_node,
7096 V16QI_type_node, integer_type_node, NULL_TREE);
7097 tree v16qi_ftype_v16qi_v16qi_int
7098 = build_function_type_list (V16QI_type_node,
7099 V16QI_type_node, V16QI_type_node,
7100 integer_type_node, NULL_TREE);
7101 tree v8hi_ftype_v8hi_v8hi_int
7102 = build_function_type_list (V8HI_type_node,
7103 V8HI_type_node, V8HI_type_node,
7104 integer_type_node, NULL_TREE);
7105 tree v4si_ftype_v4si_v4si_int
7106 = build_function_type_list (V4SI_type_node,
7107 V4SI_type_node, V4SI_type_node,
7108 integer_type_node, NULL_TREE);
7109 tree v4sf_ftype_v4sf_v4sf_int
7110 = build_function_type_list (V4SF_type_node,
7111 V4SF_type_node, V4SF_type_node,
7112 integer_type_node, NULL_TREE);
7113 tree v4sf_ftype_v4sf_v4sf
7114 = build_function_type_list (V4SF_type_node,
7115 V4SF_type_node, V4SF_type_node, NULL_TREE);
7116 tree v4sf_ftype_v4sf_v4sf_v4si
7117 = build_function_type_list (V4SF_type_node,
7118 V4SF_type_node, V4SF_type_node,
7119 V4SI_type_node, NULL_TREE);
7120 tree v4sf_ftype_v4sf_v4sf_v4sf
7121 = build_function_type_list (V4SF_type_node,
7122 V4SF_type_node, V4SF_type_node,
7123 V4SF_type_node, NULL_TREE);
7124 tree v4si_ftype_v4si_v4si_v4si
7125 = build_function_type_list (V4SI_type_node,
7126 V4SI_type_node, V4SI_type_node,
7127 V4SI_type_node, NULL_TREE);
7128 tree v8hi_ftype_v8hi_v8hi
7129 = build_function_type_list (V8HI_type_node,
7130 V8HI_type_node, V8HI_type_node, NULL_TREE);
7131 tree v8hi_ftype_v8hi_v8hi_v8hi
7132 = build_function_type_list (V8HI_type_node,
7133 V8HI_type_node, V8HI_type_node,
7134 V8HI_type_node, NULL_TREE);
7135 tree v4si_ftype_v8hi_v8hi_v4si
7136 = build_function_type_list (V4SI_type_node,
7137 V8HI_type_node, V8HI_type_node,
7138 V4SI_type_node, NULL_TREE);
7139 tree v4si_ftype_v16qi_v16qi_v4si
7140 = build_function_type_list (V4SI_type_node,
7141 V16QI_type_node, V16QI_type_node,
7142 V4SI_type_node, NULL_TREE);
7143 tree v16qi_ftype_v16qi_v16qi
7144 = build_function_type_list (V16QI_type_node,
7145 V16QI_type_node, V16QI_type_node, NULL_TREE);
7146 tree v4si_ftype_v4sf_v4sf
7147 = build_function_type_list (V4SI_type_node,
7148 V4SF_type_node, V4SF_type_node, NULL_TREE);
7149 tree v8hi_ftype_v16qi_v16qi
7150 = build_function_type_list (V8HI_type_node,
7151 V16QI_type_node, V16QI_type_node, NULL_TREE);
7152 tree v4si_ftype_v8hi_v8hi
7153 = build_function_type_list (V4SI_type_node,
7154 V8HI_type_node, V8HI_type_node, NULL_TREE);
7155 tree v8hi_ftype_v4si_v4si
7156 = build_function_type_list (V8HI_type_node,
7157 V4SI_type_node, V4SI_type_node, NULL_TREE);
7158 tree v16qi_ftype_v8hi_v8hi
7159 = build_function_type_list (V16QI_type_node,
7160 V8HI_type_node, V8HI_type_node, NULL_TREE);
7161 tree v4si_ftype_v16qi_v4si
7162 = build_function_type_list (V4SI_type_node,
7163 V16QI_type_node, V4SI_type_node, NULL_TREE);
7164 tree v4si_ftype_v16qi_v16qi
7165 = build_function_type_list (V4SI_type_node,
7166 V16QI_type_node, V16QI_type_node, NULL_TREE);
7167 tree v4si_ftype_v8hi_v4si
7168 = build_function_type_list (V4SI_type_node,
7169 V8HI_type_node, V4SI_type_node, NULL_TREE);
7170 tree v4si_ftype_v8hi
7171 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
7172 tree int_ftype_v4si_v4si
7173 = build_function_type_list (integer_type_node,
7174 V4SI_type_node, V4SI_type_node, NULL_TREE);
7175 tree int_ftype_v4sf_v4sf
7176 = build_function_type_list (integer_type_node,
7177 V4SF_type_node, V4SF_type_node, NULL_TREE);
7178 tree int_ftype_v16qi_v16qi
7179 = build_function_type_list (integer_type_node,
7180 V16QI_type_node, V16QI_type_node, NULL_TREE);
7181 tree int_ftype_v8hi_v8hi
7182 = build_function_type_list (integer_type_node,
7183 V8HI_type_node, V8HI_type_node, NULL_TREE);
7185 /* Add the simple ternary operators. */
7186 d = (struct builtin_description *) bdesc_3arg;
7187 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
7190 enum machine_mode mode0, mode1, mode2, mode3;
7191 tree type;
7193 if (d->name == 0 || d->icode == CODE_FOR_nothing)
7194 continue;
7196 mode0 = insn_data[d->icode].operand[0].mode;
7197 mode1 = insn_data[d->icode].operand[1].mode;
7198 mode2 = insn_data[d->icode].operand[2].mode;
7199 mode3 = insn_data[d->icode].operand[3].mode;
7201 /* When all four are of the same mode. */
7202 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
7204 switch (mode0)
7206 case V4SImode:
7207 type = v4si_ftype_v4si_v4si_v4si;
7208 break;
7209 case V4SFmode:
7210 type = v4sf_ftype_v4sf_v4sf_v4sf;
7211 break;
7212 case V8HImode:
7213 type = v8hi_ftype_v8hi_v8hi_v8hi;
7214 break;
7215 case V16QImode:
7216 type = v16qi_ftype_v16qi_v16qi_v16qi;
7217 break;
7218 default:
7219 abort();
7222 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
7224 switch (mode0)
7226 case V4SImode:
7227 type = v4si_ftype_v4si_v4si_v16qi;
7228 break;
7229 case V4SFmode:
7230 type = v4sf_ftype_v4sf_v4sf_v16qi;
7231 break;
7232 case V8HImode:
7233 type = v8hi_ftype_v8hi_v8hi_v16qi;
7234 break;
7235 case V16QImode:
7236 type = v16qi_ftype_v16qi_v16qi_v16qi;
7237 break;
7238 default:
7239 abort();
7242 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
7243 && mode3 == V4SImode)
7244 type = v4si_ftype_v16qi_v16qi_v4si;
7245 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
7246 && mode3 == V4SImode)
7247 type = v4si_ftype_v8hi_v8hi_v4si;
7248 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
7249 && mode3 == V4SImode)
7250 type = v4sf_ftype_v4sf_v4sf_v4si;
7252 /* vchar, vchar, vchar, 4 bit literal. */
7253 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
7254 && mode3 == QImode)
7255 type = v16qi_ftype_v16qi_v16qi_int;
7257 /* vshort, vshort, vshort, 4 bit literal. */
7258 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
7259 && mode3 == QImode)
7260 type = v8hi_ftype_v8hi_v8hi_int;
7262 /* vint, vint, vint, 4 bit literal. */
7263 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
7264 && mode3 == QImode)
7265 type = v4si_ftype_v4si_v4si_int;
7267 /* vfloat, vfloat, vfloat, 4 bit literal. */
7268 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
7269 && mode3 == QImode)
7270 type = v4sf_ftype_v4sf_v4sf_int;
7272 else
7273 abort ();
7275 def_builtin (d->mask, d->name, type, d->code);
7278 /* Add the simple binary operators. */
7279 d = (struct builtin_description *) bdesc_2arg;
7280 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
7282 enum machine_mode mode0, mode1, mode2;
7283 tree type;
7285 if (d->name == 0 || d->icode == CODE_FOR_nothing)
7286 continue;
7288 mode0 = insn_data[d->icode].operand[0].mode;
7289 mode1 = insn_data[d->icode].operand[1].mode;
7290 mode2 = insn_data[d->icode].operand[2].mode;
7292 /* When all three operands are of the same mode. */
7293 if (mode0 == mode1 && mode1 == mode2)
7295 switch (mode0)
7297 case V4SFmode:
7298 type = v4sf_ftype_v4sf_v4sf;
7299 break;
7300 case V4SImode:
7301 type = v4si_ftype_v4si_v4si;
7302 break;
7303 case V16QImode:
7304 type = v16qi_ftype_v16qi_v16qi;
7305 break;
7306 case V8HImode:
7307 type = v8hi_ftype_v8hi_v8hi;
7308 break;
7309 case V2SImode:
7310 type = v2si_ftype_v2si_v2si;
7311 break;
7312 case V2SFmode:
7313 type = v2sf_ftype_v2sf_v2sf;
7314 break;
7315 case SImode:
7316 type = int_ftype_int_int;
7317 break;
7318 default:
7319 abort ();
7323 /* A few other combos we really don't want to do manually. */
7325 /* vint, vfloat, vfloat. */
7326 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
7327 type = v4si_ftype_v4sf_v4sf;
7329 /* vshort, vchar, vchar. */
7330 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
7331 type = v8hi_ftype_v16qi_v16qi;
7333 /* vint, vshort, vshort. */
7334 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
7335 type = v4si_ftype_v8hi_v8hi;
7337 /* vshort, vint, vint. */
7338 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
7339 type = v8hi_ftype_v4si_v4si;
7341 /* vchar, vshort, vshort. */
7342 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
7343 type = v16qi_ftype_v8hi_v8hi;
7345 /* vint, vchar, vint. */
7346 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
7347 type = v4si_ftype_v16qi_v4si;
7349 /* vint, vchar, vchar. */
7350 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
7351 type = v4si_ftype_v16qi_v16qi;
7353 /* vint, vshort, vint. */
7354 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
7355 type = v4si_ftype_v8hi_v4si;
7357 /* vint, vint, 5 bit literal. */
7358 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
7359 type = v4si_ftype_v4si_int;
7361 /* vshort, vshort, 5 bit literal. */
7362 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
7363 type = v8hi_ftype_v8hi_int;
7365 /* vchar, vchar, 5 bit literal. */
7366 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
7367 type = v16qi_ftype_v16qi_int;
7369 /* vfloat, vint, 5 bit literal. */
7370 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
7371 type = v4sf_ftype_v4si_int;
7373 /* vint, vfloat, 5 bit literal. */
7374 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
7375 type = v4si_ftype_v4sf_int;
7377 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
7378 type = v2si_ftype_int_int;
7380 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
7381 type = v2si_ftype_v2si_char;
7383 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
7384 type = v2si_ftype_int_char;
7386 /* int, x, x. */
7387 else if (mode0 == SImode)
7389 switch (mode1)
7391 case V4SImode:
7392 type = int_ftype_v4si_v4si;
7393 break;
7394 case V4SFmode:
7395 type = int_ftype_v4sf_v4sf;
7396 break;
7397 case V16QImode:
7398 type = int_ftype_v16qi_v16qi;
7399 break;
7400 case V8HImode:
7401 type = int_ftype_v8hi_v8hi;
7402 break;
7403 default:
7404 abort ();
7408 else
7409 abort ();
7411 def_builtin (d->mask, d->name, type, d->code);
7414 /* Add the simple unary operators. */
7415 d = (struct builtin_description *) bdesc_1arg;
7416 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
7418 enum machine_mode mode0, mode1;
7419 tree type;
7421 if (d->name == 0 || d->icode == CODE_FOR_nothing)
7422 continue;
7424 mode0 = insn_data[d->icode].operand[0].mode;
7425 mode1 = insn_data[d->icode].operand[1].mode;
7427 if (mode0 == V4SImode && mode1 == QImode)
7428 type = v4si_ftype_int;
7429 else if (mode0 == V8HImode && mode1 == QImode)
7430 type = v8hi_ftype_int;
7431 else if (mode0 == V16QImode && mode1 == QImode)
7432 type = v16qi_ftype_int;
7433 else if (mode0 == V4SFmode && mode1 == V4SFmode)
7434 type = v4sf_ftype_v4sf;
7435 else if (mode0 == V8HImode && mode1 == V16QImode)
7436 type = v8hi_ftype_v16qi;
7437 else if (mode0 == V4SImode && mode1 == V8HImode)
7438 type = v4si_ftype_v8hi;
7439 else if (mode0 == V2SImode && mode1 == V2SImode)
7440 type = v2si_ftype_v2si;
7441 else if (mode0 == V2SFmode && mode1 == V2SFmode)
7442 type = v2sf_ftype_v2sf;
7443 else if (mode0 == V2SFmode && mode1 == V2SImode)
7444 type = v2sf_ftype_v2si;
7445 else if (mode0 == V2SImode && mode1 == V2SFmode)
7446 type = v2si_ftype_v2sf;
7447 else if (mode0 == V2SImode && mode1 == QImode)
7448 type = v2si_ftype_char;
7449 else
7450 abort ();
7452 def_builtin (d->mask, d->name, type, d->code);
7456 static void
7457 rs6000_init_libfuncs (void)
7459 if (!TARGET_HARD_FLOAT)
7460 return;
7462 if (DEFAULT_ABI != ABI_V4)
7464 if (TARGET_XCOFF && ! TARGET_POWER2 && ! TARGET_POWERPC)
7466 /* AIX library routines for float->int conversion. */
7467 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
7468 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
7469 set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
7470 set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
7473 /* Standard AIX/Darwin/64-bit SVR4 quad floating point routines. */
7474 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
7475 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
7476 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
7477 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
7479 else
7481 /* 32-bit SVR4 quad floating point routines. */
7483 set_optab_libfunc (add_optab, TFmode, "_q_add");
7484 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
7485 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
7486 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
7487 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
7488 if (TARGET_PPC_GPOPT || TARGET_POWER2)
7489 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
7491 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
7492 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
7493 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
7494 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
7495 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
7496 set_optab_libfunc (le_optab, TFmode, "_q_fle");
7498 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
7499 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
7500 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
7501 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
7502 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
7503 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
7504 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
7508 /* Expand a block move operation, and return 1 if successful. Return 0
7509 if we should let the compiler generate normal code.
7511 operands[0] is the destination
7512 operands[1] is the source
7513 operands[2] is the length
7514 operands[3] is the alignment */
7516 #define MAX_MOVE_REG 4
7519 expand_block_move (rtx operands[])
7521 rtx orig_dest = operands[0];
7522 rtx orig_src = operands[1];
7523 rtx bytes_rtx = operands[2];
7524 rtx align_rtx = operands[3];
7525 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
7526 int align;
7527 int bytes;
7528 int offset;
7529 int move_bytes;
7530 rtx stores[MAX_MOVE_REG];
7531 int num_reg = 0;
7533 /* If this is not a fixed size move, just call memcpy */
7534 if (! constp)
7535 return 0;
7537 /* If this is not a fixed size alignment, abort */
7538 if (GET_CODE (align_rtx) != CONST_INT)
7539 abort ();
7540 align = INTVAL (align_rtx);
7542 /* Anything to move? */
7543 bytes = INTVAL (bytes_rtx);
7544 if (bytes <= 0)
7545 return 1;
7547 /* store_one_arg depends on expand_block_move to handle at least the size of
7548 reg_parm_stack_space. */
7549 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
7550 return 0;
7552 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
7554 union {
7555 rtx (*movstrsi) (rtx, rtx, rtx, rtx);
7556 rtx (*mov) (rtx, rtx);
7557 } gen_func;
7558 enum machine_mode mode = BLKmode;
7559 rtx src, dest;
7561 if (TARGET_STRING
7562 && bytes > 24 /* move up to 32 bytes at a time */
7563 && ! fixed_regs[5]
7564 && ! fixed_regs[6]
7565 && ! fixed_regs[7]
7566 && ! fixed_regs[8]
7567 && ! fixed_regs[9]
7568 && ! fixed_regs[10]
7569 && ! fixed_regs[11]
7570 && ! fixed_regs[12])
7572 move_bytes = (bytes > 32) ? 32 : bytes;
7573 gen_func.movstrsi = gen_movstrsi_8reg;
7575 else if (TARGET_STRING
7576 && bytes > 16 /* move up to 24 bytes at a time */
7577 && ! fixed_regs[5]
7578 && ! fixed_regs[6]
7579 && ! fixed_regs[7]
7580 && ! fixed_regs[8]
7581 && ! fixed_regs[9]
7582 && ! fixed_regs[10])
7584 move_bytes = (bytes > 24) ? 24 : bytes;
7585 gen_func.movstrsi = gen_movstrsi_6reg;
7587 else if (TARGET_STRING
7588 && bytes > 8 /* move up to 16 bytes at a time */
7589 && ! fixed_regs[5]
7590 && ! fixed_regs[6]
7591 && ! fixed_regs[7]
7592 && ! fixed_regs[8])
7594 move_bytes = (bytes > 16) ? 16 : bytes;
7595 gen_func.movstrsi = gen_movstrsi_4reg;
7597 else if (bytes >= 8 && TARGET_POWERPC64
7598 /* 64-bit loads and stores require word-aligned
7599 displacements. */
7600 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
7602 move_bytes = 8;
7603 mode = DImode;
7604 gen_func.mov = gen_movdi;
7606 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
7607 { /* move up to 8 bytes at a time */
7608 move_bytes = (bytes > 8) ? 8 : bytes;
7609 gen_func.movstrsi = gen_movstrsi_2reg;
7611 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
7612 { /* move 4 bytes */
7613 move_bytes = 4;
7614 mode = SImode;
7615 gen_func.mov = gen_movsi;
7617 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
7618 { /* move 2 bytes */
7619 move_bytes = 2;
7620 mode = HImode;
7621 gen_func.mov = gen_movhi;
7623 else if (TARGET_STRING && bytes > 1)
7624 { /* move up to 4 bytes at a time */
7625 move_bytes = (bytes > 4) ? 4 : bytes;
7626 gen_func.movstrsi = gen_movstrsi_1reg;
7628 else /* move 1 byte at a time */
7630 move_bytes = 1;
7631 mode = QImode;
7632 gen_func.mov = gen_movqi;
7635 src = adjust_address (orig_src, mode, offset);
7636 dest = adjust_address (orig_dest, mode, offset);
7638 if (mode != BLKmode)
7640 rtx tmp_reg = gen_reg_rtx (mode);
7642 emit_insn ((*gen_func.mov) (tmp_reg, src));
7643 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
7646 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
7648 int i;
7649 for (i = 0; i < num_reg; i++)
7650 emit_insn (stores[i]);
7651 num_reg = 0;
7654 if (mode == BLKmode)
7656 /* Move the address into scratch registers. The movstrsi
7657 patterns require zero offset. */
7658 if (!REG_P (XEXP (src, 0)))
7660 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
7661 src = replace_equiv_address (src, src_reg);
7663 set_mem_size (src, GEN_INT (move_bytes));
7665 if (!REG_P (XEXP (dest, 0)))
7667 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
7668 dest = replace_equiv_address (dest, dest_reg);
7670 set_mem_size (dest, GEN_INT (move_bytes));
7672 emit_insn ((*gen_func.movstrsi) (dest, src,
7673 GEN_INT (move_bytes & 31),
7674 align_rtx));
7678 return 1;
7682 /* Return 1 if OP is a load multiple operation. It is known to be a
7683 PARALLEL and the first section will be tested. */
7686 load_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7688 int count = XVECLEN (op, 0);
7689 unsigned int dest_regno;
7690 rtx src_addr;
7691 int i;
7693 /* Perform a quick check so we don't blow up below. */
7694 if (count <= 1
7695 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7696 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7697 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
7698 return 0;
7700 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7701 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
7703 for (i = 1; i < count; i++)
7705 rtx elt = XVECEXP (op, 0, i);
7707 if (GET_CODE (elt) != SET
7708 || GET_CODE (SET_DEST (elt)) != REG
7709 || GET_MODE (SET_DEST (elt)) != SImode
7710 || REGNO (SET_DEST (elt)) != dest_regno + i
7711 || GET_CODE (SET_SRC (elt)) != MEM
7712 || GET_MODE (SET_SRC (elt)) != SImode
7713 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
7714 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
7715 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
7716 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
7717 return 0;
7720 return 1;
7723 /* Similar, but tests for store multiple. Here, the second vector element
7724 is a CLOBBER. It will be tested later. */
7727 store_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7729 int count = XVECLEN (op, 0) - 1;
7730 unsigned int src_regno;
7731 rtx dest_addr;
7732 int i;
7734 /* Perform a quick check so we don't blow up below. */
7735 if (count <= 1
7736 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7737 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
7738 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
7739 return 0;
7741 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7742 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
7744 for (i = 1; i < count; i++)
7746 rtx elt = XVECEXP (op, 0, i + 1);
7748 if (GET_CODE (elt) != SET
7749 || GET_CODE (SET_SRC (elt)) != REG
7750 || GET_MODE (SET_SRC (elt)) != SImode
7751 || REGNO (SET_SRC (elt)) != src_regno + i
7752 || GET_CODE (SET_DEST (elt)) != MEM
7753 || GET_MODE (SET_DEST (elt)) != SImode
7754 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
7755 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
7756 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
7757 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
7758 return 0;
7761 return 1;
7764 /* Return a string to perform a load_multiple operation.
7765 operands[0] is the vector.
7766 operands[1] is the source address.
7767 operands[2] is the first destination register. */
7769 const char *
7770 rs6000_output_load_multiple (rtx operands[3])
7772 /* We have to handle the case where the pseudo used to contain the address
7773 is assigned to one of the output registers. */
7774 int i, j;
7775 int words = XVECLEN (operands[0], 0);
7776 rtx xop[10];
7778 if (XVECLEN (operands[0], 0) == 1)
7779 return "{l|lwz} %2,0(%1)";
7781 for (i = 0; i < words; i++)
7782 if (refers_to_regno_p (REGNO (operands[2]) + i,
7783 REGNO (operands[2]) + i + 1, operands[1], 0))
7785 if (i == words-1)
7787 xop[0] = GEN_INT (4 * (words-1));
7788 xop[1] = operands[1];
7789 xop[2] = operands[2];
7790 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
7791 return "";
7793 else if (i == 0)
7795 xop[0] = GEN_INT (4 * (words-1));
7796 xop[1] = operands[1];
7797 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
7798 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
7799 return "";
7801 else
7803 for (j = 0; j < words; j++)
7804 if (j != i)
7806 xop[0] = GEN_INT (j * 4);
7807 xop[1] = operands[1];
7808 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
7809 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
7811 xop[0] = GEN_INT (i * 4);
7812 xop[1] = operands[1];
7813 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
7814 return "";
7818 return "{lsi|lswi} %2,%1,%N0";
7821 /* Return 1 for a parallel vrsave operation. */
7824 vrsave_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7826 int count = XVECLEN (op, 0);
7827 unsigned int dest_regno, src_regno;
7828 int i;
7830 if (count <= 1
7831 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7832 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7833 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
7834 return 0;
7836 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7837 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7839 if (dest_regno != VRSAVE_REGNO
7840 && src_regno != VRSAVE_REGNO)
7841 return 0;
7843 for (i = 1; i < count; i++)
7845 rtx elt = XVECEXP (op, 0, i);
7847 if (GET_CODE (elt) != CLOBBER
7848 && GET_CODE (elt) != SET)
7849 return 0;
7852 return 1;
7855 /* Return 1 for an PARALLEL suitable for mfcr. */
7858 mfcr_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7860 int count = XVECLEN (op, 0);
7861 int i;
7863 /* Perform a quick check so we don't blow up below. */
7864 if (count < 1
7865 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7866 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
7867 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
7868 return 0;
7870 for (i = 0; i < count; i++)
7872 rtx exp = XVECEXP (op, 0, i);
7873 rtx unspec;
7874 int maskval;
7875 rtx src_reg;
7877 src_reg = XVECEXP (SET_SRC (exp), 0, 0);
7879 if (GET_CODE (src_reg) != REG
7880 || GET_MODE (src_reg) != CCmode
7881 || ! CR_REGNO_P (REGNO (src_reg)))
7882 return 0;
7884 if (GET_CODE (exp) != SET
7885 || GET_CODE (SET_DEST (exp)) != REG
7886 || GET_MODE (SET_DEST (exp)) != SImode
7887 || ! INT_REGNO_P (REGNO (SET_DEST (exp))))
7888 return 0;
7889 unspec = SET_SRC (exp);
7890 maskval = 1 << (MAX_CR_REGNO - REGNO (src_reg));
7892 if (GET_CODE (unspec) != UNSPEC
7893 || XINT (unspec, 1) != UNSPEC_MOVESI_FROM_CR
7894 || XVECLEN (unspec, 0) != 2
7895 || XVECEXP (unspec, 0, 0) != src_reg
7896 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
7897 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
7898 return 0;
7900 return 1;
7903 /* Return 1 for an PARALLEL suitable for mtcrf. */
7906 mtcrf_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7908 int count = XVECLEN (op, 0);
7909 int i;
7910 rtx src_reg;
7912 /* Perform a quick check so we don't blow up below. */
7913 if (count < 1
7914 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7915 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
7916 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
7917 return 0;
7918 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
7920 if (GET_CODE (src_reg) != REG
7921 || GET_MODE (src_reg) != SImode
7922 || ! INT_REGNO_P (REGNO (src_reg)))
7923 return 0;
7925 for (i = 0; i < count; i++)
7927 rtx exp = XVECEXP (op, 0, i);
7928 rtx unspec;
7929 int maskval;
7931 if (GET_CODE (exp) != SET
7932 || GET_CODE (SET_DEST (exp)) != REG
7933 || GET_MODE (SET_DEST (exp)) != CCmode
7934 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
7935 return 0;
7936 unspec = SET_SRC (exp);
7937 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
7939 if (GET_CODE (unspec) != UNSPEC
7940 || XINT (unspec, 1) != UNSPEC_MOVESI_TO_CR
7941 || XVECLEN (unspec, 0) != 2
7942 || XVECEXP (unspec, 0, 0) != src_reg
7943 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
7944 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
7945 return 0;
7947 return 1;
7950 /* Return 1 for an PARALLEL suitable for lmw. */
7953 lmw_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7955 int count = XVECLEN (op, 0);
7956 unsigned int dest_regno;
7957 rtx src_addr;
7958 unsigned int base_regno;
7959 HOST_WIDE_INT offset;
7960 int i;
7962 /* Perform a quick check so we don't blow up below. */
7963 if (count <= 1
7964 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7965 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7966 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
7967 return 0;
7969 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7970 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
7972 if (dest_regno > 31
7973 || count != 32 - (int) dest_regno)
7974 return 0;
7976 if (legitimate_indirect_address_p (src_addr, 0))
7978 offset = 0;
7979 base_regno = REGNO (src_addr);
7980 if (base_regno == 0)
7981 return 0;
7983 else if (legitimate_offset_address_p (SImode, src_addr, 0))
7985 offset = INTVAL (XEXP (src_addr, 1));
7986 base_regno = REGNO (XEXP (src_addr, 0));
7988 else
7989 return 0;
7991 for (i = 0; i < count; i++)
7993 rtx elt = XVECEXP (op, 0, i);
7994 rtx newaddr;
7995 rtx addr_reg;
7996 HOST_WIDE_INT newoffset;
7998 if (GET_CODE (elt) != SET
7999 || GET_CODE (SET_DEST (elt)) != REG
8000 || GET_MODE (SET_DEST (elt)) != SImode
8001 || REGNO (SET_DEST (elt)) != dest_regno + i
8002 || GET_CODE (SET_SRC (elt)) != MEM
8003 || GET_MODE (SET_SRC (elt)) != SImode)
8004 return 0;
8005 newaddr = XEXP (SET_SRC (elt), 0);
8006 if (legitimate_indirect_address_p (newaddr, 0))
8008 newoffset = 0;
8009 addr_reg = newaddr;
8011 else if (legitimate_offset_address_p (SImode, newaddr, 0))
8013 addr_reg = XEXP (newaddr, 0);
8014 newoffset = INTVAL (XEXP (newaddr, 1));
8016 else
8017 return 0;
8018 if (REGNO (addr_reg) != base_regno
8019 || newoffset != offset + 4 * i)
8020 return 0;
8023 return 1;
8026 /* Return 1 for an PARALLEL suitable for stmw. */
8029 stmw_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8031 int count = XVECLEN (op, 0);
8032 unsigned int src_regno;
8033 rtx dest_addr;
8034 unsigned int base_regno;
8035 HOST_WIDE_INT offset;
8036 int i;
8038 /* Perform a quick check so we don't blow up below. */
8039 if (count <= 1
8040 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8041 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
8042 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
8043 return 0;
8045 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
8046 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
8048 if (src_regno > 31
8049 || count != 32 - (int) src_regno)
8050 return 0;
8052 if (legitimate_indirect_address_p (dest_addr, 0))
8054 offset = 0;
8055 base_regno = REGNO (dest_addr);
8056 if (base_regno == 0)
8057 return 0;
8059 else if (legitimate_offset_address_p (SImode, dest_addr, 0))
8061 offset = INTVAL (XEXP (dest_addr, 1));
8062 base_regno = REGNO (XEXP (dest_addr, 0));
8064 else
8065 return 0;
8067 for (i = 0; i < count; i++)
8069 rtx elt = XVECEXP (op, 0, i);
8070 rtx newaddr;
8071 rtx addr_reg;
8072 HOST_WIDE_INT newoffset;
8074 if (GET_CODE (elt) != SET
8075 || GET_CODE (SET_SRC (elt)) != REG
8076 || GET_MODE (SET_SRC (elt)) != SImode
8077 || REGNO (SET_SRC (elt)) != src_regno + i
8078 || GET_CODE (SET_DEST (elt)) != MEM
8079 || GET_MODE (SET_DEST (elt)) != SImode)
8080 return 0;
8081 newaddr = XEXP (SET_DEST (elt), 0);
8082 if (legitimate_indirect_address_p (newaddr, 0))
8084 newoffset = 0;
8085 addr_reg = newaddr;
8087 else if (legitimate_offset_address_p (SImode, newaddr, 0))
8089 addr_reg = XEXP (newaddr, 0);
8090 newoffset = INTVAL (XEXP (newaddr, 1));
8092 else
8093 return 0;
8094 if (REGNO (addr_reg) != base_regno
8095 || newoffset != offset + 4 * i)
8096 return 0;
8099 return 1;
8102 /* A validation routine: say whether CODE, a condition code, and MODE
8103 match. The other alternatives either don't make sense or should
8104 never be generated. */
8106 static void
8107 validate_condition_mode (enum rtx_code code, enum machine_mode mode)
8109 if ((GET_RTX_CLASS (code) != RTX_COMPARE
8110 && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
8111 || GET_MODE_CLASS (mode) != MODE_CC)
8112 abort ();
8114 /* These don't make sense. */
8115 if ((code == GT || code == LT || code == GE || code == LE)
8116 && mode == CCUNSmode)
8117 abort ();
8119 if ((code == GTU || code == LTU || code == GEU || code == LEU)
8120 && mode != CCUNSmode)
8121 abort ();
8123 if (mode != CCFPmode
8124 && (code == ORDERED || code == UNORDERED
8125 || code == UNEQ || code == LTGT
8126 || code == UNGT || code == UNLT
8127 || code == UNGE || code == UNLE))
8128 abort ();
8130 /* These should never be generated except for
8131 flag_finite_math_only. */
8132 if (mode == CCFPmode
8133 && ! flag_finite_math_only
8134 && (code == LE || code == GE
8135 || code == UNEQ || code == LTGT
8136 || code == UNGT || code == UNLT))
8137 abort ();
8139 /* These are invalid; the information is not there. */
8140 if (mode == CCEQmode
8141 && code != EQ && code != NE)
8142 abort ();
8145 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
8146 We only check the opcode against the mode of the CC value here. */
8149 branch_comparison_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8151 enum rtx_code code = GET_CODE (op);
8152 enum machine_mode cc_mode;
8154 if (!COMPARISON_P (op))
8155 return 0;
8157 cc_mode = GET_MODE (XEXP (op, 0));
8158 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
8159 return 0;
8161 validate_condition_mode (code, cc_mode);
8163 return 1;
8166 /* Return 1 if OP is a comparison operation that is valid for a branch
8167 insn and which is true if the corresponding bit in the CC register
8168 is set. */
8171 branch_positive_comparison_operator (rtx op, enum machine_mode mode)
8173 enum rtx_code code;
8175 if (! branch_comparison_operator (op, mode))
8176 return 0;
8178 code = GET_CODE (op);
8179 return (code == EQ || code == LT || code == GT
8180 || (TARGET_E500 && TARGET_HARD_FLOAT && !TARGET_FPRS && code == NE)
8181 || code == LTU || code == GTU
8182 || code == UNORDERED);
8185 /* Return 1 if OP is a comparison operation that is valid for an scc
8186 insn: it must be a positive comparison. */
8189 scc_comparison_operator (rtx op, enum machine_mode mode)
8191 return branch_positive_comparison_operator (op, mode);
8195 trap_comparison_operator (rtx op, enum machine_mode mode)
8197 if (mode != VOIDmode && mode != GET_MODE (op))
8198 return 0;
8199 return COMPARISON_P (op);
8203 boolean_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8205 enum rtx_code code = GET_CODE (op);
8206 return (code == AND || code == IOR || code == XOR);
8210 boolean_or_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8212 enum rtx_code code = GET_CODE (op);
8213 return (code == IOR || code == XOR);
8217 min_max_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8219 enum rtx_code code = GET_CODE (op);
8220 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
8223 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
8224 mask required to convert the result of a rotate insn into a shift
8225 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
8228 includes_lshift_p (rtx shiftop, rtx andop)
8230 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
8232 shift_mask <<= INTVAL (shiftop);
8234 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
8237 /* Similar, but for right shift. */
8240 includes_rshift_p (rtx shiftop, rtx andop)
8242 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
8244 shift_mask >>= INTVAL (shiftop);
8246 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
8249 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
8250 to perform a left shift. It must have exactly SHIFTOP least
8251 significant 0's, then one or more 1's, then zero or more 0's. */
8254 includes_rldic_lshift_p (rtx shiftop, rtx andop)
8256 if (GET_CODE (andop) == CONST_INT)
8258 HOST_WIDE_INT c, lsb, shift_mask;
8260 c = INTVAL (andop);
8261 if (c == 0 || c == ~0)
8262 return 0;
8264 shift_mask = ~0;
8265 shift_mask <<= INTVAL (shiftop);
8267 /* Find the least significant one bit. */
8268 lsb = c & -c;
8270 /* It must coincide with the LSB of the shift mask. */
8271 if (-lsb != shift_mask)
8272 return 0;
8274 /* Invert to look for the next transition (if any). */
8275 c = ~c;
8277 /* Remove the low group of ones (originally low group of zeros). */
8278 c &= -lsb;
8280 /* Again find the lsb, and check we have all 1's above. */
8281 lsb = c & -c;
8282 return c == -lsb;
8284 else if (GET_CODE (andop) == CONST_DOUBLE
8285 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
8287 HOST_WIDE_INT low, high, lsb;
8288 HOST_WIDE_INT shift_mask_low, shift_mask_high;
8290 low = CONST_DOUBLE_LOW (andop);
8291 if (HOST_BITS_PER_WIDE_INT < 64)
8292 high = CONST_DOUBLE_HIGH (andop);
8294 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
8295 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
8296 return 0;
8298 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
8300 shift_mask_high = ~0;
8301 if (INTVAL (shiftop) > 32)
8302 shift_mask_high <<= INTVAL (shiftop) - 32;
8304 lsb = high & -high;
8306 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
8307 return 0;
8309 high = ~high;
8310 high &= -lsb;
8312 lsb = high & -high;
8313 return high == -lsb;
8316 shift_mask_low = ~0;
8317 shift_mask_low <<= INTVAL (shiftop);
8319 lsb = low & -low;
8321 if (-lsb != shift_mask_low)
8322 return 0;
8324 if (HOST_BITS_PER_WIDE_INT < 64)
8325 high = ~high;
8326 low = ~low;
8327 low &= -lsb;
8329 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
8331 lsb = high & -high;
8332 return high == -lsb;
8335 lsb = low & -low;
8336 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
8338 else
8339 return 0;
8342 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
8343 to perform a left shift. It must have SHIFTOP or more least
8344 significant 0's, with the remainder of the word 1's. */
8347 includes_rldicr_lshift_p (rtx shiftop, rtx andop)
8349 if (GET_CODE (andop) == CONST_INT)
8351 HOST_WIDE_INT c, lsb, shift_mask;
8353 shift_mask = ~0;
8354 shift_mask <<= INTVAL (shiftop);
8355 c = INTVAL (andop);
8357 /* Find the least significant one bit. */
8358 lsb = c & -c;
8360 /* It must be covered by the shift mask.
8361 This test also rejects c == 0. */
8362 if ((lsb & shift_mask) == 0)
8363 return 0;
8365 /* Check we have all 1's above the transition, and reject all 1's. */
8366 return c == -lsb && lsb != 1;
8368 else if (GET_CODE (andop) == CONST_DOUBLE
8369 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
8371 HOST_WIDE_INT low, lsb, shift_mask_low;
8373 low = CONST_DOUBLE_LOW (andop);
8375 if (HOST_BITS_PER_WIDE_INT < 64)
8377 HOST_WIDE_INT high, shift_mask_high;
8379 high = CONST_DOUBLE_HIGH (andop);
8381 if (low == 0)
8383 shift_mask_high = ~0;
8384 if (INTVAL (shiftop) > 32)
8385 shift_mask_high <<= INTVAL (shiftop) - 32;
8387 lsb = high & -high;
8389 if ((lsb & shift_mask_high) == 0)
8390 return 0;
8392 return high == -lsb;
8394 if (high != ~0)
8395 return 0;
8398 shift_mask_low = ~0;
8399 shift_mask_low <<= INTVAL (shiftop);
8401 lsb = low & -low;
8403 if ((lsb & shift_mask_low) == 0)
8404 return 0;
8406 return low == -lsb && lsb != 1;
8408 else
8409 return 0;
8412 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
8413 for lfq and stfq insns.
8415 Note reg1 and reg2 *must* be hard registers. To be sure we will
8416 abort if we are passed pseudo registers. */
8419 registers_ok_for_quad_peep (rtx reg1, rtx reg2)
8421 /* We might have been passed a SUBREG. */
8422 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
8423 return 0;
8425 return (REGNO (reg1) == REGNO (reg2) - 1);
8428 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
8429 addr1 and addr2 must be in consecutive memory locations
8430 (addr2 == addr1 + 8). */
8433 addrs_ok_for_quad_peep (rtx addr1, rtx addr2)
8435 unsigned int reg1;
8436 int offset1;
8438 /* Extract an offset (if used) from the first addr. */
8439 if (GET_CODE (addr1) == PLUS)
8441 /* If not a REG, return zero. */
8442 if (GET_CODE (XEXP (addr1, 0)) != REG)
8443 return 0;
8444 else
8446 reg1 = REGNO (XEXP (addr1, 0));
8447 /* The offset must be constant! */
8448 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
8449 return 0;
8450 offset1 = INTVAL (XEXP (addr1, 1));
8453 else if (GET_CODE (addr1) != REG)
8454 return 0;
8455 else
8457 reg1 = REGNO (addr1);
8458 /* This was a simple (mem (reg)) expression. Offset is 0. */
8459 offset1 = 0;
8462 /* Make sure the second address is a (mem (plus (reg) (const_int)))
8463 or if it is (mem (reg)) then make sure that offset1 is -8 and the same
8464 register as addr1. */
8465 if (offset1 == -8 && GET_CODE (addr2) == REG && reg1 == REGNO (addr2))
8466 return 1;
8467 if (GET_CODE (addr2) != PLUS)
8468 return 0;
8470 if (GET_CODE (XEXP (addr2, 0)) != REG
8471 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
8472 return 0;
8474 if (reg1 != REGNO (XEXP (addr2, 0)))
8475 return 0;
8477 /* The offset for the second addr must be 8 more than the first addr. */
8478 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
8479 return 0;
8481 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
8482 instructions. */
8483 return 1;
8486 /* Return the register class of a scratch register needed to copy IN into
8487 or out of a register in CLASS in MODE. If it can be done directly,
8488 NO_REGS is returned. */
8490 enum reg_class
8491 secondary_reload_class (enum reg_class class,
8492 enum machine_mode mode ATTRIBUTE_UNUSED,
8493 rtx in)
8495 int regno;
8497 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
8498 #if TARGET_MACHO
8499 && MACHOPIC_INDIRECT
8500 #endif
8503 /* We cannot copy a symbolic operand directly into anything
8504 other than BASE_REGS for TARGET_ELF. So indicate that a
8505 register from BASE_REGS is needed as an intermediate
8506 register.
8508 On Darwin, pic addresses require a load from memory, which
8509 needs a base register. */
8510 if (class != BASE_REGS
8511 && (GET_CODE (in) == SYMBOL_REF
8512 || GET_CODE (in) == HIGH
8513 || GET_CODE (in) == LABEL_REF
8514 || GET_CODE (in) == CONST))
8515 return BASE_REGS;
8518 if (GET_CODE (in) == REG)
8520 regno = REGNO (in);
8521 if (regno >= FIRST_PSEUDO_REGISTER)
8523 regno = true_regnum (in);
8524 if (regno >= FIRST_PSEUDO_REGISTER)
8525 regno = -1;
8528 else if (GET_CODE (in) == SUBREG)
8530 regno = true_regnum (in);
8531 if (regno >= FIRST_PSEUDO_REGISTER)
8532 regno = -1;
8534 else
8535 regno = -1;
8537 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
8538 into anything. */
8539 if (class == GENERAL_REGS || class == BASE_REGS
8540 || (regno >= 0 && INT_REGNO_P (regno)))
8541 return NO_REGS;
8543 /* Constants, memory, and FP registers can go into FP registers. */
8544 if ((regno == -1 || FP_REGNO_P (regno))
8545 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
8546 return NO_REGS;
8548 /* Memory, and AltiVec registers can go into AltiVec registers. */
8549 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
8550 && class == ALTIVEC_REGS)
8551 return NO_REGS;
8553 /* We can copy among the CR registers. */
8554 if ((class == CR_REGS || class == CR0_REGS)
8555 && regno >= 0 && CR_REGNO_P (regno))
8556 return NO_REGS;
8558 /* Otherwise, we need GENERAL_REGS. */
8559 return GENERAL_REGS;
8562 /* Given a comparison operation, return the bit number in CCR to test. We
8563 know this is a valid comparison.
8565 SCC_P is 1 if this is for an scc. That means that %D will have been
8566 used instead of %C, so the bits will be in different places.
8568 Return -1 if OP isn't a valid comparison for some reason. */
8571 ccr_bit (rtx op, int scc_p)
8573 enum rtx_code code = GET_CODE (op);
8574 enum machine_mode cc_mode;
8575 int cc_regnum;
8576 int base_bit;
8577 rtx reg;
8579 if (!COMPARISON_P (op))
8580 return -1;
8582 reg = XEXP (op, 0);
8584 if (GET_CODE (reg) != REG
8585 || ! CR_REGNO_P (REGNO (reg)))
8586 abort ();
8588 cc_mode = GET_MODE (reg);
8589 cc_regnum = REGNO (reg);
8590 base_bit = 4 * (cc_regnum - CR0_REGNO);
8592 validate_condition_mode (code, cc_mode);
8594 /* When generating a sCOND operation, only positive conditions are
8595 allowed. */
8596 if (scc_p && code != EQ && code != GT && code != LT && code != UNORDERED
8597 && code != GTU && code != LTU)
8598 abort ();
8600 switch (code)
8602 case NE:
8603 if (TARGET_E500 && !TARGET_FPRS
8604 && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
8605 return base_bit + 1;
8606 return scc_p ? base_bit + 3 : base_bit + 2;
8607 case EQ:
8608 if (TARGET_E500 && !TARGET_FPRS
8609 && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
8610 return base_bit + 1;
8611 return base_bit + 2;
8612 case GT: case GTU: case UNLE:
8613 return base_bit + 1;
8614 case LT: case LTU: case UNGE:
8615 return base_bit;
8616 case ORDERED: case UNORDERED:
8617 return base_bit + 3;
8619 case GE: case GEU:
8620 /* If scc, we will have done a cror to put the bit in the
8621 unordered position. So test that bit. For integer, this is ! LT
8622 unless this is an scc insn. */
8623 return scc_p ? base_bit + 3 : base_bit;
8625 case LE: case LEU:
8626 return scc_p ? base_bit + 3 : base_bit + 1;
8628 default:
8629 abort ();
8633 /* Return the GOT register. */
8635 struct rtx_def *
8636 rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
8638 /* The second flow pass currently (June 1999) can't update
8639 regs_ever_live without disturbing other parts of the compiler, so
8640 update it here to make the prolog/epilogue code happy. */
8641 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
8642 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
8644 current_function_uses_pic_offset_table = 1;
8646 return pic_offset_table_rtx;
8649 /* Function to init struct machine_function.
8650 This will be called, via a pointer variable,
8651 from push_function_context. */
8653 static struct machine_function *
8654 rs6000_init_machine_status (void)
8656 return ggc_alloc_cleared (sizeof (machine_function));
/* These macros test for integers and extract the low-order bits.  */

/* True when X is an integer constant: a CONST_INT, or a CONST_DOUBLE
   that carries an integer rather than a float (its mode is VOIDmode).  */
#define INT_P(X)  \
((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE)	\
     && GET_MODE (X) == VOIDmode)

/* Low-order HOST_WIDE_INT of an integer constant accepted by INT_P.  */
#define INT_LOWPART(X) \
  (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
8668 extract_MB (rtx op)
8670 int i;
8671 unsigned long val = INT_LOWPART (op);
8673 /* If the high bit is zero, the value is the first 1 bit we find
8674 from the left. */
8675 if ((val & 0x80000000) == 0)
8677 if ((val & 0xffffffff) == 0)
8678 abort ();
8680 i = 1;
8681 while (((val <<= 1) & 0x80000000) == 0)
8682 ++i;
8683 return i;
8686 /* If the high bit is set and the low bit is not, or the mask is all
8687 1's, the value is zero. */
8688 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
8689 return 0;
8691 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
8692 from the right. */
8693 i = 31;
8694 while (((val >>= 1) & 1) != 0)
8695 --i;
8697 return i;
8701 extract_ME (rtx op)
8703 int i;
8704 unsigned long val = INT_LOWPART (op);
8706 /* If the low bit is zero, the value is the first 1 bit we find from
8707 the right. */
8708 if ((val & 1) == 0)
8710 if ((val & 0xffffffff) == 0)
8711 abort ();
8713 i = 30;
8714 while (((val >>= 1) & 1) == 0)
8715 --i;
8717 return i;
8720 /* If the low bit is set and the high bit is not, or the mask is all
8721 1's, the value is 31. */
8722 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
8723 return 31;
8725 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
8726 from the left. */
8727 i = 0;
8728 while (((val <<= 1) & 0x80000000) != 0)
8729 ++i;
8731 return i;
8734 /* Locate some local-dynamic symbol still in use by this function
8735 so that we can print its name in some tls_ld pattern. */
8737 static const char *
8738 rs6000_get_some_local_dynamic_name (void)
8740 rtx insn;
8742 if (cfun->machine->some_ld_name)
8743 return cfun->machine->some_ld_name;
8745 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
8746 if (INSN_P (insn)
8747 && for_each_rtx (&PATTERN (insn),
8748 rs6000_get_some_local_dynamic_name_1, 0))
8749 return cfun->machine->some_ld_name;
8751 abort ();
8754 /* Helper function for rs6000_get_some_local_dynamic_name. */
8756 static int
8757 rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
8759 rtx x = *px;
8761 if (GET_CODE (x) == SYMBOL_REF)
8763 const char *str = XSTR (x, 0);
8764 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
8766 cfun->machine->some_ld_name = str;
8767 return 1;
8771 return 0;
/* Print an operand.  Recognize special options, documented below.  */

/* Relocation keyword and base register used when printing a small-data
   reference.  For ELF EABI the sda21 form uses register 0; the sdarel
   form uses r13.  Non-ELF targets always use the sda21/r0 form.  */
#if TARGET_ELF
#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
#else
#define SMALL_DATA_RELOC "sda21"
#define SMALL_DATA_REG 0
#endif
/* Output operand X to FILE under control of the single-character
   operand-modifier CODE (the letter after '%' in an insn template).
   CODE 0 prints the operand with no modification.  Invalid
   combinations are reported via output_operand_lossage or abort.  */

void
print_operand (FILE *file, rtx x, int code)
{
  int i;
  HOST_WIDE_INT val;
  unsigned HOST_WIDE_INT uval;

  switch (code)
    {
    case '.':
      /* Write out an instruction after the call which may be replaced
	 with glue code by the loader.  This depends on the AIX version.  */
      asm_fprintf (file, RS6000_CALL_GLUE);
      return;

      /* %a is output_address.  */

    case 'A':
      /* If X is a constant integer whose low-order 5 bits are zero,
	 write 'l'.  Otherwise, write 'r'.  This is a kludge to fix a bug
	 in the AIX assembler where "sri" with a zero shift count
	 writes a trash instruction.  */
      if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
	putc ('l', file);
      else
	putc ('r', file);
      return;

    case 'b':
      /* If constant, low-order 16 bits of constant, unsigned.
	 Otherwise, write normally.  */
      if (INT_P (x))
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
      else
	print_operand (file, x, 0);
      return;

    case 'B':
      /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
	 for 64-bit mask direction.  */
      putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
      return;

      /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
	 output_operand.  */

    case 'E':
      /* X is a CR register.  Print the number of the EQ bit of the CR */
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
	output_operand_lossage ("invalid %%E value");
      else
	fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
      return;

    case 'f':
      /* X is a CR register.  Print the shift count needed to move it
	 to the high-order four bits.  */
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
	output_operand_lossage ("invalid %%f value");
      else
	fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
      return;

    case 'F':
      /* Similar, but print the count for the rotate in the opposite
	 direction.  */
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
	output_operand_lossage ("invalid %%F value");
      else
	fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
      return;

    case 'G':
      /* X is a constant integer.  If it is negative, print "m",
	 otherwise print "z".  This is to make an aze or ame insn.  */
      if (GET_CODE (x) != CONST_INT)
	output_operand_lossage ("invalid %%G value");
      else if (INTVAL (x) >= 0)
	putc ('z', file);
      else
	putc ('m', file);
      return;

    case 'h':
      /* If constant, output low-order five bits.  Otherwise, write
	 normally.  */
      if (INT_P (x))
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
      else
	print_operand (file, x, 0);
      return;

    case 'H':
      /* If constant, output low-order six bits.  Otherwise, write
	 normally.  */
      if (INT_P (x))
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
      else
	print_operand (file, x, 0);
      return;

    case 'I':
      /* Print `i' if this is a constant, else nothing.  */
      if (INT_P (x))
	putc ('i', file);
      return;

    case 'j':
      /* Write the bit number in CCR for jump.  */
      i = ccr_bit (x, 0);
      if (i == -1)
	output_operand_lossage ("invalid %%j code");
      else
	fprintf (file, "%d", i);
      return;

    case 'J':
      /* Similar, but add one for shift count in rlinm for scc and pass
	 scc flag to `ccr_bit'.  */
      i = ccr_bit (x, 1);
      if (i == -1)
	output_operand_lossage ("invalid %%J code");
      else
	/* If we want bit 31, write a shift count of zero, not 32.  */
	fprintf (file, "%d", i == 31 ? 0 : i + 1);
      return;

    case 'k':
      /* X must be a constant.  Write the 1's complement of the
	 constant.  */
      if (! INT_P (x))
	output_operand_lossage ("invalid %%k value");
      else
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
      return;

    case 'K':
      /* X must be a symbolic constant on ELF.  Write an
	 expression suitable for an 'addi' that adds in the low 16
	 bits of the MEM.  */
      if (GET_CODE (x) != CONST)
	{
	  print_operand_address (file, x);
	  fputs ("@l", file);
	}
      else
	{
	  if (GET_CODE (XEXP (x, 0)) != PLUS
	      || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
		  && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
	      || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
	    output_operand_lossage ("invalid %%K value");
	  print_operand_address (file, XEXP (XEXP (x, 0), 0));
	  fputs ("@l", file);
	  /* For GNU as, there must be a non-alphanumeric character
	     between 'l' and the number.  The '-' is added by
	     print_operand() already.  */
	  if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
	    fputs ("+", file);
	  print_operand (file, XEXP (XEXP (x, 0), 1), 0);
	}
      return;

      /* %l is output_asm_label.  */

    case 'L':
      /* Write second word of DImode or DFmode reference.  Works on register
	 or non-indexed memory only.  */
      if (GET_CODE (x) == REG)
	fprintf (file, "%s", reg_names[REGNO (x) + 1]);
      else if (GET_CODE (x) == MEM)
	{
	  /* Handle possible auto-increment.  Since it is pre-increment and
	     we have already done it, we can just use an offset of word.  */
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    output_address (plus_constant (XEXP (XEXP (x, 0), 0),
					   UNITS_PER_WORD));
	  else
	    output_address (XEXP (adjust_address_nv (x, SImode,
						     UNITS_PER_WORD),
				  0));

	  if (small_data_operand (x, GET_MODE (x)))
	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		     reg_names[SMALL_DATA_REG]);
	}
      return;

    case 'm':
      /* MB value for a mask operand.  */
      if (! mask_operand (x, SImode))
	output_operand_lossage ("invalid %%m value");

      fprintf (file, "%d", extract_MB (x));
      return;

    case 'M':
      /* ME value for a mask operand.  */
      if (! mask_operand (x, SImode))
	output_operand_lossage ("invalid %%M value");

      fprintf (file, "%d", extract_ME (x));
      return;

      /* %n outputs the negative of its operand.  */

    case 'N':
      /* Write the number of elements in the vector times 4.  */
      if (GET_CODE (x) != PARALLEL)
	output_operand_lossage ("invalid %%N value");
      else
	fprintf (file, "%d", XVECLEN (x, 0) * 4);
      return;

    case 'O':
      /* Similar, but subtract 1 first.  */
      if (GET_CODE (x) != PARALLEL)
	output_operand_lossage ("invalid %%O value");
      else
	fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
      return;

    case 'p':
      /* X is a CONST_INT that is a power of two.  Output the logarithm.  */
      if (! INT_P (x)
	  || INT_LOWPART (x) < 0
	  || (i = exact_log2 (INT_LOWPART (x))) < 0)
	output_operand_lossage ("invalid %%p value");
      else
	fprintf (file, "%d", i);
      return;

    case 'P':
      /* The operand must be an indirect memory reference.  The result
	 is the register name.  */
      if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
	  || REGNO (XEXP (x, 0)) >= 32)
	output_operand_lossage ("invalid %%P value");
      else
	fprintf (file, "%s", reg_names[REGNO (XEXP (x, 0))]);
      return;

    case 'q':
      /* This outputs the logical code corresponding to a boolean
	 expression.  The expression may have one or both operands
	 negated (if one, only the first one).  For condition register
	 logical operations, it will also treat the negated
	 CR codes as NOTs, but not handle NOTs of them.  */
      {
	const char *const *t = 0;
	const char *s;
	enum rtx_code code = GET_CODE (x);
	/* Rows select the operator (AND/IOR/XOR); columns select the
	   plain, first-operand-negated, and both-operands-negated
	   mnemonics respectively.  */
	static const char * const tbl[3][3] = {
	  { "and", "andc", "nor" },
	  { "or", "orc", "nand" },
	  { "xor", "eqv", "xor" } };

	if (code == AND)
	  t = tbl[0];
	else if (code == IOR)
	  t = tbl[1];
	else if (code == XOR)
	  t = tbl[2];
	else
	  output_operand_lossage ("invalid %%q value");

	if (GET_CODE (XEXP (x, 0)) != NOT)
	  s = t[0];
	else
	  {
	    if (GET_CODE (XEXP (x, 1)) == NOT)
	      s = t[2];
	    else
	      s = t[1];
	  }

	fputs (s, file);
      }
      return;

    case 'Q':
      /* With -mmfcrf, print a comma and fall through to %R so the
	 mtcrf field mask follows; otherwise print nothing.  */
      if (TARGET_MFCRF)
	fputc (',', file);
        /* FALLTHRU */
      else
	return;

    case 'R':
      /* X is a CR register.  Print the mask for `mtcrf'.  */
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
	output_operand_lossage ("invalid %%R value");
      else
	fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
      return;

    case 's':
      /* Low 5 bits of 32 - value */
      if (! INT_P (x))
	output_operand_lossage ("invalid %%s value");
      else
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
      return;

    case 'S':
      /* PowerPC64 mask position.  All 0's is excluded.
	 CONST_INT 32-bit mask is considered sign-extended so any
	 transition must occur within the CONST_INT, not on the boundary.  */
      if (! mask64_operand (x, DImode))
	output_operand_lossage ("invalid %%S value");

      uval = INT_LOWPART (x);

      if (uval & 1)	/* Clear Left */
	{
#if HOST_BITS_PER_WIDE_INT > 64
	  uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
#endif
	  i = 64;
	}
      else		/* Clear Right */
	{
	  uval = ~uval;
#if HOST_BITS_PER_WIDE_INT > 64
	  uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
#endif
	  i = 63;
	}
      /* Count down to the position of the lowest zero bit of the
	 (possibly complemented) mask.  */
      while (uval != 0)
	--i, uval >>= 1;
      if (i < 0)
	abort ();
      fprintf (file, "%d", i);
      return;

    case 't':
      /* Like 'J' but get to the OVERFLOW/UNORDERED bit.  */
      if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
	abort ();

      /* Bit 3 is OV bit.  */
      i = 4 * (REGNO (x) - CR0_REGNO) + 3;

      /* If we want bit 31, write a shift count of zero, not 32.  */
      fprintf (file, "%d", i == 31 ? 0 : i + 1);
      return;

    case 'T':
      /* Print the symbolic name of a branch target register.  */
      if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
				  && REGNO (x) != COUNT_REGISTER_REGNUM))
	output_operand_lossage ("invalid %%T value");
      else if (REGNO (x) == LINK_REGISTER_REGNUM)
	fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
      else
	fputs ("ctr", file);
      return;

    case 'u':
      /* High-order 16 bits of constant for use in unsigned operand.  */
      if (! INT_P (x))
	output_operand_lossage ("invalid %%u value");
      else
	fprintf (file, HOST_WIDE_INT_PRINT_HEX,
		 (INT_LOWPART (x) >> 16) & 0xffff);
      return;

    case 'v':
      /* High-order 16 bits of constant for use in signed operand.  */
      if (! INT_P (x))
	output_operand_lossage ("invalid %%v value");
      else
	fprintf (file, HOST_WIDE_INT_PRINT_HEX,
		 (INT_LOWPART (x) >> 16) & 0xffff);
      return;

    case 'U':
      /* Print `u' if this has an auto-increment or auto-decrement.  */
      if (GET_CODE (x) == MEM
	  && (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC))
	putc ('u', file);
      return;

    case 'V':
      /* Print the trap code for this operand.  */
      switch (GET_CODE (x))
	{
	case EQ:
	  fputs ("eq", file);   /* 4 */
	  break;
	case NE:
	  fputs ("ne", file);   /* 24 */
	  break;
	case LT:
	  fputs ("lt", file);   /* 16 */
	  break;
	case LE:
	  fputs ("le", file);   /* 20 */
	  break;
	case GT:
	  fputs ("gt", file);   /* 8 */
	  break;
	case GE:
	  fputs ("ge", file);   /* 12 */
	  break;
	case LTU:
	  fputs ("llt", file);  /* 2 */
	  break;
	case LEU:
	  fputs ("lle", file);  /* 6 */
	  break;
	case GTU:
	  fputs ("lgt", file);  /* 1 */
	  break;
	case GEU:
	  fputs ("lge", file);  /* 5 */
	  break;
	default:
	  abort ();
	}
      break;

    case 'w':
      /* If constant, low-order 16 bits of constant, signed.  Otherwise, write
	 normally.  */
      if (INT_P (x))
	fprintf (file, HOST_WIDE_INT_PRINT_DEC,
		 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
      else
	print_operand (file, x, 0);
      return;

    case 'W':
      /* MB value for a PowerPC64 rldic operand.  */
      val = (GET_CODE (x) == CONST_INT
	     ? INTVAL (x) : CONST_DOUBLE_HIGH (x));

      if (val < 0)
	i = -1;
      else
	for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
	  if ((val <<= 1) < 0)
	    break;

#if HOST_BITS_PER_WIDE_INT == 32
      if (GET_CODE (x) == CONST_INT && i >= 0)
	i += 32;  /* zero-extend high-part was all 0's */
      else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
	{
	  val = CONST_DOUBLE_LOW (x);

	  if (val == 0)
	    abort ();
	  else if (val < 0)
	    --i;
	  else
	    for ( ; i < 64; i++)
	      if ((val <<= 1) < 0)
		break;
	}
#endif

      fprintf (file, "%d", i + 1);
      return;

    case 'X':
      /* Print 'x' for an indexed (reg+reg) memory operand.  */
      if (GET_CODE (x) == MEM
	  && legitimate_indexed_address_p (XEXP (x, 0), 0))
	putc ('x', file);
      return;

    case 'Y':
      /* Like 'L', for third word of TImode  */
      if (GET_CODE (x) == REG)
	fprintf (file, "%s", reg_names[REGNO (x) + 2]);
      else if (GET_CODE (x) == MEM)
	{
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
	  else
	    output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
	  if (small_data_operand (x, GET_MODE (x)))
	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		     reg_names[SMALL_DATA_REG]);
	}
      return;

    case 'z':
      /* X is a SYMBOL_REF.  Write out the name preceded by a
	 period and without any trailing data in brackets.  Used for function
	 names.  If we are configured for System V (or the embedded ABI) on
	 the PowerPC, do not emit the period, since those systems do not use
	 TOCs and the like.  */
      if (GET_CODE (x) != SYMBOL_REF)
	abort ();

      if (XSTR (x, 0)[0] != '.')
	{
	  switch (DEFAULT_ABI)
	    {
	    default:
	      abort ();

	    case ABI_AIX:
	      putc ('.', file);
	      break;

	    case ABI_V4:
	    case ABI_DARWIN:
	      break;
	    }
	}

      if (TARGET_AIX)
	RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
      else
	assemble_name (file, XSTR (x, 0));
      return;

    case 'Z':
      /* Like 'L', for last word of TImode.  */
      if (GET_CODE (x) == REG)
	fprintf (file, "%s", reg_names[REGNO (x) + 3]);
      else if (GET_CODE (x) == MEM)
	{
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
	  else
	    output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
	  if (small_data_operand (x, GET_MODE (x)))
	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		     reg_names[SMALL_DATA_REG]);
	}
      return;

      /* Print AltiVec or SPE memory operand.  */
    case 'y':
      {
	rtx tmp;

	if (GET_CODE (x) != MEM)
	  abort ();

	tmp = XEXP (x, 0);

	if (TARGET_E500)
	  {
	    /* Handle [reg].  */
	    if (GET_CODE (tmp) == REG)
	      {
		fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
		break;
	      }
	    /* Handle [reg+UIMM].  */
	    else if (GET_CODE (tmp) == PLUS &&
		     GET_CODE (XEXP (tmp, 1)) == CONST_INT)
	      {
		int x;

		if (GET_CODE (XEXP (tmp, 0)) != REG)
		  abort ();

		x = INTVAL (XEXP (tmp, 1));
		fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
		break;
	      }

	    /* Fall through.  Must be [reg+reg].  */
	  }
	if (GET_CODE (tmp) == REG)
	  fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
	else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
	  {
	    if (REGNO (XEXP (tmp, 0)) == 0)
	      fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
		       reg_names[ REGNO (XEXP (tmp, 0)) ]);
	    else
	      fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
		       reg_names[ REGNO (XEXP (tmp, 1)) ]);
	  }
	else
	  abort ();
	break;
      }

    case 0:
      if (GET_CODE (x) == REG)
	fprintf (file, "%s", reg_names[REGNO (x)]);
      else if (GET_CODE (x) == MEM)
	{
	  /* We need to handle PRE_INC and PRE_DEC here, since we need to
	     know the width from the mode.  */
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC)
	    fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
		     reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
	  else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
		     reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
	  else
	    output_address (XEXP (x, 0));
	}
      else
	output_addr_const (file, x);
      return;

    case '&':
      assemble_name (file, rs6000_get_some_local_dynamic_name ());
      return;

    default:
      output_operand_lossage ("invalid %%xn code");
    }
}
/* Print the address of an operand.  Handles plain registers,
   symbolic addresses (with small-data annotation), reg+reg and
   reg+const forms, ELF/Mach-O LO_SUM low-part references, and
   TOC constant-pool addresses.  */

void
print_operand_address (FILE *file, rtx x)
{
  if (GET_CODE (x) == REG)
    fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
  else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
	   || GET_CODE (x) == LABEL_REF)
    {
      output_addr_const (file, x);
      if (small_data_operand (x, GET_MODE (x)))
	fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		 reg_names[SMALL_DATA_REG]);
      else if (TARGET_TOC)
	abort ();
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
    {
      /* Indexed form: if the base is r0, swap the operands since r0
	 as the first register operand means literal zero.  */
      if (REGNO (XEXP (x, 0)) == 0)
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
		 reg_names[ REGNO (XEXP (x, 0)) ]);
      else
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
		 reg_names[ REGNO (XEXP (x, 1)) ]);
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
    fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
	     INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
#if TARGET_ELF
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
	   && CONSTANT_P (XEXP (x, 1)))
    {
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
#if TARGET_MACHO
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
	   && CONSTANT_P (XEXP (x, 1)))
    {
      fprintf (file, "lo16(");
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
  else if (legitimate_constant_pool_address_p (x))
    {
      if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
	{
	  rtx contains_minus = XEXP (x, 1);
	  rtx minus, symref;
	  const char *name;

	  /* Find the (minus (sym) (toc)) buried in X, and temporarily
	     turn it into (sym) for output_addr_const.  */
	  while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
	    contains_minus = XEXP (contains_minus, 0);

	  minus = XEXP (contains_minus, 0);
	  symref = XEXP (minus, 0);
	  XEXP (contains_minus, 0) = symref;
	  if (TARGET_ELF)
	    {
	      /* Temporarily append @toc to the symbol's name; restored
		 below after output_addr_const has printed it.  */
	      char *newname;

	      name = XSTR (symref, 0);
	      newname = alloca (strlen (name) + sizeof ("@toc"));
	      strcpy (newname, name);
	      strcat (newname, "@toc");
	      XSTR (symref, 0) = newname;
	    }
	  output_addr_const (file, XEXP (x, 1));
	  if (TARGET_ELF)
	    XSTR (symref, 0) = name;
	  XEXP (contains_minus, 0) = minus;
	}
      else
	output_addr_const (file, XEXP (x, 1));

      fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
    }
  else
    abort ();
}
/* Target hook for assembling integer objects.  The PowerPC version has
   to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
   is defined.  It also needs to handle DI-mode objects on 64-bit
   targets.  Returns true when it has emitted the value itself;
   otherwise defers to default_assemble_integer.  */

static bool
rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
{
#ifdef RELOCATABLE_NEEDS_FIXUP
  /* Special handling for SI values.  */
  if (size == 4 && aligned_p)
    {
      extern int in_toc_section (void);
      /* Guards against re-entry while emitting the fixup label below.  */
      static int recurse = 0;

      /* For -mrelocatable, we mark all addresses that need to be fixed up
	 in the .fixup section.  */
      if (TARGET_RELOCATABLE
	  && !in_toc_section ()
	  && !in_text_section ()
	  && !recurse
	  && GET_CODE (x) != CONST_INT
	  && GET_CODE (x) != CONST_DOUBLE
	  && CONSTANT_P (x))
	{
	  char buf[256];

	  recurse = 1;
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
	  fixuplabelno++;
	  ASM_OUTPUT_LABEL (asm_out_file, buf);
	  fprintf (asm_out_file, "\t.long\t(");
	  output_addr_const (asm_out_file, x);
	  fprintf (asm_out_file, ")@fixup\n");
	  fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
	  ASM_OUTPUT_ALIGN (asm_out_file, 2);
	  fprintf (asm_out_file, "\t.long\t");
	  assemble_name (asm_out_file, buf);
	  fprintf (asm_out_file, "\n\t.previous\n");
	  recurse = 0;
	  return true;
	}
      /* Remove initial .'s to turn a -mcall-aixdesc function
	 address into the address of the descriptor, not the function
	 itself.  */
      else if (GET_CODE (x) == SYMBOL_REF
	       && XSTR (x, 0)[0] == '.'
	       && DEFAULT_ABI == ABI_AIX)
	{
	  const char *name = XSTR (x, 0);
	  while (*name == '.')
	    name++;

	  fprintf (asm_out_file, "\t.long\t%s\n", name);
	  return true;
	}
    }
#endif /* RELOCATABLE_NEEDS_FIXUP */
  return default_assemble_integer (x, size, aligned_p);
}
#ifdef HAVE_GAS_HIDDEN
/* Emit an assembler directive to set symbol visibility for DECL to
   VISIBILITY_TYPE.  */

static void
rs6000_assemble_visibility (tree decl, int vis)
{
  /* Functions need to have their entry point symbol visibility set as
     well as their descriptor symbol visibility.  */
  if (DEFAULT_ABI == ABI_AIX && TREE_CODE (decl) == FUNCTION_DECL)
    {
      /* Indexed by the visibility enum; entry 0 (default) is unused.  */
      static const char * const directive[] = {
	NULL, "internal", "hidden", "protected"
      };

      const char *asm_name
	= (* targetm.strip_name_encoding)
	    (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));

      fprintf (asm_out_file, "\t.%s\t%s\n", directive[vis], asm_name);
      fprintf (asm_out_file, "\t.%s\t.%s\n", directive[vis], asm_name);
    }
  else
    default_assemble_visibility (decl, vis);
}
#endif
9576 enum rtx_code
9577 rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
9579 /* Reversal of FP compares takes care -- an ordered compare
9580 becomes an unordered compare and vice versa. */
9581 if (mode == CCFPmode
9582 && (!flag_finite_math_only
9583 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
9584 || code == UNEQ || code == LTGT))
9585 return reverse_condition_maybe_unordered (code);
9586 else
9587 return reverse_condition (code);
/* Generate a compare for CODE.  Return a brand-new rtx that
   represents the result of the compare.  The operands compared are
   taken from the globals rs6000_compare_op0/op1, with
   rs6000_compare_fp_p selecting a floating-point compare.  */

static rtx
rs6000_generate_compare (enum rtx_code code)
{
  enum machine_mode comp_mode;
  rtx compare_result;

  /* Choose the CC mode: CCFP for FP, CCUNS for unsigned integer
     compares, plain CC otherwise.  */
  if (rs6000_compare_fp_p)
    comp_mode = CCFPmode;
  else if (code == GTU || code == LTU
	   || code == GEU || code == LEU)
    comp_mode = CCUNSmode;
  else
    comp_mode = CCmode;

  /* First, the compare.  */
  compare_result = gen_reg_rtx (comp_mode);

  /* SPE FP compare instructions on the GPRs.  Yuck!  */
  if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT)
      && rs6000_compare_fp_p)
    {
      rtx cmp, or1, or2, or_result, compare_result2;

      switch (code)
	{
	case EQ:
	case UNEQ:
	case NE:
	case LTGT:
	  cmp = flag_finite_math_only
	    ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1)
	    : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1);
	  break;
	case GT:
	case GTU:
	case UNGT:
	case UNGE:
	case GE:
	case GEU:
	  cmp = flag_finite_math_only
	    ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1)
	    : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1);
	  break;
	case LT:
	case LTU:
	case UNLT:
	case UNLE:
	case LE:
	case LEU:
	  cmp = flag_finite_math_only
	    ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1)
	    : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1);
	  break;
	default:
	  abort ();
	}

      /* Synthesize LE and GE from LT/GT || EQ.  */
      if (code == LE || code == GE || code == LEU || code == GEU)
	{
	  /* Synthesize GE/LE from GT/LT || EQ.  */

	  emit_insn (cmp);

	  switch (code)
	    {
	    case LE: code = LT; break;
	    case GE: code = GT; break;
	    case LEU: code = LT; break;
	    case GEU: code = GT; break;
	    default: abort ();
	    }

	  or1 = gen_reg_rtx (SImode);
	  or2 = gen_reg_rtx (SImode);
	  or_result = gen_reg_rtx (CCEQmode);
	  compare_result2 = gen_reg_rtx (CCFPmode);

	  /* Do the EQ.  */
	  cmp = flag_finite_math_only
	    ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
			       rs6000_compare_op1)
	    : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
			       rs6000_compare_op1);
	  emit_insn (cmp);

	  /* The MC8540 FP compare instructions set the CR bits
	     differently than other PPC compare instructions.  For
	     that matter, there is no generic test instruction, but a
	     testgt, testlt, and testeq.  For a true condition, bit 2
	     is set (x1xx) in the CR.  Following the traditional CR
	     values:

	     LT    GT    EQ    OV
	     bit3  bit2  bit1  bit0

	     ... bit 2 would be a GT CR alias, so later on we
	     look in the GT bits for the branch instructions.
	     However, we must be careful to emit correct RTL in
	     the meantime, so optimizations don't get confused.  */

	  or1 = gen_rtx_NE (SImode, compare_result, const0_rtx);
	  or2 = gen_rtx_NE (SImode, compare_result2, const0_rtx);

	  /* OR them together.  */
	  cmp = gen_rtx_SET (VOIDmode, or_result,
			     gen_rtx_COMPARE (CCEQmode,
					      gen_rtx_IOR (SImode, or1, or2),
					      const_true_rtx));
	  compare_result = or_result;
	  code = EQ;
	}
      else
	{
	  /* We only care about 1 bit (x1xx), so map everything to NE to
	     maintain rtl sanity.  We'll get to the right bit (x1xx) at
	     code output time.  */
	  if (code == NE || code == LTGT)
	    /* Do the inverse here because we have no cmpne
	       instruction.  We use the cmpeq instruction and expect
	       to get a 0 instead.  */
	    code = EQ;
	  else
	    code = NE;
	}

      emit_insn (cmp);
    }
  else
    emit_insn (gen_rtx_SET (VOIDmode, compare_result,
			    gen_rtx_COMPARE (comp_mode,
					     rs6000_compare_op0,
					     rs6000_compare_op1)));

  /* Some kinds of FP comparisons need an OR operation;
     under flag_finite_math_only we don't bother.  */
  if (rs6000_compare_fp_p
      && ! flag_finite_math_only
      && ! (TARGET_HARD_FLOAT && TARGET_E500 && !TARGET_FPRS)
      && (code == LE || code == GE
	  || code == UNEQ || code == LTGT
	  || code == UNGT || code == UNLT))
    {
      enum rtx_code or1, or2;
      rtx or1_rtx, or2_rtx, compare2_rtx;
      rtx or_result = gen_reg_rtx (CCEQmode);

      /* Decompose the two-bit condition into the pair of single-bit
	 conditions whose disjunction it is.  */
      switch (code)
	{
	case LE: or1 = LT;  or2 = EQ;  break;
	case GE: or1 = GT;  or2 = EQ;  break;
	case UNEQ: or1 = UNORDERED;  or2 = EQ;  break;
	case LTGT: or1 = LT;  or2 = GT;  break;
	case UNGT: or1 = UNORDERED;  or2 = GT;  break;
	case UNLT: or1 = UNORDERED;  or2 = LT;  break;
	default: abort ();
	}
      validate_condition_mode (or1, comp_mode);
      validate_condition_mode (or2, comp_mode);
      or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
      or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
      compare2_rtx = gen_rtx_COMPARE (CCEQmode,
				      gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
				      const_true_rtx);
      emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));

      compare_result = or_result;
      code = EQ;
    }

  validate_condition_mode (code, GET_MODE (compare_result));

  return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
}
9775 /* Emit the RTL for an sCOND pattern. */
9777 void
9778 rs6000_emit_sCOND (enum rtx_code code, rtx result)
9780 rtx condition_rtx;
9781 enum machine_mode op_mode;
9782 enum rtx_code cond_code;
9784 condition_rtx = rs6000_generate_compare (code);
9785 cond_code = GET_CODE (condition_rtx);
9787 if (cond_code == NE
9788 || cond_code == GE || cond_code == LE
9789 || cond_code == GEU || cond_code == LEU
9790 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
9792 rtx not_result = gen_reg_rtx (CCEQmode);
9793 rtx not_op, rev_cond_rtx;
9794 enum machine_mode cc_mode;
9796 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
9798 rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode, cond_code),
9799 SImode, XEXP (condition_rtx, 0), const0_rtx);
9800 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
9801 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
9802 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
9805 op_mode = GET_MODE (rs6000_compare_op0);
9806 if (op_mode == VOIDmode)
9807 op_mode = GET_MODE (rs6000_compare_op1);
9809 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
9811 PUT_MODE (condition_rtx, DImode);
9812 convert_move (result, condition_rtx, 0);
9814 else
9816 PUT_MODE (condition_rtx, SImode);
9817 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
9821 /* Emit a branch of kind CODE to location LOC. */
9823 void
9824 rs6000_emit_cbranch (enum rtx_code code, rtx loc)
9826 rtx condition_rtx, loc_ref;
9828 condition_rtx = rs6000_generate_compare (code);
9829 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
9830 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
9831 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
9832 loc_ref, pc_rtx)));
9835 /* Return the string to output a conditional branch to LABEL, which is
9836    the operand number of the label, or -1 if the branch is really a
9837    conditional return.
9839    OP is the conditional expression.  XEXP (OP, 0) is assumed to be a
9840    condition code register and its mode specifies what kind of
9841    comparison we made.
9843    REVERSED is nonzero if we should reverse the sense of the comparison.
9845    INSN is the insn.  */
9847 char *
9848 output_cbranch (rtx op, const char *label, int reversed, rtx insn)
/* NOTE: the result points into a static buffer, so each call
   overwrites the previous return value.  */
9850   static char string[64];
9851   enum rtx_code code = GET_CODE (op);
9852   rtx cc_reg = XEXP (op, 0);
9853   enum machine_mode mode = GET_MODE (cc_reg);
9854   int cc_regno = REGNO (cc_reg) - CR0_REGNO;
/* A length attribute of 8 means the target is out of range for a
   single conditional branch; we must branch around an unconditional
   jump (the "$+8" form emitted at the end).  */
9855   int need_longbranch = label != NULL && get_attr_length (insn) == 8;
/* Branching around the long jump inverts the condition's sense once
   more, hence the XOR with the caller-requested reversal.  */
9856   int really_reversed = reversed ^ need_longbranch;
9857   char *s = string;
9858   const char *ccode;
9859   const char *pred;
9860   rtx note;
9862   validate_condition_mode (code, mode);
9864   /* Work out which way this really branches.  We could use
9865      reverse_condition_maybe_unordered here always but this
9866      makes the resulting assembler clearer.  */
9867   if (really_reversed)
9869       /* Reversal of FP compares takes care -- an ordered compare
9870          becomes an unordered compare and vice versa.  */
9871       if (mode == CCFPmode)
9872         code = reverse_condition_maybe_unordered (code);
9873       else
9874         code = reverse_condition (code);
9877   if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
9879       /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
9880          to the GT bit.  */
9881       if (code == EQ)
9882         /* Opposite of GT.  */
9883         code = UNLE;
9884       else if (code == NE)
9885         code = GT;
9886       else
9887         abort ();
/* Map the rtx comparison code to its condition-code mnemonic.  */
9890   switch (code)
9892       /* Not all of these are actually distinct opcodes, but
9893          we distinguish them for clarity of the resulting assembler.  */
9894     case NE: case LTGT:
9895       ccode = "ne"; break;
9896     case EQ: case UNEQ:
9897       ccode = "eq"; break;
9898     case GE: case GEU:
9899       ccode = "ge"; break;
9900     case GT: case GTU: case UNGT:
9901       ccode = "gt"; break;
9902     case LE: case LEU:
9903       ccode = "le"; break;
9904     case LT: case LTU: case UNLT:
9905       ccode = "lt"; break;
9906     case UNORDERED: ccode = "un"; break;
9907     case ORDERED: ccode = "nu"; break;
9908     case UNGE: ccode = "nl"; break;
9909     case UNLE: ccode = "ng"; break;
9910     default:
9911       abort ();
9914   /* Maybe we have a guess as to how likely the branch is.
9915      The old mnemonics don't have a way to specify this information.  */
9916   pred = "";
9917   note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
9918   if (note != NULL_RTX)
9920       /* PROB is the difference from 50%.  */
9921       int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
9922       bool always_hint = rs6000_cpu != PROCESSOR_POWER4;
9924       /* Only hint for highly probable/improbable branches on newer
9925          cpus as static prediction overrides processor dynamic
9926          prediction.  For older cpus we may as well always hint, but
9927          assume not taken for branches that are very close to 50% as a
9928          mispredicted taken branch is more expensive than a
9929          mispredicted not-taken branch.  */
9930       if (always_hint
9931           || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
9933           if (abs (prob) > REG_BR_PROB_BASE / 20
9934               && ((prob > 0) ^ need_longbranch))
9935             pred = "+";
9936           else
9937             pred = "-";
/* The {a|b} syntax selects between the two mnemonic sets; the "r"/"lr"
   variants make this a branch-to-link-register, i.e. a conditional
   return (the label == NULL case from the header comment).  */
9941   if (label == NULL)
9942     s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
9943   else
9944     s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
9946   /* We need to escape any '%' characters in the reg_names string.
9947      Assume they'd only be the first character....  */
9948   if (reg_names[cc_regno + CR0_REGNO][0] == '%')
9949     *s++ = '%';
9950   s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
9952   if (label != NULL)
9954       /* If the branch distance was too far, we may have to use an
9955          unconditional branch to go the distance.  */
9956       if (need_longbranch)
9957         s += sprintf (s, ",$+8\n\tb %s", label);
9958       else
9959         s += sprintf (s, ",%s", label);
9962   return string;
9965 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
9966    operands of the last comparison is nonzero/true, FALSE_COND if it
9967    is zero/false.  Return 0 if the hardware has no such operation.  */
/* Returns 1 after emitting either an isel-based (integer) or an
   fsel-based (floating-point) conditional-move sequence.  */
9970 rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
9972   enum rtx_code code = GET_CODE (op);
9973   rtx op0 = rs6000_compare_op0;
9974   rtx op1 = rs6000_compare_op1;
9975   REAL_VALUE_TYPE c1;
9976   enum machine_mode compare_mode = GET_MODE (op0);
9977   enum machine_mode result_mode = GET_MODE (dest);
9978   rtx temp;
9980   /* These modes should always match.  */
9981   if (GET_MODE (op1) != compare_mode
9982       /* In the isel case however, we can use a compare immediate, so
9983          op1 may be a small constant.  */
9984       && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
9985     return 0;
9986   if (GET_MODE (true_cond) != result_mode)
9987     return 0;
9988   if (GET_MODE (false_cond) != result_mode)
9989     return 0;
9991   /* First, work out if the hardware can do this at all, or
9992      if it's too slow....  */
/* Integer conditional move is only available via isel.  */
9993   if (! rs6000_compare_fp_p)
9995       if (TARGET_ISEL)
9996         return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
9997       return 0;
10000   /* Eliminate half of the comparisons by switching operands, this
10001      makes the remaining code simpler.  */
10002   if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
10003       || code == LTGT || code == LT || code == UNLE)
10005       code = reverse_condition_maybe_unordered (code);
10006       temp = true_cond;
10007       true_cond = false_cond;
10008       false_cond = temp;
10011   /* UNEQ and LTGT take four instructions for a comparison with zero,
10012      it'll probably be faster to use a branch here too.  */
10013   if (code == UNEQ && HONOR_NANS (compare_mode))
10014     return 0;
/* c1 is only examined below when op1 is a CONST_DOUBLE (the && chain
   short-circuits), so it is never read uninitialized.  */
10016   if (GET_CODE (op1) == CONST_DOUBLE)
10017     REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
10019   /* We're going to try to implement comparisons by performing
10020      a subtract, then comparing against zero.  Unfortunately,
10021      Inf - Inf is NaN which is not zero, and so if we don't
10022      know that the operand is finite and the comparison
10023      would treat EQ different to UNORDERED, we can't do it.  */
10024   if (HONOR_INFINITIES (compare_mode)
10025       && code != GT && code != UNGE
10026       && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
10027       /* Constructs of the form (a OP b ? a : b) are safe.  */
10028       && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
10029           || (! rtx_equal_p (op0, true_cond)
10030               && ! rtx_equal_p (op1, true_cond))))
10031     return 0;
10032   /* At this point we know we can use fsel.  */
10034   /* Reduce the comparison to a comparison against zero.  */
10035   temp = gen_reg_rtx (compare_mode);
10036   emit_insn (gen_rtx_SET (VOIDmode, temp,
10037                           gen_rtx_MINUS (compare_mode, op0, op1)));
10038   op0 = temp;
10039   op1 = CONST0_RTX (compare_mode);
10041   /* If we don't care about NaNs we can reduce some of the comparisons
10042      down to faster ones.  */
10043   if (! HONOR_NANS (compare_mode))
10044     switch (code)
10046       case GT:
10047         code = LE;
10048         temp = true_cond;
10049         true_cond = false_cond;
10050         false_cond = temp;
10051         break;
10052       case UNGE:
10053         code = GE;
10054         break;
10055       case UNEQ:
10056         code = EQ;
10057         break;
10058       default:
10059         break;
10062   /* Now, reduce everything down to a GE.  */
10063   switch (code)
10065     case GE:
10066       break;
/* a <= 0 is equivalent to -a >= 0.  */
10068     case LE:
10069       temp = gen_reg_rtx (compare_mode);
10070       emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
10071       op0 = temp;
10072       break;
/* |a| >= 0 holds unless a is NaN, so ORDERED becomes |a| >= 0.  */
10074     case ORDERED:
10075       temp = gen_reg_rtx (compare_mode);
10076       emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
10077       op0 = temp;
10078       break;
/* a == 0 becomes -|a| >= 0, true only at zero (or NaN handled above).  */
10080     case EQ:
10081       temp = gen_reg_rtx (compare_mode);
10082       emit_insn (gen_rtx_SET (VOIDmode, temp,
10083                               gen_rtx_NEG (compare_mode,
10084                                            gen_rtx_ABS (compare_mode, op0))));
10085       op0 = temp;
10086       break;
10088     case UNGE:
10089       /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
10090       temp = gen_reg_rtx (result_mode);
10091       emit_insn (gen_rtx_SET (VOIDmode, temp,
10092                               gen_rtx_IF_THEN_ELSE (result_mode,
10093                                                     gen_rtx_GE (VOIDmode,
10094                                                                 op0, op1),
10095                                                     true_cond, false_cond)));
10096       false_cond = true_cond;
10097       true_cond = temp;
10099       temp = gen_reg_rtx (compare_mode);
10100       emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
10101       op0 = temp;
10102       break;
10104     case GT:
10105       /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
10106       temp = gen_reg_rtx (result_mode);
10107       emit_insn (gen_rtx_SET (VOIDmode, temp,
10108                               gen_rtx_IF_THEN_ELSE (result_mode,
10109                                                     gen_rtx_GE (VOIDmode,
10110                                                                 op0, op1),
10111                                                     true_cond, false_cond)));
10112       true_cond = false_cond;
10113       false_cond = temp;
10115       temp = gen_reg_rtx (compare_mode);
10116       emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
10117       op0 = temp;
10118       break;
10120     default:
10121       abort ();
/* Emit the final fsel-style conditional move on op0 >= 0.  */
10124   emit_insn (gen_rtx_SET (VOIDmode, dest,
10125                           gen_rtx_IF_THEN_ELSE (result_mode,
10126                                                 gen_rtx_GE (VOIDmode,
10127                                                             op0, op1),
10128                                                 true_cond, false_cond)));
10129   return 1;
10132 /* Same as above, but for ints (isel). */
10134 static int
10135 rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
10137 rtx condition_rtx, cr;
10139 /* All isel implementations thus far are 32-bits. */
10140 if (GET_MODE (rs6000_compare_op0) != SImode)
10141 return 0;
10143 /* We still have to do the compare, because isel doesn't do a
10144 compare, it just looks at the CRx bits set by a previous compare
10145 instruction. */
10146 condition_rtx = rs6000_generate_compare (GET_CODE (op));
10147 cr = XEXP (condition_rtx, 0);
10149 if (GET_MODE (cr) == CCmode)
10150 emit_insn (gen_isel_signed (dest, condition_rtx,
10151 true_cond, false_cond, cr));
10152 else
10153 emit_insn (gen_isel_unsigned (dest, condition_rtx,
10154 true_cond, false_cond, cr));
10156 return 1;
10159 const char *
10160 output_isel (rtx *operands)
10162 enum rtx_code code;
10164 code = GET_CODE (operands[1]);
10165 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
10167 PUT_CODE (operands[1], reverse_condition (code));
10168 return "isel %0,%3,%2,%j1";
10170 else
10171 return "isel %0,%2,%3,%j1";
10174 void
10175 rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
10177 enum machine_mode mode = GET_MODE (op0);
10178 enum rtx_code c;
10179 rtx target;
10181 if (code == SMAX || code == SMIN)
10182 c = GE;
10183 else
10184 c = GEU;
10186 if (code == SMAX || code == UMAX)
10187 target = emit_conditional_move (dest, c, op0, op1, mode,
10188 op0, op1, mode, 0);
10189 else
10190 target = emit_conditional_move (dest, c, op0, op1, mode,
10191 op1, op0, mode, 0);
10192 if (target == NULL_RTX)
10193 abort ();
10194 if (target != dest)
10195 emit_move_insn (dest, target);
10198 /* Emit instructions to move SRC to DST.  Called by splitters for
10199    multi-register moves.  It will emit at most one instruction for
10200    each register that is accessed; that is, it won't emit li/lis pairs
10201    (or equivalent for 64-bit code).  One of SRC or DST must be a hard
10202    register.  */
10204 void
10205 rs6000_split_multireg_move (rtx dst, rtx src)
10207   /* The register number of the first register being moved.  */
10208   int reg;
10209   /* The mode that is to be moved.  */
10210   enum machine_mode mode;
10211   /* The mode that the move is being done in, and its size.  */
10212   enum machine_mode reg_mode;
10213   int reg_mode_size;
10214   /* The number of registers that will be moved.  */
10215   int nregs;
10217   reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
10218   mode = GET_MODE (dst);
10219   nregs = HARD_REGNO_NREGS (reg, mode);
/* Each per-register move uses the natural mode of the register
   class involved: DFmode for FPRs, V16QImode for AltiVec, else the
   word mode.  */
10220   if (FP_REGNO_P (reg))
10221     reg_mode = DFmode;
10222   else if (ALTIVEC_REGNO_P (reg))
10223     reg_mode = V16QImode;
10224   else
10225     reg_mode = word_mode;
10226   reg_mode_size = GET_MODE_SIZE (reg_mode);
10228   if (reg_mode_size * nregs != GET_MODE_SIZE (mode))
10229     abort ();
10231   if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
10233       /* Move register range backwards, if we might have destructive
10234          overlap.  */
10235       int i;
10236       for (i = nregs - 1; i >= 0; i--)
10237         emit_insn (gen_rtx_SET (VOIDmode,
10238                                 simplify_gen_subreg (reg_mode, dst, mode,
10239                                                      i * reg_mode_size),
10240                                 simplify_gen_subreg (reg_mode, src, mode,
10241                                                      i * reg_mode_size)));
10243   else
10245       int i;
/* J is the index (within the destination group) of the register
   that must be moved last — the base register of a memory source —
   or -1 when ordering does not matter.  The copy loop below starts
   at J+1 and wraps, so subword J is written last.  */
10246       int j = -1;
10247       bool used_update = false;
10249       if (GET_CODE (src) == MEM && INT_REGNO_P (reg))
10251           rtx breg;
/* Resolve a pre-increment/decrement address by adjusting the base
   register up front and rewriting SRC as a plain base-register MEM.  */
10253           if (GET_CODE (XEXP (src, 0)) == PRE_INC
10254               || GET_CODE (XEXP (src, 0)) == PRE_DEC)
10256               rtx delta_rtx;
10257               breg = XEXP (XEXP (src, 0), 0);
10258               delta_rtx = GET_CODE (XEXP (src, 0)) == PRE_INC
10259                   ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
10260                   : GEN_INT (-GET_MODE_SIZE (GET_MODE (src)));
10261               emit_insn (TARGET_32BIT
10262                          ? gen_addsi3 (breg, breg, delta_rtx)
10263                          : gen_adddi3 (breg, breg, delta_rtx));
10264               src = gen_rtx_MEM (mode, breg);
10267           /* The address now involves only a base register.  If that
10268              base register is one of the destination registers, it must
10269              be loaded last, so record its index in J.  */
10271           breg = (GET_CODE (XEXP (src, 0)) == PLUS
10272                   ? XEXP (XEXP (src, 0), 0)
10273                   : XEXP (src, 0));
10275           if (!REG_P (breg))
10276             abort();
10278           if (REGNO (breg) >= REGNO (dst)
10279               && REGNO (breg) < REGNO (dst) + nregs)
10280             j = REGNO (breg) - REGNO (dst);
10283       if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
10285           rtx breg;
10287           if (GET_CODE (XEXP (dst, 0)) == PRE_INC
10288               || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
10290               rtx delta_rtx;
10291               breg = XEXP (XEXP (dst, 0), 0);
10292               delta_rtx = GET_CODE (XEXP (dst, 0)) == PRE_INC
10293                   ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
10294                   : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst)));
10296               /* We have to update the breg before doing the store.
10297                  Use store with update, if available.  */
10299               if (TARGET_UPDATE)
10301                   rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
10302                   emit_insn (TARGET_32BIT
10303                              ? gen_movsi_update (breg, breg, delta_rtx, nsrc)
10304                              : gen_movdi_update (breg, breg, delta_rtx, nsrc));
10305                   used_update = true;
10307               else
10308                 emit_insn (TARGET_32BIT
10309                            ? gen_addsi3 (breg, breg, delta_rtx)
10310                            : gen_adddi3 (breg, breg, delta_rtx));
10311               dst = gen_rtx_MEM (mode, breg);
10315       for (i = 0; i < nregs; i++)
10317           /* Calculate index to next subword.  */
10318           ++j;
10319           if (j == nregs)
10320             j = 0;
10322           /* If the compiler already emitted the move of the first word
10323              above via store-with-update, skip it here.  */
10324           if (j == 0 && used_update)
10325             continue;
10327           emit_insn (gen_rtx_SET (VOIDmode,
10328                                   simplify_gen_subreg (reg_mode, dst, mode,
10329                                                        j * reg_mode_size),
10330                                   simplify_gen_subreg (reg_mode, src, mode,
10331                                                        j * reg_mode_size)));
10337 /* This page contains routines that are used to determine what the
10338 function prologue and epilogue code will do and write them out. */
10340 /* Return the first fixed-point register that is required to be
10341 saved. 32 if none. */
10344 first_reg_to_save (void)
10346 int first_reg;
10348 /* Find lowest numbered live register. */
10349 for (first_reg = 13; first_reg <= 31; first_reg++)
10350 if (regs_ever_live[first_reg]
10351 && (! call_used_regs[first_reg]
10352 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
10353 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
10354 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
10355 break;
10357 #if TARGET_MACHO
10358 if (flag_pic
10359 && current_function_uses_pic_offset_table
10360 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
10361 return RS6000_PIC_OFFSET_TABLE_REGNUM;
10362 #endif
10364 return first_reg;
10367 /* Similar, for FP regs. */
10370 first_fp_reg_to_save (void)
10372 int first_reg;
10374 /* Find lowest numbered live register. */
10375 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
10376 if (regs_ever_live[first_reg])
10377 break;
10379 return first_reg;
10382 /* Similar, for AltiVec regs. */
10384 static int
10385 first_altivec_reg_to_save (void)
10387 int i;
10389 /* Stack frame remains as is unless we are in AltiVec ABI. */
10390 if (! TARGET_ALTIVEC_ABI)
10391 return LAST_ALTIVEC_REGNO + 1;
10393 /* Find lowest numbered live register. */
10394 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
10395 if (regs_ever_live[i])
10396 break;
10398 return i;
10401 /* Return a 32-bit mask of the AltiVec registers we need to set in
10402 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
10403 the 32-bit word is 0. */
10405 static unsigned int
10406 compute_vrsave_mask (void)
10408 unsigned int i, mask = 0;
10410 /* First, find out if we use _any_ altivec registers. */
10411 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
10412 if (regs_ever_live[i])
10413 mask |= ALTIVEC_REG_BIT (i);
10415 if (mask == 0)
10416 return mask;
10418 /* Next, remove the argument registers from the set. These must
10419 be in the VRSAVE mask set by the caller, so we don't need to add
10420 them in again. More importantly, the mask we compute here is
10421 used to generate CLOBBERs in the set_vrsave insn, and we do not
10422 wish the argument registers to die. */
10423 for (i = cfun->args_info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
10424 mask &= ~ALTIVEC_REG_BIT (i);
10426 /* Similarly, remove the return value from the set. */
10428 bool yes = false;
10429 diddle_return_value (is_altivec_return_reg, &yes);
10430 if (yes)
10431 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
10434 return mask;
10437 static void
10438 is_altivec_return_reg (rtx reg, void *xyes)
10440 bool *yes = (bool *) xyes;
10441 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
10442 *yes = true;
10446 /* Calculate the stack information for the current function. This is
10447 complicated by having two separate calling sequences, the AIX calling
10448 sequence and the V.4 calling sequence.
10450 AIX (and Darwin/Mac OS X) stack frames look like:
10451 32-bit 64-bit
10452 SP----> +---------------------------------------+
10453 | back chain to caller | 0 0
10454 +---------------------------------------+
10455 | saved CR | 4 8 (8-11)
10456 +---------------------------------------+
10457 | saved LR | 8 16
10458 +---------------------------------------+
10459 | reserved for compilers | 12 24
10460 +---------------------------------------+
10461 | reserved for binders | 16 32
10462 +---------------------------------------+
10463 | saved TOC pointer | 20 40
10464 +---------------------------------------+
10465 | Parameter save area (P) | 24 48
10466 +---------------------------------------+
10467 | Alloca space (A) | 24+P etc.
10468 +---------------------------------------+
10469 | Local variable space (L) | 24+P+A
10470 +---------------------------------------+
10471 | Float/int conversion temporary (X) | 24+P+A+L
10472 +---------------------------------------+
10473 | Save area for AltiVec registers (W) | 24+P+A+L+X
10474 +---------------------------------------+
10475 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
10476 +---------------------------------------+
10477 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
10478 +---------------------------------------+
10479 | Save area for GP registers (G) | 24+P+A+L+X+W+Y+Z
10480 +---------------------------------------+
10481 | Save area for FP registers (F) | 24+P+A+L+X+W+Y+Z+G
10482 +---------------------------------------+
10483 old SP->| back chain to caller's caller |
10484 +---------------------------------------+
10486 The required alignment for AIX configurations is two words (i.e., 8
10487 or 16 bytes).
10490 V.4 stack frames look like:
10492 SP----> +---------------------------------------+
10493 | back chain to caller | 0
10494 +---------------------------------------+
10495 | caller's saved LR | 4
10496 +---------------------------------------+
10497 | Parameter save area (P) | 8
10498 +---------------------------------------+
10499 | Alloca space (A) | 8+P
10500 +---------------------------------------+
10501 | Varargs save area (V) | 8+P+A
10502 +---------------------------------------+
10503 | Local variable space (L) | 8+P+A+V
10504 +---------------------------------------+
10505 | Float/int conversion temporary (X) | 8+P+A+V+L
10506 +---------------------------------------+
10507 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
10508 +---------------------------------------+
10509 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
10510 +---------------------------------------+
10511 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
10512 +---------------------------------------+
10513 | SPE: area for 64-bit GP registers |
10514 +---------------------------------------+
10515 | SPE alignment padding |
10516 +---------------------------------------+
10517 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
10518 +---------------------------------------+
10519 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
10520 +---------------------------------------+
10521 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
10522 +---------------------------------------+
10523 old SP->| back chain to caller's caller |
10524 +---------------------------------------+
10526 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
10527 given. (But note below and in sysv4.h that we require only 8 and
10528 may round up the size of our stack frame anyways. The historical
10529 reason is early versions of powerpc-linux which didn't properly
10530 align the stack at program startup. A happy side-effect is that
10531 -mno-eabi libraries can be used with -meabi programs.)
10533 The EABI configuration defaults to the V.4 layout. However,
10534 the stack alignment requirements may differ. If -mno-eabi is not
10535 given, the required stack alignment is 8 bytes; if -mno-eabi is
10536 given, the required alignment is 16 bytes. (But see V.4 comment
10537 above.) */
10539 #ifndef ABI_STACK_BOUNDARY
10540 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
10541 #endif
10543 static rs6000_stack_t *
10544 rs6000_stack_info (void)
10546 static rs6000_stack_t info, zero_info;
10547 rs6000_stack_t *info_ptr = &info;
10548 int reg_size = TARGET_POWERPC64 ? 8 : 4;
10549 int ehrd_size;
10550 HOST_WIDE_INT total_raw_size;
10552 /* Zero all fields portably. */
10553 info = zero_info;
10555 if (TARGET_SPE)
10557 /* Cache value so we don't rescan instruction chain over and over. */
10558 if (cfun->machine->insn_chain_scanned_p == 0)
10560 cfun->machine->insn_chain_scanned_p = 1;
10561 info_ptr->spe_64bit_regs_used = (int) spe_func_has_64bit_regs_p ();
10565 /* Select which calling sequence. */
10566 info_ptr->abi = DEFAULT_ABI;
10568 /* Calculate which registers need to be saved & save area size. */
10569 info_ptr->first_gp_reg_save = first_reg_to_save ();
10570 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
10571 even if it currently looks like we won't. */
10572 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
10573 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
10574 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
10575 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
10576 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
10577 else
10578 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
10580 /* For the SPE, we have an additional upper 32-bits on each GPR.
10581 Ideally we should save the entire 64-bits only when the upper
10582 half is used in SIMD instructions. Since we only record
10583 registers live (not the size they are used in), this proves
10584 difficult because we'd have to traverse the instruction chain at
10585 the right time, taking reload into account. This is a real pain,
10586 so we opt to save the GPRs in 64-bits always if but one register
10587 gets used in 64-bits. Otherwise, all the registers in the frame
10588 get saved in 32-bits.
10590 So... since when we save all GPRs (except the SP) in 64-bits, the
10591 traditional GP save area will be empty. */
10592 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
10593 info_ptr->gp_size = 0;
10595 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
10596 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
10598 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
10599 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
10600 - info_ptr->first_altivec_reg_save);
10602 /* Does this function call anything? */
10603 info_ptr->calls_p = (! current_function_is_leaf
10604 || cfun->machine->ra_needs_full_frame);
10606 /* Determine if we need to save the link register. */
10607 if (rs6000_ra_ever_killed ()
10608 || (DEFAULT_ABI == ABI_AIX
10609 && current_function_profile
10610 && !TARGET_PROFILE_KERNEL)
10611 #ifdef TARGET_RELOCATABLE
10612 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
10613 #endif
10614 || (info_ptr->first_fp_reg_save != 64
10615 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
10616 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
10617 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
10618 || (DEFAULT_ABI == ABI_DARWIN
10619 && flag_pic
10620 && current_function_uses_pic_offset_table)
10621 || info_ptr->calls_p)
10623 info_ptr->lr_save_p = 1;
10624 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
10627 /* Determine if we need to save the condition code registers. */
10628 if (regs_ever_live[CR2_REGNO]
10629 || regs_ever_live[CR3_REGNO]
10630 || regs_ever_live[CR4_REGNO])
10632 info_ptr->cr_save_p = 1;
10633 if (DEFAULT_ABI == ABI_V4)
10634 info_ptr->cr_size = reg_size;
10637 /* If the current function calls __builtin_eh_return, then we need
10638 to allocate stack space for registers that will hold data for
10639 the exception handler. */
10640 if (current_function_calls_eh_return)
10642 unsigned int i;
10643 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
10644 continue;
10646 /* SPE saves EH registers in 64-bits. */
10647 ehrd_size = i * (TARGET_SPE_ABI
10648 && info_ptr->spe_64bit_regs_used != 0
10649 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
10651 else
10652 ehrd_size = 0;
10654 /* Determine various sizes. */
10655 info_ptr->reg_size = reg_size;
10656 info_ptr->fixed_size = RS6000_SAVE_AREA;
10657 info_ptr->varargs_size = RS6000_VARARGS_AREA;
10658 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
10659 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
10662 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
10663 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
10664 else
10665 info_ptr->spe_gp_size = 0;
10667 if (TARGET_ALTIVEC_ABI)
10668 info_ptr->vrsave_mask = compute_vrsave_mask ();
10669 else
10670 info_ptr->vrsave_mask = 0;
10672 if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
10673 info_ptr->vrsave_size = 4;
10674 else
10675 info_ptr->vrsave_size = 0;
10677 /* Calculate the offsets. */
10678 switch (DEFAULT_ABI)
10680 case ABI_NONE:
10681 default:
10682 abort ();
10684 case ABI_AIX:
10685 case ABI_DARWIN:
10686 info_ptr->fp_save_offset = - info_ptr->fp_size;
10687 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
10689 if (TARGET_ALTIVEC_ABI)
10691 info_ptr->vrsave_save_offset
10692 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
10694 /* Align stack so vector save area is on a quadword boundary. */
10695 if (info_ptr->altivec_size != 0)
10696 info_ptr->altivec_padding_size
10697 = 16 - (-info_ptr->vrsave_save_offset % 16);
10698 else
10699 info_ptr->altivec_padding_size = 0;
10701 info_ptr->altivec_save_offset
10702 = info_ptr->vrsave_save_offset
10703 - info_ptr->altivec_padding_size
10704 - info_ptr->altivec_size;
10706 /* Adjust for AltiVec case. */
10707 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
10709 else
10710 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
10711 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
10712 info_ptr->lr_save_offset = 2*reg_size;
10713 break;
10715 case ABI_V4:
10716 info_ptr->fp_save_offset = - info_ptr->fp_size;
10717 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
10718 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
10720 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
10722 /* Align stack so SPE GPR save area is aligned on a
10723 double-word boundary. */
10724 if (info_ptr->spe_gp_size != 0)
10725 info_ptr->spe_padding_size
10726 = 8 - (-info_ptr->cr_save_offset % 8);
10727 else
10728 info_ptr->spe_padding_size = 0;
10730 info_ptr->spe_gp_save_offset
10731 = info_ptr->cr_save_offset
10732 - info_ptr->spe_padding_size
10733 - info_ptr->spe_gp_size;
10735 /* Adjust for SPE case. */
10736 info_ptr->toc_save_offset
10737 = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
10739 else if (TARGET_ALTIVEC_ABI)
10741 info_ptr->vrsave_save_offset
10742 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
10744 /* Align stack so vector save area is on a quadword boundary. */
10745 if (info_ptr->altivec_size != 0)
10746 info_ptr->altivec_padding_size
10747 = 16 - (-info_ptr->vrsave_save_offset % 16);
10748 else
10749 info_ptr->altivec_padding_size = 0;
10751 info_ptr->altivec_save_offset
10752 = info_ptr->vrsave_save_offset
10753 - info_ptr->altivec_padding_size
10754 - info_ptr->altivec_size;
10756 /* Adjust for AltiVec case. */
10757 info_ptr->toc_save_offset
10758 = info_ptr->altivec_save_offset - info_ptr->toc_size;
10760 else
10761 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
10762 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
10763 info_ptr->lr_save_offset = reg_size;
10764 break;
10767 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
10768 + info_ptr->gp_size
10769 + info_ptr->altivec_size
10770 + info_ptr->altivec_padding_size
10771 + info_ptr->spe_gp_size
10772 + info_ptr->spe_padding_size
10773 + ehrd_size
10774 + info_ptr->cr_size
10775 + info_ptr->lr_size
10776 + info_ptr->vrsave_size
10777 + info_ptr->toc_size,
10778 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
10779 ? 16 : 8);
10781 total_raw_size = (info_ptr->vars_size
10782 + info_ptr->parm_size
10783 + info_ptr->save_size
10784 + info_ptr->varargs_size
10785 + info_ptr->fixed_size);
10787 info_ptr->total_size =
10788 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
10790 /* Determine if we need to allocate any stack frame:
10792 For AIX we need to push the stack if a frame pointer is needed
10793 (because the stack might be dynamically adjusted), if we are
10794 debugging, if we make calls, or if the sum of fp_save, gp_save,
10795 and local variables are more than the space needed to save all
10796 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
10797 + 18*8 = 288 (GPR13 reserved).
10799 For V.4 we don't have the stack cushion that AIX uses, but assume
10800 that the debugger can handle stackless frames. */
10802 if (info_ptr->calls_p)
10803 info_ptr->push_p = 1;
10805 else if (DEFAULT_ABI == ABI_V4)
10806 info_ptr->push_p = total_raw_size > info_ptr->fixed_size;
10808 else if (frame_pointer_needed)
10809 info_ptr->push_p = 1;
10811 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
10812 info_ptr->push_p = 1;
10814 else
10815 info_ptr->push_p
10816 = total_raw_size - info_ptr->fixed_size > (TARGET_32BIT ? 220 : 288);
10818 /* Zero offsets if we're not saving those registers. */
10819 if (info_ptr->fp_size == 0)
10820 info_ptr->fp_save_offset = 0;
10822 if (info_ptr->gp_size == 0)
10823 info_ptr->gp_save_offset = 0;
10825 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
10826 info_ptr->altivec_save_offset = 0;
10828 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
10829 info_ptr->vrsave_save_offset = 0;
10831 if (! TARGET_SPE_ABI
10832 || info_ptr->spe_64bit_regs_used == 0
10833 || info_ptr->spe_gp_size == 0)
10834 info_ptr->spe_gp_save_offset = 0;
10836 if (! info_ptr->lr_save_p)
10837 info_ptr->lr_save_offset = 0;
10839 if (! info_ptr->cr_save_p)
10840 info_ptr->cr_save_offset = 0;
10842 if (! info_ptr->toc_save_p)
10843 info_ptr->toc_save_offset = 0;
10845 return info_ptr;
10848 /* Return true if the current function uses any GPRs in 64-bit SIMD
10849 mode. */
10851 static bool
10852 spe_func_has_64bit_regs_p (void)
10854 rtx insns, insn;
10856 /* Functions that save and restore all the call-saved registers will
10857 need to save/restore the registers in 64-bits. */
10858 if (current_function_calls_eh_return
10859 || current_function_calls_setjmp
10860 || current_function_has_nonlocal_goto)
10861 return true;
10863 insns = get_insns ();
10865 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
10867 if (INSN_P (insn))
10869 rtx i;
10871 i = PATTERN (insn);
10872 if (GET_CODE (i) == SET
10873 && SPE_VECTOR_MODE (GET_MODE (SET_SRC (i))))
10874 return true;
10878 return false;
10881 static void
10882 debug_stack_info (rs6000_stack_t *info)
10884 const char *abi_string;
10886 if (! info)
10887 info = rs6000_stack_info ();
10889 fprintf (stderr, "\nStack information for function %s:\n",
10890 ((current_function_decl && DECL_NAME (current_function_decl))
10891 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
10892 : "<unknown>"));
10894 switch (info->abi)
10896 default: abi_string = "Unknown"; break;
10897 case ABI_NONE: abi_string = "NONE"; break;
10898 case ABI_AIX: abi_string = "AIX"; break;
10899 case ABI_DARWIN: abi_string = "Darwin"; break;
10900 case ABI_V4: abi_string = "V.4"; break;
10903 fprintf (stderr, "\tABI = %5s\n", abi_string);
10905 if (TARGET_ALTIVEC_ABI)
10906 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
10908 if (TARGET_SPE_ABI)
10909 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
10911 if (info->first_gp_reg_save != 32)
10912 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
10914 if (info->first_fp_reg_save != 64)
10915 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
10917 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
10918 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
10919 info->first_altivec_reg_save);
10921 if (info->lr_save_p)
10922 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
10924 if (info->cr_save_p)
10925 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
10927 if (info->toc_save_p)
10928 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
10930 if (info->vrsave_mask)
10931 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
10933 if (info->push_p)
10934 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
10936 if (info->calls_p)
10937 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
10939 if (info->gp_save_offset)
10940 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
10942 if (info->fp_save_offset)
10943 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
10945 if (info->altivec_save_offset)
10946 fprintf (stderr, "\taltivec_save_offset = %5d\n",
10947 info->altivec_save_offset);
10949 if (info->spe_gp_save_offset)
10950 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
10951 info->spe_gp_save_offset);
10953 if (info->vrsave_save_offset)
10954 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
10955 info->vrsave_save_offset);
10957 if (info->lr_save_offset)
10958 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
10960 if (info->cr_save_offset)
10961 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
10963 if (info->toc_save_offset)
10964 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
10966 if (info->varargs_save_offset)
10967 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
10969 if (info->total_size)
10970 fprintf (stderr, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC"\n",
10971 info->total_size);
10973 if (info->varargs_size)
10974 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
10976 if (info->vars_size)
10977 fprintf (stderr, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC"\n",
10978 info->vars_size);
10980 if (info->parm_size)
10981 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
10983 if (info->fixed_size)
10984 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
10986 if (info->gp_size)
10987 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
10989 if (info->spe_gp_size)
10990 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
10992 if (info->fp_size)
10993 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
10995 if (info->altivec_size)
10996 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
10998 if (info->vrsave_size)
10999 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
11001 if (info->altivec_padding_size)
11002 fprintf (stderr, "\taltivec_padding_size= %5d\n",
11003 info->altivec_padding_size);
11005 if (info->spe_padding_size)
11006 fprintf (stderr, "\tspe_padding_size = %5d\n",
11007 info->spe_padding_size);
11009 if (info->lr_size)
11010 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
11012 if (info->cr_size)
11013 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
11015 if (info->toc_size)
11016 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
11018 if (info->save_size)
11019 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
11021 if (info->reg_size != 4)
11022 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
11024 fprintf (stderr, "\n");
11028 rs6000_return_addr (int count, rtx frame)
11030 /* Currently we don't optimize very well between prolog and body
11031 code and for PIC code the code can be actually quite bad, so
11032 don't try to be too clever here. */
11033 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
11035 cfun->machine->ra_needs_full_frame = 1;
11037 return
11038 gen_rtx_MEM
11039 (Pmode,
11040 memory_address
11041 (Pmode,
11042 plus_constant (copy_to_reg
11043 (gen_rtx_MEM (Pmode,
11044 memory_address (Pmode, frame))),
11045 RETURN_ADDRESS_OFFSET)));
11048 cfun->machine->ra_need_lr = 1;
11049 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
11052 /* Say whether a function is a candidate for sibcall handling or not.
11053 We do not allow indirect calls to be optimized into sibling calls.
11054 Also, we can't do it if there are any vector parameters; there's
11055 nowhere to put the VRsave code so it works; note that functions with
11056 vector parameters are required to have a prototype, so the argument
11057 type info must be available here. (The tail recursion case can work
11058 with vector parameters, but there's no way to distinguish here.) */
11059 static bool
11060 rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
11062 tree type;
11063 if (decl)
11065 if (TARGET_ALTIVEC_VRSAVE)
11067 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
11068 type; type = TREE_CHAIN (type))
11070 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
11071 return false;
11074 if (DEFAULT_ABI == ABI_DARWIN
11075 || (*targetm.binds_local_p) (decl))
11077 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
11079 if (!lookup_attribute ("longcall", attr_list)
11080 || lookup_attribute ("shortcall", attr_list))
11081 return true;
11084 return false;
11087 static int
11088 rs6000_ra_ever_killed (void)
11090 rtx top;
11091 rtx reg;
11092 rtx insn;
11094 if (current_function_is_thunk)
11095 return 0;
11097 /* regs_ever_live has LR marked as used if any sibcalls are present,
11098 but this should not force saving and restoring in the
11099 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
11100 clobbers LR, so that is inappropriate. */
11102 /* Also, the prologue can generate a store into LR that
11103 doesn't really count, like this:
11105 move LR->R0
11106 bcl to set PIC register
11107 move LR->R31
11108 move R0->LR
11110 When we're called from the epilogue, we need to avoid counting
11111 this as a store. */
11113 push_topmost_sequence ();
11114 top = get_insns ();
11115 pop_topmost_sequence ();
11116 reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
11118 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
11120 if (INSN_P (insn))
11122 if (FIND_REG_INC_NOTE (insn, reg))
11123 return 1;
11124 else if (GET_CODE (insn) == CALL_INSN
11125 && !SIBLING_CALL_P (insn))
11126 return 1;
11127 else if (set_of (reg, insn) != NULL_RTX
11128 && !prologue_epilogue_contains (insn))
11129 return 1;
11132 return 0;
11135 /* Add a REG_MAYBE_DEAD note to the insn. */
11136 static void
11137 rs6000_maybe_dead (rtx insn)
11139 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
11140 const0_rtx,
11141 REG_NOTES (insn));
11144 /* Emit instructions needed to load the TOC register.
11145 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
11146 a constant pool; or for SVR4 -fpic. */
11148 void
11149 rs6000_emit_load_toc_table (int fromprolog)
11151 rtx dest, insn;
11152 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
11154 if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
11156 rtx temp = (fromprolog
11157 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
11158 : gen_reg_rtx (Pmode));
11159 insn = emit_insn (gen_load_toc_v4_pic_si (temp));
11160 if (fromprolog)
11161 rs6000_maybe_dead (insn);
11162 insn = emit_move_insn (dest, temp);
11163 if (fromprolog)
11164 rs6000_maybe_dead (insn);
11166 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
11168 char buf[30];
11169 rtx tempLR = (fromprolog
11170 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
11171 : gen_reg_rtx (Pmode));
11172 rtx temp0 = (fromprolog
11173 ? gen_rtx_REG (Pmode, 0)
11174 : gen_reg_rtx (Pmode));
11175 rtx symF;
11177 /* possibly create the toc section */
11178 if (! toc_initialized)
11180 toc_section ();
11181 function_section (current_function_decl);
11184 if (fromprolog)
11186 rtx symL;
11188 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
11189 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
11191 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
11192 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
11194 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
11195 symF)));
11196 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
11197 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
11198 symL,
11199 symF)));
11201 else
11203 rtx tocsym;
11204 static int reload_toc_labelno = 0;
11206 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
11208 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
11209 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
11211 emit_insn (gen_load_toc_v4_PIC_1b (tempLR, symF, tocsym));
11212 emit_move_insn (dest, tempLR);
11213 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
11215 insn = emit_insn (gen_addsi3 (dest, temp0, dest));
11216 if (fromprolog)
11217 rs6000_maybe_dead (insn);
11219 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
11221 /* This is for AIX code running in non-PIC ELF32. */
11222 char buf[30];
11223 rtx realsym;
11224 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
11225 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
11227 insn = emit_insn (gen_elf_high (dest, realsym));
11228 if (fromprolog)
11229 rs6000_maybe_dead (insn);
11230 insn = emit_insn (gen_elf_low (dest, dest, realsym));
11231 if (fromprolog)
11232 rs6000_maybe_dead (insn);
11234 else if (DEFAULT_ABI == ABI_AIX)
11236 if (TARGET_32BIT)
11237 insn = emit_insn (gen_load_toc_aix_si (dest));
11238 else
11239 insn = emit_insn (gen_load_toc_aix_di (dest));
11240 if (fromprolog)
11241 rs6000_maybe_dead (insn);
11243 else
11244 abort ();
11247 /* Emit instructions to restore the link register after determining where
11248 its value has been stored. */
11250 void
11251 rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
11253 rs6000_stack_t *info = rs6000_stack_info ();
11254 rtx operands[2];
11256 operands[0] = source;
11257 operands[1] = scratch;
11259 if (info->lr_save_p)
11261 rtx frame_rtx = stack_pointer_rtx;
11262 HOST_WIDE_INT sp_offset = 0;
11263 rtx tmp;
11265 if (frame_pointer_needed
11266 || current_function_calls_alloca
11267 || info->total_size > 32767)
11269 emit_move_insn (operands[1], gen_rtx_MEM (Pmode, frame_rtx));
11270 frame_rtx = operands[1];
11272 else if (info->push_p)
11273 sp_offset = info->total_size;
11275 tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
11276 tmp = gen_rtx_MEM (Pmode, tmp);
11277 emit_move_insn (tmp, operands[0]);
11279 else
11280 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM), operands[0]);
11283 static GTY(()) int set = -1;
11285 int
11286 get_TOC_alias_set (void)
11288 if (set == -1)
11289 set = new_alias_set ();
11290 return set;
/* This returns nonzero if the current function uses the TOC.  This is
   determined by the presence of (use (unspec ... UNSPEC_TOC)), which
   is generated by the ABI_V4 load_toc_* patterns.  */
#if TARGET_ELF
static int
uses_TOC (void)
{
  rtx insn;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      rtx pat;
      int i;

      if (!INSN_P (insn))
	continue;

      /* The TOC marker only ever appears inside a PARALLEL.  */
      pat = PATTERN (insn);
      if (GET_CODE (pat) != PARALLEL)
	continue;

      for (i = 0; i < XVECLEN (pat, 0); i++)
	{
	  rtx sub = XVECEXP (pat, 0, i);

	  if (GET_CODE (sub) == USE)
	    {
	      sub = XEXP (sub, 0);
	      if (GET_CODE (sub) == UNSPEC
		  && XINT (sub, 1) == UNSPEC_TOC)
		return 1;
	    }
	}
    }

  return 0;
}
#endif
11326 create_TOC_reference (rtx symbol)
11328 return gen_rtx_PLUS (Pmode,
11329 gen_rtx_REG (Pmode, TOC_REGISTER),
11330 gen_rtx_CONST (Pmode,
11331 gen_rtx_MINUS (Pmode, symbol,
11332 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
11335 /* If _Unwind_* has been called from within the same module,
11336 toc register is not guaranteed to be saved to 40(1) on function
11337 entry. Save it there in that case. */
11339 void
11340 rs6000_aix_emit_builtin_unwind_init (void)
11342 rtx mem;
11343 rtx stack_top = gen_reg_rtx (Pmode);
11344 rtx opcode_addr = gen_reg_rtx (Pmode);
11345 rtx opcode = gen_reg_rtx (SImode);
11346 rtx tocompare = gen_reg_rtx (SImode);
11347 rtx no_toc_save_needed = gen_label_rtx ();
11349 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
11350 emit_move_insn (stack_top, mem);
11352 mem = gen_rtx_MEM (Pmode,
11353 gen_rtx_PLUS (Pmode, stack_top,
11354 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
11355 emit_move_insn (opcode_addr, mem);
11356 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
11357 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
11358 : 0xE8410028, SImode));
11360 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
11361 SImode, NULL_RTX, NULL_RTX,
11362 no_toc_save_needed);
11364 mem = gen_rtx_MEM (Pmode,
11365 gen_rtx_PLUS (Pmode, stack_top,
11366 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
11367 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
11368 emit_label (no_toc_save_needed);
11371 /* This ties together stack memory (MEM with an alias set of
11372 rs6000_sr_alias_set) and the change to the stack pointer. */
11374 static void
11375 rs6000_emit_stack_tie (void)
11377 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
11379 set_mem_alias_set (mem, rs6000_sr_alias_set);
11380 emit_insn (gen_stack_tie (mem));
11383 /* Emit the correct code for allocating stack space, as insns.
11384 If COPY_R12, make sure a copy of the old frame is left in r12.
11385 The generated code may use hard register 0 as a temporary. */
11387 static void
11388 rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
11390 rtx insn;
11391 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
11392 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
11393 rtx todec = GEN_INT (-size);
11395 if (current_function_limit_stack)
11397 if (REG_P (stack_limit_rtx)
11398 && REGNO (stack_limit_rtx) > 1
11399 && REGNO (stack_limit_rtx) <= 31)
11401 emit_insn (TARGET_32BIT
11402 ? gen_addsi3 (tmp_reg,
11403 stack_limit_rtx,
11404 GEN_INT (size))
11405 : gen_adddi3 (tmp_reg,
11406 stack_limit_rtx,
11407 GEN_INT (size)));
11409 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
11410 const0_rtx));
11412 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
11413 && TARGET_32BIT
11414 && DEFAULT_ABI == ABI_V4)
11416 rtx toload = gen_rtx_CONST (VOIDmode,
11417 gen_rtx_PLUS (Pmode,
11418 stack_limit_rtx,
11419 GEN_INT (size)));
11421 emit_insn (gen_elf_high (tmp_reg, toload));
11422 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
11423 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
11424 const0_rtx));
11426 else
11427 warning ("stack limit expression is not supported");
11430 if (copy_r12 || ! TARGET_UPDATE)
11431 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
11433 if (TARGET_UPDATE)
11435 if (size > 32767)
11437 /* Need a note here so that try_split doesn't get confused. */
11438 if (get_last_insn() == NULL_RTX)
11439 emit_note (NOTE_INSN_DELETED);
11440 insn = emit_move_insn (tmp_reg, todec);
11441 try_split (PATTERN (insn), insn, 0);
11442 todec = tmp_reg;
11445 insn = emit_insn (TARGET_32BIT
11446 ? gen_movsi_update (stack_reg, stack_reg,
11447 todec, stack_reg)
11448 : gen_movdi_update (stack_reg, stack_reg,
11449 todec, stack_reg));
11451 else
11453 insn = emit_insn (TARGET_32BIT
11454 ? gen_addsi3 (stack_reg, stack_reg, todec)
11455 : gen_adddi3 (stack_reg, stack_reg, todec));
11456 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
11457 gen_rtx_REG (Pmode, 12));
11460 RTX_FRAME_RELATED_P (insn) = 1;
11461 REG_NOTES (insn) =
11462 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
11463 gen_rtx_SET (VOIDmode, stack_reg,
11464 gen_rtx_PLUS (Pmode, stack_reg,
11465 GEN_INT (-size))),
11466 REG_NOTES (insn));
11469 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
11470 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
11471 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
11472 deduce these equivalences by itself so it wasn't necessary to hold
11473 its hand so much. */
11475 static void
11476 rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
11477 rtx reg2, rtx rreg)
11479 rtx real, temp;
11481 /* copy_rtx will not make unique copies of registers, so we need to
11482 ensure we don't have unwanted sharing here. */
11483 if (reg == reg2)
11484 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
11486 if (reg == rreg)
11487 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
11489 real = copy_rtx (PATTERN (insn));
11491 if (reg2 != NULL_RTX)
11492 real = replace_rtx (real, reg2, rreg);
11494 real = replace_rtx (real, reg,
11495 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
11496 STACK_POINTER_REGNUM),
11497 GEN_INT (val)));
11499 /* We expect that 'real' is either a SET or a PARALLEL containing
11500 SETs (and possibly other stuff). In a PARALLEL, all the SETs
11501 are important so they all have to be marked RTX_FRAME_RELATED_P. */
11503 if (GET_CODE (real) == SET)
11505 rtx set = real;
11507 temp = simplify_rtx (SET_SRC (set));
11508 if (temp)
11509 SET_SRC (set) = temp;
11510 temp = simplify_rtx (SET_DEST (set));
11511 if (temp)
11512 SET_DEST (set) = temp;
11513 if (GET_CODE (SET_DEST (set)) == MEM)
11515 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
11516 if (temp)
11517 XEXP (SET_DEST (set), 0) = temp;
11520 else if (GET_CODE (real) == PARALLEL)
11522 int i;
11523 for (i = 0; i < XVECLEN (real, 0); i++)
11524 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
11526 rtx set = XVECEXP (real, 0, i);
11528 temp = simplify_rtx (SET_SRC (set));
11529 if (temp)
11530 SET_SRC (set) = temp;
11531 temp = simplify_rtx (SET_DEST (set));
11532 if (temp)
11533 SET_DEST (set) = temp;
11534 if (GET_CODE (SET_DEST (set)) == MEM)
11536 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
11537 if (temp)
11538 XEXP (SET_DEST (set), 0) = temp;
11540 RTX_FRAME_RELATED_P (set) = 1;
11543 else
11544 abort ();
11546 if (TARGET_SPE)
11547 real = spe_synthesize_frame_save (real);
11549 RTX_FRAME_RELATED_P (insn) = 1;
11550 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
11551 real,
11552 REG_NOTES (insn));
11555 /* Given an SPE frame note, return a PARALLEL of SETs with the
11556 original note, plus a synthetic register save. */
11558 static rtx
11559 spe_synthesize_frame_save (rtx real)
11561 rtx synth, offset, reg, real2;
11563 if (GET_CODE (real) != SET
11564 || GET_MODE (SET_SRC (real)) != V2SImode)
11565 return real;
11567 /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
11568 frame related note. The parallel contains a set of the register
11569 being saved, and another set to a synthetic register (n+1200).
11570 This is so we can differentiate between 64-bit and 32-bit saves.
11571 Words cannot describe this nastiness. */
11573 if (GET_CODE (SET_DEST (real)) != MEM
11574 || GET_CODE (XEXP (SET_DEST (real), 0)) != PLUS
11575 || GET_CODE (SET_SRC (real)) != REG)
11576 abort ();
11578 /* Transform:
11579 (set (mem (plus (reg x) (const y)))
11580 (reg z))
11581 into:
11582 (set (mem (plus (reg x) (const y+4)))
11583 (reg z+1200))
11586 real2 = copy_rtx (real);
11587 PUT_MODE (SET_DEST (real2), SImode);
11588 reg = SET_SRC (real2);
11589 real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
11590 synth = copy_rtx (real2);
11592 if (BYTES_BIG_ENDIAN)
11594 offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
11595 real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
11598 reg = SET_SRC (synth);
11600 synth = replace_rtx (synth, reg,
11601 gen_rtx_REG (SImode, REGNO (reg) + 1200));
11603 offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
11604 synth = replace_rtx (synth, offset,
11605 GEN_INT (INTVAL (offset)
11606 + (BYTES_BIG_ENDIAN ? 0 : 4)));
11608 RTX_FRAME_RELATED_P (synth) = 1;
11609 RTX_FRAME_RELATED_P (real2) = 1;
11610 if (BYTES_BIG_ENDIAN)
11611 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
11612 else
11613 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));
11615 return real;
11618 /* Returns an insn that has a vrsave set operation with the
11619 appropriate CLOBBERs. */
11621 static rtx
11622 generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
11624 int nclobs, i;
11625 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
11626 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
11628 clobs[0]
11629 = gen_rtx_SET (VOIDmode,
11630 vrsave,
11631 gen_rtx_UNSPEC_VOLATILE (SImode,
11632 gen_rtvec (2, reg, vrsave),
11633 30));
11635 nclobs = 1;
11637 /* We need to clobber the registers in the mask so the scheduler
11638 does not move sets to VRSAVE before sets of AltiVec registers.
11640 However, if the function receives nonlocal gotos, reload will set
11641 all call saved registers live. We will end up with:
11643 (set (reg 999) (mem))
11644 (parallel [ (set (reg vrsave) (unspec blah))
11645 (clobber (reg 999))])
11647 The clobber will cause the store into reg 999 to be dead, and
11648 flow will attempt to delete an epilogue insn. In this case, we
11649 need an unspec use/set of the register. */
11651 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
11652 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
11654 if (!epiloguep || call_used_regs [i])
11655 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
11656 gen_rtx_REG (V4SImode, i));
11657 else
11659 rtx reg = gen_rtx_REG (V4SImode, i);
11661 clobs[nclobs++]
11662 = gen_rtx_SET (VOIDmode,
11663 reg,
11664 gen_rtx_UNSPEC (V4SImode,
11665 gen_rtvec (1, reg), 27));
11669 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
11671 for (i = 0; i < nclobs; ++i)
11672 XVECEXP (insn, 0, i) = clobs[i];
11674 return insn;
11677 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
11678 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
11680 static void
11681 emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
11682 unsigned int regno, int offset, HOST_WIDE_INT total_size)
11684 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
11685 rtx replacea, replaceb;
11687 int_rtx = GEN_INT (offset);
11689 /* Some cases that need register indexed addressing. */
11690 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
11691 || (TARGET_SPE_ABI
11692 && SPE_VECTOR_MODE (mode)
11693 && !SPE_CONST_OFFSET_OK (offset)))
11695 /* Whomever calls us must make sure r11 is available in the
11696 flow path of instructions in the prologue. */
11697 offset_rtx = gen_rtx_REG (Pmode, 11);
11698 emit_move_insn (offset_rtx, int_rtx);
11700 replacea = offset_rtx;
11701 replaceb = int_rtx;
11703 else
11705 offset_rtx = int_rtx;
11706 replacea = NULL_RTX;
11707 replaceb = NULL_RTX;
11710 reg = gen_rtx_REG (mode, regno);
11711 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
11712 mem = gen_rtx_MEM (mode, addr);
11713 set_mem_alias_set (mem, rs6000_sr_alias_set);
11715 insn = emit_move_insn (mem, reg);
11717 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
11720 /* Emit an offset memory reference suitable for a frame store, while
11721 converting to a valid addressing mode. */
11723 static rtx
11724 gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
11726 rtx int_rtx, offset_rtx;
11728 int_rtx = GEN_INT (offset);
11730 if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
11732 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
11733 emit_move_insn (offset_rtx, int_rtx);
11735 else
11736 offset_rtx = int_rtx;
11738 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
11741 /* Emit function prologue as insns. */
11743 void
11744 rs6000_emit_prologue (void)
11746 rs6000_stack_t *info = rs6000_stack_info ();
11747 enum machine_mode reg_mode = Pmode;
11748 int reg_size = UNITS_PER_WORD;
11749 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
11750 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
11751 rtx frame_reg_rtx = sp_reg_rtx;
11752 rtx cr_save_rtx = NULL_RTX;
11753 rtx insn;
11754 int saving_FPRs_inline;
11755 int using_store_multiple;
11756 HOST_WIDE_INT sp_offset = 0;
11758 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11760 reg_mode = V2SImode;
11761 reg_size = 8;
11764 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
11765 && (!TARGET_SPE_ABI
11766 || info->spe_64bit_regs_used == 0)
11767 && info->first_gp_reg_save < 31);
11768 saving_FPRs_inline = (info->first_fp_reg_save == 64
11769 || FP_SAVE_INLINE (info->first_fp_reg_save)
11770 || current_function_calls_eh_return
11771 || cfun->machine->ra_need_lr);
11773 /* For V.4, update stack before we do any saving and set back pointer. */
11774 if (info->push_p
11775 && (DEFAULT_ABI == ABI_V4
11776 || current_function_calls_eh_return))
11778 if (info->total_size < 32767)
11779 sp_offset = info->total_size;
11780 else
11781 frame_reg_rtx = frame_ptr_rtx;
11782 rs6000_emit_allocate_stack (info->total_size,
11783 (frame_reg_rtx != sp_reg_rtx
11784 && (info->cr_save_p
11785 || info->lr_save_p
11786 || info->first_fp_reg_save < 64
11787 || info->first_gp_reg_save < 32
11788 )));
11789 if (frame_reg_rtx != sp_reg_rtx)
11790 rs6000_emit_stack_tie ();
11793 /* Save AltiVec registers if needed. */
11794 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
11796 int i;
11798 /* There should be a non inline version of this, for when we
11799 are saving lots of vector registers. */
11800 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
11801 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
11803 rtx areg, savereg, mem;
11804 int offset;
11806 offset = info->altivec_save_offset + sp_offset
11807 + 16 * (i - info->first_altivec_reg_save);
11809 savereg = gen_rtx_REG (V4SImode, i);
11811 areg = gen_rtx_REG (Pmode, 0);
11812 emit_move_insn (areg, GEN_INT (offset));
11814 /* AltiVec addressing mode is [reg+reg]. */
11815 mem = gen_rtx_MEM (V4SImode,
11816 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
11818 set_mem_alias_set (mem, rs6000_sr_alias_set);
11820 insn = emit_move_insn (mem, savereg);
11822 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11823 areg, GEN_INT (offset));
11827 /* VRSAVE is a bit vector representing which AltiVec registers
11828 are used. The OS uses this to determine which vector
11829 registers to save on a context switch. We need to save
11830 VRSAVE on the stack frame, add whatever AltiVec registers we
11831 used in this function, and do the corresponding magic in the
11832 epilogue. */
11834 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
11835 && info->vrsave_mask != 0)
11837 rtx reg, mem, vrsave;
11838 int offset;
11840 /* Get VRSAVE onto a GPR. */
11841 reg = gen_rtx_REG (SImode, 12);
11842 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
11843 if (TARGET_MACHO)
11844 emit_insn (gen_get_vrsave_internal (reg));
11845 else
11846 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
11848 /* Save VRSAVE. */
11849 offset = info->vrsave_save_offset + sp_offset;
11851 = gen_rtx_MEM (SImode,
11852 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
11853 set_mem_alias_set (mem, rs6000_sr_alias_set);
11854 insn = emit_move_insn (mem, reg);
11856 /* Include the registers in the mask. */
11857 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
11859 insn = emit_insn (generate_set_vrsave (reg, info, 0));
11862 /* If we use the link register, get it into r0. */
11863 if (info->lr_save_p)
11864 emit_move_insn (gen_rtx_REG (Pmode, 0),
11865 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
11867 /* If we need to save CR, put it into r12. */
11868 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
11870 cr_save_rtx = gen_rtx_REG (SImode, 12);
11871 emit_insn (gen_movesi_from_cr (cr_save_rtx));
11874 /* Do any required saving of fpr's. If only one or two to save, do
11875 it ourselves. Otherwise, call function. */
11876 if (saving_FPRs_inline)
11878 int i;
11879 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11880 if ((regs_ever_live[info->first_fp_reg_save+i]
11881 && ! call_used_regs[info->first_fp_reg_save+i]))
11882 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
11883 info->first_fp_reg_save + i,
11884 info->fp_save_offset + sp_offset + 8 * i,
11885 info->total_size);
11887 else if (info->first_fp_reg_save != 64)
11889 int i;
11890 char rname[30];
11891 const char *alloc_rname;
11892 rtvec p;
11893 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
11895 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
11896 gen_rtx_REG (Pmode,
11897 LINK_REGISTER_REGNUM));
11898 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
11899 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
11900 alloc_rname = ggc_strdup (rname);
11901 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
11902 gen_rtx_SYMBOL_REF (Pmode,
11903 alloc_rname));
11904 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11906 rtx addr, reg, mem;
11907 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
11908 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11909 GEN_INT (info->fp_save_offset
11910 + sp_offset + 8*i));
11911 mem = gen_rtx_MEM (DFmode, addr);
11912 set_mem_alias_set (mem, rs6000_sr_alias_set);
11914 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
11916 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11917 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11918 NULL_RTX, NULL_RTX);
11921 /* Save GPRs. This is done as a PARALLEL if we are using
11922 the store-multiple instructions. */
11923 if (using_store_multiple)
11925 rtvec p;
11926 int i;
11927 p = rtvec_alloc (32 - info->first_gp_reg_save);
11928 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11930 rtx addr, reg, mem;
11931 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
11932 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11933 GEN_INT (info->gp_save_offset
11934 + sp_offset
11935 + reg_size * i));
11936 mem = gen_rtx_MEM (reg_mode, addr);
11937 set_mem_alias_set (mem, rs6000_sr_alias_set);
11939 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
11941 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11942 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11943 NULL_RTX, NULL_RTX);
11945 else
11947 int i;
11948 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11949 if ((regs_ever_live[info->first_gp_reg_save+i]
11950 && ! call_used_regs[info->first_gp_reg_save+i])
11951 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
11952 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
11953 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
11955 rtx addr, reg, mem;
11956 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
11958 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11960 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
11961 rtx b;
11963 if (!SPE_CONST_OFFSET_OK (offset))
11965 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
11966 emit_move_insn (b, GEN_INT (offset));
11968 else
11969 b = GEN_INT (offset);
11971 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
11972 mem = gen_rtx_MEM (V2SImode, addr);
11973 set_mem_alias_set (mem, rs6000_sr_alias_set);
11974 insn = emit_move_insn (mem, reg);
11976 if (GET_CODE (b) == CONST_INT)
11977 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11978 NULL_RTX, NULL_RTX);
11979 else
11980 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11981 b, GEN_INT (offset));
11983 else
11985 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11986 GEN_INT (info->gp_save_offset
11987 + sp_offset
11988 + reg_size * i));
11989 mem = gen_rtx_MEM (reg_mode, addr);
11990 set_mem_alias_set (mem, rs6000_sr_alias_set);
11992 insn = emit_move_insn (mem, reg);
11993 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11994 NULL_RTX, NULL_RTX);
11999 /* ??? There's no need to emit actual instructions here, but it's the
12000 easiest way to get the frame unwind information emitted. */
12001 if (current_function_calls_eh_return)
12003 unsigned int i, regno;
12005 /* In AIX ABI we need to pretend we save r2 here. */
12006 if (TARGET_AIX)
12008 rtx addr, reg, mem;
12010 reg = gen_rtx_REG (reg_mode, 2);
12011 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12012 GEN_INT (sp_offset + 5 * reg_size));
12013 mem = gen_rtx_MEM (reg_mode, addr);
12014 set_mem_alias_set (mem, rs6000_sr_alias_set);
12016 insn = emit_move_insn (mem, reg);
12017 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12018 NULL_RTX, NULL_RTX);
12019 PATTERN (insn) = gen_blockage ();
12022 for (i = 0; ; ++i)
12024 regno = EH_RETURN_DATA_REGNO (i);
12025 if (regno == INVALID_REGNUM)
12026 break;
12028 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
12029 info->ehrd_offset + sp_offset
12030 + reg_size * (int) i,
12031 info->total_size);
12035 /* Save lr if we used it. */
12036 if (info->lr_save_p)
12038 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12039 GEN_INT (info->lr_save_offset + sp_offset));
12040 rtx reg = gen_rtx_REG (Pmode, 0);
12041 rtx mem = gen_rtx_MEM (Pmode, addr);
12042 /* This should not be of rs6000_sr_alias_set, because of
12043 __builtin_return_address. */
12045 insn = emit_move_insn (mem, reg);
12046 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12047 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
12050 /* Save CR if we use any that must be preserved. */
12051 if (info->cr_save_p)
12053 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12054 GEN_INT (info->cr_save_offset + sp_offset));
12055 rtx mem = gen_rtx_MEM (SImode, addr);
12057 set_mem_alias_set (mem, rs6000_sr_alias_set);
12059 /* If r12 was used to hold the original sp, copy cr into r0 now
12060 that it's free. */
12061 if (REGNO (frame_reg_rtx) == 12)
12063 cr_save_rtx = gen_rtx_REG (SImode, 0);
12064 emit_insn (gen_movesi_from_cr (cr_save_rtx));
12066 insn = emit_move_insn (mem, cr_save_rtx);
12068 /* Now, there's no way that dwarf2out_frame_debug_expr is going
12069 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
12070 But that's OK. All we have to do is specify that _one_ condition
12071 code register is saved in this stack slot. The thrower's epilogue
12072 will then restore all the call-saved registers.
12073 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
12074 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12075 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
12078 /* Update stack and set back pointer unless this is V.4,
12079 for which it was done previously. */
12080 if (info->push_p
12081 && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
12082 rs6000_emit_allocate_stack (info->total_size, FALSE);
12084 /* Set frame pointer, if needed. */
12085 if (frame_pointer_needed)
12087 insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
12088 sp_reg_rtx);
12089 RTX_FRAME_RELATED_P (insn) = 1;
12092 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
12093 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
12094 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
12095 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
12097 /* If emit_load_toc_table will use the link register, we need to save
12098 it. We use R12 for this purpose because emit_load_toc_table
12099 can use register 0. This allows us to use a plain 'blr' to return
12100 from the procedure more often. */
12101 int save_LR_around_toc_setup = (TARGET_ELF
12102 && DEFAULT_ABI != ABI_AIX
12103 && flag_pic
12104 && ! info->lr_save_p
12105 && EXIT_BLOCK_PTR->pred != NULL);
12106 if (save_LR_around_toc_setup)
12108 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
12109 rs6000_maybe_dead (emit_move_insn (frame_ptr_rtx, lr));
12110 rs6000_emit_load_toc_table (TRUE);
12111 rs6000_maybe_dead (emit_move_insn (lr, frame_ptr_rtx));
12113 else
12114 rs6000_emit_load_toc_table (TRUE);
12117 #if TARGET_MACHO
12118 if (DEFAULT_ABI == ABI_DARWIN
12119 && flag_pic && current_function_uses_pic_offset_table)
12121 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
12122 const char *picbase = machopic_function_base_name ();
12123 rtx src = gen_rtx_SYMBOL_REF (Pmode, picbase);
12125 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest, src)));
12127 rs6000_maybe_dead (
12128 emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
12129 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
12131 #endif
12134 /* Write function prologue. */
/* Implementation of the TARGET_ASM_FUNCTION_PROLOGUE hook: write any
   assembler text needed at the start of the current function to FILE.
   Normally the prologue insns themselves are emitted as RTL by
   rs6000_emit_prologue; this routine only outputs .extern directives
   for out-of-line save/restore helpers, and — when the machine
   description provides no "prologue" pattern (! HAVE_prologue) —
   generates and prints the prologue insns directly.
   SIZE is the frame size; unused here (we consult rs6000_stack_info
   instead).  */
12136 static void
12137 rs6000_output_function_prologue (FILE *file,
12138 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
12140 rs6000_stack_t *info = rs6000_stack_info ();
12142 if (TARGET_DEBUG_STACK)
12143 debug_stack_info (info);
12145 /* Write .extern for any function we will call to save and restore
12146 fp values.  Only needed when FP saves are done out of line
   (FP_SAVE_INLINE is false for this first-saved-FPR number).  */
12147 if (info->first_fp_reg_save < 64
12148 && !FP_SAVE_INLINE (info->first_fp_reg_save))
12149 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
12150 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
12151 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
12152 RESTORE_FP_SUFFIX);
12154 /* Write .extern for AIX common mode routines, if needed.
   common_mode_defined makes this a once-per-file emission.  */
12155 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
12157 fputs ("\t.extern __mulh\n", file);
12158 fputs ("\t.extern __mull\n", file);
12159 fputs ("\t.extern __divss\n", file);
12160 fputs ("\t.extern __divus\n", file);
12161 fputs ("\t.extern __quoss\n", file);
12162 fputs ("\t.extern __quous\n", file);
12163 common_mode_defined = 1;
/* Fallback path: no "prologue" insn pattern exists, so build the
   prologue RTL in a detached sequence and print it with final().  */
12166 if (! HAVE_prologue)
12168 start_sequence ();
12170 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
12171 the "toplevel" insn chain. */
12172 emit_note (NOTE_INSN_DELETED);
12173 rs6000_emit_prologue ();
12174 emit_note (NOTE_INSN_DELETED);
12176 /* Expand INSN_ADDRESSES so final() doesn't crash.  Addresses are
   only placeholders (4 bytes per insn); final() just needs them
   to exist.  */
12178 rtx insn;
12179 unsigned addr = 0;
12180 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
12182 INSN_ADDRESSES_NEW (insn, addr);
12183 addr += 4;
12187 if (TARGET_DEBUG_STACK)
12188 debug_rtx_list (get_insns (), 100);
12189 final (get_insns (), file, FALSE, FALSE);
12190 end_sequence ();
/* Advance to a fresh PIC label number for the next function.  */
12193 rs6000_pic_labelno++;
12196 /* Emit function epilogue as insns.
12198 At present, dwarf2out_frame_debug_expr doesn't understand
12199 register restores, so we don't bother setting RTX_FRAME_RELATED_P
12200 anywhere in the epilogue. Most of the insns below would in any case
12201 need special notes to explain where r11 is in relation to the stack. */
/* SIBCALL is nonzero when this epilogue precedes a sibling call; in
   that case no return insn is emitted (see the !sibcall block at the
   end) and FPR restores are forced inline, since the out-of-line
   restore routine would return to our caller.  */
12203 void
12204 rs6000_emit_epilogue (int sibcall)
12206 rs6000_stack_t *info;
12207 int restoring_FPRs_inline;
12208 int using_load_multiple;
12209 int using_mfcr_multiple;
12210 int use_backchain_to_restore_sp;
/* Offset from the register addressed by frame_reg_rtx to the bottom
   of the save area; nonzero only when sp is left unadjusted until
   after the restores.  */
12211 int sp_offset = 0;
12212 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
/* Base register used to address saved registers; starts as sp and
   may be redirected to r11 on the V.4 backchain path below.  */
12213 rtx frame_reg_rtx = sp_reg_rtx;
12214 enum machine_mode reg_mode = Pmode;
12215 int reg_size = UNITS_PER_WORD;
12216 int i;
12218 info = rs6000_stack_info ();
/* SPE saves GPRs as 64-bit V2SImode quantities.  */
12220 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
12222 reg_mode = V2SImode;
12223 reg_size = 8;
/* lmw is 32-bit-only and not usable for SPE's 64-bit GPR images.  */
12226 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
12227 && (!TARGET_SPE_ABI
12228 || info->spe_64bit_regs_used == 0)
12229 && info->first_gp_reg_save < 31);
12230 restoring_FPRs_inline = (sibcall
12231 || current_function_calls_eh_return
12232 || info->first_fp_reg_save == 64
12233 || FP_SAVE_INLINE (info->first_fp_reg_save));
12234 use_backchain_to_restore_sp = (frame_pointer_needed
12235 || current_function_calls_alloca
12236 || info->total_size > 32767);
/* Processors for which splitting the CR restore into several
   per-field moves from r12 is preferable (or we are optimizing for
   size).  */
12237 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
12238 || rs6000_cpu == PROCESSOR_PPC603
12239 || rs6000_cpu == PROCESSOR_PPC750
12240 || optimize_size);
12242 /* If we have a frame pointer, a call to alloca, or a large stack
12243 frame, restore the old stack pointer using the backchain. Otherwise,
12244 we know what size to update it with. */
12245 if (use_backchain_to_restore_sp)
12247 /* Under V.4, don't reset the stack pointer until after we're done
12248 loading the saved registers. */
12249 if (DEFAULT_ABI == ABI_V4)
12250 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
12252 emit_move_insn (frame_reg_rtx,
12253 gen_rtx_MEM (Pmode, sp_reg_rtx));
12256 else if (info->push_p)
/* V.4 and eh_return defer the sp bump until after the restores
   (see the stack-tie block near the end); others pop now.  */
12258 if (DEFAULT_ABI == ABI_V4
12259 || current_function_calls_eh_return)
12260 sp_offset = info->total_size;
12261 else
12263 emit_insn (TARGET_32BIT
12264 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
12265 GEN_INT (info->total_size))
12266 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
12267 GEN_INT (info->total_size)));
12271 /* Restore AltiVec registers if needed. */
12272 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
12274 int i;
12276 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
12277 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
12279 rtx addr, areg, mem;
/* Load the slot offset into r0, 16 bytes per vector register.  */
12281 areg = gen_rtx_REG (Pmode, 0);
12282 emit_move_insn
12283 (areg, GEN_INT (info->altivec_save_offset
12284 + sp_offset
12285 + 16 * (i - info->first_altivec_reg_save)));
12287 /* AltiVec addressing mode is [reg+reg]. */
12288 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
12289 mem = gen_rtx_MEM (V4SImode, addr);
12290 set_mem_alias_set (mem, rs6000_sr_alias_set);
12292 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
12296 /* Restore VRSAVE if needed.  Its saved value is loaded through
   r12 and moved to the VRSAVE special register.  */
12297 if (TARGET_ALTIVEC_ABI && TARGET_ALTIVEC_VRSAVE
12298 && info->vrsave_mask != 0)
12300 rtx addr, mem, reg;
12302 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12303 GEN_INT (info->vrsave_save_offset + sp_offset));
12304 mem = gen_rtx_MEM (SImode, addr);
12305 set_mem_alias_set (mem, rs6000_sr_alias_set);
12306 reg = gen_rtx_REG (SImode, 12);
12307 emit_move_insn (reg, mem);
12309 emit_insn (generate_set_vrsave (reg, info, 1));
12312 /* Get the old lr if we saved it.  Loaded into r0 first; moved to
   LR below to overlap with the remaining restores.  */
12313 if (info->lr_save_p)
12315 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
12316 info->lr_save_offset + sp_offset);
12318 set_mem_alias_set (mem, rs6000_sr_alias_set);
12320 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
12323 /* Get the old cr if we saved it.  Loaded into r12; moved to the
   CR fields further below.  */
12324 if (info->cr_save_p)
12326 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12327 GEN_INT (info->cr_save_offset + sp_offset));
12328 rtx mem = gen_rtx_MEM (SImode, addr);
12330 set_mem_alias_set (mem, rs6000_sr_alias_set);
12332 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
12335 /* Set LR here to try to overlap restores below. */
12336 if (info->lr_save_p)
12337 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
12338 gen_rtx_REG (Pmode, 0));
12340 /* Load exception handler data registers, if needed. */
12341 if (current_function_calls_eh_return)
12343 unsigned int i, regno;
/* Mirror the prologue: on AIX, r2 was "saved" at sp+5*reg_size.  */
12345 if (TARGET_AIX)
12347 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12348 GEN_INT (sp_offset + 5 * reg_size));
12349 rtx mem = gen_rtx_MEM (reg_mode, addr);
12351 set_mem_alias_set (mem, rs6000_sr_alias_set);
12353 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
/* Reload each EH data register from the ehrd save area; the list
   is terminated by INVALID_REGNUM.  */
12356 for (i = 0; ; ++i)
12358 rtx mem;
12360 regno = EH_RETURN_DATA_REGNO (i);
12361 if (regno == INVALID_REGNUM)
12362 break;
12364 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
12365 info->ehrd_offset + sp_offset
12366 + reg_size * (int) i);
12367 set_mem_alias_set (mem, rs6000_sr_alias_set);
12369 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
12373 /* Restore GPRs. This is done as a PARALLEL if we are using
12374 the load-multiple instructions. */
12375 if (using_load_multiple)
12377 rtvec p;
12378 p = rtvec_alloc (32 - info->first_gp_reg_save);
12379 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
12381 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12382 GEN_INT (info->gp_save_offset
12383 + sp_offset
12384 + reg_size * i));
12385 rtx mem = gen_rtx_MEM (reg_mode, addr);
12387 set_mem_alias_set (mem, rs6000_sr_alias_set);
12389 RTVEC_ELT (p, i) =
12390 gen_rtx_SET (VOIDmode,
12391 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
12392 mem);
12394 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
/* Otherwise restore live call-saved GPRs one at a time; the PIC
   offset table register is restored too when PIC requires it.  */
12396 else
12397 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
12398 if ((regs_ever_live[info->first_gp_reg_save+i]
12399 && ! call_used_regs[info->first_gp_reg_save+i])
12400 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
12401 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
12402 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
12404 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12405 GEN_INT (info->gp_save_offset
12406 + sp_offset
12407 + reg_size * i));
12408 rtx mem = gen_rtx_MEM (reg_mode, addr);
12410 /* Restore 64-bit quantities for SPE.  SPE load offsets have a
   limited range, so spill the offset to FIXED_SCRATCH when it
   does not fit in the instruction's displacement field.  */
12411 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
12413 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
12414 rtx b;
12416 if (!SPE_CONST_OFFSET_OK (offset))
12418 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
12419 emit_move_insn (b, GEN_INT (offset));
12421 else
12422 b = GEN_INT (offset);
12424 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
12425 mem = gen_rtx_MEM (V2SImode, addr);
12428 set_mem_alias_set (mem, rs6000_sr_alias_set);
12430 emit_move_insn (gen_rtx_REG (reg_mode,
12431 info->first_gp_reg_save + i), mem);
12434 /* Restore fpr's if we need to do it without calling a function. */
12435 if (restoring_FPRs_inline)
12436 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
12437 if ((regs_ever_live[info->first_fp_reg_save+i]
12438 && ! call_used_regs[info->first_fp_reg_save+i]))
12440 rtx addr, mem;
12441 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12442 GEN_INT (info->fp_save_offset
12443 + sp_offset
12444 + 8 * i));
12445 mem = gen_rtx_MEM (DFmode, addr);
12446 set_mem_alias_set (mem, rs6000_sr_alias_set);
12448 emit_move_insn (gen_rtx_REG (DFmode,
12449 info->first_fp_reg_save + i),
12450 mem);
12453 /* If we saved cr, restore it here. Just those that were used. */
12454 if (info->cr_save_p)
12456 rtx r12_rtx = gen_rtx_REG (SImode, 12);
12457 int count = 0;
/* Count live call-saved CR fields; cr_save_p implies at least one,
   hence the abort.  */
12459 if (using_mfcr_multiple)
12461 for (i = 0; i < 8; i++)
12462 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
12463 count++;
12464 if (count == 0)
12465 abort ();
/* Emit a single PARALLEL setting each live CR field from r12 via
   UNSPEC_MOVESI_TO_CR; the mask 1 << (7-i) selects field i.  */
12468 if (using_mfcr_multiple && count > 1)
12470 rtvec p;
12471 int ndx;
12473 p = rtvec_alloc (count);
12475 ndx = 0;
12476 for (i = 0; i < 8; i++)
12477 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
12479 rtvec r = rtvec_alloc (2);
12480 RTVEC_ELT (r, 0) = r12_rtx;
12481 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
12482 RTVEC_ELT (p, ndx) =
12483 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
12484 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
12485 ndx++;
12487 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
12488 if (ndx != count)
12489 abort ();
/* Otherwise move r12 into each live CR field individually.  */
12491 else
12492 for (i = 0; i < 8; i++)
12493 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
12495 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
12496 CR0_REGNO+i),
12497 r12_rtx));
12501 /* If this is V.4, unwind the stack pointer after all of the loads
12502 have been done. We need to emit a block here so that sched
12503 doesn't decide to move the sp change before the register restores
12504 (which may not have any obvious dependency on the stack). This
12505 doesn't hurt performance, because there is no scheduling that can
12506 be done after this point. */
12507 if (DEFAULT_ABI == ABI_V4
12508 || current_function_calls_eh_return)
12510 if (frame_reg_rtx != sp_reg_rtx)
12511 rs6000_emit_stack_tie ();
12513 if (use_backchain_to_restore_sp)
12515 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
12517 else if (sp_offset != 0)
12519 emit_insn (TARGET_32BIT
12520 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
12521 GEN_INT (sp_offset))
12522 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
12523 GEN_INT (sp_offset)));
/* For eh_return, apply the extra stack adjustment computed by the
   EH machinery (EH_RETURN_STACKADJ_RTX).  */
12527 if (current_function_calls_eh_return)
12529 rtx sa = EH_RETURN_STACKADJ_RTX;
12530 emit_insn (TARGET_32BIT
12531 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
12532 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
/* Emit the return.  The PARALLEL carries a USE of LR and, when FPRs
   are restored out of line, a USE of the restore routine's symbol
   plus the FPR restores it performs on our behalf.  */
12535 if (!sibcall)
12537 rtvec p;
12538 if (! restoring_FPRs_inline)
12539 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
12540 else
12541 p = rtvec_alloc (2);
12543 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
12544 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
12545 gen_rtx_REG (Pmode,
12546 LINK_REGISTER_REGNUM));
12548 /* If we have to restore more than two FP registers, branch to the
12549 restore function. It will return to our caller. */
12550 if (! restoring_FPRs_inline)
12552 int i;
12553 char rname[30];
12554 const char *alloc_rname;
12556 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
12557 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
12558 alloc_rname = ggc_strdup (rname);
12559 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
12560 gen_rtx_SYMBOL_REF (Pmode,
12561 alloc_rname));
/* Describe the FPR loads done by the out-of-line routine so the
   RTL correctly models their effect.  Addressed off sp, which has
   already been restored above.  */
12563 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
12565 rtx addr, mem;
12566 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
12567 GEN_INT (info->fp_save_offset + 8*i));
12568 mem = gen_rtx_MEM (DFmode, addr);
12569 set_mem_alias_set (mem, rs6000_sr_alias_set);
12571 RTVEC_ELT (p, i+3) =
12572 gen_rtx_SET (VOIDmode,
12573 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
12574 mem);
12578 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
12582 /* Write function epilogue. */
/* Implementation of the TARGET_ASM_FUNCTION_EPILOGUE hook: write any
   assembler text needed at the end of the current function to FILE.
   When there is no "epilogue" insn pattern, the epilogue insns are
   generated and printed here; then (Mach-O) a trailing nop may be
   emitted, and finally the AIX traceback table is written.
   SIZE is the frame size; unused (rs6000_stack_info is consulted
   instead).  */
12584 static void
12585 rs6000_output_function_epilogue (FILE *file,
12586 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
12588 rs6000_stack_t *info = rs6000_stack_info ();
12590 if (! HAVE_epilogue)
12592 rtx insn = get_last_insn ();
12593 /* If the last insn was a BARRIER, we don't have to write anything except
12594 the trace table. */
12595 if (GET_CODE (insn) == NOTE)
12596 insn = prev_nonnote_insn (insn);
12597 if (insn == 0 || GET_CODE (insn) != BARRIER)
12599 /* This is slightly ugly, but at least we don't have two
12600 copies of the epilogue-emitting code. */
12601 start_sequence ();
12603 /* A NOTE_INSN_DELETED is supposed to be at the start
12604 and end of the "toplevel" insn chain. */
12605 emit_note (NOTE_INSN_DELETED);
12606 rs6000_emit_epilogue (FALSE);
12607 emit_note (NOTE_INSN_DELETED);
12609 /* Expand INSN_ADDRESSES so final() doesn't crash.  Placeholder
   addresses only, 4 bytes per insn.  */
12611 rtx insn;
12612 unsigned addr = 0;
12613 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
12615 INSN_ADDRESSES_NEW (insn, addr);
12616 addr += 4;
12620 if (TARGET_DEBUG_STACK)
12621 debug_rtx_list (get_insns (), 100);
12622 final (get_insns (), file, FALSE, FALSE);
12623 end_sequence ();
12627 #if TARGET_MACHO
12628 macho_branch_islands ();
12629 /* Mach-O doesn't support labels at the end of objects, so if
12630 it looks like we might want one, insert a NOP.  Walk backwards
   past deleted-label notes to see whether a label ends the body.  */
12632 rtx insn = get_last_insn ();
12633 while (insn
12634 && NOTE_P (insn)
12635 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED_LABEL)
12636 insn = PREV_INSN (insn);
12637 if (insn
12638 && (LABEL_P (insn)
12639 || (NOTE_P (insn)
12640 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)))
12641 fputs ("\tnop\n", file);
12643 #endif
12645 /* Output a traceback table here. See /usr/include/sys/debug.h for info
12646 on its format.
12648 We don't output a traceback table if -finhibit-size-directive was
12649 used. The documentation for -finhibit-size-directive reads
12650 ``don't output a @code{.size} assembler directive, or anything
12651 else that would cause trouble if the function is split in the
12652 middle, and the two halves are placed at locations far apart in
12653 memory.'' The traceback table has this property, since it
12654 includes the offset from the start of the function to the
12655 traceback table itself.
12657 System V.4 Powerpc's (and the embedded ABI derived from it) use a
12658 different traceback table. */
12659 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
12660 && rs6000_traceback != traceback_none)
12662 const char *fname = NULL;
12663 const char *language_string = lang_hooks.name;
12664 int fixed_parms = 0, float_parms = 0, parm_info = 0;
12665 int i;
/* Nonzero when the optional (variable-length) tbtab fields are
   emitted; decided by -mtraceback= or, by default, by opt/ELF.  */
12666 int optional_tbtab;
12668 if (rs6000_traceback == traceback_full)
12669 optional_tbtab = 1;
12670 else if (rs6000_traceback == traceback_part)
12671 optional_tbtab = 0;
12672 else
12673 optional_tbtab = !optimize_size && !TARGET_ELF;
12675 if (optional_tbtab)
12677 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
12678 while (*fname == '.') /* V.4 encodes . in the name */
12679 fname++;
12681 /* Need label immediately before tbtab, so we can compute
12682 its offset from the function start. */
12683 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
12684 ASM_OUTPUT_LABEL (file, fname);
12687 /* The .tbtab pseudo-op can only be used for the first eight
12688 expressions, since it can't handle the possibly variable
12689 length fields that follow. However, if you omit the optional
12690 fields, the assembler outputs zeros for all optional fields
12691 anyways, giving each variable length field is minimum length
12692 (as defined in sys/debug.h). Thus we can not use the .tbtab
12693 pseudo-op at all. */
12695 /* An all-zero word flags the start of the tbtab, for debuggers
12696 that have to find it by searching forward from the entry
12697 point or from the current pc. */
12698 fputs ("\t.long 0\n", file);
12700 /* Tbtab format type. Use format type 0. */
12701 fputs ("\t.byte 0,", file);
12703 /* Language type. Unfortunately, there does not seem to be any
12704 official way to discover the language being compiled, so we
12705 use language_string.
12706 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
12707 Java is 13. Objective-C is 14. */
12708 if (! strcmp (language_string, "GNU C"))
12709 i = 0;
12710 else if (! strcmp (language_string, "GNU F77"))
12711 i = 1;
12712 else if (! strcmp (language_string, "GNU Pascal"))
12713 i = 2;
12714 else if (! strcmp (language_string, "GNU Ada"))
12715 i = 3;
12716 else if (! strcmp (language_string, "GNU C++"))
12717 i = 9;
12718 else if (! strcmp (language_string, "GNU Java"))
12719 i = 13;
12720 else if (! strcmp (language_string, "GNU Objective-C"))
12721 i = 14;
12722 else
12723 abort ();
12724 fprintf (file, "%d,", i);
12726 /* 8 single bit fields: global linkage (not set for C extern linkage,
12727 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
12728 from start of procedure stored in tbtab, internal function, function
12729 has controlled storage, function has no toc, function uses fp,
12730 function logs/aborts fp operations. */
12731 /* Assume that fp operations are used if any fp reg must be saved. */
12732 fprintf (file, "%d,",
12733 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
12735 /* 6 bitfields: function is interrupt handler, name present in
12736 proc table, function calls alloca, on condition directives
12737 (controls stack walks, 3 bits), saves condition reg, saves
12738 link reg. */
12739 /* The `function calls alloca' bit seems to be set whenever reg 31 is
12740 set up as a frame pointer, even when there is no alloca call. */
12741 fprintf (file, "%d,",
12742 ((optional_tbtab << 6)
12743 | ((optional_tbtab & frame_pointer_needed) << 5)
12744 | (info->cr_save_p << 1)
12745 | (info->lr_save_p)));
12747 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
12748 (6 bits). */
12749 fprintf (file, "%d,",
12750 (info->push_p << 7) | (64 - info->first_fp_reg_save));
12752 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
12753 fprintf (file, "%d,", (32 - first_reg_to_save ()));
12755 if (optional_tbtab)
12757 /* Compute the parameter info from the function decl argument
12758 list. */
12759 tree decl;
/* parm_info packs 2-bit codes (float) and 1-bit codes (fixed)
   from bit 31 downward, one entry per register parameter.  */
12760 int next_parm_info_bit = 31;
12762 for (decl = DECL_ARGUMENTS (current_function_decl);
12763 decl; decl = TREE_CHAIN (decl))
12765 rtx parameter = DECL_INCOMING_RTL (decl);
12766 enum machine_mode mode = GET_MODE (parameter);
12768 if (GET_CODE (parameter) == REG)
12770 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
12772 int bits;
12774 float_parms++;
12776 if (mode == SFmode)
12777 bits = 0x2;
12778 else if (mode == DFmode || mode == TFmode)
12779 bits = 0x3;
12780 else
12781 abort ();
12783 /* If only one bit will fit, don't or in this entry. */
12784 if (next_parm_info_bit > 0)
12785 parm_info |= (bits << (next_parm_info_bit - 1));
12786 next_parm_info_bit -= 2;
12788 else
12790 fixed_parms += ((GET_MODE_SIZE (mode)
12791 + (UNITS_PER_WORD - 1))
12792 / UNITS_PER_WORD);
12793 next_parm_info_bit -= 1;
12799 /* Number of fixed point parameters. */
12800 /* This is actually the number of words of fixed point parameters; thus
12801 an 8 byte struct counts as 2; and thus the maximum value is 8. */
12802 fprintf (file, "%d,", fixed_parms);
12804 /* 2 bitfields: number of floating point parameters (7 bits), parameters
12805 all on stack. */
12806 /* This is actually the number of fp registers that hold parameters;
12807 and thus the maximum value is 13. */
12808 /* Set parameters on stack bit if parameters are not in their original
12809 registers, regardless of whether they are on the stack? Xlc
12810 seems to set the bit when not optimizing. */
12811 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
12813 if (! optional_tbtab)
12814 return;
12816 /* Optional fields follow. Some are variable length. */
12818 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
12819 11 double float. */
12820 /* There is an entry for each parameter in a register, in the order that
12821 they occur in the parameter list. Any intervening arguments on the
12822 stack are ignored. If the list overflows a long (max possible length
12823 34 bits) then completely leave off all elements that don't fit. */
12824 /* Only emit this long if there was at least one parameter. */
12825 if (fixed_parms || float_parms)
12826 fprintf (file, "\t.long %d\n", parm_info)
12828 /* Offset from start of code to tb table.  Emitted as
   "LT<name> - ." so the assembler computes the displacement.  */
12829 fputs ("\t.long ", file);
12830 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
12831 #if TARGET_AIX
12832 RS6000_OUTPUT_BASENAME (file, fname);
12833 #else
12834 assemble_name (file, fname);
12835 #endif
12836 fputs ("-.", file);
12837 #if TARGET_AIX
12838 RS6000_OUTPUT_BASENAME (file, fname);
12839 #else
12840 assemble_name (file, fname);
12841 #endif
12842 putc ('\n', file);
12844 /* Interrupt handler mask. */
12845 /* Omit this long, since we never set the interrupt handler bit
12846 above. */
12848 /* Number of CTL (controlled storage) anchors. */
12849 /* Omit this long, since the has_ctl bit is never set above. */
12851 /* Displacement into stack of each CTL anchor. */
12852 /* Omit this list of longs, because there are no CTL anchors. */
12854 /* Length of function name. */
12855 if (*fname == '*')
12856 ++fname;
12857 fprintf (file, "\t.short %d\n", (int) strlen (fname));
12859 /* Function name. */
12860 assemble_string (fname, strlen (fname));
12862 /* Register for alloca automatic storage; this is always reg 31.
12863 Only emit this if the alloca bit was set above. */
12864 if (frame_pointer_needed)
12865 fputs ("\t.byte 31\n", file);
12867 fputs ("\t.align 2\n", file);
/* A C compound statement that outputs the assembler code for a thunk
   function, used to implement C++ virtual function calls with
   multiple inheritance.  The thunk acts as a wrapper around a virtual
   function, adjusting the implicit object parameter before handing
   control off to the real function.

   First, emit code to add the integer DELTA to the location that
   contains the incoming first argument.  Assume that this argument
   contains a pointer, and is the one used to pass the `this' pointer
   in C++.  This is the incoming argument *before* the function
   prologue, e.g. `%o0' on a sparc.  The addition must preserve the
   values of all other incoming arguments.

   After the addition, emit code to jump to FUNCTION, which is a
   `FUNCTION_DECL'.  This is a direct pure jump, not a call, and does
   not touch the return address.  Hence returning from FUNCTION will
   return to whoever called the current `thunk'.

   The effect must be as if FUNCTION had been called directly with the
   adjusted first argument.  This macro is responsible for emitting
   all of the code for a thunk function; output_function_prologue()
   and output_function_epilogue() are not invoked.

   The THUNK_FNDECL is redundant.  (DELTA and FUNCTION have already
   been extracted from it.)  It might possibly be useful on some
   targets, but probably not.

   If you do not define this macro, the target-independent code in the
   C++ frontend will generate a less efficient heavyweight thunk that
   calls FUNCTION instead of jumping to it.  The generic approach does
   not support varargs.  */

static void
rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
			HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
			tree function)
{
  /* `this' is a plain identifier here; this file is compiled as C.  */
  rtx this, insn, funexp;

  /* Pretend we are past reload so the RTL we emit below is final and
     no pseudos are created.  */
  reload_completed = 1;
  epilogue_completed = 1;
  no_new_pseudos = 1;

  /* Mark the end of the (empty) prologue.  */
  emit_note (NOTE_INSN_PROLOGUE_END);

  /* Find the "this" pointer.  If the function returns a structure,
     the structure return pointer is in r3.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
    this = gen_rtx_REG (Pmode, 4);
  else
    this = gen_rtx_REG (Pmode, 3);

  /* Apply the constant offset, if required.  */
  if (delta)
    {
      rtx delta_rtx = GEN_INT (delta);
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (this, this, delta_rtx)
		 : gen_adddi3 (this, this, delta_rtx));
    }

  /* Apply the offset from the vtable, if required.  */
  if (vcall_offset)
    {
      rtx vcall_offset_rtx = GEN_INT (vcall_offset);
      rtx tmp = gen_rtx_REG (Pmode, 12);

      /* Load the vtable pointer out of *this.  */
      emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
      if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
	{
	  /* Offset does not fit a 16-bit displacement: materialize the
	     address with an add, then load through it.  */
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
		     : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
	  emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
	}
      else
	{
	  /* Offset fits in a displacement; use a single load.  */
	  rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);

	  emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
	}
      /* this += *(vtable + vcall_offset).  */
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (this, this, tmp)
		 : gen_adddi3 (this, this, tmp));
    }

  /* Generate a tail call to the target function.  */
  if (!TREE_USED (function))
    {
      assemble_external (function);
      TREE_USED (function) = 1;
    }
  funexp = XEXP (DECL_RTL (function), 0);
  funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);

#if TARGET_MACHO
  if (MACHOPIC_INDIRECT)
    funexp = machopic_indirect_call_target (funexp);
#endif

  /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
     generate sibcall RTL explicitly to avoid constraint abort.  */
  insn = emit_call_insn (
	   gen_rtx_PARALLEL (VOIDmode,
	     gen_rtvec (4,
			gen_rtx_CALL (VOIDmode,
				      funexp, const0_rtx),
			gen_rtx_USE (VOIDmode, const0_rtx),
			gen_rtx_USE (VOIDmode,
				     gen_rtx_REG (SImode,
						  LINK_REGISTER_REGNUM)),
			gen_rtx_RETURN (VOIDmode))));
  SIBLING_CALL_P (insn) = 1;
  emit_barrier ();

  /* Run just enough of rest_of_compilation to get the insns emitted.
     There's not really enough bulk here to make other passes such as
     instruction scheduling worth while.  Note that use_thunk calls
     assemble_start_function and assemble_end_function.  */
  insn = get_insns ();
  insn_locators_initialize ();
  shorten_branches (insn);
  final_start_function (insn, file, 1);
  final (insn, file, 1, 0);
  final_end_function ();

  /* Restore the compiler state we faked above.  */
  reload_completed = 0;
  epilogue_completed = 0;
  no_new_pseudos = 0;
}
13003 /* A quick summary of the various types of 'constant-pool tables'
13004 under PowerPC:
13006 Target Flags Name One table per
13007 AIX (none) AIX TOC object file
13008 AIX -mfull-toc AIX TOC object file
13009 AIX -mminimal-toc AIX minimal TOC translation unit
13010 SVR4/EABI (none) SVR4 SDATA object file
13011 SVR4/EABI -fpic SVR4 pic object file
13012 SVR4/EABI -fPIC SVR4 PIC translation unit
13013 SVR4/EABI -mrelocatable EABI TOC function
13014 SVR4/EABI -maix AIX TOC object file
13015 SVR4/EABI -maix -mminimal-toc
13016 AIX minimal TOC translation unit
13018 Name Reg. Set by entries contains:
13019 made by addrs? fp? sum?
13021 AIX TOC 2 crt0 as Y option option
13022 AIX minimal TOC 30 prolog gcc Y Y option
13023 SVR4 SDATA 13 crt0 gcc N Y N
13024 SVR4 pic 30 prolog ld Y not yet N
13025 SVR4 PIC 30 prolog gcc Y option option
13026 EABI TOC 30 prolog gcc Y option option
/* Hash functions for the hash table.  */

/* Compute a hash value for rtx K, recursing into sub-expressions.
   Used to detect duplicate TOC entries (see toc_hash_function).  */

static unsigned
rs6000_hash_constant (rtx k)
{
  enum rtx_code code = GET_CODE (k);
  enum machine_mode mode = GET_MODE (k);
  unsigned result = (code << 3) ^ mode;
  const char *format;
  int flen, fidx;

  format = GET_RTX_FORMAT (code);
  flen = strlen (format);
  fidx = 0;

  switch (code)
    {
    case LABEL_REF:
      /* Hash the referenced label by its insn UID only.  */
      return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));

    case CONST_DOUBLE:
      if (mode != VOIDmode)
	return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
      /* VOIDmode CONST_DOUBLE is an integer pair; hash only the first
	 two fields below.  */
      flen = 2;
      break;

    case CODE_LABEL:
      /* Skip the leading fields of a CODE_LABEL.  NOTE(review): these
	 are presumably the insn-chain pointers — confirm against
	 rtl.def.  */
      fidx = 3;
      break;

    default:
      break;
    }

  /* Fold each remaining field into the hash according to its RTX
     format letter.  */
  for (; fidx < flen; fidx++)
    switch (format[fidx])
      {
      case 's':
	{
	  unsigned i, len;
	  const char *str = XSTR (k, fidx);
	  len = strlen (str);
	  result = result * 613 + len;
	  for (i = 0; i < len; i++)
	    result = result * 613 + (unsigned) str[i];
	  break;
	}
      case 'u':
      case 'e':
	result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
	break;
      case 'i':
      case 'n':
	result = result * 613 + (unsigned) XINT (k, fidx);
	break;
      case 'w':
	if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
	  result = result * 613 + (unsigned) XWINT (k, fidx);
	else
	  {
	    size_t i;
	    /* NOTE(review): the shift step is CHAR_BIT * i (8 bits per
	       round) while an `unsigned'-sized chunk is consumed each
	       round, so the chunks overlap; harmless for hashing.  */
	    for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
	      result = result * 613 + (unsigned) (XWINT (k, fidx)
						  >> CHAR_BIT * i);
	  }
	break;
      case '0':
	break;
      default:
	abort ();
      }

  return result;
}
13105 static unsigned
13106 toc_hash_function (const void *hash_entry)
13108 const struct toc_hash_struct *thc =
13109 (const struct toc_hash_struct *) hash_entry;
13110 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
13113 /* Compare H1 and H2 for equivalence. */
13115 static int
13116 toc_hash_eq (const void *h1, const void *h2)
13118 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
13119 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
13121 if (((const struct toc_hash_struct *) h1)->key_mode
13122 != ((const struct toc_hash_struct *) h2)->key_mode)
13123 return 0;
13125 return rtx_equal_p (r1, r2);
/* These are the names given by the C++ front-end to vtables, and
   vtable-like objects.  Ideally, this logic should not be here;
   instead, there should be some programmatic way of inquiring as
   to whether or not an object is a vtable.  */

/* Return nonzero if the NUL-terminated string NAME is the mangled
   name of a vtable ("_vt.", "_ZTV"), a VTT ("_ZTT") or a construction
   vtable ("_ZTC").  Fixed to test its argument NAME; the old body
   referenced a lowercase `name', silently capturing whatever variable
   of that name existed at the call site.  */

#define VTABLE_NAME_P(NAME)				\
  (strncmp ("_vt.", (NAME), strlen ("_vt.")) == 0	\
   || strncmp ("_ZTV", (NAME), strlen ("_ZTV")) == 0	\
   || strncmp ("_ZTT", (NAME), strlen ("_ZTT")) == 0	\
   || strncmp ("_ZTC", (NAME), strlen ("_ZTC")) == 0)
/* Output to FILE the assembler name for the SYMBOL_REF X.  */

void
rs6000_output_symbol_ref (FILE *file, rtx x)
{
  /* Currently C++ toc references to vtables can be emitted before it
     is decided whether the vtable is public or private.  If this is
     the case, then the linker will eventually complain that there is
     a reference to an unknown section.  Thus, for vtables only,
     we emit the TOC reference to reference the symbol and not the
     section.  */
  const char *name = XSTR (x, 0);

  if (VTABLE_NAME_P (name))
    {
      RS6000_OUTPUT_BASENAME (file, name);
    }
  else
    assemble_name (file, name);
}
/* Output a TOC entry.  We derive the entry name from what is being
   written.  */

/* Emit to FILE the TOC entry numbered LABELNO for constant X, which
   was entered into the TOC with machine mode MODE.  Handles duplicate
   suppression, FP constants, integer constants, and symbolic
   addresses (SYMBOL_REF/LABEL_REF, possibly plus an offset).  */

void
output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
{
  char buf[256];
  const char *name = buf;
  const char *real_name;
  rtx base = x;
  int offset = 0;

  if (TARGET_NO_TOC)
    abort ();

  /* When the linker won't eliminate them, don't output duplicate
     TOC entries (this happens on AIX if there is any kind of TOC,
     and on SVR4 under -fPIC or -mrelocatable).  Don't do this for
     CODE_LABELs.  */
  if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
    {
      struct toc_hash_struct *h;
      void * * found;

      /* Create toc_hash_table.  This can't be done at OVERRIDE_OPTIONS
	 time because GGC is not initialized at that point.  */
      if (toc_hash_table == NULL)
	toc_hash_table = htab_create_ggc (1021, toc_hash_function,
					  toc_hash_eq, NULL);

      h = ggc_alloc (sizeof (*h));
      h->key = x;
      h->key_mode = mode;
      h->labelno = labelno;

      found = htab_find_slot (toc_hash_table, h, 1);
      if (*found == NULL)
	*found = h;
      else  /* This is indeed a duplicate.
	       Set this label equal to that label.  */
	{
	  fputs ("\t.set ", file);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d,", labelno);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
				   found)->labelno));
	  return;
	}
    }

  /* If we're going to put a double constant in the TOC, make sure it's
     aligned properly when strict alignment is on.  */
  if (GET_CODE (x) == CONST_DOUBLE
      && STRICT_ALIGNMENT
      && GET_MODE_BITSIZE (mode) >= 64
      && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
    ASM_OUTPUT_ALIGN (file, 3);
  }

  (*targetm.asm_out.internal_label) (file, "LC", labelno);

  /* Handle FP constants specially.  Note that if we have a minimal
     TOC, things we put here aren't actually in the TOC, so we can allow
     FP constants.  */
  if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
    {
      /* 128-bit long double: four 32-bit words.  */
      REAL_VALUE_TYPE rv;
      long k[4];

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff,
		     k[2] & 0xffffffff, k[3] & 0xffffffff);
	  fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff,
		   k[2] & 0xffffffff, k[3] & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff,
		     k[2] & 0xffffffff, k[3] & 0xffffffff);
	  fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff,
		   k[2] & 0xffffffff, k[3] & 0xffffffff);
	  return;
	}
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
    {
      /* 64-bit double: two 32-bit words.  */
      REAL_VALUE_TYPE rv;
      long k[2];

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff);
	  fprintf (file, "0x%lx%08lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff);
	  fprintf (file, "0x%lx,0x%lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff);
	  return;
	}
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
    {
      /* 32-bit float: one word, left-justified in a 64-bit slot.  */
      REAL_VALUE_TYPE rv;
      long l;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
	  fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
	  fprintf (file, "0x%lx\n", l & 0xffffffff);
	  return;
	}
    }
  else if (GET_MODE (x) == VOIDmode
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      /* Integer constant: split into low/high 32-bit halves.  */
      unsigned HOST_WIDE_INT low;
      HOST_WIDE_INT high;

      if (GET_CODE (x) == CONST_DOUBLE)
	{
	  low = CONST_DOUBLE_LOW (x);
	  high = CONST_DOUBLE_HIGH (x);
	}
      else
#if HOST_BITS_PER_WIDE_INT == 32
	{
	  low = INTVAL (x);
	  /* Sign-extend into HIGH.  */
	  high = (low & 0x80000000) ? ~0 : 0;
	}
#else
	{
	  low = INTVAL (x) & 0xffffffff;
	  high = (HOST_WIDE_INT) INTVAL (x) >> 32;
	}
#endif

      /* TOC entries are always Pmode-sized, but since this
	 is a bigendian machine then if we're putting smaller
	 integer constants in the TOC we have to pad them.
	 (This is still a win over putting the constants in
	 a separate constant pool, because then we'd have
	 to have both a TOC entry _and_ the actual constant.)

	 For a 32-bit target, CONST_INT values are loaded and shifted
	 entirely within `low' and can be stored in one TOC entry.  */

      if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
	abort ();/* It would be easy to make this work, but it doesn't now.  */

      if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
	{
	  /* Left-justify the value within the Pmode-sized slot.  */
#if HOST_BITS_PER_WIDE_INT == 32
	  lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
			 POINTER_SIZE, &low, &high, 0);
#else
	  low |= high << 32;
	  low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
	  high = (HOST_WIDE_INT) low >> 32;
	  low &= 0xffffffff;
#endif
	}

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc ID_%lx_%lx[TC],",
		     (long) high & 0xffffffff, (long) low & 0xffffffff);
	  fprintf (file, "0x%lx%08lx\n",
		   (long) high & 0xffffffff, (long) low & 0xffffffff);
	  return;
	}
      else
	{
	  if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
	    {
	      /* Value needs two 32-bit words.  */
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc ID_%lx_%lx[TC],",
			 (long) high & 0xffffffff, (long) low & 0xffffffff);
	      fprintf (file, "0x%lx,0x%lx\n",
		       (long) high & 0xffffffff, (long) low & 0xffffffff);
	    }
	  else
	    {
	      /* Single-word value.  */
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
	      fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
	    }
	  return;
	}
    }

  /* Symbolic address, possibly with a constant offset wrapped in
     CONST (PLUS ...).  */
  if (GET_CODE (x) == CONST)
    {
      if (GET_CODE (XEXP (x, 0)) != PLUS)
	abort ();

      base = XEXP (XEXP (x, 0), 0);
      offset = INTVAL (XEXP (XEXP (x, 0), 1));
    }

  if (GET_CODE (base) == SYMBOL_REF)
    name = XSTR (base, 0);
  else if (GET_CODE (base) == LABEL_REF)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
  else if (GET_CODE (base) == CODE_LABEL)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
  else
    abort ();

  real_name = (*targetm.strip_name_encoding) (name);
  if (TARGET_MINIMAL_TOC)
    fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
  else
    {
      /* Encode a negative offset as .N<n> and a positive one as .P<n>
	 in the entry name so distinct offsets get distinct entries.  */
      fprintf (file, "\t.tc %s", real_name);

      if (offset < 0)
	fprintf (file, ".N%d", - offset);
      else if (offset)
	fprintf (file, ".P%d", offset);

      fputs ("[TC],", file);
    }

  /* Currently C++ toc references to vtables can be emitted before it
     is decided whether the vtable is public or private.  If this is
     the case, then the linker will eventually complain that there is
     a TOC reference to an unknown section.  Thus, for vtables only,
     we emit the TOC reference to reference the symbol and not the
     section.  */
  if (VTABLE_NAME_P (name))
    {
      RS6000_OUTPUT_BASENAME (file, name);
      if (offset < 0)
	fprintf (file, "%d", offset);
      else if (offset > 0)
	fprintf (file, "+%d", offset);
    }
  else
    output_addr_const (file, x);
  putc ('\n', file);
}
/* Output an assembler pseudo-op to write an ASCII string of N characters
   starting at P to FILE.

   On the RS/6000, this is done with the .byte operation: printable
   characters are collected into quoted strings, everything else is
   written as a decimal value.  The assembler truncates very long
   strings, so quoted runs are artificially broken up well before that
   limit.  */

void
output_ascii (FILE *file, const char *p, int n)
{
  const char *open_string = "\t.byte \"";
  const char *open_decimal = "\t.byte ";
  const char *pending_close = NULL;
  int chars_in_string = 0;
  int i;

  for (i = 0; i < n; i++)
    {
      char ch = *p++;

      if (ch >= ' ' && ch < 0177)
	{
	  /* Printable: open (or continue) a quoted string.  */
	  if (open_string)
	    fputs (open_string, file);
	  putc (ch, file);

	  /* A double quote inside a string is escaped by doubling.  */
	  if (ch == '"')
	    {
	      putc (ch, file);
	      ++chars_in_string;
	    }

	  open_string = NULL;
	  open_decimal = "\"\n\t.byte ";
	  pending_close = "\"\n";
	  ++chars_in_string;

	  /* Break long strings up early; the assembler truncates
	     very long ones.  */
	  if (chars_in_string >= 512)
	    {
	      fputs (pending_close, file);

	      open_string = "\t.byte \"";
	      open_decimal = "\t.byte ";
	      pending_close = NULL;
	      chars_in_string = 0;
	    }
	}
      else
	{
	  /* Non-printable: emit as a decimal .byte value.  */
	  if (open_decimal)
	    fputs (open_decimal, file);
	  fprintf (file, "%d", ch);

	  open_string = "\n\t.byte \"";
	  open_decimal = ", ";
	  pending_close = "\n";
	  chars_in_string = 0;
	}
    }

  /* Close any string still open, then end the line.  */
  if (pending_close)
    fputs (pending_close, file);
}
/* Generate a unique section name for FILENAME for a section type
   represented by SECTION_DESC.  The freshly allocated name is stored
   in *BUF; the caller owns (and frees) the memory.

   SECTION_DESC can be any string, as long as it is different for each
   possible section type.

   We name the section in the same manner as xlc.  The name begins with
   an underscore followed by the filename (after stripping any leading
   directory names) with the last period replaced by SECTION_DESC.  If
   FILENAME does not contain a period, SECTION_DESC is appended to the
   end of the name.  Non-alphanumeric characters are dropped.  */

void
rs6000_gen_section_name (char **buf, const char *filename,
			 const char *section_desc)
{
  const char *scan;
  const char *base = filename;	/* filename with directories stripped */
  const char *last_dot = 0;	/* last '.' anywhere in FILENAME */
  char *out;
  int len;

  /* Locate the final path separator and the final period.  */
  for (scan = filename; *scan; scan++)
    {
      if (*scan == '/')
	base = scan + 1;
      else if (*scan == '.')
	last_dot = scan;
    }

  /* '_' prefix + base + SECTION_DESC + NUL is the worst case.  */
  len = strlen (base) + strlen (section_desc) + 2;
  *buf = (char *) xmalloc (len);

  out = *buf;
  *out++ = '_';

  for (scan = base; *scan; scan++)
    {
      if (scan == last_dot)
	{
	  /* Replace the last period with SECTION_DESC and stop.  */
	  strcpy (out, section_desc);
	  out += strlen (section_desc);
	  break;
	}
      else if (ISALNUM (*scan))
	*out++ = *scan;
    }

  if (last_dot == 0)
    strcpy (out, section_desc);
  else
    *out = '\0';
}
/* Emit profile function.  */

/* Emit the RTL call to the profiling routine (mcount) for label
   LABELNO.  No-op when profiling the kernel or for ABIs whose
   profiling is emitted as text by output_function_profiler.  */

void
output_profile_hook (int labelno ATTRIBUTE_UNUSED)
{
  if (TARGET_PROFILE_KERNEL)
    return;

  if (DEFAULT_ABI == ABI_AIX)
    {
#ifndef NO_PROFILE_COUNTERS
# define NO_PROFILE_COUNTERS 0
#endif
      if (NO_PROFILE_COUNTERS)
	emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
      else
	{
	  /* Pass the address of the per-call-site counter label to
	     mcount.  */
	  char buf[30];
	  const char *label_name;
	  rtx fun;

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
	  label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
	  fun = gen_rtx_SYMBOL_REF (Pmode, label_name);

	  emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
			     fun, Pmode);
	}
    }
  else if (DEFAULT_ABI == ABI_DARWIN)
    {
      const char *mcount_name = RS6000_MCOUNT;
      int caller_addr_regno = LINK_REGISTER_REGNUM;

      /* Be conservative and always set this, at least for now.  */
      current_function_uses_pic_offset_table = 1;

#if TARGET_MACHO
      /* For PIC code, set up a stub and collect the caller's address
	 from r0, which is where the prologue puts it.  */
      if (MACHOPIC_INDIRECT)
	{
	  mcount_name = machopic_stub_name (mcount_name);
	  if (current_function_uses_pic_offset_table)
	    caller_addr_regno = 0;
	}
#endif
      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
			 0, VOIDmode, 1,
			 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
    }
}
/* Write function profiler code.  */

/* Emit the textual prologue profiling sequence for counter label
   LABELNO to FILE.  For ABI_V4 this saves LR, loads the counter label
   address (PIC or not) and calls mcount; for ABI_AIX/ABI_DARWIN it
   only emits code when profiling the kernel (otherwise the work is
   done in output_profile_hook).  */

void
output_function_profiler (FILE *file, int labelno)
{
  char buf[100];
  int save_lr = 8;

  switch (DEFAULT_ABI)
    {
    default:
      abort ();

    case ABI_V4:
      save_lr = 4;
      if (!TARGET_32BIT)
	{
	  warning ("no profiling of 64-bit code for this ABI");
	  return;
	}
      ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
      fprintf (file, "\tmflr %s\n", reg_names[0]);
      if (flag_pic == 1)
	{
	  /* -fpic: fetch the counter address through the GOT.  */
	  fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
	  asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@got(%s)\n", reg_names[12]);
	}
      else if (flag_pic > 1)
	{
	  /* -fPIC: compute the counter address pc-relatively from an
	     inline literal.  */
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  /* Now, we need to get the address of the label.  */
	  fputs ("\tbl 1f\n\t.long ", file);
	  assemble_name (file, buf);
	  fputs ("-.\n1:", file);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
	  asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
		       reg_names[0], reg_names[11]);
	  asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
		       reg_names[0], reg_names[0], reg_names[11]);
	}
      else
	{
	  /* Non-PIC: build the counter address with lis/la.  */
	  asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
	  assemble_name (file, buf);
	  fputs ("@ha\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@l(%s)\n", reg_names[12]);
	}

      /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH.  */
      fprintf (file, "\tbl %s%s\n",
	       RS6000_MCOUNT, flag_pic ? "@plt" : "");
      break;

    case ABI_AIX:
    case ABI_DARWIN:
      if (!TARGET_PROFILE_KERNEL)
	{
	  /* Don't do anything, done in output_profile_hook ().  */
	}
      else
	{
	  if (TARGET_32BIT)
	    abort ();

	  asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
	  asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);

	  if (current_function_needs_context)
	    {
	      /* Preserve the static chain register around the call.  */
	      asm_fprintf (file, "\tstd %s,24(%s)\n",
			   reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
	      fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
	      asm_fprintf (file, "\tld %s,24(%s)\n",
			   reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
	    }
	  else
	    fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
	}
      break;
    }
}
/* Target hook: nonzero tells the scheduler to use the DFA pipeline
   description instead of the old function-unit interface.  */

static int
rs6000_use_dfa_pipeline_interface (void)
{
  return 1;
}
13726 /* Power4 load update and store update instructions are cracked into a
13727 load or store and an integer insn which are executed in the same cycle.
13728 Branches have their own dispatch slot which does not count against the
13729 GCC issue rate, but it changes the program flow so there are no other
13730 instructions to issue in this cycle. */
13732 static int
13733 rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
13734 int verbose ATTRIBUTE_UNUSED,
13735 rtx insn, int more)
13737 if (GET_CODE (PATTERN (insn)) == USE
13738 || GET_CODE (PATTERN (insn)) == CLOBBER)
13739 return more;
13741 if (rs6000_cpu == PROCESSOR_POWER4)
13743 if (is_microcoded_insn (insn))
13744 return 0;
13745 else if (is_cracked_insn (insn))
13746 return more > 2 ? more - 2 : 0;
13749 return more - 1;
13752 /* Adjust the cost of a scheduling dependency. Return the new cost of
13753 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
13755 static int
13756 rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn ATTRIBUTE_UNUSED,
13757 int cost)
13759 if (! recog_memoized (insn))
13760 return 0;
13762 if (REG_NOTE_KIND (link) != 0)
13763 return 0;
13765 if (REG_NOTE_KIND (link) == 0)
13767 /* Data dependency; DEP_INSN writes a register that INSN reads
13768 some cycles later. */
13769 switch (get_attr_type (insn))
13771 case TYPE_JMPREG:
13772 /* Tell the first scheduling pass about the latency between
13773 a mtctr and bctr (and mtlr and br/blr). The first
13774 scheduling pass will not know about this latency since
13775 the mtctr instruction, which has the latency associated
13776 to it, will be generated by reload. */
13777 return TARGET_POWER ? 5 : 4;
13778 case TYPE_BRANCH:
13779 /* Leave some extra cycles between a compare and its
13780 dependent branch, to inhibit expensive mispredicts. */
13781 if ((rs6000_cpu_attr == CPU_PPC603
13782 || rs6000_cpu_attr == CPU_PPC604
13783 || rs6000_cpu_attr == CPU_PPC604E
13784 || rs6000_cpu_attr == CPU_PPC620
13785 || rs6000_cpu_attr == CPU_PPC630
13786 || rs6000_cpu_attr == CPU_PPC750
13787 || rs6000_cpu_attr == CPU_PPC7400
13788 || rs6000_cpu_attr == CPU_PPC7450
13789 || rs6000_cpu_attr == CPU_POWER4)
13790 && recog_memoized (dep_insn)
13791 && (INSN_CODE (dep_insn) >= 0)
13792 && (get_attr_type (dep_insn) == TYPE_CMP
13793 || get_attr_type (dep_insn) == TYPE_COMPARE
13794 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
13795 || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
13796 || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
13797 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
13798 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
13799 || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
13800 return cost + 2;
13801 default:
13802 break;
13804 /* Fall out to return default cost. */
13807 return cost;
13810 /* The function returns a true if INSN is microcoded.
13811 Return false otherwise. */
13813 static bool
13814 is_microcoded_insn (rtx insn)
13816 if (!insn || !INSN_P (insn)
13817 || GET_CODE (PATTERN (insn)) == USE
13818 || GET_CODE (PATTERN (insn)) == CLOBBER)
13819 return false;
13821 if (rs6000_cpu == PROCESSOR_POWER4)
13823 enum attr_type type = get_attr_type (insn);
13824 if (type == TYPE_LOAD_EXT_U
13825 || type == TYPE_LOAD_EXT_UX
13826 || type == TYPE_LOAD_UX
13827 || type == TYPE_STORE_UX
13828 || type == TYPE_MFCR)
13829 return true;
13832 return false;
13835 /* The function returns a nonzero value if INSN can be scheduled only
13836 as the first insn in a dispatch group ("dispatch-slot restricted").
13837 In this case, the returned value indicates how many dispatch slots
13838 the insn occupies (at the beginning of the group).
13839 Return 0 otherwise. */
13841 static int
13842 is_dispatch_slot_restricted (rtx insn)
13844 enum attr_type type;
13846 if (rs6000_cpu != PROCESSOR_POWER4)
13847 return 0;
13849 if (!insn
13850 || insn == NULL_RTX
13851 || GET_CODE (insn) == NOTE
13852 || GET_CODE (PATTERN (insn)) == USE
13853 || GET_CODE (PATTERN (insn)) == CLOBBER)
13854 return 0;
13856 type = get_attr_type (insn);
13858 switch (type){
13859 case TYPE_MFCR:
13860 case TYPE_MFCRF:
13861 case TYPE_MTCR:
13862 case TYPE_DELAYED_CR:
13863 case TYPE_CR_LOGICAL:
13864 case TYPE_MTJMPR:
13865 case TYPE_MFJMPR:
13866 return 1;
13867 case TYPE_IDIV:
13868 case TYPE_LDIV:
13869 return 2;
13870 default:
13871 return 0;
13875 /* The function returns true if INSN is cracked into 2 instructions
13876 by the processor (and therefore occupies 2 issue slots). */
13878 static bool
13879 is_cracked_insn (rtx insn)
13881 if (!insn || !INSN_P (insn)
13882 || GET_CODE (PATTERN (insn)) == USE
13883 || GET_CODE (PATTERN (insn)) == CLOBBER)
13884 return false;
13886 if (rs6000_cpu == PROCESSOR_POWER4)
13888 enum attr_type type = get_attr_type (insn);
13889 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
13890 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
13891 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
13892 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
13893 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
13894 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
13895 || type == TYPE_IDIV || type == TYPE_LDIV
13896 || type == TYPE_INSERT_WORD)
13897 return true;
13900 return false;
13903 /* The function returns true if INSN can be issued only from
13904 the branch slot. */
13906 static bool
13907 is_branch_slot_insn (rtx insn)
13909 if (!insn || !INSN_P (insn)
13910 || GET_CODE (PATTERN (insn)) == USE
13911 || GET_CODE (PATTERN (insn)) == CLOBBER)
13912 return false;
13914 if (rs6000_cpu == PROCESSOR_POWER4)
13916 enum attr_type type = get_attr_type (insn);
13917 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
13918 return true;
13919 return false;
13922 return false;
/* A C statement (sans semicolon) to update the integer scheduling
   priority INSN_PRIORITY (INSN).  Increase the priority to execute the
   INSN earlier, reduce the priority to execute INSN later.  Do not
   define this macro if you do not need to adjust the scheduling
   priorities of insns.  */

static int
rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
{
  /* On machines (like the 750) which have asymmetric integer units,
     where one integer unit can do multiply and divides and the other
     can't, reduce the priority of multiply/divide so it is scheduled
     before other integer operations.  */

  /* Disabled experiment kept for reference; not compiled.  */
#if 0
  if (! INSN_P (insn))
    return priority;

  if (GET_CODE (PATTERN (insn)) == USE)
    return priority;

  switch (rs6000_cpu_attr) {
  case CPU_PPC750:
    switch (get_attr_type (insn))
      {
      default:
	break;

      case TYPE_IMUL:
      case TYPE_IDIV:
	fprintf (stderr, "priority was %#x (%d) before adjustment\n",
		 priority, priority);
	if (priority >= 0 && priority < 0x01000000)
	  priority >>= 3;
	break;
      }
  }
#endif

  /* After reload, boost dispatch-slot restricted insns so they are
     more likely to start a dispatch group; how strongly depends on
     -mprioritize-restricted-insns.  */
  if (is_dispatch_slot_restricted (insn)
      && reload_completed
      && current_sched_info->sched_max_insns_priority
      && rs6000_sched_restricted_insns_priority)
    {
      /* Prioritize insns that can be dispatched only in the first dispatch slot.  */
      if (rs6000_sched_restricted_insns_priority == 1)
	/* Attach highest priority to insn.  This means that in
	   haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
	   precede 'priority' (critical path) considerations.  */
	return current_sched_info->sched_max_insns_priority;
      else if (rs6000_sched_restricted_insns_priority == 2)
	/* Increase priority of insn by a minimal amount.  This means that in
	   haifa-sched.c:ready_sort(), only 'priority' (critical path) considerations
	   precede dispatch-slot restriction considerations.  */
	return (priority + 1);
    }

  return priority;
}
13986 /* Return how many instructions the machine can issue per cycle. */
13988 static int
13989 rs6000_issue_rate (void)
13991 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
13992 if (!reload_completed)
13993 return 1;
13995 switch (rs6000_cpu_attr) {
13996 case CPU_RIOS1: /* ? */
13997 case CPU_RS64A:
13998 case CPU_PPC601: /* ? */
13999 case CPU_PPC7450:
14000 return 3;
14001 case CPU_PPC440:
14002 case CPU_PPC603:
14003 case CPU_PPC750:
14004 case CPU_PPC7400:
14005 case CPU_PPC8540:
14006 return 2;
14007 case CPU_RIOS2:
14008 case CPU_PPC604:
14009 case CPU_PPC604E:
14010 case CPU_PPC620:
14011 case CPU_PPC630:
14012 return 4;
14013 case CPU_POWER4:
14014 return 5;
14015 default:
14016 return 1;
14020 /* Return how many instructions to look ahead for better insn
14021 scheduling. */
14023 static int
14024 rs6000_use_sched_lookahead (void)
14026 if (rs6000_cpu_attr == CPU_PPC8540)
14027 return 4;
14028 return 0;
14031 /* Determine is PAT refers to memory. */
14033 static bool
14034 is_mem_ref (rtx pat)
14036 const char * fmt;
14037 int i, j;
14038 bool ret = false;
14040 if (GET_CODE (pat) == MEM)
14041 return true;
14043 /* Recursively process the pattern. */
14044 fmt = GET_RTX_FORMAT (GET_CODE (pat));
14046 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
14048 if (fmt[i] == 'e')
14049 ret |= is_mem_ref (XEXP (pat, i));
14050 else if (fmt[i] == 'E')
14051 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
14052 ret |= is_mem_ref (XVECEXP (pat, i, j));
14055 return ret;
14058 /* Determine if PAT is a PATTERN of a load insn. */
14060 static bool
14061 is_load_insn1 (rtx pat)
14063 if (!pat || pat == NULL_RTX)
14064 return false;
14066 if (GET_CODE (pat) == SET)
14067 return is_mem_ref (SET_SRC (pat));
14069 if (GET_CODE (pat) == PARALLEL)
14071 int i;
14073 for (i = 0; i < XVECLEN (pat, 0); i++)
14074 if (is_load_insn1 (XVECEXP (pat, 0, i)))
14075 return true;
14078 return false;
14081 /* Determine if INSN loads from memory. */
14083 static bool
14084 is_load_insn (rtx insn)
14086 if (!insn || !INSN_P (insn))
14087 return false;
14089 if (GET_CODE (insn) == CALL_INSN)
14090 return false;
14092 return is_load_insn1 (PATTERN (insn));
14095 /* Determine if PAT is a PATTERN of a store insn. */
14097 static bool
14098 is_store_insn1 (rtx pat)
14100 if (!pat || pat == NULL_RTX)
14101 return false;
14103 if (GET_CODE (pat) == SET)
14104 return is_mem_ref (SET_DEST (pat));
14106 if (GET_CODE (pat) == PARALLEL)
14108 int i;
14110 for (i = 0; i < XVECLEN (pat, 0); i++)
14111 if (is_store_insn1 (XVECEXP (pat, 0, i)))
14112 return true;
14115 return false;
14118 /* Determine if INSN stores to memory. */
14120 static bool
14121 is_store_insn (rtx insn)
14123 if (!insn || !INSN_P (insn))
14124 return false;
14126 return is_store_insn1 (PATTERN (insn));
14129 /* Returns whether the dependence between INSN and NEXT is considered
14130 costly by the given target. */
14132 static bool
14133 rs6000_is_costly_dependence (rtx insn, rtx next, rtx link, int cost, int distance)
14135 /* If the flag is not enbled - no dependence is considered costly;
14136 allow all dependent insns in the same group.
14137 This is the most aggressive option. */
14138 if (rs6000_sched_costly_dep == no_dep_costly)
14139 return false;
14141 /* If the flag is set to 1 - a dependence is always considered costly;
14142 do not allow dependent instructions in the same group.
14143 This is the most conservative option. */
14144 if (rs6000_sched_costly_dep == all_deps_costly)
14145 return true;
14147 if (rs6000_sched_costly_dep == store_to_load_dep_costly
14148 && is_load_insn (next)
14149 && is_store_insn (insn))
14150 /* Prevent load after store in the same group. */
14151 return true;
14153 if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
14154 && is_load_insn (next)
14155 && is_store_insn (insn)
14156 && (!link || (int) REG_NOTE_KIND (link) == 0))
14157 /* Prevent load after store in the same group if it is a true dependence. */
14158 return true;
14160 /* The flag is set to X; dependences with latency >= X are considered costly,
14161 and will not be scheduled in the same group. */
14162 if (rs6000_sched_costly_dep <= max_dep_latency
14163 && ((cost - distance) >= (int)rs6000_sched_costly_dep))
14164 return true;
14166 return false;
14169 /* Return the next insn after INSN that is found before TAIL is reached,
14170 skipping any "non-active" insns - insns that will not actually occupy
14171 an issue slot. Return NULL_RTX if such an insn is not found. */
14173 static rtx
14174 get_next_active_insn (rtx insn, rtx tail)
14176 rtx next_insn;
14178 if (!insn || insn == tail)
14179 return NULL_RTX;
14181 next_insn = NEXT_INSN (insn);
14183 while (next_insn
14184 && next_insn != tail
14185 && (GET_CODE(next_insn) == NOTE
14186 || GET_CODE (PATTERN (next_insn)) == USE
14187 || GET_CODE (PATTERN (next_insn)) == CLOBBER))
14189 next_insn = NEXT_INSN (next_insn);
14192 if (!next_insn || next_insn == tail)
14193 return NULL_RTX;
14195 return next_insn;
14198 /* Return whether the presence of INSN causes a dispatch group termination
14199 of group WHICH_GROUP.
14201 If WHICH_GROUP == current_group, this function will return true if INSN
14202 causes the termination of the current group (i.e, the dispatch group to
14203 which INSN belongs). This means that INSN will be the last insn in the
14204 group it belongs to.
14206 If WHICH_GROUP == previous_group, this function will return true if INSN
14207 causes the termination of the previous group (i.e, the dispatch group that
14208 precedes the group to which INSN belongs). This means that INSN will be
14209 the first insn in the group it belongs to). */
14211 static bool
14212 insn_terminates_group_p (rtx insn, enum group_termination which_group)
14214 enum attr_type type;
14216 if (! insn)
14217 return false;
14219 type = get_attr_type (insn);
14221 if (is_microcoded_insn (insn))
14222 return true;
14224 if (which_group == current_group)
14226 if (is_branch_slot_insn (insn))
14227 return true;
14228 return false;
14230 else if (which_group == previous_group)
14232 if (is_dispatch_slot_restricted (insn))
14233 return true;
14234 return false;
14237 return false;
14240 /* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
14241 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
14243 static bool
14244 is_costly_group (rtx *group_insns, rtx next_insn)
14246 int i;
14247 rtx link;
14248 int cost;
14249 int issue_rate = rs6000_issue_rate ();
14251 for (i = 0; i < issue_rate; i++)
14253 rtx insn = group_insns[i];
14254 if (!insn)
14255 continue;
14256 for (link = INSN_DEPEND (insn); link != 0; link = XEXP (link, 1))
14258 rtx next = XEXP (link, 0);
14259 if (next == next_insn)
14261 cost = insn_cost (insn, link, next_insn);
14262 if (rs6000_is_costly_dependence (insn, next_insn, link, cost, 0))
14263 return true;
14268 return false;
/* Utility of the function redefine_groups.
   Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
   in the same dispatch group.  If so, insert nops before NEXT_INSN, in order
   to keep it "far" (in a separate group) from GROUP_INSNS, following
   one of the following schemes, depending on the value of the flag
   -minsert_sched_nops = X:
   (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
       in order to force NEXT_INSN into a separate group.
   (2) X < sched_finish_regroup_exact: insert exactly X nops.
   GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
   insertion (has a group just ended, how many vacant issue slots remain in the
   last group, and how many dispatch groups were encountered so far).  */

static int
force_new_group (int sched_verbose, FILE *dump, rtx *group_insns,
		 rtx next_insn, bool *group_end, int can_issue_more,
		 int *group_count)
{
  rtx nop;
  bool force;
  int issue_rate = rs6000_issue_rate ();
  bool end = *group_end;
  int i;

  if (next_insn == NULL_RTX)
    return can_issue_more;

  /* Values above sched_finish_regroup_exact request no nop insertion.  */
  if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
    return can_issue_more;

  force = is_costly_group (group_insns, next_insn);
  if (!force)
    return can_issue_more;

  if (sched_verbose > 6)
    fprintf (dump, "force: group count = %d, can_issue_more = %d\n",
	     *group_count, can_issue_more);

  /* Scheme (1): insert exactly as many nops as needed to push NEXT_INSN
     into a fresh dispatch group.  */
  if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
    {
      if (*group_end)
	can_issue_more = 0;

      /* Since only a branch can be issued in the last issue_slot, it is
	 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
	 a branch.  If next_insn is a branch, we insert 'can_issue_more' nops;
	 in this case the last nop will start a new group and the branch will
	 be forced to the new group.  */
      if (can_issue_more && !is_branch_slot_insn (next_insn))
	can_issue_more--;

      while (can_issue_more > 0)
	{
	  nop = gen_nop ();
	  emit_insn_before (nop, next_insn);
	  can_issue_more--;
	}

      *group_end = true;
      return 0;
    }

  /* Scheme (2): insert a fixed number (the flag's value) of nops.  */
  if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
    {
      int n_nops = rs6000_sched_insert_nops;

      /* Nops can't be issued from the branch slot, so the effective
	 issue_rate for nops is 'issue_rate - 1'.  */
      if (can_issue_more == 0)
	can_issue_more = issue_rate;
      can_issue_more--;
      if (can_issue_more == 0)
	{
	  /* The group filled up; start counting a new one.  */
	  can_issue_more = issue_rate - 1;
	  (*group_count)++;
	  end = true;
	  for (i = 0; i < issue_rate; i++)
	    {
	      group_insns[i] = 0;
	    }
	}

      while (n_nops > 0)
	{
	  nop = gen_nop ();
	  emit_insn_before (nop, next_insn);
	  if (can_issue_more == issue_rate - 1) /* new group begins */
	    end = false;
	  can_issue_more--;
	  if (can_issue_more == 0)
	    {
	      can_issue_more = issue_rate - 1;
	      (*group_count)++;
	      end = true;
	      for (i = 0; i < issue_rate; i++)
		{
		  group_insns[i] = 0;
		}
	    }
	  n_nops--;
	}

      /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1').  */
      can_issue_more++;

      *group_end = /* Is next_insn going to start a new group?  */
	  (end
	   || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
	   || (can_issue_more <= 2 && is_cracked_insn (next_insn))
	   || (can_issue_more < issue_rate
	       && insn_terminates_group_p (next_insn, previous_group)));
      if (*group_end && end)
	(*group_count)--;

      if (sched_verbose > 6)
	fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
		 *group_count, can_issue_more);
      return can_issue_more;
    }

  return can_issue_more;
}
/* This function tries to synch the dispatch groups that the compiler "sees"
   with the dispatch groups that the processor dispatcher is expected to
   form in practice.  It tries to achieve this synchronization by forcing the
   estimated processor grouping on the compiler (as opposed to the function
   'pad_groups' which tries to force the scheduler's grouping on the
   processor).

   The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
   examines the (estimated) dispatch groups that will be formed by the
   processor dispatcher.  It marks these group boundaries to reflect the
   estimated processor grouping, overriding the grouping that the scheduler
   had marked.  Depending on the value of the flag '-minsert-sched-nops'
   this function can force certain insns into separate groups or force a
   certain distance between them by inserting nops, for example, if there
   exists a "costly dependence" between the insns.

   The function estimates the group boundaries that the processor will form
   as follows: It keeps track of how many vacant issue slots are available
   after each insn.  A subsequent insn will start a new group if one of the
   following 4 cases applies:
   - no more vacant issue slots remain in the current dispatch group.
   - only the last issue slot, which is the branch slot, is vacant, but the
     next insn is not a branch.
   - only the last 2 or less issue slots, including the branch slot, are
     vacant, which means that a cracked insn (which occupies two issue slots)
     can't be issued in this group.
   - less than 'issue_rate' slots are vacant, and the next insn always needs
     to start a new group.  */

static int
redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
{
  rtx insn, next_insn;
  int issue_rate;
  int can_issue_more;
  int slot, i;
  bool group_end;
  int group_count = 0;
  rtx *group_insns;

  /* Initialize.  */
  issue_rate = rs6000_issue_rate ();
  group_insns = alloca (issue_rate * sizeof (rtx));
  for (i = 0; i < issue_rate; i++)
    {
      group_insns[i] = 0;
    }
  can_issue_more = issue_rate;
  slot = 0;
  insn = get_next_active_insn (prev_head_insn, tail);
  group_end = false;

  while (insn != NULL_RTX)
    {
      /* Record INSN in its estimated issue slot of the current group.  */
      slot = (issue_rate - can_issue_more);
      group_insns[slot] = insn;
      can_issue_more =
	rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
      if (insn_terminates_group_p (insn, current_group))
	can_issue_more = 0;

      next_insn = get_next_active_insn (insn, tail);
      if (next_insn == NULL_RTX)
	return group_count + 1;

      group_end = /* Is next_insn going to start a new group?  */
	(can_issue_more == 0
	 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
	 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
	 || (can_issue_more < issue_rate
	     && insn_terminates_group_p (next_insn, previous_group)));

      can_issue_more = force_new_group (sched_verbose, dump, group_insns,
					next_insn, &group_end, can_issue_more,
					&group_count);

      if (group_end)
	{
	  group_count++;
	  can_issue_more = 0;
	  for (i = 0; i < issue_rate; i++)
	    {
	      group_insns[i] = 0;
	    }
	}

      /* Re-mark the group boundary: TImode on an insn is the scheduler's
	 convention for "this insn begins a new dispatch group".  */
      if (GET_MODE (next_insn) == TImode && can_issue_more)
	PUT_MODE (next_insn, VOIDmode);
      else if (!can_issue_more && GET_MODE (next_insn) != TImode)
	PUT_MODE (next_insn, TImode);

      insn = next_insn;
      if (can_issue_more == 0)
	can_issue_more = issue_rate;
    } /* while */

  return group_count;
}
/* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
   dispatch group boundaries that the scheduler had marked.  Pad with nops
   any dispatch groups which have vacant issue slots, in order to force the
   scheduler's grouping on the processor dispatcher.  The function
   returns the number of dispatch groups found.  */

static int
pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
{
  rtx insn, next_insn;
  rtx nop;
  int issue_rate;
  int can_issue_more;
  int group_end;
  int group_count = 0;

  /* Initialize issue_rate.  */
  issue_rate = rs6000_issue_rate ();
  can_issue_more = issue_rate;

  insn = get_next_active_insn (prev_head_insn, tail);
  next_insn = get_next_active_insn (insn, tail);

  while (insn != NULL_RTX)
    {
      can_issue_more =
	rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);

      /* TImode on NEXT_INSN is the scheduler's mark for the start of a
	 new dispatch group.  */
      group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);

      if (next_insn == NULL_RTX)
	break;

      if (group_end)
	{
	  /* If the scheduler had marked group termination at this location
	     (between insn and next_insn), and neither insn nor next_insn will
	     force group termination, pad the group with nops to force group
	     termination.  */
	  if (can_issue_more
	      && (rs6000_sched_insert_nops == sched_finish_pad_groups)
	      && !insn_terminates_group_p (insn, current_group)
	      && !insn_terminates_group_p (next_insn, previous_group))
	    {
	      /* The branch slot need not be padded when NEXT_INSN is not
		 a branch, since nops can't go there.  */
	      if (!is_branch_slot_insn (next_insn))
		can_issue_more--;

	      while (can_issue_more)
		{
		  nop = gen_nop ();
		  emit_insn_before (nop, next_insn);
		  can_issue_more--;
		}
	    }

	  can_issue_more = issue_rate;
	  group_count++;
	}

      insn = next_insn;
      next_insn = get_next_active_insn (insn, tail);
    }

  return group_count;
}
14556 /* The following function is called at the end of scheduling BB.
14557 After reload, it inserts nops at insn group bundling. */
14559 static void
14560 rs6000_sched_finish (FILE *dump, int sched_verbose)
14562 int n_groups;
14564 if (sched_verbose)
14565 fprintf (dump, "=== Finishing schedule.\n");
14567 if (reload_completed && rs6000_cpu == PROCESSOR_POWER4)
14569 if (rs6000_sched_insert_nops == sched_finish_none)
14570 return;
14572 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
14573 n_groups = pad_groups (dump, sched_verbose,
14574 current_sched_info->prev_head,
14575 current_sched_info->next_tail);
14576 else
14577 n_groups = redefine_groups (dump, sched_verbose,
14578 current_sched_info->prev_head,
14579 current_sched_info->next_tail);
14581 if (sched_verbose >= 6)
14583 fprintf (dump, "ngroups = %d\n", n_groups);
14584 print_rtl (dump, current_sched_info->prev_head);
14585 fprintf (dump, "Done finish_sched\n");
14590 /* Length in units of the trampoline for entering a nested function. */
14593 rs6000_trampoline_size (void)
14595 int ret = 0;
14597 switch (DEFAULT_ABI)
14599 default:
14600 abort ();
14602 case ABI_AIX:
14603 ret = (TARGET_32BIT) ? 12 : 24;
14604 break;
14606 case ABI_DARWIN:
14607 case ABI_V4:
14608 ret = (TARGET_32BIT) ? 40 : 48;
14609 break;
14612 return ret;
/* Emit RTL insns to initialize the variable parts of a trampoline.
   FNADDR is an RTX for the address of the function's pure code.
   CXT is an RTX for the static chain value for the function.  */

void
rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
{
  enum machine_mode pmode = Pmode;
  /* ABI word size in bytes.  */
  int regsize = (TARGET_32BIT) ? 4 : 8;
  rtx ctx_reg = force_reg (pmode, cxt);

  switch (DEFAULT_ABI)
    {
    default:
      abort ();

/* Macros to shorten the code expansions below.  */
#define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
#define MEM_PLUS(addr,offset) \
  gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))

    /* Under AIX, just build the 3 word function descriptor */
    case ABI_AIX:
      {
	rtx fn_reg = gen_reg_rtx (pmode);
	rtx toc_reg = gen_reg_rtx (pmode);
	/* Copy code address, TOC pointer, and static chain into the
	   descriptor at ADDR.  */
	emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
	emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
	emit_move_insn (MEM_DEREF (addr), fn_reg);
	emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
	emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
      }
      break;

    /* Under V.4/eabi/darwin, __trampoline_setup does the real work.  */
    case ABI_DARWIN:
    case ABI_V4:
      emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
			 FALSE, VOIDmode, 4,
			 addr, pmode,
			 GEN_INT (rs6000_trampoline_size ()), SImode,
			 fnaddr, pmode,
			 ctx_reg, pmode);
      break;
    }

  return;
}
/* Table of valid machine attributes.  The list is terminated by the
   all-NULL sentinel entry.  */

const struct attribute_spec rs6000_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "altivec",   1, 1, false, true,  false, rs6000_handle_altivec_attribute },
  { "longcall",  0, 0, false, true,  true,  rs6000_handle_longcall_attribute },
  { "shortcall", 0, 0, false, true,  true,  rs6000_handle_longcall_attribute },
  { NULL,        0, 0, false, false, false, NULL }
};
/* Handle the "altivec" attribute.  The attribute may have
   arguments as follows:

   __attribute__((altivec(vector__)))
   __attribute__((altivec(pixel__)))   (always followed by 'unsigned short')
   __attribute__((altivec(bool__)))    (always followed by 'unsigned')

   and may appear more than once (e.g., 'vector bool char') in a
   given declaration.  */

static tree
rs6000_handle_altivec_attribute (tree *node, tree name, tree args,
				 int flags ATTRIBUTE_UNUSED,
				 bool *no_add_attrs)
{
  tree type = *node, result = NULL_TREE;
  enum machine_mode mode;
  int unsigned_p;
  /* First character of the attribute argument: 'v' (vector), 'b' (bool),
     'p' (pixel), or '?' when absent/not an identifier.  */
  char altivec_type
    = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
	&& TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
       ? *IDENTIFIER_POINTER (TREE_VALUE (args))
       : '?');

  /* Strip pointer, function, method and array wrappers to reach the
     underlying element type.  */
  while (POINTER_TYPE_P (type)
	 || TREE_CODE (type) == FUNCTION_TYPE
	 || TREE_CODE (type) == METHOD_TYPE
	 || TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);

  mode = TYPE_MODE (type);

  if (rs6000_warn_altivec_long
      && (type == long_unsigned_type_node || type == long_integer_type_node))
    warning ("use of 'long' in AltiVec types is deprecated; use 'int'");

  switch (altivec_type)
    {
    case 'v':
      unsigned_p = TREE_UNSIGNED (type);
      switch (mode)
	{
	case SImode:
	  result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
	  break;
	case HImode:
	  result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
	  break;
	case QImode:
	  result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
	  break;
	case SFmode: result = V4SF_type_node; break;
	  /* If the user says 'vector int bool', we may be handed the 'bool'
	     attribute _before_ the 'vector' attribute, and so select the
	     proper type in the 'b' case below.  */
	case V4SImode: case V8HImode: case V16QImode: result = type;
	  /* fall through */
	default: break;
	}
      break;
    case 'b':
      switch (mode)
	{
	case SImode: case V4SImode: result = bool_V4SI_type_node; break;
	case HImode: case V8HImode: result = bool_V8HI_type_node; break;
	case QImode: case V16QImode: result = bool_V16QI_type_node;
	  /* fall through */
	default: break;
	}
      break;
    case 'p':
      switch (mode)
	{
	case V8HImode: result = pixel_V8HI_type_node;
	  /* fall through */
	default: break;
	}
      /* fall through */
    default: break;
    }

  *no_add_attrs = true;  /* No need to hang on to the attribute.  */

  if (!result)
    warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
  else
    *node = reconstruct_complex_type (*node, result);

  return NULL_TREE;
}
14763 /* Handle a "longcall" or "shortcall" attribute; arguments as in
14764 struct attribute_spec.handler. */
14766 static tree
14767 rs6000_handle_longcall_attribute (tree *node, tree name,
14768 tree args ATTRIBUTE_UNUSED,
14769 int flags ATTRIBUTE_UNUSED,
14770 bool *no_add_attrs)
14772 if (TREE_CODE (*node) != FUNCTION_TYPE
14773 && TREE_CODE (*node) != FIELD_DECL
14774 && TREE_CODE (*node) != TYPE_DECL)
14776 warning ("`%s' attribute only applies to functions",
14777 IDENTIFIER_POINTER (name));
14778 *no_add_attrs = true;
14781 return NULL_TREE;
14784 /* Set longcall attributes on all functions declared when
14785 rs6000_default_long_calls is true. */
14786 static void
14787 rs6000_set_default_type_attributes (tree type)
14789 if (rs6000_default_long_calls
14790 && (TREE_CODE (type) == FUNCTION_TYPE
14791 || TREE_CODE (type) == METHOD_TYPE))
14792 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
14793 NULL_TREE,
14794 TYPE_ATTRIBUTES (type));
14797 /* Return a reference suitable for calling a function with the
14798 longcall attribute. */
14800 struct rtx_def *
14801 rs6000_longcall_ref (rtx call_ref)
14803 const char *call_name;
14804 tree node;
14806 if (GET_CODE (call_ref) != SYMBOL_REF)
14807 return call_ref;
14809 /* System V adds '.' to the internal name, so skip them. */
14810 call_name = XSTR (call_ref, 0);
14811 if (*call_name == '.')
14813 while (*call_name == '.')
14814 call_name++;
14816 node = get_identifier (call_name);
14817 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
14820 return force_reg (Pmode, call_ref);
14823 #ifdef USING_ELFOS_H
14825 /* A C statement or statements to switch to the appropriate section
14826 for output of RTX in mode MODE. You can assume that RTX is some
14827 kind of constant in RTL. The argument MODE is redundant except in
14828 the case of a `const_int' rtx. Select the section by calling
14829 `text_section' or one of the alternatives for other sections.
14831 Do not define this macro if you put all constants in the read-only
14832 data section. */
14834 static void
14835 rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
14836 unsigned HOST_WIDE_INT align)
14838 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
14839 toc_section ();
14840 else
14841 default_elf_select_rtx_section (mode, x, align);
/* A C statement or statements to switch to the appropriate
   section for output of DECL.  DECL is either a `VAR_DECL' node
   or a constant of some sort.  RELOC indicates whether forming
   the initial value of DECL requires link-time relocations.  */

static void
rs6000_elf_select_section (tree decl, int reloc,
			   unsigned HOST_WIDE_INT align)
{
  /* Pretend that we're always building for a shared library when
     ABI_AIX, because otherwise we end up with dynamic relocations
     in read-only sections.  This happens for function pointers,
     references to vtables in typeinfo, and probably other cases.  */
  default_elf_select_section_1 (decl, reloc, align,
				flag_pic || DEFAULT_ABI == ABI_AIX);
}
/* A C statement to build up a unique section name, expressed as a
   STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
   RELOC indicates whether the initial value of EXP requires
   link-time relocations.  If you do not define this macro, GCC will use
   the symbol name prefixed by `.' as the section name.  Note - this
   macro can now be called for uninitialized data items as well as
   initialized data and functions.  */

static void
rs6000_elf_unique_section (tree decl, int reloc)
{
  /* As above, pretend that we're always building for a shared library
     when ABI_AIX, to avoid dynamic relocations in read-only sections.  */
  default_unique_section_1 (decl, reloc,
			    flag_pic || DEFAULT_ABI == ABI_AIX);
}
14878 /* For a SYMBOL_REF, set generic flags and then perform some
14879 target-specific processing.
14881 When the AIX ABI is requested on a non-AIX system, replace the
14882 function name with the real name (with a leading .) rather than the
14883 function descriptor name. This saves a lot of overriding code to
14884 read the prefixes. */
14886 static void
14887 rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
14889 default_encode_section_info (decl, rtl, first);
14891 if (first
14892 && TREE_CODE (decl) == FUNCTION_DECL
14893 && !TARGET_AIX
14894 && DEFAULT_ABI == ABI_AIX)
14896 rtx sym_ref = XEXP (rtl, 0);
14897 size_t len = strlen (XSTR (sym_ref, 0));
14898 char *str = alloca (len + 2);
14899 str[0] = '.';
14900 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
14901 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
14905 static bool
14906 rs6000_elf_in_small_data_p (tree decl)
14908 if (rs6000_sdata == SDATA_NONE)
14909 return false;
14911 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
14913 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
14914 if (strcmp (section, ".sdata") == 0
14915 || strcmp (section, ".sdata2") == 0
14916 || strcmp (section, ".sbss") == 0
14917 || strcmp (section, ".sbss2") == 0
14918 || strcmp (section, ".PPC.EMB.sdata0") == 0
14919 || strcmp (section, ".PPC.EMB.sbss0") == 0)
14920 return true;
14922 else
14924 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
14926 if (size > 0
14927 && (unsigned HOST_WIDE_INT) size <= g_switch_value
14928 /* If it's not public, and we're not going to reference it there,
14929 there's no need to put it in the small data section. */
14930 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
14931 return true;
14934 return false;
14937 #endif /* USING_ELFOS_H */
14940 /* Return a REG that occurs in ADDR with coefficient 1.
14941 ADDR can be effectively incremented by incrementing REG.
14943 r0 is special and we must not select it as an address
14944 register by this routine since our caller will try to
14945 increment the returned register via an "la" instruction. */
14947 struct rtx_def *
14948 find_addr_reg (rtx addr)
14950 while (GET_CODE (addr) == PLUS)
14952 if (GET_CODE (XEXP (addr, 0)) == REG
14953 && REGNO (XEXP (addr, 0)) != 0)
14954 addr = XEXP (addr, 0);
14955 else if (GET_CODE (XEXP (addr, 1)) == REG
14956 && REGNO (XEXP (addr, 1)) != 0)
14957 addr = XEXP (addr, 1);
14958 else if (CONSTANT_P (XEXP (addr, 0)))
14959 addr = XEXP (addr, 1);
14960 else if (CONSTANT_P (XEXP (addr, 1)))
14961 addr = XEXP (addr, 0);
14962 else
14963 abort ();
14965 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
14966 return addr;
14967 abort ();
/* Report a fatal error for an invalid address in OP; does not return.  */

void
rs6000_fatal_bad_address (rtx op)
{
  fatal_insn ("bad address", op);
}
14976 #if TARGET_MACHO
14978 #if 0
/* Returns 1 if OP is either a symbol reference or a sum of a symbol
   reference and a constant.  (Currently compiled out via #if 0.)  */

int
symbolic_operand (rtx op)
{
  switch (GET_CODE (op))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return 1;
    case CONST:
      op = XEXP (op, 0);
      /* NOTE(review): the mix of || and && below relies on && binding
	 tighter than ||; the bare SYMBOL_REF alternative short-circuits
	 before the (SYMBOL_REF || LABEL_REF) && CONST_INT test --
	 verify this grouping is intended if the code is ever revived.  */
      return (GET_CODE (op) == SYMBOL_REF ||
	      (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
	       || GET_CODE (XEXP (op, 0)) == LABEL_REF)
	      && GET_CODE (XEXP (op, 1)) == CONST_INT);
    default:
      return 0;
    }
}
15000 #endif
15002 #if TARGET_MACHO
/* Chain of TREE_LIST nodes, one per pending branch island
   (PURPOSE = function name, VALUE = island label); flushed by
   macho_branch_islands.  */
static tree branch_island_list = 0;

/* Remember to generate a branch island for far calls to the given
   function.  */

static void
add_compiler_branch_island (tree label_name, tree function_name,
			    int line_number)
{
  tree branch_island = build_tree_list (function_name, label_name);

  /* The source line number is stashed in TREE_TYPE as an INTEGER_CST;
     the new entry is pushed onto the head of the list.  */
  TREE_TYPE (branch_island) = build_int_2 (line_number, 0);
  TREE_CHAIN (branch_island) = branch_island_list;
  branch_island_list = branch_island;
}
/* Accessors for the fields of a branch-island TREE_LIST entry.  */
#define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND)
#define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND)
#define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \
  TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))

/* Generate far-jump branch islands for everything on the
   branch_island_list.  Invoked immediately after the last instruction
   of the epilogue has been emitted; the branch-islands must be
   appended to, and contiguous with, the function body.  Mach-O stubs
   are generated in machopic_output_stub().  */

static void
macho_branch_islands (void)
{
  char tmp_buf[512];
  tree branch_island;

  for (branch_island = branch_island_list;
       branch_island;
       branch_island = TREE_CHAIN (branch_island))
    {
      const char *label =
	IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
      const char *name =
	darwin_strip_name_encoding (
	  IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island)));
      char name_buf[512];
      /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF().  */
      if (name[0] == '*' || name[0] == '&')
	strcpy (name_buf, name+1);
      else
	{
	  name_buf[0] = '_';
	  strcpy (name_buf+1, name);
	}
      strcpy (tmp_buf, "\n");
      strcat (tmp_buf, label);
#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
      if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
	fprintf (asm_out_file, "\t.stabd 68,0," HOST_WIDE_INT_PRINT_UNSIGNED "\n",
		 BRANCH_ISLAND_LINE_NUMBER (branch_island));
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
      if (flag_pic)
	{
	  /* PIC island: obtain the current address via bcl/mflr, then
	     form the target address relative to the local _pic label.  */
	  strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
	  strcat (tmp_buf, label);
	  strcat (tmp_buf, "_pic\n");
	  strcat (tmp_buf, label);
	  strcat (tmp_buf, "_pic:\n\tmflr r11\n");

	  strcat (tmp_buf, "\taddis r11,r11,ha16(");
	  strcat (tmp_buf, name_buf);
	  strcat (tmp_buf, " - ");
	  strcat (tmp_buf, label);
	  strcat (tmp_buf, "_pic)\n");

	  strcat (tmp_buf, "\tmtlr r0\n");

	  strcat (tmp_buf, "\taddi r12,r11,lo16(");
	  strcat (tmp_buf, name_buf);
	  strcat (tmp_buf, " - ");
	  strcat (tmp_buf, label);
	  strcat (tmp_buf, "_pic)\n");

	  strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
	}
      else
	{
	  /* Non-PIC island: load the absolute target address into r12
	     and branch through CTR.  */
	  strcat (tmp_buf, ":\nlis r12,hi16(");
	  strcat (tmp_buf, name_buf);
	  strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
	  strcat (tmp_buf, name_buf);
	  strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
	}
      output_asm_insn (tmp_buf, 0);
#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
      if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
	fprintf (asm_out_file, "\t.stabd 68,0," HOST_WIDE_INT_PRINT_UNSIGNED "\n",
		 BRANCH_ISLAND_LINE_NUMBER (branch_island));
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
    }

  branch_island_list = 0;
}
15103 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
15104 already there or not. */
15106 static int
15107 no_previous_def (tree function_name)
15109 tree branch_island;
15110 for (branch_island = branch_island_list;
15111 branch_island;
15112 branch_island = TREE_CHAIN (branch_island))
15113 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
15114 return 0;
15115 return 1;
15118 /* GET_PREV_LABEL gets the label name from the previous definition of
15119 the function. */
15121 static tree
15122 get_prev_label (tree function_name)
15124 tree branch_island;
15125 for (branch_island = branch_island_list;
15126 branch_island;
15127 branch_island = TREE_CHAIN (branch_island))
15128 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
15129 return BRANCH_ISLAND_LABEL_NAME (branch_island);
15130 return 0;
15133 /* INSN is either a function call or a millicode call. It may have an
15134 unconditional jump in its delay slot.
15136 CALL_DEST is the routine we are calling. */
15138 char *
15139 output_call (rtx insn, rtx *operands, int dest_operand_number, int cookie_operand_number)
15141 static char buf[256];
15142 if (GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
15143 && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
15145 tree labelname;
15146 tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));
15148 if (no_previous_def (funname))
15150 int line_number = 0;
15151 rtx label_rtx = gen_label_rtx ();
15152 char *label_buf, temp_buf[256];
15153 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
15154 CODE_LABEL_NUMBER (label_rtx));
15155 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
15156 labelname = get_identifier (label_buf);
15157 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
15158 if (insn)
15159 line_number = NOTE_LINE_NUMBER (insn);
15160 add_compiler_branch_island (labelname, funname, line_number);
15162 else
15163 labelname = get_prev_label (funname);
15165 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
15166 instruction will reach 'foo', otherwise link as 'bl L42'".
15167 "L42" should be a 'branch island', that will do a far jump to
15168 'foo'. Branch islands are generated in
15169 macho_branch_islands(). */
15170 sprintf (buf, "jbsr %%z%d,%.246s",
15171 dest_operand_number, IDENTIFIER_POINTER (labelname));
15173 else
15174 sprintf (buf, "bl %%z%d", dest_operand_number);
15175 return buf;
15178 #endif /* TARGET_MACHO */
15180 /* Generate PIC and indirect symbol stubs. */
15182 void
15183 machopic_output_stub (FILE *file, const char *symb, const char *stub)
15185 unsigned int length;
15186 char *symbol_name, *lazy_ptr_name;
15187 char *local_label_0;
15188 static int label = 0;
15190 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
15191 symb = (*targetm.strip_name_encoding) (symb);
15194 length = strlen (symb);
15195 symbol_name = alloca (length + 32);
15196 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
15198 lazy_ptr_name = alloca (length + 32);
15199 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
15201 if (flag_pic == 2)
15202 machopic_picsymbol_stub1_section ();
15203 else
15204 machopic_symbol_stub1_section ();
15205 fprintf (file, "\t.align 2\n");
15207 fprintf (file, "%s:\n", stub);
15208 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
15210 if (flag_pic == 2)
15212 label++;
15213 local_label_0 = alloca (sizeof("\"L0000000000$spb\""));
15214 sprintf (local_label_0, "\"L%011d$spb\"", label);
15216 fprintf (file, "\tmflr r0\n");
15217 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
15218 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
15219 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
15220 lazy_ptr_name, local_label_0);
15221 fprintf (file, "\tmtlr r0\n");
15222 fprintf (file, "\tlwzu r12,lo16(%s-%s)(r11)\n",
15223 lazy_ptr_name, local_label_0);
15224 fprintf (file, "\tmtctr r12\n");
15225 fprintf (file, "\tbctr\n");
15227 else
15229 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
15230 fprintf (file, "\tlwzu r12,lo16(%s)(r11)\n", lazy_ptr_name);
15231 fprintf (file, "\tmtctr r12\n");
15232 fprintf (file, "\tbctr\n");
15235 machopic_lazy_symbol_ptr_section ();
15236 fprintf (file, "%s:\n", lazy_ptr_name);
15237 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
15238 fprintf (file, "\t.long dyld_stub_binding_helper\n");
15241 /* Legitimize PIC addresses. If the address is already
15242 position-independent, we return ORIG. Newly generated
15243 position-independent addresses go into a reg. This is REG if non
15244 zero, otherwise we allocate register(s) as necessary. */
15246 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
15249 rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
15250 rtx reg)
15252 rtx base, offset;
15254 if (reg == NULL && ! reload_in_progress && ! reload_completed)
15255 reg = gen_reg_rtx (Pmode);
15257 if (GET_CODE (orig) == CONST)
15259 if (GET_CODE (XEXP (orig, 0)) == PLUS
15260 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
15261 return orig;
15263 if (GET_CODE (XEXP (orig, 0)) == PLUS)
15265 /* Use a different reg for the intermediate value, as
15266 it will be marked UNCHANGING. */
15267 rtx reg_temp = no_new_pseudos ? reg : gen_reg_rtx (Pmode);
15269 base =
15270 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
15271 Pmode, reg_temp);
15272 offset =
15273 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
15274 Pmode, reg);
15276 else
15277 abort ();
15279 if (GET_CODE (offset) == CONST_INT)
15281 if (SMALL_INT (offset))
15282 return plus_constant (base, INTVAL (offset));
15283 else if (! reload_in_progress && ! reload_completed)
15284 offset = force_reg (Pmode, offset);
15285 else
15287 rtx mem = force_const_mem (Pmode, orig);
15288 return machopic_legitimize_pic_address (mem, Pmode, reg);
15291 return gen_rtx_PLUS (Pmode, base, offset);
15294 /* Fall back on generic machopic code. */
15295 return machopic_legitimize_pic_address (orig, mode, reg);
/* This is just a placeholder to make linking work without having to
   add this to the generic Darwin EXTRA_SECTIONS.  If -mcall-aix is
   ever needed for Darwin (not too likely!) this would have to get a
   real definition.  */

void
toc_section (void)
{
  /* Intentionally empty; see comment above.  */
}
15308 #endif /* TARGET_MACHO */
15310 #if TARGET_ELF
15311 static unsigned int
15312 rs6000_elf_section_type_flags (tree decl, const char *name, int reloc)
15314 return default_section_type_flags_1 (decl, name, reloc,
15315 flag_pic || DEFAULT_ABI == ABI_AIX);
15318 /* Record an element in the table of global constructors. SYMBOL is
15319 a SYMBOL_REF of the function to be called; PRIORITY is a number
15320 between 0 and MAX_INIT_PRIORITY.
15322 This differs from default_named_section_asm_out_constructor in
15323 that we have special handling for -mrelocatable. */
15325 static void
15326 rs6000_elf_asm_out_constructor (rtx symbol, int priority)
15328 const char *section = ".ctors";
15329 char buf[16];
15331 if (priority != DEFAULT_INIT_PRIORITY)
15333 sprintf (buf, ".ctors.%.5u",
15334 /* Invert the numbering so the linker puts us in the proper
15335 order; constructors are run from right to left, and the
15336 linker sorts in increasing order. */
15337 MAX_INIT_PRIORITY - priority);
15338 section = buf;
15341 named_section_flags (section, SECTION_WRITE);
15342 assemble_align (POINTER_SIZE);
15344 if (TARGET_RELOCATABLE)
15346 fputs ("\t.long (", asm_out_file);
15347 output_addr_const (asm_out_file, symbol);
15348 fputs (")@fixup\n", asm_out_file);
15350 else
15351 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
15354 static void
15355 rs6000_elf_asm_out_destructor (rtx symbol, int priority)
15357 const char *section = ".dtors";
15358 char buf[16];
15360 if (priority != DEFAULT_INIT_PRIORITY)
15362 sprintf (buf, ".dtors.%.5u",
15363 /* Invert the numbering so the linker puts us in the proper
15364 order; constructors are run from right to left, and the
15365 linker sorts in increasing order. */
15366 MAX_INIT_PRIORITY - priority);
15367 section = buf;
15370 named_section_flags (section, SECTION_WRITE);
15371 assemble_align (POINTER_SIZE);
15373 if (TARGET_RELOCATABLE)
15375 fputs ("\t.long (", asm_out_file);
15376 output_addr_const (asm_out_file, symbol);
15377 fputs (")@fixup\n", asm_out_file);
15379 else
15380 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
15383 void
15384 rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
15386 if (TARGET_64BIT)
15388 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
15389 ASM_OUTPUT_LABEL (file, name);
15390 fputs (DOUBLE_INT_ASM_OP, file);
15391 putc ('.', file);
15392 assemble_name (file, name);
15393 fputs (",.TOC.@tocbase,0\n\t.previous\n\t.size\t", file);
15394 assemble_name (file, name);
15395 fputs (",24\n\t.type\t.", file);
15396 assemble_name (file, name);
15397 fputs (",@function\n", file);
15398 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
15400 fputs ("\t.globl\t.", file);
15401 assemble_name (file, name);
15402 putc ('\n', file);
15404 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
15405 putc ('.', file);
15406 ASM_OUTPUT_LABEL (file, name);
15407 return;
15410 if (TARGET_RELOCATABLE
15411 && (get_pool_size () != 0 || current_function_profile)
15412 && uses_TOC ())
15414 char buf[256];
15416 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
15418 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
15419 fprintf (file, "\t.long ");
15420 assemble_name (file, buf);
15421 putc ('-', file);
15422 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
15423 assemble_name (file, buf);
15424 putc ('\n', file);
15427 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
15428 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
15430 if (DEFAULT_ABI == ABI_AIX)
15432 const char *desc_name, *orig_name;
15434 orig_name = (*targetm.strip_name_encoding) (name);
15435 desc_name = orig_name;
15436 while (*desc_name == '.')
15437 desc_name++;
15439 if (TREE_PUBLIC (decl))
15440 fprintf (file, "\t.globl %s\n", desc_name);
15442 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
15443 fprintf (file, "%s:\n", desc_name);
15444 fprintf (file, "\t.long %s\n", orig_name);
15445 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
15446 if (DEFAULT_ABI == ABI_AIX)
15447 fputs ("\t.long 0\n", file);
15448 fprintf (file, "\t.previous\n");
15450 ASM_OUTPUT_LABEL (file, name);
15452 #endif
15454 #if TARGET_XCOFF
15455 static void
15456 rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
15458 fputs (GLOBAL_ASM_OP, stream);
15459 RS6000_OUTPUT_BASENAME (stream, name);
15460 putc ('\n', stream);
15463 static void
15464 rs6000_xcoff_asm_named_section (const char *name, unsigned int flags)
15466 int smclass;
15467 static const char * const suffix[3] = { "PR", "RO", "RW" };
15469 if (flags & SECTION_CODE)
15470 smclass = 0;
15471 else if (flags & SECTION_WRITE)
15472 smclass = 2;
15473 else
15474 smclass = 1;
15476 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
15477 (flags & SECTION_CODE) ? "." : "",
15478 name, suffix[smclass], flags & SECTION_ENTSIZE);
15481 static void
15482 rs6000_xcoff_select_section (tree decl, int reloc,
15483 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
15485 if (decl_readonly_section_1 (decl, reloc, 1))
15487 if (TREE_PUBLIC (decl))
15488 read_only_data_section ();
15489 else
15490 read_only_private_data_section ();
15492 else
15494 if (TREE_PUBLIC (decl))
15495 data_section ();
15496 else
15497 private_data_section ();
15501 static void
15502 rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
15504 const char *name;
15506 /* Use select_section for private and uninitialized data. */
15507 if (!TREE_PUBLIC (decl)
15508 || DECL_COMMON (decl)
15509 || DECL_INITIAL (decl) == NULL_TREE
15510 || DECL_INITIAL (decl) == error_mark_node
15511 || (flag_zero_initialized_in_bss
15512 && initializer_zerop (DECL_INITIAL (decl))))
15513 return;
15515 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
15516 name = (*targetm.strip_name_encoding) (name);
15517 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
15520 /* Select section for constant in constant pool.
15522 On RS/6000, all constants are in the private read-only data area.
15523 However, if this is being placed in the TOC it must be output as a
15524 toc entry. */
15526 static void
15527 rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
15528 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
15530 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
15531 toc_section ();
15532 else
15533 read_only_private_data_section ();
/* Remove any trailing [DS] or the like from the symbol name.  Returns
   NAME unchanged (past a leading '*') when there is no mapping-class
   suffix, otherwise a fresh GC-allocated copy with the last four
   characters dropped.  */

static const char *
rs6000_xcoff_strip_name_encoding (const char *name)
{
  size_t len;

  if (*name == '*')
    name++;
  len = strlen (name);
  /* A mapping-class suffix has the form "[XX]", i.e. four characters;
     require at least that many before stripping so that LEN - 4 cannot
     wrap around for a short name that happens to end in ']'.  */
  if (len >= 4 && name[len - 1] == ']')
    return ggc_alloc_string (name, len - 4);
  else
    return name;
}
15551 /* Section attributes. AIX is always PIC. */
15553 static unsigned int
15554 rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
15556 unsigned int align;
15557 unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);
15559 /* Align to at least UNIT size. */
15560 if (flags & SECTION_CODE)
15561 align = MIN_UNITS_PER_WORD;
15562 else
15563 /* Increase alignment of large objects if not already stricter. */
15564 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
15565 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
15566 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
15568 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
15571 /* Output at beginning of assembler file.
15573 Initialize the section names for the RS/6000 at this point.
15575 Specify filename, including full path, to assembler.
15577 We want to go into the TOC section so at least one .toc will be emitted.
15578 Also, in order to output proper .bs/.es pairs, we need at least one static
15579 [RW] section emitted.
15581 Finally, declare mcount when profiling to make the assembler happy. */
15583 static void
15584 rs6000_xcoff_file_start (void)
15586 rs6000_gen_section_name (&xcoff_bss_section_name,
15587 main_input_filename, ".bss_");
15588 rs6000_gen_section_name (&xcoff_private_data_section_name,
15589 main_input_filename, ".rw_");
15590 rs6000_gen_section_name (&xcoff_read_only_section_name,
15591 main_input_filename, ".ro_");
15593 fputs ("\t.file\t", asm_out_file);
15594 output_quoted_string (asm_out_file, main_input_filename);
15595 fputc ('\n', asm_out_file);
15596 toc_section ();
15597 if (write_symbols != NO_DEBUG)
15598 private_data_section ();
15599 text_section ();
15600 if (profile_flag)
15601 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
15602 rs6000_file_start ();
15605 /* Output at end of assembler file.
15606 On the RS/6000, referencing data should automatically pull in text. */
15608 static void
15609 rs6000_xcoff_file_end (void)
15611 text_section ();
15612 fputs ("_section_.text:\n", asm_out_file);
15613 data_section ();
15614 fputs (TARGET_32BIT
15615 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
15616 asm_out_file);
15618 #endif /* TARGET_XCOFF */
15620 #if TARGET_MACHO
15621 /* Cross-module name binding. Darwin does not support overriding
15622 functions at dynamic-link time. */
15624 static bool
15625 rs6000_binds_local_p (tree decl)
15627 return default_binds_local_p_1 (decl, 0);
15629 #endif
15631 /* Compute a (partial) cost for rtx X. Return true if the complete
15632 cost has been computed, and false if subexpressions should be
15633 scanned. In either case, *TOTAL contains the cost result. */
15635 static bool
15636 rs6000_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
15637 int *total)
15639 switch (code)
15641 /* On the RS/6000, if it is valid in the insn, it is free.
15642 So this always returns 0. */
15643 case CONST_INT:
15644 case CONST:
15645 case LABEL_REF:
15646 case SYMBOL_REF:
15647 case CONST_DOUBLE:
15648 case HIGH:
15649 *total = 0;
15650 return true;
15652 case PLUS:
15653 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
15654 && ((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1))
15655 + 0x8000) >= 0x10000)
15656 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
15657 ? COSTS_N_INSNS (2)
15658 : COSTS_N_INSNS (1));
15659 return true;
15661 case AND:
15662 case IOR:
15663 case XOR:
15664 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
15665 && (INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff)) != 0
15666 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
15667 ? COSTS_N_INSNS (2)
15668 : COSTS_N_INSNS (1));
15669 return true;
15671 case MULT:
15672 if (optimize_size)
15674 *total = COSTS_N_INSNS (2);
15675 return true;
15677 switch (rs6000_cpu)
15679 case PROCESSOR_RIOS1:
15680 case PROCESSOR_PPC405:
15681 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15682 ? COSTS_N_INSNS (5)
15683 : (INTVAL (XEXP (x, 1)) >= -256
15684 && INTVAL (XEXP (x, 1)) <= 255)
15685 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
15686 return true;
15688 case PROCESSOR_PPC440:
15689 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15690 ? COSTS_N_INSNS (3)
15691 : COSTS_N_INSNS (2));
15692 return true;
15694 case PROCESSOR_RS64A:
15695 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15696 ? GET_MODE (XEXP (x, 1)) != DImode
15697 ? COSTS_N_INSNS (20) : COSTS_N_INSNS (34)
15698 : (INTVAL (XEXP (x, 1)) >= -256
15699 && INTVAL (XEXP (x, 1)) <= 255)
15700 ? COSTS_N_INSNS (8) : COSTS_N_INSNS (12));
15701 return true;
15703 case PROCESSOR_RIOS2:
15704 case PROCESSOR_MPCCORE:
15705 case PROCESSOR_PPC604e:
15706 *total = COSTS_N_INSNS (2);
15707 return true;
15709 case PROCESSOR_PPC601:
15710 *total = COSTS_N_INSNS (5);
15711 return true;
15713 case PROCESSOR_PPC603:
15714 case PROCESSOR_PPC7400:
15715 case PROCESSOR_PPC750:
15716 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15717 ? COSTS_N_INSNS (5)
15718 : (INTVAL (XEXP (x, 1)) >= -256
15719 && INTVAL (XEXP (x, 1)) <= 255)
15720 ? COSTS_N_INSNS (2) : COSTS_N_INSNS (3));
15721 return true;
15723 case PROCESSOR_PPC7450:
15724 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15725 ? COSTS_N_INSNS (4)
15726 : COSTS_N_INSNS (3));
15727 return true;
15729 case PROCESSOR_PPC403:
15730 case PROCESSOR_PPC604:
15731 case PROCESSOR_PPC8540:
15732 *total = COSTS_N_INSNS (4);
15733 return true;
15735 case PROCESSOR_PPC620:
15736 case PROCESSOR_PPC630:
15737 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15738 ? GET_MODE (XEXP (x, 1)) != DImode
15739 ? COSTS_N_INSNS (5) : COSTS_N_INSNS (7)
15740 : (INTVAL (XEXP (x, 1)) >= -256
15741 && INTVAL (XEXP (x, 1)) <= 255)
15742 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
15743 return true;
15745 case PROCESSOR_POWER4:
15746 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15747 ? GET_MODE (XEXP (x, 1)) != DImode
15748 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4)
15749 : COSTS_N_INSNS (2));
15750 return true;
15752 default:
15753 abort ();
15756 case DIV:
15757 case MOD:
15758 if (GET_CODE (XEXP (x, 1)) == CONST_INT
15759 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
15761 *total = COSTS_N_INSNS (2);
15762 return true;
15764 /* FALLTHRU */
15766 case UDIV:
15767 case UMOD:
15768 switch (rs6000_cpu)
15770 case PROCESSOR_RIOS1:
15771 *total = COSTS_N_INSNS (19);
15772 return true;
15774 case PROCESSOR_RIOS2:
15775 *total = COSTS_N_INSNS (13);
15776 return true;
15778 case PROCESSOR_RS64A:
15779 *total = (GET_MODE (XEXP (x, 1)) != DImode
15780 ? COSTS_N_INSNS (65)
15781 : COSTS_N_INSNS (67));
15782 return true;
15784 case PROCESSOR_MPCCORE:
15785 *total = COSTS_N_INSNS (6);
15786 return true;
15788 case PROCESSOR_PPC403:
15789 *total = COSTS_N_INSNS (33);
15790 return true;
15792 case PROCESSOR_PPC405:
15793 *total = COSTS_N_INSNS (35);
15794 return true;
15796 case PROCESSOR_PPC440:
15797 *total = COSTS_N_INSNS (34);
15798 return true;
15800 case PROCESSOR_PPC601:
15801 *total = COSTS_N_INSNS (36);
15802 return true;
15804 case PROCESSOR_PPC603:
15805 *total = COSTS_N_INSNS (37);
15806 return true;
15808 case PROCESSOR_PPC604:
15809 case PROCESSOR_PPC604e:
15810 *total = COSTS_N_INSNS (20);
15811 return true;
15813 case PROCESSOR_PPC620:
15814 case PROCESSOR_PPC630:
15815 *total = (GET_MODE (XEXP (x, 1)) != DImode
15816 ? COSTS_N_INSNS (21)
15817 : COSTS_N_INSNS (37));
15818 return true;
15820 case PROCESSOR_PPC750:
15821 case PROCESSOR_PPC8540:
15822 case PROCESSOR_PPC7400:
15823 *total = COSTS_N_INSNS (19);
15824 return true;
15826 case PROCESSOR_PPC7450:
15827 *total = COSTS_N_INSNS (23);
15828 return true;
15830 case PROCESSOR_POWER4:
15831 *total = (GET_MODE (XEXP (x, 1)) != DImode
15832 ? COSTS_N_INSNS (18)
15833 : COSTS_N_INSNS (34));
15834 return true;
15836 default:
15837 abort ();
15840 case FFS:
15841 *total = COSTS_N_INSNS (4);
15842 return true;
15844 case MEM:
15845 /* MEM should be slightly more expensive than (plus (reg) (const)). */
15846 *total = 5;
15847 return true;
15849 default:
15850 return false;
15854 /* A C expression returning the cost of moving data from a register of class
15855 CLASS1 to one of CLASS2. */
15858 rs6000_register_move_cost (enum machine_mode mode,
15859 enum reg_class from, enum reg_class to)
15861 /* Moves from/to GENERAL_REGS. */
15862 if (reg_classes_intersect_p (to, GENERAL_REGS)
15863 || reg_classes_intersect_p (from, GENERAL_REGS))
15865 if (! reg_classes_intersect_p (to, GENERAL_REGS))
15866 from = to;
15868 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
15869 return (rs6000_memory_move_cost (mode, from, 0)
15870 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
15872 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift.... */
15873 else if (from == CR_REGS)
15874 return 4;
15876 else
15877 /* A move will cost one instruction per GPR moved. */
15878 return 2 * HARD_REGNO_NREGS (0, mode);
15881 /* Moving between two similar registers is just one instruction. */
15882 else if (reg_classes_intersect_p (to, from))
15883 return mode == TFmode ? 4 : 2;
15885 /* Everything else has to go through GENERAL_REGS. */
15886 else
15887 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
15888 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
15891 /* A C expressions returning the cost of moving data of MODE from a register to
15892 or from memory. */
15895 rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
15896 int in ATTRIBUTE_UNUSED)
15898 if (reg_classes_intersect_p (class, GENERAL_REGS))
15899 return 4 * HARD_REGNO_NREGS (0, mode);
15900 else if (reg_classes_intersect_p (class, FLOAT_REGS))
15901 return 4 * HARD_REGNO_NREGS (32, mode);
15902 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
15903 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO, mode);
15904 else
15905 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
15908 /* Return an RTX representing where to find the function value of a
15909 function returning MODE. */
15910 static rtx
15911 rs6000_complex_function_value (enum machine_mode mode)
15913 unsigned int regno;
15914 rtx r1, r2;
15915 enum machine_mode inner = GET_MODE_INNER (mode);
15916 unsigned int inner_bytes = GET_MODE_SIZE (inner);
15918 if (FLOAT_MODE_P (mode))
15919 regno = FP_ARG_RETURN;
15920 else
15922 regno = GP_ARG_RETURN;
15924 /* 32-bit is OK since it'll go in r3/r4. */
15925 if (TARGET_32BIT && inner_bytes >= 4)
15926 return gen_rtx_REG (mode, regno);
15929 if (inner_bytes >= 8)
15930 return gen_rtx_REG (mode, regno);
15932 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
15933 const0_rtx);
15934 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
15935 GEN_INT (inner_bytes));
15936 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
15939 /* Define how to find the value returned by a function.
15940 VALTYPE is the data type of the value (as a tree).
15941 If the precise function being called is known, FUNC is its FUNCTION_DECL;
15942 otherwise, FUNC is 0.
15944 On the SPE, both FPs and vectors are returned in r3.
15946 On RS/6000 an integer value is in r3 and a floating-point value is in
15947 fp1, unless -msoft-float. */
15950 rs6000_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
15952 enum machine_mode mode;
15953 unsigned int regno;
15955 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
15957 /* Long long return value need be split in -mpowerpc64, 32bit ABI. */
15958 return gen_rtx_PARALLEL (DImode,
15959 gen_rtvec (2,
15960 gen_rtx_EXPR_LIST (VOIDmode,
15961 gen_rtx_REG (SImode, GP_ARG_RETURN),
15962 const0_rtx),
15963 gen_rtx_EXPR_LIST (VOIDmode,
15964 gen_rtx_REG (SImode,
15965 GP_ARG_RETURN + 1),
15966 GEN_INT (4))));
15969 if ((INTEGRAL_TYPE_P (valtype)
15970 && TYPE_PRECISION (valtype) < BITS_PER_WORD)
15971 || POINTER_TYPE_P (valtype))
15972 mode = TARGET_32BIT ? SImode : DImode;
15973 else
15974 mode = TYPE_MODE (valtype);
15976 if (TREE_CODE (valtype) == REAL_TYPE && TARGET_HARD_FLOAT && TARGET_FPRS)
15977 regno = FP_ARG_RETURN;
15978 else if (TREE_CODE (valtype) == COMPLEX_TYPE
15979 && TARGET_HARD_FLOAT
15980 && targetm.calls.split_complex_arg)
15981 return rs6000_complex_function_value (mode);
15982 else if (TREE_CODE (valtype) == VECTOR_TYPE && TARGET_ALTIVEC)
15983 regno = ALTIVEC_ARG_RETURN;
15984 else
15985 regno = GP_ARG_RETURN;
15987 return gen_rtx_REG (mode, regno);
15990 /* Define how to find the value returned by a library function
15991 assuming the value has mode MODE. */
15993 rs6000_libcall_value (enum machine_mode mode)
15995 unsigned int regno;
15997 if (GET_MODE_CLASS (mode) == MODE_FLOAT
15998 && TARGET_HARD_FLOAT && TARGET_FPRS)
15999 regno = FP_ARG_RETURN;
16000 else if (ALTIVEC_VECTOR_MODE (mode))
16001 regno = ALTIVEC_ARG_RETURN;
16002 else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
16003 return rs6000_complex_function_value (mode);
16004 else
16005 regno = GP_ARG_RETURN;
16007 return gen_rtx_REG (mode, regno);
16010 /* Define the offset between two registers, FROM to be eliminated and its
16011 replacement TO, at the start of a routine. */
16012 HOST_WIDE_INT
16013 rs6000_initial_elimination_offset (int from, int to)
16015 rs6000_stack_t *info = rs6000_stack_info ();
16016 HOST_WIDE_INT offset;
16018 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
16019 offset = info->push_p ? 0 : -info->total_size;
16020 else if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
16021 offset = info->total_size;
16022 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
16023 offset = info->push_p ? info->total_size : 0;
16024 else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
16025 offset = 0;
16026 else
16027 abort ();
16029 return offset;
16032 /* Return true if TYPE is of type __ev64_opaque__. */
16034 static bool
16035 is_ev64_opaque_type (tree type)
16037 return (TARGET_SPE
16038 && (type == opaque_V2SI_type_node
16039 || type == opaque_V2SF_type_node
16040 || type == opaque_p_V2SI_type_node));
16043 static rtx
16044 rs6000_dwarf_register_span (rtx reg)
16046 unsigned regno;
16048 if (!TARGET_SPE || !SPE_VECTOR_MODE (GET_MODE (reg)))
16049 return NULL_RTX;
16051 regno = REGNO (reg);
16053 /* The duality of the SPE register size wreaks all kinds of havoc.
16054 This is a way of distinguishing r0 in 32-bits from r0 in
16055 64-bits. */
16056 return
16057 gen_rtx_PARALLEL (VOIDmode,
16058 BYTES_BIG_ENDIAN
16059 ? gen_rtvec (2,
16060 gen_rtx_REG (SImode, regno + 1200),
16061 gen_rtx_REG (SImode, regno))
16062 : gen_rtvec (2,
16063 gen_rtx_REG (SImode, regno),
16064 gen_rtx_REG (SImode, regno + 1200)));
16067 /* Map internal gcc register numbers to DWARF2 register numbers. */
16069 unsigned int
16070 rs6000_dbx_register_number (unsigned int regno)
16072 if (regno <= 63 || write_symbols != DWARF2_DEBUG)
16073 return regno;
16074 if (regno == MQ_REGNO)
16075 return 100;
16076 if (regno == LINK_REGISTER_REGNUM)
16077 return 108;
16078 if (regno == COUNT_REGISTER_REGNUM)
16079 return 109;
16080 if (CR_REGNO_P (regno))
16081 return regno - CR0_REGNO + 86;
16082 if (regno == XER_REGNO)
16083 return 101;
16084 if (ALTIVEC_REGNO_P (regno))
16085 return regno - FIRST_ALTIVEC_REGNO + 1124;
16086 if (regno == VRSAVE_REGNO)
16087 return 356;
16088 if (regno == VSCR_REGNO)
16089 return 67;
16090 if (regno == SPE_ACC_REGNO)
16091 return 99;
16092 if (regno == SPEFSCR_REGNO)
16093 return 612;
16094 /* SPE high reg number. We get these values of regno from
16095 rs6000_dwarf_register_span. */
16096 if (regno >= 1200 && regno < 1232)
16097 return regno;
16099 abort ();
16102 #include "gt-rs6000.h"