/* Subroutines used for code generation on IBM RS/6000.
   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002 Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
27 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
41 #include "basic-block.h"
42 #include "integrate.h"
48 #include "target-def.h"
49 #include "langhooks.h"
52 #ifndef TARGET_NO_PROTOTYPE
53 #define TARGET_NO_PROTOTYPE 0
/* Local minimum/maximum helpers.  Classic function-like macros: each
   argument may be evaluated more than once, so callers must not pass
   expressions with side effects (e.g. min (i++, j)).  */
#define min(A,B)	((A) < (B) ? (A) : (B))
#define max(A,B)	((A) > (B) ? (A) : (B))
61 enum processor_type rs6000_cpu
;
62 struct rs6000_cpu_select rs6000_select
[3] =
64 /* switch name, tune arch */
65 { (const char *)0, "--with-cpu=", 1, 1 },
66 { (const char *)0, "-mcpu=", 1, 1 },
67 { (const char *)0, "-mtune=", 1, 0 },
/* Size of long double */
const char *rs6000_long_double_size_string;	/* Argument text of -mlong-double-<n>.  */
int rs6000_long_double_type_size;		/* Long double size in bits.  */

/* Whether -mabi=altivec has appeared */
int rs6000_altivec_abi;

/* Whether VRSAVE instructions should be generated.  */
int rs6000_altivec_vrsave;

/* String from -mvrsave= option.  */
const char *rs6000_altivec_vrsave_string;

/* NOTE(review): this fragment also carried comments for SPE-ABI, isel
   and FPR flags whose declarations are not visible here -- confirm
   against the full file.  */

/* String from -misel=.  */
const char *rs6000_isel_string;

/* Set to non-zero once AIX common-mode calls have been defined.  */
static int common_mode_defined;
98 /* Save information from a "cmpxx" operation until the branch or scc is
100 rtx rs6000_compare_op0
, rs6000_compare_op1
;
101 int rs6000_compare_fp_p
;
103 /* Label number of label created for -mrelocatable, to call to so we can
104 get the address of the GOT section */
105 int rs6000_pic_labelno
;
108 /* Which abi to adhere to */
109 const char *rs6000_abi_name
= RS6000_ABI_NAME
;
111 /* Semantics of the small data area */
112 enum rs6000_sdata_type rs6000_sdata
= SDATA_DATA
;
114 /* Which small data model to use */
115 const char *rs6000_sdata_name
= (char *)0;
117 /* Counter for labels which are to be placed in .fixup. */
118 int fixuplabelno
= 0;
121 /* ABI enumeration available for subtarget to use. */
122 enum rs6000_abi rs6000_current_abi
;
124 /* ABI string from -mabi= option. */
125 const char *rs6000_abi_string
;
128 const char *rs6000_debug_name
;
129 int rs6000_debug_stack
; /* debug stack applications */
130 int rs6000_debug_arg
; /* debug argument handling */
132 const char *rs6000_traceback_name
;
134 traceback_default
= 0,
/* Name of the label used to reference the TOC; written by
   ASM_GENERATE_INTERNAL_LABEL in rs6000_override_options.
   NOTE(review): the companion "TOC is initialized" flag declaration is
   not visible in this fragment -- confirm against the full file.  */
char toc_label_name[10];

/* Alias set for saves and restores from the rs6000 stack.  */
static int rs6000_sr_alias_set;

/* Call distance, overridden by -mlongcall and #pragma longcall(1).
   The only place that looks at this is rs6000_set_default_type_attributes;
   everywhere else should rely on the presence or absence of a longcall
   attribute on the function declaration.  */
int rs6000_default_long_calls;
const char *rs6000_longcall_switch;	/* Text of the -m(no-)longcall option used.  */
154 struct builtin_description
156 /* mask is not const because we're going to alter it below. This
157 nonsense will go away when we rewrite the -march infrastructure
158 to give us more target flag bits. */
160 const enum insn_code icode
;
161 const char *const name
;
162 const enum rs6000_builtins code
;
165 static void rs6000_add_gc_roots
PARAMS ((void));
166 static int num_insns_constant_wide
PARAMS ((HOST_WIDE_INT
));
167 static rtx expand_block_move_mem
PARAMS ((enum machine_mode
, rtx
, rtx
));
168 static void validate_condition_mode
169 PARAMS ((enum rtx_code
, enum machine_mode
));
170 static rtx rs6000_generate_compare
PARAMS ((enum rtx_code
));
171 static void rs6000_maybe_dead
PARAMS ((rtx
));
172 static void rs6000_emit_stack_tie
PARAMS ((void));
173 static void rs6000_frame_related
PARAMS ((rtx
, rtx
, HOST_WIDE_INT
, rtx
, rtx
));
174 static void emit_frame_save
PARAMS ((rtx
, rtx
, enum machine_mode
,
175 unsigned int, int, int));
176 static rtx gen_frame_mem_offset
PARAMS ((enum machine_mode
, rtx
, int));
177 static void rs6000_emit_allocate_stack
PARAMS ((HOST_WIDE_INT
, int));
178 static unsigned rs6000_hash_constant
PARAMS ((rtx
));
179 static unsigned toc_hash_function
PARAMS ((const void *));
180 static int toc_hash_eq
PARAMS ((const void *, const void *));
181 static int toc_hash_mark_entry
PARAMS ((void **, void *));
182 static void toc_hash_mark_table
PARAMS ((void *));
183 static int constant_pool_expr_1
PARAMS ((rtx
, int *, int *));
184 static struct machine_function
* rs6000_init_machine_status
PARAMS ((void));
185 static bool rs6000_assemble_integer
PARAMS ((rtx
, unsigned int, int));
186 static int rs6000_ra_ever_killed
PARAMS ((void));
187 static tree rs6000_handle_longcall_attribute
PARAMS ((tree
*, tree
, tree
, int, bool *));
188 const struct attribute_spec rs6000_attribute_table
[];
189 static void rs6000_set_default_type_attributes
PARAMS ((tree
));
190 static void rs6000_output_function_prologue
PARAMS ((FILE *, HOST_WIDE_INT
));
191 static void rs6000_output_function_epilogue
PARAMS ((FILE *, HOST_WIDE_INT
));
192 static rtx rs6000_emit_set_long_const
PARAMS ((rtx
,
193 HOST_WIDE_INT
, HOST_WIDE_INT
));
195 static unsigned int rs6000_elf_section_type_flags
PARAMS ((tree
, const char *,
197 static void rs6000_elf_asm_out_constructor
PARAMS ((rtx
, int));
198 static void rs6000_elf_asm_out_destructor
PARAMS ((rtx
, int));
199 static void rs6000_elf_select_section
PARAMS ((tree
, int,
200 unsigned HOST_WIDE_INT
));
201 static void rs6000_elf_unique_section
PARAMS ((tree
, int));
202 static void rs6000_elf_select_rtx_section
PARAMS ((enum machine_mode
, rtx
,
203 unsigned HOST_WIDE_INT
));
204 static void rs6000_elf_encode_section_info
PARAMS ((tree
, int));
205 static const char *rs6000_elf_strip_name_encoding
PARAMS ((const char *));
208 static void rs6000_xcoff_asm_globalize_label
PARAMS ((FILE *, const char *));
209 static void rs6000_xcoff_asm_named_section
PARAMS ((const char *, unsigned int));
210 static void rs6000_xcoff_select_section
PARAMS ((tree
, int,
211 unsigned HOST_WIDE_INT
));
212 static void rs6000_xcoff_unique_section
PARAMS ((tree
, int));
213 static void rs6000_xcoff_select_rtx_section
PARAMS ((enum machine_mode
, rtx
,
214 unsigned HOST_WIDE_INT
));
215 static const char * rs6000_xcoff_strip_name_encoding
PARAMS ((const char *));
217 static void rs6000_xcoff_encode_section_info
PARAMS ((tree
, int))
219 static int rs6000_adjust_cost
PARAMS ((rtx
, rtx
, rtx
, int));
220 static int rs6000_adjust_priority
PARAMS ((rtx
, int));
221 static int rs6000_issue_rate
PARAMS ((void));
223 static void rs6000_init_builtins
PARAMS ((void));
224 static rtx rs6000_expand_unop_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
225 static rtx rs6000_expand_binop_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
226 static rtx rs6000_expand_ternop_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
227 static rtx rs6000_expand_builtin
PARAMS ((tree
, rtx
, rtx
, enum machine_mode
, int));
228 static void altivec_init_builtins
PARAMS ((void));
229 static void rs6000_common_init_builtins
PARAMS ((void));
231 static void enable_mask_for_builtins
PARAMS ((struct builtin_description
*,
232 int, enum rs6000_builtins
,
233 enum rs6000_builtins
));
234 static void spe_init_builtins
PARAMS ((void));
235 static rtx spe_expand_builtin
PARAMS ((tree
, rtx
, bool *));
236 static rtx spe_expand_predicate_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
237 static rtx spe_expand_evsel_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
238 static int rs6000_emit_int_cmove
PARAMS ((rtx
, rtx
, rtx
, rtx
));
240 static rtx altivec_expand_builtin
PARAMS ((tree
, rtx
, bool *));
241 static rtx altivec_expand_ld_builtin
PARAMS ((tree
, rtx
, bool *));
242 static rtx altivec_expand_st_builtin
PARAMS ((tree
, rtx
, bool *));
243 static rtx altivec_expand_dst_builtin
PARAMS ((tree
, rtx
, bool *));
244 static rtx altivec_expand_abs_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
245 static rtx altivec_expand_predicate_builtin
PARAMS ((enum insn_code
, const char *, tree
, rtx
));
246 static rtx altivec_expand_stv_builtin
PARAMS ((enum insn_code
, tree
));
247 static void rs6000_parse_abi_options
PARAMS ((void));
248 static void rs6000_parse_vrsave_option
PARAMS ((void));
249 static void rs6000_parse_isel_option
PARAMS ((void));
250 static int first_altivec_reg_to_save
PARAMS ((void));
251 static unsigned int compute_vrsave_mask
PARAMS ((void));
252 static void is_altivec_return_reg
PARAMS ((rtx
, void *));
253 static rtx generate_set_vrsave
PARAMS ((rtx
, rs6000_stack_t
*, int));
254 static void altivec_frame_fixup
PARAMS ((rtx
, rtx
, HOST_WIDE_INT
));
255 static int easy_vector_constant
PARAMS ((rtx
));
257 /* Default register names. */
258 char rs6000_reg_names
[][8] =
260 "0", "1", "2", "3", "4", "5", "6", "7",
261 "8", "9", "10", "11", "12", "13", "14", "15",
262 "16", "17", "18", "19", "20", "21", "22", "23",
263 "24", "25", "26", "27", "28", "29", "30", "31",
264 "0", "1", "2", "3", "4", "5", "6", "7",
265 "8", "9", "10", "11", "12", "13", "14", "15",
266 "16", "17", "18", "19", "20", "21", "22", "23",
267 "24", "25", "26", "27", "28", "29", "30", "31",
268 "mq", "lr", "ctr","ap",
269 "0", "1", "2", "3", "4", "5", "6", "7",
271 /* AltiVec registers. */
272 "0", "1", "2", "3", "4", "5", "6", "7",
273 "8", "9", "10", "11", "12", "13", "14", "15",
274 "16", "17", "18", "19", "20", "21", "22", "23",
275 "24", "25", "26", "27", "28", "29", "30", "31",
279 #ifdef TARGET_REGNAMES
280 static const char alt_reg_names
[][8] =
282 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
283 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
284 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
285 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
286 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
287 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
288 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
289 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
290 "mq", "lr", "ctr", "ap",
291 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
293 /* AltiVec registers. */
294 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
295 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
296 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
297 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
302 #ifndef MASK_STRICT_ALIGN
303 #define MASK_STRICT_ALIGN 0
/* Initialize the GCC target structure.  */
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes

#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
315 /* Default unaligned ops are only provided for ELF. Find the ops needed
316 for non-ELF systems. */
317 #ifndef OBJECT_FORMAT_ELF
319 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
321 #undef TARGET_ASM_UNALIGNED_HI_OP
322 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
323 #undef TARGET_ASM_UNALIGNED_SI_OP
324 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
325 #undef TARGET_ASM_UNALIGNED_DI_OP
326 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
329 #undef TARGET_ASM_UNALIGNED_HI_OP
330 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
331 #undef TARGET_ASM_UNALIGNED_SI_OP
332 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
336 /* This hook deals with fixups for relocatable code and DI-mode objects
338 #undef TARGET_ASM_INTEGER
339 #define TARGET_ASM_INTEGER rs6000_assemble_integer
341 #undef TARGET_ASM_FUNCTION_PROLOGUE
342 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
343 #undef TARGET_ASM_FUNCTION_EPILOGUE
344 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
347 #undef TARGET_SECTION_TYPE_FLAGS
348 #define TARGET_SECTION_TYPE_FLAGS rs6000_elf_section_type_flags
351 #undef TARGET_SCHED_ISSUE_RATE
352 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
353 #undef TARGET_SCHED_ADJUST_COST
354 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
355 #undef TARGET_SCHED_ADJUST_PRIORITY
356 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
358 #undef TARGET_INIT_BUILTINS
359 #define TARGET_INIT_BUILTINS rs6000_init_builtins
361 #undef TARGET_EXPAND_BUILTIN
362 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
364 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
365 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
367 struct gcc_target targetm
= TARGET_INITIALIZER
;
369 /* Override command line options. Mostly we process the processor
370 type and sometimes adjust other TARGET_ options. */
373 rs6000_override_options (default_cpu
)
374 const char *default_cpu
;
377 struct rs6000_cpu_select
*ptr
;
379 /* Simplify the entries below by making a mask for any POWER
380 variant and any PowerPC variant. */
382 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
383 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
384 | MASK_PPC_GFXOPT | MASK_POWERPC64)
385 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
389 const char *const name
; /* Canonical processor name. */
390 const enum processor_type processor
; /* Processor type enum value. */
391 const int target_enable
; /* Target flags to enable. */
392 const int target_disable
; /* Target flags to disable. */
393 } const processor_target_table
[]
394 = {{"common", PROCESSOR_COMMON
, MASK_NEW_MNEMONICS
,
395 POWER_MASKS
| POWERPC_MASKS
},
396 {"power", PROCESSOR_POWER
,
397 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
398 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
399 {"power2", PROCESSOR_POWER
,
400 MASK_POWER
| MASK_POWER2
| MASK_MULTIPLE
| MASK_STRING
,
401 POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
402 {"power3", PROCESSOR_PPC630
,
403 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
404 POWER_MASKS
| MASK_PPC_GPOPT
},
405 {"power4", PROCESSOR_POWER4
,
406 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
407 POWER_MASKS
| MASK_PPC_GPOPT
},
408 {"powerpc", PROCESSOR_POWERPC
,
409 MASK_POWERPC
| MASK_NEW_MNEMONICS
,
410 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
411 {"powerpc64", PROCESSOR_POWERPC64
,
412 MASK_POWERPC
| MASK_POWERPC64
| MASK_NEW_MNEMONICS
,
413 POWER_MASKS
| POWERPC_OPT_MASKS
},
414 {"rios", PROCESSOR_RIOS1
,
415 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
416 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
417 {"rios1", PROCESSOR_RIOS1
,
418 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
419 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
420 {"rsc", PROCESSOR_PPC601
,
421 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
422 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
423 {"rsc1", PROCESSOR_PPC601
,
424 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
425 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
426 {"rios2", PROCESSOR_RIOS2
,
427 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
| MASK_POWER2
,
428 POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
429 {"rs64a", PROCESSOR_RS64A
,
430 MASK_POWERPC
| MASK_NEW_MNEMONICS
,
431 POWER_MASKS
| POWERPC_OPT_MASKS
},
432 {"401", PROCESSOR_PPC403
,
433 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
434 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
435 {"403", PROCESSOR_PPC403
,
436 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
| MASK_STRICT_ALIGN
,
437 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
438 {"405", PROCESSOR_PPC405
,
439 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
440 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
441 {"505", PROCESSOR_MPCCORE
,
442 MASK_POWERPC
| MASK_NEW_MNEMONICS
,
443 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
444 {"601", PROCESSOR_PPC601
,
445 MASK_POWER
| MASK_POWERPC
| MASK_NEW_MNEMONICS
| MASK_MULTIPLE
| MASK_STRING
,
446 MASK_POWER2
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
447 {"602", PROCESSOR_PPC603
,
448 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
449 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
450 {"603", PROCESSOR_PPC603
,
451 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
452 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
453 {"603e", PROCESSOR_PPC603
,
454 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
455 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
456 {"ec603e", PROCESSOR_PPC603
,
457 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
458 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
459 {"604", PROCESSOR_PPC604
,
460 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
461 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
462 {"604e", PROCESSOR_PPC604e
,
463 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
464 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
465 {"620", PROCESSOR_PPC620
,
466 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
467 POWER_MASKS
| MASK_PPC_GPOPT
},
468 {"630", PROCESSOR_PPC630
,
469 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
470 POWER_MASKS
| MASK_PPC_GPOPT
},
471 {"740", PROCESSOR_PPC750
,
472 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
473 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
474 {"750", PROCESSOR_PPC750
,
475 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
476 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
477 {"7400", PROCESSOR_PPC7400
,
478 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
479 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
480 {"7450", PROCESSOR_PPC7450
,
481 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
482 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
483 {"8540", PROCESSOR_PPC8540
,
484 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
485 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
486 {"801", PROCESSOR_MPCCORE
,
487 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
488 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
489 {"821", PROCESSOR_MPCCORE
,
490 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
491 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
492 {"823", PROCESSOR_MPCCORE
,
493 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
494 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
495 {"860", PROCESSOR_MPCCORE
,
496 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
497 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
}};
499 const size_t ptt_size
= ARRAY_SIZE (processor_target_table
);
501 /* Save current -mmultiple/-mno-multiple status. */
502 int multiple
= TARGET_MULTIPLE
;
503 /* Save current -mstring/-mno-string status. */
504 int string
= TARGET_STRING
;
506 /* Identify the processor type. */
507 rs6000_select
[0].string
= default_cpu
;
508 rs6000_cpu
= TARGET_POWERPC64
? PROCESSOR_DEFAULT64
: PROCESSOR_DEFAULT
;
510 for (i
= 0; i
< ARRAY_SIZE (rs6000_select
); i
++)
512 ptr
= &rs6000_select
[i
];
513 if (ptr
->string
!= (char *)0 && ptr
->string
[0] != '\0')
515 for (j
= 0; j
< ptt_size
; j
++)
516 if (! strcmp (ptr
->string
, processor_target_table
[j
].name
))
519 rs6000_cpu
= processor_target_table
[j
].processor
;
523 target_flags
|= processor_target_table
[j
].target_enable
;
524 target_flags
&= ~processor_target_table
[j
].target_disable
;
530 error ("bad value (%s) for %s switch", ptr
->string
, ptr
->name
);
534 if (rs6000_cpu
== PROCESSOR_PPC8540
)
537 /* If we are optimizing big endian systems for space, use the store
538 multiple instructions. */
539 if (BYTES_BIG_ENDIAN
&& optimize_size
)
540 target_flags
|= MASK_MULTIPLE
;
542 /* If -mmultiple or -mno-multiple was explicitly used, don't
543 override with the processor default */
544 if (TARGET_MULTIPLE_SET
)
545 target_flags
= (target_flags
& ~MASK_MULTIPLE
) | multiple
;
547 /* If -mstring or -mno-string was explicitly used, don't override
548 with the processor default. */
549 if (TARGET_STRING_SET
)
550 target_flags
= (target_flags
& ~MASK_STRING
) | string
;
552 /* Don't allow -mmultiple or -mstring on little endian systems
553 unless the cpu is a 750, because the hardware doesn't support the
554 instructions used in little endian mode, and causes an alignment
555 trap. The 750 does not cause an alignment trap (except when the
556 target is unaligned). */
558 if (! BYTES_BIG_ENDIAN
&& rs6000_cpu
!= PROCESSOR_PPC750
)
562 target_flags
&= ~MASK_MULTIPLE
;
563 if (TARGET_MULTIPLE_SET
)
564 warning ("-mmultiple is not supported on little endian systems");
569 target_flags
&= ~MASK_STRING
;
570 if (TARGET_STRING_SET
)
571 warning ("-mstring is not supported on little endian systems");
575 if (flag_pic
!= 0 && DEFAULT_ABI
== ABI_AIX
)
580 warning ("-f%s ignored (all code is position independent)",
581 (flag_pic
> 1) ? "PIC" : "pic");
584 #ifdef XCOFF_DEBUGGING_INFO
585 if (flag_function_sections
&& (write_symbols
!= NO_DEBUG
)
586 && DEFAULT_ABI
== ABI_AIX
)
588 warning ("-ffunction-sections disabled on AIX when debugging");
589 flag_function_sections
= 0;
592 if (flag_data_sections
&& (DEFAULT_ABI
== ABI_AIX
))
594 warning ("-fdata-sections not supported on AIX");
595 flag_data_sections
= 0;
599 /* Set debug flags */
600 if (rs6000_debug_name
)
602 if (! strcmp (rs6000_debug_name
, "all"))
603 rs6000_debug_stack
= rs6000_debug_arg
= 1;
604 else if (! strcmp (rs6000_debug_name
, "stack"))
605 rs6000_debug_stack
= 1;
606 else if (! strcmp (rs6000_debug_name
, "arg"))
607 rs6000_debug_arg
= 1;
609 error ("unknown -mdebug-%s switch", rs6000_debug_name
);
612 if (rs6000_traceback_name
)
614 if (! strncmp (rs6000_traceback_name
, "full", 4))
615 rs6000_traceback
= traceback_full
;
616 else if (! strncmp (rs6000_traceback_name
, "part", 4))
617 rs6000_traceback
= traceback_part
;
618 else if (! strncmp (rs6000_traceback_name
, "no", 2))
619 rs6000_traceback
= traceback_none
;
621 error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
622 rs6000_traceback_name
);
625 /* Set size of long double */
626 rs6000_long_double_type_size
= 64;
627 if (rs6000_long_double_size_string
)
630 int size
= strtol (rs6000_long_double_size_string
, &tail
, 10);
631 if (*tail
!= '\0' || (size
!= 64 && size
!= 128))
632 error ("Unknown switch -mlong-double-%s",
633 rs6000_long_double_size_string
);
635 rs6000_long_double_type_size
= size
;
638 /* Handle -mabi= options. */
639 rs6000_parse_abi_options ();
641 /* Handle -mvrsave= option. */
642 rs6000_parse_vrsave_option ();
644 /* Handle -misel= option. */
645 rs6000_parse_isel_option ();
647 #ifdef SUBTARGET_OVERRIDE_OPTIONS
648 SUBTARGET_OVERRIDE_OPTIONS
;
650 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
651 SUBSUBTARGET_OVERRIDE_OPTIONS
;
654 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
655 using TARGET_OPTIONS to handle a toggle switch, but we're out of
656 bits in target_flags so TARGET_SWITCHES cannot be used.
657 Assumption here is that rs6000_longcall_switch points into the
658 text of the complete option, rather than being a copy, so we can
659 scan back for the presence or absence of the no- modifier. */
660 if (rs6000_longcall_switch
)
662 const char *base
= rs6000_longcall_switch
;
663 while (base
[-1] != 'm') base
--;
665 if (*rs6000_longcall_switch
!= '\0')
666 error ("invalid option `%s'", base
);
667 rs6000_default_long_calls
= (base
[0] != 'n');
670 #ifdef TARGET_REGNAMES
671 /* If the user desires alternate register names, copy in the
672 alternate names now. */
674 memcpy (rs6000_reg_names
, alt_reg_names
, sizeof (rs6000_reg_names
));
677 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
678 If -maix-struct-return or -msvr4-struct-return was explicitly
679 used, don't override with the ABI default. */
680 if (!(target_flags
& MASK_AIX_STRUCT_RET_SET
))
682 if (DEFAULT_ABI
== ABI_V4
&& !DRAFT_V4_STRUCT_RET
)
683 target_flags
= (target_flags
& ~MASK_AIX_STRUCT_RET
);
685 target_flags
|= MASK_AIX_STRUCT_RET
;
688 /* Register global variables with the garbage collector. */
689 rs6000_add_gc_roots ();
691 /* Allocate an alias set for register saves & restores from stack. */
692 rs6000_sr_alias_set
= new_alias_set ();
695 ASM_GENERATE_INTERNAL_LABEL (toc_label_name
, "LCTOC", 1);
697 /* We can only guarantee the availability of DI pseudo-ops when
698 assembling for 64-bit targets. */
701 targetm
.asm_out
.aligned_op
.di
= NULL
;
702 targetm
.asm_out
.unaligned_op
.di
= NULL
;
705 /* Arrange to save and restore machine status around nested functions. */
706 init_machine_status
= rs6000_init_machine_status
;
709 /* Handle -misel= option. */
711 rs6000_parse_isel_option ()
713 if (rs6000_isel_string
== 0)
715 else if (! strcmp (rs6000_isel_string
, "yes"))
717 else if (! strcmp (rs6000_isel_string
, "no"))
720 error ("unknown -misel= option specified: '%s'",
724 /* Handle -mvrsave= options. */
726 rs6000_parse_vrsave_option ()
728 /* Generate VRSAVE instructions by default. */
729 if (rs6000_altivec_vrsave_string
== 0
730 || ! strcmp (rs6000_altivec_vrsave_string
, "yes"))
731 rs6000_altivec_vrsave
= 1;
732 else if (! strcmp (rs6000_altivec_vrsave_string
, "no"))
733 rs6000_altivec_vrsave
= 0;
735 error ("unknown -mvrsave= option specified: '%s'",
736 rs6000_altivec_vrsave_string
);
739 /* Handle -mabi= options. */
741 rs6000_parse_abi_options ()
743 if (rs6000_abi_string
== 0)
745 else if (! strcmp (rs6000_abi_string
, "altivec"))
746 rs6000_altivec_abi
= 1;
747 else if (! strcmp (rs6000_abi_string
, "no-altivec"))
748 rs6000_altivec_abi
= 0;
749 else if (! strcmp (rs6000_abi_string
, "spe"))
751 else if (! strcmp (rs6000_abi_string
, "no-spe"))
754 error ("unknown ABI specified: '%s'", rs6000_abi_string
);
758 optimization_options (level
, size
)
759 int level ATTRIBUTE_UNUSED
;
760 int size ATTRIBUTE_UNUSED
;
764 /* Do anything needed at the start of the asm file. */
767 rs6000_file_start (file
, default_cpu
)
769 const char *default_cpu
;
773 const char *start
= buffer
;
774 struct rs6000_cpu_select
*ptr
;
776 if (flag_verbose_asm
)
778 sprintf (buffer
, "\n%s rs6000/powerpc options:", ASM_COMMENT_START
);
779 rs6000_select
[0].string
= default_cpu
;
781 for (i
= 0; i
< ARRAY_SIZE (rs6000_select
); i
++)
783 ptr
= &rs6000_select
[i
];
784 if (ptr
->string
!= (char *)0 && ptr
->string
[0] != '\0')
786 fprintf (file
, "%s %s%s", start
, ptr
->name
, ptr
->string
);
792 switch (rs6000_sdata
)
794 case SDATA_NONE
: fprintf (file
, "%s -msdata=none", start
); start
= ""; break;
795 case SDATA_DATA
: fprintf (file
, "%s -msdata=data", start
); start
= ""; break;
796 case SDATA_SYSV
: fprintf (file
, "%s -msdata=sysv", start
); start
= ""; break;
797 case SDATA_EABI
: fprintf (file
, "%s -msdata=eabi", start
); start
= ""; break;
800 if (rs6000_sdata
&& g_switch_value
)
802 fprintf (file
, "%s -G %d", start
, g_switch_value
);
812 /* Return non-zero if this function is known to have a null epilogue. */
817 if (reload_completed
)
819 rs6000_stack_t
*info
= rs6000_stack_info ();
821 if (info
->first_gp_reg_save
== 32
822 && info
->first_fp_reg_save
== 64
823 && info
->first_altivec_reg_save
== LAST_ALTIVEC_REGNO
+ 1
826 && info
->vrsave_mask
== 0
834 /* Returns 1 always. */
837 any_operand (op
, mode
)
838 rtx op ATTRIBUTE_UNUSED
;
839 enum machine_mode mode ATTRIBUTE_UNUSED
;
844 /* Returns 1 if op is the count register. */
846 count_register_operand (op
, mode
)
848 enum machine_mode mode ATTRIBUTE_UNUSED
;
850 if (GET_CODE (op
) != REG
)
853 if (REGNO (op
) == COUNT_REGISTER_REGNUM
)
856 if (REGNO (op
) > FIRST_PSEUDO_REGISTER
)
862 /* Returns 1 if op is an altivec register. */
864 altivec_register_operand (op
, mode
)
866 enum machine_mode mode ATTRIBUTE_UNUSED
;
869 return (register_operand (op
, mode
)
870 && (GET_CODE (op
) != REG
871 || REGNO (op
) > FIRST_PSEUDO_REGISTER
872 || ALTIVEC_REGNO_P (REGNO (op
))));
876 xer_operand (op
, mode
)
878 enum machine_mode mode ATTRIBUTE_UNUSED
;
880 if (GET_CODE (op
) != REG
)
883 if (XER_REGNO_P (REGNO (op
)))
889 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
890 by such constants completes more quickly. */
893 s8bit_cint_operand (op
, mode
)
895 enum machine_mode mode ATTRIBUTE_UNUSED
;
897 return ( GET_CODE (op
) == CONST_INT
898 && (INTVAL (op
) >= -128 && INTVAL (op
) <= 127));
901 /* Return 1 if OP is a constant that can fit in a D field. */
904 short_cint_operand (op
, mode
)
906 enum machine_mode mode ATTRIBUTE_UNUSED
;
908 return (GET_CODE (op
) == CONST_INT
909 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I'));
912 /* Similar for an unsigned D field. */
915 u_short_cint_operand (op
, mode
)
917 enum machine_mode mode ATTRIBUTE_UNUSED
;
919 return (GET_CODE (op
) == CONST_INT
920 && CONST_OK_FOR_LETTER_P (INTVAL (op
) & GET_MODE_MASK (mode
), 'K'));
923 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
926 non_short_cint_operand (op
, mode
)
928 enum machine_mode mode ATTRIBUTE_UNUSED
;
930 return (GET_CODE (op
) == CONST_INT
931 && (unsigned HOST_WIDE_INT
) (INTVAL (op
) + 0x8000) >= 0x10000);
934 /* Returns 1 if OP is a CONST_INT that is a positive value
935 and an exact power of 2. */
938 exact_log2_cint_operand (op
, mode
)
940 enum machine_mode mode ATTRIBUTE_UNUSED
;
942 return (GET_CODE (op
) == CONST_INT
944 && exact_log2 (INTVAL (op
)) >= 0);
947 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
951 gpc_reg_operand (op
, mode
)
953 enum machine_mode mode
;
955 return (register_operand (op
, mode
)
956 && (GET_CODE (op
) != REG
957 || (REGNO (op
) >= ARG_POINTER_REGNUM
958 && !XER_REGNO_P (REGNO (op
)))
959 || REGNO (op
) < MQ_REGNO
));
962 /* Returns 1 if OP is either a pseudo-register or a register denoting a
966 cc_reg_operand (op
, mode
)
968 enum machine_mode mode
;
970 return (register_operand (op
, mode
)
971 && (GET_CODE (op
) != REG
972 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
973 || CR_REGNO_P (REGNO (op
))));
976 /* Returns 1 if OP is either a pseudo-register or a register denoting a
977 CR field that isn't CR0. */
980 cc_reg_not_cr0_operand (op
, mode
)
982 enum machine_mode mode
;
984 return (register_operand (op
, mode
)
985 && (GET_CODE (op
) != REG
986 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
987 || CR_REGNO_NOT_CR0_P (REGNO (op
))));
990 /* Returns 1 if OP is either a constant integer valid for a D-field or
991 a non-special register. If a register, it must be in the proper
992 mode unless MODE is VOIDmode. */
995 reg_or_short_operand (op
, mode
)
997 enum machine_mode mode
;
999 return short_cint_operand (op
, mode
) || gpc_reg_operand (op
, mode
);
1002 /* Similar, except check if the negation of the constant would be
1003 valid for a D-field. */
1006 reg_or_neg_short_operand (op
, mode
)
1008 enum machine_mode mode
;
1010 if (GET_CODE (op
) == CONST_INT
)
1011 return CONST_OK_FOR_LETTER_P (INTVAL (op
), 'P');
1013 return gpc_reg_operand (op
, mode
);
1016 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1017 a non-special register. If a register, it must be in the proper
1018 mode unless MODE is VOIDmode. */
1021 reg_or_aligned_short_operand (op
, mode
)
1023 enum machine_mode mode
;
1025 if (gpc_reg_operand (op
, mode
))
1027 else if (short_cint_operand (op
, mode
) && !(INTVAL (op
) & 3))
1034 /* Return 1 if the operand is either a register or an integer whose
1035 high-order 16 bits are zero. */
1038 reg_or_u_short_operand (op
, mode
)
1040 enum machine_mode mode
;
1042 return u_short_cint_operand (op
, mode
) || gpc_reg_operand (op
, mode
);
1045 /* Return 1 is the operand is either a non-special register or ANY
1046 constant integer. */
1049 reg_or_cint_operand (op
, mode
)
1051 enum machine_mode mode
;
1053 return (GET_CODE (op
) == CONST_INT
|| gpc_reg_operand (op
, mode
));
1056 /* Return 1 is the operand is either a non-special register or ANY
1057 32-bit signed constant integer. */
1060 reg_or_arith_cint_operand (op
, mode
)
1062 enum machine_mode mode
;
1064 return (gpc_reg_operand (op
, mode
)
1065 || (GET_CODE (op
) == CONST_INT
1066 #if HOST_BITS_PER_WIDE_INT != 32
1067 && ((unsigned HOST_WIDE_INT
) (INTVAL (op
) + 0x80000000)
1068 < (unsigned HOST_WIDE_INT
) 0x100000000ll
)
1073 /* Return 1 is the operand is either a non-special register or a 32-bit
1074 signed constant integer valid for 64-bit addition. */
1077 reg_or_add_cint64_operand (op
, mode
)
1079 enum machine_mode mode
;
1081 return (gpc_reg_operand (op
, mode
)
1082 || (GET_CODE (op
) == CONST_INT
1083 #if HOST_BITS_PER_WIDE_INT == 32
1084 && INTVAL (op
) < 0x7fff8000
1086 && ((unsigned HOST_WIDE_INT
) (INTVAL (op
) + 0x80008000)
1092 /* Return 1 is the operand is either a non-special register or a 32-bit
1093 signed constant integer valid for 64-bit subtraction. */
1096 reg_or_sub_cint64_operand (op
, mode
)
1098 enum machine_mode mode
;
1100 return (gpc_reg_operand (op
, mode
)
1101 || (GET_CODE (op
) == CONST_INT
1102 #if HOST_BITS_PER_WIDE_INT == 32
1103 && (- INTVAL (op
)) < 0x7fff8000
1105 && ((unsigned HOST_WIDE_INT
) ((- INTVAL (op
)) + 0x80008000)
1111 /* Return 1 is the operand is either a non-special register or ANY
1112 32-bit unsigned constant integer. */
1115 reg_or_logical_cint_operand (op
, mode
)
1117 enum machine_mode mode
;
1119 if (GET_CODE (op
) == CONST_INT
)
1121 if (GET_MODE_BITSIZE (mode
) > HOST_BITS_PER_WIDE_INT
)
1123 if (GET_MODE_BITSIZE (mode
) <= 32)
1126 if (INTVAL (op
) < 0)
1130 return ((INTVAL (op
) & GET_MODE_MASK (mode
)
1131 & (~ (unsigned HOST_WIDE_INT
) 0xffffffff)) == 0);
1133 else if (GET_CODE (op
) == CONST_DOUBLE
)
1135 if (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
1139 return CONST_DOUBLE_HIGH (op
) == 0;
1142 return gpc_reg_operand (op
, mode
);
1145 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1148 got_operand (op
, mode
)
1150 enum machine_mode mode ATTRIBUTE_UNUSED
;
1152 return (GET_CODE (op
) == SYMBOL_REF
1153 || GET_CODE (op
) == CONST
1154 || GET_CODE (op
) == LABEL_REF
);
1157 /* Return 1 if the operand is a simple references that can be loaded via
1158 the GOT (labels involving addition aren't allowed). */
1161 got_no_const_operand (op
, mode
)
1163 enum machine_mode mode ATTRIBUTE_UNUSED
;
1165 return (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == LABEL_REF
);
1168 /* Return the number of instructions it takes to form a constant in an
1169 integer register. */
1172 num_insns_constant_wide (value
)
1173 HOST_WIDE_INT value
;
1175 /* signed constant loadable with {cal|addi} */
1176 if (CONST_OK_FOR_LETTER_P (value
, 'I'))
1179 /* constant loadable with {cau|addis} */
1180 else if (CONST_OK_FOR_LETTER_P (value
, 'L'))
1183 #if HOST_BITS_PER_WIDE_INT == 64
1184 else if (TARGET_POWERPC64
)
1186 HOST_WIDE_INT low
= ((value
& 0xffffffff) ^ 0x80000000) - 0x80000000;
1187 HOST_WIDE_INT high
= value
>> 31;
1189 if (high
== 0 || high
== -1)
1195 return num_insns_constant_wide (high
) + 1;
1197 return (num_insns_constant_wide (high
)
1198 + num_insns_constant_wide (low
) + 1);
1207 num_insns_constant (op
, mode
)
1209 enum machine_mode mode
;
1211 if (GET_CODE (op
) == CONST_INT
)
1213 #if HOST_BITS_PER_WIDE_INT == 64
1214 if ((INTVAL (op
) >> 31) != 0 && (INTVAL (op
) >> 31) != -1
1215 && mask64_operand (op
, mode
))
1219 return num_insns_constant_wide (INTVAL (op
));
1222 else if (GET_CODE (op
) == CONST_DOUBLE
&& mode
== SFmode
)
1227 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1228 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
1229 return num_insns_constant_wide ((HOST_WIDE_INT
) l
);
1232 else if (GET_CODE (op
) == CONST_DOUBLE
)
1238 int endian
= (WORDS_BIG_ENDIAN
== 0);
1240 if (mode
== VOIDmode
|| mode
== DImode
)
1242 high
= CONST_DOUBLE_HIGH (op
);
1243 low
= CONST_DOUBLE_LOW (op
);
1247 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1248 REAL_VALUE_TO_TARGET_DOUBLE (rv
, l
);
1250 low
= l
[1 - endian
];
1254 return (num_insns_constant_wide (low
)
1255 + num_insns_constant_wide (high
));
1259 if (high
== 0 && low
>= 0)
1260 return num_insns_constant_wide (low
);
1262 else if (high
== -1 && low
< 0)
1263 return num_insns_constant_wide (low
);
1265 else if (mask64_operand (op
, mode
))
1269 return num_insns_constant_wide (high
) + 1;
1272 return (num_insns_constant_wide (high
)
1273 + num_insns_constant_wide (low
) + 1);
1281 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1282 register with one instruction per word. We only do this if we can
1283 safely read CONST_DOUBLE_{LOW,HIGH}. */
1286 easy_fp_constant (op
, mode
)
1288 enum machine_mode mode
;
1290 if (GET_CODE (op
) != CONST_DOUBLE
1291 || GET_MODE (op
) != mode
1292 || (GET_MODE_CLASS (mode
) != MODE_FLOAT
&& mode
!= DImode
))
1295 /* Consider all constants with -msoft-float to be easy. */
1296 if ((TARGET_SOFT_FLOAT
|| !TARGET_FPRS
)
1300 /* If we are using V.4 style PIC, consider all constants to be hard. */
1301 if (flag_pic
&& DEFAULT_ABI
== ABI_V4
)
1304 #ifdef TARGET_RELOCATABLE
1305 /* Similarly if we are using -mrelocatable, consider all constants
1307 if (TARGET_RELOCATABLE
)
1316 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1317 REAL_VALUE_TO_TARGET_DOUBLE (rv
, k
);
1319 return (num_insns_constant_wide ((HOST_WIDE_INT
) k
[0]) == 1
1320 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[1]) == 1);
1323 else if (mode
== SFmode
)
1328 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1329 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
1331 return num_insns_constant_wide (l
) == 1;
1334 else if (mode
== DImode
)
1335 return ((TARGET_POWERPC64
1336 && GET_CODE (op
) == CONST_DOUBLE
&& CONST_DOUBLE_LOW (op
) == 0)
1337 || (num_insns_constant (op
, DImode
) <= 2));
1339 else if (mode
== SImode
)
1345 /* Return 1 if the operand is a CONST_INT and can be put into a
1346 register with one instruction. */
1349 easy_vector_constant (op
)
1355 if (GET_CODE (op
) != CONST_VECTOR
)
1358 units
= CONST_VECTOR_NUNITS (op
);
1360 /* We can generate 0 easily. Look for that. */
1361 for (i
= 0; i
< units
; ++i
)
1363 elt
= CONST_VECTOR_ELT (op
, i
);
1365 /* We could probably simplify this by just checking for equality
1366 with CONST0_RTX for the current mode, but let's be safe
1369 switch (GET_CODE (elt
))
1372 if (INTVAL (elt
) != 0)
1376 if (CONST_DOUBLE_LOW (elt
) != 0 || CONST_DOUBLE_HIGH (elt
) != 0)
1384 /* We could probably generate a few other constants trivially, but
1385 gcc doesn't generate them yet. FIXME later. */
1389 /* Return 1 if the operand is the constant 0. This works for scalars
1390 as well as vectors. */
1392 zero_constant (op
, mode
)
1394 enum machine_mode mode
;
1396 return op
== CONST0_RTX (mode
);
1399 /* Return 1 if the operand is 0.0. */
1401 zero_fp_constant (op
, mode
)
1403 enum machine_mode mode
;
1405 return GET_MODE_CLASS (mode
) == MODE_FLOAT
&& op
== CONST0_RTX (mode
);
1408 /* Return 1 if the operand is in volatile memory. Note that during
1409 the RTL generation phase, memory_operand does not return TRUE for
1410 volatile memory references. So this function allows us to
1411 recognize volatile references where its safe. */
1414 volatile_mem_operand (op
, mode
)
1416 enum machine_mode mode
;
1418 if (GET_CODE (op
) != MEM
)
1421 if (!MEM_VOLATILE_P (op
))
1424 if (mode
!= GET_MODE (op
))
1427 if (reload_completed
)
1428 return memory_operand (op
, mode
);
1430 if (reload_in_progress
)
1431 return strict_memory_address_p (mode
, XEXP (op
, 0));
1433 return memory_address_p (mode
, XEXP (op
, 0));
1436 /* Return 1 if the operand is an offsettable memory operand. */
1439 offsettable_mem_operand (op
, mode
)
1441 enum machine_mode mode
;
1443 return ((GET_CODE (op
) == MEM
)
1444 && offsettable_address_p (reload_completed
|| reload_in_progress
,
1445 mode
, XEXP (op
, 0)));
1448 /* Return 1 if the operand is either an easy FP constant (see above) or
1452 mem_or_easy_const_operand (op
, mode
)
1454 enum machine_mode mode
;
1456 return memory_operand (op
, mode
) || easy_fp_constant (op
, mode
);
1459 /* Return 1 if the operand is either a non-special register or an item
1460 that can be used as the operand of a `mode' add insn. */
1463 add_operand (op
, mode
)
1465 enum machine_mode mode
;
1467 if (GET_CODE (op
) == CONST_INT
)
1468 return (CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I')
1469 || CONST_OK_FOR_LETTER_P (INTVAL (op
), 'L'));
1471 return gpc_reg_operand (op
, mode
);
1474 /* Return 1 if OP is a constant but not a valid add_operand. */
1477 non_add_cint_operand (op
, mode
)
1479 enum machine_mode mode ATTRIBUTE_UNUSED
;
1481 return (GET_CODE (op
) == CONST_INT
1482 && !CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I')
1483 && !CONST_OK_FOR_LETTER_P (INTVAL (op
), 'L'));
1486 /* Return 1 if the operand is a non-special register or a constant that
1487 can be used as the operand of an OR or XOR insn on the RS/6000. */
1490 logical_operand (op
, mode
)
1492 enum machine_mode mode
;
1494 HOST_WIDE_INT opl
, oph
;
1496 if (gpc_reg_operand (op
, mode
))
1499 if (GET_CODE (op
) == CONST_INT
)
1501 opl
= INTVAL (op
) & GET_MODE_MASK (mode
);
1503 #if HOST_BITS_PER_WIDE_INT <= 32
1504 if (GET_MODE_BITSIZE (mode
) > HOST_BITS_PER_WIDE_INT
&& opl
< 0)
1508 else if (GET_CODE (op
) == CONST_DOUBLE
)
1510 if (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
1513 opl
= CONST_DOUBLE_LOW (op
);
1514 oph
= CONST_DOUBLE_HIGH (op
);
1521 return ((opl
& ~ (unsigned HOST_WIDE_INT
) 0xffff) == 0
1522 || (opl
& ~ (unsigned HOST_WIDE_INT
) 0xffff0000) == 0);
1525 /* Return 1 if C is a constant that is not a logical operand (as
1526 above), but could be split into one. */
1529 non_logical_cint_operand (op
, mode
)
1531 enum machine_mode mode
;
1533 return ((GET_CODE (op
) == CONST_INT
|| GET_CODE (op
) == CONST_DOUBLE
)
1534 && ! logical_operand (op
, mode
)
1535 && reg_or_logical_cint_operand (op
, mode
));
1538 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1539 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1540 Reject all ones and all zeros, since these should have been optimized
1541 away and confuse the making of MB and ME. */
1544 mask_operand (op
, mode
)
1546 enum machine_mode mode ATTRIBUTE_UNUSED
;
1548 HOST_WIDE_INT c
, lsb
;
1550 if (GET_CODE (op
) != CONST_INT
)
1555 /* Fail in 64-bit mode if the mask wraps around because the upper
1556 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
1557 if (TARGET_POWERPC64
&& (c
& 0x80000001) == 0x80000001)
1560 /* We don't change the number of transitions by inverting,
1561 so make sure we start with the LS bit zero. */
1565 /* Reject all zeros or all ones. */
1569 /* Find the first transition. */
1572 /* Invert to look for a second transition. */
1575 /* Erase first transition. */
1578 /* Find the second transition (if any). */
1581 /* Match if all the bits above are 1's (or c is zero). */
1585 /* Return 1 for the PowerPC64 rlwinm corner case. */
1588 mask_operand_wrap (op
, mode
)
1590 enum machine_mode mode ATTRIBUTE_UNUSED
;
1592 HOST_WIDE_INT c
, lsb
;
1594 if (GET_CODE (op
) != CONST_INT
)
1599 if ((c
& 0x80000001) != 0x80000001)
1613 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
1614 It is if there are no more than one 1->0 or 0->1 transitions.
1615 Reject all zeros, since zero should have been optimized away and
1616 confuses the making of MB and ME. */
1619 mask64_operand (op
, mode
)
1621 enum machine_mode mode ATTRIBUTE_UNUSED
;
1623 if (GET_CODE (op
) == CONST_INT
)
1625 HOST_WIDE_INT c
, lsb
;
1629 /* Reject all zeros. */
1633 /* We don't change the number of transitions by inverting,
1634 so make sure we start with the LS bit zero. */
1638 /* Find the transition, and check that all bits above are 1's. */
1645 /* Like mask64_operand, but allow up to three transitions. This
1646 predicate is used by insn patterns that generate two rldicl or
1647 rldicr machine insns. */
1650 mask64_2_operand (op
, mode
)
1652 enum machine_mode mode ATTRIBUTE_UNUSED
;
1654 if (GET_CODE (op
) == CONST_INT
)
1656 HOST_WIDE_INT c
, lsb
;
1660 /* Disallow all zeros. */
1664 /* We don't change the number of transitions by inverting,
1665 so make sure we start with the LS bit zero. */
1669 /* Find the first transition. */
1672 /* Invert to look for a second transition. */
1675 /* Erase first transition. */
1678 /* Find the second transition. */
1681 /* Invert to look for a third transition. */
1684 /* Erase second transition. */
1687 /* Find the third transition (if any). */
1690 /* Match if all the bits above are 1's (or c is zero). */
1696 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
1697 implement ANDing by the mask IN. */
1699 build_mask64_2_operands (in
, out
)
1703 #if HOST_BITS_PER_WIDE_INT >= 64
1704 unsigned HOST_WIDE_INT c
, lsb
, m1
, m2
;
1707 if (GET_CODE (in
) != CONST_INT
)
1713 /* Assume c initially something like 0x00fff000000fffff. The idea
1714 is to rotate the word so that the middle ^^^^^^ group of zeros
1715 is at the MS end and can be cleared with an rldicl mask. We then
1716 rotate back and clear off the MS ^^ group of zeros with a
1718 c
= ~c
; /* c == 0xff000ffffff00000 */
1719 lsb
= c
& -c
; /* lsb == 0x0000000000100000 */
1720 m1
= -lsb
; /* m1 == 0xfffffffffff00000 */
1721 c
= ~c
; /* c == 0x00fff000000fffff */
1722 c
&= -lsb
; /* c == 0x00fff00000000000 */
1723 lsb
= c
& -c
; /* lsb == 0x0000100000000000 */
1724 c
= ~c
; /* c == 0xff000fffffffffff */
1725 c
&= -lsb
; /* c == 0xff00000000000000 */
1727 while ((lsb
>>= 1) != 0)
1728 shift
++; /* shift == 44 on exit from loop */
1729 m1
<<= 64 - shift
; /* m1 == 0xffffff0000000000 */
1730 m1
= ~m1
; /* m1 == 0x000000ffffffffff */
1731 m2
= ~c
; /* m2 == 0x00ffffffffffffff */
1735 /* Assume c initially something like 0xff000f0000000000. The idea
1736 is to rotate the word so that the ^^^ middle group of zeros
1737 is at the LS end and can be cleared with an rldicr mask. We then
1738 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
1740 lsb
= c
& -c
; /* lsb == 0x0000010000000000 */
1741 m2
= -lsb
; /* m2 == 0xffffff0000000000 */
1742 c
= ~c
; /* c == 0x00fff0ffffffffff */
1743 c
&= -lsb
; /* c == 0x00fff00000000000 */
1744 lsb
= c
& -c
; /* lsb == 0x0000100000000000 */
1745 c
= ~c
; /* c == 0xff000fffffffffff */
1746 c
&= -lsb
; /* c == 0xff00000000000000 */
1748 while ((lsb
>>= 1) != 0)
1749 shift
++; /* shift == 44 on exit from loop */
1750 m1
= ~c
; /* m1 == 0x00ffffffffffffff */
1751 m1
>>= shift
; /* m1 == 0x0000000000000fff */
1752 m1
= ~m1
; /* m1 == 0xfffffffffffff000 */
1755 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
1756 masks will be all 1's. We are guaranteed more than one transition. */
1757 out
[0] = GEN_INT (64 - shift
);
1758 out
[1] = GEN_INT (m1
);
1759 out
[2] = GEN_INT (shift
);
1760 out
[3] = GEN_INT (m2
);
1766 /* Return 1 if the operand is either a non-special register or a constant
1767 that can be used as the operand of a PowerPC64 logical AND insn. */
1770 and64_operand (op
, mode
)
1772 enum machine_mode mode
;
1774 if (fixed_regs
[CR0_REGNO
]) /* CR0 not available, don't do andi./andis. */
1775 return (gpc_reg_operand (op
, mode
) || mask64_operand (op
, mode
));
1777 return (logical_operand (op
, mode
) || mask64_operand (op
, mode
));
1780 /* Like the above, but also match constants that can be implemented
1781 with two rldicl or rldicr insns. */
1784 and64_2_operand (op
, mode
)
1786 enum machine_mode mode
;
1788 if (fixed_regs
[CR0_REGNO
]) /* CR0 not available, don't do andi./andis. */
1789 return gpc_reg_operand (op
, mode
) || mask64_2_operand (op
, mode
);
1791 return logical_operand (op
, mode
) || mask64_2_operand (op
, mode
);
1794 /* Return 1 if the operand is either a non-special register or a
1795 constant that can be used as the operand of an RS/6000 logical AND insn. */
1798 and_operand (op
, mode
)
1800 enum machine_mode mode
;
1802 if (fixed_regs
[CR0_REGNO
]) /* CR0 not available, don't do andi./andis. */
1803 return (gpc_reg_operand (op
, mode
) || mask_operand (op
, mode
));
1805 return (logical_operand (op
, mode
) || mask_operand (op
, mode
));
1808 /* Return 1 if the operand is a general register or memory operand. */
1811 reg_or_mem_operand (op
, mode
)
1813 enum machine_mode mode
;
1815 return (gpc_reg_operand (op
, mode
)
1816 || memory_operand (op
, mode
)
1817 || volatile_mem_operand (op
, mode
));
1820 /* Return 1 if the operand is a general register or memory operand without
1821 pre_inc or pre_dec which produces invalid form of PowerPC lwa
1825 lwa_operand (op
, mode
)
1827 enum machine_mode mode
;
1831 if (reload_completed
&& GET_CODE (inner
) == SUBREG
)
1832 inner
= SUBREG_REG (inner
);
1834 return gpc_reg_operand (inner
, mode
)
1835 || (memory_operand (inner
, mode
)
1836 && GET_CODE (XEXP (inner
, 0)) != PRE_INC
1837 && GET_CODE (XEXP (inner
, 0)) != PRE_DEC
1838 && (GET_CODE (XEXP (inner
, 0)) != PLUS
1839 || GET_CODE (XEXP (XEXP (inner
, 0), 1)) != CONST_INT
1840 || INTVAL (XEXP (XEXP (inner
, 0), 1)) % 4 == 0));
1843 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
1846 symbol_ref_operand (op
, mode
)
1848 enum machine_mode mode
;
1850 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1853 return (GET_CODE (op
) == SYMBOL_REF
);
1856 /* Return 1 if the operand, used inside a MEM, is a valid first argument
1857 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
1860 call_operand (op
, mode
)
1862 enum machine_mode mode
;
1864 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1867 return (GET_CODE (op
) == SYMBOL_REF
1868 || (GET_CODE (op
) == REG
1869 && (REGNO (op
) == LINK_REGISTER_REGNUM
1870 || REGNO (op
) == COUNT_REGISTER_REGNUM
1871 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
)));
1874 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
1875 this file and the function is not weakly defined. */
1878 current_file_function_operand (op
, mode
)
1880 enum machine_mode mode ATTRIBUTE_UNUSED
;
1882 return (GET_CODE (op
) == SYMBOL_REF
1883 && (SYMBOL_REF_FLAG (op
)
1884 || (op
== XEXP (DECL_RTL (current_function_decl
), 0)
1885 && ! DECL_WEAK (current_function_decl
))));
1888 /* Return 1 if this operand is a valid input for a move insn. */
1891 input_operand (op
, mode
)
1893 enum machine_mode mode
;
1895 /* Memory is always valid. */
1896 if (memory_operand (op
, mode
))
1899 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
1900 if (GET_CODE (op
) == CONSTANT_P_RTX
)
1903 /* For floating-point, easy constants are valid. */
1904 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
1906 && easy_fp_constant (op
, mode
))
1909 /* Allow any integer constant. */
1910 if (GET_MODE_CLASS (mode
) == MODE_INT
1911 && (GET_CODE (op
) == CONST_INT
1912 || GET_CODE (op
) == CONST_DOUBLE
))
1915 /* For floating-point or multi-word mode, the only remaining valid type
1917 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
1918 || GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
1919 return register_operand (op
, mode
);
1921 /* The only cases left are integral modes one word or smaller (we
1922 do not get called for MODE_CC values). These can be in any
1924 if (register_operand (op
, mode
))
1927 /* A SYMBOL_REF referring to the TOC is valid. */
1928 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op
))
1931 /* A constant pool expression (relative to the TOC) is valid */
1932 if (TOC_RELATIVE_EXPR_P (op
))
1935 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
1937 if (DEFAULT_ABI
== ABI_V4
1938 && (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == CONST
)
1939 && small_data_operand (op
, Pmode
))
1945 /* Return 1 for an operand in small memory on V.4/eabi. */
1948 small_data_operand (op
, mode
)
1949 rtx op ATTRIBUTE_UNUSED
;
1950 enum machine_mode mode ATTRIBUTE_UNUSED
;
1955 if (rs6000_sdata
== SDATA_NONE
|| rs6000_sdata
== SDATA_DATA
)
1958 if (DEFAULT_ABI
!= ABI_V4
)
1961 if (GET_CODE (op
) == SYMBOL_REF
)
1964 else if (GET_CODE (op
) != CONST
1965 || GET_CODE (XEXP (op
, 0)) != PLUS
1966 || GET_CODE (XEXP (XEXP (op
, 0), 0)) != SYMBOL_REF
1967 || GET_CODE (XEXP (XEXP (op
, 0), 1)) != CONST_INT
)
1972 rtx sum
= XEXP (op
, 0);
1973 HOST_WIDE_INT summand
;
1975 /* We have to be careful here, because it is the referenced address
1976 that must be 32k from _SDA_BASE_, not just the symbol. */
1977 summand
= INTVAL (XEXP (sum
, 1));
1978 if (summand
< 0 || summand
> g_switch_value
)
1981 sym_ref
= XEXP (sum
, 0);
1984 if (*XSTR (sym_ref
, 0) != '@')
1995 constant_pool_expr_1 (op
, have_sym
, have_toc
)
2000 switch (GET_CODE(op
))
2003 if (CONSTANT_POOL_ADDRESS_P (op
))
2005 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op
), Pmode
))
2013 else if (! strcmp (XSTR (op
, 0), toc_label_name
))
2022 return (constant_pool_expr_1 (XEXP (op
, 0), have_sym
, have_toc
)
2023 && constant_pool_expr_1 (XEXP (op
, 1), have_sym
, have_toc
));
2025 return constant_pool_expr_1 (XEXP (op
, 0), have_sym
, have_toc
);
2034 constant_pool_expr_p (op
)
2039 return constant_pool_expr_1 (op
, &have_sym
, &have_toc
) && have_sym
;
2043 toc_relative_expr_p (op
)
2048 return constant_pool_expr_1 (op
, &have_sym
, &have_toc
) && have_toc
;
2051 /* Try machine-dependent ways of modifying an illegitimate address
2052 to be legitimate. If we find one, return the new, valid address.
2053 This is used from only one place: `memory_address' in explow.c.
2055 OLDX is the address as it was before break_out_memory_refs was
2056 called. In some cases it is useful to look at this to decide what
2059 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2061 It is always safe for this function to do nothing. It exists to
2062 recognize opportunities to optimize the output.
2064 On RS/6000, first check for the sum of a register with a constant
2065 integer that is out of range. If so, generate code to add the
2066 constant with the low-order 16 bits masked to the register and force
2067 this result into another register (this can be done with `cau').
2068 Then generate an address of REG+(CONST&0xffff), allowing for the
2069 possibility of bit 16 being a one.
2071 Then check for the sum of a register and something not constant, try to
2072 load the other things into a register and return the sum. */
2074 rs6000_legitimize_address (x
, oldx
, mode
)
2076 rtx oldx ATTRIBUTE_UNUSED
;
2077 enum machine_mode mode
;
2079 if (GET_CODE (x
) == PLUS
2080 && GET_CODE (XEXP (x
, 0)) == REG
2081 && GET_CODE (XEXP (x
, 1)) == CONST_INT
2082 && (unsigned HOST_WIDE_INT
) (INTVAL (XEXP (x
, 1)) + 0x8000) >= 0x10000)
2084 HOST_WIDE_INT high_int
, low_int
;
2086 low_int
= ((INTVAL (XEXP (x
, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2087 high_int
= INTVAL (XEXP (x
, 1)) - low_int
;
2088 sum
= force_operand (gen_rtx_PLUS (Pmode
, XEXP (x
, 0),
2089 GEN_INT (high_int
)), 0);
2090 return gen_rtx_PLUS (Pmode
, sum
, GEN_INT (low_int
));
2092 else if (GET_CODE (x
) == PLUS
2093 && GET_CODE (XEXP (x
, 0)) == REG
2094 && GET_CODE (XEXP (x
, 1)) != CONST_INT
2095 && GET_MODE_NUNITS (mode
) == 1
2096 && ((TARGET_HARD_FLOAT
&& TARGET_FPRS
)
2099 && (TARGET_POWERPC64
|| mode
!= DImode
)
2102 return gen_rtx_PLUS (Pmode
, XEXP (x
, 0),
2103 force_reg (Pmode
, force_operand (XEXP (x
, 1), 0)));
2105 else if (ALTIVEC_VECTOR_MODE (mode
))
2109 /* Make sure both operands are registers. */
2110 if (GET_CODE (x
) == PLUS
)
2111 return gen_rtx_PLUS (Pmode
, force_reg (Pmode
, XEXP (x
, 0)),
2112 force_reg (Pmode
, XEXP (x
, 1)));
2114 reg
= force_reg (Pmode
, x
);
2117 else if (SPE_VECTOR_MODE (mode
))
2119 /* We accept [reg + reg] and [reg + OFFSET]. */
2121 if (GET_CODE (x
) == PLUS
)
2123 rtx op1
= XEXP (x
, 0);
2124 rtx op2
= XEXP (x
, 1);
2126 op1
= force_reg (Pmode
, op1
);
2128 if (GET_CODE (op2
) != REG
2129 && (GET_CODE (op2
) != CONST_INT
2130 || !SPE_CONST_OFFSET_OK (INTVAL (op2
))))
2131 op2
= force_reg (Pmode
, op2
);
2133 return gen_rtx_PLUS (Pmode
, op1
, op2
);
2136 return force_reg (Pmode
, x
);
2138 else if (TARGET_ELF
&& TARGET_32BIT
&& TARGET_NO_TOC
&& ! flag_pic
2139 && GET_CODE (x
) != CONST_INT
2140 && GET_CODE (x
) != CONST_DOUBLE
2142 && GET_MODE_NUNITS (mode
) == 1
2143 && (GET_MODE_BITSIZE (mode
) <= 32
2144 || ((TARGET_HARD_FLOAT
&& TARGET_FPRS
) && mode
== DFmode
)))
2146 rtx reg
= gen_reg_rtx (Pmode
);
2147 emit_insn (gen_elf_high (reg
, (x
)));
2148 return gen_rtx_LO_SUM (Pmode
, reg
, (x
));
2150 else if (TARGET_MACHO
&& TARGET_32BIT
&& TARGET_NO_TOC
2152 && GET_CODE (x
) != CONST_INT
2153 && GET_CODE (x
) != CONST_DOUBLE
2155 && ((TARGET_HARD_FLOAT
&& TARGET_FPRS
) || mode
!= DFmode
)
2159 rtx reg
= gen_reg_rtx (Pmode
);
2160 emit_insn (gen_macho_high (reg
, (x
)));
2161 return gen_rtx_LO_SUM (Pmode
, reg
, (x
));
2164 && CONSTANT_POOL_EXPR_P (x
)
2165 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x
), Pmode
))
2167 return create_TOC_reference (x
);
2173 /* The convention appears to be to define this wherever it is used.
2174 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
2175 is now used here. */
2176 #ifndef REG_MODE_OK_FOR_BASE_P
2177 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
2180 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
2181 replace the input X, or the original X if no replacement is called for.
2182 The output parameter *WIN is 1 if the calling macro should goto WIN,
2185 For RS/6000, we wish to handle large displacements off a base
2186 register by splitting the addend across an addiu/addis and the mem insn.
2187 This cuts number of extra insns needed from 3 to 1.
2189 On Darwin, we use this to generate code for floating point constants.
2190 A movsf_low is generated so we wind up with 2 instructions rather than 3.
2191 The Darwin code is inside #if TARGET_MACHO because only then is
2192 machopic_function_base_name() defined. */
2194 rs6000_legitimize_reload_address (x
, mode
, opnum
, type
, ind_levels
, win
)
2196 enum machine_mode mode
;
2199 int ind_levels ATTRIBUTE_UNUSED
;
2202 /* We must recognize output that we have already generated ourselves. */
2203 if (GET_CODE (x
) == PLUS
2204 && GET_CODE (XEXP (x
, 0)) == PLUS
2205 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == REG
2206 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
2207 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
2209 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2210 BASE_REG_CLASS
, GET_MODE (x
), VOIDmode
, 0, 0,
2211 opnum
, (enum reload_type
)type
);
2217 if (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
2218 && GET_CODE (x
) == LO_SUM
2219 && GET_CODE (XEXP (x
, 0)) == PLUS
2220 && XEXP (XEXP (x
, 0), 0) == pic_offset_table_rtx
2221 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == HIGH
2222 && GET_CODE (XEXP (XEXP (XEXP (x
, 0), 1), 0)) == CONST
2223 && XEXP (XEXP (XEXP (x
, 0), 1), 0) == XEXP (x
, 1)
2224 && GET_CODE (XEXP (XEXP (x
, 1), 0)) == MINUS
2225 && GET_CODE (XEXP (XEXP (XEXP (x
, 1), 0), 0)) == SYMBOL_REF
2226 && GET_CODE (XEXP (XEXP (XEXP (x
, 1), 0), 1)) == SYMBOL_REF
)
2228 /* Result of previous invocation of this function on Darwin
2229 floating point constant. */
2230 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2231 BASE_REG_CLASS
, Pmode
, VOIDmode
, 0, 0,
2232 opnum
, (enum reload_type
)type
);
2237 if (GET_CODE (x
) == PLUS
2238 && GET_CODE (XEXP (x
, 0)) == REG
2239 && REGNO (XEXP (x
, 0)) < FIRST_PSEUDO_REGISTER
2240 && REG_MODE_OK_FOR_BASE_P (XEXP (x
, 0), mode
)
2241 && GET_CODE (XEXP (x
, 1)) == CONST_INT
2242 && !SPE_VECTOR_MODE (mode
)
2243 && !ALTIVEC_VECTOR_MODE (mode
))
2245 HOST_WIDE_INT val
= INTVAL (XEXP (x
, 1));
2246 HOST_WIDE_INT low
= ((val
& 0xffff) ^ 0x8000) - 0x8000;
2248 = (((val
- low
) & 0xffffffff) ^ 0x80000000) - 0x80000000;
2250 /* Check for 32-bit overflow. */
2251 if (high
+ low
!= val
)
2257 /* Reload the high part into a base reg; leave the low part
2258 in the mem directly. */
2260 x
= gen_rtx_PLUS (GET_MODE (x
),
2261 gen_rtx_PLUS (GET_MODE (x
), XEXP (x
, 0),
2265 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2266 BASE_REG_CLASS
, GET_MODE (x
), VOIDmode
, 0, 0,
2267 opnum
, (enum reload_type
)type
);
2272 if (GET_CODE (x
) == SYMBOL_REF
2273 && DEFAULT_ABI
== ABI_DARWIN
2274 && !ALTIVEC_VECTOR_MODE (mode
)
2277 /* Darwin load of floating point constant. */
2278 rtx offset
= gen_rtx (CONST
, Pmode
,
2279 gen_rtx (MINUS
, Pmode
, x
,
2280 gen_rtx (SYMBOL_REF
, Pmode
,
2281 machopic_function_base_name ())));
2282 x
= gen_rtx (LO_SUM
, GET_MODE (x
),
2283 gen_rtx (PLUS
, Pmode
, pic_offset_table_rtx
,
2284 gen_rtx (HIGH
, Pmode
, offset
)), offset
);
2285 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
2286 BASE_REG_CLASS
, Pmode
, VOIDmode
, 0, 0,
2287 opnum
, (enum reload_type
)type
);
2293 && CONSTANT_POOL_EXPR_P (x
)
2294 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x
), mode
))
2296 (x
) = create_TOC_reference (x
);
2304 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2305 that is a valid memory address for an instruction.
2306 The MODE argument is the machine mode for the MEM expression
2307 that wants to use this address.
2309 On the RS/6000, there are four valid address: a SYMBOL_REF that
2310 refers to a constant pool entry of an address (or the sum of it
2311 plus a constant), a short (16-bit signed) constant plus a register,
2312 the sum of two registers, or a register indirect, possibly with an
2313 auto-increment. For DFmode and DImode with an constant plus register,
2314 we must ensure that both words are addressable or PowerPC64 with offset
2317 For modes spanning multiple registers (DFmode in 32-bit GPRs,
2318 32-bit DImode, TImode), indexed addressing cannot be used because
2319 adjacent memory cells are accessed by adding word-sized offsets
2320 during assembly output. */
2322 rs6000_legitimate_address (mode
, x
, reg_ok_strict
)
2323 enum machine_mode mode
;
2327 if (LEGITIMATE_INDIRECT_ADDRESS_P (x
, reg_ok_strict
))
2329 if ((GET_CODE (x
) == PRE_INC
|| GET_CODE (x
) == PRE_DEC
)
2330 && !ALTIVEC_VECTOR_MODE (mode
)
2331 && !SPE_VECTOR_MODE (mode
)
2333 && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x
, 0), reg_ok_strict
))
2335 if (LEGITIMATE_SMALL_DATA_P (mode
, x
))
2337 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x
))
2339 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
2341 && GET_CODE (x
) == PLUS
2342 && GET_CODE (XEXP (x
, 0)) == REG
2343 && XEXP (x
, 0) == virtual_stack_vars_rtx
2344 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
2346 if (LEGITIMATE_OFFSET_ADDRESS_P (mode
, x
, reg_ok_strict
))
2349 && ((TARGET_HARD_FLOAT
&& TARGET_FPRS
)
2352 && (TARGET_POWERPC64
|| mode
!= DImode
)
2353 && LEGITIMATE_INDEXED_ADDRESS_P (x
, reg_ok_strict
))
2355 if (LEGITIMATE_LO_SUM_ADDRESS_P (mode
, x
, reg_ok_strict
))
2360 /* Try to output insns to set TARGET equal to the constant C if it can
2361 be done in less than N insns. Do all computations in MODE.
2362 Returns the place where the output has been placed if it can be
2363 done and the insns have been emitted. If it would take more than N
2364 insns, zero is returned and no insns and emitted. */
2367 rs6000_emit_set_const (dest
, mode
, source
, n
)
2369 enum machine_mode mode
;
2370 int n ATTRIBUTE_UNUSED
;
2372 rtx result
, insn
, set
;
2373 HOST_WIDE_INT c0
, c1
;
2375 if (mode
== QImode
|| mode
== HImode
)
2378 dest
= gen_reg_rtx (mode
);
2379 emit_insn (gen_rtx_SET (VOIDmode
, dest
, source
));
2382 else if (mode
== SImode
)
2384 result
= no_new_pseudos
? dest
: gen_reg_rtx (SImode
);
2386 emit_insn (gen_rtx_SET (VOIDmode
, result
,
2387 GEN_INT (INTVAL (source
)
2388 & (~ (HOST_WIDE_INT
) 0xffff))));
2389 emit_insn (gen_rtx_SET (VOIDmode
, dest
,
2390 gen_rtx_IOR (SImode
, result
,
2391 GEN_INT (INTVAL (source
) & 0xffff))));
2394 else if (mode
== DImode
)
2396 if (GET_CODE (source
) == CONST_INT
)
2398 c0
= INTVAL (source
);
2401 else if (GET_CODE (source
) == CONST_DOUBLE
)
2403 #if HOST_BITS_PER_WIDE_INT >= 64
2404 c0
= CONST_DOUBLE_LOW (source
);
2407 c0
= CONST_DOUBLE_LOW (source
);
2408 c1
= CONST_DOUBLE_HIGH (source
);
2414 result
= rs6000_emit_set_long_const (dest
, c0
, c1
);
2419 insn
= get_last_insn ();
2420 set
= single_set (insn
);
2421 if (! CONSTANT_P (SET_SRC (set
)))
2422 set_unique_reg_note (insn
, REG_EQUAL
, source
);
2427 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
2428 fall back to a straight forward decomposition. We do this to avoid
2429 exponential run times encountered when looking for longer sequences
2430 with rs6000_emit_set_const. */
2432 rs6000_emit_set_long_const (dest
, c1
, c2
)
2434 HOST_WIDE_INT c1
, c2
;
2436 if (!TARGET_POWERPC64
)
2438 rtx operand1
, operand2
;
2440 operand1
= operand_subword_force (dest
, WORDS_BIG_ENDIAN
== 0,
2442 operand2
= operand_subword_force (dest
, WORDS_BIG_ENDIAN
!= 0,
2444 emit_move_insn (operand1
, GEN_INT (c1
));
2445 emit_move_insn (operand2
, GEN_INT (c2
));
2449 HOST_WIDE_INT ud1
, ud2
, ud3
, ud4
;
2452 ud2
= (c1
& 0xffff0000) >> 16;
2453 #if HOST_BITS_PER_WIDE_INT >= 64
2457 ud4
= (c2
& 0xffff0000) >> 16;
2459 if ((ud4
== 0xffff && ud3
== 0xffff && ud2
== 0xffff && (ud1
& 0x8000))
2460 || (ud4
== 0 && ud3
== 0 && ud2
== 0 && ! (ud1
& 0x8000)))
2463 emit_move_insn (dest
, GEN_INT (((ud1
^ 0x8000) - 0x8000)));
2465 emit_move_insn (dest
, GEN_INT (ud1
));
2468 else if ((ud4
== 0xffff && ud3
== 0xffff && (ud2
& 0x8000))
2469 || (ud4
== 0 && ud3
== 0 && ! (ud2
& 0x8000)))
2472 emit_move_insn (dest
, GEN_INT (((ud2
<< 16) ^ 0x80000000)
2475 emit_move_insn (dest
, GEN_INT (ud2
<< 16));
2477 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
2479 else if ((ud4
== 0xffff && (ud3
& 0x8000))
2480 || (ud4
== 0 && ! (ud3
& 0x8000)))
2483 emit_move_insn (dest
, GEN_INT (((ud3
<< 16) ^ 0x80000000)
2486 emit_move_insn (dest
, GEN_INT (ud3
<< 16));
2489 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud2
)));
2490 emit_move_insn (dest
, gen_rtx_ASHIFT (DImode
, dest
, GEN_INT (16)));
2492 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
2497 emit_move_insn (dest
, GEN_INT (((ud4
<< 16) ^ 0x80000000)
2500 emit_move_insn (dest
, GEN_INT (ud4
<< 16));
2503 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud3
)));
2505 emit_move_insn (dest
, gen_rtx_ASHIFT (DImode
, dest
, GEN_INT (32)));
2507 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
,
2508 GEN_INT (ud2
<< 16)));
2510 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
2516 /* Emit a move from SOURCE to DEST in mode MODE. */
2518 rs6000_emit_move (dest
, source
, mode
)
2521 enum machine_mode mode
;
2525 operands
[1] = source
;
2527 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
2528 if (GET_CODE (operands
[1]) == CONST_DOUBLE
2529 && ! FLOAT_MODE_P (mode
)
2530 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
2532 /* FIXME. This should never happen. */
2533 /* Since it seems that it does, do the safe thing and convert
2535 operands
[1] = gen_int_mode (CONST_DOUBLE_LOW (operands
[1]), mode
);
2537 if (GET_CODE (operands
[1]) == CONST_DOUBLE
2538 && ! FLOAT_MODE_P (mode
)
2539 && ((CONST_DOUBLE_HIGH (operands
[1]) == 0
2540 && CONST_DOUBLE_LOW (operands
[1]) >= 0)
2541 || (CONST_DOUBLE_HIGH (operands
[1]) == -1
2542 && CONST_DOUBLE_LOW (operands
[1]) < 0)))
2545 /* Check if GCC is setting up a block move that will end up using FP
2546 registers as temporaries. We must make sure this is acceptable. */
2547 if (GET_CODE (operands
[0]) == MEM
2548 && GET_CODE (operands
[1]) == MEM
2550 && (SLOW_UNALIGNED_ACCESS (DImode
, MEM_ALIGN (operands
[0]))
2551 || SLOW_UNALIGNED_ACCESS (DImode
, MEM_ALIGN (operands
[1])))
2552 && ! (SLOW_UNALIGNED_ACCESS (SImode
, (MEM_ALIGN (operands
[0]) > 32
2553 ? 32 : MEM_ALIGN (operands
[0])))
2554 || SLOW_UNALIGNED_ACCESS (SImode
, (MEM_ALIGN (operands
[1]) > 32
2556 : MEM_ALIGN (operands
[1]))))
2557 && ! MEM_VOLATILE_P (operands
[0])
2558 && ! MEM_VOLATILE_P (operands
[1]))
2560 emit_move_insn (adjust_address (operands
[0], SImode
, 0),
2561 adjust_address (operands
[1], SImode
, 0));
2562 emit_move_insn (adjust_address (operands
[0], SImode
, 4),
2563 adjust_address (operands
[1], SImode
, 4));
2567 if (! no_new_pseudos
&& GET_CODE (operands
[0]) != REG
)
2568 operands
[1] = force_reg (mode
, operands
[1]);
2570 if (mode
== SFmode
&& ! TARGET_POWERPC
2571 && TARGET_HARD_FLOAT
&& TARGET_FPRS
2572 && GET_CODE (operands
[0]) == MEM
)
2576 if (reload_in_progress
|| reload_completed
)
2577 regnum
= true_regnum (operands
[1]);
2578 else if (GET_CODE (operands
[1]) == REG
)
2579 regnum
= REGNO (operands
[1]);
2583 /* If operands[1] is a register, on POWER it may have
2584 double-precision data in it, so truncate it to single
2586 if (FP_REGNO_P (regnum
) || regnum
>= FIRST_PSEUDO_REGISTER
)
2589 newreg
= (no_new_pseudos
? operands
[1] : gen_reg_rtx (mode
));
2590 emit_insn (gen_aux_truncdfsf2 (newreg
, operands
[1]));
2591 operands
[1] = newreg
;
2595 /* Handle the case where reload calls us with an invalid address;
2596 and the case of CONSTANT_P_RTX. */
2597 if (!ALTIVEC_VECTOR_MODE (mode
)
2598 && (! general_operand (operands
[1], mode
)
2599 || ! nonimmediate_operand (operands
[0], mode
)
2600 || GET_CODE (operands
[1]) == CONSTANT_P_RTX
))
2602 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
2606 /* FIXME: In the long term, this switch statement should go away
2607 and be replaced by a sequence of tests based on things like
2613 if (CONSTANT_P (operands
[1])
2614 && GET_CODE (operands
[1]) != CONST_INT
)
2615 operands
[1] = force_const_mem (mode
, operands
[1]);
2621 if (CONSTANT_P (operands
[1])
2622 && ! easy_fp_constant (operands
[1], mode
))
2623 operands
[1] = force_const_mem (mode
, operands
[1]);
2633 if (CONSTANT_P (operands
[1])
2634 && !easy_vector_constant (operands
[1]))
2635 operands
[1] = force_const_mem (mode
, operands
[1]);
2640 /* Use default pattern for address of ELF small data */
2643 && DEFAULT_ABI
== ABI_V4
2644 && (GET_CODE (operands
[1]) == SYMBOL_REF
2645 || GET_CODE (operands
[1]) == CONST
)
2646 && small_data_operand (operands
[1], mode
))
2648 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
2652 if (DEFAULT_ABI
== ABI_V4
2653 && mode
== Pmode
&& mode
== SImode
2654 && flag_pic
== 1 && got_operand (operands
[1], mode
))
2656 emit_insn (gen_movsi_got (operands
[0], operands
[1]));
2660 if ((TARGET_ELF
|| DEFAULT_ABI
== ABI_DARWIN
)
2661 && TARGET_NO_TOC
&& ! flag_pic
2663 && CONSTANT_P (operands
[1])
2664 && GET_CODE (operands
[1]) != HIGH
2665 && GET_CODE (operands
[1]) != CONST_INT
)
2667 rtx target
= (no_new_pseudos
? operands
[0] : gen_reg_rtx (mode
));
2669 /* If this is a function address on -mcall-aixdesc,
2670 convert it to the address of the descriptor. */
2671 if (DEFAULT_ABI
== ABI_AIX
2672 && GET_CODE (operands
[1]) == SYMBOL_REF
2673 && XSTR (operands
[1], 0)[0] == '.')
2675 const char *name
= XSTR (operands
[1], 0);
2677 while (*name
== '.')
2679 new_ref
= gen_rtx_SYMBOL_REF (Pmode
, name
);
2680 CONSTANT_POOL_ADDRESS_P (new_ref
)
2681 = CONSTANT_POOL_ADDRESS_P (operands
[1]);
2682 SYMBOL_REF_FLAG (new_ref
) = SYMBOL_REF_FLAG (operands
[1]);
2683 SYMBOL_REF_USED (new_ref
) = SYMBOL_REF_USED (operands
[1]);
2684 operands
[1] = new_ref
;
2687 if (DEFAULT_ABI
== ABI_DARWIN
)
2689 emit_insn (gen_macho_high (target
, operands
[1]));
2690 emit_insn (gen_macho_low (operands
[0], target
, operands
[1]));
2694 emit_insn (gen_elf_high (target
, operands
[1]));
2695 emit_insn (gen_elf_low (operands
[0], target
, operands
[1]));
2699 /* If this is a SYMBOL_REF that refers to a constant pool entry,
2700 and we have put it in the TOC, we just need to make a TOC-relative
2703 && GET_CODE (operands
[1]) == SYMBOL_REF
2704 && CONSTANT_POOL_EXPR_P (operands
[1])
2705 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands
[1]),
2706 get_pool_mode (operands
[1])))
2708 operands
[1] = create_TOC_reference (operands
[1]);
2710 else if (mode
== Pmode
2711 && CONSTANT_P (operands
[1])
2712 && ((GET_CODE (operands
[1]) != CONST_INT
2713 && ! easy_fp_constant (operands
[1], mode
))
2714 || (GET_CODE (operands
[1]) == CONST_INT
2715 && num_insns_constant (operands
[1], mode
) > 2)
2716 || (GET_CODE (operands
[0]) == REG
2717 && FP_REGNO_P (REGNO (operands
[0]))))
2718 && GET_CODE (operands
[1]) != HIGH
2719 && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands
[1])
2720 && ! TOC_RELATIVE_EXPR_P (operands
[1]))
2722 /* Emit a USE operation so that the constant isn't deleted if
2723 expensive optimizations are turned on because nobody
2724 references it. This should only be done for operands that
2725 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
2726 This should not be done for operands that contain LABEL_REFs.
2727 For now, we just handle the obvious case. */
2728 if (GET_CODE (operands
[1]) != LABEL_REF
)
2729 emit_insn (gen_rtx_USE (VOIDmode
, operands
[1]));
2732 /* Darwin uses a special PIC legitimizer. */
2733 if (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
)
2736 rs6000_machopic_legitimize_pic_address (operands
[1], mode
,
2738 if (operands
[0] != operands
[1])
2739 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
2744 /* If we are to limit the number of things we put in the TOC and
2745 this is a symbol plus a constant we can add in one insn,
2746 just put the symbol in the TOC and add the constant. Don't do
2747 this if reload is in progress. */
2748 if (GET_CODE (operands
[1]) == CONST
2749 && TARGET_NO_SUM_IN_TOC
&& ! reload_in_progress
2750 && GET_CODE (XEXP (operands
[1], 0)) == PLUS
2751 && add_operand (XEXP (XEXP (operands
[1], 0), 1), mode
)
2752 && (GET_CODE (XEXP (XEXP (operands
[1], 0), 0)) == LABEL_REF
2753 || GET_CODE (XEXP (XEXP (operands
[1], 0), 0)) == SYMBOL_REF
)
2754 && ! side_effects_p (operands
[0]))
2757 force_const_mem (mode
, XEXP (XEXP (operands
[1], 0), 0));
2758 rtx other
= XEXP (XEXP (operands
[1], 0), 1);
2760 sym
= force_reg (mode
, sym
);
2762 emit_insn (gen_addsi3 (operands
[0], sym
, other
));
2764 emit_insn (gen_adddi3 (operands
[0], sym
, other
));
2768 operands
[1] = force_const_mem (mode
, operands
[1]);
2771 && CONSTANT_POOL_EXPR_P (XEXP (operands
[1], 0))
2772 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
2773 get_pool_constant (XEXP (operands
[1], 0)),
2774 get_pool_mode (XEXP (operands
[1], 0))))
2777 = gen_rtx_MEM (mode
,
2778 create_TOC_reference (XEXP (operands
[1], 0)));
2779 set_mem_alias_set (operands
[1], get_TOC_alias_set ());
2780 RTX_UNCHANGING_P (operands
[1]) = 1;
2786 if (GET_CODE (operands
[0]) == MEM
2787 && GET_CODE (XEXP (operands
[0], 0)) != REG
2788 && ! reload_in_progress
)
2790 = replace_equiv_address (operands
[0],
2791 copy_addr_to_reg (XEXP (operands
[0], 0)));
2793 if (GET_CODE (operands
[1]) == MEM
2794 && GET_CODE (XEXP (operands
[1], 0)) != REG
2795 && ! reload_in_progress
)
2797 = replace_equiv_address (operands
[1],
2798 copy_addr_to_reg (XEXP (operands
[1], 0)));
2805 /* Above, we may have called force_const_mem which may have returned
2806 an invalid address. If we can, fix this up; otherwise, reload will
2807 have to deal with it. */
2808 if (GET_CODE (operands
[1]) == MEM
2809 && ! memory_address_p (mode
, XEXP (operands
[1], 0))
2810 && ! reload_in_progress
)
2811 operands
[1] = adjust_address (operands
[1], mode
, 0);
2813 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
2817 /* Initialize a variable CUM of type CUMULATIVE_ARGS
2818 for a call to a function whose data type is FNTYPE.
2819 For a library call, FNTYPE is 0.
2821 For incoming args we set the number of arguments in the prototype large
2822 so we never return a PARALLEL. */
2825 init_cumulative_args (cum
, fntype
, libname
, incoming
)
2826 CUMULATIVE_ARGS
*cum
;
2828 rtx libname ATTRIBUTE_UNUSED
;
2831 static CUMULATIVE_ARGS zero_cumulative
;
2833 *cum
= zero_cumulative
;
2835 cum
->fregno
= FP_ARG_MIN_REG
;
2836 cum
->vregno
= ALTIVEC_ARG_MIN_REG
;
2837 cum
->prototype
= (fntype
&& TYPE_ARG_TYPES (fntype
));
2838 cum
->call_cookie
= CALL_NORMAL
;
2839 cum
->sysv_gregno
= GP_ARG_MIN_REG
;
2842 cum
->nargs_prototype
= 1000; /* don't return a PARALLEL */
2844 else if (cum
->prototype
)
2845 cum
->nargs_prototype
= (list_length (TYPE_ARG_TYPES (fntype
)) - 1
2846 + (TYPE_MODE (TREE_TYPE (fntype
)) == BLKmode
2847 || RETURN_IN_MEMORY (TREE_TYPE (fntype
))));
2850 cum
->nargs_prototype
= 0;
2852 cum
->orig_nargs
= cum
->nargs_prototype
;
2854 /* Check for a longcall attribute. */
2856 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype
))
2857 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype
)))
2858 cum
->call_cookie
= CALL_LONG
;
2860 if (TARGET_DEBUG_ARG
)
2862 fprintf (stderr
, "\ninit_cumulative_args:");
2865 tree ret_type
= TREE_TYPE (fntype
);
2866 fprintf (stderr
, " ret code = %s,",
2867 tree_code_name
[ (int)TREE_CODE (ret_type
) ]);
2870 if (cum
->call_cookie
& CALL_LONG
)
2871 fprintf (stderr
, " longcall,");
2873 fprintf (stderr
, " proto = %d, nargs = %d\n",
2874 cum
->prototype
, cum
->nargs_prototype
);
2878 /* If defined, a C expression which determines whether, and in which
2879 direction, to pad out an argument with extra space. The value
2880 should be of type `enum direction': either `upward' to pad above
2881 the argument, `downward' to pad below, or `none' to inhibit
2884 For the AIX ABI structs are always stored left shifted in their
2888 function_arg_padding (mode
, type
)
2889 enum machine_mode mode
;
2892 if (type
!= 0 && AGGREGATE_TYPE_P (type
))
2895 /* This is the default definition. */
2896 return (! BYTES_BIG_ENDIAN
2899 ? (type
&& TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
2900 && int_size_in_bytes (type
) < (PARM_BOUNDARY
/ BITS_PER_UNIT
))
2901 : GET_MODE_BITSIZE (mode
) < PARM_BOUNDARY
)
2902 ? downward
: upward
));
2905 /* If defined, a C expression that gives the alignment boundary, in bits,
2906 of an argument with the specified mode and type. If it is not defined,
2907 PARM_BOUNDARY is used for all arguments.
2909 V.4 wants long longs to be double word aligned. */
2912 function_arg_boundary (mode
, type
)
2913 enum machine_mode mode
;
2914 tree type ATTRIBUTE_UNUSED
;
2916 if (DEFAULT_ABI
== ABI_V4
&& (mode
== DImode
|| mode
== DFmode
))
2918 else if (SPE_VECTOR_MODE (mode
))
2920 else if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
2923 return PARM_BOUNDARY
;
2926 /* Update the data in CUM to advance over an argument
2927 of mode MODE and data type TYPE.
2928 (TYPE is null for libcalls where that information may not be available.) */
2931 function_arg_advance (cum
, mode
, type
, named
)
2932 CUMULATIVE_ARGS
*cum
;
2933 enum machine_mode mode
;
2937 cum
->nargs_prototype
--;
2939 if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
2941 if (cum
->vregno
<= ALTIVEC_ARG_MAX_REG
&& cum
->nargs_prototype
>= 0)
2944 cum
->words
+= RS6000_ARG_SIZE (mode
, type
);
2946 else if (TARGET_SPE_ABI
&& TARGET_SPE
&& SPE_VECTOR_MODE (mode
)
2947 && named
&& cum
->sysv_gregno
<= GP_ARG_MAX_REG
)
2949 else if (DEFAULT_ABI
== ABI_V4
)
2951 if (TARGET_HARD_FLOAT
&& TARGET_FPRS
2952 && (mode
== SFmode
|| mode
== DFmode
))
2954 if (cum
->fregno
<= FP_ARG_V4_MAX_REG
)
2959 cum
->words
+= cum
->words
& 1;
2960 cum
->words
+= RS6000_ARG_SIZE (mode
, type
);
2966 int gregno
= cum
->sysv_gregno
;
2968 /* Aggregates and IEEE quad get passed by reference. */
2969 if ((type
&& AGGREGATE_TYPE_P (type
))
2973 n_words
= RS6000_ARG_SIZE (mode
, type
);
2975 /* Long long and SPE vectors are put in odd registers. */
2976 if (n_words
== 2 && (gregno
& 1) == 0)
2979 /* Long long and SPE vectors are not split between registers
2981 if (gregno
+ n_words
- 1 > GP_ARG_MAX_REG
)
2983 /* Long long is aligned on the stack. */
2985 cum
->words
+= cum
->words
& 1;
2986 cum
->words
+= n_words
;
2989 /* Note: continuing to accumulate gregno past when we've started
2990 spilling to the stack indicates the fact that we've started
2991 spilling to the stack to expand_builtin_saveregs. */
2992 cum
->sysv_gregno
= gregno
+ n_words
;
2995 if (TARGET_DEBUG_ARG
)
2997 fprintf (stderr
, "function_adv: words = %2d, fregno = %2d, ",
2998 cum
->words
, cum
->fregno
);
2999 fprintf (stderr
, "gregno = %2d, nargs = %4d, proto = %d, ",
3000 cum
->sysv_gregno
, cum
->nargs_prototype
, cum
->prototype
);
3001 fprintf (stderr
, "mode = %4s, named = %d\n",
3002 GET_MODE_NAME (mode
), named
);
3007 int align
= (TARGET_32BIT
&& (cum
->words
& 1) != 0
3008 && function_arg_boundary (mode
, type
) == 64) ? 1 : 0;
3010 cum
->words
+= align
+ RS6000_ARG_SIZE (mode
, type
);
3012 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
3013 && TARGET_HARD_FLOAT
&& TARGET_FPRS
)
3016 if (TARGET_DEBUG_ARG
)
3018 fprintf (stderr
, "function_adv: words = %2d, fregno = %2d, ",
3019 cum
->words
, cum
->fregno
);
3020 fprintf (stderr
, "nargs = %4d, proto = %d, mode = %4s, ",
3021 cum
->nargs_prototype
, cum
->prototype
, GET_MODE_NAME (mode
));
3022 fprintf (stderr
, "named = %d, align = %d\n", named
, align
);
3027 /* Determine where to put an argument to a function.
3028 Value is zero to push the argument on the stack,
3029 or a hard register in which to store the argument.
3031 MODE is the argument's machine mode.
3032 TYPE is the data type of the argument (as a tree).
3033 This is null for libcalls where that information may
3035 CUM is a variable of type CUMULATIVE_ARGS which gives info about
3036 the preceding args and about the function being called.
3037 NAMED is nonzero if this argument is a named parameter
3038 (otherwise it is an extra parameter matching an ellipsis).
3040 On RS/6000 the first eight words of non-FP are normally in registers
3041 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
3042 Under V.4, the first 8 FP args are in registers.
3044 If this is floating-point and no prototype is specified, we use
3045 both an FP and integer register (or possibly FP reg and stack). Library
3046 functions (when TYPE is zero) always have the proper types for args,
3047 so we can pass the FP value just in one register. emit_library_function
3048 doesn't support PARALLEL anyway. */
3051 function_arg (cum
, mode
, type
, named
)
3052 CUMULATIVE_ARGS
*cum
;
3053 enum machine_mode mode
;
3057 enum rs6000_abi abi
= DEFAULT_ABI
;
3059 /* Return a marker to indicate whether CR1 needs to set or clear the
3060 bit that V.4 uses to say fp args were passed in registers.
3061 Assume that we don't need the marker for software floating point,
3062 or compiler generated library calls. */
3063 if (mode
== VOIDmode
)
3066 && cum
->nargs_prototype
< 0
3067 && type
&& (cum
->prototype
|| TARGET_NO_PROTOTYPE
))
3069 /* For the SPE, we need to crxor CR6 always. */
3071 return GEN_INT (cum
->call_cookie
| CALL_V4_SET_FP_ARGS
);
3072 else if (TARGET_HARD_FLOAT
&& TARGET_FPRS
)
3073 return GEN_INT (cum
->call_cookie
3074 | ((cum
->fregno
== FP_ARG_MIN_REG
)
3075 ? CALL_V4_SET_FP_ARGS
3076 : CALL_V4_CLEAR_FP_ARGS
));
3079 return GEN_INT (cum
->call_cookie
);
3082 if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
3084 if (named
&& cum
->vregno
<= ALTIVEC_ARG_MAX_REG
)
3085 return gen_rtx_REG (mode
, cum
->vregno
);
3089 else if (TARGET_SPE_ABI
&& TARGET_SPE
&& SPE_VECTOR_MODE (mode
) && named
)
3091 if (cum
->sysv_gregno
<= GP_ARG_MAX_REG
)
3092 return gen_rtx_REG (mode
, cum
->sysv_gregno
);
3096 else if (abi
== ABI_V4
)
3098 if (TARGET_HARD_FLOAT
&& TARGET_FPRS
3099 && (mode
== SFmode
|| mode
== DFmode
))
3101 if (cum
->fregno
<= FP_ARG_V4_MAX_REG
)
3102 return gen_rtx_REG (mode
, cum
->fregno
);
3109 int gregno
= cum
->sysv_gregno
;
3111 /* Aggregates and IEEE quad get passed by reference. */
3112 if ((type
&& AGGREGATE_TYPE_P (type
))
3116 n_words
= RS6000_ARG_SIZE (mode
, type
);
3118 /* Long long and SPE vectors are put in odd registers. */
3119 if (n_words
== 2 && (gregno
& 1) == 0)
3122 /* Long long and SPE vectors are not split between registers
3124 if (gregno
+ n_words
- 1 <= GP_ARG_MAX_REG
)
3126 /* SPE vectors in ... get split into 2 registers. */
3127 if (TARGET_SPE
&& TARGET_SPE_ABI
3128 && SPE_VECTOR_MODE (mode
) && !named
)
3131 enum machine_mode m
= GET_MODE_INNER (mode
);
3133 r1
= gen_rtx_REG (m
, gregno
);
3134 r1
= gen_rtx_EXPR_LIST (m
, r1
, const0_rtx
);
3135 r2
= gen_rtx_REG (m
, gregno
+ 1);
3136 r2
= gen_rtx_EXPR_LIST (m
, r2
, GEN_INT (4));
3137 return gen_rtx_PARALLEL (mode
, gen_rtvec (2, r1
, r2
));
3139 return gen_rtx_REG (mode
, gregno
);
3147 int align
= (TARGET_32BIT
&& (cum
->words
& 1) != 0
3148 && function_arg_boundary (mode
, type
) == 64) ? 1 : 0;
3149 int align_words
= cum
->words
+ align
;
3151 if (type
&& TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
)
3154 if (USE_FP_FOR_ARG_P (*cum
, mode
, type
))
3157 || ((cum
->nargs_prototype
> 0)
3158 /* IBM AIX extended its linkage convention definition always
3159 to require FP args after register save area hole on the
3161 && (DEFAULT_ABI
!= ABI_AIX
3163 || (align_words
< GP_ARG_NUM_REG
))))
3164 return gen_rtx_REG (mode
, cum
->fregno
);
3166 return gen_rtx_PARALLEL (mode
,
3168 gen_rtx_EXPR_LIST (VOIDmode
,
3169 ((align_words
>= GP_ARG_NUM_REG
)
3172 + RS6000_ARG_SIZE (mode
, type
)
3174 /* If this is partially on the stack, then
3175 we only include the portion actually
3176 in registers here. */
3177 ? gen_rtx_REG (SImode
,
3178 GP_ARG_MIN_REG
+ align_words
)
3179 : gen_rtx_REG (mode
,
3180 GP_ARG_MIN_REG
+ align_words
))),
3182 gen_rtx_EXPR_LIST (VOIDmode
,
3183 gen_rtx_REG (mode
, cum
->fregno
),
3186 else if (align_words
< GP_ARG_NUM_REG
)
3187 return gen_rtx_REG (mode
, GP_ARG_MIN_REG
+ align_words
);
3193 /* For an arg passed partly in registers and partly in memory,
3194 this is the number of registers used.
3195 For args passed entirely in registers or entirely in memory, zero. */
3198 function_arg_partial_nregs (cum
, mode
, type
, named
)
3199 CUMULATIVE_ARGS
*cum
;
3200 enum machine_mode mode
;
3202 int named ATTRIBUTE_UNUSED
;
3204 if (DEFAULT_ABI
== ABI_V4
)
3207 if (USE_FP_FOR_ARG_P (*cum
, mode
, type
)
3208 || USE_ALTIVEC_FOR_ARG_P (*cum
, mode
, type
))
3210 if (cum
->nargs_prototype
>= 0)
3214 if (cum
->words
< GP_ARG_NUM_REG
3215 && GP_ARG_NUM_REG
< (cum
->words
+ RS6000_ARG_SIZE (mode
, type
)))
3217 int ret
= GP_ARG_NUM_REG
- cum
->words
;
3218 if (ret
&& TARGET_DEBUG_ARG
)
3219 fprintf (stderr
, "function_arg_partial_nregs: %d\n", ret
);
3227 /* A C expression that indicates when an argument must be passed by
3228 reference. If nonzero for an argument, a copy of that argument is
3229 made in memory and a pointer to the argument is passed instead of
3230 the argument itself. The pointer is passed in whatever way is
3231 appropriate for passing a pointer to that type.
3233 Under V.4, structures and unions are passed by reference. */
3236 function_arg_pass_by_reference (cum
, mode
, type
, named
)
3237 CUMULATIVE_ARGS
*cum ATTRIBUTE_UNUSED
;
3238 enum machine_mode mode ATTRIBUTE_UNUSED
;
3240 int named ATTRIBUTE_UNUSED
;
3242 if (DEFAULT_ABI
== ABI_V4
3243 && ((type
&& AGGREGATE_TYPE_P (type
))
3246 if (TARGET_DEBUG_ARG
)
3247 fprintf (stderr
, "function_arg_pass_by_reference: aggregate\n");
3255 /* Perform any needed actions needed for a function that is receiving a
3256 variable number of arguments.
3260 MODE and TYPE are the mode and type of the current parameter.
3262 PRETEND_SIZE is a variable that should be set to the amount of stack
3263 that must be pushed by the prolog to pretend that our caller pushed
3266 Normally, this macro will push all remaining incoming registers on the
3267 stack and set PRETEND_SIZE to the length of the registers pushed. */
3270 setup_incoming_varargs (cum
, mode
, type
, pretend_size
, no_rtl
)
3271 CUMULATIVE_ARGS
*cum
;
3272 enum machine_mode mode
;
3278 CUMULATIVE_ARGS next_cum
;
3279 int reg_size
= TARGET_32BIT
? 4 : 8;
3280 rtx save_area
= NULL_RTX
, mem
;
3281 int first_reg_offset
, set
;
3285 fntype
= TREE_TYPE (current_function_decl
);
3286 stdarg_p
= (TYPE_ARG_TYPES (fntype
) != 0
3287 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype
)))
3288 != void_type_node
));
3290 /* For varargs, we do not want to skip the dummy va_dcl argument.
3291 For stdargs, we do want to skip the last named argument. */
3294 function_arg_advance (&next_cum
, mode
, type
, 1);
3296 if (DEFAULT_ABI
== ABI_V4
)
3298 /* Indicate to allocate space on the stack for varargs save area. */
3299 cfun
->machine
->sysv_varargs_p
= 1;
3301 save_area
= plus_constant (virtual_stack_vars_rtx
,
3302 - RS6000_VARARGS_SIZE
);
3304 first_reg_offset
= next_cum
.sysv_gregno
- GP_ARG_MIN_REG
;
3308 first_reg_offset
= next_cum
.words
;
3309 save_area
= virtual_incoming_args_rtx
;
3310 cfun
->machine
->sysv_varargs_p
= 0;
3312 if (MUST_PASS_IN_STACK (mode
, type
))
3313 first_reg_offset
+= RS6000_ARG_SIZE (TYPE_MODE (type
), type
);
3316 set
= get_varargs_alias_set ();
3317 if (! no_rtl
&& first_reg_offset
< GP_ARG_NUM_REG
)
3319 mem
= gen_rtx_MEM (BLKmode
,
3320 plus_constant (save_area
,
3321 first_reg_offset
* reg_size
)),
3322 set_mem_alias_set (mem
, set
);
3323 set_mem_align (mem
, BITS_PER_WORD
);
3326 (GP_ARG_MIN_REG
+ first_reg_offset
, mem
,
3327 GP_ARG_NUM_REG
- first_reg_offset
,
3328 (GP_ARG_NUM_REG
- first_reg_offset
) * UNITS_PER_WORD
);
3330 /* ??? Does ABI_V4 need this at all? */
3331 *pretend_size
= (GP_ARG_NUM_REG
- first_reg_offset
) * UNITS_PER_WORD
;
3334 /* Save FP registers if needed. */
3335 if (DEFAULT_ABI
== ABI_V4
3336 && TARGET_HARD_FLOAT
&& TARGET_FPRS
3338 && next_cum
.fregno
<= FP_ARG_V4_MAX_REG
)
3340 int fregno
= next_cum
.fregno
;
3341 rtx cr1
= gen_rtx_REG (CCmode
, CR1_REGNO
);
3342 rtx lab
= gen_label_rtx ();
3343 int off
= (GP_ARG_NUM_REG
* reg_size
) + ((fregno
- FP_ARG_MIN_REG
) * 8);
3345 emit_jump_insn (gen_rtx_SET (VOIDmode
,
3347 gen_rtx_IF_THEN_ELSE (VOIDmode
,
3348 gen_rtx_NE (VOIDmode
, cr1
,
3350 gen_rtx_LABEL_REF (VOIDmode
, lab
),
3353 while (fregno
<= FP_ARG_V4_MAX_REG
)
3355 mem
= gen_rtx_MEM (DFmode
, plus_constant (save_area
, off
));
3356 set_mem_alias_set (mem
, set
);
3357 emit_move_insn (mem
, gen_rtx_REG (DFmode
, fregno
));
3366 /* Create the va_list data type. */
3369 rs6000_build_va_list ()
3371 tree f_gpr
, f_fpr
, f_ovf
, f_sav
, record
, type_decl
;
3373 /* For AIX, prefer 'char *' because that's what the system
3374 header files like. */
3375 if (DEFAULT_ABI
!= ABI_V4
)
3376 return build_pointer_type (char_type_node
);
3378 record
= (*lang_hooks
.types
.make_type
) (RECORD_TYPE
);
3379 type_decl
= build_decl (TYPE_DECL
, get_identifier ("__va_list_tag"), record
);
3381 f_gpr
= build_decl (FIELD_DECL
, get_identifier ("gpr"),
3382 unsigned_char_type_node
);
3383 f_fpr
= build_decl (FIELD_DECL
, get_identifier ("fpr"),
3384 unsigned_char_type_node
);
3385 f_ovf
= build_decl (FIELD_DECL
, get_identifier ("overflow_arg_area"),
3387 f_sav
= build_decl (FIELD_DECL
, get_identifier ("reg_save_area"),
3390 DECL_FIELD_CONTEXT (f_gpr
) = record
;
3391 DECL_FIELD_CONTEXT (f_fpr
) = record
;
3392 DECL_FIELD_CONTEXT (f_ovf
) = record
;
3393 DECL_FIELD_CONTEXT (f_sav
) = record
;
3395 TREE_CHAIN (record
) = type_decl
;
3396 TYPE_NAME (record
) = type_decl
;
3397 TYPE_FIELDS (record
) = f_gpr
;
3398 TREE_CHAIN (f_gpr
) = f_fpr
;
3399 TREE_CHAIN (f_fpr
) = f_ovf
;
3400 TREE_CHAIN (f_ovf
) = f_sav
;
3402 layout_type (record
);
3404 /* The correct type is an array type of one element. */
3405 return build_array_type (record
, build_index_type (size_zero_node
));
3408 /* Implement va_start. */
3411 rs6000_va_start (valist
, nextarg
)
3415 HOST_WIDE_INT words
, n_gpr
, n_fpr
;
3416 tree f_gpr
, f_fpr
, f_ovf
, f_sav
;
3417 tree gpr
, fpr
, ovf
, sav
, t
;
3419 /* Only SVR4 needs something special. */
3420 if (DEFAULT_ABI
!= ABI_V4
)
3422 std_expand_builtin_va_start (valist
, nextarg
);
3426 f_gpr
= TYPE_FIELDS (TREE_TYPE (va_list_type_node
));
3427 f_fpr
= TREE_CHAIN (f_gpr
);
3428 f_ovf
= TREE_CHAIN (f_fpr
);
3429 f_sav
= TREE_CHAIN (f_ovf
);
3431 valist
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (valist
)), valist
);
3432 gpr
= build (COMPONENT_REF
, TREE_TYPE (f_gpr
), valist
, f_gpr
);
3433 fpr
= build (COMPONENT_REF
, TREE_TYPE (f_fpr
), valist
, f_fpr
);
3434 ovf
= build (COMPONENT_REF
, TREE_TYPE (f_ovf
), valist
, f_ovf
);
3435 sav
= build (COMPONENT_REF
, TREE_TYPE (f_sav
), valist
, f_sav
);
3437 /* Count number of gp and fp argument registers used. */
3438 words
= current_function_args_info
.words
;
3439 n_gpr
= current_function_args_info
.sysv_gregno
- GP_ARG_MIN_REG
;
3440 n_fpr
= current_function_args_info
.fregno
- FP_ARG_MIN_REG
;
3442 if (TARGET_DEBUG_ARG
)
3444 fputs ("va_start: words = ", stderr
);
3445 fprintf (stderr
, HOST_WIDE_INT_PRINT_DEC
, words
);
3446 fputs (", n_gpr = ", stderr
);
3447 fprintf (stderr
, HOST_WIDE_INT_PRINT_DEC
, n_gpr
);
3448 fputs (", n_fpr = ", stderr
);
3449 fprintf (stderr
, HOST_WIDE_INT_PRINT_DEC
, n_fpr
);
3450 putc ('\n', stderr
);
3453 t
= build (MODIFY_EXPR
, TREE_TYPE (gpr
), gpr
, build_int_2 (n_gpr
, 0));
3454 TREE_SIDE_EFFECTS (t
) = 1;
3455 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3457 t
= build (MODIFY_EXPR
, TREE_TYPE (fpr
), fpr
, build_int_2 (n_fpr
, 0));
3458 TREE_SIDE_EFFECTS (t
) = 1;
3459 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3461 /* Find the overflow area. */
3462 t
= make_tree (TREE_TYPE (ovf
), virtual_incoming_args_rtx
);
3464 t
= build (PLUS_EXPR
, TREE_TYPE (ovf
), t
,
3465 build_int_2 (words
* UNITS_PER_WORD
, 0));
3466 t
= build (MODIFY_EXPR
, TREE_TYPE (ovf
), ovf
, t
);
3467 TREE_SIDE_EFFECTS (t
) = 1;
3468 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3470 /* Find the register save area. */
3471 t
= make_tree (TREE_TYPE (sav
), virtual_stack_vars_rtx
);
3472 t
= build (PLUS_EXPR
, TREE_TYPE (sav
), t
,
3473 build_int_2 (-RS6000_VARARGS_SIZE
, -1));
3474 t
= build (MODIFY_EXPR
, TREE_TYPE (sav
), sav
, t
);
3475 TREE_SIDE_EFFECTS (t
) = 1;
3476 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3479 /* Implement va_arg. */
3482 rs6000_va_arg (valist
, type
)
3485 tree f_gpr
, f_fpr
, f_ovf
, f_sav
;
3486 tree gpr
, fpr
, ovf
, sav
, reg
, t
, u
;
3487 int indirect_p
, size
, rsize
, n_reg
, sav_ofs
, sav_scale
;
3488 rtx lab_false
, lab_over
, addr_rtx
, r
;
3490 if (DEFAULT_ABI
!= ABI_V4
)
3491 return std_expand_builtin_va_arg (valist
, type
);
3493 f_gpr
= TYPE_FIELDS (TREE_TYPE (va_list_type_node
));
3494 f_fpr
= TREE_CHAIN (f_gpr
);
3495 f_ovf
= TREE_CHAIN (f_fpr
);
3496 f_sav
= TREE_CHAIN (f_ovf
);
3498 valist
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (valist
)), valist
);
3499 gpr
= build (COMPONENT_REF
, TREE_TYPE (f_gpr
), valist
, f_gpr
);
3500 fpr
= build (COMPONENT_REF
, TREE_TYPE (f_fpr
), valist
, f_fpr
);
3501 ovf
= build (COMPONENT_REF
, TREE_TYPE (f_ovf
), valist
, f_ovf
);
3502 sav
= build (COMPONENT_REF
, TREE_TYPE (f_sav
), valist
, f_sav
);
3504 size
= int_size_in_bytes (type
);
3505 rsize
= (size
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
3507 if (AGGREGATE_TYPE_P (type
) || TYPE_MODE (type
) == TFmode
)
3509 /* Aggregates and long doubles are passed by reference. */
3515 size
= UNITS_PER_WORD
;
3518 else if (FLOAT_TYPE_P (type
) && TARGET_HARD_FLOAT
&& TARGET_FPRS
)
3520 /* FP args go in FP registers, if present. */
3529 /* Otherwise into GP registers. */
3537 /* Pull the value out of the saved registers ... */
3539 lab_false
= gen_label_rtx ();
3540 lab_over
= gen_label_rtx ();
3541 addr_rtx
= gen_reg_rtx (Pmode
);
3543 /* AltiVec vectors never go in registers. */
3544 if (!TARGET_ALTIVEC
|| TREE_CODE (type
) != VECTOR_TYPE
)
3546 TREE_THIS_VOLATILE (reg
) = 1;
3547 emit_cmp_and_jump_insns
3548 (expand_expr (reg
, NULL_RTX
, QImode
, EXPAND_NORMAL
),
3549 GEN_INT (8 - n_reg
+ 1), GE
, const1_rtx
, QImode
, 1,
3552 /* Long long is aligned in the registers. */
3555 u
= build (BIT_AND_EXPR
, TREE_TYPE (reg
), reg
,
3556 build_int_2 (n_reg
- 1, 0));
3557 u
= build (PLUS_EXPR
, TREE_TYPE (reg
), reg
, u
);
3558 u
= build (MODIFY_EXPR
, TREE_TYPE (reg
), reg
, u
);
3559 TREE_SIDE_EFFECTS (u
) = 1;
3560 expand_expr (u
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3564 t
= build (PLUS_EXPR
, ptr_type_node
, sav
, build_int_2 (sav_ofs
, 0));
3568 u
= build (POSTINCREMENT_EXPR
, TREE_TYPE (reg
), reg
,
3569 build_int_2 (n_reg
, 0));
3570 TREE_SIDE_EFFECTS (u
) = 1;
3572 u
= build1 (CONVERT_EXPR
, integer_type_node
, u
);
3573 TREE_SIDE_EFFECTS (u
) = 1;
3575 u
= build (MULT_EXPR
, integer_type_node
, u
, build_int_2 (sav_scale
, 0));
3576 TREE_SIDE_EFFECTS (u
) = 1;
3578 t
= build (PLUS_EXPR
, ptr_type_node
, t
, u
);
3579 TREE_SIDE_EFFECTS (t
) = 1;
3581 r
= expand_expr (t
, addr_rtx
, Pmode
, EXPAND_NORMAL
);
3583 emit_move_insn (addr_rtx
, r
);
3585 emit_jump_insn (gen_jump (lab_over
));
3589 emit_label (lab_false
);
3591 /* ... otherwise out of the overflow area. */
3593 /* Make sure we don't find reg 7 for the next int arg.
3595 All AltiVec vectors go in the overflow area. So in the AltiVec
3596 case we need to get the vectors from the overflow area, but
3597 remember where the GPRs and FPRs are. */
3598 if (n_reg
> 1 && (TREE_CODE (type
) != VECTOR_TYPE
3599 || !TARGET_ALTIVEC
))
3601 t
= build (MODIFY_EXPR
, TREE_TYPE (reg
), reg
, build_int_2 (8, 0));
3602 TREE_SIDE_EFFECTS (t
) = 1;
3603 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3606 /* Care for on-stack alignment if needed. */
3613 /* AltiVec vectors are 16 byte aligned. */
3614 if (TARGET_ALTIVEC
&& TREE_CODE (type
) == VECTOR_TYPE
)
3619 t
= build (PLUS_EXPR
, TREE_TYPE (ovf
), ovf
, build_int_2 (align
, 0));
3620 t
= build (BIT_AND_EXPR
, TREE_TYPE (t
), t
, build_int_2 (-align
-1, -1));
3624 r
= expand_expr (t
, addr_rtx
, Pmode
, EXPAND_NORMAL
);
3626 emit_move_insn (addr_rtx
, r
);
3628 t
= build (PLUS_EXPR
, TREE_TYPE (t
), t
, build_int_2 (size
, 0));
3629 t
= build (MODIFY_EXPR
, TREE_TYPE (ovf
), ovf
, t
);
3630 TREE_SIDE_EFFECTS (t
) = 1;
3631 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3633 emit_label (lab_over
);
3637 r
= gen_rtx_MEM (Pmode
, addr_rtx
);
3638 set_mem_alias_set (r
, get_varargs_alias_set ());
3639 emit_move_insn (addr_rtx
, r
);
3647 #define def_builtin(MASK, NAME, TYPE, CODE) \
3649 if ((MASK) & target_flags) \
3650 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
3654 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
3656 static const struct builtin_description bdesc_3arg
[] =
3658 { MASK_ALTIVEC
, CODE_FOR_altivec_vmaddfp
, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP
},
3659 { MASK_ALTIVEC
, CODE_FOR_altivec_vmhaddshs
, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS
},
3660 { MASK_ALTIVEC
, CODE_FOR_altivec_vmhraddshs
, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS
},
3661 { MASK_ALTIVEC
, CODE_FOR_altivec_vmladduhm
, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM
},
3662 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumubm
, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM
},
3663 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsummbm
, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM
},
3664 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumuhm
, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM
},
3665 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumshm
, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM
},
3666 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumuhs
, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS
},
3667 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumshs
, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS
},
3668 { MASK_ALTIVEC
, CODE_FOR_altivec_vnmsubfp
, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP
},
3669 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_4sf
, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF
},
3670 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_4si
, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI
},
3671 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_8hi
, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI
},
3672 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_16qi
, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI
},
3673 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_4sf
, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF
},
3674 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_4si
, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI
},
3675 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_8hi
, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI
},
3676 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_16qi
, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI
},
3677 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_16qi
, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI
},
3678 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_8hi
, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI
},
3679 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_4si
, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI
},
3680 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_4sf
, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF
},
3683 /* DST operations: void foo (void *, const int, const char). */
3685 static const struct builtin_description bdesc_dst
[] =
3687 { MASK_ALTIVEC
, CODE_FOR_altivec_dst
, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST
},
3688 { MASK_ALTIVEC
, CODE_FOR_altivec_dstt
, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT
},
3689 { MASK_ALTIVEC
, CODE_FOR_altivec_dstst
, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST
},
3690 { MASK_ALTIVEC
, CODE_FOR_altivec_dststt
, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT
}
3693 /* Simple binary operations: VECc = foo (VECa, VECb). */
3695 static struct builtin_description bdesc_2arg
[] =
3697 { MASK_ALTIVEC
, CODE_FOR_addv16qi3
, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM
},
3698 { MASK_ALTIVEC
, CODE_FOR_addv8hi3
, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM
},
3699 { MASK_ALTIVEC
, CODE_FOR_addv4si3
, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM
},
3700 { MASK_ALTIVEC
, CODE_FOR_addv4sf3
, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP
},
3701 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddcuw
, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW
},
3702 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddubs
, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS
},
3703 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddsbs
, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS
},
3704 { MASK_ALTIVEC
, CODE_FOR_altivec_vadduhs
, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS
},
3705 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddshs
, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS
},
3706 { MASK_ALTIVEC
, CODE_FOR_altivec_vadduws
, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS
},
3707 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddsws
, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS
},
3708 { MASK_ALTIVEC
, CODE_FOR_andv4si3
, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND
},
3709 { MASK_ALTIVEC
, CODE_FOR_altivec_vandc
, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC
},
3710 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgub
, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB
},
3711 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgsb
, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB
},
3712 { MASK_ALTIVEC
, CODE_FOR_altivec_vavguh
, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH
},
3713 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgsh
, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH
},
3714 { MASK_ALTIVEC
, CODE_FOR_altivec_vavguw
, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW
},
3715 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgsw
, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW
},
3716 { MASK_ALTIVEC
, CODE_FOR_altivec_vcfux
, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX
},
3717 { MASK_ALTIVEC
, CODE_FOR_altivec_vcfsx
, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX
},
3718 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpbfp
, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP
},
3719 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpequb
, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB
},
3720 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpequh
, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH
},
3721 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpequw
, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW
},
3722 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpeqfp
, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP
},
3723 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgefp
, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP
},
3724 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtub
, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB
},
3725 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtsb
, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB
},
3726 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtuh
, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH
},
3727 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtsh
, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH
},
3728 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtuw
, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW
},
3729 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtsw
, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW
},
3730 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtfp
, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP
},
3731 { MASK_ALTIVEC
, CODE_FOR_altivec_vctsxs
, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS
},
3732 { MASK_ALTIVEC
, CODE_FOR_altivec_vctuxs
, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS
},
3733 { MASK_ALTIVEC
, CODE_FOR_umaxv16qi3
, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB
},
3734 { MASK_ALTIVEC
, CODE_FOR_smaxv16qi3
, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB
},
3735 { MASK_ALTIVEC
, CODE_FOR_umaxv8hi3
, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH
},
3736 { MASK_ALTIVEC
, CODE_FOR_smaxv8hi3
, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH
},
3737 { MASK_ALTIVEC
, CODE_FOR_umaxv4si3
, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW
},
3738 { MASK_ALTIVEC
, CODE_FOR_smaxv4si3
, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW
},
3739 { MASK_ALTIVEC
, CODE_FOR_smaxv4sf3
, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP
},
3740 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrghb
, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB
},
3741 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrghh
, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH
},
3742 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrghw
, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW
},
3743 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrglb
, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB
},
3744 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrglh
, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH
},
3745 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrglw
, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW
},
3746 { MASK_ALTIVEC
, CODE_FOR_uminv16qi3
, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB
},
3747 { MASK_ALTIVEC
, CODE_FOR_sminv16qi3
, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB
},
3748 { MASK_ALTIVEC
, CODE_FOR_uminv8hi3
, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH
},
3749 { MASK_ALTIVEC
, CODE_FOR_sminv8hi3
, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH
},
3750 { MASK_ALTIVEC
, CODE_FOR_uminv4si3
, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW
},
3751 { MASK_ALTIVEC
, CODE_FOR_sminv4si3
, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW
},
3752 { MASK_ALTIVEC
, CODE_FOR_sminv4sf3
, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP
},
3753 { MASK_ALTIVEC
, CODE_FOR_altivec_vmuleub
, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB
},
3754 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulesb
, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB
},
3755 { MASK_ALTIVEC
, CODE_FOR_altivec_vmuleuh
, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH
},
3756 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulesh
, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH
},
3757 { MASK_ALTIVEC
, CODE_FOR_altivec_vmuloub
, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB
},
3758 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulosb
, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB
},
3759 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulouh
, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH
},
3760 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulosh
, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH
},
3761 { MASK_ALTIVEC
, CODE_FOR_altivec_vnor
, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR
},
3762 { MASK_ALTIVEC
, CODE_FOR_iorv4si3
, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR
},
3763 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuhum
, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM
},
3764 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuwum
, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM
},
3765 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkpx
, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX
},
3766 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuhss
, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS
},
3767 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkshss
, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS
},
3768 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuwss
, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS
},
3769 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkswss
, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS
},
3770 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuhus
, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS
},
3771 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkshus
, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS
},
3772 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuwus
, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS
},
3773 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkswus
, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS
},
3774 { MASK_ALTIVEC
, CODE_FOR_altivec_vrlb
, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB
},
3775 { MASK_ALTIVEC
, CODE_FOR_altivec_vrlh
, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH
},
3776 { MASK_ALTIVEC
, CODE_FOR_altivec_vrlw
, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW
},
3777 { MASK_ALTIVEC
, CODE_FOR_altivec_vslb
, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB
},
3778 { MASK_ALTIVEC
, CODE_FOR_altivec_vslh
, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH
},
3779 { MASK_ALTIVEC
, CODE_FOR_altivec_vslw
, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW
},
3780 { MASK_ALTIVEC
, CODE_FOR_altivec_vsl
, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL
},
3781 { MASK_ALTIVEC
, CODE_FOR_altivec_vslo
, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO
},
3782 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltb
, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB
},
3783 { MASK_ALTIVEC
, CODE_FOR_altivec_vsplth
, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH
},
3784 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltw
, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW
},
3785 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrb
, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB
},
3786 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrh
, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH
},
3787 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrw
, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW
},
3788 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrab
, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB
},
3789 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrah
, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH
},
3790 { MASK_ALTIVEC
, CODE_FOR_altivec_vsraw
, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW
},
3791 { MASK_ALTIVEC
, CODE_FOR_altivec_vsr
, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR
},
3792 { MASK_ALTIVEC
, CODE_FOR_altivec_vsro
, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO
},
3793 { MASK_ALTIVEC
, CODE_FOR_subv16qi3
, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM
},
3794 { MASK_ALTIVEC
, CODE_FOR_subv8hi3
, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM
},
3795 { MASK_ALTIVEC
, CODE_FOR_subv4si3
, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM
},
3796 { MASK_ALTIVEC
, CODE_FOR_subv4sf3
, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP
},
3797 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubcuw
, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW
},
3798 { MASK_ALTIVEC
, CODE_FOR_altivec_vsububs
, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS
},
3799 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubsbs
, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS
},
3800 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubuhs
, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS
},
3801 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubshs
, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS
},
3802 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubuws
, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS
},
3803 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubsws
, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS
},
3804 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum4ubs
, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS
},
3805 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum4sbs
, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS
},
3806 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum4shs
, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS
},
3807 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum2sws
, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS
},
3808 { MASK_ALTIVEC
, CODE_FOR_altivec_vsumsws
, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS
},
3809 { MASK_ALTIVEC
, CODE_FOR_xorv4si3
, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR
},
3811 /* Place holder, leave as first spe builtin. */
3812 { 0, CODE_FOR_spe_evaddw
, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW
},
3813 { 0, CODE_FOR_spe_evand
, "__builtin_spe_evand", SPE_BUILTIN_EVAND
},
3814 { 0, CODE_FOR_spe_evandc
, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC
},
3815 { 0, CODE_FOR_spe_evdivws
, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS
},
3816 { 0, CODE_FOR_spe_evdivwu
, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU
},
3817 { 0, CODE_FOR_spe_eveqv
, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV
},
3818 { 0, CODE_FOR_spe_evfsadd
, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD
},
3819 { 0, CODE_FOR_spe_evfsdiv
, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV
},
3820 { 0, CODE_FOR_spe_evfsmul
, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL
},
3821 { 0, CODE_FOR_spe_evfssub
, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB
},
3822 { 0, CODE_FOR_spe_evmergehi
, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI
},
3823 { 0, CODE_FOR_spe_evmergehilo
, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO
},
3824 { 0, CODE_FOR_spe_evmergelo
, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO
},
3825 { 0, CODE_FOR_spe_evmergelohi
, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI
},
3826 { 0, CODE_FOR_spe_evmhegsmfaa
, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA
},
3827 { 0, CODE_FOR_spe_evmhegsmfan
, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN
},
3828 { 0, CODE_FOR_spe_evmhegsmiaa
, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA
},
3829 { 0, CODE_FOR_spe_evmhegsmian
, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN
},
3830 { 0, CODE_FOR_spe_evmhegumiaa
, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA
},
3831 { 0, CODE_FOR_spe_evmhegumian
, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN
},
3832 { 0, CODE_FOR_spe_evmhesmf
, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF
},
3833 { 0, CODE_FOR_spe_evmhesmfa
, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA
},
3834 { 0, CODE_FOR_spe_evmhesmfaaw
, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW
},
3835 { 0, CODE_FOR_spe_evmhesmfanw
, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW
},
3836 { 0, CODE_FOR_spe_evmhesmi
, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI
},
3837 { 0, CODE_FOR_spe_evmhesmia
, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA
},
3838 { 0, CODE_FOR_spe_evmhesmiaaw
, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW
},
3839 { 0, CODE_FOR_spe_evmhesmianw
, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW
},
3840 { 0, CODE_FOR_spe_evmhessf
, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF
},
3841 { 0, CODE_FOR_spe_evmhessfa
, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA
},
3842 { 0, CODE_FOR_spe_evmhessfaaw
, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW
},
3843 { 0, CODE_FOR_spe_evmhessfanw
, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW
},
3844 { 0, CODE_FOR_spe_evmhessiaaw
, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW
},
3845 { 0, CODE_FOR_spe_evmhessianw
, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW
},
3846 { 0, CODE_FOR_spe_evmheumi
, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI
},
3847 { 0, CODE_FOR_spe_evmheumia
, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA
},
3848 { 0, CODE_FOR_spe_evmheumiaaw
, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW
},
3849 { 0, CODE_FOR_spe_evmheumianw
, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW
},
3850 { 0, CODE_FOR_spe_evmheusiaaw
, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW
},
3851 { 0, CODE_FOR_spe_evmheusianw
, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW
},
3852 { 0, CODE_FOR_spe_evmhogsmfaa
, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA
},
3853 { 0, CODE_FOR_spe_evmhogsmfan
, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN
},
3854 { 0, CODE_FOR_spe_evmhogsmiaa
, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA
},
3855 { 0, CODE_FOR_spe_evmhogsmian
, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN
},
3856 { 0, CODE_FOR_spe_evmhogumiaa
, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA
},
3857 { 0, CODE_FOR_spe_evmhogumian
, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN
},
3858 { 0, CODE_FOR_spe_evmhosmf
, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF
},
3859 { 0, CODE_FOR_spe_evmhosmfa
, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA
},
3860 { 0, CODE_FOR_spe_evmhosmfaaw
, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW
},
3861 { 0, CODE_FOR_spe_evmhosmfanw
, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW
},
3862 { 0, CODE_FOR_spe_evmhosmi
, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI
},
3863 { 0, CODE_FOR_spe_evmhosmia
, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA
},
3864 { 0, CODE_FOR_spe_evmhosmiaaw
, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW
},
3865 { 0, CODE_FOR_spe_evmhosmianw
, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW
},
3866 { 0, CODE_FOR_spe_evmhossf
, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF
},
3867 { 0, CODE_FOR_spe_evmhossfa
, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA
},
3868 { 0, CODE_FOR_spe_evmhossfaaw
, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW
},
3869 { 0, CODE_FOR_spe_evmhossfanw
, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW
},
3870 { 0, CODE_FOR_spe_evmhossiaaw
, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW
},
3871 { 0, CODE_FOR_spe_evmhossianw
, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW
},
3872 { 0, CODE_FOR_spe_evmhoumi
, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI
},
3873 { 0, CODE_FOR_spe_evmhoumia
, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA
},
3874 { 0, CODE_FOR_spe_evmhoumiaaw
, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW
},
3875 { 0, CODE_FOR_spe_evmhoumianw
, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW
},
3876 { 0, CODE_FOR_spe_evmhousiaaw
, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW
},
3877 { 0, CODE_FOR_spe_evmhousianw
, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW
},
3878 { 0, CODE_FOR_spe_evmwhsmf
, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF
},
3879 { 0, CODE_FOR_spe_evmwhsmfa
, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA
},
3880 { 0, CODE_FOR_spe_evmwhsmi
, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI
},
3881 { 0, CODE_FOR_spe_evmwhsmia
, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA
},
3882 { 0, CODE_FOR_spe_evmwhssf
, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF
},
3883 { 0, CODE_FOR_spe_evmwhssfa
, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA
},
3884 { 0, CODE_FOR_spe_evmwhumi
, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI
},
3885 { 0, CODE_FOR_spe_evmwhumia
, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA
},
3886 { 0, CODE_FOR_spe_evmwlsmf
, "__builtin_spe_evmwlsmf", SPE_BUILTIN_EVMWLSMF
},
3887 { 0, CODE_FOR_spe_evmwlsmfa
, "__builtin_spe_evmwlsmfa", SPE_BUILTIN_EVMWLSMFA
},
3888 { 0, CODE_FOR_spe_evmwlsmfaaw
, "__builtin_spe_evmwlsmfaaw", SPE_BUILTIN_EVMWLSMFAAW
},
3889 { 0, CODE_FOR_spe_evmwlsmfanw
, "__builtin_spe_evmwlsmfanw", SPE_BUILTIN_EVMWLSMFANW
},
3890 { 0, CODE_FOR_spe_evmwlsmiaaw
, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW
},
3891 { 0, CODE_FOR_spe_evmwlsmianw
, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW
},
3892 { 0, CODE_FOR_spe_evmwlssf
, "__builtin_spe_evmwlssf", SPE_BUILTIN_EVMWLSSF
},
3893 { 0, CODE_FOR_spe_evmwlssfa
, "__builtin_spe_evmwlssfa", SPE_BUILTIN_EVMWLSSFA
},
3894 { 0, CODE_FOR_spe_evmwlssfaaw
, "__builtin_spe_evmwlssfaaw", SPE_BUILTIN_EVMWLSSFAAW
},
3895 { 0, CODE_FOR_spe_evmwlssfanw
, "__builtin_spe_evmwlssfanw", SPE_BUILTIN_EVMWLSSFANW
},
3896 { 0, CODE_FOR_spe_evmwlssiaaw
, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW
},
3897 { 0, CODE_FOR_spe_evmwlssianw
, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW
},
3898 { 0, CODE_FOR_spe_evmwlumi
, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI
},
3899 { 0, CODE_FOR_spe_evmwlumia
, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA
},
3900 { 0, CODE_FOR_spe_evmwlumiaaw
, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW
},
3901 { 0, CODE_FOR_spe_evmwlumianw
, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW
},
3902 { 0, CODE_FOR_spe_evmwlusiaaw
, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW
},
3903 { 0, CODE_FOR_spe_evmwlusianw
, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW
},
3904 { 0, CODE_FOR_spe_evmwsmf
, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF
},
3905 { 0, CODE_FOR_spe_evmwsmfa
, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA
},
3906 { 0, CODE_FOR_spe_evmwsmfaa
, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA
},
3907 { 0, CODE_FOR_spe_evmwsmfan
, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN
},
3908 { 0, CODE_FOR_spe_evmwsmi
, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI
},
3909 { 0, CODE_FOR_spe_evmwsmia
, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA
},
3910 { 0, CODE_FOR_spe_evmwsmiaa
, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA
},
3911 { 0, CODE_FOR_spe_evmwsmian
, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN
},
3912 { 0, CODE_FOR_spe_evmwssf
, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF
},
3913 { 0, CODE_FOR_spe_evmwssfa
, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA
},
3914 { 0, CODE_FOR_spe_evmwssfaa
, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA
},
3915 { 0, CODE_FOR_spe_evmwssfan
, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN
},
3916 { 0, CODE_FOR_spe_evmwumi
, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI
},
3917 { 0, CODE_FOR_spe_evmwumia
, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA
},
3918 { 0, CODE_FOR_spe_evmwumiaa
, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA
},
3919 { 0, CODE_FOR_spe_evmwumian
, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN
},
3920 { 0, CODE_FOR_spe_evnand
, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND
},
3921 { 0, CODE_FOR_spe_evnor
, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR
},
3922 { 0, CODE_FOR_spe_evor
, "__builtin_spe_evor", SPE_BUILTIN_EVOR
},
3923 { 0, CODE_FOR_spe_evorc
, "__builtin_spe_evorc", SPE_BUILTIN_EVORC
},
3924 { 0, CODE_FOR_spe_evrlw
, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW
},
3925 { 0, CODE_FOR_spe_evslw
, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW
},
3926 { 0, CODE_FOR_spe_evsrws
, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS
},
3927 { 0, CODE_FOR_spe_evsrwu
, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU
},
3928 { 0, CODE_FOR_spe_evsubfw
, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW
},
3930 /* SPE binary operations expecting a 5-bit unsigned literal. */
3931 { 0, CODE_FOR_spe_evaddiw
, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW
},
3933 { 0, CODE_FOR_spe_evrlwi
, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI
},
3934 { 0, CODE_FOR_spe_evslwi
, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI
},
3935 { 0, CODE_FOR_spe_evsrwis
, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS
},
3936 { 0, CODE_FOR_spe_evsrwiu
, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU
},
3937 { 0, CODE_FOR_spe_evsubifw
, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW
},
3938 { 0, CODE_FOR_spe_evmwhssfaa
, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA
},
3939 { 0, CODE_FOR_spe_evmwhssmaa
, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA
},
3940 { 0, CODE_FOR_spe_evmwhsmfaa
, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA
},
3941 { 0, CODE_FOR_spe_evmwhsmiaa
, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA
},
3942 { 0, CODE_FOR_spe_evmwhusiaa
, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA
},
3943 { 0, CODE_FOR_spe_evmwhumiaa
, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA
},
3944 { 0, CODE_FOR_spe_evmwhssfan
, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN
},
3945 { 0, CODE_FOR_spe_evmwhssian
, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN
},
3946 { 0, CODE_FOR_spe_evmwhsmfan
, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN
},
3947 { 0, CODE_FOR_spe_evmwhsmian
, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN
},
3948 { 0, CODE_FOR_spe_evmwhusian
, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN
},
3949 { 0, CODE_FOR_spe_evmwhumian
, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN
},
3950 { 0, CODE_FOR_spe_evmwhgssfaa
, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA
},
3951 { 0, CODE_FOR_spe_evmwhgsmfaa
, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA
},
3952 { 0, CODE_FOR_spe_evmwhgsmiaa
, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA
},
3953 { 0, CODE_FOR_spe_evmwhgumiaa
, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA
},
3954 { 0, CODE_FOR_spe_evmwhgssfan
, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN
},
3955 { 0, CODE_FOR_spe_evmwhgsmfan
, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN
},
3956 { 0, CODE_FOR_spe_evmwhgsmian
, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN
},
3957 { 0, CODE_FOR_spe_evmwhgumian
, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN
},
3958 { 0, CODE_FOR_spe_brinc
, "__builtin_spe_brinc", SPE_BUILTIN_BRINC
},
3960 /* Place-holder. Leave as last binary SPE builtin. */
3961 { 0, CODE_FOR_spe_evxor
, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR
},
3964 /* AltiVec predicates. */
3966 struct builtin_description_predicates
3968 const unsigned int mask
;
3969 const enum insn_code icode
;
3971 const char *const name
;
3972 const enum rs6000_builtins code
;
3975 static const struct builtin_description_predicates bdesc_altivec_preds
[] =
3977 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P
},
3978 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P
},
3979 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P
},
3980 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P
},
3981 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4si
, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P
},
3982 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4si
, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P
},
3983 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4si
, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P
},
3984 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v8hi
, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P
},
3985 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v8hi
, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P
},
3986 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v8hi
, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P
},
3987 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v16qi
, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P
},
3988 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v16qi
, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P
},
3989 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v16qi
, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P
}
3992 /* SPE predicates. */
3993 static struct builtin_description bdesc_spe_predicates
[] =
3995 /* Place-holder. Leave as first. */
3996 { 0, CODE_FOR_spe_evcmpeq
, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ
},
3997 { 0, CODE_FOR_spe_evcmpgts
, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS
},
3998 { 0, CODE_FOR_spe_evcmpgtu
, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU
},
3999 { 0, CODE_FOR_spe_evcmplts
, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS
},
4000 { 0, CODE_FOR_spe_evcmpltu
, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU
},
4001 { 0, CODE_FOR_spe_evfscmpeq
, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ
},
4002 { 0, CODE_FOR_spe_evfscmpgt
, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT
},
4003 { 0, CODE_FOR_spe_evfscmplt
, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT
},
4004 { 0, CODE_FOR_spe_evfststeq
, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ
},
4005 { 0, CODE_FOR_spe_evfststgt
, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT
},
4006 /* Place-holder. Leave as last. */
4007 { 0, CODE_FOR_spe_evfststlt
, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT
},
4010 /* SPE evsel predicates. */
4011 static struct builtin_description bdesc_spe_evsel
[] =
4013 /* Place-holder. Leave as first. */
4014 { 0, CODE_FOR_spe_evcmpgts
, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS
},
4015 { 0, CODE_FOR_spe_evcmpgtu
, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU
},
4016 { 0, CODE_FOR_spe_evcmplts
, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS
},
4017 { 0, CODE_FOR_spe_evcmpltu
, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU
},
4018 { 0, CODE_FOR_spe_evcmpeq
, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ
},
4019 { 0, CODE_FOR_spe_evfscmpgt
, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT
},
4020 { 0, CODE_FOR_spe_evfscmplt
, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT
},
4021 { 0, CODE_FOR_spe_evfscmpeq
, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ
},
4022 { 0, CODE_FOR_spe_evfststgt
, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT
},
4023 { 0, CODE_FOR_spe_evfststlt
, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT
},
4024 /* Place-holder. Leave as last. */
4025 { 0, CODE_FOR_spe_evfststeq
, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ
},
4028 /* ABS* opreations. */
4030 static const struct builtin_description bdesc_abs
[] =
4032 { MASK_ALTIVEC
, CODE_FOR_absv4si2
, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI
},
4033 { MASK_ALTIVEC
, CODE_FOR_absv8hi2
, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI
},
4034 { MASK_ALTIVEC
, CODE_FOR_absv4sf2
, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF
},
4035 { MASK_ALTIVEC
, CODE_FOR_absv16qi2
, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI
},
4036 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v4si
, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI
},
4037 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v8hi
, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI
},
4038 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v16qi
, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI
}
4041 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
4044 static struct builtin_description bdesc_1arg
[] =
4046 { MASK_ALTIVEC
, CODE_FOR_altivec_vexptefp
, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP
},
4047 { MASK_ALTIVEC
, CODE_FOR_altivec_vlogefp
, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP
},
4048 { MASK_ALTIVEC
, CODE_FOR_altivec_vrefp
, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP
},
4049 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfim
, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM
},
4050 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfin
, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN
},
4051 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfip
, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP
},
4052 { MASK_ALTIVEC
, CODE_FOR_ftruncv4sf2
, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ
},
4053 { MASK_ALTIVEC
, CODE_FOR_altivec_vrsqrtefp
, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP
},
4054 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltisb
, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB
},
4055 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltish
, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH
},
4056 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltisw
, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW
},
4057 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhsb
, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB
},
4058 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhpx
, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX
},
4059 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhsh
, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH
},
4060 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklsb
, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB
},
4061 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklpx
, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX
},
4062 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklsh
, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH
},
4064 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
4065 end with SPE_BUILTIN_EVSUBFUSIAAW. */
4066 { 0, CODE_FOR_spe_evabs
, "__builtin_spe_evabs", SPE_BUILTIN_EVABS
},
4067 { 0, CODE_FOR_spe_evaddsmiaaw
, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW
},
4068 { 0, CODE_FOR_spe_evaddssiaaw
, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW
},
4069 { 0, CODE_FOR_spe_evaddumiaaw
, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW
},
4070 { 0, CODE_FOR_spe_evaddusiaaw
, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW
},
4071 { 0, CODE_FOR_spe_evcntlsw
, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW
},
4072 { 0, CODE_FOR_spe_evcntlzw
, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW
},
4073 { 0, CODE_FOR_spe_evextsb
, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB
},
4074 { 0, CODE_FOR_spe_evextsh
, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH
},
4075 { 0, CODE_FOR_spe_evfsabs
, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS
},
4076 { 0, CODE_FOR_spe_evfscfsf
, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF
},
4077 { 0, CODE_FOR_spe_evfscfsi
, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI
},
4078 { 0, CODE_FOR_spe_evfscfuf
, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF
},
4079 { 0, CODE_FOR_spe_evfscfui
, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI
},
4080 { 0, CODE_FOR_spe_evfsctsf
, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF
},
4081 { 0, CODE_FOR_spe_evfsctsi
, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI
},
4082 { 0, CODE_FOR_spe_evfsctsiz
, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ
},
4083 { 0, CODE_FOR_spe_evfsctuf
, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF
},
4084 { 0, CODE_FOR_spe_evfsctui
, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI
},
4085 { 0, CODE_FOR_spe_evfsctuiz
, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ
},
4086 { 0, CODE_FOR_spe_evfsnabs
, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS
},
4087 { 0, CODE_FOR_spe_evfsneg
, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG
},
4088 { 0, CODE_FOR_spe_evmra
, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA
},
4089 { 0, CODE_FOR_spe_evneg
, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG
},
4090 { 0, CODE_FOR_spe_evrndw
, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW
},
4091 { 0, CODE_FOR_spe_evsubfsmiaaw
, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW
},
4092 { 0, CODE_FOR_spe_evsubfssiaaw
, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW
},
4093 { 0, CODE_FOR_spe_evsubfumiaaw
, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW
},
4094 { 0, CODE_FOR_spe_evsplatfi
, "__builtin_spe_evsplatfi", SPE_BUILTIN_EVSPLATFI
},
4095 { 0, CODE_FOR_spe_evsplati
, "__builtin_spe_evsplati", SPE_BUILTIN_EVSPLATI
},
4097 /* Place-holder. Leave as last unary SPE builtin. */
4098 { 0, CODE_FOR_spe_evsubfusiaaw
, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW
},
4102 rs6000_expand_unop_builtin (icode
, arglist
, target
)
4103 enum insn_code icode
;
4108 tree arg0
= TREE_VALUE (arglist
);
4109 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4110 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
4111 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4113 if (icode
== CODE_FOR_nothing
)
4114 /* Builtin not supported on this processor. */
4117 /* If we got invalid arguments bail out before generating bad rtl. */
4118 if (arg0
== error_mark_node
)
4121 if (icode
== CODE_FOR_altivec_vspltisb
4122 || icode
== CODE_FOR_altivec_vspltish
4123 || icode
== CODE_FOR_altivec_vspltisw
4124 || icode
== CODE_FOR_spe_evsplatfi
4125 || icode
== CODE_FOR_spe_evsplati
)
4127 /* Only allow 5-bit *signed* literals. */
4128 if (GET_CODE (op0
) != CONST_INT
4129 || INTVAL (op0
) > 0x1f
4130 || INTVAL (op0
) < -0x1f)
4132 error ("argument 1 must be a 5-bit signed literal");
4138 || GET_MODE (target
) != tmode
4139 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4140 target
= gen_reg_rtx (tmode
);
4142 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4143 op0
= copy_to_mode_reg (mode0
, op0
);
4145 pat
= GEN_FCN (icode
) (target
, op0
);
4154 altivec_expand_abs_builtin (icode
, arglist
, target
)
4155 enum insn_code icode
;
4159 rtx pat
, scratch1
, scratch2
;
4160 tree arg0
= TREE_VALUE (arglist
);
4161 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4162 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
4163 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4165 /* If we have invalid arguments, bail out before generating bad rtl. */
4166 if (arg0
== error_mark_node
)
4170 || GET_MODE (target
) != tmode
4171 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4172 target
= gen_reg_rtx (tmode
);
4174 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4175 op0
= copy_to_mode_reg (mode0
, op0
);
4177 scratch1
= gen_reg_rtx (mode0
);
4178 scratch2
= gen_reg_rtx (mode0
);
4180 pat
= GEN_FCN (icode
) (target
, op0
, scratch1
, scratch2
);
4189 rs6000_expand_binop_builtin (icode
, arglist
, target
)
4190 enum insn_code icode
;
4195 tree arg0
= TREE_VALUE (arglist
);
4196 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4197 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4198 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4199 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
4200 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4201 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
4203 if (icode
== CODE_FOR_nothing
)
4204 /* Builtin not supported on this processor. */
4207 /* If we got invalid arguments bail out before generating bad rtl. */
4208 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
4211 if (icode
== CODE_FOR_altivec_vcfux
4212 || icode
== CODE_FOR_altivec_vcfsx
4213 || icode
== CODE_FOR_altivec_vctsxs
4214 || icode
== CODE_FOR_altivec_vctuxs
4215 || icode
== CODE_FOR_altivec_vspltb
4216 || icode
== CODE_FOR_altivec_vsplth
4217 || icode
== CODE_FOR_altivec_vspltw
4218 || icode
== CODE_FOR_spe_evaddiw
4219 || icode
== CODE_FOR_spe_evldd
4220 || icode
== CODE_FOR_spe_evldh
4221 || icode
== CODE_FOR_spe_evldw
4222 || icode
== CODE_FOR_spe_evlhhesplat
4223 || icode
== CODE_FOR_spe_evlhhossplat
4224 || icode
== CODE_FOR_spe_evlhhousplat
4225 || icode
== CODE_FOR_spe_evlwhe
4226 || icode
== CODE_FOR_spe_evlwhos
4227 || icode
== CODE_FOR_spe_evlwhou
4228 || icode
== CODE_FOR_spe_evlwhsplat
4229 || icode
== CODE_FOR_spe_evlwwsplat
4230 || icode
== CODE_FOR_spe_evrlwi
4231 || icode
== CODE_FOR_spe_evslwi
4232 || icode
== CODE_FOR_spe_evsrwis
4233 || icode
== CODE_FOR_spe_evsrwiu
)
4235 /* Only allow 5-bit unsigned literals. */
4236 if (TREE_CODE (arg1
) != INTEGER_CST
4237 || TREE_INT_CST_LOW (arg1
) & ~0x1f)
4239 error ("argument 2 must be a 5-bit unsigned literal");
4245 || GET_MODE (target
) != tmode
4246 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4247 target
= gen_reg_rtx (tmode
);
4249 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4250 op0
= copy_to_mode_reg (mode0
, op0
);
4251 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
4252 op1
= copy_to_mode_reg (mode1
, op1
);
4254 pat
= GEN_FCN (icode
) (target
, op0
, op1
);
4263 altivec_expand_predicate_builtin (icode
, opcode
, arglist
, target
)
4264 enum insn_code icode
;
4270 tree cr6_form
= TREE_VALUE (arglist
);
4271 tree arg0
= TREE_VALUE (TREE_CHAIN (arglist
));
4272 tree arg1
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4273 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4274 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4275 enum machine_mode tmode
= SImode
;
4276 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4277 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
4280 if (TREE_CODE (cr6_form
) != INTEGER_CST
)
4282 error ("argument 1 of __builtin_altivec_predicate must be a constant");
4286 cr6_form_int
= TREE_INT_CST_LOW (cr6_form
);
4291 /* If we have invalid arguments, bail out before generating bad rtl. */
4292 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
4296 || GET_MODE (target
) != tmode
4297 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4298 target
= gen_reg_rtx (tmode
);
4300 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4301 op0
= copy_to_mode_reg (mode0
, op0
);
4302 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
4303 op1
= copy_to_mode_reg (mode1
, op1
);
4305 scratch
= gen_reg_rtx (mode0
);
4307 pat
= GEN_FCN (icode
) (scratch
, op0
, op1
,
4308 gen_rtx (SYMBOL_REF
, Pmode
, opcode
));
4313 /* The vec_any* and vec_all* predicates use the same opcodes for two
4314 different operations, but the bits in CR6 will be different
4315 depending on what information we want. So we have to play tricks
4316 with CR6 to get the right bits out.
4318 If you think this is disgusting, look at the specs for the
4319 AltiVec predicates. */
4321 switch (cr6_form_int
)
4324 emit_insn (gen_cr6_test_for_zero (target
));
4327 emit_insn (gen_cr6_test_for_zero_reverse (target
));
4330 emit_insn (gen_cr6_test_for_lt (target
));
4333 emit_insn (gen_cr6_test_for_lt_reverse (target
));
4336 error ("argument 1 of __builtin_altivec_predicate is out of range");
4344 altivec_expand_stv_builtin (icode
, arglist
)
4345 enum insn_code icode
;
4348 tree arg0
= TREE_VALUE (arglist
);
4349 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4350 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4351 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4352 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4353 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
4355 enum machine_mode mode0
= insn_data
[icode
].operand
[0].mode
;
4356 enum machine_mode mode1
= insn_data
[icode
].operand
[1].mode
;
4357 enum machine_mode mode2
= insn_data
[icode
].operand
[2].mode
;
4359 /* Invalid arguments. Bail before doing anything stoopid! */
4360 if (arg0
== error_mark_node
4361 || arg1
== error_mark_node
4362 || arg2
== error_mark_node
)
4365 if (! (*insn_data
[icode
].operand
[2].predicate
) (op0
, mode2
))
4366 op0
= copy_to_mode_reg (mode2
, op0
);
4367 if (! (*insn_data
[icode
].operand
[0].predicate
) (op1
, mode0
))
4368 op1
= copy_to_mode_reg (mode0
, op1
);
4369 if (! (*insn_data
[icode
].operand
[1].predicate
) (op2
, mode1
))
4370 op2
= copy_to_mode_reg (mode1
, op2
);
4372 pat
= GEN_FCN (icode
) (op1
, op2
, op0
);
4379 rs6000_expand_ternop_builtin (icode
, arglist
, target
)
4380 enum insn_code icode
;
4385 tree arg0
= TREE_VALUE (arglist
);
4386 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4387 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4388 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4389 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4390 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
4391 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
4392 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4393 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
4394 enum machine_mode mode2
= insn_data
[icode
].operand
[3].mode
;
4396 if (icode
== CODE_FOR_nothing
)
4397 /* Builtin not supported on this processor. */
4400 /* If we got invalid arguments bail out before generating bad rtl. */
4401 if (arg0
== error_mark_node
4402 || arg1
== error_mark_node
4403 || arg2
== error_mark_node
)
4406 if (icode
== CODE_FOR_altivec_vsldoi_4sf
4407 || icode
== CODE_FOR_altivec_vsldoi_4si
4408 || icode
== CODE_FOR_altivec_vsldoi_8hi
4409 || icode
== CODE_FOR_altivec_vsldoi_16qi
)
4411 /* Only allow 4-bit unsigned literals. */
4412 if (TREE_CODE (arg2
) != INTEGER_CST
4413 || TREE_INT_CST_LOW (arg2
) & ~0xf)
4415 error ("argument 3 must be a 4-bit unsigned literal");
4421 || GET_MODE (target
) != tmode
4422 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4423 target
= gen_reg_rtx (tmode
);
4425 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4426 op0
= copy_to_mode_reg (mode0
, op0
);
4427 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
4428 op1
= copy_to_mode_reg (mode1
, op1
);
4429 if (! (*insn_data
[icode
].operand
[3].predicate
) (op2
, mode2
))
4430 op2
= copy_to_mode_reg (mode2
, op2
);
4432 pat
= GEN_FCN (icode
) (target
, op0
, op1
, op2
);
4440 /* Expand the lvx builtins. */
4442 altivec_expand_ld_builtin (exp
, target
, expandedp
)
4447 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4448 tree arglist
= TREE_OPERAND (exp
, 1);
4449 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4451 enum machine_mode tmode
, mode0
;
4453 enum insn_code icode
;
4457 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi
:
4458 icode
= CODE_FOR_altivec_lvx_16qi
;
4460 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi
:
4461 icode
= CODE_FOR_altivec_lvx_8hi
;
4463 case ALTIVEC_BUILTIN_LD_INTERNAL_4si
:
4464 icode
= CODE_FOR_altivec_lvx_4si
;
4466 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf
:
4467 icode
= CODE_FOR_altivec_lvx_4sf
;
4476 arg0
= TREE_VALUE (arglist
);
4477 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4478 tmode
= insn_data
[icode
].operand
[0].mode
;
4479 mode0
= insn_data
[icode
].operand
[1].mode
;
4482 || GET_MODE (target
) != tmode
4483 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4484 target
= gen_reg_rtx (tmode
);
4486 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4487 op0
= gen_rtx_MEM (mode0
, copy_to_mode_reg (Pmode
, op0
));
4489 pat
= GEN_FCN (icode
) (target
, op0
);
4496 /* Expand the stvx builtins. */
4498 altivec_expand_st_builtin (exp
, target
, expandedp
)
4500 rtx target ATTRIBUTE_UNUSED
;
4503 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4504 tree arglist
= TREE_OPERAND (exp
, 1);
4505 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4507 enum machine_mode mode0
, mode1
;
4509 enum insn_code icode
;
4513 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi
:
4514 icode
= CODE_FOR_altivec_stvx_16qi
;
4516 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi
:
4517 icode
= CODE_FOR_altivec_stvx_8hi
;
4519 case ALTIVEC_BUILTIN_ST_INTERNAL_4si
:
4520 icode
= CODE_FOR_altivec_stvx_4si
;
4522 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf
:
4523 icode
= CODE_FOR_altivec_stvx_4sf
;
4530 arg0
= TREE_VALUE (arglist
);
4531 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4532 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4533 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4534 mode0
= insn_data
[icode
].operand
[0].mode
;
4535 mode1
= insn_data
[icode
].operand
[1].mode
;
4537 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
4538 op0
= gen_rtx_MEM (mode0
, copy_to_mode_reg (Pmode
, op0
));
4539 if (! (*insn_data
[icode
].operand
[1].predicate
) (op1
, mode1
))
4540 op1
= copy_to_mode_reg (mode1
, op1
);
4542 pat
= GEN_FCN (icode
) (op0
, op1
);
4550 /* Expand the dst builtins. */
4552 altivec_expand_dst_builtin (exp
, target
, expandedp
)
4554 rtx target ATTRIBUTE_UNUSED
;
4557 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4558 tree arglist
= TREE_OPERAND (exp
, 1);
4559 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4560 tree arg0
, arg1
, arg2
;
4561 enum machine_mode mode0
, mode1
, mode2
;
4562 rtx pat
, op0
, op1
, op2
;
4563 struct builtin_description
*d
;
4568 /* Handle DST variants. */
4569 d
= (struct builtin_description
*) bdesc_dst
;
4570 for (i
= 0; i
< ARRAY_SIZE (bdesc_dst
); i
++, d
++)
4571 if (d
->code
== fcode
)
4573 arg0
= TREE_VALUE (arglist
);
4574 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4575 arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4576 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4577 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4578 op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
4579 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
4580 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
4581 mode2
= insn_data
[d
->icode
].operand
[2].mode
;
4583 /* Invalid arguments, bail out before generating bad rtl. */
4584 if (arg0
== error_mark_node
4585 || arg1
== error_mark_node
4586 || arg2
== error_mark_node
)
4589 if (TREE_CODE (arg2
) != INTEGER_CST
4590 || TREE_INT_CST_LOW (arg2
) & ~0x3)
4592 error ("argument to `%s' must be a 2-bit unsigned literal", d
->name
);
4596 if (! (*insn_data
[d
->icode
].operand
[0].predicate
) (op0
, mode0
))
4597 op0
= copy_to_mode_reg (mode0
, op0
);
4598 if (! (*insn_data
[d
->icode
].operand
[1].predicate
) (op1
, mode1
))
4599 op1
= copy_to_mode_reg (mode1
, op1
);
4601 pat
= GEN_FCN (d
->icode
) (op0
, op1
, op2
);
4612 /* Expand the builtin in EXP and store the result in TARGET. Store
4613 true in *EXPANDEDP if we found a builtin to expand. */
4615 altivec_expand_builtin (exp
, target
, expandedp
)
4620 struct builtin_description
*d
;
4621 struct builtin_description_predicates
*dp
;
4623 enum insn_code icode
;
4624 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4625 tree arglist
= TREE_OPERAND (exp
, 1);
4628 enum machine_mode tmode
, mode0
;
4629 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4631 target
= altivec_expand_ld_builtin (exp
, target
, expandedp
);
4635 target
= altivec_expand_st_builtin (exp
, target
, expandedp
);
4639 target
= altivec_expand_dst_builtin (exp
, target
, expandedp
);
4647 case ALTIVEC_BUILTIN_STVX
:
4648 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx
, arglist
);
4649 case ALTIVEC_BUILTIN_STVEBX
:
4650 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx
, arglist
);
4651 case ALTIVEC_BUILTIN_STVEHX
:
4652 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx
, arglist
);
4653 case ALTIVEC_BUILTIN_STVEWX
:
4654 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx
, arglist
);
4655 case ALTIVEC_BUILTIN_STVXL
:
4656 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl
, arglist
);
4658 case ALTIVEC_BUILTIN_MFVSCR
:
4659 icode
= CODE_FOR_altivec_mfvscr
;
4660 tmode
= insn_data
[icode
].operand
[0].mode
;
4663 || GET_MODE (target
) != tmode
4664 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4665 target
= gen_reg_rtx (tmode
);
4667 pat
= GEN_FCN (icode
) (target
);
4673 case ALTIVEC_BUILTIN_MTVSCR
:
4674 icode
= CODE_FOR_altivec_mtvscr
;
4675 arg0
= TREE_VALUE (arglist
);
4676 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4677 mode0
= insn_data
[icode
].operand
[0].mode
;
4679 /* If we got invalid arguments bail out before generating bad rtl. */
4680 if (arg0
== error_mark_node
)
4683 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
4684 op0
= copy_to_mode_reg (mode0
, op0
);
4686 pat
= GEN_FCN (icode
) (op0
);
4691 case ALTIVEC_BUILTIN_DSSALL
:
4692 emit_insn (gen_altivec_dssall ());
4695 case ALTIVEC_BUILTIN_DSS
:
4696 icode
= CODE_FOR_altivec_dss
;
4697 arg0
= TREE_VALUE (arglist
);
4698 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4699 mode0
= insn_data
[icode
].operand
[0].mode
;
4701 /* If we got invalid arguments bail out before generating bad rtl. */
4702 if (arg0
== error_mark_node
)
4705 if (TREE_CODE (arg0
) != INTEGER_CST
4706 || TREE_INT_CST_LOW (arg0
) & ~0x3)
4708 error ("argument to dss must be a 2-bit unsigned literal");
4712 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
4713 op0
= copy_to_mode_reg (mode0
, op0
);
4715 emit_insn (gen_altivec_dss (op0
));
4719 /* Expand abs* operations. */
4720 d
= (struct builtin_description
*) bdesc_abs
;
4721 for (i
= 0; i
< ARRAY_SIZE (bdesc_abs
); i
++, d
++)
4722 if (d
->code
== fcode
)
4723 return altivec_expand_abs_builtin (d
->icode
, arglist
, target
);
4725 /* Expand the AltiVec predicates. */
4726 dp
= (struct builtin_description_predicates
*) bdesc_altivec_preds
;
4727 for (i
= 0; i
< ARRAY_SIZE (bdesc_altivec_preds
); i
++, dp
++)
4728 if (dp
->code
== fcode
)
4729 return altivec_expand_predicate_builtin (dp
->icode
, dp
->opcode
, arglist
, target
);
4731 /* LV* are funky. We initialized them differently. */
4734 case ALTIVEC_BUILTIN_LVSL
:
4735 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsl
,
4737 case ALTIVEC_BUILTIN_LVSR
:
4738 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsr
,
4740 case ALTIVEC_BUILTIN_LVEBX
:
4741 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvebx
,
4743 case ALTIVEC_BUILTIN_LVEHX
:
4744 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvehx
,
4746 case ALTIVEC_BUILTIN_LVEWX
:
4747 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvewx
,
4749 case ALTIVEC_BUILTIN_LVXL
:
4750 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvxl
,
4752 case ALTIVEC_BUILTIN_LVX
:
4753 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvx
,
4764 /* Binops that need to be initialized manually, but can be expanded
4765 automagically by rs6000_expand_binop_builtin. */
4766 static struct builtin_description bdesc_2arg_spe
[] =
4768 { 0, CODE_FOR_spe_evlddx
, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX
},
4769 { 0, CODE_FOR_spe_evldwx
, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX
},
4770 { 0, CODE_FOR_spe_evldhx
, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX
},
4771 { 0, CODE_FOR_spe_evlwhex
, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX
},
4772 { 0, CODE_FOR_spe_evlwhoux
, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX
},
4773 { 0, CODE_FOR_spe_evlwhosx
, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX
},
4774 { 0, CODE_FOR_spe_evlwwsplatx
, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX
},
4775 { 0, CODE_FOR_spe_evlwhsplatx
, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX
},
4776 { 0, CODE_FOR_spe_evlhhesplatx
, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX
},
4777 { 0, CODE_FOR_spe_evlhhousplatx
, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX
},
4778 { 0, CODE_FOR_spe_evlhhossplatx
, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX
},
4779 { 0, CODE_FOR_spe_evldd
, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD
},
4780 { 0, CODE_FOR_spe_evldw
, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW
},
4781 { 0, CODE_FOR_spe_evldh
, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH
},
4782 { 0, CODE_FOR_spe_evlwhe
, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE
},
4783 { 0, CODE_FOR_spe_evlwhou
, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU
},
4784 { 0, CODE_FOR_spe_evlwhos
, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS
},
4785 { 0, CODE_FOR_spe_evlwwsplat
, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT
},
4786 { 0, CODE_FOR_spe_evlwhsplat
, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT
},
4787 { 0, CODE_FOR_spe_evlhhesplat
, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT
},
4788 { 0, CODE_FOR_spe_evlhhousplat
, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT
},
4789 { 0, CODE_FOR_spe_evlhhossplat
, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT
}
4792 /* Expand the builtin in EXP and store the result in TARGET. Store
4793 true in *EXPANDEDP if we found a builtin to expand.
4795 This expands the SPE builtins that are not simple unary and binary
4798 spe_expand_builtin (exp
, target
, expandedp
)
4803 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4804 tree arglist
= TREE_OPERAND (exp
, 1);
4806 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4807 enum insn_code icode
;
4808 enum machine_mode tmode
, mode0
;
4810 struct builtin_description
*d
;
4815 /* Syntax check for a 5-bit unsigned immediate. */
4818 case SPE_BUILTIN_EVSTDD
:
4819 case SPE_BUILTIN_EVSTDH
:
4820 case SPE_BUILTIN_EVSTDW
:
4821 case SPE_BUILTIN_EVSTWHE
:
4822 case SPE_BUILTIN_EVSTWHO
:
4823 case SPE_BUILTIN_EVSTWWE
:
4824 case SPE_BUILTIN_EVSTWWO
:
4825 arg1
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4826 if (TREE_CODE (arg1
) != INTEGER_CST
4827 || TREE_INT_CST_LOW (arg1
) & ~0x1f)
4829 error ("argument 2 must be a 5-bit unsigned literal");
4837 d
= (struct builtin_description
*) bdesc_2arg_spe
;
4838 for (i
= 0; i
< ARRAY_SIZE (bdesc_2arg_spe
); ++i
, ++d
)
4839 if (d
->code
== fcode
)
4840 return rs6000_expand_binop_builtin (d
->icode
, arglist
, target
);
4842 d
= (struct builtin_description
*) bdesc_spe_predicates
;
4843 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_predicates
); ++i
, ++d
)
4844 if (d
->code
== fcode
)
4845 return spe_expand_predicate_builtin (d
->icode
, arglist
, target
);
4847 d
= (struct builtin_description
*) bdesc_spe_evsel
;
4848 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_evsel
); ++i
, ++d
)
4849 if (d
->code
== fcode
)
4850 return spe_expand_evsel_builtin (d
->icode
, arglist
, target
);
4854 case SPE_BUILTIN_EVSTDDX
:
4855 return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx
, arglist
);
4856 case SPE_BUILTIN_EVSTDHX
:
4857 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx
, arglist
);
4858 case SPE_BUILTIN_EVSTDWX
:
4859 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx
, arglist
);
4860 case SPE_BUILTIN_EVSTWHEX
:
4861 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex
, arglist
);
4862 case SPE_BUILTIN_EVSTWHOX
:
4863 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox
, arglist
);
4864 case SPE_BUILTIN_EVSTWWEX
:
4865 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex
, arglist
);
4866 case SPE_BUILTIN_EVSTWWOX
:
4867 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox
, arglist
);
4868 case SPE_BUILTIN_EVSTDD
:
4869 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd
, arglist
);
4870 case SPE_BUILTIN_EVSTDH
:
4871 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh
, arglist
);
4872 case SPE_BUILTIN_EVSTDW
:
4873 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw
, arglist
);
4874 case SPE_BUILTIN_EVSTWHE
:
4875 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe
, arglist
);
4876 case SPE_BUILTIN_EVSTWHO
:
4877 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho
, arglist
);
4878 case SPE_BUILTIN_EVSTWWE
:
4879 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe
, arglist
);
4880 case SPE_BUILTIN_EVSTWWO
:
4881 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo
, arglist
);
4882 case SPE_BUILTIN_MFSPEFSCR
:
4883 icode
= CODE_FOR_spe_mfspefscr
;
4884 tmode
= insn_data
[icode
].operand
[0].mode
;
4887 || GET_MODE (target
) != tmode
4888 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4889 target
= gen_reg_rtx (tmode
);
4891 pat
= GEN_FCN (icode
) (target
);
4896 case SPE_BUILTIN_MTSPEFSCR
:
4897 icode
= CODE_FOR_spe_mtspefscr
;
4898 arg0
= TREE_VALUE (arglist
);
4899 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4900 mode0
= insn_data
[icode
].operand
[0].mode
;
4902 if (arg0
== error_mark_node
)
4905 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
4906 op0
= copy_to_mode_reg (mode0
, op0
);
4908 pat
= GEN_FCN (icode
) (op0
);
/* Expand one of the __builtin_spe_predicate builtins.  ICODE is the insn
   pattern for the underlying SPE vector compare, ARGLIST holds the integer
   "form" selector followed by the two vector operands, and TARGET is a
   suggested SImode destination for the 0/1 result.
   NOTE(review): this excerpt is missing several original source lines
   (function braces, the switch/case labels that pick the rtx code, and the
   return statements) — confirm control flow against the full file.  */
4921 spe_expand_predicate_builtin (icode
, arglist
, target
)
4922 enum insn_code icode
;
4926 rtx pat
, scratch
, tmp
;
/* Pull the three arguments off the TREE_LIST and expand the two
   vector operands to rtl.  */
4927 tree form
= TREE_VALUE (arglist
);
4928 tree arg0
= TREE_VALUE (TREE_CHAIN (arglist
));
4929 tree arg1
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4930 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4931 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
/* Operand modes required by the compare pattern (operand 0 is the
   CC-mode scratch, 1 and 2 are the vector inputs).  */
4932 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4933 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
/* The form selector must be a compile-time integer constant.  */
4937 if (TREE_CODE (form
) != INTEGER_CST
)
4939 error ("argument 1 of __builtin_spe_predicate must be a constant");
4943 form_int
= TREE_INT_CST_LOW (form
);
/* Bail out on operands the front end already flagged as erroneous.  */
4948 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
/* Use a fresh SImode pseudo if the suggested TARGET is unsuitable.  */
4952 || GET_MODE (target
) != SImode
4953 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, SImode
))
4954 target
= gen_reg_rtx (SImode
)
;
/* Force each input into a form the pattern's predicates accept.  */
4956 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4957 op0
= copy_to_mode_reg (mode0
, op0
);
4958 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
4959 op1
= copy_to_mode_reg (mode1
, op1
);
/* The compare deposits its result in a CC scratch, not in TARGET.  */
4961 scratch
= gen_reg_rtx (CCmode
);
4963 pat
= GEN_FCN (icode
) (scratch
, op0
, op1
);
4968 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
4969 _lower_. We use one compare, but look in different bits of the
4970 CR for each variant.
4972 There are 2 elements in each SPE simd type (upper/lower). The CR
4973 bits are set as follows:
4975 BIT0 | BIT 1 | BIT 2 | BIT 3
4976 U | L | (U | L) | (U & L)
4978 So, for an "all" relationship, BIT 3 would be set.
4979 For an "any" relationship, BIT 2 would be set. Etc.
4981 Following traditional nomenclature, these bits map to:
4983 BIT0 | BIT 1 | BIT 2 | BIT 3
4986 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
4991 /* All variant. OV bit. */
4993 /* We need to get to the OV bit, which is the ORDERED bit. We
4994 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
4995 that's ugly and will trigger a validate_condition_mode abort.
4996 So let's just use another pattern. */
4997 emit_insn (gen_move_from_CR_ov_bit (target
, scratch
));
4999 /* Any variant. EQ bit. */
5003 /* Upper variant. LT bit. */
5007 /* Lower variant. GT bit. */
5012 error ("argument 1 of __builtin_spe_predicate is out of range");
/* Materialize the selected CR condition as a 0/1 SImode value.
   NOTE(review): `code' is set in the (not visible) case arms above.  */
5016 tmp
= gen_rtx_fmt_ee (code
, SImode
, scratch
, const0_rtx
);
5017 emit_move_insn (target
, tmp
);
5022 /* The evsel builtins look like this:
5024 e = __builtin_spe_evsel_OP (a, b, c, d);
5028 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
5029 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
/* ICODE is the compare pattern for OP, ARGLIST carries a, b, c, d, and
   TARGET is a suggested destination.  A single compare sets a CC scratch,
   then an evsel insn picks per-element between c and d.
   NOTE(review): some original lines (braces, returns, the emit of the
   compare pattern's result) are missing from this excerpt.  */
5033 spe_expand_evsel_builtin (icode
, arglist
, target
)
5034 enum insn_code icode
;
/* The four builtin arguments: comparands a/b, then select values c/d.  */
5039 tree arg0
= TREE_VALUE (arglist
);
5040 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
5041 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
5042 tree arg3
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist
))));
5043 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
5044 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
5045 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
5046 rtx op3
= expand_expr (arg3
, NULL_RTX
, VOIDmode
, 0);
/* Modes of the compare's two vector inputs (presumably equal for
   evsel patterns — confirm against the .md file).  */
5047 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
5048 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
/* Give up if the front end already reported errors in any operand.  */
5053 if (arg0
== error_mark_node
|| arg1
== error_mark_node
5054 || arg2
== error_mark_node
|| arg3
== error_mark_node
)
/* Pick a fresh result register when TARGET is absent or unsuitable.  */
5058 || GET_MODE (target
) != mode0
5059 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, mode0
))
5060 target
= gen_reg_rtx (mode0
);
/* Legitimize all four inputs against the compare's operand-1 predicate;
   all are copied into MODE0 registers when rejected.  */
5062 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
5063 op0
= copy_to_mode_reg (mode0
, op0
);
5064 if (! (*insn_data
[icode
].operand
[1].predicate
) (op1
, mode1
))
5065 op1
= copy_to_mode_reg (mode0
, op1
);
5066 if (! (*insn_data
[icode
].operand
[1].predicate
) (op2
, mode1
))
5067 op2
= copy_to_mode_reg (mode0
, op2
);
5068 if (! (*insn_data
[icode
].operand
[1].predicate
) (op3
, mode1
))
5069 op3
= copy_to_mode_reg (mode0
, op3
);
5071 /* Generate the compare. */
5072 scratch
= gen_reg_rtx (CCmode
);
5073 pat
= GEN_FCN (icode
) (scratch
, op0
, op1
);
/* Select between op2/op3 per element based on the CC result; the _fs
   variant handles the float (V2SF) case.  */
5078 if (mode0
== V2SImode
)
5079 emit_insn (gen_spe_evsel (target
, op2
, op3
, scratch
));
5081 emit_insn (gen_spe_evsel_fs (target
, op2
, op3
, scratch
));
5086 /* Expand an expression EXP that calls a built-in function,
5087 with result going to TARGET if that's convenient
5088 (and in mode MODE if that's convenient).
5089 SUBTARGET may be used as the target for computing one of EXP's operands.
5090 IGNORE is nonzero if the value is to be ignored. */
/* Target hook: dispatches first to the AltiVec and SPE expanders, then
   falls back to the shared unary/binary/ternary builtin tables.
   NOTE(review): lines that test TARGET_ALTIVEC / TARGET_SPE before the
   two `ret = ...' calls, and the success-check returns, are missing
   from this excerpt.  */
5093 rs6000_expand_builtin (exp
, target
, subtarget
, mode
, ignore
)
5096 rtx subtarget ATTRIBUTE_UNUSED
;
5097 enum machine_mode mode ATTRIBUTE_UNUSED
;
5098 int ignore ATTRIBUTE_UNUSED
;
/* Recover the FUNCTION_DECL and its builtin code from the CALL_EXPR.  */
5100 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
5101 tree arglist
= TREE_OPERAND (exp
, 1);
5102 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
5103 struct builtin_description
*d
;
/* Try the target-specific expanders; `success' reports whether the
   builtin was recognized.  */
5110 ret
= altivec_expand_builtin (exp
, target
, &success
);
5117 ret
= spe_expand_builtin (exp
, target
, &success
);
/* Simple operators shared between AltiVec and SPE live in the
   bdesc_* tables; scan them by builtin code.  */
5123 if (TARGET_ALTIVEC
|| TARGET_SPE
)
5125 /* Handle simple unary operations. */
5126 d
= (struct builtin_description
*) bdesc_1arg
;
5127 for (i
= 0; i
< ARRAY_SIZE (bdesc_1arg
); i
++, d
++)
5128 if (d
->code
== fcode
)
5129 return rs6000_expand_unop_builtin (d
->icode
, arglist
, target
);
5131 /* Handle simple binary operations. */
5132 d
= (struct builtin_description
*) bdesc_2arg
;
5133 for (i
= 0; i
< ARRAY_SIZE (bdesc_2arg
); i
++, d
++)
5134 if (d
->code
== fcode
)
5135 return rs6000_expand_binop_builtin (d
->icode
, arglist
, target
);
5137 /* Handle simple ternary operations. */
5138 d
= (struct builtin_description
*) bdesc_3arg
;
5139 for (i
= 0; i
< ARRAY_SIZE (bdesc_3arg
); i
++, d
++)
5140 if (d
->code
== fcode
)
5141 return rs6000_expand_ternop_builtin (d
->icode
, arglist
, target
);
/* Register all rs6000 builtins with the front end.  SPE and AltiVec get
   their own irregular builtins; the shared unary/binary/ternary tables
   are registered once when either extension is enabled.
   NOTE(review): the guards before the first two calls (presumably
   `if (TARGET_SPE)' and `if (TARGET_ALTIVEC)') fall on lines missing
   from this excerpt — confirm against the full file.  */
5149 rs6000_init_builtins ()
5152 spe_init_builtins ();
5154 altivec_init_builtins ();
5155 if (TARGET_ALTIVEC
|| TARGET_SPE
)
5156 rs6000_common_init_builtins ();
5159 /* Search through a set of builtins and enable the mask bits.
5160 DESC is an array of builtins.
5161 SIZE is the total number of builtins.
5162 START is the builtin enum at which to start.
5163 END is the builtin enum at which to end. */
5165 enable_mask_for_builtins (desc
, size
, start
, end
)
5166 struct builtin_description
*desc
;
5168 enum rs6000_builtins start
, end
;
/* Find the first entry whose code matches START ...  */
5172 for (i
= 0; i
< size
; ++i
)
5173 if (desc
[i
].code
== start
)
/* ... then enable every entry from there through END inclusive.
   NOTE(review): the `break' statements ending both loops are on lines
   not visible in this excerpt.  */
5179 for (; i
< size
; ++i
)
5181 /* Flip all the bits on. */
5182 desc
[i
].mask
= target_flags
;
5183 if (desc
[i
].code
== end
)
/* Create the SPE-specific builtin function declarations: the signature
   (tree) types first, then the irregular load/store and SPEFSCR builtins,
   then the predicate and evsel builtins whose signatures depend on the
   insn pattern's operand mode.
   NOTE(review): many closing `endlink'/parenthesis lines of the
   build_function_type chains, plus loop braces and switch/case labels,
   are missing from this excerpt.  */
5189 spe_init_builtins ()
5191 tree endlink
= void_list_node
;
/* Pointer types used by the load/store builtins below.  */
5192 tree puint_type_node
= build_pointer_type (unsigned_type_node
);
5193 tree pushort_type_node
= build_pointer_type (short_unsigned_type_node
);
5194 tree pv2si_type_node
= build_pointer_type (V2SI_type_node
);
5195 struct builtin_description
*d
;
/* v2si f(v2si, v2si, v2si, v2si) — signature for integer evsel.  */
5198 tree v2si_ftype_4_v2si
5199 = build_function_type
5201 tree_cons (NULL_TREE
, V2SI_type_node
,
5202 tree_cons (NULL_TREE
, V2SI_type_node
,
5203 tree_cons (NULL_TREE
, V2SI_type_node
,
5204 tree_cons (NULL_TREE
, V2SI_type_node
,
/* v2sf f(v2sf, v2sf, v2sf, v2sf) — signature for float evsel.  */
5207 tree v2sf_ftype_4_v2sf
5208 = build_function_type
5210 tree_cons (NULL_TREE
, V2SF_type_node
,
5211 tree_cons (NULL_TREE
, V2SF_type_node
,
5212 tree_cons (NULL_TREE
, V2SF_type_node
,
5213 tree_cons (NULL_TREE
, V2SF_type_node
,
/* int f(int, v2si, v2si) / int f(int, v2sf, v2sf) — predicates.  */
5216 tree int_ftype_int_v2si_v2si
5217 = build_function_type
5219 tree_cons (NULL_TREE
, integer_type_node
,
5220 tree_cons (NULL_TREE
, V2SI_type_node
,
5221 tree_cons (NULL_TREE
, V2SI_type_node
,
5224 tree int_ftype_int_v2sf_v2sf
5225 = build_function_type
5227 tree_cons (NULL_TREE
, integer_type_node
,
5228 tree_cons (NULL_TREE
, V2SF_type_node
,
5229 tree_cons (NULL_TREE
, V2SF_type_node
,
/* Store signatures: value, pointer, offset (int or 5-bit char).  */
5232 tree void_ftype_v2si_puint_int
5233 = build_function_type (void_type_node
,
5234 tree_cons (NULL_TREE
, V2SI_type_node
,
5235 tree_cons (NULL_TREE
, puint_type_node
,
5236 tree_cons (NULL_TREE
,
5240 tree void_ftype_v2si_puint_char
5241 = build_function_type (void_type_node
,
5242 tree_cons (NULL_TREE
, V2SI_type_node
,
5243 tree_cons (NULL_TREE
, puint_type_node
,
5244 tree_cons (NULL_TREE
,
5248 tree void_ftype_v2si_pv2si_int
5249 = build_function_type (void_type_node
,
5250 tree_cons (NULL_TREE
, V2SI_type_node
,
5251 tree_cons (NULL_TREE
, pv2si_type_node
,
5252 tree_cons (NULL_TREE
,
5256 tree void_ftype_v2si_pv2si_char
5257 = build_function_type (void_type_node
,
5258 tree_cons (NULL_TREE
, V2SI_type_node
,
5259 tree_cons (NULL_TREE
, pv2si_type_node
,
5260 tree_cons (NULL_TREE
,
/* void f(int) and int f(void) for the SPEFSCR move builtins.  */
5265 = build_function_type (void_type_node
,
5266 tree_cons (NULL_TREE
, integer_type_node
, endlink
));
5269 = build_function_type (integer_type_node
,
5270 tree_cons (NULL_TREE
, void_type_node
, endlink
));
/* Load signatures: pointer plus offset.  */
5272 tree v2si_ftype_pv2si_int
5273 = build_function_type (V2SI_type_node
,
5274 tree_cons (NULL_TREE
, pv2si_type_node
,
5275 tree_cons (NULL_TREE
, integer_type_node
,
5278 tree v2si_ftype_puint_int
5279 = build_function_type (V2SI_type_node
,
5280 tree_cons (NULL_TREE
, puint_type_node
,
5281 tree_cons (NULL_TREE
, integer_type_node
,
5284 tree v2si_ftype_pushort_int
5285 = build_function_type (V2SI_type_node
,
5286 tree_cons (NULL_TREE
, pushort_type_node
,
5287 tree_cons (NULL_TREE
, integer_type_node
,
5290 /* The initialization of the simple binary and unary builtins is
5291 done in rs6000_common_init_builtins, but we have to enable the
5292 mask bits here manually because we have run out of `target_flags'
5293 bits. We really need to redesign this mask business. */
5295 enable_mask_for_builtins ((struct builtin_description
*) bdesc_2arg
,
5296 ARRAY_SIZE (bdesc_2arg
),
5299 enable_mask_for_builtins ((struct builtin_description
*) bdesc_1arg
,
5300 ARRAY_SIZE (bdesc_1arg
),
5302 SPE_BUILTIN_EVSUBFUSIAAW
);
5303 enable_mask_for_builtins ((struct builtin_description
*) bdesc_spe_predicates
,
5304 ARRAY_SIZE (bdesc_spe_predicates
),
5305 SPE_BUILTIN_EVCMPEQ
,
5306 SPE_BUILTIN_EVFSTSTLT
);
5307 enable_mask_for_builtins ((struct builtin_description
*) bdesc_spe_evsel
,
5308 ARRAY_SIZE (bdesc_spe_evsel
),
5309 SPE_BUILTIN_EVSEL_CMPGTS
,
5310 SPE_BUILTIN_EVSEL_FSTSTEQ
);
5312 /* Initialize irregular SPE builtins. */
5314 def_builtin (target_flags
, "__builtin_spe_mtspefscr", void_ftype_int
, SPE_BUILTIN_MTSPEFSCR
);
5315 def_builtin (target_flags
, "__builtin_spe_mfspefscr", int_ftype_void
, SPE_BUILTIN_MFSPEFSCR
);
/* Indexed (register-offset) stores.  */
5316 def_builtin (target_flags
, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int
, SPE_BUILTIN_EVSTDDX
);
5317 def_builtin (target_flags
, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int
, SPE_BUILTIN_EVSTDHX
);
5318 def_builtin (target_flags
, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int
, SPE_BUILTIN_EVSTDWX
);
5319 def_builtin (target_flags
, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWHEX
);
5320 def_builtin (target_flags
, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWHOX
);
5321 def_builtin (target_flags
, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWWEX
);
5322 def_builtin (target_flags
, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWWOX
);
/* Immediate-offset stores (offset is a small literal, hence `char').  */
5323 def_builtin (target_flags
, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char
, SPE_BUILTIN_EVSTDD
);
5324 def_builtin (target_flags
, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char
, SPE_BUILTIN_EVSTDH
);
5325 def_builtin (target_flags
, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char
, SPE_BUILTIN_EVSTDW
);
5326 def_builtin (target_flags
, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWHE
);
5327 def_builtin (target_flags
, "__builtin_spe_evstwho", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWHO
);
5328 def_builtin (target_flags
, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWWE
);
5329 def_builtin (target_flags
, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWWO
);
/* Indexed loads.  */
5332 def_builtin (target_flags
, "__builtin_spe_evlddx", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDDX
);
5333 def_builtin (target_flags
, "__builtin_spe_evldwx", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDWX
);
5334 def_builtin (target_flags
, "__builtin_spe_evldhx", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDHX
);
5335 def_builtin (target_flags
, "__builtin_spe_evlwhex", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHEX
);
5336 def_builtin (target_flags
, "__builtin_spe_evlwhoux", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOUX
);
5337 def_builtin (target_flags
, "__builtin_spe_evlwhosx", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOSX
);
5338 def_builtin (target_flags
, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWWSPLATX
);
5339 def_builtin (target_flags
, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHSPLATX
);
5340 def_builtin (target_flags
, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHESPLATX
);
5341 def_builtin (target_flags
, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOUSPLATX
);
5342 def_builtin (target_flags
, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOSSPLATX
);
/* Immediate-offset loads.  */
5343 def_builtin (target_flags
, "__builtin_spe_evldd", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDD
);
5344 def_builtin (target_flags
, "__builtin_spe_evldw", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDW
);
5345 def_builtin (target_flags
, "__builtin_spe_evldh", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDH
);
5346 def_builtin (target_flags
, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHESPLAT
);
5347 def_builtin (target_flags
, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOSSPLAT
);
5348 def_builtin (target_flags
, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOUSPLAT
);
5349 def_builtin (target_flags
, "__builtin_spe_evlwhe", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHE
);
5350 def_builtin (target_flags
, "__builtin_spe_evlwhos", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOS
);
5351 def_builtin (target_flags
, "__builtin_spe_evlwhou", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOU
);
5352 def_builtin (target_flags
, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHSPLAT
);
5353 def_builtin (target_flags
, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWWSPLAT
);
/* Predicates: pick the signature from the pattern's vector mode.  */
5356 d
= (struct builtin_description
*) bdesc_spe_predicates
;
5357 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_predicates
); ++i
, d
++)
5361 switch (insn_data
[d
->icode
].operand
[1].mode
)
5364 type
= int_ftype_int_v2si_v2si
;
5367 type
= int_ftype_int_v2sf_v2sf
;
5373 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
5376 /* Evsel predicates. */
5377 d
= (struct builtin_description
*) bdesc_spe_evsel
;
5378 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_evsel
); ++i
, d
++)
5382 switch (insn_data
[d
->icode
].operand
[1].mode
)
5385 type
= v2si_ftype_4_v2si
;
5388 type
= v2sf_ftype_4_v2sf
;
5394 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
/* Create the AltiVec-specific builtin declarations: pointer and function
   signature types first, then the irregular load/store/VSCR/stream
   builtins, then the DST, predicate, and abs* families whose signatures
   come from each entry's insn pattern modes.
   NOTE(review): braces, several case labels, and some trailing lines are
   missing from this excerpt.  */
5399 altivec_init_builtins ()
5401 struct builtin_description
*d
;
5402 struct builtin_description_predicates
*dp
;
/* Pointer types used by the ld/st internal builtins.  */
5404 tree pfloat_type_node
= build_pointer_type (float_type_node
);
5405 tree pint_type_node
= build_pointer_type (integer_type_node
);
5406 tree pshort_type_node
= build_pointer_type (short_integer_type_node
);
5407 tree pchar_type_node
= build_pointer_type (char_type_node
);
5409 tree pvoid_type_node
= build_pointer_type (void_type_node
);
/* Function signature types, built with build_function_type_list.  */
5411 tree int_ftype_int_v4si_v4si
5412 = build_function_type_list (integer_type_node
,
5413 integer_type_node
, V4SI_type_node
,
5414 V4SI_type_node
, NULL_TREE
);
5415 tree v4sf_ftype_pfloat
5416 = build_function_type_list (V4SF_type_node
, pfloat_type_node
, NULL_TREE
);
5417 tree void_ftype_pfloat_v4sf
5418 = build_function_type_list (void_type_node
,
5419 pfloat_type_node
, V4SF_type_node
, NULL_TREE
);
5420 tree v4si_ftype_pint
5421 = build_function_type_list (V4SI_type_node
, pint_type_node
, NULL_TREE
); tree void_ftype_pint_v4si
5422 = build_function_type_list (void_type_node
,
5423 pint_type_node
, V4SI_type_node
, NULL_TREE
);
5424 tree v8hi_ftype_pshort
5425 = build_function_type_list (V8HI_type_node
, pshort_type_node
, NULL_TREE
);
5426 tree void_ftype_pshort_v8hi
5427 = build_function_type_list (void_type_node
,
5428 pshort_type_node
, V8HI_type_node
, NULL_TREE
);
5429 tree v16qi_ftype_pchar
5430 = build_function_type_list (V16QI_type_node
, pchar_type_node
, NULL_TREE
);
5431 tree void_ftype_pchar_v16qi
5432 = build_function_type_list (void_type_node
,
5433 pchar_type_node
, V16QI_type_node
, NULL_TREE
);
5434 tree void_ftype_v4si
5435 = build_function_type_list (void_type_node
, V4SI_type_node
, NULL_TREE
);
5436 tree v8hi_ftype_void
5437 = build_function_type (V8HI_type_node
, void_list_node
);
5438 tree void_ftype_void
5439 = build_function_type (void_type_node
, void_list_node
);
5441 = build_function_type_list (void_type_node
, char_type_node
, NULL_TREE
);
5442 tree v16qi_ftype_int_pvoid
5443 = build_function_type_list (V16QI_type_node
,
5444 integer_type_node
, pvoid_type_node
, NULL_TREE
);
5445 tree v8hi_ftype_int_pvoid
5446 = build_function_type_list (V8HI_type_node
,
5447 integer_type_node
, pvoid_type_node
, NULL_TREE
);
5448 tree v4si_ftype_int_pvoid
5449 = build_function_type_list (V4SI_type_node
,
5450 integer_type_node
, pvoid_type_node
, NULL_TREE
);
5451 tree void_ftype_v4si_int_pvoid
5452 = build_function_type_list (void_type_node
,
5453 V4SI_type_node
, integer_type_node
,
5454 pvoid_type_node
, NULL_TREE
);
5455 tree void_ftype_v16qi_int_pvoid
5456 = build_function_type_list (void_type_node
,
5457 V16QI_type_node
, integer_type_node
,
5458 pvoid_type_node
, NULL_TREE
);
5459 tree void_ftype_v8hi_int_pvoid
5460 = build_function_type_list (void_type_node
,
5461 V8HI_type_node
, integer_type_node
,
5462 pvoid_type_node
, NULL_TREE
);
5463 tree int_ftype_int_v8hi_v8hi
5464 = build_function_type_list (integer_type_node
,
5465 integer_type_node
, V8HI_type_node
,
5466 V8HI_type_node
, NULL_TREE
);
5467 tree int_ftype_int_v16qi_v16qi
5468 = build_function_type_list (integer_type_node
,
5469 integer_type_node
, V16QI_type_node
,
5470 V16QI_type_node
, NULL_TREE
);
5471 tree int_ftype_int_v4sf_v4sf
5472 = build_function_type_list (integer_type_node
,
5473 integer_type_node
, V4SF_type_node
,
5474 V4SF_type_node
, NULL_TREE
);
5475 tree v4si_ftype_v4si
5476 = build_function_type_list (V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
5477 tree v8hi_ftype_v8hi
5478 = build_function_type_list (V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
5479 tree v16qi_ftype_v16qi
5480 = build_function_type_list (V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
5481 tree v4sf_ftype_v4sf
5482 = build_function_type_list (V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
5483 tree void_ftype_pvoid_int_char
5484 = build_function_type_list (void_type_node
,
5485 pvoid_type_node
, integer_type_node
,
5486 char_type_node
, NULL_TREE
);
/* Irregular AltiVec builtins: ld/st internals, VSCR moves, data-stream
   control, lvsl/lvsr permutation-control loads, element loads/stores.  */
5488 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pfloat
, ALTIVEC_BUILTIN_LD_INTERNAL_4sf
);
5489 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf
, ALTIVEC_BUILTIN_ST_INTERNAL_4sf
);
5490 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_4si", v4si_ftype_pint
, ALTIVEC_BUILTIN_LD_INTERNAL_4si
);
5491 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si
, ALTIVEC_BUILTIN_ST_INTERNAL_4si
);
5492 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pshort
, ALTIVEC_BUILTIN_LD_INTERNAL_8hi
);
5493 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi
, ALTIVEC_BUILTIN_ST_INTERNAL_8hi
);
5494 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pchar
, ALTIVEC_BUILTIN_LD_INTERNAL_16qi
);
5495 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi
, ALTIVEC_BUILTIN_ST_INTERNAL_16qi
);
5496 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_mtvscr", void_ftype_v4si
, ALTIVEC_BUILTIN_MTVSCR
);
5497 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_mfvscr", v8hi_ftype_void
, ALTIVEC_BUILTIN_MFVSCR
);
5498 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_dssall", void_ftype_void
, ALTIVEC_BUILTIN_DSSALL
);
5499 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_dss", void_ftype_qi
, ALTIVEC_BUILTIN_DSS
);
5500 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvsl", v16qi_ftype_int_pvoid
, ALTIVEC_BUILTIN_LVSL
);
5501 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvsr", v16qi_ftype_int_pvoid
, ALTIVEC_BUILTIN_LVSR
);
5502 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvebx", v16qi_ftype_int_pvoid
, ALTIVEC_BUILTIN_LVEBX
);
5503 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvehx", v8hi_ftype_int_pvoid
, ALTIVEC_BUILTIN_LVEHX
);
5504 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvewx", v4si_ftype_int_pvoid
, ALTIVEC_BUILTIN_LVEWX
);
5505 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvxl", v4si_ftype_int_pvoid
, ALTIVEC_BUILTIN_LVXL
);
5506 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvx", v4si_ftype_int_pvoid
, ALTIVEC_BUILTIN_LVX
);
5507 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid
, ALTIVEC_BUILTIN_STVX
);
5508 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid
, ALTIVEC_BUILTIN_STVEWX
);
5509 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid
, ALTIVEC_BUILTIN_STVXL
);
5510 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid
, ALTIVEC_BUILTIN_STVEBX
);
5511 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid
, ALTIVEC_BUILTIN_STVEHX
);
5513 /* Add the DST variants. */
5514 d
= (struct builtin_description
*) bdesc_dst
;
5515 for (i
= 0; i
< ARRAY_SIZE (bdesc_dst
); i
++, d
++)
5516 def_builtin (d
->mask
, d
->name
, void_ftype_pvoid_int_char
, d
->code
);
5518 /* Initialize the predicates. */
/* Signature depends on the predicate pattern's vector operand mode.  */
5519 dp
= (struct builtin_description_predicates
*) bdesc_altivec_preds
;
5520 for (i
= 0; i
< ARRAY_SIZE (bdesc_altivec_preds
); i
++, dp
++)
5522 enum machine_mode mode1
;
5525 mode1
= insn_data
[dp
->icode
].operand
[1].mode
;
5530 type
= int_ftype_int_v4si_v4si
;
5533 type
= int_ftype_int_v8hi_v8hi
;
5536 type
= int_ftype_int_v16qi_v16qi
;
5539 type
= int_ftype_int_v4sf_v4sf
;
5545 def_builtin (dp
->mask
, dp
->name
, type
, dp
->code
);
5548 /* Initialize the abs* operators. */
/* Unary; signature follows the pattern's result mode.  */
5549 d
= (struct builtin_description
*) bdesc_abs
;
5550 for (i
= 0; i
< ARRAY_SIZE (bdesc_abs
); i
++, d
++)
5552 enum machine_mode mode0
;
5555 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
5560 type
= v4si_ftype_v4si
;
5563 type
= v8hi_ftype_v8hi
;
5566 type
= v16qi_ftype_v16qi
;
5569 type
= v4sf_ftype_v4sf
;
5575 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
5580 rs6000_common_init_builtins ()
5582 struct builtin_description
*d
;
5585 tree v4sf_ftype_v4sf_v4sf_v16qi
5586 = build_function_type_list (V4SF_type_node
,
5587 V4SF_type_node
, V4SF_type_node
,
5588 V16QI_type_node
, NULL_TREE
);
5589 tree v4si_ftype_v4si_v4si_v16qi
5590 = build_function_type_list (V4SI_type_node
,
5591 V4SI_type_node
, V4SI_type_node
,
5592 V16QI_type_node
, NULL_TREE
);
5593 tree v8hi_ftype_v8hi_v8hi_v16qi
5594 = build_function_type_list (V8HI_type_node
,
5595 V8HI_type_node
, V8HI_type_node
,
5596 V16QI_type_node
, NULL_TREE
);
5597 tree v16qi_ftype_v16qi_v16qi_v16qi
5598 = build_function_type_list (V16QI_type_node
,
5599 V16QI_type_node
, V16QI_type_node
,
5600 V16QI_type_node
, NULL_TREE
);
5601 tree v4si_ftype_char
5602 = build_function_type_list (V4SI_type_node
, char_type_node
, NULL_TREE
);
5603 tree v8hi_ftype_char
5604 = build_function_type_list (V8HI_type_node
, char_type_node
, NULL_TREE
);
5605 tree v16qi_ftype_char
5606 = build_function_type_list (V16QI_type_node
, char_type_node
, NULL_TREE
);
5607 tree v8hi_ftype_v16qi
5608 = build_function_type_list (V8HI_type_node
, V16QI_type_node
, NULL_TREE
);
5609 tree v4sf_ftype_v4sf
5610 = build_function_type_list (V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
5612 tree v2si_ftype_v2si_v2si
5613 = build_function_type_list (V2SI_type_node
,
5614 V2SI_type_node
, V2SI_type_node
, NULL_TREE
);
5616 tree v2sf_ftype_v2sf_v2sf
5617 = build_function_type_list (V2SF_type_node
,
5618 V2SF_type_node
, V2SF_type_node
, NULL_TREE
);
5620 tree v2si_ftype_int_int
5621 = build_function_type_list (V2SI_type_node
,
5622 integer_type_node
, integer_type_node
,
5625 tree v2si_ftype_v2si
5626 = build_function_type_list (V2SI_type_node
, V2SI_type_node
, NULL_TREE
);
5628 tree v2sf_ftype_v2sf
5629 = build_function_type_list (V2SF_type_node
,
5630 V2SF_type_node
, NULL_TREE
);
5632 tree v2sf_ftype_v2si
5633 = build_function_type_list (V2SF_type_node
,
5634 V2SI_type_node
, NULL_TREE
);
5636 tree v2si_ftype_v2sf
5637 = build_function_type_list (V2SI_type_node
,
5638 V2SF_type_node
, NULL_TREE
);
5640 tree v2si_ftype_v2si_char
5641 = build_function_type_list (V2SI_type_node
,
5642 V2SI_type_node
, char_type_node
, NULL_TREE
);
5644 tree v2si_ftype_int_char
5645 = build_function_type_list (V2SI_type_node
,
5646 integer_type_node
, char_type_node
, NULL_TREE
);
5648 tree v2si_ftype_char
5649 = build_function_type_list (V2SI_type_node
, char_type_node
, NULL_TREE
);
5651 tree int_ftype_int_int
5652 = build_function_type_list (integer_type_node
,
5653 integer_type_node
, integer_type_node
,
5656 tree v4si_ftype_v4si_v4si
5657 = build_function_type_list (V4SI_type_node
,
5658 V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
5659 tree v4sf_ftype_v4si_char
5660 = build_function_type_list (V4SF_type_node
,
5661 V4SI_type_node
, char_type_node
, NULL_TREE
);
5662 tree v4si_ftype_v4sf_char
5663 = build_function_type_list (V4SI_type_node
,
5664 V4SF_type_node
, char_type_node
, NULL_TREE
);
5665 tree v4si_ftype_v4si_char
5666 = build_function_type_list (V4SI_type_node
,
5667 V4SI_type_node
, char_type_node
, NULL_TREE
);
5668 tree v8hi_ftype_v8hi_char
5669 = build_function_type_list (V8HI_type_node
,
5670 V8HI_type_node
, char_type_node
, NULL_TREE
);
5671 tree v16qi_ftype_v16qi_char
5672 = build_function_type_list (V16QI_type_node
,
5673 V16QI_type_node
, char_type_node
, NULL_TREE
);
5674 tree v16qi_ftype_v16qi_v16qi_char
5675 = build_function_type_list (V16QI_type_node
,
5676 V16QI_type_node
, V16QI_type_node
,
5677 char_type_node
, NULL_TREE
);
5678 tree v8hi_ftype_v8hi_v8hi_char
5679 = build_function_type_list (V8HI_type_node
,
5680 V8HI_type_node
, V8HI_type_node
,
5681 char_type_node
, NULL_TREE
);
5682 tree v4si_ftype_v4si_v4si_char
5683 = build_function_type_list (V4SI_type_node
,
5684 V4SI_type_node
, V4SI_type_node
,
5685 char_type_node
, NULL_TREE
);
5686 tree v4sf_ftype_v4sf_v4sf_char
5687 = build_function_type_list (V4SF_type_node
,
5688 V4SF_type_node
, V4SF_type_node
,
5689 char_type_node
, NULL_TREE
);
5690 tree v4sf_ftype_v4sf_v4sf
5691 = build_function_type_list (V4SF_type_node
,
5692 V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
5693 tree v4sf_ftype_v4sf_v4sf_v4si
5694 = build_function_type_list (V4SF_type_node
,
5695 V4SF_type_node
, V4SF_type_node
,
5696 V4SI_type_node
, NULL_TREE
);
5697 tree v4sf_ftype_v4sf_v4sf_v4sf
5698 = build_function_type_list (V4SF_type_node
,
5699 V4SF_type_node
, V4SF_type_node
,
5700 V4SF_type_node
, NULL_TREE
);
5701 tree v4si_ftype_v4si_v4si_v4si
5702 = build_function_type_list (V4SI_type_node
,
5703 V4SI_type_node
, V4SI_type_node
,
5704 V4SI_type_node
, NULL_TREE
);
5705 tree v8hi_ftype_v8hi_v8hi
5706 = build_function_type_list (V8HI_type_node
,
5707 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
5708 tree v8hi_ftype_v8hi_v8hi_v8hi
5709 = build_function_type_list (V8HI_type_node
,
5710 V8HI_type_node
, V8HI_type_node
,
5711 V8HI_type_node
, NULL_TREE
);
5712 tree v4si_ftype_v8hi_v8hi_v4si
5713 = build_function_type_list (V4SI_type_node
,
5714 V8HI_type_node
, V8HI_type_node
,
5715 V4SI_type_node
, NULL_TREE
);
5716 tree v4si_ftype_v16qi_v16qi_v4si
5717 = build_function_type_list (V4SI_type_node
,
5718 V16QI_type_node
, V16QI_type_node
,
5719 V4SI_type_node
, NULL_TREE
);
5720 tree v16qi_ftype_v16qi_v16qi
5721 = build_function_type_list (V16QI_type_node
,
5722 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
5723 tree v4si_ftype_v4sf_v4sf
5724 = build_function_type_list (V4SI_type_node
,
5725 V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
5726 tree v8hi_ftype_v16qi_v16qi
5727 = build_function_type_list (V8HI_type_node
,
5728 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
5729 tree v4si_ftype_v8hi_v8hi
5730 = build_function_type_list (V4SI_type_node
,
5731 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
5732 tree v8hi_ftype_v4si_v4si
5733 = build_function_type_list (V8HI_type_node
,
5734 V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
5735 tree v16qi_ftype_v8hi_v8hi
5736 = build_function_type_list (V16QI_type_node
,
5737 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
5738 tree v4si_ftype_v16qi_v4si
5739 = build_function_type_list (V4SI_type_node
,
5740 V16QI_type_node
, V4SI_type_node
, NULL_TREE
);
5741 tree v4si_ftype_v16qi_v16qi
5742 = build_function_type_list (V4SI_type_node
,
5743 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
5744 tree v4si_ftype_v8hi_v4si
5745 = build_function_type_list (V4SI_type_node
,
5746 V8HI_type_node
, V4SI_type_node
, NULL_TREE
);
5747 tree v4si_ftype_v8hi
5748 = build_function_type_list (V4SI_type_node
, V8HI_type_node
, NULL_TREE
);
5749 tree int_ftype_v4si_v4si
5750 = build_function_type_list (integer_type_node
,
5751 V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
5752 tree int_ftype_v4sf_v4sf
5753 = build_function_type_list (integer_type_node
,
5754 V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
5755 tree int_ftype_v16qi_v16qi
5756 = build_function_type_list (integer_type_node
,
5757 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
5758 tree int_ftype_v8hi_v8hi
5759 = build_function_type_list (integer_type_node
,
5760 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
5762 /* Add the simple ternary operators. */
5763 d
= (struct builtin_description
*) bdesc_3arg
;
5764 for (i
= 0; i
< ARRAY_SIZE (bdesc_3arg
); i
++, d
++)
5767 enum machine_mode mode0
, mode1
, mode2
, mode3
;
5770 if (d
->name
== 0 || d
->icode
== CODE_FOR_nothing
)
5773 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
5774 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
5775 mode2
= insn_data
[d
->icode
].operand
[2].mode
;
5776 mode3
= insn_data
[d
->icode
].operand
[3].mode
;
5778 /* When all four are of the same mode. */
5779 if (mode0
== mode1
&& mode1
== mode2
&& mode2
== mode3
)
5784 type
= v4si_ftype_v4si_v4si_v4si
;
5787 type
= v4sf_ftype_v4sf_v4sf_v4sf
;
5790 type
= v8hi_ftype_v8hi_v8hi_v8hi
;
5793 type
= v16qi_ftype_v16qi_v16qi_v16qi
;
5799 else if (mode0
== mode1
&& mode1
== mode2
&& mode3
== V16QImode
)
5804 type
= v4si_ftype_v4si_v4si_v16qi
;
5807 type
= v4sf_ftype_v4sf_v4sf_v16qi
;
5810 type
= v8hi_ftype_v8hi_v8hi_v16qi
;
5813 type
= v16qi_ftype_v16qi_v16qi_v16qi
;
5819 else if (mode0
== V4SImode
&& mode1
== V16QImode
&& mode2
== V16QImode
5820 && mode3
== V4SImode
)
5821 type
= v4si_ftype_v16qi_v16qi_v4si
;
5822 else if (mode0
== V4SImode
&& mode1
== V8HImode
&& mode2
== V8HImode
5823 && mode3
== V4SImode
)
5824 type
= v4si_ftype_v8hi_v8hi_v4si
;
5825 else if (mode0
== V4SFmode
&& mode1
== V4SFmode
&& mode2
== V4SFmode
5826 && mode3
== V4SImode
)
5827 type
= v4sf_ftype_v4sf_v4sf_v4si
;
5829 /* vchar, vchar, vchar, 4 bit literal. */
5830 else if (mode0
== V16QImode
&& mode1
== mode0
&& mode2
== mode0
5832 type
= v16qi_ftype_v16qi_v16qi_char
;
5834 /* vshort, vshort, vshort, 4 bit literal. */
5835 else if (mode0
== V8HImode
&& mode1
== mode0
&& mode2
== mode0
5837 type
= v8hi_ftype_v8hi_v8hi_char
;
5839 /* vint, vint, vint, 4 bit literal. */
5840 else if (mode0
== V4SImode
&& mode1
== mode0
&& mode2
== mode0
5842 type
= v4si_ftype_v4si_v4si_char
;
5844 /* vfloat, vfloat, vfloat, 4 bit literal. */
5845 else if (mode0
== V4SFmode
&& mode1
== mode0
&& mode2
== mode0
5847 type
= v4sf_ftype_v4sf_v4sf_char
;
5852 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
5855 /* Add the simple binary operators. */
5856 d
= (struct builtin_description
*) bdesc_2arg
;
5857 for (i
= 0; i
< ARRAY_SIZE (bdesc_2arg
); i
++, d
++)
5859 enum machine_mode mode0
, mode1
, mode2
;
5862 if (d
->name
== 0 || d
->icode
== CODE_FOR_nothing
)
5865 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
5866 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
5867 mode2
= insn_data
[d
->icode
].operand
[2].mode
;
5869 /* When all three operands are of the same mode. */
5870 if (mode0
== mode1
&& mode1
== mode2
)
5875 type
= v4sf_ftype_v4sf_v4sf
;
5878 type
= v4si_ftype_v4si_v4si
;
5881 type
= v16qi_ftype_v16qi_v16qi
;
5884 type
= v8hi_ftype_v8hi_v8hi
;
5887 type
= v2si_ftype_v2si_v2si
;
5890 type
= v2sf_ftype_v2sf_v2sf
;
5893 type
= int_ftype_int_int
;
5900 /* A few other combos we really don't want to do manually. */
5902 /* vint, vfloat, vfloat. */
5903 else if (mode0
== V4SImode
&& mode1
== V4SFmode
&& mode2
== V4SFmode
)
5904 type
= v4si_ftype_v4sf_v4sf
;
5906 /* vshort, vchar, vchar. */
5907 else if (mode0
== V8HImode
&& mode1
== V16QImode
&& mode2
== V16QImode
)
5908 type
= v8hi_ftype_v16qi_v16qi
;
5910 /* vint, vshort, vshort. */
5911 else if (mode0
== V4SImode
&& mode1
== V8HImode
&& mode2
== V8HImode
)
5912 type
= v4si_ftype_v8hi_v8hi
;
5914 /* vshort, vint, vint. */
5915 else if (mode0
== V8HImode
&& mode1
== V4SImode
&& mode2
== V4SImode
)
5916 type
= v8hi_ftype_v4si_v4si
;
5918 /* vchar, vshort, vshort. */
5919 else if (mode0
== V16QImode
&& mode1
== V8HImode
&& mode2
== V8HImode
)
5920 type
= v16qi_ftype_v8hi_v8hi
;
5922 /* vint, vchar, vint. */
5923 else if (mode0
== V4SImode
&& mode1
== V16QImode
&& mode2
== V4SImode
)
5924 type
= v4si_ftype_v16qi_v4si
;
5926 /* vint, vchar, vchar. */
5927 else if (mode0
== V4SImode
&& mode1
== V16QImode
&& mode2
== V16QImode
)
5928 type
= v4si_ftype_v16qi_v16qi
;
5930 /* vint, vshort, vint. */
5931 else if (mode0
== V4SImode
&& mode1
== V8HImode
&& mode2
== V4SImode
)
5932 type
= v4si_ftype_v8hi_v4si
;
5934 /* vint, vint, 5 bit literal. */
5935 else if (mode0
== V4SImode
&& mode1
== V4SImode
&& mode2
== QImode
)
5936 type
= v4si_ftype_v4si_char
;
5938 /* vshort, vshort, 5 bit literal. */
5939 else if (mode0
== V8HImode
&& mode1
== V8HImode
&& mode2
== QImode
)
5940 type
= v8hi_ftype_v8hi_char
;
5942 /* vchar, vchar, 5 bit literal. */
5943 else if (mode0
== V16QImode
&& mode1
== V16QImode
&& mode2
== QImode
)
5944 type
= v16qi_ftype_v16qi_char
;
5946 /* vfloat, vint, 5 bit literal. */
5947 else if (mode0
== V4SFmode
&& mode1
== V4SImode
&& mode2
== QImode
)
5948 type
= v4sf_ftype_v4si_char
;
5950 /* vint, vfloat, 5 bit literal. */
5951 else if (mode0
== V4SImode
&& mode1
== V4SFmode
&& mode2
== QImode
)
5952 type
= v4si_ftype_v4sf_char
;
5954 else if (mode0
== V2SImode
&& mode1
== SImode
&& mode2
== SImode
)
5955 type
= v2si_ftype_int_int
;
5957 else if (mode0
== V2SImode
&& mode1
== V2SImode
&& mode2
== QImode
)
5958 type
= v2si_ftype_v2si_char
;
5960 else if (mode0
== V2SImode
&& mode1
== SImode
&& mode2
== QImode
)
5961 type
= v2si_ftype_int_char
;
5964 else if (mode0
== SImode
)
5969 type
= int_ftype_v4si_v4si
;
5972 type
= int_ftype_v4sf_v4sf
;
5975 type
= int_ftype_v16qi_v16qi
;
5978 type
= int_ftype_v8hi_v8hi
;
5988 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
5991 /* Add the simple unary operators. */
5992 d
= (struct builtin_description
*) bdesc_1arg
;
5993 for (i
= 0; i
< ARRAY_SIZE (bdesc_1arg
); i
++, d
++)
5995 enum machine_mode mode0
, mode1
;
5998 if (d
->name
== 0 || d
->icode
== CODE_FOR_nothing
)
6001 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
6002 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
6004 if (mode0
== V4SImode
&& mode1
== QImode
)
6005 type
= v4si_ftype_char
;
6006 else if (mode0
== V8HImode
&& mode1
== QImode
)
6007 type
= v8hi_ftype_char
;
6008 else if (mode0
== V16QImode
&& mode1
== QImode
)
6009 type
= v16qi_ftype_char
;
6010 else if (mode0
== V4SFmode
&& mode1
== V4SFmode
)
6011 type
= v4sf_ftype_v4sf
;
6012 else if (mode0
== V8HImode
&& mode1
== V16QImode
)
6013 type
= v8hi_ftype_v16qi
;
6014 else if (mode0
== V4SImode
&& mode1
== V8HImode
)
6015 type
= v4si_ftype_v8hi
;
6016 else if (mode0
== V2SImode
&& mode1
== V2SImode
)
6017 type
= v2si_ftype_v2si
;
6018 else if (mode0
== V2SFmode
&& mode1
== V2SFmode
)
6019 type
= v2sf_ftype_v2sf
;
6020 else if (mode0
== V2SFmode
&& mode1
== V2SImode
)
6021 type
= v2sf_ftype_v2si
;
6022 else if (mode0
== V2SImode
&& mode1
== V2SFmode
)
6023 type
= v2si_ftype_v2sf
;
6024 else if (mode0
== V2SImode
&& mode1
== QImode
)
6025 type
= v2si_ftype_char
;
6029 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
6033 /* Generate a memory reference for expand_block_move, copying volatile,
6034 and other bits from an original memory reference. */
6037 expand_block_move_mem (mode
, addr
, orig_mem
)
6038 enum machine_mode mode
;
6042 rtx mem
= gen_rtx_MEM (mode
, addr
);
6044 MEM_COPY_ATTRIBUTES (mem
, orig_mem
);
6048 /* Expand a block move operation, and return 1 if successful. Return 0
6049 if we should let the compiler generate normal code.
6051 operands[0] is the destination
6052 operands[1] is the source
6053 operands[2] is the length
6054 operands[3] is the alignment */
6056 #define MAX_MOVE_REG 4
6059 expand_block_move (operands
)
6062 rtx orig_dest
= operands
[0];
6063 rtx orig_src
= operands
[1];
6064 rtx bytes_rtx
= operands
[2];
6065 rtx align_rtx
= operands
[3];
6066 int constp
= (GET_CODE (bytes_rtx
) == CONST_INT
);
6077 rtx stores
[MAX_MOVE_REG
];
6080 /* If this is not a fixed size move, just call memcpy */
6084 /* If this is not a fixed size alignment, abort */
6085 if (GET_CODE (align_rtx
) != CONST_INT
)
6087 align
= INTVAL (align_rtx
);
6089 /* Anything to move? */
6090 bytes
= INTVAL (bytes_rtx
);
6094 /* store_one_arg depends on expand_block_move to handle at least the size of
6095 reg_parm_stack_space. */
6096 if (bytes
> (TARGET_POWERPC64
? 64 : 32))
6099 /* Move the address into scratch registers. */
6100 dest_reg
= copy_addr_to_reg (XEXP (orig_dest
, 0));
6101 src_reg
= copy_addr_to_reg (XEXP (orig_src
, 0));
6103 if (TARGET_STRING
) /* string instructions are available */
6105 for ( ; bytes
> 0; bytes
-= move_bytes
)
6107 if (bytes
> 24 /* move up to 32 bytes at a time */
6115 && ! fixed_regs
[12])
6117 move_bytes
= (bytes
> 32) ? 32 : bytes
;
6118 emit_insn (gen_movstrsi_8reg (expand_block_move_mem (BLKmode
,
6121 expand_block_move_mem (BLKmode
,
6124 GEN_INT ((move_bytes
== 32)
6128 else if (bytes
> 16 /* move up to 24 bytes at a time */
6134 && ! fixed_regs
[10])
6136 move_bytes
= (bytes
> 24) ? 24 : bytes
;
6137 emit_insn (gen_movstrsi_6reg (expand_block_move_mem (BLKmode
,
6140 expand_block_move_mem (BLKmode
,
6143 GEN_INT (move_bytes
),
6146 else if (bytes
> 8 /* move up to 16 bytes at a time */
6152 move_bytes
= (bytes
> 16) ? 16 : bytes
;
6153 emit_insn (gen_movstrsi_4reg (expand_block_move_mem (BLKmode
,
6156 expand_block_move_mem (BLKmode
,
6159 GEN_INT (move_bytes
),
6162 else if (bytes
>= 8 && TARGET_POWERPC64
6163 /* 64-bit loads and stores require word-aligned
6165 && (align
>= 8 || (! STRICT_ALIGNMENT
&& align
>= 4)))
6168 tmp_reg
= gen_reg_rtx (DImode
);
6169 emit_move_insn (tmp_reg
,
6170 expand_block_move_mem (DImode
,
6171 src_reg
, orig_src
));
6172 emit_move_insn (expand_block_move_mem (DImode
,
6173 dest_reg
, orig_dest
),
6176 else if (bytes
> 4 && !TARGET_POWERPC64
)
6177 { /* move up to 8 bytes at a time */
6178 move_bytes
= (bytes
> 8) ? 8 : bytes
;
6179 emit_insn (gen_movstrsi_2reg (expand_block_move_mem (BLKmode
,
6182 expand_block_move_mem (BLKmode
,
6185 GEN_INT (move_bytes
),
6188 else if (bytes
>= 4 && (align
>= 4 || ! STRICT_ALIGNMENT
))
6189 { /* move 4 bytes */
6191 tmp_reg
= gen_reg_rtx (SImode
);
6192 emit_move_insn (tmp_reg
,
6193 expand_block_move_mem (SImode
,
6194 src_reg
, orig_src
));
6195 emit_move_insn (expand_block_move_mem (SImode
,
6196 dest_reg
, orig_dest
),
6199 else if (bytes
== 2 && (align
>= 2 || ! STRICT_ALIGNMENT
))
6200 { /* move 2 bytes */
6202 tmp_reg
= gen_reg_rtx (HImode
);
6203 emit_move_insn (tmp_reg
,
6204 expand_block_move_mem (HImode
,
6205 src_reg
, orig_src
));
6206 emit_move_insn (expand_block_move_mem (HImode
,
6207 dest_reg
, orig_dest
),
6210 else if (bytes
== 1) /* move 1 byte */
6213 tmp_reg
= gen_reg_rtx (QImode
);
6214 emit_move_insn (tmp_reg
,
6215 expand_block_move_mem (QImode
,
6216 src_reg
, orig_src
));
6217 emit_move_insn (expand_block_move_mem (QImode
,
6218 dest_reg
, orig_dest
),
6222 { /* move up to 4 bytes at a time */
6223 move_bytes
= (bytes
> 4) ? 4 : bytes
;
6224 emit_insn (gen_movstrsi_1reg (expand_block_move_mem (BLKmode
,
6227 expand_block_move_mem (BLKmode
,
6230 GEN_INT (move_bytes
),
6234 if (bytes
> move_bytes
)
6236 if (! TARGET_POWERPC64
)
6238 emit_insn (gen_addsi3 (src_reg
, src_reg
,
6239 GEN_INT (move_bytes
)));
6240 emit_insn (gen_addsi3 (dest_reg
, dest_reg
,
6241 GEN_INT (move_bytes
)));
6245 emit_insn (gen_adddi3 (src_reg
, src_reg
,
6246 GEN_INT (move_bytes
)));
6247 emit_insn (gen_adddi3 (dest_reg
, dest_reg
,
6248 GEN_INT (move_bytes
)));
6254 else /* string instructions not available */
6256 num_reg
= offset
= 0;
6257 for ( ; bytes
> 0; (bytes
-= move_bytes
), (offset
+= move_bytes
))
6259 /* Calculate the correct offset for src/dest */
6263 dest_addr
= dest_reg
;
6267 src_addr
= plus_constant (src_reg
, offset
);
6268 dest_addr
= plus_constant (dest_reg
, offset
);
6271 /* Generate the appropriate load and store, saving the stores
6273 if (bytes
>= 8 && TARGET_POWERPC64
6274 /* 64-bit loads and stores require word-aligned
6276 && (align
>= 8 || (! STRICT_ALIGNMENT
&& align
>= 4)))
6279 tmp_reg
= gen_reg_rtx (DImode
);
6280 emit_insn (gen_movdi (tmp_reg
,
6281 expand_block_move_mem (DImode
,
6284 stores
[num_reg
++] = gen_movdi (expand_block_move_mem (DImode
,
6289 else if (bytes
>= 4 && (align
>= 4 || ! STRICT_ALIGNMENT
))
6292 tmp_reg
= gen_reg_rtx (SImode
);
6293 emit_insn (gen_movsi (tmp_reg
,
6294 expand_block_move_mem (SImode
,
6297 stores
[num_reg
++] = gen_movsi (expand_block_move_mem (SImode
,
6302 else if (bytes
>= 2 && (align
>= 2 || ! STRICT_ALIGNMENT
))
6305 tmp_reg
= gen_reg_rtx (HImode
);
6306 emit_insn (gen_movhi (tmp_reg
,
6307 expand_block_move_mem (HImode
,
6310 stores
[num_reg
++] = gen_movhi (expand_block_move_mem (HImode
,
6318 tmp_reg
= gen_reg_rtx (QImode
);
6319 emit_insn (gen_movqi (tmp_reg
,
6320 expand_block_move_mem (QImode
,
6323 stores
[num_reg
++] = gen_movqi (expand_block_move_mem (QImode
,
6329 if (num_reg
>= MAX_MOVE_REG
)
6331 for (i
= 0; i
< num_reg
; i
++)
6332 emit_insn (stores
[i
]);
6337 for (i
= 0; i
< num_reg
; i
++)
6338 emit_insn (stores
[i
]);
6345 /* Return 1 if OP is a load multiple operation. It is known to be a
6346 PARALLEL and the first section will be tested. */
6349 load_multiple_operation (op
, mode
)
6351 enum machine_mode mode ATTRIBUTE_UNUSED
;
6353 int count
= XVECLEN (op
, 0);
6354 unsigned int dest_regno
;
6358 /* Perform a quick check so we don't blow up below. */
6360 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6361 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != REG
6362 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != MEM
)
6365 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, 0)));
6366 src_addr
= XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0);
6368 for (i
= 1; i
< count
; i
++)
6370 rtx elt
= XVECEXP (op
, 0, i
);
6372 if (GET_CODE (elt
) != SET
6373 || GET_CODE (SET_DEST (elt
)) != REG
6374 || GET_MODE (SET_DEST (elt
)) != SImode
6375 || REGNO (SET_DEST (elt
)) != dest_regno
+ i
6376 || GET_CODE (SET_SRC (elt
)) != MEM
6377 || GET_MODE (SET_SRC (elt
)) != SImode
6378 || GET_CODE (XEXP (SET_SRC (elt
), 0)) != PLUS
6379 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt
), 0), 0), src_addr
)
6380 || GET_CODE (XEXP (XEXP (SET_SRC (elt
), 0), 1)) != CONST_INT
6381 || INTVAL (XEXP (XEXP (SET_SRC (elt
), 0), 1)) != i
* 4)
6388 /* Similar, but tests for store multiple. Here, the second vector element
6389 is a CLOBBER. It will be tested later. */
6392 store_multiple_operation (op
, mode
)
6394 enum machine_mode mode ATTRIBUTE_UNUSED
;
6396 int count
= XVECLEN (op
, 0) - 1;
6397 unsigned int src_regno
;
6401 /* Perform a quick check so we don't blow up below. */
6403 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6404 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != MEM
6405 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != REG
)
6408 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, 0)));
6409 dest_addr
= XEXP (SET_DEST (XVECEXP (op
, 0, 0)), 0);
6411 for (i
= 1; i
< count
; i
++)
6413 rtx elt
= XVECEXP (op
, 0, i
+ 1);
6415 if (GET_CODE (elt
) != SET
6416 || GET_CODE (SET_SRC (elt
)) != REG
6417 || GET_MODE (SET_SRC (elt
)) != SImode
6418 || REGNO (SET_SRC (elt
)) != src_regno
+ i
6419 || GET_CODE (SET_DEST (elt
)) != MEM
6420 || GET_MODE (SET_DEST (elt
)) != SImode
6421 || GET_CODE (XEXP (SET_DEST (elt
), 0)) != PLUS
6422 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt
), 0), 0), dest_addr
)
6423 || GET_CODE (XEXP (XEXP (SET_DEST (elt
), 0), 1)) != CONST_INT
6424 || INTVAL (XEXP (XEXP (SET_DEST (elt
), 0), 1)) != i
* 4)
6431 /* Return 1 for a parallel vrsave operation. */
6434 vrsave_operation (op
, mode
)
6436 enum machine_mode mode ATTRIBUTE_UNUSED
;
6438 int count
= XVECLEN (op
, 0);
6439 unsigned int dest_regno
, src_regno
;
6443 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6444 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != REG
6445 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != UNSPEC_VOLATILE
)
6448 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, 0)));
6449 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, 0)));
6451 if (dest_regno
!= VRSAVE_REGNO
6452 && src_regno
!= VRSAVE_REGNO
)
6455 for (i
= 1; i
< count
; i
++)
6457 rtx elt
= XVECEXP (op
, 0, i
);
6459 if (GET_CODE (elt
) != CLOBBER
6460 && GET_CODE (elt
) != SET
)
6467 /* Return 1 for an PARALLEL suitable for mtcrf. */
6470 mtcrf_operation (op
, mode
)
6472 enum machine_mode mode ATTRIBUTE_UNUSED
;
6474 int count
= XVECLEN (op
, 0);
6478 /* Perform a quick check so we don't blow up below. */
6480 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6481 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != UNSPEC
6482 || XVECLEN (SET_SRC (XVECEXP (op
, 0, 0)), 0) != 2)
6484 src_reg
= XVECEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0, 0);
6486 if (GET_CODE (src_reg
) != REG
6487 || GET_MODE (src_reg
) != SImode
6488 || ! INT_REGNO_P (REGNO (src_reg
)))
6491 for (i
= 0; i
< count
; i
++)
6493 rtx exp
= XVECEXP (op
, 0, i
);
6497 if (GET_CODE (exp
) != SET
6498 || GET_CODE (SET_DEST (exp
)) != REG
6499 || GET_MODE (SET_DEST (exp
)) != CCmode
6500 || ! CR_REGNO_P (REGNO (SET_DEST (exp
))))
6502 unspec
= SET_SRC (exp
);
6503 maskval
= 1 << (MAX_CR_REGNO
- REGNO (SET_DEST (exp
)));
6505 if (GET_CODE (unspec
) != UNSPEC
6506 || XINT (unspec
, 1) != 20
6507 || XVECLEN (unspec
, 0) != 2
6508 || XVECEXP (unspec
, 0, 0) != src_reg
6509 || GET_CODE (XVECEXP (unspec
, 0, 1)) != CONST_INT
6510 || INTVAL (XVECEXP (unspec
, 0, 1)) != maskval
)
6516 /* Return 1 for an PARALLEL suitable for lmw. */
6519 lmw_operation (op
, mode
)
6521 enum machine_mode mode ATTRIBUTE_UNUSED
;
6523 int count
= XVECLEN (op
, 0);
6524 unsigned int dest_regno
;
6526 unsigned int base_regno
;
6527 HOST_WIDE_INT offset
;
6530 /* Perform a quick check so we don't blow up below. */
6532 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6533 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != REG
6534 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != MEM
)
6537 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, 0)));
6538 src_addr
= XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0);
6541 || count
!= 32 - (int) dest_regno
)
6544 if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr
, 0))
6547 base_regno
= REGNO (src_addr
);
6548 if (base_regno
== 0)
6551 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode
, src_addr
, 0))
6553 offset
= INTVAL (XEXP (src_addr
, 1));
6554 base_regno
= REGNO (XEXP (src_addr
, 0));
6559 for (i
= 0; i
< count
; i
++)
6561 rtx elt
= XVECEXP (op
, 0, i
);
6564 HOST_WIDE_INT newoffset
;
6566 if (GET_CODE (elt
) != SET
6567 || GET_CODE (SET_DEST (elt
)) != REG
6568 || GET_MODE (SET_DEST (elt
)) != SImode
6569 || REGNO (SET_DEST (elt
)) != dest_regno
+ i
6570 || GET_CODE (SET_SRC (elt
)) != MEM
6571 || GET_MODE (SET_SRC (elt
)) != SImode
)
6573 newaddr
= XEXP (SET_SRC (elt
), 0);
6574 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr
, 0))
6579 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode
, newaddr
, 0))
6581 addr_reg
= XEXP (newaddr
, 0);
6582 newoffset
= INTVAL (XEXP (newaddr
, 1));
6586 if (REGNO (addr_reg
) != base_regno
6587 || newoffset
!= offset
+ 4 * i
)
6594 /* Return 1 for an PARALLEL suitable for stmw. */
6597 stmw_operation (op
, mode
)
6599 enum machine_mode mode ATTRIBUTE_UNUSED
;
6601 int count
= XVECLEN (op
, 0);
6602 unsigned int src_regno
;
6604 unsigned int base_regno
;
6605 HOST_WIDE_INT offset
;
6608 /* Perform a quick check so we don't blow up below. */
6610 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
6611 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != MEM
6612 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != REG
)
6615 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, 0)));
6616 dest_addr
= XEXP (SET_DEST (XVECEXP (op
, 0, 0)), 0);
6619 || count
!= 32 - (int) src_regno
)
6622 if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr
, 0))
6625 base_regno
= REGNO (dest_addr
);
6626 if (base_regno
== 0)
6629 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode
, dest_addr
, 0))
6631 offset
= INTVAL (XEXP (dest_addr
, 1));
6632 base_regno
= REGNO (XEXP (dest_addr
, 0));
6637 for (i
= 0; i
< count
; i
++)
6639 rtx elt
= XVECEXP (op
, 0, i
);
6642 HOST_WIDE_INT newoffset
;
6644 if (GET_CODE (elt
) != SET
6645 || GET_CODE (SET_SRC (elt
)) != REG
6646 || GET_MODE (SET_SRC (elt
)) != SImode
6647 || REGNO (SET_SRC (elt
)) != src_regno
+ i
6648 || GET_CODE (SET_DEST (elt
)) != MEM
6649 || GET_MODE (SET_DEST (elt
)) != SImode
)
6651 newaddr
= XEXP (SET_DEST (elt
), 0);
6652 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr
, 0))
6657 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode
, newaddr
, 0))
6659 addr_reg
= XEXP (newaddr
, 0);
6660 newoffset
= INTVAL (XEXP (newaddr
, 1));
6664 if (REGNO (addr_reg
) != base_regno
6665 || newoffset
!= offset
+ 4 * i
)
6672 /* A validation routine: say whether CODE, a condition code, and MODE
6673 match. The other alternatives either don't make sense or should
6674 never be generated. */
6677 validate_condition_mode (code
, mode
)
6679 enum machine_mode mode
;
6681 if (GET_RTX_CLASS (code
) != '<'
6682 || GET_MODE_CLASS (mode
) != MODE_CC
)
6685 /* These don't make sense. */
6686 if ((code
== GT
|| code
== LT
|| code
== GE
|| code
== LE
)
6687 && mode
== CCUNSmode
)
6690 if ((code
== GTU
|| code
== LTU
|| code
== GEU
|| code
== LEU
)
6691 && mode
!= CCUNSmode
)
6694 if (mode
!= CCFPmode
6695 && (code
== ORDERED
|| code
== UNORDERED
6696 || code
== UNEQ
|| code
== LTGT
6697 || code
== UNGT
|| code
== UNLT
6698 || code
== UNGE
|| code
== UNLE
))
6701 /* These should never be generated except for
6702 flag_unsafe_math_optimizations and flag_finite_math_only. */
6703 if (mode
== CCFPmode
6704 && ! flag_unsafe_math_optimizations
6705 && ! flag_finite_math_only
6706 && (code
== LE
|| code
== GE
6707 || code
== UNEQ
|| code
== LTGT
6708 || code
== UNGT
|| code
== UNLT
))
6711 /* These are invalid; the information is not there. */
6712 if (mode
== CCEQmode
6713 && code
!= EQ
&& code
!= NE
)
6717 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
6718 We only check the opcode against the mode of the CC value here. */
6721 branch_comparison_operator (op
, mode
)
6723 enum machine_mode mode ATTRIBUTE_UNUSED
;
6725 enum rtx_code code
= GET_CODE (op
);
6726 enum machine_mode cc_mode
;
6728 if (GET_RTX_CLASS (code
) != '<')
6731 cc_mode
= GET_MODE (XEXP (op
, 0));
6732 if (GET_MODE_CLASS (cc_mode
) != MODE_CC
)
6735 validate_condition_mode (code
, cc_mode
);
6740 /* Return 1 if OP is a comparison operation that is valid for a branch
6741 insn and which is true if the corresponding bit in the CC register
6745 branch_positive_comparison_operator (op
, mode
)
6747 enum machine_mode mode
;
6751 if (! branch_comparison_operator (op
, mode
))
6754 code
= GET_CODE (op
);
6755 return (code
== EQ
|| code
== LT
|| code
== GT
6756 || (TARGET_SPE
&& TARGET_HARD_FLOAT
&& !TARGET_FPRS
&& code
== NE
)
6757 || code
== LTU
|| code
== GTU
6758 || code
== UNORDERED
);
6761 /* Return 1 if OP is a comparison operation that is valid for an scc insn.
6762 We check the opcode against the mode of the CC value and disallow EQ or
6763 NE comparisons for integers. */
6766 scc_comparison_operator (op
, mode
)
6768 enum machine_mode mode
;
6770 enum rtx_code code
= GET_CODE (op
);
6771 enum machine_mode cc_mode
;
6773 if (GET_MODE (op
) != mode
&& mode
!= VOIDmode
)
6776 if (GET_RTX_CLASS (code
) != '<')
6779 cc_mode
= GET_MODE (XEXP (op
, 0));
6780 if (GET_MODE_CLASS (cc_mode
) != MODE_CC
)
6783 validate_condition_mode (code
, cc_mode
);
6785 if (code
== NE
&& cc_mode
!= CCFPmode
)
6792 trap_comparison_operator (op
, mode
)
6794 enum machine_mode mode
;
6796 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
6798 return GET_RTX_CLASS (GET_CODE (op
)) == '<';
6802 boolean_operator (op
, mode
)
6804 enum machine_mode mode ATTRIBUTE_UNUSED
;
6806 enum rtx_code code
= GET_CODE (op
);
6807 return (code
== AND
|| code
== IOR
|| code
== XOR
);
6811 boolean_or_operator (op
, mode
)
6813 enum machine_mode mode ATTRIBUTE_UNUSED
;
6815 enum rtx_code code
= GET_CODE (op
);
6816 return (code
== IOR
|| code
== XOR
);
6820 min_max_operator (op
, mode
)
6822 enum machine_mode mode ATTRIBUTE_UNUSED
;
6824 enum rtx_code code
= GET_CODE (op
);
6825 return (code
== SMIN
|| code
== SMAX
|| code
== UMIN
|| code
== UMAX
);
6828 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
6829 mask required to convert the result of a rotate insn into a shift
6830 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
6833 includes_lshift_p (shiftop
, andop
)
6837 unsigned HOST_WIDE_INT shift_mask
= ~(unsigned HOST_WIDE_INT
) 0;
6839 shift_mask
<<= INTVAL (shiftop
);
6841 return (INTVAL (andop
) & 0xffffffff & ~shift_mask
) == 0;
6844 /* Similar, but for right shift. */
6847 includes_rshift_p (shiftop
, andop
)
6851 unsigned HOST_WIDE_INT shift_mask
= ~(unsigned HOST_WIDE_INT
) 0;
6853 shift_mask
>>= INTVAL (shiftop
);
6855 return (INTVAL (andop
) & 0xffffffff & ~shift_mask
) == 0;
6858 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
6859 to perform a left shift. It must have exactly SHIFTOP least
6860 signifigant 0's, then one or more 1's, then zero or more 0's. */
6863 includes_rldic_lshift_p (shiftop
, andop
)
6867 if (GET_CODE (andop
) == CONST_INT
)
6869 HOST_WIDE_INT c
, lsb
, shift_mask
;
6872 if (c
== 0 || c
== ~0)
6876 shift_mask
<<= INTVAL (shiftop
);
6878 /* Find the least signifigant one bit. */
6881 /* It must coincide with the LSB of the shift mask. */
6882 if (-lsb
!= shift_mask
)
6885 /* Invert to look for the next transition (if any). */
6888 /* Remove the low group of ones (originally low group of zeros). */
6891 /* Again find the lsb, and check we have all 1's above. */
6895 else if (GET_CODE (andop
) == CONST_DOUBLE
6896 && (GET_MODE (andop
) == VOIDmode
|| GET_MODE (andop
) == DImode
))
6898 HOST_WIDE_INT low
, high
, lsb
;
6899 HOST_WIDE_INT shift_mask_low
, shift_mask_high
;
6901 low
= CONST_DOUBLE_LOW (andop
);
6902 if (HOST_BITS_PER_WIDE_INT
< 64)
6903 high
= CONST_DOUBLE_HIGH (andop
);
6905 if ((low
== 0 && (HOST_BITS_PER_WIDE_INT
>= 64 || high
== 0))
6906 || (low
== ~0 && (HOST_BITS_PER_WIDE_INT
>= 64 || high
== ~0)))
6909 if (HOST_BITS_PER_WIDE_INT
< 64 && low
== 0)
6911 shift_mask_high
= ~0;
6912 if (INTVAL (shiftop
) > 32)
6913 shift_mask_high
<<= INTVAL (shiftop
) - 32;
6917 if (-lsb
!= shift_mask_high
|| INTVAL (shiftop
) < 32)
6924 return high
== -lsb
;
6927 shift_mask_low
= ~0;
6928 shift_mask_low
<<= INTVAL (shiftop
);
6932 if (-lsb
!= shift_mask_low
)
6935 if (HOST_BITS_PER_WIDE_INT
< 64)
6940 if (HOST_BITS_PER_WIDE_INT
< 64 && low
== 0)
6943 return high
== -lsb
;
6947 return low
== -lsb
&& (HOST_BITS_PER_WIDE_INT
>= 64 || high
== ~0);
6953 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
6954 to perform a left shift. It must have SHIFTOP or more least
6955 signifigant 0's, with the remainder of the word 1's. */
6958 includes_rldicr_lshift_p (shiftop
, andop
)
6962 if (GET_CODE (andop
) == CONST_INT
)
6964 HOST_WIDE_INT c
, lsb
, shift_mask
;
6967 shift_mask
<<= INTVAL (shiftop
);
6970 /* Find the least signifigant one bit. */
6973 /* It must be covered by the shift mask.
6974 This test also rejects c == 0. */
6975 if ((lsb
& shift_mask
) == 0)
6978 /* Check we have all 1's above the transition, and reject all 1's. */
6979 return c
== -lsb
&& lsb
!= 1;
6981 else if (GET_CODE (andop
) == CONST_DOUBLE
6982 && (GET_MODE (andop
) == VOIDmode
|| GET_MODE (andop
) == DImode
))
6984 HOST_WIDE_INT low
, lsb
, shift_mask_low
;
6986 low
= CONST_DOUBLE_LOW (andop
);
6988 if (HOST_BITS_PER_WIDE_INT
< 64)
6990 HOST_WIDE_INT high
, shift_mask_high
;
6992 high
= CONST_DOUBLE_HIGH (andop
);
6996 shift_mask_high
= ~0;
6997 if (INTVAL (shiftop
) > 32)
6998 shift_mask_high
<<= INTVAL (shiftop
) - 32;
7002 if ((lsb
& shift_mask_high
) == 0)
7005 return high
== -lsb
;
7011 shift_mask_low
= ~0;
7012 shift_mask_low
<<= INTVAL (shiftop
);
7016 if ((lsb
& shift_mask_low
) == 0)
7019 return low
== -lsb
&& lsb
!= 1;
7025 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
7026 for lfq and stfq insns.
7028 Note reg1 and reg2 *must* be hard registers. To be sure we will
7029 abort if we are passed pseudo registers. */
7032 registers_ok_for_quad_peep (reg1
, reg2
)
7035 /* We might have been passed a SUBREG. */
7036 if (GET_CODE (reg1
) != REG
|| GET_CODE (reg2
) != REG
)
7039 return (REGNO (reg1
) == REGNO (reg2
) - 1);
7042 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
7043 addr1 and addr2 must be in consecutive memory locations
7044 (addr2 == addr1 + 8). */
7047 addrs_ok_for_quad_peep (addr1
, addr2
)
7054 /* Extract an offset (if used) from the first addr. */
7055 if (GET_CODE (addr1
) == PLUS
)
7057 /* If not a REG, return zero. */
7058 if (GET_CODE (XEXP (addr1
, 0)) != REG
)
7062 reg1
= REGNO (XEXP (addr1
, 0));
7063 /* The offset must be constant! */
7064 if (GET_CODE (XEXP (addr1
, 1)) != CONST_INT
)
7066 offset1
= INTVAL (XEXP (addr1
, 1));
7069 else if (GET_CODE (addr1
) != REG
)
7073 reg1
= REGNO (addr1
);
7074 /* This was a simple (mem (reg)) expression. Offset is 0. */
7078 /* Make sure the second address is a (mem (plus (reg) (const_int))). */
7079 if (GET_CODE (addr2
) != PLUS
)
7082 if (GET_CODE (XEXP (addr2
, 0)) != REG
7083 || GET_CODE (XEXP (addr2
, 1)) != CONST_INT
)
7086 if (reg1
!= REGNO (XEXP (addr2
, 0)))
7089 /* The offset for the second addr must be 8 more than the first addr. */
7090 if (INTVAL (XEXP (addr2
, 1)) != offset1
+ 8)
7093 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
7098 /* Return the register class of a scratch register needed to copy IN into
7099 or out of a register in CLASS in MODE. If it can be done directly,
7100 NO_REGS is returned. */
7103 secondary_reload_class (class, mode
, in
)
7104 enum reg_class
class;
7105 enum machine_mode mode ATTRIBUTE_UNUSED
;
7110 if (TARGET_ELF
|| (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
))
7112 /* We cannot copy a symbolic operand directly into anything
7113 other than BASE_REGS for TARGET_ELF. So indicate that a
7114 register from BASE_REGS is needed as an intermediate
7117 On Darwin, pic addresses require a load from memory, which
7118 needs a base register. */
7119 if (class != BASE_REGS
7120 && (GET_CODE (in
) == SYMBOL_REF
7121 || GET_CODE (in
) == HIGH
7122 || GET_CODE (in
) == LABEL_REF
7123 || GET_CODE (in
) == CONST
))
7127 if (GET_CODE (in
) == REG
)
7130 if (regno
>= FIRST_PSEUDO_REGISTER
)
7132 regno
= true_regnum (in
);
7133 if (regno
>= FIRST_PSEUDO_REGISTER
)
7137 else if (GET_CODE (in
) == SUBREG
)
7139 regno
= true_regnum (in
);
7140 if (regno
>= FIRST_PSEUDO_REGISTER
)
7146 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
7148 if (class == GENERAL_REGS
|| class == BASE_REGS
7149 || (regno
>= 0 && INT_REGNO_P (regno
)))
7152 /* Constants, memory, and FP registers can go into FP registers. */
7153 if ((regno
== -1 || FP_REGNO_P (regno
))
7154 && (class == FLOAT_REGS
|| class == NON_SPECIAL_REGS
))
7157 /* Memory, and AltiVec registers can go into AltiVec registers. */
7158 if ((regno
== -1 || ALTIVEC_REGNO_P (regno
))
7159 && class == ALTIVEC_REGS
)
7162 /* We can copy among the CR registers. */
7163 if ((class == CR_REGS
|| class == CR0_REGS
)
7164 && regno
>= 0 && CR_REGNO_P (regno
))
7167 /* Otherwise, we need GENERAL_REGS. */
7168 return GENERAL_REGS
;
7171 /* Given a comparison operation, return the bit number in CCR to test. We
7172 know this is a valid comparison.
7174 SCC_P is 1 if this is for an scc. That means that %D will have been
7175 used instead of %C, so the bits will be in different places.
7177 Return -1 if OP isn't a valid comparison for some reason. */
7184 enum rtx_code code
= GET_CODE (op
);
7185 enum machine_mode cc_mode
;
7190 if (GET_RTX_CLASS (code
) != '<')
7195 if (GET_CODE (reg
) != REG
7196 || ! CR_REGNO_P (REGNO (reg
)))
7199 cc_mode
= GET_MODE (reg
);
7200 cc_regnum
= REGNO (reg
);
7201 base_bit
= 4 * (cc_regnum
- CR0_REGNO
);
7203 validate_condition_mode (code
, cc_mode
);
7208 if (TARGET_SPE
&& TARGET_HARD_FLOAT
&& cc_mode
== CCFPmode
)
7209 return base_bit
+ 1;
7210 return scc_p
? base_bit
+ 3 : base_bit
+ 2;
7212 if (TARGET_SPE
&& TARGET_HARD_FLOAT
&& cc_mode
== CCFPmode
)
7213 return base_bit
+ 1;
7214 return base_bit
+ 2;
7215 case GT
: case GTU
: case UNLE
:
7216 return base_bit
+ 1;
7217 case LT
: case LTU
: case UNGE
:
7219 case ORDERED
: case UNORDERED
:
7220 return base_bit
+ 3;
7223 /* If scc, we will have done a cror to put the bit in the
7224 unordered position. So test that bit. For integer, this is ! LT
7225 unless this is an scc insn. */
7226 return scc_p
? base_bit
+ 3 : base_bit
;
7229 return scc_p
? base_bit
+ 3 : base_bit
+ 1;
7236 /* Return the GOT register. */
7239 rs6000_got_register (value
)
7240 rtx value ATTRIBUTE_UNUSED
;
7242 /* The second flow pass currently (June 1999) can't update
7243 regs_ever_live without disturbing other parts of the compiler, so
7244 update it here to make the prolog/epilogue code happy. */
7245 if (no_new_pseudos
&& ! regs_ever_live
[RS6000_PIC_OFFSET_TABLE_REGNUM
])
7246 regs_ever_live
[RS6000_PIC_OFFSET_TABLE_REGNUM
] = 1;
7248 current_function_uses_pic_offset_table
= 1;
7250 return pic_offset_table_rtx
;
7253 /* Function to init struct machine_function.
7254 This will be called, via a pointer variable,
7255 from push_function_context. */
7257 static struct machine_function
*
7258 rs6000_init_machine_status ()
7260 return ggc_alloc_cleared (sizeof (machine_function
));
7263 /* These macros test for integers and extract the low-order bits. */
7265 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
7266 && GET_MODE (X) == VOIDmode)
7268 #define INT_LOWPART(X) \
7269 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
7276 unsigned long val
= INT_LOWPART (op
);
7278 /* If the high bit is zero, the value is the first 1 bit we find
7280 if ((val
& 0x80000000) == 0)
7282 if ((val
& 0xffffffff) == 0)
7286 while (((val
<<= 1) & 0x80000000) == 0)
7291 /* If the high bit is set and the low bit is not, or the mask is all
7292 1's, the value is zero. */
7293 if ((val
& 1) == 0 || (val
& 0xffffffff) == 0xffffffff)
7296 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7299 while (((val
>>= 1) & 1) != 0)
7310 unsigned long val
= INT_LOWPART (op
);
7312 /* If the low bit is zero, the value is the first 1 bit we find from
7316 if ((val
& 0xffffffff) == 0)
7320 while (((val
>>= 1) & 1) == 0)
7326 /* If the low bit is set and the high bit is not, or the mask is all
7327 1's, the value is 31. */
7328 if ((val
& 0x80000000) == 0 || (val
& 0xffffffff) == 0xffffffff)
7331 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7334 while (((val
<<= 1) & 0x80000000) != 0)
7340 /* Print an operand. Recognize special options, documented below. */
7343 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
7344 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
7346 #define SMALL_DATA_RELOC "sda21"
7347 #define SMALL_DATA_REG 0
7351 print_operand (file
, x
, code
)
7358 unsigned HOST_WIDE_INT uval
;
7363 /* Write out an instruction after the call which may be replaced
7364 with glue code by the loader. This depends on the AIX version. */
7365 asm_fprintf (file
, RS6000_CALL_GLUE
);
7368 /* %a is output_address. */
7371 /* If X is a constant integer whose low-order 5 bits are zero,
7372 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
7373 in the AIX assembler where "sri" with a zero shift count
7374 writes a trash instruction. */
7375 if (GET_CODE (x
) == CONST_INT
&& (INTVAL (x
) & 31) == 0)
7382 /* If constant, low-order 16 bits of constant, unsigned.
7383 Otherwise, write normally. */
7385 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 0xffff);
7387 print_operand (file
, x
, 0);
7391 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
7392 for 64-bit mask direction. */
7393 putc (((INT_LOWPART(x
) & 1) == 0 ? 'r' : 'l'), file
);
7396 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
7400 /* There used to be a comment for 'C' reading "This is an
7401 optional cror needed for certain floating-point
7402 comparisons. Otherwise write nothing." */
7404 /* Similar, except that this is for an scc, so we must be able to
7405 encode the test in a single bit that is one. We do the above
7406 for any LE, GE, GEU, or LEU and invert the bit for NE. */
7407 if (GET_CODE (x
) == LE
|| GET_CODE (x
) == GE
7408 || GET_CODE (x
) == LEU
|| GET_CODE (x
) == GEU
)
7410 int base_bit
= 4 * (REGNO (XEXP (x
, 0)) - CR0_REGNO
);
7412 fprintf (file
, "cror %d,%d,%d\n\t", base_bit
+ 3,
7414 base_bit
+ (GET_CODE (x
) == GE
|| GET_CODE (x
) == GEU
));
7417 else if (GET_CODE (x
) == NE
)
7419 int base_bit
= 4 * (REGNO (XEXP (x
, 0)) - CR0_REGNO
);
7421 fprintf (file
, "crnor %d,%d,%d\n\t", base_bit
+ 3,
7422 base_bit
+ 2, base_bit
+ 2);
7424 else if (TARGET_SPE
&& TARGET_HARD_FLOAT
7425 && GET_CODE (x
) == EQ
7426 && GET_MODE (XEXP (x
, 0)) == CCFPmode
)
7428 int base_bit
= 4 * (REGNO (XEXP (x
, 0)) - CR0_REGNO
);
7430 fprintf (file
, "crnor %d,%d,%d\n\t", base_bit
+ 1,
7431 base_bit
+ 1, base_bit
+ 1);
7436 /* X is a CR register. Print the number of the EQ bit of the CR */
7437 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
7438 output_operand_lossage ("invalid %%E value");
7440 fprintf (file
, "%d", 4 * (REGNO (x
) - CR0_REGNO
) + 2);
7444 /* X is a CR register. Print the shift count needed to move it
7445 to the high-order four bits. */
7446 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
7447 output_operand_lossage ("invalid %%f value");
7449 fprintf (file
, "%d", 4 * (REGNO (x
) - CR0_REGNO
));
7453 /* Similar, but print the count for the rotate in the opposite
7455 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
7456 output_operand_lossage ("invalid %%F value");
7458 fprintf (file
, "%d", 32 - 4 * (REGNO (x
) - CR0_REGNO
));
7462 /* X is a constant integer. If it is negative, print "m",
7463 otherwise print "z". This is to make a aze or ame insn. */
7464 if (GET_CODE (x
) != CONST_INT
)
7465 output_operand_lossage ("invalid %%G value");
7466 else if (INTVAL (x
) >= 0)
7473 /* If constant, output low-order five bits. Otherwise, write
7476 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 31);
7478 print_operand (file
, x
, 0);
7482 /* If constant, output low-order six bits. Otherwise, write
7485 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 63);
7487 print_operand (file
, x
, 0);
7491 /* Print `i' if this is a constant, else nothing. */
7497 /* Write the bit number in CCR for jump. */
7500 output_operand_lossage ("invalid %%j code");
7502 fprintf (file
, "%d", i
);
7506 /* Similar, but add one for shift count in rlinm for scc and pass
7507 scc flag to `ccr_bit'. */
7510 output_operand_lossage ("invalid %%J code");
7512 /* If we want bit 31, write a shift count of zero, not 32. */
7513 fprintf (file
, "%d", i
== 31 ? 0 : i
+ 1);
7517 /* X must be a constant. Write the 1's complement of the
7520 output_operand_lossage ("invalid %%k value");
7522 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ~ INT_LOWPART (x
));
7526 /* X must be a symbolic constant on ELF. Write an
7527 expression suitable for an 'addi' that adds in the low 16
7529 if (GET_CODE (x
) != CONST
)
7531 print_operand_address (file
, x
);
7536 if (GET_CODE (XEXP (x
, 0)) != PLUS
7537 || (GET_CODE (XEXP (XEXP (x
, 0), 0)) != SYMBOL_REF
7538 && GET_CODE (XEXP (XEXP (x
, 0), 0)) != LABEL_REF
)
7539 || GET_CODE (XEXP (XEXP (x
, 0), 1)) != CONST_INT
)
7540 output_operand_lossage ("invalid %%K value");
7541 print_operand_address (file
, XEXP (XEXP (x
, 0), 0));
7543 /* For GNU as, there must be a non-alphanumeric character
7544 between 'l' and the number. The '-' is added by
7545 print_operand() already. */
7546 if (INTVAL (XEXP (XEXP (x
, 0), 1)) >= 0)
7548 print_operand (file
, XEXP (XEXP (x
, 0), 1), 0);
7552 /* %l is output_asm_label. */
7555 /* Write second word of DImode or DFmode reference. Works on register
7556 or non-indexed memory only. */
7557 if (GET_CODE (x
) == REG
)
7558 fprintf (file
, "%s", reg_names
[REGNO (x
) + 1]);
7559 else if (GET_CODE (x
) == MEM
)
7561 /* Handle possible auto-increment. Since it is pre-increment and
7562 we have already done it, we can just use an offset of word. */
7563 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
7564 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
7565 output_address (plus_constant (XEXP (XEXP (x
, 0), 0),
7568 output_address (XEXP (adjust_address_nv (x
, SImode
,
7572 if (small_data_operand (x
, GET_MODE (x
)))
7573 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
7574 reg_names
[SMALL_DATA_REG
]);
7579 /* MB value for a mask operand. */
7580 if (! mask_operand (x
, SImode
))
7581 output_operand_lossage ("invalid %%m value");
7583 fprintf (file
, "%d", extract_MB (x
));
7587 /* ME value for a mask operand. */
7588 if (! mask_operand (x
, SImode
))
7589 output_operand_lossage ("invalid %%M value");
7591 fprintf (file
, "%d", extract_ME (x
));
7594 /* %n outputs the negative of its operand. */
7597 /* Write the number of elements in the vector times 4. */
7598 if (GET_CODE (x
) != PARALLEL
)
7599 output_operand_lossage ("invalid %%N value");
7601 fprintf (file
, "%d", XVECLEN (x
, 0) * 4);
7605 /* Similar, but subtract 1 first. */
7606 if (GET_CODE (x
) != PARALLEL
)
7607 output_operand_lossage ("invalid %%O value");
7609 fprintf (file
, "%d", (XVECLEN (x
, 0) - 1) * 4);
7613 /* X is a CONST_INT that is a power of two. Output the logarithm. */
7615 || INT_LOWPART (x
) < 0
7616 || (i
= exact_log2 (INT_LOWPART (x
))) < 0)
7617 output_operand_lossage ("invalid %%p value");
7619 fprintf (file
, "%d", i
);
7623 /* The operand must be an indirect memory reference. The result
7624 is the register number. */
7625 if (GET_CODE (x
) != MEM
|| GET_CODE (XEXP (x
, 0)) != REG
7626 || REGNO (XEXP (x
, 0)) >= 32)
7627 output_operand_lossage ("invalid %%P value");
7629 fprintf (file
, "%d", REGNO (XEXP (x
, 0)));
7633 /* This outputs the logical code corresponding to a boolean
7634 expression. The expression may have one or both operands
7635 negated (if one, only the first one). For condition register
7636 logical operations, it will also treat the negated
7637 CR codes as NOTs, but not handle NOTs of them. */
7639 const char *const *t
= 0;
7641 enum rtx_code code
= GET_CODE (x
);
7642 static const char * const tbl
[3][3] = {
7643 { "and", "andc", "nor" },
7644 { "or", "orc", "nand" },
7645 { "xor", "eqv", "xor" } };
7649 else if (code
== IOR
)
7651 else if (code
== XOR
)
7654 output_operand_lossage ("invalid %%q value");
7656 if (GET_CODE (XEXP (x
, 0)) != NOT
)
7660 if (GET_CODE (XEXP (x
, 1)) == NOT
)
7671 /* X is a CR register. Print the mask for `mtcrf'. */
7672 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
7673 output_operand_lossage ("invalid %%R value");
7675 fprintf (file
, "%d", 128 >> (REGNO (x
) - CR0_REGNO
));
7679 /* Low 5 bits of 32 - value */
7681 output_operand_lossage ("invalid %%s value");
7683 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, (32 - INT_LOWPART (x
)) & 31);
7687 /* PowerPC64 mask position. All 0's is excluded.
7688 CONST_INT 32-bit mask is considered sign-extended so any
7689 transition must occur within the CONST_INT, not on the boundary. */
7690 if (! mask64_operand (x
, DImode
))
7691 output_operand_lossage ("invalid %%S value");
7693 uval
= INT_LOWPART (x
);
7695 if (uval
& 1) /* Clear Left */
7697 uval
&= ((unsigned HOST_WIDE_INT
) 1 << 63 << 1) - 1;
7700 else /* Clear Right */
7703 uval
&= ((unsigned HOST_WIDE_INT
) 1 << 63 << 1) - 1;
7710 fprintf (file
, "%d", i
);
7714 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
7715 if (GET_CODE (x
) != REG
|| GET_MODE (x
) != CCmode
)
7718 /* Bit 3 is OV bit. */
7719 i
= 4 * (REGNO (x
) - CR0_REGNO
) + 3;
7721 /* If we want bit 31, write a shift count of zero, not 32. */
7722 fprintf (file
, "%d", i
== 31 ? 0 : i
+ 1);
7726 /* Print the symbolic name of a branch target register. */
7727 if (GET_CODE (x
) != REG
|| (REGNO (x
) != LINK_REGISTER_REGNUM
7728 && REGNO (x
) != COUNT_REGISTER_REGNUM
))
7729 output_operand_lossage ("invalid %%T value");
7730 else if (REGNO (x
) == LINK_REGISTER_REGNUM
)
7731 fputs (TARGET_NEW_MNEMONICS
? "lr" : "r", file
);
7733 fputs ("ctr", file
);
7737 /* High-order 16 bits of constant for use in unsigned operand. */
7739 output_operand_lossage ("invalid %%u value");
7741 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
7742 (INT_LOWPART (x
) >> 16) & 0xffff);
7746 /* High-order 16 bits of constant for use in signed operand. */
7748 output_operand_lossage ("invalid %%v value");
7750 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
7751 (INT_LOWPART (x
) >> 16) & 0xffff);
7755 /* Print `u' if this has an auto-increment or auto-decrement. */
7756 if (GET_CODE (x
) == MEM
7757 && (GET_CODE (XEXP (x
, 0)) == PRE_INC
7758 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
))
7763 /* Print the trap code for this operand. */
7764 switch (GET_CODE (x
))
7767 fputs ("eq", file
); /* 4 */
7770 fputs ("ne", file
); /* 24 */
7773 fputs ("lt", file
); /* 16 */
7776 fputs ("le", file
); /* 20 */
7779 fputs ("gt", file
); /* 8 */
7782 fputs ("ge", file
); /* 12 */
7785 fputs ("llt", file
); /* 2 */
7788 fputs ("lle", file
); /* 6 */
7791 fputs ("lgt", file
); /* 1 */
7794 fputs ("lge", file
); /* 5 */
7802 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
7805 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
,
7806 ((INT_LOWPART (x
) & 0xffff) ^ 0x8000) - 0x8000);
7808 print_operand (file
, x
, 0);
7812 /* MB value for a PowerPC64 rldic operand. */
7813 val
= (GET_CODE (x
) == CONST_INT
7814 ? INTVAL (x
) : CONST_DOUBLE_HIGH (x
));
7819 for (i
= 0; i
< HOST_BITS_PER_WIDE_INT
; i
++)
7820 if ((val
<<= 1) < 0)
7823 #if HOST_BITS_PER_WIDE_INT == 32
7824 if (GET_CODE (x
) == CONST_INT
&& i
>= 0)
7825 i
+= 32; /* zero-extend high-part was all 0's */
7826 else if (GET_CODE (x
) == CONST_DOUBLE
&& i
== 32)
7828 val
= CONST_DOUBLE_LOW (x
);
7835 for ( ; i
< 64; i
++)
7836 if ((val
<<= 1) < 0)
7841 fprintf (file
, "%d", i
+ 1);
7845 if (GET_CODE (x
) == MEM
7846 && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x
, 0), 0))
7851 /* Like 'L', for third word of TImode */
7852 if (GET_CODE (x
) == REG
)
7853 fprintf (file
, "%s", reg_names
[REGNO (x
) + 2]);
7854 else if (GET_CODE (x
) == MEM
)
7856 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
7857 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
7858 output_address (plus_constant (XEXP (XEXP (x
, 0), 0), 8));
7860 output_address (XEXP (adjust_address_nv (x
, SImode
, 8), 0));
7861 if (small_data_operand (x
, GET_MODE (x
)))
7862 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
7863 reg_names
[SMALL_DATA_REG
]);
7868 /* X is a SYMBOL_REF. Write out the name preceded by a
7869 period and without any trailing data in brackets. Used for function
7870 names. If we are configured for System V (or the embedded ABI) on
7871 the PowerPC, do not emit the period, since those systems do not use
7872 TOCs and the like. */
7873 if (GET_CODE (x
) != SYMBOL_REF
)
7876 if (XSTR (x
, 0)[0] != '.')
7878 switch (DEFAULT_ABI
)
7888 case ABI_AIX_NODESC
:
7894 RS6000_OUTPUT_BASENAME (file
, XSTR (x
, 0));
7896 assemble_name (file
, XSTR (x
, 0));
7901 /* Like 'L', for last word of TImode. */
7902 if (GET_CODE (x
) == REG
)
7903 fprintf (file
, "%s", reg_names
[REGNO (x
) + 3]);
7904 else if (GET_CODE (x
) == MEM
)
7906 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
7907 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
7908 output_address (plus_constant (XEXP (XEXP (x
, 0), 0), 12));
7910 output_address (XEXP (adjust_address_nv (x
, SImode
, 12), 0));
7911 if (small_data_operand (x
, GET_MODE (x
)))
7912 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
7913 reg_names
[SMALL_DATA_REG
]);
7917 /* Print AltiVec or SPE memory operand. */
7922 if (GET_CODE (x
) != MEM
)
7930 if (GET_CODE (tmp
) == REG
)
7932 fprintf (file
, "0(%s)", reg_names
[REGNO (tmp
)]);
7935 /* Handle [reg+UIMM]. */
7936 else if (GET_CODE (tmp
) == PLUS
&&
7937 GET_CODE (XEXP (tmp
, 1)) == CONST_INT
)
7941 if (GET_CODE (XEXP (tmp
, 0)) != REG
)
7944 x
= INTVAL (XEXP (tmp
, 1));
7945 fprintf (file
, "%d(%s)", x
, reg_names
[REGNO (XEXP (tmp
, 0))]);
7949 /* Fall through. Must be [reg+reg]. */
7951 if (GET_CODE (tmp
) == REG
)
7952 fprintf (file
, "0,%s", reg_names
[REGNO (tmp
)]);
7953 else if (GET_CODE (tmp
) == PLUS
&& GET_CODE (XEXP (tmp
, 1)) == REG
)
7955 if (REGNO (XEXP (tmp
, 0)) == 0)
7956 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (tmp
, 1)) ],
7957 reg_names
[ REGNO (XEXP (tmp
, 0)) ]);
7959 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (tmp
, 0)) ],
7960 reg_names
[ REGNO (XEXP (tmp
, 1)) ]);
7968 if (GET_CODE (x
) == REG
)
7969 fprintf (file
, "%s", reg_names
[REGNO (x
)]);
7970 else if (GET_CODE (x
) == MEM
)
7972 /* We need to handle PRE_INC and PRE_DEC here, since we need to
7973 know the width from the mode. */
7974 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
)
7975 fprintf (file
, "%d(%s)", GET_MODE_SIZE (GET_MODE (x
)),
7976 reg_names
[REGNO (XEXP (XEXP (x
, 0), 0))]);
7977 else if (GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
7978 fprintf (file
, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x
)),
7979 reg_names
[REGNO (XEXP (XEXP (x
, 0), 0))]);
7981 output_address (XEXP (x
, 0));
7984 output_addr_const (file
, x
);
7988 output_operand_lossage ("invalid %%xn code");
7992 /* Print the address of an operand. */
7995 print_operand_address (file
, x
)
7999 if (GET_CODE (x
) == REG
)
8000 fprintf (file
, "0(%s)", reg_names
[ REGNO (x
) ]);
8001 else if (GET_CODE (x
) == SYMBOL_REF
|| GET_CODE (x
) == CONST
8002 || GET_CODE (x
) == LABEL_REF
)
8004 output_addr_const (file
, x
);
8005 if (small_data_operand (x
, GET_MODE (x
)))
8006 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
8007 reg_names
[SMALL_DATA_REG
]);
8008 else if (TARGET_TOC
)
8011 else if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 1)) == REG
)
8013 if (REGNO (XEXP (x
, 0)) == 0)
8014 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (x
, 1)) ],
8015 reg_names
[ REGNO (XEXP (x
, 0)) ]);
8017 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (x
, 0)) ],
8018 reg_names
[ REGNO (XEXP (x
, 1)) ]);
8020 else if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 1)) == CONST_INT
)
8022 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (XEXP (x
, 1)));
8023 fprintf (file
, "(%s)", reg_names
[ REGNO (XEXP (x
, 0)) ]);
8026 else if (GET_CODE (x
) == LO_SUM
&& GET_CODE (XEXP (x
, 0)) == REG
8027 && CONSTANT_P (XEXP (x
, 1)))
8029 output_addr_const (file
, XEXP (x
, 1));
8030 fprintf (file
, "@l(%s)", reg_names
[ REGNO (XEXP (x
, 0)) ]);
8034 else if (GET_CODE (x
) == LO_SUM
&& GET_CODE (XEXP (x
, 0)) == REG
8035 && CONSTANT_P (XEXP (x
, 1)))
8037 fprintf (file
, "lo16(");
8038 output_addr_const (file
, XEXP (x
, 1));
8039 fprintf (file
, ")(%s)", reg_names
[ REGNO (XEXP (x
, 0)) ]);
8042 else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x
))
8044 if (TARGET_AIX
&& (!TARGET_ELF
|| !TARGET_MINIMAL_TOC
))
8046 rtx contains_minus
= XEXP (x
, 1);
8050 /* Find the (minus (sym) (toc)) buried in X, and temporarily
8051 turn it into (sym) for output_addr_const. */
8052 while (GET_CODE (XEXP (contains_minus
, 0)) != MINUS
)
8053 contains_minus
= XEXP (contains_minus
, 0);
8055 minus
= XEXP (contains_minus
, 0);
8056 symref
= XEXP (minus
, 0);
8057 XEXP (contains_minus
, 0) = symref
;
8062 name
= XSTR (symref
, 0);
8063 newname
= alloca (strlen (name
) + sizeof ("@toc"));
8064 strcpy (newname
, name
);
8065 strcat (newname
, "@toc");
8066 XSTR (symref
, 0) = newname
;
8068 output_addr_const (file
, XEXP (x
, 1));
8070 XSTR (symref
, 0) = name
;
8071 XEXP (contains_minus
, 0) = minus
;
8074 output_addr_const (file
, XEXP (x
, 1));
8076 fprintf (file
, "(%s)", reg_names
[REGNO (XEXP (x
, 0))]);
8082 /* Target hook for assembling integer objects. The powerpc version has
8083 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
8084 is defined. It also needs to handle DI-mode objects on 64-bit
8088 rs6000_assemble_integer (x
, size
, aligned_p
)
8093 #ifdef RELOCATABLE_NEEDS_FIXUP
8094 /* Special handling for SI values. */
8095 if (size
== 4 && aligned_p
)
8097 extern int in_toc_section
PARAMS ((void));
8098 static int recurse
= 0;
8100 /* For -mrelocatable, we mark all addresses that need to be fixed up
8101 in the .fixup section. */
8102 if (TARGET_RELOCATABLE
8103 && !in_toc_section ()
8104 && !in_text_section ()
8106 && GET_CODE (x
) != CONST_INT
8107 && GET_CODE (x
) != CONST_DOUBLE
8113 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCP", fixuplabelno
);
8115 ASM_OUTPUT_LABEL (asm_out_file
, buf
);
8116 fprintf (asm_out_file
, "\t.long\t(");
8117 output_addr_const (asm_out_file
, x
);
8118 fprintf (asm_out_file
, ")@fixup\n");
8119 fprintf (asm_out_file
, "\t.section\t\".fixup\",\"aw\"\n");
8120 ASM_OUTPUT_ALIGN (asm_out_file
, 2);
8121 fprintf (asm_out_file
, "\t.long\t");
8122 assemble_name (asm_out_file
, buf
);
8123 fprintf (asm_out_file
, "\n\t.previous\n");
8127 /* Remove initial .'s to turn a -mcall-aixdesc function
8128 address into the address of the descriptor, not the function
8130 else if (GET_CODE (x
) == SYMBOL_REF
8131 && XSTR (x
, 0)[0] == '.'
8132 && DEFAULT_ABI
== ABI_AIX
)
8134 const char *name
= XSTR (x
, 0);
8135 while (*name
== '.')
8138 fprintf (asm_out_file
, "\t.long\t%s\n", name
);
8142 #endif /* RELOCATABLE_NEEDS_FIXUP */
8143 return default_assemble_integer (x
, size
, aligned_p
);
8147 rs6000_reverse_condition (mode
, code
)
8148 enum machine_mode mode
;
8151 /* Reversal of FP compares takes care -- an ordered compare
8152 becomes an unordered compare and vice versa. */
8153 if (mode
== CCFPmode
)
8154 return reverse_condition_maybe_unordered (code
);
8156 return reverse_condition (code
);
8159 /* Generate a compare for CODE. Return a brand-new rtx that
8160 represents the result of the compare. */
8163 rs6000_generate_compare (code
)
8166 enum machine_mode comp_mode
;
8169 if (rs6000_compare_fp_p
)
8170 comp_mode
= CCFPmode
;
8171 else if (code
== GTU
|| code
== LTU
8172 || code
== GEU
|| code
== LEU
)
8173 comp_mode
= CCUNSmode
;
8177 /* First, the compare. */
8178 compare_result
= gen_reg_rtx (comp_mode
);
8180 /* SPE FP compare instructions on the GPRs. Yuck! */
8181 if ((TARGET_SPE
&& TARGET_HARD_FLOAT
) && rs6000_compare_fp_p
)
8183 rtx cmp
, or1
, or2
, or_result
, compare_result2
;
8191 cmp
= flag_unsafe_math_optimizations
8192 ? gen_tstsfeq_gpr (compare_result
, rs6000_compare_op0
,
8194 : gen_cmpsfeq_gpr (compare_result
, rs6000_compare_op0
,
8195 rs6000_compare_op1
);
8203 cmp
= flag_unsafe_math_optimizations
8204 ? gen_tstsfgt_gpr (compare_result
, rs6000_compare_op0
,
8206 : gen_cmpsfgt_gpr (compare_result
, rs6000_compare_op0
,
8207 rs6000_compare_op1
);
8215 cmp
= flag_unsafe_math_optimizations
8216 ? gen_tstsflt_gpr (compare_result
, rs6000_compare_op0
,
8218 : gen_cmpsflt_gpr (compare_result
, rs6000_compare_op0
,
8219 rs6000_compare_op1
);
8225 /* Synthesize LE and GE from LT/GT || EQ. */
8226 if (code
== LE
|| code
== GE
|| code
== LEU
|| code
== GEU
)
8228 /* Synthesize GE/LE frome GT/LT || EQ. */
8234 case LE
: code
= LT
; break;
8235 case GE
: code
= GT
; break;
8236 case LEU
: code
= LT
; break;
8237 case GEU
: code
= GT
; break;
8241 or1
= gen_reg_rtx (SImode
);
8242 or2
= gen_reg_rtx (SImode
);
8243 or_result
= gen_reg_rtx (CCEQmode
);
8244 compare_result2
= gen_reg_rtx (CCFPmode
);
8247 cmp
= flag_unsafe_math_optimizations
8248 ? gen_tstsfeq_gpr (compare_result2
, rs6000_compare_op0
,
8250 : gen_cmpsfeq_gpr (compare_result2
, rs6000_compare_op0
,
8251 rs6000_compare_op1
);
8254 /* The MC8540 FP compare instructions set the CR bits
8255 differently than other PPC compare instructions. For
8256 that matter, there is no generic test instruction, but a
8257 testgt, testlt, and testeq. For a true condition, bit 2
8258 is set (x1xx) in the CR. Following the traditional CR
8264 ... bit 2 would be a GT CR alias, so later on we
8265 look in the GT bits for the branch instructins.
8266 However, we must be careful to emit correct RTL in
8267 the meantime, so optimizations don't get confused. */
8269 or1
= gen_rtx (NE
, SImode
, compare_result
, const0_rtx
);
8270 or2
= gen_rtx (NE
, SImode
, compare_result2
, const0_rtx
);
8272 /* OR them together. */
8273 cmp
= gen_rtx_SET (VOIDmode
, or_result
,
8274 gen_rtx_COMPARE (CCEQmode
,
8275 gen_rtx_IOR (SImode
, or1
, or2
),
8277 compare_result
= or_result
;
8282 /* We only care about 1 bit (x1xx), so map everything to NE to
8283 maintain rtl sanity. We'll get to the right bit (x1xx) at
8284 code output time. */
8285 if (code
== NE
|| code
== LTGT
)
8286 /* Do the inverse here because we have no cmpne
8287 instruction. We use the cmpeq instruction and expect
8288 to get a 0 instead. */
8297 emit_insn (gen_rtx_SET (VOIDmode
, compare_result
,
8298 gen_rtx_COMPARE (comp_mode
,
8300 rs6000_compare_op1
)));
8302 /* Some kinds of FP comparisons need an OR operation;
8303 except for flag_unsafe_math_optimizations we don't bother. */
8304 if (rs6000_compare_fp_p
8305 && ! flag_unsafe_math_optimizations
8306 && ! (TARGET_HARD_FLOAT
&& TARGET_SPE
)
8307 && (code
== LE
|| code
== GE
8308 || code
== UNEQ
|| code
== LTGT
8309 || code
== UNGT
|| code
== UNLT
))
8311 enum rtx_code or1
, or2
;
8312 rtx or1_rtx
, or2_rtx
, compare2_rtx
;
8313 rtx or_result
= gen_reg_rtx (CCEQmode
);
8317 case LE
: or1
= LT
; or2
= EQ
; break;
8318 case GE
: or1
= GT
; or2
= EQ
; break;
8319 case UNEQ
: or1
= UNORDERED
; or2
= EQ
; break;
8320 case LTGT
: or1
= LT
; or2
= GT
; break;
8321 case UNGT
: or1
= UNORDERED
; or2
= GT
; break;
8322 case UNLT
: or1
= UNORDERED
; or2
= LT
; break;
8325 validate_condition_mode (or1
, comp_mode
);
8326 validate_condition_mode (or2
, comp_mode
);
8327 or1_rtx
= gen_rtx (or1
, SImode
, compare_result
, const0_rtx
);
8328 or2_rtx
= gen_rtx (or2
, SImode
, compare_result
, const0_rtx
);
8329 compare2_rtx
= gen_rtx_COMPARE (CCEQmode
,
8330 gen_rtx_IOR (SImode
, or1_rtx
, or2_rtx
),
8332 emit_insn (gen_rtx_SET (VOIDmode
, or_result
, compare2_rtx
));
8334 compare_result
= or_result
;
8338 validate_condition_mode (code
, GET_MODE (compare_result
));
8340 return gen_rtx (code
, VOIDmode
, compare_result
, const0_rtx
);
8344 /* Emit the RTL for an sCOND pattern. */
8347 rs6000_emit_sCOND (code
, result
)
8352 enum machine_mode op_mode
;
8354 condition_rtx
= rs6000_generate_compare (code
);
8356 op_mode
= GET_MODE (rs6000_compare_op0
);
8357 if (op_mode
== VOIDmode
)
8358 op_mode
= GET_MODE (rs6000_compare_op1
);
8360 if (TARGET_POWERPC64
&& (op_mode
== DImode
|| rs6000_compare_fp_p
))
8362 PUT_MODE (condition_rtx
, DImode
);
8363 convert_move (result
, condition_rtx
, 0);
8367 PUT_MODE (condition_rtx
, SImode
);
8368 emit_insn (gen_rtx_SET (VOIDmode
, result
, condition_rtx
));
8372 /* Emit a branch of kind CODE to location LOC. */
8375 rs6000_emit_cbranch (code
, loc
)
8379 rtx condition_rtx
, loc_ref
;
8381 condition_rtx
= rs6000_generate_compare (code
);
8382 loc_ref
= gen_rtx_LABEL_REF (VOIDmode
, loc
);
8383 emit_jump_insn (gen_rtx_SET (VOIDmode
, pc_rtx
,
8384 gen_rtx_IF_THEN_ELSE (VOIDmode
, condition_rtx
,
8388 /* Return the string to output a conditional branch to LABEL, which is
8389 the operand number of the label, or -1 if the branch is really a
8392 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
8393 condition code register and its mode specifies what kind of
8396 REVERSED is non-zero if we should reverse the sense of the comparison.
8398 INSN is the insn. */
8401 output_cbranch (op
, label
, reversed
, insn
)
8407 static char string
[64];
8408 enum rtx_code code
= GET_CODE (op
);
8409 rtx cc_reg
= XEXP (op
, 0);
8410 enum machine_mode mode
= GET_MODE (cc_reg
);
8411 int cc_regno
= REGNO (cc_reg
) - CR0_REGNO
;
8412 int need_longbranch
= label
!= NULL
&& get_attr_length (insn
) == 8;
8413 int really_reversed
= reversed
^ need_longbranch
;
8419 validate_condition_mode (code
, mode
);
8421 /* Work out which way this really branches. We could use
8422 reverse_condition_maybe_unordered here always but this
8423 makes the resulting assembler clearer. */
8424 if (really_reversed
)
8425 code
= rs6000_reverse_condition (mode
, code
);
8427 if ((TARGET_SPE
&& TARGET_HARD_FLOAT
) && mode
== CCFPmode
)
8429 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
8432 /* Opposite of GT. */
8434 else if (code
== NE
)
8442 /* Not all of these are actually distinct opcodes, but
8443 we distinguish them for clarity of the resulting assembler. */
8445 ccode
= "ne"; break;
8447 ccode
= "eq"; break;
8449 ccode
= "ge"; break;
8450 case GT
: case GTU
: case UNGT
:
8451 ccode
= "gt"; break;
8453 ccode
= "le"; break;
8454 case LT
: case LTU
: case UNLT
:
8455 ccode
= "lt"; break;
8456 case UNORDERED
: ccode
= "un"; break;
8457 case ORDERED
: ccode
= "nu"; break;
8458 case UNGE
: ccode
= "nl"; break;
8459 case UNLE
: ccode
= "ng"; break;
8464 /* Maybe we have a guess as to how likely the branch is.
8465 The old mnemonics don't have a way to specify this information. */
8467 note
= find_reg_note (insn
, REG_BR_PROB
, NULL_RTX
);
8468 if (note
!= NULL_RTX
)
8470 /* PROB is the difference from 50%. */
8471 int prob
= INTVAL (XEXP (note
, 0)) - REG_BR_PROB_BASE
/ 2;
8472 bool always_hint
= rs6000_cpu
!= PROCESSOR_POWER4
;
8474 /* Only hint for highly probable/improbable branches on newer
8475 cpus as static prediction overrides processor dynamic
8476 prediction. For older cpus we may as well always hint, but
8477 assume not taken for branches that are very close to 50% as a
8478 mispredicted taken branch is more expensive than a
8479 mispredicted not-taken branch. */
8481 || abs (prob
) > REG_BR_PROB_BASE
/ 100 * 48)
8483 if (abs (prob
) > REG_BR_PROB_BASE
/ 20
8484 && ((prob
> 0) ^ need_longbranch
))
8492 s
+= sprintf (s
, "{b%sr|b%slr%s} ", ccode
, ccode
, pred
);
8494 s
+= sprintf (s
, "{b%s|b%s%s} ", ccode
, ccode
, pred
);
8496 /* We need to escape any '%' characters in the reg_names string.
8497 Assume they'd only be the first character... */
8498 if (reg_names
[cc_regno
+ CR0_REGNO
][0] == '%')
8500 s
+= sprintf (s
, "%s", reg_names
[cc_regno
+ CR0_REGNO
]);
8504 /* If the branch distance was too far, we may have to use an
8505 unconditional branch to go the distance. */
8506 if (need_longbranch
)
8507 s
+= sprintf (s
, ",$+8\n\tb %s", label
);
8509 s
+= sprintf (s
, ",%s", label
);
8515 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
8516 operands of the last comparison is nonzero/true, FALSE_COND if it
8517 is zero/false. Return 0 if the hardware has no such operation. */
8520 rs6000_emit_cmove (dest
, op
, true_cond
, false_cond
)
8526 enum rtx_code code
= GET_CODE (op
);
8527 rtx op0
= rs6000_compare_op0
;
8528 rtx op1
= rs6000_compare_op1
;
8530 enum machine_mode compare_mode
= GET_MODE (op0
);
8531 enum machine_mode result_mode
= GET_MODE (dest
);
8534 /* These modes should always match. */
8535 if (GET_MODE (op1
) != compare_mode
8536 /* In the isel case however, we can use a compare immediate, so
8537 op1 may be a small constant. */
8538 && (!TARGET_ISEL
|| !short_cint_operand (op1
, VOIDmode
)))
8540 if (GET_MODE (true_cond
) != result_mode
)
8542 if (GET_MODE (false_cond
) != result_mode
)
8545 /* First, work out if the hardware can do this at all, or
8546 if it's too slow... */
8547 if (! rs6000_compare_fp_p
)
8550 return rs6000_emit_int_cmove (dest
, op
, true_cond
, false_cond
);
8554 /* Eliminate half of the comparisons by switching operands, this
8555 makes the remaining code simpler. */
8556 if (code
== UNLT
|| code
== UNGT
|| code
== UNORDERED
|| code
== NE
8557 || code
== LTGT
|| code
== LT
)
8559 code
= reverse_condition_maybe_unordered (code
);
8561 true_cond
= false_cond
;
8565 /* UNEQ and LTGT take four instructions for a comparison with zero,
8566 it'll probably be faster to use a branch here too. */
8570 if (GET_CODE (op1
) == CONST_DOUBLE
)
8571 REAL_VALUE_FROM_CONST_DOUBLE (c1
, op1
);
8573 /* We're going to try to implement comparions by performing
8574 a subtract, then comparing against zero. Unfortunately,
8575 Inf - Inf is NaN which is not zero, and so if we don't
8576 know that the operand is finite and the comparison
8577 would treat EQ different to UNORDERED, we can't do it. */
8578 if (! flag_unsafe_math_optimizations
8579 && code
!= GT
&& code
!= UNGE
8580 && (GET_CODE (op1
) != CONST_DOUBLE
|| target_isinf (c1
))
8581 /* Constructs of the form (a OP b ? a : b) are safe. */
8582 && ((! rtx_equal_p (op0
, false_cond
) && ! rtx_equal_p (op1
, false_cond
))
8583 || (! rtx_equal_p (op0
, true_cond
)
8584 && ! rtx_equal_p (op1
, true_cond
))))
8586 /* At this point we know we can use fsel. */
8588 /* Reduce the comparison to a comparison against zero. */
8589 temp
= gen_reg_rtx (compare_mode
);
8590 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
8591 gen_rtx_MINUS (compare_mode
, op0
, op1
)));
8593 op1
= CONST0_RTX (compare_mode
);
8595 /* If we don't care about NaNs we can reduce some of the comparisons
8596 down to faster ones. */
8597 if (flag_unsafe_math_optimizations
)
8603 true_cond
= false_cond
;
8616 /* Now, reduce everything down to a GE. */
8623 temp
= gen_reg_rtx (compare_mode
);
8624 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (compare_mode
, op0
)));
8629 temp
= gen_reg_rtx (compare_mode
);
8630 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_ABS (compare_mode
, op0
)));
8635 temp
= gen_reg_rtx (compare_mode
);
8636 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
8637 gen_rtx_NEG (compare_mode
,
8638 gen_rtx_ABS (compare_mode
, op0
))));
8643 temp
= gen_reg_rtx (result_mode
);
8644 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
8645 gen_rtx_IF_THEN_ELSE (result_mode
,
8646 gen_rtx_GE (VOIDmode
,
8648 true_cond
, false_cond
)));
8650 true_cond
= false_cond
;
8652 temp
= gen_reg_rtx (compare_mode
);
8653 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (compare_mode
, op0
)));
8658 temp
= gen_reg_rtx (result_mode
);
8659 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
8660 gen_rtx_IF_THEN_ELSE (result_mode
,
8661 gen_rtx_GE (VOIDmode
,
8663 true_cond
, false_cond
)));
8665 false_cond
= true_cond
;
8667 temp
= gen_reg_rtx (compare_mode
);
8668 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (compare_mode
, op0
)));
8676 emit_insn (gen_rtx_SET (VOIDmode
, dest
,
8677 gen_rtx_IF_THEN_ELSE (result_mode
,
8678 gen_rtx_GE (VOIDmode
,
8680 true_cond
, false_cond
)));
8684 /* Same as above, but for ints (isel). */
8687 rs6000_emit_int_cmove (dest
, op
, true_cond
, false_cond
)
8693 rtx condition_rtx
, cr
;
8695 /* All isel implementations thus far are 32-bits. */
8696 if (GET_MODE (rs6000_compare_op0
) != SImode
)
8699 /* We still have to do the compare, because isel doesn't do a
8700 compare, it just looks at the CRx bits set by a previous compare
8702 condition_rtx
= rs6000_generate_compare (GET_CODE (op
));
8703 cr
= XEXP (condition_rtx
, 0);
8705 if (GET_MODE (cr
) == CCmode
)
8706 emit_insn (gen_isel_signed (dest
, condition_rtx
,
8707 true_cond
, false_cond
, cr
));
8709 emit_insn (gen_isel_unsigned (dest
, condition_rtx
,
8710 true_cond
, false_cond
, cr
));
8716 output_isel (operands
)
8721 code
= GET_CODE (operands
[1]);
8722 if (code
== GE
|| code
== GEU
|| code
== LE
|| code
== LEU
|| code
== NE
)
8724 PUT_CODE (operands
[1], reverse_condition (code
));
8725 return "isel %0,%3,%2,%j1";
8728 return "isel %0,%2,%3,%j1";
8732 rs6000_emit_minmax (dest
, code
, op0
, op1
)
8738 enum machine_mode mode
= GET_MODE (op0
);
8742 if (code
== SMAX
|| code
== SMIN
)
8747 if (code
== SMAX
|| code
== UMAX
)
8748 target
= emit_conditional_move (dest
, c
, op0
, op1
, mode
,
8751 target
= emit_conditional_move (dest
, c
, op0
, op1
, mode
,
8753 if (target
== NULL_RTX
)
8756 emit_move_insn (dest
, target
);
8759 /* This page contains routines that are used to determine what the
8760 function prologue and epilogue code will do and write them out. */
8762 /* Return the first fixed-point register that is required to be
8763 saved. 32 if none. */
8766 first_reg_to_save ()
8770 /* Find lowest numbered live register. */
8771 for (first_reg
= 13; first_reg
<= 31; first_reg
++)
8772 if (regs_ever_live
[first_reg
]
8773 && (! call_used_regs
[first_reg
]
8774 || (first_reg
== RS6000_PIC_OFFSET_TABLE_REGNUM
8775 && ((DEFAULT_ABI
== ABI_V4
&& flag_pic
== 1)
8776 || (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
)))))
8780 if (flag_pic
&& current_function_uses_pic_offset_table
&&
8781 (first_reg
> RS6000_PIC_OFFSET_TABLE_REGNUM
))
8782 return RS6000_PIC_OFFSET_TABLE_REGNUM
;
8788 /* Similar, for FP regs. */
8791 first_fp_reg_to_save ()
8795 /* Find lowest numbered live register. */
8796 for (first_reg
= 14 + 32; first_reg
<= 63; first_reg
++)
8797 if (regs_ever_live
[first_reg
])
8803 /* Similar, for AltiVec regs. */
8806 first_altivec_reg_to_save ()
8810 /* Stack frame remains as is unless we are in AltiVec ABI. */
8811 if (! TARGET_ALTIVEC_ABI
)
8812 return LAST_ALTIVEC_REGNO
+ 1;
8814 /* Find lowest numbered live register. */
8815 for (i
= FIRST_ALTIVEC_REGNO
+ 20; i
<= LAST_ALTIVEC_REGNO
; ++i
)
8816 if (regs_ever_live
[i
])
8822 /* Return a 32-bit mask of the AltiVec registers we need to set in
8823 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
8824 the 32-bit word is 0. */
8827 compute_vrsave_mask ()
8829 unsigned int i
, mask
= 0;
8831 /* First, find out if we use _any_ altivec registers. */
8832 for (i
= FIRST_ALTIVEC_REGNO
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
8833 if (regs_ever_live
[i
])
8834 mask
|= ALTIVEC_REG_BIT (i
);
8839 /* Next, add all registers that are call-clobbered. We do this
8840 because post-reload register optimizers such as regrename_optimize
8841 may choose to use them. They never change the register class
8842 chosen by reload, so cannot create new uses of altivec registers
8843 if there were none before, so the early exit above is safe. */
8844 /* ??? Alternately, we could define HARD_REGNO_RENAME_OK to disallow
8845 altivec registers not saved in the mask, which might well make the
8846 adjustments below more effective in eliding the save/restore of
8847 VRSAVE in small functions. */
8848 for (i
= FIRST_ALTIVEC_REGNO
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
8849 if (call_used_regs
[i
])
8850 mask
|= ALTIVEC_REG_BIT (i
);
8852 /* Next, remove the argument registers from the set. These must
8853 be in the VRSAVE mask set by the caller, so we don't need to add
8854 them in again. More importantly, the mask we compute here is
8855 used to generate CLOBBERs in the set_vrsave insn, and we do not
8856 wish the argument registers to die. */
8857 for (i
= cfun
->args_info
.vregno
; i
>= ALTIVEC_ARG_MIN_REG
; --i
)
8858 mask
&= ~ALTIVEC_REG_BIT (i
);
8860 /* Similarly, remove the return value from the set. */
8863 diddle_return_value (is_altivec_return_reg
, &yes
);
8865 mask
&= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN
);
8872 is_altivec_return_reg (reg
, xyes
)
8876 bool *yes
= (bool *) xyes
;
8877 if (REGNO (reg
) == ALTIVEC_ARG_RETURN
)
8882 /* Calculate the stack information for the current function. This is
8883 complicated by having two separate calling sequences, the AIX calling
8884 sequence and the V.4 calling sequence.
8886 AIX (and Darwin/Mac OS X) stack frames look like:
8888 SP----> +---------------------------------------+
8889 | back chain to caller | 0 0
8890 +---------------------------------------+
8891 | saved CR | 4 8 (8-11)
8892 +---------------------------------------+
8894 +---------------------------------------+
8895 | reserved for compilers | 12 24
8896 +---------------------------------------+
8897 | reserved for binders | 16 32
8898 +---------------------------------------+
8899 | saved TOC pointer | 20 40
8900 +---------------------------------------+
8901 | Parameter save area (P) | 24 48
8902 +---------------------------------------+
8903 | Alloca space (A) | 24+P etc.
8904 +---------------------------------------+
8905 | Local variable space (L) | 24+P+A
8906 +---------------------------------------+
8907 | Float/int conversion temporary (X) | 24+P+A+L
8908 +---------------------------------------+
8909 | Save area for AltiVec registers (W) | 24+P+A+L+X
8910 +---------------------------------------+
8911 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
8912 +---------------------------------------+
8913 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
8914 +---------------------------------------+
8915 | Save area for GP registers (G) | 24+P+A+L+X+W+Y+Z
+---------------------------------------+
8917 | Save area for FP registers (F) | 24+P+A+L+X+W+Y+Z+G
+---------------------------------------+
8919 old SP->| back chain to caller's caller |
8920 +---------------------------------------+
8922 The required alignment for AIX configurations is two words (i.e., 8
8926 V.4 stack frames look like:
8928 SP----> +---------------------------------------+
8929 | back chain to caller | 0
8930 +---------------------------------------+
8931 | caller's saved LR | 4
8932 +---------------------------------------+
8933 | Parameter save area (P) | 8
8934 +---------------------------------------+
8935 | Alloca space (A) | 8+P
8936 +---------------------------------------+
8937 | Varargs save area (V) | 8+P+A
8938 +---------------------------------------+
8939 | Local variable space (L) | 8+P+A+V
8940 +---------------------------------------+
8941 | Float/int conversion temporary (X) | 8+P+A+V+L
8942 +---------------------------------------+
8943 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
8944 +---------------------------------------+
8945 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
8946 +---------------------------------------+
8947 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
8948 +---------------------------------------+
8949 | SPE: area for 64-bit GP registers |
8950 +---------------------------------------+
8951 | SPE alignment padding |
8952 +---------------------------------------+
8953 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
8954 +---------------------------------------+
8955 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
8956 +---------------------------------------+
8957 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
8958 +---------------------------------------+
8959 old SP->| back chain to caller's caller |
8960 +---------------------------------------+
8962 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
8963 given. (But note below and in sysv4.h that we require only 8 and
8964 may round up the size of our stack frame anyways. The historical
8965 reason is early versions of powerpc-linux which didn't properly
8966 align the stack at program startup. A happy side-effect is that
8967 -mno-eabi libraries can be used with -meabi programs.)
8969 The EABI configuration defaults to the V.4 layout, unless
8970 -mcall-aix is used, in which case the AIX layout is used. However,
8971 the stack alignment requirements may differ. If -mno-eabi is not
8972 given, the required stack alignment is 8 bytes; if -mno-eabi is
8973 given, the required alignment is 16 bytes. (But see V.4 comment
8976 #ifndef ABI_STACK_BOUNDARY
8977 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
8981 rs6000_stack_info ()
8983 static rs6000_stack_t info
, zero_info
;
8984 rs6000_stack_t
*info_ptr
= &info
;
8985 int reg_size
= TARGET_POWERPC64
? 8 : 4;
8986 enum rs6000_abi abi
;
8990 /* Zero all fields portably. */
8993 /* Select which calling sequence. */
8994 info_ptr
->abi
= abi
= DEFAULT_ABI
;
8996 /* Calculate which registers need to be saved & save area size. */
8997 info_ptr
->first_gp_reg_save
= first_reg_to_save ();
8998 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
8999 even if it currently looks like we won't. */
9000 if (((TARGET_TOC
&& TARGET_MINIMAL_TOC
)
9001 || (flag_pic
== 1 && abi
== ABI_V4
)
9002 || (flag_pic
&& abi
== ABI_DARWIN
))
9003 && info_ptr
->first_gp_reg_save
> RS6000_PIC_OFFSET_TABLE_REGNUM
)
9004 info_ptr
->gp_size
= reg_size
* (32 - RS6000_PIC_OFFSET_TABLE_REGNUM
);
9006 info_ptr
->gp_size
= reg_size
* (32 - info_ptr
->first_gp_reg_save
);
9008 /* For the SPE, we have an additional upper 32-bits on each GPR.
9009 Ideally we should save the entire 64-bits only when the upper
9010 half is used in SIMD instructions. Since we only record
9011 registers live (not the size they are used in), this proves
9012 difficult because we'd have to traverse the instruction chain at
9013 the right time, taking reload into account. This is a real pain,
9014 so we opt to save the GPRs in 64-bits always. Anyone overly
9015 concerned with frame size can fix this. ;-).
9017 So... since we save all GPRs (except the SP) in 64-bits, the
9018 traditional GP save area will be empty. */
9020 info_ptr
->gp_size
= 0;
9022 info_ptr
->first_fp_reg_save
= first_fp_reg_to_save ();
9023 info_ptr
->fp_size
= 8 * (64 - info_ptr
->first_fp_reg_save
);
9025 info_ptr
->first_altivec_reg_save
= first_altivec_reg_to_save ();
9026 info_ptr
->altivec_size
= 16 * (LAST_ALTIVEC_REGNO
+ 1
9027 - info_ptr
->first_altivec_reg_save
);
9029 /* Does this function call anything? */
9030 info_ptr
->calls_p
= (! current_function_is_leaf
9031 || cfun
->machine
->ra_needs_full_frame
);
9033 /* Determine if we need to save the link register. */
9034 if (rs6000_ra_ever_killed ()
9035 || (DEFAULT_ABI
== ABI_AIX
&& current_function_profile
)
9036 #ifdef TARGET_RELOCATABLE
9037 || (TARGET_RELOCATABLE
&& (get_pool_size () != 0))
9039 || (info_ptr
->first_fp_reg_save
!= 64
9040 && !FP_SAVE_INLINE (info_ptr
->first_fp_reg_save
))
9041 || info_ptr
->first_altivec_reg_save
<= LAST_ALTIVEC_REGNO
9042 || (abi
== ABI_V4
&& current_function_calls_alloca
)
9043 || (DEFAULT_ABI
== ABI_DARWIN
9045 && current_function_uses_pic_offset_table
)
9046 || info_ptr
->calls_p
)
9048 info_ptr
->lr_save_p
= 1;
9049 regs_ever_live
[LINK_REGISTER_REGNUM
] = 1;
9052 /* Determine if we need to save the condition code registers. */
9053 if (regs_ever_live
[CR2_REGNO
]
9054 || regs_ever_live
[CR3_REGNO
]
9055 || regs_ever_live
[CR4_REGNO
])
9057 info_ptr
->cr_save_p
= 1;
9059 info_ptr
->cr_size
= reg_size
;
9062 /* If the current function calls __builtin_eh_return, then we need
9063 to allocate stack space for registers that will hold data for
9064 the exception handler. */
9065 if (current_function_calls_eh_return
)
9068 for (i
= 0; EH_RETURN_DATA_REGNO (i
) != INVALID_REGNUM
; ++i
)
9071 /* SPE saves EH registers in 64-bits. */
9072 ehrd_size
= i
* (TARGET_SPE_ABI
? UNITS_PER_SPE_WORD
: UNITS_PER_WORD
);
9077 /* Determine various sizes. */
9078 info_ptr
->reg_size
= reg_size
;
9079 info_ptr
->fixed_size
= RS6000_SAVE_AREA
;
9080 info_ptr
->varargs_size
= RS6000_VARARGS_AREA
;
9081 info_ptr
->vars_size
= RS6000_ALIGN (get_frame_size (), 8);
9082 info_ptr
->parm_size
= RS6000_ALIGN (current_function_outgoing_args_size
,
9086 info_ptr
->spe_gp_size
= 8 * (32 - info_ptr
->first_gp_reg_save
);
9088 info_ptr
->spe_gp_size
= 0;
9090 if (TARGET_ALTIVEC_ABI
&& TARGET_ALTIVEC_VRSAVE
)
9092 info_ptr
->vrsave_mask
= compute_vrsave_mask ();
9093 info_ptr
->vrsave_size
= info_ptr
->vrsave_mask
? 4 : 0;
9097 info_ptr
->vrsave_mask
= 0;
9098 info_ptr
->vrsave_size
= 0;
9101 /* Calculate the offsets. */
9109 case ABI_AIX_NODESC
:
9111 info_ptr
->fp_save_offset
= - info_ptr
->fp_size
;
9112 info_ptr
->gp_save_offset
= info_ptr
->fp_save_offset
- info_ptr
->gp_size
;
9114 if (TARGET_ALTIVEC_ABI
)
9116 info_ptr
->vrsave_save_offset
9117 = info_ptr
->gp_save_offset
- info_ptr
->vrsave_size
;
9119 /* Align stack so vector save area is on a quadword boundary. */
9120 if (info_ptr
->altivec_size
!= 0)
9121 info_ptr
->altivec_padding_size
9122 = 16 - (-info_ptr
->vrsave_save_offset
% 16);
9124 info_ptr
->altivec_padding_size
= 0;
9126 info_ptr
->altivec_save_offset
9127 = info_ptr
->vrsave_save_offset
9128 - info_ptr
->altivec_padding_size
9129 - info_ptr
->altivec_size
;
9131 /* Adjust for AltiVec case. */
9132 info_ptr
->ehrd_offset
= info_ptr
->altivec_save_offset
- ehrd_size
;
9135 info_ptr
->ehrd_offset
= info_ptr
->gp_save_offset
- ehrd_size
;
9136 info_ptr
->cr_save_offset
= reg_size
; /* first word when 64-bit. */
9137 info_ptr
->lr_save_offset
= 2*reg_size
;
9141 info_ptr
->fp_save_offset
= - info_ptr
->fp_size
;
9142 info_ptr
->gp_save_offset
= info_ptr
->fp_save_offset
- info_ptr
->gp_size
;
9143 info_ptr
->cr_save_offset
= info_ptr
->gp_save_offset
- info_ptr
->cr_size
;
9147 /* Align stack so SPE GPR save area is aligned on a
9148 double-word boundary. */
9149 if (info_ptr
->spe_gp_size
!= 0)
9150 info_ptr
->spe_padding_size
9151 = 8 - (-info_ptr
->cr_save_offset
% 8);
9153 info_ptr
->spe_padding_size
= 0;
9155 info_ptr
->spe_gp_save_offset
9156 = info_ptr
->cr_save_offset
9157 - info_ptr
->spe_padding_size
9158 - info_ptr
->spe_gp_size
;
9160 /* Adjust for SPE case. */
9161 info_ptr
->toc_save_offset
9162 = info_ptr
->spe_gp_save_offset
- info_ptr
->toc_size
;
9164 else if (TARGET_ALTIVEC_ABI
)
9166 info_ptr
->vrsave_save_offset
9167 = info_ptr
->cr_save_offset
- info_ptr
->vrsave_size
;
9169 /* Align stack so vector save area is on a quadword boundary. */
9170 if (info_ptr
->altivec_size
!= 0)
9171 info_ptr
->altivec_padding_size
9172 = 16 - (-info_ptr
->vrsave_save_offset
% 16);
9174 info_ptr
->altivec_padding_size
= 0;
9176 info_ptr
->altivec_save_offset
9177 = info_ptr
->vrsave_save_offset
9178 - info_ptr
->altivec_padding_size
9179 - info_ptr
->altivec_size
;
9181 /* Adjust for AltiVec case. */
9182 info_ptr
->toc_save_offset
9183 = info_ptr
->altivec_save_offset
- info_ptr
->toc_size
;
9186 info_ptr
->toc_save_offset
= info_ptr
->cr_save_offset
- info_ptr
->toc_size
;
9187 info_ptr
->ehrd_offset
= info_ptr
->toc_save_offset
- ehrd_size
;
9188 info_ptr
->lr_save_offset
= reg_size
;
9192 info_ptr
->save_size
= RS6000_ALIGN (info_ptr
->fp_size
9194 + info_ptr
->altivec_size
9195 + info_ptr
->altivec_padding_size
9196 + info_ptr
->vrsave_size
9197 + info_ptr
->spe_gp_size
9198 + info_ptr
->spe_padding_size
9202 + info_ptr
->vrsave_size
9203 + info_ptr
->toc_size
,
9204 (TARGET_ALTIVEC_ABI
|| ABI_DARWIN
)
9207 total_raw_size
= (info_ptr
->vars_size
9208 + info_ptr
->parm_size
9209 + info_ptr
->save_size
9210 + info_ptr
->varargs_size
9211 + info_ptr
->fixed_size
);
9213 info_ptr
->total_size
=
9214 RS6000_ALIGN (total_raw_size
, ABI_STACK_BOUNDARY
/ BITS_PER_UNIT
);
9216 /* Determine if we need to allocate any stack frame:
9218 For AIX we need to push the stack if a frame pointer is needed
9219 (because the stack might be dynamically adjusted), if we are
9220 debugging, if we make calls, or if the sum of fp_save, gp_save,
9221 and local variables are more than the space needed to save all
9222 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
9223 + 18*8 = 288 (GPR13 reserved).
9225 For V.4 we don't have the stack cushion that AIX uses, but assume
9226 that the debugger can handle stackless frames. */
9228 if (info_ptr
->calls_p
)
9229 info_ptr
->push_p
= 1;
9231 else if (abi
== ABI_V4
)
9232 info_ptr
->push_p
= total_raw_size
> info_ptr
->fixed_size
;
9235 info_ptr
->push_p
= (frame_pointer_needed
9236 || (abi
!= ABI_DARWIN
&& write_symbols
!= NO_DEBUG
)
9237 || ((total_raw_size
- info_ptr
->fixed_size
)
9238 > (TARGET_32BIT
? 220 : 288)));
9240 /* Zero offsets if we're not saving those registers. */
9241 if (info_ptr
->fp_size
== 0)
9242 info_ptr
->fp_save_offset
= 0;
9244 if (info_ptr
->gp_size
== 0)
9245 info_ptr
->gp_save_offset
= 0;
9247 if (! TARGET_ALTIVEC_ABI
|| info_ptr
->altivec_size
== 0)
9248 info_ptr
->altivec_save_offset
= 0;
9250 if (! TARGET_ALTIVEC_ABI
|| info_ptr
->vrsave_mask
== 0)
9251 info_ptr
->vrsave_save_offset
= 0;
9253 if (! TARGET_SPE_ABI
|| info_ptr
->spe_gp_size
== 0)
9254 info_ptr
->spe_gp_save_offset
= 0;
9256 if (! info_ptr
->lr_save_p
)
9257 info_ptr
->lr_save_offset
= 0;
9259 if (! info_ptr
->cr_save_p
)
9260 info_ptr
->cr_save_offset
= 0;
9262 if (! info_ptr
->toc_save_p
)
9263 info_ptr
->toc_save_offset
= 0;
9269 debug_stack_info (info
)
9270 rs6000_stack_t
*info
;
9272 const char *abi_string
;
9275 info
= rs6000_stack_info ();
9277 fprintf (stderr
, "\nStack information for function %s:\n",
9278 ((current_function_decl
&& DECL_NAME (current_function_decl
))
9279 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl
))
9284 default: abi_string
= "Unknown"; break;
9285 case ABI_NONE
: abi_string
= "NONE"; break;
9287 case ABI_AIX_NODESC
: abi_string
= "AIX"; break;
9288 case ABI_DARWIN
: abi_string
= "Darwin"; break;
9289 case ABI_V4
: abi_string
= "V.4"; break;
9292 fprintf (stderr
, "\tABI = %5s\n", abi_string
);
9294 if (TARGET_ALTIVEC_ABI
)
9295 fprintf (stderr
, "\tALTIVEC ABI extensions enabled.\n");
9298 fprintf (stderr
, "\tSPE ABI extensions enabled.\n");
9300 if (info
->first_gp_reg_save
!= 32)
9301 fprintf (stderr
, "\tfirst_gp_reg_save = %5d\n", info
->first_gp_reg_save
);
9303 if (info
->first_fp_reg_save
!= 64)
9304 fprintf (stderr
, "\tfirst_fp_reg_save = %5d\n", info
->first_fp_reg_save
);
9306 if (info
->first_altivec_reg_save
<= LAST_ALTIVEC_REGNO
)
9307 fprintf (stderr
, "\tfirst_altivec_reg_save = %5d\n",
9308 info
->first_altivec_reg_save
);
9310 if (info
->lr_save_p
)
9311 fprintf (stderr
, "\tlr_save_p = %5d\n", info
->lr_save_p
);
9313 if (info
->cr_save_p
)
9314 fprintf (stderr
, "\tcr_save_p = %5d\n", info
->cr_save_p
);
9316 if (info
->toc_save_p
)
9317 fprintf (stderr
, "\ttoc_save_p = %5d\n", info
->toc_save_p
);
9319 if (info
->vrsave_mask
)
9320 fprintf (stderr
, "\tvrsave_mask = 0x%x\n", info
->vrsave_mask
);
9323 fprintf (stderr
, "\tpush_p = %5d\n", info
->push_p
);
9326 fprintf (stderr
, "\tcalls_p = %5d\n", info
->calls_p
);
9328 if (info
->gp_save_offset
)
9329 fprintf (stderr
, "\tgp_save_offset = %5d\n", info
->gp_save_offset
);
9331 if (info
->fp_save_offset
)
9332 fprintf (stderr
, "\tfp_save_offset = %5d\n", info
->fp_save_offset
);
9334 if (info
->altivec_save_offset
)
9335 fprintf (stderr
, "\taltivec_save_offset = %5d\n",
9336 info
->altivec_save_offset
);
9338 if (info
->spe_gp_save_offset
)
9339 fprintf (stderr
, "\tspe_gp_save_offset = %5d\n",
9340 info
->spe_gp_save_offset
);
9342 if (info
->vrsave_save_offset
)
9343 fprintf (stderr
, "\tvrsave_save_offset = %5d\n",
9344 info
->vrsave_save_offset
);
9346 if (info
->lr_save_offset
)
9347 fprintf (stderr
, "\tlr_save_offset = %5d\n", info
->lr_save_offset
);
9349 if (info
->cr_save_offset
)
9350 fprintf (stderr
, "\tcr_save_offset = %5d\n", info
->cr_save_offset
);
9352 if (info
->toc_save_offset
)
9353 fprintf (stderr
, "\ttoc_save_offset = %5d\n", info
->toc_save_offset
);
9355 if (info
->varargs_save_offset
)
9356 fprintf (stderr
, "\tvarargs_save_offset = %5d\n", info
->varargs_save_offset
);
9358 if (info
->total_size
)
9359 fprintf (stderr
, "\ttotal_size = %5d\n", info
->total_size
);
9361 if (info
->varargs_size
)
9362 fprintf (stderr
, "\tvarargs_size = %5d\n", info
->varargs_size
);
9364 if (info
->vars_size
)
9365 fprintf (stderr
, "\tvars_size = %5d\n", info
->vars_size
);
9367 if (info
->parm_size
)
9368 fprintf (stderr
, "\tparm_size = %5d\n", info
->parm_size
);
9370 if (info
->fixed_size
)
9371 fprintf (stderr
, "\tfixed_size = %5d\n", info
->fixed_size
);
9374 fprintf (stderr
, "\tgp_size = %5d\n", info
->gp_size
);
9376 if (info
->spe_gp_size
)
9377 fprintf (stderr
, "\tspe_gp_size = %5d\n", info
->spe_gp_size
);
9380 fprintf (stderr
, "\tfp_size = %5d\n", info
->fp_size
);
9382 if (info
->altivec_size
)
9383 fprintf (stderr
, "\taltivec_size = %5d\n", info
->altivec_size
);
9385 if (info
->vrsave_size
)
9386 fprintf (stderr
, "\tvrsave_size = %5d\n", info
->vrsave_size
);
9388 if (info
->altivec_padding_size
)
9389 fprintf (stderr
, "\taltivec_padding_size= %5d\n",
9390 info
->altivec_padding_size
);
9392 if (info
->spe_padding_size
)
9393 fprintf (stderr
, "\tspe_padding_size = %5d\n",
9394 info
->spe_padding_size
);
9397 fprintf (stderr
, "\tlr_size = %5d\n", info
->lr_size
);
9400 fprintf (stderr
, "\tcr_size = %5d\n", info
->cr_size
);
9403 fprintf (stderr
, "\ttoc_size = %5d\n", info
->toc_size
);
9405 if (info
->save_size
)
9406 fprintf (stderr
, "\tsave_size = %5d\n", info
->save_size
);
9408 if (info
->reg_size
!= 4)
9409 fprintf (stderr
, "\treg_size = %5d\n", info
->reg_size
);
9411 fprintf (stderr
, "\n");
9415 rs6000_return_addr (count
, frame
)
9419 /* Currently we don't optimize very well between prolog and body
9420 code and for PIC code the code can be actually quite bad, so
9421 don't try to be too clever here. */
9422 if (count
!= 0 || flag_pic
!= 0)
9424 cfun
->machine
->ra_needs_full_frame
= 1;
9431 plus_constant (copy_to_reg
9432 (gen_rtx_MEM (Pmode
,
9433 memory_address (Pmode
, frame
))),
9434 RETURN_ADDRESS_OFFSET
)));
9437 return get_hard_reg_initial_val (Pmode
, LINK_REGISTER_REGNUM
);
9440 /* Say whether a function is a candidate for sibcall handling or not.
9441 We do not allow indirect calls to be optimized into sibling calls.
9442 Also, we can't do it if there are any vector parameters; there's
9443 nowhere to put the VRsave code so it works; note that functions with
9444 vector parameters are required to have a prototype, so the argument
9445 type info must be available here. (The tail recursion case can work
9446 with vector parameters, but there's no way to distinguish here.) */
9448 function_ok_for_sibcall (fndecl
)
9454 if (TARGET_ALTIVEC_VRSAVE
)
9456 for (type
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
9457 type
; type
= TREE_CHAIN (type
))
9459 if (TREE_CODE (TREE_VALUE (type
)) == VECTOR_TYPE
)
9463 if (DEFAULT_ABI
== ABI_DARWIN
9464 || (TREE_ASM_WRITTEN (fndecl
) && !flag_pic
) || !TREE_PUBLIC (fndecl
))
9470 /* function rewritten to handle sibcalls */
9472 rs6000_ra_ever_killed ()
9478 #ifdef ASM_OUTPUT_MI_THUNK
9479 if (current_function_is_thunk
)
9482 /* regs_ever_live has LR marked as used if any sibcalls
9483 are present. Which it is, but this should not force
9484 saving and restoring in the prologue/epilog. Likewise,
9485 reg_set_between_p thinks a sibcall clobbers LR, so
9486 that is inappropriate. */
9487 /* Also, the prologue can generate a store into LR that
9488 doesn't really count, like this:
9490 bcl to set PIC register
9493 When we're called from the epilog, we need to avoid counting
9494 this as a store; thus we ignore any insns with a REG_MAYBE_DEAD note. */
9496 push_topmost_sequence ();
9498 pop_topmost_sequence ();
9499 reg
= gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
);
9501 for (insn
= NEXT_INSN (top
); insn
!= NULL_RTX
; insn
= NEXT_INSN (insn
))
9505 if (FIND_REG_INC_NOTE (insn
, reg
))
9507 else if (GET_CODE (insn
) == CALL_INSN
9508 && !SIBLING_CALL_P (insn
))
9510 else if (set_of (reg
, insn
) != NULL_RTX
9511 && find_reg_note (insn
, REG_MAYBE_DEAD
, NULL_RTX
) == 0)
9518 /* Add a REG_MAYBE_DEAD note to the insn. */
9520 rs6000_maybe_dead (insn
)
9523 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD
,
9528 /* Emit instructions needed to load the TOC register.
9529 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
9530 a constant pool; or for SVR4 -fpic. */
9533 rs6000_emit_load_toc_table (fromprolog
)
9537 dest
= gen_rtx_REG (Pmode
, RS6000_PIC_OFFSET_TABLE_REGNUM
);
9539 if (TARGET_ELF
&& DEFAULT_ABI
!= ABI_AIX
)
9541 if (DEFAULT_ABI
== ABI_V4
&& flag_pic
== 1)
9543 rtx temp
= (fromprolog
9544 ? gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
)
9545 : gen_reg_rtx (Pmode
));
9546 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_pic_si (temp
)));
9547 rs6000_maybe_dead (emit_move_insn (dest
, temp
));
9549 else if (flag_pic
== 2)
9552 rtx tempLR
= (fromprolog
9553 ? gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
)
9554 : gen_reg_rtx (Pmode
));
9555 rtx temp0
= (fromprolog
9556 ? gen_rtx_REG (Pmode
, 0)
9557 : gen_reg_rtx (Pmode
));
9560 /* possibly create the toc section */
9561 if (! toc_initialized
)
9564 function_section (current_function_decl
);
9571 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCF", rs6000_pic_labelno
);
9572 symF
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
9574 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCL", rs6000_pic_labelno
);
9575 symL
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
9577 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR
,
9579 rs6000_maybe_dead (emit_move_insn (dest
, tempLR
));
9580 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0
, dest
,
9587 static int reload_toc_labelno
= 0;
9589 tocsym
= gen_rtx_SYMBOL_REF (Pmode
, toc_label_name
);
9591 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCG", reload_toc_labelno
++);
9592 symF
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
9594 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1b (tempLR
,
9597 rs6000_maybe_dead (emit_move_insn (dest
, tempLR
));
9598 rs6000_maybe_dead (emit_move_insn (temp0
,
9599 gen_rtx_MEM (Pmode
, dest
)));
9601 rs6000_maybe_dead (emit_insn (gen_addsi3 (dest
, temp0
, dest
)));
9603 else if (flag_pic
== 0 && TARGET_MINIMAL_TOC
)
9605 /* This is for AIX code running in non-PIC ELF. */
9608 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCTOC", 1);
9609 realsym
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
9611 rs6000_maybe_dead (emit_insn (gen_elf_high (dest
, realsym
)));
9612 rs6000_maybe_dead (emit_insn (gen_elf_low (dest
, dest
, realsym
)));
9620 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_si (dest
)));
9622 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_di (dest
)));
/* Return the alias set used for TOC references, creating it lazily on
   the first call.
   NOTE(review): the return type, the if (set == -1) guard and the
   return were lost in extraction and are restored here.  */
int
get_TOC_alias_set ()
{
  static int set = -1;
  if (set == -1)
    set = new_alias_set ();
  return set;
}
9635 /* This returns nonzero if the current function uses the TOC. This is
9636 determined by the presence of (unspec ... 7), which is generated by
9637 the various load_toc_* patterns. */
9644 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
9647 rtx pat
= PATTERN (insn
);
9650 if (GET_CODE (pat
) == PARALLEL
)
9651 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
9652 if (GET_CODE (XVECEXP (PATTERN (insn
), 0, i
)) == UNSPEC
9653 && XINT (XVECEXP (PATTERN (insn
), 0, i
), 1) == 7)
9660 create_TOC_reference (symbol
)
9663 return gen_rtx_PLUS (Pmode
,
9664 gen_rtx_REG (Pmode
, TOC_REGISTER
),
9665 gen_rtx_CONST (Pmode
,
9666 gen_rtx_MINUS (Pmode
, symbol
,
9667 gen_rtx_SYMBOL_REF (Pmode
, toc_label_name
))));
9671 /* __throw will restore its own return address to be the same as the
9672 return address of the function that the throw is being made to.
9673 This is unfortunate, because we want to check the original
9674 return address to see if we need to restore the TOC.
9675 So we have to squirrel it away here.
9676 This is used only in compiling __throw and __rethrow.
9678 Most of this code should be removed by CSE. */
9679 static rtx insn_after_throw
;
9681 /* This does the saving... */
9683 rs6000_aix_emit_builtin_unwind_init ()
9686 rtx stack_top
= gen_reg_rtx (Pmode
);
9687 rtx opcode_addr
= gen_reg_rtx (Pmode
);
9689 insn_after_throw
= gen_reg_rtx (SImode
);
9691 mem
= gen_rtx_MEM (Pmode
, hard_frame_pointer_rtx
);
9692 emit_move_insn (stack_top
, mem
);
9694 mem
= gen_rtx_MEM (Pmode
,
9695 gen_rtx_PLUS (Pmode
, stack_top
,
9696 GEN_INT (2 * GET_MODE_SIZE (Pmode
))));
9697 emit_move_insn (opcode_addr
, mem
);
9698 emit_move_insn (insn_after_throw
, gen_rtx_MEM (SImode
, opcode_addr
));
9701 /* Emit insns to _restore_ the TOC register, at runtime (specifically
9702 in _eh.o). Only used on AIX.
9704 The idea is that on AIX, function calls look like this:
9705 bl somefunction-trampoline
9709 somefunction-trampoline:
9711 ... load function address in the count register ...
9713 or like this, if the linker determines that this is not a cross-module call
9714 and so the TOC need not be restored:
9717 or like this, if the compiler could determine that this is not a
9720 now, the tricky bit here is that register 2 is saved and restored
9721 by the _linker_, so we can't readily generate debugging information
9722 for it. So we need to go back up the call chain looking at the
9723 insns at return addresses to see which calls saved the TOC register
9724 and so see where it gets restored from.
9726 Oh, and all this gets done in RTL inside the eh_epilogue pattern,
9727 just before the actual epilogue.
9729 On the bright side, this incurs no space or time overhead unless an
9730 exception is thrown, except for the extra code in libgcc.a.
9732 The parameter STACKSIZE is a register containing (at runtime)
9733 the amount to be popped off the stack in addition to the stack frame
9734 of this routine (which will be __throw or __rethrow, and so is
9735 guaranteed to have a stack frame). */
9738 rs6000_emit_eh_toc_restore (stacksize
)
9742 rtx bottom_of_stack
= gen_reg_rtx (Pmode
);
9743 rtx tocompare
= gen_reg_rtx (SImode
);
9744 rtx opcode
= gen_reg_rtx (SImode
);
9745 rtx opcode_addr
= gen_reg_rtx (Pmode
);
9747 rtx loop_start
= gen_label_rtx ();
9748 rtx no_toc_restore_needed
= gen_label_rtx ();
9749 rtx loop_exit
= gen_label_rtx ();
9751 mem
= gen_rtx_MEM (Pmode
, hard_frame_pointer_rtx
);
9752 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
9753 emit_move_insn (bottom_of_stack
, mem
);
9755 top_of_stack
= expand_binop (Pmode
, add_optab
,
9756 bottom_of_stack
, stacksize
,
9757 NULL_RTX
, 1, OPTAB_WIDEN
);
9759 emit_move_insn (tocompare
, gen_int_mode (TARGET_32BIT
? 0x80410014
9760 : 0xE8410028, SImode
));
9762 if (insn_after_throw
== NULL_RTX
)
9764 emit_move_insn (opcode
, insn_after_throw
);
9766 emit_note (NULL
, NOTE_INSN_LOOP_BEG
);
9767 emit_label (loop_start
);
9769 do_compare_rtx_and_jump (opcode
, tocompare
, NE
, 1,
9770 SImode
, NULL_RTX
, NULL_RTX
,
9771 no_toc_restore_needed
);
9773 mem
= gen_rtx_MEM (Pmode
,
9774 gen_rtx_PLUS (Pmode
, bottom_of_stack
,
9775 GEN_INT (5 * GET_MODE_SIZE (Pmode
))));
9776 emit_move_insn (gen_rtx_REG (Pmode
, 2), mem
);
9778 emit_label (no_toc_restore_needed
);
9779 do_compare_rtx_and_jump (top_of_stack
, bottom_of_stack
, EQ
, 1,
9780 Pmode
, NULL_RTX
, NULL_RTX
,
9783 mem
= gen_rtx_MEM (Pmode
, bottom_of_stack
);
9784 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
9785 emit_move_insn (bottom_of_stack
, mem
);
9787 mem
= gen_rtx_MEM (Pmode
,
9788 gen_rtx_PLUS (Pmode
, bottom_of_stack
,
9789 GEN_INT (2 * GET_MODE_SIZE (Pmode
))));
9790 emit_move_insn (opcode_addr
, mem
);
9791 emit_move_insn (opcode
, gen_rtx_MEM (SImode
, opcode_addr
));
9793 emit_note (NULL
, NOTE_INSN_LOOP_CONT
);
9794 emit_jump (loop_start
);
9795 emit_note (NULL
, NOTE_INSN_LOOP_END
);
9796 emit_label (loop_exit
);
9798 #endif /* TARGET_AIX */
9800 /* This ties together stack memory (MEM with an alias set of
9801 rs6000_sr_alias_set) and the change to the stack pointer. */
9804 rs6000_emit_stack_tie ()
9806 rtx mem
= gen_rtx_MEM (BLKmode
, gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
));
9808 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
9809 emit_insn (gen_stack_tie (mem
));
9812 /* Emit the correct code for allocating stack space, as insns.
9813 If COPY_R12, make sure a copy of the old frame is left in r12.
9814 The generated code may use hard register 0 as a temporary. */
9817 rs6000_emit_allocate_stack (size
, copy_r12
)
9822 rtx stack_reg
= gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
);
9823 rtx tmp_reg
= gen_rtx_REG (Pmode
, 0);
9824 rtx todec
= GEN_INT (-size
);
9826 if (current_function_limit_stack
)
9828 if (REG_P (stack_limit_rtx
)
9829 && REGNO (stack_limit_rtx
) > 1
9830 && REGNO (stack_limit_rtx
) <= 31)
9832 emit_insn (Pmode
== SImode
9833 ? gen_addsi3 (tmp_reg
,
9836 : gen_adddi3 (tmp_reg
,
9840 emit_insn (gen_cond_trap (LTU
, stack_reg
, tmp_reg
,
9843 else if (GET_CODE (stack_limit_rtx
) == SYMBOL_REF
9845 && DEFAULT_ABI
== ABI_V4
)
9847 rtx toload
= gen_rtx_CONST (VOIDmode
,
9848 gen_rtx_PLUS (Pmode
,
9852 emit_insn (gen_elf_high (tmp_reg
, toload
));
9853 emit_insn (gen_elf_low (tmp_reg
, tmp_reg
, toload
));
9854 emit_insn (gen_cond_trap (LTU
, stack_reg
, tmp_reg
,
9858 warning ("stack limit expression is not supported");
9861 if (copy_r12
|| ! TARGET_UPDATE
)
9862 emit_move_insn (gen_rtx_REG (Pmode
, 12), stack_reg
);
9868 /* Need a note here so that try_split doesn't get confused. */
9869 if (get_last_insn() == NULL_RTX
)
9870 emit_note (0, NOTE_INSN_DELETED
);
9871 insn
= emit_move_insn (tmp_reg
, todec
);
9872 try_split (PATTERN (insn
), insn
, 0);
9876 if (Pmode
== SImode
)
9877 insn
= emit_insn (gen_movsi_update (stack_reg
, stack_reg
,
9880 insn
= emit_insn (gen_movdi_update (stack_reg
, stack_reg
,
9885 if (Pmode
== SImode
)
9886 insn
= emit_insn (gen_addsi3 (stack_reg
, stack_reg
, todec
));
9888 insn
= emit_insn (gen_adddi3 (stack_reg
, stack_reg
, todec
));
9889 emit_move_insn (gen_rtx_MEM (Pmode
, stack_reg
),
9890 gen_rtx_REG (Pmode
, 12));
9893 RTX_FRAME_RELATED_P (insn
) = 1;
9895 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
9896 gen_rtx_SET (VOIDmode
, stack_reg
,
9897 gen_rtx_PLUS (Pmode
, stack_reg
,
9902 /* Add a RTX_FRAME_RELATED note so that dwarf2out_frame_debug_expr
9905 (mem (plus (blah) (regXX)))
9909 (mem (plus (blah) (const VALUE_OF_REGXX))). */
9912 altivec_frame_fixup (insn
, reg
, val
)
9918 real
= copy_rtx (PATTERN (insn
));
9920 real
= replace_rtx (real
, reg
, GEN_INT (val
));
9922 RTX_FRAME_RELATED_P (insn
) = 1;
9923 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
9928 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
9929 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
9930 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
9931 deduce these equivalences by itself so it wasn't necessary to hold
9932 its hand so much. */
9935 rs6000_frame_related (insn
, reg
, val
, reg2
, rreg
)
9944 /* copy_rtx will not make unique copies of registers, so we need to
9945 ensure we don't have unwanted sharing here. */
9947 reg
= gen_raw_REG (GET_MODE (reg
), REGNO (reg
));
9950 reg
= gen_raw_REG (GET_MODE (reg
), REGNO (reg
));
9952 real
= copy_rtx (PATTERN (insn
));
9954 if (reg2
!= NULL_RTX
)
9955 real
= replace_rtx (real
, reg2
, rreg
);
9957 real
= replace_rtx (real
, reg
,
9958 gen_rtx_PLUS (Pmode
, gen_rtx_REG (Pmode
,
9959 STACK_POINTER_REGNUM
),
9962 /* We expect that 'real' is either a SET or a PARALLEL containing
9963 SETs (and possibly other stuff). In a PARALLEL, all the SETs
9964 are important so they all have to be marked RTX_FRAME_RELATED_P. */
9966 if (GET_CODE (real
) == SET
)
9970 temp
= simplify_rtx (SET_SRC (set
));
9972 SET_SRC (set
) = temp
;
9973 temp
= simplify_rtx (SET_DEST (set
));
9975 SET_DEST (set
) = temp
;
9976 if (GET_CODE (SET_DEST (set
)) == MEM
)
9978 temp
= simplify_rtx (XEXP (SET_DEST (set
), 0));
9980 XEXP (SET_DEST (set
), 0) = temp
;
9983 else if (GET_CODE (real
) == PARALLEL
)
9986 for (i
= 0; i
< XVECLEN (real
, 0); i
++)
9987 if (GET_CODE (XVECEXP (real
, 0, i
)) == SET
)
9989 rtx set
= XVECEXP (real
, 0, i
);
9991 temp
= simplify_rtx (SET_SRC (set
));
9993 SET_SRC (set
) = temp
;
9994 temp
= simplify_rtx (SET_DEST (set
));
9996 SET_DEST (set
) = temp
;
9997 if (GET_CODE (SET_DEST (set
)) == MEM
)
9999 temp
= simplify_rtx (XEXP (SET_DEST (set
), 0));
10001 XEXP (SET_DEST (set
), 0) = temp
;
10003 RTX_FRAME_RELATED_P (set
) = 1;
10009 RTX_FRAME_RELATED_P (insn
) = 1;
10010 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
10015 /* Returns an insn that has a vrsave set operation with the
10016 appropriate CLOBBERs. */
10019 generate_set_vrsave (reg
, info
, epiloguep
)
10021 rs6000_stack_t
*info
;
10025 rtx insn
, clobs
[TOTAL_ALTIVEC_REGS
+ 1];
10026 rtx vrsave
= gen_rtx_REG (SImode
, VRSAVE_REGNO
);
10029 = gen_rtx_SET (VOIDmode
,
10031 gen_rtx_UNSPEC_VOLATILE (SImode
,
10032 gen_rtvec (2, reg
, vrsave
),
10037 /* We need to clobber the registers in the mask so the scheduler
10038 does not move sets to VRSAVE before sets of AltiVec registers.
10040 However, if the function receives nonlocal gotos, reload will set
10041 all call saved registers live. We will end up with:
10043 (set (reg 999) (mem))
10044 (parallel [ (set (reg vrsave) (unspec blah))
10045 (clobber (reg 999))])
10047 The clobber will cause the store into reg 999 to be dead, and
10048 flow will attempt to delete an epilogue insn. In this case, we
10049 need an unspec use/set of the register. */
10051 for (i
= FIRST_ALTIVEC_REGNO
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
10052 if (info
->vrsave_mask
!= 0 && ALTIVEC_REG_BIT (i
) != 0)
10054 if (!epiloguep
|| call_used_regs
[i
])
10055 clobs
[nclobs
++] = gen_rtx_CLOBBER (VOIDmode
,
10056 gen_rtx_REG (V4SImode
, i
));
10059 rtx reg
= gen_rtx_REG (V4SImode
, i
);
10062 = gen_rtx_SET (VOIDmode
,
10064 gen_rtx_UNSPEC (V4SImode
,
10065 gen_rtvec (1, reg
), 27));
10069 insn
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (nclobs
));
10071 for (i
= 0; i
< nclobs
; ++i
)
10072 XVECEXP (insn
, 0, i
) = clobs
[i
];
10077 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
10078 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
10081 emit_frame_save (frame_reg
, frame_ptr
, mode
, regno
, offset
, total_size
)
10084 enum machine_mode mode
;
10085 unsigned int regno
;
10089 rtx reg
, offset_rtx
, insn
, mem
, addr
, int_rtx
;
10090 rtx replacea
, replaceb
;
10092 int_rtx
= GEN_INT (offset
);
10094 /* Some cases that need register indexed addressing. */
10095 if ((TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
10097 && SPE_VECTOR_MODE (mode
)
10098 && !SPE_CONST_OFFSET_OK (offset
)))
10100 /* Whomever calls us must make sure r11 is available in the
10101 flow path of instructions in the prologue. */
10102 offset_rtx
= gen_rtx_REG (Pmode
, 11);
10103 emit_move_insn (offset_rtx
, int_rtx
);
10105 replacea
= offset_rtx
;
10106 replaceb
= int_rtx
;
10110 offset_rtx
= int_rtx
;
10111 replacea
= NULL_RTX
;
10112 replaceb
= NULL_RTX
;
10115 reg
= gen_rtx_REG (mode
, regno
);
10116 addr
= gen_rtx_PLUS (Pmode
, frame_reg
, offset_rtx
);
10117 mem
= gen_rtx_MEM (mode
, addr
);
10118 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10120 insn
= emit_move_insn (mem
, reg
);
10122 rs6000_frame_related (insn
, frame_ptr
, total_size
, replacea
, replaceb
);
10125 /* Emit an offset memory reference suitable for a frame store, while
10126 converting to a valid addressing mode. */
10129 gen_frame_mem_offset (mode
, reg
, offset
)
10130 enum machine_mode mode
;
10134 rtx int_rtx
, offset_rtx
;
10136 int_rtx
= GEN_INT (offset
);
10138 if (TARGET_SPE_ABI
&& SPE_VECTOR_MODE (mode
))
10140 offset_rtx
= gen_rtx_REG (Pmode
, FIXED_SCRATCH
);
10141 emit_move_insn (offset_rtx
, int_rtx
);
10144 offset_rtx
= int_rtx
;
10146 return gen_rtx_MEM (mode
, gen_rtx_PLUS (Pmode
, reg
, offset_rtx
));
10149 /* Emit function prologue as insns. */
10152 rs6000_emit_prologue ()
10154 rs6000_stack_t
*info
= rs6000_stack_info ();
10155 enum machine_mode reg_mode
= TARGET_POWERPC64
? DImode
: SImode
;
10156 int reg_size
= TARGET_POWERPC64
? 8 : 4;
10157 rtx sp_reg_rtx
= gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
);
10158 rtx frame_ptr_rtx
= gen_rtx_REG (Pmode
, 12);
10159 rtx frame_reg_rtx
= sp_reg_rtx
;
10160 rtx cr_save_rtx
= NULL
;
10162 int saving_FPRs_inline
;
10163 int using_store_multiple
;
10164 HOST_WIDE_INT sp_offset
= 0;
10166 if (TARGET_SPE_ABI
)
10168 reg_mode
= V2SImode
;
10172 using_store_multiple
= (TARGET_MULTIPLE
&& ! TARGET_POWERPC64
10174 && info
->first_gp_reg_save
< 31);
10175 saving_FPRs_inline
= (info
->first_fp_reg_save
== 64
10176 || FP_SAVE_INLINE (info
->first_fp_reg_save
));
10178 /* For V.4, update stack before we do any saving and set back pointer. */
10179 if (info
->push_p
&& DEFAULT_ABI
== ABI_V4
)
10181 if (info
->total_size
< 32767)
10182 sp_offset
= info
->total_size
;
10184 frame_reg_rtx
= frame_ptr_rtx
;
10185 rs6000_emit_allocate_stack (info
->total_size
,
10186 (frame_reg_rtx
!= sp_reg_rtx
10187 && (info
->cr_save_p
10189 || info
->first_fp_reg_save
< 64
10190 || info
->first_gp_reg_save
< 32
10192 if (frame_reg_rtx
!= sp_reg_rtx
)
10193 rs6000_emit_stack_tie ();
10196 /* Save AltiVec registers if needed. */
10197 if (TARGET_ALTIVEC_ABI
&& info
->altivec_size
!= 0)
10201 /* There should be a non inline version of this, for when we
10202 are saving lots of vector registers. */
10203 for (i
= info
->first_altivec_reg_save
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
10204 if (info
->vrsave_mask
& ALTIVEC_REG_BIT (i
))
10206 rtx areg
, savereg
, mem
;
10209 offset
= info
->altivec_save_offset
+ sp_offset
10210 + 16 * (i
- info
->first_altivec_reg_save
);
10212 savereg
= gen_rtx_REG (V4SImode
, i
);
10214 areg
= gen_rtx_REG (Pmode
, 0);
10215 emit_move_insn (areg
, GEN_INT (offset
));
10217 /* AltiVec addressing mode is [reg+reg]. */
10218 mem
= gen_rtx_MEM (V4SImode
,
10219 gen_rtx_PLUS (Pmode
, frame_reg_rtx
, areg
));
10221 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10223 insn
= emit_move_insn (mem
, savereg
);
10225 altivec_frame_fixup (insn
, areg
, offset
);
10229 /* VRSAVE is a bit vector representing which AltiVec registers
10230 are used. The OS uses this to determine which vector
10231 registers to save on a context switch. We need to save
10232 VRSAVE on the stack frame, add whatever AltiVec registers we
10233 used in this function, and do the corresponding magic in the
10236 if (TARGET_ALTIVEC
&& info
->vrsave_mask
!= 0)
10238 rtx reg
, mem
, vrsave
;
10241 /* Get VRSAVE onto a GPR. */
10242 reg
= gen_rtx_REG (SImode
, 12);
10243 vrsave
= gen_rtx_REG (SImode
, VRSAVE_REGNO
);
10245 emit_insn (gen_get_vrsave_internal (reg
));
10247 emit_insn (gen_rtx_SET (VOIDmode
, reg
, vrsave
));
10250 offset
= info
->vrsave_save_offset
+ sp_offset
;
10252 = gen_rtx_MEM (SImode
,
10253 gen_rtx_PLUS (Pmode
, frame_reg_rtx
, GEN_INT (offset
)));
10254 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10255 insn
= emit_move_insn (mem
, reg
);
10257 /* Include the registers in the mask. */
10258 emit_insn (gen_iorsi3 (reg
, reg
, GEN_INT ((int) info
->vrsave_mask
)));
10260 insn
= emit_insn (generate_set_vrsave (reg
, info
, 0));
10263 /* If we use the link register, get it into r0. */
10264 if (info
->lr_save_p
)
10265 emit_move_insn (gen_rtx_REG (Pmode
, 0),
10266 gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
));
10268 /* If we need to save CR, put it into r12. */
10269 if (info
->cr_save_p
&& frame_reg_rtx
!= frame_ptr_rtx
)
10271 cr_save_rtx
= gen_rtx_REG (SImode
, 12);
10272 emit_insn (gen_movesi_from_cr (cr_save_rtx
));
10275 /* Do any required saving of fpr's. If only one or two to save, do
10276 it ourselves. Otherwise, call function. */
10277 if (saving_FPRs_inline
)
10280 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
10281 if ((regs_ever_live
[info
->first_fp_reg_save
+i
]
10282 && ! call_used_regs
[info
->first_fp_reg_save
+i
]))
10283 emit_frame_save (frame_reg_rtx
, frame_ptr_rtx
, DFmode
,
10284 info
->first_fp_reg_save
+ i
,
10285 info
->fp_save_offset
+ sp_offset
+ 8 * i
,
10288 else if (info
->first_fp_reg_save
!= 64)
10292 const char *alloc_rname
;
10294 p
= rtvec_alloc (2 + 64 - info
->first_fp_reg_save
);
10296 RTVEC_ELT (p
, 0) = gen_rtx_CLOBBER (VOIDmode
,
10297 gen_rtx_REG (Pmode
,
10298 LINK_REGISTER_REGNUM
));
10299 sprintf (rname
, "%s%d%s", SAVE_FP_PREFIX
,
10300 info
->first_fp_reg_save
- 32, SAVE_FP_SUFFIX
);
10301 alloc_rname
= ggc_strdup (rname
);
10302 RTVEC_ELT (p
, 1) = gen_rtx_USE (VOIDmode
,
10303 gen_rtx_SYMBOL_REF (Pmode
,
10305 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
10307 rtx addr
, reg
, mem
;
10308 reg
= gen_rtx_REG (DFmode
, info
->first_fp_reg_save
+ i
);
10309 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10310 GEN_INT (info
->fp_save_offset
10311 + sp_offset
+ 8*i
));
10312 mem
= gen_rtx_MEM (DFmode
, addr
);
10313 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10315 RTVEC_ELT (p
, i
+ 2) = gen_rtx_SET (VOIDmode
, mem
, reg
);
10317 insn
= emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
10318 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10319 NULL_RTX
, NULL_RTX
);
10322 /* Save GPRs. This is done as a PARALLEL if we are using
10323 the store-multiple instructions. */
10324 if (using_store_multiple
)
10328 p
= rtvec_alloc (32 - info
->first_gp_reg_save
);
10329 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
10331 rtx addr
, reg
, mem
;
10332 reg
= gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
);
10333 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10334 GEN_INT (info
->gp_save_offset
10337 mem
= gen_rtx_MEM (reg_mode
, addr
);
10338 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10340 RTVEC_ELT (p
, i
) = gen_rtx_SET (VOIDmode
, mem
, reg
);
10342 insn
= emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
10343 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10344 NULL_RTX
, NULL_RTX
);
10349 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
10350 if ((regs_ever_live
[info
->first_gp_reg_save
+i
]
10351 && ! call_used_regs
[info
->first_gp_reg_save
+i
])
10352 || (i
+info
->first_gp_reg_save
== RS6000_PIC_OFFSET_TABLE_REGNUM
10353 && ((DEFAULT_ABI
== ABI_V4
&& flag_pic
== 1)
10354 || (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
))))
10356 rtx addr
, reg
, mem
;
10357 reg
= gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
);
10359 if (TARGET_SPE_ABI
)
10361 int offset
= info
->spe_gp_save_offset
+ sp_offset
+ 8 * i
;
10364 if (!SPE_CONST_OFFSET_OK (offset
))
10366 b
= gen_rtx_REG (Pmode
, FIXED_SCRATCH
);
10367 emit_move_insn (b
, GEN_INT (offset
));
10370 b
= GEN_INT (offset
);
10372 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
, b
);
10373 mem
= gen_rtx_MEM (V2SImode
, addr
);
10374 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10375 insn
= emit_move_insn (mem
, reg
);
10377 if (GET_CODE (b
) == CONST_INT
)
10378 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10379 NULL_RTX
, NULL_RTX
);
10381 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10382 b
, GEN_INT (offset
));
10386 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10387 GEN_INT (info
->gp_save_offset
10390 mem
= gen_rtx_MEM (reg_mode
, addr
);
10391 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10393 insn
= emit_move_insn (mem
, reg
);
10394 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10395 NULL_RTX
, NULL_RTX
);
10400 /* ??? There's no need to emit actual instructions here, but it's the
10401 easiest way to get the frame unwind information emitted. */
10402 if (current_function_calls_eh_return
)
10404 unsigned int i
, regno
;
10408 regno
= EH_RETURN_DATA_REGNO (i
);
10409 if (regno
== INVALID_REGNUM
)
10412 emit_frame_save (frame_reg_rtx
, frame_ptr_rtx
, reg_mode
, regno
,
10413 info
->ehrd_offset
+ sp_offset
10414 + reg_size
* (int) i
,
10419 /* Save lr if we used it. */
10420 if (info
->lr_save_p
)
10422 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10423 GEN_INT (info
->lr_save_offset
+ sp_offset
));
10424 rtx reg
= gen_rtx_REG (Pmode
, 0);
10425 rtx mem
= gen_rtx_MEM (Pmode
, addr
);
10426 /* This should not be of rs6000_sr_alias_set, because of
10427 __builtin_return_address. */
10429 insn
= emit_move_insn (mem
, reg
);
10430 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10431 reg
, gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
));
10434 /* Save CR if we use any that must be preserved. */
10435 if (info
->cr_save_p
)
10437 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10438 GEN_INT (info
->cr_save_offset
+ sp_offset
));
10439 rtx mem
= gen_rtx_MEM (SImode
, addr
);
10441 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10443 /* If r12 was used to hold the original sp, copy cr into r0 now
10445 if (REGNO (frame_reg_rtx
) == 12)
10447 cr_save_rtx
= gen_rtx_REG (SImode
, 0);
10448 emit_insn (gen_movesi_from_cr (cr_save_rtx
));
10450 insn
= emit_move_insn (mem
, cr_save_rtx
);
10452 /* Now, there's no way that dwarf2out_frame_debug_expr is going
10453 to understand '(unspec:SI [(reg:CC 68) ...] 19)'. But that's
10454 OK. All we have to do is specify that _one_ condition code
10455 register is saved in this stack slot. The thrower's epilogue
10456 will then restore all the call-saved registers.
10457 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
10458 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
10459 cr_save_rtx
, gen_rtx_REG (SImode
, CR2_REGNO
));
10462 /* Update stack and set back pointer unless this is V.4,
10463 for which it was done previously. */
10464 if (info
->push_p
&& DEFAULT_ABI
!= ABI_V4
)
10465 rs6000_emit_allocate_stack (info
->total_size
, FALSE
);
10467 /* Set frame pointer, if needed. */
10468 if (frame_pointer_needed
)
10470 insn
= emit_move_insn (gen_rtx_REG (Pmode
, FRAME_POINTER_REGNUM
),
10472 RTX_FRAME_RELATED_P (insn
) = 1;
10475 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
10476 if ((TARGET_TOC
&& TARGET_MINIMAL_TOC
&& get_pool_size () != 0)
10477 || (DEFAULT_ABI
== ABI_V4
&& flag_pic
== 1
10478 && regs_ever_live
[RS6000_PIC_OFFSET_TABLE_REGNUM
]))
10480 /* If emit_load_toc_table will use the link register, we need to save
10481 it. We use R11 for this purpose because emit_load_toc_table
10482 can use register 0. This allows us to use a plain 'blr' to return
10483 from the procedure more often. */
10484 int save_LR_around_toc_setup
= (TARGET_ELF
&& flag_pic
!= 0
10485 && ! info
->lr_save_p
10486 && EXIT_BLOCK_PTR
->pred
!= NULL
);
10487 if (save_LR_around_toc_setup
)
10488 emit_move_insn (gen_rtx_REG (Pmode
, 11),
10489 gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
));
10491 rs6000_emit_load_toc_table (TRUE
);
10493 if (save_LR_around_toc_setup
)
10494 emit_move_insn (gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
),
10495 gen_rtx_REG (Pmode
, 11));
10498 if (DEFAULT_ABI
== ABI_DARWIN
10499 && flag_pic
&& current_function_uses_pic_offset_table
)
10501 rtx dest
= gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
);
10503 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest
)));
10505 rs6000_maybe_dead (
10506 emit_move_insn (gen_rtx_REG (Pmode
, RS6000_PIC_OFFSET_TABLE_REGNUM
),
10507 gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
)));
10511 /* Write function prologue. */
10514 rs6000_output_function_prologue (file
, size
)
10516 HOST_WIDE_INT size ATTRIBUTE_UNUSED
;
10518 rs6000_stack_t
*info
= rs6000_stack_info ();
10520 if (TARGET_DEBUG_STACK
)
10521 debug_stack_info (info
);
10523 /* Write .extern for any function we will call to save and restore
10525 if (info
->first_fp_reg_save
< 64
10526 && !FP_SAVE_INLINE (info
->first_fp_reg_save
))
10527 fprintf (file
, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
10528 SAVE_FP_PREFIX
, info
->first_fp_reg_save
- 32, SAVE_FP_SUFFIX
,
10529 RESTORE_FP_PREFIX
, info
->first_fp_reg_save
- 32,
10530 RESTORE_FP_SUFFIX
);
10532 /* Write .extern for AIX common mode routines, if needed. */
10533 if (! TARGET_POWER
&& ! TARGET_POWERPC
&& ! common_mode_defined
)
10535 fputs ("\t.extern __mulh\n", file
);
10536 fputs ("\t.extern __mull\n", file
);
10537 fputs ("\t.extern __divss\n", file
);
10538 fputs ("\t.extern __divus\n", file
);
10539 fputs ("\t.extern __quoss\n", file
);
10540 fputs ("\t.extern __quous\n", file
);
10541 common_mode_defined
= 1;
10544 if (! HAVE_prologue
)
10548 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
10549 the "toplevel" insn chain. */
10550 emit_note (0, NOTE_INSN_DELETED
);
10551 rs6000_emit_prologue ();
10552 emit_note (0, NOTE_INSN_DELETED
);
10554 /* Expand INSN_ADDRESSES so final() doesn't crash. */
10558 for (insn
= get_insns (); insn
!= 0; insn
= NEXT_INSN (insn
))
10560 INSN_ADDRESSES_NEW (insn
, addr
);
10565 if (TARGET_DEBUG_STACK
)
10566 debug_rtx_list (get_insns (), 100);
10567 final (get_insns (), file
, FALSE
, FALSE
);
10571 rs6000_pic_labelno
++;
10574 /* Emit function epilogue as insns.
10576 At present, dwarf2out_frame_debug_expr doesn't understand
10577 register restores, so we don't bother setting RTX_FRAME_RELATED_P
10578 anywhere in the epilogue. Most of the insns below would in any case
10579 need special notes to explain where r11 is in relation to the stack. */
10582 rs6000_emit_epilogue (sibcall
)
10585 rs6000_stack_t
*info
;
10586 int restoring_FPRs_inline
;
10587 int using_load_multiple
;
10588 int using_mfcr_multiple
;
10589 int use_backchain_to_restore_sp
;
10591 rtx sp_reg_rtx
= gen_rtx_REG (Pmode
, 1);
10592 rtx frame_reg_rtx
= sp_reg_rtx
;
10593 enum machine_mode reg_mode
= TARGET_POWERPC64
? DImode
: SImode
;
10594 int reg_size
= TARGET_POWERPC64
? 8 : 4;
10597 if (TARGET_SPE_ABI
)
10599 reg_mode
= V2SImode
;
10603 info
= rs6000_stack_info ();
10604 using_load_multiple
= (TARGET_MULTIPLE
&& ! TARGET_POWERPC64
10606 && info
->first_gp_reg_save
< 31);
10607 restoring_FPRs_inline
= (sibcall
10608 || current_function_calls_eh_return
10609 || info
->first_fp_reg_save
== 64
10610 || FP_SAVE_INLINE (info
->first_fp_reg_save
));
10611 use_backchain_to_restore_sp
= (frame_pointer_needed
10612 || current_function_calls_alloca
10613 || info
->total_size
> 32767);
10614 using_mfcr_multiple
= (rs6000_cpu
== PROCESSOR_PPC601
10615 || rs6000_cpu
== PROCESSOR_PPC603
10616 || rs6000_cpu
== PROCESSOR_PPC750
10619 /* If we have a frame pointer, a call to alloca, or a large stack
10620 frame, restore the old stack pointer using the backchain. Otherwise,
10621 we know what size to update it with. */
10622 if (use_backchain_to_restore_sp
)
10624 /* Under V.4, don't reset the stack pointer until after we're done
10625 loading the saved registers. */
10626 if (DEFAULT_ABI
== ABI_V4
)
10627 frame_reg_rtx
= gen_rtx_REG (Pmode
, 11);
10629 emit_move_insn (frame_reg_rtx
,
10630 gen_rtx_MEM (Pmode
, sp_reg_rtx
));
10633 else if (info
->push_p
)
10635 if (DEFAULT_ABI
== ABI_V4
)
10636 sp_offset
= info
->total_size
;
10639 emit_insn (TARGET_32BIT
10640 ? gen_addsi3 (sp_reg_rtx
, sp_reg_rtx
,
10641 GEN_INT (info
->total_size
))
10642 : gen_adddi3 (sp_reg_rtx
, sp_reg_rtx
,
10643 GEN_INT (info
->total_size
)));
10647 /* Restore AltiVec registers if needed. */
10648 if (TARGET_ALTIVEC_ABI
&& info
->altivec_size
!= 0)
10652 for (i
= info
->first_altivec_reg_save
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
10653 if (info
->vrsave_mask
& ALTIVEC_REG_BIT (i
))
10655 rtx addr
, areg
, mem
;
10657 areg
= gen_rtx_REG (Pmode
, 0);
10659 (areg
, GEN_INT (info
->altivec_save_offset
10661 + 16 * (i
- info
->first_altivec_reg_save
)));
10663 /* AltiVec addressing mode is [reg+reg]. */
10664 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
, areg
);
10665 mem
= gen_rtx_MEM (V4SImode
, addr
);
10666 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10668 emit_move_insn (gen_rtx_REG (V4SImode
, i
), mem
);
10672 /* Restore VRSAVE if needed. */
10673 if (TARGET_ALTIVEC_ABI
&& info
->vrsave_mask
!= 0)
10675 rtx addr
, mem
, reg
;
10677 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10678 GEN_INT (info
->vrsave_save_offset
+ sp_offset
));
10679 mem
= gen_rtx_MEM (SImode
, addr
);
10680 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10681 reg
= gen_rtx_REG (SImode
, 12);
10682 emit_move_insn (reg
, mem
);
10684 emit_insn (generate_set_vrsave (reg
, info
, 1));
10687 /* Get the old lr if we saved it. */
10688 if (info
->lr_save_p
)
10690 rtx mem
= gen_frame_mem_offset (Pmode
, frame_reg_rtx
,
10691 info
->lr_save_offset
+ sp_offset
);
10693 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10695 emit_move_insn (gen_rtx_REG (Pmode
, 0), mem
);
10698 /* Get the old cr if we saved it. */
10699 if (info
->cr_save_p
)
10701 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10702 GEN_INT (info
->cr_save_offset
+ sp_offset
));
10703 rtx mem
= gen_rtx_MEM (SImode
, addr
);
10705 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10707 emit_move_insn (gen_rtx_REG (SImode
, 12), mem
);
10710 /* Set LR here to try to overlap restores below. */
10711 if (info
->lr_save_p
)
10712 emit_move_insn (gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
),
10713 gen_rtx_REG (Pmode
, 0));
10715 /* Load exception handler data registers, if needed. */
10716 if (current_function_calls_eh_return
)
10718 unsigned int i
, regno
;
10724 regno
= EH_RETURN_DATA_REGNO (i
);
10725 if (regno
== INVALID_REGNUM
)
10728 mem
= gen_frame_mem_offset (reg_mode
, frame_reg_rtx
,
10729 info
->ehrd_offset
+ sp_offset
10730 + reg_size
* (int) i
);
10731 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10733 emit_move_insn (gen_rtx_REG (reg_mode
, regno
), mem
);
10737 /* Restore GPRs. This is done as a PARALLEL if we are using
10738 the load-multiple instructions. */
10739 if (using_load_multiple
)
10742 p
= rtvec_alloc (32 - info
->first_gp_reg_save
);
10743 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
10745 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10746 GEN_INT (info
->gp_save_offset
10749 rtx mem
= gen_rtx_MEM (reg_mode
, addr
);
10751 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10754 gen_rtx_SET (VOIDmode
,
10755 gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
),
10758 emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
10761 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
10762 if ((regs_ever_live
[info
->first_gp_reg_save
+i
]
10763 && ! call_used_regs
[info
->first_gp_reg_save
+i
])
10764 || (i
+info
->first_gp_reg_save
== RS6000_PIC_OFFSET_TABLE_REGNUM
10765 && ((DEFAULT_ABI
== ABI_V4
&& flag_pic
== 1)
10766 || (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
))))
10768 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10769 GEN_INT (info
->gp_save_offset
10772 rtx mem
= gen_rtx_MEM (reg_mode
, addr
);
10774 /* Restore 64-bit quantities for SPE. */
10775 if (TARGET_SPE_ABI
)
10777 int offset
= info
->spe_gp_save_offset
+ sp_offset
+ 8 * i
;
10780 if (!SPE_CONST_OFFSET_OK (offset
))
10782 b
= gen_rtx_REG (Pmode
, FIXED_SCRATCH
);
10783 emit_move_insn (b
, GEN_INT (offset
));
10786 b
= GEN_INT (offset
);
10788 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
, b
);
10789 mem
= gen_rtx_MEM (V2SImode
, addr
);
10792 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10794 emit_move_insn (gen_rtx_REG (reg_mode
,
10795 info
->first_gp_reg_save
+ i
), mem
);
10798 /* Restore fpr's if we need to do it without calling a function. */
10799 if (restoring_FPRs_inline
)
10800 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
10801 if ((regs_ever_live
[info
->first_fp_reg_save
+i
]
10802 && ! call_used_regs
[info
->first_fp_reg_save
+i
]))
10805 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
10806 GEN_INT (info
->fp_save_offset
10809 mem
= gen_rtx_MEM (DFmode
, addr
);
10810 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10812 emit_move_insn (gen_rtx_REG (DFmode
,
10813 info
->first_fp_reg_save
+ i
),
10817 /* If we saved cr, restore it here. Just those that were used. */
10818 if (info
->cr_save_p
)
10820 rtx r12_rtx
= gen_rtx_REG (SImode
, 12);
10823 if (using_mfcr_multiple
)
10825 for (i
= 0; i
< 8; i
++)
10826 if (regs_ever_live
[CR0_REGNO
+i
] && ! call_used_regs
[CR0_REGNO
+i
])
10832 if (using_mfcr_multiple
&& count
> 1)
10837 p
= rtvec_alloc (count
);
10840 for (i
= 0; i
< 8; i
++)
10841 if (regs_ever_live
[CR0_REGNO
+i
] && ! call_used_regs
[CR0_REGNO
+i
])
10843 rtvec r
= rtvec_alloc (2);
10844 RTVEC_ELT (r
, 0) = r12_rtx
;
10845 RTVEC_ELT (r
, 1) = GEN_INT (1 << (7-i
));
10846 RTVEC_ELT (p
, ndx
) =
10847 gen_rtx_SET (VOIDmode
, gen_rtx_REG (CCmode
, CR0_REGNO
+i
),
10848 gen_rtx_UNSPEC (CCmode
, r
, 20));
10851 emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
10856 for (i
= 0; i
< 8; i
++)
10857 if (regs_ever_live
[CR0_REGNO
+i
] && ! call_used_regs
[CR0_REGNO
+i
])
10859 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode
,
10865 /* If this is V.4, unwind the stack pointer after all of the loads
10866 have been done. We need to emit a block here so that sched
10867 doesn't decide to move the sp change before the register restores
10868 (which may not have any obvious dependency on the stack). This
10869 doesn't hurt performance, because there is no scheduling that can
10870 be done after this point. */
10871 if (DEFAULT_ABI
== ABI_V4
)
10873 if (frame_reg_rtx
!= sp_reg_rtx
)
10874 rs6000_emit_stack_tie ();
10876 if (use_backchain_to_restore_sp
)
10878 emit_move_insn (sp_reg_rtx
, frame_reg_rtx
);
10880 else if (sp_offset
!= 0)
10882 emit_insn (Pmode
== SImode
10883 ? gen_addsi3 (sp_reg_rtx
, sp_reg_rtx
,
10884 GEN_INT (sp_offset
))
10885 : gen_adddi3 (sp_reg_rtx
, sp_reg_rtx
,
10886 GEN_INT (sp_offset
)));
10890 if (current_function_calls_eh_return
)
10892 rtx sa
= EH_RETURN_STACKADJ_RTX
;
10893 emit_insn (Pmode
== SImode
10894 ? gen_addsi3 (sp_reg_rtx
, sp_reg_rtx
, sa
)
10895 : gen_adddi3 (sp_reg_rtx
, sp_reg_rtx
, sa
));
10901 if (! restoring_FPRs_inline
)
10902 p
= rtvec_alloc (3 + 64 - info
->first_fp_reg_save
);
10904 p
= rtvec_alloc (2);
10906 RTVEC_ELT (p
, 0) = gen_rtx_RETURN (VOIDmode
);
10907 RTVEC_ELT (p
, 1) = gen_rtx_USE (VOIDmode
,
10908 gen_rtx_REG (Pmode
,
10909 LINK_REGISTER_REGNUM
));
10911 /* If we have to restore more than two FP registers, branch to the
10912 restore function. It will return to our caller. */
10913 if (! restoring_FPRs_inline
)
10917 const char *alloc_rname
;
10919 sprintf (rname
, "%s%d%s", RESTORE_FP_PREFIX
,
10920 info
->first_fp_reg_save
- 32, RESTORE_FP_SUFFIX
);
10921 alloc_rname
= ggc_strdup (rname
);
10922 RTVEC_ELT (p
, 2) = gen_rtx_USE (VOIDmode
,
10923 gen_rtx_SYMBOL_REF (Pmode
,
10926 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
10929 addr
= gen_rtx_PLUS (Pmode
, sp_reg_rtx
,
10930 GEN_INT (info
->fp_save_offset
+ 8*i
));
10931 mem
= gen_rtx_MEM (DFmode
, addr
);
10932 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
10934 RTVEC_ELT (p
, i
+3) =
10935 gen_rtx_SET (VOIDmode
,
10936 gen_rtx_REG (DFmode
, info
->first_fp_reg_save
+ i
),
10941 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
10945 /* Write function epilogue. */
10948 rs6000_output_function_epilogue (file
, size
)
10950 HOST_WIDE_INT size ATTRIBUTE_UNUSED
;
10952 rs6000_stack_t
*info
= rs6000_stack_info ();
10954 if (! HAVE_epilogue
)
10956 rtx insn
= get_last_insn ();
10957 /* If the last insn was a BARRIER, we don't have to write anything except
10958 the trace table. */
10959 if (GET_CODE (insn
) == NOTE
)
10960 insn
= prev_nonnote_insn (insn
);
10961 if (insn
== 0 || GET_CODE (insn
) != BARRIER
)
10963 /* This is slightly ugly, but at least we don't have two
10964 copies of the epilogue-emitting code. */
10967 /* A NOTE_INSN_DELETED is supposed to be at the start
10968 and end of the "toplevel" insn chain. */
10969 emit_note (0, NOTE_INSN_DELETED
);
10970 rs6000_emit_epilogue (FALSE
);
10971 emit_note (0, NOTE_INSN_DELETED
);
10973 /* Expand INSN_ADDRESSES so final() doesn't crash. */
10977 for (insn
= get_insns (); insn
!= 0; insn
= NEXT_INSN (insn
))
10979 INSN_ADDRESSES_NEW (insn
, addr
);
10984 if (TARGET_DEBUG_STACK
)
10985 debug_rtx_list (get_insns (), 100);
10986 final (get_insns (), file
, FALSE
, FALSE
);
10991 /* Output a traceback table here. See /usr/include/sys/debug.h for info
10994 We don't output a traceback table if -finhibit-size-directive was
10995 used. The documentation for -finhibit-size-directive reads
10996 ``don't output a @code{.size} assembler directive, or anything
10997 else that would cause trouble if the function is split in the
10998 middle, and the two halves are placed at locations far apart in
10999 memory.'' The traceback table has this property, since it
11000 includes the offset from the start of the function to the
11001 traceback table itself.
11003 System V.4 Powerpc's (and the embedded ABI derived from it) use a
11004 different traceback table. */
11005 if (DEFAULT_ABI
== ABI_AIX
&& ! flag_inhibit_size_directive
11006 && rs6000_traceback
!= traceback_none
)
11008 const char *fname
= XSTR (XEXP (DECL_RTL (current_function_decl
), 0), 0);
11009 const char *language_string
= lang_hooks
.name
;
11010 int fixed_parms
= 0, float_parms
= 0, parm_info
= 0;
11012 int optional_tbtab
;
11014 if (rs6000_traceback
== traceback_full
)
11015 optional_tbtab
= 1;
11016 else if (rs6000_traceback
== traceback_part
)
11017 optional_tbtab
= 0;
11019 optional_tbtab
= !optimize_size
&& !TARGET_ELF
;
11021 while (*fname
== '.') /* V.4 encodes . in the name */
11024 /* Need label immediately before tbtab, so we can compute its offset
11025 from the function start. */
11028 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LT");
11029 ASM_OUTPUT_LABEL (file
, fname
);
11031 /* The .tbtab pseudo-op can only be used for the first eight
11032 expressions, since it can't handle the possibly variable
11033 length fields that follow. However, if you omit the optional
11034 fields, the assembler outputs zeros for all optional fields
11035 anyways, giving each variable length field is minimum length
11036 (as defined in sys/debug.h). Thus we can not use the .tbtab
11037 pseudo-op at all. */
11039 /* An all-zero word flags the start of the tbtab, for debuggers
11040 that have to find it by searching forward from the entry
11041 point or from the current pc. */
11042 fputs ("\t.long 0\n", file
);
11044 /* Tbtab format type. Use format type 0. */
11045 fputs ("\t.byte 0,", file
);
11047 /* Language type. Unfortunately, there doesn't seem to be any
11048 official way to get this info, so we use language_string. C
11049 is 0. C++ is 9. No number defined for Obj-C, so use the
11050 value for C for now. There is no official value for Java,
11051 although IBM appears to be using 13. There is no official value
11052 for Chill, so we've chosen 44 pseudo-randomly. */
11053 if (! strcmp (language_string
, "GNU C")
11054 || ! strcmp (language_string
, "GNU Objective-C"))
11056 else if (! strcmp (language_string
, "GNU F77"))
11058 else if (! strcmp (language_string
, "GNU Ada"))
11060 else if (! strcmp (language_string
, "GNU Pascal"))
11062 else if (! strcmp (language_string
, "GNU C++"))
11064 else if (! strcmp (language_string
, "GNU Java"))
11066 else if (! strcmp (language_string
, "GNU CHILL"))
11070 fprintf (file
, "%d,", i
);
11072 /* 8 single bit fields: global linkage (not set for C extern linkage,
11073 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
11074 from start of procedure stored in tbtab, internal function, function
11075 has controlled storage, function has no toc, function uses fp,
11076 function logs/aborts fp operations. */
11077 /* Assume that fp operations are used if any fp reg must be saved. */
11078 fprintf (file
, "%d,",
11079 (optional_tbtab
<< 5) | ((info
->first_fp_reg_save
!= 64) << 1));
11081 /* 6 bitfields: function is interrupt handler, name present in
11082 proc table, function calls alloca, on condition directives
11083 (controls stack walks, 3 bits), saves condition reg, saves
11085 /* The `function calls alloca' bit seems to be set whenever reg 31 is
11086 set up as a frame pointer, even when there is no alloca call. */
11087 fprintf (file
, "%d,",
11088 ((optional_tbtab
<< 6)
11089 | ((optional_tbtab
& frame_pointer_needed
) << 5)
11090 | (info
->cr_save_p
<< 1)
11091 | (info
->lr_save_p
)));
11093 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
11095 fprintf (file
, "%d,",
11096 (info
->push_p
<< 7) | (64 - info
->first_fp_reg_save
));
11098 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
11099 fprintf (file
, "%d,", (32 - first_reg_to_save ()));
11101 if (optional_tbtab
)
11103 /* Compute the parameter info from the function decl argument
11106 int next_parm_info_bit
= 31;
11108 for (decl
= DECL_ARGUMENTS (current_function_decl
);
11109 decl
; decl
= TREE_CHAIN (decl
))
11111 rtx parameter
= DECL_INCOMING_RTL (decl
);
11112 enum machine_mode mode
= GET_MODE (parameter
);
11114 if (GET_CODE (parameter
) == REG
)
11116 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
)
11122 if (mode
== SFmode
)
11124 else if (mode
== DFmode
)
11129 /* If only one bit will fit, don't or in this entry. */
11130 if (next_parm_info_bit
> 0)
11131 parm_info
|= (bits
<< (next_parm_info_bit
- 1));
11132 next_parm_info_bit
-= 2;
11136 fixed_parms
+= ((GET_MODE_SIZE (mode
)
11137 + (UNITS_PER_WORD
- 1))
11139 next_parm_info_bit
-= 1;
11145 /* Number of fixed point parameters. */
11146 /* This is actually the number of words of fixed point parameters; thus
11147 an 8 byte struct counts as 2; and thus the maximum value is 8. */
11148 fprintf (file
, "%d,", fixed_parms
);
11150 /* 2 bitfields: number of floating point parameters (7 bits), parameters
11152 /* This is actually the number of fp registers that hold parameters;
11153 and thus the maximum value is 13. */
11154 /* Set parameters on stack bit if parameters are not in their original
11155 registers, regardless of whether they are on the stack? Xlc
11156 seems to set the bit when not optimizing. */
11157 fprintf (file
, "%d\n", ((float_parms
<< 1) | (! optimize
)));
11159 if (! optional_tbtab
)
11162 /* Optional fields follow. Some are variable length. */
11164 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
11165 11 double float. */
11166 /* There is an entry for each parameter in a register, in the order that
11167 they occur in the parameter list. Any intervening arguments on the
11168 stack are ignored. If the list overflows a long (max possible length
11169 34 bits) then completely leave off all elements that don't fit. */
11170 /* Only emit this long if there was at least one parameter. */
11171 if (fixed_parms
|| float_parms
)
11172 fprintf (file
, "\t.long %d\n", parm_info
);
11174 /* Offset from start of code to tb table. */
11175 fputs ("\t.long ", file
);
11176 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LT");
11178 RS6000_OUTPUT_BASENAME (file
, fname
);
11180 assemble_name (file
, fname
);
11182 fputs ("-.", file
);
11184 RS6000_OUTPUT_BASENAME (file
, fname
);
11186 assemble_name (file
, fname
);
11190 /* Interrupt handler mask. */
11191 /* Omit this long, since we never set the interrupt handler bit
11194 /* Number of CTL (controlled storage) anchors. */
11195 /* Omit this long, since the has_ctl bit is never set above. */
11197 /* Displacement into stack of each CTL anchor. */
11198 /* Omit this list of longs, because there are no CTL anchors. */
11200 /* Length of function name. */
11201 fprintf (file
, "\t.short %d\n", (int) strlen (fname
));
11203 /* Function name. */
11204 assemble_string (fname
, strlen (fname
));
11206 /* Register for alloca automatic storage; this is always reg 31.
11207 Only emit this if the alloca bit was set above. */
11208 if (frame_pointer_needed
)
11209 fputs ("\t.byte 31\n", file
);
11211 fputs ("\t.align 2\n", file
);
11215 /* A C compound statement that outputs the assembler code for a thunk
11216 function, used to implement C++ virtual function calls with
11217 multiple inheritance. The thunk acts as a wrapper around a virtual
11218 function, adjusting the implicit object parameter before handing
11219 control off to the real function.
11221 First, emit code to add the integer DELTA to the location that
11222 contains the incoming first argument. Assume that this argument
11223 contains a pointer, and is the one used to pass the `this' pointer
11224 in C++. This is the incoming argument *before* the function
11225 prologue, e.g. `%o0' on a sparc. The addition must preserve the
11226 values of all other incoming arguments.
11228 After the addition, emit code to jump to FUNCTION, which is a
11229 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
11230 not touch the return address. Hence returning from FUNCTION will
11231 return to whoever called the current `thunk'.
11233 The effect must be as if FUNCTION had been called directly with the
11234 adjusted first argument. This macro is responsible for emitting
11235 all of the code for a thunk function; output_function_prologue()
11236 and output_function_epilogue() are not invoked.
11238 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
11239 been extracted from it.) It might possibly be useful on some
11240 targets, but probably not.
11242 If you do not define this macro, the target-independent code in the
11243 C++ frontend will generate a less efficient heavyweight thunk that
11244 calls FUNCTION instead of jumping to it. The generic approach does
11245 not support varargs. */
11248 output_mi_thunk (file
, thunk_fndecl
, delta
, function
)
11250 tree thunk_fndecl ATTRIBUTE_UNUSED
;
11254 const char *this_reg
=
11255 reg_names
[ aggregate_value_p (TREE_TYPE (TREE_TYPE (function
))) ? 4 : 3 ];
11256 const char *prefix
;
11258 const char *r0
= reg_names
[0];
11259 const char *toc
= reg_names
[2];
11260 const char *schain
= reg_names
[11];
11261 const char *r12
= reg_names
[12];
11263 static int labelno
= 0;
11265 /* Small constants that can be done by one add instruction. */
11266 if (delta
>= -32768 && delta
<= 32767)
11268 if (! TARGET_NEW_MNEMONICS
)
11269 fprintf (file
, "\tcal %s,%d(%s)\n", this_reg
, delta
, this_reg
);
11271 fprintf (file
, "\taddi %s,%s,%d\n", this_reg
, this_reg
, delta
);
11274 /* 64-bit constants. If "int" is 32 bits, we'll never hit this abort. */
11275 else if (TARGET_64BIT
&& (delta
< -2147483647 - 1 || delta
> 2147483647))
11278 /* Large constants that can be done by one addis instruction. */
11279 else if ((delta
& 0xffff) == 0)
11280 asm_fprintf (file
, "\t{cau|addis} %s,%s,%d\n", this_reg
, this_reg
,
11283 /* 32-bit constants that can be done by an add and addis instruction. */
11286 /* Break into two pieces, propagating the sign bit from the low
11287 word to the upper word. */
11288 int delta_low
= ((delta
& 0xffff) ^ 0x8000) - 0x8000;
11289 int delta_high
= (delta
- delta_low
) >> 16;
11291 asm_fprintf (file
, "\t{cau|addis} %s,%s,%d\n", this_reg
, this_reg
,
11294 if (! TARGET_NEW_MNEMONICS
)
11295 fprintf (file
, "\tcal %s,%d(%s)\n", this_reg
, delta_low
, this_reg
);
11297 fprintf (file
, "\taddi %s,%s,%d\n", this_reg
, this_reg
, delta_low
);
11300 /* Get the prefix in front of the names. */
11301 switch (DEFAULT_ABI
)
11311 case ABI_AIX_NODESC
:
11316 /* If the function is compiled in this module, jump to it directly.
11317 Otherwise, load up its address and jump to it. */
11319 fname
= XSTR (XEXP (DECL_RTL (function
), 0), 0);
11321 if (current_file_function_operand (XEXP (DECL_RTL (function
), 0), VOIDmode
)
11322 && (! lookup_attribute ("longcall",
11323 TYPE_ATTRIBUTES (TREE_TYPE (function
)))
11324 || lookup_attribute ("shortcall",
11325 TYPE_ATTRIBUTES (TREE_TYPE (function
)))))
11328 fprintf (file
, "\tb %s", prefix
);
11329 assemble_name (file
, fname
);
11330 if (DEFAULT_ABI
== ABI_V4
&& flag_pic
) fputs ("@local", file
);
11336 switch (DEFAULT_ABI
)
11342 /* Set up a TOC entry for the function. */
11343 ASM_GENERATE_INTERNAL_LABEL (buf
, "Lthunk", labelno
);
11345 ASM_OUTPUT_INTERNAL_LABEL (file
, "Lthunk", labelno
);
11348 if (TARGET_MINIMAL_TOC
)
11349 fputs (TARGET_32BIT
? "\t.long " : DOUBLE_INT_ASM_OP
, file
);
11352 fputs ("\t.tc ", file
);
11353 assemble_name (file
, fname
);
11354 fputs ("[TC],", file
);
11356 assemble_name (file
, fname
);
11359 function_section (current_function_decl
);
11362 if (TARGET_MINIMAL_TOC
)
11363 asm_fprintf (file
, (TARGET_32BIT
)
11364 ? "\t{l|lwz} %s,%s(%s)\n" : "\tld %s,%s(%s)\n", r12
,
11365 TARGET_ELF
? ".LCTOC0@toc" : ".LCTOC..1", toc
);
11366 asm_fprintf (file
, (TARGET_32BIT
) ? "\t{l|lwz} %s," : "\tld %s,", r12
);
11367 assemble_name (file
, buf
);
11368 if (TARGET_ELF
&& TARGET_MINIMAL_TOC
)
11369 fputs ("-(.LCTOC1)", file
);
11370 asm_fprintf (file
, "(%s)\n", TARGET_MINIMAL_TOC
? r12
: toc
);
11372 (TARGET_32BIT
) ? "\t{l|lwz} %s,0(%s)\n" : "\tld %s,0(%s)\n",
11376 (TARGET_32BIT
) ? "\t{l|lwz} %s,4(%s)\n" : "\tld %s,8(%s)\n",
11379 asm_fprintf (file
, "\tmtctr %s\n", r0
);
11381 (TARGET_32BIT
) ? "\t{l|lwz} %s,8(%s)\n" : "\tld %s,16(%s)\n",
11384 asm_fprintf (file
, "\tbctr\n");
11387 case ABI_AIX_NODESC
:
11389 fprintf (file
, "\tb %s", prefix
);
11390 assemble_name (file
, fname
);
11391 if (flag_pic
) fputs ("@plt", file
);
11397 fprintf (file
, "\tb %s", prefix
);
11398 if (flag_pic
&& !machopic_name_defined_p (fname
))
11399 assemble_name (file
, machopic_stub_name (fname
));
11401 assemble_name (file
, fname
);
11410 /* A quick summary of the various types of 'constant-pool tables'
11413 Target Flags Name One table per
11414 AIX (none) AIX TOC object file
11415 AIX -mfull-toc AIX TOC object file
11416 AIX -mminimal-toc AIX minimal TOC translation unit
11417 SVR4/EABI (none) SVR4 SDATA object file
11418 SVR4/EABI -fpic SVR4 pic object file
11419 SVR4/EABI -fPIC SVR4 PIC translation unit
11420 SVR4/EABI -mrelocatable EABI TOC function
11421 SVR4/EABI -maix AIX TOC object file
11422 SVR4/EABI -maix -mminimal-toc
11423 AIX minimal TOC translation unit
11425 Name Reg. Set by entries contains:
11426 made by addrs? fp? sum?
11428 AIX TOC 2 crt0 as Y option option
11429 AIX minimal TOC 30 prolog gcc Y Y option
11430 SVR4 SDATA 13 crt0 gcc N Y N
11431 SVR4 pic 30 prolog ld Y not yet N
11432 SVR4 PIC 30 prolog gcc Y option option
11433 EABI TOC 30 prolog gcc Y option option
11437 /* Hash table stuff for keeping track of TOC entries. */
11439 struct toc_hash_struct
11441 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
11442 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
11444 enum machine_mode key_mode
;
11448 static htab_t toc_hash_table
;
11450 /* Hash functions for the hash table. */
11453 rs6000_hash_constant (k
)
11456 unsigned result
= (GET_CODE (k
) << 3) ^ GET_MODE (k
);
11457 const char *format
= GET_RTX_FORMAT (GET_CODE (k
));
11458 int flen
= strlen (format
);
11461 if (GET_CODE (k
) == LABEL_REF
)
11462 return result
* 1231 + (unsigned) INSN_UID (XEXP (k
, 0));
11464 if (GET_CODE (k
) == CODE_LABEL
)
11469 for (; fidx
< flen
; fidx
++)
11470 switch (format
[fidx
])
11475 const char *str
= XSTR (k
, fidx
);
11476 len
= strlen (str
);
11477 result
= result
* 613 + len
;
11478 for (i
= 0; i
< len
; i
++)
11479 result
= result
* 613 + (unsigned) str
[i
];
11484 result
= result
* 1231 + rs6000_hash_constant (XEXP (k
, fidx
));
11488 result
= result
* 613 + (unsigned) XINT (k
, fidx
);
11491 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT
))
11492 result
= result
* 613 + (unsigned) XWINT (k
, fidx
);
11496 for (i
= 0; i
< sizeof(HOST_WIDE_INT
)/sizeof(unsigned); i
++)
11497 result
= result
* 613 + (unsigned) (XWINT (k
, fidx
)
11508 toc_hash_function (hash_entry
)
11509 const void * hash_entry
;
11511 const struct toc_hash_struct
*thc
=
11512 (const struct toc_hash_struct
*) hash_entry
;
11513 return rs6000_hash_constant (thc
->key
) ^ thc
->key_mode
;
11516 /* Compare H1 and H2 for equivalence. */
11519 toc_hash_eq (h1
, h2
)
11523 rtx r1
= ((const struct toc_hash_struct
*) h1
)->key
;
11524 rtx r2
= ((const struct toc_hash_struct
*) h2
)->key
;
11526 if (((const struct toc_hash_struct
*) h1
)->key_mode
11527 != ((const struct toc_hash_struct
*) h2
)->key_mode
)
11530 return rtx_equal_p (r1
, r2
);
11533 /* Mark the hash table-entry HASH_ENTRY. */
11536 toc_hash_mark_entry (hash_slot
, unused
)
11538 void * unused ATTRIBUTE_UNUSED
;
11540 const struct toc_hash_struct
* hash_entry
=
11541 *(const struct toc_hash_struct
**) hash_slot
;
11542 rtx r
= hash_entry
->key
;
11543 ggc_set_mark (hash_entry
);
11544 /* For CODE_LABELS, we don't want to drag in the whole insn chain... */
11545 if (GET_CODE (r
) == LABEL_REF
)
11548 ggc_set_mark (XEXP (r
, 0));
11555 /* Mark all the elements of the TOC hash-table *HT. */
11558 toc_hash_mark_table (vht
)
11563 htab_traverse (*ht
, toc_hash_mark_entry
, (void *)0);
/* These are the names given by the C++ front-end to vtables, and
   vtable-like objects.  Ideally, this logic should not be here;
   instead, there should be some programmatic way of inquiring as
   to whether or not an object is a vtable.  */

/* Nonzero if NAME names a vtable or vtable-like object: "_vt." is the
   old g++ mangling; "_ZTV" (vtable), "_ZTT" (VTT) and "_ZTC"
   (construction vtable) are the Itanium C++ ABI manglings.

   Bug fix: the original expansion referenced the caller-scope
   variable `name' instead of the macro argument, so the macro
   silently ignored whatever was passed to it.  Use (NAME) so it works
   with any argument expression.  (All existing callers pass a local
   named `name', so their behavior is unchanged.)  */
#define VTABLE_NAME_P(NAME) \
  (strncmp ("_vt.", (NAME), strlen ("_vt.")) == 0	      \
  || strncmp ("_ZTV", (NAME), strlen ("_ZTV")) == 0	      \
  || strncmp ("_ZTT", (NAME), strlen ("_ZTT")) == 0	      \
  || strncmp ("_ZTC", (NAME), strlen ("_ZTC")) == 0)
11578 rs6000_output_symbol_ref (file
, x
)
11582 /* Currently C++ toc references to vtables can be emitted before it
11583 is decided whether the vtable is public or private. If this is
11584 the case, then the linker will eventually complain that there is
11585 a reference to an unknown section. Thus, for vtables only,
11586 we emit the TOC reference to reference the symbol and not the
11588 const char *name
= XSTR (x
, 0);
11590 if (VTABLE_NAME_P (name
))
11592 RS6000_OUTPUT_BASENAME (file
, name
);
11595 assemble_name (file
, name
);
11598 /* Output a TOC entry. We derive the entry name from what is being
11602 output_toc (file
, x
, labelno
, mode
)
11606 enum machine_mode mode
;
11609 const char *name
= buf
;
11610 const char *real_name
;
11617 /* When the linker won't eliminate them, don't output duplicate
11618 TOC entries (this happens on AIX if there is any kind of TOC,
11619 and on SVR4 under -fPIC or -mrelocatable). */
11622 struct toc_hash_struct
*h
;
11625 h
= ggc_alloc (sizeof (*h
));
11627 h
->key_mode
= mode
;
11628 h
->labelno
= labelno
;
11630 found
= htab_find_slot (toc_hash_table
, h
, 1);
11631 if (*found
== NULL
)
11633 else /* This is indeed a duplicate.
11634 Set this label equal to that label. */
11636 fputs ("\t.set ", file
);
11637 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LC");
11638 fprintf (file
, "%d,", labelno
);
11639 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LC");
11640 fprintf (file
, "%d\n", ((*(const struct toc_hash_struct
**)
11646 /* If we're going to put a double constant in the TOC, make sure it's
11647 aligned properly when strict alignment is on. */
11648 if (GET_CODE (x
) == CONST_DOUBLE
11649 && STRICT_ALIGNMENT
11650 && GET_MODE_BITSIZE (mode
) >= 64
11651 && ! (TARGET_NO_FP_IN_TOC
&& ! TARGET_MINIMAL_TOC
)) {
11652 ASM_OUTPUT_ALIGN (file
, 3);
11655 ASM_OUTPUT_INTERNAL_LABEL (file
, "LC", labelno
);
11657 /* Handle FP constants specially. Note that if we have a minimal
11658 TOC, things we put here aren't actually in the TOC, so we can allow
11660 if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == DFmode
)
11662 REAL_VALUE_TYPE rv
;
11665 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
11666 REAL_VALUE_TO_TARGET_DOUBLE (rv
, k
);
11670 if (TARGET_MINIMAL_TOC
)
11671 fputs (DOUBLE_INT_ASM_OP
, file
);
11673 fprintf (file
, "\t.tc FD_%lx_%lx[TC],",
11674 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
11675 fprintf (file
, "0x%lx%08lx\n",
11676 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
11681 if (TARGET_MINIMAL_TOC
)
11682 fputs ("\t.long ", file
);
11684 fprintf (file
, "\t.tc FD_%lx_%lx[TC],",
11685 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
11686 fprintf (file
, "0x%lx,0x%lx\n",
11687 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
11691 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == SFmode
)
11693 REAL_VALUE_TYPE rv
;
11696 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
11697 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
11701 if (TARGET_MINIMAL_TOC
)
11702 fputs (DOUBLE_INT_ASM_OP
, file
);
11704 fprintf (file
, "\t.tc FS_%lx[TC],", l
& 0xffffffff);
11705 fprintf (file
, "0x%lx00000000\n", l
& 0xffffffff);
11710 if (TARGET_MINIMAL_TOC
)
11711 fputs ("\t.long ", file
);
11713 fprintf (file
, "\t.tc FS_%lx[TC],", l
& 0xffffffff);
11714 fprintf (file
, "0x%lx\n", l
& 0xffffffff);
11718 else if (GET_MODE (x
) == VOIDmode
11719 && (GET_CODE (x
) == CONST_INT
|| GET_CODE (x
) == CONST_DOUBLE
))
11721 unsigned HOST_WIDE_INT low
;
11722 HOST_WIDE_INT high
;
11724 if (GET_CODE (x
) == CONST_DOUBLE
)
11726 low
= CONST_DOUBLE_LOW (x
);
11727 high
= CONST_DOUBLE_HIGH (x
);
11730 #if HOST_BITS_PER_WIDE_INT == 32
11733 high
= (low
& 0x80000000) ? ~0 : 0;
11737 low
= INTVAL (x
) & 0xffffffff;
11738 high
= (HOST_WIDE_INT
) INTVAL (x
) >> 32;
11742 /* TOC entries are always Pmode-sized, but since this
11743 is a bigendian machine then if we're putting smaller
11744 integer constants in the TOC we have to pad them.
11745 (This is still a win over putting the constants in
11746 a separate constant pool, because then we'd have
11747 to have both a TOC entry _and_ the actual constant.)
11749 For a 32-bit target, CONST_INT values are loaded and shifted
11750 entirely within `low' and can be stored in one TOC entry. */
11752 if (TARGET_64BIT
&& POINTER_SIZE
< GET_MODE_BITSIZE (mode
))
11753 abort ();/* It would be easy to make this work, but it doesn't now. */
11755 if (POINTER_SIZE
> GET_MODE_BITSIZE (mode
))
11757 #if HOST_BITS_PER_WIDE_INT == 32
11758 lshift_double (low
, high
, POINTER_SIZE
- GET_MODE_BITSIZE (mode
),
11759 POINTER_SIZE
, &low
, &high
, 0);
11762 low
<<= POINTER_SIZE
- GET_MODE_BITSIZE (mode
);
11763 high
= (HOST_WIDE_INT
) low
>> 32;
11770 if (TARGET_MINIMAL_TOC
)
11771 fputs (DOUBLE_INT_ASM_OP
, file
);
11773 fprintf (file
, "\t.tc ID_%lx_%lx[TC],",
11774 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
11775 fprintf (file
, "0x%lx%08lx\n",
11776 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
11781 if (POINTER_SIZE
< GET_MODE_BITSIZE (mode
))
11783 if (TARGET_MINIMAL_TOC
)
11784 fputs ("\t.long ", file
);
11786 fprintf (file
, "\t.tc ID_%lx_%lx[TC],",
11787 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
11788 fprintf (file
, "0x%lx,0x%lx\n",
11789 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
11793 if (TARGET_MINIMAL_TOC
)
11794 fputs ("\t.long ", file
);
11796 fprintf (file
, "\t.tc IS_%lx[TC],", (long) low
& 0xffffffff);
11797 fprintf (file
, "0x%lx\n", (long) low
& 0xffffffff);
11803 if (GET_CODE (x
) == CONST
)
11805 if (GET_CODE (XEXP (x
, 0)) != PLUS
)
11808 base
= XEXP (XEXP (x
, 0), 0);
11809 offset
= INTVAL (XEXP (XEXP (x
, 0), 1));
11812 if (GET_CODE (base
) == SYMBOL_REF
)
11813 name
= XSTR (base
, 0);
11814 else if (GET_CODE (base
) == LABEL_REF
)
11815 ASM_GENERATE_INTERNAL_LABEL (buf
, "L", CODE_LABEL_NUMBER (XEXP (base
, 0)));
11816 else if (GET_CODE (base
) == CODE_LABEL
)
11817 ASM_GENERATE_INTERNAL_LABEL (buf
, "L", CODE_LABEL_NUMBER (base
));
11821 real_name
= (*targetm
.strip_name_encoding
) (name
);
11822 if (TARGET_MINIMAL_TOC
)
11823 fputs (TARGET_32BIT
? "\t.long " : DOUBLE_INT_ASM_OP
, file
);
11826 fprintf (file
, "\t.tc %s", real_name
);
11829 fprintf (file
, ".N%d", - offset
);
11831 fprintf (file
, ".P%d", offset
);
11833 fputs ("[TC],", file
);
11836 /* Currently C++ toc references to vtables can be emitted before it
11837 is decided whether the vtable is public or private. If this is
11838 the case, then the linker will eventually complain that there is
11839 a TOC reference to an unknown section. Thus, for vtables only,
11840 we emit the TOC reference to reference the symbol and not the
11842 if (VTABLE_NAME_P (name
))
11844 RS6000_OUTPUT_BASENAME (file
, name
);
11846 fprintf (file
, "%d", offset
);
11847 else if (offset
> 0)
11848 fprintf (file
, "+%d", offset
);
11851 output_addr_const (file
, x
);
/* Output an assembler pseudo-op to write an ASCII string of N characters
   starting at P to FILE.

   On the RS/6000, we have to do this using the .byte operation and
   write out special characters outside the quoted string.
   Also, the assembler is broken; very long strings are truncated,
   so we must artificially break them up early.  */

void
output_ascii (file, p, n)
     FILE *file;
     const char *p;
     int n;
{
  char c;
  int i, count_string;
  const char *for_string = "\t.byte \"";	/* emitted before a quoted run */
  const char *for_decimal = "\t.byte ";		/* emitted before a numeric byte */
  const char *to_close = NULL;			/* pending close-quote, if any */

  count_string = 0;
  for (i = 0; i < n; i++)
    {
      c = *p++;
      if (c >= ' ' && c < 0177)
	{
	  /* Printable: continue (or open) a quoted string.  */
	  if (for_string)
	    fputs (for_string, file);
	  putc (c, file);

	  /* Write two quotes to get one.  */
	  if (c == '"')
	    {
	      putc (c, file);
	      ++count_string;
	    }

	  for_string = NULL;
	  for_decimal = "\"\n\t.byte ";
	  to_close = "\"\n";
	  ++count_string;

	  /* Break very long strings up early; the assembler truncates
	     them otherwise.  */
	  if (count_string >= 512)
	    {
	      fputs (to_close, file);

	      for_string = "\t.byte \"";
	      for_decimal = "\t.byte ";
	      to_close = NULL;
	      count_string = 0;
	    }
	}
      else
	{
	  /* Non-printable: emit the byte as a decimal number.  */
	  if (for_decimal)
	    fputs (for_decimal, file);
	  fprintf (file, "%d", c);

	  for_string = "\n\t.byte \"";
	  for_decimal = ", ";
	  to_close = "\n";
	  count_string = 0;
	}
    }

  /* Now close the string if we have written one.  Then end the line.  */
  if (to_close)
    fputs (to_close, file);
}
/* Generate a unique section name for FILENAME for a section type
   represented by SECTION_DESC.  Output goes into BUF.

   SECTION_DESC can be any string, as long as it is different for each
   possible section type.

   We name the section in the same manner as xlc.  The name begins with an
   underscore followed by the filename (after stripping any leading directory
   names) with the last period replaced by the string SECTION_DESC.  If
   FILENAME does not contain a period, SECTION_DESC is appended to the end of
   the name.  The returned buffer is xmalloc'd; the caller owns it.  */

void
rs6000_gen_section_name (buf, filename, section_desc)
     char **buf;
     const char *filename;
     const char *section_desc;
{
  const char *q, *after_last_slash, *last_period = 0;
  char *p;
  int len;

  /* Find the basename and the last period, if any.  */
  after_last_slash = filename;
  for (q = filename; *q; q++)
    {
      if (*q == '/')
	after_last_slash = q + 1;
      else if (*q == '.')
	last_period = q;
    }

  len = strlen (after_last_slash) + strlen (section_desc) + 2;
  *buf = (char *) xmalloc (len);

  p = *buf;
  *p++ = '_';

  for (q = after_last_slash; *q; q++)
    {
      if (q == last_period)
	{
	  /* Replace the last period with SECTION_DESC.  */
	  strcpy (p, section_desc);
	  p += strlen (section_desc);
	  last_period = 0;
	}
      else if (ISALNUM (*q))
	/* Copy only alphanumerics; other characters are dropped.  */
	*p++ = *q;
    }

  if (last_period == 0)
    strcpy (p, section_desc);
  else
    *p = '\0';
}
11980 /* Emit profile function. */
11983 output_profile_hook (labelno
)
11984 int labelno ATTRIBUTE_UNUSED
;
11986 if (DEFAULT_ABI
== ABI_AIX
)
11988 #ifdef NO_PROFILE_COUNTERS
11989 emit_library_call (init_one_libfunc (RS6000_MCOUNT
), 0, VOIDmode
, 0);
11992 const char *label_name
;
11995 ASM_GENERATE_INTERNAL_LABEL (buf
, "LP", labelno
);
11996 label_name
= (*targetm
.strip_name_encoding
) (ggc_strdup (buf
));
11997 fun
= gen_rtx_SYMBOL_REF (Pmode
, label_name
);
11999 emit_library_call (init_one_libfunc (RS6000_MCOUNT
), 0, VOIDmode
, 1,
12003 else if (DEFAULT_ABI
== ABI_DARWIN
)
12005 const char *mcount_name
= RS6000_MCOUNT
;
12006 int caller_addr_regno
= LINK_REGISTER_REGNUM
;
12008 /* Be conservative and always set this, at least for now. */
12009 current_function_uses_pic_offset_table
= 1;
12012 /* For PIC code, set up a stub and collect the caller's address
12013 from r0, which is where the prologue puts it. */
12016 mcount_name
= machopic_stub_name (mcount_name
);
12017 if (current_function_uses_pic_offset_table
)
12018 caller_addr_regno
= 0;
12021 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, mcount_name
),
12023 gen_rtx_REG (Pmode
, caller_addr_regno
), Pmode
);
12027 /* Write function profiler code. */
12030 output_function_profiler (file
, labelno
)
12037 ASM_GENERATE_INTERNAL_LABEL (buf
, "LP", labelno
);
12038 switch (DEFAULT_ABI
)
12045 /* Fall through. */
12047 case ABI_AIX_NODESC
:
12050 warning ("no profiling of 64-bit code for this ABI");
12053 fprintf (file
, "\tmflr %s\n", reg_names
[0]);
12056 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file
);
12057 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
12058 reg_names
[0], save_lr
, reg_names
[1]);
12059 asm_fprintf (file
, "\tmflr %s\n", reg_names
[12]);
12060 asm_fprintf (file
, "\t{l|lwz} %s,", reg_names
[0]);
12061 assemble_name (file
, buf
);
12062 asm_fprintf (file
, "@got(%s)\n", reg_names
[12]);
12064 else if (flag_pic
> 1)
12066 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
12067 reg_names
[0], save_lr
, reg_names
[1]);
12068 /* Now, we need to get the address of the label. */
12069 fputs ("\tbl 1f\n\t.long ", file
);
12070 assemble_name (file
, buf
);
12071 fputs ("-.\n1:", file
);
12072 asm_fprintf (file
, "\tmflr %s\n", reg_names
[11]);
12073 asm_fprintf (file
, "\t{l|lwz} %s,0(%s)\n",
12074 reg_names
[0], reg_names
[11]);
12075 asm_fprintf (file
, "\t{cax|add} %s,%s,%s\n",
12076 reg_names
[0], reg_names
[0], reg_names
[11]);
12080 asm_fprintf (file
, "\t{liu|lis} %s,", reg_names
[12]);
12081 assemble_name (file
, buf
);
12082 fputs ("@ha\n", file
);
12083 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
12084 reg_names
[0], save_lr
, reg_names
[1]);
12085 asm_fprintf (file
, "\t{cal|la} %s,", reg_names
[0]);
12086 assemble_name (file
, buf
);
12087 asm_fprintf (file
, "@l(%s)\n", reg_names
[12]);
12090 if (current_function_needs_context
&& DEFAULT_ABI
== ABI_AIX_NODESC
)
12092 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
12093 reg_names
[STATIC_CHAIN_REGNUM
],
12095 fprintf (file
, "\tbl %s\n", RS6000_MCOUNT
);
12096 asm_fprintf (file
, "\t{l|lwz} %s,%d(%s)\n",
12097 reg_names
[STATIC_CHAIN_REGNUM
],
12101 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
12102 fprintf (file
, "\tbl %s\n", RS6000_MCOUNT
);
12107 /* Don't do anything, done in output_profile_hook (). */
12112 /* Adjust the cost of a scheduling dependency. Return the new cost of
12113 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
12116 rs6000_adjust_cost (insn
, link
, dep_insn
, cost
)
12119 rtx dep_insn ATTRIBUTE_UNUSED
;
12122 if (! recog_memoized (insn
))
12125 if (REG_NOTE_KIND (link
) != 0)
12128 if (REG_NOTE_KIND (link
) == 0)
12130 /* Data dependency; DEP_INSN writes a register that INSN reads
12131 some cycles later. */
12132 switch (get_attr_type (insn
))
12135 /* Tell the first scheduling pass about the latency between
12136 a mtctr and bctr (and mtlr and br/blr). The first
12137 scheduling pass will not know about this latency since
12138 the mtctr instruction, which has the latency associated
12139 to it, will be generated by reload. */
12140 return TARGET_POWER
? 5 : 4;
12142 /* Leave some extra cycles between a compare and its
12143 dependent branch, to inhibit expensive mispredicts. */
12144 if ((rs6000_cpu_attr
== CPU_PPC603
12145 || rs6000_cpu_attr
== CPU_PPC604
12146 || rs6000_cpu_attr
== CPU_PPC604E
12147 || rs6000_cpu_attr
== CPU_PPC620
12148 || rs6000_cpu_attr
== CPU_PPC630
12149 || rs6000_cpu_attr
== CPU_PPC750
12150 || rs6000_cpu_attr
== CPU_PPC7400
12151 || rs6000_cpu_attr
== CPU_PPC7450
12152 || rs6000_cpu_attr
== CPU_POWER4
)
12153 && recog_memoized (dep_insn
)
12154 && (INSN_CODE (dep_insn
) >= 0)
12155 && (get_attr_type (dep_insn
) == TYPE_COMPARE
12156 || get_attr_type (dep_insn
) == TYPE_DELAYED_COMPARE
12157 || get_attr_type (dep_insn
) == TYPE_FPCOMPARE
12158 || get_attr_type (dep_insn
) == TYPE_CR_LOGICAL
))
12163 /* Fall out to return default cost. */
12169 /* A C statement (sans semicolon) to update the integer scheduling
12170 priority INSN_PRIORITY (INSN). Reduce the priority to execute the
12171 INSN earlier, increase the priority to execute INSN later. Do not
12172 define this macro if you do not need to adjust the scheduling
12173 priorities of insns. */
12176 rs6000_adjust_priority (insn
, priority
)
12177 rtx insn ATTRIBUTE_UNUSED
;
12180 /* On machines (like the 750) which have asymmetric integer units,
12181 where one integer unit can do multiply and divides and the other
12182 can't, reduce the priority of multiply/divide so it is scheduled
12183 before other integer operations. */
12186 if (! INSN_P (insn
))
12189 if (GET_CODE (PATTERN (insn
)) == USE
)
12192 switch (rs6000_cpu_attr
) {
12194 switch (get_attr_type (insn
))
12201 fprintf (stderr
, "priority was %#x (%d) before adjustment\n",
12202 priority
, priority
);
12203 if (priority
>= 0 && priority
< 0x01000000)
12213 /* Return how many instructions the machine can issue per cycle. */
12216 rs6000_issue_rate ()
12218 switch (rs6000_cpu_attr
) {
12219 case CPU_RIOS1
: /* ? */
12221 case CPU_PPC601
: /* ? */
12241 /* Length in units of the trampoline for entering a nested function. */
12244 rs6000_trampoline_size ()
12248 switch (DEFAULT_ABI
)
12254 ret
= (TARGET_32BIT
) ? 12 : 24;
12259 case ABI_AIX_NODESC
:
12260 ret
= (TARGET_32BIT
) ? 40 : 48;
12267 /* Emit RTL insns to initialize the variable parts of a trampoline.
12268 FNADDR is an RTX for the address of the function's pure code.
12269 CXT is an RTX for the static chain value for the function. */
12272 rs6000_initialize_trampoline (addr
, fnaddr
, cxt
)
12277 enum machine_mode pmode
= Pmode
;
12278 int regsize
= (TARGET_32BIT
) ? 4 : 8;
12279 rtx ctx_reg
= force_reg (pmode
, cxt
);
12281 switch (DEFAULT_ABI
)
12286 /* Macros to shorten the code expansions below. */
12287 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
12288 #define MEM_PLUS(addr,offset) \
12289 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
12291 /* Under AIX, just build the 3 word function descriptor */
12294 rtx fn_reg
= gen_reg_rtx (pmode
);
12295 rtx toc_reg
= gen_reg_rtx (pmode
);
12296 emit_move_insn (fn_reg
, MEM_DEREF (fnaddr
));
12297 emit_move_insn (toc_reg
, MEM_PLUS (fnaddr
, regsize
));
12298 emit_move_insn (MEM_DEREF (addr
), fn_reg
);
12299 emit_move_insn (MEM_PLUS (addr
, regsize
), toc_reg
);
12300 emit_move_insn (MEM_PLUS (addr
, 2*regsize
), ctx_reg
);
12304 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
12307 case ABI_AIX_NODESC
:
12308 emit_library_call (gen_rtx_SYMBOL_REF (SImode
, "__trampoline_setup"),
12309 FALSE
, VOIDmode
, 4,
12311 GEN_INT (rs6000_trampoline_size ()), SImode
,
12321 /* Table of valid machine attributes. */
12323 const struct attribute_spec rs6000_attribute_table
[] =
12325 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
12326 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute
},
12327 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute
},
12328 { NULL
, 0, 0, false, false, false, NULL
}
12331 /* Handle a "longcall" or "shortcall" attribute; arguments as in
12332 struct attribute_spec.handler. */
12335 rs6000_handle_longcall_attribute (node
, name
, args
, flags
, no_add_attrs
)
12338 tree args ATTRIBUTE_UNUSED
;
12339 int flags ATTRIBUTE_UNUSED
;
12340 bool *no_add_attrs
;
12342 if (TREE_CODE (*node
) != FUNCTION_TYPE
12343 && TREE_CODE (*node
) != FIELD_DECL
12344 && TREE_CODE (*node
) != TYPE_DECL
)
12346 warning ("`%s' attribute only applies to functions",
12347 IDENTIFIER_POINTER (name
));
12348 *no_add_attrs
= true;
12354 /* Set longcall attributes on all functions declared when
12355 rs6000_default_long_calls is true. */
12357 rs6000_set_default_type_attributes (type
)
12360 if (rs6000_default_long_calls
12361 && (TREE_CODE (type
) == FUNCTION_TYPE
12362 || TREE_CODE (type
) == METHOD_TYPE
))
12363 TYPE_ATTRIBUTES (type
) = tree_cons (get_identifier ("longcall"),
12365 TYPE_ATTRIBUTES (type
));
12368 /* Return a reference suitable for calling a function with the
12369 longcall attribute. */
12372 rs6000_longcall_ref (call_ref
)
12375 const char *call_name
;
12378 if (GET_CODE (call_ref
) != SYMBOL_REF
)
12381 /* System V adds '.' to the internal name, so skip them. */
12382 call_name
= XSTR (call_ref
, 0);
12383 if (*call_name
== '.')
12385 while (*call_name
== '.')
12388 node
= get_identifier (call_name
);
12389 call_ref
= gen_rtx_SYMBOL_REF (VOIDmode
, IDENTIFIER_POINTER (node
));
12392 return force_reg (Pmode
, call_ref
);
12396 #ifdef USING_ELFOS_H
12398 /* A C statement or statements to switch to the appropriate section
12399 for output of RTX in mode MODE. You can assume that RTX is some
12400 kind of constant in RTL. The argument MODE is redundant except in
12401 the case of a `const_int' rtx. Select the section by calling
12402 `text_section' or one of the alternatives for other sections.
12404 Do not define this macro if you put all constants in the read-only
12408 rs6000_elf_select_rtx_section (mode
, x
, align
)
12409 enum machine_mode mode
;
12411 unsigned HOST_WIDE_INT align
;
12413 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x
, mode
))
12416 default_elf_select_rtx_section (mode
, x
, align
);
12419 /* A C statement or statements to switch to the appropriate
12420 section for output of DECL. DECL is either a `VAR_DECL' node
12421 or a constant of some sort. RELOC indicates whether forming
12422 the initial value of DECL requires link-time relocations. */
12425 rs6000_elf_select_section (decl
, reloc
, align
)
12428 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
;
12430 int size
= int_size_in_bytes (TREE_TYPE (decl
));
12433 static void (* const sec_funcs
[4]) PARAMS ((void)) = {
12434 &readonly_data_section
,
12440 needs_sdata
= (size
> 0
12441 && size
<= g_switch_value
12442 && rs6000_sdata
!= SDATA_NONE
12443 && (rs6000_sdata
!= SDATA_DATA
|| TREE_PUBLIC (decl
)));
12445 if (TREE_CODE (decl
) == STRING_CST
)
12446 readonly
= !flag_writable_strings
;
12447 else if (TREE_CODE (decl
) == VAR_DECL
)
12448 readonly
= (!((flag_pic
|| DEFAULT_ABI
== ABI_AIX
) && reloc
)
12449 && TREE_READONLY (decl
)
12450 && !TREE_SIDE_EFFECTS (decl
)
12451 && DECL_INITIAL (decl
)
12452 && DECL_INITIAL (decl
) != error_mark_node
12453 && TREE_CONSTANT (DECL_INITIAL (decl
)));
12454 else if (TREE_CODE (decl
) == CONSTRUCTOR
)
12455 readonly
= (!((flag_pic
|| DEFAULT_ABI
== ABI_AIX
) && reloc
)
12456 && !TREE_SIDE_EFFECTS (decl
)
12457 && TREE_CONSTANT (decl
));
12459 readonly
= !((flag_pic
|| DEFAULT_ABI
== ABI_AIX
) && reloc
);
12461 if (needs_sdata
&& rs6000_sdata
!= SDATA_EABI
)
12464 (*sec_funcs
[(readonly
? 0 : 2) + (needs_sdata
? 1 : 0)])();
12467 /* A C statement to build up a unique section name, expressed as a
12468 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
12469 RELOC indicates whether the initial value of EXP requires
12470 link-time relocations. If you do not define this macro, GCC will use
12471 the symbol name prefixed by `.' as the section name. Note - this
12472 macro can now be called for uninitialized data items as well as
12473 initialised data and functions. */
12476 rs6000_elf_unique_section (decl
, reloc
)
12484 const char *prefix
;
12486 static const char *const prefixes
[7][2] =
12488 { ".rodata.", ".gnu.linkonce.r." },
12489 { ".sdata2.", ".gnu.linkonce.s2." },
12490 { ".data.", ".gnu.linkonce.d." },
12491 { ".sdata.", ".gnu.linkonce.s." },
12492 { ".bss.", ".gnu.linkonce.b." },
12493 { ".sbss.", ".gnu.linkonce.sb." },
12494 { ".text.", ".gnu.linkonce.t." }
12497 if (TREE_CODE (decl
) == FUNCTION_DECL
)
12505 if (TREE_CODE (decl
) == STRING_CST
)
12506 readonly
= !flag_writable_strings
;
12507 else if (TREE_CODE (decl
) == VAR_DECL
)
12508 readonly
= (!((flag_pic
|| DEFAULT_ABI
== ABI_AIX
) && reloc
)
12509 && TREE_READONLY (decl
)
12510 && !TREE_SIDE_EFFECTS (decl
)
12511 && TREE_CONSTANT (DECL_INITIAL (decl
)));
12513 readonly
= !((flag_pic
|| DEFAULT_ABI
== ABI_AIX
) && reloc
);
12515 size
= int_size_in_bytes (TREE_TYPE (decl
));
12516 needs_sdata
= (size
> 0
12517 && size
<= g_switch_value
12518 && rs6000_sdata
!= SDATA_NONE
12519 && (rs6000_sdata
!= SDATA_DATA
|| TREE_PUBLIC (decl
)));
12521 if (DECL_INITIAL (decl
) == NULL
12522 || DECL_INITIAL (decl
) == error_mark_node
)
12524 else if (!readonly
)
12531 /* .sdata2 is only for EABI. */
12532 if (sec
== 0 && rs6000_sdata
!= SDATA_EABI
)
12538 name
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
));
12539 name
= (*targetm
.strip_name_encoding
) (name
);
12540 prefix
= prefixes
[sec
][DECL_ONE_ONLY (decl
)];
12541 len
= strlen (name
) + strlen (prefix
);
12542 string
= alloca (len
+ 1);
12544 sprintf (string
, "%s%s", prefix
, name
);
12546 DECL_SECTION_NAME (decl
) = build_string (len
, string
);
12550 /* If we are referencing a function that is static or is known to be
12551 in this file, make the SYMBOL_REF special. We can use this to indicate
12552 that we can branch to this function without emitting a no-op after the
12553 call. For real AIX calling sequences, we also replace the
12554 function name with the real name (1 or 2 leading .'s), rather than
12555 the function descriptor name. This saves a lot of overriding code
12556 to read the prefixes. */
12559 rs6000_elf_encode_section_info (decl
, first
)
12566 if (TREE_CODE (decl
) == FUNCTION_DECL
)
12568 rtx sym_ref
= XEXP (DECL_RTL (decl
), 0);
12569 if ((TREE_ASM_WRITTEN (decl
) || ! TREE_PUBLIC (decl
))
12570 && ! DECL_WEAK (decl
))
12571 SYMBOL_REF_FLAG (sym_ref
) = 1;
12573 if (DEFAULT_ABI
== ABI_AIX
)
12575 size_t len1
= (DEFAULT_ABI
== ABI_AIX
) ? 1 : 2;
12576 size_t len2
= strlen (XSTR (sym_ref
, 0));
12577 char *str
= alloca (len1
+ len2
+ 1);
12580 memcpy (str
+ len1
, XSTR (sym_ref
, 0), len2
+ 1);
12582 XSTR (sym_ref
, 0) = ggc_alloc_string (str
, len1
+ len2
);
12585 else if (rs6000_sdata
!= SDATA_NONE
12586 && DEFAULT_ABI
== ABI_V4
12587 && TREE_CODE (decl
) == VAR_DECL
)
12589 int size
= int_size_in_bytes (TREE_TYPE (decl
));
12590 tree section_name
= DECL_SECTION_NAME (decl
);
12591 const char *name
= (char *)0;
12596 if (TREE_CODE (section_name
) == STRING_CST
)
12598 name
= TREE_STRING_POINTER (section_name
);
12599 len
= TREE_STRING_LENGTH (section_name
);
12605 if ((size
> 0 && size
<= g_switch_value
)
12607 && ((len
== sizeof (".sdata") - 1
12608 && strcmp (name
, ".sdata") == 0)
12609 || (len
== sizeof (".sdata2") - 1
12610 && strcmp (name
, ".sdata2") == 0)
12611 || (len
== sizeof (".sbss") - 1
12612 && strcmp (name
, ".sbss") == 0)
12613 || (len
== sizeof (".sbss2") - 1
12614 && strcmp (name
, ".sbss2") == 0)
12615 || (len
== sizeof (".PPC.EMB.sdata0") - 1
12616 && strcmp (name
, ".PPC.EMB.sdata0") == 0)
12617 || (len
== sizeof (".PPC.EMB.sbss0") - 1
12618 && strcmp (name
, ".PPC.EMB.sbss0") == 0))))
12620 rtx sym_ref
= XEXP (DECL_RTL (decl
), 0);
12621 size_t len
= strlen (XSTR (sym_ref
, 0));
12622 char *str
= alloca (len
+ 2);
12625 memcpy (str
+ 1, XSTR (sym_ref
, 0), len
+ 1);
12626 XSTR (sym_ref
, 0) = ggc_alloc_string (str
, len
+ 1);
12631 static const char *
12632 rs6000_elf_strip_name_encoding (str
)
12635 while (*str
== '*' || *str
== '@')
12640 #endif /* USING_ELFOS_H */
12643 /* Return a REG that occurs in ADDR with coefficient 1.
12644 ADDR can be effectively incremented by incrementing REG.
12646 r0 is special and we must not select it as an address
12647 register by this routine since our caller will try to
12648 increment the returned register via an "la" instruction. */
12651 find_addr_reg (addr
)
12654 while (GET_CODE (addr
) == PLUS
)
12656 if (GET_CODE (XEXP (addr
, 0)) == REG
12657 && REGNO (XEXP (addr
, 0)) != 0)
12658 addr
= XEXP (addr
, 0);
12659 else if (GET_CODE (XEXP (addr
, 1)) == REG
12660 && REGNO (XEXP (addr
, 1)) != 0)
12661 addr
= XEXP (addr
, 1);
12662 else if (CONSTANT_P (XEXP (addr
, 0)))
12663 addr
= XEXP (addr
, 1);
12664 else if (CONSTANT_P (XEXP (addr
, 1)))
12665 addr
= XEXP (addr
, 0);
12669 if (GET_CODE (addr
) == REG
&& REGNO (addr
) != 0)
12675 rs6000_fatal_bad_address (op
)
12678 fatal_insn ("bad address", op
);
12681 /* Called to register all of our global variables with the garbage
12685 rs6000_add_gc_roots ()
12687 toc_hash_table
= htab_create (1021, toc_hash_function
, toc_hash_eq
, NULL
);
12688 ggc_add_root (&toc_hash_table
, 1, sizeof (toc_hash_table
),
12689 toc_hash_mark_table
);
12695 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
12696 reference and a constant. */
12699 symbolic_operand (op
)
12702 switch (GET_CODE (op
))
12709 return (GET_CODE (op
) == SYMBOL_REF
||
12710 (GET_CODE (XEXP (op
, 0)) == SYMBOL_REF
12711 || GET_CODE (XEXP (op
, 0)) == LABEL_REF
)
12712 && GET_CODE (XEXP (op
, 1)) == CONST_INT
);
12719 #ifdef RS6000_LONG_BRANCH
12721 static tree stub_list
= 0;
12723 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
12724 procedure calls to the linked list. */
12727 add_compiler_stub (label_name
, function_name
, line_number
)
12729 tree function_name
;
12732 tree stub
= build_tree_list (function_name
, label_name
);
12733 TREE_TYPE (stub
) = build_int_2 (line_number
, 0);
12734 TREE_CHAIN (stub
) = stub_list
;
12738 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
12739 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
12740 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
12742 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
12743 handling procedure calls from the linked list and initializes the
12747 output_compiler_stub ()
12750 char label_buf
[256];
12754 for (stub
= stub_list
; stub
; stub
= TREE_CHAIN (stub
))
12756 fprintf (asm_out_file
,
12757 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub
)));
12759 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
12760 if (write_symbols
== DBX_DEBUG
|| write_symbols
== XCOFF_DEBUG
)
12761 fprintf (asm_out_file
, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub
));
12762 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
12764 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub
))[0] == '*')
12766 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub
))+1);
12769 label_buf
[0] = '_';
12770 strcpy (label_buf
+1,
12771 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub
)));
12774 strcpy (tmp_buf
, "lis r12,hi16(");
12775 strcat (tmp_buf
, label_buf
);
12776 strcat (tmp_buf
, ")\n\tori r12,r12,lo16(");
12777 strcat (tmp_buf
, label_buf
);
12778 strcat (tmp_buf
, ")\n\tmtctr r12\n\tbctr");
12779 output_asm_insn (tmp_buf
, 0);
12781 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
12782 if (write_symbols
== DBX_DEBUG
|| write_symbols
== XCOFF_DEBUG
)
12783 fprintf(asm_out_file
, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub
));
12784 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
12790 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
12791 already there or not. */
12794 no_previous_def (function_name
)
12795 tree function_name
;
12798 for (stub
= stub_list
; stub
; stub
= TREE_CHAIN (stub
))
12799 if (function_name
== STUB_FUNCTION_NAME (stub
))
12804 /* GET_PREV_LABEL gets the label name from the previous definition of
12808 get_prev_label (function_name
)
12809 tree function_name
;
12812 for (stub
= stub_list
; stub
; stub
= TREE_CHAIN (stub
))
12813 if (function_name
== STUB_FUNCTION_NAME (stub
))
12814 return STUB_LABEL_NAME (stub
);
12818 /* INSN is either a function call or a millicode call. It may have an
12819 unconditional jump in its delay slot.
12821 CALL_DEST is the routine we are calling. */
12824 output_call (insn
, call_dest
, operand_number
)
12827 int operand_number
;
12829 static char buf
[256];
12830 if (GET_CODE (call_dest
) == SYMBOL_REF
&& TARGET_LONG_BRANCH
&& !flag_pic
)
12833 tree funname
= get_identifier (XSTR (call_dest
, 0));
12835 if (no_previous_def (funname
))
12837 int line_number
= 0;
12838 rtx label_rtx
= gen_label_rtx ();
12839 char *label_buf
, temp_buf
[256];
12840 ASM_GENERATE_INTERNAL_LABEL (temp_buf
, "L",
12841 CODE_LABEL_NUMBER (label_rtx
));
12842 label_buf
= temp_buf
[0] == '*' ? temp_buf
+ 1 : temp_buf
;
12843 labelname
= get_identifier (label_buf
);
12844 for (; insn
&& GET_CODE (insn
) != NOTE
; insn
= PREV_INSN (insn
));
12846 line_number
= NOTE_LINE_NUMBER (insn
);
12847 add_compiler_stub (labelname
, funname
, line_number
);
12850 labelname
= get_prev_label (funname
);
12852 sprintf (buf
, "jbsr %%z%d,%.246s",
12853 operand_number
, IDENTIFIER_POINTER (labelname
));
12858 sprintf (buf
, "bl %%z%d", operand_number
);
12863 #endif /* RS6000_LONG_BRANCH */
12865 #define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N) \
12867 const char *const symbol_ = (SYMBOL); \
12868 char *buffer_ = (BUF); \
12869 if (symbol_[0] == '"') \
12871 sprintf(buffer_, "\"L%d$%s", (N), symbol_+1); \
12873 else if (name_needs_quotes(symbol_)) \
12875 sprintf(buffer_, "\"L%d$%s\"", (N), symbol_); \
12879 sprintf(buffer_, "L%d$%s", (N), symbol_); \
12884 /* Generate PIC and indirect symbol stubs. */
12887 machopic_output_stub (file
, symb
, stub
)
12889 const char *symb
, *stub
;
12891 unsigned int length
;
12892 char *symbol_name
, *lazy_ptr_name
;
12893 char *local_label_0
;
12894 static int label
= 0;
12896 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
12897 symb
= (*targetm
.strip_name_encoding
) (symb
);
12901 length
= strlen (symb
);
12902 symbol_name
= alloca (length
+ 32);
12903 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name
, symb
, length
);
12905 lazy_ptr_name
= alloca (length
+ 32);
12906 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name
, symb
, length
);
12908 local_label_0
= alloca (length
+ 32);
12909 GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0
, symb
, length
, 0);
12912 machopic_picsymbol_stub_section ();
12914 machopic_symbol_stub_section ();
12916 fprintf (file
, "%s:\n", stub
);
12917 fprintf (file
, "\t.indirect_symbol %s\n", symbol_name
);
12921 fprintf (file
, "\tmflr r0\n");
12922 fprintf (file
, "\tbcl 20,31,%s\n", local_label_0
);
12923 fprintf (file
, "%s:\n\tmflr r11\n", local_label_0
);
12924 fprintf (file
, "\taddis r11,r11,ha16(%s-%s)\n",
12925 lazy_ptr_name
, local_label_0
);
12926 fprintf (file
, "\tmtlr r0\n");
12927 fprintf (file
, "\tlwz r12,lo16(%s-%s)(r11)\n",
12928 lazy_ptr_name
, local_label_0
);
12929 fprintf (file
, "\tmtctr r12\n");
12930 fprintf (file
, "\taddi r11,r11,lo16(%s-%s)\n",
12931 lazy_ptr_name
, local_label_0
);
12932 fprintf (file
, "\tbctr\n");
12935 fprintf (file
, "non-pure not supported\n");
12937 machopic_lazy_symbol_ptr_section ();
12938 fprintf (file
, "%s:\n", lazy_ptr_name
);
12939 fprintf (file
, "\t.indirect_symbol %s\n", symbol_name
);
12940 fprintf (file
, "\t.long dyld_stub_binding_helper\n");
12943 /* Legitimize PIC addresses. If the address is already
12944 position-independent, we return ORIG. Newly generated
12945 position-independent addresses go into a reg. This is REG if non
12946 zero, otherwise we allocate register(s) as necessary. */
12948 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
12951 rs6000_machopic_legitimize_pic_address (orig
, mode
, reg
)
12953 enum machine_mode mode
;
12958 if (reg
== NULL
&& ! reload_in_progress
&& ! reload_completed
)
12959 reg
= gen_reg_rtx (Pmode
);
12961 if (GET_CODE (orig
) == CONST
)
12963 if (GET_CODE (XEXP (orig
, 0)) == PLUS
12964 && XEXP (XEXP (orig
, 0), 0) == pic_offset_table_rtx
)
12967 if (GET_CODE (XEXP (orig
, 0)) == PLUS
)
12970 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig
, 0), 0),
12973 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig
, 0), 1),
12979 if (GET_CODE (offset
) == CONST_INT
)
12981 if (SMALL_INT (offset
))
12982 return plus_constant (base
, INTVAL (offset
));
12983 else if (! reload_in_progress
&& ! reload_completed
)
12984 offset
= force_reg (Pmode
, offset
);
12987 rtx mem
= force_const_mem (Pmode
, orig
);
12988 return machopic_legitimize_pic_address (mem
, Pmode
, reg
);
12991 return gen_rtx (PLUS
, Pmode
, base
, offset
);
12994 /* Fall back on generic machopic code. */
12995 return machopic_legitimize_pic_address (orig
, mode
, reg
);
12998 /* This is just a placeholder to make linking work without having to
12999 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
13000 ever needed for Darwin (not too likely!) this would have to get a
13001 real definition. */
13008 #endif /* TARGET_MACHO */
13011 static unsigned int
13012 rs6000_elf_section_type_flags (decl
, name
, reloc
)
13017 unsigned int flags
= default_section_type_flags (decl
, name
, reloc
);
13019 if (TARGET_RELOCATABLE
)
13020 flags
|= SECTION_WRITE
;
13025 /* Record an element in the table of global constructors. SYMBOL is
13026 a SYMBOL_REF of the function to be called; PRIORITY is a number
13027 between 0 and MAX_INIT_PRIORITY.
13029 This differs from default_named_section_asm_out_constructor in
13030 that we have special handling for -mrelocatable. */
13033 rs6000_elf_asm_out_constructor (symbol
, priority
)
13037 const char *section
= ".ctors";
13040 if (priority
!= DEFAULT_INIT_PRIORITY
)
13042 sprintf (buf
, ".ctors.%.5u",
13043 /* Invert the numbering so the linker puts us in the proper
13044 order; constructors are run from right to left, and the
13045 linker sorts in increasing order. */
13046 MAX_INIT_PRIORITY
- priority
);
13050 named_section_flags (section
, SECTION_WRITE
);
13051 assemble_align (POINTER_SIZE
);
13053 if (TARGET_RELOCATABLE
)
13055 fputs ("\t.long (", asm_out_file
);
13056 output_addr_const (asm_out_file
, symbol
);
13057 fputs (")@fixup\n", asm_out_file
);
13060 assemble_integer (symbol
, POINTER_SIZE
/ BITS_PER_UNIT
, POINTER_SIZE
, 1);
13064 rs6000_elf_asm_out_destructor (symbol
, priority
)
13068 const char *section
= ".dtors";
13071 if (priority
!= DEFAULT_INIT_PRIORITY
)
13073 sprintf (buf
, ".dtors.%.5u",
13074 /* Invert the numbering so the linker puts us in the proper
13075 order; constructors are run from right to left, and the
13076 linker sorts in increasing order. */
13077 MAX_INIT_PRIORITY
- priority
);
13081 named_section_flags (section
, SECTION_WRITE
);
13082 assemble_align (POINTER_SIZE
);
13084 if (TARGET_RELOCATABLE
)
13086 fputs ("\t.long (", asm_out_file
);
13087 output_addr_const (asm_out_file
, symbol
);
13088 fputs (")@fixup\n", asm_out_file
);
13091 assemble_integer (symbol
, POINTER_SIZE
/ BITS_PER_UNIT
, POINTER_SIZE
, 1);
13097 rs6000_xcoff_asm_globalize_label (stream
, name
)
13101 fputs (GLOBAL_ASM_OP
, stream
);
13102 RS6000_OUTPUT_BASENAME (stream
, name
);
13103 putc ('\n', stream
);
13107 rs6000_xcoff_asm_named_section (name
, flags
)
13109 unsigned int flags ATTRIBUTE_UNUSED
;
13111 fprintf (asm_out_file
, "\t.csect %s\n", name
);
13115 rs6000_xcoff_select_section (exp
, reloc
, align
)
13118 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
;
13120 if ((TREE_CODE (exp
) == STRING_CST
13121 && ! flag_writable_strings
)
13122 || (TREE_CODE_CLASS (TREE_CODE (exp
)) == 'd'
13123 && TREE_READONLY (exp
) && ! TREE_THIS_VOLATILE (exp
)
13124 && DECL_INITIAL (exp
)
13125 && (DECL_INITIAL (exp
) == error_mark_node
13126 || TREE_CONSTANT (DECL_INITIAL (exp
)))
13129 if (TREE_PUBLIC (exp
))
13130 read_only_data_section ();
13132 read_only_private_data_section ();
13136 if (TREE_PUBLIC (exp
))
13139 private_data_section ();
13144 rs6000_xcoff_unique_section (decl
, reloc
)
13146 int reloc ATTRIBUTE_UNUSED
;
13152 if (TREE_CODE (decl
) == FUNCTION_DECL
)
13154 name
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
));
13155 len
= strlen (name
) + 5;
13156 string
= alloca (len
+ 1);
13157 sprintf (string
, ".%s[PR]", name
);
13158 DECL_SECTION_NAME (decl
) = build_string (len
, string
);
13162 /* Select section for constant in constant pool.
13164 On RS/6000, all constants are in the private read-only data area.
13165 However, if this is being placed in the TOC it must be output as a
13169 rs6000_xcoff_select_rtx_section (mode
, x
, align
)
13170 enum machine_mode mode
;
13172 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
;
13174 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x
, mode
))
13177 read_only_private_data_section ();
13180 /* Remove any trailing [DS] or the like from the symbol name. */
13182 static const char *
13183 rs6000_xcoff_strip_name_encoding (name
)
13189 len
= strlen (name
);
13190 if (name
[len
- 1] == ']')
13191 return ggc_alloc_string (name
, len
- 4);
13196 #endif /* TARGET_XCOFF */
13198 /* Note that this is also used for ELF64. */
13201 rs6000_xcoff_encode_section_info (decl
, first
)
13203 int first ATTRIBUTE_UNUSED
;
13205 if (TREE_CODE (decl
) == FUNCTION_DECL
13206 && (TREE_ASM_WRITTEN (decl
) || ! TREE_PUBLIC (decl
))
13207 && ! DECL_WEAK (decl
))
13208 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl
), 0)) = 1;