2002-07-25 Aldy Hernandez <aldyh@redhat.com>
[official-gcc.git] / gcc / config / rs6000 / rs6000.c
blob5b6cabb4fdddf68ba150a408344751e62935091a
1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "rtl.h"
26 #include "regs.h"
27 #include "hard-reg-set.h"
28 #include "real.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "flags.h"
33 #include "recog.h"
34 #include "obstack.h"
35 #include "tree.h"
36 #include "expr.h"
37 #include "optabs.h"
38 #include "except.h"
39 #include "function.h"
40 #include "output.h"
41 #include "basic-block.h"
42 #include "integrate.h"
43 #include "toplev.h"
44 #include "ggc.h"
45 #include "hashtab.h"
46 #include "tm_p.h"
47 #include "target.h"
48 #include "target-def.h"
49 #include "langhooks.h"
50 #include "reload.h"
/* Subtargets may predefine TARGET_NO_PROTOTYPE; default it off.  */
#ifndef TARGET_NO_PROTOTYPE
#define TARGET_NO_PROTOTYPE 0
#endif

/* Local min/max helpers.  NOTE(review): classic function-like macros —
   each argument may be evaluated twice, so do not pass expressions
   with side effects.  */
#define min(A,B)	((A) < (B) ? (A) : (B))
#define max(A,B)	((A) > (B) ? (A) : (B))
/* Target cpu type */

enum processor_type rs6000_cpu;

/* CPU/tune selections; the `string' members are filled in from the
   command line (and --with-cpu configure default) before
   rs6000_override_options scans this table.  */
struct rs6000_cpu_select rs6000_select[3] =
{
  /* switch		name,			tune	arch */
  { (const char *)0,	"--with-cpu=",		1,	1 },
  { (const char *)0,	"-mcpu=",		1,	1 },
  { (const char *)0,	"-mtune=",		1,	0 },
};

/* Size of long double */
const char *rs6000_long_double_size_string;
int rs6000_long_double_type_size;

/* Whether -mabi=altivec has appeared */
int rs6000_altivec_abi;

/* Whether VRSAVE instructions should be generated.  */
int rs6000_altivec_vrsave;

/* String from -mvrsave= option.  */
const char *rs6000_altivec_vrsave_string;

/* Nonzero if we want SPE ABI extensions.  */
int rs6000_spe_abi;

/* Whether isel instructions should be generated.  */
int rs6000_isel;

/* Nonzero if we have FPRs.  */
int rs6000_fprs = 1;

/* String from -misel=.  */
const char *rs6000_isel_string;

/* Set to non-zero once AIX common-mode calls have been defined.  */
static int common_mode_defined;

/* Save information from a "cmpxx" operation until the branch or scc is
   emitted.  */
rtx rs6000_compare_op0, rs6000_compare_op1;
int rs6000_compare_fp_p;

/* Label number of label created for -mrelocatable, to call to so we can
   get the address of the GOT section */
int rs6000_pic_labelno;

#ifdef USING_ELFOS_H
/* Which abi to adhere to */
const char *rs6000_abi_name = RS6000_ABI_NAME;

/* Semantics of the small data area */
enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;

/* Which small data model to use */
const char *rs6000_sdata_name = (char *)0;

/* Counter for labels which are to be placed in .fixup.  */
int fixuplabelno = 0;
#endif

/* ABI enumeration available for subtarget to use.  */
enum rs6000_abi rs6000_current_abi;

/* ABI string from -mabi= option.  */
const char *rs6000_abi_string;

/* Debug flags */
const char *rs6000_debug_name;
int rs6000_debug_stack;		/* debug stack applications */
int rs6000_debug_arg;		/* debug argument handling */

/* String from -mtraceback= and the setting decoded from it.  */
const char *rs6000_traceback_name;
static enum {
  traceback_default = 0,
  traceback_none,
  traceback_part,
  traceback_full
} rs6000_traceback;

/* Flag to say the TOC is initialized */
int toc_initialized;
char toc_label_name[10];

/* Alias set for saves and restores from the rs6000 stack.  */
static int rs6000_sr_alias_set;

/* Call distance, overridden by -mlongcall and #pragma longcall(1).
   The only place that looks at this is rs6000_set_default_type_attributes;
   everywhere else should rely on the presence or absence of a longcall
   attribute on the function declaration.  */
int rs6000_default_long_calls;
const char *rs6000_longcall_switch;
/* Describes one target builtin: the target-flag mask under which it is
   enabled, the insn it expands to, its user-visible name, and its
   enum rs6000_builtins code.  */
struct builtin_description
{
  /* mask is not const because we're going to alter it below.  This
     nonsense will go away when we rewrite the -march infrastructure
     to give us more target flag bits.  */
  unsigned int mask;
  const enum insn_code icode;
  const char *const name;
  const enum rs6000_builtins code;
};
165 static void rs6000_add_gc_roots PARAMS ((void));
166 static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
167 static rtx expand_block_move_mem PARAMS ((enum machine_mode, rtx, rtx));
168 static void validate_condition_mode
169 PARAMS ((enum rtx_code, enum machine_mode));
170 static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
171 static void rs6000_maybe_dead PARAMS ((rtx));
172 static void rs6000_emit_stack_tie PARAMS ((void));
173 static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
174 static void emit_frame_save PARAMS ((rtx, rtx, enum machine_mode,
175 unsigned int, int, int));
176 static rtx gen_frame_mem_offset PARAMS ((enum machine_mode, rtx, int));
177 static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
178 static unsigned rs6000_hash_constant PARAMS ((rtx));
179 static unsigned toc_hash_function PARAMS ((const void *));
180 static int toc_hash_eq PARAMS ((const void *, const void *));
181 static int toc_hash_mark_entry PARAMS ((void **, void *));
182 static void toc_hash_mark_table PARAMS ((void *));
183 static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
184 static struct machine_function * rs6000_init_machine_status PARAMS ((void));
185 static bool rs6000_assemble_integer PARAMS ((rtx, unsigned int, int));
186 static int rs6000_ra_ever_killed PARAMS ((void));
187 static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
188 const struct attribute_spec rs6000_attribute_table[];
189 static void rs6000_set_default_type_attributes PARAMS ((tree));
190 static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
191 static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
192 static rtx rs6000_emit_set_long_const PARAMS ((rtx,
193 HOST_WIDE_INT, HOST_WIDE_INT));
194 #if TARGET_ELF
195 static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
196 int));
197 static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
198 static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
199 static void rs6000_elf_select_section PARAMS ((tree, int,
200 unsigned HOST_WIDE_INT));
201 static void rs6000_elf_unique_section PARAMS ((tree, int));
202 static void rs6000_elf_select_rtx_section PARAMS ((enum machine_mode, rtx,
203 unsigned HOST_WIDE_INT));
204 static void rs6000_elf_encode_section_info PARAMS ((tree, int));
205 static const char *rs6000_elf_strip_name_encoding PARAMS ((const char *));
206 #endif
207 #if TARGET_XCOFF
208 static void xcoff_asm_named_section PARAMS ((const char *, unsigned int));
209 static void rs6000_xcoff_select_section PARAMS ((tree, int,
210 unsigned HOST_WIDE_INT));
211 static void rs6000_xcoff_unique_section PARAMS ((tree, int));
212 static void rs6000_xcoff_select_rtx_section PARAMS ((enum machine_mode, rtx,
213 unsigned HOST_WIDE_INT));
214 static const char * rs6000_xcoff_strip_name_encoding PARAMS ((const char *));
215 #endif
216 static void rs6000_xcoff_encode_section_info PARAMS ((tree, int))
217 ATTRIBUTE_UNUSED;
218 static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
219 static int rs6000_adjust_priority PARAMS ((rtx, int));
220 static int rs6000_issue_rate PARAMS ((void));
222 static void rs6000_init_builtins PARAMS ((void));
223 static rtx rs6000_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
224 static rtx rs6000_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
225 static rtx rs6000_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
226 static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
227 static void altivec_init_builtins PARAMS ((void));
228 static void rs6000_common_init_builtins PARAMS ((void));
230 static void enable_mask_for_builtins PARAMS ((struct builtin_description *,
231 int, enum rs6000_builtins,
232 enum rs6000_builtins));
233 static void spe_init_builtins PARAMS ((void));
234 static rtx spe_expand_builtin PARAMS ((tree, rtx, bool *));
235 static rtx spe_expand_predicate_builtin PARAMS ((enum insn_code, tree, rtx));
236 static rtx spe_expand_evsel_builtin PARAMS ((enum insn_code, tree, rtx));
237 static int rs6000_emit_int_cmove PARAMS ((rtx, rtx, rtx, rtx));
239 static rtx altivec_expand_builtin PARAMS ((tree, rtx, bool *));
240 static rtx altivec_expand_ld_builtin PARAMS ((tree, rtx, bool *));
241 static rtx altivec_expand_st_builtin PARAMS ((tree, rtx, bool *));
242 static rtx altivec_expand_dst_builtin PARAMS ((tree, rtx, bool *));
243 static rtx altivec_expand_abs_builtin PARAMS ((enum insn_code, tree, rtx));
244 static rtx altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx));
245 static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree));
246 static void rs6000_parse_abi_options PARAMS ((void));
247 static void rs6000_parse_vrsave_option PARAMS ((void));
248 static void rs6000_parse_isel_option PARAMS ((void));
249 static int first_altivec_reg_to_save PARAMS ((void));
250 static unsigned int compute_vrsave_mask PARAMS ((void));
251 static void is_altivec_return_reg PARAMS ((rtx, void *));
252 static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
253 static void altivec_frame_fixup PARAMS ((rtx, rtx, HOST_WIDE_INT));
254 static int easy_vector_constant PARAMS ((rtx));
/* Default register names, indexed by hard register number.  Layout
   parallels alt_reg_names below: 32 GPRs, 32 FPRs, mq/lr/ctr/ap,
   8 condition registers, xer, 32 AltiVec registers, vrsave.
   Overwritten from alt_reg_names when -mregnames is given.  */
char rs6000_reg_names[][8] =
{
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
     "mq", "lr", "ctr","ap",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "xer",
      /* AltiVec registers.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9",  "10", "11", "12", "13", "14", "15",
      "16", "17", "18", "19", "20", "21", "22", "23",
      "24", "25", "26", "27", "28", "29", "30", "31",
      "vrsave"
};
#ifdef TARGET_REGNAMES
/* Alternate (symbolic) register names, copied over rs6000_reg_names
   by rs6000_override_options when -mregnames is in effect.  Must have
   exactly the same layout as rs6000_reg_names above.  */
static const char alt_reg_names[][8] =
{
   "%r0",   "%r1",  "%r2",  "%r3",  "%r4",  "%r5",  "%r6",  "%r7",
   "%r8",   "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
  "%r16",  "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
  "%r24",  "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
   "%f0",   "%f1",  "%f2",  "%f3",  "%f4",  "%f5",  "%f6",  "%f7",
   "%f8",   "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
  "%f16",  "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
  "%f24",  "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
    "mq",    "lr",  "ctr",   "ap",
  "%cr0",  "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
   "xer",
  /* AltiVec registers.  */
   "%v0",  "%v1",  "%v2",  "%v3",  "%v4",  "%v5",  "%v6", "%v7",
   "%v8",  "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
  "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
  "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
  "vrsave"
};
#endif
#ifndef MASK_STRICT_ALIGN
#define MASK_STRICT_ALIGN 0
#endif

/* Initialize the GCC target structure.  Each TARGET_* macro below
   overrides a default hook; TARGET_INITIALIZER collects them into
   targetm at the end of this block.  */
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes

#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP

/* Default unaligned ops are only provided for ELF.  Find the ops needed
   for non-ELF systems.  */
#ifndef OBJECT_FORMAT_ELF
#if TARGET_XCOFF
/* For XCOFF.  rs6000_assemble_integer will handle unaligned DIs on
   64-bit targets.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
#else
/* For Darwin.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#endif
#endif

/* This hook deals with fixups for relocatable code and DI-mode objects
   in 64-bit code.  */
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER rs6000_assemble_integer

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue

#if TARGET_ELF
#undef TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS rs6000_elf_section_type_flags
#endif

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
#undef TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS rs6000_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN rs6000_expand_builtin

/* The VRSAVE bitmask puts bit %v0 as the most significant bit.  */
#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))

/* The table of target hooks consumed by the middle end.  */
struct gcc_target targetm = TARGET_INITIALIZER;
/* Override command line options.  Mostly we process the processor
   type and sometimes adjust other TARGET_ options.

   DEFAULT_CPU is the configure-time default processor name (may be
   NULL); it seeds rs6000_select[0] before the -mcpu=/-mtune= entries
   are scanned.  */

void
rs6000_override_options (default_cpu)
     const char *default_cpu;
{
  size_t i, j;
  struct rs6000_cpu_select *ptr;

  /* Simplify the entries below by making a mask for any POWER
     variant and any PowerPC variant.  */

#define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
#define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
		       | MASK_PPC_GFXOPT | MASK_POWERPC64)
#define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)

  /* One row per recognized -mcpu=/-mtune= argument: the flags to force
     on and the flags to force off when that cpu is selected.  */
  static struct ptt
    {
      const char *const name;		/* Canonical processor name.  */
      const enum processor_type processor; /* Processor type enum value.  */
      const int target_enable;		/* Target flags to enable.  */
      const int target_disable;		/* Target flags to disable.  */
    } const processor_target_table[]
      = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_MASKS},
	 {"power", PROCESSOR_POWER,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"power2", PROCESSOR_POWER,
	    MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
	    POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"power3", PROCESSOR_PPC630,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT},
	 {"power4", PROCESSOR_POWER4,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT},
	 {"powerpc", PROCESSOR_POWERPC,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"powerpc64", PROCESSOR_POWERPC64,
	    MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS},
	 {"rios", PROCESSOR_RIOS1,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rios1", PROCESSOR_RIOS1,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rsc", PROCESSOR_PPC601,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rsc1", PROCESSOR_PPC601,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rios2", PROCESSOR_RIOS2,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
	    POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rs64a", PROCESSOR_RS64A,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS},
	 {"401", PROCESSOR_PPC403,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"403", PROCESSOR_PPC403,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"405", PROCESSOR_PPC405,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"505", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"601", PROCESSOR_PPC601,
	    MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"602", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"603", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"603e", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"ec603e", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"604", PROCESSOR_PPC604,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"604e", PROCESSOR_PPC604e,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"620", PROCESSOR_PPC620,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT},
	 {"630", PROCESSOR_PPC630,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT},
	 {"740", PROCESSOR_PPC750,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"750", PROCESSOR_PPC750,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"7400", PROCESSOR_PPC7400,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"7450", PROCESSOR_PPC7450,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"8540", PROCESSOR_PPC8540,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"801", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"821", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"823", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"860", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64}};

  const size_t ptt_size = ARRAY_SIZE (processor_target_table);

  /* Save current -mmultiple/-mno-multiple status.  */
  int multiple = TARGET_MULTIPLE;
  /* Save current -mstring/-mno-string status.  */
  int string = TARGET_STRING;

  /* Identify the processor type.  */
  rs6000_select[0].string = default_cpu;
  rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;

  /* Scan --with-cpu, -mcpu and -mtune (in that order, so later
     switches win) against the processor table.  */
  for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
    {
      ptr = &rs6000_select[i];
      if (ptr->string != (char *)0 && ptr->string[0] != '\0')
	{
	  for (j = 0; j < ptt_size; j++)
	    if (! strcmp (ptr->string, processor_target_table[j].name))
	      {
		if (ptr->set_tune_p)
		  rs6000_cpu = processor_target_table[j].processor;

		if (ptr->set_arch_p)
		  {
		    target_flags |= processor_target_table[j].target_enable;
		    target_flags &= ~processor_target_table[j].target_disable;
		  }
		break;
	      }

	  if (j == ptt_size)
	    error ("bad value (%s) for %s switch", ptr->string, ptr->name);
	}
    }

  /* The 8540 implements isel; enable it unconditionally there.  */
  if (rs6000_cpu == PROCESSOR_PPC8540)
    rs6000_isel = 1;

  /* If we are optimizing big endian systems for space, use the store
     multiple instructions.  */
  if (BYTES_BIG_ENDIAN && optimize_size)
    target_flags |= MASK_MULTIPLE;

  /* If -mmultiple or -mno-multiple was explicitly used, don't
     override with the processor default */
  if (TARGET_MULTIPLE_SET)
    target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;

  /* If -mstring or -mno-string was explicitly used, don't override
     with the processor default.  */
  if (TARGET_STRING_SET)
    target_flags = (target_flags & ~MASK_STRING) | string;

  /* Don't allow -mmultiple or -mstring on little endian systems
     unless the cpu is a 750, because the hardware doesn't support the
     instructions used in little endian mode, and causes an alignment
     trap.  The 750 does not cause an alignment trap (except when the
     target is unaligned).  */

  if (! BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
    {
      if (TARGET_MULTIPLE)
	{
	  target_flags &= ~MASK_MULTIPLE;
	  if (TARGET_MULTIPLE_SET)
	    warning ("-mmultiple is not supported on little endian systems");
	}

      if (TARGET_STRING)
	{
	  target_flags &= ~MASK_STRING;
	  if (TARGET_STRING_SET)
	    warning ("-mstring is not supported on little endian systems");
	}
    }

  /* AIX code is always position independent, so -fpic/-fPIC is moot.
     NOTE(review): flag_pic is zeroed BEFORE the warning below tests
     (flag_pic > 1), so the message always says "pic" even when -fPIC
     was given — looks like a latent bug; confirm against upstream.  */
  if (flag_pic != 0 && DEFAULT_ABI == ABI_AIX)
    {
      flag_pic = 0;

      if (extra_warnings)
	warning ("-f%s ignored (all code is position independent)",
		 (flag_pic > 1) ? "PIC" : "pic");
    }

#ifdef XCOFF_DEBUGGING_INFO
  if (flag_function_sections && (write_symbols != NO_DEBUG)
      && DEFAULT_ABI == ABI_AIX)
    {
      warning ("-ffunction-sections disabled on AIX when debugging");
      flag_function_sections = 0;
    }

  if (flag_data_sections && (DEFAULT_ABI == ABI_AIX))
    {
      warning ("-fdata-sections not supported on AIX");
      flag_data_sections = 0;
    }
#endif

  /* Set debug flags */
  if (rs6000_debug_name)
    {
      if (! strcmp (rs6000_debug_name, "all"))
	rs6000_debug_stack = rs6000_debug_arg = 1;
      else if (! strcmp (rs6000_debug_name, "stack"))
	rs6000_debug_stack = 1;
      else if (! strcmp (rs6000_debug_name, "arg"))
	rs6000_debug_arg = 1;
      else
	error ("unknown -mdebug-%s switch", rs6000_debug_name);
    }

  if (rs6000_traceback_name)
    {
      if (! strncmp (rs6000_traceback_name, "full", 4))
	rs6000_traceback = traceback_full;
      else if (! strncmp (rs6000_traceback_name, "part", 4))
	rs6000_traceback = traceback_part;
      else if (! strncmp (rs6000_traceback_name, "no", 2))
	rs6000_traceback = traceback_none;
      else
	error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
	       rs6000_traceback_name);
    }

  /* Set size of long double */
  rs6000_long_double_type_size = 64;
  if (rs6000_long_double_size_string)
    {
      char *tail;
      int size = strtol (rs6000_long_double_size_string, &tail, 10);
      if (*tail != '\0' || (size != 64 && size != 128))
	error ("Unknown switch -mlong-double-%s",
	       rs6000_long_double_size_string);
      else
	rs6000_long_double_type_size = size;
    }

  /* Handle -mabi= options.  */
  rs6000_parse_abi_options ();

  /* Handle -mvrsave= option.  */
  rs6000_parse_vrsave_option ();

  /* Handle -misel= option.  */
  rs6000_parse_isel_option ();

#ifdef SUBTARGET_OVERRIDE_OPTIONS
  SUBTARGET_OVERRIDE_OPTIONS;
#endif
#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
  SUBSUBTARGET_OVERRIDE_OPTIONS;
#endif

  /* Handle -m(no-)longcall option.  This is a bit of a cheap hack,
     using TARGET_OPTIONS to handle a toggle switch, but we're out of
     bits in target_flags so TARGET_SWITCHES cannot be used.
     Assumption here is that rs6000_longcall_switch points into the
     text of the complete option, rather than being a copy, so we can
     scan back for the presence or absence of the no- modifier.  */
  if (rs6000_longcall_switch)
    {
      const char *base = rs6000_longcall_switch;
      while (base[-1] != 'm') base--;

      /* Anything left after "-m[no-]longcall" is junk.  */
      if (*rs6000_longcall_switch != '\0')
	error ("invalid option `%s'", base);
      rs6000_default_long_calls = (base[0] != 'n');
    }

#ifdef TARGET_REGNAMES
  /* If the user desires alternate register names, copy in the
     alternate names now.  */
  if (TARGET_REGNAMES)
    memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
#endif

  /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
     If -maix-struct-return or -msvr4-struct-return was explicitly
     used, don't override with the ABI default.  */
  if (!(target_flags & MASK_AIX_STRUCT_RET_SET))
    {
      if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
	target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
      else
	target_flags |= MASK_AIX_STRUCT_RET;
    }

  /* Register global variables with the garbage collector.  */
  rs6000_add_gc_roots ();

  /* Allocate an alias set for register saves & restores from stack.  */
  rs6000_sr_alias_set = new_alias_set ();

  if (TARGET_TOC)
    ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);

  /* We can only guarantee the availability of DI pseudo-ops when
     assembling for 64-bit targets.  */
  if (!TARGET_64BIT)
    {
      targetm.asm_out.aligned_op.di = NULL;
      targetm.asm_out.unaligned_op.di = NULL;
    }

  /* Arrange to save and restore machine status around nested functions.  */
  init_machine_status = rs6000_init_machine_status;
}
708 /* Handle -misel= option. */
709 static void
710 rs6000_parse_isel_option ()
712 if (rs6000_isel_string == 0)
713 return;
714 else if (! strcmp (rs6000_isel_string, "yes"))
715 rs6000_isel = 1;
716 else if (! strcmp (rs6000_isel_string, "no"))
717 rs6000_isel = 0;
718 else
719 error ("unknown -misel= option specified: '%s'",
720 rs6000_isel_string);
723 /* Handle -mvrsave= options. */
724 static void
725 rs6000_parse_vrsave_option ()
727 /* Generate VRSAVE instructions by default. */
728 if (rs6000_altivec_vrsave_string == 0
729 || ! strcmp (rs6000_altivec_vrsave_string, "yes"))
730 rs6000_altivec_vrsave = 1;
731 else if (! strcmp (rs6000_altivec_vrsave_string, "no"))
732 rs6000_altivec_vrsave = 0;
733 else
734 error ("unknown -mvrsave= option specified: '%s'",
735 rs6000_altivec_vrsave_string);
738 /* Handle -mabi= options. */
739 static void
740 rs6000_parse_abi_options ()
742 if (rs6000_abi_string == 0)
743 return;
744 else if (! strcmp (rs6000_abi_string, "altivec"))
745 rs6000_altivec_abi = 1;
746 else if (! strcmp (rs6000_abi_string, "no-altivec"))
747 rs6000_altivec_abi = 0;
748 else if (! strcmp (rs6000_abi_string, "spe"))
749 rs6000_spe_abi = 1;
750 else if (! strcmp (rs6000_abi_string, "no-spe"))
751 rs6000_spe_abi = 0;
752 else
753 error ("unknown ABI specified: '%s'", rs6000_abi_string);
/* Hook run once per -O level; this port currently makes no
   per-optimization-level adjustments.  */
void
optimization_options (level, size)
     int level ATTRIBUTE_UNUSED;
     int size ATTRIBUTE_UNUSED;
{
}
/* Do anything needed at the start of the asm file.  With
   -fverbose-asm, emit an assembler comment listing the active
   rs6000/powerpc options (cpu/tune selections, and on ELF the
   -msdata model and -G threshold).  */

void
rs6000_file_start (file, default_cpu)
     FILE *file;
     const char *default_cpu;
{
  size_t i;
  char buffer[80];
  /* START holds the comment-leader prefix for the first item only;
     it is reset to "" once anything has been printed.  */
  const char *start = buffer;
  struct rs6000_cpu_select *ptr;

  if (flag_verbose_asm)
    {
      sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
      rs6000_select[0].string = default_cpu;

      for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
	{
	  ptr = &rs6000_select[i];
	  if (ptr->string != (char *)0 && ptr->string[0] != '\0')
	    {
	      fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
	      start = "";
	    }
	}

#ifdef USING_ELFOS_H
      switch (rs6000_sdata)
	{
	case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
	case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
	case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
	case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
	}

      if (rs6000_sdata && g_switch_value)
	{
	  fprintf (file, "%s -G %d", start, g_switch_value);
	  start = "";
	}
#endif

      /* If anything was printed, START no longer points at BUFFER;
	 terminate the comment line.  */
      if (*start == '\0')
	putc ('\n', file);
    }
}
/* Return non-zero if this function is known to have a null epilogue:
   after reload, no GPRs/FPRs/AltiVec registers need saving, LR/CR
   are not saved, VRSAVE is untouched, and no stack frame is pushed.
   Before reload completes we must conservatively return 0.  */

int
direct_return ()
{
  if (reload_completed)
    {
      rs6000_stack_t *info = rs6000_stack_info ();

      if (info->first_gp_reg_save == 32
	  && info->first_fp_reg_save == 64
	  && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
	  && ! info->lr_save_p
	  && ! info->cr_save_p
	  && info->vrsave_mask == 0
	  && ! info->push_p)
	return 1;
    }

  return 0;
}
833 /* Returns 1 always. */
836 any_operand (op, mode)
837 rtx op ATTRIBUTE_UNUSED;
838 enum machine_mode mode ATTRIBUTE_UNUSED;
840 return 1;
843 /* Returns 1 if op is the count register. */
845 count_register_operand (op, mode)
846 rtx op;
847 enum machine_mode mode ATTRIBUTE_UNUSED;
849 if (GET_CODE (op) != REG)
850 return 0;
852 if (REGNO (op) == COUNT_REGISTER_REGNUM)
853 return 1;
855 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
856 return 1;
858 return 0;
861 /* Returns 1 if op is an altivec register. */
863 altivec_register_operand (op, mode)
864 rtx op;
865 enum machine_mode mode ATTRIBUTE_UNUSED;
868 return (register_operand (op, mode)
869 && (GET_CODE (op) != REG
870 || REGNO (op) > FIRST_PSEUDO_REGISTER
871 || ALTIVEC_REGNO_P (REGNO (op))));
875 xer_operand (op, mode)
876 rtx op;
877 enum machine_mode mode ATTRIBUTE_UNUSED;
879 if (GET_CODE (op) != REG)
880 return 0;
882 if (XER_REGNO_P (REGNO (op)))
883 return 1;
885 return 0;
888 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
889 by such constants completes more quickly. */
892 s8bit_cint_operand (op, mode)
893 rtx op;
894 enum machine_mode mode ATTRIBUTE_UNUSED;
896 return ( GET_CODE (op) == CONST_INT
897 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
900 /* Return 1 if OP is a constant that can fit in a D field. */
903 short_cint_operand (op, mode)
904 rtx op;
905 enum machine_mode mode ATTRIBUTE_UNUSED;
907 return (GET_CODE (op) == CONST_INT
908 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
911 /* Similar for an unsigned D field. */
914 u_short_cint_operand (op, mode)
915 rtx op;
916 enum machine_mode mode ATTRIBUTE_UNUSED;
918 return (GET_CODE (op) == CONST_INT
919 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
922 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
925 non_short_cint_operand (op, mode)
926 rtx op;
927 enum machine_mode mode ATTRIBUTE_UNUSED;
929 return (GET_CODE (op) == CONST_INT
930 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
933 /* Returns 1 if OP is a CONST_INT that is a positive value
934 and an exact power of 2. */
937 exact_log2_cint_operand (op, mode)
938 rtx op;
939 enum machine_mode mode ATTRIBUTE_UNUSED;
941 return (GET_CODE (op) == CONST_INT
942 && INTVAL (op) > 0
943 && exact_log2 (INTVAL (op)) >= 0);
946 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
947 ctr, or lr). */
950 gpc_reg_operand (op, mode)
951 rtx op;
952 enum machine_mode mode;
954 return (register_operand (op, mode)
955 && (GET_CODE (op) != REG
956 || (REGNO (op) >= ARG_POINTER_REGNUM
957 && !XER_REGNO_P (REGNO (op)))
958 || REGNO (op) < MQ_REGNO));
961 /* Returns 1 if OP is either a pseudo-register or a register denoting a
962 CR field. */
965 cc_reg_operand (op, mode)
966 rtx op;
967 enum machine_mode mode;
969 return (register_operand (op, mode)
970 && (GET_CODE (op) != REG
971 || REGNO (op) >= FIRST_PSEUDO_REGISTER
972 || CR_REGNO_P (REGNO (op))));
975 /* Returns 1 if OP is either a pseudo-register or a register denoting a
976 CR field that isn't CR0. */
979 cc_reg_not_cr0_operand (op, mode)
980 rtx op;
981 enum machine_mode mode;
983 return (register_operand (op, mode)
984 && (GET_CODE (op) != REG
985 || REGNO (op) >= FIRST_PSEUDO_REGISTER
986 || CR_REGNO_NOT_CR0_P (REGNO (op))));
989 /* Returns 1 if OP is either a constant integer valid for a D-field or
990 a non-special register. If a register, it must be in the proper
991 mode unless MODE is VOIDmode. */
994 reg_or_short_operand (op, mode)
995 rtx op;
996 enum machine_mode mode;
998 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1001 /* Similar, except check if the negation of the constant would be
1002 valid for a D-field. */
1005 reg_or_neg_short_operand (op, mode)
1006 rtx op;
1007 enum machine_mode mode;
1009 if (GET_CODE (op) == CONST_INT)
1010 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
1012 return gpc_reg_operand (op, mode);
1015 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1016 a non-special register. If a register, it must be in the proper
1017 mode unless MODE is VOIDmode. */
1020 reg_or_aligned_short_operand (op, mode)
1021 rtx op;
1022 enum machine_mode mode;
1024 if (gpc_reg_operand (op, mode))
1025 return 1;
1026 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1027 return 1;
1029 return 0;
1033 /* Return 1 if the operand is either a register or an integer whose
1034 high-order 16 bits are zero. */
1037 reg_or_u_short_operand (op, mode)
1038 rtx op;
1039 enum machine_mode mode;
1041 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1044 /* Return 1 is the operand is either a non-special register or ANY
1045 constant integer. */
1048 reg_or_cint_operand (op, mode)
1049 rtx op;
1050 enum machine_mode mode;
1052 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
1055 /* Return 1 is the operand is either a non-special register or ANY
1056 32-bit signed constant integer. */
1059 reg_or_arith_cint_operand (op, mode)
1060 rtx op;
1061 enum machine_mode mode;
1063 return (gpc_reg_operand (op, mode)
1064 || (GET_CODE (op) == CONST_INT
1065 #if HOST_BITS_PER_WIDE_INT != 32
1066 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
1067 < (unsigned HOST_WIDE_INT) 0x100000000ll)
1068 #endif
1072 /* Return 1 is the operand is either a non-special register or a 32-bit
1073 signed constant integer valid for 64-bit addition. */
1076 reg_or_add_cint64_operand (op, mode)
1077 rtx op;
1078 enum machine_mode mode;
1080 return (gpc_reg_operand (op, mode)
1081 || (GET_CODE (op) == CONST_INT
1082 #if HOST_BITS_PER_WIDE_INT == 32
1083 && INTVAL (op) < 0x7fff8000
1084 #else
1085 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
1086 < 0x100000000ll)
1087 #endif
1091 /* Return 1 is the operand is either a non-special register or a 32-bit
1092 signed constant integer valid for 64-bit subtraction. */
1095 reg_or_sub_cint64_operand (op, mode)
1096 rtx op;
1097 enum machine_mode mode;
1099 return (gpc_reg_operand (op, mode)
1100 || (GET_CODE (op) == CONST_INT
1101 #if HOST_BITS_PER_WIDE_INT == 32
1102 && (- INTVAL (op)) < 0x7fff8000
1103 #else
1104 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
1105 < 0x100000000ll)
1106 #endif
1110 /* Return 1 is the operand is either a non-special register or ANY
1111 32-bit unsigned constant integer. */
1114 reg_or_logical_cint_operand (op, mode)
1115 rtx op;
1116 enum machine_mode mode;
1118 if (GET_CODE (op) == CONST_INT)
1120 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
1122 if (GET_MODE_BITSIZE (mode) <= 32)
1123 abort ();
1125 if (INTVAL (op) < 0)
1126 return 0;
1129 return ((INTVAL (op) & GET_MODE_MASK (mode)
1130 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
1132 else if (GET_CODE (op) == CONST_DOUBLE)
1134 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
1135 || mode != DImode)
1136 abort ();
1138 return CONST_DOUBLE_HIGH (op) == 0;
1140 else
1141 return gpc_reg_operand (op, mode);
1144 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1147 got_operand (op, mode)
1148 rtx op;
1149 enum machine_mode mode ATTRIBUTE_UNUSED;
1151 return (GET_CODE (op) == SYMBOL_REF
1152 || GET_CODE (op) == CONST
1153 || GET_CODE (op) == LABEL_REF);
1156 /* Return 1 if the operand is a simple references that can be loaded via
1157 the GOT (labels involving addition aren't allowed). */
1160 got_no_const_operand (op, mode)
1161 rtx op;
1162 enum machine_mode mode ATTRIBUTE_UNUSED;
1164 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1167 /* Return the number of instructions it takes to form a constant in an
1168 integer register. */
1170 static int
1171 num_insns_constant_wide (value)
1172 HOST_WIDE_INT value;
1174 /* signed constant loadable with {cal|addi} */
1175 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1176 return 1;
1178 /* constant loadable with {cau|addis} */
1179 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1180 return 1;
1182 #if HOST_BITS_PER_WIDE_INT == 64
1183 else if (TARGET_POWERPC64)
1185 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
1186 HOST_WIDE_INT high = value >> 31;
1188 if (high == 0 || high == -1)
1189 return 2;
1191 high >>= 1;
1193 if (low == 0)
1194 return num_insns_constant_wide (high) + 1;
1195 else
1196 return (num_insns_constant_wide (high)
1197 + num_insns_constant_wide (low) + 1);
1199 #endif
1201 else
1202 return 2;
1206 num_insns_constant (op, mode)
1207 rtx op;
1208 enum machine_mode mode;
1210 if (GET_CODE (op) == CONST_INT)
1212 #if HOST_BITS_PER_WIDE_INT == 64
1213 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1214 && mask64_operand (op, mode))
1215 return 2;
1216 else
1217 #endif
1218 return num_insns_constant_wide (INTVAL (op));
1221 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
1223 long l;
1224 REAL_VALUE_TYPE rv;
1226 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1227 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1228 return num_insns_constant_wide ((HOST_WIDE_INT) l);
1231 else if (GET_CODE (op) == CONST_DOUBLE)
1233 HOST_WIDE_INT low;
1234 HOST_WIDE_INT high;
1235 long l[2];
1236 REAL_VALUE_TYPE rv;
1237 int endian = (WORDS_BIG_ENDIAN == 0);
1239 if (mode == VOIDmode || mode == DImode)
1241 high = CONST_DOUBLE_HIGH (op);
1242 low = CONST_DOUBLE_LOW (op);
1244 else
1246 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1247 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
1248 high = l[endian];
1249 low = l[1 - endian];
1252 if (TARGET_32BIT)
1253 return (num_insns_constant_wide (low)
1254 + num_insns_constant_wide (high));
1256 else
1258 if (high == 0 && low >= 0)
1259 return num_insns_constant_wide (low);
1261 else if (high == -1 && low < 0)
1262 return num_insns_constant_wide (low);
1264 else if (mask64_operand (op, mode))
1265 return 2;
1267 else if (low == 0)
1268 return num_insns_constant_wide (high) + 1;
1270 else
1271 return (num_insns_constant_wide (high)
1272 + num_insns_constant_wide (low) + 1);
1276 else
1277 abort ();
1280 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1281 register with one instruction per word. We only do this if we can
1282 safely read CONST_DOUBLE_{LOW,HIGH}. */
1285 easy_fp_constant (op, mode)
1286 rtx op;
1287 enum machine_mode mode;
1289 if (GET_CODE (op) != CONST_DOUBLE
1290 || GET_MODE (op) != mode
1291 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
1292 return 0;
1294 /* Consider all constants with -msoft-float to be easy. */
1295 if ((TARGET_SOFT_FLOAT || !TARGET_FPRS)
1296 && mode != DImode)
1297 return 1;
1299 /* If we are using V.4 style PIC, consider all constants to be hard. */
1300 if (flag_pic && DEFAULT_ABI == ABI_V4)
1301 return 0;
1303 #ifdef TARGET_RELOCATABLE
1304 /* Similarly if we are using -mrelocatable, consider all constants
1305 to be hard. */
1306 if (TARGET_RELOCATABLE)
1307 return 0;
1308 #endif
1310 if (mode == DFmode)
1312 long k[2];
1313 REAL_VALUE_TYPE rv;
1315 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1316 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1318 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1319 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
1322 else if (mode == SFmode)
1324 long l;
1325 REAL_VALUE_TYPE rv;
1327 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1328 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1330 return num_insns_constant_wide (l) == 1;
1333 else if (mode == DImode)
1334 return ((TARGET_POWERPC64
1335 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
1336 || (num_insns_constant (op, DImode) <= 2));
1338 else if (mode == SImode)
1339 return 1;
1340 else
1341 abort ();
1344 /* Return 1 if the operand is a CONST_INT and can be put into a
1345 register with one instruction. */
1347 static int
1348 easy_vector_constant (op)
1349 rtx op;
1351 rtx elt;
1352 int units, i;
1354 if (GET_CODE (op) != CONST_VECTOR)
1355 return 0;
1357 units = CONST_VECTOR_NUNITS (op);
1359 /* We can generate 0 easily. Look for that. */
1360 for (i = 0; i < units; ++i)
1362 elt = CONST_VECTOR_ELT (op, i);
1364 /* We could probably simplify this by just checking for equality
1365 with CONST0_RTX for the current mode, but let's be safe
1366 instead. */
1368 switch (GET_CODE (elt))
1370 case CONST_INT:
1371 if (INTVAL (elt) != 0)
1372 return 0;
1373 break;
1374 case CONST_DOUBLE:
1375 if (CONST_DOUBLE_LOW (elt) != 0 || CONST_DOUBLE_HIGH (elt) != 0)
1376 return 0;
1377 break;
1378 default:
1379 return 0;
1383 /* We could probably generate a few other constants trivially, but
1384 gcc doesn't generate them yet. FIXME later. */
1385 return 1;
1388 /* Return 1 if the operand is the constant 0. This works for scalars
1389 as well as vectors. */
1391 zero_constant (op, mode)
1392 rtx op;
1393 enum machine_mode mode;
1395 return op == CONST0_RTX (mode);
1398 /* Return 1 if the operand is 0.0. */
1400 zero_fp_constant (op, mode)
1401 rtx op;
1402 enum machine_mode mode;
1404 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1407 /* Return 1 if the operand is in volatile memory. Note that during
1408 the RTL generation phase, memory_operand does not return TRUE for
1409 volatile memory references. So this function allows us to
1410 recognize volatile references where its safe. */
1413 volatile_mem_operand (op, mode)
1414 rtx op;
1415 enum machine_mode mode;
1417 if (GET_CODE (op) != MEM)
1418 return 0;
1420 if (!MEM_VOLATILE_P (op))
1421 return 0;
1423 if (mode != GET_MODE (op))
1424 return 0;
1426 if (reload_completed)
1427 return memory_operand (op, mode);
1429 if (reload_in_progress)
1430 return strict_memory_address_p (mode, XEXP (op, 0));
1432 return memory_address_p (mode, XEXP (op, 0));
1435 /* Return 1 if the operand is an offsettable memory operand. */
1438 offsettable_mem_operand (op, mode)
1439 rtx op;
1440 enum machine_mode mode;
1442 return ((GET_CODE (op) == MEM)
1443 && offsettable_address_p (reload_completed || reload_in_progress,
1444 mode, XEXP (op, 0)));
1447 /* Return 1 if the operand is either an easy FP constant (see above) or
1448 memory. */
1451 mem_or_easy_const_operand (op, mode)
1452 rtx op;
1453 enum machine_mode mode;
1455 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1458 /* Return 1 if the operand is either a non-special register or an item
1459 that can be used as the operand of a `mode' add insn. */
1462 add_operand (op, mode)
1463 rtx op;
1464 enum machine_mode mode;
1466 if (GET_CODE (op) == CONST_INT)
1467 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1468 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1470 return gpc_reg_operand (op, mode);
1473 /* Return 1 if OP is a constant but not a valid add_operand. */
1476 non_add_cint_operand (op, mode)
1477 rtx op;
1478 enum machine_mode mode ATTRIBUTE_UNUSED;
1480 return (GET_CODE (op) == CONST_INT
1481 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1482 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1485 /* Return 1 if the operand is a non-special register or a constant that
1486 can be used as the operand of an OR or XOR insn on the RS/6000. */
1489 logical_operand (op, mode)
1490 rtx op;
1491 enum machine_mode mode;
1493 HOST_WIDE_INT opl, oph;
1495 if (gpc_reg_operand (op, mode))
1496 return 1;
1498 if (GET_CODE (op) == CONST_INT)
1500 opl = INTVAL (op) & GET_MODE_MASK (mode);
1502 #if HOST_BITS_PER_WIDE_INT <= 32
1503 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
1504 return 0;
1505 #endif
1507 else if (GET_CODE (op) == CONST_DOUBLE)
1509 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1510 abort ();
1512 opl = CONST_DOUBLE_LOW (op);
1513 oph = CONST_DOUBLE_HIGH (op);
1514 if (oph != 0)
1515 return 0;
1517 else
1518 return 0;
1520 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
1521 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
1524 /* Return 1 if C is a constant that is not a logical operand (as
1525 above), but could be split into one. */
1528 non_logical_cint_operand (op, mode)
1529 rtx op;
1530 enum machine_mode mode;
1532 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1533 && ! logical_operand (op, mode)
1534 && reg_or_logical_cint_operand (op, mode));
1537 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1538 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1539 Reject all ones and all zeros, since these should have been optimized
1540 away and confuse the making of MB and ME. */
1543 mask_operand (op, mode)
1544 rtx op;
1545 enum machine_mode mode ATTRIBUTE_UNUSED;
1547 HOST_WIDE_INT c, lsb;
1549 if (GET_CODE (op) != CONST_INT)
1550 return 0;
1552 c = INTVAL (op);
1554 /* Fail in 64-bit mode if the mask wraps around because the upper
1555 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
1556 if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
1557 return 0;
1559 /* We don't change the number of transitions by inverting,
1560 so make sure we start with the LS bit zero. */
1561 if (c & 1)
1562 c = ~c;
1564 /* Reject all zeros or all ones. */
1565 if (c == 0)
1566 return 0;
1568 /* Find the first transition. */
1569 lsb = c & -c;
1571 /* Invert to look for a second transition. */
1572 c = ~c;
1574 /* Erase first transition. */
1575 c &= -lsb;
1577 /* Find the second transition (if any). */
1578 lsb = c & -c;
1580 /* Match if all the bits above are 1's (or c is zero). */
1581 return c == -lsb;
1584 /* Return 1 for the PowerPC64 rlwinm corner case. */
1587 mask_operand_wrap (op, mode)
1588 rtx op;
1589 enum machine_mode mode ATTRIBUTE_UNUSED;
1591 HOST_WIDE_INT c, lsb;
1593 if (GET_CODE (op) != CONST_INT)
1594 return 0;
1596 c = INTVAL (op);
1598 if ((c & 0x80000001) != 0x80000001)
1599 return 0;
1601 c = ~c;
1602 if (c == 0)
1603 return 0;
1605 lsb = c & -c;
1606 c = ~c;
1607 c &= -lsb;
1608 lsb = c & -c;
1609 return c == -lsb;
1612 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
1613 It is if there are no more than one 1->0 or 0->1 transitions.
1614 Reject all zeros, since zero should have been optimized away and
1615 confuses the making of MB and ME. */
1618 mask64_operand (op, mode)
1619 rtx op;
1620 enum machine_mode mode ATTRIBUTE_UNUSED;
1622 if (GET_CODE (op) == CONST_INT)
1624 HOST_WIDE_INT c, lsb;
1626 c = INTVAL (op);
1628 /* Reject all zeros. */
1629 if (c == 0)
1630 return 0;
1632 /* We don't change the number of transitions by inverting,
1633 so make sure we start with the LS bit zero. */
1634 if (c & 1)
1635 c = ~c;
1637 /* Find the transition, and check that all bits above are 1's. */
1638 lsb = c & -c;
1639 return c == -lsb;
1641 return 0;
1644 /* Like mask64_operand, but allow up to three transitions. This
1645 predicate is used by insn patterns that generate two rldicl or
1646 rldicr machine insns. */
1649 mask64_2_operand (op, mode)
1650 rtx op;
1651 enum machine_mode mode ATTRIBUTE_UNUSED;
1653 if (GET_CODE (op) == CONST_INT)
1655 HOST_WIDE_INT c, lsb;
1657 c = INTVAL (op);
1659 /* Disallow all zeros. */
1660 if (c == 0)
1661 return 0;
1663 /* We don't change the number of transitions by inverting,
1664 so make sure we start with the LS bit zero. */
1665 if (c & 1)
1666 c = ~c;
1668 /* Find the first transition. */
1669 lsb = c & -c;
1671 /* Invert to look for a second transition. */
1672 c = ~c;
1674 /* Erase first transition. */
1675 c &= -lsb;
1677 /* Find the second transition. */
1678 lsb = c & -c;
1680 /* Invert to look for a third transition. */
1681 c = ~c;
1683 /* Erase second transition. */
1684 c &= -lsb;
1686 /* Find the third transition (if any). */
1687 lsb = c & -c;
1689 /* Match if all the bits above are 1's (or c is zero). */
1690 return c == -lsb;
1692 return 0;
1695 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
1696 implement ANDing by the mask IN. */
1697 void
1698 build_mask64_2_operands (in, out)
1699 rtx in;
1700 rtx *out;
1702 #if HOST_BITS_PER_WIDE_INT >= 64
1703 unsigned HOST_WIDE_INT c, lsb, m1, m2;
1704 int shift;
1706 if (GET_CODE (in) != CONST_INT)
1707 abort ();
1709 c = INTVAL (in);
1710 if (c & 1)
1712 /* Assume c initially something like 0x00fff000000fffff. The idea
1713 is to rotate the word so that the middle ^^^^^^ group of zeros
1714 is at the MS end and can be cleared with an rldicl mask. We then
1715 rotate back and clear off the MS ^^ group of zeros with a
1716 second rldicl. */
1717 c = ~c; /* c == 0xff000ffffff00000 */
1718 lsb = c & -c; /* lsb == 0x0000000000100000 */
1719 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
1720 c = ~c; /* c == 0x00fff000000fffff */
1721 c &= -lsb; /* c == 0x00fff00000000000 */
1722 lsb = c & -c; /* lsb == 0x0000100000000000 */
1723 c = ~c; /* c == 0xff000fffffffffff */
1724 c &= -lsb; /* c == 0xff00000000000000 */
1725 shift = 0;
1726 while ((lsb >>= 1) != 0)
1727 shift++; /* shift == 44 on exit from loop */
1728 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
1729 m1 = ~m1; /* m1 == 0x000000ffffffffff */
1730 m2 = ~c; /* m2 == 0x00ffffffffffffff */
1732 else
1734 /* Assume c initially something like 0xff000f0000000000. The idea
1735 is to rotate the word so that the ^^^ middle group of zeros
1736 is at the LS end and can be cleared with an rldicr mask. We then
1737 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
1738 a second rldicr. */
1739 lsb = c & -c; /* lsb == 0x0000010000000000 */
1740 m2 = -lsb; /* m2 == 0xffffff0000000000 */
1741 c = ~c; /* c == 0x00fff0ffffffffff */
1742 c &= -lsb; /* c == 0x00fff00000000000 */
1743 lsb = c & -c; /* lsb == 0x0000100000000000 */
1744 c = ~c; /* c == 0xff000fffffffffff */
1745 c &= -lsb; /* c == 0xff00000000000000 */
1746 shift = 0;
1747 while ((lsb >>= 1) != 0)
1748 shift++; /* shift == 44 on exit from loop */
1749 m1 = ~c; /* m1 == 0x00ffffffffffffff */
1750 m1 >>= shift; /* m1 == 0x0000000000000fff */
1751 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
1754 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
1755 masks will be all 1's. We are guaranteed more than one transition. */
1756 out[0] = GEN_INT (64 - shift);
1757 out[1] = GEN_INT (m1);
1758 out[2] = GEN_INT (shift);
1759 out[3] = GEN_INT (m2);
1760 #else
1761 abort ();
1762 #endif
1765 /* Return 1 if the operand is either a non-special register or a constant
1766 that can be used as the operand of a PowerPC64 logical AND insn. */
1769 and64_operand (op, mode)
1770 rtx op;
1771 enum machine_mode mode;
1773 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1774 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
1776 return (logical_operand (op, mode) || mask64_operand (op, mode));
1779 /* Like the above, but also match constants that can be implemented
1780 with two rldicl or rldicr insns. */
1783 and64_2_operand (op, mode)
1784 rtx op;
1785 enum machine_mode mode;
1787 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1788 return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
1790 return logical_operand (op, mode) || mask64_2_operand (op, mode);
1793 /* Return 1 if the operand is either a non-special register or a
1794 constant that can be used as the operand of an RS/6000 logical AND insn. */
1797 and_operand (op, mode)
1798 rtx op;
1799 enum machine_mode mode;
1801 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1802 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
1804 return (logical_operand (op, mode) || mask_operand (op, mode));
1807 /* Return 1 if the operand is a general register or memory operand. */
1810 reg_or_mem_operand (op, mode)
1811 rtx op;
1812 enum machine_mode mode;
1814 return (gpc_reg_operand (op, mode)
1815 || memory_operand (op, mode)
1816 || volatile_mem_operand (op, mode));
1819 /* Return 1 if the operand is a general register or memory operand without
1820 pre_inc or pre_dec which produces invalid form of PowerPC lwa
1821 instruction. */
1824 lwa_operand (op, mode)
1825 rtx op;
1826 enum machine_mode mode;
1828 rtx inner = op;
1830 if (reload_completed && GET_CODE (inner) == SUBREG)
1831 inner = SUBREG_REG (inner);
1833 return gpc_reg_operand (inner, mode)
1834 || (memory_operand (inner, mode)
1835 && GET_CODE (XEXP (inner, 0)) != PRE_INC
1836 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
1837 && (GET_CODE (XEXP (inner, 0)) != PLUS
1838 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
1839 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
1842 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
1845 symbol_ref_operand (op, mode)
1846 rtx op;
1847 enum machine_mode mode;
1849 if (mode != VOIDmode && GET_MODE (op) != mode)
1850 return 0;
1852 return (GET_CODE (op) == SYMBOL_REF);
1855 /* Return 1 if the operand, used inside a MEM, is a valid first argument
1856 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
1859 call_operand (op, mode)
1860 rtx op;
1861 enum machine_mode mode;
1863 if (mode != VOIDmode && GET_MODE (op) != mode)
1864 return 0;
1866 return (GET_CODE (op) == SYMBOL_REF
1867 || (GET_CODE (op) == REG
1868 && (REGNO (op) == LINK_REGISTER_REGNUM
1869 || REGNO (op) == COUNT_REGISTER_REGNUM
1870 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
1873 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
1874 this file and the function is not weakly defined. */
1877 current_file_function_operand (op, mode)
1878 rtx op;
1879 enum machine_mode mode ATTRIBUTE_UNUSED;
1881 return (GET_CODE (op) == SYMBOL_REF
1882 && (SYMBOL_REF_FLAG (op)
1883 || (op == XEXP (DECL_RTL (current_function_decl), 0)
1884 && ! DECL_WEAK (current_function_decl))));
1887 /* Return 1 if this operand is a valid input for a move insn. */
1890 input_operand (op, mode)
1891 rtx op;
1892 enum machine_mode mode;
1894 /* Memory is always valid. */
1895 if (memory_operand (op, mode))
1896 return 1;
1898 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
1899 if (GET_CODE (op) == CONSTANT_P_RTX)
1900 return 1;
1902 /* For floating-point, easy constants are valid. */
1903 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1904 && CONSTANT_P (op)
1905 && easy_fp_constant (op, mode))
1906 return 1;
1908 /* Allow any integer constant. */
1909 if (GET_MODE_CLASS (mode) == MODE_INT
1910 && (GET_CODE (op) == CONST_INT
1911 || GET_CODE (op) == CONST_DOUBLE))
1912 return 1;
1914 /* For floating-point or multi-word mode, the only remaining valid type
1915 is a register. */
1916 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1917 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1918 return register_operand (op, mode);
1920 /* The only cases left are integral modes one word or smaller (we
1921 do not get called for MODE_CC values). These can be in any
1922 register. */
1923 if (register_operand (op, mode))
1924 return 1;
1926 /* A SYMBOL_REF referring to the TOC is valid. */
1927 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op))
1928 return 1;
1930 /* A constant pool expression (relative to the TOC) is valid */
1931 if (TOC_RELATIVE_EXPR_P (op))
1932 return 1;
1934 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
1935 to be valid. */
1936 if (DEFAULT_ABI == ABI_V4
1937 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
1938 && small_data_operand (op, Pmode))
1939 return 1;
1941 return 0;
1944 /* Return 1 for an operand in small memory on V.4/eabi. */
1947 small_data_operand (op, mode)
1948 rtx op ATTRIBUTE_UNUSED;
1949 enum machine_mode mode ATTRIBUTE_UNUSED;
1951 #if TARGET_ELF
1952 rtx sym_ref;
1954 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
1955 return 0;
1957 if (DEFAULT_ABI != ABI_V4)
1958 return 0;
1960 if (GET_CODE (op) == SYMBOL_REF)
1961 sym_ref = op;
1963 else if (GET_CODE (op) != CONST
1964 || GET_CODE (XEXP (op, 0)) != PLUS
1965 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
1966 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
1967 return 0;
1969 else
1971 rtx sum = XEXP (op, 0);
1972 HOST_WIDE_INT summand;
1974 /* We have to be careful here, because it is the referenced address
1975 that must be 32k from _SDA_BASE_, not just the symbol. */
1976 summand = INTVAL (XEXP (sum, 1));
1977 if (summand < 0 || summand > g_switch_value)
1978 return 0;
1980 sym_ref = XEXP (sum, 0);
1983 if (*XSTR (sym_ref, 0) != '@')
1984 return 0;
1986 return 1;
1988 #else
1989 return 0;
1990 #endif
1993 static int
1994 constant_pool_expr_1 (op, have_sym, have_toc)
1995 rtx op;
1996 int *have_sym;
1997 int *have_toc;
1999 switch (GET_CODE(op))
2001 case SYMBOL_REF:
2002 if (CONSTANT_POOL_ADDRESS_P (op))
2004 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2006 *have_sym = 1;
2007 return 1;
2009 else
2010 return 0;
2012 else if (! strcmp (XSTR (op, 0), toc_label_name))
2014 *have_toc = 1;
2015 return 1;
2017 else
2018 return 0;
2019 case PLUS:
2020 case MINUS:
2021 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2022 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
2023 case CONST:
2024 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
2025 case CONST_INT:
2026 return 1;
2027 default:
2028 return 0;
2033 constant_pool_expr_p (op)
2034 rtx op;
2036 int have_sym = 0;
2037 int have_toc = 0;
2038 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2042 toc_relative_expr_p (op)
2043 rtx op;
2045 int have_sym = 0;
2046 int have_toc = 0;
2047 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2050 /* Try machine-dependent ways of modifying an illegitimate address
2051 to be legitimate. If we find one, return the new, valid address.
2052 This is used from only one place: `memory_address' in explow.c.
2054 OLDX is the address as it was before break_out_memory_refs was
2055 called. In some cases it is useful to look at this to decide what
2056 needs to be done.
2058 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2060 It is always safe for this function to do nothing. It exists to
2061 recognize opportunities to optimize the output.
2063 On RS/6000, first check for the sum of a register with a constant
2064 integer that is out of range. If so, generate code to add the
2065 constant with the low-order 16 bits masked to the register and force
2066 this result into another register (this can be done with `cau').
2067 Then generate an address of REG+(CONST&0xffff), allowing for the
2068 possibility of bit 16 being a one.
2070 Then check for the sum of a register and something not constant, try to
2071 load the other things into a register and return the sum. */
2073 rs6000_legitimize_address (x, oldx, mode)
2074 rtx x;
2075 rtx oldx ATTRIBUTE_UNUSED;
2076 enum machine_mode mode;
2078 if (GET_CODE (x) == PLUS
2079 && GET_CODE (XEXP (x, 0)) == REG
2080 && GET_CODE (XEXP (x, 1)) == CONST_INT
2081 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
2083 HOST_WIDE_INT high_int, low_int;
2084 rtx sum;
2085 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2086 high_int = INTVAL (XEXP (x, 1)) - low_int;
2087 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
2088 GEN_INT (high_int)), 0);
2089 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
2091 else if (GET_CODE (x) == PLUS
2092 && GET_CODE (XEXP (x, 0)) == REG
2093 && GET_CODE (XEXP (x, 1)) != CONST_INT
2094 && GET_MODE_NUNITS (mode) == 1
2095 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2096 || TARGET_POWERPC64
2097 || mode != DFmode)
2098 && (TARGET_POWERPC64 || mode != DImode)
2099 && mode != TImode)
2101 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
2102 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
2104 else if (ALTIVEC_VECTOR_MODE (mode))
2106 rtx reg;
2108 /* Make sure both operands are registers. */
2109 if (GET_CODE (x) == PLUS)
2110 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
2111 force_reg (Pmode, XEXP (x, 1)));
2113 reg = force_reg (Pmode, x);
2114 return reg;
2116 else if (SPE_VECTOR_MODE (mode))
2118 /* We accept [reg + reg] and [reg + OFFSET]. */
2120 if (GET_CODE (x) == PLUS)
2122 rtx op1 = XEXP (x, 0);
2123 rtx op2 = XEXP (x, 1);
2125 op1 = force_reg (Pmode, op1);
2127 if (GET_CODE (op2) != REG
2128 && (GET_CODE (op2) != CONST_INT
2129 || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
2130 op2 = force_reg (Pmode, op2);
2132 return gen_rtx_PLUS (Pmode, op1, op2);
2135 return force_reg (Pmode, x);
2137 else if (TARGET_ELF && TARGET_32BIT && TARGET_NO_TOC && ! flag_pic
2138 && GET_CODE (x) != CONST_INT
2139 && GET_CODE (x) != CONST_DOUBLE
2140 && CONSTANT_P (x)
2141 && GET_MODE_NUNITS (mode) == 1
2142 && (GET_MODE_BITSIZE (mode) <= 32
2143 || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
2145 rtx reg = gen_reg_rtx (Pmode);
2146 emit_insn (gen_elf_high (reg, (x)));
2147 return gen_rtx_LO_SUM (Pmode, reg, (x));
2149 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
2150 && ! flag_pic
2151 && GET_CODE (x) != CONST_INT
2152 && GET_CODE (x) != CONST_DOUBLE
2153 && CONSTANT_P (x)
2154 && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
2155 && mode != DImode
2156 && mode != TImode)
2158 rtx reg = gen_reg_rtx (Pmode);
2159 emit_insn (gen_macho_high (reg, (x)));
2160 return gen_rtx_LO_SUM (Pmode, reg, (x));
2162 else if (TARGET_TOC
2163 && CONSTANT_POOL_EXPR_P (x)
2164 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
2166 return create_TOC_reference (x);
2168 else
2169 return NULL_RTX;
2172 /* The convention appears to be to define this wherever it is used.
2173 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
2174 is now used here. */
2175 #ifndef REG_MODE_OK_FOR_BASE_P
2176 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
2177 #endif
2179 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
2180 replace the input X, or the original X if no replacement is called for.
2181 The output parameter *WIN is 1 if the calling macro should goto WIN,
2182 0 if it should not.
2184 For RS/6000, we wish to handle large displacements off a base
2185 register by splitting the addend across an addiu/addis and the mem insn.
2186 This cuts number of extra insns needed from 3 to 1.
2188 On Darwin, we use this to generate code for floating point constants.
2189 A movsf_low is generated so we wind up with 2 instructions rather than 3.
2190 The Darwin code is inside #if TARGET_MACHO because only then is
2191 machopic_function_base_name() defined. */
2193 rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win)
2194 rtx x;
2195 enum machine_mode mode;
2196 int opnum;
2197 int type;
2198 int ind_levels ATTRIBUTE_UNUSED;
2199 int *win;
2201 /* We must recognize output that we have already generated ourselves. */
     /* Case 1: (plus (plus reg const) const) -- the shape produced by the
	displacement-splitting code further down.  Reload the inner sum into
	a base register and keep the low-part displacement in the address.  */
2202 if (GET_CODE (x) == PLUS
2203 && GET_CODE (XEXP (x, 0)) == PLUS
2204 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
2205 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2206 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2208 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2209 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
2210 opnum, (enum reload_type)type);
2211 *win = 1;
2212 return x;
     /* Case 2 (Darwin PIC only): a LO_SUM we built below for a FP constant.
	Just reload the (pic_base + HIGH ...) part as the base register.  */
2215 #if TARGET_MACHO
2216 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
2217 && GET_CODE (x) == LO_SUM
2218 && GET_CODE (XEXP (x, 0)) == PLUS
2219 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
2220 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
2221 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
2222 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
2223 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
2224 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
2225 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
2227 /* Result of previous invocation of this function on Darwin
2228 floating point constant. */
2229 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2230 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
2231 opnum, (enum reload_type)type);
2232 *win = 1;
2233 return x;
2235 #endif
     /* Case 3: reg + out-of-range constant displacement.  Split VAL into a
	sign-extended 16-bit low part (fits the mem insn) and a high part to
	be added with addis; SPE/AltiVec modes are excluded because their
	memory insns take no displacement.  */
2236 if (GET_CODE (x) == PLUS
2237 && GET_CODE (XEXP (x, 0)) == REG
2238 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2239 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
2240 && GET_CODE (XEXP (x, 1)) == CONST_INT
2241 && !SPE_VECTOR_MODE (mode)
2242 && !ALTIVEC_VECTOR_MODE (mode))
2244 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
2245 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
2246 HOST_WIDE_INT high
2247 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
2249 /* Check for 32-bit overflow. */
2250 if (high + low != val)
2252 *win = 0;
2253 return x;
2256 /* Reload the high part into a base reg; leave the low part
2257 in the mem directly. */
2259 x = gen_rtx_PLUS (GET_MODE (x),
2260 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
2261 GEN_INT (high)),
2262 GEN_INT (low));
2264 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2265 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
2266 opnum, (enum reload_type)type);
2267 *win = 1;
2268 return x;
     /* Case 4 (Darwin PIC only): bare SYMBOL_REF, typically a FP constant
	pool reference.  Rewrite it as pic_base-relative HIGH/LO_SUM so a
	movsf_low style insn can be used, then reload the base part.  */
2270 #if TARGET_MACHO
2271 if (GET_CODE (x) == SYMBOL_REF
2272 && DEFAULT_ABI == ABI_DARWIN
2273 && !ALTIVEC_VECTOR_MODE (mode)
2274 && flag_pic)
2276 /* Darwin load of floating point constant. */
2277 rtx offset = gen_rtx (CONST, Pmode,
2278 gen_rtx (MINUS, Pmode, x,
2279 gen_rtx (SYMBOL_REF, Pmode,
2280 machopic_function_base_name ())));
2281 x = gen_rtx (LO_SUM, GET_MODE (x),
2282 gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
2283 gen_rtx (HIGH, Pmode, offset)), offset);
2284 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2285 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
2286 opnum, (enum reload_type)type);
2287 *win = 1;
2288 return x;
2290 #endif
     /* Case 5: a constant-pool symbol for something we keep in the TOC --
	no reload needed, just use the TOC reference directly.  */
2291 if (TARGET_TOC
2292 && CONSTANT_POOL_EXPR_P (x)
2293 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
2295 (x) = create_TOC_reference (x);
2296 *win = 1;
2297 return x;
     /* Nothing matched: tell the caller to fall through to generic code.  */
2299 *win = 0;
2300 return x;
2303 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2304 that is a valid memory address for an instruction.
2305 The MODE argument is the machine mode for the MEM expression
2306 that wants to use this address.
2308 On the RS/6000, there are four valid address: a SYMBOL_REF that
2309 refers to a constant pool entry of an address (or the sum of it
2310 plus a constant), a short (16-bit signed) constant plus a register,
2311 the sum of two registers, or a register indirect, possibly with an
2312 auto-increment. For DFmode and DImode with an constant plus register,
2313 we must ensure that both words are addressable or PowerPC64 with offset
2314 word aligned.
2316 For modes spanning multiple registers (DFmode in 32-bit GPRs,
2317 32-bit DImode, TImode), indexed addressing cannot be used because
2318 adjacent memory cells are accessed by adding word-sized offsets
2319 during assembly output. */
2321 rs6000_legitimate_address (mode, x, reg_ok_strict)
2322 enum machine_mode mode;
2323 rtx x;
2324 int reg_ok_strict;
2326 if (LEGITIMATE_INDIRECT_ADDRESS_P (x, reg_ok_strict))
2327 return 1;
2328 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
2329 && !ALTIVEC_VECTOR_MODE (mode)
2330 && !SPE_VECTOR_MODE (mode)
2331 && TARGET_UPDATE
2332 && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x, 0), reg_ok_strict))
2333 return 1;
2334 if (LEGITIMATE_SMALL_DATA_P (mode, x))
2335 return 1;
2336 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
2337 return 1;
2338 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
2339 if (! reg_ok_strict
2340 && GET_CODE (x) == PLUS
2341 && GET_CODE (XEXP (x, 0)) == REG
2342 && XEXP (x, 0) == virtual_stack_vars_rtx
2343 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2344 return 1;
2345 if (LEGITIMATE_OFFSET_ADDRESS_P (mode, x, reg_ok_strict))
2346 return 1;
2347 if (mode != TImode
2348 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2349 || TARGET_POWERPC64
2350 || mode != DFmode)
2351 && (TARGET_POWERPC64 || mode != DImode)
2352 && LEGITIMATE_INDEXED_ADDRESS_P (x, reg_ok_strict))
2353 return 1;
2354 if (LEGITIMATE_LO_SUM_ADDRESS_P (mode, x, reg_ok_strict))
2355 return 1;
2356 return 0;
2359 /* Try to output insns to set TARGET equal to the constant C if it can
2360 be done in less than N insns. Do all computations in MODE.
2361 Returns the place where the output has been placed if it can be
2362 done and the insns have been emitted. If it would take more than N
2363 insns, zero is returned and no insns and emitted. */
     /* NOTE(review): despite the comment above, N is ATTRIBUTE_UNUSED and no
	insn-count limit is enforced in this body -- presumably the callers
	(the move splitters) apply the limit themselves; confirm before
	relying on the documented "more than N insns" behavior.  */
2366 rs6000_emit_set_const (dest, mode, source, n)
2367 rtx dest, source;
2368 enum machine_mode mode;
2369 int n ATTRIBUTE_UNUSED;
2371 HOST_WIDE_INT c0, c1;
     /* Narrow integer modes always fit in a single move insn.  */
2373 if (mode == QImode || mode == HImode || mode == SImode)
2375 if (dest == NULL)
2376 dest = gen_reg_rtx (mode);
2377 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
2378 return dest;
     /* Wider constants: extract low/high host words of SOURCE.  C1 is the
	sign extension of C0 when SOURCE is a CONST_INT (or a CONST_DOUBLE on
	a 64-bit host).  NOTE(review): this path passes DEST straight to
	rs6000_emit_set_long_const -- a NULL DEST appears unhandled here;
	callers presumably always supply one.  */
2381 if (GET_CODE (source) == CONST_INT)
2383 c0 = INTVAL (source);
2384 c1 = -(c0 < 0);
2386 else if (GET_CODE (source) == CONST_DOUBLE)
2388 #if HOST_BITS_PER_WIDE_INT >= 64
2389 c0 = CONST_DOUBLE_LOW (source);
2390 c1 = -(c0 < 0);
2391 #else
2392 c0 = CONST_DOUBLE_LOW (source);
2393 c1 = CONST_DOUBLE_HIGH (source);
2394 #endif
2396 else
2397 abort ();
2399 return rs6000_emit_set_long_const (dest, c0, c1);
2402 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
2403 fall back to a straight forward decomposition. We do this to avoid
2404 exponential run times encountered when looking for longer sequences
2405 with rs6000_emit_set_const. */
     /* C1 is the low host word of the constant, C2 the high word (ignored on
	a 64-bit host, where C1 carries the whole value).  Returns DEST.  */
2406 static rtx
2407 rs6000_emit_set_long_const (dest, c1, c2)
2408 rtx dest;
2409 HOST_WIDE_INT c1, c2;
     /* 32-bit target: just move each 32-bit half into its subword.  */
2411 if (!TARGET_POWERPC64)
2413 rtx operand1, operand2;
2415 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
2416 DImode);
2417 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
2418 DImode);
2419 emit_move_insn (operand1, GEN_INT (c1));
2420 emit_move_insn (operand2, GEN_INT (c2));
2422 else
     /* 64-bit target: decompose the constant into four 16-bit "halfwords"
	UD1 (least significant) .. UD4 (most significant) and emit the
	shortest li/lis/ori/oris/sldi sequence for the pattern found.  */
2424 HOST_WIDE_INT ud1, ud2, ud3, ud4;
2426 ud1 = c1 & 0xffff;
2427 ud2 = (c1 & 0xffff0000) >> 16;
2428 #if HOST_BITS_PER_WIDE_INT >= 64
2429 c2 = c1 >> 32;
2430 #endif
2431 ud3 = c2 & 0xffff;
2432 ud4 = (c2 & 0xffff0000) >> 16;
     /* Value is a sign-extended 16-bit number: one li.  */
2434 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
2435 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
2437 if (ud1 & 0x8000)
2438 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
2439 else
2440 emit_move_insn (dest, GEN_INT (ud1));
     /* Value is a sign-extended 32-bit number: lis, then ori if the low
	halfword is nonzero.  The ^/- dance produces the sign-extended
	host representation of UD2<<16.  */
2443 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
2444 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
2446 if (ud2 & 0x8000)
2447 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
2448 - 0x80000000));
2449 else
2450 emit_move_insn (dest, GEN_INT (ud2 << 16));
2451 if (ud1 != 0)
2452 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
     /* Value is a sign-extended 48-bit number: build UD3:UD2 as above,
	shift left 16, then or in UD1.  */
2454 else if ((ud4 == 0xffff && (ud3 & 0x8000))
2455 || (ud4 == 0 && ! (ud3 & 0x8000)))
2457 if (ud3 & 0x8000)
2458 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
2459 - 0x80000000));
2460 else
2461 emit_move_insn (dest, GEN_INT (ud3 << 16));
2463 if (ud2 != 0)
2464 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
2465 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
2466 if (ud1 != 0)
2467 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
     /* General 64-bit case: build UD4:UD3 in the high half, shift left 32,
	then or in UD2<<16 and UD1.  */
2469 else
2471 if (ud4 & 0x8000)
2472 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
2473 - 0x80000000));
2474 else
2475 emit_move_insn (dest, GEN_INT (ud4 << 16));
2477 if (ud3 != 0)
2478 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
2480 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
2481 if (ud2 != 0)
2482 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
2483 GEN_INT (ud2 << 16)));
2484 if (ud1 != 0)
2485 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2488 return dest;
2491 /* Emit a move from SOURCE to DEST in mode MODE. */
     /* Main expander for movM patterns: massages SOURCE (forcing costly
	constants to memory/TOC, fixing addresses) and emits the final SET.  */
2492 void
2493 rs6000_emit_move (dest, source, mode)
2494 rtx dest;
2495 rtx source;
2496 enum machine_mode mode;
2498 rtx operands[2];
2499 operands[0] = dest;
2500 operands[1] = source;
2502 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
2503 if (GET_CODE (operands[1]) == CONST_DOUBLE
2504 && ! FLOAT_MODE_P (mode)
2505 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
2507 /* FIXME. This should never happen. */
2508 /* Since it seems that it does, do the safe thing and convert
2509 to a CONST_INT. */
2510 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
     /* An integer CONST_DOUBLE whose value would fit a CONST_INT is
	malformed RTL; abort rather than emit it.  */
2512 if (GET_CODE (operands[1]) == CONST_DOUBLE
2513 && ! FLOAT_MODE_P (mode)
2514 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
2515 && CONST_DOUBLE_LOW (operands[1]) >= 0)
2516 || (CONST_DOUBLE_HIGH (operands[1]) == -1
2517 && CONST_DOUBLE_LOW (operands[1]) < 0)))
2518 abort ();
2520 /* Check if GCC is setting up a block move that will end up using FP
2521 registers as temporaries. We must make sure this is acceptable. */
2522 if (GET_CODE (operands[0]) == MEM
2523 && GET_CODE (operands[1]) == MEM
2524 && mode == DImode
2525 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
2526 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
2527 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
2528 ? 32 : MEM_ALIGN (operands[0])))
2529 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
2530 ? 32
2531 : MEM_ALIGN (operands[1]))))
2532 && ! MEM_VOLATILE_P (operands [0])
2533 && ! MEM_VOLATILE_P (operands [1]))
     /* Split the DImode mem-to-mem copy into two SImode copies, which do
	not need FP temporaries.  */
2535 emit_move_insn (adjust_address (operands[0], SImode, 0),
2536 adjust_address (operands[1], SImode, 0));
2537 emit_move_insn (adjust_address (operands[0], SImode, 4),
2538 adjust_address (operands[1], SImode, 4));
2539 return;
     /* When storing to memory (or any non-REG) before reload, copy the
	source through a register first.  */
2542 if (! no_new_pseudos && GET_CODE (operands[0]) != REG)
2543 operands[1] = force_reg (mode, operands[1]);
     /* POWER (non-PowerPC) FP registers may hold double-precision values;
	an SFmode store must first round to single precision.  */
2544 if (mode == SFmode && ! TARGET_POWERPC
2545 && TARGET_HARD_FLOAT && TARGET_FPRS
2546 && GET_CODE (operands[0]) == MEM)
2548 int regnum;
2550 if (reload_in_progress || reload_completed)
2551 regnum = true_regnum (operands[1]);
2553 else if (GET_CODE (operands[1]) == REG)
2554 regnum = REGNO (operands[1]);
2555 else
2556 regnum = -1;
2558 /* If operands[1] is a register, on POWER it may have
2559 double-precision data in it, so truncate it to single
2560 precision. */
2561 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
2563 rtx newreg;
2564 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
2565 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
2566 operands[1] = newreg;
2570 /* Handle the case where reload calls us with an invalid address;
2571 and the case of CONSTANT_P_RTX. */
2572 if (!ALTIVEC_VECTOR_MODE (mode)
2573 && (! general_operand (operands[1], mode)
2574 || ! nonimmediate_operand (operands[0], mode)
2575 || GET_CODE (operands[1]) == CONSTANT_P_RTX))
2577 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2578 return;
2581 /* FIXME: In the long term, this switch statement should go away
2582 and be replaced by a sequence of tests based on things like
2583 mode == Pmode. */
2584 switch (mode)
2586 case HImode:
2587 case QImode:
2588 if (CONSTANT_P (operands[1])
2589 && GET_CODE (operands[1]) != CONST_INT)
2590 operands[1] = force_const_mem (mode, operands[1]);
2591 break;
     /* FP constants that are not "easy" (loadable without a memory
	reference) go to the constant pool.  */
2593 case TFmode:
2594 case DFmode:
2595 case SFmode:
2596 if (CONSTANT_P (operands[1])
2597 && ! easy_fp_constant (operands[1], mode))
2598 operands[1] = force_const_mem (mode, operands[1]);
2599 break;
2601 case V16QImode:
2602 case V8HImode:
2603 case V4SFmode:
2604 case V4SImode:
2605 case V4HImode:
2606 case V2SFmode:
2607 case V2SImode:
2608 if (CONSTANT_P (operands[1])
2609 && !easy_vector_constant (operands[1]))
2610 operands[1] = force_const_mem (mode, operands[1]);
2611 break;
     /* Pointer-sized moves: the interesting cases -- small data, GOT,
	HIGH/LO_SUM expansion, TOC references, expensive constants.  */
2613 case SImode:
2614 case DImode:
2615 /* Use default pattern for address of ELF small data */
2616 if (TARGET_ELF
2617 && mode == Pmode
2618 && DEFAULT_ABI == ABI_V4
2619 && (GET_CODE (operands[1]) == SYMBOL_REF
2620 || GET_CODE (operands[1]) == CONST)
2621 && small_data_operand (operands[1], mode))
2623 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]))
2624 return;
     /* 32-bit SysV -fpic: load the address through the GOT.  */
2627 if (DEFAULT_ABI == ABI_V4
2628 && mode == Pmode && mode == SImode
2629 && flag_pic == 1 && got_operand (operands[1], mode))
2631 emit_insn (gen_movsi_got (operands[0], operands[1]));
2632 return;
     /* No TOC and no PIC: materialize the address with a HIGH/LO_SUM
	pair (lis/ori style).  */
2635 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
2636 && TARGET_NO_TOC && ! flag_pic
2637 && mode == Pmode
2638 && CONSTANT_P (operands[1])
2639 && GET_CODE (operands[1]) != HIGH
2640 && GET_CODE (operands[1]) != CONST_INT)
2642 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
2644 /* If this is a function address on -mcall-aixdesc,
2645 convert it to the address of the descriptor. */
2646 if (DEFAULT_ABI == ABI_AIX
2647 && GET_CODE (operands[1]) == SYMBOL_REF
2648 && XSTR (operands[1], 0)[0] == '.')
2650 const char *name = XSTR (operands[1], 0);
2651 rtx new_ref;
2652 while (*name == '.')
2653 name++;
2654 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
2655 CONSTANT_POOL_ADDRESS_P (new_ref)
2656 = CONSTANT_POOL_ADDRESS_P (operands[1]);
2657 SYMBOL_REF_FLAG (new_ref) = SYMBOL_REF_FLAG (operands[1]);
2658 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
2659 operands[1] = new_ref;
2662 if (DEFAULT_ABI == ABI_DARWIN)
2664 emit_insn (gen_macho_high (target, operands[1]));
2665 emit_insn (gen_macho_low (operands[0], target, operands[1]));
2666 return;
2669 emit_insn (gen_elf_high (target, operands[1]));
2670 emit_insn (gen_elf_low (operands[0], target, operands[1]));
2671 return;
2674 /* If this is a SYMBOL_REF that refers to a constant pool entry,
2675 and we have put it in the TOC, we just need to make a TOC-relative
2676 reference to it. */
2677 if (TARGET_TOC
2678 && GET_CODE (operands[1]) == SYMBOL_REF
2679 && CONSTANT_POOL_EXPR_P (operands[1])
2680 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
2681 get_pool_mode (operands[1])))
2683 operands[1] = create_TOC_reference (operands[1]);
     /* Constants that take several insns to build (or must live in FP
	regs) are forced into the constant pool instead.  */
2685 else if (mode == Pmode
2686 && CONSTANT_P (operands[1])
2687 && ((GET_CODE (operands[1]) != CONST_INT
2688 && ! easy_fp_constant (operands[1], mode))
2689 || (GET_CODE (operands[1]) == CONST_INT
2690 && num_insns_constant (operands[1], mode) > 2)
2691 || (GET_CODE (operands[0]) == REG
2692 && FP_REGNO_P (REGNO (operands[0]))))
2693 && GET_CODE (operands[1]) != HIGH
2694 && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands[1])
2695 && ! TOC_RELATIVE_EXPR_P (operands[1]))
2697 /* Emit a USE operation so that the constant isn't deleted if
2698 expensive optimizations are turned on because nobody
2699 references it. This should only be done for operands that
2700 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
2701 This should not be done for operands that contain LABEL_REFs.
2702 For now, we just handle the obvious case. */
2703 if (GET_CODE (operands[1]) != LABEL_REF)
2704 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
2706 #if TARGET_MACHO
2707 /* Darwin uses a special PIC legitimizer. */
2708 if (DEFAULT_ABI == ABI_DARWIN && flag_pic)
2710 operands[1] =
2711 rs6000_machopic_legitimize_pic_address (operands[1], mode,
2712 operands[0]);
2713 if (operands[0] != operands[1])
2714 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2715 return;
2717 #endif
2719 /* If we are to limit the number of things we put in the TOC and
2720 this is a symbol plus a constant we can add in one insn,
2721 just put the symbol in the TOC and add the constant. Don't do
2722 this if reload is in progress. */
2723 if (GET_CODE (operands[1]) == CONST
2724 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
2725 && GET_CODE (XEXP (operands[1], 0)) == PLUS
2726 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
2727 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
2728 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
2729 && ! side_effects_p (operands[0]))
2731 rtx sym =
2732 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
2733 rtx other = XEXP (XEXP (operands[1], 0), 1);
2735 sym = force_reg (mode, sym);
2736 if (mode == SImode)
2737 emit_insn (gen_addsi3 (operands[0], sym, other));
2738 else
2739 emit_insn (gen_adddi3 (operands[0], sym, other));
2740 return;
2743 operands[1] = force_const_mem (mode, operands[1]);
     /* If the new pool entry itself went to the TOC, address it via a
	TOC reference and mark the memory constant/unchanging.  */
2745 if (TARGET_TOC
2746 && CONSTANT_POOL_EXPR_P (XEXP (operands[1], 0))
2747 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
2748 get_pool_constant (XEXP (operands[1], 0)),
2749 get_pool_mode (XEXP (operands[1], 0))))
2751 operands[1]
2752 = gen_rtx_MEM (mode,
2753 create_TOC_reference (XEXP (operands[1], 0)));
2754 set_mem_alias_set (operands[1], get_TOC_alias_set ());
2755 RTX_UNCHANGING_P (operands[1]) = 1;
2758 break;
     /* TImode memory operands need register (indirect) addresses;
	copy any other address into a register first.  */
2760 case TImode:
2761 if (GET_CODE (operands[0]) == MEM
2762 && GET_CODE (XEXP (operands[0], 0)) != REG
2763 && ! reload_in_progress)
2764 operands[0]
2765 = replace_equiv_address (operands[0],
2766 copy_addr_to_reg (XEXP (operands[0], 0)));
2768 if (GET_CODE (operands[1]) == MEM
2769 && GET_CODE (XEXP (operands[1], 0)) != REG
2770 && ! reload_in_progress)
2771 operands[1]
2772 = replace_equiv_address (operands[1],
2773 copy_addr_to_reg (XEXP (operands[1], 0)));
2774 break;
2776 default:
2777 abort ();
2780 /* Above, we may have called force_const_mem which may have returned
2781 an invalid address. If we can, fix this up; otherwise, reload will
2782 have to deal with it. */
2783 if (GET_CODE (operands[1]) == MEM
2784 && ! memory_address_p (mode, XEXP (operands[1], 0))
2785 && ! reload_in_progress)
2786 operands[1] = adjust_address (operands[1], mode, 0);
2788 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2789 return;
2792 /* Initialize a variable CUM of type CUMULATIVE_ARGS
2793 for a call to a function whose data type is FNTYPE.
2794 For a library call, FNTYPE is 0.
2796 For incoming args we set the number of arguments in the prototype large
2797 so we never return a PARALLEL. */
2799 void
2800 init_cumulative_args (cum, fntype, libname, incoming)
2801 CUMULATIVE_ARGS *cum;
2802 tree fntype;
2803 rtx libname ATTRIBUTE_UNUSED;
2804 int incoming;
2806 static CUMULATIVE_ARGS zero_cumulative;
2808 *cum = zero_cumulative;
2809 cum->words = 0;
2810 cum->fregno = FP_ARG_MIN_REG;
2811 cum->vregno = ALTIVEC_ARG_MIN_REG;
2812 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
2813 cum->call_cookie = CALL_NORMAL;
2814 cum->sysv_gregno = GP_ARG_MIN_REG;
2816 if (incoming)
2817 cum->nargs_prototype = 1000; /* don't return a PARALLEL */
2819 else if (cum->prototype)
2820 cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
2821 + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
2822 || RETURN_IN_MEMORY (TREE_TYPE (fntype))));
2824 else
2825 cum->nargs_prototype = 0;
2827 cum->orig_nargs = cum->nargs_prototype;
2829 /* Check for a longcall attribute. */
2830 if (fntype
2831 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
2832 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
2833 cum->call_cookie = CALL_LONG;
2835 if (TARGET_DEBUG_ARG)
2837 fprintf (stderr, "\ninit_cumulative_args:");
2838 if (fntype)
2840 tree ret_type = TREE_TYPE (fntype);
2841 fprintf (stderr, " ret code = %s,",
2842 tree_code_name[ (int)TREE_CODE (ret_type) ]);
2845 if (cum->call_cookie & CALL_LONG)
2846 fprintf (stderr, " longcall,");
2848 fprintf (stderr, " proto = %d, nargs = %d\n",
2849 cum->prototype, cum->nargs_prototype);
2853 /* If defined, a C expression which determines whether, and in which
2854 direction, to pad out an argument with extra space. The value
2855 should be of type `enum direction': either `upward' to pad above
2856 the argument, `downward' to pad below, or `none' to inhibit
2857 padding.
2859 For the AIX ABI structs are always stored left shifted in their
2860 argument slot. */
2862 enum direction
2863 function_arg_padding (mode, type)
2864 enum machine_mode mode;
2865 tree type;
2867 if (type != 0 && AGGREGATE_TYPE_P (type))
2868 return upward;
2870 /* This is the default definition. */
2871 return (! BYTES_BIG_ENDIAN
2872 ? upward
2873 : ((mode == BLKmode
2874 ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2875 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
2876 : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
2877 ? downward : upward));
2880 /* If defined, a C expression that gives the alignment boundary, in bits,
2881 of an argument with the specified mode and type. If it is not defined,
2882 PARM_BOUNDARY is used for all arguments.
2884 V.4 wants long longs to be double word aligned. */
2887 function_arg_boundary (mode, type)
2888 enum machine_mode mode;
2889 tree type ATTRIBUTE_UNUSED;
2891 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
2892 return 64;
2893 else if (SPE_VECTOR_MODE (mode))
2894 return 64;
2895 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2896 return 128;
2897 else
2898 return PARM_BOUNDARY;
2901 /* Update the data in CUM to advance over an argument
2902 of mode MODE and data type TYPE.
2903 (TYPE is null for libcalls where that information may not be available.) */
2905 void
2906 function_arg_advance (cum, mode, type, named)
2907 CUMULATIVE_ARGS *cum;
2908 enum machine_mode mode;
2909 tree type;
2910 int named;
     /* May go negative past the prototyped args; FUNCTION_ARG uses the
	sign to detect unprototyped FP arguments.  */
2912 cum->nargs_prototype--;
     /* AltiVec vectors consume a vector register while any remain (and
	the arg is within the prototype); otherwise they go in memory.  */
2914 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2916 if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
2917 cum->vregno++;
2918 else
2919 cum->words += RS6000_ARG_SIZE (mode, type);
     /* Named SPE vectors consume one GPR slot.  */
2921 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
2922 && named && cum->sysv_gregno <= GP_ARG_MAX_REG)
2923 cum->sysv_gregno++;
2924 else if (DEFAULT_ABI == ABI_V4)
     /* V.4 float args use the FP register file while f1..f8 last; the
	DFmode overflow case doubleword-aligns the stack offset first.  */
2926 if (TARGET_HARD_FLOAT && TARGET_FPRS
2927 && (mode == SFmode || mode == DFmode))
2929 if (cum->fregno <= FP_ARG_V4_MAX_REG)
2930 cum->fregno++;
2931 else
2933 if (mode == DFmode)
2934 cum->words += cum->words & 1;
2935 cum->words += RS6000_ARG_SIZE (mode, type);
2938 else
2940 int n_words;
2941 int gregno = cum->sysv_gregno;
2943 /* Aggregates and IEEE quad get passed by reference. */
2944 if ((type && AGGREGATE_TYPE_P (type))
2945 || mode == TFmode)
2946 n_words = 1;
2947 else
2948 n_words = RS6000_ARG_SIZE (mode, type);
2950 /* Long long and SPE vectors are put in odd registers. */
2951 if (n_words == 2 && (gregno & 1) == 0)
2952 gregno += 1;
2954 /* Long long and SPE vectors are not split between registers
2955 and stack. */
2956 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
2958 /* Long long is aligned on the stack. */
2959 if (n_words == 2)
2960 cum->words += cum->words & 1;
2961 cum->words += n_words;
2964 /* Note: continuing to accumulate gregno past when we've started
2965 spilling to the stack indicates the fact that we've started
2966 spilling to the stack to expand_builtin_saveregs. */
2967 cum->sysv_gregno = gregno + n_words;
2970 if (TARGET_DEBUG_ARG)
2972 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
2973 cum->words, cum->fregno);
2974 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
2975 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
2976 fprintf (stderr, "mode = %4s, named = %d\n",
2977 GET_MODE_NAME (mode), named);
     /* AIX/Darwin ABIs: everything is measured in stack words; FP args
	also consume an FP register in parallel.  */
2980 else
2982 int align = (TARGET_32BIT && (cum->words & 1) != 0
2983 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
2985 cum->words += align + RS6000_ARG_SIZE (mode, type);
2987 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2988 && TARGET_HARD_FLOAT && TARGET_FPRS)
2989 cum->fregno++;
2991 if (TARGET_DEBUG_ARG)
2993 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
2994 cum->words, cum->fregno);
2995 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
2996 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
2997 fprintf (stderr, "named = %d, align = %d\n", named, align);
3002 /* Determine where to put an argument to a function.
3003 Value is zero to push the argument on the stack,
3004 or a hard register in which to store the argument.
3006 MODE is the argument's machine mode.
3007 TYPE is the data type of the argument (as a tree).
3008 This is null for libcalls where that information may
3009 not be available.
3010 CUM is a variable of type CUMULATIVE_ARGS which gives info about
3011 the preceding args and about the function being called.
3012 NAMED is nonzero if this argument is a named parameter
3013 (otherwise it is an extra parameter matching an ellipsis).
3015 On RS/6000 the first eight words of non-FP are normally in registers
3016 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
3017 Under V.4, the first 8 FP args are in registers.
3019 If this is floating-point and no prototype is specified, we use
3020 both an FP and integer register (or possibly FP reg and stack). Library
3021 functions (when TYPE is zero) always have the proper types for args,
3022 so we can pass the FP value just in one register. emit_library_function
3023 doesn't support PARALLEL anyway. */
3025 struct rtx_def *
3026 function_arg (cum, mode, type, named)
3027 CUMULATIVE_ARGS *cum;
3028 enum machine_mode mode;
3029 tree type;
3030 int named;
3032 enum rs6000_abi abi = DEFAULT_ABI;
3034 /* Return a marker to indicate whether CR1 needs to set or clear the
3035 bit that V.4 uses to say fp args were passed in registers.
3036 Assume that we don't need the marker for software floating point,
3037 or compiler generated library calls. */
3038 if (mode == VOIDmode)
3040 if (abi == ABI_V4
3041 && cum->nargs_prototype < 0
3042 && type && (cum->prototype || TARGET_NO_PROTOTYPE))
3044 /* For the SPE, we need to crxor CR6 always. */
3045 if (TARGET_SPE_ABI)
3046 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS)
3047 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
3048 return GEN_INT (cum->call_cookie
3049 | ((cum->fregno == FP_ARG_MIN_REG)
3050 ? CALL_V4_SET_FP_ARGS
3051 : CALL_V4_CLEAR_FP_ARGS));
3054 return GEN_INT (cum->call_cookie);
     /* AltiVec vectors: vector register if one is left and the arg is
	named, otherwise memory.  */
3057 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3059 if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
3060 return gen_rtx_REG (mode, cum->vregno);
3061 else
3062 return NULL;
     /* Named SPE vectors: one GPR if available.  */
3064 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode) && named)
3066 if (cum->sysv_gregno <= GP_ARG_MAX_REG)
3067 return gen_rtx_REG (mode, cum->sysv_gregno);
3068 else
3069 return NULL;
3071 else if (abi == ABI_V4)
3073 if (TARGET_HARD_FLOAT && TARGET_FPRS
3074 && (mode == SFmode || mode == DFmode))
3076 if (cum->fregno <= FP_ARG_V4_MAX_REG)
3077 return gen_rtx_REG (mode, cum->fregno);
3078 else
3079 return NULL;
3081 else
3083 int n_words;
3084 int gregno = cum->sysv_gregno;
3086 /* Aggregates and IEEE quad get passed by reference. */
3087 if ((type && AGGREGATE_TYPE_P (type))
3088 || mode == TFmode)
3089 n_words = 1;
3090 else
3091 n_words = RS6000_ARG_SIZE (mode, type);
3093 /* Long long and SPE vectors are put in odd registers. */
3094 if (n_words == 2 && (gregno & 1) == 0)
3095 gregno += 1;
3097 /* Long long and SPE vectors are not split between registers
3098 and stack. */
3099 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
3101 /* SPE vectors in ... get split into 2 registers. */
3102 if (TARGET_SPE && TARGET_SPE_ABI
3103 && SPE_VECTOR_MODE (mode) && !named)
3105 rtx r1, r2;
3106 enum machine_mode m = GET_MODE_INNER (mode);
3108 r1 = gen_rtx_REG (m, gregno);
3109 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
3110 r2 = gen_rtx_REG (m, gregno + 1);
3111 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
3112 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
3114 return gen_rtx_REG (mode, gregno);
3116 else
3117 return NULL;
     /* AIX/Darwin ABIs: position is measured in words; ALIGN accounts
	for doubleword-aligned arguments on 32-bit targets.  */
3120 else
3122 int align = (TARGET_32BIT && (cum->words & 1) != 0
3123 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
3124 int align_words = cum->words + align;
     /* Variable-sized types always go in memory.  */
3126 if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3127 return NULL_RTX;
3129 if (USE_FP_FOR_ARG_P (*cum, mode, type))
3131 if (! type
3132 || ((cum->nargs_prototype > 0)
3133 /* IBM AIX extended its linkage convention definition always
3134 to require FP args after register save area hole on the
3135 stack. */
3136 && (DEFAULT_ABI != ABI_AIX
3137 || ! TARGET_XL_CALL
3138 || (align_words < GP_ARG_NUM_REG))))
3139 return gen_rtx_REG (mode, cum->fregno);
     /* Unprototyped FP arg: pass it in both the FP register and the
	GPR(s)/stack position, as a two-element PARALLEL, since the
	callee may read either.  */
3141 return gen_rtx_PARALLEL (mode,
3142 gen_rtvec (2,
3143 gen_rtx_EXPR_LIST (VOIDmode,
3144 ((align_words >= GP_ARG_NUM_REG)
3145 ? NULL_RTX
3146 : (align_words
3147 + RS6000_ARG_SIZE (mode, type)
3148 > GP_ARG_NUM_REG
3149 /* If this is partially on the stack, then
3150 we only include the portion actually
3151 in registers here. */
3152 ? gen_rtx_REG (SImode,
3153 GP_ARG_MIN_REG + align_words)
3154 : gen_rtx_REG (mode,
3155 GP_ARG_MIN_REG + align_words))),
3156 const0_rtx),
3157 gen_rtx_EXPR_LIST (VOIDmode,
3158 gen_rtx_REG (mode, cum->fregno),
3159 const0_rtx)));
3161 else if (align_words < GP_ARG_NUM_REG)
3162 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
3163 else
3164 return NULL_RTX;
3168 /* For an arg passed partly in registers and partly in memory,
3169 this is the number of registers used.
3170 For args passed entirely in registers or entirely in memory, zero. */
3173 function_arg_partial_nregs (cum, mode, type, named)
3174 CUMULATIVE_ARGS *cum;
3175 enum machine_mode mode;
3176 tree type;
3177 int named ATTRIBUTE_UNUSED;
3179 if (DEFAULT_ABI == ABI_V4)
3180 return 0;
3182 if (USE_FP_FOR_ARG_P (*cum, mode, type)
3183 || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
3185 if (cum->nargs_prototype >= 0)
3186 return 0;
3189 if (cum->words < GP_ARG_NUM_REG
3190 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
3192 int ret = GP_ARG_NUM_REG - cum->words;
3193 if (ret && TARGET_DEBUG_ARG)
3194 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
3196 return ret;
3199 return 0;
3202 /* A C expression that indicates when an argument must be passed by
3203 reference. If nonzero for an argument, a copy of that argument is
3204 made in memory and a pointer to the argument is passed instead of
3205 the argument itself. The pointer is passed in whatever way is
3206 appropriate for passing a pointer to that type.
3208 Under V.4, structures and unions are passed by reference. */
3211 function_arg_pass_by_reference (cum, mode, type, named)
3212 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
3213 enum machine_mode mode ATTRIBUTE_UNUSED;
3214 tree type;
3215 int named ATTRIBUTE_UNUSED;
3217 if (DEFAULT_ABI == ABI_V4
3218 && ((type && AGGREGATE_TYPE_P (type))
3219 || mode == TFmode))
3221 if (TARGET_DEBUG_ARG)
3222 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
3224 return 1;
3227 return 0;
3230 /* Perform any needed actions needed for a function that is receiving a
3231 variable number of arguments.
3233 CUM is as above.
3235 MODE and TYPE are the mode and type of the current parameter.
3237 PRETEND_SIZE is a variable that should be set to the amount of stack
3238 that must be pushed by the prolog to pretend that our caller pushed
3241 Normally, this macro will push all remaining incoming registers on the
3242 stack and set PRETEND_SIZE to the length of the registers pushed. */
3244 void
3245 setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
3246 CUMULATIVE_ARGS *cum;
3247 enum machine_mode mode;
3248 tree type;
3249 int *pretend_size;
3250 int no_rtl;
3253 CUMULATIVE_ARGS next_cum;
3254 int reg_size = TARGET_32BIT ? 4 : 8;
3255 rtx save_area = NULL_RTX, mem;
3256 int first_reg_offset, set;
3257 tree fntype;
3258 int stdarg_p;
3260 fntype = TREE_TYPE (current_function_decl);
3261 stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
3262 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3263 != void_type_node));
3265 /* For varargs, we do not want to skip the dummy va_dcl argument.
3266 For stdargs, we do want to skip the last named argument. */
3267 next_cum = *cum;
3268 if (stdarg_p)
3269 function_arg_advance (&next_cum, mode, type, 1);
3271 if (DEFAULT_ABI == ABI_V4)
3273 /* Indicate to allocate space on the stack for varargs save area. */
3274 cfun->machine->sysv_varargs_p = 1;
3275 if (! no_rtl)
3276 save_area = plus_constant (virtual_stack_vars_rtx,
3277 - RS6000_VARARGS_SIZE);
3279 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
3281 else
3283 first_reg_offset = next_cum.words;
3284 save_area = virtual_incoming_args_rtx;
3285 cfun->machine->sysv_varargs_p = 0;
3287 if (MUST_PASS_IN_STACK (mode, type))
3288 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
3291 set = get_varargs_alias_set ();
3292 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
3294 mem = gen_rtx_MEM (BLKmode,
3295 plus_constant (save_area,
3296 first_reg_offset * reg_size)),
3297 set_mem_alias_set (mem, set);
3298 set_mem_align (mem, BITS_PER_WORD);
3300 move_block_from_reg
3301 (GP_ARG_MIN_REG + first_reg_offset, mem,
3302 GP_ARG_NUM_REG - first_reg_offset,
3303 (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD);
3305 /* ??? Does ABI_V4 need this at all? */
3306 *pretend_size = (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD;
3309 /* Save FP registers if needed. */
3310 if (DEFAULT_ABI == ABI_V4
3311 && TARGET_HARD_FLOAT && TARGET_FPRS
3312 && ! no_rtl
3313 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
3315 int fregno = next_cum.fregno;
3316 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
3317 rtx lab = gen_label_rtx ();
3318 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
3320 emit_jump_insn (gen_rtx_SET (VOIDmode,
3321 pc_rtx,
3322 gen_rtx_IF_THEN_ELSE (VOIDmode,
3323 gen_rtx_NE (VOIDmode, cr1,
3324 const0_rtx),
3325 gen_rtx_LABEL_REF (VOIDmode, lab),
3326 pc_rtx)));
3328 while (fregno <= FP_ARG_V4_MAX_REG)
3330 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
3331 set_mem_alias_set (mem, set);
3332 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
3333 fregno++;
3334 off += 8;
3337 emit_label (lab);
3341 /* Create the va_list data type. */
3343 tree
3344 rs6000_build_va_list ()
3346 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
3348 /* For AIX, prefer 'char *' because that's what the system
3349 header files like. */
3350 if (DEFAULT_ABI != ABI_V4)
3351 return build_pointer_type (char_type_node);
3353 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
3354 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
3356 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
3357 unsigned_char_type_node);
3358 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
3359 unsigned_char_type_node);
3360 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
3361 ptr_type_node);
3362 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
3363 ptr_type_node);
3365 DECL_FIELD_CONTEXT (f_gpr) = record;
3366 DECL_FIELD_CONTEXT (f_fpr) = record;
3367 DECL_FIELD_CONTEXT (f_ovf) = record;
3368 DECL_FIELD_CONTEXT (f_sav) = record;
3370 TREE_CHAIN (record) = type_decl;
3371 TYPE_NAME (record) = type_decl;
3372 TYPE_FIELDS (record) = f_gpr;
3373 TREE_CHAIN (f_gpr) = f_fpr;
3374 TREE_CHAIN (f_fpr) = f_ovf;
3375 TREE_CHAIN (f_ovf) = f_sav;
3377 layout_type (record);
3379 /* The correct type is an array type of one element. */
3380 return build_array_type (record, build_index_type (size_zero_node));
3383 /* Implement va_start. */
3385 void
3386 rs6000_va_start (valist, nextarg)
3387 tree valist;
3388 rtx nextarg;
3390 HOST_WIDE_INT words, n_gpr, n_fpr;
3391 tree f_gpr, f_fpr, f_ovf, f_sav;
3392 tree gpr, fpr, ovf, sav, t;
3394 /* Only SVR4 needs something special. */
3395 if (DEFAULT_ABI != ABI_V4)
3397 std_expand_builtin_va_start (valist, nextarg);
3398 return;
3401 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
3402 f_fpr = TREE_CHAIN (f_gpr);
3403 f_ovf = TREE_CHAIN (f_fpr);
3404 f_sav = TREE_CHAIN (f_ovf);
3406 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
3407 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
3408 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
3409 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
3410 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
3412 /* Count number of gp and fp argument registers used. */
3413 words = current_function_args_info.words;
3414 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
3415 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
3417 if (TARGET_DEBUG_ARG)
3419 fputs ("va_start: words = ", stderr);
3420 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, words);
3421 fputs (", n_gpr = ", stderr);
3422 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_gpr);
3423 fputs (", n_fpr = ", stderr);
3424 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_fpr);
3425 putc ('\n', stderr);
3428 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
3429 TREE_SIDE_EFFECTS (t) = 1;
3430 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3432 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
3433 TREE_SIDE_EFFECTS (t) = 1;
3434 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3436 /* Find the overflow area. */
3437 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
3438 if (words != 0)
3439 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
3440 build_int_2 (words * UNITS_PER_WORD, 0));
3441 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
3442 TREE_SIDE_EFFECTS (t) = 1;
3443 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3445 /* Find the register save area. */
3446 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
3447 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
3448 build_int_2 (-RS6000_VARARGS_SIZE, -1));
3449 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
3450 TREE_SIDE_EFFECTS (t) = 1;
3451 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3454 /* Implement va_arg. */
3457 rs6000_va_arg (valist, type)
3458 tree valist, type;
3460 tree f_gpr, f_fpr, f_ovf, f_sav;
3461 tree gpr, fpr, ovf, sav, reg, t, u;
3462 int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
3463 rtx lab_false, lab_over, addr_rtx, r;
3465 if (DEFAULT_ABI != ABI_V4)
3466 return std_expand_builtin_va_arg (valist, type);
3468 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
3469 f_fpr = TREE_CHAIN (f_gpr);
3470 f_ovf = TREE_CHAIN (f_fpr);
3471 f_sav = TREE_CHAIN (f_ovf);
3473 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
3474 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
3475 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
3476 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
3477 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
3479 size = int_size_in_bytes (type);
3480 rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3482 if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
3484 /* Aggregates and long doubles are passed by reference. */
3485 indirect_p = 1;
3486 reg = gpr;
3487 n_reg = 1;
3488 sav_ofs = 0;
3489 sav_scale = 4;
3490 size = UNITS_PER_WORD;
3491 rsize = 1;
3493 else if (FLOAT_TYPE_P (type) && TARGET_HARD_FLOAT && TARGET_FPRS)
3495 /* FP args go in FP registers, if present. */
3496 indirect_p = 0;
3497 reg = fpr;
3498 n_reg = 1;
3499 sav_ofs = 8*4;
3500 sav_scale = 8;
3502 else
3504 /* Otherwise into GP registers. */
3505 indirect_p = 0;
3506 reg = gpr;
3507 n_reg = rsize;
3508 sav_ofs = 0;
3509 sav_scale = 4;
3512 /* Pull the value out of the saved registers ... */
3514 lab_false = gen_label_rtx ();
3515 lab_over = gen_label_rtx ();
3516 addr_rtx = gen_reg_rtx (Pmode);
3518 /* AltiVec vectors never go in registers. */
3519 if (!TARGET_ALTIVEC || TREE_CODE (type) != VECTOR_TYPE)
3521 TREE_THIS_VOLATILE (reg) = 1;
3522 emit_cmp_and_jump_insns
3523 (expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
3524 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
3525 lab_false);
3527 /* Long long is aligned in the registers. */
3528 if (n_reg > 1)
3530 u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
3531 build_int_2 (n_reg - 1, 0));
3532 u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
3533 u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
3534 TREE_SIDE_EFFECTS (u) = 1;
3535 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
3538 if (sav_ofs)
3539 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
3540 else
3541 t = sav;
3543 u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
3544 build_int_2 (n_reg, 0));
3545 TREE_SIDE_EFFECTS (u) = 1;
3547 u = build1 (CONVERT_EXPR, integer_type_node, u);
3548 TREE_SIDE_EFFECTS (u) = 1;
3550 u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
3551 TREE_SIDE_EFFECTS (u) = 1;
3553 t = build (PLUS_EXPR, ptr_type_node, t, u);
3554 TREE_SIDE_EFFECTS (t) = 1;
3556 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
3557 if (r != addr_rtx)
3558 emit_move_insn (addr_rtx, r);
3560 emit_jump_insn (gen_jump (lab_over));
3561 emit_barrier ();
3564 emit_label (lab_false);
3566 /* ... otherwise out of the overflow area. */
3568 /* Make sure we don't find reg 7 for the next int arg.
3570 All AltiVec vectors go in the overflow area. So in the AltiVec
3571 case we need to get the vectors from the overflow area, but
3572 remember where the GPRs and FPRs are. */
3573 if (n_reg > 1 && (TREE_CODE (type) != VECTOR_TYPE
3574 || !TARGET_ALTIVEC))
3576 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
3577 TREE_SIDE_EFFECTS (t) = 1;
3578 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3581 /* Care for on-stack alignment if needed. */
3582 if (rsize <= 1)
3583 t = ovf;
3584 else
3586 int align;
3588 /* AltiVec vectors are 16 byte aligned. */
3589 if (TARGET_ALTIVEC && TREE_CODE (type) == VECTOR_TYPE)
3590 align = 15;
3591 else
3592 align = 7;
3594 t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
3595 t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
3597 t = save_expr (t);
3599 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
3600 if (r != addr_rtx)
3601 emit_move_insn (addr_rtx, r);
3603 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
3604 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
3605 TREE_SIDE_EFFECTS (t) = 1;
3606 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3608 emit_label (lab_over);
3610 if (indirect_p)
3612 r = gen_rtx_MEM (Pmode, addr_rtx);
3613 set_mem_alias_set (r, get_varargs_alias_set ());
3614 emit_move_insn (addr_rtx, r);
3617 return addr_rtx;
/* Builtins.  */

/* Register NAME as a machine-specific builtin with the given TYPE and
   function CODE, but only when every bit of MASK is enabled in
   target_flags; otherwise the builtin is quietly omitted.  */
#define def_builtin(MASK, NAME, TYPE, CODE)			\
  do {								\
    if (target_flags & (MASK))					\
      builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,	\
			NULL, NULL_TREE);			\
  } while (0)
3629 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
3631 static const struct builtin_description bdesc_3arg[] =
3633 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
3634 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
3635 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
3636 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
3637 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
3638 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
3639 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
3640 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
3641 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
3642 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
3643 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
3644 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
3645 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
3646 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
3647 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
3648 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
3649 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
3650 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
3651 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
3652 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
3653 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
3654 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
3655 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
3658 /* DST operations: void foo (void *, const int, const char). */
3660 static const struct builtin_description bdesc_dst[] =
3662 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
3663 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
3664 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
3665 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
3668 /* Simple binary operations: VECc = foo (VECa, VECb). */
3670 static struct builtin_description bdesc_2arg[] =
3672 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
3673 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
3674 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
3675 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
3676 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
3677 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
3678 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
3679 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
3680 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
3681 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
3682 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
3683 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
3684 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
3685 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
3686 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
3687 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
3688 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
3689 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
3690 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
3691 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
3692 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
3693 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
3694 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
3695 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
3696 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
3697 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
3698 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
3699 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
3700 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
3701 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
3702 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
3703 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
3704 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
3705 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
3706 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
3707 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
3708 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
3709 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
3710 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
3711 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
3712 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
3713 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
3714 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
3715 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
3716 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
3717 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
3718 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
3719 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
3720 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
3721 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
3722 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
3723 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
3724 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
3725 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
3726 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
3727 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
3728 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
3729 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
3730 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
3731 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
3732 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
3733 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
3734 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
3735 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
3736 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
3737 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
3738 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
3739 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
3740 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
3741 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
3742 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
3743 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
3744 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
3745 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
3746 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
3747 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
3748 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
3749 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
3750 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
3751 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
3752 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
3753 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
3754 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
3755 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
3756 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
3757 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
3758 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
3759 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
3760 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
3761 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
3762 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
3763 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
3764 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
3765 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
3766 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
3767 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
3768 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
3769 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
3770 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
3771 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
3772 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
3773 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
3774 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
3775 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
3776 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
3777 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
3778 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
3779 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
3780 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
3781 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
3782 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
3783 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
3784 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
3786 /* Place holder, leave as first spe builtin. */
3787 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
3788 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
3789 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
3790 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
3791 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
3792 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
3793 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
3794 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
3795 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
3796 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
3797 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
3798 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
3799 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
3800 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
3801 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
3802 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
3803 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
3804 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
3805 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
3806 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
3807 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
3808 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
3809 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
3810 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
3811 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
3812 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
3813 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
3814 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
3815 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
3816 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
3817 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
3818 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
3819 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
3820 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
3821 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
3822 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
3823 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
3824 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
3825 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
3826 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
3827 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
3828 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
3829 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
3830 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
3831 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
3832 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
3833 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
3834 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
3835 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
3836 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
3837 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
3838 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
3839 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
3840 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
3841 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
3842 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
3843 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
3844 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
3845 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
3846 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
3847 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
3848 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
3849 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
3850 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
3851 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
3852 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
3853 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
3854 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
3855 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
3856 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
3857 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
3858 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
3859 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
3860 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
3861 { 0, CODE_FOR_spe_evmwlsmf, "__builtin_spe_evmwlsmf", SPE_BUILTIN_EVMWLSMF },
3862 { 0, CODE_FOR_spe_evmwlsmfa, "__builtin_spe_evmwlsmfa", SPE_BUILTIN_EVMWLSMFA },
3863 { 0, CODE_FOR_spe_evmwlsmfaaw, "__builtin_spe_evmwlsmfaaw", SPE_BUILTIN_EVMWLSMFAAW },
3864 { 0, CODE_FOR_spe_evmwlsmfanw, "__builtin_spe_evmwlsmfanw", SPE_BUILTIN_EVMWLSMFANW },
3865 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
3866 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
3867 { 0, CODE_FOR_spe_evmwlssf, "__builtin_spe_evmwlssf", SPE_BUILTIN_EVMWLSSF },
3868 { 0, CODE_FOR_spe_evmwlssfa, "__builtin_spe_evmwlssfa", SPE_BUILTIN_EVMWLSSFA },
3869 { 0, CODE_FOR_spe_evmwlssfaaw, "__builtin_spe_evmwlssfaaw", SPE_BUILTIN_EVMWLSSFAAW },
3870 { 0, CODE_FOR_spe_evmwlssfanw, "__builtin_spe_evmwlssfanw", SPE_BUILTIN_EVMWLSSFANW },
3871 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
3872 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
3873 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
3874 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
3875 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
3876 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
3877 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
3878 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
3879 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
3880 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
3881 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
3882 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
3883 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
3884 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
3885 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
3886 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
3887 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
3888 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
3889 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
3890 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
3891 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
3892 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
3893 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
3894 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
3895 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
3896 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
3897 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
3898 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
3899 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
3900 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
3901 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
3902 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
3903 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
3905 /* SPE binary operations expecting a 5-bit unsigned literal. */
3906 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
3908 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
3909 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
3910 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
3911 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
3912 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
3913 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
3914 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
3915 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
3916 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
3917 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
3918 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
3919 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
3920 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
3921 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
3922 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
3923 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
3924 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
3925 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
3926 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
3927 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
3928 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
3929 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
3930 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
3931 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
3932 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
3933 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
3935 /* Place-holder. Leave as last binary SPE builtin. */
3936 { 0, CODE_FOR_spe_evxor, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
/* AltiVec predicates.  */

/* Descriptor for one AltiVec predicate builtin.  Unlike the plain
   builtin_description, a predicate also carries the assembler opcode
   string, which the expander passes to the insn as a SYMBOL_REF.  */
struct builtin_description_predicates
{
  /* Target flag mask (e.g. MASK_ALTIVEC) that must be enabled for
     this builtin to exist.  */
  const unsigned int mask;
  /* Insn pattern used to expand the predicate.  */
  const enum insn_code icode;
  /* Assembler mnemonic to emit, e.g. "*vcmpbfp.".  */
  const char *opcode;
  /* User-visible builtin name.  */
  const char *const name;
  /* Enumerator identifying this builtin.  */
  const enum rs6000_builtins code;
};
/* Table of the AltiVec vector-compare predicate builtins
   (__builtin_altivec_vcmp*_p).  Each entry pairs the generic
   altivec_predicate pattern for the element mode with the recording
   ("dot") form of the compare opcode.  */
static const struct builtin_description_predicates bdesc_altivec_preds[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
};
/* SPE predicates.  NOTE: the first and last entries double as range
   markers for the SPE predicate expander, so their positions must not
   change.  */
static struct builtin_description bdesc_spe_predicates[] =
{
  /* Place-holder.  Leave as first.  */
  { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
  { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
  { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
  { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
  { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
  { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
  { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
  { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
  { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
  { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
  /* Place-holder.  Leave as last.  */
  { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
};
/* SPE evsel predicates.  These pair a compare pattern with an evsel
   builtin that selects elements based on the compare result.  NOTE:
   the first and last entries are range markers; keep them in place.  */
static struct builtin_description bdesc_spe_evsel[] =
{
  /* Place-holder.  Leave as first.  */
  { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
  { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
  { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
  { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
  { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
  { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
  { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
  { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
  { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
  { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
  /* Place-holder.  Leave as last.  */
  { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
};
/* ABS* operations.  Expanded via altivec_expand_abs_builtin, whose
   patterns take two extra scratch operands.  */

static const struct builtin_description bdesc_abs[] =
{
  { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
  { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
  { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
  { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
};
/* Simple unary operations: VECb = foo (unsigned literal) or VECb =
   foo (VECa).  */

static struct builtin_description bdesc_1arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
  /* vrfiz is round-towards-zero, which the generic ftrunc pattern
     already provides.  */
  { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },

  /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
     end with SPE_BUILTIN_EVSUBFUSIAAW.  */
  { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
  { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
  { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
  { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
  { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
  { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
  { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
  { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
  { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
  { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
  { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
  { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
  { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
  { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
  { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
  { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
  { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
  { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
  { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
  { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
  { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
  { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
  { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
  { 0, CODE_FOR_spe_evneg, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
  { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
  { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
  { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
  { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
  { 0, CODE_FOR_spe_evsplatfi, "__builtin_spe_evsplatfi", SPE_BUILTIN_EVSPLATFI },
  { 0, CODE_FOR_spe_evsplati, "__builtin_spe_evsplati", SPE_BUILTIN_EVSPLATI },

  /* Place-holder.  Leave as last unary SPE builtin.  */
  { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
};
4076 static rtx
4077 rs6000_expand_unop_builtin (icode, arglist, target)
4078 enum insn_code icode;
4079 tree arglist;
4080 rtx target;
4082 rtx pat;
4083 tree arg0 = TREE_VALUE (arglist);
4084 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4085 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4086 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4088 /* If we got invalid arguments bail out before generating bad rtl. */
4089 if (arg0 == error_mark_node)
4090 return const0_rtx;
4092 switch (icode)
4094 /* Only allow 5-bit *signed* literals. */
4095 case CODE_FOR_altivec_vspltisb:
4096 case CODE_FOR_altivec_vspltish:
4097 case CODE_FOR_altivec_vspltisw:
4098 case CODE_FOR_spe_evsplatfi:
4099 case CODE_FOR_spe_evsplati:
4100 if (GET_CODE (op0) != CONST_INT
4101 || INTVAL (op0) > 0x1f
4102 || INTVAL (op0) < -0x1f)
4104 error ("argument 1 must be a 5-bit signed literal");
4105 return const0_rtx;
4107 break;
4108 default:
4109 break;
4112 if (target == 0
4113 || GET_MODE (target) != tmode
4114 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4115 target = gen_reg_rtx (tmode);
4117 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4118 op0 = copy_to_mode_reg (mode0, op0);
4120 pat = GEN_FCN (icode) (target, op0);
4121 if (! pat)
4122 return 0;
4123 emit_insn (pat);
4125 return target;
4128 static rtx
4129 altivec_expand_abs_builtin (icode, arglist, target)
4130 enum insn_code icode;
4131 tree arglist;
4132 rtx target;
4134 rtx pat, scratch1, scratch2;
4135 tree arg0 = TREE_VALUE (arglist);
4136 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4137 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4138 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4140 /* If we have invalid arguments, bail out before generating bad rtl. */
4141 if (arg0 == error_mark_node)
4142 return const0_rtx;
4144 if (target == 0
4145 || GET_MODE (target) != tmode
4146 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4147 target = gen_reg_rtx (tmode);
4149 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4150 op0 = copy_to_mode_reg (mode0, op0);
4152 scratch1 = gen_reg_rtx (mode0);
4153 scratch2 = gen_reg_rtx (mode0);
4155 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
4156 if (! pat)
4157 return 0;
4158 emit_insn (pat);
4160 return target;
/* Expand a binary builtin: emit insn ICODE computing op (ARG0, ARG1)
   from the two elements of ARGLIST.  TARGET is a suggested result
   register.  Return the result rtx, const0_rtx after diagnosing a bad
   argument, or 0 if the pattern failed to generate.  */

static rtx
rs6000_expand_binop_builtin (icode, arglist, target)
     enum insn_code icode;
     tree arglist;
     rtx target;
{
  rtx pat;
  tree arg0 = TREE_VALUE (arglist);
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;

  /* If we got invalid arguments bail out before generating bad rtl.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  switch (icode)
    {
      /* Only allow 5-bit unsigned literals.  For each of these
	 patterns the second argument is an immediate field in the
	 instruction, so it must be a compile-time constant that fits
	 in 5 bits; the check is on the tree (ARG1), not the rtx.  */
    case CODE_FOR_altivec_vcfux:
    case CODE_FOR_altivec_vcfsx:
    case CODE_FOR_altivec_vctsxs:
    case CODE_FOR_altivec_vctuxs:
    case CODE_FOR_altivec_vspltb:
    case CODE_FOR_altivec_vsplth:
    case CODE_FOR_altivec_vspltw:
    case CODE_FOR_spe_evaddiw:
    case CODE_FOR_spe_evldd:
    case CODE_FOR_spe_evldh:
    case CODE_FOR_spe_evldw:
    case CODE_FOR_spe_evlhhesplat:
    case CODE_FOR_spe_evlhhossplat:
    case CODE_FOR_spe_evlhhousplat:
    case CODE_FOR_spe_evlwhe:
    case CODE_FOR_spe_evlwhos:
    case CODE_FOR_spe_evlwhou:
    case CODE_FOR_spe_evlwhsplat:
    case CODE_FOR_spe_evlwwsplat:
    case CODE_FOR_spe_evrlwi:
    case CODE_FOR_spe_evslwi:
    case CODE_FOR_spe_evsrwis:
    case CODE_FOR_spe_evsrwiu:
      if (TREE_CODE (arg1) != INTEGER_CST
	  || TREE_INT_CST_LOW (arg1) & ~0x1f)
	{
	  error ("argument 2 must be a 5-bit unsigned literal");
	  return const0_rtx;
	}
      break;
    default:
      break;
    }

  /* Reuse TARGET only if mode and output predicate accept it.  */
  if (target == 0
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  pat = GEN_FCN (icode) (target, op0, op1);
  if (! pat)
    return 0;
  emit_insn (pat);

  return target;
}
/* Expand an AltiVec predicate builtin (__builtin_altivec_vcmp*_p).
   The first argument in ARGLIST selects which CR6 test to perform
   (0..3); the remaining two are the vectors to compare.  ICODE is the
   altivec_predicate pattern for the element mode and OPCODE is the
   assembler mnemonic, passed to the insn as a SYMBOL_REF.  The result
   (0 or 1) is produced in TARGET by a follow-up CR6 test insn.  */

static rtx
altivec_expand_predicate_builtin (icode, opcode, arglist, target)
     enum insn_code icode;
     const char *opcode;
     tree arglist;
     rtx target;
{
  rtx pat, scratch;
  tree cr6_form = TREE_VALUE (arglist);
  tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  enum machine_mode tmode = SImode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
  int cr6_form_int;

  /* The CR6 selector must be a compile-time constant.  */
  if (TREE_CODE (cr6_form) != INTEGER_CST)
    {
      error ("argument 1 of __builtin_altivec_predicate must be a constant");
      return const0_rtx;
    }
  else
    cr6_form_int = TREE_INT_CST_LOW (cr6_form);

  /* Both vector operands must have the same mode by construction of
     the predicate patterns.  */
  if (mode0 != mode1)
    abort ();

  /* If we have invalid arguments, bail out before generating bad rtl.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  if (target == 0
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  /* The compare result itself is discarded; only CR6 matters.  */
  scratch = gen_reg_rtx (mode0);

  pat = GEN_FCN (icode) (scratch, op0, op1,
			 gen_rtx (SYMBOL_REF, Pmode, opcode));
  if (! pat)
    return 0;
  emit_insn (pat);

  /* The vec_any* and vec_all* predicates use the same opcodes for two
     different operations, but the bits in CR6 will be different
     depending on what information we want.  So we have to play tricks
     with CR6 to get the right bits out.

     If you think this is disgusting, look at the specs for the
     AltiVec predicates.  */

  switch (cr6_form_int)
    {
    case 0:
      emit_insn (gen_cr6_test_for_zero (target));
      break;
    case 1:
      emit_insn (gen_cr6_test_for_zero_reverse (target));
      break;
    case 2:
      emit_insn (gen_cr6_test_for_lt (target));
      break;
    case 3:
      emit_insn (gen_cr6_test_for_lt_reverse (target));
      break;
    default:
      error ("argument 1 of __builtin_altivec_predicate is out of range");
      break;
    }

  return target;
}
/* Expand an AltiVec vector-store builtin.  ICODE is one of the stvx /
   stve*x / stvxl patterns and ARGLIST holds three arguments.  Note
   the permutation: the builtin's arguments (ARG0, ARG1, ARG2) map to
   the insn's operands 2, 0 and 1 respectively, so the predicates and
   modes are looked up accordingly and GEN_FCN is called with
   (op1, op2, op0).  Stores produce no value; always returns NULL_RTX.  */

static rtx
altivec_expand_stv_builtin (icode, arglist)
     enum insn_code icode;
     tree arglist;
{
  tree arg0 = TREE_VALUE (arglist);
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
  rtx pat;
  enum machine_mode mode0 = insn_data[icode].operand[0].mode;
  enum machine_mode mode1 = insn_data[icode].operand[1].mode;
  enum machine_mode mode2 = insn_data[icode].operand[2].mode;

  /* Invalid arguments.  Bail before doing anything stoopid!  */
  if (arg0 == error_mark_node
      || arg1 == error_mark_node
      || arg2 == error_mark_node)
    return const0_rtx;

  /* ARG0 feeds insn operand 2, ARG1 operand 0, ARG2 operand 1.  */
  if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
    op0 = copy_to_mode_reg (mode2, op0);
  if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
    op1 = copy_to_mode_reg (mode0, op1);
  if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
    op2 = copy_to_mode_reg (mode1, op2);

  pat = GEN_FCN (icode) (op1, op2, op0);
  if (pat)
    emit_insn (pat);
  return NULL_RTX;
}
/* Expand a ternary builtin: emit insn ICODE computing
   op (ARG0, ARG1, ARG2) from the three elements of ARGLIST.  TARGET
   is a suggested result register.  Return the result rtx, const0_rtx
   after diagnosing a bad argument, or 0 if the pattern failed to
   generate.  */

static rtx
rs6000_expand_ternop_builtin (icode, arglist, target)
     enum insn_code icode;
     tree arglist;
     rtx target;
{
  rtx pat;
  tree arg0 = TREE_VALUE (arglist);
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
  enum machine_mode mode2 = insn_data[icode].operand[3].mode;

  /* If we got invalid arguments bail out before generating bad rtl.  */
  if (arg0 == error_mark_node
      || arg1 == error_mark_node
      || arg2 == error_mark_node)
    return const0_rtx;

  switch (icode)
    {
      /* Only allow 4-bit unsigned literals.  vsldoi's shift count is
	 a 4-bit immediate field, so ARG2 must be a constant 0..15.  */
    case CODE_FOR_altivec_vsldoi_4sf:
    case CODE_FOR_altivec_vsldoi_4si:
    case CODE_FOR_altivec_vsldoi_8hi:
    case CODE_FOR_altivec_vsldoi_16qi:
      if (TREE_CODE (arg2) != INTEGER_CST
	  || TREE_INT_CST_LOW (arg2) & ~0xf)
	{
	  error ("argument 3 must be a 4-bit unsigned literal");
	  return const0_rtx;
	}
      break;
    default:
      break;
    }

  /* Reuse TARGET only if mode and output predicate accept it.  */
  if (target == 0
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);
  if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
    op2 = copy_to_mode_reg (mode2, op2);

  pat = GEN_FCN (icode) (target, op0, op1, op2);
  if (! pat)
    return 0;
  emit_insn (pat);

  return target;
}
4415 /* Expand the lvx builtins. */
4416 static rtx
4417 altivec_expand_ld_builtin (exp, target, expandedp)
4418 tree exp;
4419 rtx target;
4420 bool *expandedp;
4422 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4423 tree arglist = TREE_OPERAND (exp, 1);
4424 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4425 tree arg0;
4426 enum machine_mode tmode, mode0;
4427 rtx pat, op0;
4428 enum insn_code icode;
4430 switch (fcode)
4432 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
4433 icode = CODE_FOR_altivec_lvx_16qi;
4434 break;
4435 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
4436 icode = CODE_FOR_altivec_lvx_8hi;
4437 break;
4438 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
4439 icode = CODE_FOR_altivec_lvx_4si;
4440 break;
4441 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
4442 icode = CODE_FOR_altivec_lvx_4sf;
4443 break;
4444 default:
4445 *expandedp = false;
4446 return NULL_RTX;
4449 *expandedp = true;
4451 arg0 = TREE_VALUE (arglist);
4452 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4453 tmode = insn_data[icode].operand[0].mode;
4454 mode0 = insn_data[icode].operand[1].mode;
4456 if (target == 0
4457 || GET_MODE (target) != tmode
4458 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4459 target = gen_reg_rtx (tmode);
4461 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4462 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
4464 pat = GEN_FCN (icode) (target, op0);
4465 if (! pat)
4466 return 0;
4467 emit_insn (pat);
4468 return target;
4471 /* Expand the stvx builtins. */
4472 static rtx
4473 altivec_expand_st_builtin (exp, target, expandedp)
4474 tree exp;
4475 rtx target ATTRIBUTE_UNUSED;
4476 bool *expandedp;
4478 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4479 tree arglist = TREE_OPERAND (exp, 1);
4480 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4481 tree arg0, arg1;
4482 enum machine_mode mode0, mode1;
4483 rtx pat, op0, op1;
4484 enum insn_code icode;
4486 switch (fcode)
4488 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
4489 icode = CODE_FOR_altivec_stvx_16qi;
4490 break;
4491 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
4492 icode = CODE_FOR_altivec_stvx_8hi;
4493 break;
4494 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
4495 icode = CODE_FOR_altivec_stvx_4si;
4496 break;
4497 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
4498 icode = CODE_FOR_altivec_stvx_4sf;
4499 break;
4500 default:
4501 *expandedp = false;
4502 return NULL_RTX;
4505 arg0 = TREE_VALUE (arglist);
4506 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4507 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4508 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4509 mode0 = insn_data[icode].operand[0].mode;
4510 mode1 = insn_data[icode].operand[1].mode;
4512 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4513 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
4514 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
4515 op1 = copy_to_mode_reg (mode1, op1);
4517 pat = GEN_FCN (icode) (op0, op1);
4518 if (pat)
4519 emit_insn (pat);
4521 *expandedp = true;
4522 return NULL_RTX;
4525 /* Expand the dst builtins. */
4526 static rtx
4527 altivec_expand_dst_builtin (exp, target, expandedp)
4528 tree exp;
4529 rtx target ATTRIBUTE_UNUSED;
4530 bool *expandedp;
4532 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4533 tree arglist = TREE_OPERAND (exp, 1);
4534 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4535 tree arg0, arg1, arg2;
4536 enum machine_mode mode0, mode1, mode2;
4537 rtx pat, op0, op1, op2;
4538 struct builtin_description *d;
4539 size_t i;
4541 *expandedp = false;
4543 /* Handle DST variants. */
4544 d = (struct builtin_description *) bdesc_dst;
4545 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
4546 if (d->code == fcode)
4548 arg0 = TREE_VALUE (arglist);
4549 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4550 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4551 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4552 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4553 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4554 mode0 = insn_data[d->icode].operand[0].mode;
4555 mode1 = insn_data[d->icode].operand[1].mode;
4556 mode2 = insn_data[d->icode].operand[2].mode;
4558 /* Invalid arguments, bail out before generating bad rtl. */
4559 if (arg0 == error_mark_node
4560 || arg1 == error_mark_node
4561 || arg2 == error_mark_node)
4562 return const0_rtx;
4564 if (TREE_CODE (arg2) != INTEGER_CST
4565 || TREE_INT_CST_LOW (arg2) & ~0x3)
4567 error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
4568 return const0_rtx;
4571 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
4572 op0 = copy_to_mode_reg (mode0, op0);
4573 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
4574 op1 = copy_to_mode_reg (mode1, op1);
4576 pat = GEN_FCN (d->icode) (op0, op1, op2);
4577 if (pat != 0)
4578 emit_insn (pat);
4580 *expandedp = true;
4581 return NULL_RTX;
4584 return NULL_RTX;
4587 /* Expand the builtin in EXP and store the result in TARGET. Store
4588 true in *EXPANDEDP if we found a builtin to expand. */
4589 static rtx
4590 altivec_expand_builtin (exp, target, expandedp)
4591 tree exp;
4592 rtx target;
4593 bool *expandedp;
4595 struct builtin_description *d;
4596 struct builtin_description_predicates *dp;
4597 size_t i;
4598 enum insn_code icode;
4599 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4600 tree arglist = TREE_OPERAND (exp, 1);
4601 tree arg0;
4602 rtx op0, pat;
4603 enum machine_mode tmode, mode0;
4604 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4606 target = altivec_expand_ld_builtin (exp, target, expandedp);
4607 if (*expandedp)
4608 return target;
4610 target = altivec_expand_st_builtin (exp, target, expandedp);
4611 if (*expandedp)
4612 return target;
4614 target = altivec_expand_dst_builtin (exp, target, expandedp);
4615 if (*expandedp)
4616 return target;
4618 *expandedp = true;
4620 switch (fcode)
4622 case ALTIVEC_BUILTIN_STVX:
4623 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
4624 case ALTIVEC_BUILTIN_STVEBX:
4625 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
4626 case ALTIVEC_BUILTIN_STVEHX:
4627 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
4628 case ALTIVEC_BUILTIN_STVEWX:
4629 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
4630 case ALTIVEC_BUILTIN_STVXL:
4631 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
4633 case ALTIVEC_BUILTIN_MFVSCR:
4634 icode = CODE_FOR_altivec_mfvscr;
4635 tmode = insn_data[icode].operand[0].mode;
4637 if (target == 0
4638 || GET_MODE (target) != tmode
4639 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4640 target = gen_reg_rtx (tmode);
4642 pat = GEN_FCN (icode) (target);
4643 if (! pat)
4644 return 0;
4645 emit_insn (pat);
4646 return target;
4648 case ALTIVEC_BUILTIN_MTVSCR:
4649 icode = CODE_FOR_altivec_mtvscr;
4650 arg0 = TREE_VALUE (arglist);
4651 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4652 mode0 = insn_data[icode].operand[0].mode;
4654 /* If we got invalid arguments bail out before generating bad rtl. */
4655 if (arg0 == error_mark_node)
4656 return const0_rtx;
4658 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4659 op0 = copy_to_mode_reg (mode0, op0);
4661 pat = GEN_FCN (icode) (op0);
4662 if (pat)
4663 emit_insn (pat);
4664 return NULL_RTX;
4666 case ALTIVEC_BUILTIN_DSSALL:
4667 emit_insn (gen_altivec_dssall ());
4668 return NULL_RTX;
4670 case ALTIVEC_BUILTIN_DSS:
4671 icode = CODE_FOR_altivec_dss;
4672 arg0 = TREE_VALUE (arglist);
4673 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4674 mode0 = insn_data[icode].operand[0].mode;
4676 /* If we got invalid arguments bail out before generating bad rtl. */
4677 if (arg0 == error_mark_node)
4678 return const0_rtx;
4680 if (TREE_CODE (arg0) != INTEGER_CST
4681 || TREE_INT_CST_LOW (arg0) & ~0x3)
4683 error ("argument to dss must be a 2-bit unsigned literal");
4684 return const0_rtx;
4687 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4688 op0 = copy_to_mode_reg (mode0, op0);
4690 emit_insn (gen_altivec_dss (op0));
4691 return NULL_RTX;
4694 /* Expand abs* operations. */
4695 d = (struct builtin_description *) bdesc_abs;
4696 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
4697 if (d->code == fcode)
4698 return altivec_expand_abs_builtin (d->icode, arglist, target);
4700 /* Expand the AltiVec predicates. */
4701 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
4702 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
4703 if (dp->code == fcode)
4704 return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
4706 /* LV* are funky. We initialized them differently. */
4707 switch (fcode)
4709 case ALTIVEC_BUILTIN_LVSL:
4710 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsl,
4711 arglist, target);
4712 case ALTIVEC_BUILTIN_LVSR:
4713 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsr,
4714 arglist, target);
4715 case ALTIVEC_BUILTIN_LVEBX:
4716 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvebx,
4717 arglist, target);
4718 case ALTIVEC_BUILTIN_LVEHX:
4719 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvehx,
4720 arglist, target);
4721 case ALTIVEC_BUILTIN_LVEWX:
4722 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvewx,
4723 arglist, target);
4724 case ALTIVEC_BUILTIN_LVXL:
4725 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvxl,
4726 arglist, target);
4727 case ALTIVEC_BUILTIN_LVX:
4728 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvx,
4729 arglist, target);
4730 default:
4731 break;
4732 /* Fall through. */
4735 *expandedp = false;
4736 return NULL_RTX;
4739 /* Binops that need to be initialized manually, but can be expanded
4740 automagically by rs6000_expand_binop_builtin. */
4741 static struct builtin_description bdesc_2arg_spe[] =
4743 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
4744 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
4745 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
4746 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
4747 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
4748 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
4749 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
4750 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
4751 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
4752 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
4753 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
4754 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
4755 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
4756 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
4757 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
4758 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
4759 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
4760 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
4761 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
4762 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
4763 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
4764 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
4767 /* Expand the builtin in EXP and store the result in TARGET. Store
4768 true in *EXPANDEDP if we found a builtin to expand.
4770 This expands the SPE builtins that are not simple unary and binary
4771 operations. */
4772 static rtx
4773 spe_expand_builtin (exp, target, expandedp)
4774 tree exp;
4775 rtx target;
4776 bool *expandedp;
4778 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4779 tree arglist = TREE_OPERAND (exp, 1);
4780 tree arg1, arg0;
4781 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4782 enum insn_code icode;
4783 enum machine_mode tmode, mode0;
4784 rtx pat, op0;
4785 struct builtin_description *d;
4786 size_t i;
4788 *expandedp = true;
4790 /* Syntax check for a 5-bit unsigned immediate. */
4791 switch (fcode)
4793 case SPE_BUILTIN_EVSTDD:
4794 case SPE_BUILTIN_EVSTDH:
4795 case SPE_BUILTIN_EVSTDW:
4796 case SPE_BUILTIN_EVSTWHE:
4797 case SPE_BUILTIN_EVSTWHO:
4798 case SPE_BUILTIN_EVSTWWE:
4799 case SPE_BUILTIN_EVSTWWO:
4800 arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4801 if (TREE_CODE (arg1) != INTEGER_CST
4802 || TREE_INT_CST_LOW (arg1) & ~0x1f)
4804 error ("argument 2 must be a 5-bit unsigned literal");
4805 return const0_rtx;
4807 break;
4808 default:
4809 break;
4812 d = (struct builtin_description *) bdesc_2arg_spe;
4813 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
4814 if (d->code == fcode)
4815 return rs6000_expand_binop_builtin (d->icode, arglist, target);
4817 d = (struct builtin_description *) bdesc_spe_predicates;
4818 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
4819 if (d->code == fcode)
4820 return spe_expand_predicate_builtin (d->icode, arglist, target);
4822 d = (struct builtin_description *) bdesc_spe_evsel;
4823 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
4824 if (d->code == fcode)
4825 return spe_expand_evsel_builtin (d->icode, arglist, target);
4827 switch (fcode)
4829 case SPE_BUILTIN_EVSTDDX:
4830 return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
4831 case SPE_BUILTIN_EVSTDHX:
4832 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
4833 case SPE_BUILTIN_EVSTDWX:
4834 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
4835 case SPE_BUILTIN_EVSTWHEX:
4836 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
4837 case SPE_BUILTIN_EVSTWHOX:
4838 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
4839 case SPE_BUILTIN_EVSTWWEX:
4840 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
4841 case SPE_BUILTIN_EVSTWWOX:
4842 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
4843 case SPE_BUILTIN_EVSTDD:
4844 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
4845 case SPE_BUILTIN_EVSTDH:
4846 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
4847 case SPE_BUILTIN_EVSTDW:
4848 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
4849 case SPE_BUILTIN_EVSTWHE:
4850 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
4851 case SPE_BUILTIN_EVSTWHO:
4852 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
4853 case SPE_BUILTIN_EVSTWWE:
4854 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
4855 case SPE_BUILTIN_EVSTWWO:
4856 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
4857 case SPE_BUILTIN_MFSPEFSCR:
4858 icode = CODE_FOR_spe_mfspefscr;
4859 tmode = insn_data[icode].operand[0].mode;
4861 if (target == 0
4862 || GET_MODE (target) != tmode
4863 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4864 target = gen_reg_rtx (tmode);
4866 pat = GEN_FCN (icode) (target);
4867 if (! pat)
4868 return 0;
4869 emit_insn (pat);
4870 return target;
4871 case SPE_BUILTIN_MTSPEFSCR:
4872 icode = CODE_FOR_spe_mtspefscr;
4873 arg0 = TREE_VALUE (arglist);
4874 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4875 mode0 = insn_data[icode].operand[0].mode;
4877 if (arg0 == error_mark_node)
4878 return const0_rtx;
4880 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4881 op0 = copy_to_mode_reg (mode0, op0);
4883 pat = GEN_FCN (icode) (op0);
4884 if (pat)
4885 emit_insn (pat);
4886 return NULL_RTX;
4887 default:
4888 break;
4891 *expandedp = false;
4892 return NULL_RTX;
4895 static rtx
4896 spe_expand_predicate_builtin (icode, arglist, target)
4897 enum insn_code icode;
4898 tree arglist;
4899 rtx target;
4901 rtx pat, scratch, tmp;
4902 tree form = TREE_VALUE (arglist);
4903 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
4904 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4905 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4906 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4907 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4908 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
4909 int form_int;
4910 enum rtx_code code;
4912 if (TREE_CODE (form) != INTEGER_CST)
4914 error ("argument 1 of __builtin_spe_predicate must be a constant");
4915 return const0_rtx;
4917 else
4918 form_int = TREE_INT_CST_LOW (form);
4920 if (mode0 != mode1)
4921 abort ();
4923 if (arg0 == error_mark_node || arg1 == error_mark_node)
4924 return const0_rtx;
4926 if (target == 0
4927 || GET_MODE (target) != SImode
4928 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
4929 target = gen_reg_rtx (SImode);
4931 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4932 op0 = copy_to_mode_reg (mode0, op0);
4933 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4934 op1 = copy_to_mode_reg (mode1, op1);
4936 scratch = gen_reg_rtx (CCmode);
4938 pat = GEN_FCN (icode) (scratch, op0, op1);
4939 if (! pat)
4940 return const0_rtx;
4941 emit_insn (pat);
4943 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
4944 _lower_. We use one compare, but look in different bits of the
4945 CR for each variant.
4947 There are 2 elements in each SPE simd type (upper/lower). The CR
4948 bits are set as follows:
4950 BIT0 | BIT 1 | BIT 2 | BIT 3
4951 U | L | (U | L) | (U & L)
4953 So, for an "all" relationship, BIT 3 would be set.
4954 For an "any" relationship, BIT 2 would be set. Etc.
4956 Following traditional nomenclature, these bits map to:
4958 BIT0 | BIT 1 | BIT 2 | BIT 3
4959 LT | GT | EQ | OV
4961 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
4964 switch (form_int)
4966 /* All variant. OV bit. */
4967 case 0:
4968 /* We need to get to the OV bit, which is the ORDERED bit. We
4969 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
4970 that's ugly and will trigger a validate_condition_mode abort.
4971 So let's just use another pattern. */
4972 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
4973 return target;
4974 /* Any variant. EQ bit. */
4975 case 1:
4976 code = EQ;
4977 break;
4978 /* Upper variant. LT bit. */
4979 case 2:
4980 code = LT;
4981 break;
4982 /* Lower variant. GT bit. */
4983 case 3:
4984 code = GT;
4985 break;
4986 default:
4987 error ("argument 1 of __builtin_spe_predicate is out of range");
4988 return const0_rtx;
4991 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
4992 emit_move_insn (target, tmp);
4994 return target;
4997 /* The evsel builtins look like this:
4999 e = __builtin_spe_evsel_OP (a, b, c, d);
5001 and work like this:
5003 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
5004 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
5007 static rtx
5008 spe_expand_evsel_builtin (icode, arglist, target)
5009 enum insn_code icode;
5010 tree arglist;
5011 rtx target;
5013 rtx pat, scratch;
5014 tree arg0 = TREE_VALUE (arglist);
5015 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5016 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5017 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
5018 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5019 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5020 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5021 rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
5022 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5023 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5025 if (mode0 != mode1)
5026 abort ();
5028 if (arg0 == error_mark_node || arg1 == error_mark_node
5029 || arg2 == error_mark_node || arg3 == error_mark_node)
5030 return const0_rtx;
5032 if (target == 0
5033 || GET_MODE (target) != mode0
5034 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
5035 target = gen_reg_rtx (mode0);
5037 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5038 op0 = copy_to_mode_reg (mode0, op0);
5039 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
5040 op1 = copy_to_mode_reg (mode0, op1);
5041 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
5042 op2 = copy_to_mode_reg (mode0, op2);
5043 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
5044 op3 = copy_to_mode_reg (mode0, op3);
5046 /* Generate the compare. */
5047 scratch = gen_reg_rtx (CCmode);
5048 pat = GEN_FCN (icode) (scratch, op0, op1);
5049 if (! pat)
5050 return const0_rtx;
5051 emit_insn (pat);
5053 if (mode0 == V2SImode)
5054 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
5055 else
5056 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
5058 return target;
5061 /* Expand an expression EXP that calls a built-in function,
5062 with result going to TARGET if that's convenient
5063 (and in mode MODE if that's convenient).
5064 SUBTARGET may be used as the target for computing one of EXP's operands.
5065 IGNORE is nonzero if the value is to be ignored. */
5067 static rtx
5068 rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
5069 tree exp;
5070 rtx target;
5071 rtx subtarget ATTRIBUTE_UNUSED;
5072 enum machine_mode mode ATTRIBUTE_UNUSED;
5073 int ignore ATTRIBUTE_UNUSED;
5075 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5076 tree arglist = TREE_OPERAND (exp, 1);
5077 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5078 struct builtin_description *d;
5079 size_t i;
5080 rtx ret;
5081 bool success;
5083 if (TARGET_ALTIVEC)
5085 ret = altivec_expand_builtin (exp, target, &success);
5087 if (success)
5088 return ret;
5090 if (TARGET_SPE)
5092 ret = spe_expand_builtin (exp, target, &success);
5094 if (success)
5095 return ret;
5098 /* Handle simple unary operations. */
5099 d = (struct builtin_description *) bdesc_1arg;
5100 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
5101 if (d->code == fcode)
5102 return rs6000_expand_unop_builtin (d->icode, arglist, target);
5104 /* Handle simple binary operations. */
5105 d = (struct builtin_description *) bdesc_2arg;
5106 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
5107 if (d->code == fcode)
5108 return rs6000_expand_binop_builtin (d->icode, arglist, target);
5110 /* Handle simple ternary operations. */
5111 d = (struct builtin_description *) bdesc_3arg;
5112 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
5113 if (d->code == fcode)
5114 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
5116 abort ();
5117 return NULL_RTX;
5120 static void
5121 rs6000_init_builtins ()
5123 if (TARGET_SPE)
5124 spe_init_builtins ();
5125 if (TARGET_ALTIVEC)
5126 altivec_init_builtins ();
5127 rs6000_common_init_builtins ();
5130 /* Search through a set of builtins and enable the mask bits.
5131 DESC is an array of builtins.
5132 SIZE is the totaly number of builtins.
5133 START is the builtin enum at which to start.
5134 END is the builtin enum at which to end. */
5135 static void
5136 enable_mask_for_builtins (desc, size, start, end)
5137 struct builtin_description *desc;
5138 int size;
5139 enum rs6000_builtins start, end;
5141 int i;
5143 for (i = 0; i < size; ++i)
5144 if (desc[i].code == start)
5145 break;
5147 if (i == size)
5148 return;
5150 for (; i < size; ++i)
5152 /* Flip all the bits on. */
5153 desc[i].mask = target_flags;
5154 if (desc[i].code == end)
5155 break;
5159 static void
5160 spe_init_builtins (void)
5162 tree endlink = void_list_node;
5163 tree puint_type_node = build_pointer_type (unsigned_type_node);
5164 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
5165 tree pv2si_type_node = build_pointer_type (V2SI_type_node);
5166 struct builtin_description *d;
5167 size_t i;
5169 tree v2si_ftype_4_v2si
5170 = build_function_type
5171 (V2SI_type_node,
5172 tree_cons (NULL_TREE, V2SI_type_node,
5173 tree_cons (NULL_TREE, V2SI_type_node,
5174 tree_cons (NULL_TREE, V2SI_type_node,
5175 tree_cons (NULL_TREE, V2SI_type_node,
5176 endlink)))));
5178 tree v2sf_ftype_4_v2sf
5179 = build_function_type
5180 (V2SF_type_node,
5181 tree_cons (NULL_TREE, V2SF_type_node,
5182 tree_cons (NULL_TREE, V2SF_type_node,
5183 tree_cons (NULL_TREE, V2SF_type_node,
5184 tree_cons (NULL_TREE, V2SF_type_node,
5185 endlink)))));
5187 tree int_ftype_int_v2si_v2si
5188 = build_function_type
5189 (integer_type_node,
5190 tree_cons (NULL_TREE, integer_type_node,
5191 tree_cons (NULL_TREE, V2SI_type_node,
5192 tree_cons (NULL_TREE, V2SI_type_node,
5193 endlink))));
5195 tree int_ftype_int_v2sf_v2sf
5196 = build_function_type
5197 (integer_type_node,
5198 tree_cons (NULL_TREE, integer_type_node,
5199 tree_cons (NULL_TREE, V2SF_type_node,
5200 tree_cons (NULL_TREE, V2SF_type_node,
5201 endlink))));
5203 tree void_ftype_v2si_puint_int
5204 = build_function_type (void_type_node,
5205 tree_cons (NULL_TREE, V2SI_type_node,
5206 tree_cons (NULL_TREE, puint_type_node,
5207 tree_cons (NULL_TREE,
5208 integer_type_node,
5209 endlink))));
5211 tree void_ftype_v2si_puint_char
5212 = build_function_type (void_type_node,
5213 tree_cons (NULL_TREE, V2SI_type_node,
5214 tree_cons (NULL_TREE, puint_type_node,
5215 tree_cons (NULL_TREE,
5216 char_type_node,
5217 endlink))));
5219 tree void_ftype_v2si_pv2si_int
5220 = build_function_type (void_type_node,
5221 tree_cons (NULL_TREE, V2SI_type_node,
5222 tree_cons (NULL_TREE, pv2si_type_node,
5223 tree_cons (NULL_TREE,
5224 integer_type_node,
5225 endlink))));
5227 tree void_ftype_v2si_pv2si_char
5228 = build_function_type (void_type_node,
5229 tree_cons (NULL_TREE, V2SI_type_node,
5230 tree_cons (NULL_TREE, pv2si_type_node,
5231 tree_cons (NULL_TREE,
5232 char_type_node,
5233 endlink))));
5235 tree void_ftype_int
5236 = build_function_type (void_type_node,
5237 tree_cons (NULL_TREE, integer_type_node, endlink));
5239 tree int_ftype_void
5240 = build_function_type (integer_type_node,
5241 tree_cons (NULL_TREE, void_type_node, endlink));
5243 tree v2si_ftype_pv2si_int
5244 = build_function_type (V2SI_type_node,
5245 tree_cons (NULL_TREE, pv2si_type_node,
5246 tree_cons (NULL_TREE, integer_type_node,
5247 endlink)));
5249 tree v2si_ftype_puint_int
5250 = build_function_type (V2SI_type_node,
5251 tree_cons (NULL_TREE, puint_type_node,
5252 tree_cons (NULL_TREE, integer_type_node,
5253 endlink)));
5255 tree v2si_ftype_pushort_int
5256 = build_function_type (V2SI_type_node,
5257 tree_cons (NULL_TREE, pushort_type_node,
5258 tree_cons (NULL_TREE, integer_type_node,
5259 endlink)));
5261 /* The initialization of the simple binary and unary builtins is
5262 done in rs6000_common_init_builtins, but we have to enable the
5263 mask bits here manually because we have run out of `target_flags'
5264 bits. We really need to redesign this mask business. */
5266 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
5267 ARRAY_SIZE (bdesc_2arg),
5268 SPE_BUILTIN_EVADDW,
5269 SPE_BUILTIN_EVXOR);
5270 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
5271 ARRAY_SIZE (bdesc_1arg),
5272 SPE_BUILTIN_EVABS,
5273 SPE_BUILTIN_EVSUBFUSIAAW);
5274 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
5275 ARRAY_SIZE (bdesc_spe_predicates),
5276 SPE_BUILTIN_EVCMPEQ,
5277 SPE_BUILTIN_EVFSTSTLT);
5278 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
5279 ARRAY_SIZE (bdesc_spe_evsel),
5280 SPE_BUILTIN_EVSEL_CMPGTS,
5281 SPE_BUILTIN_EVSEL_FSTSTEQ);
5283 /* Initialize irregular SPE builtins. */
5285 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
5286 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
5287 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
5288 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
5289 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
5290 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
5291 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
5292 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
5293 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
5294 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
5295 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
5296 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
5297 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
5298 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
5299 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
5300 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
5302 /* Loads. */
5303 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
5304 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
5305 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
5306 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
5307 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
5308 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
5309 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
5310 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
5311 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
5312 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
5313 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
5314 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
5315 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
5316 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
5317 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
5318 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
5319 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
5320 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
5321 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
5322 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
5323 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
5324 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
5326 /* Predicates. */
5327 d = (struct builtin_description *) bdesc_spe_predicates;
5328 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
5330 tree type;
5332 switch (insn_data[d->icode].operand[1].mode)
5334 case V2SImode:
5335 type = int_ftype_int_v2si_v2si;
5336 break;
5337 case V2SFmode:
5338 type = int_ftype_int_v2sf_v2sf;
5339 break;
5340 default:
5341 abort ();
5344 def_builtin (d->mask, d->name, type, d->code);
5347 /* Evsel predicates. */
5348 d = (struct builtin_description *) bdesc_spe_evsel;
5349 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
5351 tree type;
5353 switch (insn_data[d->icode].operand[1].mode)
5355 case V2SImode:
5356 type = v2si_ftype_4_v2si;
5357 break;
5358 case V2SFmode:
5359 type = v2sf_ftype_4_v2sf;
5360 break;
5361 default:
5362 abort ();
5365 def_builtin (d->mask, d->name, type, d->code);
5369 static void
5370 altivec_init_builtins (void)
5372 struct builtin_description *d;
5373 struct builtin_description_predicates *dp;
5374 size_t i;
5375 tree pfloat_type_node = build_pointer_type (float_type_node);
5376 tree pint_type_node = build_pointer_type (integer_type_node);
5377 tree pshort_type_node = build_pointer_type (short_integer_type_node);
5378 tree pchar_type_node = build_pointer_type (char_type_node);
5380 tree pvoid_type_node = build_pointer_type (void_type_node);
5382 tree int_ftype_int_v4si_v4si
5383 = build_function_type_list (integer_type_node,
5384 integer_type_node, V4SI_type_node,
5385 V4SI_type_node, NULL_TREE);
5386 tree v4sf_ftype_pfloat
5387 = build_function_type_list (V4SF_type_node, pfloat_type_node, NULL_TREE);
5388 tree void_ftype_pfloat_v4sf
5389 = build_function_type_list (void_type_node,
5390 pfloat_type_node, V4SF_type_node, NULL_TREE);
5391 tree v4si_ftype_pint
5392 = build_function_type_list (V4SI_type_node, pint_type_node, NULL_TREE); tree void_ftype_pint_v4si
5393 = build_function_type_list (void_type_node,
5394 pint_type_node, V4SI_type_node, NULL_TREE);
5395 tree v8hi_ftype_pshort
5396 = build_function_type_list (V8HI_type_node, pshort_type_node, NULL_TREE);
5397 tree void_ftype_pshort_v8hi
5398 = build_function_type_list (void_type_node,
5399 pshort_type_node, V8HI_type_node, NULL_TREE);
5400 tree v16qi_ftype_pchar
5401 = build_function_type_list (V16QI_type_node, pchar_type_node, NULL_TREE);
5402 tree void_ftype_pchar_v16qi
5403 = build_function_type_list (void_type_node,
5404 pchar_type_node, V16QI_type_node, NULL_TREE);
5405 tree void_ftype_v4si
5406 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
5407 tree v8hi_ftype_void
5408 = build_function_type (V8HI_type_node, void_list_node);
5409 tree void_ftype_void
5410 = build_function_type (void_type_node, void_list_node);
5411 tree void_ftype_qi
5412 = build_function_type_list (void_type_node, char_type_node, NULL_TREE);
5413 tree v16qi_ftype_int_pvoid
5414 = build_function_type_list (V16QI_type_node,
5415 integer_type_node, pvoid_type_node, NULL_TREE);
5416 tree v8hi_ftype_int_pvoid
5417 = build_function_type_list (V8HI_type_node,
5418 integer_type_node, pvoid_type_node, NULL_TREE);
5419 tree v4si_ftype_int_pvoid
5420 = build_function_type_list (V4SI_type_node,
5421 integer_type_node, pvoid_type_node, NULL_TREE);
5422 tree void_ftype_v4si_int_pvoid
5423 = build_function_type_list (void_type_node,
5424 V4SI_type_node, integer_type_node,
5425 pvoid_type_node, NULL_TREE);
5426 tree void_ftype_v16qi_int_pvoid
5427 = build_function_type_list (void_type_node,
5428 V16QI_type_node, integer_type_node,
5429 pvoid_type_node, NULL_TREE);
5430 tree void_ftype_v8hi_int_pvoid
5431 = build_function_type_list (void_type_node,
5432 V8HI_type_node, integer_type_node,
5433 pvoid_type_node, NULL_TREE);
5434 tree int_ftype_int_v8hi_v8hi
5435 = build_function_type_list (integer_type_node,
5436 integer_type_node, V8HI_type_node,
5437 V8HI_type_node, NULL_TREE);
5438 tree int_ftype_int_v16qi_v16qi
5439 = build_function_type_list (integer_type_node,
5440 integer_type_node, V16QI_type_node,
5441 V16QI_type_node, NULL_TREE);
5442 tree int_ftype_int_v4sf_v4sf
5443 = build_function_type_list (integer_type_node,
5444 integer_type_node, V4SF_type_node,
5445 V4SF_type_node, NULL_TREE);
5446 tree v4si_ftype_v4si
5447 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
5448 tree v8hi_ftype_v8hi
5449 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
5450 tree v16qi_ftype_v16qi
5451 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
5452 tree v4sf_ftype_v4sf
5453 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
5454 tree void_ftype_pvoid_int_char
5455 = build_function_type_list (void_type_node,
5456 pvoid_type_node, integer_type_node,
5457 char_type_node, NULL_TREE);
5459 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pfloat, ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
5460 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf, ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
5461 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pint, ALTIVEC_BUILTIN_LD_INTERNAL_4si);
5462 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si, ALTIVEC_BUILTIN_ST_INTERNAL_4si);
5463 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pshort, ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
5464 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi, ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
5465 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pchar, ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
5466 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi, ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
5467 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
5468 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
5469 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
5470 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
5471 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSL);
5472 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSR);
5473 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEBX);
5474 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEHX);
5475 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEWX);
5476 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVXL);
5477 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVX);
5478 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
5479 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
5480 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
5481 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
5482 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);
5484 /* Add the DST variants. */
5485 d = (struct builtin_description *) bdesc_dst;
5486 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
5487 def_builtin (d->mask, d->name, void_ftype_pvoid_int_char, d->code);
5489 /* Initialize the predicates. */
5490 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
5491 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
5493 enum machine_mode mode1;
5494 tree type;
5496 mode1 = insn_data[dp->icode].operand[1].mode;
5498 switch (mode1)
5500 case V4SImode:
5501 type = int_ftype_int_v4si_v4si;
5502 break;
5503 case V8HImode:
5504 type = int_ftype_int_v8hi_v8hi;
5505 break;
5506 case V16QImode:
5507 type = int_ftype_int_v16qi_v16qi;
5508 break;
5509 case V4SFmode:
5510 type = int_ftype_int_v4sf_v4sf;
5511 break;
5512 default:
5513 abort ();
5516 def_builtin (dp->mask, dp->name, type, dp->code);
5519 /* Initialize the abs* operators. */
5520 d = (struct builtin_description *) bdesc_abs;
5521 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
5523 enum machine_mode mode0;
5524 tree type;
5526 mode0 = insn_data[d->icode].operand[0].mode;
5528 switch (mode0)
5530 case V4SImode:
5531 type = v4si_ftype_v4si;
5532 break;
5533 case V8HImode:
5534 type = v8hi_ftype_v8hi;
5535 break;
5536 case V16QImode:
5537 type = v16qi_ftype_v16qi;
5538 break;
5539 case V4SFmode:
5540 type = v4sf_ftype_v4sf;
5541 break;
5542 default:
5543 abort ();
5546 def_builtin (d->mask, d->name, type, d->code);
5550 static void
5551 rs6000_common_init_builtins (void)
5553 struct builtin_description *d;
5554 size_t i;
5556 tree v4sf_ftype_v4sf_v4sf_v16qi
5557 = build_function_type_list (V4SF_type_node,
5558 V4SF_type_node, V4SF_type_node,
5559 V16QI_type_node, NULL_TREE);
5560 tree v4si_ftype_v4si_v4si_v16qi
5561 = build_function_type_list (V4SI_type_node,
5562 V4SI_type_node, V4SI_type_node,
5563 V16QI_type_node, NULL_TREE);
5564 tree v8hi_ftype_v8hi_v8hi_v16qi
5565 = build_function_type_list (V8HI_type_node,
5566 V8HI_type_node, V8HI_type_node,
5567 V16QI_type_node, NULL_TREE);
5568 tree v16qi_ftype_v16qi_v16qi_v16qi
5569 = build_function_type_list (V16QI_type_node,
5570 V16QI_type_node, V16QI_type_node,
5571 V16QI_type_node, NULL_TREE);
5572 tree v4si_ftype_char
5573 = build_function_type_list (V4SI_type_node, char_type_node, NULL_TREE);
5574 tree v8hi_ftype_char
5575 = build_function_type_list (V8HI_type_node, char_type_node, NULL_TREE);
5576 tree v16qi_ftype_char
5577 = build_function_type_list (V16QI_type_node, char_type_node, NULL_TREE);
5578 tree v8hi_ftype_v16qi
5579 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
5580 tree v4sf_ftype_v4sf
5581 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
5583 tree v2si_ftype_v2si_v2si
5584 = build_function_type_list (V2SI_type_node,
5585 V2SI_type_node, V2SI_type_node, NULL_TREE);
5587 tree v2sf_ftype_v2sf_v2sf
5588 = build_function_type_list (V2SF_type_node,
5589 V2SF_type_node, V2SF_type_node, NULL_TREE);
5591 tree v2si_ftype_int_int
5592 = build_function_type_list (V2SI_type_node,
5593 integer_type_node, integer_type_node,
5594 NULL_TREE);
5596 tree v2si_ftype_v2si
5597 = build_function_type_list (V2SI_type_node, V2SI_type_node, NULL_TREE);
5599 tree v2sf_ftype_v2sf
5600 = build_function_type_list (V2SF_type_node,
5601 V2SF_type_node, NULL_TREE);
5603 tree v2sf_ftype_v2si
5604 = build_function_type_list (V2SF_type_node,
5605 V2SI_type_node, NULL_TREE);
5607 tree v2si_ftype_v2sf
5608 = build_function_type_list (V2SI_type_node,
5609 V2SF_type_node, NULL_TREE);
5611 tree v2si_ftype_v2si_char
5612 = build_function_type_list (V2SI_type_node,
5613 V2SI_type_node, char_type_node, NULL_TREE);
5615 tree v2si_ftype_int_char
5616 = build_function_type_list (V2SI_type_node,
5617 integer_type_node, char_type_node, NULL_TREE);
5619 tree v2si_ftype_char
5620 = build_function_type_list (V2SI_type_node, char_type_node, NULL_TREE);
5622 tree int_ftype_int_int
5623 = build_function_type_list (integer_type_node,
5624 integer_type_node, integer_type_node,
5625 NULL_TREE);
5627 tree v4si_ftype_v4si_v4si
5628 = build_function_type_list (V4SI_type_node,
5629 V4SI_type_node, V4SI_type_node, NULL_TREE);
5630 tree v4sf_ftype_v4si_char
5631 = build_function_type_list (V4SF_type_node,
5632 V4SI_type_node, char_type_node, NULL_TREE);
5633 tree v4si_ftype_v4sf_char
5634 = build_function_type_list (V4SI_type_node,
5635 V4SF_type_node, char_type_node, NULL_TREE);
5636 tree v4si_ftype_v4si_char
5637 = build_function_type_list (V4SI_type_node,
5638 V4SI_type_node, char_type_node, NULL_TREE);
5639 tree v8hi_ftype_v8hi_char
5640 = build_function_type_list (V8HI_type_node,
5641 V8HI_type_node, char_type_node, NULL_TREE);
5642 tree v16qi_ftype_v16qi_char
5643 = build_function_type_list (V16QI_type_node,
5644 V16QI_type_node, char_type_node, NULL_TREE);
5645 tree v16qi_ftype_v16qi_v16qi_char
5646 = build_function_type_list (V16QI_type_node,
5647 V16QI_type_node, V16QI_type_node,
5648 char_type_node, NULL_TREE);
5649 tree v8hi_ftype_v8hi_v8hi_char
5650 = build_function_type_list (V8HI_type_node,
5651 V8HI_type_node, V8HI_type_node,
5652 char_type_node, NULL_TREE);
5653 tree v4si_ftype_v4si_v4si_char
5654 = build_function_type_list (V4SI_type_node,
5655 V4SI_type_node, V4SI_type_node,
5656 char_type_node, NULL_TREE);
5657 tree v4sf_ftype_v4sf_v4sf_char
5658 = build_function_type_list (V4SF_type_node,
5659 V4SF_type_node, V4SF_type_node,
5660 char_type_node, NULL_TREE);
5661 tree v4sf_ftype_v4sf_v4sf
5662 = build_function_type_list (V4SF_type_node,
5663 V4SF_type_node, V4SF_type_node, NULL_TREE);
5664 tree v4sf_ftype_v4sf_v4sf_v4si
5665 = build_function_type_list (V4SF_type_node,
5666 V4SF_type_node, V4SF_type_node,
5667 V4SI_type_node, NULL_TREE);
5668 tree v4sf_ftype_v4sf_v4sf_v4sf
5669 = build_function_type_list (V4SF_type_node,
5670 V4SF_type_node, V4SF_type_node,
5671 V4SF_type_node, NULL_TREE);
5672 tree v4si_ftype_v4si_v4si_v4si
5673 = build_function_type_list (V4SI_type_node,
5674 V4SI_type_node, V4SI_type_node,
5675 V4SI_type_node, NULL_TREE);
5676 tree v8hi_ftype_v8hi_v8hi
5677 = build_function_type_list (V8HI_type_node,
5678 V8HI_type_node, V8HI_type_node, NULL_TREE);
5679 tree v8hi_ftype_v8hi_v8hi_v8hi
5680 = build_function_type_list (V8HI_type_node,
5681 V8HI_type_node, V8HI_type_node,
5682 V8HI_type_node, NULL_TREE);
5683 tree v4si_ftype_v8hi_v8hi_v4si
5684 = build_function_type_list (V4SI_type_node,
5685 V8HI_type_node, V8HI_type_node,
5686 V4SI_type_node, NULL_TREE);
5687 tree v4si_ftype_v16qi_v16qi_v4si
5688 = build_function_type_list (V4SI_type_node,
5689 V16QI_type_node, V16QI_type_node,
5690 V4SI_type_node, NULL_TREE);
5691 tree v16qi_ftype_v16qi_v16qi
5692 = build_function_type_list (V16QI_type_node,
5693 V16QI_type_node, V16QI_type_node, NULL_TREE);
5694 tree v4si_ftype_v4sf_v4sf
5695 = build_function_type_list (V4SI_type_node,
5696 V4SF_type_node, V4SF_type_node, NULL_TREE);
5697 tree v8hi_ftype_v16qi_v16qi
5698 = build_function_type_list (V8HI_type_node,
5699 V16QI_type_node, V16QI_type_node, NULL_TREE);
5700 tree v4si_ftype_v8hi_v8hi
5701 = build_function_type_list (V4SI_type_node,
5702 V8HI_type_node, V8HI_type_node, NULL_TREE);
5703 tree v8hi_ftype_v4si_v4si
5704 = build_function_type_list (V8HI_type_node,
5705 V4SI_type_node, V4SI_type_node, NULL_TREE);
5706 tree v16qi_ftype_v8hi_v8hi
5707 = build_function_type_list (V16QI_type_node,
5708 V8HI_type_node, V8HI_type_node, NULL_TREE);
5709 tree v4si_ftype_v16qi_v4si
5710 = build_function_type_list (V4SI_type_node,
5711 V16QI_type_node, V4SI_type_node, NULL_TREE);
5712 tree v4si_ftype_v16qi_v16qi
5713 = build_function_type_list (V4SI_type_node,
5714 V16QI_type_node, V16QI_type_node, NULL_TREE);
5715 tree v4si_ftype_v8hi_v4si
5716 = build_function_type_list (V4SI_type_node,
5717 V8HI_type_node, V4SI_type_node, NULL_TREE);
5718 tree v4si_ftype_v8hi
5719 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
5720 tree int_ftype_v4si_v4si
5721 = build_function_type_list (integer_type_node,
5722 V4SI_type_node, V4SI_type_node, NULL_TREE);
5723 tree int_ftype_v4sf_v4sf
5724 = build_function_type_list (integer_type_node,
5725 V4SF_type_node, V4SF_type_node, NULL_TREE);
5726 tree int_ftype_v16qi_v16qi
5727 = build_function_type_list (integer_type_node,
5728 V16QI_type_node, V16QI_type_node, NULL_TREE);
5729 tree int_ftype_v8hi_v8hi
5730 = build_function_type_list (integer_type_node,
5731 V8HI_type_node, V8HI_type_node, NULL_TREE);
5733 /* Add the simple ternary operators. */
5734 d = (struct builtin_description *) bdesc_3arg;
5735 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
5738 enum machine_mode mode0, mode1, mode2, mode3;
5739 tree type;
5741 if (d->name == 0)
5742 continue;
5744 mode0 = insn_data[d->icode].operand[0].mode;
5745 mode1 = insn_data[d->icode].operand[1].mode;
5746 mode2 = insn_data[d->icode].operand[2].mode;
5747 mode3 = insn_data[d->icode].operand[3].mode;
5749 /* When all four are of the same mode. */
5750 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
5752 switch (mode0)
5754 case V4SImode:
5755 type = v4si_ftype_v4si_v4si_v4si;
5756 break;
5757 case V4SFmode:
5758 type = v4sf_ftype_v4sf_v4sf_v4sf;
5759 break;
5760 case V8HImode:
5761 type = v8hi_ftype_v8hi_v8hi_v8hi;
5762 break;
5763 case V16QImode:
5764 type = v16qi_ftype_v16qi_v16qi_v16qi;
5765 break;
5766 default:
5767 abort();
5770 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
5772 switch (mode0)
5774 case V4SImode:
5775 type = v4si_ftype_v4si_v4si_v16qi;
5776 break;
5777 case V4SFmode:
5778 type = v4sf_ftype_v4sf_v4sf_v16qi;
5779 break;
5780 case V8HImode:
5781 type = v8hi_ftype_v8hi_v8hi_v16qi;
5782 break;
5783 case V16QImode:
5784 type = v16qi_ftype_v16qi_v16qi_v16qi;
5785 break;
5786 default:
5787 abort();
5790 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
5791 && mode3 == V4SImode)
5792 type = v4si_ftype_v16qi_v16qi_v4si;
5793 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
5794 && mode3 == V4SImode)
5795 type = v4si_ftype_v8hi_v8hi_v4si;
5796 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
5797 && mode3 == V4SImode)
5798 type = v4sf_ftype_v4sf_v4sf_v4si;
5800 /* vchar, vchar, vchar, 4 bit literal. */
5801 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
5802 && mode3 == QImode)
5803 type = v16qi_ftype_v16qi_v16qi_char;
5805 /* vshort, vshort, vshort, 4 bit literal. */
5806 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
5807 && mode3 == QImode)
5808 type = v8hi_ftype_v8hi_v8hi_char;
5810 /* vint, vint, vint, 4 bit literal. */
5811 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
5812 && mode3 == QImode)
5813 type = v4si_ftype_v4si_v4si_char;
5815 /* vfloat, vfloat, vfloat, 4 bit literal. */
5816 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
5817 && mode3 == QImode)
5818 type = v4sf_ftype_v4sf_v4sf_char;
5820 else
5821 abort ();
5823 def_builtin (d->mask, d->name, type, d->code);
5826 /* Add the simple binary operators. */
5827 d = (struct builtin_description *) bdesc_2arg;
5828 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
5830 enum machine_mode mode0, mode1, mode2;
5831 tree type;
5833 if (d->name == 0)
5834 continue;
5836 mode0 = insn_data[d->icode].operand[0].mode;
5837 mode1 = insn_data[d->icode].operand[1].mode;
5838 mode2 = insn_data[d->icode].operand[2].mode;
5840 /* When all three operands are of the same mode. */
5841 if (mode0 == mode1 && mode1 == mode2)
5843 switch (mode0)
5845 case V4SFmode:
5846 type = v4sf_ftype_v4sf_v4sf;
5847 break;
5848 case V4SImode:
5849 type = v4si_ftype_v4si_v4si;
5850 break;
5851 case V16QImode:
5852 type = v16qi_ftype_v16qi_v16qi;
5853 break;
5854 case V8HImode:
5855 type = v8hi_ftype_v8hi_v8hi;
5856 break;
5857 case V2SImode:
5858 type = v2si_ftype_v2si_v2si;
5859 break;
5860 case V2SFmode:
5861 type = v2sf_ftype_v2sf_v2sf;
5862 break;
5863 case SImode:
5864 type = int_ftype_int_int;
5865 break;
5866 default:
5867 abort ();
5871 /* A few other combos we really don't want to do manually. */
5873 /* vint, vfloat, vfloat. */
5874 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
5875 type = v4si_ftype_v4sf_v4sf;
5877 /* vshort, vchar, vchar. */
5878 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
5879 type = v8hi_ftype_v16qi_v16qi;
5881 /* vint, vshort, vshort. */
5882 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
5883 type = v4si_ftype_v8hi_v8hi;
5885 /* vshort, vint, vint. */
5886 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
5887 type = v8hi_ftype_v4si_v4si;
5889 /* vchar, vshort, vshort. */
5890 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
5891 type = v16qi_ftype_v8hi_v8hi;
5893 /* vint, vchar, vint. */
5894 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
5895 type = v4si_ftype_v16qi_v4si;
5897 /* vint, vchar, vchar. */
5898 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
5899 type = v4si_ftype_v16qi_v16qi;
5901 /* vint, vshort, vint. */
5902 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
5903 type = v4si_ftype_v8hi_v4si;
5905 /* vint, vint, 5 bit literal. */
5906 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
5907 type = v4si_ftype_v4si_char;
5909 /* vshort, vshort, 5 bit literal. */
5910 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
5911 type = v8hi_ftype_v8hi_char;
5913 /* vchar, vchar, 5 bit literal. */
5914 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
5915 type = v16qi_ftype_v16qi_char;
5917 /* vfloat, vint, 5 bit literal. */
5918 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
5919 type = v4sf_ftype_v4si_char;
5921 /* vint, vfloat, 5 bit literal. */
5922 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
5923 type = v4si_ftype_v4sf_char;
5925 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
5926 type = v2si_ftype_int_int;
5928 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
5929 type = v2si_ftype_v2si_char;
5931 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
5932 type = v2si_ftype_int_char;
5934 /* int, x, x. */
5935 else if (mode0 == SImode)
5937 switch (mode1)
5939 case V4SImode:
5940 type = int_ftype_v4si_v4si;
5941 break;
5942 case V4SFmode:
5943 type = int_ftype_v4sf_v4sf;
5944 break;
5945 case V16QImode:
5946 type = int_ftype_v16qi_v16qi;
5947 break;
5948 case V8HImode:
5949 type = int_ftype_v8hi_v8hi;
5950 break;
5951 default:
5952 abort ();
5956 else
5957 abort ();
5959 def_builtin (d->mask, d->name, type, d->code);
5962 /* Add the simple unary operators. */
5963 d = (struct builtin_description *) bdesc_1arg;
5964 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
5966 enum machine_mode mode0, mode1;
5967 tree type;
5969 if (d->name == 0)
5970 continue;
5972 mode0 = insn_data[d->icode].operand[0].mode;
5973 mode1 = insn_data[d->icode].operand[1].mode;
5975 if (mode0 == V4SImode && mode1 == QImode)
5976 type = v4si_ftype_char;
5977 else if (mode0 == V8HImode && mode1 == QImode)
5978 type = v8hi_ftype_char;
5979 else if (mode0 == V16QImode && mode1 == QImode)
5980 type = v16qi_ftype_char;
5981 else if (mode0 == V4SFmode && mode1 == V4SFmode)
5982 type = v4sf_ftype_v4sf;
5983 else if (mode0 == V8HImode && mode1 == V16QImode)
5984 type = v8hi_ftype_v16qi;
5985 else if (mode0 == V4SImode && mode1 == V8HImode)
5986 type = v4si_ftype_v8hi;
5987 else if (mode0 == V2SImode && mode1 == V2SImode)
5988 type = v2si_ftype_v2si;
5989 else if (mode0 == V2SFmode && mode1 == V2SFmode)
5990 type = v2sf_ftype_v2sf;
5991 else if (mode0 == V2SFmode && mode1 == V2SImode)
5992 type = v2sf_ftype_v2si;
5993 else if (mode0 == V2SImode && mode1 == V2SFmode)
5994 type = v2si_ftype_v2sf;
5995 else if (mode0 == V2SImode && mode1 == QImode)
5996 type = v2si_ftype_char;
5997 else
5998 abort ();
6000 def_builtin (d->mask, d->name, type, d->code);
6004 /* Generate a memory reference for expand_block_move, copying volatile,
6005 and other bits from an original memory reference. */
6007 static rtx
6008 expand_block_move_mem (mode, addr, orig_mem)
6009 enum machine_mode mode;
6010 rtx addr;
6011 rtx orig_mem;
6013 rtx mem = gen_rtx_MEM (mode, addr);
6015 MEM_COPY_ATTRIBUTES (mem, orig_mem);
6016 return mem;
6019 /* Expand a block move operation, and return 1 if successful. Return 0
6020 if we should let the compiler generate normal code.
6022 operands[0] is the destination
6023 operands[1] is the source
6024 operands[2] is the length
6025 operands[3] is the alignment */
6027 #define MAX_MOVE_REG 4
6030 expand_block_move (operands)
6031 rtx operands[];
6033 rtx orig_dest = operands[0];
6034 rtx orig_src = operands[1];
6035 rtx bytes_rtx = operands[2];
6036 rtx align_rtx = operands[3];
6037 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
6038 int align;
6039 int bytes;
6040 int offset;
6041 int num_reg;
6042 int i;
6043 rtx src_reg;
6044 rtx dest_reg;
6045 rtx src_addr;
6046 rtx dest_addr;
6047 rtx tmp_reg;
6048 rtx stores[MAX_MOVE_REG];
6049 int move_bytes;
6051 /* If this is not a fixed size move, just call memcpy */
6052 if (! constp)
6053 return 0;
6055 /* If this is not a fixed size alignment, abort */
6056 if (GET_CODE (align_rtx) != CONST_INT)
6057 abort ();
6058 align = INTVAL (align_rtx);
6060 /* Anything to move? */
6061 bytes = INTVAL (bytes_rtx);
6062 if (bytes <= 0)
6063 return 1;
6065 /* store_one_arg depends on expand_block_move to handle at least the size of
6066 reg_parm_stack_space. */
6067 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
6068 return 0;
6070 /* Move the address into scratch registers. */
6071 dest_reg = copy_addr_to_reg (XEXP (orig_dest, 0));
6072 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
6074 if (TARGET_STRING) /* string instructions are available */
6076 for ( ; bytes > 0; bytes -= move_bytes)
6078 if (bytes > 24 /* move up to 32 bytes at a time */
6079 && ! fixed_regs[5]
6080 && ! fixed_regs[6]
6081 && ! fixed_regs[7]
6082 && ! fixed_regs[8]
6083 && ! fixed_regs[9]
6084 && ! fixed_regs[10]
6085 && ! fixed_regs[11]
6086 && ! fixed_regs[12])
6088 move_bytes = (bytes > 32) ? 32 : bytes;
6089 emit_insn (gen_movstrsi_8reg (expand_block_move_mem (BLKmode,
6090 dest_reg,
6091 orig_dest),
6092 expand_block_move_mem (BLKmode,
6093 src_reg,
6094 orig_src),
6095 GEN_INT ((move_bytes == 32)
6096 ? 0 : move_bytes),
6097 align_rtx));
6099 else if (bytes > 16 /* move up to 24 bytes at a time */
6100 && ! fixed_regs[5]
6101 && ! fixed_regs[6]
6102 && ! fixed_regs[7]
6103 && ! fixed_regs[8]
6104 && ! fixed_regs[9]
6105 && ! fixed_regs[10])
6107 move_bytes = (bytes > 24) ? 24 : bytes;
6108 emit_insn (gen_movstrsi_6reg (expand_block_move_mem (BLKmode,
6109 dest_reg,
6110 orig_dest),
6111 expand_block_move_mem (BLKmode,
6112 src_reg,
6113 orig_src),
6114 GEN_INT (move_bytes),
6115 align_rtx));
6117 else if (bytes > 8 /* move up to 16 bytes at a time */
6118 && ! fixed_regs[5]
6119 && ! fixed_regs[6]
6120 && ! fixed_regs[7]
6121 && ! fixed_regs[8])
6123 move_bytes = (bytes > 16) ? 16 : bytes;
6124 emit_insn (gen_movstrsi_4reg (expand_block_move_mem (BLKmode,
6125 dest_reg,
6126 orig_dest),
6127 expand_block_move_mem (BLKmode,
6128 src_reg,
6129 orig_src),
6130 GEN_INT (move_bytes),
6131 align_rtx));
6133 else if (bytes >= 8 && TARGET_POWERPC64
6134 /* 64-bit loads and stores require word-aligned
6135 displacements. */
6136 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
6138 move_bytes = 8;
6139 tmp_reg = gen_reg_rtx (DImode);
6140 emit_move_insn (tmp_reg,
6141 expand_block_move_mem (DImode,
6142 src_reg, orig_src));
6143 emit_move_insn (expand_block_move_mem (DImode,
6144 dest_reg, orig_dest),
6145 tmp_reg);
6147 else if (bytes > 4 && !TARGET_POWERPC64)
6148 { /* move up to 8 bytes at a time */
6149 move_bytes = (bytes > 8) ? 8 : bytes;
6150 emit_insn (gen_movstrsi_2reg (expand_block_move_mem (BLKmode,
6151 dest_reg,
6152 orig_dest),
6153 expand_block_move_mem (BLKmode,
6154 src_reg,
6155 orig_src),
6156 GEN_INT (move_bytes),
6157 align_rtx));
6159 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
6160 { /* move 4 bytes */
6161 move_bytes = 4;
6162 tmp_reg = gen_reg_rtx (SImode);
6163 emit_move_insn (tmp_reg,
6164 expand_block_move_mem (SImode,
6165 src_reg, orig_src));
6166 emit_move_insn (expand_block_move_mem (SImode,
6167 dest_reg, orig_dest),
6168 tmp_reg);
6170 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
6171 { /* move 2 bytes */
6172 move_bytes = 2;
6173 tmp_reg = gen_reg_rtx (HImode);
6174 emit_move_insn (tmp_reg,
6175 expand_block_move_mem (HImode,
6176 src_reg, orig_src));
6177 emit_move_insn (expand_block_move_mem (HImode,
6178 dest_reg, orig_dest),
6179 tmp_reg);
6181 else if (bytes == 1) /* move 1 byte */
6183 move_bytes = 1;
6184 tmp_reg = gen_reg_rtx (QImode);
6185 emit_move_insn (tmp_reg,
6186 expand_block_move_mem (QImode,
6187 src_reg, orig_src));
6188 emit_move_insn (expand_block_move_mem (QImode,
6189 dest_reg, orig_dest),
6190 tmp_reg);
6192 else
6193 { /* move up to 4 bytes at a time */
6194 move_bytes = (bytes > 4) ? 4 : bytes;
6195 emit_insn (gen_movstrsi_1reg (expand_block_move_mem (BLKmode,
6196 dest_reg,
6197 orig_dest),
6198 expand_block_move_mem (BLKmode,
6199 src_reg,
6200 orig_src),
6201 GEN_INT (move_bytes),
6202 align_rtx));
6205 if (bytes > move_bytes)
6207 if (! TARGET_POWERPC64)
6209 emit_insn (gen_addsi3 (src_reg, src_reg,
6210 GEN_INT (move_bytes)));
6211 emit_insn (gen_addsi3 (dest_reg, dest_reg,
6212 GEN_INT (move_bytes)));
6214 else
6216 emit_insn (gen_adddi3 (src_reg, src_reg,
6217 GEN_INT (move_bytes)));
6218 emit_insn (gen_adddi3 (dest_reg, dest_reg,
6219 GEN_INT (move_bytes)));
6225 else /* string instructions not available */
6227 num_reg = offset = 0;
6228 for ( ; bytes > 0; (bytes -= move_bytes), (offset += move_bytes))
6230 /* Calculate the correct offset for src/dest */
6231 if (offset == 0)
6233 src_addr = src_reg;
6234 dest_addr = dest_reg;
6236 else
6238 src_addr = plus_constant (src_reg, offset);
6239 dest_addr = plus_constant (dest_reg, offset);
6242 /* Generate the appropriate load and store, saving the stores
6243 for later. */
6244 if (bytes >= 8 && TARGET_POWERPC64
6245 /* 64-bit loads and stores require word-aligned
6246 displacements. */
6247 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
6249 move_bytes = 8;
6250 tmp_reg = gen_reg_rtx (DImode);
6251 emit_insn (gen_movdi (tmp_reg,
6252 expand_block_move_mem (DImode,
6253 src_addr,
6254 orig_src)));
6255 stores[num_reg++] = gen_movdi (expand_block_move_mem (DImode,
6256 dest_addr,
6257 orig_dest),
6258 tmp_reg);
6260 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
6262 move_bytes = 4;
6263 tmp_reg = gen_reg_rtx (SImode);
6264 emit_insn (gen_movsi (tmp_reg,
6265 expand_block_move_mem (SImode,
6266 src_addr,
6267 orig_src)));
6268 stores[num_reg++] = gen_movsi (expand_block_move_mem (SImode,
6269 dest_addr,
6270 orig_dest),
6271 tmp_reg);
6273 else if (bytes >= 2 && (align >= 2 || ! STRICT_ALIGNMENT))
6275 move_bytes = 2;
6276 tmp_reg = gen_reg_rtx (HImode);
6277 emit_insn (gen_movhi (tmp_reg,
6278 expand_block_move_mem (HImode,
6279 src_addr,
6280 orig_src)));
6281 stores[num_reg++] = gen_movhi (expand_block_move_mem (HImode,
6282 dest_addr,
6283 orig_dest),
6284 tmp_reg);
6286 else
6288 move_bytes = 1;
6289 tmp_reg = gen_reg_rtx (QImode);
6290 emit_insn (gen_movqi (tmp_reg,
6291 expand_block_move_mem (QImode,
6292 src_addr,
6293 orig_src)));
6294 stores[num_reg++] = gen_movqi (expand_block_move_mem (QImode,
6295 dest_addr,
6296 orig_dest),
6297 tmp_reg);
6300 if (num_reg >= MAX_MOVE_REG)
6302 for (i = 0; i < num_reg; i++)
6303 emit_insn (stores[i]);
6304 num_reg = 0;
6308 for (i = 0; i < num_reg; i++)
6309 emit_insn (stores[i]);
6312 return 1;
6316 /* Return 1 if OP is a load multiple operation. It is known to be a
6317 PARALLEL and the first section will be tested. */
6320 load_multiple_operation (op, mode)
6321 rtx op;
6322 enum machine_mode mode ATTRIBUTE_UNUSED;
6324 int count = XVECLEN (op, 0);
6325 unsigned int dest_regno;
6326 rtx src_addr;
6327 int i;
6329 /* Perform a quick check so we don't blow up below. */
6330 if (count <= 1
6331 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6332 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
6333 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
6334 return 0;
6336 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
6337 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
6339 for (i = 1; i < count; i++)
6341 rtx elt = XVECEXP (op, 0, i);
6343 if (GET_CODE (elt) != SET
6344 || GET_CODE (SET_DEST (elt)) != REG
6345 || GET_MODE (SET_DEST (elt)) != SImode
6346 || REGNO (SET_DEST (elt)) != dest_regno + i
6347 || GET_CODE (SET_SRC (elt)) != MEM
6348 || GET_MODE (SET_SRC (elt)) != SImode
6349 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
6350 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
6351 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
6352 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
6353 return 0;
6356 return 1;
6359 /* Similar, but tests for store multiple. Here, the second vector element
6360 is a CLOBBER. It will be tested later. */
6363 store_multiple_operation (op, mode)
6364 rtx op;
6365 enum machine_mode mode ATTRIBUTE_UNUSED;
6367 int count = XVECLEN (op, 0) - 1;
6368 unsigned int src_regno;
6369 rtx dest_addr;
6370 int i;
6372 /* Perform a quick check so we don't blow up below. */
6373 if (count <= 1
6374 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6375 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
6376 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
6377 return 0;
6379 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
6380 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
6382 for (i = 1; i < count; i++)
6384 rtx elt = XVECEXP (op, 0, i + 1);
6386 if (GET_CODE (elt) != SET
6387 || GET_CODE (SET_SRC (elt)) != REG
6388 || GET_MODE (SET_SRC (elt)) != SImode
6389 || REGNO (SET_SRC (elt)) != src_regno + i
6390 || GET_CODE (SET_DEST (elt)) != MEM
6391 || GET_MODE (SET_DEST (elt)) != SImode
6392 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
6393 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
6394 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
6395 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
6396 return 0;
6399 return 1;
6402 /* Return 1 for a parallel vrsave operation. */
6405 vrsave_operation (op, mode)
6406 rtx op;
6407 enum machine_mode mode ATTRIBUTE_UNUSED;
6409 int count = XVECLEN (op, 0);
6410 unsigned int dest_regno, src_regno;
6411 int i;
6413 if (count <= 1
6414 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6415 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
6416 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
6417 return 0;
6419 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
6420 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
6422 if (dest_regno != VRSAVE_REGNO
6423 && src_regno != VRSAVE_REGNO)
6424 return 0;
6426 for (i = 1; i < count; i++)
6428 rtx elt = XVECEXP (op, 0, i);
6430 if (GET_CODE (elt) != CLOBBER
6431 && GET_CODE (elt) != SET)
6432 return 0;
6435 return 1;
6438 /* Return 1 for an PARALLEL suitable for mtcrf. */
6441 mtcrf_operation (op, mode)
6442 rtx op;
6443 enum machine_mode mode ATTRIBUTE_UNUSED;
6445 int count = XVECLEN (op, 0);
6446 int i;
6447 rtx src_reg;
6449 /* Perform a quick check so we don't blow up below. */
6450 if (count < 1
6451 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6452 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
6453 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
6454 return 0;
6455 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
6457 if (GET_CODE (src_reg) != REG
6458 || GET_MODE (src_reg) != SImode
6459 || ! INT_REGNO_P (REGNO (src_reg)))
6460 return 0;
6462 for (i = 0; i < count; i++)
6464 rtx exp = XVECEXP (op, 0, i);
6465 rtx unspec;
6466 int maskval;
6468 if (GET_CODE (exp) != SET
6469 || GET_CODE (SET_DEST (exp)) != REG
6470 || GET_MODE (SET_DEST (exp)) != CCmode
6471 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
6472 return 0;
6473 unspec = SET_SRC (exp);
6474 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
6476 if (GET_CODE (unspec) != UNSPEC
6477 || XINT (unspec, 1) != 20
6478 || XVECLEN (unspec, 0) != 2
6479 || XVECEXP (unspec, 0, 0) != src_reg
6480 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
6481 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
6482 return 0;
6484 return 1;
6487 /* Return 1 for an PARALLEL suitable for lmw. */
6490 lmw_operation (op, mode)
6491 rtx op;
6492 enum machine_mode mode ATTRIBUTE_UNUSED;
6494 int count = XVECLEN (op, 0);
6495 unsigned int dest_regno;
6496 rtx src_addr;
6497 unsigned int base_regno;
6498 HOST_WIDE_INT offset;
6499 int i;
6501 /* Perform a quick check so we don't blow up below. */
6502 if (count <= 1
6503 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6504 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
6505 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
6506 return 0;
6508 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
6509 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
6511 if (dest_regno > 31
6512 || count != 32 - (int) dest_regno)
6513 return 0;
6515 if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr, 0))
6517 offset = 0;
6518 base_regno = REGNO (src_addr);
6519 if (base_regno == 0)
6520 return 0;
6522 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, src_addr, 0))
6524 offset = INTVAL (XEXP (src_addr, 1));
6525 base_regno = REGNO (XEXP (src_addr, 0));
6527 else
6528 return 0;
6530 for (i = 0; i < count; i++)
6532 rtx elt = XVECEXP (op, 0, i);
6533 rtx newaddr;
6534 rtx addr_reg;
6535 HOST_WIDE_INT newoffset;
6537 if (GET_CODE (elt) != SET
6538 || GET_CODE (SET_DEST (elt)) != REG
6539 || GET_MODE (SET_DEST (elt)) != SImode
6540 || REGNO (SET_DEST (elt)) != dest_regno + i
6541 || GET_CODE (SET_SRC (elt)) != MEM
6542 || GET_MODE (SET_SRC (elt)) != SImode)
6543 return 0;
6544 newaddr = XEXP (SET_SRC (elt), 0);
6545 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
6547 newoffset = 0;
6548 addr_reg = newaddr;
6550 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
6552 addr_reg = XEXP (newaddr, 0);
6553 newoffset = INTVAL (XEXP (newaddr, 1));
6555 else
6556 return 0;
6557 if (REGNO (addr_reg) != base_regno
6558 || newoffset != offset + 4 * i)
6559 return 0;
6562 return 1;
6565 /* Return 1 for an PARALLEL suitable for stmw. */
6568 stmw_operation (op, mode)
6569 rtx op;
6570 enum machine_mode mode ATTRIBUTE_UNUSED;
6572 int count = XVECLEN (op, 0);
6573 unsigned int src_regno;
6574 rtx dest_addr;
6575 unsigned int base_regno;
6576 HOST_WIDE_INT offset;
6577 int i;
6579 /* Perform a quick check so we don't blow up below. */
6580 if (count <= 1
6581 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6582 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
6583 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
6584 return 0;
6586 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
6587 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
6589 if (src_regno > 31
6590 || count != 32 - (int) src_regno)
6591 return 0;
6593 if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr, 0))
6595 offset = 0;
6596 base_regno = REGNO (dest_addr);
6597 if (base_regno == 0)
6598 return 0;
6600 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, dest_addr, 0))
6602 offset = INTVAL (XEXP (dest_addr, 1));
6603 base_regno = REGNO (XEXP (dest_addr, 0));
6605 else
6606 return 0;
6608 for (i = 0; i < count; i++)
6610 rtx elt = XVECEXP (op, 0, i);
6611 rtx newaddr;
6612 rtx addr_reg;
6613 HOST_WIDE_INT newoffset;
6615 if (GET_CODE (elt) != SET
6616 || GET_CODE (SET_SRC (elt)) != REG
6617 || GET_MODE (SET_SRC (elt)) != SImode
6618 || REGNO (SET_SRC (elt)) != src_regno + i
6619 || GET_CODE (SET_DEST (elt)) != MEM
6620 || GET_MODE (SET_DEST (elt)) != SImode)
6621 return 0;
6622 newaddr = XEXP (SET_DEST (elt), 0);
6623 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
6625 newoffset = 0;
6626 addr_reg = newaddr;
6628 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
6630 addr_reg = XEXP (newaddr, 0);
6631 newoffset = INTVAL (XEXP (newaddr, 1));
6633 else
6634 return 0;
6635 if (REGNO (addr_reg) != base_regno
6636 || newoffset != offset + 4 * i)
6637 return 0;
6640 return 1;
6643 /* A validation routine: say whether CODE, a condition code, and MODE
6644 match. The other alternatives either don't make sense or should
6645 never be generated. */
6647 static void
6648 validate_condition_mode (code, mode)
6649 enum rtx_code code;
6650 enum machine_mode mode;
6652 if (GET_RTX_CLASS (code) != '<'
6653 || GET_MODE_CLASS (mode) != MODE_CC)
6654 abort ();
6656 /* These don't make sense. */
6657 if ((code == GT || code == LT || code == GE || code == LE)
6658 && mode == CCUNSmode)
6659 abort ();
6661 if ((code == GTU || code == LTU || code == GEU || code == LEU)
6662 && mode != CCUNSmode)
6663 abort ();
6665 if (mode != CCFPmode
6666 && (code == ORDERED || code == UNORDERED
6667 || code == UNEQ || code == LTGT
6668 || code == UNGT || code == UNLT
6669 || code == UNGE || code == UNLE))
6670 abort ();
6672 /* These should never be generated except for
6673 flag_unsafe_math_optimizations. */
6674 if (mode == CCFPmode
6675 && ! flag_unsafe_math_optimizations
6676 && (code == LE || code == GE
6677 || code == UNEQ || code == LTGT
6678 || code == UNGT || code == UNLT))
6679 abort ();
6681 /* These are invalid; the information is not there. */
6682 if (mode == CCEQmode
6683 && code != EQ && code != NE)
6684 abort ();
6687 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
6688 We only check the opcode against the mode of the CC value here. */
6691 branch_comparison_operator (op, mode)
6692 rtx op;
6693 enum machine_mode mode ATTRIBUTE_UNUSED;
6695 enum rtx_code code = GET_CODE (op);
6696 enum machine_mode cc_mode;
6698 if (GET_RTX_CLASS (code) != '<')
6699 return 0;
6701 cc_mode = GET_MODE (XEXP (op, 0));
6702 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
6703 return 0;
6705 validate_condition_mode (code, cc_mode);
6707 return 1;
6710 /* Return 1 if OP is a comparison operation that is valid for a branch
6711 insn and which is true if the corresponding bit in the CC register
6712 is set. */
6715 branch_positive_comparison_operator (op, mode)
6716 rtx op;
6717 enum machine_mode mode;
6719 enum rtx_code code;
6721 if (! branch_comparison_operator (op, mode))
6722 return 0;
6724 code = GET_CODE (op);
6725 return (code == EQ || code == LT || code == GT
6726 || (TARGET_SPE && TARGET_HARD_FLOAT && !TARGET_FPRS && code == NE)
6727 || code == LTU || code == GTU
6728 || code == UNORDERED);
6731 /* Return 1 if OP is a comparison operation that is valid for an scc insn.
6732 We check the opcode against the mode of the CC value and disallow EQ or
6733 NE comparisons for integers. */
6736 scc_comparison_operator (op, mode)
6737 rtx op;
6738 enum machine_mode mode;
6740 enum rtx_code code = GET_CODE (op);
6741 enum machine_mode cc_mode;
6743 if (GET_MODE (op) != mode && mode != VOIDmode)
6744 return 0;
6746 if (GET_RTX_CLASS (code) != '<')
6747 return 0;
6749 cc_mode = GET_MODE (XEXP (op, 0));
6750 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
6751 return 0;
6753 validate_condition_mode (code, cc_mode);
6755 if (code == NE && cc_mode != CCFPmode)
6756 return 0;
6758 return 1;
6762 trap_comparison_operator (op, mode)
6763 rtx op;
6764 enum machine_mode mode;
6766 if (mode != VOIDmode && mode != GET_MODE (op))
6767 return 0;
6768 return GET_RTX_CLASS (GET_CODE (op)) == '<';
6772 boolean_operator (op, mode)
6773 rtx op;
6774 enum machine_mode mode ATTRIBUTE_UNUSED;
6776 enum rtx_code code = GET_CODE (op);
6777 return (code == AND || code == IOR || code == XOR);
6781 boolean_or_operator (op, mode)
6782 rtx op;
6783 enum machine_mode mode ATTRIBUTE_UNUSED;
6785 enum rtx_code code = GET_CODE (op);
6786 return (code == IOR || code == XOR);
6790 min_max_operator (op, mode)
6791 rtx op;
6792 enum machine_mode mode ATTRIBUTE_UNUSED;
6794 enum rtx_code code = GET_CODE (op);
6795 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
6798 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
6799 mask required to convert the result of a rotate insn into a shift
6800 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
6803 includes_lshift_p (shiftop, andop)
6804 rtx shiftop;
6805 rtx andop;
6807 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
6809 shift_mask <<= INTVAL (shiftop);
6811 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
6814 /* Similar, but for right shift. */
6817 includes_rshift_p (shiftop, andop)
6818 rtx shiftop;
6819 rtx andop;
6821 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
6823 shift_mask >>= INTVAL (shiftop);
6825 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
6828 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
6829 to perform a left shift. It must have exactly SHIFTOP least
6830 signifigant 0's, then one or more 1's, then zero or more 0's. */
6833 includes_rldic_lshift_p (shiftop, andop)
6834 rtx shiftop;
6835 rtx andop;
6837 if (GET_CODE (andop) == CONST_INT)
6839 HOST_WIDE_INT c, lsb, shift_mask;
6841 c = INTVAL (andop);
6842 if (c == 0 || c == ~0)
6843 return 0;
6845 shift_mask = ~0;
6846 shift_mask <<= INTVAL (shiftop);
6848 /* Find the least signifigant one bit. */
6849 lsb = c & -c;
6851 /* It must coincide with the LSB of the shift mask. */
6852 if (-lsb != shift_mask)
6853 return 0;
6855 /* Invert to look for the next transition (if any). */
6856 c = ~c;
6858 /* Remove the low group of ones (originally low group of zeros). */
6859 c &= -lsb;
6861 /* Again find the lsb, and check we have all 1's above. */
6862 lsb = c & -c;
6863 return c == -lsb;
6865 else if (GET_CODE (andop) == CONST_DOUBLE
6866 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
6868 HOST_WIDE_INT low, high, lsb;
6869 HOST_WIDE_INT shift_mask_low, shift_mask_high;
6871 low = CONST_DOUBLE_LOW (andop);
6872 if (HOST_BITS_PER_WIDE_INT < 64)
6873 high = CONST_DOUBLE_HIGH (andop);
6875 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
6876 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
6877 return 0;
6879 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
6881 shift_mask_high = ~0;
6882 if (INTVAL (shiftop) > 32)
6883 shift_mask_high <<= INTVAL (shiftop) - 32;
6885 lsb = high & -high;
6887 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
6888 return 0;
6890 high = ~high;
6891 high &= -lsb;
6893 lsb = high & -high;
6894 return high == -lsb;
6897 shift_mask_low = ~0;
6898 shift_mask_low <<= INTVAL (shiftop);
6900 lsb = low & -low;
6902 if (-lsb != shift_mask_low)
6903 return 0;
6905 if (HOST_BITS_PER_WIDE_INT < 64)
6906 high = ~high;
6907 low = ~low;
6908 low &= -lsb;
6910 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
6912 lsb = high & -high;
6913 return high == -lsb;
6916 lsb = low & -low;
6917 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
6919 else
6920 return 0;
6923 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
6924 to perform a left shift. It must have SHIFTOP or more least
6925 signifigant 0's, with the remainder of the word 1's. */
6928 includes_rldicr_lshift_p (shiftop, andop)
6929 rtx shiftop;
6930 rtx andop;
6932 if (GET_CODE (andop) == CONST_INT)
6934 HOST_WIDE_INT c, lsb, shift_mask;
6936 shift_mask = ~0;
6937 shift_mask <<= INTVAL (shiftop);
6938 c = INTVAL (andop);
6940 /* Find the least signifigant one bit. */
6941 lsb = c & -c;
6943 /* It must be covered by the shift mask.
6944 This test also rejects c == 0. */
6945 if ((lsb & shift_mask) == 0)
6946 return 0;
6948 /* Check we have all 1's above the transition, and reject all 1's. */
6949 return c == -lsb && lsb != 1;
6951 else if (GET_CODE (andop) == CONST_DOUBLE
6952 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
6954 HOST_WIDE_INT low, lsb, shift_mask_low;
6956 low = CONST_DOUBLE_LOW (andop);
6958 if (HOST_BITS_PER_WIDE_INT < 64)
6960 HOST_WIDE_INT high, shift_mask_high;
6962 high = CONST_DOUBLE_HIGH (andop);
6964 if (low == 0)
6966 shift_mask_high = ~0;
6967 if (INTVAL (shiftop) > 32)
6968 shift_mask_high <<= INTVAL (shiftop) - 32;
6970 lsb = high & -high;
6972 if ((lsb & shift_mask_high) == 0)
6973 return 0;
6975 return high == -lsb;
6977 if (high != ~0)
6978 return 0;
6981 shift_mask_low = ~0;
6982 shift_mask_low <<= INTVAL (shiftop);
6984 lsb = low & -low;
6986 if ((lsb & shift_mask_low) == 0)
6987 return 0;
6989 return low == -lsb && lsb != 1;
6991 else
6992 return 0;
6995 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
6996 for lfq and stfq insns.
6998 Note reg1 and reg2 *must* be hard registers. To be sure we will
6999 abort if we are passed pseudo registers. */
7002 registers_ok_for_quad_peep (reg1, reg2)
7003 rtx reg1, reg2;
7005 /* We might have been passed a SUBREG. */
7006 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
7007 return 0;
7009 return (REGNO (reg1) == REGNO (reg2) - 1);
7012 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
7013 addr1 and addr2 must be in consecutive memory locations
7014 (addr2 == addr1 + 8). */
7017 addrs_ok_for_quad_peep (addr1, addr2)
7018 rtx addr1;
7019 rtx addr2;
7021 unsigned int reg1;
7022 int offset1;
7024 /* Extract an offset (if used) from the first addr. */
7025 if (GET_CODE (addr1) == PLUS)
7027 /* If not a REG, return zero. */
7028 if (GET_CODE (XEXP (addr1, 0)) != REG)
7029 return 0;
7030 else
7032 reg1 = REGNO (XEXP (addr1, 0));
7033 /* The offset must be constant! */
7034 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
7035 return 0;
7036 offset1 = INTVAL (XEXP (addr1, 1));
7039 else if (GET_CODE (addr1) != REG)
7040 return 0;
7041 else
7043 reg1 = REGNO (addr1);
7044 /* This was a simple (mem (reg)) expression. Offset is 0. */
7045 offset1 = 0;
7048 /* Make sure the second address is a (mem (plus (reg) (const_int))). */
7049 if (GET_CODE (addr2) != PLUS)
7050 return 0;
7052 if (GET_CODE (XEXP (addr2, 0)) != REG
7053 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
7054 return 0;
7056 if (reg1 != REGNO (XEXP (addr2, 0)))
7057 return 0;
7059 /* The offset for the second addr must be 8 more than the first addr. */
7060 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
7061 return 0;
7063 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
7064 instructions. */
7065 return 1;
7068 /* Return the register class of a scratch register needed to copy IN into
7069 or out of a register in CLASS in MODE. If it can be done directly,
7070 NO_REGS is returned. */
7072 enum reg_class
7073 secondary_reload_class (class, mode, in)
7074 enum reg_class class;
7075 enum machine_mode mode ATTRIBUTE_UNUSED;
7076 rtx in;
7078 int regno;
7080 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN && flag_pic))
7082 /* We cannot copy a symbolic operand directly into anything
7083 other than BASE_REGS for TARGET_ELF. So indicate that a
7084 register from BASE_REGS is needed as an intermediate
7085 register.
7087 On Darwin, pic addresses require a load from memory, which
7088 needs a base register. */
7089 if (class != BASE_REGS
7090 && (GET_CODE (in) == SYMBOL_REF
7091 || GET_CODE (in) == HIGH
7092 || GET_CODE (in) == LABEL_REF
7093 || GET_CODE (in) == CONST))
7094 return BASE_REGS;
7097 if (GET_CODE (in) == REG)
7099 regno = REGNO (in);
7100 if (regno >= FIRST_PSEUDO_REGISTER)
7102 regno = true_regnum (in);
7103 if (regno >= FIRST_PSEUDO_REGISTER)
7104 regno = -1;
7107 else if (GET_CODE (in) == SUBREG)
7109 regno = true_regnum (in);
7110 if (regno >= FIRST_PSEUDO_REGISTER)
7111 regno = -1;
7113 else
7114 regno = -1;
7116 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
7117 into anything. */
7118 if (class == GENERAL_REGS || class == BASE_REGS
7119 || (regno >= 0 && INT_REGNO_P (regno)))
7120 return NO_REGS;
7122 /* Constants, memory, and FP registers can go into FP registers. */
7123 if ((regno == -1 || FP_REGNO_P (regno))
7124 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
7125 return NO_REGS;
7127 /* Memory, and AltiVec registers can go into AltiVec registers. */
7128 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
7129 && class == ALTIVEC_REGS)
7130 return NO_REGS;
7132 /* We can copy among the CR registers. */
7133 if ((class == CR_REGS || class == CR0_REGS)
7134 && regno >= 0 && CR_REGNO_P (regno))
7135 return NO_REGS;
7137 /* Otherwise, we need GENERAL_REGS. */
7138 return GENERAL_REGS;
7141 /* Given a comparison operation, return the bit number in CCR to test. We
7142 know this is a valid comparison.
7144 SCC_P is 1 if this is for an scc. That means that %D will have been
7145 used instead of %C, so the bits will be in different places.
7147 Return -1 if OP isn't a valid comparison for some reason. */
7150 ccr_bit (op, scc_p)
7151 rtx op;
7152 int scc_p;
7154 enum rtx_code code = GET_CODE (op);
7155 enum machine_mode cc_mode;
7156 int cc_regnum;
7157 int base_bit;
7158 rtx reg;
7160 if (GET_RTX_CLASS (code) != '<')
7161 return -1;
7163 reg = XEXP (op, 0);
7165 if (GET_CODE (reg) != REG
7166 || ! CR_REGNO_P (REGNO (reg)))
7167 abort ();
7169 cc_mode = GET_MODE (reg);
7170 cc_regnum = REGNO (reg);
7171 base_bit = 4 * (cc_regnum - CR0_REGNO);
7173 validate_condition_mode (code, cc_mode);
7175 switch (code)
7177 case NE:
7178 if (TARGET_SPE && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
7179 return base_bit + 1;
7180 return scc_p ? base_bit + 3 : base_bit + 2;
7181 case EQ:
7182 if (TARGET_SPE && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
7183 return base_bit + 1;
7184 return base_bit + 2;
7185 case GT: case GTU: case UNLE:
7186 return base_bit + 1;
7187 case LT: case LTU: case UNGE:
7188 return base_bit;
7189 case ORDERED: case UNORDERED:
7190 return base_bit + 3;
7192 case GE: case GEU:
7193 /* If scc, we will have done a cror to put the bit in the
7194 unordered position. So test that bit. For integer, this is ! LT
7195 unless this is an scc insn. */
7196 return scc_p ? base_bit + 3 : base_bit;
7198 case LE: case LEU:
7199 return scc_p ? base_bit + 3 : base_bit + 1;
7201 default:
7202 abort ();
7206 /* Return the GOT register. */
7208 struct rtx_def *
7209 rs6000_got_register (value)
7210 rtx value ATTRIBUTE_UNUSED;
7212 /* The second flow pass currently (June 1999) can't update
7213 regs_ever_live without disturbing other parts of the compiler, so
7214 update it here to make the prolog/epilogue code happy. */
7215 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
7216 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
7218 current_function_uses_pic_offset_table = 1;
7220 return pic_offset_table_rtx;
7223 /* Function to init struct machine_function.
7224 This will be called, via a pointer variable,
7225 from push_function_context. */
7227 static struct machine_function *
7228 rs6000_init_machine_status ()
7230 return ggc_alloc_cleared (sizeof (machine_function));
/* These macros test for integers and extract the low-order bits.
   INT_P accepts a VOIDmode CONST_INT or CONST_DOUBLE; INT_LOWPART
   yields the low word of either.  */
#define INT_P(X)				\
((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE)	\
 && GET_MODE (X) == VOIDmode)

#define INT_LOWPART(X) \
  (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
7242 extract_MB (op)
7243 rtx op;
7245 int i;
7246 unsigned long val = INT_LOWPART (op);
7248 /* If the high bit is zero, the value is the first 1 bit we find
7249 from the left. */
7250 if ((val & 0x80000000) == 0)
7252 if ((val & 0xffffffff) == 0)
7253 abort ();
7255 i = 1;
7256 while (((val <<= 1) & 0x80000000) == 0)
7257 ++i;
7258 return i;
7261 /* If the high bit is set and the low bit is not, or the mask is all
7262 1's, the value is zero. */
7263 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
7264 return 0;
7266 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7267 from the right. */
7268 i = 31;
7269 while (((val >>= 1) & 1) != 0)
7270 --i;
7272 return i;
7276 extract_ME (op)
7277 rtx op;
7279 int i;
7280 unsigned long val = INT_LOWPART (op);
7282 /* If the low bit is zero, the value is the first 1 bit we find from
7283 the right. */
7284 if ((val & 1) == 0)
7286 if ((val & 0xffffffff) == 0)
7287 abort ();
7289 i = 30;
7290 while (((val >>= 1) & 1) == 0)
7291 --i;
7293 return i;
7296 /* If the low bit is set and the high bit is not, or the mask is all
7297 1's, the value is 31. */
7298 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
7299 return 31;
7301 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7302 from the left. */
7303 i = 0;
7304 while (((val <<= 1) & 0x80000000) != 0)
7305 ++i;
7307 return i;
/* Print an operand.  Recognize special options, documented below.  */

/* Relocation name and base register used for small-data references;
   these vary with the small-data model on ELF targets.  */
#if TARGET_ELF
#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
#else
#define SMALL_DATA_RELOC "sda21"
#define SMALL_DATA_REG 0
#endif
7320 void
7321 print_operand (file, x, code)
7322 FILE *file;
7323 rtx x;
7324 int code;
7326 int i;
7327 HOST_WIDE_INT val;
7328 unsigned HOST_WIDE_INT uval;
7330 switch (code)
7332 case '.':
7333 /* Write out an instruction after the call which may be replaced
7334 with glue code by the loader. This depends on the AIX version. */
7335 asm_fprintf (file, RS6000_CALL_GLUE);
7336 return;
7338 /* %a is output_address. */
7340 case 'A':
7341 /* If X is a constant integer whose low-order 5 bits are zero,
7342 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
7343 in the AIX assembler where "sri" with a zero shift count
7344 writes a trash instruction. */
7345 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
7346 putc ('l', file);
7347 else
7348 putc ('r', file);
7349 return;
7351 case 'b':
7352 /* If constant, low-order 16 bits of constant, unsigned.
7353 Otherwise, write normally. */
7354 if (INT_P (x))
7355 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
7356 else
7357 print_operand (file, x, 0);
7358 return;
7360 case 'B':
7361 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
7362 for 64-bit mask direction. */
7363 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
7364 return;
7366 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
7367 output_operand. */
7369 case 'D':
7370 /* There used to be a comment for 'C' reading "This is an
7371 optional cror needed for certain floating-point
7372 comparisons. Otherwise write nothing." */
7374 /* Similar, except that this is for an scc, so we must be able to
7375 encode the test in a single bit that is one. We do the above
7376 for any LE, GE, GEU, or LEU and invert the bit for NE. */
7377 if (GET_CODE (x) == LE || GET_CODE (x) == GE
7378 || GET_CODE (x) == LEU || GET_CODE (x) == GEU)
7380 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
7382 fprintf (file, "cror %d,%d,%d\n\t", base_bit + 3,
7383 base_bit + 2,
7384 base_bit + (GET_CODE (x) == GE || GET_CODE (x) == GEU));
7387 else if (GET_CODE (x) == NE)
7389 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
7391 fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 3,
7392 base_bit + 2, base_bit + 2);
7394 else if (TARGET_SPE && TARGET_HARD_FLOAT
7395 && GET_CODE (x) == EQ
7396 && GET_MODE (XEXP (x, 0)) == CCFPmode)
7398 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
7400 fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 1,
7401 base_bit + 1, base_bit + 1);
7403 return;
7405 case 'E':
7406 /* X is a CR register. Print the number of the EQ bit of the CR */
7407 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7408 output_operand_lossage ("invalid %%E value");
7409 else
7410 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
7411 return;
7413 case 'f':
7414 /* X is a CR register. Print the shift count needed to move it
7415 to the high-order four bits. */
7416 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7417 output_operand_lossage ("invalid %%f value");
7418 else
7419 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
7420 return;
7422 case 'F':
7423 /* Similar, but print the count for the rotate in the opposite
7424 direction. */
7425 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7426 output_operand_lossage ("invalid %%F value");
7427 else
7428 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
7429 return;
7431 case 'G':
7432 /* X is a constant integer. If it is negative, print "m",
7433 otherwise print "z". This is to make a aze or ame insn. */
7434 if (GET_CODE (x) != CONST_INT)
7435 output_operand_lossage ("invalid %%G value");
7436 else if (INTVAL (x) >= 0)
7437 putc ('z', file);
7438 else
7439 putc ('m', file);
7440 return;
7442 case 'h':
7443 /* If constant, output low-order five bits. Otherwise, write
7444 normally. */
7445 if (INT_P (x))
7446 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
7447 else
7448 print_operand (file, x, 0);
7449 return;
7451 case 'H':
7452 /* If constant, output low-order six bits. Otherwise, write
7453 normally. */
7454 if (INT_P (x))
7455 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
7456 else
7457 print_operand (file, x, 0);
7458 return;
7460 case 'I':
7461 /* Print `i' if this is a constant, else nothing. */
7462 if (INT_P (x))
7463 putc ('i', file);
7464 return;
7466 case 'j':
7467 /* Write the bit number in CCR for jump. */
7468 i = ccr_bit (x, 0);
7469 if (i == -1)
7470 output_operand_lossage ("invalid %%j code");
7471 else
7472 fprintf (file, "%d", i);
7473 return;
7475 case 'J':
7476 /* Similar, but add one for shift count in rlinm for scc and pass
7477 scc flag to `ccr_bit'. */
7478 i = ccr_bit (x, 1);
7479 if (i == -1)
7480 output_operand_lossage ("invalid %%J code");
7481 else
7482 /* If we want bit 31, write a shift count of zero, not 32. */
7483 fprintf (file, "%d", i == 31 ? 0 : i + 1);
7484 return;
7486 case 'k':
7487 /* X must be a constant. Write the 1's complement of the
7488 constant. */
7489 if (! INT_P (x))
7490 output_operand_lossage ("invalid %%k value");
7491 else
7492 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
7493 return;
7495 case 'K':
7496 /* X must be a symbolic constant on ELF. Write an
7497 expression suitable for an 'addi' that adds in the low 16
7498 bits of the MEM. */
7499 if (GET_CODE (x) != CONST)
7501 print_operand_address (file, x);
7502 fputs ("@l", file);
7504 else
7506 if (GET_CODE (XEXP (x, 0)) != PLUS
7507 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
7508 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
7509 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
7510 output_operand_lossage ("invalid %%K value");
7511 print_operand_address (file, XEXP (XEXP (x, 0), 0));
7512 fputs ("@l", file);
7513 /* For GNU as, there must be a non-alphanumeric character
7514 between 'l' and the number. The '-' is added by
7515 print_operand() already. */
7516 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
7517 fputs ("+", file);
7518 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
7520 return;
7522 /* %l is output_asm_label. */
7524 case 'L':
7525 /* Write second word of DImode or DFmode reference. Works on register
7526 or non-indexed memory only. */
7527 if (GET_CODE (x) == REG)
7528 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
7529 else if (GET_CODE (x) == MEM)
7531 /* Handle possible auto-increment. Since it is pre-increment and
7532 we have already done it, we can just use an offset of word. */
7533 if (GET_CODE (XEXP (x, 0)) == PRE_INC
7534 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
7535 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
7536 UNITS_PER_WORD));
7537 else
7538 output_address (XEXP (adjust_address_nv (x, SImode,
7539 UNITS_PER_WORD),
7540 0));
7542 if (small_data_operand (x, GET_MODE (x)))
7543 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
7544 reg_names[SMALL_DATA_REG]);
7546 return;
7548 case 'm':
7549 /* MB value for a mask operand. */
7550 if (! mask_operand (x, SImode))
7551 output_operand_lossage ("invalid %%m value");
7553 fprintf (file, "%d", extract_MB (x));
7554 return;
7556 case 'M':
7557 /* ME value for a mask operand. */
7558 if (! mask_operand (x, SImode))
7559 output_operand_lossage ("invalid %%M value");
7561 fprintf (file, "%d", extract_ME (x));
7562 return;
7564 /* %n outputs the negative of its operand. */
7566 case 'N':
7567 /* Write the number of elements in the vector times 4. */
7568 if (GET_CODE (x) != PARALLEL)
7569 output_operand_lossage ("invalid %%N value");
7570 else
7571 fprintf (file, "%d", XVECLEN (x, 0) * 4);
7572 return;
7574 case 'O':
7575 /* Similar, but subtract 1 first. */
7576 if (GET_CODE (x) != PARALLEL)
7577 output_operand_lossage ("invalid %%O value");
7578 else
7579 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
7580 return;
7582 case 'p':
7583 /* X is a CONST_INT that is a power of two. Output the logarithm. */
7584 if (! INT_P (x)
7585 || INT_LOWPART (x) < 0
7586 || (i = exact_log2 (INT_LOWPART (x))) < 0)
7587 output_operand_lossage ("invalid %%p value");
7588 else
7589 fprintf (file, "%d", i);
7590 return;
7592 case 'P':
7593 /* The operand must be an indirect memory reference. The result
7594 is the register number. */
7595 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
7596 || REGNO (XEXP (x, 0)) >= 32)
7597 output_operand_lossage ("invalid %%P value");
7598 else
7599 fprintf (file, "%d", REGNO (XEXP (x, 0)));
7600 return;
7602 case 'q':
7603 /* This outputs the logical code corresponding to a boolean
7604 expression. The expression may have one or both operands
7605 negated (if one, only the first one). For condition register
7606 logical operations, it will also treat the negated
7607 CR codes as NOTs, but not handle NOTs of them. */
7609 const char *const *t = 0;
7610 const char *s;
7611 enum rtx_code code = GET_CODE (x);
7612 static const char * const tbl[3][3] = {
7613 { "and", "andc", "nor" },
7614 { "or", "orc", "nand" },
7615 { "xor", "eqv", "xor" } };
7617 if (code == AND)
7618 t = tbl[0];
7619 else if (code == IOR)
7620 t = tbl[1];
7621 else if (code == XOR)
7622 t = tbl[2];
7623 else
7624 output_operand_lossage ("invalid %%q value");
7626 if (GET_CODE (XEXP (x, 0)) != NOT)
7627 s = t[0];
7628 else
7630 if (GET_CODE (XEXP (x, 1)) == NOT)
7631 s = t[2];
7632 else
7633 s = t[1];
7636 fputs (s, file);
7638 return;
7640 case 'R':
7641 /* X is a CR register. Print the mask for `mtcrf'. */
7642 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
7643 output_operand_lossage ("invalid %%R value");
7644 else
7645 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
7646 return;
7648 case 's':
7649 /* Low 5 bits of 32 - value */
7650 if (! INT_P (x))
7651 output_operand_lossage ("invalid %%s value");
7652 else
7653 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
7654 return;
7656 case 'S':
7657 /* PowerPC64 mask position. All 0's is excluded.
7658 CONST_INT 32-bit mask is considered sign-extended so any
7659 transition must occur within the CONST_INT, not on the boundary. */
7660 if (! mask64_operand (x, DImode))
7661 output_operand_lossage ("invalid %%S value");
7663 uval = INT_LOWPART (x);
7665 if (uval & 1) /* Clear Left */
7667 uval &= ((unsigned HOST_WIDE_INT) 1 << 63 << 1) - 1;
7668 i = 64;
7670 else /* Clear Right */
7672 uval = ~uval;
7673 uval &= ((unsigned HOST_WIDE_INT) 1 << 63 << 1) - 1;
7674 i = 63;
7676 while (uval != 0)
7677 --i, uval >>= 1;
7678 if (i < 0)
7679 abort ();
7680 fprintf (file, "%d", i);
7681 return;
7683 case 't':
7684 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
7685 if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
7686 abort ();
7688 /* Bit 3 is OV bit. */
7689 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
7691 /* If we want bit 31, write a shift count of zero, not 32. */
7692 fprintf (file, "%d", i == 31 ? 0 : i + 1);
7693 return;
7695 case 'T':
7696 /* Print the symbolic name of a branch target register. */
7697 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
7698 && REGNO (x) != COUNT_REGISTER_REGNUM))
7699 output_operand_lossage ("invalid %%T value");
7700 else if (REGNO (x) == LINK_REGISTER_REGNUM)
7701 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
7702 else
7703 fputs ("ctr", file);
7704 return;
7706 case 'u':
7707 /* High-order 16 bits of constant for use in unsigned operand. */
7708 if (! INT_P (x))
7709 output_operand_lossage ("invalid %%u value");
7710 else
7711 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
7712 (INT_LOWPART (x) >> 16) & 0xffff);
7713 return;
7715 case 'v':
7716 /* High-order 16 bits of constant for use in signed operand. */
7717 if (! INT_P (x))
7718 output_operand_lossage ("invalid %%v value");
7719 else
7720 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
7721 (INT_LOWPART (x) >> 16) & 0xffff);
7722 return;
7724 case 'U':
7725 /* Print `u' if this has an auto-increment or auto-decrement. */
7726 if (GET_CODE (x) == MEM
7727 && (GET_CODE (XEXP (x, 0)) == PRE_INC
7728 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
7729 putc ('u', file);
7730 return;
7732 case 'V':
7733 /* Print the trap code for this operand. */
7734 switch (GET_CODE (x))
7736 case EQ:
7737 fputs ("eq", file); /* 4 */
7738 break;
7739 case NE:
7740 fputs ("ne", file); /* 24 */
7741 break;
7742 case LT:
7743 fputs ("lt", file); /* 16 */
7744 break;
7745 case LE:
7746 fputs ("le", file); /* 20 */
7747 break;
7748 case GT:
7749 fputs ("gt", file); /* 8 */
7750 break;
7751 case GE:
7752 fputs ("ge", file); /* 12 */
7753 break;
7754 case LTU:
7755 fputs ("llt", file); /* 2 */
7756 break;
7757 case LEU:
7758 fputs ("lle", file); /* 6 */
7759 break;
7760 case GTU:
7761 fputs ("lgt", file); /* 1 */
7762 break;
7763 case GEU:
7764 fputs ("lge", file); /* 5 */
7765 break;
7766 default:
7767 abort ();
7769 break;
7771 case 'w':
7772 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
7773 normally. */
7774 if (INT_P (x))
7775 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
7776 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
7777 else
7778 print_operand (file, x, 0);
7779 return;
7781 case 'W':
7782 /* MB value for a PowerPC64 rldic operand. */
7783 val = (GET_CODE (x) == CONST_INT
7784 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
7786 if (val < 0)
7787 i = -1;
7788 else
7789 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
7790 if ((val <<= 1) < 0)
7791 break;
7793 #if HOST_BITS_PER_WIDE_INT == 32
7794 if (GET_CODE (x) == CONST_INT && i >= 0)
7795 i += 32; /* zero-extend high-part was all 0's */
7796 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
7798 val = CONST_DOUBLE_LOW (x);
7800 if (val == 0)
7801 abort ();
7802 else if (val < 0)
7803 --i;
7804 else
7805 for ( ; i < 64; i++)
7806 if ((val <<= 1) < 0)
7807 break;
7809 #endif
7811 fprintf (file, "%d", i + 1);
7812 return;
7814 case 'X':
7815 if (GET_CODE (x) == MEM
7816 && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x, 0), 0))
7817 putc ('x', file);
7818 return;
7820 case 'Y':
7821 /* Like 'L', for third word of TImode */
7822 if (GET_CODE (x) == REG)
7823 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
7824 else if (GET_CODE (x) == MEM)
7826 if (GET_CODE (XEXP (x, 0)) == PRE_INC
7827 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
7828 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
7829 else
7830 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
7831 if (small_data_operand (x, GET_MODE (x)))
7832 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
7833 reg_names[SMALL_DATA_REG]);
7835 return;
7837 case 'z':
7838 /* X is a SYMBOL_REF. Write out the name preceded by a
7839 period and without any trailing data in brackets. Used for function
7840 names. If we are configured for System V (or the embedded ABI) on
7841 the PowerPC, do not emit the period, since those systems do not use
7842 TOCs and the like. */
7843 if (GET_CODE (x) != SYMBOL_REF)
7844 abort ();
7846 if (XSTR (x, 0)[0] != '.')
7848 switch (DEFAULT_ABI)
7850 default:
7851 abort ();
7853 case ABI_AIX:
7854 putc ('.', file);
7855 break;
7857 case ABI_V4:
7858 case ABI_AIX_NODESC:
7859 case ABI_DARWIN:
7860 break;
7863 #if TARGET_AIX
7864 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
7865 #else
7866 assemble_name (file, XSTR (x, 0));
7867 #endif
7868 return;
7870 case 'Z':
7871 /* Like 'L', for last word of TImode. */
7872 if (GET_CODE (x) == REG)
7873 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
7874 else if (GET_CODE (x) == MEM)
7876 if (GET_CODE (XEXP (x, 0)) == PRE_INC
7877 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
7878 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
7879 else
7880 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
7881 if (small_data_operand (x, GET_MODE (x)))
7882 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
7883 reg_names[SMALL_DATA_REG]);
7885 return;
7887 /* Print AltiVec or SPE memory operand. */
7888 case 'y':
7890 rtx tmp;
7892 if (GET_CODE (x) != MEM)
7893 abort ();
7895 tmp = XEXP (x, 0);
7897 if (TARGET_SPE)
7899 /* Handle [reg]. */
7900 if (GET_CODE (tmp) == REG)
7902 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
7903 break;
7905 /* Handle [reg+UIMM]. */
7906 else if (GET_CODE (tmp) == PLUS &&
7907 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
7909 int x;
7911 if (GET_CODE (XEXP (tmp, 0)) != REG)
7912 abort ();
7914 x = INTVAL (XEXP (tmp, 1));
7915 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
7916 break;
7919 /* Fall through. Must be [reg+reg]. */
7921 if (GET_CODE (tmp) == REG)
7922 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
7923 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
7925 if (REGNO (XEXP (tmp, 0)) == 0)
7926 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
7927 reg_names[ REGNO (XEXP (tmp, 0)) ]);
7928 else
7929 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
7930 reg_names[ REGNO (XEXP (tmp, 1)) ]);
7932 else
7933 abort ();
7934 break;
7937 case 0:
7938 if (GET_CODE (x) == REG)
7939 fprintf (file, "%s", reg_names[REGNO (x)]);
7940 else if (GET_CODE (x) == MEM)
7942 /* We need to handle PRE_INC and PRE_DEC here, since we need to
7943 know the width from the mode. */
7944 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
7945 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
7946 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
7947 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
7948 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
7949 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
7950 else
7951 output_address (XEXP (x, 0));
7953 else
7954 output_addr_const (file, x);
7955 return;
7957 default:
7958 output_operand_lossage ("invalid %%xn code");
7962 /* Print the address of an operand. */
7964 void
7965 print_operand_address (file, x)
7966 FILE *file;
7967 rtx x;
7969 if (GET_CODE (x) == REG)
7970 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
7971 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
7972 || GET_CODE (x) == LABEL_REF)
7974 output_addr_const (file, x);
7975 if (small_data_operand (x, GET_MODE (x)))
7976 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
7977 reg_names[SMALL_DATA_REG]);
7978 else if (TARGET_TOC)
7979 abort ();
7981 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
7983 if (REGNO (XEXP (x, 0)) == 0)
7984 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
7985 reg_names[ REGNO (XEXP (x, 0)) ]);
7986 else
7987 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
7988 reg_names[ REGNO (XEXP (x, 1)) ]);
7990 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
7992 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (XEXP (x, 1)));
7993 fprintf (file, "(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
7995 #if TARGET_ELF
7996 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
7997 && CONSTANT_P (XEXP (x, 1)))
7999 output_addr_const (file, XEXP (x, 1));
8000 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
8002 #endif
8003 #if TARGET_MACHO
8004 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
8005 && CONSTANT_P (XEXP (x, 1)))
8007 fprintf (file, "lo16(");
8008 output_addr_const (file, XEXP (x, 1));
8009 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
8011 #endif
8012 else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
8014 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
8016 rtx contains_minus = XEXP (x, 1);
8017 rtx minus, symref;
8018 const char *name;
8020 /* Find the (minus (sym) (toc)) buried in X, and temporarily
8021 turn it into (sym) for output_addr_const. */
8022 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
8023 contains_minus = XEXP (contains_minus, 0);
8025 minus = XEXP (contains_minus, 0);
8026 symref = XEXP (minus, 0);
8027 XEXP (contains_minus, 0) = symref;
8028 if (TARGET_ELF)
8030 char *newname;
8032 name = XSTR (symref, 0);
8033 newname = alloca (strlen (name) + sizeof ("@toc"));
8034 strcpy (newname, name);
8035 strcat (newname, "@toc");
8036 XSTR (symref, 0) = newname;
8038 output_addr_const (file, XEXP (x, 1));
8039 if (TARGET_ELF)
8040 XSTR (symref, 0) = name;
8041 XEXP (contains_minus, 0) = minus;
8043 else
8044 output_addr_const (file, XEXP (x, 1));
8046 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
8048 else
8049 abort ();
8052 /* Target hook for assembling integer objects. The powerpc version has
8053 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
8054 is defined. It also needs to handle DI-mode objects on 64-bit
8055 targets. */
8057 static bool
8058 rs6000_assemble_integer (x, size, aligned_p)
8059 rtx x;
8060 unsigned int size;
8061 int aligned_p;
8063 #ifdef RELOCATABLE_NEEDS_FIXUP
8064 /* Special handling for SI values. */
8065 if (size == 4 && aligned_p)
8067 extern int in_toc_section PARAMS ((void));
8068 static int recurse = 0;
8070 /* For -mrelocatable, we mark all addresses that need to be fixed up
8071 in the .fixup section. */
8072 if (TARGET_RELOCATABLE
8073 && !in_toc_section ()
8074 && !in_text_section ()
8075 && !recurse
8076 && GET_CODE (x) != CONST_INT
8077 && GET_CODE (x) != CONST_DOUBLE
8078 && CONSTANT_P (x))
8080 char buf[256];
8082 recurse = 1;
8083 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
8084 fixuplabelno++;
8085 ASM_OUTPUT_LABEL (asm_out_file, buf);
8086 fprintf (asm_out_file, "\t.long\t(");
8087 output_addr_const (asm_out_file, x);
8088 fprintf (asm_out_file, ")@fixup\n");
8089 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
8090 ASM_OUTPUT_ALIGN (asm_out_file, 2);
8091 fprintf (asm_out_file, "\t.long\t");
8092 assemble_name (asm_out_file, buf);
8093 fprintf (asm_out_file, "\n\t.previous\n");
8094 recurse = 0;
8095 return true;
8097 /* Remove initial .'s to turn a -mcall-aixdesc function
8098 address into the address of the descriptor, not the function
8099 itself. */
8100 else if (GET_CODE (x) == SYMBOL_REF
8101 && XSTR (x, 0)[0] == '.'
8102 && DEFAULT_ABI == ABI_AIX)
8104 const char *name = XSTR (x, 0);
8105 while (*name == '.')
8106 name++;
8108 fprintf (asm_out_file, "\t.long\t%s\n", name);
8109 return true;
8112 #endif /* RELOCATABLE_NEEDS_FIXUP */
8113 return default_assemble_integer (x, size, aligned_p);
8116 enum rtx_code
8117 rs6000_reverse_condition (mode, code)
8118 enum machine_mode mode;
8119 enum rtx_code code;
8121 /* Reversal of FP compares takes care -- an ordered compare
8122 becomes an unordered compare and vice versa. */
8123 if (mode == CCFPmode)
8124 return reverse_condition_maybe_unordered (code);
8125 else
8126 return reverse_condition (code);
8129 /* Generate a compare for CODE. Return a brand-new rtx that
8130 represents the result of the compare. */
8132 static rtx
8133 rs6000_generate_compare (code)
8134 enum rtx_code code;
8136 enum machine_mode comp_mode;
8137 rtx compare_result;
8139 if (rs6000_compare_fp_p)
8140 comp_mode = CCFPmode;
8141 else if (code == GTU || code == LTU
8142 || code == GEU || code == LEU)
8143 comp_mode = CCUNSmode;
8144 else
8145 comp_mode = CCmode;
8147 /* First, the compare. */
8148 compare_result = gen_reg_rtx (comp_mode);
8150 /* SPE FP compare instructions on the GPRs. Yuck! */
8151 if ((TARGET_SPE && TARGET_HARD_FLOAT) && rs6000_compare_fp_p)
8153 rtx cmp, or1, or2, or_result, compare_result2;
8155 switch (code)
8157 case EQ:
8158 case UNEQ:
8159 case NE:
8160 case LTGT:
8161 cmp = flag_unsafe_math_optimizations
8162 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
8163 rs6000_compare_op1)
8164 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
8165 rs6000_compare_op1);
8166 break;
8167 case GT:
8168 case GTU:
8169 case UNGT:
8170 case UNGE:
8171 case GE:
8172 case GEU:
8173 cmp = flag_unsafe_math_optimizations
8174 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
8175 rs6000_compare_op1)
8176 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
8177 rs6000_compare_op1);
8178 break;
8179 case LT:
8180 case LTU:
8181 case UNLT:
8182 case UNLE:
8183 case LE:
8184 case LEU:
8185 cmp = flag_unsafe_math_optimizations
8186 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
8187 rs6000_compare_op1)
8188 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
8189 rs6000_compare_op1);
8190 break;
8191 default:
8192 abort ();
8195 /* Synthesize LE and GE from LT/GT || EQ. */
8196 if (code == LE || code == GE || code == LEU || code == GEU)
8198 /* Synthesize GE/LE frome GT/LT || EQ. */
8200 emit_insn (cmp);
8202 switch (code)
8204 case LE: code = LT; break;
8205 case GE: code = GT; break;
8206 case LEU: code = LT; break;
8207 case GEU: code = GT; break;
8208 default: abort ();
8211 or1 = gen_reg_rtx (SImode);
8212 or2 = gen_reg_rtx (SImode);
8213 or_result = gen_reg_rtx (CCEQmode);
8214 compare_result2 = gen_reg_rtx (CCFPmode);
8216 /* Do the EQ. */
8217 cmp = flag_unsafe_math_optimizations
8218 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
8219 rs6000_compare_op1)
8220 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
8221 rs6000_compare_op1);
8222 emit_insn (cmp);
8224 /* The MC8540 FP compare instructions set the CR bits
8225 differently than other PPC compare instructions. For
8226 that matter, there is no generic test instruction, but a
8227 testgt, testlt, and testeq. For a true condition, bit 2
8228 is set (x1xx) in the CR. Following the traditional CR
8229 values:
8231 LT GT EQ OV
8232 bit3 bit2 bit1 bit0
8234 ... bit 2 would be a GT CR alias, so later on we
8235 look in the GT bits for the branch instructins.
8236 However, we must be careful to emit correct RTL in
8237 the meantime, so optimizations don't get confused. */
8239 or1 = gen_rtx (NE, SImode, compare_result, const0_rtx);
8240 or2 = gen_rtx (NE, SImode, compare_result2, const0_rtx);
8242 /* OR them together. */
8243 cmp = gen_rtx_SET (VOIDmode, or_result,
8244 gen_rtx_COMPARE (CCEQmode,
8245 gen_rtx_IOR (SImode, or1, or2),
8246 const_true_rtx));
8247 compare_result = or_result;
8248 code = EQ;
8250 else
8252 /* We only care about 1 bit (x1xx), so map everything to NE to
8253 maintain rtl sanity. We'll get to the right bit (x1xx) at
8254 code output time. */
8255 if (code == NE || code == LTGT)
8256 /* Do the inverse here because we have no cmpne
8257 instruction. We use the cmpeq instruction and expect
8258 to get a 0 instead. */
8259 code = EQ;
8260 else
8261 code = NE;
8264 emit_insn (cmp);
8266 else
8267 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
8268 gen_rtx_COMPARE (comp_mode,
8269 rs6000_compare_op0,
8270 rs6000_compare_op1)));
8272 /* Some kinds of FP comparisons need an OR operation;
8273 except for flag_unsafe_math_optimizations we don't bother. */
8274 if (rs6000_compare_fp_p
8275 && ! flag_unsafe_math_optimizations
8276 && ! (TARGET_HARD_FLOAT && TARGET_SPE)
8277 && (code == LE || code == GE
8278 || code == UNEQ || code == LTGT
8279 || code == UNGT || code == UNLT))
8281 enum rtx_code or1, or2;
8282 rtx or1_rtx, or2_rtx, compare2_rtx;
8283 rtx or_result = gen_reg_rtx (CCEQmode);
8285 switch (code)
8287 case LE: or1 = LT; or2 = EQ; break;
8288 case GE: or1 = GT; or2 = EQ; break;
8289 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
8290 case LTGT: or1 = LT; or2 = GT; break;
8291 case UNGT: or1 = UNORDERED; or2 = GT; break;
8292 case UNLT: or1 = UNORDERED; or2 = LT; break;
8293 default: abort ();
8295 validate_condition_mode (or1, comp_mode);
8296 validate_condition_mode (or2, comp_mode);
8297 or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
8298 or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
8299 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
8300 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
8301 const_true_rtx);
8302 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
8304 compare_result = or_result;
8305 code = EQ;
8308 validate_condition_mode (code, GET_MODE (compare_result));
8310 return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
8314 /* Emit the RTL for an sCOND pattern. */
8316 void
8317 rs6000_emit_sCOND (code, result)
8318 enum rtx_code code;
8319 rtx result;
8321 rtx condition_rtx;
8322 enum machine_mode op_mode;
8324 condition_rtx = rs6000_generate_compare (code);
8326 op_mode = GET_MODE (rs6000_compare_op0);
8327 if (op_mode == VOIDmode)
8328 op_mode = GET_MODE (rs6000_compare_op1);
8330 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
8332 PUT_MODE (condition_rtx, DImode);
8333 convert_move (result, condition_rtx, 0);
8335 else
8337 PUT_MODE (condition_rtx, SImode);
8338 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
8342 /* Emit a branch of kind CODE to location LOC. */
8344 void
8345 rs6000_emit_cbranch (code, loc)
8346 enum rtx_code code;
8347 rtx loc;
8349 rtx condition_rtx, loc_ref;
8351 condition_rtx = rs6000_generate_compare (code);
8352 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
8353 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
8354 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
8355 loc_ref, pc_rtx)));
8358 /* Return the string to output a conditional branch to LABEL, which is
8359 the operand number of the label, or -1 if the branch is really a
8360 conditional return.
8362 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
8363 condition code register and its mode specifies what kind of
8364 comparison we made.
8366 REVERSED is non-zero if we should reverse the sense of the comparison.
8368 INSN is the insn. */
8370 char *
8371 output_cbranch (op, label, reversed, insn)
8372 rtx op;
8373 const char * label;
8374 int reversed;
8375 rtx insn;
8377 static char string[64];
8378 enum rtx_code code = GET_CODE (op);
8379 rtx cc_reg = XEXP (op, 0);
8380 enum machine_mode mode = GET_MODE (cc_reg);
8381 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
8382 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
8383 int really_reversed = reversed ^ need_longbranch;
8384 char *s = string;
8385 const char *ccode;
8386 const char *pred;
8387 rtx note;
8389 validate_condition_mode (code, mode);
8391 /* Work out which way this really branches. We could use
8392 reverse_condition_maybe_unordered here always but this
8393 makes the resulting assembler clearer. */
8394 if (really_reversed)
8395 code = rs6000_reverse_condition (mode, code);
8397 if ((TARGET_SPE && TARGET_HARD_FLOAT) && mode == CCFPmode)
8399 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
8400 to the GT bit. */
8401 if (code == EQ)
8402 /* Opposite of GT. */
8403 code = UNLE;
8404 else if (code == NE)
8405 code = GT;
8406 else
8407 abort ();
8410 switch (code)
8412 /* Not all of these are actually distinct opcodes, but
8413 we distinguish them for clarity of the resulting assembler. */
8414 case NE: case LTGT:
8415 ccode = "ne"; break;
8416 case EQ: case UNEQ:
8417 ccode = "eq"; break;
8418 case GE: case GEU:
8419 ccode = "ge"; break;
8420 case GT: case GTU: case UNGT:
8421 ccode = "gt"; break;
8422 case LE: case LEU:
8423 ccode = "le"; break;
8424 case LT: case LTU: case UNLT:
8425 ccode = "lt"; break;
8426 case UNORDERED: ccode = "un"; break;
8427 case ORDERED: ccode = "nu"; break;
8428 case UNGE: ccode = "nl"; break;
8429 case UNLE: ccode = "ng"; break;
8430 default:
8431 abort ();
8434 /* Maybe we have a guess as to how likely the branch is.
8435 The old mnemonics don't have a way to specify this information. */
8436 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
8437 if (note != NULL_RTX)
8439 /* PROB is the difference from 50%. */
8440 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
8442 /* For branches that are very close to 50%, assume not-taken. */
8443 if (abs (prob) > REG_BR_PROB_BASE / 20
8444 && ((prob > 0) ^ need_longbranch))
8445 pred = "+";
8446 else
8447 pred = "-";
8449 else
8450 pred = "";
8452 if (label == NULL)
8453 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
8454 else
8455 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
8457 /* We need to escape any '%' characters in the reg_names string.
8458 Assume they'd only be the first character... */
8459 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
8460 *s++ = '%';
8461 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
8463 if (label != NULL)
8465 /* If the branch distance was too far, we may have to use an
8466 unconditional branch to go the distance. */
8467 if (need_longbranch)
8468 s += sprintf (s, ",$+8\n\tb %s", label);
8469 else
8470 s += sprintf (s, ",%s", label);
8473 return string;
8476 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
8477 operands of the last comparison is nonzero/true, FALSE_COND if it
8478 is zero/false. Return 0 if the hardware has no such operation. */
8481 rs6000_emit_cmove (dest, op, true_cond, false_cond)
8482 rtx dest;
8483 rtx op;
8484 rtx true_cond;
8485 rtx false_cond;
8487 enum rtx_code code = GET_CODE (op);
8488 rtx op0 = rs6000_compare_op0;
8489 rtx op1 = rs6000_compare_op1;
8490 REAL_VALUE_TYPE c1;
8491 enum machine_mode compare_mode = GET_MODE (op0);
8492 enum machine_mode result_mode = GET_MODE (dest);
8493 rtx temp;
8495 /* These modes should always match. */
8496 if (GET_MODE (op1) != compare_mode
8497 /* In the isel case however, we can use a compare immediate, so
8498 op1 may be a small constant. */
8499 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
8500 return 0;
8501 if (GET_MODE (true_cond) != result_mode)
8502 return 0;
8503 if (GET_MODE (false_cond) != result_mode)
8504 return 0;
8506 /* First, work out if the hardware can do this at all, or
8507 if it's too slow... */
8508 if (! rs6000_compare_fp_p)
8510 if (TARGET_ISEL)
8511 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
8512 return 0;
8515 /* Eliminate half of the comparisons by switching operands, this
8516 makes the remaining code simpler. */
8517 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
8518 || code == LTGT || code == LT)
8520 code = reverse_condition_maybe_unordered (code);
8521 temp = true_cond;
8522 true_cond = false_cond;
8523 false_cond = temp;
8526 /* UNEQ and LTGT take four instructions for a comparison with zero,
8527 it'll probably be faster to use a branch here too. */
8528 if (code == UNEQ)
8529 return 0;
8531 if (GET_CODE (op1) == CONST_DOUBLE)
8532 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
8534 /* We're going to try to implement comparions by performing
8535 a subtract, then comparing against zero. Unfortunately,
8536 Inf - Inf is NaN which is not zero, and so if we don't
8537 know that the the operand is finite and the comparison
8538 would treat EQ different to UNORDERED, we can't do it. */
8539 if (! flag_unsafe_math_optimizations
8540 && code != GT && code != UNGE
8541 && (GET_CODE (op1) != CONST_DOUBLE || target_isinf (c1))
8542 /* Constructs of the form (a OP b ? a : b) are safe. */
8543 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
8544 || (! rtx_equal_p (op0, true_cond)
8545 && ! rtx_equal_p (op1, true_cond))))
8546 return 0;
8547 /* At this point we know we can use fsel. */
8549 /* Reduce the comparison to a comparison against zero. */
8550 temp = gen_reg_rtx (compare_mode);
8551 emit_insn (gen_rtx_SET (VOIDmode, temp,
8552 gen_rtx_MINUS (compare_mode, op0, op1)));
8553 op0 = temp;
8554 op1 = CONST0_RTX (compare_mode);
8556 /* If we don't care about NaNs we can reduce some of the comparisons
8557 down to faster ones. */
8558 if (flag_unsafe_math_optimizations)
8559 switch (code)
8561 case GT:
8562 code = LE;
8563 temp = true_cond;
8564 true_cond = false_cond;
8565 false_cond = temp;
8566 break;
8567 case UNGE:
8568 code = GE;
8569 break;
8570 case UNEQ:
8571 code = EQ;
8572 break;
8573 default:
8574 break;
8577 /* Now, reduce everything down to a GE. */
8578 switch (code)
8580 case GE:
8581 break;
8583 case LE:
8584 temp = gen_reg_rtx (compare_mode);
8585 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
8586 op0 = temp;
8587 break;
8589 case ORDERED:
8590 temp = gen_reg_rtx (compare_mode);
8591 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
8592 op0 = temp;
8593 break;
8595 case EQ:
8596 temp = gen_reg_rtx (compare_mode);
8597 emit_insn (gen_rtx_SET (VOIDmode, temp,
8598 gen_rtx_NEG (compare_mode,
8599 gen_rtx_ABS (compare_mode, op0))));
8600 op0 = temp;
8601 break;
8603 case UNGE:
8604 temp = gen_reg_rtx (result_mode);
8605 emit_insn (gen_rtx_SET (VOIDmode, temp,
8606 gen_rtx_IF_THEN_ELSE (result_mode,
8607 gen_rtx_GE (VOIDmode,
8608 op0, op1),
8609 true_cond, false_cond)));
8610 false_cond = temp;
8611 true_cond = false_cond;
8613 temp = gen_reg_rtx (compare_mode);
8614 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
8615 op0 = temp;
8616 break;
8618 case GT:
8619 temp = gen_reg_rtx (result_mode);
8620 emit_insn (gen_rtx_SET (VOIDmode, temp,
8621 gen_rtx_IF_THEN_ELSE (result_mode,
8622 gen_rtx_GE (VOIDmode,
8623 op0, op1),
8624 true_cond, false_cond)));
8625 true_cond = temp;
8626 false_cond = true_cond;
8628 temp = gen_reg_rtx (compare_mode);
8629 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
8630 op0 = temp;
8631 break;
8633 default:
8634 abort ();
8637 emit_insn (gen_rtx_SET (VOIDmode, dest,
8638 gen_rtx_IF_THEN_ELSE (result_mode,
8639 gen_rtx_GE (VOIDmode,
8640 op0, op1),
8641 true_cond, false_cond)));
8642 return 1;
8645 /* Same as above, but for ints (isel). */
8647 static int
8648 rs6000_emit_int_cmove (dest, op, true_cond, false_cond)
8649 rtx dest;
8650 rtx op;
8651 rtx true_cond;
8652 rtx false_cond;
8654 rtx condition_rtx, cr;
8656 /* All isel implementations thus far are 32-bits. */
8657 if (GET_MODE (rs6000_compare_op0) != SImode)
8658 return 0;
8660 /* We still have to do the compare, because isel doesn't do a
8661 compare, it just looks at the CRx bits set by a previous compare
8662 instruction. */
8663 condition_rtx = rs6000_generate_compare (GET_CODE (op));
8664 cr = XEXP (condition_rtx, 0);
8666 if (GET_MODE (cr) == CCmode)
8667 emit_insn (gen_isel_signed (dest, condition_rtx,
8668 true_cond, false_cond, cr));
8669 else
8670 emit_insn (gen_isel_unsigned (dest, condition_rtx,
8671 true_cond, false_cond, cr));
8673 return 1;
8676 const char *
8677 output_isel (operands)
8678 rtx *operands;
8680 enum rtx_code code;
8682 code = GET_CODE (operands[1]);
8683 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
8685 PUT_CODE (operands[1], reverse_condition (code));
8686 return "isel %0,%3,%2,%j1";
8688 else
8689 return "isel %0,%2,%3,%j1";
8692 void
8693 rs6000_emit_minmax (dest, code, op0, op1)
8694 rtx dest;
8695 enum rtx_code code;
8696 rtx op0;
8697 rtx op1;
8699 enum machine_mode mode = GET_MODE (op0);
8700 enum rtx_code c;
8701 rtx target;
8703 if (code == SMAX || code == SMIN)
8704 c = GE;
8705 else
8706 c = GEU;
8708 if (code == SMAX || code == UMAX)
8709 target = emit_conditional_move (dest, c, op0, op1, mode,
8710 op0, op1, mode, 0);
8711 else
8712 target = emit_conditional_move (dest, c, op0, op1, mode,
8713 op1, op0, mode, 0);
8714 if (target == NULL_RTX)
8715 abort ();
8716 if (target != dest)
8717 emit_move_insn (dest, target);
8720 /* This page contains routines that are used to determine what the
8721 function prologue and epilogue code will do and write them out. */
8723 /* Return the first fixed-point register that is required to be
8724 saved. 32 if none. */
8727 first_reg_to_save ()
8729 int first_reg;
8731 /* Find lowest numbered live register. */
8732 for (first_reg = 13; first_reg <= 31; first_reg++)
8733 if (regs_ever_live[first_reg]
8734 && (! call_used_regs[first_reg]
8735 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
8736 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
8737 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
8738 break;
8740 #if TARGET_MACHO
8741 if (flag_pic && current_function_uses_pic_offset_table &&
8742 (first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM))
8743 return RS6000_PIC_OFFSET_TABLE_REGNUM;
8744 #endif
8746 return first_reg;
8749 /* Similar, for FP regs. */
8752 first_fp_reg_to_save ()
8754 int first_reg;
8756 /* Find lowest numbered live register. */
8757 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
8758 if (regs_ever_live[first_reg])
8759 break;
8761 return first_reg;
8764 /* Similar, for AltiVec regs. */
8766 static int
8767 first_altivec_reg_to_save ()
8769 int i;
8771 /* Stack frame remains as is unless we are in AltiVec ABI. */
8772 if (! TARGET_ALTIVEC_ABI)
8773 return LAST_ALTIVEC_REGNO + 1;
8775 /* Find lowest numbered live register. */
8776 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
8777 if (regs_ever_live[i])
8778 break;
8780 return i;
8783 /* Return a 32-bit mask of the AltiVec registers we need to set in
8784 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
8785 the 32-bit word is 0. */
8787 static unsigned int
8788 compute_vrsave_mask ()
8790 unsigned int i, mask = 0;
8792 /* First, find out if we use _any_ altivec registers. */
8793 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
8794 if (regs_ever_live[i])
8795 mask |= ALTIVEC_REG_BIT (i);
8797 if (mask == 0)
8798 return mask;
8800 /* Next, add all registers that are call-clobbered. We do this
8801 because post-reload register optimizers such as regrename_optimize
8802 may choose to use them. They never change the register class
8803 chosen by reload, so cannot create new uses of altivec registers
8804 if there were none before, so the early exit above is safe. */
8805 /* ??? Alternately, we could define HARD_REGNO_RENAME_OK to disallow
8806 altivec registers not saved in the mask, which might well make the
8807 adjustments below more effective in eliding the save/restore of
8808 VRSAVE in small functions. */
8809 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
8810 if (call_used_regs[i])
8811 mask |= ALTIVEC_REG_BIT (i);
8813 /* Next, remove the argument registers from the set. These must
8814 be in the VRSAVE mask set by the caller, so we don't need to add
8815 them in again. More importantly, the mask we compute here is
8816 used to generate CLOBBERs in the set_vrsave insn, and we do not
8817 wish the argument registers to die. */
8818 for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
8819 mask &= ~ALTIVEC_REG_BIT (i);
8821 /* Similarly, remove the return value from the set. */
8823 bool yes = false;
8824 diddle_return_value (is_altivec_return_reg, &yes);
8825 if (yes)
8826 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
8829 return mask;
8832 static void
8833 is_altivec_return_reg (reg, xyes)
8834 rtx reg;
8835 void *xyes;
8837 bool *yes = (bool *) xyes;
8838 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
8839 *yes = true;
8843 /* Calculate the stack information for the current function. This is
8844 complicated by having two separate calling sequences, the AIX calling
8845 sequence and the V.4 calling sequence.
8847 AIX (and Darwin/Mac OS X) stack frames look like:
8848 32-bit 64-bit
8849 SP----> +---------------------------------------+
8850 | back chain to caller | 0 0
8851 +---------------------------------------+
8852 | saved CR | 4 8 (8-11)
8853 +---------------------------------------+
8854 | saved LR | 8 16
8855 +---------------------------------------+
8856 | reserved for compilers | 12 24
8857 +---------------------------------------+
8858 | reserved for binders | 16 32
8859 +---------------------------------------+
8860 | saved TOC pointer | 20 40
8861 +---------------------------------------+
8862 | Parameter save area (P) | 24 48
8863 +---------------------------------------+
8864 | Alloca space (A) | 24+P etc.
8865 +---------------------------------------+
8866 | Local variable space (L) | 24+P+A
8867 +---------------------------------------+
8868 | Float/int conversion temporary (X) | 24+P+A+L
8869 +---------------------------------------+
8870 | Save area for AltiVec registers (W) | 24+P+A+L+X
8871 +---------------------------------------+
8872 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
8873 +---------------------------------------+
8874 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
8875 +---------------------------------------+
 8876 | Save area for GP registers (G) | 24+P+A+L+X+W+Y+Z
 8877 +---------------------------------------+
 8878 | Save area for FP registers (F) | 24+P+A+L+X+W+Y+Z+G
8879 +---------------------------------------+
8880 old SP->| back chain to caller's caller |
8881 +---------------------------------------+
8883 The required alignment for AIX configurations is two words (i.e., 8
8884 or 16 bytes).
8887 V.4 stack frames look like:
8889 SP----> +---------------------------------------+
8890 | back chain to caller | 0
8891 +---------------------------------------+
8892 | caller's saved LR | 4
8893 +---------------------------------------+
8894 | Parameter save area (P) | 8
8895 +---------------------------------------+
8896 | Alloca space (A) | 8+P
8897 +---------------------------------------+
8898 | Varargs save area (V) | 8+P+A
8899 +---------------------------------------+
8900 | Local variable space (L) | 8+P+A+V
8901 +---------------------------------------+
8902 | Float/int conversion temporary (X) | 8+P+A+V+L
8903 +---------------------------------------+
8904 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
8905 +---------------------------------------+
8906 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
8907 +---------------------------------------+
8908 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
8909 +---------------------------------------+
8910 | SPE: area for 64-bit GP registers |
8911 +---------------------------------------+
8912 | SPE alignment padding |
8913 +---------------------------------------+
8914 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
8915 +---------------------------------------+
8916 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
8917 +---------------------------------------+
8918 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
8919 +---------------------------------------+
8920 old SP->| back chain to caller's caller |
8921 +---------------------------------------+
8923 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
8924 given. (But note below and in sysv4.h that we require only 8 and
8925 may round up the size of our stack frame anyways. The historical
8926 reason is early versions of powerpc-linux which didn't properly
8927 align the stack at program startup. A happy side-effect is that
8928 -mno-eabi libraries can be used with -meabi programs.)
8930 The EABI configuration defaults to the V.4 layout, unless
8931 -mcall-aix is used, in which case the AIX layout is used. However,
8932 the stack alignment requirements may differ. If -mno-eabi is not
8933 given, the required stack alignment is 8 bytes; if -mno-eabi is
8934 given, the required alignment is 16 bytes. (But see V.4 comment
8935 above.) */
8937 #ifndef ABI_STACK_BOUNDARY
8938 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
8939 #endif
/* Compute the stack-frame layout of the current function, following
   the AIX/V.4 frame pictures in the comment above, and return it.
   NOTE: the result lives in a static buffer -- a later call (for
   another function) overwrites it.  All fields not explicitly set
   below are zeroed by the `info = zero_info' assignment.  */
8941 rs6000_stack_t *
8942 rs6000_stack_info ()
8944 static rs6000_stack_t info, zero_info;
8945 rs6000_stack_t *info_ptr = &info;
/* Size of one GPR save slot (8 bytes under -mpowerpc64).  */
8946 int reg_size = TARGET_POWERPC64 ? 8 : 4;
8947 enum rs6000_abi abi;
8948 int ehrd_size;
8949 int total_raw_size;
8951 /* Zero all fields portably. */
8952 info = zero_info;
8954 /* Select which calling sequence. */
8955 info_ptr->abi = abi = DEFAULT_ABI;
8957 /* Calculate which registers need to be saved & save area size. */
8958 info_ptr->first_gp_reg_save = first_reg_to_save ();
8959 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
8960 even if it currently looks like we won't. */
8961 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
8962 || (flag_pic == 1 && abi == ABI_V4)
8963 || (flag_pic && abi == ABI_DARWIN))
8964 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
8965 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
8966 else
8967 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
8969 /* For the SPE, we have an additional upper 32-bits on each GPR.
8970 Ideally we should save the entire 64-bits only when the upper
8971 half is used in SIMD instructions. Since we only record
8972 registers live (not the size they are used in), this proves
8973 difficult because we'd have to traverse the instruction chain at
8974 the right time, taking reload into account. This is a real pain,
8975 so we opt to save the GPRs in 64-bits always. Anyone overly
8976 concerned with frame size can fix this. ;-).
8978 So... since we save all GPRs (except the SP) in 64-bits, the
8979 traditional GP save area will be empty. */
8980 if (TARGET_SPE_ABI)
8981 info_ptr->gp_size = 0;
8983 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
8984 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
8986 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
8987 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
8988 - info_ptr->first_altivec_reg_save)
8990 /* Does this function call anything? */
8991 info_ptr->calls_p = (! current_function_is_leaf
8992 || cfun->machine->ra_needs_full_frame);
8994 /* Determine if we need to save the link register. */
8995 if (rs6000_ra_ever_killed ()
8996 || (DEFAULT_ABI == ABI_AIX && current_function_profile)
8997 #ifdef TARGET_RELOCATABLE
8998 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
8999 #endif
9000 || (info_ptr->first_fp_reg_save != 64
9001 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
9002 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
9003 || (abi == ABI_V4 && current_function_calls_alloca)
9004 || (DEFAULT_ABI == ABI_DARWIN
9005 && flag_pic
9006 && current_function_uses_pic_offset_table)
9007 || info_ptr->calls_p)
/* Record the save and also mark LR live so later passes see it.  */
9009 info_ptr->lr_save_p = 1;
9010 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
9013 /* Determine if we need to save the condition code registers. */
9014 if (regs_ever_live[CR2_REGNO]
9015 || regs_ever_live[CR3_REGNO]
9016 || regs_ever_live[CR4_REGNO])
9018 info_ptr->cr_save_p = 1;
/* Only V.4 allocates a distinct CR slot; AIX reuses the fixed area.  */
9019 if (abi == ABI_V4)
9020 info_ptr->cr_size = reg_size;
9023 /* If the current function calls __builtin_eh_return, then we need
9024 to allocate stack space for registers that will hold data for
9025 the exception handler. */
9026 if (current_function_calls_eh_return)
9028 unsigned int i;
/* Count the EH data registers; the loop body is intentionally empty.  */
9029 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
9030 continue;
9032 /* SPE saves EH registers in 64-bits. */
9033 ehrd_size = i * (TARGET_SPE_ABI ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
9035 else
9036 ehrd_size = 0;
9038 /* Determine various sizes. */
9039 info_ptr->reg_size = reg_size;
9040 info_ptr->fixed_size = RS6000_SAVE_AREA;
9041 info_ptr->varargs_size = RS6000_VARARGS_AREA;
9042 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
9043 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
/* SPE saves every GPR in 64 bits (see the SPE comment above).  */
9046 if (TARGET_SPE_ABI)
9047 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
9048 else
9049 info_ptr->spe_gp_size = 0;
/* One 32-bit word for VRSAVE, but only if some vector register in
   the mask actually needs it.  */
9051 if (TARGET_ALTIVEC_ABI && TARGET_ALTIVEC_VRSAVE)
9053 info_ptr->vrsave_mask = compute_vrsave_mask ();
9054 info_ptr->vrsave_size = info_ptr->vrsave_mask ? 4 : 0;
9056 else
9058 info_ptr->vrsave_mask = 0;
9059 info_ptr->vrsave_size = 0;
/* Offsets below are negative displacements from the incoming stack
   pointer (the top of the new frame), per the diagrams above.  */
9062 /* Calculate the offsets. */
9063 switch (abi)
9065 case ABI_NONE:
9066 default:
9067 abort ();
9069 case ABI_AIX:
9070 case ABI_AIX_NODESC:
9071 case ABI_DARWIN:
9072 info_ptr->fp_save_offset = - info_ptr->fp_size;
9073 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
9075 if (TARGET_ALTIVEC_ABI)
9077 info_ptr->vrsave_save_offset
9078 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
9080 /* Align stack so vector save area is on a quadword boundary. */
9081 if (info_ptr->altivec_size != 0)
9082 info_ptr->altivec_padding_size
9083 = 16 - (-info_ptr->vrsave_save_offset % 16);
9084 else
9085 info_ptr->altivec_padding_size = 0;
9087 info_ptr->altivec_save_offset
9088 = info_ptr->vrsave_save_offset
9089 - info_ptr->altivec_padding_size
9090 - info_ptr->altivec_size;
9092 /* Adjust for AltiVec case. */
9093 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
9095 else
9096 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
/* CR and LR live in the caller-allocated fixed area, above SP.  */
9097 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
9098 info_ptr->lr_save_offset = 2*reg_size;
9099 break;
9101 case ABI_V4:
9102 info_ptr->fp_save_offset = - info_ptr->fp_size;
9103 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
9104 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
9106 if (TARGET_SPE_ABI)
9108 /* Align stack so SPE GPR save area is aligned on a
9109 double-word boundary. */
9110 if (info_ptr->spe_gp_size != 0)
9111 info_ptr->spe_padding_size
9112 = 8 - (-info_ptr->cr_save_offset % 8);
9113 else
9114 info_ptr->spe_padding_size = 0;
9116 info_ptr->spe_gp_save_offset
9117 = info_ptr->cr_save_offset
9118 - info_ptr->spe_padding_size
9119 - info_ptr->spe_gp_size;
9121 /* Adjust for SPE case. */
9122 info_ptr->toc_save_offset
9123 = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
9125 else if (TARGET_ALTIVEC_ABI)
9127 info_ptr->vrsave_save_offset
9128 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
9130 /* Align stack so vector save area is on a quadword boundary. */
9131 if (info_ptr->altivec_size != 0)
9132 info_ptr->altivec_padding_size
9133 = 16 - (-info_ptr->vrsave_save_offset % 16);
9134 else
9135 info_ptr->altivec_padding_size = 0;
9137 info_ptr->altivec_save_offset
9138 = info_ptr->vrsave_save_offset
9139 - info_ptr->altivec_padding_size
9140 - info_ptr->altivec_size;
9142 /* Adjust for AltiVec case. */
9143 info_ptr->toc_save_offset
9144 = info_ptr->altivec_save_offset - info_ptr->toc_size;
9146 else
9147 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
9148 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
9149 info_ptr->lr_save_offset = reg_size;
9150 break;
/* NOTE(review): vrsave_size is added twice in this sum (once here
   and once again below near lr_size).  That can only over-allocate
   -- the frame stays correct but may be slightly larger than needed;
   later revisions list vrsave_size once.  Confirm before relying on
   exact frame sizes.  */
9153 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
9154 + info_ptr->gp_size
9155 + info_ptr->altivec_size
9156 + info_ptr->altivec_padding_size
9157 + info_ptr->vrsave_size
9158 + info_ptr->spe_gp_size
9159 + info_ptr->spe_padding_size
9160 + ehrd_size
9161 + info_ptr->cr_size
9162 + info_ptr->lr_size
9163 + info_ptr->vrsave_size
9164 + info_ptr->toc_size,
/* NOTE(review): ABI_DARWIN here is a (nonzero) enum constant, so
   this condition is always true and the save area is always aligned
   to 16; `DEFAULT_ABI == ABI_DARWIN' was probably intended.  */
9165 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
9166 ? 16 : 8);
9168 total_raw_size = (info_ptr->vars_size
9169 + info_ptr->parm_size
9170 + info_ptr->save_size
9171 + info_ptr->varargs_size
9172 + info_ptr->fixed_size);
9174 info_ptr->total_size =
9175 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
9177 /* Determine if we need to allocate any stack frame:
9179 For AIX we need to push the stack if a frame pointer is needed
9180 (because the stack might be dynamically adjusted), if we are
9181 debugging, if we make calls, or if the sum of fp_save, gp_save,
9182 and local variables are more than the space needed to save all
9183 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
9184 + 18*8 = 288 (GPR13 reserved).
9186 For V.4 we don't have the stack cushion that AIX uses, but assume
9187 that the debugger can handle stackless frames. */
9189 if (info_ptr->calls_p)
9190 info_ptr->push_p = 1;
9192 else if (abi == ABI_V4)
9193 info_ptr->push_p = total_raw_size > info_ptr->fixed_size;
9195 else
9196 info_ptr->push_p = (frame_pointer_needed
9197 || (abi != ABI_DARWIN && write_symbols != NO_DEBUG)
9198 || ((total_raw_size - info_ptr->fixed_size)
9199 > (TARGET_32BIT ? 220 : 288)));
9201 /* Zero offsets if we're not saving those registers. */
9202 if (info_ptr->fp_size == 0)
9203 info_ptr->fp_save_offset = 0;
9205 if (info_ptr->gp_size == 0)
9206 info_ptr->gp_save_offset = 0;
9208 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
9209 info_ptr->altivec_save_offset = 0;
9211 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
9212 info_ptr->vrsave_save_offset = 0;
9214 if (! TARGET_SPE_ABI || info_ptr->spe_gp_size == 0)
9215 info_ptr->spe_gp_save_offset = 0;
9217 if (! info_ptr->lr_save_p)
9218 info_ptr->lr_save_offset = 0;
9220 if (! info_ptr->cr_save_p)
9221 info_ptr->cr_save_offset = 0;
9223 if (! info_ptr->toc_save_p)
9224 info_ptr->toc_save_offset = 0;
9226 return info_ptr;
/* Print the frame layout INFO to stderr in human-readable form; if
   INFO is null, compute and print the current function's layout.
   Fields with zero/default values are omitted.  Debugging aid,
   intended to be called by hand from a debugger.  */
9229 void
9230 debug_stack_info (info)
9231 rs6000_stack_t *info;
9233 const char *abi_string;
9235 if (! info)
9236 info = rs6000_stack_info ();
9238 fprintf (stderr, "\nStack information for function %s:\n",
9239 ((current_function_decl && DECL_NAME (current_function_decl))
9240 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
9241 : "<unknown>"));
9243 switch (info->abi)
9245 default: abi_string = "Unknown"; break;
9246 case ABI_NONE: abi_string = "NONE"; break;
9247 case ABI_AIX:
9248 case ABI_AIX_NODESC: abi_string = "AIX"; break;
9249 case ABI_DARWIN: abi_string = "Darwin"; break;
9250 case ABI_V4: abi_string = "V.4"; break;
9253 fprintf (stderr, "\tABI = %5s\n", abi_string);
9255 if (TARGET_ALTIVEC_ABI)
9256 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
9258 if (TARGET_SPE_ABI)
9259 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
/* 32 / 64 / LAST_ALTIVEC_REGNO+1 mean "nothing saved" for the three
   register-save ranges, so those defaults are not printed.  */
9261 if (info->first_gp_reg_save != 32)
9262 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
9264 if (info->first_fp_reg_save != 64)
9265 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
9267 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
9268 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
9269 info->first_altivec_reg_save);
9271 if (info->lr_save_p)
9272 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
9274 if (info->cr_save_p)
9275 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
9277 if (info->toc_save_p)
9278 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
9280 if (info->vrsave_mask)
9281 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
9283 if (info->push_p)
9284 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
9286 if (info->calls_p)
9287 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
9289 if (info->gp_save_offset)
9290 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
9292 if (info->fp_save_offset)
9293 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
9295 if (info->altivec_save_offset)
9296 fprintf (stderr, "\taltivec_save_offset = %5d\n",
9297 info->altivec_save_offset);
9299 if (info->spe_gp_save_offset)
9300 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
9301 info->spe_gp_save_offset);
9303 if (info->vrsave_save_offset)
9304 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
9305 info->vrsave_save_offset);
9307 if (info->lr_save_offset)
9308 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
9310 if (info->cr_save_offset)
9311 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
9313 if (info->toc_save_offset)
9314 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
9316 if (info->varargs_save_offset)
9317 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
9319 if (info->total_size)
9320 fprintf (stderr, "\ttotal_size = %5d\n", info->total_size);
9322 if (info->varargs_size)
9323 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
9325 if (info->vars_size)
9326 fprintf (stderr, "\tvars_size = %5d\n", info->vars_size);
9328 if (info->parm_size)
9329 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
9331 if (info->fixed_size)
9332 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
9334 if (info->gp_size)
9335 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
9337 if (info->spe_gp_size)
9338 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
9340 if (info->fp_size)
9341 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
9343 if (info->altivec_size)
9344 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
9346 if (info->vrsave_size)
9347 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
9349 if (info->altivec_padding_size)
9350 fprintf (stderr, "\taltivec_padding_size= %5d\n",
9351 info->altivec_padding_size);
9353 if (info->spe_padding_size)
9354 fprintf (stderr, "\tspe_padding_size = %5d\n",
9355 info->spe_padding_size);
9357 if (info->lr_size)
9358 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
9360 if (info->cr_size)
9361 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
9363 if (info->toc_size)
9364 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
9366 if (info->save_size)
9367 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
9369 if (info->reg_size != 4)
9370 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
9372 fprintf (stderr, "\n");
/* Return the RTL expression for the return address COUNT frames up
   from the current function.  FRAME is the frame-chain rtx to walk
   through.  */
9376 rs6000_return_addr (count, frame)
9377 int count;
9378 rtx frame;
9380 /* Currently we don't optimize very well between prolog and body
9381 code and for PIC code the code can be actually quite bad, so
9382 don't try to be too clever here. */
9383 if (count != 0 || flag_pic != 0)
/* Force a full stack frame and fetch the saved LR from memory:
   *(*frame + RETURN_ADDRESS_OFFSET).  */
9385 cfun->machine->ra_needs_full_frame = 1;
9387 return
9388 gen_rtx_MEM
9389 (Pmode,
9390 memory_address
9391 (Pmode,
9392 plus_constant (copy_to_reg
9393 (gen_rtx_MEM (Pmode,
9394 memory_address (Pmode, frame))),
9395 RETURN_ADDRESS_OFFSET)));
/* Otherwise the incoming LR value is available as a pseudo.  */
9398 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
/* Return nonzero if the link register may be clobbered anywhere in
   the current function, i.e. whether LR really needs to be saved.  */
9401 static int
9402 rs6000_ra_ever_killed ()
9404 rtx top;
9406 #ifdef ASM_OUTPUT_MI_THUNK
/* Thunks are emitted as raw assembly and do not disturb LR.  */
9407 if (current_function_is_thunk)
9408 return 0;
9409 #endif
/* If no pseudo captured the incoming LR, or a full frame is needed
   anyway, just trust the global life information.  */
9410 if (!has_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM)
9411 || cfun->machine->ra_needs_full_frame)
9412 return regs_ever_live[LINK_REGISTER_REGNUM];
/* Otherwise scan the whole (topmost) insn stream for an explicit
   set of LR.  */
9414 push_topmost_sequence ();
9415 top = get_insns ();
9416 pop_topmost_sequence ();
9418 return reg_set_between_p (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
9419 top, NULL_RTX);
9422 /* Add a REG_MAYBE_DEAD note to the insn. */
9423 static void
9424 rs6000_maybe_dead (insn)
9425 rtx insn;
9427 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
9428 const0_rtx,
9429 REG_NOTES (insn));
9432 /* Emit instructions needed to load the TOC register.
9433 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
9434 a constant pool; or for SVR4 -fpic. */
9436 void
9437 rs6000_emit_load_toc_table (fromprolog)
9438 int fromprolog;
9440 rtx dest;
9441 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
9443 if (TARGET_ELF && DEFAULT_ABI != ABI_AIX)
/* SVR4 small PIC (-fpic): one load_toc insn, clobbering LR directly
   in the prologue or a fresh pseudo elsewhere.  */
9445 if (DEFAULT_ABI == ABI_V4 && flag_pic == 1)
9447 rtx temp = (fromprolog
9448 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
9449 : gen_reg_rtx (Pmode))
9450 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_pic_si (temp)));
9451 rs6000_maybe_dead (emit_move_insn (dest, temp));
/* SVR4 -fPIC: materialize the TOC base from a label pair.  */
9453 else if (flag_pic == 2)
9455 char buf[30];
9456 rtx tempLR = (fromprolog
9457 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
9458 : gen_reg_rtx (Pmode));
9459 rtx temp0 = (fromprolog
9460 ? gen_rtx_REG (Pmode, 0)
9461 : gen_reg_rtx (Pmode));
9462 rtx symF;
9464 /* possibly create the toc section */
9465 if (! toc_initialized)
9467 toc_section ();
9468 function_section (current_function_decl);
/* In the prologue, use the LCF/LCL internal label pair.  */
9471 if (fromprolog)
9473 rtx symL;
9475 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
9476 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9478 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
9479 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9481 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
9482 symF)));
9483 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
9484 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
9485 symL,
9486 symF)));
/* Outside the prologue (TOC reload paths), use a fresh LCG label
   plus the TOC symbol itself.  */
9488 else
9490 rtx tocsym;
9491 static int reload_toc_labelno = 0;
9493 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
9495 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
9496 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
9498 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1b (tempLR,
9499 symF,
9500 tocsym)));
9501 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
9502 rs6000_maybe_dead (emit_move_insn (temp0,
9503 gen_rtx_MEM (Pmode, dest)));
/* Combine the label address and the loaded offset.  */
9505 rs6000_maybe_dead (emit_insn (gen_addsi3 (dest, temp0, dest)));
9507 else if (flag_pic == 0 && TARGET_MINIMAL_TOC)
9509 /* This is for AIX code running in non-PIC ELF. */
9510 char buf[30];
9511 rtx realsym;
9512 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
9513 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
/* Absolute address of the TOC via high/low relocations.  */
9515 rs6000_maybe_dead (emit_insn (gen_elf_high (dest, realsym)));
9516 rs6000_maybe_dead (emit_insn (gen_elf_low (dest, dest, realsym)));
9518 else
9519 abort ();
/* AIX-style TOC: use the dedicated machine-description patterns.  */
9521 else
9523 if (TARGET_32BIT)
9524 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_si (dest)));
9525 else
9526 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_di (dest)));
/* Return the alias set used for TOC references, allocating it
   lazily on first use.  */

int
get_TOC_alias_set ()
{
  /* -1 means "not yet allocated".  */
  static int toc_alias_set = -1;

  if (toc_alias_set == -1)
    toc_alias_set = new_alias_set ();

  return toc_alias_set;
}
9539 /* This retuns nonzero if the current function uses the TOC. This is
9540 determined by the presence of (unspec ... 7), which is generated by
9541 the various load_toc_* patterns. */
9544 uses_TOC ()
9546 rtx insn;
9548 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9549 if (INSN_P (insn))
9551 rtx pat = PATTERN (insn);
9552 int i;
9554 if (GET_CODE (pat) == PARALLEL)
9555 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
9556 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == UNSPEC
9557 && XINT (XVECEXP (PATTERN (insn), 0, i), 1) == 7)
9558 return 1;
9560 return 0;
9564 create_TOC_reference (symbol)
9565 rtx symbol;
9567 return gen_rtx_PLUS (Pmode,
9568 gen_rtx_REG (Pmode, TOC_REGISTER),
9569 gen_rtx_CONST (Pmode,
9570 gen_rtx_MINUS (Pmode, symbol,
9571 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
9574 #if TARGET_AIX
9575 /* __throw will restore its own return address to be the same as the
9576 return address of the function that the throw is being made to.
9577 This is unfortunate, because we want to check the original
9578 return address to see if we need to restore the TOC.
9579 So we have to squirrel it away here.
9580 This is used only in compiling __throw and __rethrow.
9582 Most of this code should be removed by CSE. */
/* Pseudo holding the instruction found at the caller's return
   address; examined later by rs6000_emit_eh_toc_restore.  */
9583 static rtx insn_after_throw;
9585 /* This does the saving... */
9586 void
9587 rs6000_aix_emit_builtin_unwind_init ()
9589 rtx mem;
9590 rtx stack_top = gen_reg_rtx (Pmode);
9591 rtx opcode_addr = gen_reg_rtx (Pmode);
9593 insn_after_throw = gen_reg_rtx (SImode);
/* Follow the back chain one level: stack_top = *frame_pointer.  */
9595 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
9596 emit_move_insn (stack_top, mem);
/* The slot two words above the back chain holds the caller's return
   address (presumably the LR save slot -- see the TOC-restore comment
   below); load it, then fetch the 32-bit instruction found at that
   address into insn_after_throw.  */
9598 mem = gen_rtx_MEM (Pmode,
9599 gen_rtx_PLUS (Pmode, stack_top,
9600 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
9601 emit_move_insn (opcode_addr, mem);
9602 emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
9605 /* Emit insns to _restore_ the TOC register, at runtime (specifically
9606 in _eh.o). Only used on AIX.
9608 The idea is that on AIX, function calls look like this:
9609 bl somefunction-trampoline
9610 lwz r2,20(sp)
9612 and later,
9613 somefunction-trampoline:
9614 stw r2,20(sp)
9615 ... load function address in the count register ...
9616 bctr
9617 or like this, if the linker determines that this is not a cross-module call
9618 and so the TOC need not be restored:
9619 bl somefunction
9621 or like this, if the compiler could determine that this is not a
9622 cross-module call:
9623 bl somefunction
9624 now, the tricky bit here is that register 2 is saved and restored
9625 by the _linker_, so we can't readily generate debugging information
9626 for it. So we need to go back up the call chain looking at the
9627 insns at return addresses to see which calls saved the TOC register
9628 and so see where it gets restored from.
9630 Oh, and all this gets done in RTL inside the eh_epilogue pattern,
9631 just before the actual epilogue.
9633 On the bright side, this incurs no space or time overhead unless an
9634 exception is thrown, except for the extra code in libgcc.a.
9636 The parameter STACKSIZE is a register containing (at runtime)
9637 the amount to be popped off the stack in addition to the stack frame
9638 of this routine (which will be __throw or __rethrow, and so is
9639 guaranteed to have a stack frame). */
9641 void
9642 rs6000_emit_eh_toc_restore (stacksize)
9643 rtx stacksize;
9645 rtx top_of_stack;
9646 rtx bottom_of_stack = gen_reg_rtx (Pmode);
9647 rtx tocompare = gen_reg_rtx (SImode);
9648 rtx opcode = gen_reg_rtx (SImode);
9649 rtx opcode_addr = gen_reg_rtx (Pmode);
9650 rtx mem;
9651 rtx loop_start = gen_label_rtx ();
9652 rtx no_toc_restore_needed = gen_label_rtx ();
9653 rtx loop_exit = gen_label_rtx ();
/* Start of the frame walk: bottom_of_stack = *frame_pointer.  */
9655 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
9656 set_mem_alias_set (mem, rs6000_sr_alias_set);
9657 emit_move_insn (bottom_of_stack, mem);
/* End of the walk: STACKSIZE bytes above the starting frame.  */
9659 top_of_stack = expand_binop (Pmode, add_optab,
9660 bottom_of_stack, stacksize,
9661 NULL_RTX, 1, OPTAB_WIDEN);
/* Encoding of the TOC-restore instruction that follows a
   cross-module call: "lwz r2,20(r1)" (0x80410014) for 32-bit,
   "ld r2,40(r1)" (0xE8410028) for 64-bit.  */
9663 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
9664 : 0xE8410028, SImode));
/* rs6000_aix_emit_builtin_unwind_init must have run first.  */
9666 if (insn_after_throw == NULL_RTX)
9667 abort ();
9668 emit_move_insn (opcode, insn_after_throw);
9670 emit_note (NULL, NOTE_INSN_LOOP_BEG);
9671 emit_label (loop_start);
/* If the instruction after this frame's call is not the TOC-restore
   opcode, the call did not save the TOC; skip the reload.  */
9673 do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
9674 SImode, NULL_RTX, NULL_RTX,
9675 no_toc_restore_needed);
/* Reload r2 (the TOC register) from this frame's TOC save slot,
   five words above the back chain (20(sp) / 40(sp)).  */
9677 mem = gen_rtx_MEM (Pmode,
9678 gen_rtx_PLUS (Pmode, bottom_of_stack,
9679 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
9680 emit_move_insn (gen_rtx_REG (Pmode, 2), mem);
9682 emit_label (no_toc_restore_needed);
/* Stop once the walk reaches the frame STACKSIZE bytes up.  */
9683 do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
9684 Pmode, NULL_RTX, NULL_RTX,
9685 loop_exit);
/* Step one frame up the back chain ...  */
9687 mem = gen_rtx_MEM (Pmode, bottom_of_stack);
9688 set_mem_alias_set (mem, rs6000_sr_alias_set);
9689 emit_move_insn (bottom_of_stack, mem);
/* ... and fetch the instruction at that frame's return address.  */
9691 mem = gen_rtx_MEM (Pmode,
9692 gen_rtx_PLUS (Pmode, bottom_of_stack,
9693 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
9694 emit_move_insn (opcode_addr, mem);
9695 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
9697 emit_note (NULL, NOTE_INSN_LOOP_CONT);
9698 emit_jump (loop_start);
9699 emit_note (NULL, NOTE_INSN_LOOP_END);
9700 emit_label (loop_exit);
9702 #endif /* TARGET_AIX */
9704 /* This ties together stack memory (MEM with an alias set of
9705 rs6000_sr_alias_set) and the change to the stack pointer. */
9707 static void
9708 rs6000_emit_stack_tie ()
9710 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
9712 set_mem_alias_set (mem, rs6000_sr_alias_set);
9713 emit_insn (gen_stack_tie (mem));
9716 /* Emit the correct code for allocating stack space, as insns.
9717 If COPY_R12, make sure a copy of the old frame is left in r12.
9718 The generated code may use hard register 0 as a temporary. */
9720 static void
9721 rs6000_emit_allocate_stack (size, copy_r12)
9722 HOST_WIDE_INT size;
9723 int copy_r12;
9725 rtx insn;
9726 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
9727 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
9728 rtx todec = GEN_INT (-size);
/* -fstack-limit-*: trap before growing the stack past the limit.  */
9730 if (current_function_limit_stack)
/* Limit held in a register: tmp = limit + size; trap if sp < tmp.  */
9732 if (REG_P (stack_limit_rtx)
9733 && REGNO (stack_limit_rtx) > 1
9734 && REGNO (stack_limit_rtx) <= 31)
9736 emit_insn (Pmode == SImode
9737 ? gen_addsi3 (tmp_reg,
9738 stack_limit_rtx,
9739 GEN_INT (size))
9740 : gen_adddi3 (tmp_reg,
9741 stack_limit_rtx,
9742 GEN_INT (size)));
9744 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
9745 const0_rtx));
/* Symbolic limit (32-bit SVR4 only): materialize limit+size with
   high/low relocations, then trap the same way.  */
9747 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
9748 && TARGET_32BIT
9749 && DEFAULT_ABI == ABI_V4)
9751 rtx toload = gen_rtx_CONST (VOIDmode,
9752 gen_rtx_PLUS (Pmode,
9753 stack_limit_rtx,
9754 GEN_INT (size)));
9756 emit_insn (gen_elf_high (tmp_reg, toload));
9757 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
9758 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
9759 const0_rtx));
9761 else
9762 warning ("stack limit expression is not supported");
/* Without store-with-update we also need the old SP in r12 for the
   explicit back-chain store below.  */
9765 if (copy_r12 || ! TARGET_UPDATE)
9766 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
/* stwu/stdu: decrement SP and store the back chain in one insn.  */
9768 if (TARGET_UPDATE)
/* Decrements too big for a 16-bit immediate go through r0.  */
9770 if (size > 32767)
9772 /* Need a note here so that try_split doesn't get confused. */
9773 if (get_last_insn() == NULL_RTX)
9774 emit_note (0, NOTE_INSN_DELETED);
9775 insn = emit_move_insn (tmp_reg, todec);
9776 try_split (PATTERN (insn), insn, 0);
9777 todec = tmp_reg;
9780 if (Pmode == SImode)
9781 insn = emit_insn (gen_movsi_update (stack_reg, stack_reg,
9782 todec, stack_reg));
9783 else
9784 insn = emit_insn (gen_movdi_update (stack_reg, stack_reg,
9785 todec, stack_reg));
/* No update forms: adjust SP, then store the saved old SP (r12)
   as the new back chain.  */
9787 else
9789 if (Pmode == SImode)
9790 insn = emit_insn (gen_addsi3 (stack_reg, stack_reg, todec));
9791 else
9792 insn = emit_insn (gen_adddi3 (stack_reg, stack_reg, todec));
9793 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
9794 gen_rtx_REG (Pmode, 12));
/* Describe the SP adjustment for dwarf2 unwind info.  */
9797 RTX_FRAME_RELATED_P (insn) = 1;
9798 REG_NOTES (insn) =
9799 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
9800 gen_rtx_SET (VOIDmode, stack_reg,
9801 gen_rtx_PLUS (Pmode, stack_reg,
9802 GEN_INT (-size))),
9803 REG_NOTES (insn));
9806 /* Add a RTX_FRAME_RELATED note so that dwarf2out_frame_debug_expr
9807 knows that:
9809 (mem (plus (blah) (regXX)))
9811 is really:
9813 (mem (plus (blah) (const VALUE_OF_REGXX))). */
/* INSN is the (AltiVec save) insn to annotate; REG is the register
   used in its address; VAL is the compile-time constant value REG
   holds at that point.  */
9815 static void
9816 altivec_frame_fixup (insn, reg, val)
9817 rtx insn, reg;
9818 HOST_WIDE_INT val;
9820 rtx real;
/* Copy the pattern and substitute the known constant for REG, then
   attach the result as the frame-related expression.  */
9822 real = copy_rtx (PATTERN (insn));
9824 real = replace_rtx (real, reg, GEN_INT (val));
9826 RTX_FRAME_RELATED_P (insn) = 1;
9827 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
9828 real,
9829 REG_NOTES (insn));
9832 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
9833 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
9834 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
9835 deduce these equivalences by itself so it wasn't necessary to hold
9836 its hand so much. */
9838 static void
9839 rs6000_frame_related (insn, reg, val, reg2, rreg)
9840 rtx insn;
9841 rtx reg;
9842 HOST_WIDE_INT val;
9843 rtx reg2;
9844 rtx rreg;
9846 rtx real, temp;
9848 /* copy_rtx will not make unique copies of registers, so we need to
9849 ensure we don't have unwanted sharing here. */
9850 if (reg == reg2)
9851 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
9853 if (reg == rreg)
9854 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
9856 real = copy_rtx (PATTERN (insn));
9858 if (reg2 != NULL_RTX)
9859 real = replace_rtx (real, reg2, rreg);
/* Rewrite REG as stack-pointer + VAL so the note describes the
   access in terms of the canonical frame address.  */
9861 real = replace_rtx (real, reg,
9862 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
9863 STACK_POINTER_REGNUM),
9864 GEN_INT (val)));
9866 /* We expect that 'real' is either a SET or a PARALLEL containing
9867 SETs (and possibly other stuff). In a PARALLEL, all the SETs
9868 are important so they all have to be marked RTX_FRAME_RELATED_P. */
9870 if (GET_CODE (real) == SET)
9872 rtx set = real;
/* Fold the substituted constants so the note holds simplified
   addresses.  */
9874 temp = simplify_rtx (SET_SRC (set));
9875 if (temp)
9876 SET_SRC (set) = temp;
9877 temp = simplify_rtx (SET_DEST (set));
9878 if (temp)
9879 SET_DEST (set) = temp;
9880 if (GET_CODE (SET_DEST (set)) == MEM)
9882 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
9883 if (temp)
9884 XEXP (SET_DEST (set), 0) = temp;
9887 else if (GET_CODE (real) == PARALLEL)
9889 int i;
9890 for (i = 0; i < XVECLEN (real, 0); i++)
9891 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
9893 rtx set = XVECEXP (real, 0, i);
9895 temp = simplify_rtx (SET_SRC (set));
9896 if (temp)
9897 SET_SRC (set) = temp;
9898 temp = simplify_rtx (SET_DEST (set));
9899 if (temp)
9900 SET_DEST (set) = temp;
9901 if (GET_CODE (SET_DEST (set)) == MEM)
9903 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
9904 if (temp)
9905 XEXP (SET_DEST (set), 0) = temp;
9907 RTX_FRAME_RELATED_P (set) = 1;
9910 else
9911 abort ();
9913 RTX_FRAME_RELATED_P (insn) = 1;
9914 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
9915 real,
9916 REG_NOTES (insn));
9919 /* Returns an insn that has a vrsave set operation with the
9920 appropriate CLOBBERs. */
/* REG holds the value to be placed in VRSAVE; INFO describes the
   current frame; EPILOGUEP is nonzero when this set restores VRSAVE
   on function exit.  */
9922 static rtx
9923 generate_set_vrsave (reg, info, epiloguep)
9924 rtx reg;
9925 rs6000_stack_t *info;
9926 int epiloguep;
9928 int nclobs, i;
9929 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
9930 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
/* The volatile unspec (number 30) performs the actual VRSAVE set;
   the number presumably matches the corresponding machine-description
   pattern -- confirm against rs6000.md/altivec.md.  */
9932 clobs[0]
9933 = gen_rtx_SET (VOIDmode,
9934 vrsave,
9935 gen_rtx_UNSPEC_VOLATILE (SImode,
9936 gen_rtvec (2, reg, vrsave),
9937 30));
9939 nclobs = 1;
9941 /* We need to clobber the registers in the mask so the scheduler
9942 does not move sets to VRSAVE before sets of AltiVec registers.
9944 However, if the function receives nonlocal gotos, reload will set
9945 all call saved registers live. We will end up with:
9947 (set (reg 999) (mem))
9948 (parallel [ (set (reg vrsave) (unspec blah))
9949 (clobber (reg 999))])
9951 The clobber will cause the store into reg 999 to be dead, and
9952 flow will attempt to delete an epilogue insn. In this case, we
9953 need an unspec use/set of the register. */
9955 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
9956 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
9958 if (!epiloguep || call_used_regs [i])
9959 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
9960 gen_rtx_REG (V4SImode, i));
9961 else
9963 rtx reg = gen_rtx_REG (V4SImode, i);
/* Unspec 27: self-referential set that keeps the register live
   (the nonlocal-goto case described above).  */
9965 clobs[nclobs++]
9966 = gen_rtx_SET (VOIDmode,
9967 reg,
9968 gen_rtx_UNSPEC (V4SImode,
9969 gen_rtvec (1, reg), 27));
/* Bundle the set and all clobbers into one PARALLEL insn.  */
9973 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
9975 for (i = 0; i < nclobs; ++i)
9976 XVECEXP (insn, 0, i) = clobs[i];
9978 return insn;
9981 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
9982 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
/* FRAME_REG is the base register actually used for the store;
   FRAME_PTR and TOTAL_SIZE are passed through to
   rs6000_frame_related so the unwind note is expressed relative to
   the canonical frame.  */
9984 static void
9985 emit_frame_save (frame_reg, frame_ptr, mode, regno, offset, total_size)
9986 rtx frame_reg;
9987 rtx frame_ptr;
9988 enum machine_mode mode;
9989 unsigned int regno;
9990 int offset;
9991 int total_size;
9993 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
9994 rtx replacea, replaceb;
9996 int_rtx = GEN_INT (offset);
9998 /* Some cases that need register indexed addressing. */
9999 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
10000 || (TARGET_SPE_ABI
10001 && SPE_VECTOR_MODE (mode)
10002 && !SPE_CONST_OFFSET_OK (offset)))
10004 /* Whomever calls us must make sure r11 is available in the
10005 flow path of instructions in the prologue. */
10006 offset_rtx = gen_rtx_REG (Pmode, 11);
10007 emit_move_insn (offset_rtx, int_rtx);
/* Have the note show the constant offset in place of r11.  */
10009 replacea = offset_rtx;
10010 replaceb = int_rtx;
10012 else
10014 offset_rtx = int_rtx;
10015 replacea = NULL_RTX;
10016 replaceb = NULL_RTX;
/* Emit the store and annotate it for dwarf2 unwind info.  */
10019 reg = gen_rtx_REG (mode, regno);
10020 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
10021 mem = gen_rtx_MEM (mode, addr);
10022 set_mem_alias_set (mem, rs6000_sr_alias_set);
10024 insn = emit_move_insn (mem, reg);
10026 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
10029 /* Emit an offset memory reference suitable for a frame store, while
10030 converting to a valid addressing mode. */
10032 static rtx
10033 gen_frame_mem_offset (mode, reg, offset)
10034 enum machine_mode mode;
10035 rtx reg;
10036 int offset;
10038 rtx int_rtx, offset_rtx;
10040 int_rtx = GEN_INT (offset);
10042 if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
10044 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
10045 emit_move_insn (offset_rtx, int_rtx);
10047 else
10048 offset_rtx = int_rtx;
10050 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
10053 /* Emit function prologue as insns. */
/* Emit the RTL for the current function's prologue: allocate the stack
   frame and store every register rs6000_stack_info () says must be
   saved (AltiVec regs, VRSAVE, FPRs, GPRs, EH data regs, LR, CR), then
   set up the frame pointer and the TOC/PIC register when needed.
   Takes no arguments and returns nothing; everything is derived from
   the rs6000_stack_t frame layout and the ABI in effect.
   NOTE(review): this text came from a line-numbered blob viewer; the
   leading decimal on each line and the missing brace lines are
   extraction artifacts, not part of the code.  */
10055 void
10056 rs6000_emit_prologue ()
10058 rs6000_stack_t *info = rs6000_stack_info ();
10059 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
10060 int reg_size = TARGET_POWERPC64 ? 8 : 4;
10061 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
10062 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
10063 rtx frame_reg_rtx = sp_reg_rtx;
10064 rtx cr_save_rtx = NULL;
10065 rtx insn;
10066 int saving_FPRs_inline;
10067 int using_store_multiple;
10068 HOST_WIDE_INT sp_offset = 0;
/* Under the SPE ABI, GPRs are saved as 64-bit (V2SImode) quantities.  */
10070 if (TARGET_SPE_ABI)
10072 reg_mode = V2SImode;
10073 reg_size = 8;
/* Store-multiple needs 32-bit non-SPE regs and at least two GPRs to save.  */
10076 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
10077 && !TARGET_SPE_ABI
10078 && info->first_gp_reg_save < 31);
10079 saving_FPRs_inline = (info->first_fp_reg_save == 64
10080 || FP_SAVE_INLINE (info->first_fp_reg_save));
10082 /* For V.4, update stack before we do any saving and set back pointer. */
10083 if (info->push_p && DEFAULT_ABI == ABI_V4)
10085 if (info->total_size < 32767)
10086 sp_offset = info->total_size;
10087 else
10088 frame_reg_rtx = frame_ptr_rtx;
10089 rs6000_emit_allocate_stack (info->total_size,
10090 (frame_reg_rtx != sp_reg_rtx
10091 && (info->cr_save_p
10092 || info->lr_save_p
10093 || info->first_fp_reg_save < 64
10094 || info->first_gp_reg_save < 32
10095 )));
10096 if (frame_reg_rtx != sp_reg_rtx)
10097 rs6000_emit_stack_tie ();
10100 /* Save AltiVec registers if needed. */
10101 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
10103 int i;
10105 /* There should be a non inline version of this, for when we
10106 are saving lots of vector registers. */
10107 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
10108 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
10110 rtx areg, savereg, mem;
10111 int offset;
10113 offset = info->altivec_save_offset + sp_offset
10114 + 16 * (i - info->first_altivec_reg_save);
10116 savereg = gen_rtx_REG (V4SImode, i);
/* r0 is used as the index register for the [reg+reg] AltiVec store.  */
10118 areg = gen_rtx_REG (Pmode, 0);
10119 emit_move_insn (areg, GEN_INT (offset));
10121 /* AltiVec addressing mode is [reg+reg]. */
10122 mem = gen_rtx_MEM (V4SImode,
10123 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
10125 set_mem_alias_set (mem, rs6000_sr_alias_set);
10127 insn = emit_move_insn (mem, savereg);
10129 altivec_frame_fixup (insn, areg, offset);
10133 /* VRSAVE is a bit vector representing which AltiVec registers
10134 are used. The OS uses this to determine which vector
10135 registers to save on a context switch. We need to save
10136 VRSAVE on the stack frame, add whatever AltiVec registers we
10137 used in this function, and do the corresponding magic in the
10138 epilogue. */
10140 if (TARGET_ALTIVEC && info->vrsave_mask != 0)
10142 rtx reg, mem, vrsave;
10143 int offset;
10145 /* Get VRSAVE onto a GPR. */
10146 reg = gen_rtx_REG (SImode, 12);
10147 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
10148 if (TARGET_MACHO)
10149 emit_insn (gen_get_vrsave_internal (reg));
10150 else
10151 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
10153 /* Save VRSAVE. */
10154 offset = info->vrsave_save_offset + sp_offset;
/* NOTE(review): the left-hand side of the next assignment ("mem") was
   lost in extraction; the original assigns the MEM below to "mem",
   as witnessed by the set_mem_alias_set call that follows.  */
10156 = gen_rtx_MEM (SImode,
10157 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
10158 set_mem_alias_set (mem, rs6000_sr_alias_set);
10159 insn = emit_move_insn (mem, reg);
10161 /* Include the registers in the mask. */
10162 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
10164 insn = emit_insn (generate_set_vrsave (reg, info, 0));
10167 /* If we use the link register, get it into r0. */
10168 if (info->lr_save_p)
10169 emit_move_insn (gen_rtx_REG (Pmode, 0),
10170 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM))
10172 /* If we need to save CR, put it into r12. */
10173 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
10175 cr_save_rtx = gen_rtx_REG (SImode, 12);
10176 emit_insn (gen_movesi_from_cr (cr_save_rtx));
10179 /* Do any required saving of fpr's. If only one or two to save, do
10180 it ourselves. Otherwise, call function. */
10181 if (saving_FPRs_inline)
10183 int i;
10184 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
10185 if ((regs_ever_live[info->first_fp_reg_save+i]
10186 && ! call_used_regs[info->first_fp_reg_save+i]))
10187 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
10188 info->first_fp_reg_save + i,
10189 info->fp_save_offset + sp_offset + 8 * i,
10190 info->total_size);
/* Out-of-line FPR save: one PARALLEL holding all the stores, a use of
   the named _savefN helper symbol, and a clobber of LR (the helper is
   reached via a call).  */
10192 else if (info->first_fp_reg_save != 64)
10194 int i;
10195 char rname[30];
10196 const char *alloc_rname;
10197 rtvec p;
10198 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
10200 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
10201 gen_rtx_REG (Pmode,
10202 LINK_REGISTER_REGNUM));
10203 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
10204 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
10205 alloc_rname = ggc_strdup (rname);
10206 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
10207 gen_rtx_SYMBOL_REF (Pmode,
10208 alloc_rname));
10209 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
10211 rtx addr, reg, mem;
10212 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
10213 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10214 GEN_INT (info->fp_save_offset
10215 + sp_offset + 8*i));
10216 mem = gen_rtx_MEM (DFmode, addr);
10217 set_mem_alias_set (mem, rs6000_sr_alias_set);
10219 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
10221 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
10222 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10223 NULL_RTX, NULL_RTX);
10226 /* Save GPRs. This is done as a PARALLEL if we are using
10227 the store-multiple instructions. */
10228 if (using_store_multiple)
10230 rtvec p;
10231 int i;
10232 p = rtvec_alloc (32 - info->first_gp_reg_save);
10233 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
10235 rtx addr, reg, mem;
10236 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
10237 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10238 GEN_INT (info->gp_save_offset
10239 + sp_offset
10240 + reg_size * i));
10241 mem = gen_rtx_MEM (reg_mode, addr);
10242 set_mem_alias_set (mem, rs6000_sr_alias_set);
10244 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
10246 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
10247 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10248 NULL_RTX, NULL_RTX);
/* Otherwise save each live call-saved GPR (plus the PIC register when
   the ABI needs it preserved) with an individual store.  */
10250 else
10252 int i;
10253 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
10254 if ((regs_ever_live[info->first_gp_reg_save+i]
10255 && ! call_used_regs[info->first_gp_reg_save+i])
10256 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
10257 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
10258 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
10260 rtx addr, reg, mem;
10261 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
10263 if (TARGET_SPE_ABI)
10265 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
10266 rtx b;
10268 if (!SPE_CONST_OFFSET_OK (offset))
10270 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
10271 emit_move_insn (b, GEN_INT (offset));
10273 else
10274 b = GEN_INT (offset);
10276 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
10277 mem = gen_rtx_MEM (V2SImode, addr);
10278 set_mem_alias_set (mem, rs6000_sr_alias_set);
10279 insn = emit_move_insn (mem, reg);
/* When a scratch register had to hold the offset, tell the unwinder to
   substitute the constant back in.  */
10281 if (GET_CODE (b) == CONST_INT)
10282 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10283 NULL_RTX, NULL_RTX);
10284 else
10285 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10286 b, GEN_INT (offset));
10288 else
10290 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10291 GEN_INT (info->gp_save_offset
10292 + sp_offset
10293 + reg_size * i));
10294 mem = gen_rtx_MEM (reg_mode, addr);
10295 set_mem_alias_set (mem, rs6000_sr_alias_set);
10297 insn = emit_move_insn (mem, reg);
10298 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10299 NULL_RTX, NULL_RTX);
10304 /* ??? There's no need to emit actual instructions here, but it's the
10305 easiest way to get the frame unwind information emitted. */
10306 if (current_function_calls_eh_return)
10308 unsigned int i, regno;
10310 for (i = 0; ; ++i)
10312 regno = EH_RETURN_DATA_REGNO (i);
10313 if (regno == INVALID_REGNUM)
10314 break;
10316 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
10317 info->ehrd_offset + sp_offset
10318 + reg_size * (int) i,
10319 info->total_size);
10323 /* Save lr if we used it. */
10324 if (info->lr_save_p)
10326 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10327 GEN_INT (info->lr_save_offset + sp_offset));
10328 rtx reg = gen_rtx_REG (Pmode, 0);
10329 rtx mem = gen_rtx_MEM (Pmode, addr);
10330 /* This should not be of rs6000_sr_alias_set, because of
10331 __builtin_return_address. */
10333 insn = emit_move_insn (mem, reg);
10334 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10335 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
10338 /* Save CR if we use any that must be preserved. */
10339 if (info->cr_save_p)
10341 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10342 GEN_INT (info->cr_save_offset + sp_offset));
10343 rtx mem = gen_rtx_MEM (SImode, addr);
10345 set_mem_alias_set (mem, rs6000_sr_alias_set);
10347 /* If r12 was used to hold the original sp, copy cr into r0 now
10348 that it's free. */
10349 if (REGNO (frame_reg_rtx) == 12)
10351 cr_save_rtx = gen_rtx_REG (SImode, 0);
10352 emit_insn (gen_movesi_from_cr (cr_save_rtx));
10354 insn = emit_move_insn (mem, cr_save_rtx);
10356 /* Now, there's no way that dwarf2out_frame_debug_expr is going
10357 to understand '(unspec:SI [(reg:CC 68) ...] 19)'. But that's
10358 OK. All we have to do is specify that _one_ condition code
10359 register is saved in this stack slot. The thrower's epilogue
10360 will then restore all the call-saved registers.
10361 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
10362 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
10363 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
10366 /* Update stack and set back pointer unless this is V.4,
10367 for which it was done previously. */
10368 if (info->push_p && DEFAULT_ABI != ABI_V4)
10369 rs6000_emit_allocate_stack (info->total_size, FALSE);
10371 /* Set frame pointer, if needed. */
10372 if (frame_pointer_needed)
10374 insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
10375 sp_reg_rtx);
10376 RTX_FRAME_RELATED_P (insn) = 1;
10379 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
10380 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
10381 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
10382 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
10384 /* If emit_load_toc_table will use the link register, we need to save
10385 it. We use R11 for this purpose because emit_load_toc_table
10386 can use register 0. This allows us to use a plain 'blr' to return
10387 from the procedure more often. */
10388 int save_LR_around_toc_setup = (TARGET_ELF && flag_pic != 0
10389 && ! info->lr_save_p
10390 && EXIT_BLOCK_PTR->pred != NULL);
10391 if (save_LR_around_toc_setup)
10392 emit_move_insn (gen_rtx_REG (Pmode, 11),
10393 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
10395 rs6000_emit_load_toc_table (TRUE);
10397 if (save_LR_around_toc_setup)
10398 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
10399 gen_rtx_REG (Pmode, 11));
/* Darwin PIC: load the picbase into LR, then copy it into the PIC
   offset table register; both insns may later be found dead.  */
10402 if (DEFAULT_ABI == ABI_DARWIN
10403 && flag_pic && current_function_uses_pic_offset_table)
10405 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
10407 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest)));
10409 rs6000_maybe_dead (
10410 emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
10411 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
10415 /* Write function prologue. */
10417 static void
10418 rs6000_output_function_prologue (file, size)
10419 FILE *file;
10420 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
10422 rs6000_stack_t *info = rs6000_stack_info ();
10424 if (TARGET_DEBUG_STACK)
10425 debug_stack_info (info);
10427 /* Write .extern for any function we will call to save and restore
10428 fp values. */
10429 if (info->first_fp_reg_save < 64
10430 && !FP_SAVE_INLINE (info->first_fp_reg_save))
10431 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
10432 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
10433 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
10434 RESTORE_FP_SUFFIX);
10436 /* Write .extern for AIX common mode routines, if needed. */
10437 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
10439 fputs ("\t.extern __mulh\n", file);
10440 fputs ("\t.extern __mull\n", file);
10441 fputs ("\t.extern __divss\n", file);
10442 fputs ("\t.extern __divus\n", file);
10443 fputs ("\t.extern __quoss\n", file);
10444 fputs ("\t.extern __quous\n", file);
10445 common_mode_defined = 1;
10448 if (! HAVE_prologue)
10450 start_sequence ();
10452 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
10453 the "toplevel" insn chain. */
10454 emit_note (0, NOTE_INSN_DELETED);
10455 rs6000_emit_prologue ();
10456 emit_note (0, NOTE_INSN_DELETED);
10458 /* Expand INSN_ADDRESSES so final() doesn't crash. */
10460 rtx insn;
10461 unsigned addr = 0;
10462 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
10464 INSN_ADDRESSES_NEW (insn, addr);
10465 addr += 4;
10469 if (TARGET_DEBUG_STACK)
10470 debug_rtx_list (get_insns (), 100);
10471 final (get_insns (), file, FALSE, FALSE);
10472 end_sequence ();
10475 rs6000_pic_labelno++;
10478 /* Emit function epilogue as insns.
10480 At present, dwarf2out_frame_debug_expr doesn't understand
10481 register restores, so we don't bother setting RTX_FRAME_RELATED_P
10482 anywhere in the epilogue. Most of the insns below would in any case
10483 need special notes to explain where r11 is in relation to the stack. */
/* Emit the RTL for the current function's epilogue: recover the stack
   pointer, reload every register the prologue saved (AltiVec regs,
   VRSAVE, LR, CR, EH data regs, GPRs, FPRs), and, unless SIBCALL is
   nonzero, emit the return -- possibly as a branch to the out-of-line
   FP-restore routine, which returns to our caller.  SIBCALL nonzero
   means a sibling call follows: no return is emitted and the FPRs are
   always restored inline.
   NOTE(review): this text came from a line-numbered blob viewer; the
   leading decimal on each line and the missing brace lines are
   extraction artifacts, not part of the code.  */
10485 void
10486 rs6000_emit_epilogue (sibcall)
10487 int sibcall;
10489 rs6000_stack_t *info;
10490 int restoring_FPRs_inline;
10491 int using_load_multiple;
10492 int using_mfcr_multiple;
10493 int use_backchain_to_restore_sp;
10494 int sp_offset = 0;
10495 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
10496 rtx frame_reg_rtx = sp_reg_rtx;
10497 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
10498 int reg_size = TARGET_POWERPC64 ? 8 : 4;
10499 int i;
/* Under the SPE ABI, GPRs are restored as 64-bit (V2SImode) quantities.  */
10501 if (TARGET_SPE_ABI)
10503 reg_mode = V2SImode;
10504 reg_size = 8;
10507 info = rs6000_stack_info ();
10508 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
10509 && !TARGET_SPE_ABI
10510 && info->first_gp_reg_save < 31);
10511 restoring_FPRs_inline = (sibcall
10512 || current_function_calls_eh_return
10513 || info->first_fp_reg_save == 64
10514 || FP_SAVE_INLINE (info->first_fp_reg_save));
10515 use_backchain_to_restore_sp = (frame_pointer_needed
10516 || current_function_calls_alloca
10517 || info->total_size > 32767);
/* Restore several CR fields with one multi-field move on these cores,
   or when optimizing for size; otherwise one move per field.  */
10518 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
10519 || rs6000_cpu == PROCESSOR_PPC603
10520 || rs6000_cpu == PROCESSOR_PPC750
10521 || optimize_size);
10523 /* If we have a frame pointer, a call to alloca, or a large stack
10524 frame, restore the old stack pointer using the backchain. Otherwise,
10525 we know what size to update it with. */
10526 if (use_backchain_to_restore_sp)
10528 /* Under V.4, don't reset the stack pointer until after we're done
10529 loading the saved registers. */
10530 if (DEFAULT_ABI == ABI_V4)
10531 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
10533 emit_move_insn (frame_reg_rtx,
10534 gen_rtx_MEM (Pmode, sp_reg_rtx));
10537 else if (info->push_p)
10539 if (DEFAULT_ABI == ABI_V4)
10540 sp_offset = info->total_size;
10541 else
10543 emit_insn (TARGET_32BIT
10544 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
10545 GEN_INT (info->total_size))
10546 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
10547 GEN_INT (info->total_size)));
10551 /* Restore AltiVec registers if needed. */
10552 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
10554 int i;
10556 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
10557 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
10559 rtx addr, areg, mem;
/* r0 is used as the index register for the [reg+reg] AltiVec load.  */
10561 areg = gen_rtx_REG (Pmode, 0);
10562 emit_move_insn
10563 (areg, GEN_INT (info->altivec_save_offset
10564 + sp_offset
10565 + 16 * (i - info->first_altivec_reg_save)));
10567 /* AltiVec addressing mode is [reg+reg]. */
10568 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
10569 mem = gen_rtx_MEM (V4SImode, addr);
10570 set_mem_alias_set (mem, rs6000_sr_alias_set);
10572 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
10576 /* Restore VRSAVE if needed. */
10577 if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
10579 rtx addr, mem, reg;
10581 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10582 GEN_INT (info->vrsave_save_offset + sp_offset));
10583 mem = gen_rtx_MEM (SImode, addr);
10584 set_mem_alias_set (mem, rs6000_sr_alias_set);
10585 reg = gen_rtx_REG (SImode, 12);
10586 emit_move_insn (reg, mem);
/* Epiloguep == 1: restored call-saved AltiVec regs must not look dead.  */
10588 emit_insn (generate_set_vrsave (reg, info, 1));
10591 /* Get the old lr if we saved it. */
10592 if (info->lr_save_p)
10594 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
10595 info->lr_save_offset + sp_offset);
10597 set_mem_alias_set (mem, rs6000_sr_alias_set);
10599 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
10602 /* Get the old cr if we saved it. */
10603 if (info->cr_save_p)
10605 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10606 GEN_INT (info->cr_save_offset + sp_offset));
10607 rtx mem = gen_rtx_MEM (SImode, addr);
10609 set_mem_alias_set (mem, rs6000_sr_alias_set);
10611 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
10614 /* Set LR here to try to overlap restores below. */
10615 if (info->lr_save_p)
10616 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
10617 gen_rtx_REG (Pmode, 0));
10619 /* Load exception handler data registers, if needed. */
10620 if (current_function_calls_eh_return)
10622 unsigned int i, regno;
10624 for (i = 0; ; ++i)
10626 rtx mem;
10628 regno = EH_RETURN_DATA_REGNO (i);
10629 if (regno == INVALID_REGNUM)
10630 break;
10632 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
10633 info->ehrd_offset + sp_offset
10634 + reg_size * (int) i);
10635 set_mem_alias_set (mem, rs6000_sr_alias_set);
10637 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
10641 /* Restore GPRs. This is done as a PARALLEL if we are using
10642 the load-multiple instructions. */
10643 if (using_load_multiple)
10645 rtvec p;
10646 p = rtvec_alloc (32 - info->first_gp_reg_save);
10647 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
10649 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10650 GEN_INT (info->gp_save_offset
10651 + sp_offset
10652 + reg_size * i));
10653 rtx mem = gen_rtx_MEM (reg_mode, addr);
10655 set_mem_alias_set (mem, rs6000_sr_alias_set);
10657 RTVEC_ELT (p, i) =
10658 gen_rtx_SET (VOIDmode,
10659 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
10660 mem);
10662 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
/* Otherwise reload each live call-saved GPR (plus the PIC register when
   the ABI required it saved) with an individual load.  */
10664 else
10665 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
10666 if ((regs_ever_live[info->first_gp_reg_save+i]
10667 && ! call_used_regs[info->first_gp_reg_save+i])
10668 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
10669 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
10670 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
10672 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10673 GEN_INT (info->gp_save_offset
10674 + sp_offset
10675 + reg_size * i));
10676 rtx mem = gen_rtx_MEM (reg_mode, addr);
10678 /* Restore 64-bit quantities for SPE. */
10679 if (TARGET_SPE_ABI)
10681 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
10682 rtx b;
10684 if (!SPE_CONST_OFFSET_OK (offset))
10686 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
10687 emit_move_insn (b, GEN_INT (offset));
10689 else
10690 b = GEN_INT (offset);
10692 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
10693 mem = gen_rtx_MEM (V2SImode, addr);
10696 set_mem_alias_set (mem, rs6000_sr_alias_set);
10698 emit_move_insn (gen_rtx_REG (reg_mode,
10699 info->first_gp_reg_save + i), mem);
10702 /* Restore fpr's if we need to do it without calling a function. */
10703 if (restoring_FPRs_inline)
10704 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
10705 if ((regs_ever_live[info->first_fp_reg_save+i]
10706 && ! call_used_regs[info->first_fp_reg_save+i]))
10708 rtx addr, mem;
10709 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
10710 GEN_INT (info->fp_save_offset
10711 + sp_offset
10712 + 8 * i));
10713 mem = gen_rtx_MEM (DFmode, addr);
10714 set_mem_alias_set (mem, rs6000_sr_alias_set);
10716 emit_move_insn (gen_rtx_REG (DFmode,
10717 info->first_fp_reg_save + i),
10718 mem);
10721 /* If we saved cr, restore it here. Just those that were used. */
10722 if (info->cr_save_p)
10724 rtx r12_rtx = gen_rtx_REG (SImode, 12);
10725 int count = 0;
10727 if (using_mfcr_multiple)
10729 for (i = 0; i < 8; i++)
10730 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
10731 count++;
10732 if (count == 0)
10733 abort ();
/* Restore all live CR fields from r12 in one PARALLEL of unspec-20
   sets; each element carries its field mask (1 << (7-i)).  */
10736 if (using_mfcr_multiple && count > 1)
10738 rtvec p;
10739 int ndx;
10741 p = rtvec_alloc (count);
10743 ndx = 0;
10744 for (i = 0; i < 8; i++)
10745 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
10747 rtvec r = rtvec_alloc (2);
10748 RTVEC_ELT (r, 0) = r12_rtx;
10749 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
10750 RTVEC_ELT (p, ndx) =
10751 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
10752 gen_rtx_UNSPEC (CCmode, r, 20));
10753 ndx++;
10755 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
10756 if (ndx != count)
10757 abort ();
10759 else
10760 for (i = 0; i < 8; i++)
10761 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
10763 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
10764 CR0_REGNO+i),
10765 r12_rtx));
10769 /* If this is V.4, unwind the stack pointer after all of the loads
10770 have been done. We need to emit a block here so that sched
10771 doesn't decide to move the sp change before the register restores
10772 (which may not have any obvious dependency on the stack). This
10773 doesn't hurt performance, because there is no scheduling that can
10774 be done after this point. */
10775 if (DEFAULT_ABI == ABI_V4)
10777 if (frame_reg_rtx != sp_reg_rtx)
10778 rs6000_emit_stack_tie ();
10780 if (use_backchain_to_restore_sp)
10782 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
10784 else if (sp_offset != 0)
10786 emit_insn (Pmode == SImode
10787 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
10788 GEN_INT (sp_offset))
10789 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
10790 GEN_INT (sp_offset)));
/* Apply the extra stack adjustment the EH return machinery computed.  */
10794 if (current_function_calls_eh_return)
10796 rtx sa = EH_RETURN_STACKADJ_RTX;
10797 emit_insn (Pmode == SImode
10798 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
10799 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
10802 if (!sibcall)
10804 rtvec p;
10805 if (! restoring_FPRs_inline)
10806 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
10807 else
10808 p = rtvec_alloc (2);
10810 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
10811 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
10812 gen_rtx_REG (Pmode,
10813 LINK_REGISTER_REGNUM));
10815 /* If we have to restore more than two FP registers, branch to the
10816 restore function. It will return to our caller. */
10817 if (! restoring_FPRs_inline)
10819 int i;
10820 char rname[30];
10821 const char *alloc_rname;
10823 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
10824 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
10825 alloc_rname = ggc_strdup (rname);
10826 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
10827 gen_rtx_SYMBOL_REF (Pmode,
10828 alloc_rname));
10830 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
10832 rtx addr, mem;
10833 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
10834 GEN_INT (info->fp_save_offset + 8*i));
10835 mem = gen_rtx_MEM (DFmode, addr);
10836 set_mem_alias_set (mem, rs6000_sr_alias_set);
10838 RTVEC_ELT (p, i+3) =
10839 gen_rtx_SET (VOIDmode,
10840 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
10841 mem);
10845 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
10849 /* Write function epilogue. */
10851 static void
10852 rs6000_output_function_epilogue (file, size)
10853 FILE *file;
10854 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
10856 rs6000_stack_t *info = rs6000_stack_info ();
10858 if (! HAVE_epilogue)
10860 rtx insn = get_last_insn ();
10861 /* If the last insn was a BARRIER, we don't have to write anything except
10862 the trace table. */
10863 if (GET_CODE (insn) == NOTE)
10864 insn = prev_nonnote_insn (insn);
10865 if (insn == 0 || GET_CODE (insn) != BARRIER)
10867 /* This is slightly ugly, but at least we don't have two
10868 copies of the epilogue-emitting code. */
10869 start_sequence ();
10871 /* A NOTE_INSN_DELETED is supposed to be at the start
10872 and end of the "toplevel" insn chain. */
10873 emit_note (0, NOTE_INSN_DELETED);
10874 rs6000_emit_epilogue (FALSE);
10875 emit_note (0, NOTE_INSN_DELETED);
10877 /* Expand INSN_ADDRESSES so final() doesn't crash. */
10879 rtx insn;
10880 unsigned addr = 0;
10881 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
10883 INSN_ADDRESSES_NEW (insn, addr);
10884 addr += 4;
10888 if (TARGET_DEBUG_STACK)
10889 debug_rtx_list (get_insns (), 100);
10890 final (get_insns (), file, FALSE, FALSE);
10891 end_sequence ();
10895 /* Output a traceback table here. See /usr/include/sys/debug.h for info
10896 on its format.
10898 We don't output a traceback table if -finhibit-size-directive was
10899 used. The documentation for -finhibit-size-directive reads
10900 ``don't output a @code{.size} assembler directive, or anything
10901 else that would cause trouble if the function is split in the
10902 middle, and the two halves are placed at locations far apart in
10903 memory.'' The traceback table has this property, since it
10904 includes the offset from the start of the function to the
10905 traceback table itself.
10907 System V.4 Powerpc's (and the embedded ABI derived from it) use a
10908 different traceback table. */
10909 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
10910 && rs6000_traceback != traceback_none)
10912 const char *fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
10913 const char *language_string = lang_hooks.name;
10914 int fixed_parms = 0, float_parms = 0, parm_info = 0;
10915 int i;
10916 int optional_tbtab;
10918 if (rs6000_traceback == traceback_full)
10919 optional_tbtab = 1;
10920 else if (rs6000_traceback == traceback_part)
10921 optional_tbtab = 0;
10922 else
10923 optional_tbtab = !optimize_size && !TARGET_ELF;
10925 while (*fname == '.') /* V.4 encodes . in the name */
10926 fname++;
10928 /* Need label immediately before tbtab, so we can compute its offset
10929 from the function start. */
10930 if (*fname == '*')
10931 ++fname;
10932 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
10933 ASM_OUTPUT_LABEL (file, fname);
10935 /* The .tbtab pseudo-op can only be used for the first eight
10936 expressions, since it can't handle the possibly variable
10937 length fields that follow. However, if you omit the optional
10938 fields, the assembler outputs zeros for all optional fields
10939 anyway, giving each variable length field its minimum length
10940 (as defined in sys/debug.h). Thus we can not use the .tbtab
10941 pseudo-op at all. */
10943 /* An all-zero word flags the start of the tbtab, for debuggers
10944 that have to find it by searching forward from the entry
10945 point or from the current pc. */
10946 fputs ("\t.long 0\n", file);
10948 /* Tbtab format type. Use format type 0. */
10949 fputs ("\t.byte 0,", file);
10951 /* Language type. Unfortunately, there doesn't seem to be any
10952 official way to get this info, so we use language_string. C
10953 is 0. C++ is 9. No number defined for Obj-C, so use the
10954 value for C for now. There is no official value for Java,
10955 although IBM appears to be using 13. There is no official value
10956 for Chill, so we've chosen 44 pseudo-randomly. */
10957 if (! strcmp (language_string, "GNU C")
10958 || ! strcmp (language_string, "GNU Objective-C"))
10959 i = 0;
10960 else if (! strcmp (language_string, "GNU F77"))
10961 i = 1;
10962 else if (! strcmp (language_string, "GNU Ada"))
10963 i = 3;
10964 else if (! strcmp (language_string, "GNU Pascal"))
10965 i = 2;
10966 else if (! strcmp (language_string, "GNU C++"))
10967 i = 9;
10968 else if (! strcmp (language_string, "GNU Java"))
10969 i = 13;
10970 else if (! strcmp (language_string, "GNU CHILL"))
10971 i = 44;
10972 else
10973 abort ();
10974 fprintf (file, "%d,", i);
10976 /* 8 single bit fields: global linkage (not set for C extern linkage,
10977 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
10978 from start of procedure stored in tbtab, internal function, function
10979 has controlled storage, function has no toc, function uses fp,
10980 function logs/aborts fp operations. */
10981 /* Assume that fp operations are used if any fp reg must be saved. */
10982 fprintf (file, "%d,",
10983 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
10985 /* 6 bitfields: function is interrupt handler, name present in
10986 proc table, function calls alloca, on condition directives
10987 (controls stack walks, 3 bits), saves condition reg, saves
10988 link reg. */
10989 /* The `function calls alloca' bit seems to be set whenever reg 31 is
10990 set up as a frame pointer, even when there is no alloca call. */
10991 fprintf (file, "%d,",
10992 ((optional_tbtab << 6)
10993 | ((optional_tbtab & frame_pointer_needed) << 5)
10994 | (info->cr_save_p << 1)
10995 | (info->lr_save_p)));
10997 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
10998 (6 bits). */
10999 fprintf (file, "%d,",
11000 (info->push_p << 7) | (64 - info->first_fp_reg_save));
11002 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
11003 fprintf (file, "%d,", (32 - first_reg_to_save ()));
11005 if (optional_tbtab)
11007 /* Compute the parameter info from the function decl argument
11008 list. */
11009 tree decl;
11010 int next_parm_info_bit = 31;
11012 for (decl = DECL_ARGUMENTS (current_function_decl);
11013 decl; decl = TREE_CHAIN (decl))
11015 rtx parameter = DECL_INCOMING_RTL (decl);
11016 enum machine_mode mode = GET_MODE (parameter);
11018 if (GET_CODE (parameter) == REG)
11020 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
11022 int bits;
11024 float_parms++;
11026 if (mode == SFmode)
11027 bits = 0x2;
11028 else if (mode == DFmode)
11029 bits = 0x3;
11030 else
11031 abort ();
11033 /* If only one bit will fit, don't or in this entry. */
11034 if (next_parm_info_bit > 0)
11035 parm_info |= (bits << (next_parm_info_bit - 1));
11036 next_parm_info_bit -= 2;
11038 else
11040 fixed_parms += ((GET_MODE_SIZE (mode)
11041 + (UNITS_PER_WORD - 1))
11042 / UNITS_PER_WORD);
11043 next_parm_info_bit -= 1;
11049 /* Number of fixed point parameters. */
11050 /* This is actually the number of words of fixed point parameters; thus
11051 an 8 byte struct counts as 2; and thus the maximum value is 8. */
11052 fprintf (file, "%d,", fixed_parms);
11054 /* 2 bitfields: number of floating point parameters (7 bits), parameters
11055 all on stack. */
11056 /* This is actually the number of fp registers that hold parameters;
11057 and thus the maximum value is 13. */
11058 /* Set parameters on stack bit if parameters are not in their original
11059 registers, regardless of whether they are on the stack? Xlc
11060 seems to set the bit when not optimizing. */
11061 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
11063 if (! optional_tbtab)
11064 return;
11066 /* Optional fields follow. Some are variable length. */
11068 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
11069 11 double float. */
11070 /* There is an entry for each parameter in a register, in the order that
11071 they occur in the parameter list. Any intervening arguments on the
11072 stack are ignored. If the list overflows a long (max possible length
11073 34 bits) then completely leave off all elements that don't fit. */
11074 /* Only emit this long if there was at least one parameter. */
11075 if (fixed_parms || float_parms)
11076 fprintf (file, "\t.long %d\n", parm_info);
11078 /* Offset from start of code to tb table. */
11079 fputs ("\t.long ", file);
11080 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
11081 #if TARGET_AIX
11082 RS6000_OUTPUT_BASENAME (file, fname);
11083 #else
11084 assemble_name (file, fname);
11085 #endif
11086 fputs ("-.", file);
11087 #if TARGET_AIX
11088 RS6000_OUTPUT_BASENAME (file, fname);
11089 #else
11090 assemble_name (file, fname);
11091 #endif
11092 putc ('\n', file);
11094 /* Interrupt handler mask. */
11095 /* Omit this long, since we never set the interrupt handler bit
11096 above. */
11098 /* Number of CTL (controlled storage) anchors. */
11099 /* Omit this long, since the has_ctl bit is never set above. */
11101 /* Displacement into stack of each CTL anchor. */
11102 /* Omit this list of longs, because there are no CTL anchors. */
11104 /* Length of function name. */
11105 fprintf (file, "\t.short %d\n", (int) strlen (fname));
11107 /* Function name. */
11108 assemble_string (fname, strlen (fname));
11110 /* Register for alloca automatic storage; this is always reg 31.
11111 Only emit this if the alloca bit was set above. */
11112 if (frame_pointer_needed)
11113 fputs ("\t.byte 31\n", file);
11115 fputs ("\t.align 2\n", file);
11119 /* A C compound statement that outputs the assembler code for a thunk
11120 function, used to implement C++ virtual function calls with
11121 multiple inheritance. The thunk acts as a wrapper around a virtual
11122 function, adjusting the implicit object parameter before handing
11123 control off to the real function.
11125 First, emit code to add the integer DELTA to the location that
11126 contains the incoming first argument. Assume that this argument
11127 contains a pointer, and is the one used to pass the `this' pointer
11128 in C++. This is the incoming argument *before* the function
11129 prologue, e.g. `%o0' on a sparc. The addition must preserve the
11130 values of all other incoming arguments.
11132 After the addition, emit code to jump to FUNCTION, which is a
11133 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
11134 not touch the return address. Hence returning from FUNCTION will
11135 return to whoever called the current `thunk'.
11137 The effect must be as if FUNCTION had been called directly with the
11138 adjusted first argument. This macro is responsible for emitting
11139 all of the code for a thunk function; output_function_prologue()
11140 and output_function_epilogue() are not invoked.
11142 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
11143 been extracted from it.) It might possibly be useful on some
11144 targets, but probably not.
11146 If you do not define this macro, the target-independent code in the
11147 C++ frontend will generate a less efficient heavyweight thunk that
11148 calls FUNCTION instead of jumping to it. The generic approach does
11149 not support varargs. */
void
output_mi_thunk (file, thunk_fndecl, delta, function)
     FILE *file;
     tree thunk_fndecl ATTRIBUTE_UNUSED;
     int delta;
     tree function;
{
  /* Register carrying the `this' pointer: r3 normally, r4 when
     aggregate_value_p says the return value goes by invisible
     reference -- NOTE(review): inferred from the index choice below;
     confirm against the ABI argument-passing rules.  */
  const char *this_reg =
    reg_names[ aggregate_value_p (TREE_TYPE (TREE_TYPE (function))) ? 4 : 3 ];
  const char *prefix;
  const char *fname;
  const char *r0 = reg_names[0];
  const char *toc = reg_names[2];
  const char *schain = reg_names[11];	/* static chain register */
  const char *r12 = reg_names[12];
  char buf[512];
  /* Counter making the `Lthunk' TOC labels unique across calls.  */
  static int labelno = 0;

  /* Small constants that can be done by one add instruction.  */
  if (delta >= -32768 && delta <= 32767)
    {
      if (! TARGET_NEW_MNEMONICS)
	fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta, this_reg);
      else
	fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta);
    }

  /* Large constants that can be done by one addis instruction.  */
  else if ((delta & 0xffff) == 0 && num_insns_constant_wide (delta) == 1)
    asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
		 delta >> 16);

  /* 32-bit constants that can be done by an add and addis instruction.  */
  else if (TARGET_32BIT || num_insns_constant_wide (delta) == 1)
    {
      /* Break into two pieces, propagating the sign bit from the low
	 word to the upper word.  */
      int delta_high = delta >> 16;
      int delta_low = delta & 0xffff;
      if ((delta_low & 0x8000) != 0)
	{
	  delta_high++;
	  delta_low = (delta_low ^ 0x8000) - 0x8000;	/* sign extend */
	}

      asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
		   delta_high);

      if (! TARGET_NEW_MNEMONICS)
	fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta_low, this_reg);
      else
	fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta_low);
    }

  /* 64-bit constants, fixme */
  else
    abort ();

  /* Get the prefix in front of the names.  */
  switch (DEFAULT_ABI)
    {
    default:
      abort ();

    case ABI_AIX:
      prefix = ".";
      break;

    case ABI_V4:
    case ABI_AIX_NODESC:
      prefix = "";
      break;
    }

  /* If the function is compiled in this module, jump to it directly.
     Otherwise, load up its address and jump to it.  */

  fname = XSTR (XEXP (DECL_RTL (function), 0), 0);

  if (current_file_function_operand (XEXP (DECL_RTL (function), 0), VOIDmode)
      && (! lookup_attribute ("longcall",
			      TYPE_ATTRIBUTES (TREE_TYPE (function)))
	  || lookup_attribute ("shortcall",
			       TYPE_ATTRIBUTES (TREE_TYPE (function)))))
    {
      fprintf (file, "\tb %s", prefix);
      assemble_name (file, fname);
      if (DEFAULT_ABI == ABI_V4 && flag_pic) fputs ("@local", file);
      putc ('\n', file);
    }

  else
    {
      switch (DEFAULT_ABI)
	{
	default:
	  abort ();

	case ABI_AIX:
	  /* Set up a TOC entry for the function.  */
	  ASM_GENERATE_INTERNAL_LABEL (buf, "Lthunk", labelno);
	  toc_section ();
	  ASM_OUTPUT_INTERNAL_LABEL (file, "Lthunk", labelno);
	  labelno++;

	  if (TARGET_MINIMAL_TOC)
	    fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
	  else
	    {
	      fputs ("\t.tc ", file);
	      assemble_name (file, fname);
	      fputs ("[TC],", file);
	    }
	  assemble_name (file, fname);
	  putc ('\n', file);
	  text_section ();
	  if (TARGET_MINIMAL_TOC)
	    asm_fprintf (file, (TARGET_32BIT)
			 ? "\t{l|lwz} %s,%s(%s)\n" : "\tld %s,%s(%s)\n", r12,
			 TARGET_ELF ? ".LCTOC0@toc" : ".LCTOC..1", toc);
	  asm_fprintf (file, (TARGET_32BIT) ? "\t{l|lwz} %s," : "\tld %s,", r12);
	  assemble_name (file, buf);
	  if (TARGET_ELF && TARGET_MINIMAL_TOC)
	    fputs ("-(.LCTOC1)", file);
	  asm_fprintf (file, "(%s)\n", TARGET_MINIMAL_TOC ? r12 : toc);
	  /* Load function address, new TOC pointer and static chain out
	     of the AIX function descriptor, then jump through the CTR.  */
	  asm_fprintf (file,
		       (TARGET_32BIT) ? "\t{l|lwz} %s,0(%s)\n" : "\tld %s,0(%s)\n",
		       r0, r12);

	  asm_fprintf (file,
		       (TARGET_32BIT) ? "\t{l|lwz} %s,4(%s)\n" : "\tld %s,8(%s)\n",
		       toc, r12);

	  asm_fprintf (file, "\tmtctr %s\n", r0);
	  asm_fprintf (file,
		       (TARGET_32BIT) ? "\t{l|lwz} %s,8(%s)\n" : "\tld %s,16(%s)\n",
		       schain, r12);

	  asm_fprintf (file, "\tbctr\n");
	  break;

	case ABI_AIX_NODESC:
	case ABI_V4:
	  fprintf (file, "\tb %s", prefix);
	  assemble_name (file, fname);
	  if (flag_pic) fputs ("@plt", file);
	  putc ('\n', file);
	  break;

#if TARGET_MACHO
	case ABI_DARWIN:
	  /* NOTE(review): the prefix switch above aborts for ABI_DARWIN,
	     so it is unclear how this arm gets a valid PREFIX -- verify
	     when building with TARGET_MACHO.  */
	  fprintf (file, "\tb %s", prefix);
	  if (flag_pic && !machopic_name_defined_p (fname))
	    assemble_name (file, machopic_stub_name (fname));
	  else
	    assemble_name (file, fname);
	  putc ('\n', file);
	  break;
#endif
	}
    }
}
11316 /* A quick summary of the various types of 'constant-pool tables'
11317 under PowerPC:
11319 Target Flags Name One table per
11320 AIX (none) AIX TOC object file
11321 AIX -mfull-toc AIX TOC object file
11322 AIX -mminimal-toc AIX minimal TOC translation unit
11323 SVR4/EABI (none) SVR4 SDATA object file
11324 SVR4/EABI -fpic SVR4 pic object file
11325 SVR4/EABI -fPIC SVR4 PIC translation unit
11326 SVR4/EABI -mrelocatable EABI TOC function
11327 SVR4/EABI -maix AIX TOC object file
11328 SVR4/EABI -maix -mminimal-toc
11329 AIX minimal TOC translation unit
11331 Name Reg. Set by entries contains:
11332 made by addrs? fp? sum?
11334 AIX TOC 2 crt0 as Y option option
11335 AIX minimal TOC 30 prolog gcc Y Y option
11336 SVR4 SDATA 13 crt0 gcc N Y N
11337 SVR4 pic 30 prolog ld Y not yet N
11338 SVR4 PIC 30 prolog gcc Y option option
11339 EABI TOC 30 prolog gcc Y option option
/* Hash table stuff for keeping track of TOC entries.  */

struct toc_hash_struct
{
  /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
     ASM_OUTPUT_SPECIAL_POOL_ENTRY_P.  */
  rtx key;
  /* Mode the constant is used in; entries in different modes are
     considered distinct (see toc_hash_eq).  */
  enum machine_mode key_mode;
  /* Number of the `LC' internal label emitted for this entry.  */
  int labelno;
};

/* Table mapping TOC constants to their label numbers, used by
   output_toc to suppress duplicate entries.  */
static htab_t toc_hash_table;
/* Hash functions for the hash table.  */

/* Compute a recursive hash of the rtx constant K, folding in its code,
   mode, and every operand per the rtx format string.  */

static unsigned
rs6000_hash_constant (k)
     rtx k;
{
  unsigned result = (GET_CODE (k) << 3) ^ GET_MODE (k);
  const char *format = GET_RTX_FORMAT (GET_CODE (k));
  int flen = strlen (format);
  int fidx;

  /* A LABEL_REF is hashed by the uid-like field of its CODE_LABEL
     only, not by recursing into the label.  */
  if (GET_CODE (k) == LABEL_REF)
    return result * 1231 + X0INT (XEXP (k, 0), 3);

  /* For a CODE_LABEL, skip the first three operands (insn-chain
     links) so the hash does not depend on insn-chain position.  */
  if (GET_CODE (k) == CODE_LABEL)
    fidx = 3;
  else
    fidx = 0;

  for (; fidx < flen; fidx++)
    switch (format[fidx])
      {
      case 's':			/* string operand */
	{
	  unsigned i, len;
	  const char *str = XSTR (k, fidx);
	  len = strlen (str);
	  result = result * 613 + len;
	  for (i = 0; i < len; i++)
	    result = result * 613 + (unsigned) str[i];
	  break;
	}
      case 'u':			/* recurse into sub-rtx operands */
      case 'e':
	result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
	break;
      case 'i':			/* integer operands */
      case 'n':
	result = result * 613 + (unsigned) XINT (k, fidx);
	break;
      case 'w':			/* HOST_WIDE_INT, folded chunk-wise when
				   wider than `unsigned' */
	if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
	  result = result * 613 + (unsigned) XWINT (k, fidx);
	else
	  {
	    size_t i;
	    for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
	      result = result * 613 + (unsigned) (XWINT (k, fidx)
						  >> CHAR_BIT * i);
	  }
	break;
      default:
	abort ();
      }

  return result;
}
11413 static unsigned
11414 toc_hash_function (hash_entry)
11415 const void * hash_entry;
11417 const struct toc_hash_struct *thc =
11418 (const struct toc_hash_struct *) hash_entry;
11419 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
11422 /* Compare H1 and H2 for equivalence. */
11424 static int
11425 toc_hash_eq (h1, h2)
11426 const void * h1;
11427 const void * h2;
11429 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
11430 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
11432 if (((const struct toc_hash_struct *) h1)->key_mode
11433 != ((const struct toc_hash_struct *) h2)->key_mode)
11434 return 0;
11436 return rtx_equal_p (r1, r2);
11439 /* Mark the hash table-entry HASH_ENTRY. */
11441 static int
11442 toc_hash_mark_entry (hash_slot, unused)
11443 void ** hash_slot;
11444 void * unused ATTRIBUTE_UNUSED;
11446 const struct toc_hash_struct * hash_entry =
11447 *(const struct toc_hash_struct **) hash_slot;
11448 rtx r = hash_entry->key;
11449 ggc_set_mark (hash_entry);
11450 /* For CODE_LABELS, we don't want to drag in the whole insn chain... */
11451 if (GET_CODE (r) == LABEL_REF)
11453 ggc_set_mark (r);
11454 ggc_set_mark (XEXP (r, 0));
11456 else
11457 ggc_mark_rtx (r);
11458 return 1;
11461 /* Mark all the elements of the TOC hash-table *HT. */
11463 static void
11464 toc_hash_mark_table (vht)
11465 void *vht;
11467 htab_t *ht = vht;
11469 htab_traverse (*ht, toc_hash_mark_entry, (void *)0);
/* These are the names given by the C++ front-end to vtables, and
   vtable-like objects.  Ideally, this logic should not be here;
   instead, there should be some programmatic way of inquiring as
   to whether or not an object is a vtable.  */

/* NAME is a NUL-terminated symbol name; true when it carries one of
   the vtable/VTT/construction-vtable prefixes.  The macro formerly
   tested the caller's local variable `name' instead of its own
   parameter, which only worked because both call sites happened to
   pass a variable of that exact name; it now uses NAME properly.  */
#define VTABLE_NAME_P(NAME)				\
  (strncmp ("_vt.", (NAME), strlen ("_vt.")) == 0	\
   || strncmp ("_ZTV", (NAME), strlen ("_ZTV")) == 0	\
   || strncmp ("_ZTT", (NAME), strlen ("_ZTT")) == 0	\
   || strncmp ("_ZTC", (NAME), strlen ("_ZTC")) == 0)
11483 void
11484 rs6000_output_symbol_ref (file, x)
11485 FILE *file;
11486 rtx x;
11488 /* Currently C++ toc references to vtables can be emitted before it
11489 is decided whether the vtable is public or private. If this is
11490 the case, then the linker will eventually complain that there is
11491 a reference to an unknown section. Thus, for vtables only,
11492 we emit the TOC reference to reference the symbol and not the
11493 section. */
11494 const char *name = XSTR (x, 0);
11496 if (VTABLE_NAME_P (name))
11498 RS6000_OUTPUT_BASENAME (file, name);
11500 else
11501 assemble_name (file, name);
/* Output a TOC entry.  We derive the entry name from what is being
   written.  */

/* FILE receives the assembly; X is the constant, LABELNO the number of
   its `LC' label, and MODE the mode X is used in.  */

void
output_toc (file, x, labelno, mode)
     FILE *file;
     rtx x;
     int labelno;
     enum machine_mode mode;
{
  char buf[256];
  const char *name = buf;
  const char *real_name;
  rtx base = x;
  int offset = 0;

  if (TARGET_NO_TOC)
    abort ();

  /* When the linker won't eliminate them, don't output duplicate
     TOC entries (this happens on AIX if there is any kind of TOC,
     and on SVR4 under -fPIC or -mrelocatable).  */
  if (TARGET_TOC)
    {
      struct toc_hash_struct *h;
      void * * found;

      h = ggc_alloc (sizeof (*h));
      h->key = x;
      h->key_mode = mode;
      h->labelno = labelno;

      found = htab_find_slot (toc_hash_table, h, 1);
      if (*found == NULL)
	*found = h;
      else  /* This is indeed a duplicate.
	       Set this label equal to that label.  */
	{
	  fputs ("\t.set ", file);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d,", labelno);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
				   found)->labelno));
	  return;
	}
    }

  /* If we're going to put a double constant in the TOC, make sure it's
     aligned properly when strict alignment is on.  */
  if (GET_CODE (x) == CONST_DOUBLE
      && STRICT_ALIGNMENT
      && GET_MODE_BITSIZE (mode) >= 64
      && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
    ASM_OUTPUT_ALIGN (file, 3);
  }

  ASM_OUTPUT_INTERNAL_LABEL (file, "LC", labelno);

  /* Handle FP constants specially.  Note that if we have a minimal
     TOC, things we put here aren't actually in the TOC, so we can allow
     FP constants.  */
  if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
    {
      REAL_VALUE_TYPE rv;
      long k[2];

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff);
	  fprintf (file, "0x%lx%08lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff);
	  fprintf (file, "0x%lx,0x%lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff);
	  return;
	}
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
    {
      REAL_VALUE_TYPE rv;
      long l;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
	  /* A single float occupies the high half of a 64-bit slot.  */
	  fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
	  fprintf (file, "0x%lx\n", l & 0xffffffff);
	  return;
	}
    }
  else if (GET_MODE (x) == VOIDmode
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      unsigned HOST_WIDE_INT low;
      HOST_WIDE_INT high;

      if (GET_CODE (x) == CONST_DOUBLE)
	{
	  low = CONST_DOUBLE_LOW (x);
	  high = CONST_DOUBLE_HIGH (x);
	}
      else
#if HOST_BITS_PER_WIDE_INT == 32
	{
	  low = INTVAL (x);
	  high = (low & 0x80000000) ? ~0 : 0;
	}
#else
	{
	  low = INTVAL (x) & 0xffffffff;
	  high = (HOST_WIDE_INT) INTVAL (x) >> 32;
	}
#endif

      /* TOC entries are always Pmode-sized, but since this
	 is a bigendian machine then if we're putting smaller
	 integer constants in the TOC we have to pad them.
	 (This is still a win over putting the constants in
	 a separate constant pool, because then we'd have
	 to have both a TOC entry _and_ the actual constant.)

	 For a 32-bit target, CONST_INT values are loaded and shifted
	 entirely within `low' and can be stored in one TOC entry.  */

      if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
	abort ();/* It would be easy to make this work, but it doesn't now.  */

      if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
	lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
		       POINTER_SIZE, &low, &high, 0);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc ID_%lx_%lx[TC],",
		     (long) high & 0xffffffff, (long) low & 0xffffffff);
	  fprintf (file, "0x%lx%08lx\n",
		   (long) high & 0xffffffff, (long) low & 0xffffffff);
	  return;
	}
      else
	{
	  if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
	    {
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc ID_%lx_%lx[TC],",
			 (long) high & 0xffffffff, (long) low & 0xffffffff);
	      fprintf (file, "0x%lx,0x%lx\n",
		       (long) high & 0xffffffff, (long) low & 0xffffffff);
	    }
	  else
	    {
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
	      fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
	    }
	  return;
	}
    }

  /* Symbolic constant: peel off any CONST (sym + offset) wrapper.  */
  if (GET_CODE (x) == CONST)
    {
      if (GET_CODE (XEXP (x, 0)) != PLUS)
	abort ();

      base = XEXP (XEXP (x, 0), 0);
      offset = INTVAL (XEXP (XEXP (x, 0), 1));
    }

  if (GET_CODE (base) == SYMBOL_REF)
    name = XSTR (base, 0);
  else if (GET_CODE (base) == LABEL_REF)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
  else if (GET_CODE (base) == CODE_LABEL)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
  else
    abort ();

  real_name = (*targetm.strip_name_encoding) (name);
  if (TARGET_MINIMAL_TOC)
    fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
  else
    {
      /* Encode the offset into the .tc name: `.N<n>' for negative,
	 `.P<n>' for positive, so each offset gets its own entry.  */
      fprintf (file, "\t.tc %s", real_name);

      if (offset < 0)
	fprintf (file, ".N%d", - offset);
      else if (offset)
	fprintf (file, ".P%d", offset);

      fputs ("[TC],", file);
    }

  /* Currently C++ toc references to vtables can be emitted before it
     is decided whether the vtable is public or private.  If this is
     the case, then the linker will eventually complain that there is
     a TOC reference to an unknown section.  Thus, for vtables only,
     we emit the TOC reference to reference the symbol and not the
     section.  */
  if (VTABLE_NAME_P (name))
    {
      RS6000_OUTPUT_BASENAME (file, name);
      if (offset < 0)
	fprintf (file, "%d", offset);
      else if (offset > 0)
	fprintf (file, "+%d", offset);
    }
  else
    output_addr_const (file, x);
  putc ('\n', file);
}
/* Output an assembler pseudo-op to write an ASCII string of N characters
   starting at P to FILE.

   On the RS/6000, we have to do this using the .byte operation and
   write out special characters outside the quoted string.
   Also, the assembler is broken; very long strings are truncated,
   so we must artificially break them up early.  */

void
output_ascii (file, p, n)
     FILE *file;
     const char *p;
     int n;
{
  /* Text to emit before the next quoted character / decimal byte, and
     the text needed to close whatever is currently open.  */
  const char *open_string = "\t.byte \"";
  const char *open_decimal = "\t.byte ";
  const char *closer = NULL;
  int chars_in_string = 0;
  int i;
  char ch;

  for (i = 0; i < n; i++)
    {
      ch = *p++;
      if (ch < ' ' || ch >= 0177)
	{
	  /* Non-printing character: emit it as a decimal .byte value.  */
	  if (open_decimal)
	    fputs (open_decimal, file);
	  fprintf (file, "%d", ch);

	  open_string = "\n\t.byte \"";
	  open_decimal = ", ";
	  closer = "\n";
	  chars_in_string = 0;
	}
      else
	{
	  /* Printing character: emit it inside a quoted string.  */
	  if (open_string)
	    fputs (open_string, file);
	  putc (ch, file);

	  /* Write two quotes to get one.  */
	  if (ch == '"')
	    {
	      putc (ch, file);
	      ++chars_in_string;
	    }

	  open_string = NULL;
	  open_decimal = "\"\n\t.byte ";
	  closer = "\"\n";
	  ++chars_in_string;

	  /* The assembler truncates very long strings, so chop the
	     quoted run well before that limit.  */
	  if (chars_in_string >= 512)
	    {
	      fputs (closer, file);

	      open_string = "\t.byte \"";
	      open_decimal = "\t.byte ";
	      closer = NULL;
	      chars_in_string = 0;
	    }
	}
    }

  /* Now close the string if we have written one.  Then end the line.  */
  if (closer)
    fputs (closer, file);
}
/* Generate a unique section name for FILENAME for a section type
   represented by SECTION_DESC.  Output goes into BUF.

   SECTION_DESC can be any string, as long as it is different for each
   possible section type.

   We name the section in the same manner as xlc.  The name begins with an
   underscore followed by the filename (after stripping any leading directory
   names) with the last period replaced by the string SECTION_DESC.  If
   FILENAME does not contain a period, SECTION_DESC is appended to the end of
   the name.  Non-alphanumeric characters other than that period are
   dropped.

   Fix over the previous version: a period appearing only in a
   *directory* component (e.g. "/v1.2/foo") used to satisfy the
   "contains a period" test, so SECTION_DESC was silently never added.
   We now forget any period seen before the final slash.  */

void
rs6000_gen_section_name (buf, filename, section_desc)
     char **buf;
     const char *filename;
     const char *section_desc;
{
  const char *q, *after_last_slash, *last_period = 0;
  char *p;
  int len;

  /* Find the start of the base name, and the last period within it.
     Seeing a slash discards any period noted so far, so only periods
     in the base name count.  */
  after_last_slash = filename;
  for (q = filename; *q; q++)
    {
      if (*q == '/')
	{
	  after_last_slash = q + 1;
	  last_period = 0;
	}
      else if (*q == '.')
	last_period = q;
    }

  /* '_' + base name with '.' replaced by SECTION_DESC + NUL always
     fits: non-alphanumerics are dropped, never expanded.  */
  len = strlen (after_last_slash) + strlen (section_desc) + 2;
  *buf = (char *) permalloc (len);

  p = *buf;
  *p++ = '_';

  for (q = after_last_slash; *q; q++)
    {
      if (q == last_period)
	{
	  strcpy (p, section_desc);
	  p += strlen (section_desc);
	}

      else if (ISALNUM (*q))
	*p++ = *q;
    }

  if (last_period == 0)
    strcpy (p, section_desc);
  else
    *p = '\0';
}
/* Emit profile function.  */

/* Emit the RTL for a call to the profiling routine (mcount).  LABELNO
   numbers the per-function `LP' counter label on AIX; unused on ABIs
   that take no counter argument.  V.4-style ABIs are handled in
   output_function_profiler instead, so this does nothing for them.  */

void
output_profile_hook (labelno)
     int labelno ATTRIBUTE_UNUSED;
{
  if (DEFAULT_ABI == ABI_AIX)
    {
#ifdef NO_PROFILE_COUNTERS
      emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
#else
      /* Pass the address of the per-function counter label to mcount.  */
      char buf[30];
      const char *label_name;
      rtx fun;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
      label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
      fun = gen_rtx_SYMBOL_REF (Pmode, label_name);

      emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
			 fun, Pmode);
#endif
    }
  else if (DEFAULT_ABI == ABI_DARWIN)
    {
      const char *mcount_name = RS6000_MCOUNT;
      int caller_addr_regno = LINK_REGISTER_REGNUM;

      /* Be conservative and always set this, at least for now.  */
      current_function_uses_pic_offset_table = 1;

#if TARGET_MACHO
      /* For PIC code, set up a stub and collect the caller's address
	 from r0, which is where the prologue puts it.  */
      if (flag_pic)
	{
	  mcount_name = machopic_stub_name (mcount_name);
	  if (current_function_uses_pic_offset_table)
	    caller_addr_regno = 0;
	}
#endif
      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
			 0, VOIDmode, 1,
			 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
    }
}
/* Write function profiler code.  */

/* Emit, as text to FILE, the prologue instructions that call mcount
   for the V.4-style ABIs.  LABELNO numbers the `LP' counter label.
   AIX and Darwin emit their call in output_profile_hook instead.  */

void
output_function_profiler (file, labelno)
  FILE *file;
  int labelno;
{
  char buf[100];
  /* Stack offset at which the saved link register lives; the V.4 ABI
     uses 4, ABI_AIX_NODESC uses 8.  */
  int save_lr = 8;

  ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
  switch (DEFAULT_ABI)
    {
    default:
      abort ();

    case ABI_V4:
      save_lr = 4;
      /* Fall through.  */

    case ABI_AIX_NODESC:
      if (!TARGET_32BIT)
	{
	  warning ("no profiling of 64-bit code for this ABI");
	  return;
	}
      fprintf (file, "\tmflr %s\n", reg_names[0]);
      if (flag_pic == 1)
	{
	  /* -fpic: fetch the counter address through the GOT.  */
	  fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
	  asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@got(%s)\n", reg_names[12]);
	}
      else if (flag_pic > 1)
	{
	  /* -fPIC: compute the counter address pc-relatively from an
	     inline literal.  */
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  /* Now, we need to get the address of the label.  */
	  fputs ("\tbl 1f\n\t.long ", file);
	  assemble_name (file, buf);
	  fputs ("-.\n1:", file);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
	  asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
		       reg_names[0], reg_names[11]);
	  asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
		       reg_names[0], reg_names[0], reg_names[11]);
	}
      else
	{
	  /* Non-PIC: materialize the counter address directly.  */
	  asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
	  assemble_name (file, buf);
	  fputs ("@ha\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@l(%s)\n", reg_names[12]);
	}

      if (current_function_needs_context && DEFAULT_ABI == ABI_AIX_NODESC)
	{
	  /* Preserve the static chain around the mcount call.  */
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[STATIC_CHAIN_REGNUM],
		       12, reg_names[1]);
	  fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
	  asm_fprintf (file, "\t{l|lwz} %s,%d(%s)\n",
		       reg_names[STATIC_CHAIN_REGNUM],
		       12, reg_names[1]);
	}
      else
	/* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH.  */
	fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
      break;

    case ABI_AIX:
    case ABI_DARWIN:
      /* Don't do anything, done in output_profile_hook ().  */
      break;
    }
}
12009 /* Adjust the cost of a scheduling dependency. Return the new cost of
12010 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
12012 static int
12013 rs6000_adjust_cost (insn, link, dep_insn, cost)
12014 rtx insn;
12015 rtx link;
12016 rtx dep_insn ATTRIBUTE_UNUSED;
12017 int cost;
12019 if (! recog_memoized (insn))
12020 return 0;
12022 if (REG_NOTE_KIND (link) != 0)
12023 return 0;
12025 if (REG_NOTE_KIND (link) == 0)
12027 /* Data dependency; DEP_INSN writes a register that INSN reads
12028 some cycles later. */
12029 switch (get_attr_type (insn))
12031 case TYPE_JMPREG:
12032 /* Tell the first scheduling pass about the latency between
12033 a mtctr and bctr (and mtlr and br/blr). The first
12034 scheduling pass will not know about this latency since
12035 the mtctr instruction, which has the latency associated
12036 to it, will be generated by reload. */
12037 return TARGET_POWER ? 5 : 4;
12038 case TYPE_BRANCH:
12039 /* Leave some extra cycles between a compare and its
12040 dependent branch, to inhibit expensive mispredicts. */
12041 if ((rs6000_cpu_attr == CPU_PPC603
12042 || rs6000_cpu_attr == CPU_PPC604
12043 || rs6000_cpu_attr == CPU_PPC604E
12044 || rs6000_cpu_attr == CPU_PPC620
12045 || rs6000_cpu_attr == CPU_PPC630
12046 || rs6000_cpu_attr == CPU_PPC750
12047 || rs6000_cpu_attr == CPU_PPC7400
12048 || rs6000_cpu_attr == CPU_PPC7450
12049 || rs6000_cpu_attr == CPU_POWER4)
12050 && recog_memoized (dep_insn)
12051 && (INSN_CODE (dep_insn) >= 0)
12052 && (get_attr_type (dep_insn) == TYPE_COMPARE
12053 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
12054 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
12055 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL))
12056 return cost + 2;
12057 default:
12058 break;
12060 /* Fall out to return default cost. */
12063 return cost;
/* A C statement (sans semicolon) to update the integer scheduling
   priority INSN_PRIORITY (INSN).  Reduce the priority to execute the
   INSN earlier, increase the priority to execute INSN later.  Do not
   define this macro if you do not need to adjust the scheduling
   priorities of insns.  */

static int
rs6000_adjust_priority (insn, priority)
     rtx insn ATTRIBUTE_UNUSED;
     int priority;
{
  /* On machines (like the 750) which have asymmetric integer units,
     where one integer unit can do multiply and divides and the other
     can't, reduce the priority of multiply/divide so it is scheduled
     before other integer operations.  */
  /* The adjustment below is intentionally disabled (#if 0) and even
     contains a debugging fprintf to stderr; currently the priority is
     always returned unchanged.  */
#if 0
  if (! INSN_P (insn))
    return priority;

  if (GET_CODE (PATTERN (insn)) == USE)
    return priority;

  switch (rs6000_cpu_attr) {
  case CPU_PPC750:
    switch (get_attr_type (insn))
      {
      default:
	break;

      case TYPE_IMUL:
      case TYPE_IDIV:
	fprintf (stderr, "priority was %#x (%d) before adjustment\n",
		 priority, priority);
	if (priority >= 0 && priority < 0x01000000)
	  priority >>= 3;
	break;
      }
  }
#endif

  return priority;
}
12110 /* Return how many instructions the machine can issue per cycle. */
12112 static int
12113 rs6000_issue_rate ()
12115 switch (rs6000_cpu_attr) {
12116 case CPU_RIOS1: /* ? */
12117 case CPU_RS64A:
12118 case CPU_PPC601: /* ? */
12119 case CPU_PPC7450:
12120 return 3;
12121 case CPU_PPC603:
12122 case CPU_PPC750:
12123 case CPU_PPC7400:
12124 return 2;
12125 case CPU_RIOS2:
12126 case CPU_PPC604:
12127 case CPU_PPC604E:
12128 case CPU_PPC620:
12129 case CPU_PPC630:
12130 case CPU_POWER4:
12131 return 4;
12132 default:
12133 return 1;
12138 /* Length in units of the trampoline for entering a nested function. */
12141 rs6000_trampoline_size ()
12143 int ret = 0;
12145 switch (DEFAULT_ABI)
12147 default:
12148 abort ();
12150 case ABI_AIX:
12151 ret = (TARGET_32BIT) ? 12 : 24;
12152 break;
12154 case ABI_DARWIN:
12155 case ABI_V4:
12156 case ABI_AIX_NODESC:
12157 ret = (TARGET_32BIT) ? 40 : 48;
12158 break;
12161 return ret;
/* Emit RTL insns to initialize the variable parts of a trampoline.
   FNADDR is an RTX for the address of the function's pure code.
   CXT is an RTX for the static chain value for the function.  */

void
rs6000_initialize_trampoline (addr, fnaddr, cxt)
     rtx addr;
     rtx fnaddr;
     rtx cxt;
{
  enum machine_mode pmode = Pmode;
  int regsize = (TARGET_32BIT) ? 4 : 8;	/* bytes per pointer slot */
  rtx ctx_reg = force_reg (pmode, cxt);

  switch (DEFAULT_ABI)
    {
    default:
      abort ();

/* Macros to shorten the code expansions below.  */
#define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
#define MEM_PLUS(addr,offset) \
  gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))

    /* Under AIX, just build the 3 word function descriptor */
    case ABI_AIX:
      {
	rtx fn_reg = gen_reg_rtx (pmode);
	rtx toc_reg = gen_reg_rtx (pmode);
	/* Copy { code address, TOC pointer } from FNADDR's descriptor,
	   then append the static chain as the third word.  */
	emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
	emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
	emit_move_insn (MEM_DEREF (addr), fn_reg);
	emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
	emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
      }
      break;

    /* Under V.4/eabi/darwin, __trampoline_setup does the real work.  */
    case ABI_DARWIN:
    case ABI_V4:
    case ABI_AIX_NODESC:
      emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
			 FALSE, VOIDmode, 4,
			 addr, pmode,
			 GEN_INT (rs6000_trampoline_size ()), SImode,
			 fnaddr, pmode,
			 ctx_reg, pmode);
      break;
    }

  return;
}
/* Table of valid machine attributes.  Both "longcall" and "shortcall"
   are validated by the same handler.  */

const struct attribute_spec rs6000_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
  { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
  { NULL, 0, 0, false, false, false, NULL }
};
12228 /* Handle a "longcall" or "shortcall" attribute; arguments as in
12229 struct attribute_spec.handler. */
12231 static tree
12232 rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
12233 tree *node;
12234 tree name;
12235 tree args ATTRIBUTE_UNUSED;
12236 int flags ATTRIBUTE_UNUSED;
12237 bool *no_add_attrs;
12239 if (TREE_CODE (*node) != FUNCTION_TYPE
12240 && TREE_CODE (*node) != FIELD_DECL
12241 && TREE_CODE (*node) != TYPE_DECL)
12243 warning ("`%s' attribute only applies to functions",
12244 IDENTIFIER_POINTER (name));
12245 *no_add_attrs = true;
12248 return NULL_TREE;
12251 /* Set longcall attributes on all functions declared when
12252 rs6000_default_long_calls is true. */
12253 static void
12254 rs6000_set_default_type_attributes (type)
12255 tree type;
12257 if (rs6000_default_long_calls
12258 && (TREE_CODE (type) == FUNCTION_TYPE
12259 || TREE_CODE (type) == METHOD_TYPE))
12260 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
12261 NULL_TREE,
12262 TYPE_ATTRIBUTES (type));
12265 /* Return a reference suitable for calling a function with the
12266 longcall attribute. */
12268 struct rtx_def *
12269 rs6000_longcall_ref (call_ref)
12270 rtx call_ref;
12272 const char *call_name;
12273 tree node;
12275 if (GET_CODE (call_ref) != SYMBOL_REF)
12276 return call_ref;
12278 /* System V adds '.' to the internal name, so skip them. */
12279 call_name = XSTR (call_ref, 0);
12280 if (*call_name == '.')
12282 while (*call_name == '.')
12283 call_name++;
12285 node = get_identifier (call_name);
12286 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
12289 return force_reg (Pmode, call_ref);
12293 #ifdef USING_ELFOS_H
/* A C statement or statements to switch to the appropriate section
   for output of RTX in mode MODE.  You can assume that RTX is some
   kind of constant in RTL.  The argument MODE is redundant except in
   the case of a `const_int' rtx.  Select the section by calling
   `text_section' or one of the alternatives for other sections.

   Do not define this macro if you put all constants in the read-only
   data section.  */

static void
rs6000_elf_select_rtx_section (mode, x, align)
     enum machine_mode mode;
     rtx x;
     unsigned HOST_WIDE_INT align;
{
  /* Constants that belong in the TOC go to the toc section;
     everything else uses the generic ELF selection logic.  */
  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
    toc_section ();
  else
    default_elf_select_rtx_section (mode, x, align);
}
/* A C statement or statements to switch to the appropriate
   section for output of DECL.  DECL is either a `VAR_DECL' node
   or a constant of some sort.  RELOC indicates whether forming
   the initial value of DECL requires link-time relocations.  */

static void
rs6000_elf_select_section (decl, reloc, align)
     tree decl;
     int reloc;
     unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
{
  int size = int_size_in_bytes (TREE_TYPE (decl));
  int needs_sdata;
  int readonly;
  /* Section emitters, indexed by (readonly ? 0 : 2) + (sdata ? 1 : 0).  */
  static void (* const sec_funcs[4]) PARAMS ((void)) = {
    &readonly_data_section,
    &sdata2_section,
    &data_section,
    &sdata_section
  };

  /* A decl is small-data if it fits under -G, small data is enabled,
     and (for -msdata=data) it is public.  */
  needs_sdata = (size > 0
		 && size <= g_switch_value
		 && rs6000_sdata != SDATA_NONE
		 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));

  if (TREE_CODE (decl) == STRING_CST)
    readonly = ! flag_writable_strings;
  else if (TREE_CODE (decl) == VAR_DECL)
    readonly = (! (flag_pic && reloc)
		&& TREE_READONLY (decl)
		&& ! TREE_SIDE_EFFECTS (decl)
		&& DECL_INITIAL (decl)
		&& DECL_INITIAL (decl) != error_mark_node
		&& TREE_CONSTANT (DECL_INITIAL (decl)));
  else if (TREE_CODE (decl) == CONSTRUCTOR)
    readonly = (! (flag_pic && reloc)
		&& ! TREE_SIDE_EFFECTS (decl)
		&& TREE_CONSTANT (decl));
  else
    readonly = 1;
  /* Only EABI has a read-only small data section (.sdata2); for
     other small-data modes force the writable variant.  */
  if (needs_sdata && rs6000_sdata != SDATA_EABI)
    readonly = 0;

  (*sec_funcs[(readonly ? 0 : 2) + (needs_sdata ? 1 : 0)])();
}
/* A C statement to build up a unique section name, expressed as a
   STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
   RELOC indicates whether the initial value of EXP requires
   link-time relocations.  If you do not define this macro, GCC will use
   the symbol name prefixed by `.' as the section name.  Note - this
   macro can now be called for uninitialized data items as well as
   initialised data and functions.  */

static void
rs6000_elf_unique_section (decl, reloc)
     tree decl;
     int reloc;
{
  int len;
  int sec;
  const char *name;
  char *string;
  const char *prefix;

  /* Section name prefixes, indexed by SEC below; column 1 is the
     link-once (DECL_ONE_ONLY) variant of column 0.  */
  static const char *const prefixes[7][2] =
  {
    { ".rodata.", ".gnu.linkonce.r." },
    { ".sdata2.", ".gnu.linkonce.s2." },
    { ".data.", ".gnu.linkonce.d." },
    { ".sdata.", ".gnu.linkonce.s." },
    { ".bss.", ".gnu.linkonce.b." },
    { ".sbss.", ".gnu.linkonce.sb." },
    { ".text.", ".gnu.linkonce.t." }
  };

  if (TREE_CODE (decl) == FUNCTION_DECL)
    sec = 6;
  else
    {
      int readonly;
      int needs_sdata;
      int size;

      /* NOTE(review): unlike rs6000_elf_select_section, the VAR_DECL
	 case here does not test DECL_INITIAL before TREE_CONSTANT
	 (DECL_INITIAL ...) — confirm this is intentional; a null
	 initializer is handled by the sec = 4 branch below.  */
      readonly = 1;
      if (TREE_CODE (decl) == STRING_CST)
	readonly = ! flag_writable_strings;
      else if (TREE_CODE (decl) == VAR_DECL)
	readonly = (! (flag_pic && reloc)
		    && TREE_READONLY (decl)
		    && ! TREE_SIDE_EFFECTS (decl)
		    && TREE_CONSTANT (DECL_INITIAL (decl)));

      size = int_size_in_bytes (TREE_TYPE (decl));
      needs_sdata = (size > 0
		     && size <= g_switch_value
		     && rs6000_sdata != SDATA_NONE
		     && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));

      /* Uninitialized data goes to the bss-flavoured rows.  */
      if (DECL_INITIAL (decl) == 0
	  || DECL_INITIAL (decl) == error_mark_node)
	sec = 4;
      else if (! readonly)
	sec = 2;
      else
	sec = 0;

      if (needs_sdata)
	{
	  /* .sdata2 is only for EABI.  */
	  if (sec == 0 && rs6000_sdata != SDATA_EABI)
	    sec = 2;
	  /* Shift to the small-data column of the table.  */
	  sec += 1;
	}
    }

  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
  name = (*targetm.strip_name_encoding) (name);
  prefix = prefixes[sec][DECL_ONE_ONLY (decl)];
  len = strlen (name) + strlen (prefix);
  string = alloca (len + 1);

  sprintf (string, "%s%s", prefix, name);

  DECL_SECTION_NAME (decl) = build_string (len, string);
}
/* If we are referencing a function that is static or is known to be
   in this file, make the SYMBOL_REF special.  We can use this to indicate
   that we can branch to this function without emitting a no-op after the
   call.  For real AIX calling sequences, we also replace the
   function name with the real name (1 or 2 leading .'s), rather than
   the function descriptor name.  This saves a lot of overriding code
   to read the prefixes.  */

static void
rs6000_elf_encode_section_info (decl, first)
     tree decl;
     int first;
{
  /* Only encode on the first call for this DECL.  */
  if (!first)
    return;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      rtx sym_ref = XEXP (DECL_RTL (decl), 0);
      /* Mark locally-bound, non-weak functions as directly callable.  */
      if ((TREE_ASM_WRITTEN (decl) || ! TREE_PUBLIC (decl))
	  && ! DECL_WEAK (decl))
	SYMBOL_REF_FLAG (sym_ref) = 1;

      if (DEFAULT_ABI == ABI_AIX)
	{
	  /* NOTE(review): len1 is always 1 here because the enclosing
	     test is DEFAULT_ABI == ABI_AIX, so the two-dot branch and
	     the second str[1] = '.' store look vestigial (the memcpy
	     overwrites str[1] when len1 == 1) — confirm.  */
	  size_t len1 = (DEFAULT_ABI == ABI_AIX) ? 1 : 2;
	  size_t len2 = strlen (XSTR (sym_ref, 0));
	  char *str = alloca (len1 + len2 + 1);
	  str[0] = '.';
	  str[1] = '.';
	  memcpy (str + len1, XSTR (sym_ref, 0), len2 + 1);

	  XSTR (sym_ref, 0) = ggc_alloc_string (str, len1 + len2);
	}
    }
  else if (rs6000_sdata != SDATA_NONE
	   && DEFAULT_ABI == ABI_V4
	   && TREE_CODE (decl) == VAR_DECL)
    {
      int size = int_size_in_bytes (TREE_TYPE (decl));
      tree section_name = DECL_SECTION_NAME (decl);
      const char *name = (char *)0;
      int len = 0;

      if (section_name)
	{
	  if (TREE_CODE (section_name) == STRING_CST)
	    {
	      name = TREE_STRING_POINTER (section_name);
	      len = TREE_STRING_LENGTH (section_name);
	    }
	  else
	    abort ();
	}

      /* Prefix the symbol with '@' when it lives in (or is small
	 enough for) one of the small-data sections.  */
      if ((size > 0 && size <= g_switch_value)
	  || (name
	      && ((len == sizeof (".sdata") - 1
		   && strcmp (name, ".sdata") == 0)
		  || (len == sizeof (".sdata2") - 1
		      && strcmp (name, ".sdata2") == 0)
		  || (len == sizeof (".sbss") - 1
		      && strcmp (name, ".sbss") == 0)
		  || (len == sizeof (".sbss2") - 1
		      && strcmp (name, ".sbss2") == 0)
		  || (len == sizeof (".PPC.EMB.sdata0") - 1
		      && strcmp (name, ".PPC.EMB.sdata0") == 0)
		  || (len == sizeof (".PPC.EMB.sbss0") - 1
		      && strcmp (name, ".PPC.EMB.sbss0") == 0))))
	{
	  rtx sym_ref = XEXP (DECL_RTL (decl), 0);
	  size_t len = strlen (XSTR (sym_ref, 0));
	  char *str = alloca (len + 2);

	  str[0] = '@';
	  memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
	  XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
	}
    }
}
/* Strip the leading encoding characters ('*' and '@') added by
   encode_section_info, returning the bare symbol name.  */

static const char *
rs6000_elf_strip_name_encoding (str)
     const char *str;
{
  for (; *str == '*' || *str == '@'; ++str)
    ;
  return str;
}
12535 #endif /* USING_ELFOS_H */
/* Return a REG that occurs in ADDR with coefficient 1.
   ADDR can be effectively incremented by incrementing REG.

   r0 is special and we must not select it as an address
   register by this routine since our caller will try to
   increment the returned register via an "la" instruction.  */

struct rtx_def *
find_addr_reg (addr)
     rtx addr;
{
  /* Walk down nested PLUS expressions, always following the operand
     that can contain the register (skipping constants).  */
  while (GET_CODE (addr) == PLUS)
    {
      if (GET_CODE (XEXP (addr, 0)) == REG
	  && REGNO (XEXP (addr, 0)) != 0)
	addr = XEXP (addr, 0);
      else if (GET_CODE (XEXP (addr, 1)) == REG
	       && REGNO (XEXP (addr, 1)) != 0)
	addr = XEXP (addr, 1);
      else if (CONSTANT_P (XEXP (addr, 0)))
	addr = XEXP (addr, 1);
      else if (CONSTANT_P (XEXP (addr, 1)))
	addr = XEXP (addr, 0);
      else
	abort ();
    }
  /* The walk must have ended at a non-r0 register.  */
  if (GET_CODE (addr) == REG && REGNO (addr) != 0)
    return addr;
  abort ();
}
/* Issue a fatal "bad address" error for the insn containing operand OP.  */

void
rs6000_fatal_bad_address (op)
     rtx op;
{
  fatal_insn ("bad address", op);
}
/* Called to register all of our global variables with the garbage
   collector.  Creates the TOC hash table and registers it (with its
   mark routine) as a GC root.  */

static void
rs6000_add_gc_roots ()
{
  toc_hash_table = htab_create (1021, toc_hash_function, toc_hash_eq, NULL);
  ggc_add_root (&toc_hash_table, 1, sizeof (toc_hash_table),
		toc_hash_mark_table);
}
12587 #if TARGET_MACHO
12589 #if 0
/* Returns 1 if OP is either a symbol reference or a sum of a symbol
   reference and a constant.  (Currently disabled by the enclosing
   #if 0.)  */

int
symbolic_operand (op)
     rtx op;
{
  switch (GET_CODE (op))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return 1;
    case CONST:
      op = XEXP (op, 0);
      /* Accept a bare symbol, or (plus (symbol/label) (const_int)).  */
      return (GET_CODE (op) == SYMBOL_REF ||
	      (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
	       || GET_CODE (XEXP (op, 0)) == LABEL_REF)
	      && GET_CODE (XEXP (op, 1)) == CONST_INT);
    default:
      return 0;
    }
}
12612 #endif
12614 #ifdef RS6000_LONG_BRANCH
/* Linked list of pending compiler-generated call stubs.  */
static tree stub_list = 0;

/* ADD_COMPILER_STUB adds the compiler generated stub for handling
   procedure calls to the linked list.  LABEL_NAME and FUNCTION_NAME
   are identifiers; LINE_NUMBER is stored for later debug output.  */

void
add_compiler_stub (label_name, function_name, line_number)
     tree label_name;
     tree function_name;
     int line_number;
{
  tree stub = build_tree_list (function_name, label_name);
  /* The line number is stashed in the node's TREE_TYPE slot.  */
  TREE_TYPE (stub) = build_int_2 (line_number, 0);
  TREE_CHAIN (stub) = stub_list;
  stub_list = stub;
}

/* Accessors for the fields of a stub-list node.  */
#define STUB_LABEL_NAME(STUB)     TREE_VALUE (STUB)
#define STUB_FUNCTION_NAME(STUB)  TREE_PURPOSE (STUB)
#define STUB_LINE_NUMBER(STUB)    TREE_INT_CST_LOW (TREE_TYPE (STUB))
/* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
   handling procedure calls from the linked list and initializes the
   linked list.  Only emits anything when not compiling PIC.  */

void
output_compiler_stub ()
{
  char tmp_buf[256];
  char label_buf[256];
  tree stub;

  if (!flag_pic)
    for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
      {
	fprintf (asm_out_file,
		 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));

#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
	if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
	  fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */

	/* Build the assembler name: strip a leading '*', otherwise
	   prepend the user-label '_' prefix.  */
	if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
	  strcpy (label_buf,
		  IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
	else
	  {
	    label_buf[0] = '_';
	    strcpy (label_buf+1,
		    IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
	  }

	/* Load the target address into r12 and branch through CTR.  */
	strcpy (tmp_buf, "lis r12,hi16(");
	strcat (tmp_buf, label_buf);
	strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
	strcat (tmp_buf, label_buf);
	strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
	output_asm_insn (tmp_buf, 0);

#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
	if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
	  fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
      }

  /* Reset the list for the next translation unit.  */
  stub_list = 0;
}
/* NO_PREVIOUS_DEF checks in the link list whether the function name is
   already there or not.  Returns 1 when FUNCTION_NAME has no stub yet.  */

int
no_previous_def (function_name)
     tree function_name;
{
  tree stub;
  for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
    if (function_name == STUB_FUNCTION_NAME (stub))
      return 0;
  return 1;
}
/* GET_PREV_LABEL gets the label name from the previous definition of
   the function.  Returns 0 when FUNCTION_NAME has no recorded stub.  */

tree
get_prev_label (function_name)
     tree function_name;
{
  tree stub;
  for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
    if (function_name == STUB_FUNCTION_NAME (stub))
      return STUB_LABEL_NAME (stub);
  return 0;
}
/* INSN is either a function call or a millicode call.  It may have an
   unconditional jump in its delay slot.

   CALL_DEST is the routine we are calling.  Returns the assembler
   template for the call (a pointer to a static buffer).  */

char *
output_call (insn, call_dest, operand_number)
     rtx insn;
     rtx call_dest;
     int operand_number;
{
  static char buf[256];
  /* Direct calls under -mlong-branch (non-PIC) go through a
     compiler-generated stub; register or remember the stub label.  */
  if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
    {
      tree labelname;
      tree funname = get_identifier (XSTR (call_dest, 0));

      if (no_previous_def (funname))
	{
	  int line_number = 0;
	  rtx label_rtx = gen_label_rtx ();
	  char *label_buf, temp_buf[256];
	  ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
				       CODE_LABEL_NUMBER (label_rtx));
	  label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
	  labelname = get_identifier (label_buf);
	  /* Scan back for a NOTE to recover a source line number for
	     the stub's debug info.  */
	  for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
	  if (insn)
	    line_number = NOTE_LINE_NUMBER (insn);
	  add_compiler_stub (labelname, funname, line_number);
	}
      else
	labelname = get_prev_label (funname);

      /* Emit a "jbsr" naming both the target and the stub label.  */
      sprintf (buf, "jbsr %%z%d,%.246s",
	       operand_number, IDENTIFIER_POINTER (labelname));
      return buf;
    }
  else
    {
      sprintf (buf, "bl %%z%d", operand_number);
      return buf;
    }
}
12758 #endif /* RS6000_LONG_BRANCH */
/* Build a local label of the form "LN$symbol" into BUF, preserving or
   adding quoting when SYMBOL is already quoted or needs quotes.
   LENGTH is accepted for interface symmetry but unused here.  */
#define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N)		\
  do {								\
    const char *const symbol_ = (SYMBOL);			\
    char *buffer_ = (BUF);					\
    if (symbol_[0] == '"')					\
      {								\
	sprintf(buffer_, "\"L%d$%s", (N), symbol_+1);		\
      }								\
    else if (name_needs_quotes(symbol_))			\
      {								\
	sprintf(buffer_, "\"L%d$%s\"", (N), symbol_);		\
      }								\
    else							\
      {								\
	sprintf(buffer_, "L%d$%s", (N), symbol_);		\
      }								\
  } while (0)
/* Generate PIC and indirect symbol stubs.  FILE is the assembler
   output stream, SYMB the target symbol, STUB the stub's own name.  */

void
machopic_output_stub (file, symb, stub)
     FILE *file;
     const char *symb, *stub;
{
  unsigned int length;
  char *symbol_name, *lazy_ptr_name;
  char *local_label_0;
  static int label = 0;

  /* Lose our funky encoding stuff so it doesn't contaminate the stub.  */
  symb = (*targetm.strip_name_encoding) (symb);

  label += 1;

  length = strlen (symb);
  symbol_name = alloca (length + 32);
  GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);

  lazy_ptr_name = alloca (length + 32);
  GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);

  local_label_0 = alloca (length + 32);
  GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);

  if (flag_pic == 2)
    machopic_picsymbol_stub_section ();
  else
    machopic_symbol_stub_section ();

  fprintf (file, "%s:\n", stub);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);

  if (flag_pic == 2)
    {
      /* Fully PIC stub: compute the lazy pointer's address
	 PC-relatively via a bcl/mflr pair, load it into r12 and
	 branch through CTR.  */
      fprintf (file, "\tmflr r0\n");
      fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
      fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
      fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtlr r0\n");
      fprintf (file, "\tlwz r12,lo16(%s-%s)(r11)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtctr r12\n");
      fprintf (file, "\taddi r11,r11,lo16(%s-%s)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tbctr\n");
    }
  else
    fprintf (file, "non-pure not supported\n");

  /* Emit the lazy pointer itself, initially pointing at the dyld
     binding helper.  */
  machopic_lazy_symbol_ptr_section ();
  fprintf (file, "%s:\n", lazy_ptr_name);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
  fprintf (file, "\t.long dyld_stub_binding_helper\n");
}
/* Legitimize PIC addresses.  If the address is already
   position-independent, we return ORIG.  Newly generated
   position-independent addresses go into a reg.  This is REG if non
   zero, otherwise we allocate register(s) as necessary.  */

/* True when X (a CONST_INT) fits in a signed 16-bit immediate.  */
#define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)

rtx
rs6000_machopic_legitimize_pic_address (orig, mode, reg)
     rtx orig;
     enum machine_mode mode;
     rtx reg;
{
  rtx base, offset;

  if (reg == NULL && ! reload_in_progress && ! reload_completed)
    reg = gen_reg_rtx (Pmode);

  if (GET_CODE (orig) == CONST)
    {
      /* Already expressed relative to the PIC base register.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
	return orig;

      if (GET_CODE (XEXP (orig, 0)) == PLUS)
	{
	  /* Legitimize each operand of the PLUS recursively.  */
	  base =
	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
						    Pmode, reg);
	  offset =
	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
						    Pmode, reg);
	}
      else
	abort ();

      if (GET_CODE (offset) == CONST_INT)
	{
	  if (SMALL_INT (offset))
	    return plus_constant (base, INTVAL (offset));
	  else if (! reload_in_progress && ! reload_completed)
	    offset = force_reg (Pmode, offset);
	  else
	    {
	      /* No new pseudos during reload; go through the
		 constant pool instead.  */
	      rtx mem = force_const_mem (Pmode, orig);
	      return machopic_legitimize_pic_address (mem, Pmode, reg);
	    }
	}
      return gen_rtx (PLUS, Pmode, base, offset);
    }

  /* Fall back on generic machopic code.  */
  return machopic_legitimize_pic_address (orig, mode, reg);
}
/* This is just a placeholder to make linking work without having to
   add this to the generic Darwin EXTRA_SECTIONS.  If -mcall-aix is
   ever needed for Darwin (not too likely!) this would have to get a
   real definition.  */

void
toc_section ()
{
}
12903 #endif /* TARGET_MACHO */
12905 #if TARGET_ELF
12906 static unsigned int
12907 rs6000_elf_section_type_flags (decl, name, reloc)
12908 tree decl;
12909 const char *name;
12910 int reloc;
12912 unsigned int flags = default_section_type_flags (decl, name, reloc);
12914 if (TARGET_RELOCATABLE)
12915 flags |= SECTION_WRITE;
12917 return flags;
/* Record an element in the table of global constructors.  SYMBOL is
   a SYMBOL_REF of the function to be called; PRIORITY is a number
   between 0 and MAX_INIT_PRIORITY.

   This differs from default_named_section_asm_out_constructor in
   that we have special handling for -mrelocatable.  */

static void
rs6000_elf_asm_out_constructor (symbol, priority)
     rtx symbol;
     int priority;
{
  const char *section = ".ctors";
  char buf[16];

  if (priority != DEFAULT_INIT_PRIORITY)
    {
      sprintf (buf, ".ctors.%.5u",
	       /* Invert the numbering so the linker puts us in the proper
		  order; constructors are run from right to left, and the
		  linker sorts in increasing order.  */
	       MAX_INIT_PRIORITY - priority);
      section = buf;
    }

  named_section_flags (section, SECTION_WRITE);
  assemble_align (POINTER_SIZE);

  if (TARGET_RELOCATABLE)
    {
      /* -mrelocatable entries need a run-time @fixup relocation.  */
      fputs ("\t.long (", asm_out_file);
      output_addr_const (asm_out_file, symbol);
      fputs (")@fixup\n", asm_out_file);
    }
  else
    assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
}
/* Record an element in the table of global destructors.  SYMBOL is a
   SYMBOL_REF of the function to be called; PRIORITY is a number
   between 0 and MAX_INIT_PRIORITY.  Counterpart of
   rs6000_elf_asm_out_constructor, emitting into .dtors.  */

static void
rs6000_elf_asm_out_destructor (symbol, priority)
     rtx symbol;
     int priority;
{
  const char *section = ".dtors";
  char buf[16];

  if (priority != DEFAULT_INIT_PRIORITY)
    {
      sprintf (buf, ".dtors.%.5u",
	       /* Invert the numbering so the linker puts us in the proper
		  order; the linker sorts section names in increasing
		  order.  */
	       MAX_INIT_PRIORITY - priority);
      section = buf;
    }

  named_section_flags (section, SECTION_WRITE);
  assemble_align (POINTER_SIZE);

  if (TARGET_RELOCATABLE)
    {
      /* -mrelocatable entries need a run-time @fixup relocation.  */
      fputs ("\t.long (", asm_out_file);
      output_addr_const (asm_out_file, symbol);
      fputs (")@fixup\n", asm_out_file);
    }
  else
    assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
}
12988 #endif
12990 #if TARGET_XCOFF
/* Switch to the named section by emitting a .csect directive.
   FLAGS is ignored; XCOFF csects are selected by name alone here.  */

static void
xcoff_asm_named_section (name, flags)
     const char *name;
     unsigned int flags ATTRIBUTE_UNUSED;
{
  fprintf (asm_out_file, "\t.csect %s\n", name);
}
/* Select the XCOFF output section for EXP.  Read-only data (constant
   strings when strings are not writable, or fully-constant,
   relocation-free decls) goes to a read-only csect; everything else
   to a data csect.  Public symbols use the shared sections, private
   ones the private sections.  */

static void
rs6000_xcoff_select_section (exp, reloc, align)
     tree exp;
     int reloc;
     unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
{
  if ((TREE_CODE (exp) == STRING_CST
       && ! flag_writable_strings)
      || (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
	  && TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp)
	  && DECL_INITIAL (exp)
	  && (DECL_INITIAL (exp) == error_mark_node
	      || TREE_CONSTANT (DECL_INITIAL (exp)))
	  && ! (reloc)))
    {
      if (TREE_PUBLIC (exp))
	read_only_data_section ();
      else
	read_only_private_data_section ();
    }
  else
    {
      if (TREE_PUBLIC (exp))
	data_section ();
      else
	private_data_section ();
    }
}
/* Assign a unique section name to DECL.  Only functions are handled:
   they get a ".NAME[PR]" csect.  Other decls are left untouched.  */

static void
rs6000_xcoff_unique_section (decl, reloc)
     tree decl;
     int reloc ATTRIBUTE_UNUSED;
{
  const char *name;
  char *string;
  size_t len;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
      /* Room for the '.' prefix and the "[PR]" suffix.  */
      len = strlen (name) + 5;
      string = alloca (len + 1);
      sprintf (string, ".%s[PR]", name);
      DECL_SECTION_NAME (decl) = build_string (len, string);
    }
}
/* Select section for constant in constant pool.

   On RS/6000, all constants are in the private read-only data area.
   However, if this is being placed in the TOC it must be output as a
   toc entry.  */

static void
rs6000_xcoff_select_rtx_section (mode, x, align)
     enum machine_mode mode;
     rtx x;
     unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
{
  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
    toc_section ();
  else
    read_only_private_data_section ();
}
/* Remove any trailing [DS] or the like from the symbol name.  */

static const char *
rs6000_xcoff_strip_name_encoding (name)
     const char *name;
{
  size_t len;

  /* Skip a leading '*' flag character.  */
  if (name[0] == '*')
    ++name;

  /* A name ending in ']' carries a four-character mapping-class
     suffix such as "[DS]"; return a copy without it.  */
  len = strlen (name);
  return name[len - 1] == ']' ? ggc_alloc_string (name, len - 4) : name;
}
13081 #endif /* TARGET_XCOFF */
13083 /* Note that this is also used for ELF64. */
13085 static void
13086 rs6000_xcoff_encode_section_info (decl, first)
13087 tree decl;
13088 int first ATTRIBUTE_UNUSED;
13090 if (TREE_CODE (decl) == FUNCTION_DECL
13091 && (TREE_ASM_WRITTEN (decl) || ! TREE_PUBLIC (decl))
13092 && ! DECL_WEAK (decl))
13093 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;