/* Check for Altivec mode when returning altivec register.
   [official-gcc.git] gcc/config/rs6000/rs6000.c
   blob 03125965c3879ea31e9887c8e35d09539cadda6c  */
/* Subroutines used for code generation on IBM RS/6000.
   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 2, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING.  If not, write to the
   Free Software Foundation, 59 Temple Place - Suite 330, Boston,
   MA 02111-1307, USA.  */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "rtl.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "real.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
34 #include "flags.h"
35 #include "recog.h"
36 #include "obstack.h"
37 #include "tree.h"
38 #include "expr.h"
39 #include "optabs.h"
40 #include "except.h"
41 #include "function.h"
42 #include "output.h"
43 #include "basic-block.h"
44 #include "integrate.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "hashtab.h"
48 #include "tm_p.h"
49 #include "target.h"
50 #include "target-def.h"
51 #include "langhooks.h"
52 #include "reload.h"
53 #include "cfglayout.h"
54 #include "sched-int.h"
55 #include "tree-gimple.h"
56 #if TARGET_XCOFF
57 #include "xcoffout.h" /* get declarations of xcoff_*_section_name */
58 #endif
60 #ifndef TARGET_NO_PROTOTYPE
61 #define TARGET_NO_PROTOTYPE 0
62 #endif
64 #define EASY_VECTOR_15(n) ((n) >= -16 && (n) <= 15)
65 #define EASY_VECTOR_15_ADD_SELF(n) ((n) >= 0x10 && (n) <= 0x1e \
66 && !((n) & 1))
68 #define min(A,B) ((A) < (B) ? (A) : (B))
69 #define max(A,B) ((A) > (B) ? (A) : (B))
71 /* Structure used to define the rs6000 stack */
72 typedef struct rs6000_stack {
73 int first_gp_reg_save; /* first callee saved GP register used */
74 int first_fp_reg_save; /* first callee saved FP register used */
75 int first_altivec_reg_save; /* first callee saved AltiVec register used */
76 int lr_save_p; /* true if the link reg needs to be saved */
77 int cr_save_p; /* true if the CR reg needs to be saved */
78 unsigned int vrsave_mask; /* mask of vec registers to save */
79 int toc_save_p; /* true if the TOC needs to be saved */
80 int push_p; /* true if we need to allocate stack space */
81 int calls_p; /* true if the function makes any calls */
82 enum rs6000_abi abi; /* which ABI to use */
83 int gp_save_offset; /* offset to save GP regs from initial SP */
84 int fp_save_offset; /* offset to save FP regs from initial SP */
85 int altivec_save_offset; /* offset to save AltiVec regs from initial SP */
86 int lr_save_offset; /* offset to save LR from initial SP */
87 int cr_save_offset; /* offset to save CR from initial SP */
88 int vrsave_save_offset; /* offset to save VRSAVE from initial SP */
89 int spe_gp_save_offset; /* offset to save spe 64-bit gprs */
90 int toc_save_offset; /* offset to save the TOC pointer */
91 int varargs_save_offset; /* offset to save the varargs registers */
92 int ehrd_offset; /* offset to EH return data */
93 int reg_size; /* register size (4 or 8) */
94 int varargs_size; /* size to hold V.4 args passed in regs */
95 HOST_WIDE_INT vars_size; /* variable save area size */
96 int parm_size; /* outgoing parameter size */
97 int save_size; /* save area size */
98 int fixed_size; /* fixed size of stack frame */
99 int gp_size; /* size of saved GP registers */
100 int fp_size; /* size of saved FP registers */
101 int altivec_size; /* size of saved AltiVec registers */
102 int cr_size; /* size to hold CR if not in save_size */
103 int lr_size; /* size to hold LR if not in save_size */
104 int vrsave_size; /* size to hold VRSAVE if not in save_size */
105 int altivec_padding_size; /* size of altivec alignment padding if
106 not in save_size */
107 int spe_gp_size; /* size of 64-bit GPR save size for SPE */
108 int spe_padding_size;
109 int toc_size; /* size to hold TOC if not in save_size */
110 HOST_WIDE_INT total_size; /* total bytes allocated for stack */
111 int spe_64bit_regs_used;
112 } rs6000_stack_t;
114 /* Target cpu type */
116 enum processor_type rs6000_cpu;
117 struct rs6000_cpu_select rs6000_select[3] =
119 /* switch name, tune arch */
120 { (const char *)0, "--with-cpu=", 1, 1 },
121 { (const char *)0, "-mcpu=", 1, 1 },
122 { (const char *)0, "-mtune=", 1, 0 },
125 /* Always emit branch hint bits. */
126 static GTY(()) bool rs6000_always_hint;
128 /* Schedule instructions for group formation. */
129 static GTY(()) bool rs6000_sched_groups;
131 /* Support adjust_priority scheduler hook
132 and -mprioritize-restricted-insns= option. */
133 const char *rs6000_sched_restricted_insns_priority_str;
134 int rs6000_sched_restricted_insns_priority;
136 /* Support for -msched-costly-dep option. */
137 const char *rs6000_sched_costly_dep_str;
138 enum rs6000_dependence_cost rs6000_sched_costly_dep;
140 /* Support for -minsert-sched-nops option. */
141 const char *rs6000_sched_insert_nops_str;
142 enum rs6000_nop_insertion rs6000_sched_insert_nops;
144 /* Size of long double */
145 const char *rs6000_long_double_size_string;
146 int rs6000_long_double_type_size;
148 /* Whether -mabi=altivec has appeared */
149 int rs6000_altivec_abi;
151 /* Whether VRSAVE instructions should be generated. */
152 int rs6000_altivec_vrsave;
154 /* String from -mvrsave= option. */
155 const char *rs6000_altivec_vrsave_string;
157 /* Nonzero if we want SPE ABI extensions. */
158 int rs6000_spe_abi;
160 /* Whether isel instructions should be generated. */
161 int rs6000_isel;
163 /* Whether SPE simd instructions should be generated. */
164 int rs6000_spe;
166 /* Nonzero if floating point operations are done in the GPRs. */
167 int rs6000_float_gprs = 0;
169 /* String from -mfloat-gprs=. */
170 const char *rs6000_float_gprs_string;
172 /* String from -misel=. */
173 const char *rs6000_isel_string;
175 /* String from -mspe=. */
176 const char *rs6000_spe_string;
178 /* Set to nonzero once AIX common-mode calls have been defined. */
179 static GTY(()) int common_mode_defined;
181 /* Save information from a "cmpxx" operation until the branch or scc is
182 emitted. */
183 rtx rs6000_compare_op0, rs6000_compare_op1;
184 int rs6000_compare_fp_p;
186 /* Label number of label created for -mrelocatable, to call to so we can
187 get the address of the GOT section */
188 int rs6000_pic_labelno;
190 #ifdef USING_ELFOS_H
191 /* Which abi to adhere to */
192 const char *rs6000_abi_name;
194 /* Semantics of the small data area */
195 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
197 /* Which small data model to use */
198 const char *rs6000_sdata_name = (char *)0;
200 /* Counter for labels which are to be placed in .fixup. */
201 int fixuplabelno = 0;
202 #endif
204 /* Bit size of immediate TLS offsets and string from which it is decoded. */
205 int rs6000_tls_size = 32;
206 const char *rs6000_tls_size_string;
208 /* ABI enumeration available for subtarget to use. */
209 enum rs6000_abi rs6000_current_abi;
211 /* ABI string from -mabi= option. */
212 const char *rs6000_abi_string;
214 /* Debug flags */
215 const char *rs6000_debug_name;
216 int rs6000_debug_stack; /* debug stack applications */
217 int rs6000_debug_arg; /* debug argument handling */
219 /* Value is TRUE if register/mode pair is accepatable. */
220 bool rs6000_hard_regno_mode_ok_p[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
222 /* Opaque types. */
223 static GTY(()) tree opaque_V2SI_type_node;
224 static GTY(()) tree opaque_V2SF_type_node;
225 static GTY(()) tree opaque_p_V2SI_type_node;
226 static GTY(()) tree V16QI_type_node;
227 static GTY(()) tree V2SI_type_node;
228 static GTY(()) tree V2SF_type_node;
229 static GTY(()) tree V4HI_type_node;
230 static GTY(()) tree V4SI_type_node;
231 static GTY(()) tree V4SF_type_node;
232 static GTY(()) tree V8HI_type_node;
233 static GTY(()) tree unsigned_V16QI_type_node;
234 static GTY(()) tree unsigned_V8HI_type_node;
235 static GTY(()) tree unsigned_V4SI_type_node;
236 static GTY(()) tree bool_char_type_node; /* __bool char */
237 static GTY(()) tree bool_short_type_node; /* __bool short */
238 static GTY(()) tree bool_int_type_node; /* __bool int */
239 static GTY(()) tree pixel_type_node; /* __pixel */
240 static GTY(()) tree bool_V16QI_type_node; /* __vector __bool char */
241 static GTY(()) tree bool_V8HI_type_node; /* __vector __bool short */
242 static GTY(()) tree bool_V4SI_type_node; /* __vector __bool int */
243 static GTY(()) tree pixel_V8HI_type_node; /* __vector __pixel */
245 int rs6000_warn_altivec_long = 1; /* On by default. */
246 const char *rs6000_warn_altivec_long_switch;
248 const char *rs6000_traceback_name;
249 static enum {
250 traceback_default = 0,
251 traceback_none,
252 traceback_part,
253 traceback_full
254 } rs6000_traceback;
256 /* Flag to say the TOC is initialized */
257 int toc_initialized;
258 char toc_label_name[10];
260 /* Alias set for saves and restores from the rs6000 stack. */
261 static GTY(()) int rs6000_sr_alias_set;
263 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
264 The only place that looks at this is rs6000_set_default_type_attributes;
265 everywhere else should rely on the presence or absence of a longcall
266 attribute on the function declaration. */
267 int rs6000_default_long_calls;
268 const char *rs6000_longcall_switch;
270 /* Control alignment for fields within structures. */
271 /* String from -malign-XXXXX. */
272 const char *rs6000_alignment_string;
273 int rs6000_alignment_flags;
275 struct builtin_description
277 /* mask is not const because we're going to alter it below. This
278 nonsense will go away when we rewrite the -march infrastructure
279 to give us more target flag bits. */
280 unsigned int mask;
281 const enum insn_code icode;
282 const char *const name;
283 const enum rs6000_builtins code;
/* Target cpu costs.  All costs are expressed relative to the cost of a
   single add instruction (see COSTS_N_INSNS).  */

struct processor_costs {
  const int mulsi;	  /* cost of SImode multiplication.  */
  const int mulsi_const;  /* cost of SImode multiplication by constant.  */
  const int mulsi_const9; /* cost of SImode mult by short constant.  */
  const int muldi;	  /* cost of DImode multiplication.  */
  const int divsi;	  /* cost of SImode division.  */
  const int divdi;	  /* cost of DImode division.  */
  const int fp;		  /* cost of simple SFmode and DFmode insns.  */
  const int dmul;	  /* cost of DFmode multiplication (and fmadd).  */
  const int sdiv;	  /* cost of SFmode division (fdivs).  */
  const int ddiv;	  /* cost of DFmode division (fdiv).  */
};

/* Cost table selected for the CPU being compiled for.  */
const struct processor_costs *rs6000_cost;
303 /* Processor costs (relative to an add) */
305 /* Instruction size costs on 32bit processors. */
306 static const
307 struct processor_costs size32_cost = {
308 COSTS_N_INSNS (1), /* mulsi */
309 COSTS_N_INSNS (1), /* mulsi_const */
310 COSTS_N_INSNS (1), /* mulsi_const9 */
311 COSTS_N_INSNS (1), /* muldi */
312 COSTS_N_INSNS (1), /* divsi */
313 COSTS_N_INSNS (1), /* divdi */
314 COSTS_N_INSNS (1), /* fp */
315 COSTS_N_INSNS (1), /* dmul */
316 COSTS_N_INSNS (1), /* sdiv */
317 COSTS_N_INSNS (1), /* ddiv */
320 /* Instruction size costs on 64bit processors. */
321 static const
322 struct processor_costs size64_cost = {
323 COSTS_N_INSNS (1), /* mulsi */
324 COSTS_N_INSNS (1), /* mulsi_const */
325 COSTS_N_INSNS (1), /* mulsi_const9 */
326 COSTS_N_INSNS (1), /* muldi */
327 COSTS_N_INSNS (1), /* divsi */
328 COSTS_N_INSNS (1), /* divdi */
329 COSTS_N_INSNS (1), /* fp */
330 COSTS_N_INSNS (1), /* dmul */
331 COSTS_N_INSNS (1), /* sdiv */
332 COSTS_N_INSNS (1), /* ddiv */
335 /* Instruction costs on RIOS1 processors. */
336 static const
337 struct processor_costs rios1_cost = {
338 COSTS_N_INSNS (5), /* mulsi */
339 COSTS_N_INSNS (4), /* mulsi_const */
340 COSTS_N_INSNS (3), /* mulsi_const9 */
341 COSTS_N_INSNS (5), /* muldi */
342 COSTS_N_INSNS (19), /* divsi */
343 COSTS_N_INSNS (19), /* divdi */
344 COSTS_N_INSNS (2), /* fp */
345 COSTS_N_INSNS (2), /* dmul */
346 COSTS_N_INSNS (19), /* sdiv */
347 COSTS_N_INSNS (19), /* ddiv */
350 /* Instruction costs on RIOS2 processors. */
351 static const
352 struct processor_costs rios2_cost = {
353 COSTS_N_INSNS (2), /* mulsi */
354 COSTS_N_INSNS (2), /* mulsi_const */
355 COSTS_N_INSNS (2), /* mulsi_const9 */
356 COSTS_N_INSNS (2), /* muldi */
357 COSTS_N_INSNS (13), /* divsi */
358 COSTS_N_INSNS (13), /* divdi */
359 COSTS_N_INSNS (2), /* fp */
360 COSTS_N_INSNS (2), /* dmul */
361 COSTS_N_INSNS (17), /* sdiv */
362 COSTS_N_INSNS (17), /* ddiv */
365 /* Instruction costs on RS64A processors. */
366 static const
367 struct processor_costs rs64a_cost = {
368 COSTS_N_INSNS (20), /* mulsi */
369 COSTS_N_INSNS (12), /* mulsi_const */
370 COSTS_N_INSNS (8), /* mulsi_const9 */
371 COSTS_N_INSNS (34), /* muldi */
372 COSTS_N_INSNS (65), /* divsi */
373 COSTS_N_INSNS (67), /* divdi */
374 COSTS_N_INSNS (4), /* fp */
375 COSTS_N_INSNS (4), /* dmul */
376 COSTS_N_INSNS (31), /* sdiv */
377 COSTS_N_INSNS (31), /* ddiv */
380 /* Instruction costs on MPCCORE processors. */
381 static const
382 struct processor_costs mpccore_cost = {
383 COSTS_N_INSNS (2), /* mulsi */
384 COSTS_N_INSNS (2), /* mulsi_const */
385 COSTS_N_INSNS (2), /* mulsi_const9 */
386 COSTS_N_INSNS (2), /* muldi */
387 COSTS_N_INSNS (6), /* divsi */
388 COSTS_N_INSNS (6), /* divdi */
389 COSTS_N_INSNS (4), /* fp */
390 COSTS_N_INSNS (5), /* dmul */
391 COSTS_N_INSNS (10), /* sdiv */
392 COSTS_N_INSNS (17), /* ddiv */
395 /* Instruction costs on PPC403 processors. */
396 static const
397 struct processor_costs ppc403_cost = {
398 COSTS_N_INSNS (4), /* mulsi */
399 COSTS_N_INSNS (4), /* mulsi_const */
400 COSTS_N_INSNS (4), /* mulsi_const9 */
401 COSTS_N_INSNS (4), /* muldi */
402 COSTS_N_INSNS (33), /* divsi */
403 COSTS_N_INSNS (33), /* divdi */
404 COSTS_N_INSNS (11), /* fp */
405 COSTS_N_INSNS (11), /* dmul */
406 COSTS_N_INSNS (11), /* sdiv */
407 COSTS_N_INSNS (11), /* ddiv */
410 /* Instruction costs on PPC405 processors. */
411 static const
412 struct processor_costs ppc405_cost = {
413 COSTS_N_INSNS (5), /* mulsi */
414 COSTS_N_INSNS (4), /* mulsi_const */
415 COSTS_N_INSNS (3), /* mulsi_const9 */
416 COSTS_N_INSNS (5), /* muldi */
417 COSTS_N_INSNS (35), /* divsi */
418 COSTS_N_INSNS (35), /* divdi */
419 COSTS_N_INSNS (11), /* fp */
420 COSTS_N_INSNS (11), /* dmul */
421 COSTS_N_INSNS (11), /* sdiv */
422 COSTS_N_INSNS (11), /* ddiv */
425 /* Instruction costs on PPC440 processors. */
426 static const
427 struct processor_costs ppc440_cost = {
428 COSTS_N_INSNS (3), /* mulsi */
429 COSTS_N_INSNS (2), /* mulsi_const */
430 COSTS_N_INSNS (2), /* mulsi_const9 */
431 COSTS_N_INSNS (3), /* muldi */
432 COSTS_N_INSNS (34), /* divsi */
433 COSTS_N_INSNS (34), /* divdi */
434 COSTS_N_INSNS (5), /* fp */
435 COSTS_N_INSNS (5), /* dmul */
436 COSTS_N_INSNS (19), /* sdiv */
437 COSTS_N_INSNS (33), /* ddiv */
440 /* Instruction costs on PPC601 processors. */
441 static const
442 struct processor_costs ppc601_cost = {
443 COSTS_N_INSNS (5), /* mulsi */
444 COSTS_N_INSNS (5), /* mulsi_const */
445 COSTS_N_INSNS (5), /* mulsi_const9 */
446 COSTS_N_INSNS (5), /* muldi */
447 COSTS_N_INSNS (36), /* divsi */
448 COSTS_N_INSNS (36), /* divdi */
449 COSTS_N_INSNS (4), /* fp */
450 COSTS_N_INSNS (5), /* dmul */
451 COSTS_N_INSNS (17), /* sdiv */
452 COSTS_N_INSNS (31), /* ddiv */
455 /* Instruction costs on PPC603 processors. */
456 static const
457 struct processor_costs ppc603_cost = {
458 COSTS_N_INSNS (5), /* mulsi */
459 COSTS_N_INSNS (3), /* mulsi_const */
460 COSTS_N_INSNS (2), /* mulsi_const9 */
461 COSTS_N_INSNS (5), /* muldi */
462 COSTS_N_INSNS (37), /* divsi */
463 COSTS_N_INSNS (37), /* divdi */
464 COSTS_N_INSNS (3), /* fp */
465 COSTS_N_INSNS (4), /* dmul */
466 COSTS_N_INSNS (18), /* sdiv */
467 COSTS_N_INSNS (33), /* ddiv */
470 /* Instruction costs on PPC604 processors. */
471 static const
472 struct processor_costs ppc604_cost = {
473 COSTS_N_INSNS (4), /* mulsi */
474 COSTS_N_INSNS (4), /* mulsi_const */
475 COSTS_N_INSNS (4), /* mulsi_const9 */
476 COSTS_N_INSNS (4), /* muldi */
477 COSTS_N_INSNS (20), /* divsi */
478 COSTS_N_INSNS (20), /* divdi */
479 COSTS_N_INSNS (3), /* fp */
480 COSTS_N_INSNS (3), /* dmul */
481 COSTS_N_INSNS (18), /* sdiv */
482 COSTS_N_INSNS (32), /* ddiv */
485 /* Instruction costs on PPC604e processors. */
486 static const
487 struct processor_costs ppc604e_cost = {
488 COSTS_N_INSNS (2), /* mulsi */
489 COSTS_N_INSNS (2), /* mulsi_const */
490 COSTS_N_INSNS (2), /* mulsi_const9 */
491 COSTS_N_INSNS (2), /* muldi */
492 COSTS_N_INSNS (20), /* divsi */
493 COSTS_N_INSNS (20), /* divdi */
494 COSTS_N_INSNS (3), /* fp */
495 COSTS_N_INSNS (3), /* dmul */
496 COSTS_N_INSNS (18), /* sdiv */
497 COSTS_N_INSNS (32), /* ddiv */
500 /* Instruction costs on PPC620 processors. */
501 static const
502 struct processor_costs ppc620_cost = {
503 COSTS_N_INSNS (5), /* mulsi */
504 COSTS_N_INSNS (4), /* mulsi_const */
505 COSTS_N_INSNS (3), /* mulsi_const9 */
506 COSTS_N_INSNS (7), /* muldi */
507 COSTS_N_INSNS (21), /* divsi */
508 COSTS_N_INSNS (37), /* divdi */
509 COSTS_N_INSNS (3), /* fp */
510 COSTS_N_INSNS (3), /* dmul */
511 COSTS_N_INSNS (18), /* sdiv */
512 COSTS_N_INSNS (32), /* ddiv */
515 /* Instruction costs on PPC630 processors. */
516 static const
517 struct processor_costs ppc630_cost = {
518 COSTS_N_INSNS (5), /* mulsi */
519 COSTS_N_INSNS (4), /* mulsi_const */
520 COSTS_N_INSNS (3), /* mulsi_const9 */
521 COSTS_N_INSNS (7), /* muldi */
522 COSTS_N_INSNS (21), /* divsi */
523 COSTS_N_INSNS (37), /* divdi */
524 COSTS_N_INSNS (3), /* fp */
525 COSTS_N_INSNS (3), /* dmul */
526 COSTS_N_INSNS (17), /* sdiv */
527 COSTS_N_INSNS (21), /* ddiv */
530 /* Instruction costs on PPC750 and PPC7400 processors. */
531 static const
532 struct processor_costs ppc750_cost = {
533 COSTS_N_INSNS (5), /* mulsi */
534 COSTS_N_INSNS (3), /* mulsi_const */
535 COSTS_N_INSNS (2), /* mulsi_const9 */
536 COSTS_N_INSNS (5), /* muldi */
537 COSTS_N_INSNS (17), /* divsi */
538 COSTS_N_INSNS (17), /* divdi */
539 COSTS_N_INSNS (3), /* fp */
540 COSTS_N_INSNS (3), /* dmul */
541 COSTS_N_INSNS (17), /* sdiv */
542 COSTS_N_INSNS (31), /* ddiv */
545 /* Instruction costs on PPC7450 processors. */
546 static const
547 struct processor_costs ppc7450_cost = {
548 COSTS_N_INSNS (4), /* mulsi */
549 COSTS_N_INSNS (3), /* mulsi_const */
550 COSTS_N_INSNS (3), /* mulsi_const9 */
551 COSTS_N_INSNS (4), /* muldi */
552 COSTS_N_INSNS (23), /* divsi */
553 COSTS_N_INSNS (23), /* divdi */
554 COSTS_N_INSNS (5), /* fp */
555 COSTS_N_INSNS (5), /* dmul */
556 COSTS_N_INSNS (21), /* sdiv */
557 COSTS_N_INSNS (35), /* ddiv */
560 /* Instruction costs on PPC8540 processors. */
561 static const
562 struct processor_costs ppc8540_cost = {
563 COSTS_N_INSNS (4), /* mulsi */
564 COSTS_N_INSNS (4), /* mulsi_const */
565 COSTS_N_INSNS (4), /* mulsi_const9 */
566 COSTS_N_INSNS (4), /* muldi */
567 COSTS_N_INSNS (19), /* divsi */
568 COSTS_N_INSNS (19), /* divdi */
569 COSTS_N_INSNS (4), /* fp */
570 COSTS_N_INSNS (4), /* dmul */
571 COSTS_N_INSNS (29), /* sdiv */
572 COSTS_N_INSNS (29), /* ddiv */
575 /* Instruction costs on POWER4 and POWER5 processors. */
576 static const
577 struct processor_costs power4_cost = {
578 COSTS_N_INSNS (3), /* mulsi */
579 COSTS_N_INSNS (2), /* mulsi_const */
580 COSTS_N_INSNS (2), /* mulsi_const9 */
581 COSTS_N_INSNS (4), /* muldi */
582 COSTS_N_INSNS (18), /* divsi */
583 COSTS_N_INSNS (34), /* divdi */
584 COSTS_N_INSNS (3), /* fp */
585 COSTS_N_INSNS (3), /* dmul */
586 COSTS_N_INSNS (17), /* sdiv */
587 COSTS_N_INSNS (17), /* ddiv */
591 static bool rs6000_function_ok_for_sibcall (tree, tree);
592 static int num_insns_constant_wide (HOST_WIDE_INT);
593 static void validate_condition_mode (enum rtx_code, enum machine_mode);
594 static rtx rs6000_generate_compare (enum rtx_code);
595 static void rs6000_maybe_dead (rtx);
596 static void rs6000_emit_stack_tie (void);
597 static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
598 static rtx spe_synthesize_frame_save (rtx);
599 static bool spe_func_has_64bit_regs_p (void);
600 static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
601 int, HOST_WIDE_INT);
602 static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
603 static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
604 static unsigned rs6000_hash_constant (rtx);
605 static unsigned toc_hash_function (const void *);
606 static int toc_hash_eq (const void *, const void *);
607 static int constant_pool_expr_1 (rtx, int *, int *);
608 static bool constant_pool_expr_p (rtx);
609 static bool toc_relative_expr_p (rtx);
610 static bool legitimate_small_data_p (enum machine_mode, rtx);
611 static bool legitimate_indexed_address_p (rtx, int);
612 static bool legitimate_indirect_address_p (rtx, int);
613 static bool macho_lo_sum_memory_operand (rtx x, enum machine_mode mode);
614 static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
615 static struct machine_function * rs6000_init_machine_status (void);
616 static bool rs6000_assemble_integer (rtx, unsigned int, int);
617 #ifdef HAVE_GAS_HIDDEN
618 static void rs6000_assemble_visibility (tree, int);
619 #endif
620 static int rs6000_ra_ever_killed (void);
621 static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
622 static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
623 static void rs6000_eliminate_indexed_memrefs (rtx operands[2]);
624 static const char *rs6000_mangle_fundamental_type (tree);
625 extern const struct attribute_spec rs6000_attribute_table[];
626 static void rs6000_set_default_type_attributes (tree);
627 static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
628 static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
629 static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
630 tree);
631 static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
632 static bool rs6000_return_in_memory (tree, tree);
633 static void rs6000_file_start (void);
634 #if TARGET_ELF
635 static unsigned int rs6000_elf_section_type_flags (tree, const char *, int);
636 static void rs6000_elf_asm_out_constructor (rtx, int);
637 static void rs6000_elf_asm_out_destructor (rtx, int);
638 static void rs6000_elf_select_section (tree, int, unsigned HOST_WIDE_INT);
639 static void rs6000_elf_unique_section (tree, int);
640 static void rs6000_elf_select_rtx_section (enum machine_mode, rtx,
641 unsigned HOST_WIDE_INT);
642 static void rs6000_elf_encode_section_info (tree, rtx, int)
643 ATTRIBUTE_UNUSED;
644 static bool rs6000_elf_in_small_data_p (tree);
645 #endif
646 #if TARGET_XCOFF
647 static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
648 static void rs6000_xcoff_asm_named_section (const char *, unsigned int);
649 static void rs6000_xcoff_select_section (tree, int, unsigned HOST_WIDE_INT);
650 static void rs6000_xcoff_unique_section (tree, int);
651 static void rs6000_xcoff_select_rtx_section (enum machine_mode, rtx,
652 unsigned HOST_WIDE_INT);
653 static const char * rs6000_xcoff_strip_name_encoding (const char *);
654 static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
655 static void rs6000_xcoff_file_start (void);
656 static void rs6000_xcoff_file_end (void);
657 #endif
658 #if TARGET_MACHO
659 static bool rs6000_binds_local_p (tree);
660 #endif
661 static int rs6000_variable_issue (FILE *, int, rtx, int);
662 static bool rs6000_rtx_costs (rtx, int, int, int *);
663 static int rs6000_adjust_cost (rtx, rtx, rtx, int);
664 static bool is_microcoded_insn (rtx);
665 static int is_dispatch_slot_restricted (rtx);
666 static bool is_cracked_insn (rtx);
667 static bool is_branch_slot_insn (rtx);
668 static int rs6000_adjust_priority (rtx, int);
669 static int rs6000_issue_rate (void);
670 static bool rs6000_is_costly_dependence (rtx, rtx, rtx, int, int);
671 static rtx get_next_active_insn (rtx, rtx);
672 static bool insn_terminates_group_p (rtx , enum group_termination);
673 static bool is_costly_group (rtx *, rtx);
674 static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
675 static int redefine_groups (FILE *, int, rtx, rtx);
676 static int pad_groups (FILE *, int, rtx, rtx);
677 static void rs6000_sched_finish (FILE *, int);
678 static int rs6000_use_sched_lookahead (void);
680 static void rs6000_init_builtins (void);
681 static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
682 static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
683 static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
684 static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
685 static void altivec_init_builtins (void);
686 static void rs6000_common_init_builtins (void);
687 static void rs6000_init_libfuncs (void);
689 static void enable_mask_for_builtins (struct builtin_description *, int,
690 enum rs6000_builtins,
691 enum rs6000_builtins);
692 static tree build_opaque_vector_type (tree, int);
693 static void spe_init_builtins (void);
694 static rtx spe_expand_builtin (tree, rtx, bool *);
695 static rtx spe_expand_stv_builtin (enum insn_code, tree);
696 static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
697 static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
698 static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
699 static rs6000_stack_t *rs6000_stack_info (void);
700 static void debug_stack_info (rs6000_stack_t *);
702 static rtx altivec_expand_builtin (tree, rtx, bool *);
703 static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
704 static rtx altivec_expand_st_builtin (tree, rtx, bool *);
705 static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
706 static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
707 static rtx altivec_expand_predicate_builtin (enum insn_code,
708 const char *, tree, rtx);
709 static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
710 static rtx altivec_expand_stv_builtin (enum insn_code, tree);
711 static void rs6000_parse_abi_options (void);
712 static void rs6000_parse_alignment_option (void);
713 static void rs6000_parse_tls_size_option (void);
714 static void rs6000_parse_yes_no_option (const char *, const char *, int *);
715 static int first_altivec_reg_to_save (void);
716 static unsigned int compute_vrsave_mask (void);
717 static void is_altivec_return_reg (rtx, void *);
718 static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
719 int easy_vector_constant (rtx, enum machine_mode);
720 static int easy_vector_same (rtx, enum machine_mode);
721 static int easy_vector_splat_const (int, enum machine_mode);
722 static bool is_ev64_opaque_type (tree);
723 static rtx rs6000_dwarf_register_span (rtx);
724 static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
725 static rtx rs6000_tls_get_addr (void);
726 static rtx rs6000_got_sym (void);
727 static inline int rs6000_tls_symbol_ref_1 (rtx *, void *);
728 static const char *rs6000_get_some_local_dynamic_name (void);
729 static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
730 static rtx rs6000_complex_function_value (enum machine_mode);
731 static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
732 enum machine_mode, tree);
733 static rtx rs6000_mixed_function_arg (enum machine_mode, tree, int);
734 static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
735 static void setup_incoming_varargs (CUMULATIVE_ARGS *,
736 enum machine_mode, tree,
737 int *, int);
738 static bool rs6000_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
739 tree, bool);
740 #if TARGET_MACHO
741 static void macho_branch_islands (void);
742 static void add_compiler_branch_island (tree, tree, int);
743 static int no_previous_def (tree function_name);
744 static tree get_prev_label (tree function_name);
745 #endif
747 static tree rs6000_build_builtin_va_list (void);
748 static tree rs6000_gimplify_va_arg (tree, tree, tree *, tree *);
749 static bool rs6000_must_pass_in_stack (enum machine_mode, tree);
751 static enum machine_mode rs6000_eh_return_filter_mode (void);
753 /* Hash table stuff for keeping track of TOC entries. */
755 struct toc_hash_struct GTY(())
757 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
758 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
759 rtx key;
760 enum machine_mode key_mode;
761 int labelno;
764 static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
/* Default register names: 32 GPRs, 32 FPRs, mq/lr/ctr/ap, 8 CRs, xer,
   32 AltiVec registers, vrsave/vscr, and the two SPE registers.  The
   brace lines dropped by the scrape are restored here.  */
char rs6000_reg_names[][8] =
{
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
     "mq", "lr", "ctr","ap",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "xer",
      /* AltiVec registers.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9",  "10", "11", "12", "13", "14", "15",
      "16", "17", "18", "19", "20", "21", "22", "23",
      "24", "25", "26", "27", "28", "29", "30", "31",
      "vrsave", "vscr",
      /* SPE registers.  */
      "spe_acc", "spefscr"
};
790 #ifdef TARGET_REGNAMES
/* Alternate, %-prefixed register names used when TARGET_REGNAMES is
   requested; layout parallels rs6000_reg_names exactly.  The brace
   lines dropped by the scrape are restored here.  */
static const char alt_reg_names[][8] =
{
   "%r0",   "%r1",  "%r2",  "%r3",  "%r4",  "%r5",  "%r6",  "%r7",
   "%r8",   "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
  "%r16",  "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
  "%r24",  "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
   "%f0",   "%f1",  "%f2",  "%f3",  "%f4",  "%f5",  "%f6",  "%f7",
   "%f8",   "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
  "%f16",  "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
  "%f24",  "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
    "mq",    "lr",  "ctr",   "ap",
  "%cr0",  "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
   "xer",
  /* AltiVec registers.  */
   "%v0",  "%v1",  "%v2",  "%v3",  "%v4",  "%v5",  "%v6", "%v7",
   "%v8",  "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
  "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
  "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
  "vrsave", "vscr",
  /* SPE registers.  */
  "spe_acc", "spefscr"
};
813 #endif
/* Fallbacks for target macros some subtargets do not define.  */
#ifndef MASK_STRICT_ALIGN
#define MASK_STRICT_ALIGN 0
#endif
#ifndef TARGET_PROFILE_KERNEL
#define TARGET_PROFILE_KERNEL 0
#endif

/* The VRSAVE bitmask puts bit %v0 as the most significant bit.  */
#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))

/* Return 1 for a symbol ref for a thread-local storage symbol.  */
#define RS6000_SYMBOL_REF_TLS_P(RTX) \
  (GET_CODE (RTX) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (RTX) != 0)

/* Initialize the GCC target structure.  Each #undef/#define pair
   overrides a default hook with the rs6000-specific implementation;
   the aggregate is instantiated as TARGET_INITIALIZER below.  */
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes

#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP

/* Default unaligned ops are only provided for ELF.  Find the ops needed
   for non-ELF systems.  */
#ifndef OBJECT_FORMAT_ELF
#if TARGET_XCOFF
/* For XCOFF.  rs6000_assemble_integer will handle unaligned DIs on
   64-bit targets.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
#else
/* For Darwin.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.quad\t"
#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
#endif
#endif

/* This hook deals with fixups for relocatable code and DI-mode objects
   in 64-bit code.  */
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER rs6000_assemble_integer

#ifdef HAVE_GAS_HIDDEN
#undef TARGET_ASM_ASSEMBLE_VISIBILITY
#define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
#endif

#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS HAVE_AS_TLS

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue

/* Scheduler hooks.  */
#undef TARGET_SCHED_VARIABLE_ISSUE
#define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
#undef TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
#undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
#define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
#undef TARGET_SCHED_FINISH
#define TARGET_SCHED_FINISH rs6000_sched_finish

#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS rs6000_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN rs6000_expand_builtin

#undef TARGET_MANGLE_FUNDAMENTAL_TYPE
#define TARGET_MANGLE_FUNDAMENTAL_TYPE rs6000_mangle_fundamental_type

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs

#if TARGET_MACHO
#undef TARGET_BINDS_LOCAL_P
#define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
#endif

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk

#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS rs6000_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_0

#undef TARGET_VECTOR_OPAQUE_P
#define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type

#undef TARGET_DWARF_REGISTER_SPAN
#define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span

/* On rs6000, function arguments are promoted, as are function return
   values.  */
#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs

/* Always strict argument naming on rs6000.  */
#undef TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
#undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
#define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
#undef TARGET_SPLIT_COMPLEX_ARG
#define TARGET_SPLIT_COMPLEX_ARG hook_bool_tree_true
#undef TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK rs6000_must_pass_in_stack
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE rs6000_pass_by_reference

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list

#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR rs6000_gimplify_va_arg

#undef TARGET_EH_RETURN_FILTER_MODE
#define TARGET_EH_RETURN_FILTER_MODE rs6000_eh_return_filter_mode

struct gcc_target targetm = TARGET_INITIALIZER;
/* Value is 1 if hard register REGNO can hold a value of machine-mode
   MODE.  Checked once at option-override time and cached in
   rs6000_hard_regno_mode_ok_p (see rs6000_init_hard_regno_mode_ok).  */
static int
rs6000_hard_regno_mode_ok (int regno, enum machine_mode mode)
{
  /* The GPRs can hold any mode, but values bigger than one register
     cannot go past R31.  */
  if (INT_REGNO_P (regno))
    return INT_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1);

  /* The float registers can only hold floating modes and DImode.  */
  if (FP_REGNO_P (regno))
    return
      (GET_MODE_CLASS (mode) == MODE_FLOAT
       && FP_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1))
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_SIZE (mode) == UNITS_PER_FP_WORD);

  /* The CR register can only hold CC modes.  */
  if (CR_REGNO_P (regno))
    return GET_MODE_CLASS (mode) == MODE_CC;

  /* XER accepts only PSImode.  */
  if (XER_REGNO_P (regno))
    return mode == PSImode;

  /* AltiVec modes only in AltiVec registers.  */
  if (ALTIVEC_REGNO_P (regno))
    return ALTIVEC_VECTOR_MODE (mode);

  /* ...but GPRs can hold SIMD data on the SPE in one register.
     NOTE(review): the INT_REGNO_P test above already returns for every
     GPR, so this case looks unreachable unless SPE_SIMD_REGNO_P covers
     registers outside the GPR range — confirm against rs6000.h.  */
  if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
    return 1;

  /* We cannot put TImode anywhere except general register and it must be
     able to fit within the register set.  */

  return GET_MODE_SIZE (mode) <= UNITS_PER_WORD;
}
1014 /* Initialize rs6000_hard_regno_mode_ok_p table. */
1015 static void
1016 rs6000_init_hard_regno_mode_ok (void)
1018 int r, m;
1020 for (r = 0; r < FIRST_PSEUDO_REGISTER; ++r)
1021 for (m = 0; m < NUM_MACHINE_MODES; ++m)
1022 if (rs6000_hard_regno_mode_ok (r, m))
1023 rs6000_hard_regno_mode_ok_p[m][r] = true;
/* If not otherwise specified by a target, make 'long double' equivalent to
   'double'.  */

#ifndef RS6000_DEFAULT_LONG_DOUBLE_SIZE
#define RS6000_DEFAULT_LONG_DOUBLE_SIZE 64
#endif

/* Override command line options.  Mostly we process the processor
   type and sometimes adjust other TARGET_ options.

   DEFAULT_CPU is the configure-time default processor name (may be
   NULL).  Processing order matters: the processor table sets baseline
   target_flags, then endianness/ABI/subtarget adjustments refine them,
   and finally per-CPU scheduling and cost tables are selected.  */

void
rs6000_override_options (const char *default_cpu)
{
  size_t i, j;
  struct rs6000_cpu_select *ptr;
  int set_masks;

  /* Simplifications for entries below.  */

  enum {
    POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
    POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
  };

  /* This table occasionally claims that a processor does not support
     a particular feature even though it does, but the feature is slower
     than the alternative.  Thus, it shouldn't be relied on as a
     complete description of the processor's support.

     Please keep this list in order, and don't forget to update the
     documentation in invoke.texi when adding a new processor or
     flag.  */
  static struct ptt
    {
      const char *const name;		/* Canonical processor name.  */
      const enum processor_type processor; /* Processor type enum value.  */
      const int target_enable;	/* Target flags to enable.  */
    } const processor_target_table[]
      = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
	 {"403", PROCESSOR_PPC403,
	  POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
	 {"405", PROCESSOR_PPC405, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
	 {"405fp", PROCESSOR_PPC405, POWERPC_BASE_MASK},
	 {"440", PROCESSOR_PPC440, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
	 {"440fp", PROCESSOR_PPC440, POWERPC_BASE_MASK},
	 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
	 {"601", PROCESSOR_PPC601,
	  MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
	 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
	 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
	 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
	 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
	 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
	 {"620", PROCESSOR_PPC620,
	  POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
	 {"630", PROCESSOR_PPC630,
	  POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
	 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
	 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
	 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
	 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
	 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
	 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
	 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
	 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
	 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
	 {"970", PROCESSOR_POWER4,
	  POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
	 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
	 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
	 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
	 {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
	 {"G5", PROCESSOR_POWER4,
	  POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
	 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
	 {"power2", PROCESSOR_POWER,
	  MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
	 {"power3", PROCESSOR_PPC630,
	  POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
	 {"power4", PROCESSOR_POWER4,
	  POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
	 {"power5", PROCESSOR_POWER5,
	  POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
	 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
	 {"powerpc64", PROCESSOR_POWERPC64,
	  POWERPC_BASE_MASK | MASK_POWERPC64},
	 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
	 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
	 {"rios2", PROCESSOR_RIOS2,
	  MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
	 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
	 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
	 {"rs64a", PROCESSOR_RS64A, POWERPC_BASE_MASK | MASK_POWERPC64},
      };

  const size_t ptt_size = ARRAY_SIZE (processor_target_table);

  /* Some OSs don't support saving the high part of 64-bit registers on
     context switch.  Other OSs don't support saving Altivec registers.
     On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
     settings; if the user wants either, the user must explicitly specify
     them and we won't interfere with the user's specification.  */

  enum {
    POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
    POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT
		     | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
		     | MASK_MFCRF)
  };

  rs6000_init_hard_regno_mode_ok ();

  set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
#ifdef OS_MISSING_POWERPC64
  if (OS_MISSING_POWERPC64)
    set_masks &= ~MASK_POWERPC64;
#endif
#ifdef OS_MISSING_ALTIVEC
  if (OS_MISSING_ALTIVEC)
    set_masks &= ~MASK_ALTIVEC;
#endif

  /* Don't override these by the processor default if given explicitly.  */
  set_masks &= ~(target_flags_explicit
		 & (MASK_MULTIPLE | MASK_STRING | MASK_SOFT_FLOAT));

  /* Identify the processor type.  */
  rs6000_select[0].string = default_cpu;
  rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;

  for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
    {
      ptr = &rs6000_select[i];
      if (ptr->string != (char *)0 && ptr->string[0] != '\0')
	{
	  for (j = 0; j < ptt_size; j++)
	    if (! strcmp (ptr->string, processor_target_table[j].name))
	      {
		if (ptr->set_tune_p)
		  rs6000_cpu = processor_target_table[j].processor;

		if (ptr->set_arch_p)
		  {
		    target_flags &= ~set_masks;
		    target_flags |= (processor_target_table[j].target_enable
				     & set_masks);
		  }
		break;
	      }

	  /* j == ptt_size means the name matched no table entry.  */
	  if (j == ptt_size)
	    error ("bad value (%s) for %s switch", ptr->string, ptr->name);
	}
    }

  if (TARGET_E500)
    rs6000_isel = 1;

  /* If we are optimizing big endian systems for space, use the load/store
     multiple and string instructions.  */
  if (BYTES_BIG_ENDIAN && optimize_size)
    target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);

  /* Don't allow -mmultiple or -mstring on little endian systems
     unless the cpu is a 750, because the hardware doesn't support the
     instructions used in little endian mode, and causes an alignment
     trap.  The 750 does not cause an alignment trap (except when the
     target is unaligned).  */

  if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
    {
      if (TARGET_MULTIPLE)
	{
	  target_flags &= ~MASK_MULTIPLE;
	  if ((target_flags_explicit & MASK_MULTIPLE) != 0)
	    warning ("-mmultiple is not supported on little endian systems");
	}

      if (TARGET_STRING)
	{
	  target_flags &= ~MASK_STRING;
	  if ((target_flags_explicit & MASK_STRING) != 0)
	    warning ("-mstring is not supported on little endian systems");
	}
    }

  /* Set debug flags */
  if (rs6000_debug_name)
    {
      if (! strcmp (rs6000_debug_name, "all"))
	rs6000_debug_stack = rs6000_debug_arg = 1;
      else if (! strcmp (rs6000_debug_name, "stack"))
	rs6000_debug_stack = 1;
      else if (! strcmp (rs6000_debug_name, "arg"))
	rs6000_debug_arg = 1;
      else
	error ("unknown -mdebug-%s switch", rs6000_debug_name);
    }

  if (rs6000_traceback_name)
    {
      /* Prefix matching: "full", "part[ial]", "no[ne]".  */
      if (! strncmp (rs6000_traceback_name, "full", 4))
	rs6000_traceback = traceback_full;
      else if (! strncmp (rs6000_traceback_name, "part", 4))
	rs6000_traceback = traceback_part;
      else if (! strncmp (rs6000_traceback_name, "no", 2))
	rs6000_traceback = traceback_none;
      else
	error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
	       rs6000_traceback_name);
    }

  /* Set size of long double */
  rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
  if (rs6000_long_double_size_string)
    {
      char *tail;
      int size = strtol (rs6000_long_double_size_string, &tail, 10);
      if (*tail != '\0' || (size != 64 && size != 128))
	error ("Unknown switch -mlong-double-%s",
	       rs6000_long_double_size_string);
      else
	rs6000_long_double_type_size = size;
    }

  /* Set Altivec ABI as default for powerpc64 linux.  */
  if (TARGET_ELF && TARGET_64BIT)
    {
      rs6000_altivec_abi = 1;
      rs6000_altivec_vrsave = 1;
    }

  /* Handle -mabi= options.  */
  rs6000_parse_abi_options ();

  /* Handle -malign-XXXXX option.  */
  rs6000_parse_alignment_option ();

  /* Handle generic -mFOO=YES/NO options.  */
  rs6000_parse_yes_no_option ("vrsave", rs6000_altivec_vrsave_string,
			      &rs6000_altivec_vrsave);
  rs6000_parse_yes_no_option ("isel", rs6000_isel_string,
			      &rs6000_isel);
  rs6000_parse_yes_no_option ("spe", rs6000_spe_string, &rs6000_spe);
  rs6000_parse_yes_no_option ("float-gprs", rs6000_float_gprs_string,
			      &rs6000_float_gprs);

  /* Handle -mtls-size option.  */
  rs6000_parse_tls_size_option ();

#ifdef SUBTARGET_OVERRIDE_OPTIONS
  SUBTARGET_OVERRIDE_OPTIONS;
#endif
#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
  SUBSUBTARGET_OVERRIDE_OPTIONS;
#endif

  if (TARGET_E500)
    {
      if (TARGET_ALTIVEC)
	error ("AltiVec and E500 instructions cannot coexist");

      /* The e500 does not have string instructions, and we set
	 MASK_STRING above when optimizing for size.  */
      if ((target_flags & MASK_STRING) != 0)
	target_flags = target_flags & ~MASK_STRING;

      /* No SPE means 64-bit long doubles, even if an E500.  */
      if (rs6000_spe_string != 0
	  && !strcmp (rs6000_spe_string, "no"))
	rs6000_long_double_type_size = 64;
    }
  else if (rs6000_select[1].string != NULL)
    {
      /* For the powerpc-eabispe configuration, we set all these by
	 default, so let's unset them if we manually set another
	 CPU that is not the E500.  */
      if (rs6000_abi_string == 0)
	rs6000_spe_abi = 0;
      if (rs6000_spe_string == 0)
	rs6000_spe = 0;
      if (rs6000_float_gprs_string == 0)
	rs6000_float_gprs = 0;
      if (rs6000_isel_string == 0)
	rs6000_isel = 0;
      if (rs6000_long_double_size_string == 0)
	rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
    }

  rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
			&& rs6000_cpu != PROCESSOR_POWER5);
  rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
			 || rs6000_cpu == PROCESSOR_POWER5);

  /* Handle -m(no-)longcall option.  This is a bit of a cheap hack,
     using TARGET_OPTIONS to handle a toggle switch, but we're out of
     bits in target_flags so TARGET_SWITCHES cannot be used.
     Assumption here is that rs6000_longcall_switch points into the
     text of the complete option, rather than being a copy, so we can
     scan back for the presence or absence of the no- modifier.  */
  if (rs6000_longcall_switch)
    {
      const char *base = rs6000_longcall_switch;
      while (base[-1] != 'm') base--;

      if (*rs6000_longcall_switch != '\0')
	error ("invalid option `%s'", base);
      rs6000_default_long_calls = (base[0] != 'n');
    }

  /* Handle -m(no-)warn-altivec-long similarly.  */
  if (rs6000_warn_altivec_long_switch)
    {
      const char *base = rs6000_warn_altivec_long_switch;
      while (base[-1] != 'm') base--;

      if (*rs6000_warn_altivec_long_switch != '\0')
	error ("invalid option `%s'", base);
      rs6000_warn_altivec_long = (base[0] != 'n');
    }

  /* Handle -mprioritize-restricted-insns option.  */
  rs6000_sched_restricted_insns_priority
    = (rs6000_sched_groups ? 1 : 0);
  if (rs6000_sched_restricted_insns_priority_str)
    rs6000_sched_restricted_insns_priority =
      atoi (rs6000_sched_restricted_insns_priority_str);

  /* Handle -msched-costly-dep option.  */
  rs6000_sched_costly_dep
    = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);
  if (rs6000_sched_costly_dep_str)
    {
      if (! strcmp (rs6000_sched_costly_dep_str, "no"))
	rs6000_sched_costly_dep = no_dep_costly;
      else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
	rs6000_sched_costly_dep = all_deps_costly;
      else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
	rs6000_sched_costly_dep = true_store_to_load_dep_costly;
      else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
	rs6000_sched_costly_dep = store_to_load_dep_costly;
      else
	rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
    }

  /* Handle -minsert-sched-nops option.  */
  rs6000_sched_insert_nops
    = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);
  if (rs6000_sched_insert_nops_str)
    {
      if (! strcmp (rs6000_sched_insert_nops_str, "no"))
	rs6000_sched_insert_nops = sched_finish_none;
      else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
	rs6000_sched_insert_nops = sched_finish_pad_groups;
      else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
	rs6000_sched_insert_nops = sched_finish_regroup_exact;
      else
	rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
    }

#ifdef TARGET_REGNAMES
  /* If the user desires alternate register names, copy in the
     alternate names now.  */
  if (TARGET_REGNAMES)
    memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
#endif

  /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
     If -maix-struct-return or -msvr4-struct-return was explicitly
     used, don't override with the ABI default.  */
  if ((target_flags_explicit & MASK_AIX_STRUCT_RET) == 0)
    {
      if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
	target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
      else
	target_flags |= MASK_AIX_STRUCT_RET;
    }

  if (TARGET_LONG_DOUBLE_128
      && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
    REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;

  /* Allocate an alias set for register saves & restores from stack.  */
  rs6000_sr_alias_set = new_alias_set ();

  if (TARGET_TOC)
    ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);

  /* We can only guarantee the availability of DI pseudo-ops when
     assembling for 64-bit targets.  */
  if (!TARGET_64BIT)
    {
      targetm.asm_out.aligned_op.di = NULL;
      targetm.asm_out.unaligned_op.di = NULL;
    }

  /* Set maximum branch target alignment at two instructions, eight bytes.  */
  align_jumps_max_skip = 8;
  align_loops_max_skip = 8;

  /* Arrange to save and restore machine status around nested functions.  */
  init_machine_status = rs6000_init_machine_status;

  /* We should always be splitting complex arguments, but we can't break
     Linux and Darwin ABIs at the moment.  For now, only AIX is fixed.  */
  if (DEFAULT_ABI != ABI_AIX)
    targetm.calls.split_complex_arg = NULL;

  /* Initialize rs6000_cost with the appropriate target costs.  */
  if (optimize_size)
    rs6000_cost = TARGET_POWERPC64 ? &size64_cost : &size32_cost;
  else
    switch (rs6000_cpu)
      {
      case PROCESSOR_RIOS1:
	rs6000_cost = &rios1_cost;
	break;

      case PROCESSOR_RIOS2:
	rs6000_cost = &rios2_cost;
	break;

      case PROCESSOR_RS64A:
	rs6000_cost = &rs64a_cost;
	break;

      case PROCESSOR_MPCCORE:
	rs6000_cost = &mpccore_cost;
	break;

      case PROCESSOR_PPC403:
	rs6000_cost = &ppc403_cost;
	break;

      case PROCESSOR_PPC405:
	rs6000_cost = &ppc405_cost;
	break;

      case PROCESSOR_PPC440:
	rs6000_cost = &ppc440_cost;
	break;

      case PROCESSOR_PPC601:
	rs6000_cost = &ppc601_cost;
	break;

      case PROCESSOR_PPC603:
	rs6000_cost = &ppc603_cost;
	break;

      case PROCESSOR_PPC604:
	rs6000_cost = &ppc604_cost;
	break;

      case PROCESSOR_PPC604e:
	rs6000_cost = &ppc604e_cost;
	break;

      case PROCESSOR_PPC620:
	rs6000_cost = &ppc620_cost;
	break;

      case PROCESSOR_PPC630:
	rs6000_cost = &ppc630_cost;
	break;

      case PROCESSOR_PPC750:
      case PROCESSOR_PPC7400:
	rs6000_cost = &ppc750_cost;
	break;

      case PROCESSOR_PPC7450:
	rs6000_cost = &ppc7450_cost;
	break;

      case PROCESSOR_PPC8540:
	rs6000_cost = &ppc8540_cost;
	break;

      case PROCESSOR_POWER4:
      case PROCESSOR_POWER5:
	rs6000_cost = &power4_cost;
	break;

      default:
	abort ();
      }
}
/* Handle generic options of the form -mfoo=yes/no.
   NAME is the option name (used only in diagnostics).
   VALUE is the option value, or NULL if the option was not given.
   FLAG receives 1 for "yes" and 0 for "no"; any other value is an
   error and FLAG is left untouched.  */
static void
rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
{
  if (value == 0)
    return;

  if (strcmp (value, "yes") == 0)
    *flag = 1;
  else if (strcmp (value, "no") == 0)
    *flag = 0;
  else
    error ("unknown -m%s= option specified: '%s'", name, value);
}
1533 /* Handle -mabi= options. */
1534 static void
1535 rs6000_parse_abi_options (void)
1537 if (rs6000_abi_string == 0)
1538 return;
1539 else if (! strcmp (rs6000_abi_string, "altivec"))
1541 rs6000_altivec_abi = 1;
1542 rs6000_spe_abi = 0;
1544 else if (! strcmp (rs6000_abi_string, "no-altivec"))
1545 rs6000_altivec_abi = 0;
1546 else if (! strcmp (rs6000_abi_string, "spe"))
1548 rs6000_spe_abi = 1;
1549 rs6000_altivec_abi = 0;
1550 if (!TARGET_SPE_ABI)
1551 error ("not configured for ABI: '%s'", rs6000_abi_string);
1554 else if (! strcmp (rs6000_abi_string, "no-spe"))
1555 rs6000_spe_abi = 0;
1556 else
1557 error ("unknown ABI specified: '%s'", rs6000_abi_string);
1560 /* Handle -malign-XXXXXX options. */
1561 static void
1562 rs6000_parse_alignment_option (void)
1564 if (rs6000_alignment_string == 0)
1565 return;
1566 else if (! strcmp (rs6000_alignment_string, "power"))
1567 rs6000_alignment_flags = MASK_ALIGN_POWER;
1568 else if (! strcmp (rs6000_alignment_string, "natural"))
1569 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
1570 else
1571 error ("unknown -malign-XXXXX option specified: '%s'",
1572 rs6000_alignment_string);
1575 /* Validate and record the size specified with the -mtls-size option. */
1577 static void
1578 rs6000_parse_tls_size_option (void)
1580 if (rs6000_tls_size_string == 0)
1581 return;
1582 else if (strcmp (rs6000_tls_size_string, "16") == 0)
1583 rs6000_tls_size = 16;
1584 else if (strcmp (rs6000_tls_size_string, "32") == 0)
1585 rs6000_tls_size = 32;
1586 else if (strcmp (rs6000_tls_size_string, "64") == 0)
1587 rs6000_tls_size = 64;
1588 else
1589 error ("bad value `%s' for -mtls-size switch", rs6000_tls_size_string);
/* Per-optimization-level target adjustments (OPTIMIZATION_OPTIONS
   hook).  LEVEL is the -O level and SIZE is nonzero for -Os; rs6000
   currently makes no level-dependent adjustments, so the body is
   intentionally empty.  */
void
optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
{
}
/* Do anything needed at the start of the asm file.  Emits the default
   file prologue, and under -fverbose-asm a comment line listing the
   rs6000-specific options in effect (-mcpu/-mtune, -msdata, -G).  */

static void
rs6000_file_start (void)
{
  size_t i;
  char buffer[80];
  const char *start = buffer;
  struct rs6000_cpu_select *ptr;
  const char *default_cpu = TARGET_CPU_DEFAULT;
  FILE *file = asm_out_file;

  default_file_start ();

#ifdef TARGET_BI_ARCH
  /* If the requested word size differs from the configured default,
     the default CPU name no longer applies.  */
  if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
    default_cpu = 0;
#endif

  if (flag_verbose_asm)
    {
      sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
      rs6000_select[0].string = default_cpu;

      for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
	{
	  ptr = &rs6000_select[i];
	  if (ptr->string != (char *)0 && ptr->string[0] != '\0')
	    {
	      /* START carries the comment header on the first print only;
		 it is cleared once anything has been emitted.  */
	      fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
	      start = "";
	    }
	}

#ifdef USING_ELFOS_H
      switch (rs6000_sdata)
	{
	case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
	case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
	case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
	case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
	}

      if (rs6000_sdata && g_switch_value)
	{
	  fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
		   g_switch_value);
	  start = "";
	}
#endif

      /* Empty START means something was printed; finish the line.  */
      if (*start == '\0')
	putc ('\n', file);
    }
}
1653 /* Return nonzero if this function is known to have a null epilogue. */
1656 direct_return (void)
1658 if (reload_completed)
1660 rs6000_stack_t *info = rs6000_stack_info ();
1662 if (info->first_gp_reg_save == 32
1663 && info->first_fp_reg_save == 64
1664 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
1665 && ! info->lr_save_p
1666 && ! info->cr_save_p
1667 && info->vrsave_mask == 0
1668 && ! info->push_p)
1669 return 1;
1672 return 0;
1675 /* Returns 1 always. */
1678 any_operand (rtx op ATTRIBUTE_UNUSED,
1679 enum machine_mode mode ATTRIBUTE_UNUSED)
1681 return 1;
1684 /* Returns 1 always. */
1687 any_parallel_operand (rtx op ATTRIBUTE_UNUSED,
1688 enum machine_mode mode ATTRIBUTE_UNUSED)
1690 return 1;
1693 /* Returns 1 if op is the count register. */
1696 count_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1698 if (GET_CODE (op) != REG)
1699 return 0;
1701 if (REGNO (op) == COUNT_REGISTER_REGNUM)
1702 return 1;
1704 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
1705 return 1;
1707 return 0;
1710 /* Returns 1 if op is an altivec register. */
1713 altivec_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1716 return (register_operand (op, mode)
1717 && (GET_CODE (op) != REG
1718 || REGNO (op) > FIRST_PSEUDO_REGISTER
1719 || ALTIVEC_REGNO_P (REGNO (op))));
1723 xer_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1725 if (GET_CODE (op) != REG)
1726 return 0;
1728 if (XER_REGNO_P (REGNO (op)))
1729 return 1;
1731 return 0;
1734 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
1735 by such constants completes more quickly. */
1738 s8bit_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1740 return ( GET_CODE (op) == CONST_INT
1741 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
1744 /* Return 1 if OP is a constant that can fit in a D field. */
1747 short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1749 return (GET_CODE (op) == CONST_INT
1750 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
1753 /* Similar for an unsigned D field. */
1756 u_short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1758 return (GET_CODE (op) == CONST_INT
1759 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
1762 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
1765 non_short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1767 return (GET_CODE (op) == CONST_INT
1768 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
1771 /* Returns 1 if OP is a CONST_INT that is a positive value
1772 and an exact power of 2. */
1775 exact_log2_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1777 return (GET_CODE (op) == CONST_INT
1778 && INTVAL (op) > 0
1779 && exact_log2 (INTVAL (op)) >= 0);
1782 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
1783 ctr, or lr). */
1786 gpc_reg_operand (rtx op, enum machine_mode mode)
1788 return (register_operand (op, mode)
1789 && (GET_CODE (op) != REG
1790 || (REGNO (op) >= ARG_POINTER_REGNUM
1791 && !XER_REGNO_P (REGNO (op)))
1792 || REGNO (op) < MQ_REGNO));
1795 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1796 CR field. */
1799 cc_reg_operand (rtx op, enum machine_mode mode)
1801 return (register_operand (op, mode)
1802 && (GET_CODE (op) != REG
1803 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1804 || CR_REGNO_P (REGNO (op))));
1807 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1808 CR field that isn't CR0. */
1811 cc_reg_not_cr0_operand (rtx op, enum machine_mode mode)
1813 return (register_operand (op, mode)
1814 && (GET_CODE (op) != REG
1815 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1816 || CR_REGNO_NOT_CR0_P (REGNO (op))));
1819 /* Returns 1 if OP is either a constant integer valid for a D-field or
1820 a non-special register. If a register, it must be in the proper
1821 mode unless MODE is VOIDmode. */
1824 reg_or_short_operand (rtx op, enum machine_mode mode)
1826 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1829 /* Similar, except check if the negation of the constant would be
1830 valid for a D-field. Don't allow a constant zero, since all the
1831 patterns that call this predicate use "addic r1,r2,-constant" on
1832 a constant value to set a carry when r2 is greater or equal to
1833 "constant". That doesn't work for zero. */
1836 reg_or_neg_short_operand (rtx op, enum machine_mode mode)
1838 if (GET_CODE (op) == CONST_INT)
1839 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P') && INTVAL (op) != 0;
1841 return gpc_reg_operand (op, mode);
1844 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1845 a non-special register. If a register, it must be in the proper
1846 mode unless MODE is VOIDmode. */
1849 reg_or_aligned_short_operand (rtx op, enum machine_mode mode)
1851 if (gpc_reg_operand (op, mode))
1852 return 1;
1853 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1854 return 1;
1856 return 0;
1860 /* Return 1 if the operand is either a register or an integer whose
1861 high-order 16 bits are zero. */
1864 reg_or_u_short_operand (rtx op, enum machine_mode mode)
1866 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1869 /* Return 1 is the operand is either a non-special register or ANY
1870 constant integer. */
1873 reg_or_cint_operand (rtx op, enum machine_mode mode)
1875 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
1878 /* Return 1 is the operand is either a non-special register or ANY
1879 32-bit signed constant integer. */
1882 reg_or_arith_cint_operand (rtx op, enum machine_mode mode)
1884 return (gpc_reg_operand (op, mode)
1885 || (GET_CODE (op) == CONST_INT
1886 #if HOST_BITS_PER_WIDE_INT != 32
1887 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
1888 < (unsigned HOST_WIDE_INT) 0x100000000ll)
1889 #endif
1893 /* Return 1 is the operand is either a non-special register or a 32-bit
1894 signed constant integer valid for 64-bit addition. */
1897 reg_or_add_cint64_operand (rtx op, enum machine_mode mode)
1899 return (gpc_reg_operand (op, mode)
1900 || (GET_CODE (op) == CONST_INT
1901 #if HOST_BITS_PER_WIDE_INT == 32
1902 && INTVAL (op) < 0x7fff8000
1903 #else
1904 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
1905 < 0x100000000ll)
1906 #endif
1910 /* Return 1 is the operand is either a non-special register or a 32-bit
1911 signed constant integer valid for 64-bit subtraction. */
1914 reg_or_sub_cint64_operand (rtx op, enum machine_mode mode)
1916 return (gpc_reg_operand (op, mode)
1917 || (GET_CODE (op) == CONST_INT
1918 #if HOST_BITS_PER_WIDE_INT == 32
1919 && (- INTVAL (op)) < 0x7fff8000
1920 #else
1921 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
1922 < 0x100000000ll)
1923 #endif
1927 /* Return 1 is the operand is either a non-special register or ANY
1928 32-bit unsigned constant integer. */
1931 reg_or_logical_cint_operand (rtx op, enum machine_mode mode)
1933 if (GET_CODE (op) == CONST_INT)
1935 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
1937 if (GET_MODE_BITSIZE (mode) <= 32)
1938 abort ();
1940 if (INTVAL (op) < 0)
1941 return 0;
1944 return ((INTVAL (op) & GET_MODE_MASK (mode)
1945 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
1947 else if (GET_CODE (op) == CONST_DOUBLE)
1949 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
1950 || mode != DImode)
1951 abort ();
1953 return CONST_DOUBLE_HIGH (op) == 0;
1955 else
1956 return gpc_reg_operand (op, mode);
1959 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1962 got_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1964 return (GET_CODE (op) == SYMBOL_REF
1965 || GET_CODE (op) == CONST
1966 || GET_CODE (op) == LABEL_REF);
1969 /* Return 1 if the operand is a simple references that can be loaded via
1970 the GOT (labels involving addition aren't allowed). */
1973 got_no_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1975 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1978 /* Return the number of instructions it takes to form a constant in an
1979 integer register. */
1981 static int
1982 num_insns_constant_wide (HOST_WIDE_INT value)
1984 /* signed constant loadable with {cal|addi} */
1985 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1986 return 1;
1988 /* constant loadable with {cau|addis} */
1989 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1990 return 1;
1992 #if HOST_BITS_PER_WIDE_INT == 64
1993 else if (TARGET_POWERPC64)
1995 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
1996 HOST_WIDE_INT high = value >> 31;
1998 if (high == 0 || high == -1)
1999 return 2;
2001 high >>= 1;
2003 if (low == 0)
2004 return num_insns_constant_wide (high) + 1;
2005 else
2006 return (num_insns_constant_wide (high)
2007 + num_insns_constant_wide (low) + 1);
2009 #endif
2011 else
2012 return 2;
2016 num_insns_constant (rtx op, enum machine_mode mode)
2018 if (GET_CODE (op) == CONST_INT)
2020 #if HOST_BITS_PER_WIDE_INT == 64
2021 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
2022 && mask64_operand (op, mode))
2023 return 2;
2024 else
2025 #endif
2026 return num_insns_constant_wide (INTVAL (op));
2029 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
2031 long l;
2032 REAL_VALUE_TYPE rv;
2034 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2035 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
2036 return num_insns_constant_wide ((HOST_WIDE_INT) l);
2039 else if (GET_CODE (op) == CONST_DOUBLE)
2041 HOST_WIDE_INT low;
2042 HOST_WIDE_INT high;
2043 long l[2];
2044 REAL_VALUE_TYPE rv;
2045 int endian = (WORDS_BIG_ENDIAN == 0);
2047 if (mode == VOIDmode || mode == DImode)
2049 high = CONST_DOUBLE_HIGH (op);
2050 low = CONST_DOUBLE_LOW (op);
2052 else
2054 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2055 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
2056 high = l[endian];
2057 low = l[1 - endian];
2060 if (TARGET_32BIT)
2061 return (num_insns_constant_wide (low)
2062 + num_insns_constant_wide (high));
2064 else
2066 if (high == 0 && low >= 0)
2067 return num_insns_constant_wide (low);
2069 else if (high == -1 && low < 0)
2070 return num_insns_constant_wide (low);
2072 else if (mask64_operand (op, mode))
2073 return 2;
2075 else if (low == 0)
2076 return num_insns_constant_wide (high) + 1;
2078 else
2079 return (num_insns_constant_wide (high)
2080 + num_insns_constant_wide (low) + 1);
2084 else
2085 abort ();
2088 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
2089 register with one instruction per word. We only do this if we can
2090 safely read CONST_DOUBLE_{LOW,HIGH}. */
2093 easy_fp_constant (rtx op, enum machine_mode mode)
2095 if (GET_CODE (op) != CONST_DOUBLE
2096 || GET_MODE (op) != mode
2097 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
2098 return 0;
2100 /* Consider all constants with -msoft-float to be easy. */
2101 if ((TARGET_SOFT_FLOAT || !TARGET_FPRS)
2102 && mode != DImode)
2103 return 1;
2105 /* If we are using V.4 style PIC, consider all constants to be hard. */
2106 if (flag_pic && DEFAULT_ABI == ABI_V4)
2107 return 0;
2109 #ifdef TARGET_RELOCATABLE
2110 /* Similarly if we are using -mrelocatable, consider all constants
2111 to be hard. */
2112 if (TARGET_RELOCATABLE)
2113 return 0;
2114 #endif
2116 if (mode == TFmode)
2118 long k[4];
2119 REAL_VALUE_TYPE rv;
2121 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2122 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
2124 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
2125 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1
2126 && num_insns_constant_wide ((HOST_WIDE_INT) k[2]) == 1
2127 && num_insns_constant_wide ((HOST_WIDE_INT) k[3]) == 1);
2130 else if (mode == DFmode)
2132 long k[2];
2133 REAL_VALUE_TYPE rv;
2135 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2136 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
2138 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
2139 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
2142 else if (mode == SFmode)
2144 long l;
2145 REAL_VALUE_TYPE rv;
2147 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2148 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
2150 return num_insns_constant_wide (l) == 1;
2153 else if (mode == DImode)
2154 return ((TARGET_POWERPC64
2155 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
2156 || (num_insns_constant (op, DImode) <= 2));
2158 else if (mode == SImode)
2159 return 1;
2160 else
2161 abort ();
2164 /* Returns the constant for the splat instruction, if exists. */
2166 static int
2167 easy_vector_splat_const (int cst, enum machine_mode mode)
2169 switch (mode)
2171 case V4SImode:
2172 if (EASY_VECTOR_15 (cst)
2173 || EASY_VECTOR_15_ADD_SELF (cst))
2174 return cst;
2175 if ((cst & 0xffff) != ((cst >> 16) & 0xffff))
2176 break;
2177 cst = cst >> 16;
2178 case V8HImode:
2179 if (EASY_VECTOR_15 (cst)
2180 || EASY_VECTOR_15_ADD_SELF (cst))
2181 return cst;
2182 if ((cst & 0xff) != ((cst >> 8) & 0xff))
2183 break;
2184 cst = cst >> 8;
2185 case V16QImode:
2186 if (EASY_VECTOR_15 (cst)
2187 || EASY_VECTOR_15_ADD_SELF (cst))
2188 return cst;
2189 default:
2190 break;
2192 return 0;
2196 /* Return nonzero if all elements of a vector have the same value. */
2198 static int
2199 easy_vector_same (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2201 int units, i, cst;
2203 units = CONST_VECTOR_NUNITS (op);
2205 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
2206 for (i = 1; i < units; ++i)
2207 if (INTVAL (CONST_VECTOR_ELT (op, i)) != cst)
2208 break;
2209 if (i == units && easy_vector_splat_const (cst, mode))
2210 return 1;
2211 return 0;
2214 /* Return 1 if the operand is a CONST_INT and can be put into a
2215 register without using memory. */
2218 easy_vector_constant (rtx op, enum machine_mode mode)
2220 int cst, cst2;
2222 if (GET_CODE (op) != CONST_VECTOR
2223 || (!TARGET_ALTIVEC
2224 && !TARGET_SPE))
2225 return 0;
2227 if (zero_constant (op, mode)
2228 && ((TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
2229 || (TARGET_SPE && SPE_VECTOR_MODE (mode))))
2230 return 1;
2232 if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT)
2233 return 0;
2235 if (TARGET_SPE && mode == V1DImode)
2236 return 0;
2238 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
2239 cst2 = INTVAL (CONST_VECTOR_ELT (op, 1));
2241 /* Limit SPE vectors to 15 bits signed. These we can generate with:
2242 li r0, CONSTANT1
2243 evmergelo r0, r0, r0
2244 li r0, CONSTANT2
2246 I don't know how efficient it would be to allow bigger constants,
2247 considering we'll have an extra 'ori' for every 'li'. I doubt 5
2248 instructions is better than a 64-bit memory load, but I don't
2249 have the e500 timing specs. */
2250 if (TARGET_SPE && mode == V2SImode
2251 && cst >= -0x7fff && cst <= 0x7fff
2252 && cst2 >= -0x7fff && cst2 <= 0x7fff)
2253 return 1;
2255 if (TARGET_ALTIVEC
2256 && easy_vector_same (op, mode))
2258 cst = easy_vector_splat_const (cst, mode);
2259 if (EASY_VECTOR_15_ADD_SELF (cst)
2260 || EASY_VECTOR_15 (cst))
2261 return 1;
2263 return 0;
2266 /* Same as easy_vector_constant but only for EASY_VECTOR_15_ADD_SELF. */
2269 easy_vector_constant_add_self (rtx op, enum machine_mode mode)
2271 int cst;
2272 if (TARGET_ALTIVEC
2273 && GET_CODE (op) == CONST_VECTOR
2274 && easy_vector_same (op, mode))
2276 cst = easy_vector_splat_const (INTVAL (CONST_VECTOR_ELT (op, 0)), mode);
2277 if (EASY_VECTOR_15_ADD_SELF (cst))
2278 return 1;
2280 return 0;
2283 /* Generate easy_vector_constant out of a easy_vector_constant_add_self. */
2285 rtx
2286 gen_easy_vector_constant_add_self (rtx op)
2288 int i, units;
2289 rtvec v;
2290 units = GET_MODE_NUNITS (GET_MODE (op));
2291 v = rtvec_alloc (units);
2293 for (i = 0; i < units; i++)
2294 RTVEC_ELT (v, i) =
2295 GEN_INT (INTVAL (CONST_VECTOR_ELT (op, i)) >> 1);
2296 return gen_rtx_raw_CONST_VECTOR (GET_MODE (op), v);
2299 const char *
2300 output_vec_const_move (rtx *operands)
2302 int cst, cst2;
2303 enum machine_mode mode;
2304 rtx dest, vec;
2306 dest = operands[0];
2307 vec = operands[1];
2309 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
2310 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
2311 mode = GET_MODE (dest);
2313 if (TARGET_ALTIVEC)
2315 if (zero_constant (vec, mode))
2316 return "vxor %0,%0,%0";
2317 else if (easy_vector_constant (vec, mode))
2319 operands[1] = GEN_INT (cst);
2320 switch (mode)
2322 case V4SImode:
2323 if (EASY_VECTOR_15 (cst))
2325 operands[1] = GEN_INT (cst);
2326 return "vspltisw %0,%1";
2328 else if (EASY_VECTOR_15_ADD_SELF (cst))
2329 return "#";
2330 cst = cst >> 16;
2331 case V8HImode:
2332 if (EASY_VECTOR_15 (cst))
2334 operands[1] = GEN_INT (cst);
2335 return "vspltish %0,%1";
2337 else if (EASY_VECTOR_15_ADD_SELF (cst))
2338 return "#";
2339 cst = cst >> 8;
2340 case V16QImode:
2341 if (EASY_VECTOR_15 (cst))
2343 operands[1] = GEN_INT (cst);
2344 return "vspltisb %0,%1";
2346 else if (EASY_VECTOR_15_ADD_SELF (cst))
2347 return "#";
2348 default:
2349 abort ();
2352 else
2353 abort ();
2356 if (TARGET_SPE)
2358 /* Vector constant 0 is handled as a splitter of V2SI, and in the
2359 pattern of V1DI, V4HI, and V2SF.
2361 FIXME: We should probably return # and add post reload
2362 splitters for these, but this way is so easy ;-).
2364 operands[1] = GEN_INT (cst);
2365 operands[2] = GEN_INT (cst2);
2366 if (cst == cst2)
2367 return "li %0,%1\n\tevmergelo %0,%0,%0";
2368 else
2369 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
2372 abort ();
2375 /* Return 1 if the operand is the constant 0. This works for scalars
2376 as well as vectors. */
2378 zero_constant (rtx op, enum machine_mode mode)
2380 return op == CONST0_RTX (mode);
2383 /* Return 1 if the operand is 0.0. */
2385 zero_fp_constant (rtx op, enum machine_mode mode)
2387 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
2390 /* Return 1 if the operand is in volatile memory. Note that during
2391 the RTL generation phase, memory_operand does not return TRUE for
2392 volatile memory references. So this function allows us to
2393 recognize volatile references where its safe. */
2396 volatile_mem_operand (rtx op, enum machine_mode mode)
2398 if (GET_CODE (op) != MEM)
2399 return 0;
2401 if (!MEM_VOLATILE_P (op))
2402 return 0;
2404 if (mode != GET_MODE (op))
2405 return 0;
2407 if (reload_completed)
2408 return memory_operand (op, mode);
2410 if (reload_in_progress)
2411 return strict_memory_address_p (mode, XEXP (op, 0));
2413 return memory_address_p (mode, XEXP (op, 0));
2416 /* Return 1 if the operand is an offsettable memory operand. */
2419 offsettable_mem_operand (rtx op, enum machine_mode mode)
2421 return ((GET_CODE (op) == MEM)
2422 && offsettable_address_p (reload_completed || reload_in_progress,
2423 mode, XEXP (op, 0)));
2426 /* Return 1 if the operand is either an easy FP constant (see above) or
2427 memory. */
2430 mem_or_easy_const_operand (rtx op, enum machine_mode mode)
2432 return memory_operand (op, mode) || easy_fp_constant (op, mode);
2435 /* Return 1 if the operand is either a non-special register or an item
2436 that can be used as the operand of a `mode' add insn. */
2439 add_operand (rtx op, enum machine_mode mode)
2441 if (GET_CODE (op) == CONST_INT)
2442 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
2443 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
2445 return gpc_reg_operand (op, mode);
2448 /* Return 1 if OP is a constant but not a valid add_operand. */
2451 non_add_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2453 return (GET_CODE (op) == CONST_INT
2454 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
2455 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
2458 /* Return 1 if the operand is a non-special register or a constant that
2459 can be used as the operand of an OR or XOR insn on the RS/6000. */
2462 logical_operand (rtx op, enum machine_mode mode)
2464 HOST_WIDE_INT opl, oph;
2466 if (gpc_reg_operand (op, mode))
2467 return 1;
2469 if (GET_CODE (op) == CONST_INT)
2471 opl = INTVAL (op) & GET_MODE_MASK (mode);
2473 #if HOST_BITS_PER_WIDE_INT <= 32
2474 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
2475 return 0;
2476 #endif
2478 else if (GET_CODE (op) == CONST_DOUBLE)
2480 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
2481 abort ();
2483 opl = CONST_DOUBLE_LOW (op);
2484 oph = CONST_DOUBLE_HIGH (op);
2485 if (oph != 0)
2486 return 0;
2488 else
2489 return 0;
2491 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
2492 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
2495 /* Return 1 if C is a constant that is not a logical operand (as
2496 above), but could be split into one. */
2499 non_logical_cint_operand (rtx op, enum machine_mode mode)
2501 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
2502 && ! logical_operand (op, mode)
2503 && reg_or_logical_cint_operand (op, mode));
2506 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
2507 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
2508 Reject all ones and all zeros, since these should have been optimized
2509 away and confuse the making of MB and ME. */
2512 mask_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2514 HOST_WIDE_INT c, lsb;
2516 if (GET_CODE (op) != CONST_INT)
2517 return 0;
2519 c = INTVAL (op);
2521 /* Fail in 64-bit mode if the mask wraps around because the upper
2522 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
2523 if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
2524 return 0;
2526 /* We don't change the number of transitions by inverting,
2527 so make sure we start with the LS bit zero. */
2528 if (c & 1)
2529 c = ~c;
2531 /* Reject all zeros or all ones. */
2532 if (c == 0)
2533 return 0;
2535 /* Find the first transition. */
2536 lsb = c & -c;
2538 /* Invert to look for a second transition. */
2539 c = ~c;
2541 /* Erase first transition. */
2542 c &= -lsb;
2544 /* Find the second transition (if any). */
2545 lsb = c & -c;
2547 /* Match if all the bits above are 1's (or c is zero). */
2548 return c == -lsb;
2551 /* Return 1 for the PowerPC64 rlwinm corner case. */
2554 mask_operand_wrap (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2556 HOST_WIDE_INT c, lsb;
2558 if (GET_CODE (op) != CONST_INT)
2559 return 0;
2561 c = INTVAL (op);
2563 if ((c & 0x80000001) != 0x80000001)
2564 return 0;
2566 c = ~c;
2567 if (c == 0)
2568 return 0;
2570 lsb = c & -c;
2571 c = ~c;
2572 c &= -lsb;
2573 lsb = c & -c;
2574 return c == -lsb;
2577 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
2578 It is if there are no more than one 1->0 or 0->1 transitions.
2579 Reject all zeros, since zero should have been optimized away and
2580 confuses the making of MB and ME. */
2583 mask64_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2585 if (GET_CODE (op) == CONST_INT)
2587 HOST_WIDE_INT c, lsb;
2589 c = INTVAL (op);
2591 /* Reject all zeros. */
2592 if (c == 0)
2593 return 0;
2595 /* We don't change the number of transitions by inverting,
2596 so make sure we start with the LS bit zero. */
2597 if (c & 1)
2598 c = ~c;
2600 /* Find the transition, and check that all bits above are 1's. */
2601 lsb = c & -c;
2603 /* Match if all the bits above are 1's (or c is zero). */
2604 return c == -lsb;
2606 return 0;
2609 /* Like mask64_operand, but allow up to three transitions. This
2610 predicate is used by insn patterns that generate two rldicl or
2611 rldicr machine insns. */
2614 mask64_2_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2616 if (GET_CODE (op) == CONST_INT)
2618 HOST_WIDE_INT c, lsb;
2620 c = INTVAL (op);
2622 /* Disallow all zeros. */
2623 if (c == 0)
2624 return 0;
2626 /* We don't change the number of transitions by inverting,
2627 so make sure we start with the LS bit zero. */
2628 if (c & 1)
2629 c = ~c;
2631 /* Find the first transition. */
2632 lsb = c & -c;
2634 /* Invert to look for a second transition. */
2635 c = ~c;
2637 /* Erase first transition. */
2638 c &= -lsb;
2640 /* Find the second transition. */
2641 lsb = c & -c;
2643 /* Invert to look for a third transition. */
2644 c = ~c;
2646 /* Erase second transition. */
2647 c &= -lsb;
2649 /* Find the third transition (if any). */
2650 lsb = c & -c;
2652 /* Match if all the bits above are 1's (or c is zero). */
2653 return c == -lsb;
2655 return 0;
2658 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
2659 implement ANDing by the mask IN. */
2660 void
2661 build_mask64_2_operands (rtx in, rtx *out)
2663 #if HOST_BITS_PER_WIDE_INT >= 64
2664 unsigned HOST_WIDE_INT c, lsb, m1, m2;
2665 int shift;
2667 if (GET_CODE (in) != CONST_INT)
2668 abort ();
2670 c = INTVAL (in);
2671 if (c & 1)
2673 /* Assume c initially something like 0x00fff000000fffff. The idea
2674 is to rotate the word so that the middle ^^^^^^ group of zeros
2675 is at the MS end and can be cleared with an rldicl mask. We then
2676 rotate back and clear off the MS ^^ group of zeros with a
2677 second rldicl. */
2678 c = ~c; /* c == 0xff000ffffff00000 */
2679 lsb = c & -c; /* lsb == 0x0000000000100000 */
2680 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
2681 c = ~c; /* c == 0x00fff000000fffff */
2682 c &= -lsb; /* c == 0x00fff00000000000 */
2683 lsb = c & -c; /* lsb == 0x0000100000000000 */
2684 c = ~c; /* c == 0xff000fffffffffff */
2685 c &= -lsb; /* c == 0xff00000000000000 */
2686 shift = 0;
2687 while ((lsb >>= 1) != 0)
2688 shift++; /* shift == 44 on exit from loop */
2689 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
2690 m1 = ~m1; /* m1 == 0x000000ffffffffff */
2691 m2 = ~c; /* m2 == 0x00ffffffffffffff */
2693 else
2695 /* Assume c initially something like 0xff000f0000000000. The idea
2696 is to rotate the word so that the ^^^ middle group of zeros
2697 is at the LS end and can be cleared with an rldicr mask. We then
2698 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
2699 a second rldicr. */
2700 lsb = c & -c; /* lsb == 0x0000010000000000 */
2701 m2 = -lsb; /* m2 == 0xffffff0000000000 */
2702 c = ~c; /* c == 0x00fff0ffffffffff */
2703 c &= -lsb; /* c == 0x00fff00000000000 */
2704 lsb = c & -c; /* lsb == 0x0000100000000000 */
2705 c = ~c; /* c == 0xff000fffffffffff */
2706 c &= -lsb; /* c == 0xff00000000000000 */
2707 shift = 0;
2708 while ((lsb >>= 1) != 0)
2709 shift++; /* shift == 44 on exit from loop */
2710 m1 = ~c; /* m1 == 0x00ffffffffffffff */
2711 m1 >>= shift; /* m1 == 0x0000000000000fff */
2712 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
2715 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
2716 masks will be all 1's. We are guaranteed more than one transition. */
2717 out[0] = GEN_INT (64 - shift);
2718 out[1] = GEN_INT (m1);
2719 out[2] = GEN_INT (shift);
2720 out[3] = GEN_INT (m2);
2721 #else
2722 (void)in;
2723 (void)out;
2724 abort ();
2725 #endif
2728 /* Return 1 if the operand is either a non-special register or a constant
2729 that can be used as the operand of a PowerPC64 logical AND insn. */
2732 and64_operand (rtx op, enum machine_mode mode)
2734 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2735 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
2737 return (logical_operand (op, mode) || mask64_operand (op, mode));
2740 /* Like the above, but also match constants that can be implemented
2741 with two rldicl or rldicr insns. */
2744 and64_2_operand (rtx op, enum machine_mode mode)
2746 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2747 return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
2749 return logical_operand (op, mode) || mask64_2_operand (op, mode);
2752 /* Return 1 if the operand is either a non-special register or a
2753 constant that can be used as the operand of an RS/6000 logical AND insn. */
2756 and_operand (rtx op, enum machine_mode mode)
2758 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2759 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
2761 return (logical_operand (op, mode) || mask_operand (op, mode));
2764 /* Return 1 if the operand is a general register or memory operand. */
2767 reg_or_mem_operand (rtx op, enum machine_mode mode)
2769 return (gpc_reg_operand (op, mode)
2770 || memory_operand (op, mode)
2771 || macho_lo_sum_memory_operand (op, mode)
2772 || volatile_mem_operand (op, mode));
2775 /* Return 1 if the operand is a general register or memory operand without
2776 pre_inc or pre_dec which produces invalid form of PowerPC lwa
2777 instruction. */
2780 lwa_operand (rtx op, enum machine_mode mode)
2782 rtx inner = op;
2784 if (reload_completed && GET_CODE (inner) == SUBREG)
2785 inner = SUBREG_REG (inner);
2787 return gpc_reg_operand (inner, mode)
2788 || (memory_operand (inner, mode)
2789 && GET_CODE (XEXP (inner, 0)) != PRE_INC
2790 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
2791 && (GET_CODE (XEXP (inner, 0)) != PLUS
2792 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
2793 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
2796 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
2799 symbol_ref_operand (rtx op, enum machine_mode mode)
2801 if (mode != VOIDmode && GET_MODE (op) != mode)
2802 return 0;
2804 return (GET_CODE (op) == SYMBOL_REF
2805 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op)));
2808 /* Return 1 if the operand, used inside a MEM, is a valid first argument
2809 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
2812 call_operand (rtx op, enum machine_mode mode)
2814 if (mode != VOIDmode && GET_MODE (op) != mode)
2815 return 0;
2817 return (GET_CODE (op) == SYMBOL_REF
2818 || (GET_CODE (op) == REG
2819 && (REGNO (op) == LINK_REGISTER_REGNUM
2820 || REGNO (op) == COUNT_REGISTER_REGNUM
2821 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2824 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
2825 this file. */
2828 current_file_function_operand (rtx op,
2829 enum machine_mode mode ATTRIBUTE_UNUSED)
2831 return (GET_CODE (op) == SYMBOL_REF
2832 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op))
2833 && (SYMBOL_REF_LOCAL_P (op)
2834 || (op == XEXP (DECL_RTL (current_function_decl), 0))));
2837 /* Return 1 if this operand is a valid input for a move insn. */
2840 input_operand (rtx op, enum machine_mode mode)
2842 /* Memory is always valid. */
2843 if (memory_operand (op, mode))
2844 return 1;
2846 /* For floating-point, easy constants are valid. */
2847 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2848 && CONSTANT_P (op)
2849 && easy_fp_constant (op, mode))
2850 return 1;
2852 /* Allow any integer constant. */
2853 if (GET_MODE_CLASS (mode) == MODE_INT
2854 && (GET_CODE (op) == CONST_INT
2855 || GET_CODE (op) == CONST_DOUBLE))
2856 return 1;
2858 /* Allow easy vector constants. */
2859 if (GET_CODE (op) == CONST_VECTOR
2860 && easy_vector_constant (op, mode))
2861 return 1;
2863 /* For floating-point or multi-word mode, the only remaining valid type
2864 is a register. */
2865 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2866 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2867 return register_operand (op, mode);
2869 /* The only cases left are integral modes one word or smaller (we
2870 do not get called for MODE_CC values). These can be in any
2871 register. */
2872 if (register_operand (op, mode))
2873 return 1;
2875 /* A SYMBOL_REF referring to the TOC is valid. */
2876 if (legitimate_constant_pool_address_p (op))
2877 return 1;
2879 /* A constant pool expression (relative to the TOC) is valid */
2880 if (toc_relative_expr_p (op))
2881 return 1;
2883 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
2884 to be valid. */
2885 if (DEFAULT_ABI == ABI_V4
2886 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
2887 && small_data_operand (op, Pmode))
2888 return 1;
2890 return 0;
2894 /* Darwin, AIX increases natural record alignment to doubleword if the first
2895 field is an FP double while the FP fields remain word aligned. */
2897 unsigned int
2898 rs6000_special_round_type_align (tree type, int computed, int specified)
2900 tree field = TYPE_FIELDS (type);
2902 /* Skip all the static variables only if ABI is greater than
2903 1 or equal to 0. */
2904 while (field != NULL && TREE_CODE (field) == VAR_DECL)
2905 field = TREE_CHAIN (field);
2907 if (field == NULL || field == type || DECL_MODE (field) != DFmode)
2908 return MAX (computed, specified);
2910 return MAX (MAX (computed, specified), 64);
2913 /* Return 1 for an operand in small memory on V.4/eabi. */
2916 small_data_operand (rtx op ATTRIBUTE_UNUSED,
2917 enum machine_mode mode ATTRIBUTE_UNUSED)
2919 #if TARGET_ELF
2920 rtx sym_ref;
2922 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
2923 return 0;
2925 if (DEFAULT_ABI != ABI_V4)
2926 return 0;
2928 if (GET_CODE (op) == SYMBOL_REF)
2929 sym_ref = op;
2931 else if (GET_CODE (op) != CONST
2932 || GET_CODE (XEXP (op, 0)) != PLUS
2933 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
2934 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
2935 return 0;
2937 else
2939 rtx sum = XEXP (op, 0);
2940 HOST_WIDE_INT summand;
2942 /* We have to be careful here, because it is the referenced address
2943 that must be 32k from _SDA_BASE_, not just the symbol. */
2944 summand = INTVAL (XEXP (sum, 1));
2945 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
2946 return 0;
2948 sym_ref = XEXP (sum, 0);
2951 return SYMBOL_REF_SMALL_P (sym_ref);
2952 #else
2953 return 0;
2954 #endif
2957 /* Return true, if operand is a memory operand and has a
2958 displacement divisible by 4. */
2961 word_offset_memref_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2963 rtx addr;
2964 int off = 0;
2966 if (!memory_operand (op, mode))
2967 return 0;
2969 addr = XEXP (op, 0);
2970 if (GET_CODE (addr) == PLUS
2971 && GET_CODE (XEXP (addr, 0)) == REG
2972 && GET_CODE (XEXP (addr, 1)) == CONST_INT)
2973 off = INTVAL (XEXP (addr, 1));
2975 return (off % 4) == 0;
2978 /* Return true if either operand is a general purpose register. */
2980 bool
2981 gpr_or_gpr_p (rtx op0, rtx op1)
2983 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
2984 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
2988 /* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
2990 static int
2991 constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
2993 switch (GET_CODE(op))
2995 case SYMBOL_REF:
2996 if (RS6000_SYMBOL_REF_TLS_P (op))
2997 return 0;
2998 else if (CONSTANT_POOL_ADDRESS_P (op))
3000 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
3002 *have_sym = 1;
3003 return 1;
3005 else
3006 return 0;
3008 else if (! strcmp (XSTR (op, 0), toc_label_name))
3010 *have_toc = 1;
3011 return 1;
3013 else
3014 return 0;
3015 case PLUS:
3016 case MINUS:
3017 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
3018 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
3019 case CONST:
3020 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
3021 case CONST_INT:
3022 return 1;
3023 default:
3024 return 0;
3028 static bool
3029 constant_pool_expr_p (rtx op)
3031 int have_sym = 0;
3032 int have_toc = 0;
3033 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
3036 static bool
3037 toc_relative_expr_p (rtx op)
3039 int have_sym = 0;
3040 int have_toc = 0;
3041 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
3044 bool
3045 legitimate_constant_pool_address_p (rtx x)
3047 return (TARGET_TOC
3048 && GET_CODE (x) == PLUS
3049 && GET_CODE (XEXP (x, 0)) == REG
3050 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
3051 && constant_pool_expr_p (XEXP (x, 1)));
3054 static bool
3055 legitimate_small_data_p (enum machine_mode mode, rtx x)
3057 return (DEFAULT_ABI == ABI_V4
3058 && !flag_pic && !TARGET_TOC
3059 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
3060 && small_data_operand (x, mode));
/* SPE offset addressing is limited to 5-bits worth of double words.  */
/* I.e. the offset must be doubleword-aligned and in 0..248.  */
#define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
/* Return true if X is a valid reg+const_int (d-form) address for MODE.
   STRICT selects strict register checking (after reload).

   The 16-bit signed displacement must be valid for the whole access;
   EXTRA accounts for the additional bytes touched beyond the first
   word of a multi-word access.  */
bool
rs6000_legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
{
  unsigned HOST_WIDE_INT offset, extra;

  if (GET_CODE (x) != PLUS)
    return false;
  if (GET_CODE (XEXP (x, 0)) != REG)
    return false;
  if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
    return false;
  /* TOC references look like reg+sym; they are handled separately.  */
  if (legitimate_constant_pool_address_p (x))
    return true;
  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
    return false;

  offset = INTVAL (XEXP (x, 1));
  extra = 0;
  switch (mode)
    {
    case V16QImode:
    case V8HImode:
    case V4SFmode:
    case V4SImode:
      /* AltiVec vector modes.  Only reg+reg addressing is valid here,
         which leaves the only valid constant offset of zero, which by
         canonicalization rules is also invalid.  */
      return false;

    case V4HImode:
    case V2SImode:
    case V1DImode:
    case V2SFmode:
      /* SPE vector modes.  */
      return SPE_CONST_OFFSET_OK (offset);

    case DFmode:
    case DImode:
      /* 32-bit: accessed as two words, so the second word's offset
         must also fit; 64-bit DImode needs a word-aligned offset
         for the ld/std instructions.  */
      if (mode == DFmode || !TARGET_POWERPC64)
        extra = 4;
      else if (offset & 3)
        return false;
      break;

    case TFmode:
    case TImode:
      /* Four words (or two doublewords on 64-bit).  */
      if (mode == TFmode || !TARGET_POWERPC64)
        extra = 12;
      else if (offset & 3)
        return false;
      else
        extra = 8;
      break;

    default:
      break;
    }

  /* Bias by 0x8000 so a signed 16-bit displacement becomes an
     unsigned range check; both first and last byte must be in range.  */
  offset += 0x8000;
  return (offset < 0x10000) && (offset + extra < 0x10000);
}
3128 static bool
3129 legitimate_indexed_address_p (rtx x, int strict)
3131 rtx op0, op1;
3133 if (GET_CODE (x) != PLUS)
3134 return false;
3135 op0 = XEXP (x, 0);
3136 op1 = XEXP (x, 1);
3138 if (!REG_P (op0) || !REG_P (op1))
3139 return false;
3141 return ((INT_REG_OK_FOR_BASE_P (op0, strict)
3142 && INT_REG_OK_FOR_INDEX_P (op1, strict))
3143 || (INT_REG_OK_FOR_BASE_P (op1, strict)
3144 && INT_REG_OK_FOR_INDEX_P (op0, strict)));
3147 static inline bool
3148 legitimate_indirect_address_p (rtx x, int strict)
3150 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
3153 static bool
3154 macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
3156 if (!TARGET_MACHO || !flag_pic
3157 || mode != SImode || GET_CODE(x) != MEM)
3158 return false;
3159 x = XEXP (x, 0);
3161 if (GET_CODE (x) != LO_SUM)
3162 return false;
3163 if (GET_CODE (XEXP (x, 0)) != REG)
3164 return false;
3165 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
3166 return false;
3167 x = XEXP (x, 1);
3169 return CONSTANT_P (x);
3172 static bool
3173 legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
3175 if (GET_CODE (x) != LO_SUM)
3176 return false;
3177 if (GET_CODE (XEXP (x, 0)) != REG)
3178 return false;
3179 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3180 return false;
3181 x = XEXP (x, 1);
3183 if (TARGET_ELF || TARGET_MACHO)
3185 if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
3186 return false;
3187 if (TARGET_TOC)
3188 return false;
3189 if (GET_MODE_NUNITS (mode) != 1)
3190 return false;
3191 if (GET_MODE_BITSIZE (mode) > 64)
3192 return false;
3194 return CONSTANT_P (x);
3197 return false;
/* Try machine-dependent ways of modifying an illegitimate address
   to be legitimate.  If we find one, return the new, valid address.
   This is used from only one place: `memory_address' in explow.c.

   OLDX is the address as it was before break_out_memory_refs was
   called.  In some cases it is useful to look at this to decide what
   needs to be done.

   MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.

   It is always safe for this function to do nothing.  It exists to
   recognize opportunities to optimize the output.

   On RS/6000, first check for the sum of a register with a constant
   integer that is out of range.  If so, generate code to add the
   constant with the low-order 16 bits masked to the register and force
   this result into another register (this can be done with `cau').
   Then generate an address of REG+(CONST&0xffff), allowing for the
   possibility of bit 16 being a one.

   Then check for the sum of a register and something not constant, try to
   load the other things into a register and return the sum.  */

rtx
rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
                           enum machine_mode mode)
{
  /* Thread-local symbols need their own materialization sequence.  */
  if (GET_CODE (x) == SYMBOL_REF)
    {
      enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
      if (model != 0)
        return rs6000_legitimize_tls_address (x, model);
    }

  /* reg + out-of-range constant: split into high (addis-reachable)
     and sign-adjusted low 16-bit parts.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
    {
      HOST_WIDE_INT high_int, low_int;
      rtx sum;
      low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
      high_int = INTVAL (XEXP (x, 1)) - low_int;
      sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
                                         GEN_INT (high_int)), 0);
      return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
    }
  /* reg + non-constant, for modes that allow indexed addressing:
     force the second term into a register.  */
  else if (GET_CODE (x) == PLUS
           && GET_CODE (XEXP (x, 0)) == REG
           && GET_CODE (XEXP (x, 1)) != CONST_INT
           && GET_MODE_NUNITS (mode) == 1
           && ((TARGET_HARD_FLOAT && TARGET_FPRS)
               || TARGET_POWERPC64
               || (mode != DFmode && mode != TFmode))
           && (TARGET_POWERPC64 || mode != DImode)
           && mode != TImode)
    {
      return gen_rtx_PLUS (Pmode, XEXP (x, 0),
                           force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
    }
  else if (ALTIVEC_VECTOR_MODE (mode))
    {
      rtx reg;

      /* Make sure both operands are registers.  */
      if (GET_CODE (x) == PLUS)
        return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
                             force_reg (Pmode, XEXP (x, 1)));

      reg = force_reg (Pmode, x);
      return reg;
    }
  else if (SPE_VECTOR_MODE (mode))
    {
      /* We accept [reg + reg] and [reg + OFFSET].  */

      if (GET_CODE (x) == PLUS)
        {
          rtx op1 = XEXP (x, 0);
          rtx op2 = XEXP (x, 1);

          op1 = force_reg (Pmode, op1);

          /* Offsets outside the 5-bit SPE range must go in a register.  */
          if (GET_CODE (op2) != REG
              && (GET_CODE (op2) != CONST_INT
                  || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
            op2 = force_reg (Pmode, op2);

          return gen_rtx_PLUS (Pmode, op1, op2);
        }

      return force_reg (Pmode, x);
    }
  /* SVR4 without a TOC: materialize constant addresses with
     elf_high + lo_sum.  */
  else if (TARGET_ELF
           && TARGET_32BIT
           && TARGET_NO_TOC
           && ! flag_pic
           && GET_CODE (x) != CONST_INT
           && GET_CODE (x) != CONST_DOUBLE
           && CONSTANT_P (x)
           && GET_MODE_NUNITS (mode) == 1
           && (GET_MODE_BITSIZE (mode) <= 32
               || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_elf_high (reg, x));
      return gen_rtx_LO_SUM (Pmode, reg, x);
    }
  /* Darwin, non-PIC: same high/lo_sum scheme via macho_high.  */
  else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
           && ! flag_pic
#if TARGET_MACHO
           && ! MACHO_DYNAMIC_NO_PIC_P
#endif
           && GET_CODE (x) != CONST_INT
           && GET_CODE (x) != CONST_DOUBLE
           && CONSTANT_P (x)
           && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
           && mode != DImode
           && mode != TImode)
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_macho_high (reg, x));
      return gen_rtx_LO_SUM (Pmode, reg, x);
    }
  /* Constants that live in the TOC become TOC references.  */
  else if (TARGET_TOC
           && constant_pool_expr_p (x)
           && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
    {
      return create_TOC_reference (x);
    }
  else
    return NULL_RTX;
}
3335 /* This is called from dwarf2out.c via ASM_OUTPUT_DWARF_DTPREL.
3336 We need to emit DTP-relative relocations. */
3338 void
3339 rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
3341 switch (size)
3343 case 4:
3344 fputs ("\t.long\t", file);
3345 break;
3346 case 8:
3347 fputs (DOUBLE_INT_ASM_OP, file);
3348 break;
3349 default:
3350 abort ();
3352 output_addr_const (file, x);
3353 fputs ("@dtprel+0x8000", file);
3356 /* Construct the SYMBOL_REF for the tls_get_addr function. */
3358 static GTY(()) rtx rs6000_tls_symbol;
3359 static rtx
3360 rs6000_tls_get_addr (void)
3362 if (!rs6000_tls_symbol)
3363 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
3365 return rs6000_tls_symbol;
3368 /* Construct the SYMBOL_REF for TLS GOT references. */
3370 static GTY(()) rtx rs6000_got_symbol;
3371 static rtx
3372 rs6000_got_sym (void)
3374 if (!rs6000_got_symbol)
3376 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
3377 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
3378 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
3381 return rs6000_got_symbol;
/* ADDR contains a thread-local SYMBOL_REF.  Generate code to compute
   this (thread-local) address, according to MODEL (local-exec,
   initial-exec, local-dynamic or global-dynamic).  Returns a new
   pseudo holding the address.  */

static rtx
rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
{
  rtx dest, insn;

  dest = gen_reg_rtx (Pmode);
  /* Local-exec with a 16-bit offset: a single tprel add off the
     thread-pointer register (r13 on 64-bit, r2 on 32-bit, per ABI).  */
  if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
    {
      rtx tlsreg;

      if (TARGET_64BIT)
        {
          tlsreg = gen_rtx_REG (Pmode, 13);
          insn = gen_tls_tprel_64 (dest, tlsreg, addr);
        }
      else
        {
          tlsreg = gen_rtx_REG (Pmode, 2);
          insn = gen_tls_tprel_32 (dest, tlsreg, addr);
        }
      emit_insn (insn);
    }
  /* Local-exec with a 32-bit offset: high-adjust then low part.  */
  else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
    {
      rtx tlsreg, tmp;

      tmp = gen_reg_rtx (Pmode);
      if (TARGET_64BIT)
        {
          tlsreg = gen_rtx_REG (Pmode, 13);
          insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
        }
      else
        {
          tlsreg = gen_rtx_REG (Pmode, 2);
          insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
        }
      emit_insn (insn);
      if (TARGET_64BIT)
        insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
      else
        insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
      emit_insn (insn);
    }
  else
    {
      rtx r3, got, tga, tmp1, tmp2, eqv;

      /* All other models need the GOT (or TOC) pointer first.  */
      if (TARGET_64BIT)
        got = gen_rtx_REG (Pmode, TOC_REGISTER);
      else
        {
          if (flag_pic == 1)
            got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
          else
            {
              rtx gsym = rs6000_got_sym ();
              got = gen_reg_rtx (Pmode);
              if (flag_pic == 0)
                rs6000_emit_move (got, gsym, Pmode);
              else
                {
                  /* -fPIC: compute the GOT address PC-relatively, and
                     wrap the sequence in libcall notes so it can be
                     deleted or moved as a unit.  */
                  char buf[30];
                  static int tls_got_labelno = 0;
                  rtx tempLR, lab, tmp3, mem;
                  rtx first, last;

                  ASM_GENERATE_INTERNAL_LABEL (buf, "LTLS", tls_got_labelno++);
                  lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
                  tempLR = gen_reg_rtx (Pmode);
                  tmp1 = gen_reg_rtx (Pmode);
                  tmp2 = gen_reg_rtx (Pmode);
                  tmp3 = gen_reg_rtx (Pmode);
                  mem = gen_rtx_MEM (Pmode, tmp1);
                  RTX_UNCHANGING_P (mem) = 1;

                  first = emit_insn (gen_load_toc_v4_PIC_1b (tempLR, lab,
                                                             gsym));
                  emit_move_insn (tmp1, tempLR);
                  emit_move_insn (tmp2, mem);
                  emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
                  last = emit_move_insn (got, tmp3);
                  REG_NOTES (last) = gen_rtx_EXPR_LIST (REG_EQUAL, gsym,
                                                        REG_NOTES (last));
                  REG_NOTES (first) = gen_rtx_INSN_LIST (REG_LIBCALL, last,
                                                         REG_NOTES (first));
                  REG_NOTES (last) = gen_rtx_INSN_LIST (REG_RETVAL, first,
                                                        REG_NOTES (last));
                }
            }
        }

      if (model == TLS_MODEL_GLOBAL_DYNAMIC)
        {
          /* Call __tls_get_addr with the GD argument in r3; the whole
             sequence is emitted as a libcall block equivalent to ADDR.  */
          r3 = gen_rtx_REG (Pmode, 3);
          if (TARGET_64BIT)
            insn = gen_tls_gd_64 (r3, got, addr);
          else
            insn = gen_tls_gd_32 (r3, got, addr);
          start_sequence ();
          emit_insn (insn);
          tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
          insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
          insn = emit_call_insn (insn);
          CONST_OR_PURE_CALL_P (insn) = 1;
          use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
          insn = get_insns ();
          end_sequence ();
          emit_libcall_block (insn, dest, r3, addr);
        }
      else if (model == TLS_MODEL_LOCAL_DYNAMIC)
        {
          /* One __tls_get_addr call yields the module base (tmp1);
             then add the dtprel offset of ADDR per rs6000_tls_size.  */
          r3 = gen_rtx_REG (Pmode, 3);
          if (TARGET_64BIT)
            insn = gen_tls_ld_64 (r3, got);
          else
            insn = gen_tls_ld_32 (r3, got);
          start_sequence ();
          emit_insn (insn);
          tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
          insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
          insn = emit_call_insn (insn);
          CONST_OR_PURE_CALL_P (insn) = 1;
          use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
          insn = get_insns ();
          end_sequence ();
          tmp1 = gen_reg_rtx (Pmode);
          eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
                                UNSPEC_TLSLD);
          emit_libcall_block (insn, tmp1, r3, eqv);
          if (rs6000_tls_size == 16)
            {
              if (TARGET_64BIT)
                insn = gen_tls_dtprel_64 (dest, tmp1, addr);
              else
                insn = gen_tls_dtprel_32 (dest, tmp1, addr);
            }
          else if (rs6000_tls_size == 32)
            {
              tmp2 = gen_reg_rtx (Pmode);
              if (TARGET_64BIT)
                insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
              else
                insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
              emit_insn (insn);
              if (TARGET_64BIT)
                insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
              else
                insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
            }
          else
            {
              /* Full-size dtprel offset loaded from the GOT.  */
              tmp2 = gen_reg_rtx (Pmode);
              if (TARGET_64BIT)
                insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
              else
                insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
              emit_insn (insn);
              insn = gen_rtx_SET (Pmode, dest,
                                  gen_rtx_PLUS (Pmode, tmp2, tmp1));
            }
          emit_insn (insn);
        }
      else
        {
          /* IE, or 64 bit offset LE.  */
          tmp2 = gen_reg_rtx (Pmode);
          if (TARGET_64BIT)
            insn = gen_tls_got_tprel_64 (tmp2, got, addr);
          else
            insn = gen_tls_got_tprel_32 (tmp2, got, addr);
          emit_insn (insn);
          if (TARGET_64BIT)
            insn = gen_tls_tls_64 (dest, tmp2, addr);
          else
            insn = gen_tls_tls_32 (dest, tmp2, addr);
          emit_insn (insn);
        }
    }

  return dest;
}
/* Return 1 if X is a SYMBOL_REF for a TLS symbol.  This is used in
   instruction definitions (as a predicate, so MODE is required by the
   interface but unused).  */

int
rs6000_tls_symbol_ref (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return RS6000_SYMBOL_REF_TLS_P (x);
}
3579 /* Return 1 if X contains a thread-local symbol. */
3581 bool
3582 rs6000_tls_referenced_p (rtx x)
3584 if (! TARGET_HAVE_TLS)
3585 return false;
3587 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
/* Return 1 if *X is a thread-local symbol.  This is the same as
   rs6000_tls_symbol_ref except for the type of the unused argument,
   making it usable as a for_each_rtx callback.  */

static inline int
rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
{
  return RS6000_SYMBOL_REF_TLS_P (*x);
}
3599 /* The convention appears to be to define this wherever it is used.
3600 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
3601 is now used here. */
3602 #ifndef REG_MODE_OK_FOR_BASE_P
3603 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
3604 #endif
/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS.  Returns a value to
   replace the input X, or the original X if no replacement is called for.
   The output parameter *WIN is 1 if the calling macro should goto WIN,
   0 if it should not.

   For RS/6000, we wish to handle large displacements off a base
   register by splitting the addend across an addi/addis and the mem insn.
   This cuts number of extra insns needed from 3 to 1.

   On Darwin, we use this to generate code for floating point constants.
   A movsf_low is generated so we wind up with 2 instructions rather than 3.
   The Darwin code is inside #if TARGET_MACHO because only then is
   machopic_function_base_name() defined.  */

rtx
rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
        int opnum, int type, int ind_levels ATTRIBUTE_UNUSED, int *win)
{
  /* We must recognize output that we have already generated ourselves.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
                   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
                   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }

#if TARGET_MACHO
  if (DEFAULT_ABI == ABI_DARWIN && flag_pic
      && GET_CODE (x) == LO_SUM
      && GET_CODE (XEXP (x, 0)) == PLUS
      && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
      && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
      && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
    {
      /* Result of previous invocation of this function on Darwin
         floating point constant.  */
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
                   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
                   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif
  /* reg + large constant: split into high and sign-adjusted low
     16-bit halves, reloading only the high part.  Vector modes do not
     allow d-form addressing, so they are excluded.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
      && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && !SPE_VECTOR_MODE (mode)
      && !ALTIVEC_VECTOR_MODE (mode))
    {
      HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
      HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
      HOST_WIDE_INT high
        = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;

      /* Check for 32-bit overflow.  */
      if (high + low != val)
        {
          *win = 0;
          return x;
        }

      /* Reload the high part into a base reg; leave the low part
         in the mem directly.  */

      x = gen_rtx_PLUS (GET_MODE (x),
                        gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
                                      GEN_INT (high)),
                        GEN_INT (low));

      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
                   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
                   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#if TARGET_MACHO
  if (GET_CODE (x) == SYMBOL_REF
      && DEFAULT_ABI == ABI_DARWIN
      && !ALTIVEC_VECTOR_MODE (mode)
      && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
      /* Don't do this for TFmode, since the result isn't offsettable.  */
      && mode != TFmode)
    {
      if (flag_pic)
        {
          /* PIC symbol: address it relative to the function's
             picbase via HIGH/LO_SUM.  */
          rtx offset = gen_rtx_CONST (Pmode,
                         gen_rtx_MINUS (Pmode, x,
                                        machopic_function_base_sym ()));
          x = gen_rtx_LO_SUM (GET_MODE (x),
                gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
                              gen_rtx_HIGH (Pmode, offset)), offset);
        }
      else
        x = gen_rtx_LO_SUM (GET_MODE (x),
              gen_rtx_HIGH (Pmode, x), x);

      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
                   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
                   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif
  /* Constant-pool entries reachable through the TOC need no reload.  */
  if (TARGET_TOC
      && constant_pool_expr_p (x)
      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
    {
      (x) = create_TOC_reference (x);
      *win = 1;
      return x;
    }
  *win = 0;
  return x;
}
/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
   that is a valid memory address for an instruction.
   The MODE argument is the machine mode for the MEM expression
   that wants to use this address.

   On the RS/6000, there are four valid address: a SYMBOL_REF that
   refers to a constant pool entry of an address (or the sum of it
   plus a constant), a short (16-bit signed) constant plus a register,
   the sum of two registers, or a register indirect, possibly with an
   auto-increment.  For DFmode and DImode with a constant plus register,
   we must ensure that both words are addressable or PowerPC64 with offset
   word aligned.

   For modes spanning multiple registers (DFmode in 32-bit GPRs,
   32-bit DImode, TImode, TFmode), indexed addressing cannot be used because
   adjacent memory cells are accessed by adding word-sized offsets
   during assembly output.  */
int
rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
{
  /* TLS symbols must be legitimized first; never valid directly.  */
  if (RS6000_SYMBOL_REF_TLS_P (x))
    return 0;
  if (legitimate_indirect_address_p (x, reg_ok_strict))
    return 1;
  /* Pre-increment/decrement: only for non-vector modes with update
     forms available.  */
  if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
      && !ALTIVEC_VECTOR_MODE (mode)
      && !SPE_VECTOR_MODE (mode)
      && TARGET_UPDATE
      && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
    return 1;
  if (legitimate_small_data_p (mode, x))
    return 1;
  if (legitimate_constant_pool_address_p (x))
    return 1;
  /* If not REG_OK_STRICT (before reload) let pass any stack offset.  */
  if (! reg_ok_strict
      && GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && (XEXP (x, 0) == virtual_stack_vars_rtx
          || XEXP (x, 0) == arg_pointer_rtx)
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return 1;
  if (rs6000_legitimate_offset_address_p (mode, x, reg_ok_strict))
    return 1;
  /* Indexed (reg+reg) addressing, excluded for multi-register modes
     as described above.  */
  if (mode != TImode
      && mode != TFmode
      && ((TARGET_HARD_FLOAT && TARGET_FPRS)
          || TARGET_POWERPC64
          || (mode != DFmode && mode != TFmode))
      && (TARGET_POWERPC64 || mode != DImode)
      && legitimate_indexed_address_p (x, reg_ok_strict))
    return 1;
  if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
    return 1;
  return 0;
}
3789 /* Go to LABEL if ADDR (a legitimate address expression)
3790 has an effect that depends on the machine mode it is used for.
3792 On the RS/6000 this is true of all integral offsets (since AltiVec
3793 modes don't allow them) or is a pre-increment or decrement.
3795 ??? Except that due to conceptual problems in offsettable_address_p
3796 we can't really report the problems of integral offsets. So leave
3797 this assuming that the adjustable offset must be valid for the
3798 sub-words of a TFmode operand, which is what we had before. */
3800 bool
3801 rs6000_mode_dependent_address (rtx addr)
3803 switch (GET_CODE (addr))
3805 case PLUS:
3806 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
3808 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
3809 return val + 12 + 0x8000 >= 0x10000;
3811 break;
3813 case LO_SUM:
3814 return true;
3816 case PRE_INC:
3817 case PRE_DEC:
3818 return TARGET_UPDATE;
3820 default:
3821 break;
3824 return false;
3827 /* Return number of consecutive hard regs needed starting at reg REGNO
3828 to hold something of mode MODE.
3829 This is ordinarily the length in words of a value of mode MODE
3830 but can be less for certain modes in special long registers.
3832 For the SPE, GPRs are 64 bits but only 32 bits are visible in
3833 scalar instructions. The upper 32 bits are only available to the
3834 SIMD instructions.
3836 POWER and PowerPC GPRs hold 32 bits worth;
3837 PowerPC64 GPRs and FPRs point register holds 64 bits worth. */
3840 rs6000_hard_regno_nregs (int regno, enum machine_mode mode)
3842 if (FP_REGNO_P (regno))
3843 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
3845 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
3846 return (GET_MODE_SIZE (mode) + UNITS_PER_SPE_WORD - 1) / UNITS_PER_SPE_WORD;
3848 if (ALTIVEC_REGNO_P (regno))
3849 return
3850 (GET_MODE_SIZE (mode) + UNITS_PER_ALTIVEC_WORD - 1) / UNITS_PER_ALTIVEC_WORD;
3852 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
/* Change register usage conditional on target flags.  Adjusts the
   fixed_regs / call_used_regs / call_really_used_regs / global_regs
   tables after option processing.  */
void
rs6000_conditional_register_usage (void)
{
  int i;

  /* Set MQ register fixed (already call_used) if not POWER
     architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
     be allocated.  */
  if (! TARGET_POWER)
    fixed_regs[64] = 1;

  /* 64-bit AIX reserves GPR13 for thread-private data.  */
  if (TARGET_64BIT)
    fixed_regs[13] = call_used_regs[13]
      = call_really_used_regs[13] = 1;

  /* Conditionally disable FPRs.  */
  if (TARGET_SOFT_FLOAT || !TARGET_FPRS)
    for (i = 32; i < 64; i++)
      fixed_regs[i] = call_used_regs[i]
        = call_really_used_regs[i] = 1;

  /* V.4 PIC: -fPIC pins the PIC register; -fpic additionally marks it
     call-clobbered.  */
  if (DEFAULT_ABI == ABI_V4
      && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
      && flag_pic == 2)
    fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;

  if (DEFAULT_ABI == ABI_V4
      && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
      && flag_pic == 1)
    fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
      = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
      = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;

  if (DEFAULT_ABI == ABI_DARWIN
      && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    global_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
      = fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
      = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
      = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;

  if (TARGET_TOC && TARGET_MINIMAL_TOC)
    fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
      = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;

  if (TARGET_ALTIVEC)
    global_regs[VSCR_REGNO] = 1;

  if (TARGET_SPE)
    {
      global_regs[SPEFSCR_REGNO] = 1;
      fixed_regs[FIXED_SCRATCH]
        = call_used_regs[FIXED_SCRATCH]
        = call_really_used_regs[FIXED_SCRATCH] = 1;
    }

  /* Without AltiVec, make the whole vector register file unusable.  */
  if (! TARGET_ALTIVEC)
    {
      for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
        fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
      call_really_used_regs[VRSAVE_REGNO] = 1;
    }

  /* Under the AltiVec ABI the first 20 vector registers are
     call-clobbered.  */
  if (TARGET_ALTIVEC_ABI)
    for (i = FIRST_ALTIVEC_REGNO; i < FIRST_ALTIVEC_REGNO + 20; ++i)
      call_used_regs[i] = call_really_used_regs[i] = 1;
}
/* Try to output insns to set TARGET equal to the constant C if it can
   be done in less than N insns.  Do all computations in MODE.
   Returns the place where the output has been placed if it can be
   done and the insns have been emitted.  If it would take more than N
   insns, zero is returned and no insns and emitted.  */

rtx
rs6000_emit_set_const (rtx dest, enum machine_mode mode,
                       rtx source, int n ATTRIBUTE_UNUSED)
{
  rtx result, insn, set;
  HOST_WIDE_INT c0, c1;

  /* QI/HI constants always fit in one move.  */
  if (mode == QImode || mode == HImode)
    {
      if (dest == NULL)
        dest = gen_reg_rtx (mode);
      emit_insn (gen_rtx_SET (VOIDmode, dest, source));
      return dest;
    }
  else if (mode == SImode)
    {
      /* Build the value as high 16 bits then OR in the low 16 bits.  */
      result = no_new_pseudos ? dest : gen_reg_rtx (SImode);

      emit_insn (gen_rtx_SET (VOIDmode, result,
                              GEN_INT (INTVAL (source)
                                       & (~ (HOST_WIDE_INT) 0xffff))));
      emit_insn (gen_rtx_SET (VOIDmode, dest,
                              gen_rtx_IOR (SImode, result,
                                           GEN_INT (INTVAL (source) & 0xffff))));
      result = dest;
    }
  else if (mode == DImode)
    {
      /* Split SOURCE into low (c0) and high (c1) halves; with a
         64-bit host wide int, c1 is just the sign extension of c0.  */
      if (GET_CODE (source) == CONST_INT)
        {
          c0 = INTVAL (source);
          c1 = -(c0 < 0);
        }
      else if (GET_CODE (source) == CONST_DOUBLE)
        {
#if HOST_BITS_PER_WIDE_INT >= 64
          c0 = CONST_DOUBLE_LOW (source);
          c1 = -(c0 < 0);
#else
          c0 = CONST_DOUBLE_LOW (source);
          c1 = CONST_DOUBLE_HIGH (source);
#endif
        }
      else
        abort ();

      result = rs6000_emit_set_long_const (dest, c0, c1);
    }
  else
    abort ();

  /* Attach a REG_EQUAL note so later passes know the final value.  */
  insn = get_last_insn ();
  set = single_set (insn);
  if (! CONSTANT_P (SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, source);

  return result;
}
/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
   fall back to a straight forward decomposition.  We do this to avoid
   exponential run times encountered when looking for longer sequences
   with rs6000_emit_set_const.

   C1 is the low and C2 the high half of the 64-bit constant (with a
   64-bit HOST_WIDE_INT, C1 holds the whole value and C2 is recomputed
   from it).  Returns DEST.  */
static rtx
rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
{
  if (!TARGET_POWERPC64)
    {
      /* 32-bit: just set the two word-sized halves independently.  */
      rtx operand1, operand2;

      operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
                                        DImode);
      operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
                                        DImode);
      emit_move_insn (operand1, GEN_INT (c1));
      emit_move_insn (operand2, GEN_INT (c2));
    }
  else
    {
      /* 64-bit: decompose into four 16-bit chunks, ud1 lowest.  */
      HOST_WIDE_INT ud1, ud2, ud3, ud4;

      ud1 = c1 & 0xffff;
      ud2 = (c1 & 0xffff0000) >> 16;
#if HOST_BITS_PER_WIDE_INT >= 64
      c2 = c1 >> 32;
#endif
      ud3 = c2 & 0xffff;
      ud4 = (c2 & 0xffff0000) >> 16;

      /* Value fits in a sign-extended 16-bit immediate: one li.  */
      if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
          || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
        {
          if (ud1 & 0x8000)
            emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) -  0x8000)));
          else
            emit_move_insn (dest, GEN_INT (ud1));
        }
      /* Fits in a sign-extended 32-bit value: lis + optional ori.  */
      else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
               || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
        {
          if (ud2 & 0x8000)
            emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
                                           - 0x80000000));
          else
            emit_move_insn (dest, GEN_INT (ud2 << 16));
          if (ud1 != 0)
            emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
        }
      /* 48 significant bits: build upper 32, shift left 16, OR low.  */
      else if ((ud4 == 0xffff && (ud3 & 0x8000))
               || (ud4 == 0 && ! (ud3 & 0x8000)))
        {
          if (ud3 & 0x8000)
            emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
                                           - 0x80000000));
          else
            emit_move_insn (dest, GEN_INT (ud3 << 16));

          if (ud2 != 0)
            emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
          emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
          if (ud1 != 0)
            emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
        }
      /* Full 64 bits: build the high doubleword, shift by 32, then OR
         in the low two chunks.  */
      else
        {
          if (ud4 & 0x8000)
            emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
                                           - 0x80000000));
          else
            emit_move_insn (dest, GEN_INT (ud4 << 16));

          if (ud3 != 0)
            emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));

          emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
          if (ud2 != 0)
            emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
                                               GEN_INT (ud2 << 16)));
          if (ud1 != 0)
            emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
        }
    }
  return dest;
}
4076 /* Helper for the following. Get rid of [r+r] memory refs
4077 in cases where it won't work (TImode, TFmode). */
4079 static void
4080 rs6000_eliminate_indexed_memrefs (rtx operands[2])
4082 if (GET_CODE (operands[0]) == MEM
4083 && GET_CODE (XEXP (operands[0], 0)) != REG
4084 && ! reload_in_progress)
4085 operands[0]
4086 = replace_equiv_address (operands[0],
4087 copy_addr_to_reg (XEXP (operands[0], 0)));
4089 if (GET_CODE (operands[1]) == MEM
4090 && GET_CODE (XEXP (operands[1], 0)) != REG
4091 && ! reload_in_progress)
4092 operands[1]
4093 = replace_equiv_address (operands[1],
4094 copy_addr_to_reg (XEXP (operands[1], 0)));
4097 /* Emit a move from SOURCE to DEST in mode MODE. */
4098 void
4099 rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
4101 rtx operands[2];
4102 operands[0] = dest;
4103 operands[1] = source;
4105 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
4106 if (GET_CODE (operands[1]) == CONST_DOUBLE
4107 && ! FLOAT_MODE_P (mode)
4108 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4110 /* FIXME. This should never happen. */
4111 /* Since it seems that it does, do the safe thing and convert
4112 to a CONST_INT. */
4113 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
4115 if (GET_CODE (operands[1]) == CONST_DOUBLE
4116 && ! FLOAT_MODE_P (mode)
4117 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
4118 && CONST_DOUBLE_LOW (operands[1]) >= 0)
4119 || (CONST_DOUBLE_HIGH (operands[1]) == -1
4120 && CONST_DOUBLE_LOW (operands[1]) < 0)))
4121 abort ();
4123 /* Check if GCC is setting up a block move that will end up using FP
4124 registers as temporaries. We must make sure this is acceptable. */
4125 if (GET_CODE (operands[0]) == MEM
4126 && GET_CODE (operands[1]) == MEM
4127 && mode == DImode
4128 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
4129 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
4130 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
4131 ? 32 : MEM_ALIGN (operands[0])))
4132 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
4133 ? 32
4134 : MEM_ALIGN (operands[1]))))
4135 && ! MEM_VOLATILE_P (operands [0])
4136 && ! MEM_VOLATILE_P (operands [1]))
4138 emit_move_insn (adjust_address (operands[0], SImode, 0),
4139 adjust_address (operands[1], SImode, 0));
4140 emit_move_insn (adjust_address (operands[0], SImode, 4),
4141 adjust_address (operands[1], SImode, 4));
4142 return;
4145 if (!no_new_pseudos)
4147 if (GET_CODE (operands[1]) == MEM && optimize > 0
4148 && (mode == QImode || mode == HImode || mode == SImode)
4149 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
4151 rtx reg = gen_reg_rtx (word_mode);
4153 emit_insn (gen_rtx_SET (word_mode, reg,
4154 gen_rtx_ZERO_EXTEND (word_mode,
4155 operands[1])));
4156 operands[1] = gen_lowpart (mode, reg);
4158 if (GET_CODE (operands[0]) != REG)
4159 operands[1] = force_reg (mode, operands[1]);
4162 if (mode == SFmode && ! TARGET_POWERPC
4163 && TARGET_HARD_FLOAT && TARGET_FPRS
4164 && GET_CODE (operands[0]) == MEM)
4166 int regnum;
4168 if (reload_in_progress || reload_completed)
4169 regnum = true_regnum (operands[1]);
4170 else if (GET_CODE (operands[1]) == REG)
4171 regnum = REGNO (operands[1]);
4172 else
4173 regnum = -1;
4175 /* If operands[1] is a register, on POWER it may have
4176 double-precision data in it, so truncate it to single
4177 precision. */
4178 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
4180 rtx newreg;
4181 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
4182 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
4183 operands[1] = newreg;
4187 /* Recognize the case where operand[1] is a reference to thread-local
4188 data and load its address to a register. */
4189 if (GET_CODE (operands[1]) == SYMBOL_REF)
4191 enum tls_model model = SYMBOL_REF_TLS_MODEL (operands[1]);
4192 if (model != 0)
4193 operands[1] = rs6000_legitimize_tls_address (operands[1], model);
4196 /* Handle the case where reload calls us with an invalid address. */
4197 if (reload_in_progress && mode == Pmode
4198 && (! general_operand (operands[1], mode)
4199 || ! nonimmediate_operand (operands[0], mode)))
4200 goto emit_set;
4202 /* 128-bit constant floating-point values on Darwin should really be
4203 loaded as two parts. */
4204 if ((DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
4205 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128
4206 && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
4208 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
4209 know how to get a DFmode SUBREG of a TFmode. */
4210 rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode, 0),
4211 simplify_gen_subreg (DImode, operands[1], mode, 0),
4212 DImode);
4213 rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode,
4214 GET_MODE_SIZE (DImode)),
4215 simplify_gen_subreg (DImode, operands[1], mode,
4216 GET_MODE_SIZE (DImode)),
4217 DImode);
4218 return;
4221 /* FIXME: In the long term, this switch statement should go away
4222 and be replaced by a sequence of tests based on things like
4223 mode == Pmode. */
4224 switch (mode)
4226 case HImode:
4227 case QImode:
4228 if (CONSTANT_P (operands[1])
4229 && GET_CODE (operands[1]) != CONST_INT)
4230 operands[1] = force_const_mem (mode, operands[1]);
4231 break;
4233 case TFmode:
4234 rs6000_eliminate_indexed_memrefs (operands);
4235 /* fall through */
4237 case DFmode:
4238 case SFmode:
4239 if (CONSTANT_P (operands[1])
4240 && ! easy_fp_constant (operands[1], mode))
4241 operands[1] = force_const_mem (mode, operands[1]);
4242 break;
4244 case V16QImode:
4245 case V8HImode:
4246 case V4SFmode:
4247 case V4SImode:
4248 case V4HImode:
4249 case V2SFmode:
4250 case V2SImode:
4251 case V1DImode:
4252 if (CONSTANT_P (operands[1])
4253 && !easy_vector_constant (operands[1], mode))
4254 operands[1] = force_const_mem (mode, operands[1]);
4255 break;
4257 case SImode:
4258 case DImode:
4259 /* Use default pattern for address of ELF small data */
4260 if (TARGET_ELF
4261 && mode == Pmode
4262 && DEFAULT_ABI == ABI_V4
4263 && (GET_CODE (operands[1]) == SYMBOL_REF
4264 || GET_CODE (operands[1]) == CONST)
4265 && small_data_operand (operands[1], mode))
4267 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4268 return;
4271 if (DEFAULT_ABI == ABI_V4
4272 && mode == Pmode && mode == SImode
4273 && flag_pic == 1 && got_operand (operands[1], mode))
4275 emit_insn (gen_movsi_got (operands[0], operands[1]));
4276 return;
4279 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
4280 && TARGET_NO_TOC
4281 && ! flag_pic
4282 && mode == Pmode
4283 && CONSTANT_P (operands[1])
4284 && GET_CODE (operands[1]) != HIGH
4285 && GET_CODE (operands[1]) != CONST_INT)
4287 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
4289 /* If this is a function address on -mcall-aixdesc,
4290 convert it to the address of the descriptor. */
4291 if (DEFAULT_ABI == ABI_AIX
4292 && GET_CODE (operands[1]) == SYMBOL_REF
4293 && XSTR (operands[1], 0)[0] == '.')
4295 const char *name = XSTR (operands[1], 0);
4296 rtx new_ref;
4297 while (*name == '.')
4298 name++;
4299 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
4300 CONSTANT_POOL_ADDRESS_P (new_ref)
4301 = CONSTANT_POOL_ADDRESS_P (operands[1]);
4302 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
4303 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
4304 SYMBOL_REF_DECL (new_ref) = SYMBOL_REF_DECL (operands[1]);
4305 operands[1] = new_ref;
4308 if (DEFAULT_ABI == ABI_DARWIN)
4310 #if TARGET_MACHO
4311 if (MACHO_DYNAMIC_NO_PIC_P)
4313 /* Take care of any required data indirection. */
4314 operands[1] = rs6000_machopic_legitimize_pic_address (
4315 operands[1], mode, operands[0]);
4316 if (operands[0] != operands[1])
4317 emit_insn (gen_rtx_SET (VOIDmode,
4318 operands[0], operands[1]));
4319 return;
4321 #endif
4322 emit_insn (gen_macho_high (target, operands[1]));
4323 emit_insn (gen_macho_low (operands[0], target, operands[1]));
4324 return;
4327 emit_insn (gen_elf_high (target, operands[1]));
4328 emit_insn (gen_elf_low (operands[0], target, operands[1]));
4329 return;
4332 /* If this is a SYMBOL_REF that refers to a constant pool entry,
4333 and we have put it in the TOC, we just need to make a TOC-relative
4334 reference to it. */
4335 if (TARGET_TOC
4336 && GET_CODE (operands[1]) == SYMBOL_REF
4337 && constant_pool_expr_p (operands[1])
4338 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
4339 get_pool_mode (operands[1])))
4341 operands[1] = create_TOC_reference (operands[1]);
4343 else if (mode == Pmode
4344 && CONSTANT_P (operands[1])
4345 && ((GET_CODE (operands[1]) != CONST_INT
4346 && ! easy_fp_constant (operands[1], mode))
4347 || (GET_CODE (operands[1]) == CONST_INT
4348 && num_insns_constant (operands[1], mode) > 2)
4349 || (GET_CODE (operands[0]) == REG
4350 && FP_REGNO_P (REGNO (operands[0]))))
4351 && GET_CODE (operands[1]) != HIGH
4352 && ! legitimate_constant_pool_address_p (operands[1])
4353 && ! toc_relative_expr_p (operands[1]))
4355 /* Emit a USE operation so that the constant isn't deleted if
4356 expensive optimizations are turned on because nobody
4357 references it. This should only be done for operands that
4358 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
4359 This should not be done for operands that contain LABEL_REFs.
4360 For now, we just handle the obvious case. */
4361 if (GET_CODE (operands[1]) != LABEL_REF)
4362 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
4364 #if TARGET_MACHO
4365 /* Darwin uses a special PIC legitimizer. */
4366 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
4368 operands[1] =
4369 rs6000_machopic_legitimize_pic_address (operands[1], mode,
4370 operands[0]);
4371 if (operands[0] != operands[1])
4372 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4373 return;
4375 #endif
4377 /* If we are to limit the number of things we put in the TOC and
4378 this is a symbol plus a constant we can add in one insn,
4379 just put the symbol in the TOC and add the constant. Don't do
4380 this if reload is in progress. */
4381 if (GET_CODE (operands[1]) == CONST
4382 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
4383 && GET_CODE (XEXP (operands[1], 0)) == PLUS
4384 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
4385 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
4386 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
4387 && ! side_effects_p (operands[0]))
4389 rtx sym =
4390 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
4391 rtx other = XEXP (XEXP (operands[1], 0), 1);
4393 sym = force_reg (mode, sym);
4394 if (mode == SImode)
4395 emit_insn (gen_addsi3 (operands[0], sym, other));
4396 else
4397 emit_insn (gen_adddi3 (operands[0], sym, other));
4398 return;
4401 operands[1] = force_const_mem (mode, operands[1]);
4403 if (TARGET_TOC
4404 && constant_pool_expr_p (XEXP (operands[1], 0))
4405 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
4406 get_pool_constant (XEXP (operands[1], 0)),
4407 get_pool_mode (XEXP (operands[1], 0))))
4409 operands[1]
4410 = gen_rtx_MEM (mode,
4411 create_TOC_reference (XEXP (operands[1], 0)));
4412 set_mem_alias_set (operands[1], get_TOC_alias_set ());
4413 RTX_UNCHANGING_P (operands[1]) = 1;
4416 break;
4418 case TImode:
4419 rs6000_eliminate_indexed_memrefs (operands);
4421 if (TARGET_POWER)
4423 emit_insn (gen_rtx_PARALLEL (VOIDmode,
4424 gen_rtvec (2,
4425 gen_rtx_SET (VOIDmode,
4426 operands[0], operands[1]),
4427 gen_rtx_CLOBBER (VOIDmode,
4428 gen_rtx_SCRATCH (SImode)))));
4429 return;
4431 break;
4433 default:
4434 abort ();
4437 /* Above, we may have called force_const_mem which may have returned
4438 an invalid address. If we can, fix this up; otherwise, reload will
4439 have to deal with it. */
4440 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
4441 operands[1] = validize_mem (operands[1]);
4443 emit_set:
4444 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
4447 /* Nonzero if we can use a floating-point register to pass this arg. */
/* TYPE is accepted for interface symmetry but not examined; the test is
   purely on the mode class, on whether another FP argument register is
   still free, and on FP registers being available at all.  */
4448 #define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
4449 (GET_MODE_CLASS (MODE) == MODE_FLOAT \
4450 && (CUM)->fregno <= FP_ARG_MAX_REG \
4451 && TARGET_HARD_FLOAT && TARGET_FPRS)
4453 /* Nonzero if we can use an AltiVec register to pass this arg. */
/* Only a named argument of an AltiVec vector mode qualifies, and only
   while a vector argument register remains and the AltiVec ABI is in
   effect; unnamed (varargs) vector args never go in VRs.  */
4454 #define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
4455 (ALTIVEC_VECTOR_MODE (MODE) \
4456 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
4457 && TARGET_ALTIVEC_ABI \
4458 && (NAMED))
4460 /* Return a nonzero value to say to return the function value in
4461 memory, just as large structures are always returned. TYPE will be
4462 the data type of the value, and FNTYPE will be the type of the
4463 function doing the returning, or @code{NULL} for libcalls.
4465 The AIX ABI for the RS/6000 specifies that all structures are
4466 returned in memory. The Darwin ABI does the same. The SVR4 ABI
4467 specifies that structures <= 8 bytes are returned in r3/r4, but a
4468 draft put them in memory, and GCC used to implement the draft
4469 instead of the final standard. Therefore, TARGET_AIX_STRUCT_RET
4470 controls this instead of DEFAULT_ABI; V.4 targets needing backward
4471 compatibility can change DRAFT_V4_STRUCT_RET to override the
4472 default, and -m switches get the final word. See
4473 rs6000_override_options for more details.
4475 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
4476 long double support is enabled. These values are returned in memory.
4478 int_size_in_bytes returns -1 for variable size objects, which go in
4479 memory always. The cast to unsigned makes -1 > 8. */
4481 static bool
4482 rs6000_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
4484 if (AGGREGATE_TYPE_P (type)
4485 && (TARGET_AIX_STRUCT_RET
4486 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
4487 return true;
4488 if (DEFAULT_ABI == ABI_V4 && TYPE_MODE (type) == TFmode)
4489 return true;
4490 return false;
4493 /* Initialize a variable CUM of type CUMULATIVE_ARGS
4494 for a call to a function whose data type is FNTYPE.
4495 For a library call, FNTYPE is 0.
4497 For incoming args we set the number of arguments in the prototype large
4498 so we never return a PARALLEL. */
4500 void
4501 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
4502 rtx libname ATTRIBUTE_UNUSED, int incoming,
4503 int libcall, int n_named_args)
4505 static CUMULATIVE_ARGS zero_cumulative;
4507 *cum = zero_cumulative;
4508 cum->words = 0;
4509 cum->fregno = FP_ARG_MIN_REG;
4510 cum->vregno = ALTIVEC_ARG_MIN_REG;
4511 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
4512 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
4513 ? CALL_LIBCALL : CALL_NORMAL);
4514 cum->sysv_gregno = GP_ARG_MIN_REG;
4515 cum->stdarg = fntype
4516 && (TYPE_ARG_TYPES (fntype) != 0
4517 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4518 != void_type_node));
4520 cum->nargs_prototype = 0;
4521 if (incoming || cum->prototype)
4522 cum->nargs_prototype = n_named_args;
4524 /* Check for a longcall attribute. */
4525 if (fntype
4526 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
4527 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
4528 cum->call_cookie = CALL_LONG;
4530 if (TARGET_DEBUG_ARG)
4532 fprintf (stderr, "\ninit_cumulative_args:");
4533 if (fntype)
4535 tree ret_type = TREE_TYPE (fntype);
4536 fprintf (stderr, " ret code = %s,",
4537 tree_code_name[ (int)TREE_CODE (ret_type) ]);
4540 if (cum->call_cookie & CALL_LONG)
4541 fprintf (stderr, " longcall,");
4543 fprintf (stderr, " proto = %d, nargs = %d\n",
4544 cum->prototype, cum->nargs_prototype);
4547 if (fntype
4548 && !TARGET_ALTIVEC
4549 && TARGET_ALTIVEC_ABI
4550 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
4552 error ("Cannot return value in vector register because"
4553 " altivec instructions are disabled, use -maltivec"
4554 " to enable them.");
4558 /* Return true if TYPE must be passed on the stack and not in registers. */
4560 static bool
4561 rs6000_must_pass_in_stack (enum machine_mode mode, tree type)
4563 if (DEFAULT_ABI == ABI_AIX || TARGET_64BIT)
4564 return must_pass_in_stack_var_size (mode, type);
4565 else
4566 return must_pass_in_stack_var_size_or_pad (mode, type);
4569 /* If defined, a C expression which determines whether, and in which
4570 direction, to pad out an argument with extra space. The value
4571 should be of type `enum direction': either `upward' to pad above
4572 the argument, `downward' to pad below, or `none' to inhibit
4573 padding.
4575 For the AIX ABI structs are always stored left shifted in their
4576 argument slot. */
4578 enum direction
4579 function_arg_padding (enum machine_mode mode, tree type)
4581 #ifndef AGGREGATE_PADDING_FIXED
4582 #define AGGREGATE_PADDING_FIXED 0
4583 #endif
4584 #ifndef AGGREGATES_PAD_UPWARD_ALWAYS
4585 #define AGGREGATES_PAD_UPWARD_ALWAYS 0
4586 #endif
4588 if (!AGGREGATE_PADDING_FIXED)
4590 /* GCC used to pass structures of the same size as integer types as
4591 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
4592 ie. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
4593 passed padded downward, except that -mstrict-align further
4594 muddied the water in that multi-component structures of 2 and 4
4595 bytes in size were passed padded upward.
4597 The following arranges for best compatibility with previous
4598 versions of gcc, but removes the -mstrict-align dependency. */
4599 if (BYTES_BIG_ENDIAN)
4601 HOST_WIDE_INT size = 0;
4603 if (mode == BLKmode)
4605 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
4606 size = int_size_in_bytes (type);
4608 else
4609 size = GET_MODE_SIZE (mode);
4611 if (size == 1 || size == 2 || size == 4)
4612 return downward;
4614 return upward;
4617 if (AGGREGATES_PAD_UPWARD_ALWAYS)
4619 if (type != 0 && AGGREGATE_TYPE_P (type))
4620 return upward;
4623 /* Fall back to the default. */
4624 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
4627 /* If defined, a C expression that gives the alignment boundary, in bits,
4628 of an argument with the specified mode and type. If it is not defined,
4629 PARM_BOUNDARY is used for all arguments.
4631 V.4 wants long longs to be double word aligned. */
4634 function_arg_boundary (enum machine_mode mode, tree type ATTRIBUTE_UNUSED)
4636 if (DEFAULT_ABI == ABI_V4 && GET_MODE_SIZE (mode) == 8)
4637 return 64;
4638 else if (SPE_VECTOR_MODE (mode))
4639 return 64;
4640 else if (ALTIVEC_VECTOR_MODE (mode))
4641 return 128;
4642 else
4643 return PARM_BOUNDARY;
4646 /* Compute the size (in words) of a function argument. */
4648 static unsigned long
4649 rs6000_arg_size (enum machine_mode mode, tree type)
4651 unsigned long size;
4653 if (mode != BLKmode)
4654 size = GET_MODE_SIZE (mode);
4655 else
4656 size = int_size_in_bytes (type);
4658 if (TARGET_32BIT)
4659 return (size + 3) >> 2;
4660 else
4661 return (size + 7) >> 3;
4664 /* Update the data in CUM to advance over an argument
4665 of mode MODE and data type TYPE.
4666 (TYPE is null for libcalls where that information may not be available.)
4668 Note that for args passed by reference, function_arg will be called
4669 with MODE and TYPE set to that of the pointer to the arg, not the arg
4670 itself. */
4672 void
4673 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4674 tree type, int named)
4676 cum->nargs_prototype--;
4678 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4680 bool stack = false;
4682 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
4684 cum->vregno++;
4685 if (!TARGET_ALTIVEC)
4686 error ("Cannot pass argument in vector register because"
4687 " altivec instructions are disabled, use -maltivec"
4688 " to enable them.");
4690 /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
4691 even if it is going to be passed in a vector register.
4692 Darwin does the same for variable-argument functions. */
4693 if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
4694 || (cum->stdarg && DEFAULT_ABI != ABI_V4))
4695 stack = true;
4697 else
4698 stack = true;
4700 if (stack)
4702 int align;
4704 /* Vector parameters must be 16-byte aligned. This places
4705 them at 2 mod 4 in terms of words in 32-bit mode, since
4706 the parameter save area starts at offset 24 from the
4707 stack. In 64-bit mode, they just have to start on an
4708 even word, since the parameter save area is 16-byte
4709 aligned. Space for GPRs is reserved even if the argument
4710 will be passed in memory. */
4711 if (TARGET_32BIT)
4712 align = (2 - cum->words) & 3;
4713 else
4714 align = cum->words & 1;
4715 cum->words += align + rs6000_arg_size (mode, type);
4717 if (TARGET_DEBUG_ARG)
4719 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
4720 cum->words, align);
4721 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
4722 cum->nargs_prototype, cum->prototype,
4723 GET_MODE_NAME (mode));
4727 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
4728 && !cum->stdarg
4729 && cum->sysv_gregno <= GP_ARG_MAX_REG)
4730 cum->sysv_gregno++;
4731 else if (DEFAULT_ABI == ABI_V4)
4733 if (TARGET_HARD_FLOAT && TARGET_FPRS
4734 && (mode == SFmode || mode == DFmode))
4736 if (cum->fregno <= FP_ARG_V4_MAX_REG)
4737 cum->fregno++;
4738 else
4740 if (mode == DFmode)
4741 cum->words += cum->words & 1;
4742 cum->words += rs6000_arg_size (mode, type);
4745 else
4747 int n_words = rs6000_arg_size (mode, type);
4748 int gregno = cum->sysv_gregno;
4750 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
4751 (r7,r8) or (r9,r10). As does any other 2 word item such
4752 as complex int due to a historical mistake. */
4753 if (n_words == 2)
4754 gregno += (1 - gregno) & 1;
4756 /* Multi-reg args are not split between registers and stack. */
4757 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
4759 /* Long long and SPE vectors are aligned on the stack.
4760 So are other 2 word items such as complex int due to
4761 a historical mistake. */
4762 if (n_words == 2)
4763 cum->words += cum->words & 1;
4764 cum->words += n_words;
4767 /* Note: continuing to accumulate gregno past when we've started
4768 spilling to the stack indicates the fact that we've started
4769 spilling to the stack to expand_builtin_saveregs. */
4770 cum->sysv_gregno = gregno + n_words;
4773 if (TARGET_DEBUG_ARG)
4775 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4776 cum->words, cum->fregno);
4777 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
4778 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
4779 fprintf (stderr, "mode = %4s, named = %d\n",
4780 GET_MODE_NAME (mode), named);
4783 else
4785 int n_words = rs6000_arg_size (mode, type);
4786 int align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
4788 /* The simple alignment calculation here works because
4789 function_arg_boundary / PARM_BOUNDARY will only be 1 or 2.
4790 If we ever want to handle alignments larger than 8 bytes for
4791 32-bit or 16 bytes for 64-bit, then we'll need to take into
4792 account the offset to the start of the parm save area. */
4793 align &= cum->words;
4794 cum->words += align + n_words;
4796 if (GET_MODE_CLASS (mode) == MODE_FLOAT
4797 && TARGET_HARD_FLOAT && TARGET_FPRS)
4798 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4800 if (TARGET_DEBUG_ARG)
4802 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4803 cum->words, cum->fregno);
4804 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
4805 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
4806 fprintf (stderr, "named = %d, align = %d\n", named, align);
4811 /* Determine where to put a SIMD argument on the SPE. */
4813 static rtx
4814 rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4815 tree type)
4817 if (cum->stdarg)
4819 int gregno = cum->sysv_gregno;
4820 int n_words = rs6000_arg_size (mode, type);
4822 /* SPE vectors are put in odd registers. */
4823 if (n_words == 2 && (gregno & 1) == 0)
4824 gregno += 1;
4826 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
4828 rtx r1, r2;
4829 enum machine_mode m = SImode;
4831 r1 = gen_rtx_REG (m, gregno);
4832 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
4833 r2 = gen_rtx_REG (m, gregno + 1);
4834 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
4835 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
4837 else
4838 return NULL_RTX;
4840 else
4842 if (cum->sysv_gregno <= GP_ARG_MAX_REG)
4843 return gen_rtx_REG (mode, cum->sysv_gregno);
4844 else
4845 return NULL_RTX;
4849 /* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
4851 static rtx
4852 rs6000_mixed_function_arg (enum machine_mode mode, tree type, int align_words)
4854 int n_units;
4855 int i, k;
4856 rtx rvec[GP_ARG_NUM_REG + 1];
4858 if (align_words >= GP_ARG_NUM_REG)
4859 return NULL_RTX;
4861 n_units = rs6000_arg_size (mode, type);
4863 /* Optimize the simple case where the arg fits in one gpr, except in
4864 the case of BLKmode due to assign_parms assuming that registers are
4865 BITS_PER_WORD wide. */
4866 if (n_units == 0
4867 || (n_units == 1 && mode != BLKmode))
4868 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4870 k = 0;
4871 if (align_words + n_units > GP_ARG_NUM_REG)
4872 /* Not all of the arg fits in gprs. Say that it goes in memory too,
4873 using a magic NULL_RTX component.
4874 FIXME: This is not strictly correct. Only some of the arg
4875 belongs in memory, not all of it. However, there isn't any way
4876 to do this currently, apart from building rtx descriptions for
4877 the pieces of memory we want stored. Due to bugs in the generic
4878 code we can't use the normal function_arg_partial_nregs scheme
4879 with the PARALLEL arg description we emit here.
4880 In any case, the code to store the whole arg to memory is often
4881 more efficient than code to store pieces, and we know that space
4882 is available in the right place for the whole arg. */
4883 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
4885 i = 0;
4888 rtx r = gen_rtx_REG (SImode, GP_ARG_MIN_REG + align_words);
4889 rtx off = GEN_INT (i++ * 4);
4890 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
4892 while (++align_words < GP_ARG_NUM_REG && --n_units != 0);
4894 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
4897 /* Determine where to put an argument to a function.
4898 Value is zero to push the argument on the stack,
4899 or a hard register in which to store the argument.
4901 MODE is the argument's machine mode.
4902 TYPE is the data type of the argument (as a tree).
4903 This is null for libcalls where that information may
4904 not be available.
4905 CUM is a variable of type CUMULATIVE_ARGS which gives info about
4906 the preceding args and about the function being called.
4907 NAMED is nonzero if this argument is a named parameter
4908 (otherwise it is an extra parameter matching an ellipsis).
4910 On RS/6000 the first eight words of non-FP are normally in registers
4911 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
4912 Under V.4, the first 8 FP args are in registers.
4914 If this is floating-point and no prototype is specified, we use
4915 both an FP and integer register (or possibly FP reg and stack). Library
4916 functions (when CALL_LIBCALL is set) always have the proper types for args,
4917 so we can pass the FP value just in one register. emit_library_function
4918 doesn't support PARALLEL anyway.
4920 Note that for args passed by reference, function_arg will be called
4921 with MODE and TYPE set to that of the pointer to the arg, not the arg
4922 itself. */
4924 struct rtx_def *
4925 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4926 tree type, int named)
4928 enum rs6000_abi abi = DEFAULT_ABI;
4930 /* Return a marker to indicate whether CR1 needs to set or clear the
4931 bit that V.4 uses to say fp args were passed in registers.
4932 Assume that we don't need the marker for software floating point,
4933 or compiler generated library calls. */
4934 if (mode == VOIDmode)
4936 if (abi == ABI_V4
4937 && cum->nargs_prototype < 0
4938 && (cum->call_cookie & CALL_LIBCALL) == 0
4939 && (cum->prototype || TARGET_NO_PROTOTYPE))
4941 /* For the SPE, we need to crxor CR6 always. */
4942 if (TARGET_SPE_ABI)
4943 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
4944 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
4945 return GEN_INT (cum->call_cookie
4946 | ((cum->fregno == FP_ARG_MIN_REG)
4947 ? CALL_V4_SET_FP_ARGS
4948 : CALL_V4_CLEAR_FP_ARGS));
4951 return GEN_INT (cum->call_cookie);
/* NOTE(review): an unprototyped 64-bit AltiVec arg is described twice
   below -- once in its vector register and once in the GPR slot (or a
   NULL_RTX memory marker) -- so an unprototyped callee can still find
   it in the parameter save area.  */
4954 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
4955 if (TARGET_64BIT && ! cum->prototype)
4957 /* Vector parameters get passed in vector register
4958 and also in GPRs or memory, in absence of prototype. */
4959 int align_words;
4960 rtx slot;
4961 align_words = (cum->words + 1) & ~1;
4963 if (align_words >= GP_ARG_NUM_REG)
4965 slot = NULL_RTX;
4967 else
4969 slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4971 return gen_rtx_PARALLEL (mode,
4972 gen_rtvec (2,
4973 gen_rtx_EXPR_LIST (VOIDmode,
4974 slot, const0_rtx),
4975 gen_rtx_EXPR_LIST (VOIDmode,
4976 gen_rtx_REG (mode, cum->vregno),
4977 const0_rtx)));
4979 else
4980 return gen_rtx_REG (mode, cum->vregno);
4981 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4983 if (named || abi == ABI_V4)
4984 return NULL_RTX;
4985 else
4987 /* Vector parameters to varargs functions under AIX or Darwin
4988 get passed in memory and possibly also in GPRs. */
4989 int align, align_words, n_words;
4990 enum machine_mode part_mode;
4992 /* Vector parameters must be 16-byte aligned. This places them at
4993 2 mod 4 in terms of words in 32-bit mode, since the parameter
4994 save area starts at offset 24 from the stack. In 64-bit mode,
4995 they just have to start on an even word, since the parameter
4996 save area is 16-byte aligned. */
4997 if (TARGET_32BIT)
4998 align = (2 - cum->words) & 3;
4999 else
5000 align = cum->words & 1;
5001 align_words = cum->words + align;
5003 /* Out of registers? Memory, then. */
5004 if (align_words >= GP_ARG_NUM_REG)
5005 return NULL_RTX;
5007 if (TARGET_32BIT && TARGET_POWERPC64)
5008 return rs6000_mixed_function_arg (mode, type, align_words);
5010 /* The vector value goes in GPRs. Only the part of the
5011 value in GPRs is reported here. */
5012 part_mode = mode;
5013 n_words = rs6000_arg_size (mode, type);
5014 if (align_words + n_words > GP_ARG_NUM_REG)
5015 /* Fortunately, there are only two possibilities, the value
5016 is either wholly in GPRs or half in GPRs and half not. */
5017 part_mode = DImode;
5019 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
5022 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode))
5023 return rs6000_spe_function_arg (cum, mode, type);
5024 else if (abi == ABI_V4)
5026 if (TARGET_HARD_FLOAT && TARGET_FPRS
5027 && (mode == SFmode || mode == DFmode))
5029 if (cum->fregno <= FP_ARG_V4_MAX_REG)
5030 return gen_rtx_REG (mode, cum->fregno)
5031 else
5032 return NULL_RTX;
5034 else
5036 int n_words = rs6000_arg_size (mode, type);
5037 int gregno = cum->sysv_gregno;
5039 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
5040 (r7,r8) or (r9,r10). As does any other 2 word item such
5041 as complex int due to a historical mistake. */
5042 if (n_words == 2)
5043 gregno += (1 - gregno) & 1;
5045 /* Multi-reg args are not split between registers and stack. */
5046 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
5047 return NULL_RTX;
5049 if (TARGET_32BIT && TARGET_POWERPC64)
5050 return rs6000_mixed_function_arg (mode, type,
5051 gregno - GP_ARG_MIN_REG);
5052 return gen_rtx_REG (mode, gregno);
5055 else
5057 int align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
5058 int align_words = cum->words + (cum->words & align);
/* AIX/Darwin FP candidate: may go in an FPR alone, or -- when the call
   is unprototyped or the xl-compat rules apply -- in an FPR plus the
   corresponding GPR/memory words of the parameter save area.  */
5060 if (USE_FP_FOR_ARG_P (cum, mode, type))
5062 rtx rvec[GP_ARG_NUM_REG + 1];
5063 rtx r;
5064 int k;
5065 bool needs_psave;
5066 enum machine_mode fmode = mode;
5067 unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;
5069 if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
5071 /* Currently, we only ever need one reg here because complex
5072 doubles are split. */
5073 if (cum->fregno != FP_ARG_MAX_REG || fmode != TFmode)
5074 abort ();
5076 /* Long double split over regs and memory. */
5077 fmode = DFmode;
5080 /* Do we also need to pass this arg in the parameter save
5081 area? */
5082 needs_psave = (type
5083 && (cum->nargs_prototype <= 0
5084 || (DEFAULT_ABI == ABI_AIX
5085 && TARGET_XL_CALL
5086 && align_words >= GP_ARG_NUM_REG)));
5088 if (!needs_psave && mode == fmode)
5089 return gen_rtx_REG (fmode, cum->fregno);
5091 k = 0;
5092 if (needs_psave)
5094 /* Describe the part that goes in gprs or the stack.
5095 This piece must come first, before the fprs. */
5096 if (align_words < GP_ARG_NUM_REG)
5098 unsigned long n_words = rs6000_arg_size (mode, type);
5100 if (align_words + n_words > GP_ARG_NUM_REG
5101 || (TARGET_32BIT && TARGET_POWERPC64))
5103 /* If this is partially on the stack, then we only
5104 include the portion actually in registers here. */
5105 enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
5106 rtx off;
5109 r = gen_rtx_REG (rmode,
5110 GP_ARG_MIN_REG + align_words);
5111 off = GEN_INT (k * GET_MODE_SIZE (rmode));
5112 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
5114 while (++align_words < GP_ARG_NUM_REG && --n_words != 0);
5116 else
5118 /* The whole arg fits in gprs. */
5119 r = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5120 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
5123 else
5124 /* It's entirely in memory. */
5125 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
5128 /* Describe where this piece goes in the fprs. */
5129 r = gen_rtx_REG (fmode, cum->fregno);
5130 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
5132 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
/* Non-FP, non-vector argument: GPRs while they last, else memory.  */
5134 else if (align_words < GP_ARG_NUM_REG)
5136 if (TARGET_32BIT && TARGET_POWERPC64)
5137 return rs6000_mixed_function_arg (mode, type, align_words);
5139 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
5141 else
5142 return NULL_RTX;
5146 /* For an arg passed partly in registers and partly in memory, this is
5147 the number of registers used. For args passed entirely in registers
5148 or entirely in memory, zero. When an arg is described by a PARALLEL,
5149 perhaps using more than one register type, this function returns the
5150 number of registers used by the first element of the PARALLEL. */
5153 function_arg_partial_nregs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
5154 tree type, int named)
5156 int ret = 0;
5157 int align;
5158 int parm_offset;
5159 int align_words;
5161 if (DEFAULT_ABI == ABI_V4)
5162 return 0;
5164 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
5165 && cum->nargs_prototype >= 0)
5166 return 0;
5168 align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
5169 parm_offset = TARGET_32BIT ? 2 : 0;
5170 align_words = cum->words + ((parm_offset - cum->words) & align);
5172 if (USE_FP_FOR_ARG_P (cum, mode, type)
5173 /* If we are passing this arg in gprs as well, then this function
5174 should return the number of gprs (or memory) partially passed,
5175 *not* the number of fprs. */
5176 && !(type
5177 && (cum->nargs_prototype <= 0
5178 || (DEFAULT_ABI == ABI_AIX
5179 && TARGET_XL_CALL
5180 && align_words >= GP_ARG_NUM_REG))))
5182 if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3) > FP_ARG_MAX_REG + 1)
5183 ret = FP_ARG_MAX_REG + 1 - cum->fregno;
5184 else if (cum->nargs_prototype >= 0)
5185 return 0;
5188 if (align_words < GP_ARG_NUM_REG
5189 && GP_ARG_NUM_REG < align_words + rs6000_arg_size (mode, type))
5190 ret = GP_ARG_NUM_REG - align_words;
5192 if (ret != 0 && TARGET_DEBUG_ARG)
5193 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
5195 return ret;
5198 /* A C expression that indicates when an argument must be passed by
5199 reference. If nonzero for an argument, a copy of that argument is
5200 made in memory and a pointer to the argument is passed instead of
5201 the argument itself. The pointer is passed in whatever way is
5202 appropriate for passing a pointer to that type.
5204 Under V.4, aggregates and long double are passed by reference.
5206 As an extension to all 32-bit ABIs, AltiVec vectors are passed by
5207 reference unless the AltiVec vector extension ABI is in force.
5209 As an extension to all ABIs, variable sized types are passed by
5210 reference. */
5212 static bool
5213 rs6000_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
5214 enum machine_mode mode ATTRIBUTE_UNUSED,
5215 tree type, bool named ATTRIBUTE_UNUSED)
5217 if ((DEFAULT_ABI == ABI_V4
5218 && ((type && AGGREGATE_TYPE_P (type))
5219 || mode == TFmode))
5220 || (TARGET_32BIT && !TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
5221 || (type && int_size_in_bytes (type) < 0))
5223 if (TARGET_DEBUG_ARG)
5224 fprintf (stderr, "function_arg_pass_by_reference\n");
5226 return 1;
5228 return 0;
5231 static void
5232 rs6000_move_block_from_reg (int regno, rtx x, int nregs)
5234 int i;
5235 enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
5237 if (nregs == 0)
5238 return;
5240 for (i = 0; i < nregs; i++)
5242 rtx tem = adjust_address_nv (x, reg_mode, i*GET_MODE_SIZE(reg_mode));
5243 if (reload_completed)
5245 if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
5246 tem = NULL_RTX;
5247 else
5248 tem = simplify_gen_subreg (reg_mode, x, BLKmode,
5249 i * GET_MODE_SIZE(reg_mode));
5251 else
5252 tem = replace_equiv_address (tem, XEXP (tem, 0));
5254 if (tem == NULL_RTX)
5255 abort ();
5257 emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
5262 /* Perform any needed actions needed for a function that is receiving a
5263 variable number of arguments.
5265 CUM is as above.
5267 MODE and TYPE are the mode and type of the current parameter.
5269 PRETEND_SIZE is a variable that should be set to the amount of stack
5270 that must be pushed by the prolog to pretend that our caller pushed
5273 Normally, this macro will push all remaining incoming registers on the
5274 stack and set PRETEND_SIZE to the length of the registers pushed. */
5276 static void
5277 setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
5278 tree type, int *pretend_size ATTRIBUTE_UNUSED, int no_rtl)
5280 CUMULATIVE_ARGS next_cum;
5281 int reg_size = TARGET_32BIT ? 4 : 8;
5282 rtx save_area = NULL_RTX, mem;
5283 int first_reg_offset, set;
5285 /* Skip the last named argument. */
5286 next_cum = *cum;
5287 function_arg_advance (&next_cum, mode, type, 1);
5289 if (DEFAULT_ABI == ABI_V4)
5291 /* Indicate to allocate space on the stack for varargs save area. */
5292 cfun->machine->sysv_varargs_p = 1;
5293 if (! no_rtl)
5294 save_area = plus_constant (virtual_stack_vars_rtx,
5295 - RS6000_VARARGS_SIZE);
5297 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
5299 else
5301 first_reg_offset = next_cum.words;
5302 save_area = virtual_incoming_args_rtx;
5303 cfun->machine->sysv_varargs_p = 0;
5305 if (targetm.calls.must_pass_in_stack (mode, type))
5306 first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
5309 set = get_varargs_alias_set ();
5310 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
5312 mem = gen_rtx_MEM (BLKmode,
5313 plus_constant (save_area,
5314 first_reg_offset * reg_size)),
5315 set_mem_alias_set (mem, set);
5316 set_mem_align (mem, BITS_PER_WORD);
5318 rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
5319 GP_ARG_NUM_REG - first_reg_offset);
5322 /* Save FP registers if needed. */
5323 if (DEFAULT_ABI == ABI_V4
5324 && TARGET_HARD_FLOAT && TARGET_FPRS
5325 && ! no_rtl
5326 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
5328 int fregno = next_cum.fregno;
5329 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
5330 rtx lab = gen_label_rtx ();
5331 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
5333 emit_jump_insn (gen_rtx_SET (VOIDmode,
5334 pc_rtx,
5335 gen_rtx_IF_THEN_ELSE (VOIDmode,
5336 gen_rtx_NE (VOIDmode, cr1,
5337 const0_rtx),
5338 gen_rtx_LABEL_REF (VOIDmode, lab),
5339 pc_rtx)));
5341 while (fregno <= FP_ARG_V4_MAX_REG)
5343 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
5344 set_mem_alias_set (mem, set);
5345 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
5346 fregno++;
5347 off += 8;
5350 emit_label (lab);
5354 /* Create the va_list data type.
     On V.4 this is struct { char gpr, fpr; short reserved;
     void *overflow_arg_area, *reg_save_area; }; elsewhere plain
     'char *'.  */
5356 static tree
5357 rs6000_build_builtin_va_list (void)
5359   tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
5361   /* For AIX, prefer 'char *' because that's what the system
5362      header files like.  */
5363   if (DEFAULT_ABI != ABI_V4)
5364     return build_pointer_type (char_type_node);
5366   record = (*lang_hooks.types.make_type) (RECORD_TYPE);
5367   type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
  /* gpr/fpr count the argument registers already consumed; each is a
     single byte.  */
5369   f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
5370 		      unsigned_char_type_node);
5371   f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
5372 		      unsigned_char_type_node);
5373   /* Give the two bytes of padding a name, so that -Wpadded won't warn on
5374      every user file.  */
5375   f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
5376 		      short_unsigned_type_node);
5377   f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
5378 		      ptr_type_node);
5379   f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
5380 		      ptr_type_node);
  /* Attach all fields to the record and chain them in declaration
     order: gpr, fpr, reserved, overflow_arg_area, reg_save_area.  */
5382   DECL_FIELD_CONTEXT (f_gpr) = record;
5383   DECL_FIELD_CONTEXT (f_fpr) = record;
5384   DECL_FIELD_CONTEXT (f_res) = record;
5385   DECL_FIELD_CONTEXT (f_ovf) = record;
5386   DECL_FIELD_CONTEXT (f_sav) = record;
5388   TREE_CHAIN (record) = type_decl;
5389   TYPE_NAME (record) = type_decl;
5390   TYPE_FIELDS (record) = f_gpr;
5391   TREE_CHAIN (f_gpr) = f_fpr;
5392   TREE_CHAIN (f_fpr) = f_res;
5393   TREE_CHAIN (f_res) = f_ovf;
5394   TREE_CHAIN (f_ovf) = f_sav;
5396   layout_type (record);
5398   /* The correct type is an array type of one element.  */
5399   return build_array_type (record, build_index_type (size_zero_node));
5402 /* Implement va_start.  VALIST is the va_list expression to initialize;
     NEXTARG is only consumed by the standard fallback.  */
5404 void
5405 rs6000_va_start (tree valist, rtx nextarg)
5407   HOST_WIDE_INT words, n_gpr, n_fpr;
5408   tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
5409   tree gpr, fpr, ovf, sav, t;
5411   /* Only SVR4 needs something special.  */
5412   if (DEFAULT_ABI != ABI_V4)
5414       std_expand_builtin_va_start (valist, nextarg);
5415       return;
  /* Walk the field chain laid down by rs6000_build_builtin_va_list:
     gpr, fpr, reserved, overflow_arg_area, reg_save_area.  */
5418   f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
5419   f_fpr = TREE_CHAIN (f_gpr);
5420   f_res = TREE_CHAIN (f_fpr);
5421   f_ovf = TREE_CHAIN (f_res);
5422   f_sav = TREE_CHAIN (f_ovf);
5424   valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
5425   gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
5426   fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
5427   ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
5428   sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
5430   /* Count number of gp and fp argument registers used.  */
5431   words = current_function_args_info.words;
5432   n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
5433   n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
5435   if (TARGET_DEBUG_ARG)
5436     fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
5437 	     HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
5438 	     words, n_gpr, n_fpr);
  /* Record how many registers the named arguments consumed, so va_arg
     starts after them.  Each assignment is expanded immediately.  */
5440   t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr,
5441 	     build_int_cst (NULL_TREE, n_gpr, 0));
5442   TREE_SIDE_EFFECTS (t) = 1;
5443   expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5445   t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr,
5446 	     build_int_cst (NULL_TREE, n_fpr, 0));
5447   TREE_SIDE_EFFECTS (t) = 1;
5448   expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5450   /* Find the overflow area: the incoming-args pointer advanced past the
5451      WORDS words taken by named stack arguments.  */
5451   t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
5452   if (words != 0)
5453     t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
5454 	       build_int_cst (NULL_TREE, words * UNITS_PER_WORD, 0));
5455   t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5456   TREE_SIDE_EFFECTS (t) = 1;
5457   expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5459   /* Find the register save area: RS6000_VARARGS_SIZE bytes below the
5460      frame's stack-variables base.  */
5460   t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
5461   t = build (PLUS_EXPR, TREE_TYPE (sav), t,
5462 	     build_int_cst (NULL_TREE, -RS6000_VARARGS_SIZE, -1));
5463   t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
5464   TREE_SIDE_EFFECTS (t) = 1;
5465   expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5468 /* Implement va_arg: gimplify the fetch of the next variadic argument
     of TYPE from VALIST, emitting setup into *PRE_P (and *POST_P), and
     return an expression for the fetched value.  */
5470 tree
5471 rs6000_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
5473   tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
5474   tree gpr, fpr, ovf, sav, reg, t, u;
5475   int size, rsize, n_reg, sav_ofs, sav_scale;
5476   tree lab_false, lab_over, addr;
5477   int align;
5478   tree ptrtype = build_pointer_type (type);
  /* Pass-by-reference arguments: fetch the pointer, then dereference.  */
5480   if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
5482       t = rs6000_gimplify_va_arg (valist, ptrtype, pre_p, post_p);
5483       return build_fold_indirect_ref (t);
5486   if (DEFAULT_ABI != ABI_V4)
  /* Non-V.4 ABIs use the standard implementation, except that a split
     complex argument is fetched as two separate parts.  */
5488       if (targetm.calls.split_complex_arg && TREE_CODE (type) == COMPLEX_TYPE)
5490 	  tree elem_type = TREE_TYPE (type);
5491 	  enum machine_mode elem_mode = TYPE_MODE (elem_type);
5492 	  int elem_size = GET_MODE_SIZE (elem_mode);
5494 	  if (elem_size < UNITS_PER_WORD)
5496 	      tree real_part, imag_part;
5497 	      tree post = NULL_TREE;
5499 	      real_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
5500 						  &post);
5501 	      /* Copy the value into a temporary, lest the formal temporary
5502 		 be reused out from under us.  */
5503 	      real_part = get_initialized_tmp_var (real_part, pre_p, &post);
5504 	      append_to_statement_list (post, pre_p);
5506 	      imag_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
5507 						  post_p);
5509 	      return build (COMPLEX_EXPR, type, real_part, imag_part);
5513       return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
  /* V.4: pick apart the va_list record (see rs6000_build_builtin_va_list
     for the field order).  */
5516   f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
5517   f_fpr = TREE_CHAIN (f_gpr);
5518   f_res = TREE_CHAIN (f_fpr);
5519   f_ovf = TREE_CHAIN (f_res);
5520   f_sav = TREE_CHAIN (f_ovf);
5522   valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
5523   gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
5524   fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
5525   ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
5526   sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
  /* RSIZE is the argument size in 4-byte words.  */
5528   size = int_size_in_bytes (type);
5529   rsize = (size + 3) / 4;
5530   align = 1;
5532   if (TARGET_HARD_FLOAT && TARGET_FPRS
5533       && (TYPE_MODE (type) == SFmode || TYPE_MODE (type) == DFmode))
5535       /* FP args go in FP registers, if present.  */
5536       reg = fpr;
5537       n_reg = 1;
      /* NOTE(review): sav_ofs = 8*4 presumably skips the eight 4-byte GPR
	 slots that precede the FP slots in the save area -- confirm.  */
5538       sav_ofs = 8*4;
5539       sav_scale = 8;
5540       if (TYPE_MODE (type) == DFmode)
5541 	align = 8;
5543   else
5545       /* Otherwise into GP registers.  */
5546       reg = gpr;
5547       n_reg = rsize;
5548       sav_ofs = 0;
5549       sav_scale = 4;
5550       if (n_reg == 2)
5551 	align = 8;
5554   /* Pull the value out of the saved registers....  */
5556   lab_over = NULL;
5557   addr = create_tmp_var (ptr_type_node, "addr");
5558   DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
5560   /* AltiVec vectors never go in registers when -mabi=altivec.  */
5561   if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
5562     align = 16;
5563   else
5565       lab_false = create_artificial_label ();
5566       lab_over = create_artificial_label ();
5568       /* Long long and SPE vectors are aligned in the registers.
5569 	 As are any other 2 gpr item such as complex int due to a
5570 	 historical mistake.  */
5571       u = reg;
5572       if (n_reg == 2)
5574 	  u = build2 (BIT_AND_EXPR, TREE_TYPE (reg), reg,
5575 		      size_int (n_reg - 1));
5576 	  u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, u);
      /* If the rounded register counter would run past register 8, the
	 value is not in the save area: jump to the overflow path.  */
5579       t = fold_convert (TREE_TYPE (reg), size_int (8 - n_reg + 1));
5580       t = build2 (GE_EXPR, boolean_type_node, u, t);
5581       u = build1 (GOTO_EXPR, void_type_node, lab_false);
5582       t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
5583       gimplify_and_add (t, pre_p);
      /* addr = sav + sav_ofs + reg++ * sav_scale.  */
5585       t = sav;
5586       if (sav_ofs)
5587 	t = build2 (PLUS_EXPR, ptr_type_node, sav, size_int (sav_ofs));
5589       u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, size_int (n_reg));
5590       u = build1 (CONVERT_EXPR, integer_type_node, u);
5591       u = build2 (MULT_EXPR, integer_type_node, u, size_int (sav_scale));
5592       t = build2 (PLUS_EXPR, ptr_type_node, t, u);
5594       t = build2 (MODIFY_EXPR, void_type_node, addr, t);
5595       gimplify_and_add (t, pre_p);
5597       t = build1 (GOTO_EXPR, void_type_node, lab_over);
5598       gimplify_and_add (t, pre_p);
5600       t = build1 (LABEL_EXPR, void_type_node, lab_false);
5601       append_to_statement_list (t, pre_p);
5603       if (n_reg > 2)
5605 	  /* Ensure that we don't find any more args in regs.
5606 	     Alignment has taken care of the n_reg == 2 case.  */
5607 	  t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, size_int (8));
5608 	  gimplify_and_add (t, pre_p);
5612   /* ... otherwise out of the overflow area.  */
5614   /* Care for on-stack alignment if needed: round ovf up to ALIGN.  */
5615   t = ovf;
5616   if (align != 1)
5618       t = build2 (PLUS_EXPR, TREE_TYPE (t), t, size_int (align - 1));
5619       t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t,
5620 		  build_int_cst (NULL_TREE, -align, -1));
5622   gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
5624   u = build2 (MODIFY_EXPR, void_type_node, addr, t);
5625   gimplify_and_add (u, pre_p);
  /* Advance ovf past the argument just consumed.  */
5627   t = build2 (PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
5628   t = build2 (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5629   gimplify_and_add (t, pre_p);
5631   if (lab_over)
5633       t = build1 (LABEL_EXPR, void_type_node, lab_over);
5634       append_to_statement_list (t, pre_p);
  /* The result is *(type *)addr.  */
5637   addr = fold_convert (ptrtype, addr);
5638   return build_fold_indirect_ref (addr);
5641 /* Builtins.  */
/* Register builtin function NAME with function type TYPE and builtin
   enum CODE, but only when MASK has bits in common with the current
   target_flags (so e.g. AltiVec builtins appear only with -maltivec).  */
5643 #define def_builtin(MASK, NAME, TYPE, CODE)			\
5644 do {								\
5645   if ((MASK) & target_flags)					\
5646     lang_hooks.builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,	\
5647 				 NULL, NULL_TREE);		\
5648 } while (0)
5650 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc).
     Each entry pairs a target-flag mask and insn code with the builtin's
     name and its entry in the builtin enum.  */
5652 static const struct builtin_description bdesc_3arg[] =
5654   { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
5655   { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
5656   { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
5657   { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
5658   { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
5659   { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
5660   { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
5661   { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
5662   { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
5663   { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
5664   { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
5665   { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
5666   { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
5667   { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
5668   { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
5669   { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
5670   { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
5671   { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
5672   { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
5673   { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
5674   { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
5675   { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
5676   { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
5679 /* DST operations: void foo (void *, const int, const char).
     Data-stream touch builtins; same entry layout as bdesc_3arg.  */
5681 static const struct builtin_description bdesc_dst[] =
5683   { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
5684   { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
5685   { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
5686   { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
5689 /* Simple binary operations: VECc = foo (VECa, VECb). */
5691 static struct builtin_description bdesc_2arg[] =
5693 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
5694 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
5695 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
5696 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
5697 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
5698 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
5699 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
5700 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
5701 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
5702 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
5703 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
5704 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
5705 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
5706 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
5707 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
5708 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
5709 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
5710 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
5711 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
5712 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
5713 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
5714 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
5715 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
5716 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
5717 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
5718 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
5719 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
5720 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
5721 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
5722 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
5723 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
5724 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
5725 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
5726 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
5727 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
5728 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
5729 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
5730 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
5731 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
5732 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
5733 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
5734 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
5735 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
5736 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
5737 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
5738 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
5739 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
5740 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
5741 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
5742 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
5743 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
5744 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
5745 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
5746 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
5747 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
5748 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
5749 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
5750 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
5751 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
5752 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
5753 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
5754 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
5755 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
5756 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
5757 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
5758 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
5759 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
5760 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
5761 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
5762 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
5763 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
5764 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
5765 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
5766 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
5767 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
5768 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
5769 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
5770 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
5771 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
5772 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
5773 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
5774 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
5775 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
5776 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
5777 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
5778 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
5779 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
5780 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
5781 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
5782 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
5783 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
5784 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
5785 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
5786 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
5787 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
5788 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
5789 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
5790 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
5791 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
5792 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
5793 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
5794 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
5795 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
5796 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
5797 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
5798 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
5799 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
5800 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
5801 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
5802 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
5803 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
5804 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
5805 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
5807 /* Place holder, leave as first spe builtin. */
5808 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
5809 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
5810 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
5811 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
5812 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
5813 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
5814 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
5815 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
5816 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
5817 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
5818 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
5819 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
5820 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
5821 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
5822 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
5823 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
5824 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
5825 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
5826 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
5827 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
5828 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
5829 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
5830 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
5831 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
5832 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
5833 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
5834 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
5835 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
5836 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
5837 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
5838 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
5839 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
5840 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
5841 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
5842 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
5843 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
5844 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
5845 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
5846 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
5847 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
5848 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
5849 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
5850 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
5851 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
5852 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
5853 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
5854 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
5855 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
5856 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
5857 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
5858 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
5859 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
5860 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
5861 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
5862 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
5863 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
5864 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
5865 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
5866 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
5867 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
5868 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
5869 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
5870 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
5871 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
5872 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
5873 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
5874 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
5875 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
5876 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
5877 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
5878 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
5879 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
5880 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
5881 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
5882 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
5883 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
5884 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
5885 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
5886 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
5887 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
5888 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
5889 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
5890 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
5891 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
5892 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
5893 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
5894 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
5895 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
5896 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
5897 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
5898 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
5899 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
5900 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
5901 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
5902 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
5903 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
5904 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
5905 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
5906 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
5907 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
5908 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
5909 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
5910 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
5911 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
5912 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
5913 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
5914 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
5915 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
5916 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
5918 /* SPE binary operations expecting a 5-bit unsigned literal. */
5919 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
5921 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
5922 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
5923 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
5924 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
5925 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
5926 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
5927 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
5928 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
5929 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
5930 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
5931 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
5932 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
5933 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
5934 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
5935 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
5936 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
5937 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
5938 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
5939 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
5940 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
5941 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
5942 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
5943 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
5944 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
5945 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
5946 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
5948 /* Place-holder. Leave as last binary SPE builtin. */
5949 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
5952 /* AltiVec predicates. */
5954 struct builtin_description_predicates
5956 const unsigned int mask;
5957 const enum insn_code icode;
5958 const char *opcode;
5959 const char *const name;
5960 const enum rs6000_builtins code;
5963 static const struct builtin_description_predicates bdesc_altivec_preds[] =
5965 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
5966 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
5967 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
5968 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
5969 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
5970 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
5971 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
5972 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
5973 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
5974 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
5975 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
5976 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
5977 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
5980 /* SPE predicates. */
5981 static struct builtin_description bdesc_spe_predicates[] =
5983 /* Place-holder. Leave as first. */
5984 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
5985 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
5986 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
5987 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
5988 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
5989 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
5990 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
5991 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
5992 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
5993 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
5994 /* Place-holder. Leave as last. */
5995 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
5998 /* SPE evsel predicates. */
5999 static struct builtin_description bdesc_spe_evsel[] =
6001 /* Place-holder. Leave as first. */
6002 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
6003 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
6004 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
6005 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
6006 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
6007 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
6008 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
6009 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
6010 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
6011 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
6012 /* Place-holder. Leave as last. */
6013 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
6016 /* ABS* operations. */
6018 static const struct builtin_description bdesc_abs[] =
6020 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
6021 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
6022 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
6023 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
6024 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
6025 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
6026 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
6029 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
6030 foo (VECa). */
6032 static struct builtin_description bdesc_1arg[] =
6034 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
6035 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
6036 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
6037 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
6038 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
6039 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
6040 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
6041 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
6042 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
6043 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
6044 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
6045 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
6046 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
6047 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
6048 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
6049 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
6050 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
6052 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
6053 end with SPE_BUILTIN_EVSUBFUSIAAW. */
6054 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
6055 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
6056 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
6057 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
6058 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
6059 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
6060 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
6061 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
6062 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
6063 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
6064 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
6065 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
6066 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
6067 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
6068 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
6069 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
6070 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
6071 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
6072 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
6073 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
6074 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
6075 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
6076 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
6077 { 0, CODE_FOR_negv2si2, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
6078 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
6079 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
6080 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
6081 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
6083 /* Place-holder. Leave as last unary SPE builtin. */
6084 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
6087 static rtx
6088 rs6000_expand_unop_builtin (enum insn_code icode, tree arglist, rtx target)
6090 rtx pat;
6091 tree arg0 = TREE_VALUE (arglist);
6092 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6093 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6094 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6096 if (icode == CODE_FOR_nothing)
6097 /* Builtin not supported on this processor. */
6098 return 0;
6100 /* If we got invalid arguments bail out before generating bad rtl. */
6101 if (arg0 == error_mark_node)
6102 return const0_rtx;
6104 if (icode == CODE_FOR_altivec_vspltisb
6105 || icode == CODE_FOR_altivec_vspltish
6106 || icode == CODE_FOR_altivec_vspltisw
6107 || icode == CODE_FOR_spe_evsplatfi
6108 || icode == CODE_FOR_spe_evsplati)
6110 /* Only allow 5-bit *signed* literals. */
6111 if (GET_CODE (op0) != CONST_INT
6112 || INTVAL (op0) > 0x1f
6113 || INTVAL (op0) < -0x1f)
6115 error ("argument 1 must be a 5-bit signed literal");
6116 return const0_rtx;
6120 if (target == 0
6121 || GET_MODE (target) != tmode
6122 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6123 target = gen_reg_rtx (tmode);
6125 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6126 op0 = copy_to_mode_reg (mode0, op0);
6128 pat = GEN_FCN (icode) (target, op0);
6129 if (! pat)
6130 return 0;
6131 emit_insn (pat);
6133 return target;
6136 static rtx
6137 altivec_expand_abs_builtin (enum insn_code icode, tree arglist, rtx target)
6139 rtx pat, scratch1, scratch2;
6140 tree arg0 = TREE_VALUE (arglist);
6141 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6142 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6143 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6145 /* If we have invalid arguments, bail out before generating bad rtl. */
6146 if (arg0 == error_mark_node)
6147 return const0_rtx;
6149 if (target == 0
6150 || GET_MODE (target) != tmode
6151 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6152 target = gen_reg_rtx (tmode);
6154 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6155 op0 = copy_to_mode_reg (mode0, op0);
6157 scratch1 = gen_reg_rtx (mode0);
6158 scratch2 = gen_reg_rtx (mode0);
6160 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
6161 if (! pat)
6162 return 0;
6163 emit_insn (pat);
6165 return target;
6168 static rtx
6169 rs6000_expand_binop_builtin (enum insn_code icode, tree arglist, rtx target)
6171 rtx pat;
6172 tree arg0 = TREE_VALUE (arglist);
6173 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6174 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6175 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6176 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6177 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6178 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6180 if (icode == CODE_FOR_nothing)
6181 /* Builtin not supported on this processor. */
6182 return 0;
6184 /* If we got invalid arguments bail out before generating bad rtl. */
6185 if (arg0 == error_mark_node || arg1 == error_mark_node)
6186 return const0_rtx;
6188 if (icode == CODE_FOR_altivec_vcfux
6189 || icode == CODE_FOR_altivec_vcfsx
6190 || icode == CODE_FOR_altivec_vctsxs
6191 || icode == CODE_FOR_altivec_vctuxs
6192 || icode == CODE_FOR_altivec_vspltb
6193 || icode == CODE_FOR_altivec_vsplth
6194 || icode == CODE_FOR_altivec_vspltw
6195 || icode == CODE_FOR_spe_evaddiw
6196 || icode == CODE_FOR_spe_evldd
6197 || icode == CODE_FOR_spe_evldh
6198 || icode == CODE_FOR_spe_evldw
6199 || icode == CODE_FOR_spe_evlhhesplat
6200 || icode == CODE_FOR_spe_evlhhossplat
6201 || icode == CODE_FOR_spe_evlhhousplat
6202 || icode == CODE_FOR_spe_evlwhe
6203 || icode == CODE_FOR_spe_evlwhos
6204 || icode == CODE_FOR_spe_evlwhou
6205 || icode == CODE_FOR_spe_evlwhsplat
6206 || icode == CODE_FOR_spe_evlwwsplat
6207 || icode == CODE_FOR_spe_evrlwi
6208 || icode == CODE_FOR_spe_evslwi
6209 || icode == CODE_FOR_spe_evsrwis
6210 || icode == CODE_FOR_spe_evsubifw
6211 || icode == CODE_FOR_spe_evsrwiu)
6213 /* Only allow 5-bit unsigned literals. */
6214 STRIP_NOPS (arg1);
6215 if (TREE_CODE (arg1) != INTEGER_CST
6216 || TREE_INT_CST_LOW (arg1) & ~0x1f)
6218 error ("argument 2 must be a 5-bit unsigned literal");
6219 return const0_rtx;
6223 if (target == 0
6224 || GET_MODE (target) != tmode
6225 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6226 target = gen_reg_rtx (tmode);
6228 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6229 op0 = copy_to_mode_reg (mode0, op0);
6230 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6231 op1 = copy_to_mode_reg (mode1, op1);
6233 pat = GEN_FCN (icode) (target, op0, op1);
6234 if (! pat)
6235 return 0;
6236 emit_insn (pat);
6238 return target;
6241 static rtx
6242 altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
6243 tree arglist, rtx target)
6245 rtx pat, scratch;
6246 tree cr6_form = TREE_VALUE (arglist);
6247 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
6248 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6249 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6250 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6251 enum machine_mode tmode = SImode;
6252 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6253 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6254 int cr6_form_int;
6256 if (TREE_CODE (cr6_form) != INTEGER_CST)
6258 error ("argument 1 of __builtin_altivec_predicate must be a constant");
6259 return const0_rtx;
6261 else
6262 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
6264 if (mode0 != mode1)
6265 abort ();
6267 /* If we have invalid arguments, bail out before generating bad rtl. */
6268 if (arg0 == error_mark_node || arg1 == error_mark_node)
6269 return const0_rtx;
6271 if (target == 0
6272 || GET_MODE (target) != tmode
6273 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6274 target = gen_reg_rtx (tmode);
6276 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6277 op0 = copy_to_mode_reg (mode0, op0);
6278 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6279 op1 = copy_to_mode_reg (mode1, op1);
6281 scratch = gen_reg_rtx (mode0);
6283 pat = GEN_FCN (icode) (scratch, op0, op1,
6284 gen_rtx_SYMBOL_REF (Pmode, opcode));
6285 if (! pat)
6286 return 0;
6287 emit_insn (pat);
6289 /* The vec_any* and vec_all* predicates use the same opcodes for two
6290 different operations, but the bits in CR6 will be different
6291 depending on what information we want. So we have to play tricks
6292 with CR6 to get the right bits out.
6294 If you think this is disgusting, look at the specs for the
6295 AltiVec predicates. */
6297 switch (cr6_form_int)
6299 case 0:
6300 emit_insn (gen_cr6_test_for_zero (target));
6301 break;
6302 case 1:
6303 emit_insn (gen_cr6_test_for_zero_reverse (target));
6304 break;
6305 case 2:
6306 emit_insn (gen_cr6_test_for_lt (target));
6307 break;
6308 case 3:
6309 emit_insn (gen_cr6_test_for_lt_reverse (target));
6310 break;
6311 default:
6312 error ("argument 1 of __builtin_altivec_predicate is out of range");
6313 break;
6316 return target;
6319 static rtx
6320 altivec_expand_lv_builtin (enum insn_code icode, tree arglist, rtx target)
6322 rtx pat, addr;
6323 tree arg0 = TREE_VALUE (arglist);
6324 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6325 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6326 enum machine_mode mode0 = Pmode;
6327 enum machine_mode mode1 = Pmode;
6328 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6329 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6331 if (icode == CODE_FOR_nothing)
6332 /* Builtin not supported on this processor. */
6333 return 0;
6335 /* If we got invalid arguments bail out before generating bad rtl. */
6336 if (arg0 == error_mark_node || arg1 == error_mark_node)
6337 return const0_rtx;
6339 if (target == 0
6340 || GET_MODE (target) != tmode
6341 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6342 target = gen_reg_rtx (tmode);
6344 op1 = copy_to_mode_reg (mode1, op1);
6346 if (op0 == const0_rtx)
6348 addr = gen_rtx_MEM (tmode, op1);
6350 else
6352 op0 = copy_to_mode_reg (mode0, op0);
6353 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
6356 pat = GEN_FCN (icode) (target, addr);
6358 if (! pat)
6359 return 0;
6360 emit_insn (pat);
6362 return target;
6365 static rtx
6366 spe_expand_stv_builtin (enum insn_code icode, tree arglist)
6368 tree arg0 = TREE_VALUE (arglist);
6369 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6370 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6371 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6372 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6373 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6374 rtx pat;
6375 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
6376 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
6377 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
6379 /* Invalid arguments. Bail before doing anything stoopid! */
6380 if (arg0 == error_mark_node
6381 || arg1 == error_mark_node
6382 || arg2 == error_mark_node)
6383 return const0_rtx;
6385 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
6386 op0 = copy_to_mode_reg (mode2, op0);
6387 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
6388 op1 = copy_to_mode_reg (mode0, op1);
6389 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
6390 op2 = copy_to_mode_reg (mode1, op2);
6392 pat = GEN_FCN (icode) (op1, op2, op0);
6393 if (pat)
6394 emit_insn (pat);
6395 return NULL_RTX;
6398 static rtx
6399 altivec_expand_stv_builtin (enum insn_code icode, tree arglist)
6401 tree arg0 = TREE_VALUE (arglist);
6402 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6403 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6404 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6405 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6406 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6407 rtx pat, addr;
6408 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6409 enum machine_mode mode1 = Pmode;
6410 enum machine_mode mode2 = Pmode;
6412 /* Invalid arguments. Bail before doing anything stoopid! */
6413 if (arg0 == error_mark_node
6414 || arg1 == error_mark_node
6415 || arg2 == error_mark_node)
6416 return const0_rtx;
6418 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
6419 op0 = copy_to_mode_reg (tmode, op0);
6421 op2 = copy_to_mode_reg (mode2, op2);
6423 if (op1 == const0_rtx)
6425 addr = gen_rtx_MEM (tmode, op2);
6427 else
6429 op1 = copy_to_mode_reg (mode1, op1);
6430 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
6433 pat = GEN_FCN (icode) (addr, op0);
6434 if (pat)
6435 emit_insn (pat);
6436 return NULL_RTX;
6439 static rtx
6440 rs6000_expand_ternop_builtin (enum insn_code icode, tree arglist, rtx target)
6442 rtx pat;
6443 tree arg0 = TREE_VALUE (arglist);
6444 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6445 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6446 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6447 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6448 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6449 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6450 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6451 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6452 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
6454 if (icode == CODE_FOR_nothing)
6455 /* Builtin not supported on this processor. */
6456 return 0;
6458 /* If we got invalid arguments bail out before generating bad rtl. */
6459 if (arg0 == error_mark_node
6460 || arg1 == error_mark_node
6461 || arg2 == error_mark_node)
6462 return const0_rtx;
6464 if (icode == CODE_FOR_altivec_vsldoi_4sf
6465 || icode == CODE_FOR_altivec_vsldoi_4si
6466 || icode == CODE_FOR_altivec_vsldoi_8hi
6467 || icode == CODE_FOR_altivec_vsldoi_16qi)
6469 /* Only allow 4-bit unsigned literals. */
6470 STRIP_NOPS (arg2);
6471 if (TREE_CODE (arg2) != INTEGER_CST
6472 || TREE_INT_CST_LOW (arg2) & ~0xf)
6474 error ("argument 3 must be a 4-bit unsigned literal");
6475 return const0_rtx;
6479 if (target == 0
6480 || GET_MODE (target) != tmode
6481 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6482 target = gen_reg_rtx (tmode);
6484 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6485 op0 = copy_to_mode_reg (mode0, op0);
6486 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6487 op1 = copy_to_mode_reg (mode1, op1);
6488 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
6489 op2 = copy_to_mode_reg (mode2, op2);
6491 pat = GEN_FCN (icode) (target, op0, op1, op2);
6492 if (! pat)
6493 return 0;
6494 emit_insn (pat);
6496 return target;
6499 /* Expand the lvx builtins. */
     /* Expand an AltiVec LD_INTERNAL (lvx) builtin call EXP into RTL and
	return the result rtx, reusing TARGET when it is usable.  Sets
	*EXPANDEDP to true when FCODE is one of the four LD_INTERNAL codes
	handled here; otherwise sets it to false and returns NULL_RTX.  */
6500 static rtx
6501 altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
6503 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6504 tree arglist = TREE_OPERAND (exp, 1);
6505 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6506 tree arg0;
6507 enum machine_mode tmode, mode0;
6508 rtx pat, op0;
6509 enum insn_code icode;
     /* Map the builtin code to the lvx insn for its element type.  */
6511 switch (fcode)
6513 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
6514 icode = CODE_FOR_altivec_lvx_16qi;
6515 break;
6516 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
6517 icode = CODE_FOR_altivec_lvx_8hi;
6518 break;
6519 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
6520 icode = CODE_FOR_altivec_lvx_4si;
6521 break;
6522 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
6523 icode = CODE_FOR_altivec_lvx_4sf;
6524 break;
6525 default:
6526 *expandedp = false;
6527 return NULL_RTX;
6530 *expandedp = true;
6532 arg0 = TREE_VALUE (arglist);
6533 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6534 tmode = insn_data[icode].operand[0].mode;
6535 mode0 = insn_data[icode].operand[1].mode;
     /* Reuse TARGET only if it has the insn's output mode and satisfies
	the output predicate; otherwise use a fresh pseudo.  */
6537 if (target == 0
6538 || GET_MODE (target) != tmode
6539 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6540 target = gen_reg_rtx (tmode);
     /* OP0 is an address; if the raw rtx fails the input predicate, force
	it into a register and wrap it in a MEM.  */
6542 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6543 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
6545 pat = GEN_FCN (icode) (target, op0);
6546 if (! pat)
6547 return 0;
6548 emit_insn (pat);
6549 return target;
6552 /* Expand the stvx builtins. */
     /* Expand an AltiVec ST_INTERNAL (stvx) builtin call EXP.  Stores have
	no result, so this always returns NULL_RTX; *EXPANDEDP tells the
	caller whether FCODE was one of the four codes handled here.  */
6553 static rtx
6554 altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6555 bool *expandedp)
6557 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6558 tree arglist = TREE_OPERAND (exp, 1);
6559 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6560 tree arg0, arg1;
6561 enum machine_mode mode0, mode1;
6562 rtx pat, op0, op1;
6563 enum insn_code icode;
     /* Map the builtin code to the stvx insn for its element type.  */
6565 switch (fcode)
6567 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
6568 icode = CODE_FOR_altivec_stvx_16qi;
6569 break;
6570 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
6571 icode = CODE_FOR_altivec_stvx_8hi;
6572 break;
6573 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
6574 icode = CODE_FOR_altivec_stvx_4si;
6575 break;
6576 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
6577 icode = CODE_FOR_altivec_stvx_4sf;
6578 break;
6579 default:
6580 *expandedp = false;
6581 return NULL_RTX;
     /* Operand 0 is the destination address, operand 1 the vector value.  */
6584 arg0 = TREE_VALUE (arglist);
6585 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6586 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6587 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6588 mode0 = insn_data[icode].operand[0].mode;
6589 mode1 = insn_data[icode].operand[1].mode;
     /* Wrap the address in a MEM if it fails the predicate; force the
	value into a register likewise.  */
6591 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6592 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0))
6593 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
6594 op1 = copy_to_mode_reg (mode1, op1);
6596 pat = GEN_FCN (icode) (op0, op1);
6597 if (pat)
6598 emit_insn (pat);
6600 *expandedp = true;
6601 return NULL_RTX;
6604 /* Expand the dst builtins. */
     /* Expand an AltiVec data-stream touch (dst*) builtin described in
	bdesc_dst.  Args are (address, control word, 2-bit stream tag).
	Returns NULL_RTX (dst has no value); *EXPANDEDP reports whether
	FCODE matched an entry in bdesc_dst.  */
6605 static rtx
6606 altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6607 bool *expandedp)
6609 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6610 tree arglist = TREE_OPERAND (exp, 1);
6611 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6612 tree arg0, arg1, arg2;
6613 enum machine_mode mode0, mode1, mode2;
6614 rtx pat, op0, op1, op2;
6615 struct builtin_description *d;
6616 size_t i;
6618 *expandedp = false;
6620 /* Handle DST variants. */
6621 d = (struct builtin_description *) bdesc_dst;
6622 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
6623 if (d->code == fcode)
6625 arg0 = TREE_VALUE (arglist);
6626 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6627 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6628 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6629 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6630 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6631 mode0 = insn_data[d->icode].operand[0].mode;
6632 mode1 = insn_data[d->icode].operand[1].mode;
6633 mode2 = insn_data[d->icode].operand[2].mode;
6635 /* Invalid arguments, bail out before generating bad rtl. */
6636 if (arg0 == error_mark_node
6637 || arg1 == error_mark_node
6638 || arg2 == error_mark_node)
6639 return const0_rtx;
6641 *expandedp = true;
     /* The stream tag must be a compile-time 2-bit literal; STRIP_NOPS so
	an integer constant behind a conversion is still accepted.  */
6642 STRIP_NOPS (arg2);
6643 if (TREE_CODE (arg2) != INTEGER_CST
6644 || TREE_INT_CST_LOW (arg2) & ~0x3)
6646 error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
6647 return const0_rtx;
6650 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
6651 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
6652 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
6653 op1 = copy_to_mode_reg (mode1, op1);
6655 pat = GEN_FCN (d->icode) (op0, op1, op2);
6656 if (pat != 0)
6657 emit_insn (pat);
6659 return NULL_RTX;
6662 return NULL_RTX;
6665 /* Expand the builtin in EXP and store the result in TARGET. Store
6666 true in *EXPANDEDP if we found a builtin to expand. */
     /* Top-level dispatcher for all AltiVec builtins: tries the ld/st/dst
	helpers first, then handles the irregular builtins (stv*, VSCR
	moves, data-stream stop, lv*) inline, and finally the abs and
	predicate tables.  */
6667 static rtx
6668 altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
6670 struct builtin_description *d;
6671 struct builtin_description_predicates *dp;
6672 size_t i;
6673 enum insn_code icode;
6674 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6675 tree arglist = TREE_OPERAND (exp, 1);
6676 tree arg0;
6677 rtx op0, pat;
6678 enum machine_mode tmode, mode0;
6679 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
     /* Each helper reports via *EXPANDEDP whether it recognized FCODE.  */
6681 target = altivec_expand_ld_builtin (exp, target, expandedp);
6682 if (*expandedp)
6683 return target;
6685 target = altivec_expand_st_builtin (exp, target, expandedp);
6686 if (*expandedp)
6687 return target;
6689 target = altivec_expand_dst_builtin (exp, target, expandedp);
6690 if (*expandedp)
6691 return target;
     /* Assume we will expand; the fall-through at the end resets this.  */
6693 *expandedp = true;
6695 switch (fcode)
6697 case ALTIVEC_BUILTIN_STVX:
6698 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
6699 case ALTIVEC_BUILTIN_STVEBX:
6700 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
6701 case ALTIVEC_BUILTIN_STVEHX:
6702 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
6703 case ALTIVEC_BUILTIN_STVEWX:
6704 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
6705 case ALTIVEC_BUILTIN_STVXL:
6706 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
     /* Move-from-VSCR: no arguments, result goes to TARGET.  */
6708 case ALTIVEC_BUILTIN_MFVSCR:
6709 icode = CODE_FOR_altivec_mfvscr;
6710 tmode = insn_data[icode].operand[0].mode;
6712 if (target == 0
6713 || GET_MODE (target) != tmode
6714 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6715 target = gen_reg_rtx (tmode);
6717 pat = GEN_FCN (icode) (target);
6718 if (! pat)
6719 return 0;
6720 emit_insn (pat);
6721 return target;
     /* Move-to-VSCR: one argument, no result.  */
6723 case ALTIVEC_BUILTIN_MTVSCR:
6724 icode = CODE_FOR_altivec_mtvscr;
6725 arg0 = TREE_VALUE (arglist);
6726 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6727 mode0 = insn_data[icode].operand[0].mode;
6729 /* If we got invalid arguments bail out before generating bad rtl. */
6730 if (arg0 == error_mark_node)
6731 return const0_rtx;
6733 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6734 op0 = copy_to_mode_reg (mode0, op0);
6736 pat = GEN_FCN (icode) (op0);
6737 if (pat)
6738 emit_insn (pat);
6739 return NULL_RTX;
6741 case ALTIVEC_BUILTIN_DSSALL:
6742 emit_insn (gen_altivec_dssall ());
6743 return NULL_RTX;
     /* dss takes a 2-bit compile-time stream tag.  */
6745 case ALTIVEC_BUILTIN_DSS:
6746 icode = CODE_FOR_altivec_dss;
6747 arg0 = TREE_VALUE (arglist);
6748 STRIP_NOPS (arg0);
6749 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6750 mode0 = insn_data[icode].operand[0].mode;
6752 /* If we got invalid arguments bail out before generating bad rtl. */
6753 if (arg0 == error_mark_node)
6754 return const0_rtx;
6756 if (TREE_CODE (arg0) != INTEGER_CST
6757 || TREE_INT_CST_LOW (arg0) & ~0x3)
6759 error ("argument to dss must be a 2-bit unsigned literal");
6760 return const0_rtx;
6763 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6764 op0 = copy_to_mode_reg (mode0, op0);
6766 emit_insn (gen_altivec_dss (op0));
6767 return NULL_RTX;
     /* Overload-resolution failure: the front end encoded the message as
	a string argument; unwrap NOP/ADDR wrappers to reach it.  */
6769 case ALTIVEC_BUILTIN_COMPILETIME_ERROR:
6770 arg0 = TREE_VALUE (arglist);
6771 while (TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == ADDR_EXPR)
6772 arg0 = TREE_OPERAND (arg0, 0);
6773 error ("invalid parameter combination for `%s' AltiVec intrinsic",
6774 TREE_STRING_POINTER (arg0));
6776 return const0_rtx;
6779 /* Expand abs* operations. */
6780 d = (struct builtin_description *) bdesc_abs;
6781 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
6782 if (d->code == fcode)
6783 return altivec_expand_abs_builtin (d->icode, arglist, target);
6785 /* Expand the AltiVec predicates. */
6786 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
6787 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
6788 if (dp->code == fcode)
6789 return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
6791 /* LV* are funky. We initialized them differently. */
6792 switch (fcode)
6794 case ALTIVEC_BUILTIN_LVSL:
6795 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
6796 arglist, target);
6797 case ALTIVEC_BUILTIN_LVSR:
6798 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
6799 arglist, target);
6800 case ALTIVEC_BUILTIN_LVEBX:
6801 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
6802 arglist, target);
6803 case ALTIVEC_BUILTIN_LVEHX:
6804 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
6805 arglist, target);
6806 case ALTIVEC_BUILTIN_LVEWX:
6807 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
6808 arglist, target);
6809 case ALTIVEC_BUILTIN_LVXL:
6810 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
6811 arglist, target);
6812 case ALTIVEC_BUILTIN_LVX:
6813 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
6814 arglist, target);
6815 default:
6816 break;
6817 /* Fall through. */
     /* Nothing matched: tell the caller we did not expand.  */
6820 *expandedp = false;
6821 return NULL_RTX;
6824 /* Binops that need to be initialized manually, but can be expanded
6825 automagically by rs6000_expand_binop_builtin. */
     /* Entry fields are (mask, icode, name, code) per struct
	builtin_description; the mask is left 0 here — presumably these are
	gated on TARGET_SPE at expand time rather than by mask bits (see
	the "run out of target_flags bits" note in spe_init_builtins) —
	TODO confirm.  All entries are SPE vector-load variants.  */
6826 static struct builtin_description bdesc_2arg_spe[] =
6828 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
6829 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
6830 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
6831 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
6832 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
6833 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
6834 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
6835 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
6836 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
6837 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
6838 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
6839 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
6840 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
6841 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
6842 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
6843 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
6844 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
6845 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
6846 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
6847 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
6848 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
6849 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
6852 /* Expand the builtin in EXP and store the result in TARGET. Store
6853 true in *EXPANDEDP if we found a builtin to expand.
6855 This expands the SPE builtins that are not simple unary and binary
6856 operations. */
6857 static rtx
6858 spe_expand_builtin (tree exp, rtx target, bool *expandedp)
6860 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6861 tree arglist = TREE_OPERAND (exp, 1);
6862 tree arg1, arg0;
6863 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6864 enum insn_code icode;
6865 enum machine_mode tmode, mode0;
6866 rtx pat, op0;
6867 struct builtin_description *d;
6868 size_t i;
6870 *expandedp = true;
6872 /* Syntax check for a 5-bit unsigned immediate. */
6873 switch (fcode)
6875 case SPE_BUILTIN_EVSTDD:
6876 case SPE_BUILTIN_EVSTDH:
6877 case SPE_BUILTIN_EVSTDW:
6878 case SPE_BUILTIN_EVSTWHE:
6879 case SPE_BUILTIN_EVSTWHO:
6880 case SPE_BUILTIN_EVSTWWE:
6881 case SPE_BUILTIN_EVSTWWO:
     /* NOTE(review): this reads the THIRD argument (two TREE_CHAINs) but
	the diagnostic below says "argument 2" — verify which position the
	immediate occupies in these store builtins.  */
6882 arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6883 if (TREE_CODE (arg1) != INTEGER_CST
6884 || TREE_INT_CST_LOW (arg1) & ~0x1f)
6886 error ("argument 2 must be a 5-bit unsigned literal");
6887 return const0_rtx;
6889 break;
6890 default:
6891 break;
6894 /* The evsplat*i instructions are not quite generic. */
6895 switch (fcode)
6897 case SPE_BUILTIN_EVSPLATFI:
6898 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
6899 arglist, target);
6900 case SPE_BUILTIN_EVSPLATI:
6901 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
6902 arglist, target);
6903 default:
6904 break;
     /* Table-driven expansion: manually-initialized binops, predicates,
	then evsel variants.  */
6907 d = (struct builtin_description *) bdesc_2arg_spe;
6908 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
6909 if (d->code == fcode)
6910 return rs6000_expand_binop_builtin (d->icode, arglist, target);
6912 d = (struct builtin_description *) bdesc_spe_predicates;
6913 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
6914 if (d->code == fcode)
6915 return spe_expand_predicate_builtin (d->icode, arglist, target);
6917 d = (struct builtin_description *) bdesc_spe_evsel;
6918 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
6919 if (d->code == fcode)
6920 return spe_expand_evsel_builtin (d->icode, arglist, target);
     /* Stores and the SPEFSCR moves are handled case by case.  */
6922 switch (fcode)
6924 case SPE_BUILTIN_EVSTDDX:
6925 return spe_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
6926 case SPE_BUILTIN_EVSTDHX:
6927 return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
6928 case SPE_BUILTIN_EVSTDWX:
6929 return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
6930 case SPE_BUILTIN_EVSTWHEX:
6931 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
6932 case SPE_BUILTIN_EVSTWHOX:
6933 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
6934 case SPE_BUILTIN_EVSTWWEX:
6935 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
6936 case SPE_BUILTIN_EVSTWWOX:
6937 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
6938 case SPE_BUILTIN_EVSTDD:
6939 return spe_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
6940 case SPE_BUILTIN_EVSTDH:
6941 return spe_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
6942 case SPE_BUILTIN_EVSTDW:
6943 return spe_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
6944 case SPE_BUILTIN_EVSTWHE:
6945 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
6946 case SPE_BUILTIN_EVSTWHO:
6947 return spe_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
6948 case SPE_BUILTIN_EVSTWWE:
6949 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
6950 case SPE_BUILTIN_EVSTWWO:
6951 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
     /* Move-from-SPEFSCR: no arguments, result in TARGET.  */
6952 case SPE_BUILTIN_MFSPEFSCR:
6953 icode = CODE_FOR_spe_mfspefscr;
6954 tmode = insn_data[icode].operand[0].mode;
6956 if (target == 0
6957 || GET_MODE (target) != tmode
6958 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6959 target = gen_reg_rtx (tmode);
6961 pat = GEN_FCN (icode) (target);
6962 if (! pat)
6963 return 0;
6964 emit_insn (pat);
6965 return target;
     /* Move-to-SPEFSCR: one argument, no result.  */
6966 case SPE_BUILTIN_MTSPEFSCR:
6967 icode = CODE_FOR_spe_mtspefscr;
6968 arg0 = TREE_VALUE (arglist);
6969 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6970 mode0 = insn_data[icode].operand[0].mode;
6972 if (arg0 == error_mark_node)
6973 return const0_rtx;
6975 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6976 op0 = copy_to_mode_reg (mode0, op0);
6978 pat = GEN_FCN (icode) (op0);
6979 if (pat)
6980 emit_insn (pat);
6981 return NULL_RTX;
6982 default:
6983 break;
6986 *expandedp = false;
6987 return NULL_RTX;
     /* Expand an SPE predicate builtin: emit one CCmode compare via ICODE,
	then extract the requested CR bit into an SImode TARGET.  The first
	argument FORM selects which variant (all/any/upper/lower) and must
	be a compile-time constant in [0,3].  */
6990 static rtx
6991 spe_expand_predicate_builtin (enum insn_code icode, tree arglist, rtx target)
6993 rtx pat, scratch, tmp;
6994 tree form = TREE_VALUE (arglist);
6995 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
6996 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6997 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6998 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6999 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7000 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7001 int form_int;
7002 enum rtx_code code;
7004 if (TREE_CODE (form) != INTEGER_CST)
7006 error ("argument 1 of __builtin_spe_predicate must be a constant");
7007 return const0_rtx;
7009 else
7010 form_int = TREE_INT_CST_LOW (form);
     /* Both compare inputs must share a mode by construction.  */
7012 if (mode0 != mode1)
7013 abort ();
7015 if (arg0 == error_mark_node || arg1 == error_mark_node)
7016 return const0_rtx;
7018 if (target == 0
7019 || GET_MODE (target) != SImode
7020 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
7021 target = gen_reg_rtx (SImode);
7023 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7024 op0 = copy_to_mode_reg (mode0, op0);
7025 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7026 op1 = copy_to_mode_reg (mode1, op1);
     /* The compare writes its result into a fresh CC register.  */
7028 scratch = gen_reg_rtx (CCmode);
7030 pat = GEN_FCN (icode) (scratch, op0, op1);
7031 if (! pat)
7032 return const0_rtx;
7033 emit_insn (pat);
7035 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
7036 _lower_. We use one compare, but look in different bits of the
7037 CR for each variant.
7039 There are 2 elements in each SPE simd type (upper/lower). The CR
7040 bits are set as follows:
7042 BIT0 | BIT 1 | BIT 2 | BIT 3
7043 U | L | (U | L) | (U & L)
7045 So, for an "all" relationship, BIT 3 would be set.
7046 For an "any" relationship, BIT 2 would be set. Etc.
7048 Following traditional nomenclature, these bits map to:
7050 BIT0 | BIT 1 | BIT 2 | BIT 3
7051 LT | GT | EQ | OV
7053 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
7056 switch (form_int)
7058 /* All variant. OV bit. */
7059 case 0:
7060 /* We need to get to the OV bit, which is the ORDERED bit. We
7061 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
7062 that's ugly and will trigger a validate_condition_mode abort.
7063 So let's just use another pattern. */
7064 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
7065 return target;
7066 /* Any variant. EQ bit. */
7067 case 1:
7068 code = EQ;
7069 break;
7070 /* Upper variant. LT bit. */
7071 case 2:
7072 code = LT;
7073 break;
7074 /* Lower variant. GT bit. */
7075 case 3:
7076 code = GT;
7077 break;
7078 default:
7079 error ("argument 1 of __builtin_spe_predicate is out of range");
7080 return const0_rtx;
     /* Materialize the chosen CR bit as a 0/1 value in TARGET.  */
7083 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
7084 emit_move_insn (target, tmp);
7086 return target;
7089 /* The evsel builtins look like this:
7091 e = __builtin_spe_evsel_OP (a, b, c, d);
7093 and work like this:
7095 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
7096 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
     Expands the four-operand evsel builtin: ICODE performs the compare of
     OP0/OP1 into a CC scratch, then an evsel insn selects between OP2 and
     OP3 per element.  */
7099 static rtx
7100 spe_expand_evsel_builtin (enum insn_code icode, tree arglist, rtx target)
7102 rtx pat, scratch;
7103 tree arg0 = TREE_VALUE (arglist);
7104 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
7105 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7106 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
7107 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
7108 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7109 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
7110 rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
7111 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7112 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7114 if (mode0 != mode1)
7115 abort ();
7117 if (arg0 == error_mark_node || arg1 == error_mark_node
7118 || arg2 == error_mark_node || arg3 == error_mark_node)
7119 return const0_rtx;
7121 if (target == 0
7122 || GET_MODE (target) != mode0
7123 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
7124 target = gen_reg_rtx (mode0);
     /* NOTE(review): operand[1]'s predicate is applied to op1/op2/op3 as
	well — presumably all four vector inputs share one predicate since
	mode0 == mode1 is asserted above; verify against the compare and
	evsel insn patterns.  */
7126 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7127 op0 = copy_to_mode_reg (mode0, op0);
7128 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
7129 op1 = copy_to_mode_reg (mode0, op1);
7130 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
7131 op2 = copy_to_mode_reg (mode0, op2);
7132 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
7133 op3 = copy_to_mode_reg (mode0, op3);
7135 /* Generate the compare. */
7136 scratch = gen_reg_rtx (CCmode);
7137 pat = GEN_FCN (icode) (scratch, op0, op1);
7138 if (! pat)
7139 return const0_rtx;
7140 emit_insn (pat);
     /* Integer and float element types use distinct evsel patterns.  */
7142 if (mode0 == V2SImode)
7143 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
7144 else
7145 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
7147 return target;
7150 /* Expand an expression EXP that calls a built-in function,
7151 with result going to TARGET if that's convenient
7152 (and in mode MODE if that's convenient).
7153 SUBTARGET may be used as the target for computing one of EXP's operands.
7154 IGNORE is nonzero if the value is to be ignored. */
7156 static rtx
7157 rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
7158 enum machine_mode mode ATTRIBUTE_UNUSED,
7159 int ignore ATTRIBUTE_UNUSED)
7161 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7162 tree arglist = TREE_OPERAND (exp, 1);
7163 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7164 struct builtin_description *d;
7165 size_t i;
7166 rtx ret;
7167 bool success;
     /* Try the target-specific irregular expanders first; each reports
	via SUCCESS whether it handled FCODE.  */
7169 if (TARGET_ALTIVEC)
7171 ret = altivec_expand_builtin (exp, target, &success);
7173 if (success)
7174 return ret;
7176 if (TARGET_SPE)
7178 ret = spe_expand_builtin (exp, target, &success);
7180 if (success)
7181 return ret;
     /* Regular unary/binary/ternary builtins are table-driven and shared
	between AltiVec and SPE.  */
7184 if (TARGET_ALTIVEC || TARGET_SPE)
7186 /* Handle simple unary operations. */
7187 d = (struct builtin_description *) bdesc_1arg;
7188 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
7189 if (d->code == fcode)
7190 return rs6000_expand_unop_builtin (d->icode, arglist, target);
7192 /* Handle simple binary operations. */
7193 d = (struct builtin_description *) bdesc_2arg;
7194 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
7195 if (d->code == fcode)
7196 return rs6000_expand_binop_builtin (d->icode, arglist, target);
7198 /* Handle simple ternary operations. */
7199 d = (struct builtin_description *) bdesc_3arg;
7200 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
7201 if (d->code == fcode)
7202 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
     /* An unrecognized builtin code at this point is a compiler bug.  */
7205 abort ();
7206 return NULL_RTX;
     /* Build a vector type of NUNITS elements over a fresh copy of NODE.
	Copying NODE and making it its own main variant keeps the resulting
	vector type distinct from vectors over the standard type, so
	"opaque" vector types do not alias the regular ones.  */
7209 static tree
7210 build_opaque_vector_type (tree node, int nunits)
7212 node = copy_node (node);
7213 TYPE_MAIN_VARIANT (node) = node;
7214 return build_vector_type (node, nunits);
     /* Create the vector and AltiVec/SPE-specific type nodes, register
	their user-visible names (__vector, __bool, __pixel spellings), and
	then initialize whichever builtin families the target enables.  */
7217 static void
7218 rs6000_init_builtins (void)
7220 V2SI_type_node = build_vector_type (intSI_type_node, 2);
7221 V2SF_type_node = build_vector_type (float_type_node, 2);
7222 V4HI_type_node = build_vector_type (intHI_type_node, 4);
7223 V4SI_type_node = build_vector_type (intSI_type_node, 4);
7224 V4SF_type_node = build_vector_type (float_type_node, 4);
7225 V8HI_type_node = build_vector_type (intHI_type_node, 8);
7226 V16QI_type_node = build_vector_type (intQI_type_node, 16);
7228 unsigned_V16QI_type_node = build_vector_type (unsigned_intQI_type_node, 16);
7229 unsigned_V8HI_type_node = build_vector_type (unsigned_intHI_type_node, 8);
7230 unsigned_V4SI_type_node = build_vector_type (unsigned_intSI_type_node, 4);
     /* Opaque SPE types deliberately do not alias the standard vectors.  */
7232 opaque_V2SF_type_node = build_opaque_vector_type (float_type_node, 2);
7233 opaque_V2SI_type_node = build_opaque_vector_type (intSI_type_node, 2);
7234 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
7236 /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
7237 types, especially in C++ land. Similarly, 'vector pixel' is distinct from
7238 'vector unsigned short'. */
7240 bool_char_type_node = copy_node (unsigned_intQI_type_node);
7241 TYPE_MAIN_VARIANT (bool_char_type_node) = bool_char_type_node;
7242 bool_short_type_node = copy_node (unsigned_intHI_type_node);
7243 TYPE_MAIN_VARIANT (bool_short_type_node) = bool_short_type_node;
7244 bool_int_type_node = copy_node (unsigned_intSI_type_node);
7245 TYPE_MAIN_VARIANT (bool_int_type_node) = bool_int_type_node;
7246 pixel_type_node = copy_node (unsigned_intHI_type_node);
7247 TYPE_MAIN_VARIANT (pixel_type_node) = pixel_type_node;
     /* Push TYPE_DECLs so the element types are nameable in source.  */
7249 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7250 get_identifier ("__bool char"),
7251 bool_char_type_node));
7252 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7253 get_identifier ("__bool short"),
7254 bool_short_type_node));
7255 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7256 get_identifier ("__bool int"),
7257 bool_int_type_node));
7258 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7259 get_identifier ("__pixel"),
7260 pixel_type_node));
7262 bool_V16QI_type_node = build_vector_type (bool_char_type_node, 16);
7263 bool_V8HI_type_node = build_vector_type (bool_short_type_node, 8);
7264 bool_V4SI_type_node = build_vector_type (bool_int_type_node, 4);
7265 pixel_V8HI_type_node = build_vector_type (pixel_type_node, 8);
     /* And the full __vector spellings for each vector type.  */
7267 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7268 get_identifier ("__vector unsigned char"),
7269 unsigned_V16QI_type_node));
7270 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7271 get_identifier ("__vector signed char"),
7272 V16QI_type_node));
7273 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7274 get_identifier ("__vector __bool char"),
7275 bool_V16QI_type_node));
7277 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7278 get_identifier ("__vector unsigned short"),
7279 unsigned_V8HI_type_node));
7280 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7281 get_identifier ("__vector signed short"),
7282 V8HI_type_node));
7283 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7284 get_identifier ("__vector __bool short"),
7285 bool_V8HI_type_node));
7287 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7288 get_identifier ("__vector unsigned int"),
7289 unsigned_V4SI_type_node));
7290 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7291 get_identifier ("__vector signed int"),
7292 V4SI_type_node));
7293 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7294 get_identifier ("__vector __bool int"),
7295 bool_V4SI_type_node));
7297 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7298 get_identifier ("__vector float"),
7299 V4SF_type_node));
7300 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
7301 get_identifier ("__vector __pixel"),
7302 pixel_V8HI_type_node));
     /* Register the builtin functions themselves per target feature.  */
7304 if (TARGET_SPE)
7305 spe_init_builtins ();
7306 if (TARGET_ALTIVEC)
7307 altivec_init_builtins ();
7308 if (TARGET_ALTIVEC || TARGET_SPE)
7309 rs6000_common_init_builtins ();
7312 /* Search through a set of builtins and enable the mask bits.
7313 DESC is an array of builtins.
7314 SIZE is the total number of builtins.
7315 START is the builtin enum at which to start.
7316 END is the builtin enum at which to end. */
     /* If START is not found in DESC, the table is left untouched.  The
	range [START, END] is assumed contiguous within DESC.  */
7317 static void
7318 enable_mask_for_builtins (struct builtin_description *desc, int size,
7319 enum rs6000_builtins start,
7320 enum rs6000_builtins end)
7322 int i;
     /* Locate the first entry of the range.  */
7324 for (i = 0; i < size; ++i)
7325 if (desc[i].code == start)
7326 break;
7328 if (i == size)
7329 return;
     /* Enable each entry through END by copying the current target_flags
	into its mask, making the builtin unconditionally available for
	this target configuration.  */
7331 for (; i < size; ++i)
7333 /* Flip all the bits on. */
7334 desc[i].mask = target_flags;
7335 if (desc[i].code == end)
7336 break;
7340 static void
7341 spe_init_builtins (void)
7343 tree endlink = void_list_node;
7344 tree puint_type_node = build_pointer_type (unsigned_type_node);
7345 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
7346 struct builtin_description *d;
7347 size_t i;
7349 tree v2si_ftype_4_v2si
7350 = build_function_type
7351 (opaque_V2SI_type_node,
7352 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7353 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7354 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7355 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7356 endlink)))));
7358 tree v2sf_ftype_4_v2sf
7359 = build_function_type
7360 (opaque_V2SF_type_node,
7361 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7362 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7363 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7364 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7365 endlink)))));
7367 tree int_ftype_int_v2si_v2si
7368 = build_function_type
7369 (integer_type_node,
7370 tree_cons (NULL_TREE, integer_type_node,
7371 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7372 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7373 endlink))));
7375 tree int_ftype_int_v2sf_v2sf
7376 = build_function_type
7377 (integer_type_node,
7378 tree_cons (NULL_TREE, integer_type_node,
7379 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7380 tree_cons (NULL_TREE, opaque_V2SF_type_node,
7381 endlink))));
7383 tree void_ftype_v2si_puint_int
7384 = build_function_type (void_type_node,
7385 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7386 tree_cons (NULL_TREE, puint_type_node,
7387 tree_cons (NULL_TREE,
7388 integer_type_node,
7389 endlink))));
7391 tree void_ftype_v2si_puint_char
7392 = build_function_type (void_type_node,
7393 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7394 tree_cons (NULL_TREE, puint_type_node,
7395 tree_cons (NULL_TREE,
7396 char_type_node,
7397 endlink))));
7399 tree void_ftype_v2si_pv2si_int
7400 = build_function_type (void_type_node,
7401 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7402 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
7403 tree_cons (NULL_TREE,
7404 integer_type_node,
7405 endlink))));
7407 tree void_ftype_v2si_pv2si_char
7408 = build_function_type (void_type_node,
7409 tree_cons (NULL_TREE, opaque_V2SI_type_node,
7410 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
7411 tree_cons (NULL_TREE,
7412 char_type_node,
7413 endlink))));
7415 tree void_ftype_int
7416 = build_function_type (void_type_node,
7417 tree_cons (NULL_TREE, integer_type_node, endlink));
7419 tree int_ftype_void
7420 = build_function_type (integer_type_node, endlink);
7422 tree v2si_ftype_pv2si_int
7423 = build_function_type (opaque_V2SI_type_node,
7424 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
7425 tree_cons (NULL_TREE, integer_type_node,
7426 endlink)));
7428 tree v2si_ftype_puint_int
7429 = build_function_type (opaque_V2SI_type_node,
7430 tree_cons (NULL_TREE, puint_type_node,
7431 tree_cons (NULL_TREE, integer_type_node,
7432 endlink)));
7434 tree v2si_ftype_pushort_int
7435 = build_function_type (opaque_V2SI_type_node,
7436 tree_cons (NULL_TREE, pushort_type_node,
7437 tree_cons (NULL_TREE, integer_type_node,
7438 endlink)));
7440 tree v2si_ftype_signed_char
7441 = build_function_type (opaque_V2SI_type_node,
7442 tree_cons (NULL_TREE, signed_char_type_node,
7443 endlink));
7445 /* The initialization of the simple binary and unary builtins is
7446 done in rs6000_common_init_builtins, but we have to enable the
7447 mask bits here manually because we have run out of `target_flags'
7448 bits. We really need to redesign this mask business. */
7450 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
7451 ARRAY_SIZE (bdesc_2arg),
7452 SPE_BUILTIN_EVADDW,
7453 SPE_BUILTIN_EVXOR);
7454 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
7455 ARRAY_SIZE (bdesc_1arg),
7456 SPE_BUILTIN_EVABS,
7457 SPE_BUILTIN_EVSUBFUSIAAW);
7458 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
7459 ARRAY_SIZE (bdesc_spe_predicates),
7460 SPE_BUILTIN_EVCMPEQ,
7461 SPE_BUILTIN_EVFSTSTLT);
7462 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
7463 ARRAY_SIZE (bdesc_spe_evsel),
7464 SPE_BUILTIN_EVSEL_CMPGTS,
7465 SPE_BUILTIN_EVSEL_FSTSTEQ);
7467 (*lang_hooks.decls.pushdecl)
7468 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
7469 opaque_V2SI_type_node));
7471 /* Initialize irregular SPE builtins. */
7473 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
7474 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
7475 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
7476 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
7477 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
7478 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
7479 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
7480 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
7481 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
7482 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
7483 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
7484 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
7485 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
7486 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
7487 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
7488 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
7489 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
7490 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
7492 /* Loads. */
7493 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
7494 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
7495 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
7496 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
7497 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
7498 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
7499 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
7500 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
7501 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
7502 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
7503 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
7504 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
7505 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
7506 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
7507 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
7508 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
7509 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
7510 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
7511 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
7512 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
7513 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
7514 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
7516 /* Predicates. */
7517 d = (struct builtin_description *) bdesc_spe_predicates;
7518 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
7520 tree type;
7522 switch (insn_data[d->icode].operand[1].mode)
7524 case V2SImode:
7525 type = int_ftype_int_v2si_v2si;
7526 break;
7527 case V2SFmode:
7528 type = int_ftype_int_v2sf_v2sf;
7529 break;
7530 default:
7531 abort ();
7534 def_builtin (d->mask, d->name, type, d->code);
7537 /* Evsel predicates. */
7538 d = (struct builtin_description *) bdesc_spe_evsel;
7539 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
7541 tree type;
7543 switch (insn_data[d->icode].operand[1].mode)
7545 case V2SImode:
7546 type = v2si_ftype_4_v2si;
7547 break;
7548 case V2SFmode:
7549 type = v2sf_ftype_4_v2sf;
7550 break;
7551 default:
7552 abort ();
7555 def_builtin (d->mask, d->name, type, d->code);
/* Register the AltiVec-specific builtin functions: the typed internal
   load/store helpers, data-stream controls (dst/dss), the lvx/stvx
   family, the predicate builtins and the abs* unary operators.  Every
   entry is gated on MASK_ALTIVEC so it is only available under
   -maltivec.  The simple 1/2/3-operand builtins shared with SPE are
   registered separately in rs6000_common_init_builtins.  */
7559 static void
7560 altivec_init_builtins (void)
7562 struct builtin_description *d;
7563 struct builtin_description_predicates *dp;
7564 size_t i;
  /* Plain pointer types, used for the store (writable-destination)
     builtins.  */
7565 tree pfloat_type_node = build_pointer_type (float_type_node);
7566 tree pint_type_node = build_pointer_type (integer_type_node);
7567 tree pshort_type_node = build_pointer_type (short_integer_type_node);
7568 tree pchar_type_node = build_pointer_type (char_type_node);
7570 tree pvoid_type_node = build_pointer_type (void_type_node);
  /* Const-qualified pointer types, used for the load (read-only
     source) builtins.  */
7572 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
7573 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
7574 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
7575 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
7577 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
  /* Function types, named <return>_ftype_<args>.  */
7579 tree int_ftype_int_v4si_v4si
7580 = build_function_type_list (integer_type_node,
7581 integer_type_node, V4SI_type_node,
7582 V4SI_type_node, NULL_TREE);
7583 tree v4sf_ftype_pcfloat
7584 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
7585 tree void_ftype_pfloat_v4sf
7586 = build_function_type_list (void_type_node,
7587 pfloat_type_node, V4SF_type_node, NULL_TREE);
7588 tree v4si_ftype_pcint
7589 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
7590 tree void_ftype_pint_v4si
7591 = build_function_type_list (void_type_node,
7592 pint_type_node, V4SI_type_node, NULL_TREE);
7593 tree v8hi_ftype_pcshort
7594 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
7595 tree void_ftype_pshort_v8hi
7596 = build_function_type_list (void_type_node,
7597 pshort_type_node, V8HI_type_node, NULL_TREE);
7598 tree v16qi_ftype_pcchar
7599 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
7600 tree void_ftype_pchar_v16qi
7601 = build_function_type_list (void_type_node,
7602 pchar_type_node, V16QI_type_node, NULL_TREE);
7603 tree void_ftype_v4si
7604 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
7605 tree v8hi_ftype_void
7606 = build_function_type (V8HI_type_node, void_list_node);
7607 tree void_ftype_void
7608 = build_function_type (void_type_node, void_list_node);
7609 tree void_ftype_int
7610 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
7612 tree v16qi_ftype_long_pcvoid
7613 = build_function_type_list (V16QI_type_node,
7614 long_integer_type_node, pcvoid_type_node, NULL_TREE);
7615 tree v8hi_ftype_long_pcvoid
7616 = build_function_type_list (V8HI_type_node,
7617 long_integer_type_node, pcvoid_type_node, NULL_TREE);
7618 tree v4si_ftype_long_pcvoid
7619 = build_function_type_list (V4SI_type_node,
7620 long_integer_type_node, pcvoid_type_node, NULL_TREE);
7622 tree void_ftype_v4si_long_pvoid
7623 = build_function_type_list (void_type_node,
7624 V4SI_type_node, long_integer_type_node,
7625 pvoid_type_node, NULL_TREE);
7626 tree void_ftype_v16qi_long_pvoid
7627 = build_function_type_list (void_type_node,
7628 V16QI_type_node, long_integer_type_node,
7629 pvoid_type_node, NULL_TREE);
7630 tree void_ftype_v8hi_long_pvoid
7631 = build_function_type_list (void_type_node,
7632 V8HI_type_node, long_integer_type_node,
7633 pvoid_type_node, NULL_TREE);
7634 tree int_ftype_int_v8hi_v8hi
7635 = build_function_type_list (integer_type_node,
7636 integer_type_node, V8HI_type_node,
7637 V8HI_type_node, NULL_TREE);
7638 tree int_ftype_int_v16qi_v16qi
7639 = build_function_type_list (integer_type_node,
7640 integer_type_node, V16QI_type_node,
7641 V16QI_type_node, NULL_TREE);
7642 tree int_ftype_int_v4sf_v4sf
7643 = build_function_type_list (integer_type_node,
7644 integer_type_node, V4SF_type_node,
7645 V4SF_type_node, NULL_TREE);
7646 tree v4si_ftype_v4si
7647 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
7648 tree v8hi_ftype_v8hi
7649 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
7650 tree v16qi_ftype_v16qi
7651 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
7652 tree v4sf_ftype_v4sf
7653 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
7654 tree void_ftype_pcvoid_int_int
7655 = build_function_type_list (void_type_node,
7656 pcvoid_type_node, integer_type_node,
7657 integer_type_node, NULL_TREE);
7658 tree int_ftype_pcchar
7659 = build_function_type_list (integer_type_node,
7660 pcchar_type_node, NULL_TREE);
  /* Typed internal load/store builtins (one pair per element type).  */
7662 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
7663 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
7664 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
7665 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
7666 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
7667 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
7668 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
7669 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
7670 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
7671 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
7672 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
7673 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
7674 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
7675 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
7676 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
7677 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
  /* VSCR access, data-stream stop, and the lvsl/lvsr/lvx/stvx family.  */
7678 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
7679 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
7680 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
7681 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_int, ALTIVEC_BUILTIN_DSS);
7682 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
7683 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
7684 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
7685 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
7686 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
7687 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
7688 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
7689 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
7690 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
7691 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
7692 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
7693 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
7695 /* See altivec.h for usage of "__builtin_altivec_compiletime_error". */
7696 def_builtin (MASK_ALTIVEC, "__builtin_altivec_compiletime_error", int_ftype_pcchar,
7697 ALTIVEC_BUILTIN_COMPILETIME_ERROR);
7699 /* Add the DST variants. */
7700 d = (struct builtin_description *) bdesc_dst;
7701 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
7702 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);
7704 /* Initialize the predicates. */
7705 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
7706 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
7708 enum machine_mode mode1;
7709 tree type;
  /* The vector mode of insn operand 1 selects the argument types.  */
7711 mode1 = insn_data[dp->icode].operand[1].mode;
7713 switch (mode1)
7715 case V4SImode:
7716 type = int_ftype_int_v4si_v4si;
7717 break;
7718 case V8HImode:
7719 type = int_ftype_int_v8hi_v8hi;
7720 break;
7721 case V16QImode:
7722 type = int_ftype_int_v16qi_v16qi;
7723 break;
7724 case V4SFmode:
7725 type = int_ftype_int_v4sf_v4sf;
7726 break;
7727 default:
7728 abort ();
7731 def_builtin (dp->mask, dp->name, type, dp->code);
7734 /* Initialize the abs* operators. */
7735 d = (struct builtin_description *) bdesc_abs;
7736 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
7738 enum machine_mode mode0;
7739 tree type;
  /* The mode of the result (insn operand 0) selects the unary
     signature; input and output modes are the same for abs.  */
7741 mode0 = insn_data[d->icode].operand[0].mode;
7743 switch (mode0)
7745 case V4SImode:
7746 type = v4si_ftype_v4si;
7747 break;
7748 case V8HImode:
7749 type = v8hi_ftype_v8hi;
7750 break;
7751 case V16QImode:
7752 type = v16qi_ftype_v16qi;
7753 break;
7754 case V4SFmode:
7755 type = v4sf_ftype_v4sf;
7756 break;
7757 default:
7758 abort ();
7761 def_builtin (d->mask, d->name, type, d->code);
/* Register the simple ternary, binary and unary builtins described in
   the bdesc_3arg, bdesc_2arg and bdesc_1arg tables (shared between the
   AltiVec and SPE paths).  For each table entry the function type is
   chosen by inspecting the machine modes of the insn's operands; an
   unrecognized mode combination aborts, which catches table/insn
   mismatches at startup.  */
7765 static void
7766 rs6000_common_init_builtins (void)
7768 struct builtin_description *d;
7769 size_t i;
  /* Function types, named <return>_ftype_<args>.  The v2si/v2sf types
     below are built from the opaque V2SI/V2SF nodes used for SPE.  */
7771 tree v4sf_ftype_v4sf_v4sf_v16qi
7772 = build_function_type_list (V4SF_type_node,
7773 V4SF_type_node, V4SF_type_node,
7774 V16QI_type_node, NULL_TREE);
7775 tree v4si_ftype_v4si_v4si_v16qi
7776 = build_function_type_list (V4SI_type_node,
7777 V4SI_type_node, V4SI_type_node,
7778 V16QI_type_node, NULL_TREE);
7779 tree v8hi_ftype_v8hi_v8hi_v16qi
7780 = build_function_type_list (V8HI_type_node,
7781 V8HI_type_node, V8HI_type_node,
7782 V16QI_type_node, NULL_TREE);
7783 tree v16qi_ftype_v16qi_v16qi_v16qi
7784 = build_function_type_list (V16QI_type_node,
7785 V16QI_type_node, V16QI_type_node,
7786 V16QI_type_node, NULL_TREE);
7787 tree v4si_ftype_int
7788 = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
7789 tree v8hi_ftype_int
7790 = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
7791 tree v16qi_ftype_int
7792 = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
7793 tree v8hi_ftype_v16qi
7794 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
7795 tree v4sf_ftype_v4sf
7796 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
7798 tree v2si_ftype_v2si_v2si
7799 = build_function_type_list (opaque_V2SI_type_node,
7800 opaque_V2SI_type_node,
7801 opaque_V2SI_type_node, NULL_TREE);
7803 tree v2sf_ftype_v2sf_v2sf
7804 = build_function_type_list (opaque_V2SF_type_node,
7805 opaque_V2SF_type_node,
7806 opaque_V2SF_type_node, NULL_TREE);
7808 tree v2si_ftype_int_int
7809 = build_function_type_list (opaque_V2SI_type_node,
7810 integer_type_node, integer_type_node,
7811 NULL_TREE);
7813 tree v2si_ftype_v2si
7814 = build_function_type_list (opaque_V2SI_type_node,
7815 opaque_V2SI_type_node, NULL_TREE);
7817 tree v2sf_ftype_v2sf
7818 = build_function_type_list (opaque_V2SF_type_node,
7819 opaque_V2SF_type_node, NULL_TREE);
7821 tree v2sf_ftype_v2si
7822 = build_function_type_list (opaque_V2SF_type_node,
7823 opaque_V2SI_type_node, NULL_TREE);
7825 tree v2si_ftype_v2sf
7826 = build_function_type_list (opaque_V2SI_type_node,
7827 opaque_V2SF_type_node, NULL_TREE);
7829 tree v2si_ftype_v2si_char
7830 = build_function_type_list (opaque_V2SI_type_node,
7831 opaque_V2SI_type_node,
7832 char_type_node, NULL_TREE);
7834 tree v2si_ftype_int_char
7835 = build_function_type_list (opaque_V2SI_type_node,
7836 integer_type_node, char_type_node, NULL_TREE);
7838 tree v2si_ftype_char
7839 = build_function_type_list (opaque_V2SI_type_node,
7840 char_type_node, NULL_TREE);
7842 tree int_ftype_int_int
7843 = build_function_type_list (integer_type_node,
7844 integer_type_node, integer_type_node,
7845 NULL_TREE);
7847 tree v4si_ftype_v4si_v4si
7848 = build_function_type_list (V4SI_type_node,
7849 V4SI_type_node, V4SI_type_node, NULL_TREE);
7850 tree v4sf_ftype_v4si_int
7851 = build_function_type_list (V4SF_type_node,
7852 V4SI_type_node, integer_type_node, NULL_TREE);
7853 tree v4si_ftype_v4sf_int
7854 = build_function_type_list (V4SI_type_node,
7855 V4SF_type_node, integer_type_node, NULL_TREE);
7856 tree v4si_ftype_v4si_int
7857 = build_function_type_list (V4SI_type_node,
7858 V4SI_type_node, integer_type_node, NULL_TREE);
7859 tree v8hi_ftype_v8hi_int
7860 = build_function_type_list (V8HI_type_node,
7861 V8HI_type_node, integer_type_node, NULL_TREE);
7862 tree v16qi_ftype_v16qi_int
7863 = build_function_type_list (V16QI_type_node,
7864 V16QI_type_node, integer_type_node, NULL_TREE);
7865 tree v16qi_ftype_v16qi_v16qi_int
7866 = build_function_type_list (V16QI_type_node,
7867 V16QI_type_node, V16QI_type_node,
7868 integer_type_node, NULL_TREE);
7869 tree v8hi_ftype_v8hi_v8hi_int
7870 = build_function_type_list (V8HI_type_node,
7871 V8HI_type_node, V8HI_type_node,
7872 integer_type_node, NULL_TREE);
7873 tree v4si_ftype_v4si_v4si_int
7874 = build_function_type_list (V4SI_type_node,
7875 V4SI_type_node, V4SI_type_node,
7876 integer_type_node, NULL_TREE);
7877 tree v4sf_ftype_v4sf_v4sf_int
7878 = build_function_type_list (V4SF_type_node,
7879 V4SF_type_node, V4SF_type_node,
7880 integer_type_node, NULL_TREE);
7881 tree v4sf_ftype_v4sf_v4sf
7882 = build_function_type_list (V4SF_type_node,
7883 V4SF_type_node, V4SF_type_node, NULL_TREE);
7884 tree v4sf_ftype_v4sf_v4sf_v4si
7885 = build_function_type_list (V4SF_type_node,
7886 V4SF_type_node, V4SF_type_node,
7887 V4SI_type_node, NULL_TREE);
7888 tree v4sf_ftype_v4sf_v4sf_v4sf
7889 = build_function_type_list (V4SF_type_node,
7890 V4SF_type_node, V4SF_type_node,
7891 V4SF_type_node, NULL_TREE);
7892 tree v4si_ftype_v4si_v4si_v4si
7893 = build_function_type_list (V4SI_type_node,
7894 V4SI_type_node, V4SI_type_node,
7895 V4SI_type_node, NULL_TREE);
7896 tree v8hi_ftype_v8hi_v8hi
7897 = build_function_type_list (V8HI_type_node,
7898 V8HI_type_node, V8HI_type_node, NULL_TREE);
7899 tree v8hi_ftype_v8hi_v8hi_v8hi
7900 = build_function_type_list (V8HI_type_node,
7901 V8HI_type_node, V8HI_type_node,
7902 V8HI_type_node, NULL_TREE);
7903 tree v4si_ftype_v8hi_v8hi_v4si
7904 = build_function_type_list (V4SI_type_node,
7905 V8HI_type_node, V8HI_type_node,
7906 V4SI_type_node, NULL_TREE);
7907 tree v4si_ftype_v16qi_v16qi_v4si
7908 = build_function_type_list (V4SI_type_node,
7909 V16QI_type_node, V16QI_type_node,
7910 V4SI_type_node, NULL_TREE);
7911 tree v16qi_ftype_v16qi_v16qi
7912 = build_function_type_list (V16QI_type_node,
7913 V16QI_type_node, V16QI_type_node, NULL_TREE);
7914 tree v4si_ftype_v4sf_v4sf
7915 = build_function_type_list (V4SI_type_node,
7916 V4SF_type_node, V4SF_type_node, NULL_TREE);
7917 tree v8hi_ftype_v16qi_v16qi
7918 = build_function_type_list (V8HI_type_node,
7919 V16QI_type_node, V16QI_type_node, NULL_TREE);
7920 tree v4si_ftype_v8hi_v8hi
7921 = build_function_type_list (V4SI_type_node,
7922 V8HI_type_node, V8HI_type_node, NULL_TREE);
7923 tree v8hi_ftype_v4si_v4si
7924 = build_function_type_list (V8HI_type_node,
7925 V4SI_type_node, V4SI_type_node, NULL_TREE);
7926 tree v16qi_ftype_v8hi_v8hi
7927 = build_function_type_list (V16QI_type_node,
7928 V8HI_type_node, V8HI_type_node, NULL_TREE);
7929 tree v4si_ftype_v16qi_v4si
7930 = build_function_type_list (V4SI_type_node,
7931 V16QI_type_node, V4SI_type_node, NULL_TREE);
7932 tree v4si_ftype_v16qi_v16qi
7933 = build_function_type_list (V4SI_type_node,
7934 V16QI_type_node, V16QI_type_node, NULL_TREE);
7935 tree v4si_ftype_v8hi_v4si
7936 = build_function_type_list (V4SI_type_node,
7937 V8HI_type_node, V4SI_type_node, NULL_TREE);
7938 tree v4si_ftype_v8hi
7939 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
7940 tree int_ftype_v4si_v4si
7941 = build_function_type_list (integer_type_node,
7942 V4SI_type_node, V4SI_type_node, NULL_TREE);
7943 tree int_ftype_v4sf_v4sf
7944 = build_function_type_list (integer_type_node,
7945 V4SF_type_node, V4SF_type_node, NULL_TREE);
7946 tree int_ftype_v16qi_v16qi
7947 = build_function_type_list (integer_type_node,
7948 V16QI_type_node, V16QI_type_node, NULL_TREE);
7949 tree int_ftype_v8hi_v8hi
7950 = build_function_type_list (integer_type_node,
7951 V8HI_type_node, V8HI_type_node, NULL_TREE);
7953 /* Add the simple ternary operators. */
7954 d = (struct builtin_description *) bdesc_3arg;
7955 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
7958 enum machine_mode mode0, mode1, mode2, mode3;
7959 tree type;
  /* Entries with no name or no insn pattern are placeholders.  */
7961 if (d->name == 0 || d->icode == CODE_FOR_nothing)
7962 continue;
  /* Operand 0 is the result; 1-3 are the inputs.  */
7964 mode0 = insn_data[d->icode].operand[0].mode;
7965 mode1 = insn_data[d->icode].operand[1].mode;
7966 mode2 = insn_data[d->icode].operand[2].mode;
7967 mode3 = insn_data[d->icode].operand[3].mode;
7969 /* When all four are of the same mode. */
7970 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
7972 switch (mode0)
7974 case V4SImode:
7975 type = v4si_ftype_v4si_v4si_v4si;
7976 break;
7977 case V4SFmode:
7978 type = v4sf_ftype_v4sf_v4sf_v4sf;
7979 break;
7980 case V8HImode:
7981 type = v8hi_ftype_v8hi_v8hi_v8hi;
7982 break;
7983 case V16QImode:
7984 type = v16qi_ftype_v16qi_v16qi_v16qi;
7985 break;
7986 default:
7987 abort();
  /* Permute-style operations: last operand is the V16QI selector.  */
7990 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
7992 switch (mode0)
7994 case V4SImode:
7995 type = v4si_ftype_v4si_v4si_v16qi;
7996 break;
7997 case V4SFmode:
7998 type = v4sf_ftype_v4sf_v4sf_v16qi;
7999 break;
8000 case V8HImode:
8001 type = v8hi_ftype_v8hi_v8hi_v16qi;
8002 break;
8003 case V16QImode:
8004 type = v16qi_ftype_v16qi_v16qi_v16qi;
8005 break;
8006 default:
8007 abort();
8010 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
8011 && mode3 == V4SImode)
8012 type = v4si_ftype_v16qi_v16qi_v4si;
8013 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
8014 && mode3 == V4SImode)
8015 type = v4si_ftype_v8hi_v8hi_v4si;
8016 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
8017 && mode3 == V4SImode)
8018 type = v4sf_ftype_v4sf_v4sf_v4si;
8020 /* vchar, vchar, vchar, 4 bit literal. */
8021 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
8022 && mode3 == QImode)
8023 type = v16qi_ftype_v16qi_v16qi_int;
8025 /* vshort, vshort, vshort, 4 bit literal. */
8026 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
8027 && mode3 == QImode)
8028 type = v8hi_ftype_v8hi_v8hi_int;
8030 /* vint, vint, vint, 4 bit literal. */
8031 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
8032 && mode3 == QImode)
8033 type = v4si_ftype_v4si_v4si_int;
8035 /* vfloat, vfloat, vfloat, 4 bit literal. */
8036 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
8037 && mode3 == QImode)
8038 type = v4sf_ftype_v4sf_v4sf_int;
8040 else
8041 abort ();
8043 def_builtin (d->mask, d->name, type, d->code);
8046 /* Add the simple binary operators. */
8047 d = (struct builtin_description *) bdesc_2arg;
8048 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
8050 enum machine_mode mode0, mode1, mode2;
8051 tree type;
8053 if (d->name == 0 || d->icode == CODE_FOR_nothing)
8054 continue;
8056 mode0 = insn_data[d->icode].operand[0].mode;
8057 mode1 = insn_data[d->icode].operand[1].mode;
8058 mode2 = insn_data[d->icode].operand[2].mode;
8060 /* When all three operands are of the same mode. */
8061 if (mode0 == mode1 && mode1 == mode2)
8063 switch (mode0)
8065 case V4SFmode:
8066 type = v4sf_ftype_v4sf_v4sf;
8067 break;
8068 case V4SImode:
8069 type = v4si_ftype_v4si_v4si;
8070 break;
8071 case V16QImode:
8072 type = v16qi_ftype_v16qi_v16qi;
8073 break;
8074 case V8HImode:
8075 type = v8hi_ftype_v8hi_v8hi;
8076 break;
8077 case V2SImode:
8078 type = v2si_ftype_v2si_v2si;
8079 break;
8080 case V2SFmode:
8081 type = v2sf_ftype_v2sf_v2sf;
8082 break;
8083 case SImode:
8084 type = int_ftype_int_int;
8085 break;
8086 default:
8087 abort ();
8091 /* A few other combos we really don't want to do manually. */
8093 /* vint, vfloat, vfloat. */
8094 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
8095 type = v4si_ftype_v4sf_v4sf;
8097 /* vshort, vchar, vchar. */
8098 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
8099 type = v8hi_ftype_v16qi_v16qi;
8101 /* vint, vshort, vshort. */
8102 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
8103 type = v4si_ftype_v8hi_v8hi;
8105 /* vshort, vint, vint. */
8106 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
8107 type = v8hi_ftype_v4si_v4si;
8109 /* vchar, vshort, vshort. */
8110 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
8111 type = v16qi_ftype_v8hi_v8hi;
8113 /* vint, vchar, vint. */
8114 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
8115 type = v4si_ftype_v16qi_v4si;
8117 /* vint, vchar, vchar. */
8118 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
8119 type = v4si_ftype_v16qi_v16qi;
8121 /* vint, vshort, vint. */
8122 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
8123 type = v4si_ftype_v8hi_v4si;
8125 /* vint, vint, 5 bit literal. */
8126 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
8127 type = v4si_ftype_v4si_int;
8129 /* vshort, vshort, 5 bit literal. */
8130 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
8131 type = v8hi_ftype_v8hi_int;
8133 /* vchar, vchar, 5 bit literal. */
8134 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
8135 type = v16qi_ftype_v16qi_int;
8137 /* vfloat, vint, 5 bit literal. */
8138 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
8139 type = v4sf_ftype_v4si_int;
8141 /* vint, vfloat, 5 bit literal. */
8142 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
8143 type = v4si_ftype_v4sf_int;
8145 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
8146 type = v2si_ftype_int_int;
8148 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
8149 type = v2si_ftype_v2si_char;
8151 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
8152 type = v2si_ftype_int_char;
8154 /* int, x, x. */
8155 else if (mode0 == SImode)
8157 switch (mode1)
8159 case V4SImode:
8160 type = int_ftype_v4si_v4si;
8161 break;
8162 case V4SFmode:
8163 type = int_ftype_v4sf_v4sf;
8164 break;
8165 case V16QImode:
8166 type = int_ftype_v16qi_v16qi;
8167 break;
8168 case V8HImode:
8169 type = int_ftype_v8hi_v8hi;
8170 break;
8171 default:
8172 abort ();
8176 else
8177 abort ();
8179 def_builtin (d->mask, d->name, type, d->code);
8182 /* Add the simple unary operators. */
8183 d = (struct builtin_description *) bdesc_1arg;
8184 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
8186 enum machine_mode mode0, mode1;
8187 tree type;
8189 if (d->name == 0 || d->icode == CODE_FOR_nothing)
8190 continue;
8192 mode0 = insn_data[d->icode].operand[0].mode;
8193 mode1 = insn_data[d->icode].operand[1].mode;
  /* A QImode input denotes a small literal argument (splat immediates);
     otherwise the input is a vector of the given mode.  */
8195 if (mode0 == V4SImode && mode1 == QImode)
8196 type = v4si_ftype_int;
8197 else if (mode0 == V8HImode && mode1 == QImode)
8198 type = v8hi_ftype_int;
8199 else if (mode0 == V16QImode && mode1 == QImode)
8200 type = v16qi_ftype_int;
8201 else if (mode0 == V4SFmode && mode1 == V4SFmode)
8202 type = v4sf_ftype_v4sf;
8203 else if (mode0 == V8HImode && mode1 == V16QImode)
8204 type = v8hi_ftype_v16qi;
8205 else if (mode0 == V4SImode && mode1 == V8HImode)
8206 type = v4si_ftype_v8hi;
8207 else if (mode0 == V2SImode && mode1 == V2SImode)
8208 type = v2si_ftype_v2si;
8209 else if (mode0 == V2SFmode && mode1 == V2SFmode)
8210 type = v2sf_ftype_v2sf;
8211 else if (mode0 == V2SFmode && mode1 == V2SImode)
8212 type = v2sf_ftype_v2si;
8213 else if (mode0 == V2SImode && mode1 == V2SFmode)
8214 type = v2si_ftype_v2sf;
8215 else if (mode0 == V2SImode && mode1 == QImode)
8216 type = v2si_ftype_char;
8217 else
8218 abort ();
8220 def_builtin (d->mask, d->name, type, d->code);
/* Register target-specific names for the floating-point support library
   routines.  Does nothing without hardware floating point.  The
   long-double (TFmode) helpers differ by ABI: AIX/Darwin/64-bit SVR4
   use the _xlq* routines, while 32-bit SVR4 uses the _q_* routines.  */
8224 static void
8225 rs6000_init_libfuncs (void)
8227 if (!TARGET_HARD_FLOAT)
8228 return;
8230 if (DEFAULT_ABI != ABI_V4)
  /* Old AIX targets without POWER2/PowerPC need library helpers even
     for double/long-double -> int conversions.  */
8232 if (TARGET_XCOFF && ! TARGET_POWER2 && ! TARGET_POWERPC)
8234 /* AIX library routines for float->int conversion. */
8235 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
8236 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
8237 set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
8238 set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
8241 /* Standard AIX/Darwin/64-bit SVR4 quad floating point routines. */
8242 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
8243 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
8244 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
8245 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
8247 else
8249 /* 32-bit SVR4 quad floating point routines. */
8251 set_optab_libfunc (add_optab, TFmode, "_q_add");
8252 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
8253 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
8254 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
8255 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
  /* _q_sqrt is only registered when the hardware sqrt insn families
     are available (general-purpose group optional or POWER2).  */
8256 if (TARGET_PPC_GPOPT || TARGET_POWER2)
8257 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
8259 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
8260 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
8261 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
8262 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
8263 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
8264 set_optab_libfunc (le_optab, TFmode, "_q_fle");
  /* Conversions between long double and the narrower float modes, and
     to/from SImode.  */
8266 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
8267 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
8268 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
8269 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
8270 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
8271 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
8272 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
8277 /* Expand a block clear operation, and return 1 if successful. Return 0
8278 if we should let the compiler generate normal code.
8280 operands[0] is the destination
8281 operands[1] is the length
8282 operands[2] is the alignment */
8285 expand_block_clear (rtx operands[])
8287 rtx orig_dest = operands[0];
8288 rtx bytes_rtx = operands[1];
8289 rtx align_rtx = operands[2];
8290 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
8291 int align;
8292 int bytes;
8293 int offset;
8294 int clear_bytes;
8296 /* If this is not a fixed size move, just call memcpy */
8297 if (! constp)
8298 return 0;
8300 /* If this is not a fixed size alignment, abort */
8301 if (GET_CODE (align_rtx) != CONST_INT)
8302 abort ();
8303 align = INTVAL (align_rtx) * BITS_PER_UNIT;
8305 /* Anything to clear? */
8306 bytes = INTVAL (bytes_rtx);
8307 if (bytes <= 0)
8308 return 1;
8310 if (bytes > (TARGET_POWERPC64 && align >= 32 ? 64 : 32))
8311 return 0;
8313 if (optimize_size && bytes > 16)
8314 return 0;
8316 for (offset = 0; bytes > 0; offset += clear_bytes, bytes -= clear_bytes)
8318 rtx (*mov) (rtx, rtx);
8319 enum machine_mode mode = BLKmode;
8320 rtx dest;
8322 if (bytes >= 8 && TARGET_POWERPC64
8323 /* 64-bit loads and stores require word-aligned
8324 displacements. */
8325 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
8327 clear_bytes = 8;
8328 mode = DImode;
8329 mov = gen_movdi;
8331 else if (bytes >= 4 && !STRICT_ALIGNMENT)
8332 { /* move 4 bytes */
8333 clear_bytes = 4;
8334 mode = SImode;
8335 mov = gen_movsi;
8337 else if (bytes == 2 && !STRICT_ALIGNMENT)
8338 { /* move 2 bytes */
8339 clear_bytes = 2;
8340 mode = HImode;
8341 mov = gen_movhi;
8343 else /* move 1 byte at a time */
8345 clear_bytes = 1;
8346 mode = QImode;
8347 mov = gen_movqi;
8350 dest = adjust_address (orig_dest, mode, offset);
8352 emit_insn ((*mov) (dest, const0_rtx));
8355 return 1;
8359 /* Expand a block move operation, and return 1 if successful. Return 0
8360 if we should let the compiler generate normal code.
8362 operands[0] is the destination
8363 operands[1] is the source
8364 operands[2] is the length
8365 operands[3] is the alignment */
8367 #define MAX_MOVE_REG 4
8370 expand_block_move (rtx operands[])
8372 rtx orig_dest = operands[0];
8373 rtx orig_src = operands[1];
8374 rtx bytes_rtx = operands[2];
8375 rtx align_rtx = operands[3];
8376 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
8377 int align;
8378 int bytes;
8379 int offset;
8380 int move_bytes;
8381 rtx stores[MAX_MOVE_REG];
8382 int num_reg = 0;
8384 /* If this is not a fixed size move, just call memcpy */
8385 if (! constp)
8386 return 0;
8388 /* If this is not a fixed size alignment, abort */
8389 if (GET_CODE (align_rtx) != CONST_INT)
8390 abort ();
8391 align = INTVAL (align_rtx) * BITS_PER_UNIT;
8393 /* Anything to move? */
8394 bytes = INTVAL (bytes_rtx);
8395 if (bytes <= 0)
8396 return 1;
8398 /* store_one_arg depends on expand_block_move to handle at least the size of
8399 reg_parm_stack_space. */
8400 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
8401 return 0;
8403 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
8405 union {
8406 rtx (*movmemsi) (rtx, rtx, rtx, rtx);
8407 rtx (*mov) (rtx, rtx);
8408 } gen_func;
8409 enum machine_mode mode = BLKmode;
8410 rtx src, dest;
8412 if (TARGET_STRING
8413 && bytes > 24 /* move up to 32 bytes at a time */
8414 && ! fixed_regs[5]
8415 && ! fixed_regs[6]
8416 && ! fixed_regs[7]
8417 && ! fixed_regs[8]
8418 && ! fixed_regs[9]
8419 && ! fixed_regs[10]
8420 && ! fixed_regs[11]
8421 && ! fixed_regs[12])
8423 move_bytes = (bytes > 32) ? 32 : bytes;
8424 gen_func.movmemsi = gen_movmemsi_8reg;
8426 else if (TARGET_STRING
8427 && bytes > 16 /* move up to 24 bytes at a time */
8428 && ! fixed_regs[5]
8429 && ! fixed_regs[6]
8430 && ! fixed_regs[7]
8431 && ! fixed_regs[8]
8432 && ! fixed_regs[9]
8433 && ! fixed_regs[10])
8435 move_bytes = (bytes > 24) ? 24 : bytes;
8436 gen_func.movmemsi = gen_movmemsi_6reg;
8438 else if (TARGET_STRING
8439 && bytes > 8 /* move up to 16 bytes at a time */
8440 && ! fixed_regs[5]
8441 && ! fixed_regs[6]
8442 && ! fixed_regs[7]
8443 && ! fixed_regs[8])
8445 move_bytes = (bytes > 16) ? 16 : bytes;
8446 gen_func.movmemsi = gen_movmemsi_4reg;
8448 else if (bytes >= 8 && TARGET_POWERPC64
8449 /* 64-bit loads and stores require word-aligned
8450 displacements. */
8451 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
8453 move_bytes = 8;
8454 mode = DImode;
8455 gen_func.mov = gen_movdi;
8457 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
8458 { /* move up to 8 bytes at a time */
8459 move_bytes = (bytes > 8) ? 8 : bytes;
8460 gen_func.movmemsi = gen_movmemsi_2reg;
8462 else if (bytes >= 4 && !STRICT_ALIGNMENT)
8463 { /* move 4 bytes */
8464 move_bytes = 4;
8465 mode = SImode;
8466 gen_func.mov = gen_movsi;
8468 else if (bytes == 2 && !STRICT_ALIGNMENT)
8469 { /* move 2 bytes */
8470 move_bytes = 2;
8471 mode = HImode;
8472 gen_func.mov = gen_movhi;
8474 else if (TARGET_STRING && bytes > 1)
8475 { /* move up to 4 bytes at a time */
8476 move_bytes = (bytes > 4) ? 4 : bytes;
8477 gen_func.movmemsi = gen_movmemsi_1reg;
8479 else /* move 1 byte at a time */
8481 move_bytes = 1;
8482 mode = QImode;
8483 gen_func.mov = gen_movqi;
8486 src = adjust_address (orig_src, mode, offset);
8487 dest = adjust_address (orig_dest, mode, offset);
8489 if (mode != BLKmode)
8491 rtx tmp_reg = gen_reg_rtx (mode);
8493 emit_insn ((*gen_func.mov) (tmp_reg, src));
8494 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
8497 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
8499 int i;
8500 for (i = 0; i < num_reg; i++)
8501 emit_insn (stores[i]);
8502 num_reg = 0;
8505 if (mode == BLKmode)
8507 /* Move the address into scratch registers. The movmemsi
8508 patterns require zero offset. */
8509 if (!REG_P (XEXP (src, 0)))
8511 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
8512 src = replace_equiv_address (src, src_reg);
8514 set_mem_size (src, GEN_INT (move_bytes));
8516 if (!REG_P (XEXP (dest, 0)))
8518 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
8519 dest = replace_equiv_address (dest, dest_reg);
8521 set_mem_size (dest, GEN_INT (move_bytes));
8523 emit_insn ((*gen_func.movmemsi) (dest, src,
8524 GEN_INT (move_bytes & 31),
8525 align_rtx));
8529 return 1;
8533 /* Return 1 if OP is a load multiple operation. It is known to be a
8534 PARALLEL and the first section will be tested. */
8537 load_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8539 int count = XVECLEN (op, 0);
8540 unsigned int dest_regno;
8541 rtx src_addr;
8542 int i;
8544 /* Perform a quick check so we don't blow up below. */
8545 if (count <= 1
8546 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8547 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
8548 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
8549 return 0;
8551 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
8552 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
8554 for (i = 1; i < count; i++)
8556 rtx elt = XVECEXP (op, 0, i);
8558 if (GET_CODE (elt) != SET
8559 || GET_CODE (SET_DEST (elt)) != REG
8560 || GET_MODE (SET_DEST (elt)) != SImode
8561 || REGNO (SET_DEST (elt)) != dest_regno + i
8562 || GET_CODE (SET_SRC (elt)) != MEM
8563 || GET_MODE (SET_SRC (elt)) != SImode
8564 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
8565 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
8566 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
8567 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
8568 return 0;
8571 return 1;
8574 /* Similar, but tests for store multiple. Here, the second vector element
8575 is a CLOBBER. It will be tested later. */
8578 store_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8580 int count = XVECLEN (op, 0) - 1;
8581 unsigned int src_regno;
8582 rtx dest_addr;
8583 int i;
8585 /* Perform a quick check so we don't blow up below. */
8586 if (count <= 1
8587 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8588 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
8589 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
8590 return 0;
8592 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
8593 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
8595 for (i = 1; i < count; i++)
8597 rtx elt = XVECEXP (op, 0, i + 1);
8599 if (GET_CODE (elt) != SET
8600 || GET_CODE (SET_SRC (elt)) != REG
8601 || GET_MODE (SET_SRC (elt)) != SImode
8602 || REGNO (SET_SRC (elt)) != src_regno + i
8603 || GET_CODE (SET_DEST (elt)) != MEM
8604 || GET_MODE (SET_DEST (elt)) != SImode
8605 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
8606 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
8607 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
8608 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
8609 return 0;
8612 return 1;
8615 /* Return a string to perform a load_multiple operation.
8616 operands[0] is the vector.
8617 operands[1] is the source address.
8618 operands[2] is the first destination register. */
8620 const char *
8621 rs6000_output_load_multiple (rtx operands[3])
8623 /* We have to handle the case where the pseudo used to contain the address
8624 is assigned to one of the output registers. */
8625 int i, j;
8626 int words = XVECLEN (operands[0], 0);
8627 rtx xop[10];
8629 if (XVECLEN (operands[0], 0) == 1)
8630 return "{l|lwz} %2,0(%1)";
8632 for (i = 0; i < words; i++)
8633 if (refers_to_regno_p (REGNO (operands[2]) + i,
8634 REGNO (operands[2]) + i + 1, operands[1], 0))
8636 if (i == words-1)
8638 xop[0] = GEN_INT (4 * (words-1));
8639 xop[1] = operands[1];
8640 xop[2] = operands[2];
8641 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
8642 return "";
8644 else if (i == 0)
8646 xop[0] = GEN_INT (4 * (words-1));
8647 xop[1] = operands[1];
8648 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
8649 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
8650 return "";
8652 else
8654 for (j = 0; j < words; j++)
8655 if (j != i)
8657 xop[0] = GEN_INT (j * 4);
8658 xop[1] = operands[1];
8659 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
8660 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
8662 xop[0] = GEN_INT (i * 4);
8663 xop[1] = operands[1];
8664 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
8665 return "";
8669 return "{lsi|lswi} %2,%1,%N0";
8672 /* Return 1 for a parallel vrsave operation. */
8675 vrsave_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8677 int count = XVECLEN (op, 0);
8678 unsigned int dest_regno, src_regno;
8679 int i;
8681 if (count <= 1
8682 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8683 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
8684 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
8685 return 0;
8687 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
8688 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
8690 if (dest_regno != VRSAVE_REGNO
8691 && src_regno != VRSAVE_REGNO)
8692 return 0;
8694 for (i = 1; i < count; i++)
8696 rtx elt = XVECEXP (op, 0, i);
8698 if (GET_CODE (elt) != CLOBBER
8699 && GET_CODE (elt) != SET)
8700 return 0;
8703 return 1;
8706 /* Return 1 for an PARALLEL suitable for mfcr. */
8709 mfcr_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8711 int count = XVECLEN (op, 0);
8712 int i;
8714 /* Perform a quick check so we don't blow up below. */
8715 if (count < 1
8716 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8717 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
8718 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
8719 return 0;
8721 for (i = 0; i < count; i++)
8723 rtx exp = XVECEXP (op, 0, i);
8724 rtx unspec;
8725 int maskval;
8726 rtx src_reg;
8728 src_reg = XVECEXP (SET_SRC (exp), 0, 0);
8730 if (GET_CODE (src_reg) != REG
8731 || GET_MODE (src_reg) != CCmode
8732 || ! CR_REGNO_P (REGNO (src_reg)))
8733 return 0;
8735 if (GET_CODE (exp) != SET
8736 || GET_CODE (SET_DEST (exp)) != REG
8737 || GET_MODE (SET_DEST (exp)) != SImode
8738 || ! INT_REGNO_P (REGNO (SET_DEST (exp))))
8739 return 0;
8740 unspec = SET_SRC (exp);
8741 maskval = 1 << (MAX_CR_REGNO - REGNO (src_reg));
8743 if (GET_CODE (unspec) != UNSPEC
8744 || XINT (unspec, 1) != UNSPEC_MOVESI_FROM_CR
8745 || XVECLEN (unspec, 0) != 2
8746 || XVECEXP (unspec, 0, 0) != src_reg
8747 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
8748 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
8749 return 0;
8751 return 1;
8754 /* Return 1 for an PARALLEL suitable for mtcrf. */
8757 mtcrf_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8759 int count = XVECLEN (op, 0);
8760 int i;
8761 rtx src_reg;
8763 /* Perform a quick check so we don't blow up below. */
8764 if (count < 1
8765 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8766 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
8767 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
8768 return 0;
8769 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
8771 if (GET_CODE (src_reg) != REG
8772 || GET_MODE (src_reg) != SImode
8773 || ! INT_REGNO_P (REGNO (src_reg)))
8774 return 0;
8776 for (i = 0; i < count; i++)
8778 rtx exp = XVECEXP (op, 0, i);
8779 rtx unspec;
8780 int maskval;
8782 if (GET_CODE (exp) != SET
8783 || GET_CODE (SET_DEST (exp)) != REG
8784 || GET_MODE (SET_DEST (exp)) != CCmode
8785 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
8786 return 0;
8787 unspec = SET_SRC (exp);
8788 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
8790 if (GET_CODE (unspec) != UNSPEC
8791 || XINT (unspec, 1) != UNSPEC_MOVESI_TO_CR
8792 || XVECLEN (unspec, 0) != 2
8793 || XVECEXP (unspec, 0, 0) != src_reg
8794 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
8795 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
8796 return 0;
8798 return 1;
8801 /* Return 1 for an PARALLEL suitable for lmw. */
8804 lmw_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8806 int count = XVECLEN (op, 0);
8807 unsigned int dest_regno;
8808 rtx src_addr;
8809 unsigned int base_regno;
8810 HOST_WIDE_INT offset;
8811 int i;
8813 /* Perform a quick check so we don't blow up below. */
8814 if (count <= 1
8815 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8816 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
8817 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
8818 return 0;
8820 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
8821 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
8823 if (dest_regno > 31
8824 || count != 32 - (int) dest_regno)
8825 return 0;
8827 if (legitimate_indirect_address_p (src_addr, 0))
8829 offset = 0;
8830 base_regno = REGNO (src_addr);
8831 if (base_regno == 0)
8832 return 0;
8834 else if (rs6000_legitimate_offset_address_p (SImode, src_addr, 0))
8836 offset = INTVAL (XEXP (src_addr, 1));
8837 base_regno = REGNO (XEXP (src_addr, 0));
8839 else
8840 return 0;
8842 for (i = 0; i < count; i++)
8844 rtx elt = XVECEXP (op, 0, i);
8845 rtx newaddr;
8846 rtx addr_reg;
8847 HOST_WIDE_INT newoffset;
8849 if (GET_CODE (elt) != SET
8850 || GET_CODE (SET_DEST (elt)) != REG
8851 || GET_MODE (SET_DEST (elt)) != SImode
8852 || REGNO (SET_DEST (elt)) != dest_regno + i
8853 || GET_CODE (SET_SRC (elt)) != MEM
8854 || GET_MODE (SET_SRC (elt)) != SImode)
8855 return 0;
8856 newaddr = XEXP (SET_SRC (elt), 0);
8857 if (legitimate_indirect_address_p (newaddr, 0))
8859 newoffset = 0;
8860 addr_reg = newaddr;
8862 else if (rs6000_legitimate_offset_address_p (SImode, newaddr, 0))
8864 addr_reg = XEXP (newaddr, 0);
8865 newoffset = INTVAL (XEXP (newaddr, 1));
8867 else
8868 return 0;
8869 if (REGNO (addr_reg) != base_regno
8870 || newoffset != offset + 4 * i)
8871 return 0;
8874 return 1;
8877 /* Return 1 for an PARALLEL suitable for stmw. */
8880 stmw_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8882 int count = XVECLEN (op, 0);
8883 unsigned int src_regno;
8884 rtx dest_addr;
8885 unsigned int base_regno;
8886 HOST_WIDE_INT offset;
8887 int i;
8889 /* Perform a quick check so we don't blow up below. */
8890 if (count <= 1
8891 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8892 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
8893 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
8894 return 0;
8896 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
8897 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
8899 if (src_regno > 31
8900 || count != 32 - (int) src_regno)
8901 return 0;
8903 if (legitimate_indirect_address_p (dest_addr, 0))
8905 offset = 0;
8906 base_regno = REGNO (dest_addr);
8907 if (base_regno == 0)
8908 return 0;
8910 else if (rs6000_legitimate_offset_address_p (SImode, dest_addr, 0))
8912 offset = INTVAL (XEXP (dest_addr, 1));
8913 base_regno = REGNO (XEXP (dest_addr, 0));
8915 else
8916 return 0;
8918 for (i = 0; i < count; i++)
8920 rtx elt = XVECEXP (op, 0, i);
8921 rtx newaddr;
8922 rtx addr_reg;
8923 HOST_WIDE_INT newoffset;
8925 if (GET_CODE (elt) != SET
8926 || GET_CODE (SET_SRC (elt)) != REG
8927 || GET_MODE (SET_SRC (elt)) != SImode
8928 || REGNO (SET_SRC (elt)) != src_regno + i
8929 || GET_CODE (SET_DEST (elt)) != MEM
8930 || GET_MODE (SET_DEST (elt)) != SImode)
8931 return 0;
8932 newaddr = XEXP (SET_DEST (elt), 0);
8933 if (legitimate_indirect_address_p (newaddr, 0))
8935 newoffset = 0;
8936 addr_reg = newaddr;
8938 else if (rs6000_legitimate_offset_address_p (SImode, newaddr, 0))
8940 addr_reg = XEXP (newaddr, 0);
8941 newoffset = INTVAL (XEXP (newaddr, 1));
8943 else
8944 return 0;
8945 if (REGNO (addr_reg) != base_regno
8946 || newoffset != offset + 4 * i)
8947 return 0;
8950 return 1;
8953 /* A validation routine: say whether CODE, a condition code, and MODE
8954 match. The other alternatives either don't make sense or should
8955 never be generated. */
8957 static void
8958 validate_condition_mode (enum rtx_code code, enum machine_mode mode)
8960 if ((GET_RTX_CLASS (code) != RTX_COMPARE
8961 && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
8962 || GET_MODE_CLASS (mode) != MODE_CC)
8963 abort ();
8965 /* These don't make sense. */
8966 if ((code == GT || code == LT || code == GE || code == LE)
8967 && mode == CCUNSmode)
8968 abort ();
8970 if ((code == GTU || code == LTU || code == GEU || code == LEU)
8971 && mode != CCUNSmode)
8972 abort ();
8974 if (mode != CCFPmode
8975 && (code == ORDERED || code == UNORDERED
8976 || code == UNEQ || code == LTGT
8977 || code == UNGT || code == UNLT
8978 || code == UNGE || code == UNLE))
8979 abort ();
8981 /* These should never be generated except for
8982 flag_finite_math_only. */
8983 if (mode == CCFPmode
8984 && ! flag_finite_math_only
8985 && (code == LE || code == GE
8986 || code == UNEQ || code == LTGT
8987 || code == UNGT || code == UNLT))
8988 abort ();
8990 /* These are invalid; the information is not there. */
8991 if (mode == CCEQmode
8992 && code != EQ && code != NE)
8993 abort ();
8996 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
8997 We only check the opcode against the mode of the CC value here. */
9000 branch_comparison_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
9002 enum rtx_code code = GET_CODE (op);
9003 enum machine_mode cc_mode;
9005 if (!COMPARISON_P (op))
9006 return 0;
9008 cc_mode = GET_MODE (XEXP (op, 0));
9009 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
9010 return 0;
9012 validate_condition_mode (code, cc_mode);
9014 return 1;
9017 /* Return 1 if OP is a comparison operation that is valid for a branch
9018 insn and which is true if the corresponding bit in the CC register
9019 is set. */
9022 branch_positive_comparison_operator (rtx op, enum machine_mode mode)
9024 enum rtx_code code;
9026 if (! branch_comparison_operator (op, mode))
9027 return 0;
9029 code = GET_CODE (op);
9030 return (code == EQ || code == LT || code == GT
9031 || code == LTU || code == GTU
9032 || code == UNORDERED);
9035 /* Return 1 if OP is a comparison operation that is valid for an scc
9036 insn: it must be a positive comparison. */
9039 scc_comparison_operator (rtx op, enum machine_mode mode)
9041 return branch_positive_comparison_operator (op, mode);
9045 trap_comparison_operator (rtx op, enum machine_mode mode)
9047 if (mode != VOIDmode && mode != GET_MODE (op))
9048 return 0;
9049 return COMPARISON_P (op);
9053 boolean_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
9055 enum rtx_code code = GET_CODE (op);
9056 return (code == AND || code == IOR || code == XOR);
9060 boolean_or_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
9062 enum rtx_code code = GET_CODE (op);
9063 return (code == IOR || code == XOR);
9067 min_max_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
9069 enum rtx_code code = GET_CODE (op);
9070 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
9073 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
9074 mask required to convert the result of a rotate insn into a shift
9075 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
9078 includes_lshift_p (rtx shiftop, rtx andop)
9080 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
9082 shift_mask <<= INTVAL (shiftop);
9084 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
9087 /* Similar, but for right shift. */
9090 includes_rshift_p (rtx shiftop, rtx andop)
9092 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
9094 shift_mask >>= INTVAL (shiftop);
9096 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
9099 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
9100 to perform a left shift. It must have exactly SHIFTOP least
9101 significant 0's, then one or more 1's, then zero or more 0's. */
9104 includes_rldic_lshift_p (rtx shiftop, rtx andop)
9106 if (GET_CODE (andop) == CONST_INT)
9108 HOST_WIDE_INT c, lsb, shift_mask;
9110 c = INTVAL (andop);
9111 if (c == 0 || c == ~0)
9112 return 0;
9114 shift_mask = ~0;
9115 shift_mask <<= INTVAL (shiftop);
9117 /* Find the least significant one bit. */
9118 lsb = c & -c;
9120 /* It must coincide with the LSB of the shift mask. */
9121 if (-lsb != shift_mask)
9122 return 0;
9124 /* Invert to look for the next transition (if any). */
9125 c = ~c;
9127 /* Remove the low group of ones (originally low group of zeros). */
9128 c &= -lsb;
9130 /* Again find the lsb, and check we have all 1's above. */
9131 lsb = c & -c;
9132 return c == -lsb;
9134 else if (GET_CODE (andop) == CONST_DOUBLE
9135 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
9137 HOST_WIDE_INT low, high, lsb;
9138 HOST_WIDE_INT shift_mask_low, shift_mask_high;
9140 low = CONST_DOUBLE_LOW (andop);
9141 if (HOST_BITS_PER_WIDE_INT < 64)
9142 high = CONST_DOUBLE_HIGH (andop);
9144 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
9145 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
9146 return 0;
9148 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
9150 shift_mask_high = ~0;
9151 if (INTVAL (shiftop) > 32)
9152 shift_mask_high <<= INTVAL (shiftop) - 32;
9154 lsb = high & -high;
9156 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
9157 return 0;
9159 high = ~high;
9160 high &= -lsb;
9162 lsb = high & -high;
9163 return high == -lsb;
9166 shift_mask_low = ~0;
9167 shift_mask_low <<= INTVAL (shiftop);
9169 lsb = low & -low;
9171 if (-lsb != shift_mask_low)
9172 return 0;
9174 if (HOST_BITS_PER_WIDE_INT < 64)
9175 high = ~high;
9176 low = ~low;
9177 low &= -lsb;
9179 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
9181 lsb = high & -high;
9182 return high == -lsb;
9185 lsb = low & -low;
9186 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
9188 else
9189 return 0;
9192 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
9193 to perform a left shift. It must have SHIFTOP or more least
9194 significant 0's, with the remainder of the word 1's. */
9197 includes_rldicr_lshift_p (rtx shiftop, rtx andop)
9199 if (GET_CODE (andop) == CONST_INT)
9201 HOST_WIDE_INT c, lsb, shift_mask;
9203 shift_mask = ~0;
9204 shift_mask <<= INTVAL (shiftop);
9205 c = INTVAL (andop);
9207 /* Find the least significant one bit. */
9208 lsb = c & -c;
9210 /* It must be covered by the shift mask.
9211 This test also rejects c == 0. */
9212 if ((lsb & shift_mask) == 0)
9213 return 0;
9215 /* Check we have all 1's above the transition, and reject all 1's. */
9216 return c == -lsb && lsb != 1;
9218 else if (GET_CODE (andop) == CONST_DOUBLE
9219 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
9221 HOST_WIDE_INT low, lsb, shift_mask_low;
9223 low = CONST_DOUBLE_LOW (andop);
9225 if (HOST_BITS_PER_WIDE_INT < 64)
9227 HOST_WIDE_INT high, shift_mask_high;
9229 high = CONST_DOUBLE_HIGH (andop);
9231 if (low == 0)
9233 shift_mask_high = ~0;
9234 if (INTVAL (shiftop) > 32)
9235 shift_mask_high <<= INTVAL (shiftop) - 32;
9237 lsb = high & -high;
9239 if ((lsb & shift_mask_high) == 0)
9240 return 0;
9242 return high == -lsb;
9244 if (high != ~0)
9245 return 0;
9248 shift_mask_low = ~0;
9249 shift_mask_low <<= INTVAL (shiftop);
9251 lsb = low & -low;
9253 if ((lsb & shift_mask_low) == 0)
9254 return 0;
9256 return low == -lsb && lsb != 1;
9258 else
9259 return 0;
9262 /* Return 1 if operands will generate a valid arguments to rlwimi
9263 instruction for insert with right shift in 64-bit mode. The mask may
9264 not start on the first bit or stop on the last bit because wrap-around
9265 effects of instruction do not correspond to semantics of RTL insn. */
9268 insvdi_rshift_rlwimi_p (rtx sizeop, rtx startop, rtx shiftop)
9270 if (INTVAL (startop) < 64
9271 && INTVAL (startop) > 32
9272 && (INTVAL (sizeop) + INTVAL (startop) < 64)
9273 && (INTVAL (sizeop) + INTVAL (startop) > 33)
9274 && (INTVAL (sizeop) + INTVAL (startop) + INTVAL (shiftop) < 96)
9275 && (INTVAL (sizeop) + INTVAL (startop) + INTVAL (shiftop) >= 64)
9276 && (64 - (INTVAL (shiftop) & 63)) >= INTVAL (sizeop))
9277 return 1;
9279 return 0;
9282 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
9283 for lfq and stfq insns iff the registers are hard registers. */
9286 registers_ok_for_quad_peep (rtx reg1, rtx reg2)
9288 /* We might have been passed a SUBREG. */
9289 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
9290 return 0;
9292 /* We might have been passed non floating point registers. */
9293 if (!FP_REGNO_P (REGNO (reg1))
9294 || !FP_REGNO_P (REGNO (reg2)))
9295 return 0;
9297 return (REGNO (reg1) == REGNO (reg2) - 1);
9300 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
9301 addr1 and addr2 must be in consecutive memory locations
9302 (addr2 == addr1 + 8). */
9305 mems_ok_for_quad_peep (rtx mem1, rtx mem2)
9307 rtx addr1, addr2;
9308 unsigned int reg1;
9309 int offset1;
9311 /* The mems cannot be volatile. */
9312 if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
9313 return 0;
9315 addr1 = XEXP (mem1, 0);
9316 addr2 = XEXP (mem2, 0);
9318 /* Extract an offset (if used) from the first addr. */
9319 if (GET_CODE (addr1) == PLUS)
9321 /* If not a REG, return zero. */
9322 if (GET_CODE (XEXP (addr1, 0)) != REG)
9323 return 0;
9324 else
9326 reg1 = REGNO (XEXP (addr1, 0));
9327 /* The offset must be constant! */
9328 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
9329 return 0;
9330 offset1 = INTVAL (XEXP (addr1, 1));
9333 else if (GET_CODE (addr1) != REG)
9334 return 0;
9335 else
9337 reg1 = REGNO (addr1);
9338 /* This was a simple (mem (reg)) expression. Offset is 0. */
9339 offset1 = 0;
9342 /* Make sure the second address is a (mem (plus (reg) (const_int)))
9343 or if it is (mem (reg)) then make sure that offset1 is -8 and the same
9344 register as addr1. */
9345 if (offset1 == -8 && GET_CODE (addr2) == REG && reg1 == REGNO (addr2))
9346 return 1;
9347 if (GET_CODE (addr2) != PLUS)
9348 return 0;
9350 if (GET_CODE (XEXP (addr2, 0)) != REG
9351 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
9352 return 0;
9354 if (reg1 != REGNO (XEXP (addr2, 0)))
9355 return 0;
9357 /* The offset for the second addr must be 8 more than the first addr. */
9358 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
9359 return 0;
9361 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
9362 instructions. */
9363 return 1;
9366 /* Return the register class of a scratch register needed to copy IN into
9367 or out of a register in CLASS in MODE. If it can be done directly,
9368 NO_REGS is returned. */
9370 enum reg_class
9371 secondary_reload_class (enum reg_class class,
9372 enum machine_mode mode ATTRIBUTE_UNUSED,
9373 rtx in)
9375 int regno;
9377 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
9378 #if TARGET_MACHO
9379 && MACHOPIC_INDIRECT
9380 #endif
9383 /* We cannot copy a symbolic operand directly into anything
9384 other than BASE_REGS for TARGET_ELF. So indicate that a
9385 register from BASE_REGS is needed as an intermediate
9386 register.
9388 On Darwin, pic addresses require a load from memory, which
9389 needs a base register. */
9390 if (class != BASE_REGS
9391 && (GET_CODE (in) == SYMBOL_REF
9392 || GET_CODE (in) == HIGH
9393 || GET_CODE (in) == LABEL_REF
9394 || GET_CODE (in) == CONST))
9395 return BASE_REGS;
9398 if (GET_CODE (in) == REG)
9400 regno = REGNO (in);
9401 if (regno >= FIRST_PSEUDO_REGISTER)
9403 regno = true_regnum (in);
9404 if (regno >= FIRST_PSEUDO_REGISTER)
9405 regno = -1;
9408 else if (GET_CODE (in) == SUBREG)
9410 regno = true_regnum (in);
9411 if (regno >= FIRST_PSEUDO_REGISTER)
9412 regno = -1;
9414 else
9415 regno = -1;
9417 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
9418 into anything. */
9419 if (class == GENERAL_REGS || class == BASE_REGS
9420 || (regno >= 0 && INT_REGNO_P (regno)))
9421 return NO_REGS;
9423 /* Constants, memory, and FP registers can go into FP registers. */
9424 if ((regno == -1 || FP_REGNO_P (regno))
9425 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
9426 return NO_REGS;
9428 /* Memory, and AltiVec registers can go into AltiVec registers. */
9429 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
9430 && class == ALTIVEC_REGS)
9431 return NO_REGS;
9433 /* We can copy among the CR registers. */
9434 if ((class == CR_REGS || class == CR0_REGS)
9435 && regno >= 0 && CR_REGNO_P (regno))
9436 return NO_REGS;
9438 /* Otherwise, we need GENERAL_REGS. */
9439 return GENERAL_REGS;
9442 /* Given a comparison operation, return the bit number in CCR to test. We
9443 know this is a valid comparison.
9445 SCC_P is 1 if this is for an scc. That means that %D will have been
9446 used instead of %C, so the bits will be in different places.
9448 Return -1 if OP isn't a valid comparison for some reason. */
9451 ccr_bit (rtx op, int scc_p)
9453 enum rtx_code code = GET_CODE (op);
9454 enum machine_mode cc_mode;
9455 int cc_regnum;
9456 int base_bit;
9457 rtx reg;
9459 if (!COMPARISON_P (op))
9460 return -1;
9462 reg = XEXP (op, 0);
9464 if (GET_CODE (reg) != REG
9465 || ! CR_REGNO_P (REGNO (reg)))
9466 abort ();
9468 cc_mode = GET_MODE (reg);
9469 cc_regnum = REGNO (reg);
9470 base_bit = 4 * (cc_regnum - CR0_REGNO);
9472 validate_condition_mode (code, cc_mode);
9474 /* When generating a sCOND operation, only positive conditions are
9475 allowed. */
9476 if (scc_p && code != EQ && code != GT && code != LT && code != UNORDERED
9477 && code != GTU && code != LTU)
9478 abort ();
9480 switch (code)
9482 case NE:
9483 return scc_p ? base_bit + 3 : base_bit + 2;
9484 case EQ:
9485 return base_bit + 2;
9486 case GT: case GTU: case UNLE:
9487 return base_bit + 1;
9488 case LT: case LTU: case UNGE:
9489 return base_bit;
9490 case ORDERED: case UNORDERED:
9491 return base_bit + 3;
9493 case GE: case GEU:
9494 /* If scc, we will have done a cror to put the bit in the
9495 unordered position. So test that bit. For integer, this is ! LT
9496 unless this is an scc insn. */
9497 return scc_p ? base_bit + 3 : base_bit;
9499 case LE: case LEU:
9500 return scc_p ? base_bit + 3 : base_bit + 1;
9502 default:
9503 abort ();
9507 /* Return the GOT register. */
9509 struct rtx_def *
9510 rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
9512 /* The second flow pass currently (June 1999) can't update
9513 regs_ever_live without disturbing other parts of the compiler, so
9514 update it here to make the prolog/epilogue code happy. */
9515 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
9516 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
9518 current_function_uses_pic_offset_table = 1;
9520 return pic_offset_table_rtx;
9523 /* Function to init struct machine_function.
9524 This will be called, via a pointer variable,
9525 from push_function_context. */
9527 static struct machine_function *
9528 rs6000_init_machine_status (void)
9530 return ggc_alloc_cleared (sizeof (machine_function));
/* These macros test for integers and extract the low-order bits.
   INT_P accepts a VOIDmode CONST_INT or CONST_DOUBLE; INT_LOWPART
   yields its low host word.  */
#define INT_P(X)  \
((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE)	\
 && GET_MODE (X) == VOIDmode)

#define INT_LOWPART(X) \
  (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
9542 extract_MB (rtx op)
9544 int i;
9545 unsigned long val = INT_LOWPART (op);
9547 /* If the high bit is zero, the value is the first 1 bit we find
9548 from the left. */
9549 if ((val & 0x80000000) == 0)
9551 if ((val & 0xffffffff) == 0)
9552 abort ();
9554 i = 1;
9555 while (((val <<= 1) & 0x80000000) == 0)
9556 ++i;
9557 return i;
9560 /* If the high bit is set and the low bit is not, or the mask is all
9561 1's, the value is zero. */
9562 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
9563 return 0;
9565 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
9566 from the right. */
9567 i = 31;
9568 while (((val >>= 1) & 1) != 0)
9569 --i;
9571 return i;
9575 extract_ME (rtx op)
9577 int i;
9578 unsigned long val = INT_LOWPART (op);
9580 /* If the low bit is zero, the value is the first 1 bit we find from
9581 the right. */
9582 if ((val & 1) == 0)
9584 if ((val & 0xffffffff) == 0)
9585 abort ();
9587 i = 30;
9588 while (((val >>= 1) & 1) == 0)
9589 --i;
9591 return i;
9594 /* If the low bit is set and the high bit is not, or the mask is all
9595 1's, the value is 31. */
9596 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
9597 return 31;
9599 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
9600 from the left. */
9601 i = 0;
9602 while (((val <<= 1) & 0x80000000) != 0)
9603 ++i;
9605 return i;
/* Locate some local-dynamic symbol still in use by this function
   so that we can print its name in some tls_ld pattern.  */

static const char *
rs6000_get_some_local_dynamic_name (void)
{
  rtx insn;

  /* Reuse the name cached by an earlier call or an earlier walk.  */
  if (cfun->machine->some_ld_name)
    return cfun->machine->some_ld_name;

  /* Walk every insn; the callback stores the symbol's name into
     cfun->machine->some_ld_name and returns nonzero on a hit.  */
  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& for_each_rtx (&PATTERN (insn),
			 rs6000_get_some_local_dynamic_name_1, 0))
      return cfun->machine->some_ld_name;

  /* Callers only ask when a local-dynamic reference exists.  */
  abort ();
}
9628 /* Helper function for rs6000_get_some_local_dynamic_name. */
9630 static int
9631 rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
9633 rtx x = *px;
9635 if (GET_CODE (x) == SYMBOL_REF)
9637 const char *str = XSTR (x, 0);
9638 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
9640 cfun->machine->some_ld_name = str;
9641 return 1;
9645 return 0;
/* Print an operand.  Recognize special options, documented below.  */

#if TARGET_ELF
/* On ELF the small-data relocation and base register depend on the
   sdata flavor: EABI uses "sda21" with r0, otherwise "sdarel" with r13.  */
#define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
#define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
#else
#define SMALL_DATA_RELOC "sda21"
#define SMALL_DATA_REG 0
#endif
9658 void
9659 print_operand (FILE *file, rtx x, int code)
9661 int i;
9662 HOST_WIDE_INT val;
9663 unsigned HOST_WIDE_INT uval;
9665 switch (code)
9667 case '.':
9668 /* Write out an instruction after the call which may be replaced
9669 with glue code by the loader. This depends on the AIX version. */
9670 asm_fprintf (file, RS6000_CALL_GLUE);
9671 return;
9673 /* %a is output_address. */
9675 case 'A':
9676 /* If X is a constant integer whose low-order 5 bits are zero,
9677 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
9678 in the AIX assembler where "sri" with a zero shift count
9679 writes a trash instruction. */
9680 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
9681 putc ('l', file);
9682 else
9683 putc ('r', file);
9684 return;
9686 case 'b':
9687 /* If constant, low-order 16 bits of constant, unsigned.
9688 Otherwise, write normally. */
9689 if (INT_P (x))
9690 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
9691 else
9692 print_operand (file, x, 0);
9693 return;
9695 case 'B':
9696 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
9697 for 64-bit mask direction. */
9698 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
9699 return;
9701 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
9702 output_operand. */
9704 case 'c':
9705 /* X is a CR register. Print the number of the GT bit of the CR. */
9706 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9707 output_operand_lossage ("invalid %%E value");
9708 else
9709 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
9710 return;
9712 case 'D':
9713 /* Like 'J' but get to the EQ bit. */
9714 if (GET_CODE (x) != REG)
9715 abort ();
9717 /* Bit 1 is EQ bit. */
9718 i = 4 * (REGNO (x) - CR0_REGNO) + 2;
9720 /* If we want bit 31, write a shift count of zero, not 32. */
9721 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9722 return;
9724 case 'E':
9725 /* X is a CR register. Print the number of the EQ bit of the CR */
9726 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9727 output_operand_lossage ("invalid %%E value");
9728 else
9729 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
9730 return;
9732 case 'f':
9733 /* X is a CR register. Print the shift count needed to move it
9734 to the high-order four bits. */
9735 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9736 output_operand_lossage ("invalid %%f value");
9737 else
9738 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
9739 return;
9741 case 'F':
9742 /* Similar, but print the count for the rotate in the opposite
9743 direction. */
9744 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9745 output_operand_lossage ("invalid %%F value");
9746 else
9747 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
9748 return;
9750 case 'G':
9751 /* X is a constant integer. If it is negative, print "m",
9752 otherwise print "z". This is to make an aze or ame insn. */
9753 if (GET_CODE (x) != CONST_INT)
9754 output_operand_lossage ("invalid %%G value");
9755 else if (INTVAL (x) >= 0)
9756 putc ('z', file);
9757 else
9758 putc ('m', file);
9759 return;
9761 case 'h':
9762 /* If constant, output low-order five bits. Otherwise, write
9763 normally. */
9764 if (INT_P (x))
9765 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
9766 else
9767 print_operand (file, x, 0);
9768 return;
9770 case 'H':
9771 /* If constant, output low-order six bits. Otherwise, write
9772 normally. */
9773 if (INT_P (x))
9774 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
9775 else
9776 print_operand (file, x, 0);
9777 return;
9779 case 'I':
9780 /* Print `i' if this is a constant, else nothing. */
9781 if (INT_P (x))
9782 putc ('i', file);
9783 return;
9785 case 'j':
9786 /* Write the bit number in CCR for jump. */
9787 i = ccr_bit (x, 0);
9788 if (i == -1)
9789 output_operand_lossage ("invalid %%j code");
9790 else
9791 fprintf (file, "%d", i);
9792 return;
9794 case 'J':
9795 /* Similar, but add one for shift count in rlinm for scc and pass
9796 scc flag to `ccr_bit'. */
9797 i = ccr_bit (x, 1);
9798 if (i == -1)
9799 output_operand_lossage ("invalid %%J code");
9800 else
9801 /* If we want bit 31, write a shift count of zero, not 32. */
9802 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9803 return;
9805 case 'k':
9806 /* X must be a constant. Write the 1's complement of the
9807 constant. */
9808 if (! INT_P (x))
9809 output_operand_lossage ("invalid %%k value");
9810 else
9811 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
9812 return;
9814 case 'K':
9815 /* X must be a symbolic constant on ELF. Write an
9816 expression suitable for an 'addi' that adds in the low 16
9817 bits of the MEM. */
9818 if (GET_CODE (x) != CONST)
9820 print_operand_address (file, x);
9821 fputs ("@l", file);
9823 else
9825 if (GET_CODE (XEXP (x, 0)) != PLUS
9826 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
9827 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
9828 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
9829 output_operand_lossage ("invalid %%K value");
9830 print_operand_address (file, XEXP (XEXP (x, 0), 0));
9831 fputs ("@l", file);
9832 /* For GNU as, there must be a non-alphanumeric character
9833 between 'l' and the number. The '-' is added by
9834 print_operand() already. */
9835 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
9836 fputs ("+", file);
9837 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
9839 return;
9841 /* %l is output_asm_label. */
9843 case 'L':
9844 /* Write second word of DImode or DFmode reference. Works on register
9845 or non-indexed memory only. */
9846 if (GET_CODE (x) == REG)
9847 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
9848 else if (GET_CODE (x) == MEM)
9850 /* Handle possible auto-increment. Since it is pre-increment and
9851 we have already done it, we can just use an offset of word. */
9852 if (GET_CODE (XEXP (x, 0)) == PRE_INC
9853 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
9854 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
9855 UNITS_PER_WORD));
9856 else
9857 output_address (XEXP (adjust_address_nv (x, SImode,
9858 UNITS_PER_WORD),
9859 0));
9861 if (small_data_operand (x, GET_MODE (x)))
9862 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9863 reg_names[SMALL_DATA_REG]);
9865 return;
9867 case 'm':
9868 /* MB value for a mask operand. */
9869 if (! mask_operand (x, SImode))
9870 output_operand_lossage ("invalid %%m value");
9872 fprintf (file, "%d", extract_MB (x));
9873 return;
9875 case 'M':
9876 /* ME value for a mask operand. */
9877 if (! mask_operand (x, SImode))
9878 output_operand_lossage ("invalid %%M value");
9880 fprintf (file, "%d", extract_ME (x));
9881 return;
9883 /* %n outputs the negative of its operand. */
9885 case 'N':
9886 /* Write the number of elements in the vector times 4. */
9887 if (GET_CODE (x) != PARALLEL)
9888 output_operand_lossage ("invalid %%N value");
9889 else
9890 fprintf (file, "%d", XVECLEN (x, 0) * 4);
9891 return;
9893 case 'O':
9894 /* Similar, but subtract 1 first. */
9895 if (GET_CODE (x) != PARALLEL)
9896 output_operand_lossage ("invalid %%O value");
9897 else
9898 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
9899 return;
9901 case 'p':
9902 /* X is a CONST_INT that is a power of two. Output the logarithm. */
9903 if (! INT_P (x)
9904 || INT_LOWPART (x) < 0
9905 || (i = exact_log2 (INT_LOWPART (x))) < 0)
9906 output_operand_lossage ("invalid %%p value");
9907 else
9908 fprintf (file, "%d", i);
9909 return;
9911 case 'P':
9912 /* The operand must be an indirect memory reference. The result
9913 is the register name. */
9914 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
9915 || REGNO (XEXP (x, 0)) >= 32)
9916 output_operand_lossage ("invalid %%P value");
9917 else
9918 fprintf (file, "%s", reg_names[REGNO (XEXP (x, 0))]);
9919 return;
9921 case 'q':
9922 /* This outputs the logical code corresponding to a boolean
9923 expression. The expression may have one or both operands
9924 negated (if one, only the first one). For condition register
9925 logical operations, it will also treat the negated
9926 CR codes as NOTs, but not handle NOTs of them. */
9928 const char *const *t = 0;
9929 const char *s;
9930 enum rtx_code code = GET_CODE (x);
9931 static const char * const tbl[3][3] = {
9932 { "and", "andc", "nor" },
9933 { "or", "orc", "nand" },
9934 { "xor", "eqv", "xor" } };
9936 if (code == AND)
9937 t = tbl[0];
9938 else if (code == IOR)
9939 t = tbl[1];
9940 else if (code == XOR)
9941 t = tbl[2];
9942 else
9943 output_operand_lossage ("invalid %%q value");
9945 if (GET_CODE (XEXP (x, 0)) != NOT)
9946 s = t[0];
9947 else
9949 if (GET_CODE (XEXP (x, 1)) == NOT)
9950 s = t[2];
9951 else
9952 s = t[1];
9955 fputs (s, file);
9957 return;
9959 case 'Q':
9960 if (TARGET_MFCRF)
9961 fputc (',', file);
9962 /* FALLTHRU */
9963 else
9964 return;
9966 case 'R':
9967 /* X is a CR register. Print the mask for `mtcrf'. */
9968 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9969 output_operand_lossage ("invalid %%R value");
9970 else
9971 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
9972 return;
9974 case 's':
9975 /* Low 5 bits of 32 - value */
9976 if (! INT_P (x))
9977 output_operand_lossage ("invalid %%s value");
9978 else
9979 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
9980 return;
9982 case 'S':
9983 /* PowerPC64 mask position. All 0's is excluded.
9984 CONST_INT 32-bit mask is considered sign-extended so any
9985 transition must occur within the CONST_INT, not on the boundary. */
9986 if (! mask64_operand (x, DImode))
9987 output_operand_lossage ("invalid %%S value");
9989 uval = INT_LOWPART (x);
9991 if (uval & 1) /* Clear Left */
9993 #if HOST_BITS_PER_WIDE_INT > 64
9994 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
9995 #endif
9996 i = 64;
9998 else /* Clear Right */
10000 uval = ~uval;
10001 #if HOST_BITS_PER_WIDE_INT > 64
10002 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
10003 #endif
10004 i = 63;
10006 while (uval != 0)
10007 --i, uval >>= 1;
10008 if (i < 0)
10009 abort ();
10010 fprintf (file, "%d", i);
10011 return;
10013 case 't':
10014 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
10015 if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
10016 abort ();
10018 /* Bit 3 is OV bit. */
10019 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
10021 /* If we want bit 31, write a shift count of zero, not 32. */
10022 fprintf (file, "%d", i == 31 ? 0 : i + 1);
10023 return;
10025 case 'T':
10026 /* Print the symbolic name of a branch target register. */
10027 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
10028 && REGNO (x) != COUNT_REGISTER_REGNUM))
10029 output_operand_lossage ("invalid %%T value");
10030 else if (REGNO (x) == LINK_REGISTER_REGNUM)
10031 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
10032 else
10033 fputs ("ctr", file);
10034 return;
10036 case 'u':
10037 /* High-order 16 bits of constant for use in unsigned operand. */
10038 if (! INT_P (x))
10039 output_operand_lossage ("invalid %%u value");
10040 else
10041 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
10042 (INT_LOWPART (x) >> 16) & 0xffff);
10043 return;
10045 case 'v':
10046 /* High-order 16 bits of constant for use in signed operand. */
10047 if (! INT_P (x))
10048 output_operand_lossage ("invalid %%v value");
10049 else
10050 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
10051 (INT_LOWPART (x) >> 16) & 0xffff);
10052 return;
10054 case 'U':
10055 /* Print `u' if this has an auto-increment or auto-decrement. */
10056 if (GET_CODE (x) == MEM
10057 && (GET_CODE (XEXP (x, 0)) == PRE_INC
10058 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
10059 putc ('u', file);
10060 return;
10062 case 'V':
10063 /* Print the trap code for this operand. */
10064 switch (GET_CODE (x))
10066 case EQ:
10067 fputs ("eq", file); /* 4 */
10068 break;
10069 case NE:
10070 fputs ("ne", file); /* 24 */
10071 break;
10072 case LT:
10073 fputs ("lt", file); /* 16 */
10074 break;
10075 case LE:
10076 fputs ("le", file); /* 20 */
10077 break;
10078 case GT:
10079 fputs ("gt", file); /* 8 */
10080 break;
10081 case GE:
10082 fputs ("ge", file); /* 12 */
10083 break;
10084 case LTU:
10085 fputs ("llt", file); /* 2 */
10086 break;
10087 case LEU:
10088 fputs ("lle", file); /* 6 */
10089 break;
10090 case GTU:
10091 fputs ("lgt", file); /* 1 */
10092 break;
10093 case GEU:
10094 fputs ("lge", file); /* 5 */
10095 break;
10096 default:
10097 abort ();
10099 break;
10101 case 'w':
10102 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
10103 normally. */
10104 if (INT_P (x))
10105 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
10106 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
10107 else
10108 print_operand (file, x, 0);
10109 return;
10111 case 'W':
10112 /* MB value for a PowerPC64 rldic operand. */
10113 val = (GET_CODE (x) == CONST_INT
10114 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
10116 if (val < 0)
10117 i = -1;
10118 else
10119 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
10120 if ((val <<= 1) < 0)
10121 break;
10123 #if HOST_BITS_PER_WIDE_INT == 32
10124 if (GET_CODE (x) == CONST_INT && i >= 0)
10125 i += 32; /* zero-extend high-part was all 0's */
10126 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
10128 val = CONST_DOUBLE_LOW (x);
10130 if (val == 0)
10131 abort ();
10132 else if (val < 0)
10133 --i;
10134 else
10135 for ( ; i < 64; i++)
10136 if ((val <<= 1) < 0)
10137 break;
10139 #endif
10141 fprintf (file, "%d", i + 1);
10142 return;
10144 case 'X':
10145 if (GET_CODE (x) == MEM
10146 && legitimate_indexed_address_p (XEXP (x, 0), 0))
10147 putc ('x', file);
10148 return;
10150 case 'Y':
10151 /* Like 'L', for third word of TImode */
10152 if (GET_CODE (x) == REG)
10153 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
10154 else if (GET_CODE (x) == MEM)
10156 if (GET_CODE (XEXP (x, 0)) == PRE_INC
10157 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
10158 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
10159 else
10160 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
10161 if (small_data_operand (x, GET_MODE (x)))
10162 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10163 reg_names[SMALL_DATA_REG]);
10165 return;
10167 case 'z':
10168 /* X is a SYMBOL_REF. Write out the name preceded by a
10169 period and without any trailing data in brackets. Used for function
10170 names. If we are configured for System V (or the embedded ABI) on
10171 the PowerPC, do not emit the period, since those systems do not use
10172 TOCs and the like. */
10173 if (GET_CODE (x) != SYMBOL_REF)
10174 abort ();
10176 /* Mark the decl as referenced so that cgraph will output the function. */
10177 if (SYMBOL_REF_DECL (x))
10178 mark_decl_referenced (SYMBOL_REF_DECL (x));
10180 if (XSTR (x, 0)[0] != '.')
10182 switch (DEFAULT_ABI)
10184 default:
10185 abort ();
10187 case ABI_AIX:
10188 putc ('.', file);
10189 break;
10191 case ABI_V4:
10192 case ABI_DARWIN:
10193 break;
10196 /* For macho, we need to check it see if we need a stub. */
10197 if (TARGET_MACHO)
10199 const char *name = XSTR (x, 0);
10200 #if TARGET_MACHO
10201 if (MACHOPIC_INDIRECT
10202 && machopic_classify_symbol (x) == MACHOPIC_UNDEFINED_FUNCTION)
10203 name = machopic_indirection_name (x, /*stub_p=*/true);
10204 #endif
10205 assemble_name (file, name);
10207 else if (TARGET_AIX)
10208 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
10209 else
10210 assemble_name (file, XSTR (x, 0));
10211 return;
10213 case 'Z':
10214 /* Like 'L', for last word of TImode. */
10215 if (GET_CODE (x) == REG)
10216 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
10217 else if (GET_CODE (x) == MEM)
10219 if (GET_CODE (XEXP (x, 0)) == PRE_INC
10220 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
10221 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
10222 else
10223 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
10224 if (small_data_operand (x, GET_MODE (x)))
10225 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
10226 reg_names[SMALL_DATA_REG]);
10228 return;
10230 /* Print AltiVec or SPE memory operand. */
10231 case 'y':
10233 rtx tmp;
10235 if (GET_CODE (x) != MEM)
10236 abort ();
10238 tmp = XEXP (x, 0);
10240 if (TARGET_E500)
10242 /* Handle [reg]. */
10243 if (GET_CODE (tmp) == REG)
10245 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
10246 break;
10248 /* Handle [reg+UIMM]. */
10249 else if (GET_CODE (tmp) == PLUS &&
10250 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
10252 int x;
10254 if (GET_CODE (XEXP (tmp, 0)) != REG)
10255 abort ();
10257 x = INTVAL (XEXP (tmp, 1));
10258 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
10259 break;
10262 /* Fall through. Must be [reg+reg]. */
10264 if (GET_CODE (tmp) == REG)
10265 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
10266 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
10268 if (REGNO (XEXP (tmp, 0)) == 0)
10269 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
10270 reg_names[ REGNO (XEXP (tmp, 0)) ]);
10271 else
10272 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
10273 reg_names[ REGNO (XEXP (tmp, 1)) ]);
10275 else
10276 abort ();
10277 break;
10280 case 0:
10281 if (GET_CODE (x) == REG)
10282 fprintf (file, "%s", reg_names[REGNO (x)]);
10283 else if (GET_CODE (x) == MEM)
10285 /* We need to handle PRE_INC and PRE_DEC here, since we need to
10286 know the width from the mode. */
10287 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
10288 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
10289 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
10290 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
10291 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
10292 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
10293 else
10294 output_address (XEXP (x, 0));
10296 else
10297 output_addr_const (file, x);
10298 return;
10300 case '&':
10301 assemble_name (file, rs6000_get_some_local_dynamic_name ());
10302 return;
10304 default:
10305 output_operand_lossage ("invalid %%xn code");
/* Print the address of an operand.  */

void
print_operand_address (FILE *file, rtx x)
{
  /* Plain register: register-indirect with a zero displacement.  */
  if (GET_CODE (x) == REG)
    fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
  else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
	   || GET_CODE (x) == LABEL_REF)
    {
      output_addr_const (file, x);
      if (small_data_operand (x, GET_MODE (x)))
	fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		 reg_names[SMALL_DATA_REG]);
      else if (TARGET_TOC)
	abort ();
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
    {
      /* Indexed address.  If the first register is 0, swap the operand
	 order (presumably because RA=0 reads as literal zero in PowerPC
	 indexed forms -- confirm against the md patterns).  */
      if (REGNO (XEXP (x, 0)) == 0)
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
		 reg_names[ REGNO (XEXP (x, 0)) ]);
      else
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
		 reg_names[ REGNO (XEXP (x, 1)) ]);
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
    /* Register plus constant displacement: "disp(reg)".  */
    fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
	     INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
#if TARGET_ELF
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
	   && CONSTANT_P (XEXP (x, 1)))
    {
      /* ELF low-part relocation: "sym@l(reg)".  */
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
#if TARGET_MACHO
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
	   && CONSTANT_P (XEXP (x, 1)))
    {
      /* Mach-O low-part relocation: "lo16(sym)(reg)".  */
      fprintf (file, "lo16(");
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
  else if (legitimate_constant_pool_address_p (x))
    {
      if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
	{
	  rtx contains_minus = XEXP (x, 1);
	  rtx minus, symref;
	  const char *name;

	  /* Find the (minus (sym) (toc)) buried in X, and temporarily
	     turn it into (sym) for output_addr_const.  */
	  while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
	    contains_minus = XEXP (contains_minus, 0);

	  minus = XEXP (contains_minus, 0);
	  symref = XEXP (minus, 0);
	  XEXP (contains_minus, 0) = symref;
	  if (TARGET_ELF)
	    {
	      /* On ELF, temporarily rename the symbol to "name@toc" so
		 output_addr_const emits the TOC relocation suffix.  */
	      char *newname;

	      name = XSTR (symref, 0);
	      newname = alloca (strlen (name) + sizeof ("@toc"));
	      strcpy (newname, name);
	      strcat (newname, "@toc");
	      XSTR (symref, 0) = newname;
	    }
	  output_addr_const (file, XEXP (x, 1));
	  /* Restore the shared RTL exactly as it was before printing.  */
	  if (TARGET_ELF)
	    XSTR (symref, 0) = name;
	  XEXP (contains_minus, 0) = minus;
	}
      else
	output_addr_const (file, XEXP (x, 1));

      fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
    }
  else
    abort ();
}
/* Target hook for assembling integer objects.  The PowerPC version has
   to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
   is defined.  It also needs to handle DI-mode objects on 64-bit
   targets.  */

static bool
rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
{
#ifdef RELOCATABLE_NEEDS_FIXUP
  /* Special handling for SI values.  */
  if (size == 4 && aligned_p)
    {
      extern int in_toc_section (void);
      /* Guards against re-entering this path while we are emitting the
	 fixup entry below.  */
      static int recurse = 0;

      /* For -mrelocatable, we mark all addresses that need to be fixed up
	 in the .fixup section.  */
      if (TARGET_RELOCATABLE
	  && !in_toc_section ()
	  && !in_text_section ()
	  && !in_unlikely_text_section ()
	  && !recurse
	  && GET_CODE (x) != CONST_INT
	  && GET_CODE (x) != CONST_DOUBLE
	  && CONSTANT_P (x))
	{
	  char buf[256];

	  recurse = 1;
	  /* Emit a local label for the word, the word itself tagged
	     @fixup, then record the label's address in .fixup.  */
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
	  fixuplabelno++;
	  ASM_OUTPUT_LABEL (asm_out_file, buf);
	  fprintf (asm_out_file, "\t.long\t(");
	  output_addr_const (asm_out_file, x);
	  fprintf (asm_out_file, ")@fixup\n");
	  fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
	  ASM_OUTPUT_ALIGN (asm_out_file, 2);
	  fprintf (asm_out_file, "\t.long\t");
	  assemble_name (asm_out_file, buf);
	  fprintf (asm_out_file, "\n\t.previous\n");
	  recurse = 0;
	  return true;
	}
      /* Remove initial .'s to turn a -mcall-aixdesc function
	 address into the address of the descriptor, not the function
	 itself.  */
      else if (GET_CODE (x) == SYMBOL_REF
	       && XSTR (x, 0)[0] == '.'
	       && DEFAULT_ABI == ABI_AIX)
	{
	  const char *name = XSTR (x, 0);
	  while (*name == '.')
	    name++;

	  fprintf (asm_out_file, "\t.long\t%s\n", name);
	  return true;
	}
    }
#endif /* RELOCATABLE_NEEDS_FIXUP */
  /* Everything else gets the generic handling.  */
  return default_assemble_integer (x, size, aligned_p);
}
#ifdef HAVE_GAS_HIDDEN
/* Emit an assembler directive to set symbol visibility for DECL to
   VISIBILITY_TYPE.  */

static void
rs6000_assemble_visibility (tree decl, int vis)
{
  /* Functions need to have their entry point symbol visibility set as
     well as their descriptor symbol visibility.  */
  if (DEFAULT_ABI == ABI_AIX && TREE_CODE (decl) == FUNCTION_DECL)
    {
      /* Indexed by the visibility enum value; entry 0 (default) is
	 unused here.  */
      static const char * const visibility_types[] = {
	NULL, "internal", "hidden", "protected"
      };

      const char *name, *type;

      name = ((* targetm.strip_name_encoding)
	      (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
      type = visibility_types[vis];

      /* Emit the directive for both NAME and the ".NAME" entry point.  */
      fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
      fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
    }
  else
    default_assemble_visibility (decl, vis);
}
#endif
10486 enum rtx_code
10487 rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
10489 /* Reversal of FP compares takes care -- an ordered compare
10490 becomes an unordered compare and vice versa. */
10491 if (mode == CCFPmode
10492 && (!flag_finite_math_only
10493 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
10494 || code == UNEQ || code == LTGT))
10495 return reverse_condition_maybe_unordered (code);
10496 else
10497 return reverse_condition (code);
/* Generate a compare for CODE.  Return a brand-new rtx that
   represents the result of the compare.  The operands compared are
   taken from the rs6000_compare_op0/rs6000_compare_op1 globals;
   rs6000_compare_fp_p selects a floating-point comparison.  */

static rtx
rs6000_generate_compare (enum rtx_code code)
{
  enum machine_mode comp_mode;
  rtx compare_result;

  /* Pick the CC mode: FP compares use CCFPmode, unsigned integer
     compares CCUNSmode, everything else plain CCmode.  */
  if (rs6000_compare_fp_p)
    comp_mode = CCFPmode;
  else if (code == GTU || code == LTU
	   || code == GEU || code == LEU)
    comp_mode = CCUNSmode;
  else
    comp_mode = CCmode;

  /* First, the compare.  */
  compare_result = gen_reg_rtx (comp_mode);

  /* SPE FP compare instructions on the GPRs.  Yuck!  */
  if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT)
      && rs6000_compare_fp_p)
    {
      rtx cmp, or1, or2, or_result, compare_result2;

      /* Note: The E500 comparison instructions set the GT bit (x +
	 1), on success.  This explains the mess.  */

      /* Under -ffinite-math-only the tst* forms are used; otherwise
	 the full cmp* forms.  */
      switch (code)
	{
	case EQ: case UNEQ: case NE: case LTGT:
	  cmp = flag_finite_math_only
	    ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1)
	    : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1);
	  break;
	case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
	  cmp = flag_finite_math_only
	    ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1)
	    : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1);
	  break;
	case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
	  cmp = flag_finite_math_only
	    ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1)
	    : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1);
	  break;
	default:
	  abort ();
	}

      /* Synthesize LE and GE from LT/GT || EQ.  */
      if (code == LE || code == GE || code == LEU || code == GEU)
	{
	  emit_insn (cmp);

	  switch (code)
	    {
	    case LE: code = LT; break;
	    case GE: code = GT; break;
	    case LEU: code = LT; break;
	    case GEU: code = GT; break;
	    default: abort ();
	    }

	  or1 = gen_reg_rtx (SImode);
	  or2 = gen_reg_rtx (SImode);
	  or_result = gen_reg_rtx (CCEQmode);
	  compare_result2 = gen_reg_rtx (CCFPmode);

	  /* Do the EQ.  */
	  cmp = flag_finite_math_only
	    ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
			       rs6000_compare_op1)
	    : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
			       rs6000_compare_op1);
	  emit_insn (cmp);

	  /* Both E500 compares signal success via the GT bit.  */
	  or1 = gen_rtx_GT (SImode, compare_result, const0_rtx);
	  or2 = gen_rtx_GT (SImode, compare_result2, const0_rtx);

	  /* OR them together.  */
	  cmp = gen_rtx_SET (VOIDmode, or_result,
			     gen_rtx_COMPARE (CCEQmode,
					      gen_rtx_IOR (SImode, or1, or2),
					      const_true_rtx));
	  compare_result = or_result;
	  code = EQ;
	}
      else
	{
	  /* Collapse the remaining codes onto the EQ/NE the caller
	     will test, matching the GT-bit success convention above.  */
	  if (code == NE || code == LTGT)
	    code = NE;
	  else
	    code = EQ;
	}

      emit_insn (cmp);
    }
  else
    emit_insn (gen_rtx_SET (VOIDmode, compare_result,
			    gen_rtx_COMPARE (comp_mode,
					     rs6000_compare_op0,
					     rs6000_compare_op1)));

  /* Some kinds of FP comparisons need an OR operation;
     under flag_finite_math_only we don't bother.  */
  if (rs6000_compare_fp_p
      && ! flag_finite_math_only
      && ! (TARGET_HARD_FLOAT && TARGET_E500 && !TARGET_FPRS)
      && (code == LE || code == GE
	  || code == UNEQ || code == LTGT
	  || code == UNGT || code == UNLT))
    {
      enum rtx_code or1, or2;
      rtx or1_rtx, or2_rtx, compare2_rtx;
      rtx or_result = gen_reg_rtx (CCEQmode);

      /* Split CODE into the two simpler conditions to be OR'ed.  */
      switch (code)
	{
	case LE: or1 = LT;  or2 = EQ;  break;
	case GE: or1 = GT;  or2 = EQ;  break;
	case UNEQ: or1 = UNORDERED;  or2 = EQ;  break;
	case LTGT: or1 = LT;  or2 = GT;  break;
	case UNGT: or1 = UNORDERED;  or2 = GT;  break;
	case UNLT: or1 = UNORDERED;  or2 = LT;  break;
	default: abort ();
	}
      validate_condition_mode (or1, comp_mode);
      validate_condition_mode (or2, comp_mode);
      or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
      or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
      compare2_rtx = gen_rtx_COMPARE (CCEQmode,
				      gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
				      const_true_rtx);
      emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));

      compare_result = or_result;
      code = EQ;
    }

  validate_condition_mode (code, GET_MODE (compare_result));

  return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
}
/* Emit the RTL for an sCOND pattern: set RESULT to 1 if comparison
   CODE of rs6000_compare_op0/op1 holds, else to 0.  */

void
rs6000_emit_sCOND (enum rtx_code code, rtx result)
{
  rtx condition_rtx;
  enum machine_mode op_mode;
  enum rtx_code cond_code;

  condition_rtx = rs6000_generate_compare (code);
  cond_code = GET_CODE (condition_rtx);

  /* E500 soft-FP compares leave only an EQ/NE against the CR EQ bit;
     move that bit into RESULT directly, flipping it for NE.  */
  if (TARGET_E500 && rs6000_compare_fp_p
      && !TARGET_FPRS && TARGET_HARD_FLOAT)
    {
      rtx t;

      PUT_MODE (condition_rtx, SImode);
      t = XEXP (condition_rtx, 0);

      if (cond_code != NE && cond_code != EQ)
	abort ();

      if (cond_code == NE)
	emit_insn (gen_e500_flip_eq_bit (t, t));

      emit_insn (gen_move_from_CR_eq_bit (result, t));
      return;
    }

  /* These condition codes cannot be extracted directly; compute the
     reversed condition into a CCEQ register and test it for EQ.  */
  if (cond_code == NE
      || cond_code == GE || cond_code == LE
      || cond_code == GEU || cond_code == LEU
      || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
    {
      rtx not_result = gen_reg_rtx (CCEQmode);
      rtx not_op, rev_cond_rtx;
      enum machine_mode cc_mode;

      cc_mode = GET_MODE (XEXP (condition_rtx, 0));

      rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode,
							       cond_code),
				     SImode, XEXP (condition_rtx, 0),
				     const0_rtx);
      not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
      emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
      condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
    }

  op_mode = GET_MODE (rs6000_compare_op0);
  if (op_mode == VOIDmode)
    op_mode = GET_MODE (rs6000_compare_op1);

  /* On 64-bit targets, DImode or FP compares produce a DImode scc
     value which may need converting to RESULT's mode.  */
  if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
    {
      PUT_MODE (condition_rtx, DImode);
      convert_move (result, condition_rtx, 0);
    }
  else
    {
      PUT_MODE (condition_rtx, SImode);
      emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
    }
}
10716 /* Emit a branch of kind CODE to location LOC. */
10718 void
10719 rs6000_emit_cbranch (enum rtx_code code, rtx loc)
10721 rtx condition_rtx, loc_ref;
10723 condition_rtx = rs6000_generate_compare (code);
10724 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
10725 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
10726 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
10727 loc_ref, pc_rtx)));
10730 /* Return the string to output a conditional branch to LABEL, which is
10731 the operand number of the label, or -1 if the branch is really a
10732 conditional return.
10734 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
10735 condition code register and its mode specifies what kind of
10736 comparison we made.
10738 REVERSED is nonzero if we should reverse the sense of the comparison.
10740 INSN is the insn. */
10742 char *
10743 output_cbranch (rtx op, const char *label, int reversed, rtx insn)
10745 static char string[64];
10746 enum rtx_code code = GET_CODE (op);
10747 rtx cc_reg = XEXP (op, 0);
10748 enum machine_mode mode = GET_MODE (cc_reg);
10749 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
10750 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
10751 int really_reversed = reversed ^ need_longbranch;
10752 char *s = string;
10753 const char *ccode;
10754 const char *pred;
10755 rtx note;
10757 validate_condition_mode (code, mode);
10759 /* Work out which way this really branches. We could use
10760 reverse_condition_maybe_unordered here always but this
10761 makes the resulting assembler clearer. */
10762 if (really_reversed)
10764 /* Reversal of FP compares takes care -- an ordered compare
10765 becomes an unordered compare and vice versa. */
10766 if (mode == CCFPmode)
10767 code = reverse_condition_maybe_unordered (code);
10768 else
10769 code = reverse_condition (code);
10772 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
10774 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
10775 to the GT bit. */
10776 if (code == EQ)
10777 /* Opposite of GT. */
10778 code = GT;
10779 else if (code == NE)
10780 code = UNLE;
10781 else
10782 abort ();
10785 switch (code)
10787 /* Not all of these are actually distinct opcodes, but
10788 we distinguish them for clarity of the resulting assembler. */
10789 case NE: case LTGT:
10790 ccode = "ne"; break;
10791 case EQ: case UNEQ:
10792 ccode = "eq"; break;
10793 case GE: case GEU:
10794 ccode = "ge"; break;
10795 case GT: case GTU: case UNGT:
10796 ccode = "gt"; break;
10797 case LE: case LEU:
10798 ccode = "le"; break;
10799 case LT: case LTU: case UNLT:
10800 ccode = "lt"; break;
10801 case UNORDERED: ccode = "un"; break;
10802 case ORDERED: ccode = "nu"; break;
10803 case UNGE: ccode = "nl"; break;
10804 case UNLE: ccode = "ng"; break;
10805 default:
10806 abort ();
10809 /* Maybe we have a guess as to how likely the branch is.
10810 The old mnemonics don't have a way to specify this information. */
10811 pred = "";
10812 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
10813 if (note != NULL_RTX)
10815 /* PROB is the difference from 50%. */
10816 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
10818 /* Only hint for highly probable/improbable branches on newer
10819 cpus as static prediction overrides processor dynamic
10820 prediction. For older cpus we may as well always hint, but
10821 assume not taken for branches that are very close to 50% as a
10822 mispredicted taken branch is more expensive than a
10823 mispredicted not-taken branch. */
10824 if (rs6000_always_hint
10825 || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
10827 if (abs (prob) > REG_BR_PROB_BASE / 20
10828 && ((prob > 0) ^ need_longbranch))
10829 pred = "+";
10830 else
10831 pred = "-";
10835 if (label == NULL)
10836 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
10837 else
10838 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
10840 /* We need to escape any '%' characters in the reg_names string.
10841 Assume they'd only be the first character.... */
10842 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
10843 *s++ = '%';
10844 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
10846 if (label != NULL)
10848 /* If the branch distance was too far, we may have to use an
10849 unconditional branch to go the distance. */
10850 if (need_longbranch)
10851 s += sprintf (s, ",$+8\n\tb %s", label);
10852 else
10853 s += sprintf (s, ",%s", label);
10856 return string;
10859 /* Return the string to flip the EQ bit on a CR. */
10860 char *
10861 output_e500_flip_eq_bit (rtx dst, rtx src)
10863 static char string[64];
10864 int a, b;
10866 if (GET_CODE (dst) != REG || ! CR_REGNO_P (REGNO (dst))
10867 || GET_CODE (src) != REG || ! CR_REGNO_P (REGNO (src)))
10868 abort ();
10870 /* EQ bit. */
10871 a = 4 * (REGNO (dst) - CR0_REGNO) + 2;
10872 b = 4 * (REGNO (src) - CR0_REGNO) + 2;
10874 sprintf (string, "crnot %d,%d", a, b);
10875 return string;
10878 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
10879 operands of the last comparison is nonzero/true, FALSE_COND if it
10880 is zero/false. Return 0 if the hardware has no such operation. */
10883 rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
10885 enum rtx_code code = GET_CODE (op);
10886 rtx op0 = rs6000_compare_op0;
10887 rtx op1 = rs6000_compare_op1;
10888 REAL_VALUE_TYPE c1;
10889 enum machine_mode compare_mode = GET_MODE (op0);
10890 enum machine_mode result_mode = GET_MODE (dest);
10891 rtx temp;
10893 /* These modes should always match. */
10894 if (GET_MODE (op1) != compare_mode
10895 /* In the isel case however, we can use a compare immediate, so
10896 op1 may be a small constant. */
10897 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
10898 return 0;
10899 if (GET_MODE (true_cond) != result_mode)
10900 return 0;
10901 if (GET_MODE (false_cond) != result_mode)
10902 return 0;
10904 /* First, work out if the hardware can do this at all, or
10905 if it's too slow.... */
10906 if (! rs6000_compare_fp_p)
10908 if (TARGET_ISEL)
10909 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
10910 return 0;
10912 else if (TARGET_E500 && TARGET_HARD_FLOAT && !TARGET_FPRS
10913 && GET_MODE_CLASS (compare_mode) == MODE_FLOAT)
10914 return 0;
10916 /* Eliminate half of the comparisons by switching operands, this
10917 makes the remaining code simpler. */
10918 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
10919 || code == LTGT || code == LT || code == UNLE)
10921 code = reverse_condition_maybe_unordered (code);
10922 temp = true_cond;
10923 true_cond = false_cond;
10924 false_cond = temp;
10927 /* UNEQ and LTGT take four instructions for a comparison with zero,
10928 it'll probably be faster to use a branch here too. */
10929 if (code == UNEQ && HONOR_NANS (compare_mode))
10930 return 0;
10932 if (GET_CODE (op1) == CONST_DOUBLE)
10933 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
10935 /* We're going to try to implement comparisons by performing
10936 a subtract, then comparing against zero. Unfortunately,
10937 Inf - Inf is NaN which is not zero, and so if we don't
10938 know that the operand is finite and the comparison
10939 would treat EQ different to UNORDERED, we can't do it. */
10940 if (HONOR_INFINITIES (compare_mode)
10941 && code != GT && code != UNGE
10942 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
10943 /* Constructs of the form (a OP b ? a : b) are safe. */
10944 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
10945 || (! rtx_equal_p (op0, true_cond)
10946 && ! rtx_equal_p (op1, true_cond))))
10947 return 0;
10948 /* At this point we know we can use fsel. */
10950 /* Reduce the comparison to a comparison against zero. */
10951 temp = gen_reg_rtx (compare_mode);
10952 emit_insn (gen_rtx_SET (VOIDmode, temp,
10953 gen_rtx_MINUS (compare_mode, op0, op1)));
10954 op0 = temp;
10955 op1 = CONST0_RTX (compare_mode);
10957 /* If we don't care about NaNs we can reduce some of the comparisons
10958 down to faster ones. */
10959 if (! HONOR_NANS (compare_mode))
10960 switch (code)
10962 case GT:
10963 code = LE;
10964 temp = true_cond;
10965 true_cond = false_cond;
10966 false_cond = temp;
10967 break;
10968 case UNGE:
10969 code = GE;
10970 break;
10971 case UNEQ:
10972 code = EQ;
10973 break;
10974 default:
10975 break;
10978 /* Now, reduce everything down to a GE. */
10979 switch (code)
10981 case GE:
10982 break;
10984 case LE:
10985 temp = gen_reg_rtx (compare_mode);
10986 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
10987 op0 = temp;
10988 break;
10990 case ORDERED:
10991 temp = gen_reg_rtx (compare_mode);
10992 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
10993 op0 = temp;
10994 break;
10996 case EQ:
10997 temp = gen_reg_rtx (compare_mode);
10998 emit_insn (gen_rtx_SET (VOIDmode, temp,
10999 gen_rtx_NEG (compare_mode,
11000 gen_rtx_ABS (compare_mode, op0))));
11001 op0 = temp;
11002 break;
11004 case UNGE:
11005 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
11006 temp = gen_reg_rtx (result_mode);
11007 emit_insn (gen_rtx_SET (VOIDmode, temp,
11008 gen_rtx_IF_THEN_ELSE (result_mode,
11009 gen_rtx_GE (VOIDmode,
11010 op0, op1),
11011 true_cond, false_cond)));
11012 false_cond = true_cond;
11013 true_cond = temp;
11015 temp = gen_reg_rtx (compare_mode);
11016 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
11017 op0 = temp;
11018 break;
11020 case GT:
11021 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
11022 temp = gen_reg_rtx (result_mode);
11023 emit_insn (gen_rtx_SET (VOIDmode, temp,
11024 gen_rtx_IF_THEN_ELSE (result_mode,
11025 gen_rtx_GE (VOIDmode,
11026 op0, op1),
11027 true_cond, false_cond)));
11028 true_cond = false_cond;
11029 false_cond = temp;
11031 temp = gen_reg_rtx (compare_mode);
11032 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
11033 op0 = temp;
11034 break;
11036 default:
11037 abort ();
11040 emit_insn (gen_rtx_SET (VOIDmode, dest,
11041 gen_rtx_IF_THEN_ELSE (result_mode,
11042 gen_rtx_GE (VOIDmode,
11043 op0, op1),
11044 true_cond, false_cond)));
11045 return 1;
11048 /* Same as above, but for ints (isel). */
11050 static int
11051 rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
11053 rtx condition_rtx, cr;
11055 /* All isel implementations thus far are 32-bits. */
11056 if (GET_MODE (rs6000_compare_op0) != SImode)
11057 return 0;
11059 /* We still have to do the compare, because isel doesn't do a
11060 compare, it just looks at the CRx bits set by a previous compare
11061 instruction. */
11062 condition_rtx = rs6000_generate_compare (GET_CODE (op));
11063 cr = XEXP (condition_rtx, 0);
11065 if (GET_MODE (cr) == CCmode)
11066 emit_insn (gen_isel_signed (dest, condition_rtx,
11067 true_cond, false_cond, cr));
11068 else
11069 emit_insn (gen_isel_unsigned (dest, condition_rtx,
11070 true_cond, false_cond, cr));
11072 return 1;
11075 const char *
11076 output_isel (rtx *operands)
11078 enum rtx_code code;
11080 code = GET_CODE (operands[1]);
11081 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
11083 PUT_CODE (operands[1], reverse_condition (code));
11084 return "isel %0,%3,%2,%j1";
11086 else
11087 return "isel %0,%2,%3,%j1";
11090 void
11091 rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
11093 enum machine_mode mode = GET_MODE (op0);
11094 enum rtx_code c;
11095 rtx target;
11097 if (code == SMAX || code == SMIN)
11098 c = GE;
11099 else
11100 c = GEU;
11102 if (code == SMAX || code == UMAX)
11103 target = emit_conditional_move (dest, c, op0, op1, mode,
11104 op0, op1, mode, 0);
11105 else
11106 target = emit_conditional_move (dest, c, op0, op1, mode,
11107 op1, op0, mode, 0);
11108 if (target == NULL_RTX)
11109 abort ();
11110 if (target != dest)
11111 emit_move_insn (dest, target);
11114 /* Emit instructions to move SRC to DST. Called by splitters for
11115 multi-register moves. It will emit at most one instruction for
11116 each register that is accessed; that is, it won't emit li/lis pairs
11117 (or equivalent for 64-bit code). One of SRC or DST must be a hard
11118 register. */
11120 void
11121 rs6000_split_multireg_move (rtx dst, rtx src)
11123 /* The register number of the first register being moved. */
11124 int reg;
11125 /* The mode that is to be moved. */
11126 enum machine_mode mode;
11127 /* The mode that the move is being done in, and its size. */
11128 enum machine_mode reg_mode;
11129 int reg_mode_size;
11130 /* The number of registers that will be moved. */
11131 int nregs;
11133 reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
11134 mode = GET_MODE (dst);
11135 nregs = HARD_REGNO_NREGS (reg, mode);
11136 if (FP_REGNO_P (reg))
11137 reg_mode = DFmode;
11138 else if (ALTIVEC_REGNO_P (reg))
11139 reg_mode = V16QImode;
11140 else
11141 reg_mode = word_mode;
11142 reg_mode_size = GET_MODE_SIZE (reg_mode);
11144 if (reg_mode_size * nregs != GET_MODE_SIZE (mode))
11145 abort ();
11147 if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
11149 /* Move register range backwards, if we might have destructive
11150 overlap. */
11151 int i;
11152 for (i = nregs - 1; i >= 0; i--)
11153 emit_insn (gen_rtx_SET (VOIDmode,
11154 simplify_gen_subreg (reg_mode, dst, mode,
11155 i * reg_mode_size),
11156 simplify_gen_subreg (reg_mode, src, mode,
11157 i * reg_mode_size)));
11159 else
11161 int i;
11162 int j = -1;
11163 bool used_update = false;
11165 if (MEM_P (src) && INT_REGNO_P (reg))
11167 rtx breg;
11169 if (GET_CODE (XEXP (src, 0)) == PRE_INC
11170 || GET_CODE (XEXP (src, 0)) == PRE_DEC)
11172 rtx delta_rtx;
11173 breg = XEXP (XEXP (src, 0), 0);
11174 delta_rtx = GET_CODE (XEXP (src, 0)) == PRE_INC
11175 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
11176 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src)));
11177 emit_insn (TARGET_32BIT
11178 ? gen_addsi3 (breg, breg, delta_rtx)
11179 : gen_adddi3 (breg, breg, delta_rtx));
11180 src = gen_rtx_MEM (mode, breg);
11182 else if (! offsettable_memref_p (src))
11184 rtx newsrc, basereg;
11185 basereg = gen_rtx_REG (Pmode, reg);
11186 emit_insn (gen_rtx_SET (VOIDmode, basereg, XEXP (src, 0)));
11187 newsrc = gen_rtx_MEM (GET_MODE (src), basereg);
11188 MEM_COPY_ATTRIBUTES (newsrc, src);
11189 src = newsrc;
11192 /* We have now address involving an base register only.
11193 If we use one of the registers to address memory,
11194 we have change that register last. */
11196 breg = (GET_CODE (XEXP (src, 0)) == PLUS
11197 ? XEXP (XEXP (src, 0), 0)
11198 : XEXP (src, 0));
11200 if (!REG_P (breg))
11201 abort();
11203 if (REGNO (breg) >= REGNO (dst)
11204 && REGNO (breg) < REGNO (dst) + nregs)
11205 j = REGNO (breg) - REGNO (dst);
11208 if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
11210 rtx breg;
11212 if (GET_CODE (XEXP (dst, 0)) == PRE_INC
11213 || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
11215 rtx delta_rtx;
11216 breg = XEXP (XEXP (dst, 0), 0);
11217 delta_rtx = GET_CODE (XEXP (dst, 0)) == PRE_INC
11218 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
11219 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst)));
11221 /* We have to update the breg before doing the store.
11222 Use store with update, if available. */
11224 if (TARGET_UPDATE)
11226 rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
11227 emit_insn (TARGET_32BIT
11228 ? gen_movsi_update (breg, breg, delta_rtx, nsrc)
11229 : gen_movdi_update (breg, breg, delta_rtx, nsrc));
11230 used_update = true;
11232 else
11233 emit_insn (TARGET_32BIT
11234 ? gen_addsi3 (breg, breg, delta_rtx)
11235 : gen_adddi3 (breg, breg, delta_rtx));
11236 dst = gen_rtx_MEM (mode, breg);
11238 else if (! offsettable_memref_p (dst))
11239 abort ();
11242 for (i = 0; i < nregs; i++)
11244 /* Calculate index to next subword. */
11245 ++j;
11246 if (j == nregs)
11247 j = 0;
11249 /* If compiler already emited move of first word by
11250 store with update, no need to do anything. */
11251 if (j == 0 && used_update)
11252 continue;
11254 emit_insn (gen_rtx_SET (VOIDmode,
11255 simplify_gen_subreg (reg_mode, dst, mode,
11256 j * reg_mode_size),
11257 simplify_gen_subreg (reg_mode, src, mode,
11258 j * reg_mode_size)));
11264 /* This page contains routines that are used to determine what the
11265 function prologue and epilogue code will do and write them out. */
11267 /* Return the first fixed-point register that is required to be
11268 saved. 32 if none. */
11271 first_reg_to_save (void)
11273 int first_reg;
11275 /* Find lowest numbered live register. */
11276 for (first_reg = 13; first_reg <= 31; first_reg++)
11277 if (regs_ever_live[first_reg]
11278 && (! call_used_regs[first_reg]
11279 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
11280 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
11281 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)
11282 || (TARGET_TOC && TARGET_MINIMAL_TOC)))))
11283 break;
11285 #if TARGET_MACHO
11286 if (flag_pic
11287 && current_function_uses_pic_offset_table
11288 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
11289 return RS6000_PIC_OFFSET_TABLE_REGNUM;
11290 #endif
11292 return first_reg;
11295 /* Similar, for FP regs. */
11298 first_fp_reg_to_save (void)
11300 int first_reg;
11302 /* Find lowest numbered live register. */
11303 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
11304 if (regs_ever_live[first_reg])
11305 break;
11307 return first_reg;
11310 /* Similar, for AltiVec regs. */
11312 static int
11313 first_altivec_reg_to_save (void)
11315 int i;
11317 /* Stack frame remains as is unless we are in AltiVec ABI. */
11318 if (! TARGET_ALTIVEC_ABI)
11319 return LAST_ALTIVEC_REGNO + 1;
11321 /* Find lowest numbered live register. */
11322 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
11323 if (regs_ever_live[i])
11324 break;
11326 return i;
11329 /* Return a 32-bit mask of the AltiVec registers we need to set in
11330 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
11331 the 32-bit word is 0. */
11333 static unsigned int
11334 compute_vrsave_mask (void)
11336 unsigned int i, mask = 0;
11338 /* First, find out if we use _any_ altivec registers. */
11339 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
11340 if (regs_ever_live[i])
11341 mask |= ALTIVEC_REG_BIT (i);
11343 if (mask == 0)
11344 return mask;
11346 /* Next, remove the argument registers from the set. These must
11347 be in the VRSAVE mask set by the caller, so we don't need to add
11348 them in again. More importantly, the mask we compute here is
11349 used to generate CLOBBERs in the set_vrsave insn, and we do not
11350 wish the argument registers to die. */
11351 for (i = cfun->args_info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
11352 mask &= ~ALTIVEC_REG_BIT (i);
11354 /* Similarly, remove the return value from the set. */
11356 bool yes = false;
11357 diddle_return_value (is_altivec_return_reg, &yes);
11358 if (yes)
11359 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
11362 return mask;
11365 static void
11366 is_altivec_return_reg (rtx reg, void *xyes)
11368 bool *yes = (bool *) xyes;
11369 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
11370 *yes = true;
11374 /* Calculate the stack information for the current function. This is
11375 complicated by having two separate calling sequences, the AIX calling
11376 sequence and the V.4 calling sequence.
11378 AIX (and Darwin/Mac OS X) stack frames look like:
11379 32-bit 64-bit
11380 SP----> +---------------------------------------+
11381 | back chain to caller | 0 0
11382 +---------------------------------------+
11383 | saved CR | 4 8 (8-11)
11384 +---------------------------------------+
11385 | saved LR | 8 16
11386 +---------------------------------------+
11387 | reserved for compilers | 12 24
11388 +---------------------------------------+
11389 | reserved for binders | 16 32
11390 +---------------------------------------+
11391 | saved TOC pointer | 20 40
11392 +---------------------------------------+
11393 | Parameter save area (P) | 24 48
11394 +---------------------------------------+
11395 | Alloca space (A) | 24+P etc.
11396 +---------------------------------------+
11397 | Local variable space (L) | 24+P+A
11398 +---------------------------------------+
11399 | Float/int conversion temporary (X) | 24+P+A+L
11400 +---------------------------------------+
11401 | Save area for AltiVec registers (W) | 24+P+A+L+X
11402 +---------------------------------------+
11403 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
11404 +---------------------------------------+
11405 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
11406 +---------------------------------------+
11407 | Save area for GP registers (G) | 24+P+A+L+X+W+Y+Z
11408 +---------------------------------------+
11409 | Save area for FP registers (F) | 24+P+A+L+X+W+Y+Z+G
11410 +---------------------------------------+
11411 old SP->| back chain to caller's caller |
11412 +---------------------------------------+
11414 The required alignment for AIX configurations is two words (i.e., 8
11415 or 16 bytes).
11418 V.4 stack frames look like:
11420 SP----> +---------------------------------------+
11421 | back chain to caller | 0
11422 +---------------------------------------+
11423 | caller's saved LR | 4
11424 +---------------------------------------+
11425 | Parameter save area (P) | 8
11426 +---------------------------------------+
11427 | Alloca space (A) | 8+P
11428 +---------------------------------------+
11429 | Varargs save area (V) | 8+P+A
11430 +---------------------------------------+
11431 | Local variable space (L) | 8+P+A+V
11432 +---------------------------------------+
11433 | Float/int conversion temporary (X) | 8+P+A+V+L
11434 +---------------------------------------+
11435 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
11436 +---------------------------------------+
11437 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
11438 +---------------------------------------+
11439 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
11440 +---------------------------------------+
11441 | SPE: area for 64-bit GP registers |
11442 +---------------------------------------+
11443 | SPE alignment padding |
11444 +---------------------------------------+
11445 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
11446 +---------------------------------------+
11447 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
11448 +---------------------------------------+
11449 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
11450 +---------------------------------------+
11451 old SP->| back chain to caller's caller |
11452 +---------------------------------------+
11454 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
11455 given. (But note below and in sysv4.h that we require only 8 and
11456 may round up the size of our stack frame anyways. The historical
11457 reason is early versions of powerpc-linux which didn't properly
11458 align the stack at program startup. A happy side-effect is that
11459 -mno-eabi libraries can be used with -meabi programs.)
11461 The EABI configuration defaults to the V.4 layout. However,
11462 the stack alignment requirements may differ. If -mno-eabi is not
11463 given, the required stack alignment is 8 bytes; if -mno-eabi is
11464 given, the required alignment is 16 bytes. (But see V.4 comment
11465 above.) */
11467 #ifndef ABI_STACK_BOUNDARY
11468 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
11469 #endif
11471 static rs6000_stack_t *
11472 rs6000_stack_info (void)
11474 static rs6000_stack_t info, zero_info;
11475 rs6000_stack_t *info_ptr = &info;
11476 int reg_size = TARGET_32BIT ? 4 : 8;
11477 int ehrd_size;
11478 HOST_WIDE_INT non_fixed_size;
11480 /* Zero all fields portably. */
11481 info = zero_info;
11483 if (TARGET_SPE)
11485 /* Cache value so we don't rescan instruction chain over and over. */
11486 if (cfun->machine->insn_chain_scanned_p == 0)
11488 cfun->machine->insn_chain_scanned_p = 1;
11489 info_ptr->spe_64bit_regs_used = (int) spe_func_has_64bit_regs_p ();
11493 /* Select which calling sequence. */
11494 info_ptr->abi = DEFAULT_ABI;
11496 /* Calculate which registers need to be saved & save area size. */
11497 info_ptr->first_gp_reg_save = first_reg_to_save ();
11498 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
11499 even if it currently looks like we won't. */
11500 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
11501 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
11502 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
11503 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
11504 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
11505 else
11506 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
11508 /* For the SPE, we have an additional upper 32-bits on each GPR.
11509 Ideally we should save the entire 64-bits only when the upper
11510 half is used in SIMD instructions. Since we only record
11511 registers live (not the size they are used in), this proves
11512 difficult because we'd have to traverse the instruction chain at
11513 the right time, taking reload into account. This is a real pain,
11514 so we opt to save the GPRs in 64-bits always if but one register
11515 gets used in 64-bits. Otherwise, all the registers in the frame
11516 get saved in 32-bits.
11518 So... since when we save all GPRs (except the SP) in 64-bits, the
11519 traditional GP save area will be empty. */
11520 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
11521 info_ptr->gp_size = 0;
11523 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
11524 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
11526 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
11527 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
11528 - info_ptr->first_altivec_reg_save);
11530 /* Does this function call anything? */
11531 info_ptr->calls_p = (! current_function_is_leaf
11532 || cfun->machine->ra_needs_full_frame);
11534 /* Determine if we need to save the link register. */
11535 if (rs6000_ra_ever_killed ()
11536 || (DEFAULT_ABI == ABI_AIX
11537 && current_function_profile
11538 && !TARGET_PROFILE_KERNEL)
11539 #ifdef TARGET_RELOCATABLE
11540 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
11541 #endif
11542 || (info_ptr->first_fp_reg_save != 64
11543 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
11544 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
11545 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
11546 || (DEFAULT_ABI == ABI_DARWIN
11547 && flag_pic
11548 && current_function_uses_pic_offset_table)
11549 || info_ptr->calls_p)
11551 info_ptr->lr_save_p = 1;
11552 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
11555 /* Determine if we need to save the condition code registers. */
11556 if (regs_ever_live[CR2_REGNO]
11557 || regs_ever_live[CR3_REGNO]
11558 || regs_ever_live[CR4_REGNO])
11560 info_ptr->cr_save_p = 1;
11561 if (DEFAULT_ABI == ABI_V4)
11562 info_ptr->cr_size = reg_size;
11565 /* If the current function calls __builtin_eh_return, then we need
11566 to allocate stack space for registers that will hold data for
11567 the exception handler. */
11568 if (current_function_calls_eh_return)
11570 unsigned int i;
11571 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
11572 continue;
11574 /* SPE saves EH registers in 64-bits. */
11575 ehrd_size = i * (TARGET_SPE_ABI
11576 && info_ptr->spe_64bit_regs_used != 0
11577 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
11579 else
11580 ehrd_size = 0;
11582 /* Determine various sizes. */
11583 info_ptr->reg_size = reg_size;
11584 info_ptr->fixed_size = RS6000_SAVE_AREA;
11585 info_ptr->varargs_size = RS6000_VARARGS_AREA;
11586 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
11587 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
11588 TARGET_ALTIVEC ? 16 : 8);
11590 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
11591 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
11592 else
11593 info_ptr->spe_gp_size = 0;
11595 if (TARGET_ALTIVEC_ABI)
11596 info_ptr->vrsave_mask = compute_vrsave_mask ();
11597 else
11598 info_ptr->vrsave_mask = 0;
11600 if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
11601 info_ptr->vrsave_size = 4;
11602 else
11603 info_ptr->vrsave_size = 0;
11605 /* Calculate the offsets. */
11606 switch (DEFAULT_ABI)
11608 case ABI_NONE:
11609 default:
11610 abort ();
11612 case ABI_AIX:
11613 case ABI_DARWIN:
11614 info_ptr->fp_save_offset = - info_ptr->fp_size;
11615 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
11617 if (TARGET_ALTIVEC_ABI)
11619 info_ptr->vrsave_save_offset
11620 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
11622 /* Align stack so vector save area is on a quadword boundary. */
11623 if (info_ptr->altivec_size != 0)
11624 info_ptr->altivec_padding_size
11625 = 16 - (-info_ptr->vrsave_save_offset % 16);
11626 else
11627 info_ptr->altivec_padding_size = 0;
11629 info_ptr->altivec_save_offset
11630 = info_ptr->vrsave_save_offset
11631 - info_ptr->altivec_padding_size
11632 - info_ptr->altivec_size;
11634 /* Adjust for AltiVec case. */
11635 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
11637 else
11638 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
11639 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
11640 info_ptr->lr_save_offset = 2*reg_size;
11641 break;
11643 case ABI_V4:
11644 info_ptr->fp_save_offset = - info_ptr->fp_size;
11645 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
11646 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
11648 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
11650 /* Align stack so SPE GPR save area is aligned on a
11651 double-word boundary. */
11652 if (info_ptr->spe_gp_size != 0)
11653 info_ptr->spe_padding_size
11654 = 8 - (-info_ptr->cr_save_offset % 8);
11655 else
11656 info_ptr->spe_padding_size = 0;
11658 info_ptr->spe_gp_save_offset
11659 = info_ptr->cr_save_offset
11660 - info_ptr->spe_padding_size
11661 - info_ptr->spe_gp_size;
11663 /* Adjust for SPE case. */
11664 info_ptr->toc_save_offset
11665 = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
11667 else if (TARGET_ALTIVEC_ABI)
11669 info_ptr->vrsave_save_offset
11670 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
11672 /* Align stack so vector save area is on a quadword boundary. */
11673 if (info_ptr->altivec_size != 0)
11674 info_ptr->altivec_padding_size
11675 = 16 - (-info_ptr->vrsave_save_offset % 16);
11676 else
11677 info_ptr->altivec_padding_size = 0;
11679 info_ptr->altivec_save_offset
11680 = info_ptr->vrsave_save_offset
11681 - info_ptr->altivec_padding_size
11682 - info_ptr->altivec_size;
11684 /* Adjust for AltiVec case. */
11685 info_ptr->toc_save_offset
11686 = info_ptr->altivec_save_offset - info_ptr->toc_size;
11688 else
11689 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
11690 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
11691 info_ptr->lr_save_offset = reg_size;
11692 break;
11695 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
11696 + info_ptr->gp_size
11697 + info_ptr->altivec_size
11698 + info_ptr->altivec_padding_size
11699 + info_ptr->spe_gp_size
11700 + info_ptr->spe_padding_size
11701 + ehrd_size
11702 + info_ptr->cr_size
11703 + info_ptr->lr_size
11704 + info_ptr->vrsave_size
11705 + info_ptr->toc_size,
11706 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
11707 ? 16 : 8);
11709 non_fixed_size = (info_ptr->vars_size
11710 + info_ptr->parm_size
11711 + info_ptr->save_size
11712 + info_ptr->varargs_size);
11714 info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
11715 ABI_STACK_BOUNDARY / BITS_PER_UNIT);
11717 /* Determine if we need to allocate any stack frame:
11719 For AIX we need to push the stack if a frame pointer is needed
11720 (because the stack might be dynamically adjusted), if we are
11721 debugging, if we make calls, or if the sum of fp_save, gp_save,
11722 and local variables are more than the space needed to save all
11723 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
11724 + 18*8 = 288 (GPR13 reserved).
11726 For V.4 we don't have the stack cushion that AIX uses, but assume
11727 that the debugger can handle stackless frames. */
11729 if (info_ptr->calls_p)
11730 info_ptr->push_p = 1;
11732 else if (DEFAULT_ABI == ABI_V4)
11733 info_ptr->push_p = non_fixed_size != 0;
11735 else if (frame_pointer_needed)
11736 info_ptr->push_p = 1;
11738 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
11739 info_ptr->push_p = 1;
11741 else
11742 info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);
11744 /* Zero offsets if we're not saving those registers. */
11745 if (info_ptr->fp_size == 0)
11746 info_ptr->fp_save_offset = 0;
11748 if (info_ptr->gp_size == 0)
11749 info_ptr->gp_save_offset = 0;
11751 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
11752 info_ptr->altivec_save_offset = 0;
11754 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
11755 info_ptr->vrsave_save_offset = 0;
11757 if (! TARGET_SPE_ABI
11758 || info_ptr->spe_64bit_regs_used == 0
11759 || info_ptr->spe_gp_size == 0)
11760 info_ptr->spe_gp_save_offset = 0;
11762 if (! info_ptr->lr_save_p)
11763 info_ptr->lr_save_offset = 0;
11765 if (! info_ptr->cr_save_p)
11766 info_ptr->cr_save_offset = 0;
11768 if (! info_ptr->toc_save_p)
11769 info_ptr->toc_save_offset = 0;
11771 return info_ptr;
11774 /* Return true if the current function uses any GPRs in 64-bit SIMD
11775 mode. */
11777 static bool
11778 spe_func_has_64bit_regs_p (void)
11780 rtx insns, insn;
11782 /* Functions that save and restore all the call-saved registers will
11783 need to save/restore the registers in 64-bits. */
11784 if (current_function_calls_eh_return
11785 || current_function_calls_setjmp
11786 || current_function_has_nonlocal_goto)
11787 return true;
11789 insns = get_insns ();
11791 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
11793 if (INSN_P (insn))
11795 rtx i;
11797 i = PATTERN (insn);
11798 if (GET_CODE (i) == SET
11799 && SPE_VECTOR_MODE (GET_MODE (SET_SRC (i))))
11800 return true;
11804 return false;
11807 static void
11808 debug_stack_info (rs6000_stack_t *info)
11810 const char *abi_string;
11812 if (! info)
11813 info = rs6000_stack_info ();
11815 fprintf (stderr, "\nStack information for function %s:\n",
11816 ((current_function_decl && DECL_NAME (current_function_decl))
11817 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
11818 : "<unknown>"));
11820 switch (info->abi)
11822 default: abi_string = "Unknown"; break;
11823 case ABI_NONE: abi_string = "NONE"; break;
11824 case ABI_AIX: abi_string = "AIX"; break;
11825 case ABI_DARWIN: abi_string = "Darwin"; break;
11826 case ABI_V4: abi_string = "V.4"; break;
11829 fprintf (stderr, "\tABI = %5s\n", abi_string);
11831 if (TARGET_ALTIVEC_ABI)
11832 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
11834 if (TARGET_SPE_ABI)
11835 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
11837 if (info->first_gp_reg_save != 32)
11838 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
11840 if (info->first_fp_reg_save != 64)
11841 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
11843 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
11844 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
11845 info->first_altivec_reg_save);
11847 if (info->lr_save_p)
11848 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
11850 if (info->cr_save_p)
11851 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
11853 if (info->toc_save_p)
11854 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
11856 if (info->vrsave_mask)
11857 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
11859 if (info->push_p)
11860 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
11862 if (info->calls_p)
11863 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
11865 if (info->gp_save_offset)
11866 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
11868 if (info->fp_save_offset)
11869 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
11871 if (info->altivec_save_offset)
11872 fprintf (stderr, "\taltivec_save_offset = %5d\n",
11873 info->altivec_save_offset);
11875 if (info->spe_gp_save_offset)
11876 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
11877 info->spe_gp_save_offset);
11879 if (info->vrsave_save_offset)
11880 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
11881 info->vrsave_save_offset);
11883 if (info->lr_save_offset)
11884 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
11886 if (info->cr_save_offset)
11887 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
11889 if (info->toc_save_offset)
11890 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
11892 if (info->varargs_save_offset)
11893 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
11895 if (info->total_size)
11896 fprintf (stderr, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC"\n",
11897 info->total_size);
11899 if (info->varargs_size)
11900 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
11902 if (info->vars_size)
11903 fprintf (stderr, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC"\n",
11904 info->vars_size);
11906 if (info->parm_size)
11907 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
11909 if (info->fixed_size)
11910 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
11912 if (info->gp_size)
11913 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
11915 if (info->spe_gp_size)
11916 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
11918 if (info->fp_size)
11919 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
11921 if (info->altivec_size)
11922 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
11924 if (info->vrsave_size)
11925 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
11927 if (info->altivec_padding_size)
11928 fprintf (stderr, "\taltivec_padding_size= %5d\n",
11929 info->altivec_padding_size);
11931 if (info->spe_padding_size)
11932 fprintf (stderr, "\tspe_padding_size = %5d\n",
11933 info->spe_padding_size);
11935 if (info->lr_size)
11936 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
11938 if (info->cr_size)
11939 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
11941 if (info->toc_size)
11942 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
11944 if (info->save_size)
11945 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
11947 if (info->reg_size != 4)
11948 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
11950 fprintf (stderr, "\n");
11954 rs6000_return_addr (int count, rtx frame)
11956 /* Currently we don't optimize very well between prolog and body
11957 code and for PIC code the code can be actually quite bad, so
11958 don't try to be too clever here. */
11959 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
11961 cfun->machine->ra_needs_full_frame = 1;
11963 return
11964 gen_rtx_MEM
11965 (Pmode,
11966 memory_address
11967 (Pmode,
11968 plus_constant (copy_to_reg
11969 (gen_rtx_MEM (Pmode,
11970 memory_address (Pmode, frame))),
11971 RETURN_ADDRESS_OFFSET)));
11974 cfun->machine->ra_need_lr = 1;
11975 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
11978 /* Say whether a function is a candidate for sibcall handling or not.
11979 We do not allow indirect calls to be optimized into sibling calls.
11980 Also, we can't do it if there are any vector parameters; there's
11981 nowhere to put the VRsave code so it works; note that functions with
11982 vector parameters are required to have a prototype, so the argument
11983 type info must be available here. (The tail recursion case can work
11984 with vector parameters, but there's no way to distinguish here.) */
11985 static bool
11986 rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
11988 tree type;
11989 if (decl)
11991 if (TARGET_ALTIVEC_VRSAVE)
11993 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
11994 type; type = TREE_CHAIN (type))
11996 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
11997 return false;
12000 if (DEFAULT_ABI == ABI_DARWIN
12001 || (*targetm.binds_local_p) (decl))
12003 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
12005 if (!lookup_attribute ("longcall", attr_list)
12006 || lookup_attribute ("shortcall", attr_list))
12007 return true;
12010 return false;
12013 static int
12014 rs6000_ra_ever_killed (void)
12016 rtx top;
12017 rtx reg;
12018 rtx insn;
12020 if (current_function_is_thunk)
12021 return 0;
12023 /* regs_ever_live has LR marked as used if any sibcalls are present,
12024 but this should not force saving and restoring in the
12025 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
12026 clobbers LR, so that is inappropriate. */
12028 /* Also, the prologue can generate a store into LR that
12029 doesn't really count, like this:
12031 move LR->R0
12032 bcl to set PIC register
12033 move LR->R31
12034 move R0->LR
12036 When we're called from the epilogue, we need to avoid counting
12037 this as a store. */
12039 push_topmost_sequence ();
12040 top = get_insns ();
12041 pop_topmost_sequence ();
12042 reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
12044 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
12046 if (INSN_P (insn))
12048 if (FIND_REG_INC_NOTE (insn, reg))
12049 return 1;
12050 else if (GET_CODE (insn) == CALL_INSN
12051 && !SIBLING_CALL_P (insn))
12052 return 1;
12053 else if (set_of (reg, insn) != NULL_RTX
12054 && !prologue_epilogue_contains (insn))
12055 return 1;
12058 return 0;
12061 /* Add a REG_MAYBE_DEAD note to the insn. */
12062 static void
12063 rs6000_maybe_dead (rtx insn)
12065 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
12066 const0_rtx,
12067 REG_NOTES (insn));
12070 /* Emit instructions needed to load the TOC register.
12071 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
12072 a constant pool; or for SVR4 -fpic. */
12074 void
12075 rs6000_emit_load_toc_table (int fromprolog)
12077 rtx dest, insn;
12078 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
12080 if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
12082 rtx temp = (fromprolog
12083 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
12084 : gen_reg_rtx (Pmode));
12085 insn = emit_insn (gen_load_toc_v4_pic_si (temp));
12086 if (fromprolog)
12087 rs6000_maybe_dead (insn);
12088 insn = emit_move_insn (dest, temp);
12089 if (fromprolog)
12090 rs6000_maybe_dead (insn);
12092 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
12094 char buf[30];
12095 rtx tempLR = (fromprolog
12096 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
12097 : gen_reg_rtx (Pmode));
12098 rtx temp0 = (fromprolog
12099 ? gen_rtx_REG (Pmode, 0)
12100 : gen_reg_rtx (Pmode));
12101 rtx symF;
12103 /* possibly create the toc section */
12104 if (! toc_initialized)
12106 toc_section ();
12107 function_section (current_function_decl);
12110 if (fromprolog)
12112 rtx symL;
12114 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
12115 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
12117 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
12118 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
12120 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
12121 symF)));
12122 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
12123 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
12124 symL,
12125 symF)));
12127 else
12129 rtx tocsym;
12130 static int reload_toc_labelno = 0;
12132 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
12134 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
12135 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
12137 emit_insn (gen_load_toc_v4_PIC_1b (tempLR, symF, tocsym));
12138 emit_move_insn (dest, tempLR);
12139 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
12141 insn = emit_insn (gen_addsi3 (dest, temp0, dest));
12142 if (fromprolog)
12143 rs6000_maybe_dead (insn);
12145 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
12147 /* This is for AIX code running in non-PIC ELF32. */
12148 char buf[30];
12149 rtx realsym;
12150 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
12151 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
12153 insn = emit_insn (gen_elf_high (dest, realsym));
12154 if (fromprolog)
12155 rs6000_maybe_dead (insn);
12156 insn = emit_insn (gen_elf_low (dest, dest, realsym));
12157 if (fromprolog)
12158 rs6000_maybe_dead (insn);
12160 else if (DEFAULT_ABI == ABI_AIX)
12162 if (TARGET_32BIT)
12163 insn = emit_insn (gen_load_toc_aix_si (dest));
12164 else
12165 insn = emit_insn (gen_load_toc_aix_di (dest));
12166 if (fromprolog)
12167 rs6000_maybe_dead (insn);
12169 else
12170 abort ();
12173 /* Emit instructions to restore the link register after determining where
12174 its value has been stored. */
12176 void
12177 rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
12179 rs6000_stack_t *info = rs6000_stack_info ();
12180 rtx operands[2];
12182 operands[0] = source;
12183 operands[1] = scratch;
12185 if (info->lr_save_p)
12187 rtx frame_rtx = stack_pointer_rtx;
12188 HOST_WIDE_INT sp_offset = 0;
12189 rtx tmp;
12191 if (frame_pointer_needed
12192 || current_function_calls_alloca
12193 || info->total_size > 32767)
12195 emit_move_insn (operands[1], gen_rtx_MEM (Pmode, frame_rtx));
12196 frame_rtx = operands[1];
12198 else if (info->push_p)
12199 sp_offset = info->total_size;
12201 tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
12202 tmp = gen_rtx_MEM (Pmode, tmp);
12203 emit_move_insn (tmp, operands[0]);
12205 else
12206 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM), operands[0]);
12209 static GTY(()) int set = -1;
12211 int
12212 get_TOC_alias_set (void)
12214 if (set == -1)
12215 set = new_alias_set ();
12216 return set;
/* This returns nonzero if the current function uses the TOC.  This is
   determined by the presence of (use (unspec ... UNSPEC_TOC)), which
   is generated by the ABI_V4 load_toc_* patterns.  */
#if TARGET_ELF
static int
uses_TOC (void)
{
  rtx cur;

  for (cur = get_insns (); cur; cur = NEXT_INSN (cur))
    {
      rtx pat;
      int i;

      if (! INSN_P (cur))
	continue;

      pat = PATTERN (cur);
      if (GET_CODE (pat) != PARALLEL)
	continue;

      /* Look for a (use (unspec ... UNSPEC_TOC)) element.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
	{
	  rtx elt = XVECEXP (pat, 0, i);

	  if (GET_CODE (elt) == USE)
	    {
	      elt = XEXP (elt, 0);
	      if (GET_CODE (elt) == UNSPEC
		  && XINT (elt, 1) == UNSPEC_TOC)
		return 1;
	    }
	}
    }

  return 0;
}
#endif
12252 create_TOC_reference (rtx symbol)
12254 return gen_rtx_PLUS (Pmode,
12255 gen_rtx_REG (Pmode, TOC_REGISTER),
12256 gen_rtx_CONST (Pmode,
12257 gen_rtx_MINUS (Pmode, symbol,
12258 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
12261 /* If _Unwind_* has been called from within the same module,
12262 toc register is not guaranteed to be saved to 40(1) on function
12263 entry. Save it there in that case. */
12265 void
12266 rs6000_aix_emit_builtin_unwind_init (void)
12268 rtx mem;
12269 rtx stack_top = gen_reg_rtx (Pmode);
12270 rtx opcode_addr = gen_reg_rtx (Pmode);
12271 rtx opcode = gen_reg_rtx (SImode);
12272 rtx tocompare = gen_reg_rtx (SImode);
12273 rtx no_toc_save_needed = gen_label_rtx ();
12275 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
12276 emit_move_insn (stack_top, mem);
12278 mem = gen_rtx_MEM (Pmode,
12279 gen_rtx_PLUS (Pmode, stack_top,
12280 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
12281 emit_move_insn (opcode_addr, mem);
12282 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
12283 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
12284 : 0xE8410028, SImode));
12286 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
12287 SImode, NULL_RTX, NULL_RTX,
12288 no_toc_save_needed);
12290 mem = gen_rtx_MEM (Pmode,
12291 gen_rtx_PLUS (Pmode, stack_top,
12292 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
12293 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
12294 emit_label (no_toc_save_needed);
12297 /* This ties together stack memory (MEM with an alias set of
12298 rs6000_sr_alias_set) and the change to the stack pointer. */
12300 static void
12301 rs6000_emit_stack_tie (void)
12303 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
12305 set_mem_alias_set (mem, rs6000_sr_alias_set);
12306 emit_insn (gen_stack_tie (mem));
12309 /* Emit the correct code for allocating stack space, as insns.
12310 If COPY_R12, make sure a copy of the old frame is left in r12.
12311 The generated code may use hard register 0 as a temporary. */
12313 static void
12314 rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
12316 rtx insn;
12317 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
12318 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
12319 rtx todec = GEN_INT (-size);
12321 if (current_function_limit_stack)
12323 if (REG_P (stack_limit_rtx)
12324 && REGNO (stack_limit_rtx) > 1
12325 && REGNO (stack_limit_rtx) <= 31)
12327 emit_insn (TARGET_32BIT
12328 ? gen_addsi3 (tmp_reg,
12329 stack_limit_rtx,
12330 GEN_INT (size))
12331 : gen_adddi3 (tmp_reg,
12332 stack_limit_rtx,
12333 GEN_INT (size)));
12335 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
12336 const0_rtx));
12338 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
12339 && TARGET_32BIT
12340 && DEFAULT_ABI == ABI_V4)
12342 rtx toload = gen_rtx_CONST (VOIDmode,
12343 gen_rtx_PLUS (Pmode,
12344 stack_limit_rtx,
12345 GEN_INT (size)));
12347 emit_insn (gen_elf_high (tmp_reg, toload));
12348 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
12349 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
12350 const0_rtx));
12352 else
12353 warning ("stack limit expression is not supported");
12356 if (copy_r12 || ! TARGET_UPDATE)
12357 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
12359 if (TARGET_UPDATE)
12361 if (size > 32767)
12363 /* Need a note here so that try_split doesn't get confused. */
12364 if (get_last_insn() == NULL_RTX)
12365 emit_note (NOTE_INSN_DELETED);
12366 insn = emit_move_insn (tmp_reg, todec);
12367 try_split (PATTERN (insn), insn, 0);
12368 todec = tmp_reg;
12371 insn = emit_insn (TARGET_32BIT
12372 ? gen_movsi_update (stack_reg, stack_reg,
12373 todec, stack_reg)
12374 : gen_movdi_update (stack_reg, stack_reg,
12375 todec, stack_reg));
12377 else
12379 insn = emit_insn (TARGET_32BIT
12380 ? gen_addsi3 (stack_reg, stack_reg, todec)
12381 : gen_adddi3 (stack_reg, stack_reg, todec));
12382 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
12383 gen_rtx_REG (Pmode, 12));
12386 RTX_FRAME_RELATED_P (insn) = 1;
12387 REG_NOTES (insn) =
12388 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
12389 gen_rtx_SET (VOIDmode, stack_reg,
12390 gen_rtx_PLUS (Pmode, stack_reg,
12391 GEN_INT (-size))),
12392 REG_NOTES (insn));
12395 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
12396 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
12397 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
12398 deduce these equivalences by itself so it wasn't necessary to hold
12399 its hand so much. */
12401 static void
12402 rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
12403 rtx reg2, rtx rreg)
12405 rtx real, temp;
12407 /* copy_rtx will not make unique copies of registers, so we need to
12408 ensure we don't have unwanted sharing here. */
12409 if (reg == reg2)
12410 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
12412 if (reg == rreg)
12413 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
12415 real = copy_rtx (PATTERN (insn));
12417 if (reg2 != NULL_RTX)
12418 real = replace_rtx (real, reg2, rreg);
12420 real = replace_rtx (real, reg,
12421 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
12422 STACK_POINTER_REGNUM),
12423 GEN_INT (val)));
12425 /* We expect that 'real' is either a SET or a PARALLEL containing
12426 SETs (and possibly other stuff). In a PARALLEL, all the SETs
12427 are important so they all have to be marked RTX_FRAME_RELATED_P. */
12429 if (GET_CODE (real) == SET)
12431 rtx set = real;
12433 temp = simplify_rtx (SET_SRC (set));
12434 if (temp)
12435 SET_SRC (set) = temp;
12436 temp = simplify_rtx (SET_DEST (set));
12437 if (temp)
12438 SET_DEST (set) = temp;
12439 if (GET_CODE (SET_DEST (set)) == MEM)
12441 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
12442 if (temp)
12443 XEXP (SET_DEST (set), 0) = temp;
12446 else if (GET_CODE (real) == PARALLEL)
12448 int i;
12449 for (i = 0; i < XVECLEN (real, 0); i++)
12450 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
12452 rtx set = XVECEXP (real, 0, i);
12454 temp = simplify_rtx (SET_SRC (set));
12455 if (temp)
12456 SET_SRC (set) = temp;
12457 temp = simplify_rtx (SET_DEST (set));
12458 if (temp)
12459 SET_DEST (set) = temp;
12460 if (GET_CODE (SET_DEST (set)) == MEM)
12462 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
12463 if (temp)
12464 XEXP (SET_DEST (set), 0) = temp;
12466 RTX_FRAME_RELATED_P (set) = 1;
12469 else
12470 abort ();
12472 if (TARGET_SPE)
12473 real = spe_synthesize_frame_save (real);
12475 RTX_FRAME_RELATED_P (insn) = 1;
12476 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
12477 real,
12478 REG_NOTES (insn));
12481 /* Given an SPE frame note, return a PARALLEL of SETs with the
12482 original note, plus a synthetic register save. */
12484 static rtx
12485 spe_synthesize_frame_save (rtx real)
12487 rtx synth, offset, reg, real2;
12489 if (GET_CODE (real) != SET
12490 || GET_MODE (SET_SRC (real)) != V2SImode)
12491 return real;
12493 /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
12494 frame related note. The parallel contains a set of the register
12495 being saved, and another set to a synthetic register (n+1200).
12496 This is so we can differentiate between 64-bit and 32-bit saves.
12497 Words cannot describe this nastiness. */
12499 if (GET_CODE (SET_DEST (real)) != MEM
12500 || GET_CODE (XEXP (SET_DEST (real), 0)) != PLUS
12501 || GET_CODE (SET_SRC (real)) != REG)
12502 abort ();
12504 /* Transform:
12505 (set (mem (plus (reg x) (const y)))
12506 (reg z))
12507 into:
12508 (set (mem (plus (reg x) (const y+4)))
12509 (reg z+1200))
12512 real2 = copy_rtx (real);
12513 PUT_MODE (SET_DEST (real2), SImode);
12514 reg = SET_SRC (real2);
12515 real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
12516 synth = copy_rtx (real2);
12518 if (BYTES_BIG_ENDIAN)
12520 offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
12521 real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
12524 reg = SET_SRC (synth);
12526 synth = replace_rtx (synth, reg,
12527 gen_rtx_REG (SImode, REGNO (reg) + 1200));
12529 offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
12530 synth = replace_rtx (synth, offset,
12531 GEN_INT (INTVAL (offset)
12532 + (BYTES_BIG_ENDIAN ? 0 : 4)));
12534 RTX_FRAME_RELATED_P (synth) = 1;
12535 RTX_FRAME_RELATED_P (real2) = 1;
12536 if (BYTES_BIG_ENDIAN)
12537 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
12538 else
12539 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));
12541 return real;
12544 /* Returns an insn that has a vrsave set operation with the
12545 appropriate CLOBBERs. */
12547 static rtx
12548 generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
12550 int nclobs, i;
12551 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
12552 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
12554 clobs[0]
12555 = gen_rtx_SET (VOIDmode,
12556 vrsave,
12557 gen_rtx_UNSPEC_VOLATILE (SImode,
12558 gen_rtvec (2, reg, vrsave),
12559 30));
12561 nclobs = 1;
12563 /* We need to clobber the registers in the mask so the scheduler
12564 does not move sets to VRSAVE before sets of AltiVec registers.
12566 However, if the function receives nonlocal gotos, reload will set
12567 all call saved registers live. We will end up with:
12569 (set (reg 999) (mem))
12570 (parallel [ (set (reg vrsave) (unspec blah))
12571 (clobber (reg 999))])
12573 The clobber will cause the store into reg 999 to be dead, and
12574 flow will attempt to delete an epilogue insn. In this case, we
12575 need an unspec use/set of the register. */
12577 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
12578 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
12580 if (!epiloguep || call_used_regs [i])
12581 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
12582 gen_rtx_REG (V4SImode, i));
12583 else
12585 rtx reg = gen_rtx_REG (V4SImode, i);
12587 clobs[nclobs++]
12588 = gen_rtx_SET (VOIDmode,
12589 reg,
12590 gen_rtx_UNSPEC (V4SImode,
12591 gen_rtvec (1, reg), 27));
12595 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
12597 for (i = 0; i < nclobs; ++i)
12598 XVECEXP (insn, 0, i) = clobs[i];
12600 return insn;
12603 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
12604 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
12606 static void
12607 emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
12608 unsigned int regno, int offset, HOST_WIDE_INT total_size)
12610 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
12611 rtx replacea, replaceb;
12613 int_rtx = GEN_INT (offset);
12615 /* Some cases that need register indexed addressing. */
12616 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
12617 || (TARGET_SPE_ABI
12618 && SPE_VECTOR_MODE (mode)
12619 && !SPE_CONST_OFFSET_OK (offset)))
12621 /* Whomever calls us must make sure r11 is available in the
12622 flow path of instructions in the prologue. */
12623 offset_rtx = gen_rtx_REG (Pmode, 11);
12624 emit_move_insn (offset_rtx, int_rtx);
12626 replacea = offset_rtx;
12627 replaceb = int_rtx;
12629 else
12631 offset_rtx = int_rtx;
12632 replacea = NULL_RTX;
12633 replaceb = NULL_RTX;
12636 reg = gen_rtx_REG (mode, regno);
12637 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
12638 mem = gen_rtx_MEM (mode, addr);
12639 set_mem_alias_set (mem, rs6000_sr_alias_set);
12641 insn = emit_move_insn (mem, reg);
12643 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
12646 /* Emit an offset memory reference suitable for a frame store, while
12647 converting to a valid addressing mode. */
12649 static rtx
12650 gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
12652 rtx int_rtx, offset_rtx;
12654 int_rtx = GEN_INT (offset);
12656 if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
12658 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
12659 emit_move_insn (offset_rtx, int_rtx);
12661 else
12662 offset_rtx = int_rtx;
12664 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
12667 /* Emit function prologue as insns. */
12669 void
12670 rs6000_emit_prologue (void)
12672 rs6000_stack_t *info = rs6000_stack_info ();
12673 enum machine_mode reg_mode = Pmode;
12674 int reg_size = TARGET_32BIT ? 4 : 8;
12675 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
12676 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
12677 rtx frame_reg_rtx = sp_reg_rtx;
12678 rtx cr_save_rtx = NULL_RTX;
12679 rtx insn;
12680 int saving_FPRs_inline;
12681 int using_store_multiple;
12682 HOST_WIDE_INT sp_offset = 0;
12684 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
12686 reg_mode = V2SImode;
12687 reg_size = 8;
12690 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
12691 && (!TARGET_SPE_ABI
12692 || info->spe_64bit_regs_used == 0)
12693 && info->first_gp_reg_save < 31);
12694 saving_FPRs_inline = (info->first_fp_reg_save == 64
12695 || FP_SAVE_INLINE (info->first_fp_reg_save)
12696 || current_function_calls_eh_return
12697 || cfun->machine->ra_need_lr);
12699 /* For V.4, update stack before we do any saving and set back pointer. */
12700 if (info->push_p
12701 && (DEFAULT_ABI == ABI_V4
12702 || current_function_calls_eh_return))
12704 if (info->total_size < 32767)
12705 sp_offset = info->total_size;
12706 else
12707 frame_reg_rtx = frame_ptr_rtx;
12708 rs6000_emit_allocate_stack (info->total_size,
12709 (frame_reg_rtx != sp_reg_rtx
12710 && (info->cr_save_p
12711 || info->lr_save_p
12712 || info->first_fp_reg_save < 64
12713 || info->first_gp_reg_save < 32
12714 )));
12715 if (frame_reg_rtx != sp_reg_rtx)
12716 rs6000_emit_stack_tie ();
12719 /* Save AltiVec registers if needed. */
12720 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
12722 int i;
12724 /* There should be a non inline version of this, for when we
12725 are saving lots of vector registers. */
12726 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
12727 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
12729 rtx areg, savereg, mem;
12730 int offset;
12732 offset = info->altivec_save_offset + sp_offset
12733 + 16 * (i - info->first_altivec_reg_save);
12735 savereg = gen_rtx_REG (V4SImode, i);
12737 areg = gen_rtx_REG (Pmode, 0);
12738 emit_move_insn (areg, GEN_INT (offset));
12740 /* AltiVec addressing mode is [reg+reg]. */
12741 mem = gen_rtx_MEM (V4SImode,
12742 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
12744 set_mem_alias_set (mem, rs6000_sr_alias_set);
12746 insn = emit_move_insn (mem, savereg);
12748 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12749 areg, GEN_INT (offset));
12753 /* VRSAVE is a bit vector representing which AltiVec registers
12754 are used. The OS uses this to determine which vector
12755 registers to save on a context switch. We need to save
12756 VRSAVE on the stack frame, add whatever AltiVec registers we
12757 used in this function, and do the corresponding magic in the
12758 epilogue. */
12760 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
12761 && info->vrsave_mask != 0)
12763 rtx reg, mem, vrsave;
12764 int offset;
12766 /* Get VRSAVE onto a GPR. */
12767 reg = gen_rtx_REG (SImode, 12);
12768 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
12769 if (TARGET_MACHO)
12770 emit_insn (gen_get_vrsave_internal (reg));
12771 else
12772 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
12774 /* Save VRSAVE. */
12775 offset = info->vrsave_save_offset + sp_offset;
12777 = gen_rtx_MEM (SImode,
12778 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
12779 set_mem_alias_set (mem, rs6000_sr_alias_set);
12780 insn = emit_move_insn (mem, reg);
12782 /* Include the registers in the mask. */
12783 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
12785 insn = emit_insn (generate_set_vrsave (reg, info, 0));
12788 /* If we use the link register, get it into r0. */
12789 if (info->lr_save_p)
12791 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
12792 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
12793 RTX_FRAME_RELATED_P (insn) = 1;
12796 /* If we need to save CR, put it into r12. */
12797 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
12799 rtx set;
12801 cr_save_rtx = gen_rtx_REG (SImode, 12);
12802 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
12803 RTX_FRAME_RELATED_P (insn) = 1;
12804 /* Now, there's no way that dwarf2out_frame_debug_expr is going
12805 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
12806 But that's OK. All we have to do is specify that _one_ condition
12807 code register is saved in this stack slot. The thrower's epilogue
12808 will then restore all the call-saved registers.
12809 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
12810 set = gen_rtx_SET (VOIDmode, cr_save_rtx,
12811 gen_rtx_REG (SImode, CR2_REGNO));
12812 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
12813 set,
12814 REG_NOTES (insn));
12817 /* Do any required saving of fpr's. If only one or two to save, do
12818 it ourselves. Otherwise, call function. */
12819 if (saving_FPRs_inline)
12821 int i;
12822 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
12823 if ((regs_ever_live[info->first_fp_reg_save+i]
12824 && ! call_used_regs[info->first_fp_reg_save+i]))
12825 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
12826 info->first_fp_reg_save + i,
12827 info->fp_save_offset + sp_offset + 8 * i,
12828 info->total_size);
12830 else if (info->first_fp_reg_save != 64)
12832 int i;
12833 char rname[30];
12834 const char *alloc_rname;
12835 rtvec p;
12836 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
12838 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
12839 gen_rtx_REG (Pmode,
12840 LINK_REGISTER_REGNUM));
12841 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
12842 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
12843 alloc_rname = ggc_strdup (rname);
12844 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
12845 gen_rtx_SYMBOL_REF (Pmode,
12846 alloc_rname));
12847 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
12849 rtx addr, reg, mem;
12850 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
12851 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12852 GEN_INT (info->fp_save_offset
12853 + sp_offset + 8*i));
12854 mem = gen_rtx_MEM (DFmode, addr);
12855 set_mem_alias_set (mem, rs6000_sr_alias_set);
12857 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
12859 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
12860 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12861 NULL_RTX, NULL_RTX);
12864 /* Save GPRs. This is done as a PARALLEL if we are using
12865 the store-multiple instructions. */
12866 if (using_store_multiple)
12868 rtvec p;
12869 int i;
12870 p = rtvec_alloc (32 - info->first_gp_reg_save);
12871 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
12873 rtx addr, reg, mem;
12874 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
12875 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12876 GEN_INT (info->gp_save_offset
12877 + sp_offset
12878 + reg_size * i));
12879 mem = gen_rtx_MEM (reg_mode, addr);
12880 set_mem_alias_set (mem, rs6000_sr_alias_set);
12882 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
12884 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
12885 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12886 NULL_RTX, NULL_RTX);
12888 else
12890 int i;
12891 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
12892 if ((regs_ever_live[info->first_gp_reg_save+i]
12893 && (! call_used_regs[info->first_gp_reg_save+i]
12894 || (i+info->first_gp_reg_save
12895 == RS6000_PIC_OFFSET_TABLE_REGNUM
12896 && TARGET_TOC && TARGET_MINIMAL_TOC)))
12897 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
12898 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
12899 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
12901 rtx addr, reg, mem;
12902 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
12904 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
12906 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
12907 rtx b;
12909 if (!SPE_CONST_OFFSET_OK (offset))
12911 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
12912 emit_move_insn (b, GEN_INT (offset));
12914 else
12915 b = GEN_INT (offset);
12917 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
12918 mem = gen_rtx_MEM (V2SImode, addr);
12919 set_mem_alias_set (mem, rs6000_sr_alias_set);
12920 insn = emit_move_insn (mem, reg);
12922 if (GET_CODE (b) == CONST_INT)
12923 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12924 NULL_RTX, NULL_RTX);
12925 else
12926 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12927 b, GEN_INT (offset));
12929 else
12931 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12932 GEN_INT (info->gp_save_offset
12933 + sp_offset
12934 + reg_size * i));
12935 mem = gen_rtx_MEM (reg_mode, addr);
12936 set_mem_alias_set (mem, rs6000_sr_alias_set);
12938 insn = emit_move_insn (mem, reg);
12939 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12940 NULL_RTX, NULL_RTX);
12945 /* ??? There's no need to emit actual instructions here, but it's the
12946 easiest way to get the frame unwind information emitted. */
12947 if (current_function_calls_eh_return)
12949 unsigned int i, regno;
12951 /* In AIX ABI we need to pretend we save r2 here. */
12952 if (TARGET_AIX)
12954 rtx addr, reg, mem;
12956 reg = gen_rtx_REG (reg_mode, 2);
12957 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12958 GEN_INT (sp_offset + 5 * reg_size));
12959 mem = gen_rtx_MEM (reg_mode, addr);
12960 set_mem_alias_set (mem, rs6000_sr_alias_set);
12962 insn = emit_move_insn (mem, reg);
12963 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12964 NULL_RTX, NULL_RTX);
12965 PATTERN (insn) = gen_blockage ();
12968 for (i = 0; ; ++i)
12970 regno = EH_RETURN_DATA_REGNO (i);
12971 if (regno == INVALID_REGNUM)
12972 break;
12974 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
12975 info->ehrd_offset + sp_offset
12976 + reg_size * (int) i,
12977 info->total_size);
12981 /* Save lr if we used it. */
12982 if (info->lr_save_p)
12984 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12985 GEN_INT (info->lr_save_offset + sp_offset));
12986 rtx reg = gen_rtx_REG (Pmode, 0);
12987 rtx mem = gen_rtx_MEM (Pmode, addr);
12988 /* This should not be of rs6000_sr_alias_set, because of
12989 __builtin_return_address. */
12991 insn = emit_move_insn (mem, reg);
12992 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12993 NULL_RTX, NULL_RTX);
12996 /* Save CR if we use any that must be preserved. */
12997 if (info->cr_save_p)
12999 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13000 GEN_INT (info->cr_save_offset + sp_offset));
13001 rtx mem = gen_rtx_MEM (SImode, addr);
13002 /* See the large comment above about why CR2_REGNO is used. */
13003 rtx magic_eh_cr_reg = gen_rtx_REG (SImode, CR2_REGNO);
13005 set_mem_alias_set (mem, rs6000_sr_alias_set);
13007 /* If r12 was used to hold the original sp, copy cr into r0 now
13008 that it's free. */
13009 if (REGNO (frame_reg_rtx) == 12)
13011 rtx set;
13013 cr_save_rtx = gen_rtx_REG (SImode, 0);
13014 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
13015 RTX_FRAME_RELATED_P (insn) = 1;
13016 set = gen_rtx_SET (VOIDmode, cr_save_rtx, magic_eh_cr_reg);
13017 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
13018 set,
13019 REG_NOTES (insn));
13022 insn = emit_move_insn (mem, cr_save_rtx);
13024 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
13025 NULL_RTX, NULL_RTX);
13028 /* Update stack and set back pointer unless this is V.4,
13029 for which it was done previously. */
13030 if (info->push_p
13031 && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
13032 rs6000_emit_allocate_stack (info->total_size, FALSE);
13034 /* Set frame pointer, if needed. */
13035 if (frame_pointer_needed)
13037 insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
13038 sp_reg_rtx);
13039 RTX_FRAME_RELATED_P (insn) = 1;
13042 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
13043 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
13044 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
13045 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
13047 /* If emit_load_toc_table will use the link register, we need to save
13048 it. We use R12 for this purpose because emit_load_toc_table
13049 can use register 0. This allows us to use a plain 'blr' to return
13050 from the procedure more often. */
13051 int save_LR_around_toc_setup = (TARGET_ELF
13052 && DEFAULT_ABI != ABI_AIX
13053 && flag_pic
13054 && ! info->lr_save_p
13055 && EXIT_BLOCK_PTR->pred != NULL);
13056 if (save_LR_around_toc_setup)
13058 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
13060 insn = emit_move_insn (frame_ptr_rtx, lr);
13061 rs6000_maybe_dead (insn);
13062 RTX_FRAME_RELATED_P (insn) = 1;
13064 rs6000_emit_load_toc_table (TRUE);
13066 insn = emit_move_insn (lr, frame_ptr_rtx);
13067 rs6000_maybe_dead (insn);
13068 RTX_FRAME_RELATED_P (insn) = 1;
13070 else
13071 rs6000_emit_load_toc_table (TRUE);
13074 #if TARGET_MACHO
13075 if (DEFAULT_ABI == ABI_DARWIN
13076 && flag_pic && current_function_uses_pic_offset_table)
13078 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
13079 rtx src = machopic_function_base_sym ();
13081 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (lr, src)));
13083 insn = emit_move_insn (gen_rtx_REG (Pmode,
13084 RS6000_PIC_OFFSET_TABLE_REGNUM),
13085 lr);
13086 rs6000_maybe_dead (insn);
13088 #endif
13091 /* Write function prologue. */
13093 static void
13094 rs6000_output_function_prologue (FILE *file,
13095 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
13097 rs6000_stack_t *info = rs6000_stack_info ();
13099 if (TARGET_DEBUG_STACK)
13100 debug_stack_info (info);
/* Declare the out-of-line FP save/restore helpers (the _savefN/_restfN
   style routines) when FP saving is not done inline, so the assembler
   can resolve the calls emitted by the prologue/epilogue expanders.  */
13102 /* Write .extern for any function we will call to save and restore
13103 fp values. */
13104 if (info->first_fp_reg_save < 64
13105 && !FP_SAVE_INLINE (info->first_fp_reg_save))
13106 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
13107 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
13108 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
13109 RESTORE_FP_SUFFIX);
/* common_mode_defined is a file-scope flag: emit these .externs at
   most once per translation unit.  */
13111 /* Write .extern for AIX common mode routines, if needed. */
13112 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
13114 fputs ("\t.extern __mulh\n", file);
13115 fputs ("\t.extern __mull\n", file);
13116 fputs ("\t.extern __divss\n", file);
13117 fputs ("\t.extern __divus\n", file);
13118 fputs ("\t.extern __quoss\n", file);
13119 fputs ("\t.extern __quous\n", file);
13120 common_mode_defined = 1;
/* Fallback when the target has no RTL prologue expander: expand the
   prologue into a detached insn sequence here and run final () over it
   so the assembly is written directly to FILE.  */
13123 if (! HAVE_prologue)
13125 start_sequence ();
13127 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
13128 the "toplevel" insn chain. */
13129 emit_note (NOTE_INSN_DELETED);
13130 rs6000_emit_prologue ();
13131 emit_note (NOTE_INSN_DELETED);
/* final () consults INSN_ADDRESSES; assign dummy 4-byte-spaced
   addresses so it does not crash on the fresh sequence.  */
13133 /* Expand INSN_ADDRESSES so final() doesn't crash. */
13135 rtx insn;
13136 unsigned addr = 0;
13137 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
13139 INSN_ADDRESSES_NEW (insn, addr);
13140 addr += 4;
13144 if (TARGET_DEBUG_STACK)
13145 debug_rtx_list (get_insns (), 100);
13146 final (get_insns (), file, FALSE, FALSE);
13147 end_sequence ();
/* Bump the counter used to generate unique local labels (e.g. for
   PIC base labels) for the next function.  */
13150 rs6000_pic_labelno++;
13153 /* Emit function epilogue as insns.
13155 At present, dwarf2out_frame_debug_expr doesn't understand
13156 register restores, so we don't bother setting RTX_FRAME_RELATED_P
13157 anywhere in the epilogue. Most of the insns below would in any case
13158 need special notes to explain where r11 is in relation to the stack. */
13160 void
13161 rs6000_emit_epilogue (int sibcall)
13163 rs6000_stack_t *info;
13164 int restoring_FPRs_inline;
13165 int using_load_multiple;
13166 int using_mfcr_multiple;
13167 int use_backchain_to_restore_sp;
13168 int sp_offset = 0;
13169 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
13170 rtx frame_reg_rtx = sp_reg_rtx;
13171 enum machine_mode reg_mode = Pmode;
13172 int reg_size = TARGET_32BIT ? 4 : 8;
13173 int i;
13175 info = rs6000_stack_info ();
/* Under the SPE ABI the GPRs are 64 bits wide even on a 32-bit
   target, so restores use V2SImode 8-byte slots.  */
13177 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
13179 reg_mode = V2SImode;
13180 reg_size = 8;
/* lmw exists only in 32-bit mode; it cannot be used when SPE needs
   full 64-bit restores, and is only worthwhile for two or more GPRs
   (first_gp_reg_save < 31).  */
13183 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
13184 && (!TARGET_SPE_ABI
13185 || info->spe_64bit_regs_used == 0)
13186 && info->first_gp_reg_save < 31);
/* FPRs are restored inline (rather than via the out-of-line _restf
   routine) for sibcalls and eh_return, since the routine would
   return to our caller directly.  */
13187 restoring_FPRs_inline = (sibcall
13188 || current_function_calls_eh_return
13189 || info->first_fp_reg_save == 64
13190 || FP_SAVE_INLINE (info->first_fp_reg_save));
13191 use_backchain_to_restore_sp = (frame_pointer_needed
13192 || current_function_calls_alloca
13193 || info->total_size > 32767);
/* On these processors (and at -Os) one mtcrf restoring several CR
   fields beats a sequence of single-field moves.  */
13194 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
13195 || rs6000_cpu == PROCESSOR_PPC603
13196 || rs6000_cpu == PROCESSOR_PPC750
13197 || optimize_size);
13199 /* If we have a frame pointer, a call to alloca, or a large stack
13200 frame, restore the old stack pointer using the backchain. Otherwise,
13201 we know what size to update it with. */
13202 if (use_backchain_to_restore_sp)
13204 /* Under V.4, don't reset the stack pointer until after we're done
13205 loading the saved registers. */
13206 if (DEFAULT_ABI == ABI_V4)
13207 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
13209 emit_move_insn (frame_reg_rtx,
13210 gen_rtx_MEM (Pmode, sp_reg_rtx));
13213 else if (info->push_p)
/* For V.4 and eh_return, keep sp where it is and address the save
   area through sp_offset; otherwise pop the frame now.  */
13215 if (DEFAULT_ABI == ABI_V4
13216 || current_function_calls_eh_return)
13217 sp_offset = info->total_size;
13218 else
13220 emit_insn (TARGET_32BIT
13221 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
13222 GEN_INT (info->total_size))
13223 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
13224 GEN_INT (info->total_size)));
13228 /* Restore AltiVec registers if needed. */
13229 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
13231 int i;
13233 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
13234 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
13236 rtx addr, areg, mem;
/* r0 holds the slot offset; the vector load uses [reg+reg]
   addressing since AltiVec has no reg+displacement form.  */
13238 areg = gen_rtx_REG (Pmode, 0);
13239 emit_move_insn
13240 (areg, GEN_INT (info->altivec_save_offset
13241 + sp_offset
13242 + 16 * (i - info->first_altivec_reg_save)));
13244 /* AltiVec addressing mode is [reg+reg]. */
13245 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
13246 mem = gen_rtx_MEM (V4SImode, addr);
13247 set_mem_alias_set (mem, rs6000_sr_alias_set);
13249 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
13253 /* Restore VRSAVE if needed. */
13254 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
13255 && info->vrsave_mask != 0)
13257 rtx addr, mem, reg;
13259 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13260 GEN_INT (info->vrsave_save_offset + sp_offset));
13261 mem = gen_rtx_MEM (SImode, addr);
13262 set_mem_alias_set (mem, rs6000_sr_alias_set);
13263 reg = gen_rtx_REG (SImode, 12);
13264 emit_move_insn (reg, mem);
13266 emit_insn (generate_set_vrsave (reg, info, 1));
/* LR comes back via r0 and CR via r12; the actual mtlr/mtcrf are
   issued later so the loads can overlap other restores.  */
13269 /* Get the old lr if we saved it. */
13270 if (info->lr_save_p)
13272 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
13273 info->lr_save_offset + sp_offset);
13275 set_mem_alias_set (mem, rs6000_sr_alias_set);
13277 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
13280 /* Get the old cr if we saved it. */
13281 if (info->cr_save_p)
13283 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13284 GEN_INT (info->cr_save_offset + sp_offset));
13285 rtx mem = gen_rtx_MEM (SImode, addr);
13287 set_mem_alias_set (mem, rs6000_sr_alias_set);
13289 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
13292 /* Set LR here to try to overlap restores below. */
13293 if (info->lr_save_p)
13294 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
13295 gen_rtx_REG (Pmode, 0));
13297 /* Load exception handler data registers, if needed. */
13298 if (current_function_calls_eh_return)
13300 unsigned int i, regno;
/* Matches the pretend-save of r2 (the TOC pointer) done by the
   prologue for the AIX ABI's fixed slot at sp+5*reg_size.  */
13302 if (TARGET_AIX)
13304 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13305 GEN_INT (sp_offset + 5 * reg_size));
13306 rtx mem = gen_rtx_MEM (reg_mode, addr);
13308 set_mem_alias_set (mem, rs6000_sr_alias_set);
13310 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
13313 for (i = 0; ; ++i)
13315 rtx mem;
13317 regno = EH_RETURN_DATA_REGNO (i);
13318 if (regno == INVALID_REGNUM)
13319 break;
13321 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
13322 info->ehrd_offset + sp_offset
13323 + reg_size * (int) i);
13324 set_mem_alias_set (mem, rs6000_sr_alias_set);
13326 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
13330 /* Restore GPRs. This is done as a PARALLEL if we are using
13331 the load-multiple instructions. */
13332 if (using_load_multiple)
13334 rtvec p;
13335 p = rtvec_alloc (32 - info->first_gp_reg_save);
13336 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
13338 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13339 GEN_INT (info->gp_save_offset
13340 + sp_offset
13341 + reg_size * i));
13342 rtx mem = gen_rtx_MEM (reg_mode, addr);
13344 set_mem_alias_set (mem, rs6000_sr_alias_set);
13346 RTVEC_ELT (p, i) =
13347 gen_rtx_SET (VOIDmode,
13348 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
13349 mem);
13351 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
/* Otherwise restore one GPR at a time, mirroring the save-side
   condition: live call-saved regs, plus the PIC/TOC register when
   the prologue was forced to save it.  */
13353 else
13354 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
13355 if ((regs_ever_live[info->first_gp_reg_save+i]
13356 && (! call_used_regs[info->first_gp_reg_save+i]
13357 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
13358 && TARGET_TOC && TARGET_MINIMAL_TOC)))
13359 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
13360 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
13361 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
13363 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13364 GEN_INT (info->gp_save_offset
13365 + sp_offset
13366 + reg_size * i));
13367 rtx mem = gen_rtx_MEM (reg_mode, addr);
13369 /* Restore 64-bit quantities for SPE. */
13370 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
13372 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
13373 rtx b;
/* The SPE load insn has a limited displacement range; fall
   back to a scratch-register index when it won't reach.  */
13375 if (!SPE_CONST_OFFSET_OK (offset))
13377 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
13378 emit_move_insn (b, GEN_INT (offset));
13380 else
13381 b = GEN_INT (offset);
13383 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
13384 mem = gen_rtx_MEM (V2SImode, addr);
13387 set_mem_alias_set (mem, rs6000_sr_alias_set);
13389 emit_move_insn (gen_rtx_REG (reg_mode,
13390 info->first_gp_reg_save + i), mem);
13393 /* Restore fpr's if we need to do it without calling a function. */
13394 if (restoring_FPRs_inline)
13395 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
13396 if ((regs_ever_live[info->first_fp_reg_save+i]
13397 && ! call_used_regs[info->first_fp_reg_save+i]))
13399 rtx addr, mem;
13400 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
13401 GEN_INT (info->fp_save_offset
13402 + sp_offset
13403 + 8 * i));
13404 mem = gen_rtx_MEM (DFmode, addr);
13405 set_mem_alias_set (mem, rs6000_sr_alias_set);
13407 emit_move_insn (gen_rtx_REG (DFmode,
13408 info->first_fp_reg_save + i),
13409 mem);
13412 /* If we saved cr, restore it here. Just those that were used. */
13413 if (info->cr_save_p)
13415 rtx r12_rtx = gen_rtx_REG (SImode, 12);
13416 int count = 0;
/* Count live call-saved CR fields; cr_save_p implies there must
   be at least one, hence the abort below.  */
13418 if (using_mfcr_multiple)
13420 for (i = 0; i < 8; i++)
13421 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
13422 count++;
13423 if (count == 0)
13424 abort ();
13427 if (using_mfcr_multiple && count > 1)
/* One mtcrf moving several fields: build a PARALLEL with one
   SET per live CR field, each an UNSPEC of (r12, field mask).  */
13429 rtvec p;
13430 int ndx;
13432 p = rtvec_alloc (count);
13434 ndx = 0;
13435 for (i = 0; i < 8; i++)
13436 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
13438 rtvec r = rtvec_alloc (2);
13439 RTVEC_ELT (r, 0) = r12_rtx;
13440 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
13441 RTVEC_ELT (p, ndx) =
13442 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
13443 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
13444 ndx++;
13446 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
13447 if (ndx != count)
13448 abort ();
13450 else
13451 for (i = 0; i < 8; i++)
13452 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
13454 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
13455 CR0_REGNO+i),
13456 r12_rtx));
13460 /* If this is V.4, unwind the stack pointer after all of the loads
13461 have been done. We need to emit a block here so that sched
13462 doesn't decide to move the sp change before the register restores
13463 (which may not have any obvious dependency on the stack). This
13464 doesn't hurt performance, because there is no scheduling that can
13465 be done after this point. */
13466 if (DEFAULT_ABI == ABI_V4
13467 || current_function_calls_eh_return)
13469 if (frame_reg_rtx != sp_reg_rtx)
13470 rs6000_emit_stack_tie ();
13472 if (use_backchain_to_restore_sp)
13474 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
13476 else if (sp_offset != 0)
13478 emit_insn (TARGET_32BIT
13479 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
13480 GEN_INT (sp_offset))
13481 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
13482 GEN_INT (sp_offset)));
/* For eh_return, add in the stack adjustment computed by the
   exception machinery (EH_RETURN_STACKADJ_RTX).  */
13486 if (current_function_calls_eh_return)
13488 rtx sa = EH_RETURN_STACKADJ_RTX;
13489 emit_insn (TARGET_32BIT
13490 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
13491 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
/* Emit the return itself unless this is a sibcall (the sibcall
   pattern supplies its own jump).  */
13494 if (!sibcall)
13496 rtvec p;
13497 if (! restoring_FPRs_inline)
13498 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
13499 else
13500 p = rtvec_alloc (2);
13502 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
13503 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
13504 gen_rtx_REG (Pmode,
13505 LINK_REGISTER_REGNUM));
13507 /* If we have to restore more than two FP registers, branch to the
13508 restore function. It will return to our caller. */
13509 if (! restoring_FPRs_inline)
13511 int i;
13512 char rname[30];
13513 const char *alloc_rname;
13515 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
13516 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
13517 alloc_rname = ggc_strdup (rname);
13518 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
13519 gen_rtx_SYMBOL_REF (Pmode,
13520 alloc_rname));
/* Describe the FPR restores the out-of-line routine performs so
   the RTL correctly models the register loads.  */
13522 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
13524 rtx addr, mem;
13525 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
13526 GEN_INT (info->fp_save_offset + 8*i));
13527 mem = gen_rtx_MEM (DFmode, addr);
13528 set_mem_alias_set (mem, rs6000_sr_alias_set);
13530 RTVEC_ELT (p, i+3) =
13531 gen_rtx_SET (VOIDmode,
13532 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
13533 mem);
13537 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
13541 /* Write function epilogue. */
13543 static void
13544 rs6000_output_function_epilogue (FILE *file,
13545 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
13547 rs6000_stack_t *info = rs6000_stack_info ();
/* Fallback when the target has no RTL epilogue expander: expand the
   epilogue into a detached insn sequence and run final () over it,
   mirroring rs6000_output_function_prologue.  */
13549 if (! HAVE_epilogue)
13551 rtx insn = get_last_insn ();
13552 /* If the last insn was a BARRIER, we don't have to write anything except
13553 the trace table. */
13554 if (GET_CODE (insn) == NOTE)
13555 insn = prev_nonnote_insn (insn);
13556 if (insn == 0 || GET_CODE (insn) != BARRIER)
13558 /* This is slightly ugly, but at least we don't have two
13559 copies of the epilogue-emitting code. */
13560 start_sequence ();
13562 /* A NOTE_INSN_DELETED is supposed to be at the start
13563 and end of the "toplevel" insn chain. */
13564 emit_note (NOTE_INSN_DELETED);
13565 rs6000_emit_epilogue (FALSE);
13566 emit_note (NOTE_INSN_DELETED);
/* final () consults INSN_ADDRESSES; assign dummy 4-byte-spaced
   addresses so it does not crash on the fresh sequence.  */
13568 /* Expand INSN_ADDRESSES so final() doesn't crash. */
13570 rtx insn;
13571 unsigned addr = 0;
13572 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
13574 INSN_ADDRESSES_NEW (insn, addr);
13575 addr += 4;
13579 if (TARGET_DEBUG_STACK)
13580 debug_rtx_list (get_insns (), 100);
13581 final (get_insns (), file, FALSE, FALSE);
13582 end_sequence ();
13586 #if TARGET_MACHO
13587 macho_branch_islands ();
13588 /* Mach-O doesn't support labels at the end of objects, so if
13589 it looks like we might want one, insert a NOP. */
13591 rtx insn = get_last_insn ();
13592 while (insn
13593 && NOTE_P (insn)
13594 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED_LABEL)
13595 insn = PREV_INSN (insn);
13596 if (insn
13597 && (LABEL_P (insn)
13598 || (NOTE_P (insn)
13599 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)))
13600 fputs ("\tnop\n", file);
13602 #endif
13604 /* Output a traceback table here. See /usr/include/sys/debug.h for info
13605 on its format.
13607 We don't output a traceback table if -finhibit-size-directive was
13608 used. The documentation for -finhibit-size-directive reads
13609 ``don't output a @code{.size} assembler directive, or anything
13610 else that would cause trouble if the function is split in the
13611 middle, and the two halves are placed at locations far apart in
13612 memory.'' The traceback table has this property, since it
13613 includes the offset from the start of the function to the
13614 traceback table itself.
13616 System V.4 Powerpc's (and the embedded ABI derived from it) use a
13617 different traceback table. */
13618 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
13619 && rs6000_traceback != traceback_none)
13621 const char *fname = NULL;
13622 const char *language_string = lang_hooks.name;
13623 int fixed_parms = 0, float_parms = 0, parm_info = 0;
13624 int i;
13625 int optional_tbtab;
/* optional_tbtab controls whether the variable-length optional
   fields (parameter info, function name, etc.) are emitted.  */
13627 if (rs6000_traceback == traceback_full)
13628 optional_tbtab = 1;
13629 else if (rs6000_traceback == traceback_part)
13630 optional_tbtab = 0;
13631 else
13632 optional_tbtab = !optimize_size && !TARGET_ELF;
13634 if (optional_tbtab)
13636 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
13637 while (*fname == '.') /* V.4 encodes . in the name */
13638 fname++;
13640 /* Need label immediately before tbtab, so we can compute
13641 its offset from the function start. */
13642 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
13643 ASM_OUTPUT_LABEL (file, fname);
13646 /* The .tbtab pseudo-op can only be used for the first eight
13647 expressions, since it can't handle the possibly variable
13648 length fields that follow. However, if you omit the optional
13649 fields, the assembler outputs zeros for all optional fields
13650 anyways, giving each variable length field its minimum length
13651 (as defined in sys/debug.h). Thus we can not use the .tbtab
13652 pseudo-op at all. */
13654 /* An all-zero word flags the start of the tbtab, for debuggers
13655 that have to find it by searching forward from the entry
13656 point or from the current pc. */
13657 fputs ("\t.long 0\n", file);
13659 /* Tbtab format type. Use format type 0. */
13660 fputs ("\t.byte 0,", file);
13662 /* Language type. Unfortunately, there does not seem to be any
13663 official way to discover the language being compiled, so we
13664 use language_string.
13665 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
13666 Java is 13. Objective-C is 14. */
13667 if (! strcmp (language_string, "GNU C"))
13668 i = 0;
13669 else if (! strcmp (language_string, "GNU F77")
13670 || ! strcmp (language_string, "GNU F95"))
13671 i = 1;
13672 else if (! strcmp (language_string, "GNU Pascal"))
13673 i = 2;
13674 else if (! strcmp (language_string, "GNU Ada"))
13675 i = 3;
13676 else if (! strcmp (language_string, "GNU C++"))
13677 i = 9;
13678 else if (! strcmp (language_string, "GNU Java"))
13679 i = 13;
13680 else if (! strcmp (language_string, "GNU Objective-C"))
13681 i = 14;
/* An unrecognized front end name is a compiler bug: abort rather
   than emit a wrong language code into the traceback table.  */
13682 else
13683 abort ();
13684 fprintf (file, "%d,", i);
13686 /* 8 single bit fields: global linkage (not set for C extern linkage,
13687 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
13688 from start of procedure stored in tbtab, internal function, function
13689 has controlled storage, function has no toc, function uses fp,
13690 function logs/aborts fp operations. */
13691 /* Assume that fp operations are used if any fp reg must be saved. */
13692 fprintf (file, "%d,",
13693 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
13695 /* 6 bitfields: function is interrupt handler, name present in
13696 proc table, function calls alloca, on condition directives
13697 (controls stack walks, 3 bits), saves condition reg, saves
13698 link reg. */
13699 /* The `function calls alloca' bit seems to be set whenever reg 31 is
13700 set up as a frame pointer, even when there is no alloca call. */
13701 fprintf (file, "%d,",
13702 ((optional_tbtab << 6)
13703 | ((optional_tbtab & frame_pointer_needed) << 5)
13704 | (info->cr_save_p << 1)
13705 | (info->lr_save_p)));
13707 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
13708 (6 bits). */
13709 fprintf (file, "%d,",
13710 (info->push_p << 7) | (64 - info->first_fp_reg_save));
13712 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
13713 fprintf (file, "%d,", (32 - first_reg_to_save ()));
13715 if (optional_tbtab)
13717 /* Compute the parameter info from the function decl argument
13718 list. */
13719 tree decl;
13720 int next_parm_info_bit = 31;
13722 for (decl = DECL_ARGUMENTS (current_function_decl);
13723 decl; decl = TREE_CHAIN (decl))
13725 rtx parameter = DECL_INCOMING_RTL (decl);
13726 enum machine_mode mode = GET_MODE (parameter);
/* Only parameters arriving in registers are described; stack
   parameters are skipped entirely (see comment at 13780ff).  */
13728 if (GET_CODE (parameter) == REG)
13730 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
13732 int bits;
13734 float_parms++;
/* FP parameters use a 2-bit code: 10 = single, 11 = double
   (TFmode is counted as double here).  */
13736 if (mode == SFmode)
13737 bits = 0x2;
13738 else if (mode == DFmode || mode == TFmode)
13739 bits = 0x3;
13740 else
13741 abort ();
13743 /* If only one bit will fit, don't or in this entry. */
13744 if (next_parm_info_bit > 0)
13745 parm_info |= (bits << (next_parm_info_bit - 1));
13746 next_parm_info_bit -= 2;
13748 else
/* Fixed-point parameters use one 0 bit per word occupied.  */
13750 fixed_parms += ((GET_MODE_SIZE (mode)
13751 + (UNITS_PER_WORD - 1))
13752 / UNITS_PER_WORD);
13753 next_parm_info_bit -= 1;
13759 /* Number of fixed point parameters. */
13760 /* This is actually the number of words of fixed point parameters; thus
13761 an 8 byte struct counts as 2; and thus the maximum value is 8. */
13762 fprintf (file, "%d,", fixed_parms);
13764 /* 2 bitfields: number of floating point parameters (7 bits), parameters
13765 all on stack. */
13766 /* This is actually the number of fp registers that hold parameters;
13767 and thus the maximum value is 13. */
13768 /* Set parameters on stack bit if parameters are not in their original
13769 registers, regardless of whether they are on the stack? Xlc
13770 seems to set the bit when not optimizing. */
13771 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
13773 if (! optional_tbtab)
13774 return;
13776 /* Optional fields follow. Some are variable length. */
13778 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
13779 11 double float. */
13780 /* There is an entry for each parameter in a register, in the order that
13781 they occur in the parameter list. Any intervening arguments on the
13782 stack are ignored. If the list overflows a long (max possible length
13783 34 bits) then completely leave off all elements that don't fit. */
13784 /* Only emit this long if there was at least one parameter. */
13785 if (fixed_parms || float_parms)
13786 fprintf (file, "\t.long %d\n", parm_info)
13788 /* Offset from start of code to tb table. */
13789 fputs ("\t.long ", file);
13790 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
13791 #if TARGET_AIX
13792 RS6000_OUTPUT_BASENAME (file, fname);
13793 #else
13794 assemble_name (file, fname);
13795 #endif
13796 fputs ("-.", file);
13797 #if TARGET_AIX
13798 RS6000_OUTPUT_BASENAME (file, fname);
13799 #else
13800 assemble_name (file, fname);
13801 #endif
13802 putc ('\n', file);
13804 /* Interrupt handler mask. */
13805 /* Omit this long, since we never set the interrupt handler bit
13806 above. */
13808 /* Number of CTL (controlled storage) anchors. */
13809 /* Omit this long, since the has_ctl bit is never set above. */
13811 /* Displacement into stack of each CTL anchor. */
13812 /* Omit this list of longs, because there are no CTL anchors. */
13814 /* Length of function name. */
13815 if (*fname == '*')
13816 ++fname;
13817 fprintf (file, "\t.short %d\n", (int) strlen (fname));
13819 /* Function name. */
13820 assemble_string (fname, strlen (fname));
13822 /* Register for alloca automatic storage; this is always reg 31.
13823 Only emit this if the alloca bit was set above. */
13824 if (frame_pointer_needed)
13825 fputs ("\t.byte 31\n", file);
13827 fputs ("\t.align 2\n", file);
13831 /* A C compound statement that outputs the assembler code for a thunk
13832 function, used to implement C++ virtual function calls with
13833 multiple inheritance. The thunk acts as a wrapper around a virtual
13834 function, adjusting the implicit object parameter before handing
13835 control off to the real function.
13837 First, emit code to add the integer DELTA to the location that
13838 contains the incoming first argument. Assume that this argument
13839 contains a pointer, and is the one used to pass the `this' pointer
13840 in C++. This is the incoming argument *before* the function
13841 prologue, e.g. `%o0' on a sparc. The addition must preserve the
13842 values of all other incoming arguments.
13844 After the addition, emit code to jump to FUNCTION, which is a
13845 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
13846 not touch the return address. Hence returning from FUNCTION will
13847 return to whoever called the current `thunk'.
13849 The effect must be as if FUNCTION had been called directly with the
13850 adjusted first argument. This macro is responsible for emitting
13851 all of the code for a thunk function; output_function_prologue()
13852 and output_function_epilogue() are not invoked.
13854 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
13855 been extracted from it.) It might possibly be useful on some
13856 targets, but probably not.
13858 If you do not define this macro, the target-independent code in the
13859 C++ frontend will generate a less efficient heavyweight thunk that
13860 calls FUNCTION instead of jumping to it. The generic approach does
13861 not support varargs. */
/* Output a thunk implementing a C++ multiple-inheritance virtual call:
   adjust the incoming `this' pointer by the constant DELTA (and, when
   VCALL_OFFSET is nonzero, by a value loaded from the vtable), then
   tail-call FUNCTION.  The finished assembly is written to FILE.
   THUNK_FNDECL is unused (DELTA/FUNCTION were already extracted).  */
13863 static void
13864 rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
13865 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
13866 tree function)
13868 rtx this, insn, funexp;
/* Pretend compilation is past reload so the late-RTL patterns emitted
   below are accepted; these flags are reset before returning.  */
13870 reload_completed = 1;
13871 epilogue_completed = 1;
13872 no_new_pseudos = 1;
13873 reset_block_changes ();
13875 /* Mark the end of the (empty) prologue. */
13876 emit_note (NOTE_INSN_PROLOGUE_END);
13878 /* Find the "this" pointer. If the function returns a structure,
13879 the structure return pointer is in r3, so `this' arrives in r4. */
13880 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
13881 this = gen_rtx_REG (Pmode, 4);
13882 else
13883 this = gen_rtx_REG (Pmode, 3);
13885 /* Apply the constant offset, if required. */
13886 if (delta)
13888 rtx delta_rtx = GEN_INT (delta);
13889 emit_insn (TARGET_32BIT
13890 ? gen_addsi3 (this, this, delta_rtx)
13891 : gen_adddi3 (this, this, delta_rtx));
13894 /* Apply the offset from the vtable, if required. */
13895 if (vcall_offset)
13897 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
13898 rtx tmp = gen_rtx_REG (Pmode, 12);
/* Load the vtable pointer into r12, then fetch the adjustment stored
   at VCALL_OFFSET within the vtable.  Offsets that do not fit a
   signed 16-bit displacement need an explicit add first.  */
13900 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
13901 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
13903 emit_insn (TARGET_32BIT
13904 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
13905 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
13906 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
13908 else
13910 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
13912 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
13914 emit_insn (TARGET_32BIT
13915 ? gen_addsi3 (this, this, tmp)
13916 : gen_adddi3 (this, this, tmp));
13919 /* Generate a tail call to the target function. */
13920 if (!TREE_USED (function))
13922 assemble_external (function);
13923 TREE_USED (function) = 1;
13925 funexp = XEXP (DECL_RTL (function), 0);
13926 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
13928 #if TARGET_MACHO
13929 if (MACHOPIC_INDIRECT)
13930 funexp = machopic_indirect_call_target (funexp);
13931 #endif
13933 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
13934 generate sibcall RTL explicitly to avoid constraint abort. */
13935 insn = emit_call_insn (
13936 gen_rtx_PARALLEL (VOIDmode,
13937 gen_rtvec (4,
13938 gen_rtx_CALL (VOIDmode,
13939 funexp, const0_rtx),
13940 gen_rtx_USE (VOIDmode, const0_rtx),
13941 gen_rtx_USE (VOIDmode,
13942 gen_rtx_REG (SImode,
13943 LINK_REGISTER_REGNUM)),
13944 gen_rtx_RETURN (VOIDmode))));
13945 SIBLING_CALL_P (insn) = 1;
13946 emit_barrier ();
13948 /* Run just enough of rest_of_compilation to get the insns emitted.
13949 There's not really enough bulk here to make other passes such as
13950 instruction scheduling worth while. Note that use_thunk calls
13951 assemble_start_function and assemble_end_function. */
13952 insn = get_insns ();
13953 insn_locators_initialize ();
13954 shorten_branches (insn);
13955 final_start_function (insn, file, 1);
13956 final (insn, file, 1, 0);
13957 final_end_function ();
/* Restore the normal (pre-reload) compilation state.  */
13959 reload_completed = 0;
13960 epilogue_completed = 0;
13961 no_new_pseudos = 0;
13964 /* A quick summary of the various types of 'constant-pool tables'
13965 under PowerPC:
13967 Target Flags Name One table per
13968 AIX (none) AIX TOC object file
13969 AIX -mfull-toc AIX TOC object file
13970 AIX -mminimal-toc AIX minimal TOC translation unit
13971 SVR4/EABI (none) SVR4 SDATA object file
13972 SVR4/EABI -fpic SVR4 pic object file
13973 SVR4/EABI -fPIC SVR4 PIC translation unit
13974 SVR4/EABI -mrelocatable EABI TOC function
13975 SVR4/EABI -maix AIX TOC object file
13976 SVR4/EABI -maix -mminimal-toc
13977 AIX minimal TOC translation unit
13979 Name Reg. Set by entries contains:
13980 made by addrs? fp? sum?
13982 AIX TOC 2 crt0 as Y option option
13983 AIX minimal TOC 30 prolog gcc Y Y option
13984 SVR4 SDATA 13 crt0 gcc N Y N
13985 SVR4 pic 30 prolog ld Y not yet N
13986 SVR4 PIC 30 prolog gcc Y option option
13987 EABI TOC 30 prolog gcc Y option option
13991 /* Hash functions for the hash table. */
/* Compute a hash value for the RTL constant K (used to key TOC
   entries).  Starts from K's rtx code and mode, then folds in each
   operand according to the code's format string, recursing into
   sub-expressions.  */
13993 static unsigned
13994 rs6000_hash_constant (rtx k)
13996 enum rtx_code code = GET_CODE (k);
13997 enum machine_mode mode = GET_MODE (k);
13998 unsigned result = (code << 3) ^ mode;
13999 const char *format;
14000 int flen, fidx;
14002 format = GET_RTX_FORMAT (code);
14003 flen = strlen (format);
14004 fidx = 0;
/* Special-case codes whose operands must be handled outside (or
   partially excluded from) the generic format walk below.  */
14006 switch (code)
14008 case LABEL_REF:
14009 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
14011 case CONST_DOUBLE:
/* With a real mode this is a floating-point constant: hash its
   REAL_VALUE.  A VOIDmode CONST_DOUBLE is a wide integer; hash only
   its first two (value) operands.  */
14012 if (mode != VOIDmode)
14013 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
14014 flen = 2;
14015 break;
14017 case CODE_LABEL:
/* Skip the leading chain/uid operands of a CODE_LABEL.  */
14018 fidx = 3;
14019 break;
14021 default:
14022 break;
/* Generic walk: fold each remaining operand into RESULT according to
   its operand-format letter.  */
14025 for (; fidx < flen; fidx++)
14026 switch (format[fidx])
14028 case 's':
14030 unsigned i, len;
14031 const char *str = XSTR (k, fidx);
14032 len = strlen (str);
14033 result = result * 613 + len;
14034 for (i = 0; i < len; i++)
14035 result = result * 613 + (unsigned) str[i];
14036 break;
14038 case 'u':
14039 case 'e':
14040 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
14041 break;
14042 case 'i':
14043 case 'n':
14044 result = result * 613 + (unsigned) XINT (k, fidx);
14045 break;
14046 case 'w':
/* HOST_WIDE_INT may be wider than `unsigned'; in that case hash it
   in unsigned-sized chunks so no bits are lost.  */
14047 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
14048 result = result * 613 + (unsigned) XWINT (k, fidx);
14049 else
14051 size_t i;
14052 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
14053 result = result * 613 + (unsigned) (XWINT (k, fidx)
14054 >> CHAR_BIT * i);
14056 break;
14057 case '0':
14058 break;
14059 default:
14060 abort ();
14063 return result;
14066 static unsigned
14067 toc_hash_function (const void *hash_entry)
14069 const struct toc_hash_struct *thc =
14070 (const struct toc_hash_struct *) hash_entry;
14071 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
14074 /* Compare H1 and H2 for equivalence. */
14076 static int
14077 toc_hash_eq (const void *h1, const void *h2)
14079 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
14080 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
14082 if (((const struct toc_hash_struct *) h1)->key_mode
14083 != ((const struct toc_hash_struct *) h2)->key_mode)
14084 return 0;
14086 return rtx_equal_p (r1, r2);
/* These are the names given by the C++ front-end to vtables, and
   vtable-like objects.  Ideally, this logic should not be here;
   instead, there should be some programmatic way of inquiring as
   to whether or not an object is a vtable.

   NOTE: the macro now references its argument NAME; previously it
   captured a variable literally called `name' from the expansion
   site, which only worked by accident of the callers' spelling.  */

#define VTABLE_NAME_P(NAME)					\
  (strncmp ("_vt.", (NAME), strlen ("_vt.")) == 0		\
   || strncmp ("_ZTV", (NAME), strlen ("_ZTV")) == 0		\
   || strncmp ("_ZTT", (NAME), strlen ("_ZTT")) == 0		\
   || strncmp ("_ZTI", (NAME), strlen ("_ZTI")) == 0		\
   || strncmp ("_ZTC", (NAME), strlen ("_ZTC")) == 0)
14101 void
14102 rs6000_output_symbol_ref (FILE *file, rtx x)
14104 /* Currently C++ toc references to vtables can be emitted before it
14105 is decided whether the vtable is public or private. If this is
14106 the case, then the linker will eventually complain that there is
14107 a reference to an unknown section. Thus, for vtables only,
14108 we emit the TOC reference to reference the symbol and not the
14109 section. */
14110 const char *name = XSTR (x, 0);
14112 if (VTABLE_NAME_P (name))
14114 RS6000_OUTPUT_BASENAME (file, name);
14116 else
14117 assemble_name (file, name);
14120 /* Output a TOC entry. We derive the entry name from what is being
14121 written. */
/* Emits the internal LC..LABELNO label and the entry directive for
   constant X of MODE into FILE.  Duplicate entries are merged via
   toc_hash_table when the linker will not merge them for us.  */
14123 void
14124 output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
14126 char buf[256];
14127 const char *name = buf;
14128 const char *real_name;
14129 rtx base = x;
14130 int offset = 0;
14132 if (TARGET_NO_TOC)
14133 abort ();
14135 /* When the linker won't eliminate them, don't output duplicate
14136 TOC entries (this happens on AIX if there is any kind of TOC,
14137 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
14138 CODE_LABELs. */
14139 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
14141 struct toc_hash_struct *h;
14142 void * * found;
14144 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
14145 time because GGC is not initialized at that point. */
14146 if (toc_hash_table == NULL)
14147 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
14148 toc_hash_eq, NULL);
14150 h = ggc_alloc (sizeof (*h));
14151 h->key = x;
14152 h->key_mode = mode;
14153 h->labelno = labelno;
14155 found = htab_find_slot (toc_hash_table, h, 1);
14156 if (*found == NULL)
14157 *found = h;
14158 else /* This is indeed a duplicate.
14159 Set this label equal to that label. */
14161 fputs ("\t.set ", file);
14162 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
14163 fprintf (file, "%d,", labelno);
14164 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
14165 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
14166 found)->labelno));
14167 return;
14171 /* If we're going to put a double constant in the TOC, make sure it's
14172 aligned properly when strict alignment is on. */
14173 if (GET_CODE (x) == CONST_DOUBLE
14174 && STRICT_ALIGNMENT
14175 && GET_MODE_BITSIZE (mode) >= 64
14176 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
14177 ASM_OUTPUT_ALIGN (file, 3);
14180 (*targetm.asm_out.internal_label) (file, "LC", labelno);
14182 /* Handle FP constants specially. Note that if we have a minimal
14183 TOC, things we put here aren't actually in the TOC, so we can allow
14184 FP constants. */
/* TFmode (long double): four 32-bit words.  */
14185 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
14187 REAL_VALUE_TYPE rv;
14188 long k[4];
14190 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
14191 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
14193 if (TARGET_64BIT)
14195 if (TARGET_MINIMAL_TOC)
14196 fputs (DOUBLE_INT_ASM_OP, file);
14197 else
14198 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
14199 k[0] & 0xffffffff, k[1] & 0xffffffff,
14200 k[2] & 0xffffffff, k[3] & 0xffffffff);
14201 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
14202 k[0] & 0xffffffff, k[1] & 0xffffffff,
14203 k[2] & 0xffffffff, k[3] & 0xffffffff);
14204 return;
14206 else
14208 if (TARGET_MINIMAL_TOC)
14209 fputs ("\t.long ", file);
14210 else
14211 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
14212 k[0] & 0xffffffff, k[1] & 0xffffffff,
14213 k[2] & 0xffffffff, k[3] & 0xffffffff);
14214 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
14215 k[0] & 0xffffffff, k[1] & 0xffffffff,
14216 k[2] & 0xffffffff, k[3] & 0xffffffff);
14217 return;
/* DFmode (double): two 32-bit words.  */
14220 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
14222 REAL_VALUE_TYPE rv;
14223 long k[2];
14225 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
14226 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
14228 if (TARGET_64BIT)
14230 if (TARGET_MINIMAL_TOC)
14231 fputs (DOUBLE_INT_ASM_OP, file);
14232 else
14233 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
14234 k[0] & 0xffffffff, k[1] & 0xffffffff);
14235 fprintf (file, "0x%lx%08lx\n",
14236 k[0] & 0xffffffff, k[1] & 0xffffffff);
14237 return;
14239 else
14241 if (TARGET_MINIMAL_TOC)
14242 fputs ("\t.long ", file);
14243 else
14244 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
14245 k[0] & 0xffffffff, k[1] & 0xffffffff);
14246 fprintf (file, "0x%lx,0x%lx\n",
14247 k[0] & 0xffffffff, k[1] & 0xffffffff);
14248 return;
/* SFmode (float): one 32-bit word (zero-extended to a doubleword on
   64-bit targets).  */
14251 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
14253 REAL_VALUE_TYPE rv;
14254 long l;
14256 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
14257 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
14259 if (TARGET_64BIT)
14261 if (TARGET_MINIMAL_TOC)
14262 fputs (DOUBLE_INT_ASM_OP, file);
14263 else
14264 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
14265 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
14266 return;
14268 else
14270 if (TARGET_MINIMAL_TOC)
14271 fputs ("\t.long ", file);
14272 else
14273 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
14274 fprintf (file, "0x%lx\n", l & 0xffffffff);
14275 return;
/* Integer constants: split into LOW/HIGH 32-bit halves regardless of
   the host's HOST_WIDE_INT width.  */
14278 else if (GET_MODE (x) == VOIDmode
14279 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
14281 unsigned HOST_WIDE_INT low;
14282 HOST_WIDE_INT high;
14284 if (GET_CODE (x) == CONST_DOUBLE)
14286 low = CONST_DOUBLE_LOW (x);
14287 high = CONST_DOUBLE_HIGH (x);
14289 else
14290 #if HOST_BITS_PER_WIDE_INT == 32
14292 low = INTVAL (x);
14293 high = (low & 0x80000000) ? ~0 : 0;
14295 #else
14297 low = INTVAL (x) & 0xffffffff;
14298 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
14300 #endif
14302 /* TOC entries are always Pmode-sized, but since this
14303 is a bigendian machine then if we're putting smaller
14304 integer constants in the TOC we have to pad them.
14305 (This is still a win over putting the constants in
14306 a separate constant pool, because then we'd have
14307 to have both a TOC entry _and_ the actual constant.)
14309 For a 32-bit target, CONST_INT values are loaded and shifted
14310 entirely within `low' and can be stored in one TOC entry. */
14312 if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
14313 abort ();/* It would be easy to make this work, but it doesn't now. */
14315 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
14317 #if HOST_BITS_PER_WIDE_INT == 32
14318 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
14319 POINTER_SIZE, &low, &high, 0);
14320 #else
14321 low |= high << 32;
14322 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
14323 high = (HOST_WIDE_INT) low >> 32;
14324 low &= 0xffffffff;
14325 #endif
14328 if (TARGET_64BIT)
14330 if (TARGET_MINIMAL_TOC)
14331 fputs (DOUBLE_INT_ASM_OP, file);
14332 else
14333 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
14334 (long) high & 0xffffffff, (long) low & 0xffffffff);
14335 fprintf (file, "0x%lx%08lx\n",
14336 (long) high & 0xffffffff, (long) low & 0xffffffff);
14337 return;
14339 else
14341 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
14343 if (TARGET_MINIMAL_TOC)
14344 fputs ("\t.long ", file);
14345 else
14346 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
14347 (long) high & 0xffffffff, (long) low & 0xffffffff);
14348 fprintf (file, "0x%lx,0x%lx\n",
14349 (long) high & 0xffffffff, (long) low & 0xffffffff);
14351 else
14353 if (TARGET_MINIMAL_TOC)
14354 fputs ("\t.long ", file);
14355 else
14356 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
14357 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
14359 return;
/* Symbolic constants: strip a CONST wrapper into BASE plus OFFSET.  */
14363 if (GET_CODE (x) == CONST)
14365 if (GET_CODE (XEXP (x, 0)) != PLUS)
14366 abort ();
14368 base = XEXP (XEXP (x, 0), 0);
14369 offset = INTVAL (XEXP (XEXP (x, 0), 1));
14372 if (GET_CODE (base) == SYMBOL_REF)
14373 name = XSTR (base, 0);
14374 else if (GET_CODE (base) == LABEL_REF)
14375 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
14376 else if (GET_CODE (base) == CODE_LABEL)
14377 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
14378 else
14379 abort ();
14381 real_name = (*targetm.strip_name_encoding) (name);
14382 if (TARGET_MINIMAL_TOC)
14383 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
14384 else
/* Encode the offset into the .tc name: .N for negative, .P for
   positive, so distinct offsets get distinct entries.  */
14386 fprintf (file, "\t.tc %s", real_name);
14388 if (offset < 0)
14389 fprintf (file, ".N%d", - offset);
14390 else if (offset)
14391 fprintf (file, ".P%d", offset);
14393 fputs ("[TC],", file);
14396 /* Currently C++ toc references to vtables can be emitted before it
14397 is decided whether the vtable is public or private. If this is
14398 the case, then the linker will eventually complain that there is
14399 a TOC reference to an unknown section. Thus, for vtables only,
14400 we emit the TOC reference to reference the symbol and not the
14401 section. */
14402 if (VTABLE_NAME_P (name))
14404 RS6000_OUTPUT_BASENAME (file, name);
14405 if (offset < 0)
14406 fprintf (file, "%d", offset);
14407 else if (offset > 0)
14408 fprintf (file, "+%d", offset);
14410 else
14411 output_addr_const (file, x);
14412 putc ('\n', file);
/* Output an assembler pseudo-op writing the N characters starting at P
   to FILE.

   On the RS/6000 this must use the .byte directive, with printable
   characters grouped into quoted strings and everything else emitted
   as decimal byte values.  The assembler truncates very long strings,
   so strings are broken up artificially well before that limit.  */

void
output_ascii (FILE *file, const char *p, int n)
{
  const char *open_string = "\t.byte \"";   /* Prefix before a quoted run.  */
  const char *open_decimal = "\t.byte ";    /* Prefix before a numeric byte.  */
  const char *pending_close = NULL;         /* Text needed to finish the line.  */
  int in_string_count = 0;                  /* Chars in the current string.  */
  int i;

  for (i = 0; i < n; i++)
    {
      char c = *p++;

      if (c >= ' ' && c < 0177)
	{
	  /* Printable: emit inside a quoted string.  */
	  if (open_string)
	    fputs (open_string, file);
	  putc (c, file);

	  /* Write two quotes to get one.  */
	  if (c == '"')
	    {
	      putc (c, file);
	      ++in_string_count;
	    }

	  open_string = NULL;
	  open_decimal = "\"\n\t.byte ";
	  pending_close = "\"\n";
	  ++in_string_count;

	  /* Break overly long strings before the assembler chokes.  */
	  if (in_string_count >= 512)
	    {
	      fputs (pending_close, file);

	      open_string = "\t.byte \"";
	      open_decimal = "\t.byte ";
	      pending_close = NULL;
	      in_string_count = 0;
	    }
	}
      else
	{
	  /* Unprintable: emit as a decimal .byte value.  */
	  if (open_decimal)
	    fputs (open_decimal, file);
	  fprintf (file, "%d", c);

	  open_string = "\n\t.byte \"";
	  open_decimal = ", ";
	  pending_close = "\n";
	  in_string_count = 0;
	}
    }

  /* Now close the string if we have written one.  Then end the line.  */
  if (pending_close)
    fputs (pending_close, file);
}
/* Generate a unique section name for FILENAME for a section type
   represented by SECTION_DESC, storing a freshly xmalloc'd string
   through BUF.

   We name the section in the same manner as xlc: an underscore
   followed by the filename (leading directories stripped, non-
   alphanumerics dropped) with the last period replaced by
   SECTION_DESC.  If FILENAME contains no period, SECTION_DESC is
   simply appended.  */

void
rs6000_gen_section_name (char **buf, const char *filename,
			 const char *section_desc)
{
  const char *scan;
  const char *basename_start = filename;
  const char *dot = 0;
  char *out;
  int alloc_len;

  /* Find the start of the base name and the last period anywhere in
     FILENAME.  */
  for (scan = filename; *scan; scan++)
    {
      if (*scan == '/')
	basename_start = scan + 1;
      else if (*scan == '.')
	dot = scan;
    }

  alloc_len = strlen (basename_start) + strlen (section_desc) + 2;
  *buf = (char *) xmalloc (alloc_len);

  out = *buf;
  *out++ = '_';

  for (scan = basename_start; *scan; scan++)
    {
      if (scan == dot)
	{
	  /* Replace the final period with SECTION_DESC and stop.  */
	  strcpy (out, section_desc);
	  out += strlen (section_desc);
	  break;
	}
      else if (ISALNUM (*scan))
	*out++ = *scan;
    }

  if (dot == 0)
    strcpy (out, section_desc);
  else
    *out = '\0';
}
14536 /* Emit profile function. */
/* Emit the RTL call to the profiling routine (RS6000_MCOUNT) for the
   current function.  LABELNO names the per-function "LP" counter
   label used on AIX when profile counters are enabled.  Nothing is
   emitted under -mprofile-kernel (that case is handled as text in
   output_function_profiler), and the V.4 ABI is likewise handled
   there rather than here.  */
14538 void
14539 output_profile_hook (int labelno ATTRIBUTE_UNUSED)
14541 if (TARGET_PROFILE_KERNEL)
14542 return;
14544 if (DEFAULT_ABI == ABI_AIX)
14546 #ifndef NO_PROFILE_COUNTERS
14547 # define NO_PROFILE_COUNTERS 0
14548 #endif
14549 if (NO_PROFILE_COUNTERS)
14550 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
14551 else
/* Pass the address of the per-function LP label so mcount can
   maintain a counter for this function.  */
14553 char buf[30];
14554 const char *label_name;
14555 rtx fun;
14557 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
14558 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
14559 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
14561 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
14562 fun, Pmode);
14565 else if (DEFAULT_ABI == ABI_DARWIN)
14567 const char *mcount_name = RS6000_MCOUNT;
14568 int caller_addr_regno = LINK_REGISTER_REGNUM;
14570 /* Be conservative and always set this, at least for now. */
14571 current_function_uses_pic_offset_table = 1;
14573 #if TARGET_MACHO
14574 /* For PIC code, set up a stub and collect the caller's address
14575 from r0, which is where the prologue puts it. */
14576 if (MACHOPIC_INDIRECT
14577 && current_function_uses_pic_offset_table)
14578 caller_addr_regno = 0;
14579 #endif
14580 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
14581 0, VOIDmode, 1,
14582 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
14586 /* Write function profiler code. */
/* Write the mcount-calling stub for the current function directly as
   assembly text to FILE.  LABELNO numbers the per-function "LP"
   label used by V.4 profiling.  For ABI_AIX/ABI_DARWIN the normal
   path emits nothing here (output_profile_hook does the work as
   RTL); only -mprofile-kernel emits code in that case, and only for
   64-bit.  */
14588 void
14589 output_function_profiler (FILE *file, int labelno)
14591 char buf[100];
14592 int save_lr = 8;
14594 switch (DEFAULT_ABI)
14596 default:
14597 abort ();
14599 case ABI_V4:
/* V.4 stores the saved LR at offset 4; profiling 64-bit code is not
   supported for this ABI.  */
14600 save_lr = 4;
14601 if (!TARGET_32BIT)
14603 warning ("no profiling of 64-bit code for this ABI");
14604 return;
14606 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
14607 fprintf (file, "\tmflr %s\n", reg_names[0]);
14608 if (flag_pic == 1)
/* -fpic: fetch the LP label's address through the GOT.  */
14610 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
14611 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
14612 reg_names[0], save_lr, reg_names[1]);
14613 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
14614 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
14615 assemble_name (file, buf);
14616 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
14618 else if (flag_pic > 1)
/* -fPIC: compute the LP label's address from a PC-relative word.  */
14620 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
14621 reg_names[0], save_lr, reg_names[1]);
14622 /* Now, we need to get the address of the label. */
14623 fputs ("\tbl 1f\n\t.long ", file);
14624 assemble_name (file, buf);
14625 fputs ("-.\n1:", file);
14626 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
14627 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
14628 reg_names[0], reg_names[11]);
14629 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
14630 reg_names[0], reg_names[0], reg_names[11]);
14632 else
/* Non-PIC: build the LP label's address with lis/la.  */
14634 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
14635 assemble_name (file, buf);
14636 fputs ("@ha\n", file);
14637 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
14638 reg_names[0], save_lr, reg_names[1]);
14639 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
14640 assemble_name (file, buf);
14641 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
14644 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
14645 fprintf (file, "\tbl %s%s\n",
14646 RS6000_MCOUNT, flag_pic ? "@plt" : "");
14647 break;
14649 case ABI_AIX:
14650 case ABI_DARWIN:
14651 if (!TARGET_PROFILE_KERNEL)
14653 /* Don't do anything, done in output_profile_hook (). */
14655 else
14657 if (TARGET_32BIT)
14658 abort ();
14660 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
14661 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
/* Preserve the static chain register (if live) around the mcount
   call, since mcount does not.  */
14663 if (cfun->static_chain_decl != NULL)
14665 asm_fprintf (file, "\tstd %s,24(%s)\n",
14666 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
14667 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
14668 asm_fprintf (file, "\tld %s,24(%s)\n",
14669 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
14671 else
14672 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
14674 break;
14679 /* Power4 load update and store update instructions are cracked into a
14680 load or store and an integer insn which are executed in the same cycle.
14681 Branches have their own dispatch slot which does not count against the
14682 GCC issue rate, but it changes the program flow so there are no other
14683 instructions to issue in this cycle. */
14685 static int
14686 rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
14687 int verbose ATTRIBUTE_UNUSED,
14688 rtx insn, int more)
14690 if (GET_CODE (PATTERN (insn)) == USE
14691 || GET_CODE (PATTERN (insn)) == CLOBBER)
14692 return more;
14694 if (rs6000_sched_groups)
14696 if (is_microcoded_insn (insn))
14697 return 0;
14698 else if (is_cracked_insn (insn))
14699 return more > 2 ? more - 2 : 0;
14702 return more - 1;
14705 /* Adjust the cost of a scheduling dependency. Return the new cost of
14706 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
14708 static int
14709 rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn ATTRIBUTE_UNUSED,
14710 int cost)
14712 if (! recog_memoized (insn))
14713 return 0;
14715 if (REG_NOTE_KIND (link) != 0)
14716 return 0;
14718 if (REG_NOTE_KIND (link) == 0)
14720 /* Data dependency; DEP_INSN writes a register that INSN reads
14721 some cycles later. */
14722 switch (get_attr_type (insn))
14724 case TYPE_JMPREG:
14725 /* Tell the first scheduling pass about the latency between
14726 a mtctr and bctr (and mtlr and br/blr). The first
14727 scheduling pass will not know about this latency since
14728 the mtctr instruction, which has the latency associated
14729 to it, will be generated by reload. */
14730 return TARGET_POWER ? 5 : 4;
14731 case TYPE_BRANCH:
14732 /* Leave some extra cycles between a compare and its
14733 dependent branch, to inhibit expensive mispredicts. */
14734 if ((rs6000_cpu_attr == CPU_PPC603
14735 || rs6000_cpu_attr == CPU_PPC604
14736 || rs6000_cpu_attr == CPU_PPC604E
14737 || rs6000_cpu_attr == CPU_PPC620
14738 || rs6000_cpu_attr == CPU_PPC630
14739 || rs6000_cpu_attr == CPU_PPC750
14740 || rs6000_cpu_attr == CPU_PPC7400
14741 || rs6000_cpu_attr == CPU_PPC7450
14742 || rs6000_cpu_attr == CPU_POWER4
14743 || rs6000_cpu_attr == CPU_POWER5)
14744 && recog_memoized (dep_insn)
14745 && (INSN_CODE (dep_insn) >= 0)
14746 && (get_attr_type (dep_insn) == TYPE_CMP
14747 || get_attr_type (dep_insn) == TYPE_COMPARE
14748 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
14749 || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
14750 || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
14751 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
14752 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
14753 || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
14754 return cost + 2;
14755 default:
14756 break;
14758 /* Fall out to return default cost. */
14761 return cost;
14764 /* The function returns a true if INSN is microcoded.
14765 Return false otherwise. */
14767 static bool
14768 is_microcoded_insn (rtx insn)
14770 if (!insn || !INSN_P (insn)
14771 || GET_CODE (PATTERN (insn)) == USE
14772 || GET_CODE (PATTERN (insn)) == CLOBBER)
14773 return false;
14775 if (rs6000_sched_groups)
14777 enum attr_type type = get_attr_type (insn);
14778 if (type == TYPE_LOAD_EXT_U
14779 || type == TYPE_LOAD_EXT_UX
14780 || type == TYPE_LOAD_UX
14781 || type == TYPE_STORE_UX
14782 || type == TYPE_MFCR)
14783 return true;
14786 return false;
14789 /* The function returns a nonzero value if INSN can be scheduled only
14790 as the first insn in a dispatch group ("dispatch-slot restricted").
14791 In this case, the returned value indicates how many dispatch slots
14792 the insn occupies (at the beginning of the group).
14793 Return 0 otherwise. */
14795 static int
14796 is_dispatch_slot_restricted (rtx insn)
14798 enum attr_type type;
14800 if (!rs6000_sched_groups)
14801 return 0;
14803 if (!insn
14804 || insn == NULL_RTX
14805 || GET_CODE (insn) == NOTE
14806 || GET_CODE (PATTERN (insn)) == USE
14807 || GET_CODE (PATTERN (insn)) == CLOBBER)
14808 return 0;
14810 type = get_attr_type (insn);
14812 switch (type)
14814 case TYPE_MFCR:
14815 case TYPE_MFCRF:
14816 case TYPE_MTCR:
14817 case TYPE_DELAYED_CR:
14818 case TYPE_CR_LOGICAL:
14819 case TYPE_MTJMPR:
14820 case TYPE_MFJMPR:
14821 return 1;
14822 case TYPE_IDIV:
14823 case TYPE_LDIV:
14824 return 2;
14825 default:
14826 if (rs6000_cpu == PROCESSOR_POWER5
14827 && is_cracked_insn (insn))
14828 return 2;
14829 return 0;
14833 /* The function returns true if INSN is cracked into 2 instructions
14834 by the processor (and therefore occupies 2 issue slots). */
14836 static bool
14837 is_cracked_insn (rtx insn)
14839 if (!insn || !INSN_P (insn)
14840 || GET_CODE (PATTERN (insn)) == USE
14841 || GET_CODE (PATTERN (insn)) == CLOBBER)
14842 return false;
14844 if (rs6000_sched_groups)
14846 enum attr_type type = get_attr_type (insn);
14847 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
14848 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
14849 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
14850 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
14851 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
14852 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
14853 || type == TYPE_IDIV || type == TYPE_LDIV
14854 || type == TYPE_INSERT_WORD)
14855 return true;
14858 return false;
14861 /* The function returns true if INSN can be issued only from
14862 the branch slot. */
14864 static bool
14865 is_branch_slot_insn (rtx insn)
14867 if (!insn || !INSN_P (insn)
14868 || GET_CODE (PATTERN (insn)) == USE
14869 || GET_CODE (PATTERN (insn)) == CLOBBER)
14870 return false;
14872 if (rs6000_sched_groups)
14874 enum attr_type type = get_attr_type (insn);
14875 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
14876 return true;
14877 return false;
14880 return false;
14883 /* A C statement (sans semicolon) to update the integer scheduling
14884 priority INSN_PRIORITY (INSN). Increase the priority to execute the
14885 INSN earlier, reduce the priority to execute INSN later. Do not
14886 define this macro if you do not need to adjust the scheduling
14887 priorities of insns. */
/* Returns the (possibly adjusted) PRIORITY for INSN.  After reload,
   dispatch-slot-restricted insns may be boosted according to the
   -minsert-sched-nops policy in rs6000_sched_restricted_insns_priority.  */
14889 static int
14890 rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
14892 /* On machines (like the 750) which have asymmetric integer units,
14893 where one integer unit can do multiply and divides and the other
14894 can't, reduce the priority of multiply/divide so it is scheduled
14895 before other integer operations. */
/* The block below is deliberately disabled reference code.  */
14897 #if 0
14898 if (! INSN_P (insn))
14899 return priority;
14901 if (GET_CODE (PATTERN (insn)) == USE)
14902 return priority;
14904 switch (rs6000_cpu_attr) {
14905 case CPU_PPC750:
14906 switch (get_attr_type (insn))
14908 default:
14909 break;
14911 case TYPE_IMUL:
14912 case TYPE_IDIV:
14913 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
14914 priority, priority);
14915 if (priority >= 0 && priority < 0x01000000)
14916 priority >>= 3;
14917 break;
14920 #endif
14922 if (is_dispatch_slot_restricted (insn)
14923 && reload_completed
14924 && current_sched_info->sched_max_insns_priority
14925 && rs6000_sched_restricted_insns_priority)
14928 /* Prioritize insns that can be dispatched only in the first dispatch slot. */
14929 if (rs6000_sched_restricted_insns_priority == 1)
14930 /* Attach highest priority to insn. This means that in
14931 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
14932 precede 'priority' (critical path) considerations. */
14933 return current_sched_info->sched_max_insns_priority;
14934 else if (rs6000_sched_restricted_insns_priority == 2)
14935 /* Increase priority of insn by a minimal amount. This means that in
14936 haifa-sched.c:ready_sort(), only 'priority' (critical path) considerations
14937 precede dispatch-slot restriction considerations. */
14938 return (priority + 1);
/* Default: leave the priority unchanged.  */
14941 return priority;
14944 /* Return how many instructions the machine can issue per cycle. */
14946 static int
14947 rs6000_issue_rate (void)
14949 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
14950 if (!reload_completed)
14951 return 1;
14953 switch (rs6000_cpu_attr) {
14954 case CPU_RIOS1: /* ? */
14955 case CPU_RS64A:
14956 case CPU_PPC601: /* ? */
14957 case CPU_PPC7450:
14958 return 3;
14959 case CPU_PPC440:
14960 case CPU_PPC603:
14961 case CPU_PPC750:
14962 case CPU_PPC7400:
14963 case CPU_PPC8540:
14964 return 2;
14965 case CPU_RIOS2:
14966 case CPU_PPC604:
14967 case CPU_PPC604E:
14968 case CPU_PPC620:
14969 case CPU_PPC630:
14970 return 4;
14971 case CPU_POWER4:
14972 case CPU_POWER5:
14973 return 5;
14974 default:
14975 return 1;
14979 /* Return how many instructions to look ahead for better insn
14980 scheduling. */
14982 static int
14983 rs6000_use_sched_lookahead (void)
14985 if (rs6000_cpu_attr == CPU_PPC8540)
14986 return 4;
14987 return 0;
14990 /* Determine is PAT refers to memory. */
14992 static bool
14993 is_mem_ref (rtx pat)
14995 const char * fmt;
14996 int i, j;
14997 bool ret = false;
14999 if (GET_CODE (pat) == MEM)
15000 return true;
15002 /* Recursively process the pattern. */
15003 fmt = GET_RTX_FORMAT (GET_CODE (pat));
15005 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
15007 if (fmt[i] == 'e')
15008 ret |= is_mem_ref (XEXP (pat, i));
15009 else if (fmt[i] == 'E')
15010 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
15011 ret |= is_mem_ref (XVECEXP (pat, i, j));
15014 return ret;
15017 /* Determine if PAT is a PATTERN of a load insn. */
15019 static bool
15020 is_load_insn1 (rtx pat)
15022 if (!pat || pat == NULL_RTX)
15023 return false;
15025 if (GET_CODE (pat) == SET)
15026 return is_mem_ref (SET_SRC (pat));
15028 if (GET_CODE (pat) == PARALLEL)
15030 int i;
15032 for (i = 0; i < XVECLEN (pat, 0); i++)
15033 if (is_load_insn1 (XVECEXP (pat, 0, i)))
15034 return true;
15037 return false;
15040 /* Determine if INSN loads from memory. */
15042 static bool
15043 is_load_insn (rtx insn)
15045 if (!insn || !INSN_P (insn))
15046 return false;
15048 if (GET_CODE (insn) == CALL_INSN)
15049 return false;
15051 return is_load_insn1 (PATTERN (insn));
15054 /* Determine if PAT is a PATTERN of a store insn. */
15056 static bool
15057 is_store_insn1 (rtx pat)
15059 if (!pat || pat == NULL_RTX)
15060 return false;
15062 if (GET_CODE (pat) == SET)
15063 return is_mem_ref (SET_DEST (pat));
15065 if (GET_CODE (pat) == PARALLEL)
15067 int i;
15069 for (i = 0; i < XVECLEN (pat, 0); i++)
15070 if (is_store_insn1 (XVECEXP (pat, 0, i)))
15071 return true;
15074 return false;
/* Determine if INSN stores to memory.  */

static bool
is_store_insn (rtx insn)
{
  if (!insn || !INSN_P (insn))
    return false;

  /* NOTE(review): unlike is_load_insn, CALL_INSNs are not filtered out
     here, so a call whose pattern writes memory counts as a store --
     presumably intentional; confirm against the costly-dependence
     logic that consumes these predicates.  */
  return is_store_insn1 (PATTERN (insn));
}
/* Returns whether the dependence between INSN and NEXT (joined by LINK,
   with latency COST and issue distance DISTANCE) is considered costly
   by the given target, according to the -msched-costly-dep setting.  */

static bool
rs6000_is_costly_dependence (rtx insn, rtx next, rtx link, int cost, int distance)
{
  /* If the flag is not enabled - no dependence is considered costly;
     allow all dependent insns in the same group.
     This is the most aggressive option.  */
  if (rs6000_sched_costly_dep == no_dep_costly)
    return false;

  /* If the flag is set to 1 - a dependence is always considered costly;
     do not allow dependent instructions in the same group.
     This is the most conservative option.  */
  if (rs6000_sched_costly_dep == all_deps_costly)
    return true;

  if (rs6000_sched_costly_dep == store_to_load_dep_costly
      && is_load_insn (next)
      && is_store_insn (insn))
    /* Prevent load after store in the same group.  */
    return true;

  if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
      && is_load_insn (next)
      && is_store_insn (insn)
      /* A REG_NOTE_KIND of 0 (no note on LINK) marks a true (data)
	 dependence, as opposed to anti or output dependences.  */
      && (!link || (int) REG_NOTE_KIND (link) == 0))
    /* Prevent load after store in the same group if it is a true dependence.  */
    return true;

  /* The flag is set to X; dependences with latency >= X are considered costly,
     and will not be scheduled in the same group.  */
  if (rs6000_sched_costly_dep <= max_dep_latency
      && ((cost - distance) >= (int)rs6000_sched_costly_dep))
    return true;

  return false;
}
15128 /* Return the next insn after INSN that is found before TAIL is reached,
15129 skipping any "non-active" insns - insns that will not actually occupy
15130 an issue slot. Return NULL_RTX if such an insn is not found. */
15132 static rtx
15133 get_next_active_insn (rtx insn, rtx tail)
15135 rtx next_insn;
15137 if (!insn || insn == tail)
15138 return NULL_RTX;
15140 next_insn = NEXT_INSN (insn);
15142 while (next_insn
15143 && next_insn != tail
15144 && (GET_CODE(next_insn) == NOTE
15145 || GET_CODE (PATTERN (next_insn)) == USE
15146 || GET_CODE (PATTERN (next_insn)) == CLOBBER))
15148 next_insn = NEXT_INSN (next_insn);
15151 if (!next_insn || next_insn == tail)
15152 return NULL_RTX;
15154 return next_insn;
15157 /* Return whether the presence of INSN causes a dispatch group termination
15158 of group WHICH_GROUP.
15160 If WHICH_GROUP == current_group, this function will return true if INSN
15161 causes the termination of the current group (i.e, the dispatch group to
15162 which INSN belongs). This means that INSN will be the last insn in the
15163 group it belongs to.
15165 If WHICH_GROUP == previous_group, this function will return true if INSN
15166 causes the termination of the previous group (i.e, the dispatch group that
15167 precedes the group to which INSN belongs). This means that INSN will be
15168 the first insn in the group it belongs to). */
15170 static bool
15171 insn_terminates_group_p (rtx insn, enum group_termination which_group)
15173 enum attr_type type;
15175 if (! insn)
15176 return false;
15178 type = get_attr_type (insn);
15180 if (is_microcoded_insn (insn))
15181 return true;
15183 if (which_group == current_group)
15185 if (is_branch_slot_insn (insn))
15186 return true;
15187 return false;
15189 else if (which_group == previous_group)
15191 if (is_dispatch_slot_restricted (insn))
15192 return true;
15193 return false;
15196 return false;
/* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
   dispatch group) from the insns in GROUP_INSNS.  Return false otherwise.  */

static bool
is_costly_group (rtx *group_insns, rtx next_insn)
{
  int i;
  rtx link;
  int cost;
  int issue_rate = rs6000_issue_rate ();

  /* Examine every insn already placed in the current group (one slot
     per issue opportunity; empty slots hold 0).  */
  for (i = 0; i < issue_rate; i++)
    {
      rtx insn = group_insns[i];
      if (!insn)
	continue;
      /* Walk INSN's forward dependence list looking for NEXT_INSN.  */
      for (link = INSN_DEPEND (insn); link != 0; link = XEXP (link, 1))
	{
	  rtx next = XEXP (link, 0);
	  if (next == next_insn)
	    {
	      /* Found a dependence on NEXT_INSN; decide whether the
		 target considers it costly at distance 0.  */
	      cost = insn_cost (insn, link, next_insn);
	      if (rs6000_is_costly_dependence (insn, next_insn, link, cost, 0))
		return true;
	    }
	}
    }

  return false;
}
/* Utility of the function redefine_groups.
   Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
   in the same dispatch group.  If so, insert nops before NEXT_INSN, in order
   to keep it "far" (in a separate group) from GROUP_INSNS, following
   one of the following schemes, depending on the value of the flag
   -minsert_sched_nops = X:
   (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
       in order to force NEXT_INSN into a separate group.
   (2) X < sched_finish_regroup_exact: insert exactly X nops.
   GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
   insertion (has a group just ended, how many vacant issue slots remain in the
   last group, and how many dispatch groups were encountered so far).
   Returns the updated can_issue_more.  */

static int
force_new_group (int sched_verbose, FILE *dump, rtx *group_insns, rtx next_insn,
		 bool *group_end, int can_issue_more, int *group_count)
{
  rtx nop;
  bool force;
  int issue_rate = rs6000_issue_rate ();
  bool end = *group_end;
  int i;

  if (next_insn == NULL_RTX)
    return can_issue_more;

  /* Values above sched_finish_regroup_exact disable nop insertion.  */
  if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
    return can_issue_more;

  /* Only act when NEXT_INSN has a costly dependence on the group.  */
  force = is_costly_group (group_insns, next_insn);
  if (!force)
    return can_issue_more;

  if (sched_verbose > 6)
    fprintf (dump,"force: group count = %d, can_issue_more = %d\n",
	     *group_count ,can_issue_more);

  /* Scheme (1): fill all remaining slots so NEXT_INSN starts a group.  */
  if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
    {
      if (*group_end)
	can_issue_more = 0;

      /* Since only a branch can be issued in the last issue_slot, it is
	 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
	 a branch.  If next_insn is a branch, we insert 'can_issue_more' nops;
	 in this case the last nop will start a new group and the branch will be
	 forced to the new group.  */
      if (can_issue_more && !is_branch_slot_insn (next_insn))
	can_issue_more--;

      while (can_issue_more > 0)
	{
	  nop = gen_nop();
	  emit_insn_before (nop, next_insn);
	  can_issue_more--;
	}

      *group_end = true;
      return 0;
    }

  /* Scheme (2): insert exactly rs6000_sched_insert_nops nops, tracking
     group boundaries as the nops consume issue slots.  */
  if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
    {
      int n_nops = rs6000_sched_insert_nops;

      /* Nops can't be issued from the branch slot, so the effective
	 issue_rate for nops is 'issue_rate - 1'.  */
      if (can_issue_more == 0)
	can_issue_more = issue_rate;
      can_issue_more--;
      if (can_issue_more == 0)
	{
	  can_issue_more = issue_rate - 1;
	  (*group_count)++;
	  end = true;
	  for (i = 0; i < issue_rate; i++)
	    group_insns[i] = 0;
	}

      while (n_nops > 0)
	{
	  nop = gen_nop ();
	  emit_insn_before (nop, next_insn);
	  if (can_issue_more == issue_rate - 1) /* new group begins */
	    end = false;
	  can_issue_more--;
	  if (can_issue_more == 0)
	    {
	      can_issue_more = issue_rate - 1;
	      (*group_count)++;
	      end = true;
	      for (i = 0; i < issue_rate; i++)
		group_insns[i] = 0;
	    }
	  n_nops--;
	}

      /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1').  */
      can_issue_more++;

      *group_end = /* Is next_insn going to start a new group?  */
	  (end
	   || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
	   || (can_issue_more <= 2 && is_cracked_insn (next_insn))
	   || (can_issue_more < issue_rate &&
	       insn_terminates_group_p (next_insn, previous_group)));
      if (*group_end && end)
	(*group_count)--;

      if (sched_verbose > 6)
	fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
		 *group_count, can_issue_more);
      return can_issue_more;
    }

  return can_issue_more;
}
/* This function tries to synch the dispatch groups that the compiler "sees"
   with the dispatch groups that the processor dispatcher is expected to
   form in practice.  It tries to achieve this synchronization by forcing the
   estimated processor grouping on the compiler (as opposed to the function
   'pad_groups' which tries to force the scheduler's grouping on the processor).

   The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
   examines the (estimated) dispatch groups that will be formed by the processor
   dispatcher.  It marks these group boundaries to reflect the estimated
   processor grouping, overriding the grouping that the scheduler had marked.
   Depending on the value of the flag '-minsert-sched-nops' this function can
   force certain insns into separate groups or force a certain distance between
   them by inserting nops, for example, if there exists a "costly dependence"
   between the insns.

   The function estimates the group boundaries that the processor will form as
   follows: It keeps track of how many vacant issue slots are available after
   each insn.  A subsequent insn will start a new group if one of the following
   4 cases applies:
   - no more vacant issue slots remain in the current dispatch group.
   - only the last issue slot, which is the branch slot, is vacant, but the next
     insn is not a branch.
   - only the last 2 or less issue slots, including the branch slot, are vacant,
     which means that a cracked insn (which occupies two issue slots) can't be
     issued in this group.
   - less than 'issue_rate' slots are vacant, and the next insn always needs to
     start a new group.
   Returns the number of dispatch groups found.  */

static int
redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
{
  rtx insn, next_insn;
  int issue_rate;
  int can_issue_more;
  int slot, i;
  bool group_end;
  int group_count = 0;
  rtx *group_insns;

  /* Initialize.  */
  issue_rate = rs6000_issue_rate ();
  group_insns = alloca (issue_rate * sizeof (rtx));
  for (i = 0; i < issue_rate; i++)
    group_insns[i] = 0;
  can_issue_more = issue_rate;
  slot = 0;
  insn = get_next_active_insn (prev_head_insn, tail);
  group_end = false;

  while (insn != NULL_RTX)
    {
      /* Record INSN in the slot it will occupy in the current group.  */
      slot = (issue_rate - can_issue_more);
      group_insns[slot] = insn;
      can_issue_more =
	rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
      if (insn_terminates_group_p (insn, current_group))
	can_issue_more = 0;

      next_insn = get_next_active_insn (insn, tail);
      if (next_insn == NULL_RTX)
	return group_count + 1;

      group_end = /* Is next_insn going to start a new group?  */
	(can_issue_more == 0
	 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
	 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
	 || (can_issue_more < issue_rate &&
	     insn_terminates_group_p (next_insn, previous_group)));

      /* Insert nops if NEXT_INSN has a costly dependence on the group.  */
      can_issue_more = force_new_group (sched_verbose, dump, group_insns,
					next_insn, &group_end, can_issue_more,
					&group_count);

      if (group_end)
	{
	  group_count++;
	  can_issue_more = 0;
	  for (i = 0; i < issue_rate; i++)
	    group_insns[i] = 0;
	}

      /* A TImode mode on an insn marks a group boundary to the scheduler;
	 rewrite the marks to match our estimated processor grouping.  */
      if (GET_MODE (next_insn) == TImode && can_issue_more)
	PUT_MODE(next_insn, VOIDmode);
      else if (!can_issue_more && GET_MODE (next_insn) != TImode)
	PUT_MODE (next_insn, TImode);

      insn = next_insn;
      if (can_issue_more == 0)
	can_issue_more = issue_rate;
    } /* while */

  return group_count;
}
/* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
   dispatch group boundaries that the scheduler had marked.  Pad with nops
   any dispatch groups which have vacant issue slots, in order to force the
   scheduler's grouping on the processor dispatcher.  The function
   returns the number of dispatch groups found.  */

static int
pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
{
  rtx insn, next_insn;
  rtx nop;
  int issue_rate;
  int can_issue_more;
  int group_end;
  int group_count = 0;

  /* Initialize issue_rate.  */
  issue_rate = rs6000_issue_rate ();
  can_issue_more = issue_rate;

  insn = get_next_active_insn (prev_head_insn, tail);
  next_insn = get_next_active_insn (insn, tail);

  while (insn != NULL_RTX)
    {
      can_issue_more =
	rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);

      /* A TImode insn marks the start of a new scheduler group.  */
      group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);

      if (next_insn == NULL_RTX)
	break;

      if (group_end)
	{
	  /* If the scheduler had marked group termination at this location
	     (between insn and next_insn), and neither insn nor next_insn will
	     force group termination, pad the group with nops to force group
	     termination.  */
	  if (can_issue_more
	      && (rs6000_sched_insert_nops == sched_finish_pad_groups)
	      && !insn_terminates_group_p (insn, current_group)
	      && !insn_terminates_group_p (next_insn, previous_group))
	    {
	      /* Only a branch can occupy the last slot, so a non-branch
		 next_insn needs one fewer nop.  */
	      if (!is_branch_slot_insn(next_insn))
		can_issue_more--;

	      while (can_issue_more)
		{
		  nop = gen_nop ();
		  emit_insn_before (nop, next_insn);
		  can_issue_more--;
		}
	    }

	  can_issue_more = issue_rate;
	  group_count++;
	}

      insn = next_insn;
      next_insn = get_next_active_insn (insn, tail);
    }

  return group_count;
}
15515 /* The following function is called at the end of scheduling BB.
15516 After reload, it inserts nops at insn group bundling. */
15518 static void
15519 rs6000_sched_finish (FILE *dump, int sched_verbose)
15521 int n_groups;
15523 if (sched_verbose)
15524 fprintf (dump, "=== Finishing schedule.\n");
15526 if (reload_completed && rs6000_sched_groups)
15528 if (rs6000_sched_insert_nops == sched_finish_none)
15529 return;
15531 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
15532 n_groups = pad_groups (dump, sched_verbose,
15533 current_sched_info->prev_head,
15534 current_sched_info->next_tail);
15535 else
15536 n_groups = redefine_groups (dump, sched_verbose,
15537 current_sched_info->prev_head,
15538 current_sched_info->next_tail);
15540 if (sched_verbose >= 6)
15542 fprintf (dump, "ngroups = %d\n", n_groups);
15543 print_rtl (dump, current_sched_info->prev_head);
15544 fprintf (dump, "Done finish_sched\n");
15549 /* Length in units of the trampoline for entering a nested function. */
15552 rs6000_trampoline_size (void)
15554 int ret = 0;
15556 switch (DEFAULT_ABI)
15558 default:
15559 abort ();
15561 case ABI_AIX:
15562 ret = (TARGET_32BIT) ? 12 : 24;
15563 break;
15565 case ABI_DARWIN:
15566 case ABI_V4:
15567 ret = (TARGET_32BIT) ? 40 : 48;
15568 break;
15571 return ret;
/* Emit RTL insns to initialize the variable parts of a trampoline.
   FNADDR is an RTX for the address of the function's pure code.
   CXT is an RTX for the static chain value for the function.
   ADDR is the address of the trampoline itself.  */

void
rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
{
  enum machine_mode pmode = Pmode;
  int regsize = (TARGET_32BIT) ? 4 : 8;  /* bytes per pointer-sized word */
  rtx ctx_reg = force_reg (pmode, cxt);

  switch (DEFAULT_ABI)
    {
    default:
      abort ();

/* Macros to shorten the code expansions below.  */
#define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
#define MEM_PLUS(addr,offset) \
  gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))

    /* Under AIX, just build the 3 word function descriptor */
    case ABI_AIX:
      {
	rtx fn_reg = gen_reg_rtx (pmode);
	rtx toc_reg = gen_reg_rtx (pmode);
	/* Copy the entry point and TOC pointer out of FNADDR's
	   descriptor, then store them plus the static chain into the
	   descriptor at ADDR.  */
	emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
	emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
	emit_move_insn (MEM_DEREF (addr), fn_reg);
	emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
	emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
      }
      break;

    /* Under V.4/eabi/darwin, __trampoline_setup does the real work.  */
    case ABI_DARWIN:
    case ABI_V4:
      emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
			 FALSE, VOIDmode, 4,
			 addr, pmode,
			 GEN_INT (rs6000_trampoline_size ()), SImode,
			 fnaddr, pmode,
			 ctx_reg, pmode);
      break;
    }

  return;
}
/* Table of valid machine attributes.  "longcall" and "shortcall" share
   a single handler; the terminating entry has a NULL name.  */

const struct attribute_spec rs6000_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "altivec",   1, 1, false, true,  false, rs6000_handle_altivec_attribute },
  { "longcall",  0, 0, false, true,  true,  rs6000_handle_longcall_attribute },
  { "shortcall", 0, 0, false, true,  true,  rs6000_handle_longcall_attribute },
  { NULL,        0, 0, false, false, false, NULL }
};
/* Handle the "altivec" attribute.  The attribute may have
   arguments as follows:

       __attribute__((altivec(vector__)))
       __attribute__((altivec(pixel__)))	(always followed by 'unsigned short')
       __attribute__((altivec(bool__)))		(always followed by 'unsigned')

   and may appear more than once (e.g., 'vector bool char') in a
   given declaration.  */

static tree
rs6000_handle_altivec_attribute (tree *node, tree name, tree args,
				 int flags ATTRIBUTE_UNUSED,
				 bool *no_add_attrs)
{
  tree type = *node, result = NULL_TREE;
  enum machine_mode mode;
  int unsigned_p;
  /* First character of the attribute argument identifier selects the
     variant: 'v'ector, 'b'ool, or 'p'ixel; '?' if malformed.  */
  char altivec_type
    = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
	&& TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
       ? *IDENTIFIER_POINTER (TREE_VALUE (args))
       : '?');

  /* Strip pointers/functions/arrays to reach the underlying scalar
     type the attribute applies to.  */
  while (POINTER_TYPE_P (type)
	 || TREE_CODE (type) == FUNCTION_TYPE
	 || TREE_CODE (type) == METHOD_TYPE
	 || TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);

  mode = TYPE_MODE (type);

  if (rs6000_warn_altivec_long
      && (type == long_unsigned_type_node || type == long_integer_type_node))
    warning ("use of 'long' in AltiVec types is deprecated; use 'int'");

  switch (altivec_type)
    {
    case 'v':
      unsigned_p = TYPE_UNSIGNED (type);
      switch (mode)
	{
	case SImode:
	  result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
	  break;
	case HImode:
	  result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
	  break;
	case QImode:
	  result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
	  break;
	case SFmode: result = V4SF_type_node; break;
	  /* If the user says 'vector int bool', we may be handed the 'bool'
	     attribute _before_ the 'vector' attribute, and so select the proper
	     type in the 'b' case below.  */
	case V4SImode: case V8HImode: case V16QImode: result = type;
	  /* Fall through to default.  */
	default: break;
	}
      break;
    case 'b':
      switch (mode)
	{
	case SImode: case V4SImode: result = bool_V4SI_type_node; break;
	case HImode: case V8HImode: result = bool_V8HI_type_node; break;
	case QImode: case V16QImode: result = bool_V16QI_type_node;
	  /* Fall through to default.  */
	default: break;
	}
      break;
    case 'p':
      switch (mode)
	{
	case V8HImode: result = pixel_V8HI_type_node;
	  /* Fall through to default.  */
	default: break;
	}
      /* Fall through to the outer default.  */
    default: break;
    }

  /* Propagate const-ness of the original type to the vector type.  */
  if (result && result != type && TYPE_READONLY (type))
    result = build_qualified_type (result, TYPE_QUAL_CONST);

  *no_add_attrs = true;  /* No need to hang on to the attribute.  */

  if (!result)
    warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
  else
    *node = reconstruct_complex_type (*node, result);

  return NULL_TREE;
}
15725 /* AltiVec defines four built-in scalar types that serve as vector
15726 elements; we must teach the compiler how to mangle them. */
15728 static const char *
15729 rs6000_mangle_fundamental_type (tree type)
15731 if (type == bool_char_type_node) return "U6__boolc";
15732 if (type == bool_short_type_node) return "U6__bools";
15733 if (type == pixel_type_node) return "u7__pixel";
15734 if (type == bool_int_type_node) return "U6__booli";
15736 /* For all other types, use normal C++ mangling. */
15737 return NULL;
15740 /* Handle a "longcall" or "shortcall" attribute; arguments as in
15741 struct attribute_spec.handler. */
15743 static tree
15744 rs6000_handle_longcall_attribute (tree *node, tree name,
15745 tree args ATTRIBUTE_UNUSED,
15746 int flags ATTRIBUTE_UNUSED,
15747 bool *no_add_attrs)
15749 if (TREE_CODE (*node) != FUNCTION_TYPE
15750 && TREE_CODE (*node) != FIELD_DECL
15751 && TREE_CODE (*node) != TYPE_DECL)
15753 warning ("`%s' attribute only applies to functions",
15754 IDENTIFIER_POINTER (name));
15755 *no_add_attrs = true;
15758 return NULL_TREE;
15761 /* Set longcall attributes on all functions declared when
15762 rs6000_default_long_calls is true. */
15763 static void
15764 rs6000_set_default_type_attributes (tree type)
15766 if (rs6000_default_long_calls
15767 && (TREE_CODE (type) == FUNCTION_TYPE
15768 || TREE_CODE (type) == METHOD_TYPE))
15769 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
15770 NULL_TREE,
15771 TYPE_ATTRIBUTES (type));
15774 /* Return a reference suitable for calling a function with the
15775 longcall attribute. */
15777 struct rtx_def *
15778 rs6000_longcall_ref (rtx call_ref)
15780 const char *call_name;
15781 tree node;
15783 if (GET_CODE (call_ref) != SYMBOL_REF)
15784 return call_ref;
15786 /* System V adds '.' to the internal name, so skip them. */
15787 call_name = XSTR (call_ref, 0);
15788 if (*call_name == '.')
15790 while (*call_name == '.')
15791 call_name++;
15793 node = get_identifier (call_name);
15794 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
15797 return force_reg (Pmode, call_ref);
15800 #ifdef USING_ELFOS_H
15802 /* A C statement or statements to switch to the appropriate section
15803 for output of RTX in mode MODE. You can assume that RTX is some
15804 kind of constant in RTL. The argument MODE is redundant except in
15805 the case of a `const_int' rtx. Select the section by calling
15806 `text_section' or one of the alternatives for other sections.
15808 Do not define this macro if you put all constants in the read-only
15809 data section. */
15811 static void
15812 rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
15813 unsigned HOST_WIDE_INT align)
15815 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
15816 toc_section ();
15817 else
15818 default_elf_select_rtx_section (mode, x, align);
/* A C statement or statements to switch to the appropriate
   section for output of DECL.  DECL is either a `VAR_DECL' node
   or a constant of some sort.  RELOC indicates whether forming
   the initial value of DECL requires link-time relocations.  */

static void
rs6000_elf_select_section (tree decl, int reloc,
			   unsigned HOST_WIDE_INT align)
{
  /* Pretend that we're always building for a shared library when
     ABI_AIX, because otherwise we end up with dynamic relocations
     in read-only sections.  This happens for function pointers,
     references to vtables in typeinfo, and probably other cases.  */
  default_elf_select_section_1 (decl, reloc, align,
				flag_pic || DEFAULT_ABI == ABI_AIX);
}
/* A C statement to build up a unique section name, expressed as a
   STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
   RELOC indicates whether the initial value of EXP requires
   link-time relocations.  If you do not define this macro, GCC will use
   the symbol name prefixed by `.' as the section name.  Note - this
   macro can now be called for uninitialized data items as well as
   initialized data and functions.  */

static void
rs6000_elf_unique_section (tree decl, int reloc)
{
  /* As above, pretend that we're always building for a shared library
     when ABI_AIX, to avoid dynamic relocations in read-only sections.  */
  default_unique_section_1 (decl, reloc,
			    flag_pic || DEFAULT_ABI == ABI_AIX);
}
/* For a SYMBOL_REF, set generic flags and then perform some
   target-specific processing.

   When the AIX ABI is requested on a non-AIX system, replace the
   function name with the real name (with a leading .) rather than the
   function descriptor name.  This saves a lot of overriding code to
   read the prefixes.  */

static void
rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
{
  default_encode_section_info (decl, rtl, first);

  if (first
      && TREE_CODE (decl) == FUNCTION_DECL
      && !TARGET_AIX
      && DEFAULT_ABI == ABI_AIX)
    {
      rtx sym_ref = XEXP (rtl, 0);
      size_t len = strlen (XSTR (sym_ref, 0));
      /* Build "." + name + NUL on the stack, then intern it in GC
	 storage and install it as the symbol's name.  */
      char *str = alloca (len + 2);
      str[0] = '.';
      memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
      XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
    }
}
/* Return true if DECL should be placed in one of the small-data
   sections (.sdata/.sbss and friends), per -msdata and -G.  */

static bool
rs6000_elf_in_small_data_p (tree decl)
{
  if (rs6000_sdata == SDATA_NONE)
    return false;

  /* A variable with an explicit section attribute is small-data only
     if that section is one of the known small-data sections.  */
  if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
    {
      const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
      if (strcmp (section, ".sdata") == 0
	  || strcmp (section, ".sdata2") == 0
	  || strcmp (section, ".sbss") == 0
	  || strcmp (section, ".sbss2") == 0
	  || strcmp (section, ".PPC.EMB.sdata0") == 0
	  || strcmp (section, ".PPC.EMB.sbss0") == 0)
	return true;
    }
  else
    {
      /* Otherwise decide by size, against the -G threshold.  */
      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));

      if (size > 0
	  && (unsigned HOST_WIDE_INT) size <= g_switch_value
	  /* If it's not public, and we're not going to reference it there,
	     there's no need to put it in the small data section.  */
	  && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
	return true;
    }

  return false;
}
15914 #endif /* USING_ELFOS_H */
/* Return a REG that occurs in ADDR with coefficient 1.
   ADDR can be effectively incremented by incrementing REG.

   r0 is special and we must not select it as an address
   register by this routine since our caller will try to
   increment the returned register via an "la" instruction.  */

struct rtx_def *
find_addr_reg (rtx addr)
{
  /* Descend through PLUS expressions, preferring a non-r0 register
     operand and otherwise skipping past constant operands.  */
  while (GET_CODE (addr) == PLUS)
    {
      if (GET_CODE (XEXP (addr, 0)) == REG
	  && REGNO (XEXP (addr, 0)) != 0)
	addr = XEXP (addr, 0);
      else if (GET_CODE (XEXP (addr, 1)) == REG
	       && REGNO (XEXP (addr, 1)) != 0)
	addr = XEXP (addr, 1);
      else if (CONSTANT_P (XEXP (addr, 0)))
	addr = XEXP (addr, 1);
      else if (CONSTANT_P (XEXP (addr, 1)))
	addr = XEXP (addr, 0);
      else
	abort ();
    }
  if (GET_CODE (addr) == REG && REGNO (addr) != 0)
    return addr;
  /* No usable register found -- the address is malformed.  */
  abort ();
}
/* Report a fatal "bad address" error for insn operand OP and abort.  */

void
rs6000_fatal_bad_address (rtx op)
{
  fatal_insn ("bad address", op);
}
15953 #if TARGET_MACHO
15955 #if 0
/* Returns 1 if OP is either a symbol reference or a sum of a symbol
   reference and a constant.  (Currently compiled out via #if 0.)  */

int
symbolic_operand (rtx op)
{
  switch (GET_CODE (op))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return 1;
    case CONST:
      op = XEXP (op, 0);
      /* NOTE(review): relies on && binding tighter than ||; the second
	 alternative is "(SYMBOL_REF or LABEL_REF) plus CONST_INT".
	 Extra parentheses would silence compiler precedence warnings.  */
      return (GET_CODE (op) == SYMBOL_REF ||
	      (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
	       || GET_CODE (XEXP (op, 0)) == LABEL_REF)
	      && GET_CODE (XEXP (op, 1)) == CONST_INT);
    default:
      return 0;
    }
}
15977 #endif
15979 #if TARGET_MACHO
/* List of pending branch islands, threaded through TREE_CHAIN.
   Each node stores the island's label, target function, and source
   line (see the BRANCH_ISLAND_* accessors below).  */
static tree branch_island_list = 0;

/* Remember to generate a branch island for far calls to the given
   function.  */

static void
add_compiler_branch_island (tree label_name, tree function_name, int line_number)
{
  /* PURPOSE = function, VALUE = label, TREE_TYPE = line number.  */
  tree branch_island = build_tree_list (function_name, label_name);
  TREE_TYPE (branch_island) = build_int_cst (NULL_TREE, line_number, 0);
  TREE_CHAIN (branch_island) = branch_island_list;
  branch_island_list = branch_island;
}
/* Accessors for the fields packed into a branch-island tree node by
   add_compiler_branch_island.  */
#define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND)     TREE_VALUE (BRANCH_ISLAND)
#define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND)  TREE_PURPOSE (BRANCH_ISLAND)
#define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND)    \
		TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))

/* Generate far-jump branch islands for everything on the
   branch_island_list.  Invoked immediately after the last instruction
   of the epilogue has been emitted; the branch-islands must be
   appended to, and contiguous with, the function body.  Mach-O stubs
   are generated in machopic_output_stub().  */

static void
macho_branch_islands (void)
{
  char tmp_buf[512];
  tree branch_island;

  for (branch_island = branch_island_list;
       branch_island;
       branch_island = TREE_CHAIN (branch_island))
    {
      const char *label =
	IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
      const char *name  =
	IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island));
      char name_buf[512];
      /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF().  */
      if (name[0] == '*' || name[0] == '&')
	strcpy (name_buf, name+1);
      else
	{
	  name_buf[0] = '_';
	  strcpy (name_buf+1, name);
	}
      strcpy (tmp_buf, "\n");
      strcat (tmp_buf, label);
#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
      if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
	fprintf (asm_out_file, "\t.stabd 68,0," HOST_WIDE_INT_PRINT_UNSIGNED "\n",
		 BRANCH_ISLAND_LINE_NUMBER(branch_island));
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
      if (flag_pic)
	{
	  /* PIC island: compute the target address relative to a
	     bcl-obtained PC, preserving LR in r0.  */
	  strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
	  strcat (tmp_buf, label);
	  strcat (tmp_buf, "_pic\n");
	  strcat (tmp_buf, label);
	  strcat (tmp_buf, "_pic:\n\tmflr r11\n");

	  strcat (tmp_buf, "\taddis r11,r11,ha16(");
	  strcat (tmp_buf, name_buf);
	  strcat (tmp_buf, " - ");
	  strcat (tmp_buf, label);
	  strcat (tmp_buf, "_pic)\n");

	  strcat (tmp_buf, "\tmtlr r0\n");

	  strcat (tmp_buf, "\taddi r12,r11,lo16(");
	  strcat (tmp_buf, name_buf);
	  strcat (tmp_buf, " - ");
	  strcat (tmp_buf, label);
	  strcat (tmp_buf, "_pic)\n");

	  strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
	}
      else
	{
	  /* Non-PIC island: load the absolute target address into r12
	     and jump through CTR.  */
	  strcat (tmp_buf, ":\nlis r12,hi16(");
	  strcat (tmp_buf, name_buf);
	  strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
	  strcat (tmp_buf, name_buf);
	  strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
	}
      output_asm_insn (tmp_buf, 0);
#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
      if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
	fprintf(asm_out_file, "\t.stabd 68,0," HOST_WIDE_INT_PRINT_UNSIGNED "\n",
		BRANCH_ISLAND_LINE_NUMBER (branch_island));
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
    }

  /* All pending islands have been emitted; reset the list.  */
  branch_island_list = 0;
}
16079 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
16080 already there or not. */
16082 static int
16083 no_previous_def (tree function_name)
16085 tree branch_island;
16086 for (branch_island = branch_island_list;
16087 branch_island;
16088 branch_island = TREE_CHAIN (branch_island))
16089 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
16090 return 0;
16091 return 1;
16094 /* GET_PREV_LABEL gets the label name from the previous definition of
16095 the function. */
16097 static tree
16098 get_prev_label (tree function_name)
16100 tree branch_island;
16101 for (branch_island = branch_island_list;
16102 branch_island;
16103 branch_island = TREE_CHAIN (branch_island))
16104 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
16105 return BRANCH_ISLAND_LABEL_NAME (branch_island);
16106 return 0;
16109 /* INSN is either a function call or a millicode call. It may have an
16110 unconditional jump in its delay slot.
16112 CALL_DEST is the routine we are calling. */
16114 char *
16115 output_call (rtx insn, rtx *operands, int dest_operand_number, int cookie_operand_number)
16117 static char buf[256];
16118 if (GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
16119 && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
16121 tree labelname;
16122 tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));
16124 if (no_previous_def (funname))
16126 int line_number = 0;
16127 rtx label_rtx = gen_label_rtx ();
16128 char *label_buf, temp_buf[256];
16129 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
16130 CODE_LABEL_NUMBER (label_rtx));
16131 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
16132 labelname = get_identifier (label_buf);
16133 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
16134 if (insn)
16135 line_number = NOTE_LINE_NUMBER (insn);
16136 add_compiler_branch_island (labelname, funname, line_number);
16138 else
16139 labelname = get_prev_label (funname);
16141 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
16142 instruction will reach 'foo', otherwise link as 'bl L42'".
16143 "L42" should be a 'branch island', that will do a far jump to
16144 'foo'. Branch islands are generated in
16145 macho_branch_islands(). */
16146 sprintf (buf, "jbsr %%z%d,%.246s",
16147 dest_operand_number, IDENTIFIER_POINTER (labelname));
16149 else
16150 sprintf (buf, "bl %%z%d", dest_operand_number);
16151 return buf;
16154 #endif /* TARGET_MACHO */
/* Generate PIC and indirect symbol stubs.

   Emit into FILE a Mach-O lazy-binding stub named STUB for symbol SYMB,
   plus the matching lazy pointer entry.  The stub loads the lazy
   pointer and branches through CTR; the lazy pointer initially points
   at dyld_stub_binding_helper, which binds the real address on first
   use.  */

void
machopic_output_stub (FILE *file, const char *symb, const char *stub)
{
  unsigned int length;
  char *symbol_name, *lazy_ptr_name;
  char *local_label_0;
  static int label = 0;		/* uniquifies the $spb local label */

  /* Lose our funky encoding stuff so it doesn't contaminate the stub.  */
  symb = (*targetm.strip_name_encoding) (symb);

  length = strlen (symb);
  symbol_name = alloca (length + 32);
  GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);

  lazy_ptr_name = alloca (length + 32);
  GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);

  if (flag_pic == 2)
    machopic_picsymbol_stub1_section ();
  else
    machopic_symbol_stub1_section ();

  if (flag_pic == 2)
    {
      /* Fully PIC stub: materialize the lazy-pointer address relative
	 to a bcl-obtained PC (the $spb label).  */
      fprintf (file, "\t.align 5\n");

      fprintf (file, "%s:\n", stub);
      fprintf (file, "\t.indirect_symbol %s\n", symbol_name);

      label++;
      local_label_0 = alloca (sizeof("\"L0000000000$spb\""));
      sprintf (local_label_0, "\"L%011d$spb\"", label);

      fprintf (file, "\tmflr r0\n");
      fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
      fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
      fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtlr r0\n");
      fprintf (file, "\tlwzu r12,lo16(%s-%s)(r11)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtctr r12\n");
      fprintf (file, "\tbctr\n");
    }
  else
    {
      /* Non-PIC stub: absolute-address the lazy pointer directly.  */
      fprintf (file, "\t.align 4\n");

      fprintf (file, "%s:\n", stub);
      fprintf (file, "\t.indirect_symbol %s\n", symbol_name);

      fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
      fprintf (file, "\tlwzu r12,lo16(%s)(r11)\n", lazy_ptr_name);
      fprintf (file, "\tmtctr r12\n");
      fprintf (file, "\tbctr\n");
    }

  /* The lazy pointer itself, seeded with the binding helper.  */
  machopic_lazy_symbol_ptr_section ();
  fprintf (file, "%s:\n", lazy_ptr_name);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
  fprintf (file, "%sdyld_stub_binding_helper\n",
	   (TARGET_64BIT ? DOUBLE_INT_ASM_OP : "\t.long\t"));
}
/* Legitimize PIC addresses.  If the address is already
   position-independent, we return ORIG.  Newly generated
   position-independent addresses go into a reg.  This is REG if non
   zero, otherwise we allocate register(s) as necessary.  */

/* True iff X is a CONST_INT fitting in a signed 16-bit immediate.  */
#define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)

rtx
rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
					rtx reg)
{
  rtx base, offset;

  if (reg == NULL && ! reload_in_progress && ! reload_completed)
    reg = gen_reg_rtx (Pmode);

  if (GET_CODE (orig) == CONST)
    {
      /* Already relative to the PIC base register: nothing to do.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
	return orig;

      if (GET_CODE (XEXP (orig, 0)) == PLUS)
	{
	  /* Use a different reg for the intermediate value, as
	     it will be marked UNCHANGING.  */
	  rtx reg_temp = no_new_pseudos ? reg : gen_reg_rtx (Pmode);

	  /* Recursively legitimize both halves of the PLUS.  */
	  base =
	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
						    Pmode, reg_temp);
	  offset =
	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
						    Pmode, reg);
	}
      else
	abort ();

      if (GET_CODE (offset) == CONST_INT)
	{
	  if (SMALL_INT (offset))
	    return plus_constant (base, INTVAL (offset));
	  else if (! reload_in_progress && ! reload_completed)
	    offset = force_reg (Pmode, offset);
	  else
	    {
	      /* No pseudos available: spill the constant to memory and
		 legitimize the resulting MEM instead.  */
	      rtx mem = force_const_mem (Pmode, orig);
	      return machopic_legitimize_pic_address (mem, Pmode, reg);
	    }
	}
      return gen_rtx_PLUS (Pmode, base, offset);
    }

  /* Fall back on generic machopic code.  */
  return machopic_legitimize_pic_address (orig, mode, reg);
}
/* This is just a placeholder to make linking work without having to
   add this to the generic Darwin EXTRA_SECTIONS.  If -mcall-aix is
   ever needed for Darwin (not too likely!) this would have to get a
   real definition.  */

void
toc_section (void)
{
}
16291 #endif /* TARGET_MACHO */
16293 #if TARGET_ELF
16294 static unsigned int
16295 rs6000_elf_section_type_flags (tree decl, const char *name, int reloc)
16297 return default_section_type_flags_1 (decl, name, reloc,
16298 flag_pic || DEFAULT_ABI == ABI_AIX);
16301 /* Record an element in the table of global constructors. SYMBOL is
16302 a SYMBOL_REF of the function to be called; PRIORITY is a number
16303 between 0 and MAX_INIT_PRIORITY.
16305 This differs from default_named_section_asm_out_constructor in
16306 that we have special handling for -mrelocatable. */
16308 static void
16309 rs6000_elf_asm_out_constructor (rtx symbol, int priority)
16311 const char *section = ".ctors";
16312 char buf[16];
16314 if (priority != DEFAULT_INIT_PRIORITY)
16316 sprintf (buf, ".ctors.%.5u",
16317 /* Invert the numbering so the linker puts us in the proper
16318 order; constructors are run from right to left, and the
16319 linker sorts in increasing order. */
16320 MAX_INIT_PRIORITY - priority);
16321 section = buf;
16324 named_section_flags (section, SECTION_WRITE);
16325 assemble_align (POINTER_SIZE);
16327 if (TARGET_RELOCATABLE)
16329 fputs ("\t.long (", asm_out_file);
16330 output_addr_const (asm_out_file, symbol);
16331 fputs (")@fixup\n", asm_out_file);
16333 else
16334 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
16337 static void
16338 rs6000_elf_asm_out_destructor (rtx symbol, int priority)
16340 const char *section = ".dtors";
16341 char buf[16];
16343 if (priority != DEFAULT_INIT_PRIORITY)
16345 sprintf (buf, ".dtors.%.5u",
16346 /* Invert the numbering so the linker puts us in the proper
16347 order; constructors are run from right to left, and the
16348 linker sorts in increasing order. */
16349 MAX_INIT_PRIORITY - priority);
16350 section = buf;
16353 named_section_flags (section, SECTION_WRITE);
16354 assemble_align (POINTER_SIZE);
16356 if (TARGET_RELOCATABLE)
16358 fputs ("\t.long (", asm_out_file);
16359 output_addr_const (asm_out_file, symbol);
16360 fputs (")@fixup\n", asm_out_file);
16362 else
16363 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
16366 void
16367 rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
16369 if (TARGET_64BIT)
16371 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
16372 ASM_OUTPUT_LABEL (file, name);
16373 fputs (DOUBLE_INT_ASM_OP, file);
16374 putc ('.', file);
16375 assemble_name (file, name);
16376 fputs (",.TOC.@tocbase,0\n\t.previous\n\t.size\t", file);
16377 assemble_name (file, name);
16378 fputs (",24\n\t.type\t.", file);
16379 assemble_name (file, name);
16380 fputs (",@function\n", file);
16381 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
16383 fputs ("\t.globl\t.", file);
16384 assemble_name (file, name);
16385 putc ('\n', file);
16387 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
16388 putc ('.', file);
16389 ASM_OUTPUT_LABEL (file, name);
16390 return;
16393 if (TARGET_RELOCATABLE
16394 && (get_pool_size () != 0 || current_function_profile)
16395 && uses_TOC ())
16397 char buf[256];
16399 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
16401 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
16402 fprintf (file, "\t.long ");
16403 assemble_name (file, buf);
16404 putc ('-', file);
16405 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
16406 assemble_name (file, buf);
16407 putc ('\n', file);
16410 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
16411 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
16413 if (DEFAULT_ABI == ABI_AIX)
16415 const char *desc_name, *orig_name;
16417 orig_name = (*targetm.strip_name_encoding) (name);
16418 desc_name = orig_name;
16419 while (*desc_name == '.')
16420 desc_name++;
16422 if (TREE_PUBLIC (decl))
16423 fprintf (file, "\t.globl %s\n", desc_name);
16425 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
16426 fprintf (file, "%s:\n", desc_name);
16427 fprintf (file, "\t.long %s\n", orig_name);
16428 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
16429 if (DEFAULT_ABI == ABI_AIX)
16430 fputs ("\t.long 0\n", file);
16431 fprintf (file, "\t.previous\n");
16433 ASM_OUTPUT_LABEL (file, name);
16435 #endif
16437 #if TARGET_XCOFF
/* Emit a ".globl" directive for NAME (sans any XCOFF qualifier) into
   STREAM.  */

static void
rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
{
  fputs (GLOBAL_ASM_OP, stream);
  RS6000_OUTPUT_BASENAME (stream, name);
  putc ('\n', stream);
}
16446 static void
16447 rs6000_xcoff_asm_named_section (const char *name, unsigned int flags)
16449 int smclass;
16450 static const char * const suffix[3] = { "PR", "RO", "RW" };
16452 if (flags & SECTION_CODE)
16453 smclass = 0;
16454 else if (flags & SECTION_WRITE)
16455 smclass = 2;
16456 else
16457 smclass = 1;
16459 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
16460 (flags & SECTION_CODE) ? "." : "",
16461 name, suffix[smclass], flags & SECTION_ENTSIZE);
16464 static void
16465 rs6000_xcoff_select_section (tree decl, int reloc,
16466 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
16468 if (decl_readonly_section_1 (decl, reloc, 1))
16470 if (TREE_PUBLIC (decl))
16471 read_only_data_section ();
16472 else
16473 read_only_private_data_section ();
16475 else
16477 if (TREE_PUBLIC (decl))
16478 data_section ();
16479 else
16480 private_data_section ();
/* Give DECL a section of its own, named after its assembler name.
   Private, common, and (zero-)uninitialized data keep using the shared
   sections chosen by select_section.  */

static void
rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
{
  const char *name;

  /* Use select_section for private and uninitialized data.  */
  if (!TREE_PUBLIC (decl)
      || DECL_COMMON (decl)
      || DECL_INITIAL (decl) == NULL_TREE
      || DECL_INITIAL (decl) == error_mark_node
      || (flag_zero_initialized_in_bss
	  && initializer_zerop (DECL_INITIAL (decl))))
    return;

  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
  name = (*targetm.strip_name_encoding) (name);
  DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
}
/* Select section for constant in constant pool.

   On RS/6000, all constants are in the private read-only data area.
   However, if this is being placed in the TOC it must be output as a
   toc entry.  */

static void
rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
				 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
{
  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
    toc_section ();
  else
    read_only_private_data_section ();
}
/* Remove any trailing [DS] or the like from the symbol name, and drop
   a leading '*'.  Returns either NAME itself or a freshly allocated
   GC string without the bracketed qualifier.  */

static const char *
rs6000_xcoff_strip_name_encoding (const char *name)
{
  size_t len;

  if (*name == '*')
    name++;
  len = strlen (name);
  /* The len > 0 guard prevents reading name[-1] on an empty name.  */
  if (len > 0 && name[len - 1] == ']')
    /* Strip the four-character "[XX]" qualifier.  */
    return ggc_alloc_string (name, len - 4);
  else
    return name;
}
16534 /* Section attributes. AIX is always PIC. */
16536 static unsigned int
16537 rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
16539 unsigned int align;
16540 unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);
16542 /* Align to at least UNIT size. */
16543 if (flags & SECTION_CODE)
16544 align = MIN_UNITS_PER_WORD;
16545 else
16546 /* Increase alignment of large objects if not already stricter. */
16547 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
16548 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
16549 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
16551 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
/* Output at beginning of assembler file.

   Initialize the section names for the RS/6000 at this point.

   Specify filename, including full path, to assembler.

   We want to go into the TOC section so at least one .toc will be emitted.
   Also, in order to output proper .bs/.es pairs, we need at least one static
   [RW] section emitted.

   Finally, declare mcount when profiling to make the assembler happy.  */

static void
rs6000_xcoff_file_start (void)
{
  /* Derive per-file csect names from the main input filename.  */
  rs6000_gen_section_name (&xcoff_bss_section_name,
			   main_input_filename, ".bss_");
  rs6000_gen_section_name (&xcoff_private_data_section_name,
			   main_input_filename, ".rw_");
  rs6000_gen_section_name (&xcoff_read_only_section_name,
			   main_input_filename, ".ro_");

  fputs ("\t.file\t", asm_out_file);
  output_quoted_string (asm_out_file, main_input_filename);
  fputc ('\n', asm_out_file);
  /* Touch the sections noted in the comment above.  */
  toc_section ();
  if (write_symbols != NO_DEBUG)
    private_data_section ();
  text_section ();
  if (profile_flag)
    fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
  rs6000_file_start ();
}
16588 /* Output at end of assembler file.
16589 On the RS/6000, referencing data should automatically pull in text. */
16591 static void
16592 rs6000_xcoff_file_end (void)
16594 text_section ();
16595 fputs ("_section_.text:\n", asm_out_file);
16596 data_section ();
16597 fputs (TARGET_32BIT
16598 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
16599 asm_out_file);
16601 #endif /* TARGET_XCOFF */
16603 #if TARGET_MACHO
16604 /* Cross-module name binding. Darwin does not support overriding
16605 functions at dynamic-link time. */
16607 static bool
16608 rs6000_binds_local_p (tree decl)
16610 return default_binds_local_p_1 (decl, 0);
16612 #endif
16614 /* Compute a (partial) cost for rtx X. Return true if the complete
16615 cost has been computed, and false if subexpressions should be
16616 scanned. In either case, *TOTAL contains the cost result. */
16618 static bool
16619 rs6000_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
16620 int *total)
16622 enum machine_mode mode = GET_MODE (x);
16624 switch (code)
16626 /* On the RS/6000, if it is valid in the insn, it is free. */
16627 case CONST_INT:
16628 if (((outer_code == SET
16629 || outer_code == PLUS
16630 || outer_code == MINUS)
16631 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'I')
16632 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'L')))
16633 || ((outer_code == IOR || outer_code == XOR)
16634 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')
16635 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'L')))
16636 || (outer_code == AND
16637 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')
16638 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'L')
16639 || mask_operand (x, VOIDmode)))
16640 || outer_code == ASHIFT
16641 || outer_code == ASHIFTRT
16642 || outer_code == LSHIFTRT
16643 || outer_code == ROTATE
16644 || outer_code == ROTATERT
16645 || outer_code == ZERO_EXTRACT
16646 || (outer_code == MULT
16647 && CONST_OK_FOR_LETTER_P (INTVAL (x), 'I'))
16648 || (outer_code == COMPARE
16649 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'I')
16650 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'K'))))
16652 *total = 0;
16653 return true;
16655 else if ((outer_code == PLUS
16656 && reg_or_add_cint64_operand (x, VOIDmode))
16657 || (outer_code == MINUS
16658 && reg_or_sub_cint64_operand (x, VOIDmode))
16659 || ((outer_code == SET
16660 || outer_code == IOR
16661 || outer_code == XOR)
16662 && (INTVAL (x)
16663 & ~ (unsigned HOST_WIDE_INT) 0xffffffff) == 0))
16665 *total = COSTS_N_INSNS (1);
16666 return true;
16668 /* FALLTHRU */
16670 case CONST_DOUBLE:
16671 if (mode == DImode
16672 && ((outer_code == AND
16673 && (CONST_OK_FOR_LETTER_P (INTVAL (x), 'K')
16674 || CONST_OK_FOR_LETTER_P (INTVAL (x), 'L')
16675 || mask64_operand (x, DImode)))
16676 || ((outer_code == IOR || outer_code == XOR)
16677 && CONST_DOUBLE_HIGH (x) == 0
16678 && (CONST_DOUBLE_LOW (x)
16679 & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0)))
16681 *total = 0;
16682 return true;
16684 else if (mode == DImode
16685 && (outer_code == SET
16686 || outer_code == IOR
16687 || outer_code == XOR)
16688 && CONST_DOUBLE_HIGH (x) == 0)
16690 *total = COSTS_N_INSNS (1);
16691 return true;
16693 /* FALLTHRU */
16695 case CONST:
16696 case HIGH:
16697 case SYMBOL_REF:
16698 case MEM:
16699 /* When optimizing for size, MEM should be slightly more expensive
16700 than generating address, e.g., (plus (reg) (const)).
16701 L1 cache latecy is about two instructions. */
16702 *total = optimize_size ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (2);
16703 return true;
16705 case LABEL_REF:
16706 *total = 0;
16707 return true;
16709 case PLUS:
16710 if (mode == DFmode)
16712 if (GET_CODE (XEXP (x, 0)) == MULT)
16714 /* FNMA accounted in outer NEG. */
16715 if (outer_code == NEG)
16716 *total = rs6000_cost->dmul - rs6000_cost->fp;
16717 else
16718 *total = rs6000_cost->dmul;
16720 else
16721 *total = rs6000_cost->fp;
16723 else if (mode == SFmode)
16725 /* FNMA accounted in outer NEG. */
16726 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
16727 *total = 0;
16728 else
16729 *total = rs6000_cost->fp;
16731 else if (GET_CODE (XEXP (x, 0)) == MULT)
16733 /* The rs6000 doesn't have shift-and-add instructions. */
16734 rs6000_rtx_costs (XEXP (x, 0), MULT, PLUS, total);
16735 *total += COSTS_N_INSNS (1);
16737 else
16738 *total = COSTS_N_INSNS (1);
16739 return false;
16741 case MINUS:
16742 if (mode == DFmode)
16744 if (GET_CODE (XEXP (x, 0)) == MULT)
16746 /* FNMA accounted in outer NEG. */
16747 if (outer_code == NEG)
16748 *total = 0;
16749 else
16750 *total = rs6000_cost->dmul;
16752 else
16753 *total = rs6000_cost->fp;
16755 else if (mode == SFmode)
16757 /* FNMA accounted in outer NEG. */
16758 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
16759 *total = 0;
16760 else
16761 *total = rs6000_cost->fp;
16763 else if (GET_CODE (XEXP (x, 0)) == MULT)
16765 /* The rs6000 doesn't have shift-and-sub instructions. */
16766 rs6000_rtx_costs (XEXP (x, 0), MULT, MINUS, total);
16767 *total += COSTS_N_INSNS (1);
16769 else
16770 *total = COSTS_N_INSNS (1);
16771 return false;
16773 case MULT:
16774 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
16776 if (INTVAL (XEXP (x, 1)) >= -256
16777 && INTVAL (XEXP (x, 1)) <= 255)
16778 *total = rs6000_cost->mulsi_const9;
16779 else
16780 *total = rs6000_cost->mulsi_const;
16782 /* FMA accounted in outer PLUS/MINUS. */
16783 else if ((mode == DFmode || mode == SFmode)
16784 && (outer_code == PLUS || outer_code == MINUS))
16785 *total = 0;
16786 else if (mode == DFmode)
16787 *total = rs6000_cost->dmul;
16788 else if (mode == SFmode)
16789 *total = rs6000_cost->fp;
16790 else if (mode == DImode)
16791 *total = rs6000_cost->muldi;
16792 else
16793 *total = rs6000_cost->mulsi;
16794 return false;
16796 case DIV:
16797 case MOD:
16798 if (FLOAT_MODE_P (mode))
16800 *total = mode == DFmode ? rs6000_cost->ddiv
16801 : rs6000_cost->sdiv;
16802 return false;
16804 if (GET_CODE (XEXP (x, 1)) == CONST_INT
16805 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
16807 *total = COSTS_N_INSNS (2);
16808 return false;
16810 /* FALLTHRU */
16812 case UDIV:
16813 case UMOD:
16814 if (GET_MODE (XEXP (x, 1)) == DImode)
16815 *total = rs6000_cost->divdi;
16816 else
16817 *total = rs6000_cost->divsi;
16818 return false;
16820 case FFS:
16821 *total = COSTS_N_INSNS (4);
16822 return false;
16824 case NOT:
16825 if (outer_code == AND || outer_code == IOR || outer_code == XOR)
16827 *total = 0;
16828 return false;
16830 /* FALLTHRU */
16832 case AND:
16833 case IOR:
16834 case XOR:
16835 case ZERO_EXTRACT:
16836 *total = COSTS_N_INSNS (1);
16837 return false;
16839 case ASHIFT:
16840 case ASHIFTRT:
16841 case LSHIFTRT:
16842 case ROTATE:
16843 case ROTATERT:
16844 /* Handle mul_highpart. */
16845 if (outer_code == TRUNCATE
16846 && GET_CODE (XEXP (x, 0)) == MULT)
16848 if (mode == DImode)
16849 *total = rs6000_cost->muldi;
16850 else
16851 *total = rs6000_cost->mulsi;
16852 return true;
16854 else if (outer_code == AND)
16855 *total = 0;
16856 else
16857 *total = COSTS_N_INSNS (1);
16858 return false;
16860 case SIGN_EXTEND:
16861 case ZERO_EXTEND:
16862 if (GET_CODE (XEXP (x, 0)) == MEM)
16863 *total = 0;
16864 else
16865 *total = COSTS_N_INSNS (1);
16866 return false;
16868 case COMPARE:
16869 case NEG:
16870 case ABS:
16871 if (!FLOAT_MODE_P (mode))
16873 *total = COSTS_N_INSNS (1);
16874 return false;
16876 /* FALLTHRU */
16878 case FLOAT:
16879 case UNSIGNED_FLOAT:
16880 case FIX:
16881 case UNSIGNED_FIX:
16882 case FLOAT_EXTEND:
16883 case FLOAT_TRUNCATE:
16884 *total = rs6000_cost->fp;
16885 return false;
16887 case UNSPEC:
16888 switch (XINT (x, 1))
16890 case UNSPEC_FRSP:
16891 *total = rs6000_cost->fp;
16892 return true;
16894 default:
16895 break;
16897 break;
16899 case CALL:
16900 case IF_THEN_ELSE:
16901 if (optimize_size)
16903 *total = COSTS_N_INSNS (1);
16904 return true;
16906 else if (FLOAT_MODE_P (mode)
16907 && TARGET_PPC_GFXOPT && TARGET_HARD_FLOAT && TARGET_FPRS)
16909 *total = rs6000_cost->fp;
16910 return false;
16913 break;
16915 default:
16916 break;
16919 return false;
16922 /* A C expression returning the cost of moving data from a register of class
16923 CLASS1 to one of CLASS2. */
16926 rs6000_register_move_cost (enum machine_mode mode,
16927 enum reg_class from, enum reg_class to)
16929 /* Moves from/to GENERAL_REGS. */
16930 if (reg_classes_intersect_p (to, GENERAL_REGS)
16931 || reg_classes_intersect_p (from, GENERAL_REGS))
16933 if (! reg_classes_intersect_p (to, GENERAL_REGS))
16934 from = to;
16936 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
16937 return (rs6000_memory_move_cost (mode, from, 0)
16938 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
16940 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift.... */
16941 else if (from == CR_REGS)
16942 return 4;
16944 else
16945 /* A move will cost one instruction per GPR moved. */
16946 return 2 * HARD_REGNO_NREGS (0, mode);
16949 /* Moving between two similar registers is just one instruction. */
16950 else if (reg_classes_intersect_p (to, from))
16951 return mode == TFmode ? 4 : 2;
16953 /* Everything else has to go through GENERAL_REGS. */
16954 else
16955 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
16956 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
16959 /* A C expressions returning the cost of moving data of MODE from a register to
16960 or from memory. */
16963 rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
16964 int in ATTRIBUTE_UNUSED)
16966 if (reg_classes_intersect_p (class, GENERAL_REGS))
16967 return 4 * HARD_REGNO_NREGS (0, mode);
16968 else if (reg_classes_intersect_p (class, FLOAT_REGS))
16969 return 4 * HARD_REGNO_NREGS (32, mode);
16970 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
16971 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO, mode);
16972 else
16973 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
16976 /* Return an RTX representing where to find the function value of a
16977 function returning MODE. */
16978 static rtx
16979 rs6000_complex_function_value (enum machine_mode mode)
16981 unsigned int regno;
16982 rtx r1, r2;
16983 enum machine_mode inner = GET_MODE_INNER (mode);
16984 unsigned int inner_bytes = GET_MODE_SIZE (inner);
16986 if (FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
16987 regno = FP_ARG_RETURN;
16988 else
16990 regno = GP_ARG_RETURN;
16992 /* 32-bit is OK since it'll go in r3/r4. */
16993 if (TARGET_32BIT && inner_bytes >= 4)
16994 return gen_rtx_REG (mode, regno);
16997 if (inner_bytes >= 8)
16998 return gen_rtx_REG (mode, regno);
17000 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
17001 const0_rtx);
17002 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
17003 GEN_INT (inner_bytes));
17004 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
17007 /* Define how to find the value returned by a function.
17008 VALTYPE is the data type of the value (as a tree).
17009 If the precise function being called is known, FUNC is its FUNCTION_DECL;
17010 otherwise, FUNC is 0.
17012 On the SPE, both FPs and vectors are returned in r3.
17014 On RS/6000 an integer value is in r3 and a floating-point value is in
17015 fp1, unless -msoft-float. */
17018 rs6000_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
17020 enum machine_mode mode;
17021 unsigned int regno;
17023 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
17025 /* Long long return value need be split in -mpowerpc64, 32bit ABI. */
17026 return gen_rtx_PARALLEL (DImode,
17027 gen_rtvec (2,
17028 gen_rtx_EXPR_LIST (VOIDmode,
17029 gen_rtx_REG (SImode, GP_ARG_RETURN),
17030 const0_rtx),
17031 gen_rtx_EXPR_LIST (VOIDmode,
17032 gen_rtx_REG (SImode,
17033 GP_ARG_RETURN + 1),
17034 GEN_INT (4))));
17037 if ((INTEGRAL_TYPE_P (valtype)
17038 && TYPE_PRECISION (valtype) < BITS_PER_WORD)
17039 || POINTER_TYPE_P (valtype))
17040 mode = TARGET_32BIT ? SImode : DImode;
17041 else
17042 mode = TYPE_MODE (valtype);
17044 if (SCALAR_FLOAT_TYPE_P (valtype) && TARGET_HARD_FLOAT && TARGET_FPRS)
17045 regno = FP_ARG_RETURN;
17046 else if (TREE_CODE (valtype) == COMPLEX_TYPE
17047 && targetm.calls.split_complex_arg)
17048 return rs6000_complex_function_value (mode);
17049 else if (TREE_CODE (valtype) == VECTOR_TYPE
17050 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI
17051 && ALTIVEC_VECTOR_MODE(mode))
17052 regno = ALTIVEC_ARG_RETURN;
17053 else
17054 regno = GP_ARG_RETURN;
17056 return gen_rtx_REG (mode, regno);
17059 /* Define how to find the value returned by a library function
17060 assuming the value has mode MODE. */
17062 rs6000_libcall_value (enum machine_mode mode)
17064 unsigned int regno;
17066 if (GET_MODE_CLASS (mode) == MODE_FLOAT
17067 && TARGET_HARD_FLOAT && TARGET_FPRS)
17068 regno = FP_ARG_RETURN;
17069 else if (ALTIVEC_VECTOR_MODE (mode)
17070 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
17071 regno = ALTIVEC_ARG_RETURN;
17072 else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
17073 return rs6000_complex_function_value (mode);
17074 else
17075 regno = GP_ARG_RETURN;
17077 return gen_rtx_REG (mode, regno);
17080 /* Define the offset between two registers, FROM to be eliminated and its
17081 replacement TO, at the start of a routine. */
17082 HOST_WIDE_INT
17083 rs6000_initial_elimination_offset (int from, int to)
17085 rs6000_stack_t *info = rs6000_stack_info ();
17086 HOST_WIDE_INT offset;
17088 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
17089 offset = info->push_p ? 0 : -info->total_size;
17090 else if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
17091 offset = info->total_size;
17092 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
17093 offset = info->push_p ? info->total_size : 0;
17094 else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
17095 offset = 0;
17096 else
17097 abort ();
17099 return offset;
17102 /* Return true if TYPE is of type __ev64_opaque__. */
17104 static bool
17105 is_ev64_opaque_type (tree type)
17107 return (TARGET_SPE
17108 && (type == opaque_V2SI_type_node
17109 || type == opaque_V2SF_type_node
17110 || type == opaque_p_V2SI_type_node));
17113 static rtx
17114 rs6000_dwarf_register_span (rtx reg)
17116 unsigned regno;
17118 if (!TARGET_SPE || !SPE_VECTOR_MODE (GET_MODE (reg)))
17119 return NULL_RTX;
17121 regno = REGNO (reg);
17123 /* The duality of the SPE register size wreaks all kinds of havoc.
17124 This is a way of distinguishing r0 in 32-bits from r0 in
17125 64-bits. */
17126 return
17127 gen_rtx_PARALLEL (VOIDmode,
17128 BYTES_BIG_ENDIAN
17129 ? gen_rtvec (2,
17130 gen_rtx_REG (SImode, regno + 1200),
17131 gen_rtx_REG (SImode, regno))
17132 : gen_rtvec (2,
17133 gen_rtx_REG (SImode, regno),
17134 gen_rtx_REG (SImode, regno + 1200)));
/* Map internal gcc register numbers to DWARF2 register numbers.  */

unsigned int
rs6000_dbx_register_number (unsigned int regno)
{
  /* GPRs and FPRs (0..63) map to themselves; so does everything when
     not emitting DWARF2.  */
  if (regno <= 63 || write_symbols != DWARF2_DEBUG)
    return regno;
  if (regno == MQ_REGNO)
    return 100;
  if (regno == LINK_REGISTER_REGNUM)
    return 108;
  if (regno == COUNT_REGISTER_REGNUM)
    return 109;
  /* Condition registers occupy DWARF numbers 86..93.  */
  if (CR_REGNO_P (regno))
    return regno - CR0_REGNO + 86;
  if (regno == XER_REGNO)
    return 101;
  /* AltiVec registers occupy DWARF numbers 1124..1155.  */
  if (ALTIVEC_REGNO_P (regno))
    return regno - FIRST_ALTIVEC_REGNO + 1124;
  if (regno == VRSAVE_REGNO)
    return 356;
  if (regno == VSCR_REGNO)
    return 67;
  if (regno == SPE_ACC_REGNO)
    return 99;
  if (regno == SPEFSCR_REGNO)
    return 612;
  /* SPE high reg number.  We get these values of regno from
     rs6000_dwarf_register_span.  */
  if (regno >= 1200 && regno < 1232)
    return regno;

  /* Any other regno has no DWARF mapping.  */
  abort ();
}
17172 /* target hook eh_return_filter_mode */
17173 static enum machine_mode
17174 rs6000_eh_return_filter_mode (void)
17176 return TARGET_32BIT ? SImode : word_mode;
17179 #include "gt-rs6000.h"