/* Subroutines used for code generation on IBM RS/6000.
   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 2, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING.  If not, write to the
   Free Software Foundation, 51 Franklin Street, Fifth Floor, Boston,
   MA 02110-1301, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-attr.h"
#include "flags.h"
#include "recog.h"
#include "obstack.h"
#include "tree.h"
#include "expr.h"
#include "optabs.h"
#include "except.h"
#include "function.h"
#include "output.h"
#include "basic-block.h"
#include "integrate.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"
#include "langhooks.h"
#include "reload.h"
#include "cfglayout.h"
#include "sched-int.h"
#include "tree-gimple.h"
#include "intl.h"
#include "params.h"
#include "tm-constrs.h"
#if TARGET_XCOFF
#include "xcoffout.h"  /* get declarations of xcoff_*_section_name */
#endif
#if TARGET_MACHO
#include "gstab.h"  /* for N_SLINE */
#endif
#ifndef TARGET_NO_PROTOTYPE
#define TARGET_NO_PROTOTYPE 0
#endif

#define min(A,B)  ((A) < (B) ? (A) : (B))
#define max(A,B)  ((A) > (B) ? (A) : (B))
/* Structure used to define the rs6000 stack */
typedef struct rs6000_stack {
  int first_gp_reg_save;        /* first callee saved GP register used */
  int first_fp_reg_save;        /* first callee saved FP register used */
  int first_altivec_reg_save;   /* first callee saved AltiVec register used */
  int lr_save_p;                /* true if the link reg needs to be saved */
  int cr_save_p;                /* true if the CR reg needs to be saved */
  unsigned int vrsave_mask;     /* mask of vec registers to save */
  int push_p;                   /* true if we need to allocate stack space */
  int calls_p;                  /* true if the function makes any calls */
  int world_save_p;             /* true if we're saving *everything*:
                                   r13-r31, cr, f14-f31, vrsave, v20-v31  */
  enum rs6000_abi abi;          /* which ABI to use */
  int gp_save_offset;           /* offset to save GP regs from initial SP */
  int fp_save_offset;           /* offset to save FP regs from initial SP */
  int altivec_save_offset;      /* offset to save AltiVec regs from initial SP */
  int lr_save_offset;           /* offset to save LR from initial SP */
  int cr_save_offset;           /* offset to save CR from initial SP */
  int vrsave_save_offset;       /* offset to save VRSAVE from initial SP */
  int spe_gp_save_offset;       /* offset to save spe 64-bit gprs  */
  int varargs_save_offset;      /* offset to save the varargs registers */
  int ehrd_offset;              /* offset to EH return data */
  int reg_size;                 /* register size (4 or 8) */
  HOST_WIDE_INT vars_size;      /* variable save area size */
  int parm_size;                /* outgoing parameter size */
  int save_size;                /* save area size */
  int fixed_size;               /* fixed size of stack frame */
  int gp_size;                  /* size of saved GP registers */
  int fp_size;                  /* size of saved FP registers */
  int altivec_size;             /* size of saved AltiVec registers */
  int cr_size;                  /* size to hold CR if not in save_size */
  int vrsave_size;              /* size to hold VRSAVE if not in save_size */
  int altivec_padding_size;     /* size of altivec alignment padding if
                                   not in save_size */
  int spe_gp_size;              /* size of 64-bit GPR save size for SPE */
  int spe_padding_size;
  HOST_WIDE_INT total_size;     /* total bytes allocated for stack */
  int spe_64bit_regs_used;
} rs6000_stack_t;
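
/* Illustrative note, added for exposition (not in the original source):
   this structure is filled in by rs6000_stack_info, declared later in
   this file.  Informally, the total frame size ends up as roughly

       total_size = fixed_size + vars_size + parm_size + save_size

   rounded up to the ABI stack boundary, and the *_offset fields are
   then assigned within that frame.  The formula above is a sketch of
   the relationship, not the verbatim computation.  */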
/* A C structure for machine-specific, per-function data.
   This is added to the cfun structure.  */
typedef struct machine_function GTY(())
{
  /* Flags if __builtin_return_address (n) with n >= 1 was used.  */
  int ra_needs_full_frame;
  /* Some local-dynamic symbol.  */
  const char *some_ld_name;
  /* Whether the instruction chain has been scanned already.  */
  int insn_chain_scanned_p;
  /* Flags if __builtin_return_address (0) was used.  */
  int ra_need_lr;
  /* Offset from virtual_stack_vars_rtx to the start of the ABI_V4
     varargs save area.  */
  HOST_WIDE_INT varargs_save_offset;
} machine_function;
/* Target cpu type */

enum processor_type rs6000_cpu;
struct rs6000_cpu_select rs6000_select[3] =
{
  /* switch             name,                   tune    arch */
  { (const char *)0,    "--with-cpu=",          1,      1 },
  { (const char *)0,    "-mcpu=",               1,      1 },
  { (const char *)0,    "-mtune=",              1,      0 },
};
static GTY(()) bool rs6000_cell_dont_microcode;

/* Always emit branch hint bits.  */
static GTY(()) bool rs6000_always_hint;

/* Schedule instructions for group formation.  */
static GTY(()) bool rs6000_sched_groups;

/* Align branch targets.  */
static GTY(()) bool rs6000_align_branch_targets;

/* Support for -msched-costly-dep option.  */
const char *rs6000_sched_costly_dep_str;
enum rs6000_dependence_cost rs6000_sched_costly_dep;

/* Support for -minsert-sched-nops option.  */
const char *rs6000_sched_insert_nops_str;
enum rs6000_nop_insertion rs6000_sched_insert_nops;

/* Support targetm.vectorize.builtin_mask_for_load.  */
static GTY(()) tree altivec_builtin_mask_for_load;

/* Size of long double.  */
int rs6000_long_double_type_size;
167 /* IEEE quad extended precision long double. */
170 /* Whether -mabi=altivec has appeared. */
171 int rs6000_altivec_abi
;
173 /* Nonzero if we want SPE ABI extensions. */
176 /* Nonzero if floating point operations are done in the GPRs. */
177 int rs6000_float_gprs
= 0;
179 /* Nonzero if we want Darwin's struct-by-value-in-regs ABI. */
180 int rs6000_darwin64_abi
;
182 /* Set to nonzero once AIX common-mode calls have been defined. */
183 static GTY(()) int common_mode_defined
;
185 /* Save information from a "cmpxx" operation until the branch or scc is
187 rtx rs6000_compare_op0
, rs6000_compare_op1
;
188 int rs6000_compare_fp_p
;
190 /* Label number of label created for -mrelocatable, to call to so we can
191 get the address of the GOT section */
192 int rs6000_pic_labelno
;
/* Which abi to adhere to */
const char *rs6000_abi_name;

/* Semantics of the small data area */
enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;

/* Which small data model to use */
const char *rs6000_sdata_name = (char *)0;

/* Counter for labels which are to be placed in .fixup.  */
int fixuplabelno = 0;

/* Bit size of immediate TLS offsets and string from which it is decoded.  */
int rs6000_tls_size = 32;
const char *rs6000_tls_size_string;

/* ABI enumeration available for subtarget to use.  */
enum rs6000_abi rs6000_current_abi;
/* Whether to use variant of AIX ABI for PowerPC64 Linux.  */
int dot_symbols;

/* Debug flags */
const char *rs6000_debug_name;
int rs6000_debug_stack;         /* debug stack applications */
int rs6000_debug_arg;           /* debug argument handling */
/* Value is TRUE if register/mode pair is acceptable.  */
bool rs6000_hard_regno_mode_ok_p[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

/* Built in types.  */

tree rs6000_builtin_types[RS6000_BTI_MAX];
tree rs6000_builtin_decls[RS6000_BUILTIN_COUNT];

const char *rs6000_traceback_name;
static enum {
  traceback_default = 0,
  traceback_none,
  traceback_part,
  traceback_full
} rs6000_traceback;
/* Flag to say the TOC is initialized */
int toc_initialized;
char toc_label_name[10];

/* Cached value of rs6000_variable_issue. This is cached in
   rs6000_variable_issue hook and returned from rs6000_sched_reorder2.  */
static short cached_can_issue_more;

static GTY(()) section *read_only_data_section;
static GTY(()) section *private_data_section;
static GTY(()) section *read_only_private_data_section;
static GTY(()) section *sdata2_section;
static GTY(()) section *toc_section;
/* Control alignment for fields within structures.  */
/* String from -malign-XXXXX.  */
int rs6000_alignment_flags;

/* True for any options that were explicitly set.  */
struct {
  bool aix_struct_ret;          /* True if -maix-struct-ret was used.  */
  bool alignment;               /* True if -malign- was used.  */
  bool abi;                     /* True if -mabi=spe/nospe was used.  */
  bool spe;                     /* True if -mspe= was used.  */
  bool float_gprs;              /* True if -mfloat-gprs= was used.  */
  bool isel;                    /* True if -misel was used.  */
  bool long_double;             /* True if -mlong-double- was used.  */
  bool ieee;                    /* True if -mabi=ieee/ibmlongdouble used.  */
} rs6000_explicit_options;
struct builtin_description
{
  /* mask is not const because we're going to alter it below.  This
     nonsense will go away when we rewrite the -march infrastructure
     to give us more target flag bits.  */
  unsigned int mask;
  const enum insn_code icode;
  const char *const name;
  const enum rs6000_builtins code;
};
/* Target cpu costs.  */

struct processor_costs {
  const int mulsi;        /* cost of SImode multiplication.  */
  const int mulsi_const;  /* cost of SImode multiplication by constant.  */
  const int mulsi_const9; /* cost of SImode mult by short constant.  */
  const int muldi;        /* cost of DImode multiplication.  */
  const int divsi;        /* cost of SImode division.  */
  const int divdi;        /* cost of DImode division.  */
  const int fp;           /* cost of simple SFmode and DFmode insns.  */
  const int dmul;         /* cost of DFmode multiplication (and fmadd).  */
  const int sdiv;         /* cost of SFmode division (fdivs).  */
  const int ddiv;         /* cost of DFmode division (fdiv).  */
};

const struct processor_costs *rs6000_cost;
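
/* Illustrative note, added for exposition (not in the original source):
   COSTS_N_INSNS, from rtl.h, scales an instruction count onto the scale
   used by the rtx-cost machinery (COSTS_N_INSNS (N) is (N) * 4 in this
   era of GCC), so the tables below express each operation's latency
   relative to a single add.  A sketch of a query:

       int mul_cost = rs6000_cost->mulsi;
       int adds_equivalent = mul_cost / COSTS_N_INSNS (1);

   rs6000_rtx_costs, declared below, is what actually consults these
   fields.  */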
/* Processor costs (relative to an add) */

/* Instruction size costs on 32bit processors.  */
static const
struct processor_costs size32_cost = {
  COSTS_N_INSNS (1),    /* mulsi */
  COSTS_N_INSNS (1),    /* mulsi_const */
  COSTS_N_INSNS (1),    /* mulsi_const9 */
  COSTS_N_INSNS (1),    /* muldi */
  COSTS_N_INSNS (1),    /* divsi */
  COSTS_N_INSNS (1),    /* divdi */
  COSTS_N_INSNS (1),    /* fp */
  COSTS_N_INSNS (1),    /* dmul */
  COSTS_N_INSNS (1),    /* sdiv */
  COSTS_N_INSNS (1),    /* ddiv */
};

/* Instruction size costs on 64bit processors.  */
static const
struct processor_costs size64_cost = {
  COSTS_N_INSNS (1),    /* mulsi */
  COSTS_N_INSNS (1),    /* mulsi_const */
  COSTS_N_INSNS (1),    /* mulsi_const9 */
  COSTS_N_INSNS (1),    /* muldi */
  COSTS_N_INSNS (1),    /* divsi */
  COSTS_N_INSNS (1),    /* divdi */
  COSTS_N_INSNS (1),    /* fp */
  COSTS_N_INSNS (1),    /* dmul */
  COSTS_N_INSNS (1),    /* sdiv */
  COSTS_N_INSNS (1),    /* ddiv */
};
/* Instruction costs on RIOS1 processors.  */
static const
struct processor_costs rios1_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (19),   /* divsi */
  COSTS_N_INSNS (19),   /* divdi */
  COSTS_N_INSNS (2),    /* fp */
  COSTS_N_INSNS (2),    /* dmul */
  COSTS_N_INSNS (19),   /* sdiv */
  COSTS_N_INSNS (19),   /* ddiv */
};

/* Instruction costs on RIOS2 processors.  */
static const
struct processor_costs rios2_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (13),   /* divsi */
  COSTS_N_INSNS (13),   /* divdi */
  COSTS_N_INSNS (2),    /* fp */
  COSTS_N_INSNS (2),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
};

/* Instruction costs on RS64A processors.  */
static const
struct processor_costs rs64a_cost = {
  COSTS_N_INSNS (20),   /* mulsi */
  COSTS_N_INSNS (12),   /* mulsi_const */
  COSTS_N_INSNS (8),    /* mulsi_const9 */
  COSTS_N_INSNS (34),   /* muldi */
  COSTS_N_INSNS (65),   /* divsi */
  COSTS_N_INSNS (67),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (31),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
};

/* Instruction costs on MPCCORE processors.  */
static const
struct processor_costs mpccore_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (6),    /* divsi */
  COSTS_N_INSNS (6),    /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (10),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
};

/* Instruction costs on PPC403 processors.  */
static const
struct processor_costs ppc403_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (33),   /* divsi */
  COSTS_N_INSNS (33),   /* divdi */
  COSTS_N_INSNS (11),   /* fp */
  COSTS_N_INSNS (11),   /* dmul */
  COSTS_N_INSNS (11),   /* sdiv */
  COSTS_N_INSNS (11),   /* ddiv */
};

/* Instruction costs on PPC405 processors.  */
static const
struct processor_costs ppc405_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (35),   /* divsi */
  COSTS_N_INSNS (35),   /* divdi */
  COSTS_N_INSNS (11),   /* fp */
  COSTS_N_INSNS (11),   /* dmul */
  COSTS_N_INSNS (11),   /* sdiv */
  COSTS_N_INSNS (11),   /* ddiv */
};

/* Instruction costs on PPC440 processors.  */
static const
struct processor_costs ppc440_cost = {
  COSTS_N_INSNS (3),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (3),    /* muldi */
  COSTS_N_INSNS (34),   /* divsi */
  COSTS_N_INSNS (34),   /* divdi */
  COSTS_N_INSNS (5),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (19),   /* sdiv */
  COSTS_N_INSNS (33),   /* ddiv */
};

/* Instruction costs on PPC601 processors.  */
static const
struct processor_costs ppc601_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (5),    /* mulsi_const */
  COSTS_N_INSNS (5),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (36),   /* divsi */
  COSTS_N_INSNS (36),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
};

/* Instruction costs on PPC603 processors.  */
static const
struct processor_costs ppc603_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (37),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (33),   /* ddiv */
};

/* Instruction costs on PPC604 processors.  */
static const
struct processor_costs ppc604_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (20),   /* divsi */
  COSTS_N_INSNS (20),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
};

/* Instruction costs on PPC604e processors.  */
static const
struct processor_costs ppc604e_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (20),   /* divsi */
  COSTS_N_INSNS (20),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
};

/* Instruction costs on PPC620 processors.  */
static const
struct processor_costs ppc620_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (7),    /* muldi */
  COSTS_N_INSNS (21),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
};

/* Instruction costs on PPC630 processors.  */
static const
struct processor_costs ppc630_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (7),    /* muldi */
  COSTS_N_INSNS (21),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (21),   /* ddiv */
};

/* Instruction costs on Cell processor.  */
/* COSTS_N_INSNS (1) ~ one add.  */
static const
struct processor_costs ppccell_cost = {
  COSTS_N_INSNS (9/2)+2,    /* mulsi */
  COSTS_N_INSNS (6/2),      /* mulsi_const */
  COSTS_N_INSNS (6/2),      /* mulsi_const9 */
  COSTS_N_INSNS (15/2)+2,   /* muldi */
  COSTS_N_INSNS (38/2),     /* divsi */
  COSTS_N_INSNS (70/2),     /* divdi */
  COSTS_N_INSNS (10/2),     /* fp */
  COSTS_N_INSNS (10/2),     /* dmul */
  COSTS_N_INSNS (74/2),     /* sdiv */
  COSTS_N_INSNS (74/2),     /* ddiv */
};

/* Instruction costs on PPC750 and PPC7400 processors.  */
static const
struct processor_costs ppc750_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (17),   /* divsi */
  COSTS_N_INSNS (17),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
};

/* Instruction costs on PPC7450 processors.  */
static const
struct processor_costs ppc7450_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (23),   /* divsi */
  COSTS_N_INSNS (23),   /* divdi */
  COSTS_N_INSNS (5),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (21),   /* sdiv */
  COSTS_N_INSNS (35),   /* ddiv */
};

/* Instruction costs on PPC8540 processors.  */
static const
struct processor_costs ppc8540_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (19),   /* divsi */
  COSTS_N_INSNS (19),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (29),   /* sdiv */
  COSTS_N_INSNS (29),   /* ddiv */
};

/* Instruction costs on POWER4 and POWER5 processors.  */
static const
struct processor_costs power4_cost = {
  COSTS_N_INSNS (3),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (18),   /* divsi */
  COSTS_N_INSNS (34),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
};

/* Instruction costs on POWER6 processors.  */
static const
struct processor_costs power6_cost = {
  COSTS_N_INSNS (8),    /* mulsi */
  COSTS_N_INSNS (8),    /* mulsi_const */
  COSTS_N_INSNS (8),    /* mulsi_const9 */
  COSTS_N_INSNS (8),    /* muldi */
  COSTS_N_INSNS (22),   /* divsi */
  COSTS_N_INSNS (28),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (13),   /* sdiv */
  COSTS_N_INSNS (16),   /* ddiv */
};
static bool rs6000_function_ok_for_sibcall (tree, tree);
static const char *rs6000_invalid_within_doloop (rtx);
static rtx rs6000_generate_compare (enum rtx_code);
static void rs6000_maybe_dead (rtx);
static void rs6000_emit_stack_tie (void);
static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
static rtx spe_synthesize_frame_save (rtx);
static bool spe_func_has_64bit_regs_p (void);
static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
                             int, HOST_WIDE_INT);
static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
static unsigned rs6000_hash_constant (rtx);
static unsigned toc_hash_function (const void *);
static int toc_hash_eq (const void *, const void *);
static int constant_pool_expr_1 (rtx, int *, int *);
static bool constant_pool_expr_p (rtx);
static bool legitimate_small_data_p (enum machine_mode, rtx);
static bool legitimate_indexed_address_p (rtx, int);
static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
static struct machine_function * rs6000_init_machine_status (void);
static bool rs6000_assemble_integer (rtx, unsigned int, int);
static bool no_global_regs_above (int);
#ifdef HAVE_GAS_HIDDEN
static void rs6000_assemble_visibility (tree, int);
#endif
static int rs6000_ra_ever_killed (void);
static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
static bool rs6000_ms_bitfield_layout_p (tree);
static tree rs6000_handle_struct_attribute (tree *, tree, tree, int, bool *);
static void rs6000_eliminate_indexed_memrefs (rtx operands[2]);
static const char *rs6000_mangle_fundamental_type (tree);
extern const struct attribute_spec rs6000_attribute_table[];
static void rs6000_set_default_type_attributes (tree);
static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
                                    tree);
static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
static bool rs6000_return_in_memory (tree, tree);
static void rs6000_file_start (void);
#if TARGET_ELF
static unsigned int rs6000_elf_section_type_flags (tree, const char *, int);
static void rs6000_elf_asm_out_constructor (rtx, int);
static void rs6000_elf_asm_out_destructor (rtx, int);
static void rs6000_elf_end_indicate_exec_stack (void) ATTRIBUTE_UNUSED;
static void rs6000_elf_asm_init_sections (void);
static section *rs6000_elf_select_section (tree, int, unsigned HOST_WIDE_INT);
static void rs6000_elf_unique_section (tree, int);
static section *rs6000_elf_select_rtx_section (enum machine_mode, rtx,
                                               unsigned HOST_WIDE_INT);
static void rs6000_elf_encode_section_info (tree, rtx, int)
     ATTRIBUTE_UNUSED;
#endif
static bool rs6000_use_blocks_for_constant_p (enum machine_mode, rtx);
#if TARGET_XCOFF
static void rs6000_xcoff_asm_output_anchor (rtx);
static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
static void rs6000_xcoff_asm_init_sections (void);
static void rs6000_xcoff_asm_named_section (const char *, unsigned int, tree);
static section *rs6000_xcoff_select_section (tree, int,
                                             unsigned HOST_WIDE_INT);
static void rs6000_xcoff_unique_section (tree, int);
static section *rs6000_xcoff_select_rtx_section
        (enum machine_mode, rtx, unsigned HOST_WIDE_INT);
static const char * rs6000_xcoff_strip_name_encoding (const char *);
static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
static void rs6000_xcoff_file_start (void);
static void rs6000_xcoff_file_end (void);
#endif
static int rs6000_variable_issue (FILE *, int, rtx, int);
static bool rs6000_rtx_costs (rtx, int, int, int *);
static int rs6000_adjust_cost (rtx, rtx, rtx, int);
static void rs6000_sched_init (FILE *, int, int);
static bool is_microcoded_insn (rtx);
static bool is_nonpipeline_insn (rtx);
static bool is_cracked_insn (rtx);
static bool is_branch_slot_insn (rtx);
static bool is_load_insn (rtx);
static rtx get_store_dest (rtx pat);
static bool is_store_insn (rtx);
static bool set_to_load_agen (rtx, rtx);
static bool adjacent_mem_locations (rtx, rtx);
static int rs6000_adjust_priority (rtx, int);
static int rs6000_issue_rate (void);
static bool rs6000_is_costly_dependence (dep_t, int, int);
static rtx get_next_active_insn (rtx, rtx);
static bool insn_terminates_group_p (rtx, enum group_termination);
static bool insn_must_be_first_in_group (rtx);
static bool insn_must_be_last_in_group (rtx);
static bool is_costly_group (rtx *, rtx);
static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
static int redefine_groups (FILE *, int, rtx, rtx);
static int pad_groups (FILE *, int, rtx, rtx);
static void rs6000_sched_finish (FILE *, int);
static int rs6000_sched_reorder (FILE *, int, rtx *, int *, int);
static int rs6000_sched_reorder2 (FILE *, int, rtx *, int *, int);
static int rs6000_use_sched_lookahead (void);
static int rs6000_use_sched_lookahead_guard (rtx);
static tree rs6000_builtin_mask_for_load (void);
static tree rs6000_builtin_mul_widen_even (tree);
static tree rs6000_builtin_mul_widen_odd (tree);

static void def_builtin (int, const char *, tree, int);
static void rs6000_init_builtins (void);
static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
static void altivec_init_builtins (void);
static void rs6000_common_init_builtins (void);
static void rs6000_init_libfuncs (void);

static void enable_mask_for_builtins (struct builtin_description *, int,
                                      enum rs6000_builtins,
                                      enum rs6000_builtins);
static tree build_opaque_vector_type (tree, int);
static void spe_init_builtins (void);
static rtx spe_expand_builtin (tree, rtx, bool *);
static rtx spe_expand_stv_builtin (enum insn_code, tree);
static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
static rs6000_stack_t *rs6000_stack_info (void);
static void debug_stack_info (rs6000_stack_t *);

static rtx altivec_expand_builtin (tree, rtx, bool *);
static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
static rtx altivec_expand_st_builtin (tree, rtx, bool *);
static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
static rtx altivec_expand_predicate_builtin (enum insn_code,
                                             const char *, tree, rtx);
static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
static rtx altivec_expand_stv_builtin (enum insn_code, tree);
static rtx altivec_expand_vec_init_builtin (tree, tree, rtx);
static rtx altivec_expand_vec_set_builtin (tree);
static rtx altivec_expand_vec_ext_builtin (tree, rtx);
static int get_element_number (tree, tree);
static bool rs6000_handle_option (size_t, const char *, int);
static void rs6000_parse_tls_size_option (void);
static void rs6000_parse_yes_no_option (const char *, const char *, int *);
static int first_altivec_reg_to_save (void);
static unsigned int compute_vrsave_mask (void);
static void compute_save_world_info (rs6000_stack_t *info_ptr);
static void is_altivec_return_reg (rtx, void *);
static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
int easy_vector_constant (rtx, enum machine_mode);
static bool rs6000_is_opaque_type (tree);
static rtx rs6000_dwarf_register_span (rtx);
static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
static void rs6000_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
static rtx rs6000_tls_get_addr (void);
static rtx rs6000_got_sym (void);
static int rs6000_tls_symbol_ref_1 (rtx *, void *);
static const char *rs6000_get_some_local_dynamic_name (void);
static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
static rtx rs6000_complex_function_value (enum machine_mode);
static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
                                    enum machine_mode, tree);
static void rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *,
                                                      HOST_WIDE_INT);
static void rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *,
                                                        tree, HOST_WIDE_INT);
static void rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *,
                                              HOST_WIDE_INT,
                                              rtx *, int *);
static void rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *,
                                                tree, HOST_WIDE_INT,
                                                rtx *, int *);
static rtx rs6000_darwin64_record_arg (CUMULATIVE_ARGS *, tree, int, bool);
static rtx rs6000_mixed_function_arg (enum machine_mode, tree, int);
static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
static void setup_incoming_varargs (CUMULATIVE_ARGS *,
                                    enum machine_mode, tree,
                                    int *, int);
static bool rs6000_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
                                      tree, bool);
static int rs6000_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
                                     tree, bool);
static const char *invalid_arg_for_unprototyped_fn (tree, tree, tree);
#if TARGET_MACHO
static void macho_branch_islands (void);
static int no_previous_def (tree function_name);
static tree get_prev_label (tree function_name);
static void rs6000_darwin_file_start (void);
#endif

static tree rs6000_build_builtin_va_list (void);
static tree rs6000_gimplify_va_arg (tree, tree, tree *, tree *);
static bool rs6000_must_pass_in_stack (enum machine_mode, tree);
static bool rs6000_scalar_mode_supported_p (enum machine_mode);
static bool rs6000_vector_mode_supported_p (enum machine_mode);
static int get_vec_cmp_insn (enum rtx_code, enum machine_mode,
                             enum machine_mode);
static rtx rs6000_emit_vector_compare (enum rtx_code, rtx, rtx,
                                       enum machine_mode);
static int get_vsel_insn (enum machine_mode);
static void rs6000_emit_vector_select (rtx, rtx, rtx, rtx);
static tree rs6000_stack_protect_fail (void);

const int INSN_NOT_AVAILABLE = -1;
static enum machine_mode rs6000_eh_return_filter_mode (void);
/* Hash table stuff for keeping track of TOC entries.  */

struct toc_hash_struct GTY(())
{
  /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
     ASM_OUTPUT_SPECIAL_POOL_ENTRY_P.  */
  rtx key;
  enum machine_mode key_mode;
  int labelno;
};

static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
/* Default register names.  */
char rs6000_reg_names[][8] =
{
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
     "mq", "lr", "ctr","ap",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "xer",
      /* AltiVec registers.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9",  "10", "11", "12", "13", "14", "15",
      "16", "17", "18", "19", "20", "21", "22", "23",
      "24", "25", "26", "27", "28", "29", "30", "31",
      "vrsave", "vscr",
      /* SPE registers.  */
      "spe_acc", "spefscr",
      /* Soft frame pointer.  */
      "sfp"
};
#ifdef TARGET_REGNAMES
static const char alt_reg_names[][8] =
{
   "%r0",   "%r1",  "%r2",  "%r3",  "%r4",  "%r5",  "%r6",  "%r7",
   "%r8",   "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
  "%r16",  "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
  "%r24",  "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
   "%f0",   "%f1",  "%f2",  "%f3",  "%f4",  "%f5",  "%f6",  "%f7",
   "%f8",   "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
  "%f16",  "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
  "%f24",  "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
    "mq",    "lr",  "ctr",   "ap",
  "%cr0",  "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
   "xer",
  /* AltiVec registers.  */
   "%v0",  "%v1",  "%v2",  "%v3",  "%v4",  "%v5",  "%v6",  "%v7",
   "%v8",  "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
  "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
  "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
  "vrsave", "vscr",
  /* SPE registers.  */
  "spe_acc", "spefscr",
  /* Soft frame pointer.  */
  "sfp"
};
#endif
#ifndef MASK_STRICT_ALIGN
#define MASK_STRICT_ALIGN 0
#endif
#ifndef TARGET_PROFILE_KERNEL
#define TARGET_PROFILE_KERNEL 0
#endif

/* The VRSAVE bitmask puts bit %v0 as the most significant bit.  */
#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
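
/* Illustrative example, added for exposition (not in the original
   source): ALTIVEC_REG_BIT (FIRST_ALTIVEC_REGNO) is 0x80000000 and
   ALTIVEC_REG_BIT (FIRST_ALTIVEC_REGNO + 20) is 0x00000800, so a
   function that saves v20-v31 would use the VRSAVE mask 0x00000fff.  */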
/* Initialize the GCC target structure.  */
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes

#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP

/* Default unaligned ops are only provided for ELF.  Find the ops needed
   for non-ELF systems.  */
#ifndef OBJECT_FORMAT_ELF
#if TARGET_XCOFF
/* For XCOFF.  rs6000_assemble_integer will handle unaligned DIs on
   64-bit targets.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
#else
/* For Darwin.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.quad\t"
#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
#endif
#endif

/* This hook deals with fixups for relocatable code and DI-mode objects
   in 64-bit code.  */
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER rs6000_assemble_integer

#ifdef HAVE_GAS_HIDDEN
#undef TARGET_ASM_ASSEMBLE_VISIBILITY
#define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
#endif

#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS HAVE_AS_TLS

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue

#undef TARGET_SCHED_VARIABLE_ISSUE
#define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
#undef TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
#undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
#define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
#undef TARGET_SCHED_INIT
#define TARGET_SCHED_INIT rs6000_sched_init
#undef TARGET_SCHED_FINISH
#define TARGET_SCHED_FINISH rs6000_sched_finish
#undef TARGET_SCHED_REORDER
#define TARGET_SCHED_REORDER rs6000_sched_reorder
#undef TARGET_SCHED_REORDER2
#define TARGET_SCHED_REORDER2 rs6000_sched_reorder2

#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead

#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD rs6000_use_sched_lookahead_guard

#undef TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD
#define TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD rs6000_builtin_mask_for_load
#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN
#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN rs6000_builtin_mul_widen_even
#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD
#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD rs6000_builtin_mul_widen_odd

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS rs6000_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN rs6000_expand_builtin

#undef TARGET_MANGLE_FUNDAMENTAL_TYPE
#define TARGET_MANGLE_FUNDAMENTAL_TYPE rs6000_mangle_fundamental_type

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs

#if TARGET_MACHO
#undef TARGET_BINDS_LOCAL_P
#define TARGET_BINDS_LOCAL_P darwin_binds_local_p
#endif

#undef TARGET_MS_BITFIELD_LAYOUT_P
#define TARGET_MS_BITFIELD_LAYOUT_P rs6000_ms_bitfield_layout_p

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk

#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall

#undef TARGET_INVALID_WITHIN_DOLOOP
#define TARGET_INVALID_WITHIN_DOLOOP rs6000_invalid_within_doloop

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS rs6000_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_0

#undef TARGET_VECTOR_OPAQUE_P
#define TARGET_VECTOR_OPAQUE_P rs6000_is_opaque_type

#undef TARGET_DWARF_REGISTER_SPAN
#define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
/* On rs6000, function arguments are promoted, as are function return
   values.  */
#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs

/* Always strict argument naming on rs6000.  */
#undef TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
#undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
#define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
#undef TARGET_SPLIT_COMPLEX_ARG
#define TARGET_SPLIT_COMPLEX_ARG hook_bool_tree_true
#undef TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK rs6000_must_pass_in_stack
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE rs6000_pass_by_reference
#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES rs6000_arg_partial_bytes

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list

#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR rs6000_gimplify_va_arg

#undef TARGET_EH_RETURN_FILTER_MODE
#define TARGET_EH_RETURN_FILTER_MODE rs6000_eh_return_filter_mode

#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P rs6000_scalar_mode_supported_p

#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P rs6000_vector_mode_supported_p

#undef TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN
#define TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN invalid_arg_for_unprototyped_fn

#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION rs6000_handle_option

#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS \
  (TARGET_DEFAULT | MASK_SCHED_PROLOG)

#undef TARGET_STACK_PROTECT_FAIL
#define TARGET_STACK_PROTECT_FAIL rs6000_stack_protect_fail

/* MPC604EUM 3.5.2 Weak Consistency between Multiple Processors
   The PowerPC architecture requires only weak consistency among
   processors--that is, memory accesses between processors need not be
   sequentially consistent and memory accesses among processors can occur
   in any order.  The ability to order memory accesses weakly provides
   opportunities for more efficient use of the system bus.  Unless a
   dependency exists, the 604e allows read operations to precede store
   operations.  */
#undef TARGET_RELAXED_ORDERING
#define TARGET_RELAXED_ORDERING true

#ifdef HAVE_AS_TLS
#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
#define TARGET_ASM_OUTPUT_DWARF_DTPREL rs6000_output_dwarf_dtprel
#endif
/* Use a 32-bit anchor range.  This leads to sequences like:

	addis	tmp,anchor,high

   where tmp itself acts as an anchor, and can be shared between
   accesses to the same 64k page.  */
#undef TARGET_MIN_ANCHOR_OFFSET
#define TARGET_MIN_ANCHOR_OFFSET -0x7fffffff - 1
#undef TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET 0x7fffffff
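
/* Illustrative example, added for exposition (not in the original
   source): two statics placed at anchor+0x12340 and anchor+0x13ff0 lie
   in the same 64k page, so a single addis computing
   tmp = anchor + 0x10000 can serve both, with the individual accesses
   using the low offsets 0x2340 and 0x3ff0 from tmp.  */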
#undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
#define TARGET_USE_BLOCKS_FOR_CONSTANT_P rs6000_use_blocks_for_constant_p

struct gcc_target targetm = TARGET_INITIALIZER;
/* Value is 1 if hard register REGNO can hold a value of machine-mode
   MODE.  */
static int
rs6000_hard_regno_mode_ok (int regno, enum machine_mode mode)
{
  /* The GPRs can hold any mode, but values bigger than one register
     cannot go past R31.  */
  if (INT_REGNO_P (regno))
    return INT_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1);

  /* The float registers can only hold floating modes and DImode.
     This also excludes decimal float modes.  */
  if (FP_REGNO_P (regno))
    return
      (SCALAR_FLOAT_MODE_P (mode)
       && !DECIMAL_FLOAT_MODE_P (mode)
       && FP_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1))
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_SIZE (mode) == UNITS_PER_FP_WORD);

  /* The CR register can only hold CC modes.  */
  if (CR_REGNO_P (regno))
    return GET_MODE_CLASS (mode) == MODE_CC;

  if (XER_REGNO_P (regno))
    return mode == PSImode;

  /* AltiVec modes only in AltiVec registers.  */
  if (ALTIVEC_REGNO_P (regno))
    return ALTIVEC_VECTOR_MODE (mode);

  /* ...but GPRs can hold SIMD data on the SPE in one register.  */
  if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
    return 1;

  /* We cannot put TImode anywhere except general register and it must be
     able to fit within the register set.  */

  return GET_MODE_SIZE (mode) <= UNITS_PER_WORD;
}
/* Initialize rs6000_hard_regno_mode_ok_p table.  */
static void
rs6000_init_hard_regno_mode_ok (void)
{
  int r, m;

  for (r = 0; r < FIRST_PSEUDO_REGISTER; ++r)
    for (m = 0; m < NUM_MACHINE_MODES; ++m)
      if (rs6000_hard_regno_mode_ok (r, m))
        rs6000_hard_regno_mode_ok_p[m][r] = true;
}
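
/* Illustrative note, added for exposition (not in the original source):
   once initialized, a register/mode legality check is a table lookup,
   e.g.

       rs6000_hard_regno_mode_ok_p[(int) DFmode][3]

   asks whether GPR 3 can hold a DFmode value; the HARD_REGNO_MODE_OK
   macro in rs6000.h is expected to expand to exactly this kind of
   lookup.  */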
/* If not otherwise specified by a target, make 'long double' equivalent to
   'double'.  */

#ifndef RS6000_DEFAULT_LONG_DOUBLE_SIZE
#define RS6000_DEFAULT_LONG_DOUBLE_SIZE 64
#endif
/* Override command line options.  Mostly we process the processor
   type and sometimes adjust other TARGET_ options.  */

void
rs6000_override_options (const char *default_cpu)
{
  size_t i, j;
  struct rs6000_cpu_select *ptr;
  int set_masks;

  /* Simplifications for entries below.  */

  enum {
    POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
    POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
  };

  /* This table occasionally claims that a processor does not support
     a particular feature even though it does, but the feature is slower
     than the alternative.  Thus, it shouldn't be relied on as a
     complete description of the processor's support.

     Please keep this list in order, and don't forget to update the
     documentation in invoke.texi when adding a new processor or
     flag.  */
  static struct ptt
    {
      const char *const name;           /* Canonical processor name.  */
      const enum processor_type processor; /* Processor type enum value.  */
      const int target_enable;          /* Target flags to enable.  */
    } const processor_target_table[]
      = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
         {"403", PROCESSOR_PPC403,
          POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
         {"405", PROCESSOR_PPC405,
          POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
         {"405fp", PROCESSOR_PPC405,
          POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
         {"440", PROCESSOR_PPC440,
          POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
         {"440fp", PROCESSOR_PPC440,
          POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
         {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
         {"601", PROCESSOR_PPC601,
          MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
         {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"620", PROCESSOR_PPC620,
          POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
         {"630", PROCESSOR_PPC630,
          POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
         {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
         {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
         {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
         {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
         {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
         {"8540", PROCESSOR_PPC8540,
          POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_STRICT_ALIGN},
         /* 8548 has a dummy entry for now.  */
         {"8548", PROCESSOR_PPC8540,
          POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_STRICT_ALIGN},
         {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
         {"970", PROCESSOR_POWER4,
          POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
         {"cell", PROCESSOR_CELL,
          POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
         {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
         {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
         {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
         {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
         {"G5", PROCESSOR_POWER4,
          POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
         {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
         {"power2", PROCESSOR_POWER,
          MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
         {"power3", PROCESSOR_PPC630,
          POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
         {"power4", PROCESSOR_POWER4,
          POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
         {"power5", PROCESSOR_POWER5,
          POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
          | MASK_MFCRF | MASK_POPCNTB},
         {"power5+", PROCESSOR_POWER5,
          POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
          | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND},
         {"power6", PROCESSOR_POWER6,
          POWERPC_7400_MASK | MASK_POWERPC64 | MASK_MFCRF | MASK_POPCNTB
          | MASK_FPRND},
         {"power6x", PROCESSOR_POWER6,
          POWERPC_7400_MASK | MASK_POWERPC64 | MASK_MFCRF | MASK_POPCNTB
          | MASK_FPRND | MASK_MFPGPR},
         {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
         {"powerpc64", PROCESSOR_POWERPC64,
          POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
         {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
         {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
         {"rios2", PROCESSOR_RIOS2,
          MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
         {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
         {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
         {"rs64", PROCESSOR_RS64A,
          POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64}
      };

  const size_t ptt_size = ARRAY_SIZE (processor_target_table);
  /* Some OSs don't support saving the high part of 64-bit registers on
     context switch.  Other OSs don't support saving Altivec registers.
     On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
     settings; if the user wants either, the user must explicitly specify
     them and we won't interfere with the user's specification.  */

  enum {
    POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
    POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT | MASK_STRICT_ALIGN
                     | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
                     | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_MULHW
                     | MASK_DLMZB | MASK_MFPGPR)
  };

  rs6000_init_hard_regno_mode_ok ();

  set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
#ifdef OS_MISSING_POWERPC64
  if (OS_MISSING_POWERPC64)
    set_masks &= ~MASK_POWERPC64;
#endif
#ifdef OS_MISSING_ALTIVEC
  if (OS_MISSING_ALTIVEC)
    set_masks &= ~MASK_ALTIVEC;
#endif

  /* Don't override by the processor default if given explicitly.  */
  set_masks &= ~target_flags_explicit;
  /* Identify the processor type.  */
  rs6000_select[0].string = default_cpu;
  rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;

  for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
    {
      ptr = &rs6000_select[i];
      if (ptr->string != (char *)0 && ptr->string[0] != '\0')
        {
          for (j = 0; j < ptt_size; j++)
            if (! strcmp (ptr->string, processor_target_table[j].name))
              {
                if (ptr->set_tune_p)
                  rs6000_cpu = processor_target_table[j].processor;

                if (ptr->set_arch_p)
                  {
                    target_flags &= ~set_masks;
                    target_flags |= (processor_target_table[j].target_enable
                                     & set_masks);
                  }
                break;
              }

          if (j == ptt_size)
            error ("bad value (%s) for %s switch", ptr->string, ptr->name);
        }
    }
  /* If we are optimizing big endian systems for space, use the load/store
     multiple and string instructions.  */
  if (BYTES_BIG_ENDIAN && optimize_size)
    target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);

  /* Don't allow -mmultiple or -mstring on little endian systems
     unless the cpu is a 750, because the hardware doesn't support the
     instructions used in little endian mode, and causes an alignment
     trap.  The 750 does not cause an alignment trap (except when the
     target is unaligned).  */

  if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
    {
      if (TARGET_MULTIPLE)
        {
          target_flags &= ~MASK_MULTIPLE;
          if ((target_flags_explicit & MASK_MULTIPLE) != 0)
            warning (0, "-mmultiple is not supported on little endian systems");
        }

      if (TARGET_STRING)
        {
          target_flags &= ~MASK_STRING;
          if ((target_flags_explicit & MASK_STRING) != 0)
            warning (0, "-mstring is not supported on little endian systems");
        }
    }
  /* Set debug flags */
  if (rs6000_debug_name)
    {
      if (! strcmp (rs6000_debug_name, "all"))
        rs6000_debug_stack = rs6000_debug_arg = 1;
      else if (! strcmp (rs6000_debug_name, "stack"))
        rs6000_debug_stack = 1;
      else if (! strcmp (rs6000_debug_name, "arg"))
        rs6000_debug_arg = 1;
      else
        error ("unknown -mdebug-%s switch", rs6000_debug_name);
    }

  if (rs6000_traceback_name)
    {
      if (! strncmp (rs6000_traceback_name, "full", 4))
        rs6000_traceback = traceback_full;
      else if (! strncmp (rs6000_traceback_name, "part", 4))
        rs6000_traceback = traceback_part;
      else if (! strncmp (rs6000_traceback_name, "no", 2))
        rs6000_traceback = traceback_none;
      else
        error ("unknown -mtraceback arg %qs; expecting %<full%>, %<partial%> or %<none%>",
               rs6000_traceback_name);
    }
  if (!rs6000_explicit_options.long_double)
    rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;

#ifndef POWERPC_LINUX
  if (!rs6000_explicit_options.ieee)
    rs6000_ieeequad = 1;
#endif
  /* Set Altivec ABI as default for powerpc64 linux.  */
  if (TARGET_ELF && TARGET_64BIT)
    {
      rs6000_altivec_abi = 1;
      TARGET_ALTIVEC_VRSAVE = 1;
    }

  /* Set the Darwin64 ABI as default for 64-bit Darwin.  */
  if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
    {
      rs6000_darwin64_abi = 1;
#if TARGET_MACHO
      darwin_one_byte_bool = 1;
#endif
      /* Default to natural alignment, for better performance.  */
      rs6000_alignment_flags = MASK_ALIGN_NATURAL;
    }
  /* Place FP constants in the constant pool instead of TOC
     if section anchors enabled.  */
  if (flag_section_anchors)
    TARGET_NO_FP_IN_TOC = 1;

  /* Handle -mtls-size option.  */
  rs6000_parse_tls_size_option ();

#ifdef SUBTARGET_OVERRIDE_OPTIONS
  SUBTARGET_OVERRIDE_OPTIONS;
#endif
#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
  SUBSUBTARGET_OVERRIDE_OPTIONS;
#endif
#ifdef SUB3TARGET_OVERRIDE_OPTIONS
  SUB3TARGET_OVERRIDE_OPTIONS;
#endif
  if (TARGET_E500)
    {
      /* The e500 does not have string instructions, and we set
         MASK_STRING above when optimizing for size.  */
      if ((target_flags & MASK_STRING) != 0)
        target_flags = target_flags & ~MASK_STRING;
    }
  else if (rs6000_select[1].string != NULL)
    {
      /* For the powerpc-eabispe configuration, we set all these by
         default, so let's unset them if we manually set another
         CPU that is not the E500.  */
      if (!rs6000_explicit_options.abi)
        rs6000_spe_abi = 0;
      if (!rs6000_explicit_options.spe)
        rs6000_spe = 0;
      if (!rs6000_explicit_options.float_gprs)
        rs6000_float_gprs = 0;
      if (!rs6000_explicit_options.isel)
        rs6000_isel = 0;
    }

  /* Detect invalid option combinations with E500.  */
  CHECK_E500_OPTIONS;
  rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
                        && rs6000_cpu != PROCESSOR_POWER5
                        && rs6000_cpu != PROCESSOR_POWER6
                        && rs6000_cpu != PROCESSOR_CELL);
  rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
                         || rs6000_cpu == PROCESSOR_POWER5);
  rs6000_align_branch_targets = (rs6000_cpu == PROCESSOR_POWER4
                                 || rs6000_cpu == PROCESSOR_POWER5
                                 || rs6000_cpu == PROCESSOR_POWER6);

  rs6000_sched_restricted_insns_priority
    = (rs6000_sched_groups ? 1 : 0);
  /* Handle -msched-costly-dep option.  */
  rs6000_sched_costly_dep
    = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);

  if (rs6000_sched_costly_dep_str)
    {
      if (! strcmp (rs6000_sched_costly_dep_str, "no"))
        rs6000_sched_costly_dep = no_dep_costly;
      else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
        rs6000_sched_costly_dep = all_deps_costly;
      else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
        rs6000_sched_costly_dep = true_store_to_load_dep_costly;
      else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
        rs6000_sched_costly_dep = store_to_load_dep_costly;
      else
        rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
    }
  /* Handle -minsert-sched-nops option.  */
  rs6000_sched_insert_nops
    = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);

  if (rs6000_sched_insert_nops_str)
    {
      if (! strcmp (rs6000_sched_insert_nops_str, "no"))
        rs6000_sched_insert_nops = sched_finish_none;
      else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
        rs6000_sched_insert_nops = sched_finish_pad_groups;
      else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
        rs6000_sched_insert_nops = sched_finish_regroup_exact;
      else
        rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
    }
#ifdef TARGET_REGNAMES
  /* If the user desires alternate register names, copy in the
     alternate names now.  */
  if (TARGET_REGNAMES)
    memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
#endif

  /* Set aix_struct_return last, after the ABI is determined.
     If -maix-struct-return or -msvr4-struct-return was explicitly
     used, don't override with the ABI default.  */
  if (!rs6000_explicit_options.aix_struct_ret)
    aix_struct_return = (DEFAULT_ABI != ABI_V4 || DRAFT_V4_STRUCT_RET);

  if (TARGET_LONG_DOUBLE_128 && !TARGET_IEEEQUAD)
    REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;

  ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
  /* We can only guarantee the availability of DI pseudo-ops when
     assembling for 64-bit targets.  */
  if (!TARGET_64BIT)
    {
      targetm.asm_out.aligned_op.di = NULL;
      targetm.asm_out.unaligned_op.di = NULL;
    }
  /* Set branch target alignment, if not optimizing for size.  */
  if (!optimize_size)
    {
      /* Cell wants to be aligned 8byte for dual issue.  */
      if (rs6000_cpu == PROCESSOR_CELL)
        {
          if (align_functions <= 0)
            align_functions = 8;
          if (align_jumps <= 0)
            align_jumps = 8;
          if (align_loops <= 0)
            align_loops = 8;
        }
      if (rs6000_align_branch_targets)
        {
          if (align_functions <= 0)
            align_functions = 16;
          if (align_jumps <= 0)
            align_jumps = 16;
          if (align_loops <= 0)
            align_loops = 16;
        }
      if (align_jumps_max_skip <= 0)
        align_jumps_max_skip = 15;
      if (align_loops_max_skip <= 0)
        align_loops_max_skip = 15;
    }
  /* Arrange to save and restore machine status around nested functions.  */
  init_machine_status = rs6000_init_machine_status;

  /* We should always be splitting complex arguments, but we can't break
     Linux and Darwin ABIs at the moment.  For now, only AIX is fixed.  */
  if (DEFAULT_ABI != ABI_AIX)
    targetm.calls.split_complex_arg = NULL;
  /* Initialize rs6000_cost with the appropriate target costs.  */
  if (optimize_size)
    rs6000_cost = TARGET_POWERPC64 ? &size64_cost : &size32_cost;
  else
    switch (rs6000_cpu)
      {
      case PROCESSOR_RIOS1:
        rs6000_cost = &rios1_cost;
        break;

      case PROCESSOR_RIOS2:
        rs6000_cost = &rios2_cost;
        break;

      case PROCESSOR_RS64A:
        rs6000_cost = &rs64a_cost;
        break;

      case PROCESSOR_MPCCORE:
        rs6000_cost = &mpccore_cost;
        break;

      case PROCESSOR_PPC403:
        rs6000_cost = &ppc403_cost;
        break;

      case PROCESSOR_PPC405:
        rs6000_cost = &ppc405_cost;
        break;

      case PROCESSOR_PPC440:
        rs6000_cost = &ppc440_cost;
        break;

      case PROCESSOR_PPC601:
        rs6000_cost = &ppc601_cost;
        break;

      case PROCESSOR_PPC603:
        rs6000_cost = &ppc603_cost;
        break;

      case PROCESSOR_PPC604:
        rs6000_cost = &ppc604_cost;
        break;

      case PROCESSOR_PPC604e:
        rs6000_cost = &ppc604e_cost;
        break;

      case PROCESSOR_PPC620:
        rs6000_cost = &ppc620_cost;
        break;

      case PROCESSOR_PPC630:
        rs6000_cost = &ppc630_cost;
        break;

      case PROCESSOR_CELL:
        rs6000_cost = &ppccell_cost;
        break;

      case PROCESSOR_PPC750:
      case PROCESSOR_PPC7400:
        rs6000_cost = &ppc750_cost;
        break;

      case PROCESSOR_PPC7450:
        rs6000_cost = &ppc7450_cost;
        break;

      case PROCESSOR_PPC8540:
        rs6000_cost = &ppc8540_cost;
        break;

      case PROCESSOR_POWER4:
      case PROCESSOR_POWER5:
        rs6000_cost = &power4_cost;
        break;

      case PROCESSOR_POWER6:
        rs6000_cost = &power6_cost;
        break;

      default:
        gcc_unreachable ();
      }
}
/* Implement targetm.vectorize.builtin_mask_for_load.  */
static tree
rs6000_builtin_mask_for_load (void)
{
  if (TARGET_ALTIVEC)
    return altivec_builtin_mask_for_load;
  else
    return 0;
}

/* Implement targetm.vectorize.builtin_mul_widen_even.  */
static tree
rs6000_builtin_mul_widen_even (tree type)
{
  if (!TARGET_ALTIVEC)
    return NULL_TREE;

  switch (TYPE_MODE (type))
    {
    case V8HImode:
      return TYPE_UNSIGNED (type)
	? rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUH]
	: rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESH];

    case V16QImode:
      return TYPE_UNSIGNED (type)
	? rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUB]
	: rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESB];

    default:
      return NULL_TREE;
    }
}

/* Implement targetm.vectorize.builtin_mul_widen_odd.  */
static tree
rs6000_builtin_mul_widen_odd (tree type)
{
  if (!TARGET_ALTIVEC)
    return NULL_TREE;

  switch (TYPE_MODE (type))
    {
    case V8HImode:
      return TYPE_UNSIGNED (type)
	? rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUH]
	: rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSH];

    case V16QImode:
      return TYPE_UNSIGNED (type)
	? rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUB]
	: rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSB];

    default:
      return NULL_TREE;
    }
}
/* Handle generic options of the form -mfoo=yes/no.
   NAME is the option name.
   VALUE is the option value.
   FLAG is the pointer to the flag where to store a 1 or 0, depending on
   whether the option value is 'yes' or 'no' respectively.  */
static void
rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
{
  if (value == 0)
    return;
  else if (!strcmp (value, "yes"))
    *flag = 1;
  else if (!strcmp (value, "no"))
    *flag = 0;
  else
    error ("unknown -m%s= option specified: '%s'", name, value);
}
/* Validate and record the size specified with the -mtls-size option.  */

static void
rs6000_parse_tls_size_option (void)
{
  if (rs6000_tls_size_string == 0)
    return;
  else if (strcmp (rs6000_tls_size_string, "16") == 0)
    rs6000_tls_size = 16;
  else if (strcmp (rs6000_tls_size_string, "32") == 0)
    rs6000_tls_size = 32;
  else if (strcmp (rs6000_tls_size_string, "64") == 0)
    rs6000_tls_size = 64;
  else
    error ("bad value %qs for -mtls-size switch", rs6000_tls_size_string);
}
void
optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
{
  if (DEFAULT_ABI == ABI_DARWIN)
    /* The Darwin libraries never set errno, so we might as well
       avoid calling them when that's the only reason we would.  */
    flag_errno_math = 0;

  /* Double growth factor to counter reduced min jump length.  */
  set_param_value ("max-grow-copy-bb-insns", 16);

  /* Enable section anchors by default.
     Skip section anchors for Objective C and Objective C++
     until the front ends are fixed.  */
  if (!TARGET_MACHO && lang_hooks.name[4] != 'O')
    flag_section_anchors = 1;
}
/* Implement TARGET_HANDLE_OPTION.  */

static bool
rs6000_handle_option (size_t code, const char *arg, int value)
{
  switch (code)
    {
    case OPT_mno_power:
      target_flags &= ~(MASK_POWER | MASK_POWER2
			| MASK_MULTIPLE | MASK_STRING);
      target_flags_explicit |= (MASK_POWER | MASK_POWER2
				| MASK_MULTIPLE | MASK_STRING);
      break;

    case OPT_mno_powerpc:
      target_flags &= ~(MASK_POWERPC | MASK_PPC_GPOPT
			| MASK_PPC_GFXOPT | MASK_POWERPC64);
      target_flags_explicit |= (MASK_POWERPC | MASK_PPC_GPOPT
				| MASK_PPC_GFXOPT | MASK_POWERPC64);
      break;

    case OPT_mfull_toc:
      target_flags &= ~MASK_MINIMAL_TOC;
      TARGET_NO_FP_IN_TOC = 0;
      TARGET_NO_SUM_IN_TOC = 0;
      target_flags_explicit |= MASK_MINIMAL_TOC;
#ifdef TARGET_USES_SYSV4_OPT
      /* Note, V.4 no longer uses a normal TOC, so make -mfull-toc be
	 just the same as -mminimal-toc.  */
      target_flags |= MASK_MINIMAL_TOC;
      target_flags_explicit |= MASK_MINIMAL_TOC;
#endif
      break;

#ifdef TARGET_USES_SYSV4_OPT
    case OPT_mtoc:
      /* Make -mtoc behave like -mminimal-toc.  */
      target_flags |= MASK_MINIMAL_TOC;
      target_flags_explicit |= MASK_MINIMAL_TOC;
      break;
#endif

#ifdef TARGET_USES_AIX64_OPT
    case OPT_maix64:
#else
    case OPT_m64:
#endif
      target_flags |= MASK_POWERPC64 | MASK_POWERPC;
      target_flags |= ~target_flags_explicit & MASK_PPC_GFXOPT;
      target_flags_explicit |= MASK_POWERPC64 | MASK_POWERPC;
      break;

#ifdef TARGET_USES_AIX64_OPT
    case OPT_maix32:
#else
    case OPT_m32:
#endif
      target_flags &= ~MASK_POWERPC64;
      target_flags_explicit |= MASK_POWERPC64;
      break;

    case OPT_minsert_sched_nops_:
      rs6000_sched_insert_nops_str = arg;
      break;

    case OPT_mminimal_toc:
      if (value == 1)
	{
	  TARGET_NO_FP_IN_TOC = 0;
	  TARGET_NO_SUM_IN_TOC = 0;
	}
      break;

    case OPT_mpower:
      if (value == 1)
	{
	  target_flags |= (MASK_MULTIPLE | MASK_STRING);
	  target_flags_explicit |= (MASK_MULTIPLE | MASK_STRING);
	}
      break;

    case OPT_mpower2:
      if (value == 1)
	{
	  target_flags |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
	  target_flags_explicit |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
	}
      break;

    case OPT_mpowerpc_gpopt:
    case OPT_mpowerpc_gfxopt:
      if (value == 1)
	{
	  target_flags |= MASK_POWERPC;
	  target_flags_explicit |= MASK_POWERPC;
	}
      break;

    case OPT_maix_struct_return:
    case OPT_msvr4_struct_return:
      rs6000_explicit_options.aix_struct_ret = true;
      break;

    case OPT_mvrsave_:
      rs6000_parse_yes_no_option ("vrsave", arg, &(TARGET_ALTIVEC_VRSAVE));
      break;

    case OPT_misel_:
      rs6000_explicit_options.isel = true;
      rs6000_parse_yes_no_option ("isel", arg, &(rs6000_isel));
      break;

    case OPT_mspe_:
      rs6000_explicit_options.spe = true;
      rs6000_parse_yes_no_option ("spe", arg, &(rs6000_spe));
      break;

    case OPT_mdebug_:
      rs6000_debug_name = arg;
      break;

#ifdef TARGET_USES_SYSV4_OPT
    case OPT_mcall_:
      rs6000_abi_name = arg;
      break;

    case OPT_msdata_:
      rs6000_sdata_name = arg;
      break;

    case OPT_mtls_size_:
      rs6000_tls_size_string = arg;
      break;

    case OPT_mrelocatable:
      if (value == 1)
	{
	  target_flags |= MASK_MINIMAL_TOC;
	  target_flags_explicit |= MASK_MINIMAL_TOC;
	  TARGET_NO_FP_IN_TOC = 1;
	}
      break;

    case OPT_mrelocatable_lib:
      if (value == 1)
	{
	  target_flags |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
	  target_flags_explicit |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
	  TARGET_NO_FP_IN_TOC = 1;
	}
      else
	{
	  target_flags &= ~MASK_RELOCATABLE;
	  target_flags_explicit |= MASK_RELOCATABLE;
	}
      break;
#endif

    case OPT_mabi_:
      if (!strcmp (arg, "altivec"))
	{
	  rs6000_explicit_options.abi = true;
	  rs6000_altivec_abi = 1;
	  rs6000_spe_abi = 0;
	}
      else if (! strcmp (arg, "no-altivec"))
	{
	  /* ??? Don't set rs6000_explicit_options.abi here, to allow
	     the default for rs6000_spe_abi to be chosen later.  */
	  rs6000_altivec_abi = 0;
	}
      else if (! strcmp (arg, "spe"))
	{
	  rs6000_explicit_options.abi = true;
	  rs6000_spe_abi = 1;
	  rs6000_altivec_abi = 0;
	  if (!TARGET_SPE_ABI)
	    error ("not configured for ABI: '%s'", arg);
	}
      else if (! strcmp (arg, "no-spe"))
	{
	  rs6000_explicit_options.abi = true;
	  rs6000_spe_abi = 0;
	}

      /* These are here for testing during development only, do not
	 document in the manual please.  */
      else if (! strcmp (arg, "d64"))
	{
	  rs6000_darwin64_abi = 1;
	  warning (0, "Using darwin64 ABI");
	}
      else if (! strcmp (arg, "d32"))
	{
	  rs6000_darwin64_abi = 0;
	  warning (0, "Using old darwin ABI");
	}

      else if (! strcmp (arg, "ibmlongdouble"))
	{
	  rs6000_explicit_options.ieee = true;
	  rs6000_ieeequad = 0;
	  warning (0, "Using IBM extended precision long double");
	}
      else if (! strcmp (arg, "ieeelongdouble"))
	{
	  rs6000_explicit_options.ieee = true;
	  rs6000_ieeequad = 1;
	  warning (0, "Using IEEE extended precision long double");
	}

      else
	{
	  error ("unknown ABI specified: '%s'", arg);
	  return false;
	}
      break;

    case OPT_mcpu_:
      rs6000_select[1].string = arg;
      break;

    case OPT_mtune_:
      rs6000_select[2].string = arg;
      break;

    case OPT_mtraceback_:
      rs6000_traceback_name = arg;
      break;

    case OPT_mfloat_gprs_:
      rs6000_explicit_options.float_gprs = true;
      if (! strcmp (arg, "yes") || ! strcmp (arg, "single"))
	rs6000_float_gprs = 1;
      else if (! strcmp (arg, "double"))
	rs6000_float_gprs = 2;
      else if (! strcmp (arg, "no"))
	rs6000_float_gprs = 0;
      else
	{
	  error ("invalid option for -mfloat-gprs: '%s'", arg);
	  return false;
	}
      break;

    case OPT_mlong_double_:
      rs6000_explicit_options.long_double = true;
      rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
      if (value != 64 && value != 128)
	{
	  error ("Unknown switch -mlong-double-%s", arg);
	  rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
	  return false;
	}
      else
	rs6000_long_double_type_size = value;
      break;

    case OPT_msched_costly_dep_:
      rs6000_sched_costly_dep_str = arg;
      break;

    case OPT_malign_:
      rs6000_explicit_options.alignment = true;
      if (! strcmp (arg, "power"))
	{
	  /* On 64-bit Darwin, power alignment is ABI-incompatible with
	     some C library functions, so warn about it.  The flag may be
	     useful for performance studies from time to time though, so
	     don't disable it entirely.  */
	  if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
	    warning (0, "-malign-power is not supported for 64-bit Darwin;"
		     " it is incompatible with the installed C and C++ libraries");
	  rs6000_alignment_flags = MASK_ALIGN_POWER;
	}
      else if (! strcmp (arg, "natural"))
	rs6000_alignment_flags = MASK_ALIGN_NATURAL;
      else
	{
	  error ("unknown -malign-XXXXX option specified: '%s'", arg);
	  return false;
	}
      break;
    }
  return true;
}
/* Do anything needed at the start of the asm file.  */

static void
rs6000_file_start (void)
{
  size_t i;
  char buffer[80];
  const char *start = buffer;
  struct rs6000_cpu_select *ptr;
  const char *default_cpu = TARGET_CPU_DEFAULT;
  FILE *file = asm_out_file;

  default_file_start ();

#ifdef TARGET_BI_ARCH
  if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
    default_cpu = 0;
#endif

  if (flag_verbose_asm)
    {
      sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
      rs6000_select[0].string = default_cpu;

      for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
	{
	  ptr = &rs6000_select[i];
	  if (ptr->string != (char *)0 && ptr->string[0] != '\0')
	    {
	      fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
	      start = "";
	    }
	}

      if (PPC405_ERRATUM77)
	{
	  fprintf (file, "%s PPC405CR_ERRATUM77", start);
	  start = "";
	}

#ifdef USING_ELFOS_H
      switch (rs6000_sdata)
	{
	case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
	case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
	case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
	case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
	}

      if (rs6000_sdata && g_switch_value)
	{
	  fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
		   g_switch_value);
	  start = "";
	}
#endif

      if (*start == '\0')
	putc ('\n', file);
    }

  if (DEFAULT_ABI == ABI_AIX || (TARGET_ELF && flag_pic == 2))
    {
      switch_to_section (toc_section);
      switch_to_section (text_section);
    }
}
/* Return nonzero if this function is known to have a null epilogue.  */

int
direct_return (void)
{
  if (reload_completed)
    {
      rs6000_stack_t *info = rs6000_stack_info ();

      if (info->first_gp_reg_save == 32
	  && info->first_fp_reg_save == 64
	  && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
	  && ! info->lr_save_p
	  && ! info->cr_save_p
	  && info->vrsave_mask == 0
	  && ! info->push_p)
	return 1;
    }

  return 0;
}
/* Return the number of instructions it takes to form a constant in an
   integer register.  */

int
num_insns_constant_wide (HOST_WIDE_INT value)
{
  /* signed constant loadable with {cal|addi} */
  if ((unsigned HOST_WIDE_INT) (value + 0x8000) < 0x10000)
    return 1;

  /* constant loadable with {cau|addis} */
  else if ((value & 0xffff) == 0
	   && (value >> 31 == -1 || value >> 31 == 0))
    return 1;

#if HOST_BITS_PER_WIDE_INT == 64
  else if (TARGET_POWERPC64)
    {
      HOST_WIDE_INT low  = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
      HOST_WIDE_INT high = value >> 31;

      if (high == 0 || high == -1)
	return 2;

      high >>= 1;

      if (low == 0)
	return num_insns_constant_wide (high) + 1;
      else
	return (num_insns_constant_wide (high)
		+ num_insns_constant_wide (low) + 1);
    }
#endif

  else
    return 2;
}
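
/* Illustration (not part of GCC): a standalone sketch of the counting
   logic above, assuming a 64-bit HOST_WIDE_INT and a 64-bit target.
   The demo_num_insns name is hypothetical.  Fenced off with #if 0 so
   this file is unaffected; extract and compile it separately.  */
#if 0
#include <stdio.h>
#include <stdint.h>

/* Mirror num_insns_constant_wide: 1 insn for addi/addis immediates,
   otherwise recurse on high and low halves plus one combining insn.  */
static int demo_num_insns (int64_t value)
{
  if ((uint64_t) (value + 0x8000) < 0x10000)
    return 1;                        /* fits addi (signed 16-bit) */
  if ((value & 0xffff) == 0
      && (value >> 31 == -1 || value >> 31 == 0))
    return 1;                        /* fits addis (shifted 16-bit) */
  {
    int64_t low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
    int64_t high = value >> 31;
    if (high == 0 || high == -1)
      return 2;                      /* addis + ori */
    high >>= 1;
    if (low == 0)
      return demo_num_insns (high) + 1;
    return demo_num_insns (high) + demo_num_insns (low) + 1;
  }
}

int main (void)
{
  printf ("%d\n", demo_num_insns (0x1234));             /* 1 */
  printf ("%d\n", demo_num_insns (0x12340000));         /* 1 */
  printf ("%d\n", demo_num_insns (0x123456789abcdef0)); /* 5 */
  return 0;
}
#endif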
int
num_insns_constant (rtx op, enum machine_mode mode)
{
  HOST_WIDE_INT low, high;

  switch (GET_CODE (op))
    {
    case CONST_INT:
#if HOST_BITS_PER_WIDE_INT == 64
      if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
	  && mask64_operand (op, mode))
	return 2;
      else
#endif
	return num_insns_constant_wide (INTVAL (op));

    case CONST_DOUBLE:
      if (mode == SFmode)
	{
	  long l;
	  REAL_VALUE_TYPE rv;

	  REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
	  REAL_VALUE_TO_TARGET_SINGLE (rv, l);
	  return num_insns_constant_wide ((HOST_WIDE_INT) l);
	}

      if (mode == VOIDmode || mode == DImode)
	{
	  high = CONST_DOUBLE_HIGH (op);
	  low  = CONST_DOUBLE_LOW (op);
	}
      else
	{
	  long l[2];
	  REAL_VALUE_TYPE rv;

	  REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
	  REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
	  high = l[WORDS_BIG_ENDIAN == 0];
	  low  = l[WORDS_BIG_ENDIAN != 0];
	}

      if (TARGET_32BIT)
	return (num_insns_constant_wide (low)
		+ num_insns_constant_wide (high));
      else
	{
	  if ((high == 0 && low >= 0)
	      || (high == -1 && low < 0))
	    return num_insns_constant_wide (low);

	  else if (mask64_operand (op, mode))
	    return 2;

	  else if (low == 0)
	    return num_insns_constant_wide (high) + 1;

	  else
	    return (num_insns_constant_wide (high)
		    + num_insns_constant_wide (low) + 1);
	}

    default:
      gcc_unreachable ();
    }
}
/* Interpret element ELT of the CONST_VECTOR OP as an integer value.
   If the mode of OP is MODE_VECTOR_INT, this simply returns the
   corresponding element of the vector, but for V4SFmode and V2SFmode,
   the corresponding "float" is interpreted as an SImode integer.  */

static HOST_WIDE_INT
const_vector_elt_as_int (rtx op, unsigned int elt)
{
  rtx tmp = CONST_VECTOR_ELT (op, elt);
  if (GET_MODE (op) == V4SFmode
      || GET_MODE (op) == V2SFmode)
    tmp = gen_lowpart (SImode, tmp);
  return INTVAL (tmp);
}
/* Return true if OP can be synthesized with a particular vspltisb, vspltish
   or vspltisw instruction.  OP is a CONST_VECTOR.  Which instruction is used
   depends on STEP and COPIES, one of which will be 1.  If COPIES > 1,
   all items are set to the same value and contain COPIES replicas of the
   vsplt's operand; if STEP > 1, one in STEP elements is set to the vsplt's
   operand and the others are set to the value of the operand's msb.  */

static bool
vspltis_constant (rtx op, unsigned step, unsigned copies)
{
  enum machine_mode mode = GET_MODE (op);
  enum machine_mode inner = GET_MODE_INNER (mode);

  unsigned i;
  unsigned nunits = GET_MODE_NUNITS (mode);
  unsigned bitsize = GET_MODE_BITSIZE (inner);
  unsigned mask = GET_MODE_MASK (inner);

  HOST_WIDE_INT val = const_vector_elt_as_int (op, nunits - 1);
  HOST_WIDE_INT splat_val = val;
  HOST_WIDE_INT msb_val = val > 0 ? 0 : -1;

  /* Construct the value to be splatted, if possible.  If not, return 0.  */
  for (i = 2; i <= copies; i *= 2)
    {
      HOST_WIDE_INT small_val;
      bitsize /= 2;
      small_val = splat_val >> bitsize;
      mask >>= bitsize;
      if (splat_val != ((small_val << bitsize) | (small_val & mask)))
	return false;
      splat_val = small_val;
    }

  /* Check if SPLAT_VAL can really be the operand of a vspltis[bhw].  */
  if (EASY_VECTOR_15 (splat_val))
    ;

  /* Also check if we can splat, and then add the result to itself.  Do so if
     the value is positive, or if the splat instruction is using OP's mode;
     for splat_val < 0, the splat and the add should use the same mode.  */
  else if (EASY_VECTOR_15_ADD_SELF (splat_val)
	   && (splat_val >= 0 || (step == 1 && copies == 1)))
    ;

  else
    return false;

  /* Check if VAL is present in every STEP-th element, and the
     other elements are filled with its most significant bit.  */
  for (i = 0; i < nunits - 1; ++i)
    {
      HOST_WIDE_INT desired_val;
      if (((i + 1) & (step - 1)) == 0)
	desired_val = val;
      else
	desired_val = msb_val;

      if (desired_val != const_vector_elt_as_int (op, i))
	return false;
    }

  return true;
}
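
/* Illustration (not part of GCC): a standalone sketch of the COPIES
   folding loop above.  It checks whether a wide element is COPIES
   identical replicas of a narrower value, e.g. whether a vspltish
   result could satisfy a request phrased in word elements.  The
   fold_splat name is hypothetical; fenced off with #if 0.  */
#if 0
#include <stdio.h>
#include <stdint.h>

/* Store the folded splat value through *OUT and return 1 on success,
   0 if VAL is not COPIES replicas of a narrower value.  BITSIZE is
   the width of VAL's element in bits.  */
static int fold_splat (int64_t val, unsigned bitsize, unsigned copies,
		       int64_t *out)
{
  uint64_t mask = (bitsize == 64 ? ~(uint64_t) 0
		   : ((uint64_t) 1 << bitsize) - 1);
  unsigned i;

  for (i = 2; i <= copies; i *= 2)
    {
      int64_t small_val;
      bitsize /= 2;
      small_val = val >> bitsize;
      mask >>= bitsize;
      if (val != ((small_val << bitsize) | (small_val & (int64_t) mask)))
	return 0;
      val = small_val;
    }
  *out = val;
  return 1;
}

int main (void)
{
  int64_t v;
  /* 0x00050005 is two copies of 0x0005: a halfword splat of 5 works.  */
  if (fold_splat (0x00050005, 32, 2, &v))
    printf ("folds to %#llx\n", (unsigned long long) v);  /* 0x5 */
  /* 0x00050006 is not a replicated halfword.  */
  printf ("%d\n", fold_splat (0x00050006, 32, 2, &v));    /* 0 */
  return 0;
}
#endif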
/* Return true if OP is of the given MODE and can be synthesized
   with a vspltisb, vspltish or vspltisw.  */

bool
easy_altivec_constant (rtx op, enum machine_mode mode)
{
  unsigned step, copies;

  if (mode == VOIDmode)
    mode = GET_MODE (op);
  else if (mode != GET_MODE (op))
    return false;

  /* Start with a vspltisw.  */
  step = GET_MODE_NUNITS (mode) / 4;
  copies = 1;

  if (vspltis_constant (op, step, copies))
    return true;

  /* Then try with a vspltish.  */
  if (step == 1)
    copies <<= 1;
  else
    step >>= 1;

  if (vspltis_constant (op, step, copies))
    return true;

  /* And finally a vspltisb.  */
  if (step == 1)
    copies <<= 1;
  else
    step >>= 1;

  if (vspltis_constant (op, step, copies))
    return true;

  return false;
}
/* Generate a VEC_DUPLICATE representing a vspltis[bhw] instruction whose
   result is OP.  Abort if it is not possible.  */

rtx
gen_easy_altivec_constant (rtx op)
{
  enum machine_mode mode = GET_MODE (op);
  int nunits = GET_MODE_NUNITS (mode);
  rtx last = CONST_VECTOR_ELT (op, nunits - 1);
  unsigned step = nunits / 4;
  unsigned copies = 1;

  /* Start with a vspltisw.  */
  if (vspltis_constant (op, step, copies))
    return gen_rtx_VEC_DUPLICATE (V4SImode, gen_lowpart (SImode, last));

  /* Then try with a vspltish.  */
  if (step == 1)
    copies <<= 1;
  else
    step >>= 1;

  if (vspltis_constant (op, step, copies))
    return gen_rtx_VEC_DUPLICATE (V8HImode, gen_lowpart (HImode, last));

  /* And finally a vspltisb.  */
  if (step == 1)
    copies <<= 1;
  else
    step >>= 1;

  if (vspltis_constant (op, step, copies))
    return gen_rtx_VEC_DUPLICATE (V16QImode, gen_lowpart (QImode, last));

  gcc_unreachable ();
}
const char *
output_vec_const_move (rtx *operands)
{
  int cst, cst2;
  enum machine_mode mode;
  rtx dest, vec;

  dest = operands[0];
  vec = operands[1];
  mode = GET_MODE (dest);

  if (TARGET_ALTIVEC)
    {
      rtx splat_vec;
      if (zero_constant (vec, mode))
	return "vxor %0,%0,%0";

      splat_vec = gen_easy_altivec_constant (vec);
      gcc_assert (GET_CODE (splat_vec) == VEC_DUPLICATE);
      operands[1] = XEXP (splat_vec, 0);
      if (!EASY_VECTOR_15 (INTVAL (operands[1])))
	return "#";

      switch (GET_MODE (splat_vec))
	{
	case V4SImode:
	  return "vspltisw %0,%1";

	case V8HImode:
	  return "vspltish %0,%1";

	case V16QImode:
	  return "vspltisb %0,%1";

	default:
	  gcc_unreachable ();
	}
    }

  gcc_assert (TARGET_SPE);

  /* Vector constant 0 is handled as a splitter of V2SI, and in the
     pattern of V1DI, V4HI, and V2SF.

     FIXME: We should probably return # and add post reload
     splitters for these, but this way is so easy ;-).  */
  cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
  cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
  operands[1] = CONST_VECTOR_ELT (vec, 0);
  operands[2] = CONST_VECTOR_ELT (vec, 1);
  if (cst == cst2)
    return "li %0,%1\n\tevmergelo %0,%0,%0";
  else
    return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
}
/* Initialize vector TARGET to VALS.  */

void
rs6000_expand_vector_init (rtx target, rtx vals)
{
  enum machine_mode mode = GET_MODE (target);
  enum machine_mode inner_mode = GET_MODE_INNER (mode);
  int n_elts = GET_MODE_NUNITS (mode);
  int n_var = 0, one_var = -1;
  bool all_same = true, all_const_zero = true;
  rtx x, mem;
  int i;

  for (i = 0; i < n_elts; ++i)
    {
      x = XVECEXP (vals, 0, i);
      if (!CONSTANT_P (x))
	++n_var, one_var = i;
      else if (x != CONST0_RTX (inner_mode))
	all_const_zero = false;

      if (i > 0 && !rtx_equal_p (x, XVECEXP (vals, 0, 0)))
	all_same = false;
    }

  if (n_var == 0)
    {
      if (mode != V4SFmode && all_const_zero)
	{
	  /* Zero register.  */
	  emit_insn (gen_rtx_SET (VOIDmode, target,
				  gen_rtx_XOR (mode, target, target)));
	  return;
	}
      else if (mode != V4SFmode && easy_vector_constant (vals, mode))
	{
	  /* Splat immediate.  */
	  emit_insn (gen_rtx_SET (VOIDmode, target, vals));
	  return;
	}
      else if (all_same)
	;	/* Splat vector element.  */
      else
	{
	  /* Load from constant pool.  */
	  emit_move_insn (target, gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0)));
	  return;
	}
    }

  /* Store value to stack temp.  Load vector element.  Splat.  */
  if (all_same)
    {
      mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
      emit_move_insn (adjust_address_nv (mem, inner_mode, 0),
		      XVECEXP (vals, 0, 0));
      x = gen_rtx_UNSPEC (VOIDmode,
			  gen_rtvec (1, const0_rtx), UNSPEC_LVE);
      emit_insn (gen_rtx_PARALLEL (VOIDmode,
				   gen_rtvec (2,
					      gen_rtx_SET (VOIDmode,
							   target, mem),
					      x)));
      x = gen_rtx_VEC_SELECT (inner_mode, target,
			      gen_rtx_PARALLEL (VOIDmode,
						gen_rtvec (1, const0_rtx)));
      emit_insn (gen_rtx_SET (VOIDmode, target,
			      gen_rtx_VEC_DUPLICATE (mode, x)));
      return;
    }

  /* One field is non-constant.  Load constant then overwrite
     varying field.  */
  if (n_var == 1)
    {
      rtx copy = copy_rtx (vals);

      /* Load constant part of vector, substitute neighboring value for
	 varying element.  */
      XVECEXP (copy, 0, one_var) = XVECEXP (vals, 0, (one_var + 1) % n_elts);
      rs6000_expand_vector_init (target, copy);

      /* Insert variable.  */
      rs6000_expand_vector_set (target, XVECEXP (vals, 0, one_var), one_var);
      return;
    }

  /* Construct the vector in memory one field at a time
     and load the whole vector.  */
  mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
  for (i = 0; i < n_elts; i++)
    emit_move_insn (adjust_address_nv (mem, inner_mode,
				       i * GET_MODE_SIZE (inner_mode)),
		    XVECEXP (vals, 0, i));
  emit_move_insn (target, mem);
}
/* Set field ELT of TARGET to VAL.  */

void
rs6000_expand_vector_set (rtx target, rtx val, int elt)
{
  enum machine_mode mode = GET_MODE (target);
  enum machine_mode inner_mode = GET_MODE_INNER (mode);
  rtx reg = gen_reg_rtx (mode);
  rtx mask, mem, x;
  int width = GET_MODE_SIZE (inner_mode);
  int i;

  /* Load single variable value.  */
  mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
  emit_move_insn (adjust_address_nv (mem, inner_mode, 0), val);
  x = gen_rtx_UNSPEC (VOIDmode,
		      gen_rtvec (1, const0_rtx), UNSPEC_LVE);
  emit_insn (gen_rtx_PARALLEL (VOIDmode,
			       gen_rtvec (2,
					  gen_rtx_SET (VOIDmode,
						       reg, mem),
					  x)));

  /* Linear sequence.  */
  mask = gen_rtx_PARALLEL (V16QImode, rtvec_alloc (16));
  for (i = 0; i < 16; ++i)
    XVECEXP (mask, 0, i) = GEN_INT (i);

  /* Set permute mask to insert element into target.  */
  for (i = 0; i < width; ++i)
    XVECEXP (mask, 0, elt*width + i)
      = GEN_INT (i + 0x10);
  x = gen_rtx_CONST_VECTOR (V16QImode, XVEC (mask, 0));
  x = gen_rtx_UNSPEC (mode,
		      gen_rtvec (3, target, reg,
				 force_reg (V16QImode, x)),
		      UNSPEC_VPERM);
  emit_insn (gen_rtx_SET (VOIDmode, target, x));
}
/* Extract field ELT from VEC into TARGET.  */

void
rs6000_expand_vector_extract (rtx target, rtx vec, int elt)
{
  enum machine_mode mode = GET_MODE (vec);
  enum machine_mode inner_mode = GET_MODE_INNER (mode);
  rtx mem, x;

  /* Allocate mode-sized buffer.  */
  mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);

  /* Add offset to field within buffer matching vector element.  */
  mem = adjust_address_nv (mem, mode, elt * GET_MODE_SIZE (inner_mode));

  /* Store single field into mode-sized buffer.  */
  x = gen_rtx_UNSPEC (VOIDmode,
		      gen_rtvec (1, const0_rtx), UNSPEC_STVE);
  emit_insn (gen_rtx_PARALLEL (VOIDmode,
			       gen_rtvec (2,
					  gen_rtx_SET (VOIDmode,
						       mem, vec),
					  x)));
  emit_move_insn (target, adjust_address_nv (mem, inner_mode, 0));
}
/* Generates shifts and masks for a pair of rldicl or rldicr insns to
   implement ANDing by the mask IN.  */
void
build_mask64_2_operands (rtx in, rtx *out)
{
#if HOST_BITS_PER_WIDE_INT >= 64
  unsigned HOST_WIDE_INT c, lsb, m1, m2;
  int shift;

  gcc_assert (GET_CODE (in) == CONST_INT);

  c = INTVAL (in);
  if (c & 1)
    {
      /* Assume c initially something like 0x00fff000000fffff.  The idea
	 is to rotate the word so that the middle ^^^^^^ group of zeros
	 is at the MS end and can be cleared with an rldicl mask.  We then
	 rotate back and clear off the MS       ^^ group of zeros with a
	 second rldicl.  */
      c = ~c;			/*   c == 0xff000ffffff00000 */
      lsb = c & -c;		/* lsb == 0x0000000000100000 */
      m1 = -lsb;		/*  m1 == 0xfffffffffff00000 */
      c = ~c;			/*   c == 0x00fff000000fffff */
      c &= -lsb;		/*   c == 0x00fff00000000000 */
      lsb = c & -c;		/* lsb == 0x0000100000000000 */
      c = ~c;			/*   c == 0xff000fffffffffff */
      c &= -lsb;		/*   c == 0xff00000000000000 */
      shift = 0;
      while ((lsb >>= 1) != 0)
	shift++;		/* shift == 44 on exit from loop */
      m1 <<= 64 - shift;	/*  m1 == 0xffffff0000000000 */
      m1 = ~m1;			/*  m1 == 0x000000ffffffffff */
      m2 = ~c;			/*  m2 == 0x00ffffffffffffff */
    }
  else
    {
      /* Assume c initially something like 0xff000f0000000000.  The idea
	 is to rotate the word so that the     ^^^  middle group of zeros
	 is at the LS end and can be cleared with an rldicr mask.  We then
	 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
	 a second rldicr.  */
      lsb = c & -c;		/* lsb == 0x0000010000000000 */
      m2 = -lsb;		/*  m2 == 0xffffff0000000000 */
      c = ~c;			/*   c == 0x00fff0ffffffffff */
      c &= -lsb;		/*   c == 0x00fff00000000000 */
      lsb = c & -c;		/* lsb == 0x0000100000000000 */
      c = ~c;			/*   c == 0xff000fffffffffff */
      c &= -lsb;		/*   c == 0xff00000000000000 */
      shift = 0;
      while ((lsb >>= 1) != 0)
	shift++;		/* shift == 44 on exit from loop */
      m1 = ~c;			/*  m1 == 0x00ffffffffffffff */
      m1 >>= shift;		/*  m1 == 0x0000000000000fff */
      m1 = ~m1;			/*  m1 == 0xfffffffffffff000 */
    }

  /* Note that when we only have two 0->1 and 1->0 transitions, one of the
     masks will be all 1's.  We are guaranteed more than one transition.  */
  out[0] = GEN_INT (64 - shift);
  out[1] = GEN_INT (m1);
  out[2] = GEN_INT (shift);
  out[3] = GEN_INT (m2);
#else
  (void)in;
  (void)out;
  gcc_unreachable ();
#endif
}
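
/* Illustration (not part of GCC): the `c & -c' idiom above isolates
   the least-significant set bit.  This standalone sketch replays the
   (c & 1) branch on the exact mask from the comments and prints the
   resulting shift, m1 and m2.  Fenced off with #if 0.  */
#if 0
#include <stdio.h>
#include <stdint.h>

int main (void)
{
  uint64_t c = 0x00fff000000fffffULL;  /* the mask from the comment */
  uint64_t lsb, m1, m2;
  int shift = 0;

  c = ~c;         /* 0xff000ffffff00000 */
  lsb = c & -c;   /* lowest set bit:           0x0000000000100000 */
  m1 = -lsb;      /* ones from that bit up:    0xfffffffffff00000 */
  c = ~c;
  c &= -lsb;      /* clear low group of ones:  0x00fff00000000000 */
  lsb = c & -c;   /* 0x0000100000000000 */
  c = ~c;
  c &= -lsb;      /* 0xff00000000000000 */
  while ((lsb >>= 1) != 0)
    shift++;      /* 44 */
  m1 <<= 64 - shift;
  m1 = ~m1;       /* 0x000000ffffffffff */
  m2 = ~c;        /* 0x00ffffffffffffff */

  printf ("shift=%d m1=%#llx m2=%#llx\n", shift,
	  (unsigned long long) m1, (unsigned long long) m2);
  return 0;
}
#endif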
/* Return TRUE if OP is an invalid SUBREG operation on the e500.  */

bool
invalid_e500_subreg (rtx op, enum machine_mode mode)
{
  if (TARGET_E500_DOUBLE)
    {
      /* Reject (subreg:SI (reg:DF)); likewise with subreg:DI or
	 subreg:TI and reg:TF.  */
      if (GET_CODE (op) == SUBREG
	  && (mode == SImode || mode == DImode || mode == TImode)
	  && REG_P (SUBREG_REG (op))
	  && (GET_MODE (SUBREG_REG (op)) == DFmode
	      || GET_MODE (SUBREG_REG (op)) == TFmode))
	return true;

      /* Reject (subreg:DF (reg:DI)); likewise with subreg:TF and
	 reg:TI.  */
      if (GET_CODE (op) == SUBREG
	  && (mode == DFmode || mode == TFmode)
	  && REG_P (SUBREG_REG (op))
	  && (GET_MODE (SUBREG_REG (op)) == DImode
	      || GET_MODE (SUBREG_REG (op)) == TImode))
	return true;
    }

  if (TARGET_SPE
      && GET_CODE (op) == SUBREG
      && mode == SImode
      && REG_P (SUBREG_REG (op))
      && SPE_VECTOR_MODE (GET_MODE (SUBREG_REG (op))))
    return true;

  return false;
}
/* AIX increases natural record alignment to doubleword if the first
   field is an FP double while the FP fields remain word aligned.  */

unsigned int
rs6000_special_round_type_align (tree type, unsigned int computed,
				 unsigned int specified)
{
  unsigned int align = MAX (computed, specified);
  tree field = TYPE_FIELDS (type);

  /* Skip all non field decls */
  while (field != NULL && TREE_CODE (field) != FIELD_DECL)
    field = TREE_CHAIN (field);

  if (field != NULL && field != type)
    {
      type = TREE_TYPE (field);
      while (TREE_CODE (type) == ARRAY_TYPE)
	type = TREE_TYPE (type);

      if (type != error_mark_node && TYPE_MODE (type) == DFmode)
	align = MAX (align, 64);
    }

  return align;
}
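
/* Illustration (not part of GCC, and an assumption about layout): under
   the AIX rule above, a struct whose first field is a double is raised
   to doubleword alignment, while a later double stays word aligned.
   The struct names are hypothetical and the printed numbers depend on
   the target ABI; on most non-AIX hosts d lands at offset 8, not 4.
   Fenced off with #if 0.  */
#if 0
#include <stdio.h>
#include <stddef.h>

struct lead_double { double d; int i; };  /* AIX raises align to 8 */
struct lead_int    { int i; double d; };  /* AIX keeps d word aligned */

int main (void)
{
  printf ("offsetof d: %zu, sizeof lead_double: %zu\n",
	  offsetof (struct lead_int, d), sizeof (struct lead_double));
  return 0;
}
#endif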
/* Darwin increases record alignment to the natural alignment of
   the first field.  */

unsigned int
darwin_rs6000_special_round_type_align (tree type, unsigned int computed,
					unsigned int specified)
{
  unsigned int align = MAX (computed, specified);

  if (TYPE_PACKED (type))
    return align;

  /* Find the first field, looking down into aggregates.  */
  do {
    tree field = TYPE_FIELDS (type);
    /* Skip all non field decls */
    while (field != NULL && TREE_CODE (field) != FIELD_DECL)
      field = TREE_CHAIN (field);
    if (! field)
      break;
    type = TREE_TYPE (field);
    while (TREE_CODE (type) == ARRAY_TYPE)
      type = TREE_TYPE (type);
  } while (AGGREGATE_TYPE_P (type));

  if (! AGGREGATE_TYPE_P (type) && type != error_mark_node)
    align = MAX (align, TYPE_ALIGN (type));

  return align;
}
/* Return 1 for an operand in small memory on V.4/eabi.  */

int
small_data_operand (rtx op ATTRIBUTE_UNUSED,
		    enum machine_mode mode ATTRIBUTE_UNUSED)
{
#if TARGET_ELF
  rtx sym_ref;

  if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
    return 0;

  if (DEFAULT_ABI != ABI_V4)
    return 0;

  if (GET_CODE (op) == SYMBOL_REF)
    sym_ref = op;

  else if (GET_CODE (op) != CONST
	   || GET_CODE (XEXP (op, 0)) != PLUS
	   || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
	   || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
    return 0;

  else
    {
      rtx sum = XEXP (op, 0);
      HOST_WIDE_INT summand;

      /* We have to be careful here, because it is the referenced address
	 that must be 32k from _SDA_BASE_, not just the symbol.  */
      summand = INTVAL (XEXP (sum, 1));
      if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
	return 0;

      sym_ref = XEXP (sum, 0);
    }

  return SYMBOL_REF_SMALL_P (sym_ref);
#else
  return 0;
#endif
}

/* Return true if either operand is a general purpose register.  */

bool
gpr_or_gpr_p (rtx op0, rtx op1)
{
  return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
	  || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
}
/* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address.  */

static int
constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
{
  switch (GET_CODE (op))
    {
    case SYMBOL_REF:
      if (RS6000_SYMBOL_REF_TLS_P (op))
	return 0;
      else if (CONSTANT_POOL_ADDRESS_P (op))
	{
	  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
	    {
	      *have_sym = 1;
	      return 1;
	    }
	  else
	    return 0;
	}
      else if (! strcmp (XSTR (op, 0), toc_label_name))
	{
	  *have_toc = 1;
	  return 1;
	}
      else
	return 0;
    case PLUS:
    case MINUS:
      return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
	      && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
    case CONST:
      return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
    case CONST_INT:
      return 1;
    default:
      return 0;
    }
}

static bool
constant_pool_expr_p (rtx op)
{
  int have_sym = 0;
  int have_toc = 0;
  return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
}

bool
toc_relative_expr_p (rtx op)
{
  int have_sym = 0;
  int have_toc = 0;
  return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
}
bool
legitimate_constant_pool_address_p (rtx x)
{
  return (TARGET_TOC
	  && GET_CODE (x) == PLUS
	  && GET_CODE (XEXP (x, 0)) == REG
	  && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
	  && constant_pool_expr_p (XEXP (x, 1)));
}

static bool
legitimate_small_data_p (enum machine_mode mode, rtx x)
{
  return (DEFAULT_ABI == ABI_V4
	  && !flag_pic && !TARGET_TOC
	  && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
	  && small_data_operand (x, mode));
}
/* SPE offset addressing is limited to 5-bits worth of double words.  */
#define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
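
/* Illustration (not part of GCC): the macro above accepts exactly the
   doubleword-aligned offsets 0, 8, ..., 248 (a 5-bit field scaled by
   8); the ~0xf8 test rejects any bit outside 0xf8, including the low
   three alignment bits.  A quick standalone check, fenced off with
   #if 0 (the #define inside is never seen by this file).  */
#if 0
#include <stdio.h>

#define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)

int main (void)
{
  printf ("%d %d %d %d\n",
	  SPE_CONST_OFFSET_OK (0),     /* 1 */
	  SPE_CONST_OFFSET_OK (248),   /* 1: 0xf8 */
	  SPE_CONST_OFFSET_OK (4),     /* 0: misaligned */
	  SPE_CONST_OFFSET_OK (256));  /* 0: out of range */
  return 0;
}
#endif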
static bool
rs6000_legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
{
  unsigned HOST_WIDE_INT offset, extra;

  if (GET_CODE (x) != PLUS)
    return false;
  if (GET_CODE (XEXP (x, 0)) != REG)
    return false;
  if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
    return false;
  if (legitimate_constant_pool_address_p (x))
    return true;
  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
    return false;

  offset = INTVAL (XEXP (x, 1));
  extra = 0;
  switch (mode)
    {
    case V16QImode:
    case V8HImode:
    case V4SFmode:
    case V4SImode:
      /* AltiVec vector modes.  Only reg+reg addressing is valid and
	 constant offset zero should not occur due to canonicalization.
	 Allow any offset when not strict before reload.  */
      return !strict;

    case V4HImode:
    case V2SImode:
    case V1DImode:
    case V2SFmode:
      /* SPE vector modes.  */
      return SPE_CONST_OFFSET_OK (offset);

    case DFmode:
      if (TARGET_E500_DOUBLE)
	return SPE_CONST_OFFSET_OK (offset);

    case DImode:
      /* On e500v2, we may have:

	   (subreg:DF (mem:DI (plus (reg) (const_int))) 0).

         Which gets addressed with evldd instructions.  */
      if (TARGET_E500_DOUBLE)
	return SPE_CONST_OFFSET_OK (offset);

      if (mode == DFmode || !TARGET_POWERPC64)
	extra = 4;
      else if (offset & 3)
	return false;
      break;

    case TFmode:
      if (TARGET_E500_DOUBLE)
	return (SPE_CONST_OFFSET_OK (offset)
		&& SPE_CONST_OFFSET_OK (offset + 8));

    case TImode:
      if (mode == TFmode || !TARGET_POWERPC64)
	extra = 12;
      else if (offset & 3)
	return false;
      else
	extra = 8;
      break;

    default:
      break;
    }

  offset += 0x8000;
  return (offset < 0x10000) && (offset + extra < 0x10000);
}
bool
legitimate_indexed_address_p (rtx x, int strict)
{
  rtx op0, op1;

  if (GET_CODE (x) != PLUS)
    return false;

  op0 = XEXP (x, 0);
  op1 = XEXP (x, 1);

  /* Recognize the rtl generated by reload which we know will later be
     replaced with proper base and index regs.  */
  if (!strict
      && reload_in_progress
      && (REG_P (op0) || GET_CODE (op0) == PLUS)
      && REG_P (op1))
    return true;

  return (REG_P (op0) && REG_P (op1)
	  && ((INT_REG_OK_FOR_BASE_P (op0, strict)
	       && INT_REG_OK_FOR_INDEX_P (op1, strict))
	      || (INT_REG_OK_FOR_BASE_P (op1, strict)
		  && INT_REG_OK_FOR_INDEX_P (op0, strict))));
}

inline bool
legitimate_indirect_address_p (rtx x, int strict)
{
  return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
}
bool
macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
{
  if (!TARGET_MACHO || !flag_pic
      || mode != SImode || GET_CODE (x) != MEM)
    return false;
  x = XEXP (x, 0);

  if (GET_CODE (x) != LO_SUM)
    return false;
  if (GET_CODE (XEXP (x, 0)) != REG)
    return false;
  if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
    return false;
  x = XEXP (x, 1);

  return CONSTANT_P (x);
}

static bool
legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
{
  if (GET_CODE (x) != LO_SUM)
    return false;
  if (GET_CODE (XEXP (x, 0)) != REG)
    return false;
  if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
    return false;
  /* Restrict addressing for DI because of our SUBREG hackery.  */
  if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
			     || mode == DImode))
    return false;
  x = XEXP (x, 1);

  if (TARGET_ELF || TARGET_MACHO)
    {
      if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
	return false;
      if (TARGET_TOC)
	return false;
      if (GET_MODE_NUNITS (mode) != 1)
	return false;
      if (GET_MODE_BITSIZE (mode) > 64
	  || (GET_MODE_BITSIZE (mode) > 32 && !TARGET_POWERPC64
	      && !(TARGET_HARD_FLOAT && TARGET_FPRS && mode == DFmode)))
	return false;

      return CONSTANT_P (x);
    }

  return false;
}
/* Try machine-dependent ways of modifying an illegitimate address
   to be legitimate.  If we find one, return the new, valid address.
   This is used from only one place: `memory_address' in explow.c.

   OLDX is the address as it was before break_out_memory_refs was
   called.  In some cases it is useful to look at this to decide what
   needs to be done.

   MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.

   It is always safe for this function to do nothing.  It exists to
   recognize opportunities to optimize the output.

   On RS/6000, first check for the sum of a register with a constant
   integer that is out of range.  If so, generate code to add the
   constant with the low-order 16 bits masked to the register and force
   this result into another register (this can be done with `cau').
   Then generate an address of REG+(CONST&0xffff), allowing for the
   possibility of bit 16 being a one.

   Then check for the sum of a register and something not constant, try to
   load the other things into a register and return the sum.  */

rtx
rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
			   enum machine_mode mode)
{
  if (GET_CODE (x) == SYMBOL_REF)
    {
      enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
      if (model != 0)
	return rs6000_legitimize_tls_address (x, model);
    }

  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
    {
      HOST_WIDE_INT high_int, low_int;
      rtx sum;
      low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
      high_int = INTVAL (XEXP (x, 1)) - low_int;
      sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
					 GEN_INT (high_int)), 0);
      return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
    }
  else if (GET_CODE (x) == PLUS
	   && GET_CODE (XEXP (x, 0)) == REG
	   && GET_CODE (XEXP (x, 1)) != CONST_INT
	   && GET_MODE_NUNITS (mode) == 1
	   && ((TARGET_HARD_FLOAT && TARGET_FPRS)
	       || TARGET_POWERPC64
	       || (((mode != DImode && mode != DFmode) || TARGET_E500_DOUBLE)
		   && mode != TFmode))
	   && (TARGET_POWERPC64 || mode != DImode)
	   && mode != TImode)
    {
      return gen_rtx_PLUS (Pmode, XEXP (x, 0),
			   force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
    }
  else if (ALTIVEC_VECTOR_MODE (mode))
    {
      rtx reg;

      /* Make sure both operands are registers.  */
      if (GET_CODE (x) == PLUS)
	return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
			     force_reg (Pmode, XEXP (x, 1)));

      reg = force_reg (Pmode, x);
      return reg;
    }
  else if (SPE_VECTOR_MODE (mode)
	   || (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
				      || mode == DImode)))
    {
      if (mode == DImode)
	return NULL_RTX;
      /* We accept [reg + reg] and [reg + OFFSET].  */

      if (GET_CODE (x) == PLUS)
	{
	  rtx op1 = XEXP (x, 0);
	  rtx op2 = XEXP (x, 1);

	  op1 = force_reg (Pmode, op1);

	  if (GET_CODE (op2) != REG
	      && (GET_CODE (op2) != CONST_INT
		  || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
	    op2 = force_reg (Pmode, op2);

	  return gen_rtx_PLUS (Pmode, op1, op2);
	}

      return force_reg (Pmode, x);
    }
  else if (TARGET_ELF
	   && TARGET_32BIT
	   && TARGET_NO_TOC
	   && ! flag_pic
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && GET_MODE_NUNITS (mode) == 1
	   && (GET_MODE_BITSIZE (mode) <= 32
	       || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_elf_high (reg, x));
      return gen_rtx_LO_SUM (Pmode, reg, x);
    }
  else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
	   && ! flag_pic
#if TARGET_MACHO
	   && ! MACHO_DYNAMIC_NO_PIC_P
#endif
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
	   && mode != DImode
	   && mode != TImode)
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_macho_high (reg, x));
      return gen_rtx_LO_SUM (Pmode, reg, x);
    }
  else if (TARGET_TOC
	   && constant_pool_expr_p (x)
	   && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
    {
      return create_TOC_reference (x);
    }
  else
    return NULL_RTX;
}
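
/* Illustration (not part of GCC): the high/low split used above.  The
   low part is the offset sign-extended from 16 bits; the high part is
   the remainder, which always has zero low-order 16 bits and so fits a
   single addis.  Standalone sketch, fenced off with #if 0.  */
#if 0
#include <stdio.h>

int main (void)
{
  long val = 0x18000;                             /* out of 16-bit range */
  long low = ((val & 0xffff) ^ 0x8000) - 0x8000;  /* sign-extend 16 bits */
  long high = val - low;                          /* multiple of 0x10000 */

  /* Here low == -0x8000 and high == 0x20000: addis of 2 followed by a
     d-form displacement of -32768 reconstructs the original offset.  */
  printf ("high=%#lx low=%ld sum=%#lx\n", (unsigned long) high, low,
	  (unsigned long) (high + low));
  return 0;
}
#endif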
/* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
   We need to emit DTP-relative relocations.  */

static void
rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
{
  switch (size)
    {
    case 4:
      fputs ("\t.long\t", file);
      break;
    case 8:
      fputs (DOUBLE_INT_ASM_OP, file);
      break;
    default:
      gcc_unreachable ();
    }
  output_addr_const (file, x);
  fputs ("@dtprel+0x8000", file);
}
/* Construct the SYMBOL_REF for the tls_get_addr function.  */

static GTY(()) rtx rs6000_tls_symbol;
static rtx
rs6000_tls_get_addr (void)
{
  if (!rs6000_tls_symbol)
    rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");

  return rs6000_tls_symbol;
}

/* Construct the SYMBOL_REF for TLS GOT references.  */

static GTY(()) rtx rs6000_got_symbol;
static rtx
rs6000_got_sym (void)
{
  if (!rs6000_got_symbol)
    {
      rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
      SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
      SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
    }

  return rs6000_got_symbol;
}
/* ADDR contains a thread-local SYMBOL_REF.  Generate code to compute
   this (thread-local) address.  */

static rtx
rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
{
  rtx dest, insn;

  dest = gen_reg_rtx (Pmode);
  if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
    {
      rtx tlsreg;

      if (TARGET_64BIT)
	{
	  tlsreg = gen_rtx_REG (Pmode, 13);
	  insn = gen_tls_tprel_64 (dest, tlsreg, addr);
	}
      else
	{
	  tlsreg = gen_rtx_REG (Pmode, 2);
	  insn = gen_tls_tprel_32 (dest, tlsreg, addr);
	}
      emit_insn (insn);
    }
  else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
    {
      rtx tlsreg, tmp;

      tmp = gen_reg_rtx (Pmode);
      if (TARGET_64BIT)
	{
	  tlsreg = gen_rtx_REG (Pmode, 13);
	  insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
	}
      else
	{
	  tlsreg = gen_rtx_REG (Pmode, 2);
	  insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
	}
      emit_insn (insn);
      if (TARGET_64BIT)
	insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
      else
	insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
      emit_insn (insn);
    }
  else
    {
      rtx r3, got, tga, tmp1, tmp2, eqv;

      /* We currently use relocations like @got@tlsgd for tls, which
	 means the linker will handle allocation of tls entries, placing
	 them in the .got section.  So use a pointer to the .got section,
	 not one to secondary TOC sections used by 64-bit -mminimal-toc,
	 or to secondary GOT sections used by 32-bit -fPIC.  */
      if (TARGET_64BIT)
	got = gen_rtx_REG (Pmode, 2);
      else
	{
	  if (flag_pic == 1)
	    got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
	  else
	    {
	      rtx gsym = rs6000_got_sym ();
	      got = gen_reg_rtx (Pmode);
	      if (flag_pic == 0)
		rs6000_emit_move (got, gsym, Pmode);
	      else
		{
		  rtx tempLR, tmp3, mem;
		  rtx first, last;

		  tempLR = gen_reg_rtx (Pmode);
		  tmp1 = gen_reg_rtx (Pmode);
		  tmp2 = gen_reg_rtx (Pmode);
		  tmp3 = gen_reg_rtx (Pmode);
		  mem = gen_const_mem (Pmode, tmp1);

		  first = emit_insn (gen_load_toc_v4_PIC_1b (tempLR, gsym));
		  emit_move_insn (tmp1, tempLR);
		  emit_move_insn (tmp2, mem);
		  emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
		  last = emit_move_insn (got, tmp3);
		  set_unique_reg_note (last, REG_EQUAL, gsym);
		  REG_NOTES (first) = gen_rtx_INSN_LIST (REG_LIBCALL, last,
							 REG_NOTES (first));
		  REG_NOTES (last) = gen_rtx_INSN_LIST (REG_RETVAL, first,
							REG_NOTES (last));
		}
	    }
	}

      if (model == TLS_MODEL_GLOBAL_DYNAMIC)
	{
	  r3 = gen_rtx_REG (Pmode, 3);
	  if (TARGET_64BIT)
	    insn = gen_tls_gd_64 (r3, got, addr);
	  else
	    insn = gen_tls_gd_32 (r3, got, addr);
	  start_sequence ();
	  emit_insn (insn);
	  tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
	  insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
	  insn = emit_call_insn (insn);
	  CONST_OR_PURE_CALL_P (insn) = 1;
	  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
	  insn = get_insns ();
	  end_sequence ();
	  emit_libcall_block (insn, dest, r3, addr);
	}
      else if (model == TLS_MODEL_LOCAL_DYNAMIC)
	{
	  r3 = gen_rtx_REG (Pmode, 3);
	  if (TARGET_64BIT)
	    insn = gen_tls_ld_64 (r3, got);
	  else
	    insn = gen_tls_ld_32 (r3, got);
	  start_sequence ();
	  emit_insn (insn);
	  tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
	  insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
	  insn = emit_call_insn (insn);
	  CONST_OR_PURE_CALL_P (insn) = 1;
	  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
	  insn = get_insns ();
	  end_sequence ();
	  tmp1 = gen_reg_rtx (Pmode);
	  eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
				UNSPEC_TLSLD);
	  emit_libcall_block (insn, tmp1, r3, eqv);
	  if (rs6000_tls_size == 16)
	    {
	      if (TARGET_64BIT)
		insn = gen_tls_dtprel_64 (dest, tmp1, addr);
	      else
		insn = gen_tls_dtprel_32 (dest, tmp1, addr);
	    }
	  else if (rs6000_tls_size == 32)
	    {
	      tmp2 = gen_reg_rtx (Pmode);
	      if (TARGET_64BIT)
		insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
	      else
		insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
	      emit_insn (insn);
	      if (TARGET_64BIT)
		insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
	      else
		insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
	    }
	  else
	    {
	      tmp2 = gen_reg_rtx (Pmode);
	      if (TARGET_64BIT)
		insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
	      else
		insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
	      insn = gen_rtx_SET (Pmode, dest,
				  gen_rtx_PLUS (Pmode, tmp2, tmp1));
	    }
	  emit_insn (insn);
	}
      else
	{
	  /* IE, or 64 bit offset LE.  */
	  tmp2 = gen_reg_rtx (Pmode);
	  if (TARGET_64BIT)
	    insn = gen_tls_got_tprel_64 (tmp2, got, addr);
	  else
	    insn = gen_tls_got_tprel_32 (tmp2, got, addr);
	  emit_insn (insn);
	  if (TARGET_64BIT)
	    insn = gen_tls_tls_64 (dest, tmp2, addr);
	  else
	    insn = gen_tls_tls_32 (dest, tmp2, addr);
	  emit_insn (insn);
	}
    }

  return dest;
}
/* Return 1 if X contains a thread-local symbol.  */

bool
rs6000_tls_referenced_p (rtx x)
{
  if (! TARGET_HAVE_TLS)
    return false;

  return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
}

/* Return 1 if *X is a thread-local symbol.  This is the same as
   rs6000_tls_symbol_ref except for the type of the unused argument.  */

static int
rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
{
  return RS6000_SYMBOL_REF_TLS_P (*x);
}

/* The convention appears to be to define this wherever it is used.
   With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
   is now used here.  */
#ifndef REG_MODE_OK_FOR_BASE_P
#define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
#endif
/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS.  Returns a value to
   replace the input X, or the original X if no replacement is called for.
   The output parameter *WIN is 1 if the calling macro should goto WIN,
   0 if it should not.

   For RS/6000, we wish to handle large displacements off a base
   register by splitting the addend across an addi/addis and the mem insn.
   This cuts number of extra insns needed from 3 to 1.

   On Darwin, we use this to generate code for floating point constants.
   A movsf_low is generated so we wind up with 2 instructions rather than 3.
   The Darwin code is inside #if TARGET_MACHO because only then is
   machopic_function_base_name() defined.  */

rtx
rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
				  int opnum, int type,
				  int ind_levels ATTRIBUTE_UNUSED, int *win)
{
  /* We must recognize output that we have already generated ourselves.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }

#if TARGET_MACHO
  if (DEFAULT_ABI == ABI_DARWIN && flag_pic
      && GET_CODE (x) == LO_SUM
      && GET_CODE (XEXP (x, 0)) == PLUS
      && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
      && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
      && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
    {
      /* Result of previous invocation of this function on Darwin
	 floating point constant.  */
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif

  /* Force ld/std non-word aligned offset into base register by wrapping
     in offset 0.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) < 32
      && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && (INTVAL (XEXP (x, 1)) & 3) != 0
      && !ALTIVEC_VECTOR_MODE (mode)
      && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
      && TARGET_POWERPC64)
    {
      x = gen_rtx_PLUS (GET_MODE (x), x, GEN_INT (0));
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
		   opnum, (enum reload_type) type);
      *win = 1;
      return x;
    }

  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
      && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && !SPE_VECTOR_MODE (mode)
      && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
				  || mode == DImode))
      && !ALTIVEC_VECTOR_MODE (mode))
    {
      HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
      HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
      HOST_WIDE_INT high
	= (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;

      /* Check for 32-bit overflow.  */
      if (high + low != val)
	{
	  *win = 0;
	  return x;
	}

      /* Reload the high part into a base reg; leave the low part
	 in the mem directly.  */

      x = gen_rtx_PLUS (GET_MODE (x),
			gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
				      GEN_INT (high)),
			GEN_INT (low));

      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }

  if (GET_CODE (x) == SYMBOL_REF
      && !ALTIVEC_VECTOR_MODE (mode)
      && !SPE_VECTOR_MODE (mode)
#if TARGET_MACHO
      && DEFAULT_ABI == ABI_DARWIN
      && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
#else
      && DEFAULT_ABI == ABI_V4
      && !flag_pic
#endif
      /* Don't do this for TFmode, since the result isn't offsettable.
	 The same goes for DImode without 64-bit gprs and DFmode
	 without fprs.  */
      && mode != TFmode
      && (mode != DImode || TARGET_POWERPC64)
      && (mode != DFmode || TARGET_POWERPC64
	  || (TARGET_FPRS && TARGET_HARD_FLOAT)))
    {
#if TARGET_MACHO
      if (flag_pic)
	{
	  rtx offset = gen_rtx_CONST (Pmode,
			 gen_rtx_MINUS (Pmode, x,
					machopic_function_base_sym ()));
	  x = gen_rtx_LO_SUM (GET_MODE (x),
		gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
		  gen_rtx_HIGH (Pmode, offset)), offset);
	}
      else
#endif
	x = gen_rtx_LO_SUM (GET_MODE (x),
	      gen_rtx_HIGH (Pmode, x), x);

      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }

  /* Reload an offset address wrapped by an AND that represents the
     masking of the lower bits.  Strip the outer AND and let reload
     convert the offset address into an indirect address.  */
  if (TARGET_ALTIVEC
      && ALTIVEC_VECTOR_MODE (mode)
      && GET_CODE (x) == AND
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && INTVAL (XEXP (x, 1)) == -16)
    {
      x = XEXP (x, 0);
      *win = 1;
      return x;
    }

  if (TARGET_TOC
      && constant_pool_expr_p (x)
      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
    {
      x = create_TOC_reference (x);
      *win = 1;
      return x;
    }
  *win = 0;
  return x;
}
/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
   that is a valid memory address for an instruction.
   The MODE argument is the machine mode for the MEM expression
   that wants to use this address.

   On the RS/6000, there are four valid addresses: a SYMBOL_REF that
   refers to a constant pool entry of an address (or the sum of it
   plus a constant), a short (16-bit signed) constant plus a register,
   the sum of two registers, or a register indirect, possibly with an
   auto-increment.  For DFmode and DImode with a constant plus register,
   we must ensure that both words are addressable or PowerPC64 with offset
   word aligned.

   For modes spanning multiple registers (DFmode in 32-bit GPRs,
   32-bit DImode, TImode, TFmode), indexed addressing cannot be used because
   adjacent memory cells are accessed by adding word-sized offsets
   during assembly output.  */
int
rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
{
  /* If this is an unaligned stvx/ldvx type address, discard the outer AND.  */
  if (TARGET_ALTIVEC
      && ALTIVEC_VECTOR_MODE (mode)
      && GET_CODE (x) == AND
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && INTVAL (XEXP (x, 1)) == -16)
    x = XEXP (x, 0);

  if (RS6000_SYMBOL_REF_TLS_P (x))
    return 0;
  if (legitimate_indirect_address_p (x, reg_ok_strict))
    return 1;
  if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
      && !ALTIVEC_VECTOR_MODE (mode)
      && !SPE_VECTOR_MODE (mode)
      && mode != TFmode
      /* Restrict addressing for DI because of our SUBREG hackery.  */
      && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
				  || mode == DImode))
      && TARGET_UPDATE
      && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
    return 1;
  if (legitimate_small_data_p (mode, x))
    return 1;
  if (legitimate_constant_pool_address_p (x))
    return 1;
  /* If not REG_OK_STRICT (before reload) let pass any stack offset.  */
  if (! reg_ok_strict
      && GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && (XEXP (x, 0) == virtual_stack_vars_rtx
	  || XEXP (x, 0) == arg_pointer_rtx)
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return 1;
  if (rs6000_legitimate_offset_address_p (mode, x, reg_ok_strict))
    return 1;
  if (mode != TImode
      && mode != TFmode
      && ((TARGET_HARD_FLOAT && TARGET_FPRS)
	  || TARGET_POWERPC64
	  || ((mode != DFmode || TARGET_E500_DOUBLE) && mode != TFmode))
      && (TARGET_POWERPC64 || mode != DImode)
      && legitimate_indexed_address_p (x, reg_ok_strict))
    return 1;
  if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
    return 1;
  return 0;
}
/* Go to LABEL if ADDR (a legitimate address expression)
   has an effect that depends on the machine mode it is used for.

   On the RS/6000 this is true of all integral offsets (since AltiVec
   modes don't allow them) or is a pre-increment or decrement.

   ??? Except that due to conceptual problems in offsettable_address_p
   we can't really report the problems of integral offsets.  So leave
   this assuming that the adjustable offset must be valid for the
   sub-words of a TFmode operand, which is what we had before.  */

bool
rs6000_mode_dependent_address (rtx addr)
{
  switch (GET_CODE (addr))
    {
    case PLUS:
      if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
	{
	  unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
	  return val + 12 + 0x8000 >= 0x10000;
	}
      break;

    case LO_SUM:
      return true;

    /* Auto-increment cases are now treated generically in recog.c.  */

    default:
      break;
    }

  return false;
}
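
/* Illustration (not part of GCC): the `val + 12 + 0x8000 >= 0x10000'
   test above asks whether VAL and VAL+12 both fit a signed 16-bit
   displacement, 12 being the largest sub-word offset of a TFmode
   operand.  The mode_dependent name is hypothetical; standalone
   sketch, fenced off with #if 0.  */
#if 0
#include <stdio.h>

static int mode_dependent (unsigned long val)
{
  return val + 12 + 0x8000 >= 0x10000;
}

int main (void)
{
  printf ("%d\n", mode_dependent (0x7ff0));  /* 0: 0x7ff0+12 still fits */
  printf ("%d\n", mode_dependent (0x7ff8));  /* 1: 0x7ff8+12 overflows */
  return 0;
}
#endif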
/* More elaborate version of recog's offsettable_memref_p predicate
   that works around the ??? note of rs6000_mode_dependent_address.
   In particular it accepts

     (mem:DI (plus:SI (reg/f:SI 31 31) (const_int 32760 [0x7ff8])))

   in 32-bit mode, that the recog predicate rejects.  */

bool
rs6000_offsettable_memref_p (rtx op)
{
  if (!memory_operand (op, VOIDmode))
    return false;

  /* First mimic offsettable_memref_p.  */
  if (offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)))
    return true;

  /* offsettable_address_p invokes rs6000_mode_dependent_address, but
     the latter predicate knows nothing about the mode of the memory
     reference and, therefore, assumes that it is the largest supported
     mode (TFmode).  As a consequence, legitimate offsettable memory
     references are rejected.  rs6000_legitimate_offset_address_p contains
     the correct logic for the PLUS case of rs6000_mode_dependent_address.  */
  return rs6000_legitimate_offset_address_p (GET_MODE (op), XEXP (op, 0), 1);
}
/* Return number of consecutive hard regs needed starting at reg REGNO
   to hold something of mode MODE.
   This is ordinarily the length in words of a value of mode MODE
   but can be less for certain modes in special long registers.

   For the SPE, GPRs are 64 bits but only 32 bits are visible in
   scalar instructions.  The upper 32 bits are only available to the
   SIMD instructions.

   POWER and PowerPC GPRs hold 32 bits worth;
   PowerPC64 GPRs and FPRs hold 64 bits worth.  */

int
rs6000_hard_regno_nregs (int regno, enum machine_mode mode)
{
  if (FP_REGNO_P (regno))
    return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;

  if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
    return (GET_MODE_SIZE (mode) + UNITS_PER_SPE_WORD - 1) / UNITS_PER_SPE_WORD;

  if (ALTIVEC_REGNO_P (regno))
    return
      (GET_MODE_SIZE (mode) + UNITS_PER_ALTIVEC_WORD - 1) / UNITS_PER_ALTIVEC_WORD;

  /* The value returned for SCmode in the E500 double case is 2 for
     ABI compatibility; storing an SCmode value in a single register
     would require function_arg and rs6000_spe_function_arg to handle
     SCmode so as to pass the value correctly in a pair of
     registers.  */
  if (TARGET_E500_DOUBLE && FLOAT_MODE_P (mode) && mode != SCmode)
    return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;

  return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
}
/* Change register usage conditional on target flags.  */
void
rs6000_conditional_register_usage (void)
{
  int i;

  /* Set MQ register fixed (already call_used) if not POWER
     architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
     be allocated.  */
  if (! TARGET_POWER)
    fixed_regs[64] = 1;

  /* 64-bit AIX and Linux reserve GPR13 for thread-private data.  */
  if (TARGET_64BIT)
    fixed_regs[13] = call_used_regs[13]
      = call_really_used_regs[13] = 1;

  /* Conditionally disable FPRs.  */
  if (TARGET_SOFT_FLOAT || !TARGET_FPRS)
    for (i = 32; i < 64; i++)
      fixed_regs[i] = call_used_regs[i]
	= call_really_used_regs[i] = 1;

  /* The TOC register is not killed across calls in a way that is
     visible to the compiler.  */
  if (DEFAULT_ABI == ABI_AIX)
    call_really_used_regs[2] = 0;

  if (DEFAULT_ABI == ABI_V4
      && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
      && flag_pic == 2)
    fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;

  if (DEFAULT_ABI == ABI_V4
      && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
      && flag_pic == 1)
    fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
      = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
      = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;

  if (DEFAULT_ABI == ABI_DARWIN
      && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
      = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
      = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;

  if (TARGET_TOC && TARGET_MINIMAL_TOC)
    fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
      = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;

  if (TARGET_ALTIVEC)
    global_regs[VSCR_REGNO] = 1;

  if (TARGET_SPE)
    {
      global_regs[SPEFSCR_REGNO] = 1;
      fixed_regs[FIXED_SCRATCH]
	= call_used_regs[FIXED_SCRATCH]
	= call_really_used_regs[FIXED_SCRATCH] = 1;
    }

  if (! TARGET_ALTIVEC)
    {
      for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
	fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
      call_really_used_regs[VRSAVE_REGNO] = 1;
    }

  if (TARGET_ALTIVEC_ABI)
    for (i = FIRST_ALTIVEC_REGNO; i < FIRST_ALTIVEC_REGNO + 20; ++i)
      call_used_regs[i] = call_really_used_regs[i] = 1;
}
/* Try to output insns to set TARGET equal to the constant C if it can
   be done in less than N insns.  Do all computations in MODE.
   Returns the place where the output has been placed if it can be
   done and the insns have been emitted.  If it would take more than N
   insns, zero is returned and no insns are emitted.  */

rtx
rs6000_emit_set_const (rtx dest, enum machine_mode mode,
                       rtx source, int n ATTRIBUTE_UNUSED)
{
  rtx result, insn, set;
  HOST_WIDE_INT c0, c1;

  switch (mode)
    {
    case QImode:
    case HImode:
      if (dest == NULL)
        dest = gen_reg_rtx (mode);
      emit_insn (gen_rtx_SET (VOIDmode, dest, source));
      return dest;

    case SImode:
      result = no_new_pseudos ? dest : gen_reg_rtx (SImode);

      emit_insn (gen_rtx_SET (VOIDmode, copy_rtx (result),
                              GEN_INT (INTVAL (source)
                                       & (~ (HOST_WIDE_INT) 0xffff))));
      emit_insn (gen_rtx_SET (VOIDmode, dest,
                              gen_rtx_IOR (SImode, copy_rtx (result),
                                           GEN_INT (INTVAL (source) & 0xffff))));
      result = dest;
      break;

    case DImode:
      switch (GET_CODE (source))
        {
        case CONST_INT:
          c0 = INTVAL (source);
          c1 = -(c0 < 0);
          break;

        case CONST_DOUBLE:
#if HOST_BITS_PER_WIDE_INT >= 64
          c0 = CONST_DOUBLE_LOW (source);
          c1 = -(c0 < 0);
#else
          c0 = CONST_DOUBLE_LOW (source);
          c1 = CONST_DOUBLE_HIGH (source);
#endif
          break;

        default:
          gcc_unreachable ();
        }

      result = rs6000_emit_set_long_const (dest, c0, c1);
      break;

    default:
      gcc_unreachable ();
    }

  insn = get_last_insn ();
  set = single_set (insn);
  if (! CONSTANT_P (SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, source);

  return result;
}
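
/* A sketch of what the SImode case above amounts to at the assembly
   level (illustrative only; the actual output comes from the rtx
   emitted above): for 0x12345678 the high-order bits are set first
   and the low 16 bits are then IORed in, i.e. roughly
        lis  rD,0x1234
        ori  rD,rD,0x5678  */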
/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
   fall back to a straightforward decomposition.  We do this to avoid
   exponential run times encountered when looking for longer sequences
   with rs6000_emit_set_const.  */
static rtx
rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
{
  if (!TARGET_POWERPC64)
    {
      rtx operand1, operand2;

      operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
                                        DImode);
      operand2 = operand_subword_force (copy_rtx (dest), WORDS_BIG_ENDIAN != 0,
                                        DImode);
      emit_move_insn (operand1, GEN_INT (c1));
      emit_move_insn (operand2, GEN_INT (c2));
    }
  else
    {
      HOST_WIDE_INT ud1, ud2, ud3, ud4;

      ud1 = c1 & 0xffff;
      ud2 = (c1 & 0xffff0000) >> 16;
#if HOST_BITS_PER_WIDE_INT >= 64
      c2 = c1 >> 32;
#endif
      ud3 = c2 & 0xffff;
      ud4 = (c2 & 0xffff0000) >> 16;

      if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
          || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
        {
          if (ud1 & 0x8000)
            emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
          else
            emit_move_insn (dest, GEN_INT (ud1));
        }

      else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
               || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
        {
          if (ud2 & 0x8000)
            emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
                                           - 0x80000000));
          else
            emit_move_insn (dest, GEN_INT (ud2 << 16));
          if (ud1 != 0)
            emit_move_insn (copy_rtx (dest),
                            gen_rtx_IOR (DImode, copy_rtx (dest),
                                         GEN_INT (ud1)));
        }
      else if ((ud4 == 0xffff && (ud3 & 0x8000))
               || (ud4 == 0 && ! (ud3 & 0x8000)))
        {
          if (ud3 & 0x8000)
            emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
                                           - 0x80000000));
          else
            emit_move_insn (dest, GEN_INT (ud3 << 16));

          if (ud2 != 0)
            emit_move_insn (copy_rtx (dest),
                            gen_rtx_IOR (DImode, copy_rtx (dest),
                                         GEN_INT (ud2)));
          emit_move_insn (copy_rtx (dest),
                          gen_rtx_ASHIFT (DImode, copy_rtx (dest),
                                          GEN_INT (16)));
          if (ud1 != 0)
            emit_move_insn (copy_rtx (dest),
                            gen_rtx_IOR (DImode, copy_rtx (dest),
                                         GEN_INT (ud1)));
        }
      else
        {
          if (ud4 & 0x8000)
            emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
                                           - 0x80000000));
          else
            emit_move_insn (dest, GEN_INT (ud4 << 16));

          if (ud3 != 0)
            emit_move_insn (copy_rtx (dest),
                            gen_rtx_IOR (DImode, copy_rtx (dest),
                                         GEN_INT (ud3)));

          emit_move_insn (copy_rtx (dest),
                          gen_rtx_ASHIFT (DImode, copy_rtx (dest),
                                          GEN_INT (32)));
          if (ud2 != 0)
            emit_move_insn (copy_rtx (dest),
                            gen_rtx_IOR (DImode, copy_rtx (dest),
                                         GEN_INT (ud2 << 16)));
          if (ud1 != 0)
            emit_move_insn (copy_rtx (dest),
                            gen_rtx_IOR (DImode, copy_rtx (dest), GEN_INT (ud1)));
        }
    }
  return dest;
}
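
/* Worked example for the TARGET_POWERPC64 path above (a sketch; the
   real insns come from the move/IOR/ASHIFT rtx emitted there): for
   c = 0x123456789abcdef0 we get ud4 = 0x1234, ud3 = 0x5678,
   ud2 = 0x9abc and ud1 = 0xdef0; none of the short forms applies, so
   the final arm produces the canonical five-instruction sequence
        lis   rD,0x1234
        ori   rD,rD,0x5678
        sldi  rD,rD,32
        oris  rD,rD,0x9abc
        ori   rD,rD,0xdef0  */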
/* Helper for the following.  Get rid of [r+r] memory refs
   in cases where it won't work (TImode, TFmode).  */

static void
rs6000_eliminate_indexed_memrefs (rtx operands[2])
{
  if (GET_CODE (operands[0]) == MEM
      && GET_CODE (XEXP (operands[0], 0)) != REG
      && ! legitimate_constant_pool_address_p (XEXP (operands[0], 0))
      && ! reload_in_progress)
    operands[0]
      = replace_equiv_address (operands[0],
                               copy_addr_to_reg (XEXP (operands[0], 0)));

  if (GET_CODE (operands[1]) == MEM
      && GET_CODE (XEXP (operands[1], 0)) != REG
      && ! legitimate_constant_pool_address_p (XEXP (operands[1], 0))
      && ! reload_in_progress)
    operands[1]
      = replace_equiv_address (operands[1],
                               copy_addr_to_reg (XEXP (operands[1], 0)));
}
/* Emit a move from SOURCE to DEST in mode MODE.  */
void
rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
{
  rtx operands[2];
  operands[0] = dest;
  operands[1] = source;

  /* Sanity checks.  Check that we get CONST_DOUBLE only when we should.  */
  if (GET_CODE (operands[1]) == CONST_DOUBLE
      && ! FLOAT_MODE_P (mode)
      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      /* FIXME.  This should never happen.  */
      /* Since it seems that it does, do the safe thing and convert
         to a CONST_INT.  */
      operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
    }
  gcc_assert (GET_CODE (operands[1]) != CONST_DOUBLE
              || FLOAT_MODE_P (mode)
              || ((CONST_DOUBLE_HIGH (operands[1]) != 0
                   || CONST_DOUBLE_LOW (operands[1]) < 0)
                  && (CONST_DOUBLE_HIGH (operands[1]) != -1
                      || CONST_DOUBLE_LOW (operands[1]) >= 0)));

  /* Check if GCC is setting up a block move that will end up using FP
     registers as temporaries.  We must make sure this is acceptable.  */
  if (GET_CODE (operands[0]) == MEM
      && GET_CODE (operands[1]) == MEM
      && mode == DImode
      && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
          || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
      && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
                                            ? 32 : MEM_ALIGN (operands[0])))
            || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
                                               ? 32
                                               : MEM_ALIGN (operands[1]))))
      && ! MEM_VOLATILE_P (operands[0])
      && ! MEM_VOLATILE_P (operands[1]))
    {
      emit_move_insn (adjust_address (operands[0], SImode, 0),
                      adjust_address (operands[1], SImode, 0));
      emit_move_insn (adjust_address (copy_rtx (operands[0]), SImode, 4),
                      adjust_address (copy_rtx (operands[1]), SImode, 4));
      return;
    }

  if (!no_new_pseudos && GET_CODE (operands[0]) == MEM
      && !gpc_reg_operand (operands[1], mode))
    operands[1] = force_reg (mode, operands[1]);

  if (mode == SFmode && ! TARGET_POWERPC
      && TARGET_HARD_FLOAT && TARGET_FPRS
      && GET_CODE (operands[0]) == MEM)
    {
      int regnum;

      if (reload_in_progress || reload_completed)
        regnum = true_regnum (operands[1]);
      else if (GET_CODE (operands[1]) == REG)
        regnum = REGNO (operands[1]);
      else
        regnum = -1;

      /* If operands[1] is a register, on POWER it may have
         double-precision data in it, so truncate it to single
         precision.  */
      if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
        {
          rtx newreg;
          newreg = (no_new_pseudos ? copy_rtx (operands[1])
                    : gen_reg_rtx (mode));
          emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
          operands[1] = newreg;
        }
    }

  /* Recognize the case where operand[1] is a reference to thread-local
     data and load its address to a register.  */
  if (rs6000_tls_referenced_p (operands[1]))
    {
      enum tls_model model;
      rtx tmp = operands[1];
      rtx addend = NULL;

      if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
        {
          addend = XEXP (XEXP (tmp, 0), 1);
          tmp = XEXP (XEXP (tmp, 0), 0);
        }

      gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
      model = SYMBOL_REF_TLS_MODEL (tmp);
      gcc_assert (model != 0);

      tmp = rs6000_legitimize_tls_address (tmp, model);
      if (addend)
        {
          tmp = gen_rtx_PLUS (mode, tmp, addend);
          tmp = force_operand (tmp, operands[0]);
        }
      operands[1] = tmp;
    }

  /* Handle the case where reload calls us with an invalid address.  */
  if (reload_in_progress && mode == Pmode
      && (! general_operand (operands[1], mode)
          || ! nonimmediate_operand (operands[0], mode)))
    goto emit_set;

  /* 128-bit constant floating-point values on Darwin should really be
     loaded as two parts.  */
  if (!TARGET_IEEEQUAD && TARGET_LONG_DOUBLE_128
      && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
    {
      /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
         know how to get a DFmode SUBREG of a TFmode.  */
      enum machine_mode imode = (TARGET_E500_DOUBLE ? DFmode : DImode);
      rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode, 0),
                        simplify_gen_subreg (imode, operands[1], mode, 0),
                        imode);
      rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode,
                                             GET_MODE_SIZE (imode)),
                        simplify_gen_subreg (imode, operands[1], mode,
                                             GET_MODE_SIZE (imode)),
                        imode);
      return;
    }

  /* FIXME:  In the long term, this switch statement should go away
     and be replaced by a sequence of tests based on things like
     mode == Pmode.  */
  switch (mode)
    {
    case HImode:
    case QImode:
      if (CONSTANT_P (operands[1])
          && GET_CODE (operands[1]) != CONST_INT)
        operands[1] = force_const_mem (mode, operands[1]);
      break;

    case TFmode:
      rs6000_eliminate_indexed_memrefs (operands);
      /* fall through */

    case DFmode:
    case SFmode:
      if (CONSTANT_P (operands[1])
          && ! easy_fp_constant (operands[1], mode))
        operands[1] = force_const_mem (mode, operands[1]);
      break;

    case V16QImode:
    case V8HImode:
    case V4SFmode:
    case V4SImode:
    case V4HImode:
    case V2SFmode:
    case V2SImode:
    case V1DImode:
      if (CONSTANT_P (operands[1])
          && !easy_vector_constant (operands[1], mode))
        operands[1] = force_const_mem (mode, operands[1]);
      break;

    case SImode:
    case DImode:
      /* Use default pattern for address of ELF small data */
      if (TARGET_ELF
          && mode == Pmode
          && DEFAULT_ABI == ABI_V4
          && (GET_CODE (operands[1]) == SYMBOL_REF
              || GET_CODE (operands[1]) == CONST)
          && small_data_operand (operands[1], mode))
        {
          emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
          return;
        }

      if (DEFAULT_ABI == ABI_V4
          && mode == Pmode && mode == SImode
          && flag_pic == 1 && got_operand (operands[1], mode))
        {
          emit_insn (gen_movsi_got (operands[0], operands[1]));
          return;
        }

      if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
          && TARGET_NO_TOC
          && ! flag_pic
          && mode == Pmode
          && CONSTANT_P (operands[1])
          && GET_CODE (operands[1]) != HIGH
          && GET_CODE (operands[1]) != CONST_INT)
        {
          rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));

          /* If this is a function address on -mcall-aixdesc,
             convert it to the address of the descriptor.  */
          if (DEFAULT_ABI == ABI_AIX
              && GET_CODE (operands[1]) == SYMBOL_REF
              && XSTR (operands[1], 0)[0] == '.')
            {
              const char *name = XSTR (operands[1], 0);
              rtx new_ref;
              while (*name == '.')
                name++;
              new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
              CONSTANT_POOL_ADDRESS_P (new_ref)
                = CONSTANT_POOL_ADDRESS_P (operands[1]);
              SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
              SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
              SYMBOL_REF_DATA (new_ref) = SYMBOL_REF_DATA (operands[1]);
              operands[1] = new_ref;
            }

          if (DEFAULT_ABI == ABI_DARWIN)
            {
#if TARGET_MACHO
              if (MACHO_DYNAMIC_NO_PIC_P)
                {
                  /* Take care of any required data indirection.  */
                  operands[1] = rs6000_machopic_legitimize_pic_address (
                                  operands[1], mode, operands[0]);
                  if (operands[0] != operands[1])
                    emit_insn (gen_rtx_SET (VOIDmode,
                                            operands[0], operands[1]));
                  return;
                }
#endif
              emit_insn (gen_macho_high (target, operands[1]));
              emit_insn (gen_macho_low (operands[0], target, operands[1]));
              return;
            }

          emit_insn (gen_elf_high (target, operands[1]));
          emit_insn (gen_elf_low (operands[0], target, operands[1]));
          return;
        }

      /* If this is a SYMBOL_REF that refers to a constant pool entry,
         and we have put it in the TOC, we just need to make a TOC-relative
         reference to it.  */
      if (TARGET_TOC
          && GET_CODE (operands[1]) == SYMBOL_REF
          && constant_pool_expr_p (operands[1])
          && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
                                              get_pool_mode (operands[1])))
        {
          operands[1] = create_TOC_reference (operands[1]);
        }
      else if (mode == Pmode
               && CONSTANT_P (operands[1])
               && ((GET_CODE (operands[1]) != CONST_INT
                    && ! easy_fp_constant (operands[1], mode))
                   || (GET_CODE (operands[1]) == CONST_INT
                       && num_insns_constant (operands[1], mode) > 2)
                   || (GET_CODE (operands[0]) == REG
                       && FP_REGNO_P (REGNO (operands[0]))))
               && GET_CODE (operands[1]) != HIGH
               && ! legitimate_constant_pool_address_p (operands[1])
               && ! toc_relative_expr_p (operands[1]))
        {
          /* Emit a USE operation so that the constant isn't deleted if
             expensive optimizations are turned on because nobody
             references it.  This should only be done for operands that
             contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
             This should not be done for operands that contain LABEL_REFs.
             For now, we just handle the obvious case.  */
          if (GET_CODE (operands[1]) != LABEL_REF)
            emit_insn (gen_rtx_USE (VOIDmode, operands[1]));

#if TARGET_MACHO
          /* Darwin uses a special PIC legitimizer.  */
          if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
            {
              operands[1] =
                rs6000_machopic_legitimize_pic_address (operands[1], mode,
                                                        operands[0]);
              if (operands[0] != operands[1])
                emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
              return;
            }
#endif

          /* If we are to limit the number of things we put in the TOC and
             this is a symbol plus a constant we can add in one insn,
             just put the symbol in the TOC and add the constant.  Don't do
             this if reload is in progress.  */
          if (GET_CODE (operands[1]) == CONST
              && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
              && GET_CODE (XEXP (operands[1], 0)) == PLUS
              && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
              && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
                  || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
              && ! side_effects_p (operands[0]))
            {
              rtx sym =
                force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
              rtx other = XEXP (XEXP (operands[1], 0), 1);

              sym = force_reg (mode, sym);
              if (mode == SImode)
                emit_insn (gen_addsi3 (operands[0], sym, other));
              else
                emit_insn (gen_adddi3 (operands[0], sym, other));
              return;
            }

          operands[1] = force_const_mem (mode, operands[1]);

          if (TARGET_TOC
              && constant_pool_expr_p (XEXP (operands[1], 0))
              && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
                        get_pool_constant (XEXP (operands[1], 0)),
                        get_pool_mode (XEXP (operands[1], 0))))
            {
              operands[1]
                = gen_const_mem (mode,
                                 create_TOC_reference (XEXP (operands[1], 0)));
              set_mem_alias_set (operands[1], get_TOC_alias_set ());
            }
        }
      break;

    case TImode:
      rs6000_eliminate_indexed_memrefs (operands);

      if (TARGET_POWER)
        {
          emit_insn (gen_rtx_PARALLEL (VOIDmode,
                       gen_rtvec (2,
                                  gen_rtx_SET (VOIDmode,
                                               operands[0], operands[1]),
                                  gen_rtx_CLOBBER (VOIDmode,
                                                   gen_rtx_SCRATCH (SImode)))));
          return;
        }
      break;

    default:
      gcc_unreachable ();
    }

  /* Above, we may have called force_const_mem which may have returned
     an invalid address.  If we can, fix this up; otherwise, reload will
     have to deal with it.  */
  if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
    operands[1] = validize_mem (operands[1]);

 emit_set:
  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
}
/* Nonzero if we can use a floating-point register to pass this arg.  */
#define USE_FP_FOR_ARG_P(CUM,MODE,TYPE)		\
  (SCALAR_FLOAT_MODE_P (MODE)			\
   && !DECIMAL_FLOAT_MODE_P (MODE)		\
   && (CUM)->fregno <= FP_ARG_MAX_REG		\
   && TARGET_HARD_FLOAT && TARGET_FPRS)

/* Nonzero if we can use an AltiVec register to pass this arg.  */
#define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED)	\
  (ALTIVEC_VECTOR_MODE (MODE)				\
   && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG		\
   && TARGET_ALTIVEC_ABI				\
   && (NAMED))
/* Return a nonzero value to say to return the function value in
   memory, just as large structures are always returned.  TYPE will be
   the data type of the value, and FNTYPE will be the type of the
   function doing the returning, or @code{NULL} for libcalls.

   The AIX ABI for the RS/6000 specifies that all structures are
   returned in memory.  The Darwin ABI does the same.  The SVR4 ABI
   specifies that structures <= 8 bytes are returned in r3/r4, but a
   draft put them in memory, and GCC used to implement the draft
   instead of the final standard.  Therefore, aix_struct_return
   controls this instead of DEFAULT_ABI; V.4 targets needing backward
   compatibility can change DRAFT_V4_STRUCT_RET to override the
   default, and -m switches get the final word.  See
   rs6000_override_options for more details.

   The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
   long double support is enabled.  These values are returned in memory.

   int_size_in_bytes returns -1 for variable size objects, which go in
   memory always.  The cast to unsigned makes -1 > 8.  */

static bool
rs6000_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
{
  /* In the darwin64 abi, try to use registers for larger structs
     if possible.  */
  if (rs6000_darwin64_abi
      && TREE_CODE (type) == RECORD_TYPE
      && int_size_in_bytes (type) > 0)
    {
      CUMULATIVE_ARGS valcum;
      rtx valret;

      valcum.words = 0;
      valcum.fregno = FP_ARG_MIN_REG;
      valcum.vregno = ALTIVEC_ARG_MIN_REG;
      /* Do a trial code generation as if this were going to be passed
         as an argument; if any part goes in memory, we return NULL.  */
      valret = rs6000_darwin64_record_arg (&valcum, type, 1, true);
      if (valret)
        return false;
      /* Otherwise fall through to more conventional ABI rules.  */
    }

  if (AGGREGATE_TYPE_P (type)
      && (aix_struct_return
          || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
    return true;

  /* Allow -maltivec -mabi=no-altivec without warning.  Altivec vector
     modes only exist for GCC vector types if -maltivec.  */
  if (TARGET_32BIT && !TARGET_ALTIVEC_ABI
      && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
    return false;

  /* Return synthetic vectors in memory.  */
  if (TREE_CODE (type) == VECTOR_TYPE
      && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
    {
      static bool warned_for_return_big_vectors = false;
      if (!warned_for_return_big_vectors)
        {
          warning (0, "GCC vector returned by reference: "
                   "non-standard ABI extension with no compatibility guarantee");
          warned_for_return_big_vectors = true;
        }
      return true;
    }

  if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && TYPE_MODE (type) == TFmode)
    return true;

  return false;
}
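
/* For example (illustrative only): with the SVR4 rules in force
   (aix_struct_return clear), struct { int a, b; } is 8 bytes and is
   returned in r3/r4, while a 12-byte struct, any aggregate under
   aix_struct_return, or a variable-sized object (int_size_in_bytes
   gives -1, which the unsigned cast makes > 8) goes in memory.  */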
/* Initialize a variable CUM of type CUMULATIVE_ARGS
   for a call to a function whose data type is FNTYPE.
   For a library call, FNTYPE is 0.

   For incoming args we set the number of arguments in the prototype large
   so we never return a PARALLEL.  */

void
init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
                      rtx libname ATTRIBUTE_UNUSED, int incoming,
                      int libcall, int n_named_args)
{
  static CUMULATIVE_ARGS zero_cumulative;

  *cum = zero_cumulative;
  cum->words = 0;
  cum->fregno = FP_ARG_MIN_REG;
  cum->vregno = ALTIVEC_ARG_MIN_REG;
  cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
  cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
                      ? CALL_LIBCALL : CALL_NORMAL);
  cum->sysv_gregno = GP_ARG_MIN_REG;
  cum->stdarg = fntype
    && (TYPE_ARG_TYPES (fntype) != 0
        && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
            != void_type_node));

  cum->nargs_prototype = 0;
  if (incoming || cum->prototype)
    cum->nargs_prototype = n_named_args;

  /* Check for a longcall attribute.  */
  if ((!fntype && rs6000_default_long_calls)
      || (fntype
          && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
          && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype))))
    cum->call_cookie |= CALL_LONG;

  if (TARGET_DEBUG_ARG)
    {
      fprintf (stderr, "\ninit_cumulative_args:");
      if (fntype)
        {
          tree ret_type = TREE_TYPE (fntype);
          fprintf (stderr, " ret code = %s,",
                   tree_code_name[ (int)TREE_CODE (ret_type) ]);
        }

      if (cum->call_cookie & CALL_LONG)
        fprintf (stderr, " longcall,");

      fprintf (stderr, " proto = %d, nargs = %d\n",
               cum->prototype, cum->nargs_prototype);
    }

  if (fntype
      && !TARGET_ALTIVEC
      && TARGET_ALTIVEC_ABI
      && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
    {
      error ("cannot return value in vector register because"
             " altivec instructions are disabled, use -maltivec"
             " to enable them");
    }
}
/* Return true if TYPE must be passed on the stack and not in registers.  */

static bool
rs6000_must_pass_in_stack (enum machine_mode mode, tree type)
{
  if (DEFAULT_ABI == ABI_AIX || TARGET_64BIT)
    return must_pass_in_stack_var_size (mode, type);
  else
    return must_pass_in_stack_var_size_or_pad (mode, type);
}
/* If defined, a C expression which determines whether, and in which
   direction, to pad out an argument with extra space.  The value
   should be of type `enum direction': either `upward' to pad above
   the argument, `downward' to pad below, or `none' to inhibit
   padding.

   For the AIX ABI structs are always stored left shifted in their
   argument slot.  */

enum direction
function_arg_padding (enum machine_mode mode, tree type)
{
#ifndef AGGREGATE_PADDING_FIXED
#define AGGREGATE_PADDING_FIXED 0
#endif
#ifndef AGGREGATES_PAD_UPWARD_ALWAYS
#define AGGREGATES_PAD_UPWARD_ALWAYS 0
#endif

  if (!AGGREGATE_PADDING_FIXED)
    {
      /* GCC used to pass structures of the same size as integer types as
         if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
         i.e. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
         passed padded downward, except that -mstrict-align further
         muddied the water in that multi-component structures of 2 and 4
         bytes in size were passed padded upward.

         The following arranges for best compatibility with previous
         versions of gcc, but removes the -mstrict-align dependency.  */
      if (BYTES_BIG_ENDIAN)
        {
          HOST_WIDE_INT size = 0;

          if (mode == BLKmode)
            {
              if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
                size = int_size_in_bytes (type);
            }
          else
            size = GET_MODE_SIZE (mode);

          if (size == 1 || size == 2 || size == 4)
            return downward;
        }
      return upward;
    }

  if (AGGREGATES_PAD_UPWARD_ALWAYS)
    return upward;

  if (type != 0 && AGGREGATE_TYPE_P (type))
    return upward;

  /* Fall back to the default.  */
  return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
}
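
/* For example (illustrative): on a big-endian target without fixed
   aggregate padding, a 2-byte struct is padded downward, matching the
   old integer-like treatment, while a 3-byte struct falls through and
   is padded upward.  */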
/* If defined, a C expression that gives the alignment boundary, in bits,
   of an argument with the specified mode and type.  If it is not defined,
   PARM_BOUNDARY is used for all arguments.

   V.4 wants long longs and doubles to be double word aligned.  Just
   testing the mode size is a boneheaded way to do this as it means
   that other types such as complex int are also double word aligned.
   However, we're stuck with this because changing the ABI might break
   existing library interfaces.

   Doubleword align SPE vectors.
   Quadword align Altivec vectors.
   Quadword align large synthetic vector types.   */

int
function_arg_boundary (enum machine_mode mode, tree type)
{
  if (DEFAULT_ABI == ABI_V4
      && (GET_MODE_SIZE (mode) == 8
          || (TARGET_HARD_FLOAT
              && TARGET_FPRS
              && mode == TFmode)))
    return 64;
  else if (SPE_VECTOR_MODE (mode)
           || (type && TREE_CODE (type) == VECTOR_TYPE
               && int_size_in_bytes (type) >= 8
               && int_size_in_bytes (type) < 16))
    return 64;
  else if (ALTIVEC_VECTOR_MODE (mode)
           || (type && TREE_CODE (type) == VECTOR_TYPE
               && int_size_in_bytes (type) >= 16))
    return 128;
  else if (rs6000_darwin64_abi && mode == BLKmode
           && type && TYPE_ALIGN (type) > 64)
    return 128;
  else
    return PARM_BOUNDARY;
}
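
/* For example (illustrative): under the V.4 ABI a DImode or DFmode arg
   (size 8) is aligned to 64 bits, an AltiVec V4SImode arg to 128 bits,
   and everything else falls back to PARM_BOUNDARY (32 bits on a 32-bit
   target).  */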
/* For a function parm of MODE and TYPE, return the starting word in
   the parameter area.  NWORDS of the parameter area are already used.  */

static unsigned int
rs6000_parm_start (enum machine_mode mode, tree type, unsigned int nwords)
{
  unsigned int align;
  unsigned int parm_offset;

  align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
  parm_offset = DEFAULT_ABI == ABI_V4 ? 2 : 6;
  return nwords + (-(parm_offset + nwords) & align);
}
/* Compute the size (in words) of a function argument.  */

static unsigned long
rs6000_arg_size (enum machine_mode mode, tree type)
{
  unsigned long size;

  if (mode != BLKmode)
    size = GET_MODE_SIZE (mode);
  else
    size = int_size_in_bytes (type);

  if (TARGET_32BIT)
    return (size + 3) >> 2;
  else
    return (size + 7) >> 3;
}
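
/* For example (illustrative): a 10-byte BLKmode struct occupies
   (10 + 3) >> 2 = 3 words on a 32-bit target and (10 + 7) >> 3 = 2
   words on a 64-bit target.  */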
/* Use this to flush pending int fields.  */

static void
rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *cum,
                                          HOST_WIDE_INT bitpos)
{
  unsigned int startbit, endbit;
  int intregs, intoffset;
  enum machine_mode mode;

  if (cum->intoffset == -1)
    return;

  intoffset = cum->intoffset;
  cum->intoffset = -1;

  if (intoffset % BITS_PER_WORD != 0)
    {
      mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
                            MODE_INT, 0);
      if (mode == BLKmode)
        {
          /* We couldn't find an appropriate mode, which happens,
             e.g., in packed structs when there are 3 bytes to load.
             Back intoffset back to the beginning of the word in this
             case.  */
          intoffset = intoffset & -BITS_PER_WORD;
        }
    }

  startbit = intoffset & -BITS_PER_WORD;
  endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
  intregs = (endbit - startbit) / BITS_PER_WORD;
  cum->words += intregs;
}
/* The darwin64 ABI calls for us to recurse down through structs,
   looking for elements passed in registers.  Unfortunately, we have
   to track int register count here also because of misalignments
   in powerpc alignment mode.  */

static void
rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *cum,
                                            tree type,
                                            HOST_WIDE_INT startbitpos)
{
  tree f;

  for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
    if (TREE_CODE (f) == FIELD_DECL)
      {
        HOST_WIDE_INT bitpos = startbitpos;
        tree ftype = TREE_TYPE (f);
        enum machine_mode mode;
        if (ftype == error_mark_node)
          continue;
        mode = TYPE_MODE (ftype);

        if (DECL_SIZE (f) != 0
            && host_integerp (bit_position (f), 1))
          bitpos += int_bit_position (f);

        /* ??? FIXME: else assume zero offset.  */

        if (TREE_CODE (ftype) == RECORD_TYPE)
          rs6000_darwin64_record_arg_advance_recurse (cum, ftype, bitpos);
        else if (USE_FP_FOR_ARG_P (cum, mode, ftype))
          {
            rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
            cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
            cum->words += (GET_MODE_SIZE (mode) + 7) >> 3;
          }
        else if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, 1))
          {
            rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
            cum->vregno++;
            cum->words += 2;
          }
        else if (cum->intoffset == -1)
          cum->intoffset = bitpos;
      }
}
/* Update the data in CUM to advance over an argument
   of mode MODE and data type TYPE.
   (TYPE is null for libcalls where that information may not be available.)

   Note that for args passed by reference, function_arg will be called
   with MODE and TYPE set to that of the pointer to the arg, not the arg
   itself.  */

void
function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
                      tree type, int named, int depth)
{
  int size;

  /* Only tick off an argument if we're not recursing.  */
  if (depth == 0)
    cum->nargs_prototype--;

  if (TARGET_ALTIVEC_ABI
      && (ALTIVEC_VECTOR_MODE (mode)
          || (type && TREE_CODE (type) == VECTOR_TYPE
              && int_size_in_bytes (type) == 16)))
    {
      bool stack = false;

      if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
        {
          cum->vregno++;
          if (!TARGET_ALTIVEC)
            error ("cannot pass argument in vector register because"
                   " altivec instructions are disabled, use -maltivec"
                   " to enable them");

          /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
             even if it is going to be passed in a vector register.
             Darwin does the same for variable-argument functions.  */
          if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
              || (cum->stdarg && DEFAULT_ABI != ABI_V4))
            stack = true;
        }
      else
        stack = true;

      if (stack)
        {
          int align;

          /* Vector parameters must be 16-byte aligned.  This places
             them at 2 mod 4 in terms of words in 32-bit mode, since
             the parameter save area starts at offset 24 from the
             stack.  In 64-bit mode, they just have to start on an
             even word, since the parameter save area is 16-byte
             aligned.  Space for GPRs is reserved even if the argument
             will be passed in memory.  */
          if (TARGET_32BIT)
            align = (2 - cum->words) & 3;
          else
            align = cum->words & 1;
          cum->words += align + rs6000_arg_size (mode, type);

          if (TARGET_DEBUG_ARG)
            {
              fprintf (stderr, "function_adv: words = %2d, align=%d, ",
                       cum->words, align);
              fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
                       cum->nargs_prototype, cum->prototype,
                       GET_MODE_NAME (mode));
            }
        }
    }
  else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
           && !cum->stdarg
           && cum->sysv_gregno <= GP_ARG_MAX_REG)
    cum->sysv_gregno++;

  else if (rs6000_darwin64_abi
           && mode == BLKmode
           && TREE_CODE (type) == RECORD_TYPE
           && (size = int_size_in_bytes (type)) > 0)
    {
      /* Variable sized types have size == -1 and are
         treated as if consisting entirely of ints.
         Pad to 16 byte boundary if needed.  */
      if (TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
          && (cum->words % 2) != 0)
        cum->words++;
      /* For varargs, we can just go up by the size of the struct. */
      if (!named)
        cum->words += (size + 7) / 8;
      else
        {
          /* It is tempting to say int register count just goes up by
             sizeof(type)/8, but this is wrong in a case such as
             { int; double; int; } [powerpc alignment].  We have to
             grovel through the fields for these too.  */
          cum->intoffset = 0;
          rs6000_darwin64_record_arg_advance_recurse (cum, type, 0);
          rs6000_darwin64_record_arg_advance_flush (cum,
                                                    size * BITS_PER_UNIT);
        }
    }
  else if (DEFAULT_ABI == ABI_V4)
    {
      if (TARGET_HARD_FLOAT && TARGET_FPRS
          && (mode == SFmode || mode == DFmode
              || (mode == TFmode && !TARGET_IEEEQUAD)))
        {
          if (cum->fregno + (mode == TFmode ? 1 : 0) <= FP_ARG_V4_MAX_REG)
            cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
          else
            {
              cum->fregno = FP_ARG_V4_MAX_REG + 1;
              if (mode == DFmode || mode == TFmode)
                cum->words += cum->words & 1;
              cum->words += rs6000_arg_size (mode, type);
            }
        }
      else
        {
          int n_words = rs6000_arg_size (mode, type);
          int gregno = cum->sysv_gregno;

          /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
             (r7,r8) or (r9,r10).  As does any other 2 word item such
             as complex int due to a historical mistake.  */
          if (n_words == 2)
            gregno += (1 - gregno) & 1;

          /* Multi-reg args are not split between registers and stack.  */
          if (gregno + n_words - 1 > GP_ARG_MAX_REG)
            {
              /* Long long and SPE vectors are aligned on the stack.
                 So are other 2 word items such as complex int due to
                 a historical mistake.  */
              if (n_words == 2)
                cum->words += cum->words & 1;
              cum->words += n_words;
            }

          /* Note: continuing to accumulate gregno past when we've started
             spilling to the stack indicates the fact that we've started
             spilling to the stack to expand_builtin_saveregs.  */
          cum->sysv_gregno = gregno + n_words;
        }

      if (TARGET_DEBUG_ARG)
        {
          fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
                   cum->words, cum->fregno);
          fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
                   cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
          fprintf (stderr, "mode = %4s, named = %d\n",
                   GET_MODE_NAME (mode), named);
        }
    }
  else
    {
      int n_words = rs6000_arg_size (mode, type);
      int start_words = cum->words;
      int align_words = rs6000_parm_start (mode, type, start_words);

      cum->words = align_words + n_words;

      if (SCALAR_FLOAT_MODE_P (mode)
          && !DECIMAL_FLOAT_MODE_P (mode)
          && TARGET_HARD_FLOAT && TARGET_FPRS)
        cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;

      if (TARGET_DEBUG_ARG)
        {
          fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
                   cum->words, cum->fregno);
          fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
                   cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
          fprintf (stderr, "named = %d, align = %d, depth = %d\n",
                   named, align_words - start_words, depth);
        }
    }
}
static rtx
spe_build_register_parallel (enum machine_mode mode, int gregno)
{
  rtx r1, r3, r5, r7;

  switch (mode)
    {
    case DFmode:
      r1 = gen_rtx_REG (DImode, gregno);
      r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
      return gen_rtx_PARALLEL (mode, gen_rtvec (1, r1));

    case DCmode:
    case TFmode:
      r1 = gen_rtx_REG (DImode, gregno);
      r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
      r3 = gen_rtx_REG (DImode, gregno + 2);
      r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
      return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r3));

    case TCmode:
      r1 = gen_rtx_REG (DImode, gregno);
      r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
      r3 = gen_rtx_REG (DImode, gregno + 2);
      r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
      r5 = gen_rtx_REG (DImode, gregno + 4);
      r5 = gen_rtx_EXPR_LIST (VOIDmode, r5, GEN_INT (16));
      r7 = gen_rtx_REG (DImode, gregno + 6);
      r7 = gen_rtx_EXPR_LIST (VOIDmode, r7, GEN_INT (24));
      return gen_rtx_PARALLEL (mode, gen_rtvec (4, r1, r3, r5, r7));

    default:
      gcc_unreachable ();
    }
}
/* Determine where to put a SIMD argument on the SPE.  */
static rtx
rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
                         tree type)
{
  int gregno = cum->sysv_gregno;

  /* On E500 v2, double arithmetic is done on the full 64-bit GPR, but
     are passed and returned in a pair of GPRs for ABI compatibility.  */
  if (TARGET_E500_DOUBLE && (mode == DFmode || mode == DCmode
                             || mode == TFmode || mode == TCmode))
    {
      int n_words = rs6000_arg_size (mode, type);

      /* Doubles go in an odd/even register pair (r5/r6, etc).  */
      if (mode == DFmode)
        gregno += (1 - gregno) & 1;

      /* Multi-reg args are not split between registers and stack.  */
      if (gregno + n_words - 1 > GP_ARG_MAX_REG)
        return NULL_RTX;

      return spe_build_register_parallel (mode, gregno);
    }
  if (cum->stdarg)
    {
      int n_words = rs6000_arg_size (mode, type);

      /* SPE vectors are put in odd registers.  */
      if (n_words == 2 && (gregno & 1) == 0)
        gregno += 1;

      if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
        {
          rtx r1, r2;
          enum machine_mode m = SImode;

          r1 = gen_rtx_REG (m, gregno);
          r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
          r2 = gen_rtx_REG (m, gregno + 1);
          r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
          return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
        }
      else
        return NULL_RTX;
    }
  else
    {
      if (gregno <= GP_ARG_MAX_REG)
        return gen_rtx_REG (mode, gregno);
      else
        return NULL_RTX;
    }
}
/* A subroutine of rs6000_darwin64_record_arg.  Assign the bits of the
   structure between cum->intoffset and bitpos to integer registers.  */

static void
rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *cum,
                                  HOST_WIDE_INT bitpos, rtx rvec[], int *k)
{
  enum machine_mode mode;
  unsigned int regno;
  unsigned int startbit, endbit;
  int this_regno, intregs, intoffset;
  rtx reg;

  if (cum->intoffset == -1)
    return;

  intoffset = cum->intoffset;
  cum->intoffset = -1;

  /* If this is the trailing part of a word, try to only load that
     much into the register.  Otherwise load the whole register.  Note
     that in the latter case we may pick up unwanted bits.  It's not a
     problem at the moment but may wish to revisit.  */

  if (intoffset % BITS_PER_WORD != 0)
    {
      mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
                            MODE_INT, 0);
      if (mode == BLKmode)
        {
          /* We couldn't find an appropriate mode, which happens,
             e.g., in packed structs when there are 3 bytes to load.
             Back intoffset back to the beginning of the word in this
             case.  */
          intoffset = intoffset & -BITS_PER_WORD;
          mode = word_mode;
        }
    }
  else
    mode = word_mode;

  startbit = intoffset & -BITS_PER_WORD;
  endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
  intregs = (endbit - startbit) / BITS_PER_WORD;
  this_regno = cum->words + intoffset / BITS_PER_WORD;

  if (intregs > 0 && intregs > GP_ARG_NUM_REG - this_regno)
    cum->use_stack = 1;

  intregs = MIN (intregs, GP_ARG_NUM_REG - this_regno);
  if (intregs <= 0)
    return;

  intoffset /= BITS_PER_UNIT;
  do
    {
      regno = GP_ARG_MIN_REG + this_regno;
      reg = gen_rtx_REG (mode, regno);
      rvec[(*k)++] =
        gen_rtx_EXPR_LIST (VOIDmode, reg, GEN_INT (intoffset));

      this_regno += 1;
      intoffset = (intoffset | (UNITS_PER_WORD-1)) + 1;
      mode = word_mode;
      intregs -= 1;
    }
  while (intregs > 0);
}
/* Recursive workhorse for the following.  */

static void
rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *cum, tree type,
                                    HOST_WIDE_INT startbitpos, rtx rvec[],
                                    int *k)
{
  tree f;

  for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
    if (TREE_CODE (f) == FIELD_DECL)
      {
        HOST_WIDE_INT bitpos = startbitpos;
        tree ftype = TREE_TYPE (f);
        enum machine_mode mode;
        if (ftype == error_mark_node)
          continue;
        mode = TYPE_MODE (ftype);

        if (DECL_SIZE (f) != 0
            && host_integerp (bit_position (f), 1))
          bitpos += int_bit_position (f);

        /* ??? FIXME: else assume zero offset.  */

        if (TREE_CODE (ftype) == RECORD_TYPE)
          rs6000_darwin64_record_arg_recurse (cum, ftype, bitpos, rvec, k);
        else if (cum->named && USE_FP_FOR_ARG_P (cum, mode, ftype))
          {
#if 0
            switch (mode)
              {
              case SCmode: mode = SFmode; break;
              case DCmode: mode = DFmode; break;
              case TCmode: mode = TFmode; break;
              }
#endif
            rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
            rvec[(*k)++]
              = gen_rtx_EXPR_LIST (VOIDmode,
                                   gen_rtx_REG (mode, cum->fregno++),
                                   GEN_INT (bitpos / BITS_PER_UNIT));
            if (mode == TFmode)
              cum->fregno++;
          }
        else if (cum->named && USE_ALTIVEC_FOR_ARG_P (cum, mode, ftype, 1))
          {
            rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
            rvec[(*k)++]
              = gen_rtx_EXPR_LIST (VOIDmode,
                                   gen_rtx_REG (mode, cum->vregno++),
                                   GEN_INT (bitpos / BITS_PER_UNIT));
          }
        else if (cum->intoffset == -1)
          cum->intoffset = bitpos;
      }
}
/* For the darwin64 ABI, we want to construct a PARALLEL consisting of
   the register(s) to be used for each field and subfield of a struct
   being passed by value, along with the offset of where the
   register's value may be found in the block.  FP fields go in FP
   register, vector fields go in vector registers, and everything
   else goes in int registers, packed as in memory.

   This code is also used for function return values.  RETVAL indicates
   whether this is the case.

   Much of this is taken from the SPARC V9 port, which has a similar
   calling convention.  */

static rtx
rs6000_darwin64_record_arg (CUMULATIVE_ARGS *orig_cum, tree type,
                            int named, bool retval)
{
  rtx rvec[FIRST_PSEUDO_REGISTER];
  int k = 1, kbase = 1;
  HOST_WIDE_INT typesize = int_size_in_bytes (type);
  /* This is a copy; modifications are not visible to our caller.  */
  CUMULATIVE_ARGS copy_cum = *orig_cum;
  CUMULATIVE_ARGS *cum = &copy_cum;

  /* Pad to 16 byte boundary if needed.  */
  if (!retval && TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
      && (cum->words % 2) != 0)
    cum->words++;

  cum->intoffset = 0;
  cum->use_stack = 0;
  cum->named = named;

  /* Put entries into rvec[] for individual FP and vector fields, and
     for the chunks of memory that go in int regs.  Note we start at
     element 1; 0 is reserved for an indication of using memory, and
     may or may not be filled in below. */
  rs6000_darwin64_record_arg_recurse (cum, type, 0, rvec, &k);
  rs6000_darwin64_record_arg_flush (cum, typesize * BITS_PER_UNIT, rvec, &k);

  /* If any part of the struct went on the stack put all of it there.
     This hack is because the generic code for
     FUNCTION_ARG_PARTIAL_NREGS cannot handle cases where the register
     parts of the struct are not at the beginning.  */
  if (cum->use_stack)
    {
      if (retval)
        return NULL_RTX;    /* doesn't go in registers at all */
      kbase = 0;
      rvec[0] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
    }
  if (k > 1 || cum->use_stack)
    return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (k - kbase, &rvec[kbase]));
  else
    return NULL_RTX;
}
/* Determine where to place an argument in 64-bit mode with 32-bit ABI.  */

static rtx
rs6000_mixed_function_arg (enum machine_mode mode, tree type, int align_words)
{
  int n_units;
  int i, k;
  rtx rvec[GP_ARG_NUM_REG + 1];

  if (align_words >= GP_ARG_NUM_REG)
    return NULL_RTX;

  n_units = rs6000_arg_size (mode, type);

  /* Optimize the simple case where the arg fits in one gpr, except in
     the case of BLKmode due to assign_parms assuming that registers are
     BITS_PER_WORD wide.  */
  if (n_units == 0
      || (n_units == 1 && mode != BLKmode))
    return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);

  k = 0;
  if (align_words + n_units > GP_ARG_NUM_REG)
    /* Not all of the arg fits in gprs.  Say that it goes in memory too,
       using a magic NULL_RTX component.
       This is not strictly correct.  Only some of the arg belongs in
       memory, not all of it.  However, the normal scheme using
       function_arg_partial_nregs can result in unusual subregs, eg.
       (subreg:SI (reg:DF) 4), which are not handled well.  The code to
       store the whole arg to memory is often more efficient than code
       to store pieces, and we know that space is available in the right
       place for the whole arg.  */
    rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);

  i = 0;
  do
    {
      rtx r = gen_rtx_REG (SImode, GP_ARG_MIN_REG + align_words);
      rtx off = GEN_INT (i++ * 4);
      rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
    }
  while (++align_words < GP_ARG_NUM_REG && --n_units != 0);

  return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
}
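
/* Illustrative case for the routine above (a sketch, -m32 -mpowerpc64):
   a DFmode arg at align_words == 7 has n_units == 2, and 7 + 2 exceeds
   GP_ARG_NUM_REG (8), so the PARALLEL carries the magic NULL_RTX memory
   component plus (reg:SI r10) at offset 0, i.e. half the value in the
   last GPR and the whole value also stored to memory.  */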
/* Determine where to put an argument to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.  It is
    not modified in this routine.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).

   On RS/6000 the first eight words of non-FP are normally in registers
   and the rest are pushed.  Under AIX, the first 13 FP args are in registers.
   Under V.4, the first 8 FP args are in registers.

   If this is floating-point and no prototype is specified, we use
   both an FP and integer register (or possibly FP reg and stack).  Library
   functions (when CALL_LIBCALL is set) always have the proper types for args,
   so we can pass the FP value just in one register.  emit_library_function
   doesn't support PARALLEL anyway.

   Note that for args passed by reference, function_arg will be called
   with MODE and TYPE set to that of the pointer to the arg, not the arg
   itself.  */

rtx
function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
              tree type, int named)
{
  enum rs6000_abi abi = DEFAULT_ABI;

  /* Return a marker to indicate whether CR1 needs to set or clear the
     bit that V.4 uses to say fp args were passed in registers.
     Assume that we don't need the marker for software floating point,
     or compiler generated library calls.  */
  if (mode == VOIDmode)
    {
      if (abi == ABI_V4
          && (cum->call_cookie & CALL_LIBCALL) == 0
          && (cum->stdarg
              || (cum->nargs_prototype < 0
                  && (cum->prototype || TARGET_NO_PROTOTYPE))))
        {
          /* For the SPE, we need to crxor CR6 always.  */
          if (TARGET_SPE_ABI)
            return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
          else if (TARGET_HARD_FLOAT && TARGET_FPRS)
            return GEN_INT (cum->call_cookie
                            | ((cum->fregno == FP_ARG_MIN_REG)
                               ? CALL_V4_SET_FP_ARGS
                               : CALL_V4_CLEAR_FP_ARGS));
        }

      return GEN_INT (cum->call_cookie);
    }

  if (rs6000_darwin64_abi && mode == BLKmode
      && TREE_CODE (type) == RECORD_TYPE)
    {
      rtx rslt = rs6000_darwin64_record_arg (cum, type, named, false);
      if (rslt != NULL_RTX)
        return rslt;
      /* Else fall through to usual handling.  */
    }

  if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
    if (TARGET_64BIT && ! cum->prototype)
      {
        /* Vector parameters get passed in vector register
           and also in GPRs or memory, in absence of prototype.  */
        int align_words;
        rtx slot;
        align_words = (cum->words + 1) & ~1;

        if (align_words >= GP_ARG_NUM_REG)
          {
            slot = NULL_RTX;
          }
        else
          {
            slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
          }
        return gen_rtx_PARALLEL (mode,
                 gen_rtvec (2,
                            gen_rtx_EXPR_LIST (VOIDmode,
                                               slot, const0_rtx),
                            gen_rtx_EXPR_LIST (VOIDmode,
                                               gen_rtx_REG (mode, cum->vregno),
                                               const0_rtx)));
      }
    else
      return gen_rtx_REG (mode, cum->vregno);
  else if (TARGET_ALTIVEC_ABI
           && (ALTIVEC_VECTOR_MODE (mode)
               || (type && TREE_CODE (type) == VECTOR_TYPE
                   && int_size_in_bytes (type) == 16)))
    {
      if (named || abi == ABI_V4)
        return NULL_RTX;
      else
        {
          /* Vector parameters to varargs functions under AIX or Darwin
             get passed in memory and possibly also in GPRs.  */
          int align, align_words, n_words;
          enum machine_mode part_mode;

          /* Vector parameters must be 16-byte aligned.  This places them at
             2 mod 4 in terms of words in 32-bit mode, since the parameter
             save area starts at offset 24 from the stack.  In 64-bit mode,
             they just have to start on an even word, since the parameter
             save area is 16-byte aligned.  */
          if (TARGET_32BIT)
            align = (2 - cum->words) & 3;
          else
            align = cum->words & 1;
          align_words = cum->words + align;

          /* Out of registers?  Memory, then.  */
          if (align_words >= GP_ARG_NUM_REG)
            return NULL_RTX;

          if (TARGET_32BIT && TARGET_POWERPC64)
            return rs6000_mixed_function_arg (mode, type, align_words);

          /* The vector value goes in GPRs.  Only the part of the
             value in GPRs is reported here.  */
          part_mode = mode;
          n_words = rs6000_arg_size (mode, type);
          if (align_words + n_words > GP_ARG_NUM_REG)
            /* Fortunately, there are only two possibilities, the value
               is either wholly in GPRs or half in GPRs and half not.  */
            part_mode = DImode;

          return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
        }
    }
  else if (TARGET_SPE_ABI && TARGET_SPE
           && (SPE_VECTOR_MODE (mode)
               || (TARGET_E500_DOUBLE && (mode == DFmode
                                          || mode == DCmode
                                          || mode == TFmode
                                          || mode == TCmode))))
    return rs6000_spe_function_arg (cum, mode, type);

  else if (abi == ABI_V4)
    {
      if (TARGET_HARD_FLOAT && TARGET_FPRS
          && (mode == SFmode || mode == DFmode
              || (mode == TFmode && !TARGET_IEEEQUAD)))
        {
          if (cum->fregno + (mode == TFmode ? 1 : 0) <= FP_ARG_V4_MAX_REG)
            return gen_rtx_REG (mode, cum->fregno);
          else
            return NULL_RTX;
        }
      else
        {
          int n_words = rs6000_arg_size (mode, type);
          int gregno = cum->sysv_gregno;

          /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
             (r7,r8) or (r9,r10).  As does any other 2 word item such
             as complex int due to a historical mistake.  */
          if (n_words == 2)
            gregno += (1 - gregno) & 1;

          /* Multi-reg args are not split between registers and stack.  */
          if (gregno + n_words - 1 > GP_ARG_MAX_REG)
            return NULL_RTX;

          if (TARGET_32BIT && TARGET_POWERPC64)
            return rs6000_mixed_function_arg (mode, type,
                                              gregno - GP_ARG_MIN_REG);
          return gen_rtx_REG (mode, gregno);
        }
    }
  else
    {
      int align_words = rs6000_parm_start (mode, type, cum->words);

      if (USE_FP_FOR_ARG_P (cum, mode, type))
        {
          rtx rvec[GP_ARG_NUM_REG + 1];
          rtx r;
          int k;
          bool needs_psave;
          enum machine_mode fmode = mode;
          unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;

          if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
            {
              /* Currently, we only ever need one reg here because complex
                 doubles are split.  */
              gcc_assert (cum->fregno == FP_ARG_MAX_REG && fmode == TFmode);

              /* Long double split over regs and memory.  */
              fmode = DFmode;
            }

          /* Do we also need to pass this arg in the parameter save
             area?  */
          needs_psave = (type
                         && (cum->nargs_prototype <= 0
                             || (DEFAULT_ABI == ABI_AIX
                                 && TARGET_XL_COMPAT
                                 && align_words >= GP_ARG_NUM_REG)));

          if (!needs_psave && mode == fmode)
            return gen_rtx_REG (fmode, cum->fregno);

          k = 0;
          if (needs_psave)
            {
              /* Describe the part that goes in gprs or the stack.
                 This piece must come first, before the fprs.  */
              if (align_words < GP_ARG_NUM_REG)
                {
                  unsigned long n_words = rs6000_arg_size (mode, type);

                  if (align_words + n_words > GP_ARG_NUM_REG
                      || (TARGET_32BIT && TARGET_POWERPC64))
                    {
                      /* If this is partially on the stack, then we only
                         include the portion actually in registers here.  */
                      enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
                      rtx off;
                      int i = 0;
                      if (align_words + n_words > GP_ARG_NUM_REG)
                        /* Not all of the arg fits in gprs.  Say that it
                           goes in memory too, using a magic NULL_RTX
                           component.  Also see comment in
                           rs6000_mixed_function_arg for why the normal
                           function_arg_partial_nregs scheme doesn't work
                           in this case.  */
                        rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX,
                                                       const0_rtx);
                      do
                        {
                          r = gen_rtx_REG (rmode,
                                           GP_ARG_MIN_REG + align_words);
                          off = GEN_INT (i++ * GET_MODE_SIZE (rmode));
                          rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
                        }
                      while (++align_words < GP_ARG_NUM_REG && --n_words != 0);
                    }
                  else
                    {
                      /* The whole arg fits in gprs.  */
                      r = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
                      rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
                    }
                }
              else
                /* It's entirely in memory.  */
                rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
            }

          /* Describe where this piece goes in the fprs.  */
          r = gen_rtx_REG (fmode, cum->fregno);
          rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);

          return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
        }
      else if (align_words < GP_ARG_NUM_REG)
        {
          if (TARGET_32BIT && TARGET_POWERPC64)
            return rs6000_mixed_function_arg (mode, type, align_words);

          if (mode == BLKmode)
            mode = Pmode;

          return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
        }
      else
        return NULL_RTX;
    }
}
/* For an arg passed partly in registers and partly in memory, this is
   the number of bytes passed in registers.  For args passed entirely in
   registers or entirely in memory, zero.  When an arg is described by a
   PARALLEL, perhaps using more than one register type, this function
   returns the number of bytes used by the first element of the PARALLEL.  */

static int
rs6000_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
                          tree type, bool named)
{
  int ret = 0;
  int align_words;

  if (DEFAULT_ABI == ABI_V4)
    return 0;

  if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
      && cum->nargs_prototype >= 0)
    return 0;

  /* In this complicated case we just disable the partial_nregs code.  */
  if (rs6000_darwin64_abi && mode == BLKmode
      && TREE_CODE (type) == RECORD_TYPE
      && int_size_in_bytes (type) > 0)
    return 0;

  align_words = rs6000_parm_start (mode, type, cum->words);

  if (USE_FP_FOR_ARG_P (cum, mode, type))
    {
      /* If we are passing this arg in the fixed parameter save area
         (gprs or memory) as well as fprs, then this function should
         return the number of partial bytes passed in the parameter
         save area rather than partial bytes passed in fprs.  */
      if (type
          && (cum->nargs_prototype <= 0
              || (DEFAULT_ABI == ABI_AIX
                  && TARGET_XL_COMPAT
                  && align_words >= GP_ARG_NUM_REG)))
        return 0;
      else if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3)
               > FP_ARG_MAX_REG + 1)
        ret = (FP_ARG_MAX_REG + 1 - cum->fregno) * 8;
      else if (cum->nargs_prototype >= 0)
        return 0;
    }

  if (align_words < GP_ARG_NUM_REG
      && GP_ARG_NUM_REG < align_words + rs6000_arg_size (mode, type))
    ret = (GP_ARG_NUM_REG - align_words) * (TARGET_32BIT ? 4 : 8);

  if (ret != 0 && TARGET_DEBUG_ARG)
    fprintf (stderr, "rs6000_arg_partial_bytes: %d\n", ret);

  return ret;
}
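
/* Worked example (illustrative, 32-bit AIX): an 8-byte arg whose first
   word lands at align_words == 7 extends past GP_ARG_NUM_REG (8), so
   ret = (8 - 7) * 4 = 4 bytes are passed in registers and the rest in
   memory.  */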
/* A C expression that indicates when an argument must be passed by
   reference.  If nonzero for an argument, a copy of that argument is
   made in memory and a pointer to the argument is passed instead of
   the argument itself.  The pointer is passed in whatever way is
   appropriate for passing a pointer to that type.

   Under V.4, aggregates and long double are passed by reference.

   As an extension to all 32-bit ABIs, AltiVec vectors are passed by
   reference unless the AltiVec vector extension ABI is in force.

   As an extension to all ABIs, variable sized types are passed by
   reference.  */

static bool
rs6000_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
                          enum machine_mode mode, tree type,
                          bool named ATTRIBUTE_UNUSED)
{
  if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && mode == TFmode)
    {
      if (TARGET_DEBUG_ARG)
        fprintf (stderr, "function_arg_pass_by_reference: V4 long double\n");
      return 1;
    }

  if (!type)
    return 0;

  if (DEFAULT_ABI == ABI_V4 && AGGREGATE_TYPE_P (type))
    {
      if (TARGET_DEBUG_ARG)
        fprintf (stderr, "function_arg_pass_by_reference: V4 aggregate\n");
      return 1;
    }

  if (int_size_in_bytes (type) < 0)
    {
      if (TARGET_DEBUG_ARG)
        fprintf (stderr, "function_arg_pass_by_reference: variable size\n");
      return 1;
    }

  /* Allow -maltivec -mabi=no-altivec without warning.  Altivec vector
     modes only exist for GCC vector types if -maltivec.  */
  if (TARGET_32BIT && !TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
    {
      if (TARGET_DEBUG_ARG)
        fprintf (stderr, "function_arg_pass_by_reference: AltiVec\n");
      return 1;
    }

  /* Pass synthetic vectors in memory.  */
  if (TREE_CODE (type) == VECTOR_TYPE
      && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
    {
      static bool warned_for_pass_big_vectors = false;
      if (TARGET_DEBUG_ARG)
        fprintf (stderr, "function_arg_pass_by_reference: synthetic vector\n");
      if (!warned_for_pass_big_vectors)
        {
          warning (0, "GCC vector passed by reference: "
                   "non-standard ABI extension with no compatibility guarantee");
          warned_for_pass_big_vectors = true;
        }
      return 1;
    }

  return 0;
}
static void
rs6000_move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;
  enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;

  if (nregs == 0)
    return;

  for (i = 0; i < nregs; i++)
    {
      rtx tem = adjust_address_nv (x, reg_mode, i * GET_MODE_SIZE (reg_mode));
      if (reload_completed)
        {
          if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
            tem = NULL_RTX;
          else
            tem = simplify_gen_subreg (reg_mode, x, BLKmode,
                                       i * GET_MODE_SIZE (reg_mode));
        }
      else
        tem = replace_equiv_address (tem, XEXP (tem, 0));

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
    }
}
/* Perform any actions needed for a function that is receiving a
   variable number of arguments.

   CUM is as above.

   MODE and TYPE are the mode and type of the current parameter.

   PRETEND_SIZE is a variable that should be set to the amount of stack
   that must be pushed by the prolog to pretend that our caller pushed
   it.

   Normally, this macro will push all remaining incoming registers on the
   stack and set PRETEND_SIZE to the length of the registers pushed.  */

static void
setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
                        tree type, int *pretend_size ATTRIBUTE_UNUSED,
                        int no_rtl)
{
  CUMULATIVE_ARGS next_cum;
  int reg_size = TARGET_32BIT ? 4 : 8;
  rtx save_area = NULL_RTX, mem;
  int first_reg_offset, set;

  /* Skip the last named argument.  */
  next_cum = *cum;
  function_arg_advance (&next_cum, mode, type, 1, 0);

  if (DEFAULT_ABI == ABI_V4)
    {
      first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;

      if (! no_rtl)
        {
          int gpr_reg_num = 0, gpr_size = 0, fpr_size = 0;
          HOST_WIDE_INT offset = 0;

          /* Try to optimize the size of the varargs save area.
             The ABI requires that ap.reg_save_area is doubleword
             aligned, but we don't need to allocate space for all
             the bytes, only those to which we actually will save
             anything.  */
          if (cfun->va_list_gpr_size && first_reg_offset < GP_ARG_NUM_REG)
            gpr_reg_num = GP_ARG_NUM_REG - first_reg_offset;
          if (TARGET_HARD_FLOAT && TARGET_FPRS
              && next_cum.fregno <= FP_ARG_V4_MAX_REG
              && cfun->va_list_fpr_size)
            {
              if (gpr_reg_num)
                fpr_size = (next_cum.fregno - FP_ARG_MIN_REG)
                           * UNITS_PER_FP_WORD;
              if (cfun->va_list_fpr_size
                  < FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
                fpr_size += cfun->va_list_fpr_size * UNITS_PER_FP_WORD;
              else
                fpr_size += (FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
                            * UNITS_PER_FP_WORD;
            }
          if (gpr_reg_num)
            {
              offset = -((first_reg_offset * reg_size) & ~7);
              if (!fpr_size && gpr_reg_num > cfun->va_list_gpr_size)
                {
                  gpr_reg_num = cfun->va_list_gpr_size;
                  if (reg_size == 4 && (first_reg_offset & 1))
                    gpr_reg_num++;
                }
              gpr_size = (gpr_reg_num * reg_size + 7) & ~7;
            }
          else if (fpr_size)
            offset = - (int) (next_cum.fregno - FP_ARG_MIN_REG)
                       * UNITS_PER_FP_WORD
                     - (int) (GP_ARG_NUM_REG * reg_size);

          if (gpr_size + fpr_size)
            {
              rtx reg_save_area
                = assign_stack_local (BLKmode, gpr_size + fpr_size, 64);
              gcc_assert (GET_CODE (reg_save_area) == MEM);
              reg_save_area = XEXP (reg_save_area, 0);
              if (GET_CODE (reg_save_area) == PLUS)
                {
                  gcc_assert (XEXP (reg_save_area, 0)
                              == virtual_stack_vars_rtx);
                  gcc_assert (GET_CODE (XEXP (reg_save_area, 1)) == CONST_INT);
                  offset += INTVAL (XEXP (reg_save_area, 1));
                }
              else
                gcc_assert (reg_save_area == virtual_stack_vars_rtx);
            }

          cfun->machine->varargs_save_offset = offset;
          save_area = plus_constant (virtual_stack_vars_rtx, offset);
        }
    }
  else
    {
      first_reg_offset = next_cum.words;
      save_area = virtual_incoming_args_rtx;

      if (targetm.calls.must_pass_in_stack (mode, type))
        first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
    }

  set = get_varargs_alias_set ();
  if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG
      && cfun->va_list_gpr_size)
    {
      int nregs = GP_ARG_NUM_REG - first_reg_offset;

      if (va_list_gpr_counter_field)
        {
          /* V4 va_list_gpr_size counts number of registers needed.  */
          if (nregs > cfun->va_list_gpr_size)
            nregs = cfun->va_list_gpr_size;
        }
      else
        {
          /* char * va_list instead counts number of bytes needed.  */
          if (nregs > cfun->va_list_gpr_size / reg_size)
            nregs = cfun->va_list_gpr_size / reg_size;
        }

      mem = gen_rtx_MEM (BLKmode,
                         plus_constant (save_area,
                                        first_reg_offset * reg_size));
      MEM_NOTRAP_P (mem) = 1;
      set_mem_alias_set (mem, set);
      set_mem_align (mem, BITS_PER_WORD);

      rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
                                  nregs);
    }

  /* Save FP registers if needed.  */
  if (DEFAULT_ABI == ABI_V4
      && TARGET_HARD_FLOAT && TARGET_FPRS
      && ! no_rtl
      && next_cum.fregno <= FP_ARG_V4_MAX_REG
      && cfun->va_list_fpr_size)
    {
      int fregno = next_cum.fregno, nregs;
      rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
      rtx lab = gen_label_rtx ();
      int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG)
                                               * UNITS_PER_FP_WORD);

      emit_jump_insn
        (gen_rtx_SET (VOIDmode,
                      pc_rtx,
                      gen_rtx_IF_THEN_ELSE (VOIDmode,
                                            gen_rtx_NE (VOIDmode, cr1,
                                                        const0_rtx),
                                            gen_rtx_LABEL_REF (VOIDmode, lab),
                                            pc_rtx)));

      for (nregs = 0;
           fregno <= FP_ARG_V4_MAX_REG && nregs < cfun->va_list_fpr_size;
           fregno++, off += UNITS_PER_FP_WORD, nregs++)
        {
          mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
          MEM_NOTRAP_P (mem) = 1;
          set_mem_alias_set (mem, set);
          set_mem_align (mem, GET_MODE_ALIGNMENT (DFmode));
          emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
        }

      emit_label (lab);
    }
}

/* Create the va_list data type.  */

static tree
rs6000_build_builtin_va_list (void)
{
  tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;

  /* For AIX, prefer 'char *' because that's what the system
     header files like.  */
  if (DEFAULT_ABI != ABI_V4)
    return build_pointer_type (char_type_node);

  record = (*lang_hooks.types.make_type) (RECORD_TYPE);
  type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);

  f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
		      unsigned_char_type_node);
  f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
		      unsigned_char_type_node);
  /* Give the two bytes of padding a name, so that -Wpadded won't warn on
     every user file.  */
  f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
		      short_unsigned_type_node);
  f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
		      ptr_type_node);
  f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
		      ptr_type_node);

  va_list_gpr_counter_field = f_gpr;
  va_list_fpr_counter_field = f_fpr;

  DECL_FIELD_CONTEXT (f_gpr) = record;
  DECL_FIELD_CONTEXT (f_fpr) = record;
  DECL_FIELD_CONTEXT (f_res) = record;
  DECL_FIELD_CONTEXT (f_ovf) = record;
  DECL_FIELD_CONTEXT (f_sav) = record;

  TREE_CHAIN (record) = type_decl;
  TYPE_NAME (record) = type_decl;
  TYPE_FIELDS (record) = f_gpr;
  TREE_CHAIN (f_gpr) = f_fpr;
  TREE_CHAIN (f_fpr) = f_res;
  TREE_CHAIN (f_res) = f_ovf;
  TREE_CHAIN (f_ovf) = f_sav;

  layout_type (record);

  /* The correct type is an array type of one element.  */
  return build_array_type (record, build_index_type (size_zero_node));
}
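
/* For illustration, a sketch of the user-visible equivalent of the record
   built above on the V4 ABI.  The typedef names are assumptions and this
   block is never compiled.  */
#if 0
typedef struct __va_list_tag
{
  unsigned char gpr;		/* GP arg registers consumed so far (0..8) */
  unsigned char fpr;		/* FP arg registers consumed so far (0..8) */
  unsigned short reserved;	/* the named two bytes of padding */
  void *overflow_arg_area;	/* next argument passed on the stack */
  void *reg_save_area;		/* base of the saved argument registers */
} __va_list_tag;
typedef __va_list_tag __hypothetical_va_list[1];  /* array of one element */
#endif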

/* Implement va_start.  */

void
rs6000_va_start (tree valist, rtx nextarg)
{
  HOST_WIDE_INT words, n_gpr, n_fpr;
  tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, t;

  /* Only SVR4 needs something special.  */
  if (DEFAULT_ABI != ABI_V4)
    {
      std_expand_builtin_va_start (valist, nextarg);
      return;
    }

  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_res = TREE_CHAIN (f_fpr);
  f_ovf = TREE_CHAIN (f_res);
  f_sav = TREE_CHAIN (f_ovf);

  valist = build_va_arg_indirect_ref (valist);
  gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
  fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
  ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
  sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);

  /* Count number of gp and fp argument registers used.  */
  words = current_function_args_info.words;
  n_gpr = MIN (current_function_args_info.sysv_gregno - GP_ARG_MIN_REG,
	       GP_ARG_NUM_REG);
  n_fpr = MIN (current_function_args_info.fregno - FP_ARG_MIN_REG,
	       FP_ARG_NUM_REG);

  if (TARGET_DEBUG_ARG)
    fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
	     HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
	     words, n_gpr, n_fpr);

  if (cfun->va_list_gpr_size)
    {
      t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (gpr), gpr,
		  build_int_cst (NULL_TREE, n_gpr));
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }

  if (cfun->va_list_fpr_size)
    {
      t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (fpr), fpr,
		  build_int_cst (NULL_TREE, n_fpr));
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }

  /* Find the overflow area.  */
  t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
  if (words != 0)
    t = build2 (PLUS_EXPR, TREE_TYPE (ovf), t,
		build_int_cst (NULL_TREE, words * UNITS_PER_WORD));
  t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* If there were no va_arg invocations, don't set up the register
     save area.  */
  if (!cfun->va_list_gpr_size
      && !cfun->va_list_fpr_size
      && n_gpr < GP_ARG_NUM_REG
      && n_fpr < FP_ARG_V4_MAX_REG)
    return;

  /* Find the register save area.  */
  t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
  if (cfun->machine->varargs_save_offset)
    t = build2 (PLUS_EXPR, TREE_TYPE (sav), t,
		build_int_cst (NULL_TREE, cfun->machine->varargs_save_offset));
  t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (sav), sav, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
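
/* A rough sketch (an assumption-laden restatement, not original source) of
   the trees expanded above, written as if the va_list were plain C:

       ap->gpr = n_gpr;	   GP arg registers already used by named args
       ap->fpr = n_fpr;	   FP arg registers already used by named args
       ap->overflow_arg_area = incoming_args + words * UNITS_PER_WORD;
       ap->reg_save_area = frame_base + cfun->machine->varargs_save_offset;

   The last assignment is skipped entirely when the early return above
   fires, i.e. when no va_arg will ever read the register save area.  */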

/* Implement va_arg.  */

tree
rs6000_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
{
  tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, reg, t, u;
  int size, rsize, n_reg, sav_ofs, sav_scale;
  tree lab_false, lab_over, addr;
  int align;
  tree ptrtype = build_pointer_type (type);

  if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
    {
      t = rs6000_gimplify_va_arg (valist, ptrtype, pre_p, post_p);
      return build_va_arg_indirect_ref (t);
    }

  if (DEFAULT_ABI != ABI_V4)
    {
      if (targetm.calls.split_complex_arg && TREE_CODE (type) == COMPLEX_TYPE)
	{
	  tree elem_type = TREE_TYPE (type);
	  enum machine_mode elem_mode = TYPE_MODE (elem_type);
	  int elem_size = GET_MODE_SIZE (elem_mode);

	  if (elem_size < UNITS_PER_WORD)
	    {
	      tree real_part, imag_part;
	      tree post = NULL_TREE;

	      real_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
						  &post);
	      /* Copy the value into a temporary, lest the formal temporary
		 be reused out from under us.  */
	      real_part = get_initialized_tmp_var (real_part, pre_p, &post);
	      append_to_statement_list (post, pre_p);

	      imag_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
						  post_p);

	      return build2 (COMPLEX_EXPR, type, real_part, imag_part);
	    }
	}

      return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
    }

  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_res = TREE_CHAIN (f_fpr);
  f_ovf = TREE_CHAIN (f_res);
  f_sav = TREE_CHAIN (f_ovf);

  valist = build_va_arg_indirect_ref (valist);
  gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
  fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
  ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
  sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);

  size = int_size_in_bytes (type);
  rsize = (size + 3) / 4;
  align = 1;

  if (TARGET_HARD_FLOAT && TARGET_FPRS
      && (TYPE_MODE (type) == SFmode
	  || TYPE_MODE (type) == DFmode
	  || TYPE_MODE (type) == TFmode))
    {
      /* FP args go in FP registers, if present.  */
      reg = fpr;
      n_reg = (size + 7) / 8;
      sav_ofs = 8*4;
      sav_scale = 8;
      if (TYPE_MODE (type) != SFmode)
	align = 8;
    }
  else
    {
      /* Otherwise into GP registers.  */
      reg = gpr;
      n_reg = rsize;
      sav_ofs = 0;
      sav_scale = 4;
      if (n_reg == 2)
	align = 8;
    }

  /* Pull the value out of the saved registers....  */

  lab_over = NULL;
  addr = create_tmp_var (ptr_type_node, "addr");
  DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();

  /*  AltiVec vectors never go in registers when -mabi=altivec.  */
  if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
    align = 16;
  else
    {
      lab_false = create_artificial_label ();
      lab_over = create_artificial_label ();

      /* Long long and SPE vectors are aligned in the registers.
	 As are any other 2 gpr item such as complex int due to a
	 historical mistake.  */
      u = reg;
      if (n_reg == 2 && reg == gpr)
	{
	  u = build2 (BIT_AND_EXPR, TREE_TYPE (reg), reg,
		      size_int (n_reg - 1));
	  u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, u);
	}

      t = fold_convert (TREE_TYPE (reg), size_int (8 - n_reg + 1));
      t = build2 (GE_EXPR, boolean_type_node, u, t);
      u = build1 (GOTO_EXPR, void_type_node, lab_false);
      t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
      gimplify_and_add (t, pre_p);

      t = sav;
      if (sav_ofs)
	t = build2 (PLUS_EXPR, ptr_type_node, sav, size_int (sav_ofs));

      u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, size_int (n_reg));
      u = build1 (CONVERT_EXPR, integer_type_node, u);
      u = build2 (MULT_EXPR, integer_type_node, u, size_int (sav_scale));
      t = build2 (PLUS_EXPR, ptr_type_node, t, u);

      t = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
      gimplify_and_add (t, pre_p);

      t = build1 (GOTO_EXPR, void_type_node, lab_over);
      gimplify_and_add (t, pre_p);

      t = build1 (LABEL_EXPR, void_type_node, lab_false);
      append_to_statement_list (t, pre_p);

      if ((n_reg == 2 && reg != gpr) || n_reg > 2)
	{
	  /* Ensure that we don't find any more args in regs.
	     Alignment has taken care of the n_reg == 2 gpr case.  */
	  t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (reg), reg, size_int (8));
	  gimplify_and_add (t, pre_p);
	}
    }

  /* ... otherwise out of the overflow area.  */

  /* Care for on-stack alignment if needed.  */
  t = ovf;
  if (align != 1)
    {
      t = build2 (PLUS_EXPR, TREE_TYPE (t), t, size_int (align - 1));
      t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t,
		  build_int_cst (NULL_TREE, -align));
    }
  gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);

  u = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
  gimplify_and_add (u, pre_p);

  t = build2 (PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
  t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovf), ovf, t);
  gimplify_and_add (t, pre_p);

  if (lab_over)
    {
      t = build1 (LABEL_EXPR, void_type_node, lab_over);
      append_to_statement_list (t, pre_p);
    }

  if (STRICT_ALIGNMENT
      && (TYPE_ALIGN (type)
	  > (unsigned) BITS_PER_UNIT * (align < 4 ? 4 : align)))
    {
      /* The value (of type complex double, for example) may not be
	 aligned in memory in the saved registers, so copy via a
	 temporary.  (This is the same code as used for SPARC.)  */
      tree tmp = create_tmp_var (type, "va_arg_tmp");
      tree dest_addr = build_fold_addr_expr (tmp);

      tree copy = build_function_call_expr
	(implicit_built_in_decls[BUILT_IN_MEMCPY],
	 tree_cons (NULL_TREE, dest_addr,
		    tree_cons (NULL_TREE, addr,
			       tree_cons (NULL_TREE, size_int (rsize * 4),
					  NULL_TREE))));

      gimplify_and_add (copy, pre_p);
      addr = dest_addr;
    }

  addr = fold_convert (ptrtype, addr);
  return build_va_arg_indirect_ref (addr);
}
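
/* A sketch (not original source) of the code sequence gimplified above for
   a V4 non-vector argument; names are illustrative only:

       if (ap->reg > 8 - n_reg)		   no room left in registers
	 {
	   ap->reg = 8;			   (only for FP or >2-reg items)
	   addr = (ap->overflow_arg_area + align - 1) & -align;
	   ap->overflow_arg_area = addr + size;
	 }
       else
	 {
	   addr = ap->reg_save_area + sav_ofs + ap->reg * sav_scale;
	   ap->reg += n_reg;
	 }
       return *(type *) addr;

   plus the extra memcpy through a temporary when STRICT_ALIGNMENT finds
   the saved slot under-aligned for the type.  */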

static void
def_builtin (int mask, const char *name, tree type, int code)
{
  if (mask & target_flags)
    {
      if (rs6000_builtin_decls[code])
	abort ();

      rs6000_builtin_decls[code] =
	add_builtin_function (name, type, code, BUILT_IN_MD,
			      NULL, NULL_TREE);
    }
}
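
/* Example (illustrative only; the type-node name is an assumption about
   code elsewhere in this file): one of the AltiVec table entries below
   would be registered as

       def_builtin (MASK_ALTIVEC, "__builtin_altivec_vaddfp",
		    v4sf_ftype_v4sf_v4sf, ALTIVEC_BUILTIN_VADDFP);

   recording the decl in rs6000_builtin_decls and exposing it to the
   front end via add_builtin_function.  */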

/* Simple ternary operations: VECd = foo (VECa, VECb, VECc).  */

static const struct builtin_description bdesc_3arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },

  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madd", ALTIVEC_BUILTIN_VEC_MADD },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madds", ALTIVEC_BUILTIN_VEC_MADDS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mladd", ALTIVEC_BUILTIN_VEC_MLADD },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mradds", ALTIVEC_BUILTIN_VEC_MRADDS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msum", ALTIVEC_BUILTIN_VEC_MSUM },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshm", ALTIVEC_BUILTIN_VEC_VMSUMSHM },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhm", ALTIVEC_BUILTIN_VEC_VMSUMUHM },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsummbm", ALTIVEC_BUILTIN_VEC_VMSUMMBM },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumubm", ALTIVEC_BUILTIN_VEC_VMSUMUBM },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msums", ALTIVEC_BUILTIN_VEC_MSUMS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshs", ALTIVEC_BUILTIN_VEC_VMSUMSHS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhs", ALTIVEC_BUILTIN_VEC_VMSUMUHS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nmsub", ALTIVEC_BUILTIN_VEC_NMSUB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_perm", ALTIVEC_BUILTIN_VEC_PERM },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sel", ALTIVEC_BUILTIN_VEC_SEL },
};

/* DST operations: void foo (void *, const int, const char).  */

static const struct builtin_description bdesc_dst[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT },

  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dst", ALTIVEC_BUILTIN_VEC_DST },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstt", ALTIVEC_BUILTIN_VEC_DSTT },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstst", ALTIVEC_BUILTIN_VEC_DSTST },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dststt", ALTIVEC_BUILTIN_VEC_DSTSTT }
};

/* Simple binary operations: VECc = foo (VECa, VECb).  */

static struct builtin_description bdesc_2arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
  { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
  { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
  { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
  { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
  { MASK_ALTIVEC, CODE_FOR_andcv4si3, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
  { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
  { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
  { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
  { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
  { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
  { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
  { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
  { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
  { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
  { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
  { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
  { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
  { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
  { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
  { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
  { MASK_ALTIVEC, CODE_FOR_altivec_norv4si3, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
  { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
  { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
  { MASK_ALTIVEC, CODE_FOR_lshrv16qi3, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
  { MASK_ALTIVEC, CODE_FOR_lshrv8hi3, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
  { MASK_ALTIVEC, CODE_FOR_lshrv4si3, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
  { MASK_ALTIVEC, CODE_FOR_ashrv16qi3, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
  { MASK_ALTIVEC, CODE_FOR_ashrv8hi3, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
  { MASK_ALTIVEC, CODE_FOR_ashrv4si3, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
  { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
  { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
  { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
  { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
  { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },

  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_add", ALTIVEC_BUILTIN_VEC_ADD },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddfp", ALTIVEC_BUILTIN_VEC_VADDFP },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduwm", ALTIVEC_BUILTIN_VEC_VADDUWM },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhm", ALTIVEC_BUILTIN_VEC_VADDUHM },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubm", ALTIVEC_BUILTIN_VEC_VADDUBM },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_addc", ALTIVEC_BUILTIN_VEC_ADDC },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_adds", ALTIVEC_BUILTIN_VEC_ADDS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsws", ALTIVEC_BUILTIN_VEC_VADDSWS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduws", ALTIVEC_BUILTIN_VEC_VADDUWS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddshs", ALTIVEC_BUILTIN_VEC_VADDSHS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhs", ALTIVEC_BUILTIN_VEC_VADDUHS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsbs", ALTIVEC_BUILTIN_VEC_VADDSBS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubs", ALTIVEC_BUILTIN_VEC_VADDUBS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_and", ALTIVEC_BUILTIN_VEC_AND },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_andc", ALTIVEC_BUILTIN_VEC_ANDC },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_avg", ALTIVEC_BUILTIN_VEC_AVG },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsw", ALTIVEC_BUILTIN_VEC_VAVGSW },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguw", ALTIVEC_BUILTIN_VEC_VAVGUW },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsh", ALTIVEC_BUILTIN_VEC_VAVGSH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguh", ALTIVEC_BUILTIN_VEC_VAVGUH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsb", ALTIVEC_BUILTIN_VEC_VAVGSB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgub", ALTIVEC_BUILTIN_VEC_VAVGUB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpb", ALTIVEC_BUILTIN_VEC_CMPB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpeq", ALTIVEC_BUILTIN_VEC_CMPEQ },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpeqfp", ALTIVEC_BUILTIN_VEC_VCMPEQFP },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequw", ALTIVEC_BUILTIN_VEC_VCMPEQUW },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequh", ALTIVEC_BUILTIN_VEC_VCMPEQUH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequb", ALTIVEC_BUILTIN_VEC_VCMPEQUB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpge", ALTIVEC_BUILTIN_VEC_CMPGE },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpgt", ALTIVEC_BUILTIN_VEC_CMPGT },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtfp", ALTIVEC_BUILTIN_VEC_VCMPGTFP },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsw", ALTIVEC_BUILTIN_VEC_VCMPGTSW },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuw", ALTIVEC_BUILTIN_VEC_VCMPGTUW },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsh", ALTIVEC_BUILTIN_VEC_VCMPGTSH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuh", ALTIVEC_BUILTIN_VEC_VCMPGTUH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsb", ALTIVEC_BUILTIN_VEC_VCMPGTSB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtub", ALTIVEC_BUILTIN_VEC_VCMPGTUB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmple", ALTIVEC_BUILTIN_VEC_CMPLE },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmplt", ALTIVEC_BUILTIN_VEC_CMPLT },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_max", ALTIVEC_BUILTIN_VEC_MAX },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxfp", ALTIVEC_BUILTIN_VEC_VMAXFP },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsw", ALTIVEC_BUILTIN_VEC_VMAXSW },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuw", ALTIVEC_BUILTIN_VEC_VMAXUW },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsh", ALTIVEC_BUILTIN_VEC_VMAXSH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuh", ALTIVEC_BUILTIN_VEC_VMAXUH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsb", ALTIVEC_BUILTIN_VEC_VMAXSB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxub", ALTIVEC_BUILTIN_VEC_VMAXUB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergeh", ALTIVEC_BUILTIN_VEC_MERGEH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghw", ALTIVEC_BUILTIN_VEC_VMRGHW },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghh", ALTIVEC_BUILTIN_VEC_VMRGHH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghb", ALTIVEC_BUILTIN_VEC_VMRGHB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergel", ALTIVEC_BUILTIN_VEC_MERGEL },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglw", ALTIVEC_BUILTIN_VEC_VMRGLW },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglh", ALTIVEC_BUILTIN_VEC_VMRGLH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglb", ALTIVEC_BUILTIN_VEC_VMRGLB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_min", ALTIVEC_BUILTIN_VEC_MIN },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminfp", ALTIVEC_BUILTIN_VEC_VMINFP },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsw", ALTIVEC_BUILTIN_VEC_VMINSW },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuw", ALTIVEC_BUILTIN_VEC_VMINUW },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsh", ALTIVEC_BUILTIN_VEC_VMINSH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuh", ALTIVEC_BUILTIN_VEC_VMINUH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsb", ALTIVEC_BUILTIN_VEC_VMINSB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminub", ALTIVEC_BUILTIN_VEC_VMINUB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mule", ALTIVEC_BUILTIN_VEC_MULE },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleub", ALTIVEC_BUILTIN_VEC_VMULEUB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesb", ALTIVEC_BUILTIN_VEC_VMULESB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleuh", ALTIVEC_BUILTIN_VEC_VMULEUH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesh", ALTIVEC_BUILTIN_VEC_VMULESH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mulo", ALTIVEC_BUILTIN_VEC_MULO },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosh", ALTIVEC_BUILTIN_VEC_VMULOSH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulouh", ALTIVEC_BUILTIN_VEC_VMULOUH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosb", ALTIVEC_BUILTIN_VEC_VMULOSB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuloub", ALTIVEC_BUILTIN_VEC_VMULOUB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nor", ALTIVEC_BUILTIN_VEC_NOR },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_or", ALTIVEC_BUILTIN_VEC_OR },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_pack", ALTIVEC_BUILTIN_VEC_PACK },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwum", ALTIVEC_BUILTIN_VEC_VPKUWUM },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhum", ALTIVEC_BUILTIN_VEC_VPKUHUM },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packpx", ALTIVEC_BUILTIN_VEC_PACKPX },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packs", ALTIVEC_BUILTIN_VEC_PACKS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswss", ALTIVEC_BUILTIN_VEC_VPKSWSS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwus", ALTIVEC_BUILTIN_VEC_VPKUWUS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshss", ALTIVEC_BUILTIN_VEC_VPKSHSS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhus", ALTIVEC_BUILTIN_VEC_VPKUHUS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packsu", ALTIVEC_BUILTIN_VEC_PACKSU },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswus", ALTIVEC_BUILTIN_VEC_VPKSWUS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshus", ALTIVEC_BUILTIN_VEC_VPKSHUS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rl", ALTIVEC_BUILTIN_VEC_RL },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlw", ALTIVEC_BUILTIN_VEC_VRLW },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlh", ALTIVEC_BUILTIN_VEC_VRLH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlb", ALTIVEC_BUILTIN_VEC_VRLB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sl", ALTIVEC_BUILTIN_VEC_SL },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslw", ALTIVEC_BUILTIN_VEC_VSLW },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslh", ALTIVEC_BUILTIN_VEC_VSLH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslb", ALTIVEC_BUILTIN_VEC_VSLB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sll", ALTIVEC_BUILTIN_VEC_SLL },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_slo", ALTIVEC_BUILTIN_VEC_SLO },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sr", ALTIVEC_BUILTIN_VEC_SR },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrw", ALTIVEC_BUILTIN_VEC_VSRW },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrh", ALTIVEC_BUILTIN_VEC_VSRH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrb", ALTIVEC_BUILTIN_VEC_VSRB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sra", ALTIVEC_BUILTIN_VEC_SRA },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsraw", ALTIVEC_BUILTIN_VEC_VSRAW },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrah", ALTIVEC_BUILTIN_VEC_VSRAH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrab", ALTIVEC_BUILTIN_VEC_VSRAB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_srl", ALTIVEC_BUILTIN_VEC_SRL },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sro", ALTIVEC_BUILTIN_VEC_SRO },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sub", ALTIVEC_BUILTIN_VEC_SUB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubfp", ALTIVEC_BUILTIN_VEC_VSUBFP },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuwm", ALTIVEC_BUILTIN_VEC_VSUBUWM },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhm", ALTIVEC_BUILTIN_VEC_VSUBUHM },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububm", ALTIVEC_BUILTIN_VEC_VSUBUBM },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subc", ALTIVEC_BUILTIN_VEC_SUBC },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subs", ALTIVEC_BUILTIN_VEC_SUBS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsws", ALTIVEC_BUILTIN_VEC_VSUBSWS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuws", ALTIVEC_BUILTIN_VEC_VSUBUWS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubshs", ALTIVEC_BUILTIN_VEC_VSUBSHS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhs", ALTIVEC_BUILTIN_VEC_VSUBUHS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsbs", ALTIVEC_BUILTIN_VEC_VSUBSBS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububs", ALTIVEC_BUILTIN_VEC_VSUBUBS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum4s", ALTIVEC_BUILTIN_VEC_SUM4S },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4shs", ALTIVEC_BUILTIN_VEC_VSUM4SHS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4sbs", ALTIVEC_BUILTIN_VEC_VSUM4SBS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4ubs", ALTIVEC_BUILTIN_VEC_VSUM4UBS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum2s", ALTIVEC_BUILTIN_VEC_SUM2S },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sums", ALTIVEC_BUILTIN_VEC_SUMS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_xor", ALTIVEC_BUILTIN_VEC_XOR },

  /* Place holder, leave as first spe builtin.  */
  { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
  { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
  { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
  { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
  { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
  { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
  { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
  { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
  { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
  { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
  { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
  { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
  { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
  { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
  { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
  { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
  { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
  { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
  { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
  { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
  { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
  { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
  { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
  { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
  { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
  { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
  { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
  { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
  { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
  { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
  { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
  { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
  { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
  { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
  { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
  { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
  { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
  { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
  { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
  { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
  { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
  { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
  { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
  { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
  { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
  { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
  { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
  { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
  { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
  { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
  { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
  { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
  { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
  { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
  { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
  { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
  { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
  { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
  { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
  { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
  { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
  { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
  { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
  { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
  { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
  { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
  { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
  { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
  { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
  { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
  { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
  { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
  { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
  { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
  { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
  { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
  { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
  { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
  { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
  { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
  { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
  { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
  { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
  { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
  { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
  { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
  { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
  { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
  { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
  { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
  { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
  { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
  { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
  { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
  { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
  { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
  { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
  { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
  { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
  { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
  { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
  { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
  { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
  { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
  { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
  { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
  { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
  { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
  { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },

  /* SPE binary operations expecting a 5-bit unsigned literal.  */
  { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },

  { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
  { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
  { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
  { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
  { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
  { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
  { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
  { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
  { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
  { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
  { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
  { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
  { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
  { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
  { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
  { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
  { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
  { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
  { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
  { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
  { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
  { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
  { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
  { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
  { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
  { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },

  /* Place-holder.  Leave as last binary SPE builtin.  */
  { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR }
};

/* AltiVec predicates.  */

struct builtin_description_predicates
{
  const unsigned int mask;
  const enum insn_code icode;
  const char *opcode;
  const char *const name;
  const enum rs6000_builtins code;
};

static const struct builtin_description_predicates bdesc_altivec_preds[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P },

  { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpeq_p", ALTIVEC_BUILTIN_VCMPEQ_P },
  { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpgt_p", ALTIVEC_BUILTIN_VCMPGT_P },
  { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpge_p", ALTIVEC_BUILTIN_VCMPGE_P }
};
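
/* Usage sketch (an assumption about the altivec.h conventions, not part of
   the original source): the "_p" predicates take a CR6 test selector as
   their first operand, so user code such as

       int all_eq = __builtin_altivec_vcmpequw_p (__CR6_LT, a, b);

   ends up emitting the dotted compare named by the opcode field of the
   matching entry ("*vcmpequw." here) and testing the condition register.  */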

/* SPE predicates.  */
static struct builtin_description bdesc_spe_predicates[] =
{
  /* Place-holder.  Leave as first.  */
  { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
  { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
  { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
  { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
  { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
  { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
  { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
  { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
  { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
  { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
  /* Place-holder.  Leave as last.  */
  { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
};

/* SPE evsel predicates.  */
static struct builtin_description bdesc_spe_evsel[] =
{
  /* Place-holder.  Leave as first.  */
  { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
  { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
  { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
  { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
  { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
  { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
  { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
  { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
  { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
  { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
  /* Place-holder.  Leave as last.  */
  { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
};

/* ABS* operations.  */

static const struct builtin_description bdesc_abs[] =
{
  { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
  { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
  { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
  { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
};
/* Simple unary operations: VECb = foo (unsigned literal) or VECb =
   foo (VECa).  */

static struct builtin_description bdesc_1arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
  { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },

  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abs", ALTIVEC_BUILTIN_VEC_ABS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abss", ALTIVEC_BUILTIN_VEC_ABSS },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_ceil", ALTIVEC_BUILTIN_VEC_CEIL },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_expte", ALTIVEC_BUILTIN_VEC_EXPTE },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_floor", ALTIVEC_BUILTIN_VEC_FLOOR },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_loge", ALTIVEC_BUILTIN_VEC_LOGE },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mtvscr", ALTIVEC_BUILTIN_VEC_MTVSCR },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_re", ALTIVEC_BUILTIN_VEC_RE },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_round", ALTIVEC_BUILTIN_VEC_ROUND },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rsqrte", ALTIVEC_BUILTIN_VEC_RSQRTE },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_trunc", ALTIVEC_BUILTIN_VEC_TRUNC },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackh", ALTIVEC_BUILTIN_VEC_UNPACKH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsh", ALTIVEC_BUILTIN_VEC_VUPKHSH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhpx", ALTIVEC_BUILTIN_VEC_VUPKHPX },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsb", ALTIVEC_BUILTIN_VEC_VUPKHSB },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackl", ALTIVEC_BUILTIN_VEC_UNPACKL },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklpx", ALTIVEC_BUILTIN_VEC_VUPKLPX },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsh", ALTIVEC_BUILTIN_VEC_VUPKLSH },
  { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsb", ALTIVEC_BUILTIN_VEC_VUPKLSB },

  /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
     end with SPE_BUILTIN_EVSUBFUSIAAW.  */
  { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
  { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
  { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
  { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
  { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
  { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
  { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
  { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
  { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
  { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
  { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
  { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
  { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
  { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
  { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
  { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
  { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
  { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
  { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
  { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
  { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
  { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
  { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
  { 0, CODE_FOR_negv2si2, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
  { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
  { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
  { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
  { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },

  /* Place-holder.  Leave as last unary SPE builtin.  */
  { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
};
static rtx
rs6000_expand_unop_builtin (enum insn_code icode, tree arglist, rtx target)
{
  rtx pat;
  tree arg0 = TREE_VALUE (arglist);
  rtx op0 = expand_normal (arg0);
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;

  if (icode == CODE_FOR_nothing)
    /* Builtin not supported on this processor.  */
    return 0;

  /* If we got invalid arguments bail out before generating bad rtl.  */
  if (arg0 == error_mark_node)
    return const0_rtx;

  if (icode == CODE_FOR_altivec_vspltisb
      || icode == CODE_FOR_altivec_vspltish
      || icode == CODE_FOR_altivec_vspltisw
      || icode == CODE_FOR_spe_evsplatfi
      || icode == CODE_FOR_spe_evsplati)
    {
      /* Only allow 5-bit *signed* literals.  */
      if (GET_CODE (op0) != CONST_INT
          || INTVAL (op0) > 15
          || INTVAL (op0) < -16)
        {
          error ("argument 1 must be a 5-bit signed literal");
          return const0_rtx;
        }
    }

  if (target == 0
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);

  pat = GEN_FCN (icode) (target, op0);
  if (! pat)
    return 0;
  emit_insn (pat);

  return target;
}
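
/* Example of the signed-literal check above, assuming the
   vec_splat_s8 -> __builtin_altivec_vspltisb mapping from altivec.h:

     vector signed char a = vec_splat_s8 (15);    OK, emits vspltisb
     vector signed char b = vec_splat_s8 (-16);   OK, emits vspltisb
     vector signed char c = vec_splat_s8 (16);    rejected by the
                                                  error () call above.  */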
static rtx
altivec_expand_abs_builtin (enum insn_code icode, tree arglist, rtx target)
{
  rtx pat, scratch1, scratch2;
  tree arg0 = TREE_VALUE (arglist);
  rtx op0 = expand_normal (arg0);
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;

  /* If we have invalid arguments, bail out before generating bad rtl.  */
  if (arg0 == error_mark_node)
    return const0_rtx;

  if (target == 0
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);

  scratch1 = gen_reg_rtx (mode0);
  scratch2 = gen_reg_rtx (mode0);

  pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
  if (! pat)
    return 0;
  emit_insn (pat);

  return target;
}
static rtx
rs6000_expand_binop_builtin (enum insn_code icode, tree arglist, rtx target)
{
  rtx pat;
  tree arg0 = TREE_VALUE (arglist);
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;

  if (icode == CODE_FOR_nothing)
    /* Builtin not supported on this processor.  */
    return 0;

  /* If we got invalid arguments bail out before generating bad rtl.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  if (icode == CODE_FOR_altivec_vcfux
      || icode == CODE_FOR_altivec_vcfsx
      || icode == CODE_FOR_altivec_vctsxs
      || icode == CODE_FOR_altivec_vctuxs
      || icode == CODE_FOR_altivec_vspltb
      || icode == CODE_FOR_altivec_vsplth
      || icode == CODE_FOR_altivec_vspltw
      || icode == CODE_FOR_spe_evaddiw
      || icode == CODE_FOR_spe_evldd
      || icode == CODE_FOR_spe_evldh
      || icode == CODE_FOR_spe_evldw
      || icode == CODE_FOR_spe_evlhhesplat
      || icode == CODE_FOR_spe_evlhhossplat
      || icode == CODE_FOR_spe_evlhhousplat
      || icode == CODE_FOR_spe_evlwhe
      || icode == CODE_FOR_spe_evlwhos
      || icode == CODE_FOR_spe_evlwhou
      || icode == CODE_FOR_spe_evlwhsplat
      || icode == CODE_FOR_spe_evlwwsplat
      || icode == CODE_FOR_spe_evrlwi
      || icode == CODE_FOR_spe_evslwi
      || icode == CODE_FOR_spe_evsrwis
      || icode == CODE_FOR_spe_evsubifw
      || icode == CODE_FOR_spe_evsrwiu)
    {
      /* Only allow 5-bit unsigned literals.  */
      STRIP_NOPS (arg1);
      if (TREE_CODE (arg1) != INTEGER_CST
          || TREE_INT_CST_LOW (arg1) & ~0x1f)
        {
          error ("argument 2 must be a 5-bit unsigned literal");
          return const0_rtx;
        }
    }

  if (target == 0
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  pat = GEN_FCN (icode) (target, op0, op1);
  if (! pat)
    return 0;
  emit_insn (pat);

  return target;
}
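
/* Example of the unsigned-literal check above: for the insns listed,
   operand 2 is a 5-bit immediate field.  A sketch, assuming the
   vec_splat mapping from altivec.h:

     vector int v, w;
     w = vec_splat (v, 3);    expands to vspltw with UIMM 3
     w = vec_splat (v, n);    rejected unless n folds to an
                              INTEGER_CST with (n & ~0x1f) == 0.  */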
static rtx
altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
                                  tree arglist, rtx target)
{
  rtx pat, scratch;
  tree cr6_form = TREE_VALUE (arglist);
  tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);
  enum machine_mode tmode = SImode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
  int cr6_form_int;

  if (TREE_CODE (cr6_form) != INTEGER_CST)
    {
      error ("argument 1 of __builtin_altivec_predicate must be a constant");
      return const0_rtx;
    }
  else
    cr6_form_int = TREE_INT_CST_LOW (cr6_form);

  gcc_assert (mode0 == mode1);

  /* If we have invalid arguments, bail out before generating bad rtl.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  if (target == 0
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  scratch = gen_reg_rtx (mode0);

  pat = GEN_FCN (icode) (scratch, op0, op1,
                         gen_rtx_SYMBOL_REF (Pmode, opcode));
  if (! pat)
    return 0;
  emit_insn (pat);

  /* The vec_any* and vec_all* predicates use the same opcodes for two
     different operations, but the bits in CR6 will be different
     depending on what information we want.  So we have to play tricks
     with CR6 to get the right bits out.

     If you think this is disgusting, look at the specs for the
     AltiVec predicates.  */

  switch (cr6_form_int)
    {
    case 0:
      emit_insn (gen_cr6_test_for_zero (target));
      break;
    case 1:
      emit_insn (gen_cr6_test_for_zero_reverse (target));
      break;
    case 2:
      emit_insn (gen_cr6_test_for_lt (target));
      break;
    case 3:
      emit_insn (gen_cr6_test_for_lt_reverse (target));
      break;
    default:
      error ("argument 1 of __builtin_altivec_predicate is out of range");
      break;
    }

  return target;
}
static rtx
altivec_expand_lv_builtin (enum insn_code icode, tree arglist, rtx target)
{
  rtx pat, addr;
  tree arg0 = TREE_VALUE (arglist);
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode0 = Pmode;
  enum machine_mode mode1 = Pmode;
  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);

  if (icode == CODE_FOR_nothing)
    /* Builtin not supported on this processor.  */
    return 0;

  /* If we got invalid arguments bail out before generating bad rtl.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  if (target == 0
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  op1 = copy_to_mode_reg (mode1, op1);

  if (op0 == const0_rtx)
    {
      addr = gen_rtx_MEM (tmode, op1);
    }
  else
    {
      op0 = copy_to_mode_reg (mode0, op0);
      addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
    }

  pat = GEN_FCN (icode) (target, addr);
  if (! pat)
    return 0;
  emit_insn (pat);

  return target;
}
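
/* Example of the two MEM shapes built above, mirroring the hardware's
   indexed addressing (RA|0 + RB):

     __builtin_altivec_lvx (0, p)    ->  (mem:V4SI (reg RB))
     __builtin_altivec_lvx (off, p)  ->  (mem:V4SI (plus (reg RA) (reg RB)))

   where RB holds p and RA holds off; a zero first operand drops the
   PLUS entirely.  */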
static rtx
spe_expand_stv_builtin (enum insn_code icode, tree arglist)
{
  tree arg0 = TREE_VALUE (arglist);
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);
  rtx op2 = expand_normal (arg2);
  rtx pat;
  enum machine_mode mode0 = insn_data[icode].operand[0].mode;
  enum machine_mode mode1 = insn_data[icode].operand[1].mode;
  enum machine_mode mode2 = insn_data[icode].operand[2].mode;

  /* Invalid arguments.  Bail before doing anything stoopid!  */
  if (arg0 == error_mark_node
      || arg1 == error_mark_node
      || arg2 == error_mark_node)
    return const0_rtx;

  if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
    op0 = copy_to_mode_reg (mode2, op0);
  if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
    op1 = copy_to_mode_reg (mode0, op1);
  if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
    op2 = copy_to_mode_reg (mode1, op2);

  pat = GEN_FCN (icode) (op1, op2, op0);
  if (pat)
    emit_insn (pat);
  return NULL_RTX;
}
static rtx
altivec_expand_stv_builtin (enum insn_code icode, tree arglist)
{
  tree arg0 = TREE_VALUE (arglist);
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);
  rtx op2 = expand_normal (arg2);
  rtx pat, addr;
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode1 = Pmode;
  enum machine_mode mode2 = Pmode;

  /* Invalid arguments.  Bail before doing anything stoopid!  */
  if (arg0 == error_mark_node
      || arg1 == error_mark_node
      || arg2 == error_mark_node)
    return const0_rtx;

  if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
    op0 = copy_to_mode_reg (tmode, op0);

  op2 = copy_to_mode_reg (mode2, op2);

  if (op1 == const0_rtx)
    {
      addr = gen_rtx_MEM (tmode, op2);
    }
  else
    {
      op1 = copy_to_mode_reg (mode1, op1);
      addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
    }

  pat = GEN_FCN (icode) (addr, op0);
  if (pat)
    emit_insn (pat);
  return NULL_RTX;
}
static rtx
rs6000_expand_ternop_builtin (enum insn_code icode, tree arglist, rtx target)
{
  rtx pat;
  tree arg0 = TREE_VALUE (arglist);
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);
  rtx op2 = expand_normal (arg2);
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
  enum machine_mode mode2 = insn_data[icode].operand[3].mode;

  if (icode == CODE_FOR_nothing)
    /* Builtin not supported on this processor.  */
    return 0;

  /* If we got invalid arguments bail out before generating bad rtl.  */
  if (arg0 == error_mark_node
      || arg1 == error_mark_node
      || arg2 == error_mark_node)
    return const0_rtx;

  if (icode == CODE_FOR_altivec_vsldoi_v4sf
      || icode == CODE_FOR_altivec_vsldoi_v4si
      || icode == CODE_FOR_altivec_vsldoi_v8hi
      || icode == CODE_FOR_altivec_vsldoi_v16qi)
    {
      /* Only allow 4-bit unsigned literals.  */
      STRIP_NOPS (arg2);
      if (TREE_CODE (arg2) != INTEGER_CST
          || TREE_INT_CST_LOW (arg2) & ~0xf)
        {
          error ("argument 3 must be a 4-bit unsigned literal");
          return const0_rtx;
        }
    }

  if (target == 0
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);
  if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
    op2 = copy_to_mode_reg (mode2, op2);

  pat = GEN_FCN (icode) (target, op0, op1, op2);
  if (! pat)
    return 0;
  emit_insn (pat);

  return target;
}
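
/* Example of the 4-bit literal check above, assuming the vec_sld
   mapping from altivec.h:

     vector int a, b, c;
     c = vec_sld (a, b, 4);     OK, vsldoi with a byte shift of 4
     c = vec_sld (a, b, 16);    rejected: (16 & ~0xf) != 0.  */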
/* Expand the lvx builtins.  */
static rtx
altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arg0;
  enum machine_mode tmode, mode0;
  rtx pat, op0;
  enum insn_code icode;

  switch (fcode)
    {
    case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
      icode = CODE_FOR_altivec_lvx_v16qi;
      break;
    case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
      icode = CODE_FOR_altivec_lvx_v8hi;
      break;
    case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
      icode = CODE_FOR_altivec_lvx_v4si;
      break;
    case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
      icode = CODE_FOR_altivec_lvx_v4sf;
      break;
    default:
      *expandedp = false;
      return NULL_RTX;
    }

  *expandedp = true;

  arg0 = TREE_VALUE (arglist);
  op0 = expand_normal (arg0);
  tmode = insn_data[icode].operand[0].mode;
  mode0 = insn_data[icode].operand[1].mode;

  if (target == 0
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));

  pat = GEN_FCN (icode) (target, op0);
  if (! pat)
    return 0;
  emit_insn (pat);

  return target;
}
/* Expand the stvx builtins.  */
static rtx
altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
                           bool *expandedp)
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arg0, arg1;
  enum machine_mode mode0, mode1;
  rtx pat, op0, op1;
  enum insn_code icode;

  switch (fcode)
    {
    case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
      icode = CODE_FOR_altivec_stvx_v16qi;
      break;
    case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
      icode = CODE_FOR_altivec_stvx_v8hi;
      break;
    case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
      icode = CODE_FOR_altivec_stvx_v4si;
      break;
    case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
      icode = CODE_FOR_altivec_stvx_v4sf;
      break;
    default:
      *expandedp = false;
      return NULL_RTX;
    }

  arg0 = TREE_VALUE (arglist);
  arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);
  mode0 = insn_data[icode].operand[0].mode;
  mode1 = insn_data[icode].operand[1].mode;

  if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
    op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
  if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  pat = GEN_FCN (icode) (op0, op1);
  if (pat)
    emit_insn (pat);

  *expandedp = true;
  return NULL_RTX;
}
/* Expand the dst builtins.  */
static rtx
altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
                            bool *expandedp)
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arg0, arg1, arg2;
  enum machine_mode mode0, mode1, mode2;
  rtx pat, op0, op1, op2;
  struct builtin_description *d;
  size_t i;

  *expandedp = false;

  /* Handle DST variants.  */
  d = (struct builtin_description *) bdesc_dst;
  for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
    if (d->code == fcode)
      {
        arg0 = TREE_VALUE (arglist);
        arg1 = TREE_VALUE (TREE_CHAIN (arglist));
        arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
        op0 = expand_normal (arg0);
        op1 = expand_normal (arg1);
        op2 = expand_normal (arg2);
        mode0 = insn_data[d->icode].operand[0].mode;
        mode1 = insn_data[d->icode].operand[1].mode;
        mode2 = insn_data[d->icode].operand[2].mode;

        /* Invalid arguments, bail out before generating bad rtl.  */
        if (arg0 == error_mark_node
            || arg1 == error_mark_node
            || arg2 == error_mark_node)
          return const0_rtx;

        *expandedp = true;
        STRIP_NOPS (arg2);
        if (TREE_CODE (arg2) != INTEGER_CST
            || TREE_INT_CST_LOW (arg2) & ~0x3)
          {
            error ("argument to %qs must be a 2-bit unsigned literal", d->name);
            return const0_rtx;
          }

        if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
          op0 = copy_to_mode_reg (Pmode, op0);
        if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
          op1 = copy_to_mode_reg (mode1, op1);

        pat = GEN_FCN (d->icode) (op0, op1, op2);
        if (pat != 0)
          emit_insn (pat);

        return NULL_RTX;
      }

  return NULL_RTX;
}
/* Expand vec_init builtin.  */
static rtx
altivec_expand_vec_init_builtin (tree type, tree arglist, rtx target)
{
  enum machine_mode tmode = TYPE_MODE (type);
  enum machine_mode inner_mode = GET_MODE_INNER (tmode);
  int i, n_elt = GET_MODE_NUNITS (tmode);
  rtvec v = rtvec_alloc (n_elt);

  gcc_assert (VECTOR_MODE_P (tmode));

  for (i = 0; i < n_elt; ++i, arglist = TREE_CHAIN (arglist))
    {
      rtx x = expand_normal (TREE_VALUE (arglist));
      RTVEC_ELT (v, i) = gen_lowpart (inner_mode, x);
    }

  gcc_assert (arglist == NULL);

  if (!target || !register_operand (target, tmode))
    target = gen_reg_rtx (tmode);

  rs6000_expand_vector_init (target, gen_rtx_PARALLEL (tmode, v));
  return target;
}
/* Return the integer constant in ARG.  Constrain it to be in the range
   of the subparts of VEC_TYPE; issue an error if not.  */

static int
get_element_number (tree vec_type, tree arg)
{
  unsigned HOST_WIDE_INT elt, max = TYPE_VECTOR_SUBPARTS (vec_type) - 1;

  if (!host_integerp (arg, 1)
      || (elt = tree_low_cst (arg, 1), elt > max))
    {
      error ("selector must be an integer constant in the range 0..%wi", max);
      return 0;
    }

  return elt;
}
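
/* Example: for a "vector signed short" argument TYPE_VECTOR_SUBPARTS
   is 8, so valid selectors are 0..7.  A sketch, assuming the
   __builtin_vec_ext_v8hi spelling these builtins are registered
   under:

     vector signed short v;
     short x = __builtin_vec_ext_v8hi (v, 7);   in range
     short y = __builtin_vec_ext_v8hi (v, 8);   "selector must be an
                integer constant in the range 0..7"  */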
/* Expand vec_set builtin.  */
static rtx
altivec_expand_vec_set_builtin (tree arglist)
{
  enum machine_mode tmode, mode1;
  tree arg0, arg1, arg2;
  int elt;
  rtx op0, op1;

  arg0 = TREE_VALUE (arglist);
  arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));

  tmode = TYPE_MODE (TREE_TYPE (arg0));
  mode1 = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
  gcc_assert (VECTOR_MODE_P (tmode));

  op0 = expand_expr (arg0, NULL_RTX, tmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode1, 0);
  elt = get_element_number (TREE_TYPE (arg0), arg2);

  if (GET_MODE (op1) != mode1 && GET_MODE (op1) != VOIDmode)
    op1 = convert_modes (mode1, GET_MODE (op1), op1, true);

  op0 = force_reg (tmode, op0);
  op1 = force_reg (mode1, op1);

  rs6000_expand_vector_set (op0, op1, elt);

  return op0;
}
/* Expand vec_ext builtin.  */
static rtx
altivec_expand_vec_ext_builtin (tree arglist, rtx target)
{
  enum machine_mode tmode, mode0;
  tree arg0, arg1;
  int elt;
  rtx op0;

  arg0 = TREE_VALUE (arglist);
  arg1 = TREE_VALUE (TREE_CHAIN (arglist));

  op0 = expand_normal (arg0);
  elt = get_element_number (TREE_TYPE (arg0), arg1);

  tmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
  mode0 = TYPE_MODE (TREE_TYPE (arg0));
  gcc_assert (VECTOR_MODE_P (mode0));

  op0 = force_reg (mode0, op0);

  if (optimize || !target || !register_operand (target, tmode))
    target = gen_reg_rtx (tmode);

  rs6000_expand_vector_extract (target, op0, elt);

  return target;
}
/* Expand the builtin in EXP and store the result in TARGET.  Store
   true in *EXPANDEDP if we found a builtin to expand.  */
static rtx
altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
{
  struct builtin_description *d;
  struct builtin_description_predicates *dp;
  size_t i;
  enum insn_code icode;
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg0;
  rtx op0, pat;
  enum machine_mode tmode, mode0;
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);

  if (fcode >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
      && fcode <= ALTIVEC_BUILTIN_OVERLOADED_LAST)
    {
      *expandedp = true;
      error ("unresolved overload for Altivec builtin %qF", fndecl);
      return const0_rtx;
    }

  target = altivec_expand_ld_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  target = altivec_expand_st_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  target = altivec_expand_dst_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  *expandedp = true;

  switch (fcode)
    {
    case ALTIVEC_BUILTIN_STVX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
    case ALTIVEC_BUILTIN_STVEBX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
    case ALTIVEC_BUILTIN_STVEHX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
    case ALTIVEC_BUILTIN_STVEWX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
    case ALTIVEC_BUILTIN_STVXL:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);

    case ALTIVEC_BUILTIN_MFVSCR:
      icode = CODE_FOR_altivec_mfvscr;
      tmode = insn_data[icode].operand[0].mode;

      if (target == 0
          || GET_MODE (target) != tmode
          || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
        target = gen_reg_rtx (tmode);

      pat = GEN_FCN (icode) (target);
      if (! pat)
        return 0;
      emit_insn (pat);
      return target;

    case ALTIVEC_BUILTIN_MTVSCR:
      icode = CODE_FOR_altivec_mtvscr;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_normal (arg0);
      mode0 = insn_data[icode].operand[0].mode;

      /* If we got invalid arguments bail out before generating bad rtl.  */
      if (arg0 == error_mark_node)
        return const0_rtx;

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
        op0 = copy_to_mode_reg (mode0, op0);

      pat = GEN_FCN (icode) (op0);
      if (pat)
        emit_insn (pat);
      return NULL_RTX;

    case ALTIVEC_BUILTIN_DSSALL:
      emit_insn (gen_altivec_dssall ());
      return NULL_RTX;

    case ALTIVEC_BUILTIN_DSS:
      icode = CODE_FOR_altivec_dss;
      arg0 = TREE_VALUE (arglist);
      STRIP_NOPS (arg0);
      op0 = expand_normal (arg0);
      mode0 = insn_data[icode].operand[0].mode;

      /* If we got invalid arguments bail out before generating bad rtl.  */
      if (arg0 == error_mark_node)
        return const0_rtx;

      if (TREE_CODE (arg0) != INTEGER_CST
          || TREE_INT_CST_LOW (arg0) & ~0x3)
        {
          error ("argument to dss must be a 2-bit unsigned literal");
          return const0_rtx;
        }

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
        op0 = copy_to_mode_reg (mode0, op0);

      emit_insn (gen_altivec_dss (op0));
      return NULL_RTX;

    case ALTIVEC_BUILTIN_VEC_INIT_V4SI:
    case ALTIVEC_BUILTIN_VEC_INIT_V8HI:
    case ALTIVEC_BUILTIN_VEC_INIT_V16QI:
    case ALTIVEC_BUILTIN_VEC_INIT_V4SF:
      return altivec_expand_vec_init_builtin (TREE_TYPE (exp), arglist, target);

    case ALTIVEC_BUILTIN_VEC_SET_V4SI:
    case ALTIVEC_BUILTIN_VEC_SET_V8HI:
    case ALTIVEC_BUILTIN_VEC_SET_V16QI:
    case ALTIVEC_BUILTIN_VEC_SET_V4SF:
      return altivec_expand_vec_set_builtin (arglist);

    case ALTIVEC_BUILTIN_VEC_EXT_V4SI:
    case ALTIVEC_BUILTIN_VEC_EXT_V8HI:
    case ALTIVEC_BUILTIN_VEC_EXT_V16QI:
    case ALTIVEC_BUILTIN_VEC_EXT_V4SF:
      return altivec_expand_vec_ext_builtin (arglist, target);

    default:
      break;
      /* Fall through.  */
    }

  /* Expand abs* operations.  */
  d = (struct builtin_description *) bdesc_abs;
  for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
    if (d->code == fcode)
      return altivec_expand_abs_builtin (d->icode, arglist, target);

  /* Expand the AltiVec predicates.  */
  dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
  for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
    if (dp->code == fcode)
      return altivec_expand_predicate_builtin (dp->icode, dp->opcode,
                                               arglist, target);

  /* LV* are funky.  We initialized them differently.  */
  switch (fcode)
    {
    case ALTIVEC_BUILTIN_LVSL:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
                                        arglist, target);
    case ALTIVEC_BUILTIN_LVSR:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
                                        arglist, target);
    case ALTIVEC_BUILTIN_LVEBX:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
                                        arglist, target);
    case ALTIVEC_BUILTIN_LVEHX:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
                                        arglist, target);
    case ALTIVEC_BUILTIN_LVEWX:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
                                        arglist, target);
    case ALTIVEC_BUILTIN_LVXL:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
                                        arglist, target);
    case ALTIVEC_BUILTIN_LVX:
      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
                                        arglist, target);
    default:
      break;
      /* Fall through.  */
    }

  *expandedp = false;
  return NULL_RTX;
}
/* Binops that need to be initialized manually, but can be expanded
   automagically by rs6000_expand_binop_builtin.  */
static struct builtin_description bdesc_2arg_spe[] =
{
  { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
  { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
  { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
  { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
  { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
  { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
  { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
  { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
  { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
  { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
  { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
  { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
  { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
  { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
  { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
  { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
  { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
  { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
  { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
  { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
  { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
  { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT },
};
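
/* Example of why these binops need manual initialization: their
   operands are a pointer plus a literal offset rather than two
   vectors, e.g.

     __ev64_opaque__ v = __builtin_spe_evldd (p, 8);

   so spe_init_builtins registers them below with hand-built types
   such as v2si_ftype_pv2si_int, while expansion still goes through
   the generic rs6000_expand_binop_builtin.  */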
/* Expand the builtin in EXP and store the result in TARGET.  Store
   true in *EXPANDEDP if we found a builtin to expand.

   This expands the SPE builtins that are not simple unary and binary
   operations.  */
static rtx
spe_expand_builtin (tree exp, rtx target, bool *expandedp)
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg1, arg0;
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  enum insn_code icode;
  enum machine_mode tmode, mode0;
  rtx pat, op0;
  struct builtin_description *d;
  size_t i;

  *expandedp = true;

  /* Syntax check for a 5-bit unsigned immediate.  */
  switch (fcode)
    {
    case SPE_BUILTIN_EVSTDD:
    case SPE_BUILTIN_EVSTDH:
    case SPE_BUILTIN_EVSTDW:
    case SPE_BUILTIN_EVSTWHE:
    case SPE_BUILTIN_EVSTWHO:
    case SPE_BUILTIN_EVSTWWE:
    case SPE_BUILTIN_EVSTWWO:
      arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
      if (TREE_CODE (arg1) != INTEGER_CST
          || TREE_INT_CST_LOW (arg1) & ~0x1f)
        {
          error ("argument 2 must be a 5-bit unsigned literal");
          return const0_rtx;
        }
      break;
    default:
      break;
    }

  /* The evsplat*i instructions are not quite generic.  */
  switch (fcode)
    {
    case SPE_BUILTIN_EVSPLATFI:
      return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
                                         arglist, target);
    case SPE_BUILTIN_EVSPLATI:
      return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
                                         arglist, target);
    default:
      break;
    }

  d = (struct builtin_description *) bdesc_2arg_spe;
  for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
    if (d->code == fcode)
      return rs6000_expand_binop_builtin (d->icode, arglist, target);

  d = (struct builtin_description *) bdesc_spe_predicates;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
    if (d->code == fcode)
      return spe_expand_predicate_builtin (d->icode, arglist, target);

  d = (struct builtin_description *) bdesc_spe_evsel;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
    if (d->code == fcode)
      return spe_expand_evsel_builtin (d->icode, arglist, target);

  switch (fcode)
    {
    case SPE_BUILTIN_EVSTDDX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
    case SPE_BUILTIN_EVSTDHX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
    case SPE_BUILTIN_EVSTDWX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
    case SPE_BUILTIN_EVSTWHEX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
    case SPE_BUILTIN_EVSTWHOX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
    case SPE_BUILTIN_EVSTWWEX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
    case SPE_BUILTIN_EVSTWWOX:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
    case SPE_BUILTIN_EVSTDD:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
    case SPE_BUILTIN_EVSTDH:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
    case SPE_BUILTIN_EVSTDW:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
    case SPE_BUILTIN_EVSTWHE:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
    case SPE_BUILTIN_EVSTWHO:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
    case SPE_BUILTIN_EVSTWWE:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
    case SPE_BUILTIN_EVSTWWO:
      return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);

    case SPE_BUILTIN_MFSPEFSCR:
      icode = CODE_FOR_spe_mfspefscr;
      tmode = insn_data[icode].operand[0].mode;

      if (target == 0
          || GET_MODE (target) != tmode
          || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
        target = gen_reg_rtx (tmode);

      pat = GEN_FCN (icode) (target);
      if (! pat)
        return 0;
      emit_insn (pat);
      return target;

    case SPE_BUILTIN_MTSPEFSCR:
      icode = CODE_FOR_spe_mtspefscr;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_normal (arg0);
      mode0 = insn_data[icode].operand[0].mode;

      if (arg0 == error_mark_node)
        return const0_rtx;

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
        op0 = copy_to_mode_reg (mode0, op0);

      pat = GEN_FCN (icode) (op0);
      if (pat)
        emit_insn (pat);
      return NULL_RTX;
    default:
      break;
    }

  *expandedp = false;
  return NULL_RTX;
}
static rtx
spe_expand_predicate_builtin (enum insn_code icode, tree arglist, rtx target)
{
  rtx pat, scratch, tmp;
  tree form = TREE_VALUE (arglist);
  tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
  int form_int;
  enum rtx_code code;

  if (TREE_CODE (form) != INTEGER_CST)
    {
      error ("argument 1 of __builtin_spe_predicate must be a constant");
      return const0_rtx;
    }
  else
    form_int = TREE_INT_CST_LOW (form);

  gcc_assert (mode0 == mode1);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  if (target == 0
      || GET_MODE (target) != SImode
      || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
    target = gen_reg_rtx (SImode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  scratch = gen_reg_rtx (CCmode);

  pat = GEN_FCN (icode) (scratch, op0, op1);
  if (! pat)
    return const0_rtx;
  emit_insn (pat);

  /* There are 4 variants for each predicate: _any_, _all_, _upper_,
     _lower_.  We use one compare, but look in different bits of the
     CR for each variant.

     There are 2 elements in each SPE simd type (upper/lower).  The CR
     bits are set as follows:

     BIT0  | BIT 1  | BIT 2   | BIT 3
     U     |   L    | (U | L) | (U & L)

     So, for an "all" relationship, BIT 3 would be set.
     For an "any" relationship, BIT 2 would be set.  Etc.

     Following traditional nomenclature, these bits map to:

     BIT0  | BIT 1  | BIT 2   | BIT 3
     LT    | GT     | EQ      | OV

     Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.  */

  switch (form_int)
    {
      /* All variant.  OV bit.  */
    case 0:
      /* We need to get to the OV bit, which is the ORDERED bit.  We
         could generate (ordered:SI (reg:CC xx) (const_int 0)), but
         that's ugly and will make validate_condition_mode die.
         So let's just use another pattern.  */
      emit_insn (gen_move_from_CR_ov_bit (target, scratch));
      return target;
      /* Any variant.  EQ bit.  */
    case 1:
      code = EQ;
      break;
      /* Upper variant.  LT bit.  */
    case 2:
      code = LT;
      break;
      /* Lower variant.  GT bit.  */
    case 3:
      code = GT;
      break;
    default:
      error ("argument 1 of __builtin_spe_predicate is out of range");
      return const0_rtx;
    }

  tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
  emit_move_insn (target, tmp);

  return target;
}
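
/* Example: per the switch above, the leading form argument selects
   which CR bit is read back (a and b are __ev64_opaque__ values; the
   form constants are the case labels above):

     int r_all   = __builtin_spe_evcmpgts (0, a, b);   OV bit, U & L
     int r_any   = __builtin_spe_evcmpgts (1, a, b);   EQ bit, U | L
     int r_upper = __builtin_spe_evcmpgts (2, a, b);   LT bit, U
     int r_lower = __builtin_spe_evcmpgts (3, a, b);   GT bit, L  */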
/* The evsel builtins look like this:

     e = __builtin_spe_evsel_OP (a, b, c, d);

   and work like this:

     e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
     e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
*/

static rtx
spe_expand_evsel_builtin (enum insn_code icode, tree arglist, rtx target)
{
  rtx pat, scratch;
  tree arg0 = TREE_VALUE (arglist);
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);
  rtx op2 = expand_normal (arg2);
  rtx op3 = expand_normal (arg3);
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;

  gcc_assert (mode0 == mode1);

  if (arg0 == error_mark_node || arg1 == error_mark_node
      || arg2 == error_mark_node || arg3 == error_mark_node)
    return const0_rtx;

  if (target == 0
      || GET_MODE (target) != mode0
      || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
    target = gen_reg_rtx (mode0);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode0, op1);
  if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
    op2 = copy_to_mode_reg (mode0, op2);
  if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
    op3 = copy_to_mode_reg (mode0, op3);

  /* Generate the compare.  */
  scratch = gen_reg_rtx (CCmode);
  pat = GEN_FCN (icode) (scratch, op0, op1);
  if (! pat)
    return const0_rtx;
  emit_insn (pat);

  if (mode0 == V2SImode)
    emit_insn (gen_spe_evsel (target, op2, op3, scratch));
  else
    emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));

  return target;
}
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

static rtx
rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
                       enum machine_mode mode ATTRIBUTE_UNUSED,
                       int ignore ATTRIBUTE_UNUSED)
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  struct builtin_description *d;
  size_t i;
  rtx ret;
  bool success;

  if (fcode == ALTIVEC_BUILTIN_MASK_FOR_LOAD
      || fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
    {
      int icode = (int) CODE_FOR_altivec_lvsr;
      enum machine_mode tmode = insn_data[icode].operand[0].mode;
      enum machine_mode mode = insn_data[icode].operand[1].mode;
      tree arg;
      rtx op, addr, pat;

      gcc_assert (TARGET_ALTIVEC);

      arg = TREE_VALUE (arglist);
      gcc_assert (TREE_CODE (TREE_TYPE (arg)) == POINTER_TYPE);
      op = expand_expr (arg, NULL_RTX, Pmode, EXPAND_NORMAL);
      addr = memory_address (mode, op);
      if (fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
        op = addr;
      else
        {
          /* For the load case need to negate the address.  */
          op = gen_reg_rtx (GET_MODE (addr));
          emit_insn (gen_rtx_SET (VOIDmode, op,
                                  gen_rtx_NEG (GET_MODE (addr), addr)));
        }
      op = gen_rtx_MEM (mode, op);

      if (target == 0
          || GET_MODE (target) != tmode
          || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
        target = gen_reg_rtx (tmode);

      /*pat = gen_altivec_lvsr (target, op);*/
      pat = GEN_FCN (icode) (target, op);
      if (!pat)
        return 0;
      emit_insn (pat);

      return target;
    }

  if (TARGET_ALTIVEC)
    {
      ret = altivec_expand_builtin (exp, target, &success);

      if (success)
        return ret;
    }
  if (TARGET_SPE)
    {
      ret = spe_expand_builtin (exp, target, &success);

      if (success)
        return ret;
    }

  gcc_assert (TARGET_ALTIVEC || TARGET_SPE);

  /* Handle simple unary operations.  */
  d = (struct builtin_description *) bdesc_1arg;
  for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
    if (d->code == fcode)
      return rs6000_expand_unop_builtin (d->icode, arglist, target);

  /* Handle simple binary operations.  */
  d = (struct builtin_description *) bdesc_2arg;
  for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
    if (d->code == fcode)
      return rs6000_expand_binop_builtin (d->icode, arglist, target);

  /* Handle simple ternary operations.  */
  d = (struct builtin_description *) bdesc_3arg;
  for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
    if (d->code == fcode)
      return rs6000_expand_ternop_builtin (d->icode, arglist, target);

  gcc_unreachable ();
}
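
/* Note on the mask_for_load case above (an explanatory sketch, not a
   normative statement of the ISA): lvsr derives a permute mask from
   the low bits of its effective address, so handing it the negated
   pointer produces the mask that a later vperm needs to stitch two
   aligned loads into the requested misaligned vector, which is the
   vectorizer's realignment scheme.  */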
static tree
build_opaque_vector_type (tree node, int nunits)
{
  node = copy_node (node);
  TYPE_MAIN_VARIANT (node) = node;
  return build_vector_type (node, nunits);
}
8204 V2SI_type_node
= build_vector_type (intSI_type_node
, 2);
8205 V2SF_type_node
= build_vector_type (float_type_node
, 2);
8206 V4HI_type_node
= build_vector_type (intHI_type_node
, 4);
8207 V4SI_type_node
= build_vector_type (intSI_type_node
, 4);
8208 V4SF_type_node
= build_vector_type (float_type_node
, 4);
8209 V8HI_type_node
= build_vector_type (intHI_type_node
, 8);
8210 V16QI_type_node
= build_vector_type (intQI_type_node
, 16);
8212 unsigned_V16QI_type_node
= build_vector_type (unsigned_intQI_type_node
, 16);
8213 unsigned_V8HI_type_node
= build_vector_type (unsigned_intHI_type_node
, 8);
8214 unsigned_V4SI_type_node
= build_vector_type (unsigned_intSI_type_node
, 4);
8216 opaque_V2SF_type_node
= build_opaque_vector_type (float_type_node
, 2);
8217 opaque_V2SI_type_node
= build_opaque_vector_type (intSI_type_node
, 2);
8218 opaque_p_V2SI_type_node
= build_pointer_type (opaque_V2SI_type_node
);
8219 opaque_V4SI_type_node
= copy_node (V4SI_type_node
);
8221 /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
8222 types, especially in C++ land. Similarly, 'vector pixel' is distinct from
8223 'vector unsigned short'. */
8225 bool_char_type_node
= build_distinct_type_copy (unsigned_intQI_type_node
);
8226 bool_short_type_node
= build_distinct_type_copy (unsigned_intHI_type_node
);
8227 bool_int_type_node
= build_distinct_type_copy (unsigned_intSI_type_node
);
8228 pixel_type_node
= build_distinct_type_copy (unsigned_intHI_type_node
);
8230 long_integer_type_internal_node
= long_integer_type_node
;
8231 long_unsigned_type_internal_node
= long_unsigned_type_node
;
8232 intQI_type_internal_node
= intQI_type_node
;
8233 uintQI_type_internal_node
= unsigned_intQI_type_node
;
8234 intHI_type_internal_node
= intHI_type_node
;
8235 uintHI_type_internal_node
= unsigned_intHI_type_node
;
8236 intSI_type_internal_node
= intSI_type_node
;
8237 uintSI_type_internal_node
= unsigned_intSI_type_node
;
8238 float_type_internal_node
= float_type_node
;
8239 void_type_internal_node
= void_type_node
;
8241 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
8242 get_identifier ("__bool char"),
8243 bool_char_type_node
));
8244 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
8245 get_identifier ("__bool short"),
8246 bool_short_type_node
));
8247 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
8248 get_identifier ("__bool int"),
8249 bool_int_type_node
));
8250 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
8251 get_identifier ("__pixel"),
8254 bool_V16QI_type_node
= build_vector_type (bool_char_type_node
, 16);
8255 bool_V8HI_type_node
= build_vector_type (bool_short_type_node
, 8);
8256 bool_V4SI_type_node
= build_vector_type (bool_int_type_node
, 4);
8257 pixel_V8HI_type_node
= build_vector_type (pixel_type_node
, 8);
8259 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
8260 get_identifier ("__vector unsigned char"),
8261 unsigned_V16QI_type_node
));
8262 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
8263 get_identifier ("__vector signed char"),
8265 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
8266 get_identifier ("__vector __bool char"),
8267 bool_V16QI_type_node
));
8269 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
8270 get_identifier ("__vector unsigned short"),
8271 unsigned_V8HI_type_node
));
8272 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
8273 get_identifier ("__vector signed short"),
8275 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
8276 get_identifier ("__vector __bool short"),
8277 bool_V8HI_type_node
));
8279 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
8280 get_identifier ("__vector unsigned int"),
8281 unsigned_V4SI_type_node
));
8282 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
8283 get_identifier ("__vector signed int"),
8285 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
8286 get_identifier ("__vector __bool int"),
8287 bool_V4SI_type_node
));
8289 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
8290 get_identifier ("__vector float"),
8292 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
8293 get_identifier ("__vector __pixel"),
8294 pixel_V8HI_type_node
));
8297 spe_init_builtins ();
8299 altivec_init_builtins ();
8300 if (TARGET_ALTIVEC
|| TARGET_SPE
)
8301 rs6000_common_init_builtins ();
8304 /* AIX libm provides clog as __clog. */
8305 if (built_in_decls
[BUILT_IN_CLOG
])
8306 set_user_assembler_name (built_in_decls
[BUILT_IN_CLOG
], "__clog");
/* Search through a set of builtins and enable the mask bits.
   DESC is an array of builtins.
   SIZE is the total number of builtins.
   START is the builtin enum at which to start.
   END is the builtin enum at which to end.  */
static void
enable_mask_for_builtins (struct builtin_description *desc, int size,
                          enum rs6000_builtins start,
                          enum rs6000_builtins end)
{
  int i;

  for (i = 0; i < size; ++i)
    if (desc[i].code == start)
      break;

  if (i == size)
    return;

  for (; i < size; ++i)
    {
      /* Flip all the bits on.  */
      desc[i].mask = target_flags;
      if (desc[i].code == end)
        break;
    }
}
static void
spe_init_builtins (void)
{
  tree endlink = void_list_node;
  tree puint_type_node = build_pointer_type (unsigned_type_node);
  tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
  struct builtin_description *d;
  size_t i;

  tree v2si_ftype_4_v2si
    = build_function_type
    (opaque_V2SI_type_node,
     tree_cons (NULL_TREE, opaque_V2SI_type_node,
                tree_cons (NULL_TREE, opaque_V2SI_type_node,
                           tree_cons (NULL_TREE, opaque_V2SI_type_node,
                                      tree_cons (NULL_TREE, opaque_V2SI_type_node,
                                                 endlink)))));

  tree v2sf_ftype_4_v2sf
    = build_function_type
    (opaque_V2SF_type_node,
     tree_cons (NULL_TREE, opaque_V2SF_type_node,
                tree_cons (NULL_TREE, opaque_V2SF_type_node,
                           tree_cons (NULL_TREE, opaque_V2SF_type_node,
                                      tree_cons (NULL_TREE, opaque_V2SF_type_node,
                                                 endlink)))));

  tree int_ftype_int_v2si_v2si
    = build_function_type
    (integer_type_node,
     tree_cons (NULL_TREE, integer_type_node,
                tree_cons (NULL_TREE, opaque_V2SI_type_node,
                           tree_cons (NULL_TREE, opaque_V2SI_type_node,
                                      endlink))));

  tree int_ftype_int_v2sf_v2sf
    = build_function_type
    (integer_type_node,
     tree_cons (NULL_TREE, integer_type_node,
                tree_cons (NULL_TREE, opaque_V2SF_type_node,
                           tree_cons (NULL_TREE, opaque_V2SF_type_node,
                                      endlink))));

  tree void_ftype_v2si_puint_int
    = build_function_type (void_type_node,
                           tree_cons (NULL_TREE, opaque_V2SI_type_node,
                                      tree_cons (NULL_TREE, puint_type_node,
                                                 tree_cons (NULL_TREE,
                                                            integer_type_node,
                                                            endlink))));

  tree void_ftype_v2si_puint_char
    = build_function_type (void_type_node,
                           tree_cons (NULL_TREE, opaque_V2SI_type_node,
                                      tree_cons (NULL_TREE, puint_type_node,
                                                 tree_cons (NULL_TREE,
                                                            char_type_node,
                                                            endlink))));

  tree void_ftype_v2si_pv2si_int
    = build_function_type (void_type_node,
                           tree_cons (NULL_TREE, opaque_V2SI_type_node,
                                      tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
                                                 tree_cons (NULL_TREE,
                                                            integer_type_node,
                                                            endlink))));

  tree void_ftype_v2si_pv2si_char
    = build_function_type (void_type_node,
                           tree_cons (NULL_TREE, opaque_V2SI_type_node,
                                      tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
                                                 tree_cons (NULL_TREE,
                                                            char_type_node,
                                                            endlink))));

  tree void_ftype_int
    = build_function_type (void_type_node,
                           tree_cons (NULL_TREE, integer_type_node, endlink));

  tree int_ftype_void
    = build_function_type (integer_type_node, endlink);

  tree v2si_ftype_pv2si_int
    = build_function_type (opaque_V2SI_type_node,
                           tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
                                      tree_cons (NULL_TREE, integer_type_node,
                                                 endlink)));

  tree v2si_ftype_puint_int
    = build_function_type (opaque_V2SI_type_node,
                           tree_cons (NULL_TREE, puint_type_node,
                                      tree_cons (NULL_TREE, integer_type_node,
                                                 endlink)));

  tree v2si_ftype_pushort_int
    = build_function_type (opaque_V2SI_type_node,
                           tree_cons (NULL_TREE, pushort_type_node,
                                      tree_cons (NULL_TREE, integer_type_node,
                                                 endlink)));

  tree v2si_ftype_signed_char
    = build_function_type (opaque_V2SI_type_node,
                           tree_cons (NULL_TREE, signed_char_type_node,
                                      endlink));

  /* The initialization of the simple binary and unary builtins is
     done in rs6000_common_init_builtins, but we have to enable the
     mask bits here manually because we have run out of `target_flags'
     bits.  We really need to redesign this mask business.  */

  enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
                            ARRAY_SIZE (bdesc_2arg),
                            SPE_BUILTIN_EVADDW,
                            SPE_BUILTIN_EVXOR);
  enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
                            ARRAY_SIZE (bdesc_1arg),
                            SPE_BUILTIN_EVABS,
                            SPE_BUILTIN_EVSUBFUSIAAW);
  enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
                            ARRAY_SIZE (bdesc_spe_predicates),
                            SPE_BUILTIN_EVCMPEQ,
                            SPE_BUILTIN_EVFSTSTLT);
  enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
                            ARRAY_SIZE (bdesc_spe_evsel),
                            SPE_BUILTIN_EVSEL_CMPGTS,
                            SPE_BUILTIN_EVSEL_FSTSTEQ);

  (*lang_hooks.decls.pushdecl)
    (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
                 opaque_V2SI_type_node));

  /* Initialize irregular SPE builtins.  */

  def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
  def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
  def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
  def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
  def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
  def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
  def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
  def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
  def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
  def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
  def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
  def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
  def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
  def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
  def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
  def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
  def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
  def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);

  /* Loads.  */
  def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
  def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
  def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
  def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
  def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
  def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
  def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
  def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
  def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
  def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
  def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
  def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
  def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
  def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
  def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
  def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
  def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
  def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
  def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
  def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
  def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
  def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);

  /* Predicates.  */
  d = (struct builtin_description *) bdesc_spe_predicates;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
    {
      tree type;

      switch (insn_data[d->icode].operand[1].mode)
        {
        case V2SImode:
          type = int_ftype_int_v2si_v2si;
          break;
        case V2SFmode:
          type = int_ftype_int_v2sf_v2sf;
          break;
        default:
          gcc_unreachable ();
        }

      def_builtin (d->mask, d->name, type, d->code);
    }

  /* Evsel predicates.  */
  d = (struct builtin_description *) bdesc_spe_evsel;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
    {
      tree type;

      switch (insn_data[d->icode].operand[1].mode)
        {
        case V2SImode:
          type = v2si_ftype_4_v2si;
          break;
        case V2SFmode:
          type = v2sf_ftype_4_v2sf;
          break;
        default:
          gcc_unreachable ();
        }

      def_builtin (d->mask, d->name, type, d->code);
    }
}

static void
altivec_init_builtins (void)
{
  struct builtin_description *d;
  struct builtin_description_predicates *dp;
  size_t i;
  tree ftype;
  tree decl;

  tree pfloat_type_node = build_pointer_type (float_type_node);
  tree pint_type_node = build_pointer_type (integer_type_node);
  tree pshort_type_node = build_pointer_type (short_integer_type_node);
  tree pchar_type_node = build_pointer_type (char_type_node);

  tree pvoid_type_node = build_pointer_type (void_type_node);

  tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
  tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
  tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
  tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));

  tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));

  tree int_ftype_opaque
    = build_function_type_list (integer_type_node,
				opaque_V4SI_type_node, NULL_TREE);

  tree opaque_ftype_opaque_int
    = build_function_type_list (opaque_V4SI_type_node,
				opaque_V4SI_type_node, integer_type_node, NULL_TREE);
  tree opaque_ftype_opaque_opaque_int
    = build_function_type_list (opaque_V4SI_type_node,
				opaque_V4SI_type_node, opaque_V4SI_type_node,
				integer_type_node, NULL_TREE);
  tree int_ftype_int_opaque_opaque
    = build_function_type_list (integer_type_node,
				integer_type_node, opaque_V4SI_type_node,
				opaque_V4SI_type_node, NULL_TREE);
  tree int_ftype_int_v4si_v4si
    = build_function_type_list (integer_type_node,
				integer_type_node, V4SI_type_node,
				V4SI_type_node, NULL_TREE);
  tree v4sf_ftype_pcfloat
    = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
  tree void_ftype_pfloat_v4sf
    = build_function_type_list (void_type_node,
				pfloat_type_node, V4SF_type_node, NULL_TREE);
  tree v4si_ftype_pcint
    = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
  tree void_ftype_pint_v4si
    = build_function_type_list (void_type_node,
				pint_type_node, V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_pcshort
    = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
  tree void_ftype_pshort_v8hi
    = build_function_type_list (void_type_node,
				pshort_type_node, V8HI_type_node, NULL_TREE);
  tree v16qi_ftype_pcchar
    = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
  tree void_ftype_pchar_v16qi
    = build_function_type_list (void_type_node,
				pchar_type_node, V16QI_type_node, NULL_TREE);
  tree void_ftype_v4si
    = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_void
    = build_function_type (V8HI_type_node, void_list_node);
  tree void_ftype_void
    = build_function_type (void_type_node, void_list_node);
  tree void_ftype_int
    = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);

  tree opaque_ftype_long_pcvoid
    = build_function_type_list (opaque_V4SI_type_node,
				long_integer_type_node, pcvoid_type_node, NULL_TREE);
  tree v16qi_ftype_long_pcvoid
    = build_function_type_list (V16QI_type_node,
				long_integer_type_node, pcvoid_type_node, NULL_TREE);
  tree v8hi_ftype_long_pcvoid
    = build_function_type_list (V8HI_type_node,
				long_integer_type_node, pcvoid_type_node, NULL_TREE);
  tree v4si_ftype_long_pcvoid
    = build_function_type_list (V4SI_type_node,
				long_integer_type_node, pcvoid_type_node, NULL_TREE);

  tree void_ftype_opaque_long_pvoid
    = build_function_type_list (void_type_node,
				opaque_V4SI_type_node, long_integer_type_node,
				pvoid_type_node, NULL_TREE);
  tree void_ftype_v4si_long_pvoid
    = build_function_type_list (void_type_node,
				V4SI_type_node, long_integer_type_node,
				pvoid_type_node, NULL_TREE);
  tree void_ftype_v16qi_long_pvoid
    = build_function_type_list (void_type_node,
				V16QI_type_node, long_integer_type_node,
				pvoid_type_node, NULL_TREE);
  tree void_ftype_v8hi_long_pvoid
    = build_function_type_list (void_type_node,
				V8HI_type_node, long_integer_type_node,
				pvoid_type_node, NULL_TREE);
  tree int_ftype_int_v8hi_v8hi
    = build_function_type_list (integer_type_node,
				integer_type_node, V8HI_type_node,
				V8HI_type_node, NULL_TREE);
  tree int_ftype_int_v16qi_v16qi
    = build_function_type_list (integer_type_node,
				integer_type_node, V16QI_type_node,
				V16QI_type_node, NULL_TREE);
  tree int_ftype_int_v4sf_v4sf
    = build_function_type_list (integer_type_node,
				integer_type_node, V4SF_type_node,
				V4SF_type_node, NULL_TREE);
  tree v4si_ftype_v4si
    = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi
    = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi
    = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf
    = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
  tree void_ftype_pcvoid_int_int
    = build_function_type_list (void_type_node,
				pcvoid_type_node, integer_type_node,
				integer_type_node, NULL_TREE);

  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
	       ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
	       ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
	       ALTIVEC_BUILTIN_LD_INTERNAL_4si);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
	       ALTIVEC_BUILTIN_ST_INTERNAL_4si);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
	       ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
	       ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
	       ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
	       ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_int, ALTIVEC_BUILTIN_DSS);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_ld", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LD);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_lde", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_ldl", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDL);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSL);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSR);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEBX);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEHX);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEWX);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_st", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_ST);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_ste", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_stl", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STL);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_stvewx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEWX);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_stvebx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEBX);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_stvehx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEHX);

  def_builtin (MASK_ALTIVEC, "__builtin_vec_step", int_ftype_opaque, ALTIVEC_BUILTIN_VEC_STEP);

  def_builtin (MASK_ALTIVEC, "__builtin_vec_sld", opaque_ftype_opaque_opaque_int, ALTIVEC_BUILTIN_VEC_SLD);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_splat", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_SPLAT);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltw", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTW);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_vsplth", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTH);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltb", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTB);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_ctf", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTF);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfsx", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFSX);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfux", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFUX);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_cts", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTS);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_ctu", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTU);

  /* Add the DST variants.  */
  d = (struct builtin_description *) bdesc_dst;
  for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
    def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);

  /* Initialize the predicates.  */
  dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
  for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
    {
      enum machine_mode mode1;
      tree type;
      bool is_overloaded = dp->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
			   && dp->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;

      if (is_overloaded)
	mode1 = VOIDmode;
      else
	mode1 = insn_data[dp->icode].operand[1].mode;

      switch (mode1)
	{
	case VOIDmode:
	  type = int_ftype_int_opaque_opaque;
	  break;
	case V4SImode:
	  type = int_ftype_int_v4si_v4si;
	  break;
	case V8HImode:
	  type = int_ftype_int_v8hi_v8hi;
	  break;
	case V16QImode:
	  type = int_ftype_int_v16qi_v16qi;
	  break;
	case V4SFmode:
	  type = int_ftype_int_v4sf_v4sf;
	  break;
	default:
	  gcc_unreachable ();
	}

      def_builtin (dp->mask, dp->name, type, dp->code);
    }

  /* Initialize the abs* operators.  */
  d = (struct builtin_description *) bdesc_abs;
  for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
    {
      enum machine_mode mode0;
      tree type;

      mode0 = insn_data[d->icode].operand[0].mode;

      switch (mode0)
	{
	case V4SImode:
	  type = v4si_ftype_v4si;
	  break;
	case V8HImode:
	  type = v8hi_ftype_v8hi;
	  break;
	case V16QImode:
	  type = v16qi_ftype_v16qi;
	  break;
	case V4SFmode:
	  type = v4sf_ftype_v4sf;
	  break;
	default:
	  gcc_unreachable ();
	}

      def_builtin (d->mask, d->name, type, d->code);
    }

  /* Initialize target builtin that implements
     targetm.vectorize.builtin_mask_for_load.  */
  decl = add_builtin_function ("__builtin_altivec_mask_for_load",
			       v16qi_ftype_long_pcvoid,
			       ALTIVEC_BUILTIN_MASK_FOR_LOAD,
			       BUILT_IN_MD, NULL, NULL_TREE);
  TREE_READONLY (decl) = 1;
  /* Record the decl.  Will be used by rs6000_builtin_mask_for_load.  */
  altivec_builtin_mask_for_load = decl;
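
  /* The decl recorded above is handed back by the
     builtin_mask_for_load target hook; the vectorizer calls it to
     obtain the permute control vector (roughly an lvsl/lvsr of the
     address) that realigns a misaligned load via vperm.  */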

  /* Access to the vec_init patterns.  */
  ftype = build_function_type_list (V4SI_type_node, integer_type_node,
				    integer_type_node, integer_type_node,
				    integer_type_node, NULL_TREE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4si", ftype,
	       ALTIVEC_BUILTIN_VEC_INIT_V4SI);

  ftype = build_function_type_list (V8HI_type_node, short_integer_type_node,
				    short_integer_type_node,
				    short_integer_type_node,
				    short_integer_type_node,
				    short_integer_type_node,
				    short_integer_type_node,
				    short_integer_type_node,
				    short_integer_type_node, NULL_TREE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v8hi", ftype,
	       ALTIVEC_BUILTIN_VEC_INIT_V8HI);

  ftype = build_function_type_list (V16QI_type_node, char_type_node,
				    char_type_node, char_type_node,
				    char_type_node, char_type_node,
				    char_type_node, char_type_node,
				    char_type_node, char_type_node,
				    char_type_node, char_type_node,
				    char_type_node, char_type_node,
				    char_type_node, char_type_node,
				    char_type_node, NULL_TREE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v16qi", ftype,
	       ALTIVEC_BUILTIN_VEC_INIT_V16QI);

  ftype = build_function_type_list (V4SF_type_node, float_type_node,
				    float_type_node, float_type_node,
				    float_type_node, NULL_TREE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4sf", ftype,
	       ALTIVEC_BUILTIN_VEC_INIT_V4SF);
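
  /* With the definitions above in place, a front end can expand, e.g.,

	vector int x = __builtin_vec_init_v4si (1, 2, 3, 4);

     into the corresponding vec_init pattern.  */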

  /* Access to the vec_set patterns.  */
  ftype = build_function_type_list (V4SI_type_node, V4SI_type_node,
				    intSI_type_node,
				    integer_type_node, NULL_TREE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4si", ftype,
	       ALTIVEC_BUILTIN_VEC_SET_V4SI);

  ftype = build_function_type_list (V8HI_type_node, V8HI_type_node,
				    intHI_type_node,
				    integer_type_node, NULL_TREE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v8hi", ftype,
	       ALTIVEC_BUILTIN_VEC_SET_V8HI);

  ftype = build_function_type_list (V16QI_type_node, V16QI_type_node,
				    intQI_type_node,
				    integer_type_node, NULL_TREE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v16qi", ftype,
	       ALTIVEC_BUILTIN_VEC_SET_V16QI);

  ftype = build_function_type_list (V4SF_type_node, V4SF_type_node,
				    float_type_node,
				    integer_type_node, NULL_TREE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4sf", ftype,
	       ALTIVEC_BUILTIN_VEC_SET_V4SF);

  /* Access to the vec_extract patterns.  */
  ftype = build_function_type_list (intSI_type_node, V4SI_type_node,
				    integer_type_node, NULL_TREE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4si", ftype,
	       ALTIVEC_BUILTIN_VEC_EXT_V4SI);

  ftype = build_function_type_list (intHI_type_node, V8HI_type_node,
				    integer_type_node, NULL_TREE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v8hi", ftype,
	       ALTIVEC_BUILTIN_VEC_EXT_V8HI);

  ftype = build_function_type_list (intQI_type_node, V16QI_type_node,
				    integer_type_node, NULL_TREE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v16qi", ftype,
	       ALTIVEC_BUILTIN_VEC_EXT_V16QI);

  ftype = build_function_type_list (float_type_node, V4SF_type_node,
				    integer_type_node, NULL_TREE);
  def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4sf", ftype,
	       ALTIVEC_BUILTIN_VEC_EXT_V4SF);
}
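
/* A sketch of how the extract builtins just defined appear in user
   code (types assumed for illustration):

	vector float vf;
	float f = __builtin_vec_ext_v4sf (vf, 2);

   where the second operand selects the element.  */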

static void
rs6000_common_init_builtins (void)
{
  struct builtin_description *d;
  size_t i;

  tree v4sf_ftype_v4sf_v4sf_v16qi
    = build_function_type_list (V4SF_type_node,
				V4SF_type_node, V4SF_type_node,
				V16QI_type_node, NULL_TREE);
  tree v4si_ftype_v4si_v4si_v16qi
    = build_function_type_list (V4SI_type_node,
				V4SI_type_node, V4SI_type_node,
				V16QI_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_v8hi_v16qi
    = build_function_type_list (V8HI_type_node,
				V8HI_type_node, V8HI_type_node,
				V16QI_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi_v16qi_v16qi
    = build_function_type_list (V16QI_type_node,
				V16QI_type_node, V16QI_type_node,
				V16QI_type_node, NULL_TREE);
  tree v4si_ftype_int
    = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
  tree v8hi_ftype_int
    = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
  tree v16qi_ftype_int
    = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
  tree v8hi_ftype_v16qi
    = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf
    = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);

  tree v2si_ftype_v2si_v2si
    = build_function_type_list (opaque_V2SI_type_node,
				opaque_V2SI_type_node,
				opaque_V2SI_type_node, NULL_TREE);

  tree v2sf_ftype_v2sf_v2sf
    = build_function_type_list (opaque_V2SF_type_node,
				opaque_V2SF_type_node,
				opaque_V2SF_type_node, NULL_TREE);

  tree v2si_ftype_int_int
    = build_function_type_list (opaque_V2SI_type_node,
				integer_type_node, integer_type_node,
				NULL_TREE);

  tree opaque_ftype_opaque
    = build_function_type_list (opaque_V4SI_type_node,
				opaque_V4SI_type_node, NULL_TREE);

  tree v2si_ftype_v2si
    = build_function_type_list (opaque_V2SI_type_node,
				opaque_V2SI_type_node, NULL_TREE);

  tree v2sf_ftype_v2sf
    = build_function_type_list (opaque_V2SF_type_node,
				opaque_V2SF_type_node, NULL_TREE);

  tree v2sf_ftype_v2si
    = build_function_type_list (opaque_V2SF_type_node,
				opaque_V2SI_type_node, NULL_TREE);

  tree v2si_ftype_v2sf
    = build_function_type_list (opaque_V2SI_type_node,
				opaque_V2SF_type_node, NULL_TREE);

  tree v2si_ftype_v2si_char
    = build_function_type_list (opaque_V2SI_type_node,
				opaque_V2SI_type_node,
				char_type_node, NULL_TREE);

  tree v2si_ftype_int_char
    = build_function_type_list (opaque_V2SI_type_node,
				integer_type_node, char_type_node, NULL_TREE);

  tree v2si_ftype_char
    = build_function_type_list (opaque_V2SI_type_node,
				char_type_node, NULL_TREE);

  tree int_ftype_int_int
    = build_function_type_list (integer_type_node,
				integer_type_node, integer_type_node,
				NULL_TREE);

  tree opaque_ftype_opaque_opaque
    = build_function_type_list (opaque_V4SI_type_node,
				opaque_V4SI_type_node, opaque_V4SI_type_node, NULL_TREE);
  tree v4si_ftype_v4si_v4si
    = build_function_type_list (V4SI_type_node,
				V4SI_type_node, V4SI_type_node, NULL_TREE);
  tree v4sf_ftype_v4si_int
    = build_function_type_list (V4SF_type_node,
				V4SI_type_node, integer_type_node, NULL_TREE);
  tree v4si_ftype_v4sf_int
    = build_function_type_list (V4SI_type_node,
				V4SF_type_node, integer_type_node, NULL_TREE);
  tree v4si_ftype_v4si_int
    = build_function_type_list (V4SI_type_node,
				V4SI_type_node, integer_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_int
    = build_function_type_list (V8HI_type_node,
				V8HI_type_node, integer_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi_int
    = build_function_type_list (V16QI_type_node,
				V16QI_type_node, integer_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi_v16qi_int
    = build_function_type_list (V16QI_type_node,
				V16QI_type_node, V16QI_type_node,
				integer_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_v8hi_int
    = build_function_type_list (V8HI_type_node,
				V8HI_type_node, V8HI_type_node,
				integer_type_node, NULL_TREE);
  tree v4si_ftype_v4si_v4si_int
    = build_function_type_list (V4SI_type_node,
				V4SI_type_node, V4SI_type_node,
				integer_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf_v4sf_int
    = build_function_type_list (V4SF_type_node,
				V4SF_type_node, V4SF_type_node,
				integer_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf_v4sf
    = build_function_type_list (V4SF_type_node,
				V4SF_type_node, V4SF_type_node, NULL_TREE);
  tree opaque_ftype_opaque_opaque_opaque
    = build_function_type_list (opaque_V4SI_type_node,
				opaque_V4SI_type_node, opaque_V4SI_type_node,
				opaque_V4SI_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf_v4sf_v4si
    = build_function_type_list (V4SF_type_node,
				V4SF_type_node, V4SF_type_node,
				V4SI_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf_v4sf_v4sf
    = build_function_type_list (V4SF_type_node,
				V4SF_type_node, V4SF_type_node,
				V4SF_type_node, NULL_TREE);
  tree v4si_ftype_v4si_v4si_v4si
    = build_function_type_list (V4SI_type_node,
				V4SI_type_node, V4SI_type_node,
				V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_v8hi
    = build_function_type_list (V8HI_type_node,
				V8HI_type_node, V8HI_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_v8hi_v8hi
    = build_function_type_list (V8HI_type_node,
				V8HI_type_node, V8HI_type_node,
				V8HI_type_node, NULL_TREE);
  tree v4si_ftype_v8hi_v8hi_v4si
    = build_function_type_list (V4SI_type_node,
				V8HI_type_node, V8HI_type_node,
				V4SI_type_node, NULL_TREE);
  tree v4si_ftype_v16qi_v16qi_v4si
    = build_function_type_list (V4SI_type_node,
				V16QI_type_node, V16QI_type_node,
				V4SI_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi_v16qi
    = build_function_type_list (V16QI_type_node,
				V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree v4si_ftype_v4sf_v4sf
    = build_function_type_list (V4SI_type_node,
				V4SF_type_node, V4SF_type_node, NULL_TREE);
  tree v8hi_ftype_v16qi_v16qi
    = build_function_type_list (V8HI_type_node,
				V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree v4si_ftype_v8hi_v8hi
    = build_function_type_list (V4SI_type_node,
				V8HI_type_node, V8HI_type_node, NULL_TREE);
  tree v8hi_ftype_v4si_v4si
    = build_function_type_list (V8HI_type_node,
				V4SI_type_node, V4SI_type_node, NULL_TREE);
  tree v16qi_ftype_v8hi_v8hi
    = build_function_type_list (V16QI_type_node,
				V8HI_type_node, V8HI_type_node, NULL_TREE);
  tree v4si_ftype_v16qi_v4si
    = build_function_type_list (V4SI_type_node,
				V16QI_type_node, V4SI_type_node, NULL_TREE);
  tree v4si_ftype_v16qi_v16qi
    = build_function_type_list (V4SI_type_node,
				V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree v4si_ftype_v8hi_v4si
    = build_function_type_list (V4SI_type_node,
				V8HI_type_node, V4SI_type_node, NULL_TREE);
  tree v4si_ftype_v8hi
    = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
  tree int_ftype_v4si_v4si
    = build_function_type_list (integer_type_node,
				V4SI_type_node, V4SI_type_node, NULL_TREE);
  tree int_ftype_v4sf_v4sf
    = build_function_type_list (integer_type_node,
				V4SF_type_node, V4SF_type_node, NULL_TREE);
  tree int_ftype_v16qi_v16qi
    = build_function_type_list (integer_type_node,
				V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree int_ftype_v8hi_v8hi
    = build_function_type_list (integer_type_node,
				V8HI_type_node, V8HI_type_node, NULL_TREE);

  /* Add the simple ternary operators.  */
  d = (struct builtin_description *) bdesc_3arg;
  for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
    {
      enum machine_mode mode0, mode1, mode2, mode3;
      tree type;
      bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
			   && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;

      if (is_overloaded)
	{
	  mode0 = VOIDmode;
	  mode1 = VOIDmode;
	  mode2 = VOIDmode;
	  mode3 = VOIDmode;
	}
      else
	{
	  if (d->name == 0 || d->icode == CODE_FOR_nothing)
	    continue;

	  mode0 = insn_data[d->icode].operand[0].mode;
	  mode1 = insn_data[d->icode].operand[1].mode;
	  mode2 = insn_data[d->icode].operand[2].mode;
	  mode3 = insn_data[d->icode].operand[3].mode;
	}

      /* When all four are of the same mode.  */
      if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
	{
	  switch (mode0)
	    {
	    case VOIDmode:
	      type = opaque_ftype_opaque_opaque_opaque;
	      break;
	    case V4SImode:
	      type = v4si_ftype_v4si_v4si_v4si;
	      break;
	    case V4SFmode:
	      type = v4sf_ftype_v4sf_v4sf_v4sf;
	      break;
	    case V8HImode:
	      type = v8hi_ftype_v8hi_v8hi_v8hi;
	      break;
	    case V16QImode:
	      type = v16qi_ftype_v16qi_v16qi_v16qi;
	      break;
	    default:
	      gcc_unreachable ();
	    }
	}
      else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
	{
	  switch (mode0)
	    {
	    case V4SImode:
	      type = v4si_ftype_v4si_v4si_v16qi;
	      break;
	    case V4SFmode:
	      type = v4sf_ftype_v4sf_v4sf_v16qi;
	      break;
	    case V8HImode:
	      type = v8hi_ftype_v8hi_v8hi_v16qi;
	      break;
	    case V16QImode:
	      type = v16qi_ftype_v16qi_v16qi_v16qi;
	      break;
	    default:
	      gcc_unreachable ();
	    }
	}
      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
	       && mode3 == V4SImode)
	type = v4si_ftype_v16qi_v16qi_v4si;
      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
	       && mode3 == V4SImode)
	type = v4si_ftype_v8hi_v8hi_v4si;
      else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
	       && mode3 == V4SImode)
	type = v4sf_ftype_v4sf_v4sf_v4si;

      /* vchar, vchar, vchar, 4 bit literal.  */
      else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
	       && mode3 == QImode)
	type = v16qi_ftype_v16qi_v16qi_int;

      /* vshort, vshort, vshort, 4 bit literal.  */
      else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
	       && mode3 == QImode)
	type = v8hi_ftype_v8hi_v8hi_int;

      /* vint, vint, vint, 4 bit literal.  */
      else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
	       && mode3 == QImode)
	type = v4si_ftype_v4si_v4si_int;

      /* vfloat, vfloat, vfloat, 4 bit literal.  */
      else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
	       && mode3 == QImode)
	type = v4sf_ftype_v4sf_v4sf_int;

      else
	gcc_unreachable ();

      def_builtin (d->mask, d->name, type, d->code);
    }
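
  /* As a concrete example of the selection above: vperm on V4SI has
     operand modes (V4SI, V4SI, V4SI, V16QI), so it falls into the
     second group and is given type v4si_ftype_v4si_v4si_v16qi.  */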

  /* Add the simple binary operators.  */
  d = (struct builtin_description *) bdesc_2arg;
  for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
    {
      enum machine_mode mode0, mode1, mode2;
      tree type;
      bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
			   && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;

      if (is_overloaded)
	{
	  mode0 = VOIDmode;
	  mode1 = VOIDmode;
	  mode2 = VOIDmode;
	}
      else
	{
	  if (d->name == 0 || d->icode == CODE_FOR_nothing)
	    continue;

	  mode0 = insn_data[d->icode].operand[0].mode;
	  mode1 = insn_data[d->icode].operand[1].mode;
	  mode2 = insn_data[d->icode].operand[2].mode;
	}

      /* When all three operands are of the same mode.  */
      if (mode0 == mode1 && mode1 == mode2)
	{
	  switch (mode0)
	    {
	    case VOIDmode:
	      type = opaque_ftype_opaque_opaque;
	      break;
	    case V4SFmode:
	      type = v4sf_ftype_v4sf_v4sf;
	      break;
	    case V4SImode:
	      type = v4si_ftype_v4si_v4si;
	      break;
	    case V16QImode:
	      type = v16qi_ftype_v16qi_v16qi;
	      break;
	    case V8HImode:
	      type = v8hi_ftype_v8hi_v8hi;
	      break;
	    case V2SImode:
	      type = v2si_ftype_v2si_v2si;
	      break;
	    case V2SFmode:
	      type = v2sf_ftype_v2sf_v2sf;
	      break;
	    case SImode:
	      type = int_ftype_int_int;
	      break;
	    default:
	      gcc_unreachable ();
	    }
	}

      /* A few other combos we really don't want to do manually.  */

      /* vint, vfloat, vfloat.  */
      else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
	type = v4si_ftype_v4sf_v4sf;

      /* vshort, vchar, vchar.  */
      else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
	type = v8hi_ftype_v16qi_v16qi;

      /* vint, vshort, vshort.  */
      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
	type = v4si_ftype_v8hi_v8hi;

      /* vshort, vint, vint.  */
      else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
	type = v8hi_ftype_v4si_v4si;

      /* vchar, vshort, vshort.  */
      else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
	type = v16qi_ftype_v8hi_v8hi;

      /* vint, vchar, vint.  */
      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
	type = v4si_ftype_v16qi_v4si;

      /* vint, vchar, vchar.  */
      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
	type = v4si_ftype_v16qi_v16qi;

      /* vint, vshort, vint.  */
      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
	type = v4si_ftype_v8hi_v4si;

      /* vint, vint, 5 bit literal.  */
      else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
	type = v4si_ftype_v4si_int;

      /* vshort, vshort, 5 bit literal.  */
      else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
	type = v8hi_ftype_v8hi_int;

      /* vchar, vchar, 5 bit literal.  */
      else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
	type = v16qi_ftype_v16qi_int;

      /* vfloat, vint, 5 bit literal.  */
      else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
	type = v4sf_ftype_v4si_int;

      /* vint, vfloat, 5 bit literal.  */
      else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
	type = v4si_ftype_v4sf_int;

      else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
	type = v2si_ftype_int_int;

      else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
	type = v2si_ftype_v2si_char;

      else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
	type = v2si_ftype_int_char;

      /* int, x, x.  */
      else
	{
	  gcc_assert (mode0 == SImode);
	  switch (mode1)
	    {
	    case V4SImode:
	      type = int_ftype_v4si_v4si;
	      break;
	    case V4SFmode:
	      type = int_ftype_v4sf_v4sf;
	      break;
	    case V16QImode:
	      type = int_ftype_v16qi_v16qi;
	      break;
	    case V8HImode:
	      type = int_ftype_v8hi_v8hi;
	      break;
	    default:
	      gcc_unreachable ();
	    }
	}

      def_builtin (d->mask, d->name, type, d->code);
    }

  /* Add the simple unary operators.  */
  d = (struct builtin_description *) bdesc_1arg;
  for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
    {
      enum machine_mode mode0, mode1;
      tree type;
      bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
			   && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;

      if (is_overloaded)
	{
	  mode0 = VOIDmode;
	  mode1 = VOIDmode;
	}
      else
	{
	  if (d->name == 0 || d->icode == CODE_FOR_nothing)
	    continue;

	  mode0 = insn_data[d->icode].operand[0].mode;
	  mode1 = insn_data[d->icode].operand[1].mode;
	}

      if (mode0 == V4SImode && mode1 == QImode)
	type = v4si_ftype_int;
      else if (mode0 == V8HImode && mode1 == QImode)
	type = v8hi_ftype_int;
      else if (mode0 == V16QImode && mode1 == QImode)
	type = v16qi_ftype_int;
      else if (mode0 == VOIDmode && mode1 == VOIDmode)
	type = opaque_ftype_opaque;
      else if (mode0 == V4SFmode && mode1 == V4SFmode)
	type = v4sf_ftype_v4sf;
      else if (mode0 == V8HImode && mode1 == V16QImode)
	type = v8hi_ftype_v16qi;
      else if (mode0 == V4SImode && mode1 == V8HImode)
	type = v4si_ftype_v8hi;
      else if (mode0 == V2SImode && mode1 == V2SImode)
	type = v2si_ftype_v2si;
      else if (mode0 == V2SFmode && mode1 == V2SFmode)
	type = v2sf_ftype_v2sf;
      else if (mode0 == V2SFmode && mode1 == V2SImode)
	type = v2sf_ftype_v2si;
      else if (mode0 == V2SImode && mode1 == V2SFmode)
	type = v2si_ftype_v2sf;
      else if (mode0 == V2SImode && mode1 == QImode)
	type = v2si_ftype_char;
      else
	gcc_unreachable ();

      def_builtin (d->mask, d->name, type, d->code);
    }
}

static void
rs6000_init_libfuncs (void)
{
  if (DEFAULT_ABI != ABI_V4 && TARGET_XCOFF
      && !TARGET_POWER2 && !TARGET_POWERPC)
    {
      /* AIX library routines for float->int conversion.  */
      set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
      set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
      set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
      set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
    }

  if (!TARGET_IEEEQUAD)
    /* AIX/Darwin/64-bit Linux quad floating point routines.  */
    if (!TARGET_XL_COMPAT)
      {
	set_optab_libfunc (add_optab, TFmode, "__gcc_qadd");
	set_optab_libfunc (sub_optab, TFmode, "__gcc_qsub");
	set_optab_libfunc (smul_optab, TFmode, "__gcc_qmul");
	set_optab_libfunc (sdiv_optab, TFmode, "__gcc_qdiv");

	if (!(TARGET_HARD_FLOAT && (TARGET_FPRS || TARGET_E500_DOUBLE)))
	  {
	    set_optab_libfunc (neg_optab, TFmode, "__gcc_qneg");
	    set_optab_libfunc (eq_optab, TFmode, "__gcc_qeq");
	    set_optab_libfunc (ne_optab, TFmode, "__gcc_qne");
	    set_optab_libfunc (gt_optab, TFmode, "__gcc_qgt");
	    set_optab_libfunc (ge_optab, TFmode, "__gcc_qge");
	    set_optab_libfunc (lt_optab, TFmode, "__gcc_qlt");
	    set_optab_libfunc (le_optab, TFmode, "__gcc_qle");
	    set_optab_libfunc (unord_optab, TFmode, "__gcc_qunord");

	    set_conv_libfunc (sext_optab, TFmode, SFmode, "__gcc_stoq");
	    set_conv_libfunc (sext_optab, TFmode, DFmode, "__gcc_dtoq");
	    set_conv_libfunc (trunc_optab, SFmode, TFmode, "__gcc_qtos");
	    set_conv_libfunc (trunc_optab, DFmode, TFmode, "__gcc_qtod");
	    set_conv_libfunc (sfix_optab, SImode, TFmode, "__gcc_qtoi");
	    set_conv_libfunc (ufix_optab, SImode, TFmode, "__gcc_qtou");
	    set_conv_libfunc (sfloat_optab, TFmode, SImode, "__gcc_itoq");
	    set_conv_libfunc (ufloat_optab, TFmode, SImode, "__gcc_utoq");
	  }
      }
    else
      {
	set_optab_libfunc (add_optab, TFmode, "_xlqadd");
	set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
	set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
	set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
      }
  else
    {
      /* 32-bit SVR4 quad floating point routines.  */

      set_optab_libfunc (add_optab, TFmode, "_q_add");
      set_optab_libfunc (sub_optab, TFmode, "_q_sub");
      set_optab_libfunc (neg_optab, TFmode, "_q_neg");
      set_optab_libfunc (smul_optab, TFmode, "_q_mul");
      set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
      if (TARGET_PPC_GPOPT || TARGET_POWER2)
	set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");

      set_optab_libfunc (eq_optab, TFmode, "_q_feq");
      set_optab_libfunc (ne_optab, TFmode, "_q_fne");
      set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
      set_optab_libfunc (ge_optab, TFmode, "_q_fge");
      set_optab_libfunc (lt_optab, TFmode, "_q_flt");
      set_optab_libfunc (le_optab, TFmode, "_q_fle");

      set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
      set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
      set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
      set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
      set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
      set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
      set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
      set_conv_libfunc (ufloat_optab, TFmode, SImode, "_q_utoq");
    }
}

/* Expand a block clear operation, and return 1 if successful.  Return 0
   if we should let the compiler generate normal code.

   operands[0] is the destination
   operands[1] is the length
   operands[3] is the alignment */

int
expand_block_clear (rtx operands[])
{
  rtx orig_dest = operands[0];
  rtx bytes_rtx = operands[1];
  rtx align_rtx = operands[3];
  bool constp = (GET_CODE (bytes_rtx) == CONST_INT);
  HOST_WIDE_INT align;
  HOST_WIDE_INT bytes;
  int offset;
  int clear_bytes;
  int clear_step;

  /* If this is not a fixed size move, just call memcpy */
  if (! constp)
    return 0;

  /* This must be a fixed size alignment  */
  gcc_assert (GET_CODE (align_rtx) == CONST_INT);
  align = INTVAL (align_rtx) * BITS_PER_UNIT;

  /* Anything to clear? */
  bytes = INTVAL (bytes_rtx);
  if (bytes <= 0)
    return 1;

  /* Use the builtin memset after a point, to avoid huge code bloat.
     When optimize_size, avoid any significant code bloat; calling
     memset is about 4 instructions, so allow for one instruction to
     load zero and three to do clearing.  */
  if (TARGET_ALTIVEC && align >= 128)
    clear_step = 16;
  else if (TARGET_POWERPC64 && align >= 32)
    clear_step = 8;
  else
    clear_step = 4;

  if (optimize_size && bytes > 3 * clear_step)
    return 0;
  if (! optimize_size && bytes > 8 * clear_step)
    return 0;

  for (offset = 0; bytes > 0; offset += clear_bytes, bytes -= clear_bytes)
    {
      enum machine_mode mode = BLKmode;
      rtx dest;

      if (bytes >= 16 && TARGET_ALTIVEC && align >= 128)
	{
	  clear_bytes = 16;
	  mode = V4SImode;
	}
      else if (bytes >= 8 && TARGET_POWERPC64
	       /* 64-bit loads and stores require word-aligned
		  displacements.  */
	       && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
	{
	  clear_bytes = 8;
	  mode = DImode;
	}
      else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
	{			/* move 4 bytes */
	  clear_bytes = 4;
	  mode = SImode;
	}
      else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
	{			/* move 2 bytes */
	  clear_bytes = 2;
	  mode = HImode;
	}
      else /* move 1 byte at a time */
	{
	  clear_bytes = 1;
	  mode = QImode;
	}

      dest = adjust_address (orig_dest, mode, offset);

      emit_move_insn (dest, CONST0_RTX (mode));
    }

  return 1;
}
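
/* For instance, clearing a 16-byte block with 4-byte alignment on a
   32-bit target takes the SImode arm above four times, emitting word
   stores of zero at offsets 0, 4, 8 and 12.  */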

/* Expand a block move operation, and return 1 if successful.  Return 0
   if we should let the compiler generate normal code.

   operands[0] is the destination
   operands[1] is the source
   operands[2] is the length
   operands[3] is the alignment */

#define MAX_MOVE_REG 4

int
expand_block_move (rtx operands[])
{
  rtx orig_dest = operands[0];
  rtx orig_src = operands[1];
  rtx bytes_rtx = operands[2];
  rtx align_rtx = operands[3];
  int constp = (GET_CODE (bytes_rtx) == CONST_INT);
  int align;
  int bytes;
  int offset;
  int move_bytes;
  rtx stores[MAX_MOVE_REG];
  int num_reg = 0;

  /* If this is not a fixed size move, just call memcpy */
  if (! constp)
    return 0;

  /* This must be a fixed size alignment */
  gcc_assert (GET_CODE (align_rtx) == CONST_INT);
  align = INTVAL (align_rtx) * BITS_PER_UNIT;

  /* Anything to move? */
  bytes = INTVAL (bytes_rtx);
  if (bytes <= 0)
    return 1;

  /* store_one_arg depends on expand_block_move to handle at least the size of
     reg_parm_stack_space.  */
  if (bytes > (TARGET_POWERPC64 ? 64 : 32))
    return 0;

  for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
    {
      union {
	rtx (*movmemsi) (rtx, rtx, rtx, rtx);
	rtx (*mov) (rtx, rtx);
      } gen_func;
      enum machine_mode mode = BLKmode;
      rtx src, dest;

      /* Altivec first, since it will be faster than a string move
	 when it applies, and usually not significantly larger.  */
      if (TARGET_ALTIVEC && bytes >= 16 && align >= 128)
	{
	  move_bytes = 16;
	  mode = V4SImode;
	  gen_func.mov = gen_movv4si;
	}
      else if (TARGET_STRING
	       && bytes > 24	/* move up to 32 bytes at a time */
	       && ! fixed_regs[5]
	       && ! fixed_regs[6]
	       && ! fixed_regs[7]
	       && ! fixed_regs[8]
	       && ! fixed_regs[9]
	       && ! fixed_regs[10]
	       && ! fixed_regs[11]
	       && ! fixed_regs[12])
	{
	  move_bytes = (bytes > 32) ? 32 : bytes;
	  gen_func.movmemsi = gen_movmemsi_8reg;
	}
      else if (TARGET_STRING
	       && bytes > 16	/* move up to 24 bytes at a time */
	       && ! fixed_regs[5]
	       && ! fixed_regs[6]
	       && ! fixed_regs[7]
	       && ! fixed_regs[8]
	       && ! fixed_regs[9]
	       && ! fixed_regs[10])
	{
	  move_bytes = (bytes > 24) ? 24 : bytes;
	  gen_func.movmemsi = gen_movmemsi_6reg;
	}
      else if (TARGET_STRING
	       && bytes > 8	/* move up to 16 bytes at a time */
	       && ! fixed_regs[5]
	       && ! fixed_regs[6]
	       && ! fixed_regs[7]
	       && ! fixed_regs[8])
	{
	  move_bytes = (bytes > 16) ? 16 : bytes;
	  gen_func.movmemsi = gen_movmemsi_4reg;
	}
      else if (bytes >= 8 && TARGET_POWERPC64
	       /* 64-bit loads and stores require word-aligned
		  displacements.  */
	       && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
	{
	  move_bytes = 8;
	  mode = DImode;
	  gen_func.mov = gen_movdi;
	}
      else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
	{			/* move up to 8 bytes at a time */
	  move_bytes = (bytes > 8) ? 8 : bytes;
	  gen_func.movmemsi = gen_movmemsi_2reg;
	}
      else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
	{			/* move 4 bytes */
	  move_bytes = 4;
	  mode = SImode;
	  gen_func.mov = gen_movsi;
	}
      else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
	{			/* move 2 bytes */
	  move_bytes = 2;
	  mode = HImode;
	  gen_func.mov = gen_movhi;
	}
      else if (TARGET_STRING && bytes > 1)
	{			/* move up to 4 bytes at a time */
	  move_bytes = (bytes > 4) ? 4 : bytes;
	  gen_func.movmemsi = gen_movmemsi_1reg;
	}
      else /* move 1 byte at a time */
	{
	  move_bytes = 1;
	  mode = QImode;
	  gen_func.mov = gen_movqi;
	}

      src = adjust_address (orig_src, mode, offset);
      dest = adjust_address (orig_dest, mode, offset);

      if (mode != BLKmode)
	{
	  rtx tmp_reg = gen_reg_rtx (mode);

	  emit_insn ((*gen_func.mov) (tmp_reg, src));
	  stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
	}

      if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
	{
	  int i;
	  for (i = 0; i < num_reg; i++)
	    emit_insn (stores[i]);
	  num_reg = 0;
	}

      if (mode == BLKmode)
	{
	  /* Move the address into scratch registers.  The movmemsi
	     patterns require zero offset.  */
	  if (!REG_P (XEXP (src, 0)))
	    {
	      rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
	      src = replace_equiv_address (src, src_reg);
	    }
	  set_mem_size (src, GEN_INT (move_bytes));

	  if (!REG_P (XEXP (dest, 0)))
	    {
	      rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
	      dest = replace_equiv_address (dest, dest_reg);
	    }
	  set_mem_size (dest, GEN_INT (move_bytes));

	  emit_insn ((*gen_func.movmemsi) (dest, src,
					   GEN_INT (move_bytes & 31),
					   align_rtx));
	}
    }

  return 1;
}

/* Return a string to perform a load_multiple operation.
   operands[0] is the vector.
   operands[1] is the source address.
   operands[2] is the first destination register.  */

const char *
rs6000_output_load_multiple (rtx operands[3])
{
  /* We have to handle the case where the pseudo used to contain the address
     is assigned to one of the output registers.  */
  int i, j;
  int words = XVECLEN (operands[0], 0);
  rtx xop[10];

  if (XVECLEN (operands[0], 0) == 1)
    return "{l|lwz} %2,0(%1)";

  for (i = 0; i < words; i++)
    if (refers_to_regno_p (REGNO (operands[2]) + i,
			   REGNO (operands[2]) + i + 1, operands[1], 0))
      {
	if (i == words-1)
	  {
	    xop[0] = GEN_INT (4 * (words-1));
	    xop[1] = operands[1];
	    xop[2] = operands[2];
	    output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
	    return "";
	  }
	else if (i == 0)
	  {
	    xop[0] = GEN_INT (4 * (words-1));
	    xop[1] = operands[1];
	    xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
	    output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
	    return "";
	  }
	else
	  {
	    for (j = 0; j < words; j++)
	      if (j != i)
		{
		  xop[0] = GEN_INT (j * 4);
		  xop[1] = operands[1];
		  xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
		  output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
		}
	    xop[0] = GEN_INT (i * 4);
	    xop[1] = operands[1];
	    output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
	    return "";
	  }
      }

  return "{lsi|lswi} %2,%1,%N0";
}
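
/* For a three-word load whose address register is not clobbered by
   the destination registers, the fallback above emits a single string
   load, e.g.

	lswi 5,9,12

   loading r5, r6 and r7 from the address in r9.  */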

/* A validation routine: say whether CODE, a condition code, and MODE
   match.  The other alternatives either don't make sense or should
   never be generated.  */

void
validate_condition_mode (enum rtx_code code, enum machine_mode mode)
{
  gcc_assert ((GET_RTX_CLASS (code) == RTX_COMPARE
	       || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
	      && GET_MODE_CLASS (mode) == MODE_CC);

  /* These don't make sense.  */
  gcc_assert ((code != GT && code != LT && code != GE && code != LE)
	      || mode != CCUNSmode);

  gcc_assert ((code != GTU && code != LTU && code != GEU && code != LEU)
	      || mode == CCUNSmode);

  gcc_assert (mode == CCFPmode
	      || (code != ORDERED && code != UNORDERED
		  && code != UNEQ && code != LTGT
		  && code != UNGT && code != UNLT
		  && code != UNGE && code != UNLE));

  /* These should never be generated except for
     flag_finite_math_only.  */
  gcc_assert (mode != CCFPmode
	      || flag_finite_math_only
	      || (code != LE && code != GE
		  && code != UNEQ && code != LTGT
		  && code != UNGT && code != UNLT));

  /* These are invalid; the information is not there.  */
  gcc_assert (mode != CCEQmode || code == EQ || code == NE);
}

/* Return 1 if ANDOP is a mask that has no bits on that are not in the
   mask required to convert the result of a rotate insn into a shift
   left insn of SHIFTOP bits.  Both are known to be SImode CONST_INT.  */

int
includes_lshift_p (rtx shiftop, rtx andop)
{
  unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;

  shift_mask <<= INTVAL (shiftop);

  return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
}

/* Similar, but for right shift.  */

int
includes_rshift_p (rtx shiftop, rtx andop)
{
  unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;

  shift_mask >>= INTVAL (shiftop);

  return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
}
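
/* Worked example: for SHIFTOP == 4, shift_mask in includes_lshift_p is
   0xfffffff0, so ANDOP == 0xffffff00 is accepted (no bits outside the
   mask) while ANDOP == 0x000000ff is rejected.  */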

/* Return 1 if ANDOP is a mask suitable for use with an rldic insn
   to perform a left shift.  It must have exactly SHIFTOP least
   significant 0's, then one or more 1's, then zero or more 0's.  */

int
includes_rldic_lshift_p (rtx shiftop, rtx andop)
{
  if (GET_CODE (andop) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb, shift_mask;

      c = INTVAL (andop);
      if (c == 0 || c == ~0)
	return 0;

      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);

      /* Find the least significant one bit.  */
      lsb = c & -c;

      /* It must coincide with the LSB of the shift mask.  */
      if (-lsb != shift_mask)
	return 0;

      /* Invert to look for the next transition (if any).  */
      c = ~c;

      /* Remove the low group of ones (originally low group of zeros).  */
      c &= -lsb;

      /* Again find the lsb, and check we have all 1's above.  */
      lsb = c & -c;
      return c == -lsb;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
	   && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      HOST_WIDE_INT low, high, lsb;
      HOST_WIDE_INT shift_mask_low, shift_mask_high;

      low = CONST_DOUBLE_LOW (andop);
      if (HOST_BITS_PER_WIDE_INT < 64)
	high = CONST_DOUBLE_HIGH (andop);

      if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
	  || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
	return 0;

      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
	{
	  shift_mask_high = ~0;
	  if (INTVAL (shiftop) > 32)
	    shift_mask_high <<= INTVAL (shiftop) - 32;

	  lsb = high & -high;

	  if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
	    return 0;

	  high = ~high;
	  high &= -lsb;

	  lsb = high & -high;
	  return high == -lsb;
	}

      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      lsb = low & -low;

      if (-lsb != shift_mask_low)
	return 0;

      if (HOST_BITS_PER_WIDE_INT < 64)
	high = ~high;
      low = ~low;
      low &= -lsb;

      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
	{
	  lsb = high & -high;
	  return high == -lsb;
	}

      lsb = low & -low;
      return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
    }
  else
    return 0;
}

/* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
   to perform a left shift.  It must have SHIFTOP or more least
   significant 0's, with the remainder of the word 1's.  */

int
includes_rldicr_lshift_p (rtx shiftop, rtx andop)
{
  if (GET_CODE (andop) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb, shift_mask;

      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);
      c = INTVAL (andop);

      /* Find the least significant one bit.  */
      lsb = c & -c;

      /* It must be covered by the shift mask.
	 This test also rejects c == 0.  */
      if ((lsb & shift_mask) == 0)
	return 0;

      /* Check we have all 1's above the transition, and reject all 1's.  */
      return c == -lsb && lsb != 1;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
	   && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      HOST_WIDE_INT low, lsb, shift_mask_low;

      low = CONST_DOUBLE_LOW (andop);

      if (HOST_BITS_PER_WIDE_INT < 64)
	{
	  HOST_WIDE_INT high, shift_mask_high;

	  high = CONST_DOUBLE_HIGH (andop);

	  if (low == 0)
	    {
	      shift_mask_high = ~0;
	      if (INTVAL (shiftop) > 32)
		shift_mask_high <<= INTVAL (shiftop) - 32;

	      lsb = high & -high;

	      if ((lsb & shift_mask_high) == 0)
		return 0;

	      return high == -lsb;
	    }
	  if (high != ~0)
	    return 0;
	}

      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      lsb = low & -low;

      if ((lsb & shift_mask_low) == 0)
	return 0;

      return low == -lsb && lsb != 1;
    }
  else
    return 0;
}

/* Return 1 if operands will generate a valid arguments to rlwimi
   instruction for insert with right shift in 64-bit mode.  The mask may
   not start on the first bit or stop on the last bit because wrap-around
   effects of instruction do not correspond to semantics of RTL insn.  */

int
insvdi_rshift_rlwimi_p (rtx sizeop, rtx startop, rtx shiftop)
{
  if (INTVAL (startop) > 32
      && INTVAL (startop) < 64
      && INTVAL (sizeop) > 1
      && INTVAL (sizeop) + INTVAL (startop) < 64
      && INTVAL (shiftop) > 0
      && INTVAL (sizeop) + INTVAL (shiftop) < 32
      && (64 - (INTVAL (shiftop) & 63)) >= INTVAL (sizeop))
    return 1;

  return 0;
}

/* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
   for lfq and stfq insns iff the registers are hard registers.   */

int
registers_ok_for_quad_peep (rtx reg1, rtx reg2)
{
  /* We might have been passed a SUBREG.  */
  if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
    return 0;

  /* We might have been passed non floating point registers.  */
  if (!FP_REGNO_P (REGNO (reg1))
      || !FP_REGNO_P (REGNO (reg2)))
    return 0;

  return (REGNO (reg1) == REGNO (reg2) - 1);
}

/* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
   addr1 and addr2 must be in consecutive memory locations
   (addr2 == addr1 + 8).  */

int
mems_ok_for_quad_peep (rtx mem1, rtx mem2)
{
  rtx addr1, addr2;
  unsigned int reg1, reg2;
  int offset1, offset2;

  /* The mems cannot be volatile.  */
  if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
    return 0;

  addr1 = XEXP (mem1, 0);
  addr2 = XEXP (mem2, 0);

  /* Extract an offset (if used) from the first addr.  */
  if (GET_CODE (addr1) == PLUS)
    {
      /* If not a REG, return zero.  */
      if (GET_CODE (XEXP (addr1, 0)) != REG)
	return 0;
      else
	{
	  reg1 = REGNO (XEXP (addr1, 0));
	  /* The offset must be constant!  */
	  if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
	    return 0;
	  offset1 = INTVAL (XEXP (addr1, 1));
	}
    }
  else if (GET_CODE (addr1) != REG)
    return 0;
  else
    {
      reg1 = REGNO (addr1);
      /* This was a simple (mem (reg)) expression.  Offset is 0.  */
      offset1 = 0;
    }

  /* And now for the second addr.  */
  if (GET_CODE (addr2) == PLUS)
    {
      /* If not a REG, return zero.  */
      if (GET_CODE (XEXP (addr2, 0)) != REG)
	return 0;
      else
	{
	  reg2 = REGNO (XEXP (addr2, 0));
	  /* The offset must be constant.  */
	  if (GET_CODE (XEXP (addr2, 1)) != CONST_INT)
	    return 0;
	  offset2 = INTVAL (XEXP (addr2, 1));
	}
    }
  else if (GET_CODE (addr2) != REG)
    return 0;
  else
    {
      reg2 = REGNO (addr2);
      /* This was a simple (mem (reg)) expression.  Offset is 0.  */
      offset2 = 0;
    }

  /* Both of these must have the same base register.  */
  if (reg1 != reg2)
    return 0;

  /* The offset for the second addr must be 8 more than the first addr.  */
  if (offset2 != offset1 + 8)
    return 0;

  /* All the tests passed.  addr1 and addr2 are valid for lfq or stfq
     insns.  */
  return 1;
}
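
/* For example, (mem (plus (reg 9) (const_int 16))) and
   (mem (plus (reg 9) (const_int 24))) qualify: the same base register
   and offsets that differ by exactly 8.  */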

/* Return the register class of a scratch register needed to copy IN into
   or out of a register in CLASS in MODE.  If it can be done directly,
   NO_REGS is returned.  */

enum reg_class
rs6000_secondary_reload_class (enum reg_class class,
			       enum machine_mode mode ATTRIBUTE_UNUSED,
			       rtx in)
{
  int regno;

  if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
#if TARGET_MACHO
		     && MACHOPIC_INDIRECT
#endif
		     ))
    {
      /* We cannot copy a symbolic operand directly into anything
	 other than BASE_REGS for TARGET_ELF.  So indicate that a
	 register from BASE_REGS is needed as an intermediate
	 register.

	 On Darwin, pic addresses require a load from memory, which
	 needs a base register.  */
      if (class != BASE_REGS
	  && (GET_CODE (in) == SYMBOL_REF
	      || GET_CODE (in) == HIGH
	      || GET_CODE (in) == LABEL_REF
	      || GET_CODE (in) == CONST))
	return BASE_REGS;
    }

  if (GET_CODE (in) == REG)
    {
      regno = REGNO (in);
      if (regno >= FIRST_PSEUDO_REGISTER)
	{
	  regno = true_regnum (in);
	  if (regno >= FIRST_PSEUDO_REGISTER)
	    regno = -1;
	}
    }
  else if (GET_CODE (in) == SUBREG)
    {
      regno = true_regnum (in);
      if (regno >= FIRST_PSEUDO_REGISTER)
	regno = -1;
    }
  else
    regno = -1;

  /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
     into anything.  */
  if (class == GENERAL_REGS || class == BASE_REGS
      || (regno >= 0 && INT_REGNO_P (regno)))
    return NO_REGS;

  /* Constants, memory, and FP registers can go into FP registers.  */
  if ((regno == -1 || FP_REGNO_P (regno))
      && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
    return NO_REGS;

  /* Memory, and AltiVec registers can go into AltiVec registers.  */
  if ((regno == -1 || ALTIVEC_REGNO_P (regno))
      && class == ALTIVEC_REGS)
    return NO_REGS;

  /* We can copy among the CR registers.  */
  if ((class == CR_REGS || class == CR0_REGS)
      && regno >= 0 && CR_REGNO_P (regno))
    return NO_REGS;

  /* Otherwise, we need GENERAL_REGS.  */
  return GENERAL_REGS;
}

/* Given a comparison operation, return the bit number in CCR to test.  We
   know this is a valid comparison.

   SCC_P is 1 if this is for an scc.  That means that %D will have been
   used instead of %C, so the bits will be in different places.

   Return -1 if OP isn't a valid comparison for some reason.  */

int
ccr_bit (rtx op, int scc_p)
{
  enum rtx_code code = GET_CODE (op);
  enum machine_mode cc_mode;
  int cc_regnum;
  int base_bit;
  rtx reg;

  if (!COMPARISON_P (op))
    return -1;

  reg = XEXP (op, 0);

  gcc_assert (GET_CODE (reg) == REG && CR_REGNO_P (REGNO (reg)));

  cc_mode = GET_MODE (reg);
  cc_regnum = REGNO (reg);
  base_bit = 4 * (cc_regnum - CR0_REGNO);

  validate_condition_mode (code, cc_mode);

  /* When generating a sCOND operation, only positive conditions are
     allowed.  */
  gcc_assert (!scc_p
	      || code == EQ || code == GT || code == LT || code == UNORDERED
	      || code == GTU || code == LTU);

  switch (code)
    {
    case NE:
      return scc_p ? base_bit + 3 : base_bit + 2;
    case EQ:
      return base_bit + 2;
    case GT:  case GTU:  case UNLE:
      return base_bit + 1;
    case LT:  case LTU:  case UNGE:
      return base_bit;
    case ORDERED:  case UNORDERED:
      return base_bit + 3;

    case GE:  case GEU:
      /* If scc, we will have done a cror to put the bit in the
	 unordered position.  So test that bit.  For integer, this is ! LT
	 unless this is an scc insn.  */
      return scc_p ? base_bit + 3 : base_bit;

    case LE:  case LEU:
      return scc_p ? base_bit + 3 : base_bit + 1;

    default:
      gcc_unreachable ();
    }
}
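
/* Example: for a GT test on CR2 (not an scc), base_bit is 4 * 2 = 8
   and the function returns bit 9, the GT bit of that CR field.  */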

/* Return the GOT register.  */

rtx
rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
{
  /* The second flow pass currently (June 1999) can't update
     regs_ever_live without disturbing other parts of the compiler, so
     update it here to make the prolog/epilogue code happy.  */
  if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
    regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;

  current_function_uses_pic_offset_table = 1;

  return pic_offset_table_rtx;
}

/* Function to init struct machine_function.
   This will be called, via a pointer variable,
   from push_function_context.  */

static struct machine_function *
rs6000_init_machine_status (void)
{
  return ggc_alloc_cleared (sizeof (machine_function));
}

/* These macros test for integers and extract the low-order bits.  */
#define INT_P(X)  \
((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE)	\
 && GET_MODE (X) == VOIDmode)

#define INT_LOWPART(X) \
  (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))

int
extract_MB (rtx op)
{
  int i;
  unsigned long val = INT_LOWPART (op);

  /* If the high bit is zero, the value is the first 1 bit we find
     from the left.  */
  if ((val & 0x80000000) == 0)
    {
      gcc_assert (val & 0xffffffff);

      i = 1;
      while (((val <<= 1) & 0x80000000) == 0)
	++i;
      return i;
    }

  /* If the high bit is set and the low bit is not, or the mask is all
     1's, the value is zero.  */
  if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
    return 0;

  /* Otherwise we have a wrap-around mask.  Look for the first 0 bit
     from the right.  */
  i = 31;
  while (((val >>= 1) & 1) != 0)
    --i;

  return i;
}

int
extract_ME (rtx op)
{
  int i;
  unsigned long val = INT_LOWPART (op);

  /* If the low bit is zero, the value is the first 1 bit we find from
     the right.  */
  if ((val & 1) == 0)
    {
      gcc_assert (val & 0xffffffff);

      i = 30;
      while (((val >>= 1) & 1) == 0)
	--i;
      return i;
    }

  /* If the low bit is set and the high bit is not, or the mask is all
     1's, the value is 31.  */
  if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
    return 31;

  /* Otherwise we have a wrap-around mask.  Look for the first 0 bit
     from the left.  */
  i = 0;
  while (((val <<= 1) & 0x80000000) != 0)
    ++i;

  return i;
}

/* Locate some local-dynamic symbol still in use by this function
   so that we can print its name in some tls_ld pattern.  */

static const char *
rs6000_get_some_local_dynamic_name (void)
{
  rtx insn;

  if (cfun->machine->some_ld_name)
    return cfun->machine->some_ld_name;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& for_each_rtx (&PATTERN (insn),
			 rs6000_get_some_local_dynamic_name_1, 0))
      return cfun->machine->some_ld_name;

  gcc_unreachable ();
}

/* Helper function for rs6000_get_some_local_dynamic_name.  */

static int
rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
{
  rtx x = *px;

  if (GET_CODE (x) == SYMBOL_REF)
    {
      const char *str = XSTR (x, 0);
      if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
	{
	  cfun->machine->some_ld_name = str;
	  return 1;
	}
    }

  return 0;
}

/* Write out a function code label.  */

void
rs6000_output_function_entry (FILE *file, const char *fname)
{
  if (fname[0] != '.')
    {
      switch (DEFAULT_ABI)
	{
	default:
	  gcc_unreachable ();

	case ABI_AIX:
	  if (DOT_SYMBOLS)
	    putc ('.', file);
	  else
	    ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "L.");
	  break;

	case ABI_V4:
	case ABI_DARWIN:
	  break;
	}
    }
  if (TARGET_AIX)
    RS6000_OUTPUT_BASENAME (file, fname);
  else
    assemble_name (file, fname);
}
10484 /* Print an operand. Recognize special options, documented below. */
10487 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
10488 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
10490 #define SMALL_DATA_RELOC "sda21"
10491 #define SMALL_DATA_REG 0
10495 print_operand (FILE *file
, rtx x
, int code
)
10499 unsigned HOST_WIDE_INT uval
;
10504 /* Write out an instruction after the call which may be replaced
10505 with glue code by the loader. This depends on the AIX version. */
10506 asm_fprintf (file
, RS6000_CALL_GLUE
);

      /* %a is output_address.  */

    case 'A':
      /* If X is a constant integer whose low-order 5 bits are zero,
         write 'l'.  Otherwise, write 'r'.  This is a kludge to fix a bug
         in the AIX assembler where "sri" with a zero shift count
         writes a trash instruction.  */
      if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
        putc ('l', file);
      else
        putc ('r', file);
      return;

    case 'b':
      /* If constant, low-order 16 bits of constant, unsigned.
         Otherwise, write normally.  */
      if (INT_P (x))
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
      else
        print_operand (file, x, 0);
      return;

    case 'B':
      /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
         for 64-bit mask direction.  */
      putc (((INT_LOWPART (x) & 1) == 0 ? 'r' : 'l'), file);
      return;

      /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
         output_operand.  */

    case 'c':
      /* X is a CR register.  Print the number of the GT bit of the CR.  */
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
        output_operand_lossage ("invalid %%E value");
      else
        fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
      return;

    case 'D':
      /* Like 'J' but get to the GT bit only.  */
      gcc_assert (GET_CODE (x) == REG);

      /* Bit 1 is GT bit.  */
      i = 4 * (REGNO (x) - CR0_REGNO) + 1;

      /* Add one for shift count in rlinm for scc.  */
      fprintf (file, "%d", i + 1);
      return;

    case 'E':
      /* X is a CR register.  Print the number of the EQ bit of the CR.  */
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
        output_operand_lossage ("invalid %%E value");
      else
        fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
      return;

    case 'f':
      /* X is a CR register.  Print the shift count needed to move it
         to the high-order four bits.  */
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
        output_operand_lossage ("invalid %%f value");
      else
        fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
      return;

    case 'F':
      /* Similar, but print the count for the rotate in the opposite
         direction.  */
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
        output_operand_lossage ("invalid %%F value");
      else
        fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
      return;
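
      /* Illustrative sketch, not part of the original file: each
         condition register field CR0..CR7 holds four bits -- LT, GT,
         EQ, SO -- at bit positions 4*field .. 4*field+3, which is all
         the arithmetic the CR cases above perform.  For example, the
         EQ bit of CR2 is 4*2 + 2 == 10, and the bit_in_field argument
         below is 0 = LT, 1 = GT, 2 = EQ, 3 = SO/OV:

           static int
           demo_cr_bit_number (int cr_field, int bit_in_field)
           {
             return 4 * cr_field + bit_in_field;
           }
      */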

    case 'G':
      /* X is a constant integer.  If it is negative, print "m",
         otherwise print "z".  This is to make an aze or ame insn.  */
      if (GET_CODE (x) != CONST_INT)
        output_operand_lossage ("invalid %%G value");
      else if (INTVAL (x) >= 0)
        putc ('z', file);
      else
        putc ('m', file);
      return;

    case 'h':
      /* If constant, output low-order five bits.  Otherwise, write
         normally.  */
      if (INT_P (x))
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
      else
        print_operand (file, x, 0);
      return;

    case 'H':
      /* If constant, output low-order six bits.  Otherwise, write
         normally.  */
      if (INT_P (x))
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
      else
        print_operand (file, x, 0);
      return;

    case 'I':
      /* Print `i' if this is a constant, else nothing.  */
      if (INT_P (x))
        putc ('i', file);
      return;

    case 'j':
      /* Write the bit number in CCR for jump.  */
      i = ccr_bit (x, 0);
      if (i == -1)
        output_operand_lossage ("invalid %%j code");
      else
        fprintf (file, "%d", i);
      return;

    case 'J':
      /* Similar, but add one for shift count in rlinm for scc and pass
         scc flag to `ccr_bit'.  */
      i = ccr_bit (x, 1);
      if (i == -1)
        output_operand_lossage ("invalid %%J code");
      else
        /* If we want bit 31, write a shift count of zero, not 32.  */
        fprintf (file, "%d", i == 31 ? 0 : i + 1);
      return;

    case 'k':
      /* X must be a constant.  Write the 1's complement of the
         constant.  */
      if (! INT_P (x))
        output_operand_lossage ("invalid %%k value");
      else
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
      return;

    case 'K':
      /* X must be a symbolic constant on ELF.  Write an
         expression suitable for an 'addi' that adds in the low 16
         bits of the MEM.  */
      if (GET_CODE (x) != CONST)
        {
          print_operand_address (file, x);
          fputs ("@l", file);
        }
      else
        {
          if (GET_CODE (XEXP (x, 0)) != PLUS
              || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
                  && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
              || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
            output_operand_lossage ("invalid %%K value");
          print_operand_address (file, XEXP (XEXP (x, 0), 0));
          fputs ("@l", file);
          /* For GNU as, there must be a non-alphanumeric character
             between 'l' and the number.  The '-' is added by
             print_operand() already.  */
          if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
            fputs ("+", file);
          print_operand (file, XEXP (XEXP (x, 0), 1), 0);
        }
      return;

      /* %l is output_asm_label.  */

    case 'L':
      /* Write second word of DImode or DFmode reference.  Works on register
         or non-indexed memory only.  */
      if (GET_CODE (x) == REG)
        fputs (reg_names[REGNO (x) + 1], file);
      else if (GET_CODE (x) == MEM)
        {
          /* Handle possible auto-increment.  Since it is pre-increment and
             we have already done it, we can just use an offset of word.  */
          if (GET_CODE (XEXP (x, 0)) == PRE_INC
              || GET_CODE (XEXP (x, 0)) == PRE_DEC)
            output_address (plus_constant (XEXP (XEXP (x, 0), 0),
                                           UNITS_PER_WORD));
          else
            output_address (XEXP (adjust_address_nv (x, SImode,
                                                     UNITS_PER_WORD),
                                  0));

          if (small_data_operand (x, GET_MODE (x)))
            fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
                     reg_names[SMALL_DATA_REG]);
        }
      return;

    case 'm':
      /* MB value for a mask operand.  */
      if (! mask_operand (x, SImode))
        output_operand_lossage ("invalid %%m value");
      else
        fprintf (file, "%d", extract_MB (x));
      return;

    case 'M':
      /* ME value for a mask operand.  */
      if (! mask_operand (x, SImode))
        output_operand_lossage ("invalid %%M value");
      else
        fprintf (file, "%d", extract_ME (x));
      return;

      /* %n outputs the negative of its operand.  */

    case 'N':
      /* Write the number of elements in the vector times 4.  */
      if (GET_CODE (x) != PARALLEL)
        output_operand_lossage ("invalid %%N value");
      else
        fprintf (file, "%d", XVECLEN (x, 0) * 4);
      return;

    case 'O':
      /* Similar, but subtract 1 first.  */
      if (GET_CODE (x) != PARALLEL)
        output_operand_lossage ("invalid %%O value");
      else
        fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
      return;

    case 'p':
      /* X is a CONST_INT that is a power of two.  Output the logarithm.  */
      if (! INT_P (x)
          || INT_LOWPART (x) < 0
          || (i = exact_log2 (INT_LOWPART (x))) < 0)
        output_operand_lossage ("invalid %%p value");
      else
        fprintf (file, "%d", i);
      return;

    case 'P':
      /* The operand must be an indirect memory reference.  The result
         is the register name.  */
      if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
          || REGNO (XEXP (x, 0)) >= 32)
        output_operand_lossage ("invalid %%P value");
      else
        fputs (reg_names[REGNO (XEXP (x, 0))], file);
      return;

    case 'q':
      /* This outputs the logical code corresponding to a boolean
         expression.  The expression may have one or both operands
         negated (if one, only the first one).  For condition register
         logical operations, it will also treat the negated
         CR codes as NOTs, but not handle NOTs of them.  */
      {
        const char *const *t = 0;
        const char *s;
        enum rtx_code code = GET_CODE (x);
        static const char * const tbl[3][3] = {
          { "and", "andc", "nor" },
          { "or", "orc", "nand" },
          { "xor", "eqv", "xor" } };

        if (code == AND)
          t = tbl[0];
        else if (code == IOR)
          t = tbl[1];
        else if (code == XOR)
          t = tbl[2];
        else
          output_operand_lossage ("invalid %%q value");

        if (GET_CODE (XEXP (x, 0)) != NOT)
          s = t[0];
        else
          {
            if (GET_CODE (XEXP (x, 1)) == NOT)
              s = t[2];
            else
              s = t[1];
          }

        fputs (s, file);
      }
      return;
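
      /* Illustrative sketch, not part of the original file: the table
         above is keyed by the outer operator (row) and by which
         operands are negated (column), so (ior (not a) (not b))
         selects row 1, column 2 and prints "nand", per De Morgan.
         A stand-alone rendition of the lookup:

           static const char *
           demo_logical_mnemonic (int is_and, int is_ior,
                                  int op0_negated, int op1_negated)
           {
             static const char * const tbl[3][3] = {
               { "and", "andc", "nor" },
               { "or",  "orc",  "nand" },
               { "xor", "eqv",  "xor" } };
             int row = is_and ? 0 : is_ior ? 1 : 2;
             int col = !op0_negated ? 0 : op1_negated ? 2 : 1;
             return tbl[row][col];
           }
      */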

    case 'R':
      /* X is a CR register.  Print the mask for `mtcrf'.  */
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
        output_operand_lossage ("invalid %%R value");
      else
        fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
      return;

    case 's':
      /* Low 5 bits of 32 - value.  */
      if (! INT_P (x))
        output_operand_lossage ("invalid %%s value");
      else
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
      return;

    case 'S':
      /* PowerPC64 mask position.  All 0's is excluded.
         CONST_INT 32-bit mask is considered sign-extended so any
         transition must occur within the CONST_INT, not on the boundary.  */
      if (! mask64_operand (x, DImode))
        output_operand_lossage ("invalid %%S value");

      uval = INT_LOWPART (x);

      if (uval & 1)     /* Clear Left */
        {
#if HOST_BITS_PER_WIDE_INT > 64
          uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
#endif
          i = 64;
        }
      else              /* Clear Right */
        {
          uval = ~uval;
#if HOST_BITS_PER_WIDE_INT > 64
          uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
#endif
          i = 63;
        }
      while (uval != 0)
        --i, uval >>= 1;
      gcc_assert (i >= 0);
      fprintf (file, "%d", i);
      return;

    case 't':
      /* Like 'J' but get to the OVERFLOW/UNORDERED bit.  */
      gcc_assert (GET_CODE (x) == REG && GET_MODE (x) == CCmode);

      /* Bit 3 is OV bit.  */
      i = 4 * (REGNO (x) - CR0_REGNO) + 3;

      /* If we want bit 31, write a shift count of zero, not 32.  */
      fprintf (file, "%d", i == 31 ? 0 : i + 1);
      return;

    case 'T':
      /* Print the symbolic name of a branch target register.  */
      if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
                                  && REGNO (x) != COUNT_REGISTER_REGNUM))
        output_operand_lossage ("invalid %%T value");
      else if (REGNO (x) == LINK_REGISTER_REGNUM)
        fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
      else
        fputs ("ctr", file);
      return;

    case 'u':
      /* High-order 16 bits of constant for use in unsigned operand.  */
      if (! INT_P (x))
        output_operand_lossage ("invalid %%u value");
      else
        fprintf (file, HOST_WIDE_INT_PRINT_HEX,
                 (INT_LOWPART (x) >> 16) & 0xffff);
      return;

    case 'v':
      /* High-order 16 bits of constant for use in signed operand.  */
      if (! INT_P (x))
        output_operand_lossage ("invalid %%v value");
      else
        fprintf (file, HOST_WIDE_INT_PRINT_HEX,
                 (INT_LOWPART (x) >> 16) & 0xffff);
      return;

    case 'U':
      /* Print `u' if this has an auto-increment or auto-decrement.  */
      if (GET_CODE (x) == MEM
          && (GET_CODE (XEXP (x, 0)) == PRE_INC
              || GET_CODE (XEXP (x, 0)) == PRE_DEC))
        putc ('u', file);
      return;

    case 'V':
      /* Print the trap code for this operand.  */
      switch (GET_CODE (x))
        {
        case EQ:
          fputs ("eq", file);   /* 4 */
          break;
        case NE:
          fputs ("ne", file);   /* 24 */
          break;
        case LT:
          fputs ("lt", file);   /* 16 */
          break;
        case LE:
          fputs ("le", file);   /* 20 */
          break;
        case GT:
          fputs ("gt", file);   /* 8 */
          break;
        case GE:
          fputs ("ge", file);   /* 12 */
          break;
        case LTU:
          fputs ("llt", file);  /* 2 */
          break;
        case LEU:
          fputs ("lle", file);  /* 6 */
          break;
        case GTU:
          fputs ("lgt", file);  /* 1 */
          break;
        case GEU:
          fputs ("lge", file);  /* 5 */
          break;
        default:
          gcc_unreachable ();
        }
      break;
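
      /* Illustrative sketch, not part of the original file: the numbers
         in the comments above are the 5-bit TO field of the trap
         instructions, whose bits are (from most significant) lt=16,
         gt=8, eq=4, llt=2 and lgt=1.  Compound tests simply OR the
         primitive bits: "le" is lt|eq == 20 and "ne" is lt|gt == 24.

           static int
           demo_trap_to_field (int lt, int gt, int eq, int llt, int lgt)
           {
             return ((lt ? 16 : 0) | (gt ? 8 : 0) | (eq ? 4 : 0)
                     | (llt ? 2 : 0) | (lgt ? 1 : 0));
           }
      */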

    case 'w':
      /* If constant, low-order 16 bits of constant, signed.  Otherwise,
         write normally.  */
      if (INT_P (x))
        fprintf (file, HOST_WIDE_INT_PRINT_DEC,
                 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
      else
        print_operand (file, x, 0);
      return;

    case 'W':
      /* MB value for a PowerPC64 rldic operand.  */
      val = (GET_CODE (x) == CONST_INT
             ? INTVAL (x) : CONST_DOUBLE_HIGH (x));

      if (val < 0)
        i = -1;
      else
        for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
          if ((val <<= 1) < 0)
            break;

#if HOST_BITS_PER_WIDE_INT == 32
      if (GET_CODE (x) == CONST_INT && i >= 0)
        i += 32;  /* zero-extend high-part was all 0's */
      else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
        {
          val = CONST_DOUBLE_LOW (x);

          gcc_assert (val);
          if (val < 0)
            --i;
          else
            for ( ; i < 64; i++)
              if ((val <<= 1) < 0)
                break;
        }
#endif

      fprintf (file, "%d", i + 1);
      return;

    case 'X':
      if (GET_CODE (x) == MEM
          && legitimate_indexed_address_p (XEXP (x, 0), 0))
        putc ('x', file);
      return;

    case 'Y':
      /* Like 'L', for third word of TImode.  */
      if (GET_CODE (x) == REG)
        fputs (reg_names[REGNO (x) + 2], file);
      else if (GET_CODE (x) == MEM)
        {
          if (GET_CODE (XEXP (x, 0)) == PRE_INC
              || GET_CODE (XEXP (x, 0)) == PRE_DEC)
            output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
          else
            output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
          if (small_data_operand (x, GET_MODE (x)))
            fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
                     reg_names[SMALL_DATA_REG]);
        }
      return;

    case 'z':
      /* X is a SYMBOL_REF.  Write out the name preceded by a
         period and without any trailing data in brackets.  Used for function
         names.  If we are configured for System V (or the embedded ABI) on
         the PowerPC, do not emit the period, since those systems do not use
         TOCs and the like.  */
      gcc_assert (GET_CODE (x) == SYMBOL_REF);

      /* Mark the decl as referenced so that cgraph will output the
         function.  */
      if (SYMBOL_REF_DECL (x))
        mark_decl_referenced (SYMBOL_REF_DECL (x));

      /* For macho, check to see if we need a stub.  */
      if (TARGET_MACHO)
        {
          const char *name = XSTR (x, 0);
#if TARGET_MACHO
          if (MACHOPIC_INDIRECT
              && machopic_classify_symbol (x) == MACHOPIC_UNDEFINED_FUNCTION)
            name = machopic_indirection_name (x, /*stub_p=*/true);
#endif
          assemble_name (file, name);
        }
      else if (!DOT_SYMBOLS)
        assemble_name (file, XSTR (x, 0));
      else
        rs6000_output_function_entry (file, XSTR (x, 0));
      return;

    case 'Z':
      /* Like 'L', for last word of TImode.  */
      if (GET_CODE (x) == REG)
        fputs (reg_names[REGNO (x) + 3], file);
      else if (GET_CODE (x) == MEM)
        {
          if (GET_CODE (XEXP (x, 0)) == PRE_INC
              || GET_CODE (XEXP (x, 0)) == PRE_DEC)
            output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
          else
            output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
          if (small_data_operand (x, GET_MODE (x)))
            fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
                     reg_names[SMALL_DATA_REG]);
        }
      return;

      /* Print AltiVec or SPE memory operand.  */
    case 'y':
      {
        rtx tmp;

        gcc_assert (GET_CODE (x) == MEM);

        tmp = XEXP (x, 0);

        /* Ugly hack because %y is overloaded.  */
        if ((TARGET_SPE || TARGET_E500_DOUBLE)
            && (GET_MODE_SIZE (GET_MODE (x)) == 8
                || GET_MODE (x) == TFmode
                || GET_MODE (x) == TImode))
          {
            /* Handle [reg].  */
            if (GET_CODE (tmp) == REG)
              {
                fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
                break;
              }
            /* Handle [reg+UIMM].  */
            else if (GET_CODE (tmp) == PLUS &&
                     GET_CODE (XEXP (tmp, 1)) == CONST_INT)
              {
                int x;

                gcc_assert (GET_CODE (XEXP (tmp, 0)) == REG);

                x = INTVAL (XEXP (tmp, 1));
                fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
                break;
              }

            /* Fall through.  Must be [reg+reg].  */
          }
        if (TARGET_ALTIVEC
            && GET_CODE (tmp) == AND
            && GET_CODE (XEXP (tmp, 1)) == CONST_INT
            && INTVAL (XEXP (tmp, 1)) == -16)
          tmp = XEXP (tmp, 0);
        if (GET_CODE (tmp) == REG)
          fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
        else
          {
            gcc_assert (GET_CODE (tmp) == PLUS
                        && REG_P (XEXP (tmp, 0))
                        && REG_P (XEXP (tmp, 1)));

            if (REGNO (XEXP (tmp, 0)) == 0)
              fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
                       reg_names[ REGNO (XEXP (tmp, 0)) ]);
            else
              fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
                       reg_names[ REGNO (XEXP (tmp, 1)) ]);
          }
        break;
      }

    case 0:
      if (GET_CODE (x) == REG)
        fprintf (file, "%s", reg_names[REGNO (x)]);
      else if (GET_CODE (x) == MEM)
        {
          /* We need to handle PRE_INC and PRE_DEC here, since we need to
             know the width from the mode.  */
          if (GET_CODE (XEXP (x, 0)) == PRE_INC)
            fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
                     reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
          else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
            fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
                     reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
          else
            output_address (XEXP (x, 0));
        }
      else
        output_addr_const (file, x);
      return;

    case '&':
      assemble_name (file, rs6000_get_some_local_dynamic_name ());
      return;

    default:
      output_operand_lossage ("invalid %%xn code");
    }
}
/* Print the address of an operand.  */

void
print_operand_address (FILE *file, rtx x)
{
  if (GET_CODE (x) == REG)
    fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
  else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
           || GET_CODE (x) == LABEL_REF)
    {
      output_addr_const (file, x);
      if (small_data_operand (x, GET_MODE (x)))
        fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
                 reg_names[SMALL_DATA_REG]);
      else
        gcc_assert (!TARGET_TOC);
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
    {
      gcc_assert (REG_P (XEXP (x, 0)));
      if (REGNO (XEXP (x, 0)) == 0)
        fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
                 reg_names[ REGNO (XEXP (x, 0)) ]);
      else
        fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
                 reg_names[ REGNO (XEXP (x, 1)) ]);
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
    fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
             INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
#if TARGET_ELF
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
           && CONSTANT_P (XEXP (x, 1)))
    {
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
#if TARGET_MACHO
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
           && CONSTANT_P (XEXP (x, 1)))
    {
      fprintf (file, "lo16(");
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
  else if (legitimate_constant_pool_address_p (x))
    {
      if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
        {
          rtx contains_minus = XEXP (x, 1);
          rtx minus, symref;
          const char *name;

          /* Find the (minus (sym) (toc)) buried in X, and temporarily
             turn it into (sym) for output_addr_const.  */
          while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
            contains_minus = XEXP (contains_minus, 0);

          minus = XEXP (contains_minus, 0);
          symref = XEXP (minus, 0);
          XEXP (contains_minus, 0) = symref;
          if (TARGET_ELF)
            {
              char *newname;

              name = XSTR (symref, 0);
              newname = alloca (strlen (name) + sizeof ("@toc"));
              strcpy (newname, name);
              strcat (newname, "@toc");
              XSTR (symref, 0) = newname;
            }
          output_addr_const (file, XEXP (x, 1));
          if (TARGET_ELF)
            XSTR (symref, 0) = name;
          XEXP (contains_minus, 0) = minus;
        }
      else
        output_addr_const (file, XEXP (x, 1));

      fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
    }
  else
    gcc_unreachable ();
}
/* Target hook for assembling integer objects.  The PowerPC version has
   to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
   is defined.  It also needs to handle DI-mode objects on 64-bit
   targets.  */

static bool
rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
{
#ifdef RELOCATABLE_NEEDS_FIXUP
  /* Special handling for SI values.  */
  if (RELOCATABLE_NEEDS_FIXUP && size == 4 && aligned_p)
    {
      static int recurse = 0;

      /* For -mrelocatable, we mark all addresses that need to be fixed up
         in the .fixup section.  */
      if (TARGET_RELOCATABLE
          && in_section != toc_section
          && in_section != text_section
          && !unlikely_text_section_p (in_section)
          && !recurse
          && GET_CODE (x) != CONST_INT
          && GET_CODE (x) != CONST_DOUBLE
          && CONSTANT_P (x))
        {
          char buf[256];

          recurse = 1;
          ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
          fixuplabelno++;
          ASM_OUTPUT_LABEL (asm_out_file, buf);
          fprintf (asm_out_file, "\t.long\t(");
          output_addr_const (asm_out_file, x);
          fprintf (asm_out_file, ")@fixup\n");
          fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
          ASM_OUTPUT_ALIGN (asm_out_file, 2);
          fprintf (asm_out_file, "\t.long\t");
          assemble_name (asm_out_file, buf);
          fprintf (asm_out_file, "\n\t.previous\n");
          recurse = 0;
          return true;
        }
      /* Remove initial .'s to turn a -mcall-aixdesc function
         address into the address of the descriptor, not the function
         itself.  */
      else if (GET_CODE (x) == SYMBOL_REF
               && XSTR (x, 0)[0] == '.'
               && DEFAULT_ABI == ABI_AIX)
        {
          const char *name = XSTR (x, 0);
          while (*name == '.')
            name++;

          fprintf (asm_out_file, "\t.long\t%s\n", name);
          return true;
        }
    }
#endif /* RELOCATABLE_NEEDS_FIXUP */
  return default_assemble_integer (x, size, aligned_p);
}
#ifdef HAVE_GAS_HIDDEN
/* Emit an assembler directive to set symbol visibility for DECL to
   VISIBILITY_TYPE.  */

static void
rs6000_assemble_visibility (tree decl, int vis)
{
  /* Functions need to have their entry point symbol visibility set as
     well as their descriptor symbol visibility.  */
  if (DEFAULT_ABI == ABI_AIX
      && DOT_SYMBOLS
      && TREE_CODE (decl) == FUNCTION_DECL)
    {
      static const char * const visibility_types[] = {
        NULL, "internal", "hidden", "protected"
      };

      const char *name, *type;

      name = ((* targetm.strip_name_encoding)
              (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
      type = visibility_types[vis];

      fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
      fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
    }
  else
    default_assemble_visibility (decl, vis);
}
#endif
enum rtx_code
rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
{
  /* Reversal of FP compares takes care -- an ordered compare
     becomes an unordered compare and vice versa.  */
  if (mode == CCFPmode
      && (!flag_finite_math_only
          || code == UNLT || code == UNLE || code == UNGT || code == UNGE
          || code == UNEQ || code == LTGT))
    return reverse_condition_maybe_unordered (code);
  else
    return reverse_condition (code);
}
/* Generate a compare for CODE.  Return a brand-new rtx that
   represents the result of the compare.  */

static rtx
rs6000_generate_compare (enum rtx_code code)
{
  enum machine_mode comp_mode;
  rtx compare_result;

  if (rs6000_compare_fp_p)
    comp_mode = CCFPmode;
  else if (code == GTU || code == LTU
           || code == GEU || code == LEU)
    comp_mode = CCUNSmode;
  else if ((code == EQ || code == NE)
           && GET_CODE (rs6000_compare_op0) == SUBREG
           && GET_CODE (rs6000_compare_op1) == SUBREG
           && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op0)
           && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op1))
    /* These are unsigned values, perhaps there will be a later
       ordering compare that can be shared with this one.
       Unfortunately we cannot detect the signedness of the operands
       for non-subregs.  */
    comp_mode = CCUNSmode;
  else
    comp_mode = CCmode;

  /* First, the compare.  */
  compare_result = gen_reg_rtx (comp_mode);

  /* E500 FP compare instructions on the GPRs.  Yuck!  */
  if ((!TARGET_FPRS && TARGET_HARD_FLOAT)
      && rs6000_compare_fp_p)
    {
      rtx cmp, or_result, compare_result2;
      enum machine_mode op_mode = GET_MODE (rs6000_compare_op0);

      if (op_mode == VOIDmode)
        op_mode = GET_MODE (rs6000_compare_op1);

      /* The E500 FP compare instructions toggle the GT bit (CR bit 1) only.
         This explains the following mess.  */

      switch (code)
        {
        case EQ: case UNEQ: case NE: case LTGT:
          switch (op_mode)
            {
            case SFmode:
              cmp = flag_unsafe_math_optimizations
                ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
                                   rs6000_compare_op1)
                : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
                                   rs6000_compare_op1);
              break;

            case DFmode:
              cmp = flag_unsafe_math_optimizations
                ? gen_tstdfeq_gpr (compare_result, rs6000_compare_op0,
                                   rs6000_compare_op1)
                : gen_cmpdfeq_gpr (compare_result, rs6000_compare_op0,
                                   rs6000_compare_op1);
              break;

            case TFmode:
              cmp = flag_unsafe_math_optimizations
                ? gen_tsttfeq_gpr (compare_result, rs6000_compare_op0,
                                   rs6000_compare_op1)
                : gen_cmptfeq_gpr (compare_result, rs6000_compare_op0,
                                   rs6000_compare_op1);
              break;

            default:
              gcc_unreachable ();
            }
          break;

        case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
          switch (op_mode)
            {
            case SFmode:
              cmp = flag_unsafe_math_optimizations
                ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
                                   rs6000_compare_op1)
                : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
                                   rs6000_compare_op1);
              break;

            case DFmode:
              cmp = flag_unsafe_math_optimizations
                ? gen_tstdfgt_gpr (compare_result, rs6000_compare_op0,
                                   rs6000_compare_op1)
                : gen_cmpdfgt_gpr (compare_result, rs6000_compare_op0,
                                   rs6000_compare_op1);
              break;

            case TFmode:
              cmp = flag_unsafe_math_optimizations
                ? gen_tsttfgt_gpr (compare_result, rs6000_compare_op0,
                                   rs6000_compare_op1)
                : gen_cmptfgt_gpr (compare_result, rs6000_compare_op0,
                                   rs6000_compare_op1);
              break;

            default:
              gcc_unreachable ();
            }
          break;

        case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
          switch (op_mode)
            {
            case SFmode:
              cmp = flag_unsafe_math_optimizations
                ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
                                   rs6000_compare_op1)
                : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
                                   rs6000_compare_op1);
              break;

            case DFmode:
              cmp = flag_unsafe_math_optimizations
                ? gen_tstdflt_gpr (compare_result, rs6000_compare_op0,
                                   rs6000_compare_op1)
                : gen_cmpdflt_gpr (compare_result, rs6000_compare_op0,
                                   rs6000_compare_op1);
              break;

            case TFmode:
              cmp = flag_unsafe_math_optimizations
                ? gen_tsttflt_gpr (compare_result, rs6000_compare_op0,
                                   rs6000_compare_op1)
                : gen_cmptflt_gpr (compare_result, rs6000_compare_op0,
                                   rs6000_compare_op1);
              break;

            default:
              gcc_unreachable ();
            }
          break;

        default:
          gcc_unreachable ();
        }

      /* Synthesize LE and GE from LT/GT || EQ.  */
      if (code == LE || code == GE || code == LEU || code == GEU)
        {
          emit_insn (cmp);

          switch (code)
            {
            case LE: code = LT; break;
            case GE: code = GT; break;
            case LEU: code = LT; break;
            case GEU: code = GT; break;
            default: gcc_unreachable ();
            }

          compare_result2 = gen_reg_rtx (CCFPmode);

          /* Do the EQ.  */
          switch (op_mode)
            {
            case SFmode:
              cmp = flag_unsafe_math_optimizations
                ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
                                   rs6000_compare_op1)
                : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
                                   rs6000_compare_op1);
              break;

            case DFmode:
              cmp = flag_unsafe_math_optimizations
                ? gen_tstdfeq_gpr (compare_result2, rs6000_compare_op0,
                                   rs6000_compare_op1)
                : gen_cmpdfeq_gpr (compare_result2, rs6000_compare_op0,
                                   rs6000_compare_op1);
              break;

            case TFmode:
              cmp = flag_unsafe_math_optimizations
                ? gen_tsttfeq_gpr (compare_result2, rs6000_compare_op0,
                                   rs6000_compare_op1)
                : gen_cmptfeq_gpr (compare_result2, rs6000_compare_op0,
                                   rs6000_compare_op1);
              break;

            default:
              gcc_unreachable ();
            }
          emit_insn (cmp);

          /* OR them together.  */
          or_result = gen_reg_rtx (CCFPmode);
          cmp = gen_e500_cr_ior_compare (or_result, compare_result,
                                         compare_result2);
          compare_result = or_result;
          code = EQ;
        }
      else
        {
          if (code == NE || code == LTGT)
            code = NE;
          else
            code = EQ;
        }

      emit_insn (cmp);
    }
  else
    {
      /* Generate XLC-compatible TFmode compare as PARALLEL with extra
         CLOBBERs to match cmptf_internal2 pattern.  */
      if (comp_mode == CCFPmode && TARGET_XL_COMPAT
          && GET_MODE (rs6000_compare_op0) == TFmode
          && !TARGET_IEEEQUAD
          && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128)
        emit_insn (gen_rtx_PARALLEL (VOIDmode,
          gen_rtvec (9,
                     gen_rtx_SET (VOIDmode,
                                  compare_result,
                                  gen_rtx_COMPARE (comp_mode,
                                                   rs6000_compare_op0,
                                                   rs6000_compare_op1)),
                     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
                     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
                     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
                     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
                     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
                     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
                     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
                     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)))));
      else if (GET_CODE (rs6000_compare_op1) == UNSPEC
               && XINT (rs6000_compare_op1, 1) == UNSPEC_SP_TEST)
        {
          rtx op1 = XVECEXP (rs6000_compare_op1, 0, 0);
          comp_mode = CCEQmode;
          compare_result = gen_reg_rtx (CCEQmode);
          if (TARGET_64BIT)
            emit_insn (gen_stack_protect_testdi (compare_result,
                                                 rs6000_compare_op0, op1));
          else
            emit_insn (gen_stack_protect_testsi (compare_result,
                                                 rs6000_compare_op0, op1));
        }
      else
        emit_insn (gen_rtx_SET (VOIDmode, compare_result,
                                gen_rtx_COMPARE (comp_mode,
                                                 rs6000_compare_op0,
                                                 rs6000_compare_op1)));
    }

  /* Some kinds of FP comparisons need an OR operation;
     under flag_finite_math_only we don't bother.  */
  if (rs6000_compare_fp_p
      && !flag_finite_math_only
      && !(TARGET_HARD_FLOAT && !TARGET_FPRS)
      && (code == LE || code == GE
          || code == UNEQ || code == LTGT
          || code == UNGT || code == UNLT))
    {
      enum rtx_code or1, or2;
      rtx or1_rtx, or2_rtx, compare2_rtx;
      rtx or_result = gen_reg_rtx (CCEQmode);

      switch (code)
        {
        case LE: or1 = LT; or2 = EQ; break;
        case GE: or1 = GT; or2 = EQ; break;
        case UNEQ: or1 = UNORDERED; or2 = EQ; break;
        case LTGT: or1 = LT; or2 = GT; break;
        case UNGT: or1 = UNORDERED; or2 = GT; break;
        case UNLT: or1 = UNORDERED; or2 = LT; break;
        default: gcc_unreachable ();
        }
      validate_condition_mode (or1, comp_mode);
      validate_condition_mode (or2, comp_mode);
      or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
      or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
      compare2_rtx = gen_rtx_COMPARE (CCEQmode,
                                      gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
                                      const_true_rtx);
      emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));

      compare_result = or_result;
      code = EQ;
    }

  validate_condition_mode (code, GET_MODE (compare_result));

  return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
}
/* Emit the RTL for an sCOND pattern.  */

void
rs6000_emit_sCOND (enum rtx_code code, rtx result)
{
  rtx condition_rtx;
  enum machine_mode op_mode;
  enum rtx_code cond_code;

  condition_rtx = rs6000_generate_compare (code);
  cond_code = GET_CODE (condition_rtx);

  if (rs6000_compare_fp_p
      && !TARGET_FPRS && TARGET_HARD_FLOAT)
    {
      rtx t;

      PUT_MODE (condition_rtx, SImode);
      t = XEXP (condition_rtx, 0);

      gcc_assert (cond_code == NE || cond_code == EQ);

      if (cond_code == NE)
        emit_insn (gen_e500_flip_gt_bit (t, t));

      emit_insn (gen_move_from_CR_gt_bit (result, t));
      return;
    }

  if (cond_code == NE
      || cond_code == GE || cond_code == LE
      || cond_code == GEU || cond_code == LEU
      || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
    {
      rtx not_result = gen_reg_rtx (CCEQmode);
      rtx not_op, rev_cond_rtx;
      enum machine_mode cc_mode;

      cc_mode = GET_MODE (XEXP (condition_rtx, 0));

      rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode, cond_code),
                                     SImode, XEXP (condition_rtx, 0), const0_rtx);
      not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
      emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
      condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
    }

  op_mode = GET_MODE (rs6000_compare_op0);
  if (op_mode == VOIDmode)
    op_mode = GET_MODE (rs6000_compare_op1);

  if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
    {
      PUT_MODE (condition_rtx, DImode);
      convert_move (result, condition_rtx, 0);
    }
  else
    {
      PUT_MODE (condition_rtx, SImode);
      emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
    }
}
/* Emit a branch of kind CODE to location LOC.  */

void
rs6000_emit_cbranch (enum rtx_code code, rtx loc)
{
  rtx condition_rtx, loc_ref;

  condition_rtx = rs6000_generate_compare (code);
  loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
  emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
                               gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
                                                     loc_ref, pc_rtx)));
}
/* Return the string to output a conditional branch to LABEL, which is
   the operand number of the label, or -1 if the branch is really a
   conditional return.

   OP is the conditional expression.  XEXP (OP, 0) is assumed to be a
   condition code register and its mode specifies what kind of
   comparison we made.

   REVERSED is nonzero if we should reverse the sense of the comparison.

   INSN is the insn.  */

char *
output_cbranch (rtx op, const char *label, int reversed, rtx insn)
{
  static char string[64];
  enum rtx_code code = GET_CODE (op);
  rtx cc_reg = XEXP (op, 0);
  enum machine_mode mode = GET_MODE (cc_reg);
  int cc_regno = REGNO (cc_reg) - CR0_REGNO;
  int need_longbranch = label != NULL && get_attr_length (insn) == 8;
  int really_reversed = reversed ^ need_longbranch;
  char *s = string;
  const char *ccode;
  const char *pred;
  rtx note;

  validate_condition_mode (code, mode);

  /* Work out which way this really branches.  We could use
     reverse_condition_maybe_unordered here always but this
     makes the resulting assembler clearer.  */
  if (really_reversed)
    {
      /* Reversal of FP compares takes care -- an ordered compare
         becomes an unordered compare and vice versa.  */
      if (mode == CCFPmode)
        code = reverse_condition_maybe_unordered (code);
      else
        code = reverse_condition (code);
    }

  if ((!TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
    {
      /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
         to the GT bit.  */
      switch (code)
        {
        case EQ:
          /* Opposite of GT.  */
          code = GT;
          break;

        case NE:
          code = UNLE;
          break;

        default:
          gcc_unreachable ();
        }
    }

  switch (code)
    {
      /* Not all of these are actually distinct opcodes, but
         we distinguish them for clarity of the resulting assembler.  */
    case NE: case LTGT:
      ccode = "ne"; break;
    case EQ: case UNEQ:
      ccode = "eq"; break;
    case GE: case GEU:
      ccode = "ge"; break;
    case GT: case GTU: case UNGT:
      ccode = "gt"; break;
    case LE: case LEU:
      ccode = "le"; break;
    case LT: case LTU: case UNLT:
      ccode = "lt"; break;
    case UNORDERED: ccode = "un"; break;
    case ORDERED: ccode = "nu"; break;
    case UNGE: ccode = "nl"; break;
    case UNLE: ccode = "ng"; break;
    default:
      gcc_unreachable ();
    }

  /* Maybe we have a guess as to how likely the branch is.
     The old mnemonics don't have a way to specify this information.  */
  pred = "";
  note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
  if (note != NULL_RTX)
    {
      /* PROB is the difference from 50%.  */
      int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;

      /* Only hint for highly probable/improbable branches on newer
         cpus as static prediction overrides processor dynamic
         prediction.  For older cpus we may as well always hint, but
         assume not taken for branches that are very close to 50% as a
         mispredicted taken branch is more expensive than a
         mispredicted not-taken branch.  */
      if (rs6000_always_hint
          || (abs (prob) > REG_BR_PROB_BASE / 100 * 48
              && br_prob_note_reliable_p (note)))
        {
          if (abs (prob) > REG_BR_PROB_BASE / 20
              && ((prob > 0) ^ need_longbranch))
            pred = "+";
          else
            pred = "-";
        }
    }

  if (label == NULL)
    s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
  else
    s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);

  /* We need to escape any '%' characters in the reg_names string.
     Assume they'd only be the first character....  */
  if (reg_names[cc_regno + CR0_REGNO][0] == '%')
    *s++ = '%';
  s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);

  if (label != NULL)
    {
      /* If the branch distance was too far, we may have to use an
         unconditional branch to go the distance.  */
      if (need_longbranch)
        s += sprintf (s, ",$+8\n\tb %s", label);
      else
        s += sprintf (s, ",%s", label);
    }

  return string;
}
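
/* Illustrative sketch, not part of the original file: the hint logic
   above restated on plain integers.  NOTE_PROB is the REG_BR_PROB note
   value in [0, REG_BR_PROB_BASE]; a '+' or '-' suffix is emitted only
   for branches that are roughly 98% or 2% likely (or always, when
   ALWAYS_HINT stands in for rs6000_always_hint), and the sign of the
   hint flips when a long branch inverts the condition.  The
   br_prob_note_reliable_p gate in the real code is omitted here.  */

static char ATTRIBUTE_UNUSED
demo_branch_hint (int note_prob, int always_hint, int need_longbranch)
{
  /* PROB is the difference from 50%.  */
  int prob = note_prob - REG_BR_PROB_BASE / 2;

  if (always_hint || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
    {
      if (abs (prob) > REG_BR_PROB_BASE / 20
          && ((prob > 0) ^ need_longbranch))
        return '+';
      return '-';
    }
  return 0;  /* No hint.  */
}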
/* Return the string to flip the GT bit on a CR.  */
char *
output_e500_flip_gt_bit (rtx dst, rtx src)
{
  static char string[64];
  int a, b;

  gcc_assert (GET_CODE (dst) == REG && CR_REGNO_P (REGNO (dst))
              && GET_CODE (src) == REG && CR_REGNO_P (REGNO (src)));

  /* GT bit.  */
  a = 4 * (REGNO (dst) - CR0_REGNO) + 1;
  b = 4 * (REGNO (src) - CR0_REGNO) + 1;

  sprintf (string, "crnot %d,%d", a, b);
  return string;
}
/* Return insn index for the vector compare instruction for given CODE,
   and DEST_MODE, OP_MODE.  Return INSN_NOT_AVAILABLE if a valid insn is
   not available.  */

static int
get_vec_cmp_insn (enum rtx_code code,
                  enum machine_mode dest_mode,
                  enum machine_mode op_mode)
{
  if (!TARGET_ALTIVEC)
    return INSN_NOT_AVAILABLE;

  switch (code)
    {
    case EQ:
      if (dest_mode == V16QImode && op_mode == V16QImode)
        return UNSPEC_VCMPEQUB;
      if (dest_mode == V8HImode && op_mode == V8HImode)
        return UNSPEC_VCMPEQUH;
      if (dest_mode == V4SImode && op_mode == V4SImode)
        return UNSPEC_VCMPEQUW;
      if (dest_mode == V4SImode && op_mode == V4SFmode)
        return UNSPEC_VCMPEQFP;
      break;
    case GE:
      if (dest_mode == V4SImode && op_mode == V4SFmode)
        return UNSPEC_VCMPGEFP;
      break;
    case GT:
      if (dest_mode == V16QImode && op_mode == V16QImode)
        return UNSPEC_VCMPGTSB;
      if (dest_mode == V8HImode && op_mode == V8HImode)
        return UNSPEC_VCMPGTSH;
      if (dest_mode == V4SImode && op_mode == V4SImode)
        return UNSPEC_VCMPGTSW;
      if (dest_mode == V4SImode && op_mode == V4SFmode)
        return UNSPEC_VCMPGTFP;
      break;
    case GTU:
      if (dest_mode == V16QImode && op_mode == V16QImode)
        return UNSPEC_VCMPGTUB;
      if (dest_mode == V8HImode && op_mode == V8HImode)
        return UNSPEC_VCMPGTUH;
      if (dest_mode == V4SImode && op_mode == V4SImode)
        return UNSPEC_VCMPGTUW;
      break;
    default:
      break;
    }
  return INSN_NOT_AVAILABLE;
}
/* Emit vector compare for operands OP0 and OP1 using code RCODE.
   DMODE is expected destination mode.  This is a recursive function.  */

static rtx
rs6000_emit_vector_compare (enum rtx_code rcode,
                            rtx op0, rtx op1,
                            enum machine_mode dmode)
{
  int vec_cmp_insn;
  rtx mask;
  enum machine_mode dest_mode;
  enum machine_mode op_mode = GET_MODE (op1);

  gcc_assert (TARGET_ALTIVEC);
  gcc_assert (GET_MODE (op0) == GET_MODE (op1));

  /* Floating point vector compare instructions use destination V4SImode.
     Move destination to appropriate mode later.  */
  if (dmode == V4SFmode)
    dest_mode = V4SImode;
  else
    dest_mode = dmode;

  mask = gen_reg_rtx (dest_mode);
  vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);

  if (vec_cmp_insn == INSN_NOT_AVAILABLE)
    {
      bool swap_operands = false;
      bool try_again = false;
      switch (rcode)
        {
        case LT:
          rcode = GT;
          swap_operands = true;
          try_again = true;
          break;
        case LTU:
          rcode = GTU;
          swap_operands = true;
          try_again = true;
          break;
        case NE:
          /* Invert condition and try again.
             e.g., A != B becomes ~(A==B).  */
          {
            enum rtx_code rev_code;
            enum insn_code nor_code;
            rtx eq_rtx;

            rev_code = reverse_condition_maybe_unordered (rcode);
            eq_rtx = rs6000_emit_vector_compare (rev_code, op0, op1,
                                                 dest_mode);

            nor_code = one_cmpl_optab->handlers[(int)dest_mode].insn_code;
            gcc_assert (nor_code != CODE_FOR_nothing);
            emit_insn (GEN_FCN (nor_code) (mask, eq_rtx));

            if (dmode != dest_mode)
              {
                rtx temp = gen_reg_rtx (dest_mode);
                convert_move (temp, mask, 0);
                return temp;
              }
            return mask;
          }
          break;
        case GE:
        case GEU:
        case LE:
        case LEU:
          /* Try GT/GTU/LT/LTU OR EQ */
          {
            rtx c_rtx, eq_rtx;
            enum insn_code ior_code;
            enum rtx_code new_code;

            switch (rcode)
              {
              case GE:
                new_code = GT;
                break;
              case GEU:
                new_code = GTU;
                break;
              case LE:
                new_code = LT;
                break;
              case LEU:
                new_code = LTU;
                break;
              default:
                gcc_unreachable ();
              }

            c_rtx = rs6000_emit_vector_compare (new_code,
                                                op0, op1, dest_mode);
            eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
                                                 dest_mode);

            ior_code = ior_optab->handlers[(int)dest_mode].insn_code;
            gcc_assert (ior_code != CODE_FOR_nothing);
            emit_insn (GEN_FCN (ior_code) (mask, c_rtx, eq_rtx));
            if (dmode != dest_mode)
              {
                rtx temp = gen_reg_rtx (dest_mode);
                convert_move (temp, mask, 0);
                return temp;
              }
            return mask;
          }
          break;
        default:
          gcc_unreachable ();
        }

      if (try_again)
        {
          vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
          /* You only get two chances.  */
          gcc_assert (vec_cmp_insn != INSN_NOT_AVAILABLE);
        }

      if (swap_operands)
        {
          rtx tmp;
          tmp = op0;
          op0 = op1;
          op1 = tmp;
        }
    }

  emit_insn (gen_rtx_SET (VOIDmode, mask,
                          gen_rtx_UNSPEC (dest_mode,
                                          gen_rtvec (2, op0, op1),
                                          vec_cmp_insn)));
  if (dmode != dest_mode)
    {
      rtx temp = gen_reg_rtx (dest_mode);
      convert_move (temp, mask, 0);
      return temp;
    }
  return mask;
}
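
/* Illustrative sketch, not part of the original file: the fallback
   paths above, restated lane-wise on plain arrays.  NE is synthesized
   as the complement of EQ, and LE as (LT | EQ), just as the RTL does
   with a vnor/vor over the primitive compare results.  */

static void ATTRIBUTE_UNUSED
demo_synthesize_vector_ne_le (const int *a, const int *b,
                              unsigned *ne_mask, unsigned *le_mask, int n)
{
  int i;
  for (i = 0; i < n; i++)
    {
      unsigned eq = (a[i] == b[i]) ? ~0U : 0U;
      unsigned lt = (a[i] < b[i]) ? ~0U : 0U;
      ne_mask[i] = ~eq;          /* A != B  ==  ~(A == B).  */
      le_mask[i] = lt | eq;      /* A <= B  ==  (A < B) | (A == B).  */
    }
}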
/* Return vector select instruction for MODE.  Return INSN_NOT_AVAILABLE
   if a valid insn doesn't exist for the given mode.  */

static int
get_vsel_insn (enum machine_mode mode)
{
  switch (mode)
    {
    case V4SImode:
      return UNSPEC_VSEL4SI;
    case V4SFmode:
      return UNSPEC_VSEL4SF;
    case V8HImode:
      return UNSPEC_VSEL8HI;
    case V16QImode:
      return UNSPEC_VSEL16QI;
    default:
      return INSN_NOT_AVAILABLE;
    }
}
/* Emit vector select insn where DEST is destination using
   operands OP1, OP2 and MASK.  */

static void
rs6000_emit_vector_select (rtx dest, rtx op1, rtx op2, rtx mask)
{
  rtx t, temp;
  enum machine_mode dest_mode = GET_MODE (dest);
  int vsel_insn_index = get_vsel_insn (GET_MODE (dest));

  temp = gen_reg_rtx (dest_mode);

  /* For each vector element, select op1 when mask is 1 otherwise
     select op2.  */
  t = gen_rtx_SET (VOIDmode, temp,
                   gen_rtx_UNSPEC (dest_mode,
                                   gen_rtvec (3, op2, op1, mask),
                                   vsel_insn_index));
  emit_insn (t);
  emit_move_insn (dest, temp);
}
/* Emit vector conditional expression.
   DEST is destination.  OP1 and OP2 are two VEC_COND_EXPR operands.
   CC_OP0 and CC_OP1 are the two operands for the relation operation COND.  */

int
rs6000_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
                              rtx cond, rtx cc_op0, rtx cc_op1)
{
  enum machine_mode dest_mode = GET_MODE (dest);
  enum rtx_code rcode = GET_CODE (cond);
  rtx mask;

  if (!TARGET_ALTIVEC)
    return 0;

  /* Get the vector mask for the given relational operations.  */
  mask = rs6000_emit_vector_compare (rcode, cc_op0, cc_op1, dest_mode);

  rs6000_emit_vector_select (dest, op1, op2, mask);

  return 1;
}
/* Emit a conditional move: move TRUE_COND to DEST if OP of the
   operands of the last comparison is nonzero/true, FALSE_COND if it
   is zero/false.  Return 0 if the hardware has no such operation.  */

int
rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
{
  enum rtx_code code = GET_CODE (op);
  rtx op0 = rs6000_compare_op0;
  rtx op1 = rs6000_compare_op1;
  REAL_VALUE_TYPE c1;
  enum machine_mode compare_mode = GET_MODE (op0);
  enum machine_mode result_mode = GET_MODE (dest);
  rtx temp;
  bool is_against_zero;

  /* These modes should always match.  */
  if (GET_MODE (op1) != compare_mode
      /* In the isel case however, we can use a compare immediate, so
         op1 may be a small constant.  */
      && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
    return 0;
  if (GET_MODE (true_cond) != result_mode)
    return 0;
  if (GET_MODE (false_cond) != result_mode)
    return 0;

  /* First, work out if the hardware can do this at all, or
     if it's too slow....  */
  if (! rs6000_compare_fp_p)
    {
      if (TARGET_ISEL)
        return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
      return 0;
    }
  else if (TARGET_HARD_FLOAT && !TARGET_FPRS
           && SCALAR_FLOAT_MODE_P (compare_mode))
    return 0;

  is_against_zero = op1 == CONST0_RTX (compare_mode);

  /* A floating-point subtract might overflow, underflow, or produce
     an inexact result, thus changing the floating-point flags, so it
     can't be generated if we care about that.  It's safe if one side
     of the construct is zero, since then no subtract will be
     generated.  */
  if (SCALAR_FLOAT_MODE_P (compare_mode)
      && flag_trapping_math && ! is_against_zero)
    return 0;

  /* Eliminate half of the comparisons by switching operands, this
     makes the remaining code simpler.  */
  if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
      || code == LTGT || code == LT || code == UNLE)
    {
      code = reverse_condition_maybe_unordered (code);
      temp = true_cond;
      true_cond = false_cond;
      false_cond = temp;
    }

  /* UNEQ and LTGT take four instructions for a comparison with zero,
     it'll probably be faster to use a branch here too.  */
  if (code == UNEQ && HONOR_NANS (compare_mode))
    return 0;

  if (GET_CODE (op1) == CONST_DOUBLE)
    REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);

  /* We're going to try to implement comparisons by performing
     a subtract, then comparing against zero.  Unfortunately,
     Inf - Inf is NaN which is not zero, and so if we don't
     know that the operand is finite and the comparison
     would treat EQ different to UNORDERED, we can't do it.  */
  if (HONOR_INFINITIES (compare_mode)
      && code != GT && code != UNGE
      && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
      /* Constructs of the form (a OP b ? a : b) are safe.  */
      && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
          || (! rtx_equal_p (op0, true_cond)
              && ! rtx_equal_p (op1, true_cond))))
    return 0;

  /* At this point we know we can use fsel.  */

  /* Reduce the comparison to a comparison against zero.  */
  if (! is_against_zero)
    {
      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp,
                              gen_rtx_MINUS (compare_mode, op0, op1)));
      op0 = temp;
      op1 = CONST0_RTX (compare_mode);
    }

  /* If we don't care about NaNs we can reduce some of the comparisons
     down to faster ones.  */
  if (! HONOR_NANS (compare_mode))
    switch (code)
      {
      case GT:
        code = LE;
        temp = true_cond;
        true_cond = false_cond;
        false_cond = temp;
        break;
      case UNLT:
        code = GE;
        break;
      case UNEQ:
        code = EQ;
        break;
      case UNGE:
        code = GE;
        break;
      case UNGT:
        code = GT;
        break;
      case UNLE:
        code = LE;
        break;
      default:
        break;
      }

  /* Now, reduce everything down to a GE.  */
  switch (code)
    {
    case GE:
      break;

    case LE:
      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
      op0 = temp;
      break;

    case ORDERED:
      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
      op0 = temp;
      break;

    case EQ:
      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp,
                              gen_rtx_NEG (compare_mode,
                                           gen_rtx_ABS (compare_mode, op0))));
      op0 = temp;
      break;

    case UNGE:
      /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
      temp = gen_reg_rtx (result_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp,
                              gen_rtx_IF_THEN_ELSE (result_mode,
                                                    gen_rtx_GE (VOIDmode,
                                                                op0, op1),
                                                    true_cond, false_cond)));
      false_cond = true_cond;
      true_cond = temp;

      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
      op0 = temp;
      break;

    case GT:
      /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
      temp = gen_reg_rtx (result_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp,
                              gen_rtx_IF_THEN_ELSE (result_mode,
                                                    gen_rtx_GE (VOIDmode,
                                                                op0, op1),
                                                    true_cond, false_cond)));
      true_cond = false_cond;
      false_cond = temp;

      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
      op0 = temp;
      break;

    default:
      gcc_unreachable ();
    }

  emit_insn (gen_rtx_SET (VOIDmode, dest,
                          gen_rtx_IF_THEN_ELSE (result_mode,
                                                gen_rtx_GE (VOIDmode,
                                                            op0, op1),
                                                true_cond, false_cond)));

  return 1;
}
/* Same as above, but for ints (isel).  */

static int
rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
{
  rtx condition_rtx, cr;

  /* All isel implementations thus far are 32-bits.  */
  if (GET_MODE (rs6000_compare_op0) != SImode)
    return 0;

  /* We still have to do the compare, because isel doesn't do a
     compare, it just looks at the CRx bits set by a previous compare
     instruction.  */
  condition_rtx = rs6000_generate_compare (GET_CODE (op));
  cr = XEXP (condition_rtx, 0);

  if (GET_MODE (cr) == CCmode)
    emit_insn (gen_isel_signed (dest, condition_rtx,
                                true_cond, false_cond, cr));
  else
    emit_insn (gen_isel_unsigned (dest, condition_rtx,
                                  true_cond, false_cond, cr));

  return 1;
}
const char *
output_isel (rtx *operands)
{
  enum rtx_code code;

  code = GET_CODE (operands[1]);
  if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
    {
      PUT_CODE (operands[1], reverse_condition (code));
      return "isel %0,%3,%2,%j1";
    }
  else
    return "isel %0,%2,%3,%j1";
}
void
rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
{
  enum machine_mode mode = GET_MODE (op0);
  enum rtx_code c;
  rtx target;

  if (code == SMAX || code == SMIN)
    c = GE;
  else
    c = GEU;

  if (code == SMAX || code == UMAX)
    target = emit_conditional_move (dest, c, op0, op1, mode,
                                    op0, op1, mode, 0);
  else
    target = emit_conditional_move (dest, c, op0, op1, mode,
                                    op1, op0, mode, 0);
  gcc_assert (target);
  if (target != dest)
    emit_move_insn (dest, target);
}
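
/* Illustrative sketch, not part of the original file: the conditional
   move above computes, e.g., smax as "dest = (op0 >= op1) ? op0 : op1",
   which collapses to a single isel/fsel-style select once the GE
   compare result is available.  */

static int ATTRIBUTE_UNUSED
demo_smax_via_cmove (int op0, int op1)
{
  return (op0 >= op1) ? op0 : op1;
}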
/* Emit instructions to perform a load-reserved/store-conditional operation.
   The operation performed is an atomic
   (set M (CODE:MODE M OP))
   If not NULL, BEFORE is atomically set to M before the operation, and
   AFTER is set to M after the operation (that is, (CODE:MODE M OP)).
   If SYNC_P then a memory barrier is emitted before the operation.
   Either OP or M may be wrapped in a NOT operation.  */

void
rs6000_emit_sync (enum rtx_code code, enum machine_mode mode,
                  rtx m, rtx op, rtx before_param, rtx after_param,
                  bool sync_p)
{
  enum machine_mode used_mode;
  rtx the_op, set_before, set_after, set_atomic, cc_scratch, before, after;
  rtx used_m;
  rtvec vec;
  HOST_WIDE_INT imask = GET_MODE_MASK (mode);
  rtx shift = NULL_RTX;

  if (sync_p)
    emit_insn (gen_memory_barrier ());

  if (GET_CODE (m) == NOT)
    used_m = XEXP (m, 0);
  else
    used_m = m;

  /* If this is smaller than SImode, we'll have to use SImode with
     adjustments.  */
  if (mode == QImode || mode == HImode)
    {
      rtx newop, oldop;

      if (MEM_ALIGN (used_m) >= 32)
        {
          int ishift = 0;
          if (BYTES_BIG_ENDIAN)
            ishift = GET_MODE_BITSIZE (SImode) - GET_MODE_BITSIZE (mode);

          shift = GEN_INT (ishift);
        }
      else
        {
          rtx addrSI, aligned_addr;
          int shift_mask = mode == QImode ? 0x18 : 0x10;

          addrSI = force_reg (SImode, gen_lowpart_common (SImode,
                                                          XEXP (used_m, 0)));
          shift = gen_reg_rtx (SImode);

          emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
                                 GEN_INT (shift_mask)));
          emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));

          aligned_addr = expand_binop (Pmode, and_optab,
                                       XEXP (used_m, 0),
                                       GEN_INT (-4), NULL_RTX,
                                       1, OPTAB_LIB_WIDEN);
          used_m = change_address (used_m, SImode, aligned_addr);
          set_mem_align (used_m, 32);
        }
      /* It's safe to keep the old alias set of USED_M, because
         the operation is atomic and only affects the original
         USED_M.  */
      if (GET_CODE (m) == NOT)
        m = gen_rtx_NOT (SImode, used_m);
      else
        m = used_m;

      if (GET_CODE (op) == NOT)
        {
          oldop = lowpart_subreg (SImode, XEXP (op, 0), mode);
          oldop = gen_rtx_NOT (SImode, oldop);
        }
      else
        oldop = lowpart_subreg (SImode, op, mode);

      switch (code)
        {
        case IOR:
        case XOR:
          newop = expand_binop (SImode, and_optab,
                                oldop, GEN_INT (imask), NULL_RTX,
                                1, OPTAB_LIB_WIDEN);
          emit_insn (gen_ashlsi3 (newop, newop, shift));
          break;

        case AND:
          newop = expand_binop (SImode, ior_optab,
                                oldop, GEN_INT (~imask), NULL_RTX,
                                1, OPTAB_LIB_WIDEN);
          emit_insn (gen_rotlsi3 (newop, newop, shift));
          break;

        case PLUS:
        case MINUS:
          {
            rtx mask;

            newop = expand_binop (SImode, and_optab,
                                  oldop, GEN_INT (imask), NULL_RTX,
                                  1, OPTAB_LIB_WIDEN);
            emit_insn (gen_ashlsi3 (newop, newop, shift));

            mask = gen_reg_rtx (SImode);
            emit_move_insn (mask, GEN_INT (imask));
            emit_insn (gen_ashlsi3 (mask, mask, shift));

            if (code == PLUS)
              newop = gen_rtx_PLUS (SImode, m, newop);
            else
              newop = gen_rtx_MINUS (SImode, m, newop);
            newop = gen_rtx_AND (SImode, newop, mask);
            newop = gen_rtx_IOR (SImode, newop,
                                 gen_rtx_AND (SImode,
                                              gen_rtx_NOT (SImode, mask),
                                              m));
            break;
          }

        default:
          gcc_unreachable ();
        }

      if (GET_CODE (m) == NOT)
        {
          rtx mask, xorm;

          mask = gen_reg_rtx (SImode);
          emit_move_insn (mask, GEN_INT (imask));
          emit_insn (gen_ashlsi3 (mask, mask, shift));

          xorm = gen_rtx_XOR (SImode, used_m, mask);
          /* Depending on the value of 'op', the XOR or the operation might
             be able to be simplified away.  */
          newop = simplify_gen_binary (code, SImode, xorm, newop);
        }
      op = newop;
      used_mode = SImode;
      before = gen_reg_rtx (used_mode);
      after = gen_reg_rtx (used_mode);
    }
  else
    {
      used_mode = mode;
      before = before_param;
      after = after_param;

      if (before == NULL_RTX)
        before = gen_reg_rtx (used_mode);
      if (after == NULL_RTX)
        after = gen_reg_rtx (used_mode);
    }

  if ((code == PLUS || code == MINUS || GET_CODE (m) == NOT)
      && used_mode != mode)
    the_op = op;  /* Computed above.  */
  else if (GET_CODE (op) == NOT && GET_CODE (m) != NOT)
    the_op = gen_rtx_fmt_ee (code, used_mode, op, m);
  else
    the_op = gen_rtx_fmt_ee (code, used_mode, m, op);

  set_after = gen_rtx_SET (VOIDmode, after, the_op);
  set_before = gen_rtx_SET (VOIDmode, before, used_m);
  set_atomic = gen_rtx_SET (VOIDmode, used_m,
                            gen_rtx_UNSPEC (used_mode,
                                            gen_rtvec (1, the_op),
                                            UNSPEC_SYNC_OP));
  cc_scratch = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (CCmode));

  if ((code == PLUS || code == MINUS) && used_mode != mode)
    vec = gen_rtvec (5, set_after, set_before, set_atomic, cc_scratch,
                     gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (SImode)));
  else
    vec = gen_rtvec (4, set_after, set_before, set_atomic, cc_scratch);
  emit_insn (gen_rtx_PARALLEL (VOIDmode, vec));

  /* Shift and mask the return values properly.  */
  if (used_mode != mode && before_param)
    {
      emit_insn (gen_lshrsi3 (before, before, shift));
      convert_move (before_param, before, 1);
    }

  if (used_mode != mode && after_param)
    {
      emit_insn (gen_lshrsi3 (after, after, shift));
      convert_move (after_param, after, 1);
    }

  /* The previous sequence will end with a branch that's dependent on
     the conditional store, so placing an isync will ensure that no
     other instructions (especially, no load or store instructions)
     can start before the atomic operation completes.  */
  if (sync_p)
    emit_insn (gen_isync ());
}
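
/* Illustrative sketch, not part of the original file: the rlwinm/xor
   pair used above computes the bit offset of a subword within its
   aligned SImode word.  SHIFT_MASK is 0x18 for QImode and 0x10 for
   HImode, as in the code above; the final XOR is the big-endian
   correction, since byte 0 of the word is the most significant.  */

static int ATTRIBUTE_UNUSED
demo_subword_shift (unsigned long addr, int shift_mask)
{
  /* rlwinm shift,addrSI,3,shift_mask is (addr << 3) & shift_mask.  */
  int shift = (int) ((addr << 3) & shift_mask);

  /* Big-endian: the highest-addressed subword needs no shift.  */
  return shift ^ shift_mask;
}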
/* A subroutine of the atomic operation splitters.  Jump to LABEL if
   COND is true.  Mark the jump as unlikely to be taken.  */

static void
emit_unlikely_jump (rtx cond, rtx label)
{
  rtx very_unlikely = GEN_INT (REG_BR_PROB_BASE / 100 - 1);
  rtx x;

  x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, label, pc_rtx);
  x = emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx, x));
  REG_NOTES (x) = gen_rtx_EXPR_LIST (REG_BR_PROB, very_unlikely, NULL_RTX);
}
/* A subroutine of the atomic operation splitters.  Emit a load-locked
   instruction in MODE.  */

static void
emit_load_locked (enum machine_mode mode, rtx reg, rtx mem)
{
  rtx (*fn) (rtx, rtx) = NULL;
  if (mode == SImode)
    fn = gen_load_locked_si;
  else if (mode == DImode)
    fn = gen_load_locked_di;
  emit_insn (fn (reg, mem));
}
/* A subroutine of the atomic operation splitters.  Emit a store-conditional
   instruction in MODE.  */

static void
emit_store_conditional (enum machine_mode mode, rtx res, rtx mem, rtx val)
{
  rtx (*fn) (rtx, rtx, rtx) = NULL;
  if (mode == SImode)
    fn = gen_store_conditional_si;
  else if (mode == DImode)
    fn = gen_store_conditional_di;

  /* Emit sync before stwcx. to address PPC405 Erratum.  */
  if (PPC405_ERRATUM77)
    emit_insn (gen_memory_barrier ());

  emit_insn (fn (res, mem, val));
}
/* Expand an atomic fetch-and-operate pattern.  CODE is the binary operation
   to perform.  MEM is the memory on which to operate.  VAL is the second
   operand of the binary operator.  BEFORE and AFTER are optional locations to
   return the value of MEM either before or after the operation.  SCRATCH is
   a scratch register.  */

void
rs6000_split_atomic_op (enum rtx_code code, rtx mem, rtx val,
                        rtx before, rtx after, rtx scratch)
{
  enum machine_mode mode = GET_MODE (mem);
  rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);

  emit_insn (gen_memory_barrier ());

  label = gen_label_rtx ();
  emit_label (label);
  label = gen_rtx_LABEL_REF (VOIDmode, label);

  if (before == NULL_RTX)
    before = scratch;
  emit_load_locked (mode, before, mem);

  if (code == NOT)
    x = gen_rtx_AND (mode, gen_rtx_NOT (mode, before), val);
  else if (code == AND)
    x = gen_rtx_UNSPEC (mode, gen_rtvec (2, before, val), UNSPEC_AND);
  else
    x = gen_rtx_fmt_ee (code, mode, before, val);

  if (after != NULL_RTX)
    emit_insn (gen_rtx_SET (VOIDmode, after, copy_rtx (x)));
  emit_insn (gen_rtx_SET (VOIDmode, scratch, x));

  emit_store_conditional (mode, cond, mem, scratch);

  x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
  emit_unlikely_jump (x, label);

  emit_insn (gen_isync ());
}
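
/* Illustrative sketch, not part of the original file: the
   larx/op/stcx./bne- loop split out above has the semantics of a C11
   compare-exchange loop.  A host-only rendition (assuming <stdatomic.h>
   is available) of the AND/NOT flavor, which stores ~before & val:  */

#if 0  /* Host-only demonstration, not compiled into GCC.  */
#include <stdatomic.h>

static unsigned
demo_atomic_andc (_Atomic unsigned *mem, unsigned val)
{
  unsigned before = atomic_load (mem);

  /* Retry until no other thread intervened, like looping on stcx.  */
  while (!atomic_compare_exchange_weak (mem, &before, ~before & val))
    ;
  return before;  /* Value of *mem before the operation.  */
}
#endif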
/* Expand an atomic compare and swap operation.  MEM is the memory on which
   to operate.  OLDVAL is the old value to be compared.  NEWVAL is the new
   value to be stored.  SCRATCH is a scratch GPR.  */

void
rs6000_split_compare_and_swap (rtx retval, rtx mem, rtx oldval, rtx newval,
                               rtx scratch)
{
  enum machine_mode mode = GET_MODE (mem);
  rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);

  emit_insn (gen_memory_barrier ());

  label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
  label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
  emit_label (XEXP (label1, 0));

  emit_load_locked (mode, retval, mem);

  x = gen_rtx_COMPARE (CCmode, retval, oldval);
  emit_insn (gen_rtx_SET (VOIDmode, cond, x));

  x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
  emit_unlikely_jump (x, label2);

  emit_move_insn (scratch, newval);
  emit_store_conditional (mode, cond, mem, scratch);

  x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
  emit_unlikely_jump (x, label1);

  emit_insn (gen_isync ());
  emit_label (XEXP (label2, 0));
}
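
/* Illustrative sketch, not part of the original file: the sequence just
   emitted implements the usual compare-and-swap contract -- RETVAL
   receives the old value, and the store happens only if it equaled
   OLDVAL.  In portable C11 terms (assuming host support):  */

#if 0  /* Host-only demonstration, not compiled into GCC.  */
#include <stdatomic.h>

static unsigned
demo_compare_and_swap (_Atomic unsigned *mem, unsigned oldval,
                       unsigned newval)
{
  unsigned expected = oldval;

  /* Strong CAS: no spurious failure, like retrying stcx. until the
     reservation succeeds or the values really differ.  */
  atomic_compare_exchange_strong (mem, &expected, newval);
  return expected;  /* Old contents of *mem.  */
}
#endif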
/* Expand an atomic test and set operation.  MEM is the memory on which
   to operate.  VAL is the value set.  SCRATCH is a scratch GPR.  */

void
rs6000_split_lock_test_and_set (rtx retval, rtx mem, rtx val, rtx scratch)
{
  enum machine_mode mode = GET_MODE (mem);
  rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);

  emit_insn (gen_memory_barrier ());

  label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
  emit_label (XEXP (label, 0));

  emit_load_locked (mode, retval, mem);
  emit_move_insn (scratch, val);
  emit_store_conditional (mode, cond, mem, scratch);

  x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
  emit_unlikely_jump (x, label);

  emit_insn (gen_isync ());
}
void
rs6000_expand_compare_and_swapqhi (rtx dst, rtx mem, rtx oldval, rtx newval)
{
  enum machine_mode mode = GET_MODE (mem);
  rtx addrSI, align, wdst, shift, mask;
  HOST_WIDE_INT shift_mask = mode == QImode ? 0x18 : 0x10;
  HOST_WIDE_INT imask = GET_MODE_MASK (mode);

  /* Shift amount for subword relative to aligned word.  */
  addrSI = force_reg (SImode, gen_lowpart_common (SImode, XEXP (mem, 0)));
  shift = gen_reg_rtx (SImode);
  emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
			 GEN_INT (shift_mask)));
  emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));

  /* Shift and mask old value into position within word.  */
  oldval = convert_modes (SImode, mode, oldval, 1);
  oldval = expand_binop (SImode, and_optab,
			 oldval, GEN_INT (imask), NULL_RTX,
			 1, OPTAB_LIB_WIDEN);
  emit_insn (gen_ashlsi3 (oldval, oldval, shift));

  /* Shift and mask new value into position within word.  */
  newval = convert_modes (SImode, mode, newval, 1);
  newval = expand_binop (SImode, and_optab,
			 newval, GEN_INT (imask), NULL_RTX,
			 1, OPTAB_LIB_WIDEN);
  emit_insn (gen_ashlsi3 (newval, newval, shift));

  /* Mask for insertion.  */
  mask = gen_reg_rtx (SImode);
  emit_move_insn (mask, GEN_INT (imask));
  emit_insn (gen_ashlsi3 (mask, mask, shift));

  /* Address of aligned word containing subword.  */
  align = expand_binop (Pmode, and_optab, XEXP (mem, 0), GEN_INT (-4),
			NULL_RTX, 1, OPTAB_LIB_WIDEN);
  mem = change_address (mem, SImode, align);
  set_mem_align (mem, 32);
  MEM_VOLATILE_P (mem) = 1;

  wdst = gen_reg_rtx (SImode);
  emit_insn (gen_sync_compare_and_swapqhi_internal (wdst, mask,
						    oldval, newval, mem));

  emit_move_insn (dst, gen_lowpart (mode, wdst));
}
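/* Worked example of the shift computation above, for big-endian byte
   numbering: for a QImode access (shift_mask == 0x18) at an address
   with low bits 0b01, the rlwinm leaves (addr << 3) & 0x18 == 8, and
   the xor flips that to 16 -- exactly the left shift that positions
   byte 1 within the aligned word.  Byte 0 (the most significant byte)
   gets shift 24, byte 3 gets shift 0.  */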
void
rs6000_split_compare_and_swapqhi (rtx dest, rtx mask,
				  rtx oldval, rtx newval, rtx mem,
				  rtx scratch)
{
  rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);

  emit_insn (gen_memory_barrier ());
  label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
  label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
  emit_label (XEXP (label1, 0));

  emit_load_locked (SImode, scratch, mem);

  /* Mask subword within loaded value for comparison with oldval.
     Use UNSPEC_AND to avoid clobber.  */
  emit_insn (gen_rtx_SET (SImode, dest,
			  gen_rtx_UNSPEC (SImode,
					  gen_rtvec (2, scratch, mask),
					  UNSPEC_AND)));

  x = gen_rtx_COMPARE (CCmode, dest, oldval);
  emit_insn (gen_rtx_SET (VOIDmode, cond, x));

  x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
  emit_unlikely_jump (x, label2);

  /* Clear subword within loaded value for insertion of new value.  */
  emit_insn (gen_rtx_SET (SImode, scratch,
			  gen_rtx_AND (SImode,
				       gen_rtx_NOT (SImode, mask), scratch)));
  emit_insn (gen_iorsi3 (scratch, scratch, newval));
  emit_store_conditional (SImode, cond, mem, scratch);

  x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
  emit_unlikely_jump (x, label1);

  emit_insn (gen_isync ());
  emit_label (XEXP (label2, 0));
}
/* Emit instructions to move SRC to DST.  Called by splitters for
   multi-register moves.  It will emit at most one instruction for
   each register that is accessed; that is, it won't emit li/lis pairs
   (or equivalent for 64-bit code).  One of SRC or DST must be a hard
   register.  */

void
rs6000_split_multireg_move (rtx dst, rtx src)
{
  /* The register number of the first register being moved.  */
  int reg;
  /* The mode that is to be moved.  */
  enum machine_mode mode;
  /* The mode that the move is being done in, and its size.  */
  enum machine_mode reg_mode;
  int reg_mode_size;
  /* The number of registers that will be moved.  */
  int nregs;

  reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
  mode = GET_MODE (dst);
  nregs = hard_regno_nregs[reg][mode];
  if (FP_REGNO_P (reg))
    reg_mode = DFmode;
  else if (ALTIVEC_REGNO_P (reg))
    reg_mode = V16QImode;
  else if (TARGET_E500_DOUBLE && mode == TFmode)
    reg_mode = DFmode;
  else
    reg_mode = word_mode;
  reg_mode_size = GET_MODE_SIZE (reg_mode);

  gcc_assert (reg_mode_size * nregs == GET_MODE_SIZE (mode));

  if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
    {
      /* Move register range backwards, if we might have destructive
	 overlap.  */
      int i;
      for (i = nregs - 1; i >= 0; i--)
	emit_insn (gen_rtx_SET (VOIDmode,
				simplify_gen_subreg (reg_mode, dst, mode,
						     i * reg_mode_size),
				simplify_gen_subreg (reg_mode, src, mode,
						     i * reg_mode_size)));
    }
  else
    {
      int i;
      int j = -1;
      bool used_update = false;

      if (MEM_P (src) && INT_REGNO_P (reg))
	{
	  rtx breg;

	  if (GET_CODE (XEXP (src, 0)) == PRE_INC
	      || GET_CODE (XEXP (src, 0)) == PRE_DEC)
	    {
	      rtx delta_rtx;
	      breg = XEXP (XEXP (src, 0), 0);
	      delta_rtx = (GET_CODE (XEXP (src, 0)) == PRE_INC
			   ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
			   : GEN_INT (-GET_MODE_SIZE (GET_MODE (src))));
	      emit_insn (TARGET_32BIT
			 ? gen_addsi3 (breg, breg, delta_rtx)
			 : gen_adddi3 (breg, breg, delta_rtx));
	      src = replace_equiv_address (src, breg);
	    }
	  else if (! rs6000_offsettable_memref_p (src))
	    {
	      rtx basereg;
	      basereg = gen_rtx_REG (Pmode, reg);
	      emit_insn (gen_rtx_SET (VOIDmode, basereg, XEXP (src, 0)));
	      src = replace_equiv_address (src, basereg);
	    }

	  breg = XEXP (src, 0);
	  if (GET_CODE (breg) == PLUS || GET_CODE (breg) == LO_SUM)
	    breg = XEXP (breg, 0);

	  /* If the base register we are using to address memory is
	     also a destination reg, then change that register last.  */
	  if (REG_P (breg)
	      && REGNO (breg) >= REGNO (dst)
	      && REGNO (breg) < REGNO (dst) + nregs)
	    j = REGNO (breg) - REGNO (dst);
	}

      if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
	{
	  rtx breg;

	  if (GET_CODE (XEXP (dst, 0)) == PRE_INC
	      || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
	    {
	      rtx delta_rtx;
	      breg = XEXP (XEXP (dst, 0), 0);
	      delta_rtx = (GET_CODE (XEXP (dst, 0)) == PRE_INC
			   ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
			   : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst))));

	      /* We have to update the breg before doing the store.
		 Use store with update, if available.  */

	      if (TARGET_UPDATE)
		{
		  rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
		  emit_insn (TARGET_32BIT
			     ? (TARGET_POWERPC64
				? gen_movdi_si_update (breg, breg, delta_rtx, nsrc)
				: gen_movsi_update (breg, breg, delta_rtx, nsrc))
			     : gen_movdi_di_update (breg, breg, delta_rtx, nsrc));
		  used_update = true;
		}
	      else
		emit_insn (TARGET_32BIT
			   ? gen_addsi3 (breg, breg, delta_rtx)
			   : gen_adddi3 (breg, breg, delta_rtx));
	      dst = replace_equiv_address (dst, breg);
	    }
	  else
	    gcc_assert (rs6000_offsettable_memref_p (dst));
	}

      for (i = 0; i < nregs; i++)
	{
	  /* Calculate index to next subword.  */
	  j++;
	  if (j == nregs)
	    j = 0;

	  /* If compiler already emitted move of first word by
	     store with update, no need to do anything.  */
	  if (j == 0 && used_update)
	    continue;

	  emit_insn (gen_rtx_SET (VOIDmode,
				  simplify_gen_subreg (reg_mode, dst, mode,
						       j * reg_mode_size),
				  simplify_gen_subreg (reg_mode, src, mode,
						       j * reg_mode_size)));
	}
    }
}
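/* For example (a sketch, not compiler output): splitting a TImode
   move from r3..r6 into r5..r8 on a 32-bit target yields four
   word-mode SETs; because REGNO (src) < REGNO (dst) the ranges can
   overlap destructively, so the first loop above emits the moves
   highest-numbered register first, ensuring no source word is
   clobbered before it has been read.  */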
/* This page contains routines that are used to determine what the
   function prologue and epilogue code will do and write them out.  */

/* Return the first fixed-point register that is required to be
   saved. 32 if none.  */

int
first_reg_to_save (void)
{
  int first_reg;

  /* Find lowest numbered live register.  */
  for (first_reg = 13; first_reg <= 31; first_reg++)
    if (regs_ever_live[first_reg]
	&& (! call_used_regs[first_reg]
	    || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
		&& ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
		    || (DEFAULT_ABI == ABI_DARWIN && flag_pic)
		    || (TARGET_TOC && TARGET_MINIMAL_TOC)))))
      break;

#if TARGET_MACHO
  if (flag_pic
      && current_function_uses_pic_offset_table
      && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
    return RS6000_PIC_OFFSET_TABLE_REGNUM;
#endif

  return first_reg;
}
/* Similar, for FP regs.  */

int
first_fp_reg_to_save (void)
{
  int first_reg;

  /* Find lowest numbered live register.  */
  for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
    if (regs_ever_live[first_reg])
      break;

  return first_reg;
}
/* Similar, for AltiVec regs.  */

static int
first_altivec_reg_to_save (void)
{
  int i;

  /* Stack frame remains as is unless we are in AltiVec ABI.  */
  if (! TARGET_ALTIVEC_ABI)
    return LAST_ALTIVEC_REGNO + 1;

  /* On Darwin, the unwind routines are compiled without
     TARGET_ALTIVEC, and use save_world to save/restore the
     altivec registers when necessary.  */
  if (DEFAULT_ABI == ABI_DARWIN && current_function_calls_eh_return
      && ! TARGET_ALTIVEC)
    return FIRST_ALTIVEC_REGNO + 20;

  /* Find lowest numbered live register.  */
  for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
    if (regs_ever_live[i])
      break;

  return i;
}
/* Return a 32-bit mask of the AltiVec registers we need to set in
   VRSAVE.  Bit n of the return value is 1 if Vn is live.  The MSB in
   the 32-bit word is 0.  */

static unsigned int
compute_vrsave_mask (void)
{
  unsigned int i, mask = 0;

  /* On Darwin, the unwind routines are compiled without
     TARGET_ALTIVEC, and use save_world to save/restore the
     call-saved altivec registers when necessary.  */
  if (DEFAULT_ABI == ABI_DARWIN && current_function_calls_eh_return
      && ! TARGET_ALTIVEC)
    mask |= 0xFFF;

  /* First, find out if we use _any_ altivec registers.  */
  for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
    if (regs_ever_live[i])
      mask |= ALTIVEC_REG_BIT (i);

  if (mask == 0)
    return mask;

  /* Next, remove the argument registers from the set.  These must
     be in the VRSAVE mask set by the caller, so we don't need to add
     them in again.  More importantly, the mask we compute here is
     used to generate CLOBBERs in the set_vrsave insn, and we do not
     wish the argument registers to die.  */
  for (i = cfun->args_info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
    mask &= ~ALTIVEC_REG_BIT (i);

  /* Similarly, remove the return value from the set.  */
  {
    bool yes = false;
    diddle_return_value (is_altivec_return_reg, &yes);
    if (yes)
      mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
  }

  return mask;
}
/* For a very restricted set of circumstances, we can cut down the
   size of prologues/epilogues by calling our own save/restore-the-world
   routines.  */

static void
compute_save_world_info (rs6000_stack_t *info_ptr)
{
  info_ptr->world_save_p = 1;
  info_ptr->world_save_p
    = (WORLD_SAVE_P (info_ptr)
       && DEFAULT_ABI == ABI_DARWIN
       && ! (current_function_calls_setjmp && flag_exceptions)
       && info_ptr->first_fp_reg_save == FIRST_SAVED_FP_REGNO
       && info_ptr->first_gp_reg_save == FIRST_SAVED_GP_REGNO
       && info_ptr->first_altivec_reg_save == FIRST_SAVED_ALTIVEC_REGNO
       && info_ptr->cr_save_p);

  /* This will not work in conjunction with sibcalls.  Make sure there
     are none.  (This check is expensive, but seldom executed.) */
  if (WORLD_SAVE_P (info_ptr))
    {
      rtx insn;
      for (insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
	if (GET_CODE (insn) == CALL_INSN
	    && SIBLING_CALL_P (insn))
	  {
	    info_ptr->world_save_p = 0;
	    break;
	  }
    }

  if (WORLD_SAVE_P (info_ptr))
    {
      /* Even if we're not touching VRsave, make sure there's room on the
	 stack for it, if it looks like we're calling SAVE_WORLD, which
	 will attempt to save it. */
      info_ptr->vrsave_size = 4;

      /* "Save" the VRsave register too if we're saving the world.  */
      if (info_ptr->vrsave_mask == 0)
	info_ptr->vrsave_mask = compute_vrsave_mask ();

      /* Because the Darwin register save/restore routines only handle
	 F14 .. F31 and V20 .. V31 as per the ABI, perform a consistency
	 check.  */
      gcc_assert (info_ptr->first_fp_reg_save >= FIRST_SAVED_FP_REGNO
		  && (info_ptr->first_altivec_reg_save
		      >= FIRST_SAVED_ALTIVEC_REGNO));
    }
}
static void
is_altivec_return_reg (rtx reg, void *xyes)
{
  bool *yes = (bool *) xyes;
  if (REGNO (reg) == ALTIVEC_ARG_RETURN)
    *yes = true;
}
/* Calculate the stack information for the current function.  This is
   complicated by having two separate calling sequences, the AIX calling
   sequence and the V.4 calling sequence.

   AIX (and Darwin/Mac OS X) stack frames look like:
							  32-bit  64-bit
	SP---->	+---------------------------------------+
		| back chain to caller			| 0	  0
		+---------------------------------------+
		| saved CR				| 4	  8 (8-11)
		+---------------------------------------+
		| saved LR				| 8	  16
		+---------------------------------------+
		| reserved for compilers		| 12	  24
		+---------------------------------------+
		| reserved for binders			| 16	  32
		+---------------------------------------+
		| saved TOC pointer			| 20	  40
		+---------------------------------------+
		| Parameter save area (P)		| 24	  48
		+---------------------------------------+
		| Alloca space (A)			| 24+P	  etc.
		+---------------------------------------+
		| Local variable space (L)		| 24+P+A
		+---------------------------------------+
		| Float/int conversion temporary (X)	| 24+P+A+L
		+---------------------------------------+
		| Save area for AltiVec registers (W)	| 24+P+A+L+X
		+---------------------------------------+
		| AltiVec alignment padding (Y)		| 24+P+A+L+X+W
		+---------------------------------------+
		| Save area for VRSAVE register (Z)	| 24+P+A+L+X+W+Y
		+---------------------------------------+
		| Save area for GP registers (G)	| 24+P+A+X+L+X+W+Y+Z
		+---------------------------------------+
		| Save area for FP registers (F)	| 24+P+A+X+L+X+W+Y+Z+G
		+---------------------------------------+
	old SP->| back chain to caller's caller		|
		+---------------------------------------+

   The required alignment for AIX configurations is two words (i.e., 8
   or 16 bytes).

   V.4 stack frames look like:

	SP---->	+---------------------------------------+
		| back chain to caller			| 0
		+---------------------------------------+
		| caller's saved LR			| 4
		+---------------------------------------+
		| Parameter save area (P)		| 8
		+---------------------------------------+
		| Alloca space (A)			| 8+P
		+---------------------------------------+
		| Varargs save area (V)			| 8+P+A
		+---------------------------------------+
		| Local variable space (L)		| 8+P+A+V
		+---------------------------------------+
		| Float/int conversion temporary (X)	| 8+P+A+V+L
		+---------------------------------------+
		| Save area for AltiVec registers (W)	| 8+P+A+V+L+X
		+---------------------------------------+
		| AltiVec alignment padding (Y)		| 8+P+A+V+L+X+W
		+---------------------------------------+
		| Save area for VRSAVE register (Z)	| 8+P+A+V+L+X+W+Y
		+---------------------------------------+
		| SPE: area for 64-bit GP registers	|
		+---------------------------------------+
		| SPE alignment padding			|
		+---------------------------------------+
		| saved CR (C)				| 8+P+A+V+L+X+W+Y+Z
		+---------------------------------------+
		| Save area for GP registers (G)	| 8+P+A+V+L+X+W+Y+Z+C
		+---------------------------------------+
		| Save area for FP registers (F)	| 8+P+A+V+L+X+W+Y+Z+C+G
		+---------------------------------------+
	old SP->| back chain to caller's caller		|
		+---------------------------------------+

   The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
   given.  (But note below and in sysv4.h that we require only 8 and
   may round up the size of our stack frame anyways.  The historical
   reason is early versions of powerpc-linux which didn't properly
   align the stack at program startup.  A happy side-effect is that
   -mno-eabi libraries can be used with -meabi programs.)

   The EABI configuration defaults to the V.4 layout.  However,
   the stack alignment requirements may differ.  If -mno-eabi is not
   given, the required stack alignment is 8 bytes; if -mno-eabi is
   given, the required alignment is 16 bytes.  (But see V.4 comment
   above.)  */

#ifndef ABI_STACK_BOUNDARY
#define ABI_STACK_BOUNDARY STACK_BOUNDARY
#endif
static rs6000_stack_t *
rs6000_stack_info (void)
{
  static rs6000_stack_t info;
  rs6000_stack_t *info_ptr = &info;
  int reg_size = TARGET_32BIT ? 4 : 8;
  int ehrd_size;
  int save_align;
  HOST_WIDE_INT non_fixed_size;

  memset (&info, 0, sizeof (info));

  if (TARGET_SPE)
    {
      /* Cache value so we don't rescan instruction chain over and over.  */
      if (cfun->machine->insn_chain_scanned_p == 0)
	cfun->machine->insn_chain_scanned_p
	  = spe_func_has_64bit_regs_p () + 1;
      info_ptr->spe_64bit_regs_used = cfun->machine->insn_chain_scanned_p - 1;
    }

  /* Select which calling sequence.  */
  info_ptr->abi = DEFAULT_ABI;

  /* Calculate which registers need to be saved & save area size.  */
  info_ptr->first_gp_reg_save = first_reg_to_save ();
  /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
     even if it currently looks like we won't.  */
  if (((TARGET_TOC && TARGET_MINIMAL_TOC)
       || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
       || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
      && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
    info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
  else
    info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);

  /* For the SPE, we have an additional upper 32-bits on each GPR.
     Ideally we should save the entire 64-bits only when the upper
     half is used in SIMD instructions.  Since we only record
     registers live (not the size they are used in), this proves
     difficult because we'd have to traverse the instruction chain at
     the right time, taking reload into account.  This is a real pain,
     so we opt to save the GPRs in 64-bits always if but one register
     gets used in 64-bits.  Otherwise, all the registers in the frame
     get saved in 32-bits.

     So... since when we save all GPRs (except the SP) in 64-bits, the
     traditional GP save area will be empty.  */
  if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
    info_ptr->gp_size = 0;

  info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
  info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);

  info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
  info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
				 - info_ptr->first_altivec_reg_save);

  /* Does this function call anything?  */
  info_ptr->calls_p = (! current_function_is_leaf
		       || cfun->machine->ra_needs_full_frame);

  /* Determine if we need to save the link register.  */
  if ((DEFAULT_ABI == ABI_AIX
       && current_function_profile
       && !TARGET_PROFILE_KERNEL)
#ifdef TARGET_RELOCATABLE
      || (TARGET_RELOCATABLE && (get_pool_size () != 0))
#endif
      || (info_ptr->first_fp_reg_save != 64
	  && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
      || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
      || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
      || info_ptr->calls_p
      || rs6000_ra_ever_killed ())
    {
      info_ptr->lr_save_p = 1;
      regs_ever_live[LINK_REGISTER_REGNUM] = 1;
    }

  /* Determine if we need to save the condition code registers.  */
  if (regs_ever_live[CR2_REGNO]
      || regs_ever_live[CR3_REGNO]
      || regs_ever_live[CR4_REGNO])
    {
      info_ptr->cr_save_p = 1;
      if (DEFAULT_ABI == ABI_V4)
	info_ptr->cr_size = reg_size;
    }

  /* If the current function calls __builtin_eh_return, then we need
     to allocate stack space for registers that will hold data for
     the exception handler.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i;
      for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
	continue;

      /* SPE saves EH registers in 64-bits.  */
      ehrd_size = i * (TARGET_SPE_ABI
		       && info_ptr->spe_64bit_regs_used != 0
		       ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
    }
  else
    ehrd_size = 0;

  /* Determine various sizes.  */
  info_ptr->reg_size = reg_size;
  info_ptr->fixed_size = RS6000_SAVE_AREA;
  info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
  info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
				      TARGET_ALTIVEC ? 16 : 8);
  if (FRAME_GROWS_DOWNWARD)
    info_ptr->vars_size
      += RS6000_ALIGN (info_ptr->fixed_size + info_ptr->vars_size
		       + info_ptr->parm_size,
		       ABI_STACK_BOUNDARY / BITS_PER_UNIT)
	 - (info_ptr->fixed_size + info_ptr->vars_size
	    + info_ptr->parm_size);

  if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
    info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
  else
    info_ptr->spe_gp_size = 0;

  if (TARGET_ALTIVEC_ABI)
    info_ptr->vrsave_mask = compute_vrsave_mask ();
  else
    info_ptr->vrsave_mask = 0;

  if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
    info_ptr->vrsave_size = 4;
  else
    info_ptr->vrsave_size = 0;

  compute_save_world_info (info_ptr);

  /* Calculate the offsets.  */
  switch (DEFAULT_ABI)
    {
    case ABI_NONE:
    default:
      gcc_unreachable ();

    case ABI_AIX:
    case ABI_DARWIN:
      info_ptr->fp_save_offset = - info_ptr->fp_size;
      info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;

      if (TARGET_ALTIVEC_ABI)
	{
	  info_ptr->vrsave_save_offset
	    = info_ptr->gp_save_offset - info_ptr->vrsave_size;

	  /* Align stack so vector save area is on a quadword boundary.
	     The padding goes above the vectors.  */
	  if (info_ptr->altivec_size != 0)
	    info_ptr->altivec_padding_size
	      = info_ptr->vrsave_save_offset & 0xF;
	  else
	    info_ptr->altivec_padding_size = 0;

	  info_ptr->altivec_save_offset
	    = info_ptr->vrsave_save_offset
	    - info_ptr->altivec_padding_size
	    - info_ptr->altivec_size;
	  gcc_assert (info_ptr->altivec_size == 0
		      || info_ptr->altivec_save_offset % 16 == 0);

	  /* Adjust for AltiVec case.  */
	  info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
	}
      else
	info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
      info_ptr->cr_save_offset = reg_size; /* first word when 64-bit.  */
      info_ptr->lr_save_offset = 2*reg_size;
      break;

    case ABI_V4:
      info_ptr->fp_save_offset = - info_ptr->fp_size;
      info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
      info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;

      if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
	{
	  /* Align stack so SPE GPR save area is aligned on a
	     double-word boundary.  */
	  if (info_ptr->spe_gp_size != 0)
	    info_ptr->spe_padding_size
	      = 8 - (-info_ptr->cr_save_offset % 8);
	  else
	    info_ptr->spe_padding_size = 0;

	  info_ptr->spe_gp_save_offset
	    = info_ptr->cr_save_offset
	    - info_ptr->spe_padding_size
	    - info_ptr->spe_gp_size;

	  /* Adjust for SPE case.  */
	  info_ptr->ehrd_offset = info_ptr->spe_gp_save_offset;
	}
      else if (TARGET_ALTIVEC_ABI)
	{
	  info_ptr->vrsave_save_offset
	    = info_ptr->cr_save_offset - info_ptr->vrsave_size;

	  /* Align stack so vector save area is on a quadword boundary.  */
	  if (info_ptr->altivec_size != 0)
	    info_ptr->altivec_padding_size
	      = 16 - (-info_ptr->vrsave_save_offset % 16);
	  else
	    info_ptr->altivec_padding_size = 0;

	  info_ptr->altivec_save_offset
	    = info_ptr->vrsave_save_offset
	    - info_ptr->altivec_padding_size
	    - info_ptr->altivec_size;

	  /* Adjust for AltiVec case.  */
	  info_ptr->ehrd_offset = info_ptr->altivec_save_offset;
	}
      else
	info_ptr->ehrd_offset = info_ptr->cr_save_offset;
      info_ptr->ehrd_offset -= ehrd_size;
      info_ptr->lr_save_offset = reg_size;
      break;
    }

  save_align = (TARGET_ALTIVEC_ABI || DEFAULT_ABI == ABI_DARWIN) ? 16 : 8;
  info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
				      + info_ptr->gp_size
				      + info_ptr->altivec_size
				      + info_ptr->altivec_padding_size
				      + info_ptr->spe_gp_size
				      + info_ptr->spe_padding_size
				      + ehrd_size
				      + info_ptr->cr_size
				      + info_ptr->vrsave_size,
				      save_align);

  non_fixed_size = (info_ptr->vars_size
		    + info_ptr->parm_size
		    + info_ptr->save_size);

  info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
				       ABI_STACK_BOUNDARY / BITS_PER_UNIT);

  /* Determine if we need to allocate any stack frame:

     For AIX we need to push the stack if a frame pointer is needed
     (because the stack might be dynamically adjusted), if we are
     debugging, if we make calls, or if the sum of fp_save, gp_save,
     and local variables are more than the space needed to save all
     non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
     + 18*8 = 288 (GPR13 reserved).

     For V.4 we don't have the stack cushion that AIX uses, but assume
     that the debugger can handle stackless frames.  */

  if (info_ptr->calls_p)
    info_ptr->push_p = 1;

  else if (DEFAULT_ABI == ABI_V4)
    info_ptr->push_p = non_fixed_size != 0;

  else if (frame_pointer_needed)
    info_ptr->push_p = 1;

  else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
    info_ptr->push_p = 1;

  else
    info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);

  /* Zero offsets if we're not saving those registers.  */
  if (info_ptr->fp_size == 0)
    info_ptr->fp_save_offset = 0;

  if (info_ptr->gp_size == 0)
    info_ptr->gp_save_offset = 0;

  if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
    info_ptr->altivec_save_offset = 0;

  if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
    info_ptr->vrsave_save_offset = 0;

  if (! TARGET_SPE_ABI
      || info_ptr->spe_64bit_regs_used == 0
      || info_ptr->spe_gp_size == 0)
    info_ptr->spe_gp_save_offset = 0;

  if (! info_ptr->lr_save_p)
    info_ptr->lr_save_offset = 0;

  if (! info_ptr->cr_save_p)
    info_ptr->cr_save_offset = 0;

  return info_ptr;
}
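/* Note: the returned rs6000_stack_t lives in a function-local static
   and is recomputed from scratch (memset plus full recalculation) on
   every call; only the SPE insn-chain scan is cached in
   cfun->machine.  Callers such as the prologue/epilogue emitters and
   debug_stack_info below therefore simply call rs6000_stack_info ()
   whenever they need the frame layout.  */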
/* Return true if the current function uses any GPRs in 64-bit SIMD
   mode.  */

static bool
spe_func_has_64bit_regs_p (void)
{
  rtx insns, insn;

  /* Functions that save and restore all the call-saved registers will
     need to save/restore the registers in 64-bits.  */
  if (current_function_calls_eh_return
      || current_function_calls_setjmp
      || current_function_has_nonlocal_goto)
    return true;

  insns = get_insns ();

  for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	{
	  rtx i;

	  /* FIXME: This should be implemented with attributes...

		 (set_attr "spe64" "true")....then,
		 if (get_spe64(insn)) return true;

	     It's the only reliable way to do the stuff below.  */

	  i = PATTERN (insn);
	  if (GET_CODE (i) == SET)
	    {
	      enum machine_mode mode = GET_MODE (SET_SRC (i));

	      if (SPE_VECTOR_MODE (mode))
		return true;
	      if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode))
		return true;
	    }
	}
    }

  return false;
}
void
debug_stack_info (rs6000_stack_t *info)
{
  const char *abi_string;

  if (! info)
    info = rs6000_stack_info ();

  fprintf (stderr, "\nStack information for function %s:\n",
	   ((current_function_decl && DECL_NAME (current_function_decl))
	    ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
	    : "<unknown>"));

  switch (info->abi)
    {
    default:		 abi_string = "Unknown";	break;
    case ABI_NONE:	 abi_string = "NONE";		break;
    case ABI_AIX:	 abi_string = "AIX";		break;
    case ABI_DARWIN:	 abi_string = "Darwin";		break;
    case ABI_V4:	 abi_string = "V.4";		break;
    }

  fprintf (stderr, "\tABI                 = %5s\n", abi_string);

  if (TARGET_ALTIVEC_ABI)
    fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");

  if (TARGET_SPE_ABI)
    fprintf (stderr, "\tSPE ABI extensions enabled.\n");

  if (info->first_gp_reg_save != 32)
    fprintf (stderr, "\tfirst_gp_reg_save   = %5d\n", info->first_gp_reg_save);

  if (info->first_fp_reg_save != 64)
    fprintf (stderr, "\tfirst_fp_reg_save   = %5d\n", info->first_fp_reg_save);

  if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
    fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
	     info->first_altivec_reg_save);

  if (info->lr_save_p)
    fprintf (stderr, "\tlr_save_p           = %5d\n", info->lr_save_p);

  if (info->cr_save_p)
    fprintf (stderr, "\tcr_save_p           = %5d\n", info->cr_save_p);

  if (info->vrsave_mask)
    fprintf (stderr, "\tvrsave_mask         = 0x%x\n", info->vrsave_mask);

  if (info->push_p)
    fprintf (stderr, "\tpush_p              = %5d\n", info->push_p);

  if (info->calls_p)
    fprintf (stderr, "\tcalls_p             = %5d\n", info->calls_p);

  if (info->gp_save_offset)
    fprintf (stderr, "\tgp_save_offset      = %5d\n", info->gp_save_offset);

  if (info->fp_save_offset)
    fprintf (stderr, "\tfp_save_offset      = %5d\n", info->fp_save_offset);

  if (info->altivec_save_offset)
    fprintf (stderr, "\taltivec_save_offset = %5d\n",
	     info->altivec_save_offset);

  if (info->spe_gp_save_offset)
    fprintf (stderr, "\tspe_gp_save_offset  = %5d\n",
	     info->spe_gp_save_offset);

  if (info->vrsave_save_offset)
    fprintf (stderr, "\tvrsave_save_offset  = %5d\n",
	     info->vrsave_save_offset);

  if (info->lr_save_offset)
    fprintf (stderr, "\tlr_save_offset      = %5d\n", info->lr_save_offset);

  if (info->cr_save_offset)
    fprintf (stderr, "\tcr_save_offset      = %5d\n", info->cr_save_offset);

  if (info->varargs_save_offset)
    fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);

  if (info->total_size)
    fprintf (stderr, "\ttotal_size          = "HOST_WIDE_INT_PRINT_DEC"\n",
	     info->total_size);

  if (info->vars_size)
    fprintf (stderr, "\tvars_size           = "HOST_WIDE_INT_PRINT_DEC"\n",
	     info->vars_size);

  if (info->parm_size)
    fprintf (stderr, "\tparm_size           = %5d\n", info->parm_size);

  if (info->fixed_size)
    fprintf (stderr, "\tfixed_size          = %5d\n", info->fixed_size);

  if (info->gp_size)
    fprintf (stderr, "\tgp_size             = %5d\n", info->gp_size);

  if (info->spe_gp_size)
    fprintf (stderr, "\tspe_gp_size         = %5d\n", info->spe_gp_size);

  if (info->fp_size)
    fprintf (stderr, "\tfp_size             = %5d\n", info->fp_size);

  if (info->altivec_size)
    fprintf (stderr, "\taltivec_size        = %5d\n", info->altivec_size);

  if (info->vrsave_size)
    fprintf (stderr, "\tvrsave_size         = %5d\n", info->vrsave_size);

  if (info->altivec_padding_size)
    fprintf (stderr, "\taltivec_padding_size= %5d\n",
	     info->altivec_padding_size);

  if (info->spe_padding_size)
    fprintf (stderr, "\tspe_padding_size    = %5d\n",
	     info->spe_padding_size);

  if (info->cr_size)
    fprintf (stderr, "\tcr_size             = %5d\n", info->cr_size);

  if (info->save_size)
    fprintf (stderr, "\tsave_size           = %5d\n", info->save_size);

  if (info->reg_size != 4)
    fprintf (stderr, "\treg_size            = %5d\n", info->reg_size);

  fprintf (stderr, "\n");
}
rtx
rs6000_return_addr (int count, rtx frame)
{
  /* Currently we don't optimize very well between prolog and body
     code and for PIC code the code can be actually quite bad, so
     don't try to be too clever here.  */
  if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
    {
      cfun->machine->ra_needs_full_frame = 1;

      return
	gen_rtx_MEM
	  (Pmode,
	   memory_address
	     (Pmode,
	      plus_constant (copy_to_reg
			     (gen_rtx_MEM (Pmode,
					   memory_address (Pmode, frame))),
			     RETURN_ADDRESS_OFFSET)));
    }

  cfun->machine->ra_need_lr = 1;
  return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
}
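/* This is the routine behind __builtin_return_address on this port
   (RETURN_ADDR_RTX expands to a call to rs6000_return_addr).  In the
   COUNT != 0 and PIC cases the address is recovered by following the
   back-chain word at offset 0 of the frame, which is why those cases
   must force a full frame via ra_needs_full_frame.  */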
/* Say whether a function is a candidate for sibcall handling or not.
   We do not allow indirect calls to be optimized into sibling calls.
   Also, we can't do it if there are any vector parameters; there's
   nowhere to put the VRsave code so it works; note that functions with
   vector parameters are required to have a prototype, so the argument
   type info must be available here.  (The tail recursion case can work
   with vector parameters, but there's no way to distinguish here.) */
static bool
rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
{
  tree type;
  if (decl)
    {
      if (TARGET_ALTIVEC_VRSAVE)
	{
	  for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
	       type; type = TREE_CHAIN (type))
	    {
	      if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
		return false;
	    }
	}
      if (DEFAULT_ABI == ABI_DARWIN
	  || (*targetm.binds_local_p) (decl))
	{
	  tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));

	  if (!lookup_attribute ("longcall", attr_list)
	      || lookup_attribute ("shortcall", attr_list))
	    return true;
	}
    }
  return false;
}
/* NULL if INSN insn is valid within a low-overhead loop.
   Otherwise return why doloop cannot be applied.
   PowerPC uses the COUNT register for branch on table instructions.  */

static const char *
rs6000_invalid_within_doloop (rtx insn)
{
  if (CALL_P (insn))
    return "Function call in the loop.";

  if (JUMP_P (insn)
      && (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
	  || GET_CODE (PATTERN (insn)) == ADDR_VEC))
    return "Computed branch in the loop.";

  return NULL;
}
static int
rs6000_ra_ever_killed (void)
{
  rtx top;
  rtx reg;
  rtx insn;

  if (current_function_is_thunk)
    return 0;

  /* regs_ever_live has LR marked as used if any sibcalls are present,
     but this should not force saving and restoring in the
     pro/epilogue.  Likewise, reg_set_between_p thinks a sibcall
     clobbers LR, so that is inappropriate.  */

  /* Also, the prologue can generate a store into LR that
     doesn't really count, like this:

	move LR->R0
	bcl to set PIC register
	move LR->R31
	move R0->LR

     When we're called from the epilogue, we need to avoid counting
     this as a store.  */

  push_topmost_sequence ();
  top = get_insns ();
  pop_topmost_sequence ();
  reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);

  for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	{
	  if (CALL_P (insn))
	    {
	      if (!SIBLING_CALL_P (insn))
		return 1;
	    }
	  else if (find_regno_note (insn, REG_INC, LINK_REGISTER_REGNUM))
	    return 1;
	  else if (set_of (reg, insn) != NULL_RTX
		   && !prologue_epilogue_contains (insn))
	    return 1;
	}
    }
  return 0;
}
/* Add a REG_MAYBE_DEAD note to the insn.  */
static void
rs6000_maybe_dead (rtx insn)
{
  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
					const0_rtx,
					REG_NOTES (insn));
}
/* Emit instructions needed to load the TOC register.
   This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
   a constant pool; or for SVR4 -fpic.  */

void
rs6000_emit_load_toc_table (int fromprolog)
{
  rtx dest, insn;
  dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);

  if (TARGET_ELF && TARGET_SECURE_PLT && DEFAULT_ABI != ABI_AIX && flag_pic)
    {
      char buf[30];
      rtx lab, tmp1, tmp2, got, tempLR;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
      lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
      if (flag_pic == 2)
	got = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
      else
	got = rs6000_got_sym ();
      tmp1 = tmp2 = dest;
      if (!fromprolog)
	{
	  tmp1 = gen_reg_rtx (Pmode);
	  tmp2 = gen_reg_rtx (Pmode);
	}
      tempLR = (fromprolog
		? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
		: gen_reg_rtx (Pmode));
      insn = emit_insn (gen_load_toc_v4_PIC_1 (tempLR, lab));
      if (fromprolog)
	rs6000_maybe_dead (insn);
      insn = emit_move_insn (tmp1, tempLR);
      if (fromprolog)
	rs6000_maybe_dead (insn);
      insn = emit_insn (gen_load_toc_v4_PIC_3b (tmp2, tmp1, got, lab));
      if (fromprolog)
	rs6000_maybe_dead (insn);
      insn = emit_insn (gen_load_toc_v4_PIC_3c (dest, tmp2, got, lab));
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  else if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
    {
      rtx tempLR = (fromprolog
		    ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
		    : gen_reg_rtx (Pmode));

      insn = emit_insn (gen_load_toc_v4_pic_si (tempLR));
      if (fromprolog)
	rs6000_maybe_dead (insn);
      insn = emit_move_insn (dest, tempLR);
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
    {
      char buf[30];
      rtx tempLR = (fromprolog
		    ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
		    : gen_reg_rtx (Pmode));
      rtx temp0 = (fromprolog
		   ? gen_rtx_REG (Pmode, 0)
		   : gen_reg_rtx (Pmode));

      if (fromprolog)
	{
	  rtx symF, symL;

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
	  symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
	  symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
							       symF)));
	  rs6000_maybe_dead (emit_move_insn (dest, tempLR));
	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
							       symL,
							       symF)));
	}
      else
	{
	  rtx tocsym;

	  tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
	  emit_insn (gen_load_toc_v4_PIC_1b (tempLR, tocsym));
	  emit_move_insn (dest, tempLR);
	  emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
	}
      insn = emit_insn (gen_addsi3 (dest, temp0, dest));
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
    {
      /* This is for AIX code running in non-PIC ELF32.  */
      char buf[30];
      rtx realsym;
      ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
      realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

      insn = emit_insn (gen_elf_high (dest, realsym));
      if (fromprolog)
	rs6000_maybe_dead (insn);
      insn = emit_insn (gen_elf_low (dest, dest, realsym));
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  else
    {
      gcc_assert (DEFAULT_ABI == ABI_AIX);

      if (TARGET_32BIT)
	insn = emit_insn (gen_load_toc_aix_si (dest));
      else
	insn = emit_insn (gen_load_toc_aix_di (dest));
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
}
/* Emit instructions to restore the link register after determining where
   its value has been stored.  */

void
rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
{
  rs6000_stack_t *info = rs6000_stack_info ();
  rtx operands[2];

  operands[0] = source;
  operands[1] = scratch;

  if (info->lr_save_p)
    {
      rtx frame_rtx = stack_pointer_rtx;
      HOST_WIDE_INT sp_offset = 0;
      rtx tmp;

      if (frame_pointer_needed
	  || current_function_calls_alloca
	  || info->total_size > 32767)
	{
	  tmp = gen_frame_mem (Pmode, frame_rtx);
	  emit_move_insn (operands[1], tmp);
	  frame_rtx = operands[1];
	}
      else if (info->push_p)
	sp_offset = info->total_size;

      tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
      tmp = gen_frame_mem (Pmode, tmp);
      emit_move_insn (tmp, operands[0]);
    }
  else
    emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM), operands[0]);
}
static GTY(()) int set = -1;

int
get_TOC_alias_set (void)
{
  if (set == -1)
    set = new_alias_set ();
  return set;
}
/* This returns nonzero if the current function uses the TOC.  This is
   determined by the presence of (use (unspec ... UNSPEC_TOC)), which
   is generated by the ABI_V4 load_toc_* patterns.  */
#if TARGET_ELF
static int
uses_TOC (void)
{
  rtx insn;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	rtx pat = PATTERN (insn);
	int i;

	if (GET_CODE (pat) == PARALLEL)
	  for (i = 0; i < XVECLEN (pat, 0); i++)
	    {
	      rtx sub = XVECEXP (pat, 0, i);
	      if (GET_CODE (sub) == USE)
		{
		  sub = XEXP (sub, 0);
		  if (GET_CODE (sub) == UNSPEC
		      && XINT (sub, 1) == UNSPEC_TOC)
		    return 1;
		}
	    }
      }
  return 0;
}
#endif
rtx
create_TOC_reference (rtx symbol)
{
  if (no_new_pseudos)
    regs_ever_live[TOC_REGISTER] = 1;
  return gen_rtx_PLUS (Pmode,
	   gen_rtx_REG (Pmode, TOC_REGISTER),
	     gen_rtx_CONST (Pmode,
	       gen_rtx_MINUS (Pmode, symbol,
		 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
}
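/* The rtx built here denotes SYMBOL's address as "TOC register plus
   (SYMBOL minus the TOC base label)"; the assembler resolves the
   CONST part to the symbol's TOC-relative offset, so a (mem ...)
   wrapped around this result becomes an ordinary r2-relative
   access.  */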
/* If _Unwind_* has been called from within the same module,
   toc register is not guaranteed to be saved to 40(1) on function
   entry.  Save it there in that case.  */

void
rs6000_aix_emit_builtin_unwind_init (void)
{
  rtx mem;
  rtx stack_top = gen_reg_rtx (Pmode);
  rtx opcode_addr = gen_reg_rtx (Pmode);
  rtx opcode = gen_reg_rtx (SImode);
  rtx tocompare = gen_reg_rtx (SImode);
  rtx no_toc_save_needed = gen_label_rtx ();

  mem = gen_frame_mem (Pmode, hard_frame_pointer_rtx);
  emit_move_insn (stack_top, mem);

  mem = gen_frame_mem (Pmode,
		       gen_rtx_PLUS (Pmode, stack_top,
				     GEN_INT (2 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (opcode_addr, mem);
  emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
  emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
					   : 0xE8410028, SImode));

  do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
			   SImode, NULL_RTX, NULL_RTX,
			   no_toc_save_needed);

  mem = gen_frame_mem (Pmode,
		       gen_rtx_PLUS (Pmode, stack_top,
				     GEN_INT (5 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
  emit_label (no_toc_save_needed);
}
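/* The magic constants compared above decode (assuming standard
   PowerPC encodings) to "lwz r2,20(r1)" (0x80410014) and
   "ld r2,40(r1)" (0xE8410028) -- the instruction a caller uses to
   reload the TOC pointer after a cross-module call.  The code loads
   the return address saved in the frame above and inspects the
   instruction it points at; only when that TOC-restore is present is
   the save of r2 into the frame skipped.  */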
/* This ties together stack memory (MEM with an alias set of frame_alias_set)
   and the change to the stack pointer.  */

static void
rs6000_emit_stack_tie (void)
{
  rtx mem = gen_frame_mem (BLKmode,
			   gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));

  emit_insn (gen_stack_tie (mem));
}
/* Emit the correct code for allocating stack space, as insns.
   If COPY_R12, make sure a copy of the old frame is left in r12.
   The generated code may use hard register 0 as a temporary.  */

static void
rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
{
  rtx insn;
  rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  rtx tmp_reg = gen_rtx_REG (Pmode, 0);
  rtx todec = gen_int_mode (-size, Pmode);

  if (INTVAL (todec) != -size)
    {
      warning (0, "stack frame too large");
      emit_insn (gen_trap ());
      return;
    }

  if (current_function_limit_stack)
    {
      if (REG_P (stack_limit_rtx)
	  && REGNO (stack_limit_rtx) > 1
	  && REGNO (stack_limit_rtx) <= 31)
	{
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size))
		     : gen_adddi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size)));

	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
	       && TARGET_32BIT
	       && DEFAULT_ABI == ABI_V4)
	{
	  rtx toload = gen_rtx_CONST (VOIDmode,
				      gen_rtx_PLUS (Pmode,
						    stack_limit_rtx,
						    GEN_INT (size)));

	  emit_insn (gen_elf_high (tmp_reg, toload));
	  emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      else
	warning (0, "stack limit expression is not supported");
    }

  if (copy_r12 || ! TARGET_UPDATE)
    emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);

  if (TARGET_UPDATE)
    {
      if (size > 32767)
	{
	  /* Need a note here so that try_split doesn't get confused.  */
	  if (get_last_insn () == NULL_RTX)
	    emit_note (NOTE_INSN_DELETED);
	  insn = emit_move_insn (tmp_reg, todec);
	  try_split (PATTERN (insn), insn, 0);
	  todec = tmp_reg;
	}

      insn = emit_insn (TARGET_32BIT
			? gen_movsi_update (stack_reg, stack_reg,
					    todec, stack_reg)
			: gen_movdi_di_update (stack_reg, stack_reg,
					       todec, stack_reg));
    }
  else
    {
      insn = emit_insn (TARGET_32BIT
			? gen_addsi3 (stack_reg, stack_reg, todec)
			: gen_adddi3 (stack_reg, stack_reg, todec));
      emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
		      gen_rtx_REG (Pmode, 12));
    }

  RTX_FRAME_RELATED_P (insn) = 1;
  REG_NOTES (insn)
    = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
			 gen_rtx_SET (VOIDmode, stack_reg,
				      gen_rtx_PLUS (Pmode, stack_reg,
						    GEN_INT (-size))),
			 REG_NOTES (insn));
}
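/* With TARGET_UPDATE, the allocation becomes roughly the classic
   single instruction "stwu r1,-SIZE(r1)" ("stdu" on 64-bit), which
   stores the old stack pointer as the back-chain word and decrements
   r1 in one step.  For SIZE > 32767 the displacement no longer fits
   in 16 bits, so -SIZE is first moved into r0 and the indexed form is
   used instead.  */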
/* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
   with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
   is not NULL.  It would be nice if dwarf2out_frame_debug_expr could
   deduce these equivalences by itself so it wasn't necessary to hold
   its hand so much.  */

static void
rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
		      rtx reg2, rtx rreg)
{
  rtx real, temp;

  /* copy_rtx will not make unique copies of registers, so we need to
     ensure we don't have unwanted sharing here.  */
  if (reg == reg2)
    reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));

  if (reg == rreg)
    reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));

  real = copy_rtx (PATTERN (insn));

  if (reg2 != NULL_RTX)
    real = replace_rtx (real, reg2, rreg);

  real = replace_rtx (real, reg,
		      gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
							STACK_POINTER_REGNUM),
				    GEN_INT (val)));

  /* We expect that 'real' is either a SET or a PARALLEL containing
     SETs (and possibly other stuff).  In a PARALLEL, all the SETs
     are important so they all have to be marked RTX_FRAME_RELATED_P.  */

  if (GET_CODE (real) == SET)
    {
      rtx set = real;

      temp = simplify_rtx (SET_SRC (set));
      if (temp)
	SET_SRC (set) = temp;
      temp = simplify_rtx (SET_DEST (set));
      if (temp)
	SET_DEST (set) = temp;
      if (GET_CODE (SET_DEST (set)) == MEM)
	{
	  temp = simplify_rtx (XEXP (SET_DEST (set), 0));
	  if (temp)
	    XEXP (SET_DEST (set), 0) = temp;
	}
    }
  else
    {
      int i;

      gcc_assert (GET_CODE (real) == PARALLEL);
      for (i = 0; i < XVECLEN (real, 0); i++)
	if (GET_CODE (XVECEXP (real, 0, i)) == SET)
	  {
	    rtx set = XVECEXP (real, 0, i);

	    temp = simplify_rtx (SET_SRC (set));
	    if (temp)
	      SET_SRC (set) = temp;
	    temp = simplify_rtx (SET_DEST (set));
	    if (temp)
	      SET_DEST (set) = temp;
	    if (GET_CODE (SET_DEST (set)) == MEM)
	      {
		temp = simplify_rtx (XEXP (SET_DEST (set), 0));
		if (temp)
		  XEXP (SET_DEST (set), 0) = temp;
	      }
	    RTX_FRAME_RELATED_P (set) = 1;
	  }
    }

  if (TARGET_SPE)
    real = spe_synthesize_frame_save (real);

  RTX_FRAME_RELATED_P (insn) = 1;
  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
					real,
					REG_NOTES (insn));
}
/* Given an SPE frame note, return a PARALLEL of SETs with the
   original note, plus a synthetic register save.  */

static rtx
spe_synthesize_frame_save (rtx real)
{
  rtx synth, offset, reg, real2;

  if (GET_CODE (real) != SET
      || GET_MODE (SET_SRC (real)) != V2SImode)
    return real;

  /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
     frame related note.  The parallel contains a set of the register
     being saved, and another set to a synthetic register (n+1200).
     This is so we can differentiate between 64-bit and 32-bit saves.
     Words cannot describe this nastiness.  */

  gcc_assert (GET_CODE (SET_DEST (real)) == MEM
	      && GET_CODE (XEXP (SET_DEST (real), 0)) == PLUS
	      && GET_CODE (SET_SRC (real)) == REG);

  /* Transform:
       (set (mem (plus (reg x) (const y)))
	    (reg z))
     into:
       (set (mem (plus (reg x) (const y+4)))
	    (reg z+1200))
  */

  real2 = copy_rtx (real);
  PUT_MODE (SET_DEST (real2), SImode);
  reg = SET_SRC (real2);
  real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
  synth = copy_rtx (real2);

  if (BYTES_BIG_ENDIAN)
    {
      offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
      real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
    }

  reg = SET_SRC (synth);

  synth = replace_rtx (synth, reg,
		       gen_rtx_REG (SImode, REGNO (reg) + 1200));

  offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
  synth = replace_rtx (synth, offset,
		       GEN_INT (INTVAL (offset)
				+ (BYTES_BIG_ENDIAN ? 0 : 4)));

  RTX_FRAME_RELATED_P (synth) = 1;
  RTX_FRAME_RELATED_P (real2) = 1;
  if (BYTES_BIG_ENDIAN)
    real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
  else
    real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));

  return real;
}
/* Returns an insn that has a vrsave set operation with the
   appropriate CLOBBERs.  */

static rtx
generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
{
  int nclobs, i;
  rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
  rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);

  clobs[0]
    = gen_rtx_SET (VOIDmode,
		   vrsave,
		   gen_rtx_UNSPEC_VOLATILE (SImode,
					    gen_rtvec (2, reg, vrsave),
					    UNSPECV_SET_VRSAVE));

  nclobs = 1;

  /* We need to clobber the registers in the mask so the scheduler
     does not move sets to VRSAVE before sets of AltiVec registers.

     However, if the function receives nonlocal gotos, reload will set
     all call saved registers live.  We will end up with:

	(set (reg 999) (mem))
	(parallel [ (set (reg vrsave) (unspec blah))
		    (clobber (reg 999))])

     The clobber will cause the store into reg 999 to be dead, and
     flow will attempt to delete an epilogue insn.  In this case, we
     need an unspec use/set of the register.  */

  for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
    if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
      {
	if (!epiloguep || call_used_regs[i])
	  clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
					     gen_rtx_REG (V4SImode, i));
	else
	  {
	    rtx reg = gen_rtx_REG (V4SImode, i);

	    clobs[nclobs++]
	      = gen_rtx_SET (VOIDmode,
			     reg,
			     gen_rtx_UNSPEC (V4SImode,
					     gen_rtvec (1, reg), 27));
	  }
      }

  insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));

  for (i = 0; i < nclobs; ++i)
    XVECEXP (insn, 0, i) = clobs[i];

  return insn;
}
/* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
   Save REGNO into [FRAME_REG + OFFSET] in mode MODE.  */

static void
emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
		 unsigned int regno, int offset, HOST_WIDE_INT total_size)
{
  rtx reg, offset_rtx, insn, mem, addr, int_rtx;
  rtx replacea, replaceb;

  int_rtx = GEN_INT (offset);

  /* Some cases that need register indexed addressing.  */
  if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
      || (TARGET_E500_DOUBLE && mode == DFmode)
      || (TARGET_SPE_ABI
	  && SPE_VECTOR_MODE (mode)
	  && !SPE_CONST_OFFSET_OK (offset)))
    {
      /* Whomever calls us must make sure r11 is available in the
	 flow path of instructions in the prologue.  */
      offset_rtx = gen_rtx_REG (Pmode, 11);
      emit_move_insn (offset_rtx, int_rtx);

      replacea = offset_rtx;
      replaceb = int_rtx;
    }
  else
    {
      offset_rtx = int_rtx;
      replacea = NULL_RTX;
      replaceb = NULL_RTX;
    }

  reg = gen_rtx_REG (mode, regno);
  addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
  mem = gen_frame_mem (mode, addr);

  insn = emit_move_insn (mem, reg);

  rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
}
/* Emit an offset memory reference suitable for a frame store, while
   converting to a valid addressing mode.  */

static rtx
gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
{
  rtx int_rtx, offset_rtx;

  int_rtx = GEN_INT (offset);

  if ((TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
      || (TARGET_E500_DOUBLE && mode == DFmode))
    {
      offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
      emit_move_insn (offset_rtx, int_rtx);
    }
  else
    offset_rtx = int_rtx;

  return gen_frame_mem (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
}
/* Look for user-defined global regs.  We should not save and restore these,
   and cannot use stmw/lmw if there are any in its range.  */

static bool
no_global_regs_above (int first_greg)
{
  int i;
  for (i = 0; i < 32 - first_greg; i++)
    if (global_regs[first_greg + i])
      return false;
  return true;
}

#ifndef TARGET_FIX_AND_CONTINUE
#define TARGET_FIX_AND_CONTINUE 0
#endif
14503 rs6000_emit_prologue (void)
14505 rs6000_stack_t
*info
= rs6000_stack_info ();
14506 enum machine_mode reg_mode
= Pmode
;
14507 int reg_size
= TARGET_32BIT
? 4 : 8;
14508 rtx sp_reg_rtx
= gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
);
14509 rtx frame_ptr_rtx
= gen_rtx_REG (Pmode
, 12);
14510 rtx frame_reg_rtx
= sp_reg_rtx
;
14511 rtx cr_save_rtx
= NULL_RTX
;
14513 int saving_FPRs_inline
;
14514 int using_store_multiple
;
14515 HOST_WIDE_INT sp_offset
= 0;
14517 if (TARGET_FIX_AND_CONTINUE
)
14519 /* gdb on darwin arranges to forward a function from the old
14520 address by modifying the first 5 instructions of the function
14521 to branch to the overriding function. This is necessary to
14522 permit function pointers that point to the old function to
14523 actually forward to the new function. */
14524 emit_insn (gen_nop ());
14525 emit_insn (gen_nop ());
14526 emit_insn (gen_nop ());
14527 emit_insn (gen_nop ());
14528 emit_insn (gen_nop ());
14531 if (TARGET_SPE_ABI
&& info
->spe_64bit_regs_used
!= 0)
14533 reg_mode
= V2SImode
;
14537 using_store_multiple
= (TARGET_MULTIPLE
&& ! TARGET_POWERPC64
14538 && (!TARGET_SPE_ABI
14539 || info
->spe_64bit_regs_used
== 0)
14540 && info
->first_gp_reg_save
< 31
14541 && no_global_regs_above (info
->first_gp_reg_save
));
14542 saving_FPRs_inline
= (info
->first_fp_reg_save
== 64
14543 || FP_SAVE_INLINE (info
->first_fp_reg_save
)
14544 || current_function_calls_eh_return
14545 || cfun
->machine
->ra_need_lr
);
14547 /* For V.4, update stack before we do any saving and set back pointer. */
14548 if (! WORLD_SAVE_P (info
)
14550 && (DEFAULT_ABI
== ABI_V4
14551 || current_function_calls_eh_return
))
14553 if (info
->total_size
< 32767)
14554 sp_offset
= info
->total_size
;
14556 frame_reg_rtx
= frame_ptr_rtx
;
14557 rs6000_emit_allocate_stack (info
->total_size
,
14558 (frame_reg_rtx
!= sp_reg_rtx
14559 && (info
->cr_save_p
14561 || info
->first_fp_reg_save
< 64
14562 || info
->first_gp_reg_save
< 32
14564 if (frame_reg_rtx
!= sp_reg_rtx
)
14565 rs6000_emit_stack_tie ();
14568 /* Handle world saves specially here. */
14569 if (WORLD_SAVE_P (info
))
14576 /* save_world expects lr in r0. */
14577 reg0
= gen_rtx_REG (Pmode
, 0);
14578 if (info
->lr_save_p
)
14580 insn
= emit_move_insn (reg0
,
14581 gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
));
14582 RTX_FRAME_RELATED_P (insn
) = 1;
14585 /* The SAVE_WORLD and RESTORE_WORLD routines make a number of
14586 assumptions about the offsets of various bits of the stack
14588 gcc_assert (info
->gp_save_offset
== -220
14589 && info
->fp_save_offset
== -144
14590 && info
->lr_save_offset
== 8
14591 && info
->cr_save_offset
== 4
14594 && (!current_function_calls_eh_return
14595 || info
->ehrd_offset
== -432)
14596 && info
->vrsave_save_offset
== -224
14597 && info
->altivec_save_offset
== -416);
14599 treg
= gen_rtx_REG (SImode
, 11);
14600 emit_move_insn (treg
, GEN_INT (-info
->total_size
));
14602 /* SAVE_WORLD takes the caller's LR in R0 and the frame size
14603 in R11. It also clobbers R12, so beware! */
14605 /* Preserve CR2 for save_world prologues */
14607 sz
+= 32 - info
->first_gp_reg_save
;
14608 sz
+= 64 - info
->first_fp_reg_save
;
14609 sz
+= LAST_ALTIVEC_REGNO
- info
->first_altivec_reg_save
+ 1;
14610 p
= rtvec_alloc (sz
);
14612 RTVEC_ELT (p
, j
++) = gen_rtx_CLOBBER (VOIDmode
,
14613 gen_rtx_REG (Pmode
,
14614 LINK_REGISTER_REGNUM
));
14615 RTVEC_ELT (p
, j
++) = gen_rtx_USE (VOIDmode
,
14616 gen_rtx_SYMBOL_REF (Pmode
,
14618 /* We do floats first so that the instruction pattern matches
14620 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
14622 rtx reg
= gen_rtx_REG (DFmode
, info
->first_fp_reg_save
+ i
);
14623 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
14624 GEN_INT (info
->fp_save_offset
14625 + sp_offset
+ 8 * i
));
14626 rtx mem
= gen_frame_mem (DFmode
, addr
);
14628 RTVEC_ELT (p
, j
++) = gen_rtx_SET (VOIDmode
, mem
, reg
);
14630 for (i
= 0; info
->first_altivec_reg_save
+ i
<= LAST_ALTIVEC_REGNO
; i
++)
14632 rtx reg
= gen_rtx_REG (V4SImode
, info
->first_altivec_reg_save
+ i
);
14633 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
14634 GEN_INT (info
->altivec_save_offset
14635 + sp_offset
+ 16 * i
));
14636 rtx mem
= gen_frame_mem (V4SImode
, addr
);
14638 RTVEC_ELT (p
, j
++) = gen_rtx_SET (VOIDmode
, mem
, reg
);
14640 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
14642 rtx reg
= gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
);
14643 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
14644 GEN_INT (info
->gp_save_offset
14645 + sp_offset
+ reg_size
* i
));
14646 rtx mem
= gen_frame_mem (reg_mode
, addr
);
14648 RTVEC_ELT (p
, j
++) = gen_rtx_SET (VOIDmode
, mem
, reg
);
14652 /* CR register traditionally saved as CR2. */
14653 rtx reg
= gen_rtx_REG (reg_mode
, CR2_REGNO
);
14654 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
14655 GEN_INT (info
->cr_save_offset
14657 rtx mem
= gen_frame_mem (reg_mode
, addr
);
14659 RTVEC_ELT (p
, j
++) = gen_rtx_SET (VOIDmode
, mem
, reg
);
14661 /* Explain about use of R0. */
14662 if (info
->lr_save_p
)
14664 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
14665 GEN_INT (info
->lr_save_offset
14667 rtx mem
= gen_frame_mem (reg_mode
, addr
);
14669 RTVEC_ELT (p
, j
++) = gen_rtx_SET (VOIDmode
, mem
, reg0
);
14671 /* Explain what happens to the stack pointer. */
14673 rtx newval
= gen_rtx_PLUS (Pmode
, sp_reg_rtx
, treg
);
14674 RTVEC_ELT (p
, j
++) = gen_rtx_SET (VOIDmode
, sp_reg_rtx
, newval
);
14677 insn
= emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
14678 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
14679 treg
, GEN_INT (-info
->total_size
));
14680 sp_offset
= info
->total_size
;
14683 /* Save AltiVec registers if needed. */
14684 if (!WORLD_SAVE_P (info
) && TARGET_ALTIVEC_ABI
&& info
->altivec_size
!= 0)
14688 /* There should be a non inline version of this, for when we
14689 are saving lots of vector registers. */
14690 for (i
= info
->first_altivec_reg_save
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
14691 if (info
->vrsave_mask
& ALTIVEC_REG_BIT (i
))
14693 rtx areg
, savereg
, mem
;
14696 offset
= info
->altivec_save_offset
+ sp_offset
14697 + 16 * (i
- info
->first_altivec_reg_save
);
14699 savereg
= gen_rtx_REG (V4SImode
, i
);
14701 areg
= gen_rtx_REG (Pmode
, 0);
14702 emit_move_insn (areg
, GEN_INT (offset
));
14704 /* AltiVec addressing mode is [reg+reg]. */
14705 mem
= gen_frame_mem (V4SImode
,
14706 gen_rtx_PLUS (Pmode
, frame_reg_rtx
, areg
));
14708 insn
= emit_move_insn (mem
, savereg
);
14710 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
14711 areg
, GEN_INT (offset
));
    }

  /* VRSAVE is a bit vector representing which AltiVec registers
     are used.  The OS uses this to determine which vector
     registers to save on a context switch.  We need to save
     VRSAVE on the stack frame, add whatever AltiVec registers we
     used in this function, and do the corresponding magic in the
     epilogue.  */

  if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
      && info->vrsave_mask != 0)
    {
      rtx reg, mem, vrsave;
      int offset;

      /* Get VRSAVE onto a GPR.  Note that ABI_V4 might be using r12
         as frame_reg_rtx and r11 as the static chain pointer for
         nested functions.  */
      reg = gen_rtx_REG (SImode, 0);
      vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
      if (TARGET_MACHO)
        emit_insn (gen_get_vrsave_internal (reg));
      else
        emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));

      if (!WORLD_SAVE_P (info))
        {
          /* Save VRSAVE.  */
          offset = info->vrsave_save_offset + sp_offset;
          mem = gen_frame_mem (SImode,
                               gen_rtx_PLUS (Pmode, frame_reg_rtx,
                                             GEN_INT (offset)));
          insn = emit_move_insn (mem, reg);
        }

      /* Include the registers in the mask.  */
      emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));

      insn = emit_insn (generate_set_vrsave (reg, info, 0));
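      /* Note: the ior above merges this function's vector-register
         usage into the value written back by generate_set_vrsave, so
         the OS context-switch code preserves exactly the AltiVec
         registers this function touches.  */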
    }

  /* If we use the link register, get it into r0.  */
  if (!WORLD_SAVE_P (info) && info->lr_save_p)
    {
      insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
                             gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* If we need to save CR, put it into r12.  */
  if (!WORLD_SAVE_P (info) && info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
    {
      rtx set;

      cr_save_rtx = gen_rtx_REG (SImode, 12);
      insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
      RTX_FRAME_RELATED_P (insn) = 1;
      /* Now, there's no way that dwarf2out_frame_debug_expr is going
         to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
         But that's OK.  All we have to do is specify that _one_ condition
         code register is saved in this stack slot.  The thrower's epilogue
         will then restore all the call-saved registers.
         We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux.  */
      set = gen_rtx_SET (VOIDmode, cr_save_rtx,
                         gen_rtx_REG (SImode, CR2_REGNO));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
                                            set, REG_NOTES (insn));
    }

  /* Do any required saving of fpr's.  If only one or two to save, do
     it ourselves.  Otherwise, call function.  */
  if (!WORLD_SAVE_P (info) && saving_FPRs_inline)
    {
      int i;
      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
        if ((regs_ever_live[info->first_fp_reg_save+i]
             && ! call_used_regs[info->first_fp_reg_save+i]))
          emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
                           info->first_fp_reg_save + i,
                           info->fp_save_offset + sp_offset + 8 * i,
                           info->total_size);
    }
  else if (!WORLD_SAVE_P (info) && info->first_fp_reg_save != 64)
    {
      int i;
      char rname[30];
      const char *alloc_rname;
      rtvec p;
      p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);

      RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
                                          gen_rtx_REG (Pmode,
                                                       LINK_REGISTER_REGNUM));
      sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
               info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
      alloc_rname = ggc_strdup (rname);
      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
                                      gen_rtx_SYMBOL_REF (Pmode,
                                                          alloc_rname));
      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
        {
          rtx addr, reg, mem;
          reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
          addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                               GEN_INT (info->fp_save_offset
                                        + sp_offset + 8*i));
          mem = gen_frame_mem (DFmode, addr);

          RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
        }
      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
                            NULL_RTX, NULL_RTX);
    }

  /* Save GPRs.  This is done as a PARALLEL if we are using
     the store-multiple instructions.  */
  if (!WORLD_SAVE_P (info) && using_store_multiple)
    {
      rtvec p;
      int i;
      p = rtvec_alloc (32 - info->first_gp_reg_save);
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
        {
          rtx addr, reg, mem;
          reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
          addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                               GEN_INT (info->gp_save_offset
                                        + sp_offset
                                        + reg_size * i));
          mem = gen_frame_mem (reg_mode, addr);

          RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
        }
      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
                            NULL_RTX, NULL_RTX);
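      /* Note: the store-multiple instruction (stmw on 32-bit targets)
         writes registers first_gp_reg_save..31 in one operation; the
         PARALLEL above lists each store separately so the frame-related
         notes describe every saved slot to the unwinder.  */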
    }
  else if (!WORLD_SAVE_P (info))
    {
      int i;
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
        if ((regs_ever_live[info->first_gp_reg_save + i]
             && (!call_used_regs[info->first_gp_reg_save + i]
                 || (i + info->first_gp_reg_save
                     == RS6000_PIC_OFFSET_TABLE_REGNUM
                     && TARGET_TOC && TARGET_MINIMAL_TOC)))
            || (i + info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
                && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
                    || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
          {
            rtx addr, reg, mem;
            reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);

            if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
              {
                int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
                rtx b;

                if (!SPE_CONST_OFFSET_OK (offset))
                  {
                    b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
                    emit_move_insn (b, GEN_INT (offset));
                  }
                else
                  b = GEN_INT (offset);

                addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
                mem = gen_frame_mem (V2SImode, addr);
                insn = emit_move_insn (mem, reg);

                if (GET_CODE (b) == CONST_INT)
                  rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
                                        NULL_RTX, NULL_RTX);
                else
                  rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
                                        b, GEN_INT (offset));
              }
            else
              {
                addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                                     GEN_INT (info->gp_save_offset
                                              + sp_offset
                                              + reg_size * i));
                mem = gen_frame_mem (reg_mode, addr);

                insn = emit_move_insn (mem, reg);
                rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
                                      NULL_RTX, NULL_RTX);
              }
          }
    }
  /* ??? There's no need to emit actual instructions here, but it's the
     easiest way to get the frame unwind information emitted.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i, regno;

      /* In AIX ABI we need to pretend we save r2 here.  */
      if (TARGET_AIX)
        {
          rtx addr, reg, mem;

          reg = gen_rtx_REG (reg_mode, 2);
          addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                               GEN_INT (sp_offset + 5 * reg_size));
          mem = gen_frame_mem (reg_mode, addr);

          insn = emit_move_insn (mem, reg);
          rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
                                NULL_RTX, NULL_RTX);
          PATTERN (insn) = gen_blockage ();
        }

      for (i = 0; ; ++i)
        {
          regno = EH_RETURN_DATA_REGNO (i);
          if (regno == INVALID_REGNUM)
            break;

          emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
                           info->ehrd_offset + sp_offset
                           + reg_size * (int) i,
                           info->total_size);
        }
    }
  /* Save lr if we used it.  */
  if (!WORLD_SAVE_P (info) && info->lr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                               GEN_INT (info->lr_save_offset + sp_offset));
      rtx reg = gen_rtx_REG (Pmode, 0);
      rtx mem = gen_rtx_MEM (Pmode, addr);
      /* This should not be of frame_alias_set, because of
         __builtin_return_address.  */

      insn = emit_move_insn (mem, reg);
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
                            NULL_RTX, NULL_RTX);
    }

  /* Save CR if we use any that must be preserved.  */
  if (!WORLD_SAVE_P (info) && info->cr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                               GEN_INT (info->cr_save_offset + sp_offset));
      rtx mem = gen_frame_mem (SImode, addr);
      /* See the large comment above about why CR2_REGNO is used.  */
      rtx magic_eh_cr_reg = gen_rtx_REG (SImode, CR2_REGNO);

      /* If r12 was used to hold the original sp, copy cr into r0 now
         that it's free.  */
      if (REGNO (frame_reg_rtx) == 12)
        {
          rtx set;

          cr_save_rtx = gen_rtx_REG (SImode, 0);
          insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
          RTX_FRAME_RELATED_P (insn) = 1;
          set = gen_rtx_SET (VOIDmode, cr_save_rtx, magic_eh_cr_reg);
          REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
                                                set, REG_NOTES (insn));
        }
      insn = emit_move_insn (mem, cr_save_rtx);

      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
                            NULL_RTX, NULL_RTX);
    }

  /* Update stack and set back pointer unless this is V.4,
     for which it was done previously.  */
  if (!WORLD_SAVE_P (info) && info->push_p
      && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
    rs6000_emit_allocate_stack (info->total_size, FALSE);

  /* Set frame pointer, if needed.  */
  if (frame_pointer_needed)
    {
      insn = emit_move_insn (gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
                             sp_reg_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up.  */
  if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
      || (DEFAULT_ABI == ABI_V4
          && (flag_pic == 1 || (flag_pic && TARGET_SECURE_PLT))
          && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
    {
      /* If emit_load_toc_table will use the link register, we need to save
         it.  We use R12 for this purpose because emit_load_toc_table
         can use register 0.  This allows us to use a plain 'blr' to return
         from the procedure more often.  */
      int save_LR_around_toc_setup = (TARGET_ELF
                                      && DEFAULT_ABI != ABI_AIX
                                      && flag_pic
                                      && ! info->lr_save_p
                                      && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0);
      if (save_LR_around_toc_setup)
        {
          rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);

          insn = emit_move_insn (frame_ptr_rtx, lr);
          rs6000_maybe_dead (insn);
          RTX_FRAME_RELATED_P (insn) = 1;

          rs6000_emit_load_toc_table (TRUE);

          insn = emit_move_insn (lr, frame_ptr_rtx);
          rs6000_maybe_dead (insn);
          RTX_FRAME_RELATED_P (insn) = 1;
        }
      else
        rs6000_emit_load_toc_table (TRUE);
    }

#if TARGET_MACHO
  if (DEFAULT_ABI == ABI_DARWIN
      && flag_pic && current_function_uses_pic_offset_table)
    {
      rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
      rtx src = machopic_function_base_sym ();

      /* Save and restore LR locally around this call (in R0).  */
      if (!info->lr_save_p)
        rs6000_maybe_dead (emit_move_insn (gen_rtx_REG (Pmode, 0), lr));

      rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (lr, src)));

      insn = emit_move_insn (gen_rtx_REG (Pmode,
                                          RS6000_PIC_OFFSET_TABLE_REGNUM),
                             lr);
      rs6000_maybe_dead (insn);

      if (!info->lr_save_p)
        rs6000_maybe_dead (emit_move_insn (lr, gen_rtx_REG (Pmode, 0)));
    }
#endif
}
/* Write function prologue.  */

static void
rs6000_output_function_prologue (FILE *file,
                                 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
{
  rs6000_stack_t *info = rs6000_stack_info ();

  if (TARGET_DEBUG_STACK)
    debug_stack_info (info);

  /* Write .extern for any function we will call to save and restore
     fp values.  */
  if (info->first_fp_reg_save < 64
      && !FP_SAVE_INLINE (info->first_fp_reg_save))
    fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
             SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
             RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
             RESTORE_FP_SUFFIX);

  /* Write .extern for AIX common mode routines, if needed.  */
  if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
    {
      fputs ("\t.extern __mulh\n", file);
      fputs ("\t.extern __mull\n", file);
      fputs ("\t.extern __divss\n", file);
      fputs ("\t.extern __divus\n", file);
      fputs ("\t.extern __quoss\n", file);
      fputs ("\t.extern __quous\n", file);
      common_mode_defined = 1;
    }

  if (! HAVE_prologue)
    {
      start_sequence ();

      /* A NOTE_INSN_DELETED is supposed to be at the start and end of
         the "toplevel" insn chain.  */
      emit_note (NOTE_INSN_DELETED);
      rs6000_emit_prologue ();
      emit_note (NOTE_INSN_DELETED);

      /* Expand INSN_ADDRESSES so final() doesn't crash.  */
      {
        rtx insn;
        unsigned addr = 0;
        for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
          {
            INSN_ADDRESSES_NEW (insn, addr);
            addr += 4;
          }
      }

      if (TARGET_DEBUG_STACK)
        debug_rtx_list (get_insns (), 100);
      final (get_insns (), file, FALSE);
      end_sequence ();
    }

  rs6000_pic_labelno++;
}
/* Emit function epilogue as insns.

   At present, dwarf2out_frame_debug_expr doesn't understand
   register restores, so we don't bother setting RTX_FRAME_RELATED_P
   anywhere in the epilogue.  Most of the insns below would in any case
   need special notes to explain where r11 is in relation to the stack.  */

void
rs6000_emit_epilogue (int sibcall)
{
  rs6000_stack_t *info;
  int restoring_FPRs_inline;
  int using_load_multiple;
  int using_mtcr_multiple;
  int use_backchain_to_restore_sp;
  int sp_offset = 0;
  rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
  rtx frame_reg_rtx = sp_reg_rtx;
  enum machine_mode reg_mode = Pmode;
  int reg_size = TARGET_32BIT ? 4 : 8;
  int i;

  info = rs6000_stack_info ();

  if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
    {
      reg_mode = V2SImode;
      reg_size = 8;
    }

  using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
                         && (!TARGET_SPE_ABI
                             || info->spe_64bit_regs_used == 0)
                         && info->first_gp_reg_save < 31
                         && no_global_regs_above (info->first_gp_reg_save));
  restoring_FPRs_inline = (sibcall
                           || current_function_calls_eh_return
                           || info->first_fp_reg_save == 64
                           || FP_SAVE_INLINE (info->first_fp_reg_save));
  use_backchain_to_restore_sp = (frame_pointer_needed
                                 || current_function_calls_alloca
                                 || info->total_size > 32767);
  using_mtcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
                         || rs6000_cpu == PROCESSOR_PPC603
                         || rs6000_cpu == PROCESSOR_PPC750
                         || optimize_size);

  if (WORLD_SAVE_P (info))
    {
      int i, j;
      char rname[30];
      const char *alloc_rname;
      rtvec p;

      /* eh_rest_world_r10 will return to the location saved in the LR
         stack slot (which is not likely to be our caller.)
         Input: R10 -- stack adjustment.  Clobbers R0, R11, R12, R7, R8.
         rest_world is similar, except any R10 parameter is ignored.
         The exception-handling stuff that was here in 2.95 is no
         longer necessary.  */

      p = rtvec_alloc (9
                       + 1
                       + 32 - info->first_gp_reg_save
                       + LAST_ALTIVEC_REGNO + 1 - info->first_altivec_reg_save
                       + 63 + 1 - info->first_fp_reg_save);

      strcpy (rname, ((current_function_calls_eh_return) ?
                      "*eh_rest_world_r10" : "*rest_world"));
      alloc_rname = ggc_strdup (rname);

      j = 0;
      RTVEC_ELT (p, j++) = gen_rtx_RETURN (VOIDmode);
      RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
                                        gen_rtx_REG (Pmode,
                                                     LINK_REGISTER_REGNUM));
      RTVEC_ELT (p, j++)
        = gen_rtx_USE (VOIDmode, gen_rtx_SYMBOL_REF (Pmode, alloc_rname));
      /* The instruction pattern requires a clobber here;
         it is shared with the restVEC helper. */
      RTVEC_ELT (p, j++)
        = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));

      {
        /* CR register traditionally saved as CR2.  */
        rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
        rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                                 GEN_INT (info->cr_save_offset));
        rtx mem = gen_frame_mem (reg_mode, addr);

        RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
      }

      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
        {
          rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
          rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                                   GEN_INT (info->gp_save_offset
                                            + reg_size * i));
          rtx mem = gen_frame_mem (reg_mode, addr);

          RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
        }
      for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
        {
          rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
          rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                                   GEN_INT (info->altivec_save_offset
                                            + 16 * i));
          rtx mem = gen_frame_mem (V4SImode, addr);

          RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
        }
      for (i = 0; info->first_fp_reg_save + i <= 63; i++)
        {
          rtx reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
          rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                                   GEN_INT (info->fp_save_offset
                                            + 8 * i));
          rtx mem = gen_frame_mem (DFmode, addr);

          RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
        }
      RTVEC_ELT (p, j++)
        = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 0));
      RTVEC_ELT (p, j++)
        = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 12));
      RTVEC_ELT (p, j++)
        = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 7));
      RTVEC_ELT (p, j++)
        = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 8));
      RTVEC_ELT (p, j++)
        = gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, 10));
      emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));

      return;
    }
  /* If we have a frame pointer, a call to alloca, or a large stack
     frame, restore the old stack pointer using the backchain.  Otherwise,
     we know what size to update it with.  */
  if (use_backchain_to_restore_sp)
    {
      /* Under V.4, don't reset the stack pointer until after we're done
         loading the saved registers.  */
      if (DEFAULT_ABI == ABI_V4)
        frame_reg_rtx = gen_rtx_REG (Pmode, 11);

      emit_move_insn (frame_reg_rtx,
                      gen_rtx_MEM (Pmode, sp_reg_rtx));
    }
  else if (info->push_p)
    {
      if (DEFAULT_ABI == ABI_V4
          || current_function_calls_eh_return)
        sp_offset = info->total_size;
      else
        emit_insn (TARGET_32BIT
                   ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
                                 GEN_INT (info->total_size))
                   : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
                                 GEN_INT (info->total_size)));
    }

  /* Restore AltiVec registers if needed.  */
  if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
    {
      int i;

      for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
        if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
          {
            rtx addr, areg, mem;

            areg = gen_rtx_REG (Pmode, 0);
            emit_move_insn
              (areg, GEN_INT (info->altivec_save_offset
                              + sp_offset
                              + 16 * (i - info->first_altivec_reg_save)));

            /* AltiVec addressing mode is [reg+reg].  */
            addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
            mem = gen_frame_mem (V4SImode, addr);

            emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
          }
    }

  /* Restore VRSAVE if needed.  */
  if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
      && info->vrsave_mask != 0)
    {
      rtx addr, mem, reg;

      addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                           GEN_INT (info->vrsave_save_offset + sp_offset));
      mem = gen_frame_mem (SImode, addr);
      reg = gen_rtx_REG (SImode, 12);
      emit_move_insn (reg, mem);

      emit_insn (generate_set_vrsave (reg, info, 1));
    }

  /* Get the old lr if we saved it.  */
  if (info->lr_save_p)
    {
      rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
                                      info->lr_save_offset + sp_offset);

      emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
    }

  /* Get the old cr if we saved it.  */
  if (info->cr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                               GEN_INT (info->cr_save_offset + sp_offset));
      rtx mem = gen_frame_mem (SImode, addr);

      emit_move_insn (gen_rtx_REG (SImode, 12), mem);
    }

  /* Set LR here to try to overlap restores below.  */
  if (info->lr_save_p)
    emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
                    gen_rtx_REG (Pmode, 0));

  /* Load exception handler data registers, if needed.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i, regno;

      if (TARGET_AIX)
        {
          rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                                   GEN_INT (sp_offset + 5 * reg_size));
          rtx mem = gen_frame_mem (reg_mode, addr);

          emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
        }

      for (i = 0; ; ++i)
        {
          rtx mem;

          regno = EH_RETURN_DATA_REGNO (i);
          if (regno == INVALID_REGNUM)
            break;

          mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
                                      info->ehrd_offset + sp_offset
                                      + reg_size * (int) i);

          emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
        }
    }

  /* Restore GPRs.  This is done as a PARALLEL if we are using
     the load-multiple instructions.  */
  if (using_load_multiple)
    {
      rtvec p;
      p = rtvec_alloc (32 - info->first_gp_reg_save);
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
        {
          rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                                   GEN_INT (info->gp_save_offset
                                            + sp_offset
                                            + reg_size * i));
          rtx mem = gen_frame_mem (reg_mode, addr);

          RTVEC_ELT (p, i) =
            gen_rtx_SET (VOIDmode,
                         gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
                         mem);
        }
      emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
    }
  else
    for (i = 0; i < 32 - info->first_gp_reg_save; i++)
      if ((regs_ever_live[info->first_gp_reg_save + i]
           && (!call_used_regs[info->first_gp_reg_save + i]
               || (i + info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
                   && TARGET_TOC && TARGET_MINIMAL_TOC)))
          || (i + info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
              && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
                  || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
        {
          rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                                   GEN_INT (info->gp_save_offset
                                            + sp_offset
                                            + reg_size * i));
          rtx mem = gen_frame_mem (reg_mode, addr);

          /* Restore 64-bit quantities for SPE.  */
          if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
            {
              int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
              rtx b;

              if (!SPE_CONST_OFFSET_OK (offset))
                {
                  b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
                  emit_move_insn (b, GEN_INT (offset));
                }
              else
                b = GEN_INT (offset);

              addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
              mem = gen_frame_mem (V2SImode, addr);
            }

          emit_move_insn (gen_rtx_REG (reg_mode,
                                       info->first_gp_reg_save + i), mem);
        }

  /* Restore fpr's if we need to do it without calling a function.  */
  if (restoring_FPRs_inline)
    for (i = 0; i < 64 - info->first_fp_reg_save; i++)
      if ((regs_ever_live[info->first_fp_reg_save+i]
           && ! call_used_regs[info->first_fp_reg_save+i]))
        {
          rtx addr, mem;
          addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
                               GEN_INT (info->fp_save_offset
                                        + sp_offset
                                        + 8 * i));
          mem = gen_frame_mem (DFmode, addr);

          emit_move_insn (gen_rtx_REG (DFmode,
                                       info->first_fp_reg_save + i),
                          mem);
        }

  /* If we saved cr, restore it here.  Just those that were used.  */
  if (info->cr_save_p)
    {
      rtx r12_rtx = gen_rtx_REG (SImode, 12);
      int count = 0;

      if (using_mtcr_multiple)
        {
          for (i = 0; i < 8; i++)
            if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
              count++;
          gcc_assert (count);
        }

      if (using_mtcr_multiple && count > 1)
        {
          rtvec p;
          int ndx;

          p = rtvec_alloc (count);

          ndx = 0;
          for (i = 0; i < 8; i++)
            if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
              {
                rtvec r = rtvec_alloc (2);
                RTVEC_ELT (r, 0) = r12_rtx;
                RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
                RTVEC_ELT (p, ndx) =
                  gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
                               gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
                ndx++;
              }
          emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
          gcc_assert (ndx == count);
        }
      else
        for (i = 0; i < 8; i++)
          if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
            {
              emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
                                                           CR0_REGNO+i),
                                              r12_rtx));
            }
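      /* Note: in the mtcrf-style restore above, the mask
         GEN_INT (1 << (7-i)) names CR field i, since the FXM operand
         of mtcrf counts fields from the most-significant bit down.  */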
    }

  /* If this is V.4, unwind the stack pointer after all of the loads
     have been done.  */
  if (frame_reg_rtx != sp_reg_rtx)
    {
      /* This blockage is needed so that sched doesn't decide to move
         the sp change before the register restores.  */
      rs6000_emit_stack_tie ();
      emit_move_insn (sp_reg_rtx, frame_reg_rtx);
    }
  else if (sp_offset != 0)
    emit_insn (TARGET_32BIT
               ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
                             GEN_INT (sp_offset))
               : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
                             GEN_INT (sp_offset)));

  if (current_function_calls_eh_return)
    {
      rtx sa = EH_RETURN_STACKADJ_RTX;
      emit_insn (TARGET_32BIT
                 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
                 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
    }

  if (!sibcall)
    {
      rtvec p;
      if (! restoring_FPRs_inline)
        p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
      else
        p = rtvec_alloc (2);

      RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
                                      gen_rtx_REG (Pmode,
                                                   LINK_REGISTER_REGNUM));

      /* If we have to restore more than two FP registers, branch to the
         restore function.  It will return to our caller.  */
      if (! restoring_FPRs_inline)
        {
          int i;
          char rname[30];
          const char *alloc_rname;

          sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
                   info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
          alloc_rname = ggc_strdup (rname);
          RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
                                          gen_rtx_SYMBOL_REF (Pmode,
                                                              alloc_rname));

          for (i = 0; i < 64 - info->first_fp_reg_save; i++)
            {
              rtx addr, mem;
              addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
                                   GEN_INT (info->fp_save_offset + 8*i));
              mem = gen_frame_mem (DFmode, addr);

              RTVEC_ELT (p, i+3) =
                gen_rtx_SET (VOIDmode,
                             gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
                             mem);
            }
        }

      emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
    }
}
/* Write function epilogue.  */

static void
rs6000_output_function_epilogue (FILE *file,
                                 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
{
  if (! HAVE_epilogue)
    {
      rtx insn = get_last_insn ();
      /* If the last insn was a BARRIER, we don't have to write anything except
         the trace table.  */
      if (GET_CODE (insn) == NOTE)
        insn = prev_nonnote_insn (insn);
      if (insn == 0 || GET_CODE (insn) != BARRIER)
        {
          /* This is slightly ugly, but at least we don't have two
             copies of the epilogue-emitting code.  */
          start_sequence ();

          /* A NOTE_INSN_DELETED is supposed to be at the start
             and end of the "toplevel" insn chain.  */
          emit_note (NOTE_INSN_DELETED);
          rs6000_emit_epilogue (FALSE);
          emit_note (NOTE_INSN_DELETED);

          /* Expand INSN_ADDRESSES so final() doesn't crash.  */
          {
            rtx insn;
            unsigned addr = 0;
            for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
              {
                INSN_ADDRESSES_NEW (insn, addr);
                addr += 4;
              }
          }

          if (TARGET_DEBUG_STACK)
            debug_rtx_list (get_insns (), 100);
          final (get_insns (), file, FALSE);
          end_sequence ();
        }
    }

#if TARGET_MACHO
  macho_branch_islands ();
  /* Mach-O doesn't support labels at the end of objects, so if
     it looks like we might want one, insert a NOP.  */
  {
    rtx insn = get_last_insn ();
    while (insn
           && NOTE_P (insn)
           && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED_LABEL)
      insn = PREV_INSN (insn);
    if (insn
        && (LABEL_P (insn)
            || (NOTE_P (insn)
                && NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)))
      fputs ("\tnop\n", file);
  }
#endif

  /* Output a traceback table here.  See /usr/include/sys/debug.h for info
     on its format.

     We don't output a traceback table if -finhibit-size-directive was
     used.  The documentation for -finhibit-size-directive reads
     ``don't output a @code{.size} assembler directive, or anything
     else that would cause trouble if the function is split in the
     middle, and the two halves are placed at locations far apart in
     memory.''  The traceback table has this property, since it
     includes the offset from the start of the function to the
     traceback table itself.

     System V.4 Powerpc's (and the embedded ABI derived from it) use a
     different traceback table.  */
  if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
      && rs6000_traceback != traceback_none && !current_function_is_thunk)
    {
      const char *fname = NULL;
      const char *language_string = lang_hooks.name;
      int fixed_parms = 0, float_parms = 0, parm_info = 0;
      int i;
      int optional_tbtab;
      rs6000_stack_t *info = rs6000_stack_info ();

      if (rs6000_traceback == traceback_full)
        optional_tbtab = 1;
      else if (rs6000_traceback == traceback_part)
        optional_tbtab = 0;
      else
        optional_tbtab = !optimize_size && !TARGET_ELF;

      if (optional_tbtab)
        {
          fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
          while (*fname == '.')	/* V.4 encodes . in the name */
            fname++;

          /* Need label immediately before tbtab, so we can compute
             its offset from the function start.  */
          ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
          ASM_OUTPUT_LABEL (file, fname);
        }

      /* The .tbtab pseudo-op can only be used for the first eight
         expressions, since it can't handle the possibly variable
         length fields that follow.  However, if you omit the optional
         fields, the assembler outputs zeros for all optional fields
         anyway, giving each variable-length field its minimum length
         (as defined in sys/debug.h).  Thus we can not use the .tbtab
         pseudo-op at all.  */

      /* An all-zero word flags the start of the tbtab, for debuggers
         that have to find it by searching forward from the entry
         point or from the current pc.  */
      fputs ("\t.long 0\n", file);

      /* Tbtab format type.  Use format type 0.  */
      fputs ("\t.byte 0,", file);

      /* Language type.  Unfortunately, there does not seem to be any
         official way to discover the language being compiled, so we
         use language_string.
         C is 0.  Fortran is 1.  Pascal is 2.  Ada is 3.  C++ is 9.
         Java is 13.  Objective-C is 14.  Objective-C++ isn't assigned
         a number, so for now use 9.  */
      if (! strcmp (language_string, "GNU C"))
        i = 0;
      else if (! strcmp (language_string, "GNU F77")
               || ! strcmp (language_string, "GNU F95"))
        i = 1;
      else if (! strcmp (language_string, "GNU Pascal"))
        i = 2;
      else if (! strcmp (language_string, "GNU Ada"))
        i = 3;
      else if (! strcmp (language_string, "GNU C++")
               || ! strcmp (language_string, "GNU Objective-C++"))
        i = 9;
      else if (! strcmp (language_string, "GNU Java"))
        i = 13;
      else if (! strcmp (language_string, "GNU Objective-C"))
        i = 14;
      else
        gcc_unreachable ();
      fprintf (file, "%d,", i);

      /* 8 single bit fields: global linkage (not set for C extern linkage,
         apparently a PL/I convention?), out-of-line epilogue/prologue, offset
         from start of procedure stored in tbtab, internal function, function
         has controlled storage, function has no toc, function uses fp,
         function logs/aborts fp operations.  */
      /* Assume that fp operations are used if any fp reg must be saved.  */
      fprintf (file, "%d,",
               (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));

      /* 6 bitfields: function is interrupt handler, name present in
         proc table, function calls alloca, on condition directives
         (controls stack walks, 3 bits), saves condition reg, saves
         link reg.  */
      /* The `function calls alloca' bit seems to be set whenever reg 31 is
         set up as a frame pointer, even when there is no alloca call.  */
      fprintf (file, "%d,",
               ((optional_tbtab << 6)
                | ((optional_tbtab & frame_pointer_needed) << 5)
                | (info->cr_save_p << 1)
                | (info->lr_save_p)));

      /* 3 bitfields: saves backchain, fixup code, number of fpr saved
         (6 bits).  */
      fprintf (file, "%d,",
               (info->push_p << 7) | (64 - info->first_fp_reg_save));

      /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits).  */
      fprintf (file, "%d,", (32 - first_reg_to_save ()));

      if (optional_tbtab)
        {
          /* Compute the parameter info from the function decl argument
             list.  */
          tree decl;
          int next_parm_info_bit = 31;

          for (decl = DECL_ARGUMENTS (current_function_decl);
               decl; decl = TREE_CHAIN (decl))
            {
              rtx parameter = DECL_INCOMING_RTL (decl);
              enum machine_mode mode = GET_MODE (parameter);

              if (GET_CODE (parameter) == REG)
                {
                  if (SCALAR_FLOAT_MODE_P (mode))
                    {
                      int bits;

                      float_parms++;

                      switch (mode)
                        {
                        case SFmode:
                        case SDmode:
                          bits = 0x2;
                          break;

                        case DFmode:
                        case DDmode:
                        case TFmode:
                        case TDmode:
                          bits = 0x3;
                          break;

                        default:
                          gcc_unreachable ();
                        }

                      /* If only one bit will fit, don't or in this entry.  */
                      if (next_parm_info_bit > 0)
                        parm_info |= (bits << (next_parm_info_bit - 1));
                      next_parm_info_bit -= 2;
                    }
                  else
                    {
                      fixed_parms += ((GET_MODE_SIZE (mode)
                                       + (UNITS_PER_WORD - 1))
                                      / UNITS_PER_WORD);
                      next_parm_info_bit -= 1;
                    }
                }
            }
        }

      /* Number of fixed point parameters.  */
      /* This is actually the number of words of fixed point parameters; thus
         an 8 byte struct counts as 2; and thus the maximum value is 8.  */
      fprintf (file, "%d,", fixed_parms);

      /* 2 bitfields: number of floating point parameters (7 bits), parameters
         on stack.  */
      /* This is actually the number of fp registers that hold parameters;
         and thus the maximum value is 13.  */
      /* Set parameters on stack bit if parameters are not in their original
         registers, regardless of whether they are on the stack?  Xlc
         seems to set the bit when not optimizing.  */
      fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));

      if (! optional_tbtab)
        return;

      /* Optional fields follow.  Some are variable length.  */

      /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
         11 double float.  */
      /* There is an entry for each parameter in a register, in the order that
         they occur in the parameter list.  Any intervening arguments on the
         stack are ignored.  If the list overflows a long (max possible length
         34 bits) then completely leave off all elements that don't fit.  */
      /* Only emit this long if there was at least one parameter.  */
      if (fixed_parms || float_parms)
        fprintf (file, "\t.long %d\n", parm_info);

      /* Offset from start of code to tb table.  */
      fputs ("\t.long ", file);
      ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
      if (TARGET_AIX)
        RS6000_OUTPUT_BASENAME (file, fname);
      else
        assemble_name (file, fname);
      putc ('-', file);
      rs6000_output_function_entry (file, fname);
      putc ('\n', file);

      /* Interrupt handler mask.  */
      /* Omit this long, since we never set the interrupt handler bit
         above.  */

      /* Number of CTL (controlled storage) anchors.  */
      /* Omit this long, since the has_ctl bit is never set above.  */

      /* Displacement into stack of each CTL anchor.  */
      /* Omit this list of longs, because there are no CTL anchors.  */

      /* Length of function name.  */
      if (*fname == '*')
        ++fname;
      fprintf (file, "\t.short %d\n", (int) strlen (fname));

      /* Function name.  */
      assemble_string (fname, strlen (fname));

      /* Register for alloca automatic storage; this is always reg 31.
         Only emit this if the alloca bit was set above.  */
      if (frame_pointer_needed)
        fputs ("\t.byte 31\n", file);

      fputs ("\t.align 2\n", file);
    }
}
/* A C compound statement that outputs the assembler code for a thunk
   function, used to implement C++ virtual function calls with
   multiple inheritance.  The thunk acts as a wrapper around a virtual
   function, adjusting the implicit object parameter before handing
   control off to the real function.

   First, emit code to add the integer DELTA to the location that
   contains the incoming first argument.  Assume that this argument
   contains a pointer, and is the one used to pass the `this' pointer
   in C++.  This is the incoming argument *before* the function
   prologue, e.g. `%o0' on a sparc.  The addition must preserve the
   values of all other incoming arguments.

   After the addition, emit code to jump to FUNCTION, which is a
   `FUNCTION_DECL'.  This is a direct pure jump, not a call, and does
   not touch the return address.  Hence returning from FUNCTION will
   return to whoever called the current `thunk'.

   The effect must be as if FUNCTION had been called directly with the
   adjusted first argument.  This macro is responsible for emitting
   all of the code for a thunk function; output_function_prologue()
   and output_function_epilogue() are not invoked.

   The THUNK_FNDECL is redundant.  (DELTA and FUNCTION have already
   been extracted from it.)  It might possibly be useful on some
   targets, but probably not.

   If you do not define this macro, the target-independent code in the
   C++ frontend will generate a less efficient heavyweight thunk that
   calls FUNCTION instead of jumping to it.  The generic approach does
   not support varargs.  */

static void
rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
                        HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
                        tree function)
{
  rtx this, insn, funexp;

  reload_completed = 1;
  epilogue_completed = 1;
  no_new_pseudos = 1;
  reset_block_changes ();

  /* Mark the end of the (empty) prologue.  */
  emit_note (NOTE_INSN_PROLOGUE_END);

  /* Find the "this" pointer.  If the function returns a structure,
     the structure return pointer is in r3.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
    this = gen_rtx_REG (Pmode, 4);
  else
    this = gen_rtx_REG (Pmode, 3);

  /* Apply the constant offset, if required.  */
  if (delta)
    {
      rtx delta_rtx = GEN_INT (delta);
      emit_insn (TARGET_32BIT
                 ? gen_addsi3 (this, this, delta_rtx)
                 : gen_adddi3 (this, this, delta_rtx));
    }

  /* Apply the offset from the vtable, if required.  */
  if (vcall_offset)
    {
      rtx vcall_offset_rtx = GEN_INT (vcall_offset);
      rtx tmp = gen_rtx_REG (Pmode, 12);

      emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
      if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
        {
          emit_insn (TARGET_32BIT
                     ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
                     : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
          emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
        }
      else
        {
          rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);

          emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
        }
      emit_insn (TARGET_32BIT
                 ? gen_addsi3 (this, this, tmp)
                 : gen_adddi3 (this, this, tmp));
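      /* In rough pseudo-C, the adjustment above computes
           this += delta;
           this += *(ptrdiff_t *) (*(char **) this + vcall_offset);
         i.e. the extra offset is loaded through the object's vtable
         pointer at byte offset VCALL_OFFSET, then added to `this'.  */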
    }

  /* Generate a tail call to the target function.  */
  if (!TREE_USED (function))
    {
      assemble_external (function);
      TREE_USED (function) = 1;
    }
  funexp = XEXP (DECL_RTL (function), 0);
  funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);

#if TARGET_MACHO
  if (MACHOPIC_INDIRECT)
    funexp = machopic_indirect_call_target (funexp);
#endif

  /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
     generate sibcall RTL explicitly.  */
  insn = emit_call_insn (
           gen_rtx_PARALLEL (VOIDmode,
             gen_rtvec (4,
                        gen_rtx_CALL (VOIDmode,
                                      funexp, const0_rtx),
                        gen_rtx_USE (VOIDmode, const0_rtx),
                        gen_rtx_USE (VOIDmode,
                                     gen_rtx_REG (SImode,
                                                  LINK_REGISTER_REGNUM)),
                        gen_rtx_RETURN (VOIDmode))));
  SIBLING_CALL_P (insn) = 1;
  emit_barrier ();

  /* Run just enough of rest_of_compilation to get the insns emitted.
     There's not really enough bulk here to make other passes such as
     instruction scheduling worthwhile.  Note that use_thunk calls
     assemble_start_function and assemble_end_function.  */
  insn = get_insns ();
  insn_locators_initialize ();
  shorten_branches (insn);
  final_start_function (insn, file, 1);
  final (insn, file, 1);
  final_end_function ();

  reload_completed = 0;
  epilogue_completed = 0;
  no_new_pseudos = 0;
}

/* A quick summary of the various types of 'constant-pool tables'
   under PowerPC:

   Target       Flags           Name            One table per
   AIX          (none)          AIX TOC         object file
   AIX          -mfull-toc      AIX TOC         object file
   AIX          -mminimal-toc   AIX minimal TOC translation unit
   SVR4/EABI    (none)          SVR4 SDATA      object file
   SVR4/EABI    -fpic           SVR4 pic        object file
   SVR4/EABI    -fPIC           SVR4 PIC        translation unit
   SVR4/EABI    -mrelocatable   EABI TOC        function
   SVR4/EABI    -maix           AIX TOC         object file
   SVR4/EABI    -maix -mminimal-toc
                                AIX minimal TOC translation unit

   Name                 Reg.    Set by  entries       contains:
                                made by  addrs? fp?     sum?

   AIX TOC              2       crt0    as      Y       option  option
   AIX minimal TOC      30      prolog  gcc     Y       Y       option
   SVR4 SDATA           13      crt0    gcc     N       Y       N
   SVR4 pic             30      prolog  ld      Y       not yet N
   SVR4 PIC             30      prolog  gcc     Y       option  option
   EABI TOC             30      prolog  gcc     Y       option  option

*/
/* Hash functions for the hash table.  */

static unsigned
rs6000_hash_constant (rtx k)
{
  enum rtx_code code = GET_CODE (k);
  enum machine_mode mode = GET_MODE (k);
  unsigned result = (code << 3) ^ mode;
  const char *format;
  int flen, fidx;

  format = GET_RTX_FORMAT (code);
  flen = strlen (format);
  fidx = 0;

  switch (code)
    {
    case LABEL_REF:
      return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));

    case CONST_DOUBLE:
      if (mode != VOIDmode)
        return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
      flen = 2;
      break;

    case CODE_LABEL:
      fidx = 3;
      break;

    default:
      break;
    }

  for (; fidx < flen; fidx++)
    switch (format[fidx])
      {
      case 's':
        {
          unsigned i, len;
          const char *str = XSTR (k, fidx);
          len = strlen (str);
          result = result * 613 + len;
          for (i = 0; i < len; i++)
            result = result * 613 + (unsigned) str[i];
          break;
        }
      case 'u':
      case 'e':
        result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
        break;
      case 'i':
      case 'n':
        result = result * 613 + (unsigned) XINT (k, fidx);
        break;
      case 'w':
        if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
          result = result * 613 + (unsigned) XWINT (k, fidx);
        else
          {
            size_t i;
            for (i = 0; i < sizeof (HOST_WIDE_INT) / sizeof (unsigned); i++)
              result = result * 613 + (unsigned) (XWINT (k, fidx)
                                                  >> CHAR_BIT * i);
          }
        break;
      case '0':
        break;
      default:
        gcc_unreachable ();
      }

  return result;
}
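/* Note: 613 and 1231 above are just odd multipliers used to mix the
   operand bits; their exact values are not significant, since any
   collisions are resolved by toc_hash_eq below.  */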
static hashval_t
toc_hash_function (const void *hash_entry)
{
  const struct toc_hash_struct *thc =
    (const struct toc_hash_struct *) hash_entry;
  return rs6000_hash_constant (thc->key) ^ thc->key_mode;
}

/* Compare H1 and H2 for equivalence.  */

static int
toc_hash_eq (const void *h1, const void *h2)
{
  rtx r1 = ((const struct toc_hash_struct *) h1)->key;
  rtx r2 = ((const struct toc_hash_struct *) h2)->key;

  if (((const struct toc_hash_struct *) h1)->key_mode
      != ((const struct toc_hash_struct *) h2)->key_mode)
    return 0;

  return rtx_equal_p (r1, r2);
}
/* These are the names given by the C++ front-end to vtables, and
   vtable-like objects.  Ideally, this logic should not be here;
   instead, there should be some programmatic way of inquiring as
   to whether or not an object is a vtable.  */

#define VTABLE_NAME_P(NAME)                            \
  (strncmp ("_vt.", name, strlen ("_vt.")) == 0        \
   || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0     \
   || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0     \
   || strncmp ("_ZTI", name, strlen ("_ZTI")) == 0     \
   || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)

void
rs6000_output_symbol_ref (FILE *file, rtx x)
{
  /* Currently C++ toc references to vtables can be emitted before it
     is decided whether the vtable is public or private.  If this is
     the case, then the linker will eventually complain that there is
     a reference to an unknown section.  Thus, for vtables only,
     we emit the TOC reference to reference the symbol and not the
     section.  */
  const char *name = XSTR (x, 0);

  if (VTABLE_NAME_P (name))
    {
      RS6000_OUTPUT_BASENAME (file, name);
    }
  else
    assemble_name (file, name);
}
/* Output a TOC entry.  We derive the entry name from what is being
   written.  */

void
output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
{
  char buf[256];
  const char *name = buf;
  const char *real_name;
  rtx base = x;
  HOST_WIDE_INT offset = 0;

  gcc_assert (!TARGET_NO_TOC);

  /* When the linker won't eliminate them, don't output duplicate
     TOC entries (this happens on AIX if there is any kind of TOC,
     and on SVR4 under -fPIC or -mrelocatable).  Don't do this for
     labels.  */
  if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
    {
      struct toc_hash_struct *h;
      void * * found;

      /* Create toc_hash_table.  This can't be done at OVERRIDE_OPTIONS
         time because GGC is not initialized at that point.  */
      if (toc_hash_table == NULL)
        toc_hash_table = htab_create_ggc (1021, toc_hash_function,
                                          toc_hash_eq, NULL);

      h = ggc_alloc (sizeof (*h));
      h->key = x;
      h->key_mode = mode;
      h->labelno = labelno;

      found = htab_find_slot (toc_hash_table, h, 1);
      if (*found == NULL)
        *found = h;
      else  /* This is indeed a duplicate.
               Set this label equal to that label.  */
        {
          fputs ("\t.set ", file);
          ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
          fprintf (file, "%d,", labelno);
          ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
          fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
                                   found)->labelno));
          return;
        }
    }

  /* If we're going to put a double constant in the TOC, make sure it's
     aligned properly when strict alignment is on.  */
  if (GET_CODE (x) == CONST_DOUBLE
      && STRICT_ALIGNMENT
      && GET_MODE_BITSIZE (mode) >= 64
      && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
    ASM_OUTPUT_ALIGN (file, 3);
  }

  (*targetm.asm_out.internal_label) (file, "LC", labelno);

  /* Handle FP constants specially.  Note that if we have a minimal
     TOC, things we put here aren't actually in the TOC, so we can allow
     FP constants.  */
  if (GET_CODE (x) == CONST_DOUBLE &&
      (GET_MODE (x) == TFmode || GET_MODE (x) == TDmode))
    {
      REAL_VALUE_TYPE rv;
      long k[4];

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
        REAL_VALUE_TO_TARGET_DECIMAL128 (rv, k);
      else
        REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      if (TARGET_64BIT)
        {
          if (TARGET_MINIMAL_TOC)
            fputs (DOUBLE_INT_ASM_OP, file);
          else
            fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
                     k[0] & 0xffffffff, k[1] & 0xffffffff,
                     k[2] & 0xffffffff, k[3] & 0xffffffff);
          fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
                   k[0] & 0xffffffff, k[1] & 0xffffffff,
                   k[2] & 0xffffffff, k[3] & 0xffffffff);
          return;
        }
      else
        {
          if (TARGET_MINIMAL_TOC)
            fputs ("\t.long ", file);
          else
            fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
                     k[0] & 0xffffffff, k[1] & 0xffffffff,
                     k[2] & 0xffffffff, k[3] & 0xffffffff);
          fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
                   k[0] & 0xffffffff, k[1] & 0xffffffff,
                   k[2] & 0xffffffff, k[3] & 0xffffffff);
          return;
        }
    }
  else if (GET_CODE (x) == CONST_DOUBLE &&
           (GET_MODE (x) == DFmode || GET_MODE (x) == DDmode))
    {
      REAL_VALUE_TYPE rv;
      long k[2];

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);

      if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
        REAL_VALUE_TO_TARGET_DECIMAL64 (rv, k);
      else
        REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      if (TARGET_64BIT)
        {
          if (TARGET_MINIMAL_TOC)
            fputs (DOUBLE_INT_ASM_OP, file);
          else
            fprintf (file, "\t.tc FD_%lx_%lx[TC],",
                     k[0] & 0xffffffff, k[1] & 0xffffffff);
          fprintf (file, "0x%lx%08lx\n",
                   k[0] & 0xffffffff, k[1] & 0xffffffff);
          return;
        }
      else
        {
          if (TARGET_MINIMAL_TOC)
            fputs ("\t.long ", file);
          else
            fprintf (file, "\t.tc FD_%lx_%lx[TC],",
                     k[0] & 0xffffffff, k[1] & 0xffffffff);
          fprintf (file, "0x%lx,0x%lx\n",
                   k[0] & 0xffffffff, k[1] & 0xffffffff);
          return;
        }
    }
  else if (GET_CODE (x) == CONST_DOUBLE &&
           (GET_MODE (x) == SFmode || GET_MODE (x) == SDmode))
    {
      REAL_VALUE_TYPE rv;
      long l;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
        REAL_VALUE_TO_TARGET_DECIMAL32 (rv, l);
      else
        REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      if (TARGET_64BIT)
        {
          if (TARGET_MINIMAL_TOC)
            fputs (DOUBLE_INT_ASM_OP, file);
          else
            fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
          fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
          return;
        }
      else
        {
          if (TARGET_MINIMAL_TOC)
            fputs ("\t.long ", file);
          else
            fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
          fprintf (file, "0x%lx\n", l & 0xffffffff);
          return;
        }
    }
  else if (GET_MODE (x) == VOIDmode
           && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      unsigned HOST_WIDE_INT low;
      HOST_WIDE_INT high;

      if (GET_CODE (x) == CONST_DOUBLE)
        {
          low = CONST_DOUBLE_LOW (x);
          high = CONST_DOUBLE_HIGH (x);
        }
      else
#if HOST_BITS_PER_WIDE_INT == 32
        {
          low = INTVAL (x);
          high = (low & 0x80000000) ? ~0 : 0;
        }
#else
        {
          low = INTVAL (x) & 0xffffffff;
          high = (HOST_WIDE_INT) INTVAL (x) >> 32;
        }
#endif

      /* TOC entries are always Pmode-sized, but since this
         is a bigendian machine then if we're putting smaller
         integer constants in the TOC we have to pad them.
         (This is still a win over putting the constants in
         a separate constant pool, because then we'd have
         to have both a TOC entry _and_ the actual constant.)

         For a 32-bit target, CONST_INT values are loaded and shifted
         entirely within `low' and can be stored in one TOC entry.  */

      /* It would be easy to make this work, but it doesn't now.  */
      gcc_assert (!TARGET_64BIT || POINTER_SIZE >= GET_MODE_BITSIZE (mode));

      if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
        {
#if HOST_BITS_PER_WIDE_INT == 32
          lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
                         POINTER_SIZE, &low, &high, 0);
#else
          low |= high << 32;
          low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
          high = (HOST_WIDE_INT) low >> 32;
          low &= 0xffffffff;
#endif
        }

      if (TARGET_64BIT)
        {
          if (TARGET_MINIMAL_TOC)
            fputs (DOUBLE_INT_ASM_OP, file);
          else
            fprintf (file, "\t.tc ID_%lx_%lx[TC],",
                     (long) high & 0xffffffff, (long) low & 0xffffffff);
          fprintf (file, "0x%lx%08lx\n",
                   (long) high & 0xffffffff, (long) low & 0xffffffff);
          return;
        }
      else
        {
          if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
            {
              if (TARGET_MINIMAL_TOC)
                fputs ("\t.long ", file);
              else
                fprintf (file, "\t.tc ID_%lx_%lx[TC],",
                         (long) high & 0xffffffff, (long) low & 0xffffffff);
              fprintf (file, "0x%lx,0x%lx\n",
                       (long) high & 0xffffffff, (long) low & 0xffffffff);
            }
          else
            {
              if (TARGET_MINIMAL_TOC)
                fputs ("\t.long ", file);
              else
                fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
              fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
            }
          return;
        }
    }

  if (GET_CODE (x) == CONST)
    {
      gcc_assert (GET_CODE (XEXP (x, 0)) == PLUS);

      base = XEXP (XEXP (x, 0), 0);
      offset = INTVAL (XEXP (XEXP (x, 0), 1));
    }

  switch (GET_CODE (base))
    {
    case SYMBOL_REF:
      name = XSTR (base, 0);
      break;

    case LABEL_REF:
      ASM_GENERATE_INTERNAL_LABEL (buf, "L",
                                   CODE_LABEL_NUMBER (XEXP (base, 0)));
      break;

    case CODE_LABEL:
      ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
      break;

    default:
      gcc_unreachable ();
    }

  real_name = (*targetm.strip_name_encoding) (name);
  if (TARGET_MINIMAL_TOC)
    fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
  else
    {
      fprintf (file, "\t.tc %s", real_name);

      if (offset < 0)
        fprintf (file, ".N" HOST_WIDE_INT_PRINT_UNSIGNED, - offset);
      else if (offset)
        fprintf (file, ".P" HOST_WIDE_INT_PRINT_UNSIGNED, offset);

      fputs ("[TC],", file);
    }

  /* Currently C++ toc references to vtables can be emitted before it
     is decided whether the vtable is public or private.  If this is
     the case, then the linker will eventually complain that there is
     a TOC reference to an unknown section.  Thus, for vtables only,
     we emit the TOC reference to reference the symbol and not the
     section.  */
  if (VTABLE_NAME_P (name))
    {
      RS6000_OUTPUT_BASENAME (file, name);
      if (offset < 0)
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset);
      else if (offset > 0)
        fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC, offset);
    }
  else
    output_addr_const (file, x);
  putc ('\n', file);
}
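/* For reference: a (hypothetical) non-minimal-TOC entry for the
   DFmode constant 1.0 on a 32-bit target would come out roughly as

        .tc FD_3ff00000_0[TC],0x3ff00000,0x0

   -- the name encodes the bit pattern so identical constants share
   one entry, and the value after the comma is the payload.  */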
/* Output an assembler pseudo-op to write an ASCII string of N characters
   starting at P to FILE.

   On the RS/6000, we have to do this using the .byte operation and
   write out special characters outside the quoted string.
   Also, the assembler is broken; very long strings are truncated,
   so we must artificially break them up early.  */

void
output_ascii (FILE *file, const char *p, int n)
{
  char c;
  int i, count_string;
  const char *for_string = "\t.byte \"";
  const char *for_decimal = "\t.byte ";
  const char *to_close = NULL;

  count_string = 0;
  for (i = 0; i < n; i++)
    {
      c = *p++;
      if (c >= ' ' && c < 0177)
        {
          if (for_string)
            fputs (for_string, file);
          putc (c, file);

          /* Write two quotes to get one.  */
          if (c == '"')
            {
              putc (c, file);
              ++count_string;
            }

          for_string = NULL;
          for_decimal = "\"\n\t.byte ";
          to_close = "\"\n";
          ++count_string;

          if (count_string >= 512)
            {
              fputs (to_close, file);

              for_string = "\t.byte \"";
              for_decimal = "\t.byte ";
              to_close = NULL;
              count_string = 0;
            }
        }
      else
        {
          if (for_decimal)
            fputs (for_decimal, file);
          fprintf (file, "%d", c);

          for_string = "\n\t.byte \"";
          for_decimal = ", ";
          to_close = "\n";
          count_string = 0;
        }
    }

  /* Now close the string if we have written one.  Then end the line.  */
  if (to_close)
    fputs (to_close, file);
}
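/* For example, output_ascii (file, "Hi\n", 3) emits roughly

        .byte "Hi"
        .byte 10

   -- printable characters are collected into a quoted run, while the
   newline is written as a decimal byte outside the quotes.  */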
/* Generate a unique section name for FILENAME for a section type
   represented by SECTION_DESC.  Output goes into BUF.

   SECTION_DESC can be any string, as long as it is different for each
   possible section type.

   We name the section in the same manner as xlc.  The name begins with an
   underscore followed by the filename (after stripping any leading directory
   names) with the last period replaced by the string SECTION_DESC.  If
   FILENAME does not contain a period, SECTION_DESC is appended to the end of
   the name.  */

void
rs6000_gen_section_name (char **buf, const char *filename,
                         const char *section_desc)
{
  const char *q, *after_last_slash, *last_period = 0;
  char *p;
  int len;

  after_last_slash = filename;
  for (q = filename; *q; q++)
    {
      if (*q == '/')
        after_last_slash = q + 1;
      else if (*q == '.')
        last_period = q;
    }

  len = strlen (after_last_slash) + strlen (section_desc) + 2;
  *buf = (char *) xmalloc (len);

  p = *buf;
  *p++ = '_';

  for (q = after_last_slash; *q; q++)
    {
      if (q == last_period)
        {
          strcpy (p, section_desc);
          p += strlen (section_desc);
          break;
        }

      else if (ISALNUM (*q))
        *p++ = *q;
    }

  if (last_period == 0)
    strcpy (p, section_desc);
  else
    *p = '\0';
}
/* Emit profile function.  */

void
output_profile_hook (int labelno ATTRIBUTE_UNUSED)
{
  /* Non-standard profiling for kernels, which just saves LR then calls
     _mcount without worrying about arg saves.  The idea is to change
     the function prologue as little as possible as it isn't easy to
     account for arg save/restore code added just for _mcount.  */
  if (TARGET_PROFILE_KERNEL)
    return;

  if (DEFAULT_ABI == ABI_AIX)
    {
#ifndef NO_PROFILE_COUNTERS
# define NO_PROFILE_COUNTERS 0
#endif
      if (NO_PROFILE_COUNTERS)
        emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
      else
        {
          char buf[30];
          const char *label_name;
          rtx fun;

          ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
          label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
          fun = gen_rtx_SYMBOL_REF (Pmode, label_name);

          emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
                             fun, Pmode);
        }
    }
  else if (DEFAULT_ABI == ABI_DARWIN)
    {
      const char *mcount_name = RS6000_MCOUNT;
      int caller_addr_regno = LINK_REGISTER_REGNUM;

      /* Be conservative and always set this, at least for now.  */
      current_function_uses_pic_offset_table = 1;

#if TARGET_MACHO
      /* For PIC code, set up a stub and collect the caller's address
         from r0, which is where the prologue puts it.  */
      if (MACHOPIC_INDIRECT
          && current_function_uses_pic_offset_table)
        caller_addr_regno = 0;
#endif
      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
                         0, VOIDmode, 1,
                         gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
    }
}
/* Write function profiler code.  */

void
output_function_profiler (FILE *file, int labelno)
{
  char buf[100];

  switch (DEFAULT_ABI)
    {
    default:
      gcc_unreachable ();

    case ABI_V4:
      if (!TARGET_32BIT)
        {
          warning (0, "no profiling of 64-bit code for this ABI");
          return;
        }
      ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
      fprintf (file, "\tmflr %s\n", reg_names[0]);
      if (NO_PROFILE_COUNTERS)
        {
          asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
                       reg_names[0], reg_names[1]);
        }
      else if (TARGET_SECURE_PLT && flag_pic)
        {
          asm_fprintf (file, "\tbcl 20,31,1f\n1:\n\t{st|stw} %s,4(%s)\n",
                       reg_names[0], reg_names[1]);
          asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
          asm_fprintf (file, "\t{cau|addis} %s,%s,",
                       reg_names[12], reg_names[12]);
          assemble_name (file, buf);
          asm_fprintf (file, "-1b@ha\n\t{cal|la} %s,", reg_names[0]);
          assemble_name (file, buf);
          asm_fprintf (file, "-1b@l(%s)\n", reg_names[12]);
        }
      else if (flag_pic == 1)
        {
          fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
          asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
                       reg_names[0], reg_names[1]);
          asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
          asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
          assemble_name (file, buf);
          asm_fprintf (file, "@got(%s)\n", reg_names[12]);
        }
      else if (flag_pic > 1)
        {
          asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
                       reg_names[0], reg_names[1]);
          /* Now, we need to get the address of the label.  */
          fputs ("\tbcl 20,31,1f\n\t.long ", file);
          assemble_name (file, buf);
          fputs ("-.\n1:", file);
          asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
          asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
                       reg_names[0], reg_names[11]);
          asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
                       reg_names[0], reg_names[0], reg_names[11]);
        }
      else
        {
          asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
          assemble_name (file, buf);
          fputs ("@ha\n", file);
          asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
                       reg_names[0], reg_names[1]);
          asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
          assemble_name (file, buf);
          asm_fprintf (file, "@l(%s)\n", reg_names[12]);
        }

      /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH.  */
      fprintf (file, "\tbl %s%s\n",
               RS6000_MCOUNT, flag_pic ? "@plt" : "");
      break;

    case ABI_AIX:
    case ABI_DARWIN:
      if (!TARGET_PROFILE_KERNEL)
        {
          /* Don't do anything, done in output_profile_hook ().  */
        }
      else
        {
          gcc_assert (!TARGET_32BIT);

          asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
          asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);

          if (cfun->static_chain_decl != NULL)
            {
              asm_fprintf (file, "\tstd %s,24(%s)\n",
                           reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
              fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
              asm_fprintf (file, "\tld %s,24(%s)\n",
                           reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
            }
          else
            fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
        }
      break;
    }
}
/* The following variable value is the last issued insn.  */

static rtx last_scheduled_insn;

/* The following variable helps to balance issuing of load and
   store instructions.  */

static int load_store_pendulum;

/* Power4 load update and store update instructions are cracked into a
   load or store and an integer insn which are executed in the same cycle.
   Branches have their own dispatch slot which does not count against the
   GCC issue rate, but it changes the program flow so there are no other
   instructions to issue in this cycle.  */

static int
rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
		       int verbose ATTRIBUTE_UNUSED,
		       rtx insn, int more)
{
  last_scheduled_insn = insn;
  if (GET_CODE (PATTERN (insn)) == USE
      || GET_CODE (PATTERN (insn)) == CLOBBER)
    {
      cached_can_issue_more = more;
      return cached_can_issue_more;
    }

  if (insn_terminates_group_p (insn, current_group))
    {
      cached_can_issue_more = 0;
      return cached_can_issue_more;
    }

  /* If no reservation, but reach here */
  if (recog_memoized (insn) < 0)
    return more;

  if (rs6000_sched_groups)
    {
      if (is_microcoded_insn (insn))
	cached_can_issue_more = 0;
      else if (is_cracked_insn (insn))
	cached_can_issue_more = more > 2 ? more - 2 : 0;
      else
	cached_can_issue_more = more - 1;

      return cached_can_issue_more;
    }

  if (rs6000_cpu_attr == CPU_CELL && is_nonpipeline_insn (insn))
    return 0;

  cached_can_issue_more = more - 1;
  return cached_can_issue_more;
}
/* Adjust the cost of a scheduling dependency.  Return the new cost of
   a dependency LINK or INSN on DEP_INSN.  COST is the current cost.  */

static int
rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
{
  enum attr_type attr_type;

  if (! recog_memoized (insn))
    return 0;

  switch (REG_NOTE_KIND (link))
    {
    case REG_DEP_TRUE:
      {
	/* Data dependency; DEP_INSN writes a register that INSN reads
	   some cycles later.  */

	/* Separate a load from a narrower, dependent store.  */
	if (rs6000_sched_groups
	    && GET_CODE (PATTERN (insn)) == SET
	    && GET_CODE (PATTERN (dep_insn)) == SET
	    && GET_CODE (XEXP (PATTERN (insn), 1)) == MEM
	    && GET_CODE (XEXP (PATTERN (dep_insn), 0)) == MEM
	    && (GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (insn), 1)))
		> GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (dep_insn), 0)))))
	  return cost + 14;

	attr_type = get_attr_type (insn);

	switch (attr_type)
	  {
	  case TYPE_JMPREG:
	    /* Tell the first scheduling pass about the latency between
	       a mtctr and bctr (and mtlr and br/blr).  The first
	       scheduling pass will not know about this latency since
	       the mtctr instruction, which has the latency associated
	       to it, will be generated by reload.  */
	    return TARGET_POWER ? 5 : 4;

	  case TYPE_BRANCH:
	    /* Leave some extra cycles between a compare and its
	       dependent branch, to inhibit expensive mispredicts.  */
	    if ((rs6000_cpu_attr == CPU_PPC603
		 || rs6000_cpu_attr == CPU_PPC604
		 || rs6000_cpu_attr == CPU_PPC604E
		 || rs6000_cpu_attr == CPU_PPC620
		 || rs6000_cpu_attr == CPU_PPC630
		 || rs6000_cpu_attr == CPU_PPC750
		 || rs6000_cpu_attr == CPU_PPC7400
		 || rs6000_cpu_attr == CPU_PPC7450
		 || rs6000_cpu_attr == CPU_POWER4
		 || rs6000_cpu_attr == CPU_POWER5
		 || rs6000_cpu_attr == CPU_CELL)
		&& recog_memoized (dep_insn)
		&& (INSN_CODE (dep_insn) >= 0))
	      switch (get_attr_type (dep_insn))
		{
		case TYPE_DELAYED_COMPARE:
		case TYPE_IMUL_COMPARE:
		case TYPE_LMUL_COMPARE:
		case TYPE_FPCOMPARE:
		case TYPE_CR_LOGICAL:
		case TYPE_DELAYED_CR:
		  return cost + 2;
		default:
		  break;
		}
	    break;

	  case TYPE_STORE_UX:
	  case TYPE_FPSTORE_U:
	  case TYPE_FPSTORE_UX:
	    if ((rs6000_cpu == PROCESSOR_POWER6)
		&& recog_memoized (dep_insn)
		&& (INSN_CODE (dep_insn) >= 0))
	      {
		if (GET_CODE (PATTERN (insn)) != SET)
		  /* If this happens, we have to extend this to schedule
		     optimally.  Return default for now.  */
		  return cost;

		/* Adjust the cost for the case where the value written
		   by a fixed point operation is used as the address
		   gen value on a store.  */
		switch (get_attr_type (dep_insn))
		  {
		  case TYPE_LOAD_EXT:
		  case TYPE_LOAD_EXT_U:
		  case TYPE_LOAD_EXT_UX:
		  case TYPE_VAR_SHIFT_ROTATE:
		  case TYPE_VAR_DELAYED_COMPARE:
		    if (! store_data_bypass_p (dep_insn, insn))
		      return 6;
		    break;
		  case TYPE_FAST_COMPARE:
		  case TYPE_INSERT_WORD:
		  case TYPE_INSERT_DWORD:
		  case TYPE_FPLOAD_U:
		  case TYPE_FPLOAD_UX:
		  case TYPE_STORE_UX:
		  case TYPE_FPSTORE_U:
		  case TYPE_FPSTORE_UX:
		    if (! store_data_bypass_p (dep_insn, insn))
		      return 3;
		    break;
		  case TYPE_IMUL_COMPARE:
		  case TYPE_LMUL_COMPARE:
		    if (! store_data_bypass_p (dep_insn, insn))
		      return 17;
		    break;
		  case TYPE_IDIV:
		    if (! store_data_bypass_p (dep_insn, insn))
		      return 45;
		    break;
		  case TYPE_LDIV:
		    if (! store_data_bypass_p (dep_insn, insn))
		      return 57;
		    break;
		  default:
		    break;
		  }
	      }
	    break;

	  case TYPE_LOAD_EXT:
	  case TYPE_LOAD_EXT_U:
	  case TYPE_LOAD_EXT_UX:
	    if ((rs6000_cpu == PROCESSOR_POWER6)
		&& recog_memoized (dep_insn)
		&& (INSN_CODE (dep_insn) >= 0))
	      {
		/* Adjust the cost for the case where the value written
		   by a fixed point instruction is used within the address
		   gen portion of a subsequent load(u)(x).  */
		switch (get_attr_type (dep_insn))
		  {
		  case TYPE_LOAD_EXT:
		  case TYPE_LOAD_EXT_U:
		  case TYPE_LOAD_EXT_UX:
		  case TYPE_VAR_SHIFT_ROTATE:
		  case TYPE_VAR_DELAYED_COMPARE:
		    if (set_to_load_agen (dep_insn, insn))
		      return 6;
		    break;
		  case TYPE_FAST_COMPARE:
		  case TYPE_INSERT_WORD:
		  case TYPE_INSERT_DWORD:
		  case TYPE_FPLOAD_U:
		  case TYPE_FPLOAD_UX:
		  case TYPE_STORE_UX:
		  case TYPE_FPSTORE_U:
		  case TYPE_FPSTORE_UX:
		    if (set_to_load_agen (dep_insn, insn))
		      return 3;
		    break;
		  case TYPE_IMUL_COMPARE:
		  case TYPE_LMUL_COMPARE:
		    if (set_to_load_agen (dep_insn, insn))
		      return 17;
		    break;
		  case TYPE_IDIV:
		    if (set_to_load_agen (dep_insn, insn))
		      return 45;
		    break;
		  case TYPE_LDIV:
		    if (set_to_load_agen (dep_insn, insn))
		      return 57;
		    break;
		  default:
		    break;
		  }
	      }
	    break;

	  case TYPE_FPLOAD:
	    if ((rs6000_cpu == PROCESSOR_POWER6)
		&& recog_memoized (dep_insn)
		&& (INSN_CODE (dep_insn) >= 0)
		&& (get_attr_type (dep_insn) == TYPE_MFFGPR))
	      return 2;

	  default:
	    break;
	  }

	/* Fall out to return default cost.  */
      }
      break;

    case REG_DEP_OUTPUT:
      /* Output dependency; DEP_INSN writes a register that INSN writes some
	 cycles later.  */
      if ((rs6000_cpu == PROCESSOR_POWER6)
	  && recog_memoized (dep_insn)
	  && (INSN_CODE (dep_insn) >= 0))
	{
	  attr_type = get_attr_type (insn);

	  switch (attr_type)
	    {
	    case TYPE_FP:
	      if (get_attr_type (dep_insn) == TYPE_FP)
		return 1;
	      break;
	    case TYPE_FPLOAD:
	      if (get_attr_type (dep_insn) == TYPE_MFFGPR)
		return 2;
	      break;
	    default:
	      break;
	    }
	}
    case REG_DEP_ANTI:
      /* Anti dependency; DEP_INSN reads a register that INSN writes some
	 cycles later.  */
      break;

    default:
      gcc_unreachable ();
    }

  return cost;
}
/* The function returns true if INSN is microcoded.
   Return false otherwise.  */

static bool
is_microcoded_insn (rtx insn)
{
  if (!insn || !INSN_P (insn)
      || GET_CODE (PATTERN (insn)) == USE
      || GET_CODE (PATTERN (insn)) == CLOBBER)
    return false;

  if (rs6000_cpu_attr == CPU_CELL)
    return get_attr_cell_micro (insn) == CELL_MICRO_ALWAYS;

  if (rs6000_sched_groups)
    {
      enum attr_type type = get_attr_type (insn);
      if (type == TYPE_LOAD_EXT_U
	  || type == TYPE_LOAD_EXT_UX
	  || type == TYPE_LOAD_UX
	  || type == TYPE_STORE_UX
	  || type == TYPE_MFCR)
	return true;
    }

  return false;
}

/* The function returns true if INSN is cracked into 2 instructions
   by the processor (and therefore occupies 2 issue slots).  */

static bool
is_cracked_insn (rtx insn)
{
  if (!insn || !INSN_P (insn)
      || GET_CODE (PATTERN (insn)) == USE
      || GET_CODE (PATTERN (insn)) == CLOBBER)
    return false;

  if (rs6000_sched_groups)
    {
      enum attr_type type = get_attr_type (insn);
      if (type == TYPE_LOAD_U || type == TYPE_STORE_U
	  || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
	  || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
	  || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
	  || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
	  || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
	  || type == TYPE_IDIV || type == TYPE_LDIV
	  || type == TYPE_INSERT_WORD)
	return true;
    }

  return false;
}

/* The function returns true if INSN can be issued only from
   the branch slot.  */

static bool
is_branch_slot_insn (rtx insn)
{
  if (!insn || !INSN_P (insn)
      || GET_CODE (PATTERN (insn)) == USE
      || GET_CODE (PATTERN (insn)) == CLOBBER)
    return false;

  if (rs6000_sched_groups)
    {
      enum attr_type type = get_attr_type (insn);
      if (type == TYPE_BRANCH || type == TYPE_JMPREG)
	return true;
    }

  return false;
}

/* The function returns true if out_insn sets a value that is
   used in the address generation computation of in_insn.  */

static bool
set_to_load_agen (rtx out_insn, rtx in_insn)
{
  rtx out_set, in_set;

  /* For performance reasons, only handle the simple case where
     both loads are a single_set.  */
  out_set = single_set (out_insn);
  if (out_set)
    {
      in_set = single_set (in_insn);
      if (in_set)
	return reg_mentioned_p (SET_DEST (out_set), SET_SRC (in_set));
    }

  return false;
}
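
/* Illustrative example (editor's sketch; not in the original sources):
   given RTL of the form

       (set (reg 9) (plus (reg 9) (const_int 8)))   ; out_insn
       (set (reg 3) (mem (reg 9)))                  ; in_insn

   SET_DEST of out_insn (reg 9) is mentioned inside SET_SRC of in_insn
   (within the MEM address), so set_to_load_agen returns true and the
   POWER6 cost adjustment in rs6000_adjust_cost above treats the pair
   as an address-generation hazard.  */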
/* The function returns true if the target storage location of
   out_insn is adjacent to the target storage location of in_insn.  */
/* Return 1 if memory locations are adjacent.  */

static bool
adjacent_mem_locations (rtx insn1, rtx insn2)
{
  rtx a = get_store_dest (PATTERN (insn1));
  rtx b = get_store_dest (PATTERN (insn2));

  if ((GET_CODE (XEXP (a, 0)) == REG
       || (GET_CODE (XEXP (a, 0)) == PLUS
	   && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
      && (GET_CODE (XEXP (b, 0)) == REG
	  || (GET_CODE (XEXP (b, 0)) == PLUS
	      && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
    {
      HOST_WIDE_INT val0 = 0, val1 = 0, val_diff;
      rtx reg0, reg1;

      if (GET_CODE (XEXP (a, 0)) == PLUS)
	{
	  reg0 = XEXP (XEXP (a, 0), 0);
	  val0 = INTVAL (XEXP (XEXP (a, 0), 1));
	}
      else
	reg0 = XEXP (a, 0);

      if (GET_CODE (XEXP (b, 0)) == PLUS)
	{
	  reg1 = XEXP (XEXP (b, 0), 0);
	  val1 = INTVAL (XEXP (XEXP (b, 0), 1));
	}
      else
	reg1 = XEXP (b, 0);

      val_diff = val1 - val0;

      return ((REGNO (reg0) == REGNO (reg1))
	      && (val_diff == INTVAL (MEM_SIZE (a))
		  || val_diff == -INTVAL (MEM_SIZE (b))));
    }

  return false;
}
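
/* Illustrative example (editor's sketch; not in the original sources):
   two SImode stores such as

       (set (mem:SI (reg 9)) ...)                       ; insn1, offset 0
       (set (mem:SI (plus (reg 9) (const_int 4))) ...)  ; insn2, offset 4

   share a base register and differ by exactly MEM_SIZE (4 bytes), so
   val_diff == INTVAL (MEM_SIZE (a)) and the function returns true.  A
   pair at offsets 0 and 8 would not be considered adjacent.  */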
/* A C statement (sans semicolon) to update the integer scheduling
   priority INSN_PRIORITY (INSN).  Increase the priority to execute the
   INSN earlier, reduce the priority to execute INSN later.  Do not
   define this macro if you do not need to adjust the scheduling
   priorities of insns.  */

static int
rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
{
  /* On machines (like the 750) which have asymmetric integer units,
     where one integer unit can do multiply and divides and the other
     can't, reduce the priority of multiply/divide so it is scheduled
     before other integer operations.  */

  if (! INSN_P (insn))
    return priority;

  if (GET_CODE (PATTERN (insn)) == USE)
    return priority;

  switch (rs6000_cpu_attr) {
  case CPU_PPC750:
    switch (get_attr_type (insn))
      {
      default:
	break;

      case TYPE_IMUL:
      case TYPE_IDIV:
	fprintf (stderr, "priority was %#x (%d) before adjustment\n",
		 priority, priority);
	if (priority >= 0 && priority < 0x01000000)
	  priority >>= 3;
	break;
      }
  }

  if (insn_must_be_first_in_group (insn)
      && reload_completed
      && current_sched_info->sched_max_insns_priority
      && rs6000_sched_restricted_insns_priority)
    {
      /* Prioritize insns that can be dispatched only in the first
	 dispatch slot.  */
      if (rs6000_sched_restricted_insns_priority == 1)
	/* Attach highest priority to insn.  This means that in
	   haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
	   precede 'priority' (critical path) considerations.  */
	return current_sched_info->sched_max_insns_priority;
      else if (rs6000_sched_restricted_insns_priority == 2)
	/* Increase priority of insn by a minimal amount.  This means that in
	   haifa-sched.c:ready_sort(), only 'priority' (critical path)
	   considerations precede dispatch-slot restriction considerations.  */
	return (priority + 1);
    }

  if (rs6000_cpu == PROCESSOR_POWER6
      && ((load_store_pendulum == -2 && is_load_insn (insn))
	  || (load_store_pendulum == 2 && is_store_insn (insn))))
    /* Attach highest priority to insn if the scheduler has just issued two
       stores and this instruction is a load, or two loads and this instruction
       is a store.  Power6 wants loads and stores scheduled alternately
       when possible.  */
    return current_sched_info->sched_max_insns_priority;

  return priority;
}
/* Return true if the instruction is nonpipelined on the Cell.  */

static bool
is_nonpipeline_insn (rtx insn)
{
  enum attr_type type;
  if (!insn || !INSN_P (insn)
      || GET_CODE (PATTERN (insn)) == USE
      || GET_CODE (PATTERN (insn)) == CLOBBER)
    return false;

  type = get_attr_type (insn);
  if (type == TYPE_IMUL
      || type == TYPE_IMUL2
      || type == TYPE_IMUL3
      || type == TYPE_LMUL
      || type == TYPE_IDIV
      || type == TYPE_LDIV
      || type == TYPE_SDIV
      || type == TYPE_DDIV
      || type == TYPE_SSQRT
      || type == TYPE_DSQRT
      || type == TYPE_MFCR
      || type == TYPE_MFCRF
      || type == TYPE_MFJMPR)
    return true;
  return false;
}

/* Return how many instructions the machine can issue per cycle.  */

static int
rs6000_issue_rate (void)
{
  /* Use issue rate of 1 for first scheduling pass to decrease degradation.  */
  if (!reload_completed)
    return 1;

  switch (rs6000_cpu_attr) {
  case CPU_RIOS1:  /* ? */
  case CPU_PPC601: /* ? */
    return 3;
  default:
    return 1;
  }
}

/* Return how many instructions to look ahead for better insn
   scheduling.  */

static int
rs6000_use_sched_lookahead (void)
{
  if (rs6000_cpu_attr == CPU_PPC8540)
    return 4;
  if (rs6000_cpu_attr == CPU_CELL)
    return (reload_completed ? 8 : 0);
  return 0;
}

/* We are choosing insn from the ready queue.  Return nonzero if INSN can be chosen.  */

static int
rs6000_use_sched_lookahead_guard (rtx insn)
{
  if (rs6000_cpu_attr != CPU_CELL)
    return 1;

  if (insn == NULL_RTX || !INSN_P (insn))
    abort ();

  if (!reload_completed
      || is_nonpipeline_insn (insn)
      || is_microcoded_insn (insn))
    return 0;

  return 1;
}
/* Determine if PAT refers to memory.  */

static bool
is_mem_ref (rtx pat)
{
  const char *fmt;
  int i, j;
  bool ret = false;

  if (GET_CODE (pat) == MEM)
    return true;

  /* Recursively process the pattern.  */
  fmt = GET_RTX_FORMAT (GET_CODE (pat));

  for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
    {
      if (fmt[i] == 'e')
	ret |= is_mem_ref (XEXP (pat, i));
      else if (fmt[i] == 'E')
	for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
	  ret |= is_mem_ref (XVECEXP (pat, i, j));
    }

  return ret;
}

/* Determine if PAT is a PATTERN of a load insn.  */

static bool
is_load_insn1 (rtx pat)
{
  if (!pat || pat == NULL_RTX)
    return false;

  if (GET_CODE (pat) == SET)
    return is_mem_ref (SET_SRC (pat));

  if (GET_CODE (pat) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (pat, 0); i++)
	if (is_load_insn1 (XVECEXP (pat, 0, i)))
	  return true;
    }

  return false;
}

/* Determine if INSN loads from memory.  */

static bool
is_load_insn (rtx insn)
{
  if (!insn || !INSN_P (insn))
    return false;

  if (GET_CODE (insn) == CALL_INSN)
    return false;

  return is_load_insn1 (PATTERN (insn));
}

/* Determine if PAT is a PATTERN of a store insn.  */

static bool
is_store_insn1 (rtx pat)
{
  if (!pat || pat == NULL_RTX)
    return false;

  if (GET_CODE (pat) == SET)
    return is_mem_ref (SET_DEST (pat));

  if (GET_CODE (pat) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (pat, 0); i++)
	if (is_store_insn1 (XVECEXP (pat, 0, i)))
	  return true;
    }

  return false;
}

/* Determine if INSN stores to memory.  */

static bool
is_store_insn (rtx insn)
{
  if (!insn || !INSN_P (insn))
    return false;

  return is_store_insn1 (PATTERN (insn));
}

/* Return the dest of a store insn.  */

static rtx
get_store_dest (rtx pat)
{
  gcc_assert (is_store_insn1 (pat));

  if (GET_CODE (pat) == SET)
    return SET_DEST (pat);
  else if (GET_CODE (pat) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (pat, 0); i++)
	{
	  rtx inner_pat = XVECEXP (pat, 0, i);
	  if (GET_CODE (inner_pat) == SET
	      && is_mem_ref (SET_DEST (inner_pat)))
	    return SET_DEST (inner_pat);
	}
    }
  /* We shouldn't get here, because we should have either a simple
     store insn or a store with update which are covered above.  */
  gcc_unreachable ();
}
/* Returns whether the dependence between INSN and NEXT is considered
   costly by the given target.  */

static bool
rs6000_is_costly_dependence (dep_t dep, int cost, int distance)
{
  rtx insn;
  rtx next;

  /* If the flag is not enabled - no dependence is considered costly;
     allow all dependent insns in the same group.
     This is the most aggressive option.  */
  if (rs6000_sched_costly_dep == no_dep_costly)
    return false;

  /* If the flag is set to 1 - a dependence is always considered costly;
     do not allow dependent instructions in the same group.
     This is the most conservative option.  */
  if (rs6000_sched_costly_dep == all_deps_costly)
    return true;

  insn = DEP_PRO (dep);
  next = DEP_CON (dep);

  if (rs6000_sched_costly_dep == store_to_load_dep_costly
      && is_load_insn (next)
      && is_store_insn (insn))
    /* Prevent load after store in the same group.  */
    return true;

  if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
      && is_load_insn (next)
      && is_store_insn (insn)
      && DEP_KIND (dep) == REG_DEP_TRUE)
    /* Prevent load after store in the same group if it is a true
       dependence.  */
    return true;

  /* The flag is set to X; dependences with latency >= X are considered costly,
     and will not be scheduled in the same group.  */
  if (rs6000_sched_costly_dep <= max_dep_latency
      && ((cost - distance) >= (int) rs6000_sched_costly_dep))
    return true;

  return false;
}
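
/* Illustrative example (editor's sketch; not in the original sources):
   for a store feeding a later load of the same location with cost 4 and
   distance 0, the policies above behave as follows: no_dep_costly keeps
   the pair in one group; all_deps_costly always separates them;
   store_to_load_dep_costly separates any store->load pair;
   true_store_to_load_dep_costly separates only true dependences; and a
   numeric setting X separates once cost - distance >= X (here, any
   X <= 4).  These values come from the -msched-costly-dep= option
   (assumed spelling; see the rs6000 option definitions).  */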
/* Return the next insn after INSN that is found before TAIL is reached,
   skipping any "non-active" insns - insns that will not actually occupy
   an issue slot.  Return NULL_RTX if such an insn is not found.  */

static rtx
get_next_active_insn (rtx insn, rtx tail)
{
  if (insn == NULL_RTX || insn == tail)
    return NULL_RTX;

  while (1)
    {
      insn = NEXT_INSN (insn);
      if (insn == NULL_RTX || insn == tail)
	return NULL_RTX;

      if (CALL_P (insn)
	  || JUMP_P (insn)
	  || (NONJUMP_INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) != USE
	      && GET_CODE (PATTERN (insn)) != CLOBBER
	      && INSN_CODE (insn) != CODE_FOR_stack_tie))
	break;
    }
  return insn;
}
/* We are about to begin issuing insns for this clock cycle.  */

static int
rs6000_sched_reorder (FILE *dump ATTRIBUTE_UNUSED, int sched_verbose,
		      rtx *ready ATTRIBUTE_UNUSED,
		      int *pn_ready ATTRIBUTE_UNUSED,
		      int clock_var ATTRIBUTE_UNUSED)
{
  int n_ready = *pn_ready;

  if (sched_verbose)
    fprintf (dump, "// rs6000_sched_reorder :\n");

  /* Reorder the ready list, if the second to last ready insn
     is a nonpipelined insn.  */
  if (rs6000_cpu_attr == CPU_CELL && n_ready > 1)
    {
      if (is_nonpipeline_insn (ready[n_ready - 1])
	  && (recog_memoized (ready[n_ready - 2]) > 0))
	/* Simply swap first two insns.  */
	{
	  rtx tmp = ready[n_ready - 1];
	  ready[n_ready - 1] = ready[n_ready - 2];
	  ready[n_ready - 2] = tmp;
	}
    }

  if (rs6000_cpu == PROCESSOR_POWER6)
    load_store_pendulum = 0;

  return rs6000_issue_rate ();
}
/* Like rs6000_sched_reorder, but called after issuing each insn.  */

static int
rs6000_sched_reorder2 (FILE *dump, int sched_verbose, rtx *ready,
		       int *pn_ready, int clock_var ATTRIBUTE_UNUSED)
{
  if (sched_verbose)
    fprintf (dump, "// rs6000_sched_reorder2 :\n");

  /* For Power6, we need to handle some special cases to try and keep the
     store queue from overflowing and triggering expensive flushes.

     This code monitors how load and store instructions are being issued
     and skews the ready list one way or the other to increase the likelihood
     that a desired instruction is issued at the proper time.

     A couple of things are done.  First, we maintain a "load_store_pendulum"
     to track the current state of load/store issue.

       - If the pendulum is at zero, then no loads or stores have been
	 issued in the current cycle so we do nothing.

       - If the pendulum is 1, then a single load has been issued in this
	 cycle and we attempt to locate another load in the ready list to
	 issue with it.

       - If the pendulum is -2, then two stores have already been
	 issued in this cycle, so we increase the priority of the first load
	 in the ready list to increase its likelihood of being chosen first
	 in the next cycle.

       - If the pendulum is -1, then a single store has been issued in this
	 cycle and we attempt to locate another store in the ready list to
	 issue with it, preferring a store to an adjacent memory location to
	 facilitate store pairing in the store queue.

       - If the pendulum is 2, then two loads have already been
	 issued in this cycle, so we increase the priority of the first store
	 in the ready list to increase its likelihood of being chosen first
	 in the next cycle.

       - If the pendulum < -2 or > 2, then do nothing.

     Note: This code covers the most common scenarios.  There exist non
	   load/store instructions which make use of the LSU and which
	   would need to be accounted for to strictly model the behavior
	   of the machine.  Those instructions are currently unaccounted
	   for to help minimize compile time overhead of this code.  */
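
  /* Illustrative trace (editor's sketch; not in the original sources):
     starting a cycle at 0, issuing load, load drives the pendulum
     0 -> 1 -> 2; at 2 the first store on the ready list gets a priority
     boost, so the next cycle is more likely to begin with a store.
     Symmetrically, store, store gives 0 -> -1 -> -2, after which the
     first ready load is boosted.  */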
  if (rs6000_cpu == PROCESSOR_POWER6 && last_scheduled_insn)
    {
      int pos;
      int i;
      rtx tmp;

      if (is_store_insn (last_scheduled_insn))
	/* Issuing a store, swing the load_store_pendulum to the left.  */
	load_store_pendulum--;
      else if (is_load_insn (last_scheduled_insn))
	/* Issuing a load, swing the load_store_pendulum to the right.  */
	load_store_pendulum++;
      else
	return cached_can_issue_more;

      /* If the pendulum is balanced, or there is only one instruction on
	 the ready list, then all is well, so return.  */
      if ((load_store_pendulum == 0) || (*pn_ready <= 1))
	return cached_can_issue_more;

      if (load_store_pendulum == 1)
	{
	  /* A load has been issued in this cycle.  Scan the ready list
	     for another load to issue with it.  */
	  pos = *pn_ready - 1;

	  while (pos >= 0)
	    {
	      if (is_load_insn (ready[pos]))
		{
		  /* Found a load.  Move it to the head of the ready list,
		     and adjust its priority so that it is more likely to
		     stay there.  */
		  tmp = ready[pos];
		  for (i = pos; i < *pn_ready - 1; i++)
		    ready[i] = ready[i + 1];
		  ready[*pn_ready - 1] = tmp;
		  if INSN_PRIORITY_KNOWN (tmp)
		    INSN_PRIORITY (tmp)++;
		  break;
		}
	      pos--;
	    }
	}
      else if (load_store_pendulum == -2)
	{
	  /* Two stores have been issued in this cycle.  Increase the
	     priority of the first load in the ready list to favor it for
	     issuing in the next cycle.  */
	  pos = *pn_ready - 1;

	  while (pos >= 0)
	    {
	      if (is_load_insn (ready[pos])
		  && INSN_PRIORITY_KNOWN (ready[pos]))
		{
		  INSN_PRIORITY (ready[pos])++;

		  /* Adjust the pendulum to account for the fact that a load
		     was found and increased in priority.  This is to prevent
		     increasing the priority of multiple loads.  */
		  load_store_pendulum--;
		  break;
		}
	      pos--;
	    }
	}
      else if (load_store_pendulum == -1)
	{
	  /* A store has been issued in this cycle.  Scan the ready list for
	     another store to issue with it, preferring a store to an adjacent
	     memory location.  */
	  int first_store_pos = -1;

	  pos = *pn_ready - 1;

	  while (pos >= 0)
	    {
	      if (is_store_insn (ready[pos]))
		{
		  /* Maintain the index of the first store found on the
		     list.  */
		  if (first_store_pos == -1)
		    first_store_pos = pos;

		  if (is_store_insn (last_scheduled_insn)
		      && adjacent_mem_locations (last_scheduled_insn, ready[pos]))
		    {
		      /* Found an adjacent store.  Move it to the head of the
			 ready list, and adjust its priority so that it is
			 more likely to stay there.  */
		      tmp = ready[pos];
		      for (i = pos; i < *pn_ready - 1; i++)
			ready[i] = ready[i + 1];
		      ready[*pn_ready - 1] = tmp;
		      if INSN_PRIORITY_KNOWN (tmp)
			INSN_PRIORITY (tmp)++;
		      first_store_pos = -1;
		      break;
		    }
		}
	      pos--;
	    }

	  if (first_store_pos >= 0)
	    {
	      /* An adjacent store wasn't found, but a non-adjacent store was,
		 so move the non-adjacent store to the front of the ready
		 list, and adjust its priority so that it is more likely to
		 stay there.  */
	      tmp = ready[first_store_pos];
	      for (i = first_store_pos; i < *pn_ready - 1; i++)
		ready[i] = ready[i + 1];
	      ready[*pn_ready - 1] = tmp;
	      if INSN_PRIORITY_KNOWN (tmp)
		INSN_PRIORITY (tmp)++;
	    }
	}
      else if (load_store_pendulum == 2)
	{
	  /* Two loads have been issued in this cycle.  Increase the priority
	     of the first store in the ready list to favor it for issuing in
	     the next cycle.  */
	  pos = *pn_ready - 1;

	  while (pos >= 0)
	    {
	      if (is_store_insn (ready[pos])
		  && INSN_PRIORITY_KNOWN (ready[pos]))
		{
		  INSN_PRIORITY (ready[pos])++;

		  /* Adjust the pendulum to account for the fact that a store
		     was found and increased in priority.  This is to prevent
		     increasing the priority of multiple stores.  */
		  load_store_pendulum++;
		  break;
		}
	      pos--;
	    }
	}
    }

  return cached_can_issue_more;
}
/* Return whether the presence of INSN causes a dispatch group termination
   of group WHICH_GROUP.

   If WHICH_GROUP == current_group, this function will return true if INSN
   causes the termination of the current group (i.e, the dispatch group to
   which INSN belongs).  This means that INSN will be the last insn in the
   group it belongs to.

   If WHICH_GROUP == previous_group, this function will return true if INSN
   causes the termination of the previous group (i.e, the dispatch group that
   precedes the group to which INSN belongs).  This means that INSN will be
   the first insn in the group it belongs to.  */

static bool
insn_terminates_group_p (rtx insn, enum group_termination which_group)
{
  bool first, last;

  if (! insn)
    return false;

  first = insn_must_be_first_in_group (insn);
  last = insn_must_be_last_in_group (insn);

  if (first && last)
    return true;

  if (which_group == current_group)
    return last;
  else if (which_group == previous_group)
    return first;

  return false;
}

static bool
insn_must_be_first_in_group (rtx insn)
{
  enum attr_type type;

  if (!insn
      || insn == NULL_RTX
      || GET_CODE (insn) == NOTE
      || GET_CODE (PATTERN (insn)) == USE
      || GET_CODE (PATTERN (insn)) == CLOBBER)
    return false;

  switch (rs6000_cpu)
    {
    case PROCESSOR_POWER5:
      if (is_cracked_insn (insn))
	return true;
    case PROCESSOR_POWER4:
      if (is_microcoded_insn (insn))
	return true;

      if (!rs6000_sched_groups)
	return false;

      type = get_attr_type (insn);

      switch (type)
	{
	case TYPE_DELAYED_CR:
	case TYPE_CR_LOGICAL:
	  return true;
	default:
	  break;
	}
      break;
    case PROCESSOR_POWER6:
      type = get_attr_type (insn);

      switch (type)
	{
	case TYPE_INSERT_DWORD:
	case TYPE_VAR_SHIFT_ROTATE:
	case TYPE_INSERT_WORD:
	case TYPE_DELAYED_COMPARE:
	case TYPE_IMUL_COMPARE:
	case TYPE_LMUL_COMPARE:
	case TYPE_FPCOMPARE:
	case TYPE_LOAD_EXT_UX:
	case TYPE_STORE_UX:
	case TYPE_FPLOAD_U:
	case TYPE_FPLOAD_UX:
	case TYPE_FPSTORE_U:
	case TYPE_FPSTORE_UX:
	  return true;
	default:
	  break;
	}
      break;
    default:
      break;
    }

  return false;
}

static bool
insn_must_be_last_in_group (rtx insn)
{
  enum attr_type type;

  if (!insn
      || insn == NULL_RTX
      || GET_CODE (insn) == NOTE
      || GET_CODE (PATTERN (insn)) == USE
      || GET_CODE (PATTERN (insn)) == CLOBBER)
    return false;

  switch (rs6000_cpu) {
  case PROCESSOR_POWER4:
  case PROCESSOR_POWER5:
    if (is_microcoded_insn (insn))
      return true;

    if (is_branch_slot_insn (insn))
      return true;

    break;
  case PROCESSOR_POWER6:
    type = get_attr_type (insn);

    switch (type)
      {
      case TYPE_VAR_SHIFT_ROTATE:
      case TYPE_DELAYED_COMPARE:
      case TYPE_IMUL_COMPARE:
      case TYPE_LMUL_COMPARE:
      case TYPE_FPCOMPARE:
	return true;
      default:
	break;
      }
    break;
  default:
    break;
  }

  return false;
}
/* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
   dispatch group) from the insns in GROUP_INSNS.  Return false otherwise.  */

static bool
is_costly_group (rtx *group_insns, rtx next_insn)
{
  int i;
  int issue_rate = rs6000_issue_rate ();

  for (i = 0; i < issue_rate; i++)
    {
      dep_link_t link;
      rtx insn = group_insns[i];

      if (!insn)
	continue;

      FOR_EACH_DEP_LINK (link, INSN_FORW_DEPS (insn))
	{
	  dep_t dep = DEP_LINK_DEP (link);
	  rtx next = DEP_CON (dep);

	  if (next == next_insn
	      && rs6000_is_costly_dependence (dep, dep_cost (dep), 0))
	    return true;
	}
    }

  return false;
}

/* Utility of the function redefine_groups.
   Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
   in the same dispatch group.  If so, insert nops before NEXT_INSN, in order
   to keep it "far" (in a separate group) from GROUP_INSNS, following
   one of the following schemes, depending on the value of the flag
   -minsert-sched-nops = X:
   (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
       in order to force NEXT_INSN into a separate group.
   (2) X < sched_finish_regroup_exact: insert exactly X nops.
   GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
   insertion (has a group just ended, how many vacant issue slots remain in the
   last group, and how many dispatch groups were encountered so far).  */

static int
force_new_group (int sched_verbose, FILE *dump, rtx *group_insns,
		 rtx next_insn, bool *group_end, int can_issue_more,
		 int *group_count)
{
  rtx nop;
  bool force;
  int issue_rate = rs6000_issue_rate ();
  bool end = *group_end;
  int i;

  if (next_insn == NULL_RTX)
    return can_issue_more;

  if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
    return can_issue_more;

  force = is_costly_group (group_insns, next_insn);
  if (!force)
    return can_issue_more;

  if (sched_verbose > 6)
    fprintf (dump, "force: group count = %d, can_issue_more = %d\n",
	     *group_count, can_issue_more);

  if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
    {
      if (*group_end)
	can_issue_more = 0;

      /* Since only a branch can be issued in the last issue_slot, it is
	 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
	 a branch.  If next_insn is a branch, we insert 'can_issue_more' nops;
	 in this case the last nop will start a new group and the branch
	 will be forced to the new group.  */
      if (can_issue_more && !is_branch_slot_insn (next_insn))
	can_issue_more--;

      while (can_issue_more > 0)
	{
	  nop = gen_nop ();
	  emit_insn_before (nop, next_insn);
	  can_issue_more--;
	}

      *group_end = true;
      return 0;
    }

  if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
    {
      int n_nops = rs6000_sched_insert_nops;

      /* Nops can't be issued from the branch slot, so the effective
	 issue_rate for nops is 'issue_rate - 1'.  */
      if (can_issue_more == 0)
	can_issue_more = issue_rate;
      can_issue_more--;
      if (can_issue_more == 0)
	{
	  can_issue_more = issue_rate - 1;
	  (*group_count)++;
	  end = true;
	  for (i = 0; i < issue_rate; i++)
	    {
	      group_insns[i] = 0;
	    }
	}

      while (n_nops > 0)
	{
	  nop = gen_nop ();
	  emit_insn_before (nop, next_insn);
	  if (can_issue_more == issue_rate - 1) /* new group begins */
	    end = false;
	  can_issue_more--;
	  if (can_issue_more == 0)
	    {
	      can_issue_more = issue_rate - 1;
	      (*group_count)++;
	      end = true;
	      for (i = 0; i < issue_rate; i++)
		{
		  group_insns[i] = 0;
		}
	    }
	  n_nops--;
	}

      /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1').  */
      can_issue_more++;

      /* Is next_insn going to start a new group?  */
      *group_end
	= (end
	   || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
	   || (can_issue_more <= 2 && is_cracked_insn (next_insn))
	   || (can_issue_more < issue_rate &&
	       insn_terminates_group_p (next_insn, previous_group)));
      if (*group_end && end)
	(*group_count)--;

      if (sched_verbose > 6)
	fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
		 *group_count, can_issue_more);

      return can_issue_more;
    }

  return can_issue_more;
}
/* This function tries to synch the dispatch groups that the compiler "sees"
   with the dispatch groups that the processor dispatcher is expected to
   form in practice.  It tries to achieve this synchronization by forcing the
   estimated processor grouping on the compiler (as opposed to the function
   'pad_groups' which tries to force the scheduler's grouping on the processor).

   The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
   examines the (estimated) dispatch groups that will be formed by the processor
   dispatcher.  It marks these group boundaries to reflect the estimated
   processor grouping, overriding the grouping that the scheduler had marked.
   Depending on the value of the flag '-minsert-sched-nops' this function can
   force certain insns into separate groups or force a certain distance between
   them by inserting nops, for example, if there exists a "costly dependence"
   between the insns.

   The function estimates the group boundaries that the processor will form as
   follows:  It keeps track of how many vacant issue slots are available after
   each insn.  A subsequent insn will start a new group if one of the following
   applies:
   - no more vacant issue slots remain in the current dispatch group.
   - only the last issue slot, which is the branch slot, is vacant, but the next
     insn is not a branch.
   - only the last 2 or less issue slots, including the branch slot, are vacant,
     which means that a cracked insn (which occupies two issue slots) can't be
     issued in this group.
   - less than 'issue_rate' slots are vacant, and the next insn always needs to
     start a new group.  */

static int
redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
{
  rtx insn, next_insn;
  int issue_rate;
  int can_issue_more;
  int slot, i;
  bool group_end;
  int group_count = 0;
  rtx *group_insns;

  /* Initialize.  */
  issue_rate = rs6000_issue_rate ();
  group_insns = alloca (issue_rate * sizeof (rtx));
  for (i = 0; i < issue_rate; i++)
    {
      group_insns[i] = 0;
    }
  can_issue_more = issue_rate;
  insn = get_next_active_insn (prev_head_insn, tail);
  group_end = false;

  while (insn != NULL_RTX)
    {
      slot = (issue_rate - can_issue_more);
      group_insns[slot] = insn;
      can_issue_more =
	rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
      if (insn_terminates_group_p (insn, current_group))
	can_issue_more = 0;

      next_insn = get_next_active_insn (insn, tail);
      if (next_insn == NULL_RTX)
	return group_count + 1;

      /* Is next_insn going to start a new group?  */
      group_end
	= (can_issue_more == 0
	   || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
	   || (can_issue_more <= 2 && is_cracked_insn (next_insn))
	   || (can_issue_more < issue_rate &&
	       insn_terminates_group_p (next_insn, previous_group)));

      can_issue_more = force_new_group (sched_verbose, dump, group_insns,
					next_insn, &group_end, can_issue_more,
					&group_count);

      if (group_end)
	{
	  group_count++;
	  can_issue_more = 0;
	  for (i = 0; i < issue_rate; i++)
	    {
	      group_insns[i] = 0;
	    }
	}

      if (GET_MODE (next_insn) == TImode && can_issue_more)
	PUT_MODE (next_insn, VOIDmode);
      else if (!can_issue_more && GET_MODE (next_insn) != TImode)
	PUT_MODE (next_insn, TImode);

      insn = next_insn;
      if (can_issue_more == 0)
	can_issue_more = issue_rate;
    }

  return group_count;
}
/* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
   dispatch group boundaries that the scheduler had marked.  Pad with nops
   any dispatch groups which have vacant issue slots, in order to force the
   scheduler's grouping on the processor dispatcher.  The function
   returns the number of dispatch groups found.  */

static int
pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
{
  rtx insn, next_insn;
  rtx nop;
  int issue_rate;
  int can_issue_more;
  int group_end;
  int group_count = 0;

  /* Initialize issue_rate.  */
  issue_rate = rs6000_issue_rate ();
  can_issue_more = issue_rate;

  insn = get_next_active_insn (prev_head_insn, tail);
  next_insn = get_next_active_insn (insn, tail);

  while (insn != NULL_RTX)
    {
      can_issue_more =
	rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);

      group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);

      if (next_insn == NULL_RTX)
	break;

      if (group_end)
	{
	  /* If the scheduler had marked group termination at this location
	     (between insn and next_insn), and neither insn nor next_insn will
	     force group termination, pad the group with nops to force group
	     termination.  */
	  if (can_issue_more
	      && (rs6000_sched_insert_nops == sched_finish_pad_groups)
	      && !insn_terminates_group_p (insn, current_group)
	      && !insn_terminates_group_p (next_insn, previous_group))
	    {
	      if (!is_branch_slot_insn (next_insn))
		can_issue_more--;

	      while (can_issue_more)
		{
		  nop = gen_nop ();
		  emit_insn_before (nop, next_insn);
		  can_issue_more--;
		}
	    }

	  can_issue_more = issue_rate;
	  group_count++;
	}

      insn = next_insn;
      next_insn = get_next_active_insn (insn, tail);
    }

  return group_count;
}

/* We're beginning a new block.  Initialize data structures as necessary.  */

static void
rs6000_sched_init (FILE *dump ATTRIBUTE_UNUSED,
		   int sched_verbose ATTRIBUTE_UNUSED,
		   int max_ready ATTRIBUTE_UNUSED)
{
  last_scheduled_insn = NULL_RTX;
  load_store_pendulum = 0;
}
/* The following function is called at the end of scheduling BB.
   After reload, it inserts nops at insn group bundling.  */

static void
rs6000_sched_finish (FILE *dump, int sched_verbose)
{
  int n_groups;

  if (sched_verbose)
    fprintf (dump, "=== Finishing schedule.\n");

  if (reload_completed && rs6000_sched_groups)
    {
      if (rs6000_sched_insert_nops == sched_finish_none)
	return;

      if (rs6000_sched_insert_nops == sched_finish_pad_groups)
	n_groups = pad_groups (dump, sched_verbose,
			       current_sched_info->prev_head,
			       current_sched_info->next_tail);
      else
	n_groups = redefine_groups (dump, sched_verbose,
				    current_sched_info->prev_head,
				    current_sched_info->next_tail);

      if (sched_verbose >= 6)
	{
	  fprintf (dump, "ngroups = %d\n", n_groups);
	  print_rtl (dump, current_sched_info->prev_head);
	  fprintf (dump, "Done finish_sched\n");
	}
    }
}

/* Length in units of the trampoline for entering a nested function.  */

int
rs6000_trampoline_size (void)
{
  int ret = 0;

  switch (DEFAULT_ABI)
    {
    default:
      gcc_unreachable ();

    case ABI_AIX:
      ret = (TARGET_32BIT) ? 12 : 24;
      break;

    case ABI_DARWIN:
    case ABI_V4:
      ret = (TARGET_32BIT) ? 40 : 48;
      break;
    }

  return ret;
}
/* Emit RTL insns to initialize the variable parts of a trampoline.
   FNADDR is an RTX for the address of the function's pure code.
   CXT is an RTX for the static chain value for the function.  */

void
rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
{
  int regsize = (TARGET_32BIT) ? 4 : 8;
  rtx ctx_reg = force_reg (Pmode, cxt);

  switch (DEFAULT_ABI)
    {
    default:
      gcc_unreachable ();

/* Macros to shorten the code expansions below.  */
#define MEM_DEREF(addr) gen_rtx_MEM (Pmode, memory_address (Pmode, addr))
#define MEM_PLUS(addr,offset) \
  gen_rtx_MEM (Pmode, memory_address (Pmode, plus_constant (addr, offset)))
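
/* Illustrative example (editor's sketch; not in the original sources):
   MEM_PLUS (addr, 8) expands to
   gen_rtx_MEM (Pmode, memory_address (Pmode, plus_constant (addr, 8))),
   i.e. a legitimized Pmode memory reference 8 bytes past ADDR; the
   descriptor stores below use it to address the descriptor words.  */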
    /* Under AIX, just build the 3 word function descriptor.  */
    case ABI_AIX:
      {
	rtx fn_reg = gen_reg_rtx (Pmode);
	rtx toc_reg = gen_reg_rtx (Pmode);
	emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
	emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
	emit_move_insn (MEM_DEREF (addr), fn_reg);
	emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
	emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
      }
      break;

    /* Under V.4/eabi/darwin, __trampoline_setup does the real work.  */
    case ABI_DARWIN:
    case ABI_V4:
      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__trampoline_setup"),
			 FALSE, VOIDmode, 4,
			 addr, Pmode,
			 GEN_INT (rs6000_trampoline_size ()), SImode,
			 fnaddr, Pmode,
			 ctx_reg, Pmode);
      break;
    }

  return;
}

#undef MEM_DEREF
#undef MEM_PLUS
/* Table of valid machine attributes.  */

const struct attribute_spec rs6000_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "altivec",    1, 1, false, true,  false, rs6000_handle_altivec_attribute },
  { "longcall",   0, 0, false, true,  true,  rs6000_handle_longcall_attribute },
  { "shortcall",  0, 0, false, true,  true,  rs6000_handle_longcall_attribute },
  { "ms_struct",  0, 0, false, false, false, rs6000_handle_struct_attribute },
  { "gcc_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
#ifdef SUBTARGET_ATTRIBUTE_TABLE
  SUBTARGET_ATTRIBUTE_TABLE,
#endif
  { NULL,         0, 0, false, false, false, NULL }
};
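
/* Usage sketch (editor's note; not in the original sources): the table
   above is what makes user declarations such as

       void far_helper (void) __attribute__ ((longcall));
       struct layout_demo { int a : 3; int b : 5; }
	 __attribute__ ((ms_struct));

   acceptable to the front end; each entry routes the named attribute to
   the corresponding rs6000_handle_*_attribute checker in this file.  */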
/* Handle the "altivec" attribute.  The attribute may have
   arguments as follows:

     __attribute__((altivec(vector__)))
     __attribute__((altivec(pixel__)))	(always followed by 'unsigned short')
     __attribute__((altivec(bool__)))	(always followed by 'unsigned')

   and may appear more than once (e.g., 'vector bool char') in a
   given declaration.  */
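
/* Usage sketch (editor's note; not in the original sources): user code
   normally reaches these forms through the AltiVec 'vector', 'pixel'
   and 'bool' keywords rather than spelling the attribute directly;
   e.g. 'vector unsigned int v;' corresponds to an 'unsigned int'
   declaration carrying __attribute__((altivec(vector__))), which the
   handler below rewrites into unsigned_V4SI_type_node.  */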
static tree
rs6000_handle_altivec_attribute (tree *node,
				 tree name ATTRIBUTE_UNUSED,
				 tree args,
				 int flags ATTRIBUTE_UNUSED,
				 bool *no_add_attrs)
{
  tree type = *node, result = NULL_TREE;
  enum machine_mode mode;
  int unsigned_p;
  char altivec_type
    = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
	&& TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
       ? *IDENTIFIER_POINTER (TREE_VALUE (args))
       : '?');

  while (POINTER_TYPE_P (type)
	 || TREE_CODE (type) == FUNCTION_TYPE
	 || TREE_CODE (type) == METHOD_TYPE
	 || TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);

  mode = TYPE_MODE (type);

  /* Check for invalid AltiVec type qualifiers.  */
  if (type == long_unsigned_type_node || type == long_integer_type_node)
    {
      if (TARGET_64BIT)
	error ("use of %<long%> in AltiVec types is invalid for 64-bit code");
      else if (rs6000_warn_altivec_long)
	warning (0, "use of %<long%> in AltiVec types is deprecated; use %<int%>");
    }
  else if (type == long_long_unsigned_type_node
	   || type == long_long_integer_type_node)
    error ("use of %<long long%> in AltiVec types is invalid");
  else if (type == double_type_node)
    error ("use of %<double%> in AltiVec types is invalid");
  else if (type == long_double_type_node)
    error ("use of %<long double%> in AltiVec types is invalid");
  else if (type == boolean_type_node)
    error ("use of boolean types in AltiVec types is invalid");
  else if (TREE_CODE (type) == COMPLEX_TYPE)
    error ("use of %<complex%> in AltiVec types is invalid");
  else if (DECIMAL_FLOAT_MODE_P (mode))
    error ("use of decimal floating point types in AltiVec types is invalid");

  switch (altivec_type)
    {
    case 'v':
      unsigned_p = TYPE_UNSIGNED (type);
      switch (mode)
	{
	case SImode:
	  result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
	  break;
	case HImode:
	  result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
	  break;
	case QImode:
	  result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
	  break;
	case SFmode: result = V4SF_type_node; break;
	  /* If the user says 'vector int bool', we may be handed the 'bool'
	     attribute _before_ the 'vector' attribute, and so select the
	     proper type in the 'b' case below.  */
	case V4SImode: case V8HImode: case V16QImode: case V4SFmode:
	  result = type;
	default: break;
	}
      break;
    case 'b':
      switch (mode)
	{
	case SImode: case V4SImode: result = bool_V4SI_type_node; break;
	case HImode: case V8HImode: result = bool_V8HI_type_node; break;
	case QImode: case V16QImode: result = bool_V16QI_type_node;
	default: break;
	}
      break;
    case 'p':
      switch (mode)
	{
	case V8HImode: result = pixel_V8HI_type_node;
	default: break;
	}
    default: break;
    }

  if (result && result != type && TYPE_READONLY (type))
    result = build_qualified_type (result, TYPE_QUAL_CONST);

  *no_add_attrs = true;  /* No need to hang on to the attribute.  */

  if (result)
    *node = reconstruct_complex_type (*node, result);

  return NULL_TREE;
}
/* AltiVec defines four built-in scalar types that serve as vector
   elements; we must teach the compiler how to mangle them.  */

static const char *
rs6000_mangle_fundamental_type (tree type)
{
  if (type == bool_char_type_node) return "U6__boolc";
  if (type == bool_short_type_node) return "U6__bools";
  if (type == pixel_type_node) return "u7__pixel";
  if (type == bool_int_type_node) return "U6__booli";

  /* Mangle IBM extended float long double as `g' (__float128) on
     powerpc*-linux where long-double-64 previously was the default.  */
  if (TYPE_MAIN_VARIANT (type) == long_double_type_node
      && TARGET_LONG_DOUBLE_128
      && !TARGET_IEEEQUAD)
    return "g";

  /* For all other types, use normal C++ mangling.  */
  return NULL;
}
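
/* Illustrative note (editor's sketch; not in the original sources): in
   Itanium C++ mangling terms, "U6__boolc" is the vendor qualifier
   "__bool" (length 6) applied to 'c' (char), while "u7__pixel" is the
   vendor builtin type "__pixel" (length 7).  A 'vector bool char'
   parameter therefore embeds "U6__boolc" in the mangled symbol.  */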
/* Handle a "longcall" or "shortcall" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
rs6000_handle_longcall_attribute (tree *node, tree name,
				  tree args ATTRIBUTE_UNUSED,
				  int flags ATTRIBUTE_UNUSED,
				  bool *no_add_attrs)
{
  if (TREE_CODE (*node) != FUNCTION_TYPE
      && TREE_CODE (*node) != FIELD_DECL
      && TREE_CODE (*node) != TYPE_DECL)
    {
      warning (OPT_Wattributes, "%qs attribute only applies to functions",
	       IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}

/* Set longcall attributes on all functions declared when
   rs6000_default_long_calls is true.  */

static void
rs6000_set_default_type_attributes (tree type)
{
  if (rs6000_default_long_calls
      && (TREE_CODE (type) == FUNCTION_TYPE
	  || TREE_CODE (type) == METHOD_TYPE))
    TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
					NULL_TREE,
					TYPE_ATTRIBUTES (type));

#if TARGET_MACHO
  darwin_set_default_type_attributes (type);
#endif
}
/* Return a reference suitable for calling a function with the
   longcall attribute.  */

rtx
rs6000_longcall_ref (rtx call_ref)
{
  const char *call_name;
  tree node;

  if (GET_CODE (call_ref) != SYMBOL_REF)
    return call_ref;

  /* System V adds '.' to the internal name, so skip them.  */
  call_name = XSTR (call_ref, 0);
  if (*call_name == '.')
    {
      while (*call_name == '.')
	call_name++;

      node = get_identifier (call_name);
      call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
    }

  return force_reg (Pmode, call_ref);
}

#ifndef TARGET_USE_MS_BITFIELD_LAYOUT
#define TARGET_USE_MS_BITFIELD_LAYOUT 0
#endif

/* Handle a "ms_struct" or "gcc_struct" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
rs6000_handle_struct_attribute (tree *node, tree name,
				tree args ATTRIBUTE_UNUSED,
				int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
{
  tree *type = NULL;
  if (DECL_P (*node))
    {
      if (TREE_CODE (*node) == TYPE_DECL)
	type = &TREE_TYPE (*node);
    }
  else
    type = node;

  if (!(type && (TREE_CODE (*type) == RECORD_TYPE
		 || TREE_CODE (*type) == UNION_TYPE)))
    {
      warning (OPT_Wattributes, "%qs attribute ignored",
	       IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  else if ((is_attribute_p ("ms_struct", name)
	    && lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (*type)))
	   || ((is_attribute_p ("gcc_struct", name)
		&& lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (*type)))))
    {
      warning (OPT_Wattributes, "%qs incompatible attribute ignored",
	       IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}

static bool
rs6000_ms_bitfield_layout_p (tree record_type)
{
  return (TARGET_USE_MS_BITFIELD_LAYOUT &&
	  !lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (record_type)))
    || lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (record_type));
}
#ifdef USING_ELFOS_H

/* A get_unnamed_section callback, used for switching to toc_section.  */

static void
rs6000_elf_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
{
  if (DEFAULT_ABI == ABI_AIX
      && TARGET_MINIMAL_TOC
      && !TARGET_RELOCATABLE)
    {
      if (!toc_initialized)
	{
	  toc_initialized = 1;
	  fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
	  (*targetm.asm_out.internal_label) (asm_out_file, "LCTOC", 0);
	  fprintf (asm_out_file, "\t.tc ");
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1[TC],");
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
	  fprintf (asm_out_file, "\n");

	  fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
	  fprintf (asm_out_file, " = .+32768\n");
	}
      else
	fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
    }
  else if (DEFAULT_ABI == ABI_AIX && !TARGET_RELOCATABLE)
    fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
  else
    {
      fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
      if (!toc_initialized)
	{
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
	  fprintf (asm_out_file, " = .+32768\n");
	  toc_initialized = 1;
	}
    }
}

/* Implement TARGET_ASM_INIT_SECTIONS.  */

static void
rs6000_elf_asm_init_sections (void)
{
  toc_section
    = get_unnamed_section (0, rs6000_elf_output_toc_section_asm_op, NULL);

  sdata2_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   SDATA2_SECTION_ASM_OP);
}

/* Implement TARGET_SELECT_RTX_SECTION.  */

static section *
rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
			       unsigned HOST_WIDE_INT align)
{
  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
    return toc_section;
  else
    return default_elf_select_rtx_section (mode, x, align);
}

/* Implement TARGET_ASM_SELECT_SECTION for ELF targets.  */

static section *
rs6000_elf_select_section (tree decl, int reloc,
			   unsigned HOST_WIDE_INT align)
{
  /* Pretend that we're always building for a shared library when
     ABI_AIX, because otherwise we end up with dynamic relocations
     in read-only sections.  This happens for function pointers,
     references to vtables in typeinfo, and probably other cases.  */
  return default_elf_select_section_1 (decl, reloc, align,
				       flag_pic || DEFAULT_ABI == ABI_AIX);
}

/* A C statement to build up a unique section name, expressed as a
   STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
   RELOC indicates whether the initial value of EXP requires
   link-time relocations.  If you do not define this macro, GCC will use
   the symbol name prefixed by `.' as the section name.  Note - this
   macro can now be called for uninitialized data items as well as
   initialized data and functions.  */

static void
rs6000_elf_unique_section (tree decl, int reloc)
{
  /* As above, pretend that we're always building for a shared library
     when ABI_AIX, to avoid dynamic relocations in read-only sections.  */
  default_unique_section_1 (decl, reloc,
			    flag_pic || DEFAULT_ABI == ABI_AIX);
}
/* For a SYMBOL_REF, set generic flags and then perform some
   target-specific processing.

   When the AIX ABI is requested on a non-AIX system, replace the
   function name with the real name (with a leading .) rather than the
   function descriptor name.  This saves a lot of overriding code to
   read the prefixes.  */

static void
rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
{
  default_encode_section_info (decl, rtl, first);

  if (first
      && TREE_CODE (decl) == FUNCTION_DECL
      && !TARGET_AIX
      && DEFAULT_ABI == ABI_AIX)
    {
      rtx sym_ref = XEXP (rtl, 0);
      size_t len = strlen (XSTR (sym_ref, 0));
      char *str = alloca (len + 2);
      str[0] = '.';
      memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
      XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
    }
}

static bool
rs6000_elf_in_small_data_p (tree decl)
{
  if (rs6000_sdata == SDATA_NONE)
    return false;

  /* We want to merge strings, so we never consider them small data.  */
  if (TREE_CODE (decl) == STRING_CST)
    return false;

  /* Functions are never in the small data area.  */
  if (TREE_CODE (decl) == FUNCTION_DECL)
    return false;

  if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
    {
      const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
      if (strcmp (section, ".sdata") == 0
	  || strcmp (section, ".sdata2") == 0
	  || strcmp (section, ".sbss") == 0
	  || strcmp (section, ".sbss2") == 0
	  || strcmp (section, ".PPC.EMB.sdata0") == 0
	  || strcmp (section, ".PPC.EMB.sbss0") == 0)
	return true;
    }
  else
    {
      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));

      if (size > 0
	  && (unsigned HOST_WIDE_INT) size <= g_switch_value
	  /* If it's not public, and we're not going to reference it there,
	     there's no need to put it in the small data section.  */
	  && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
	return true;
    }

  return false;
}

#endif /* USING_ELFOS_H */

/* Implement TARGET_USE_BLOCKS_FOR_CONSTANT_P.  */

static bool
rs6000_use_blocks_for_constant_p (enum machine_mode mode, rtx x)
{
  return !ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode);
}
/* Return a REG that occurs in ADDR with coefficient 1.
   ADDR can be effectively incremented by incrementing REG.

   r0 is special and we must not select it as an address
   register by this routine since our caller will try to
   increment the returned register via an "la" instruction.  */

rtx
find_addr_reg (rtx addr)
{
  while (GET_CODE (addr) == PLUS)
    {
      if (GET_CODE (XEXP (addr, 0)) == REG
	  && REGNO (XEXP (addr, 0)) != 0)
	addr = XEXP (addr, 0);
      else if (GET_CODE (XEXP (addr, 1)) == REG
	       && REGNO (XEXP (addr, 1)) != 0)
	addr = XEXP (addr, 1);
      else if (CONSTANT_P (XEXP (addr, 0)))
	addr = XEXP (addr, 1);
      else if (CONSTANT_P (XEXP (addr, 1)))
	addr = XEXP (addr, 0);
      else
	gcc_unreachable ();
    }
  gcc_assert (GET_CODE (addr) == REG && REGNO (addr) != 0);
  return addr;
}
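
/* Illustrative example (editor's sketch; not in the original sources):
   for ADDR of the form (plus (reg 9) (const_int 16)) the loop above
   peels the constant and returns reg 9; for (plus (reg 0) (reg 10)) it
   steps past r0, which must never be the result, and returns reg 10.  */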
void
rs6000_fatal_bad_address (rtx op)
{
  fatal_insn ("bad address", op);
}

#if TARGET_MACHO

static tree branch_island_list = 0;

/* Remember to generate a branch island for far calls to the given
   function.  */

static void
add_compiler_branch_island (tree label_name, tree function_name,
			    int line_number)
{
  tree branch_island = build_tree_list (function_name, label_name);
  TREE_TYPE (branch_island) = build_int_cst (NULL_TREE, line_number);
  TREE_CHAIN (branch_island) = branch_island_list;
  branch_island_list = branch_island;
}
#define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND)     TREE_VALUE (BRANCH_ISLAND)
#define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND)  TREE_PURPOSE (BRANCH_ISLAND)
#define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND)    \
		TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))

/* Generate far-jump branch islands for everything on the
   branch_island_list.  Invoked immediately after the last instruction
   of the epilogue has been emitted; the branch-islands must be
   appended to, and contiguous with, the function body.  Mach-O stubs
   are generated in machopic_output_stub().  */

static void
macho_branch_islands (void)
{
  char tmp_buf[512];
  tree branch_island;

  for (branch_island = branch_island_list;
       branch_island;
       branch_island = TREE_CHAIN (branch_island))
    {
      const char *label =
	IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
      const char *name =
	IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island));
      char name_buf[512];
      /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF().  */
      if (name[0] == '*' || name[0] == '&')
	strcpy (name_buf, name+1);
      else
	{
	  name_buf[0] = '_';
	  strcpy (name_buf+1, name);
	}
      strcpy (tmp_buf, "\n");
      strcat (tmp_buf, label);
#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
      if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
	dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
      if (flag_pic)
	{
	  strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
	  strcat (tmp_buf, label);
	  strcat (tmp_buf, "_pic\n");
	  strcat (tmp_buf, label);
	  strcat (tmp_buf, "_pic:\n\tmflr r11\n");

	  strcat (tmp_buf, "\taddis r11,r11,ha16(");
	  strcat (tmp_buf, name_buf);
	  strcat (tmp_buf, " - ");
	  strcat (tmp_buf, label);
	  strcat (tmp_buf, "_pic)\n");

	  strcat (tmp_buf, "\tmtlr r0\n");

	  strcat (tmp_buf, "\taddi r12,r11,lo16(");
	  strcat (tmp_buf, name_buf);
	  strcat (tmp_buf, " - ");
	  strcat (tmp_buf, label);
	  strcat (tmp_buf, "_pic)\n");

	  strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
	}
      else
	{
	  strcat (tmp_buf, ":\nlis r12,hi16(");
	  strcat (tmp_buf, name_buf);
	  strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
	  strcat (tmp_buf, name_buf);
	  strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
	}
      output_asm_insn (tmp_buf, 0);
#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
      if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
	dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
    }

  branch_island_list = 0;
}
/* NO_PREVIOUS_DEF checks in the link list whether the function name is
   already there or not.  */

static bool
no_previous_def (tree function_name)
{
  tree branch_island;
  for (branch_island = branch_island_list;
       branch_island;
       branch_island = TREE_CHAIN (branch_island))
    if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
      return false;
  return true;
}

/* GET_PREV_LABEL gets the label name from the previous definition of
   the function.  */

static tree
get_prev_label (tree function_name)
{
  tree branch_island;
  for (branch_island = branch_island_list;
       branch_island;
       branch_island = TREE_CHAIN (branch_island))
    if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
      return BRANCH_ISLAND_LABEL_NAME (branch_island);
  return 0;
}

#ifndef DARWIN_LINKER_GENERATES_ISLANDS
#define DARWIN_LINKER_GENERATES_ISLANDS 0
#endif

/* KEXTs still need branch islands.  */
#define DARWIN_GENERATE_ISLANDS (!DARWIN_LINKER_GENERATES_ISLANDS \
				 || flag_mkernel || flag_apple_kext)

/* INSN is either a function call or a millicode call.  It may have an
   unconditional jump in its delay slot.

   CALL_DEST is the routine we are calling.  */

char *
output_call (rtx insn, rtx *operands, int dest_operand_number,
	     int cookie_operand_number)
{
  static char buf[256];
  if (DARWIN_GENERATE_ISLANDS
      && GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
      && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
    {
      tree labelname;
      tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));

      if (no_previous_def (funname))
	{
	  int line_number = 0;
	  rtx label_rtx = gen_label_rtx ();
	  char *label_buf, temp_buf[256];
	  ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
				       CODE_LABEL_NUMBER (label_rtx));
	  label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
	  labelname = get_identifier (label_buf);
	  for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
	  if (insn)
	    line_number = NOTE_LINE_NUMBER (insn);
	  add_compiler_branch_island (labelname, funname, line_number);
	}
      else
	labelname = get_prev_label (funname);

      /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
	 instruction will reach 'foo', otherwise link as 'bl L42'".
	 "L42" should be a 'branch island', that will do a far jump to
	 'foo'.  Branch islands are generated in
	 macho_branch_islands().  */
      sprintf (buf, "jbsr %%z%d,%.246s",
	       dest_operand_number, IDENTIFIER_POINTER (labelname));
    }
  else
    sprintf (buf, "bl %%z%d", dest_operand_number);
  return buf;
}
/* Generate PIC and indirect symbol stubs.  */

void
machopic_output_stub (FILE *file, const char *symb, const char *stub)
{
  unsigned int length;
  char *symbol_name, *lazy_ptr_name;
  char *local_label_0;
  static int label = 0;

  /* Lose our funky encoding stuff so it doesn't contaminate the stub.  */
  symb = (*targetm.strip_name_encoding) (symb);

  length = strlen (symb);

  symbol_name = alloca (length + 32);
  GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);

  lazy_ptr_name = alloca (length + 32);
  GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);

  if (flag_pic == 2)
    switch_to_section (darwin_sections[machopic_picsymbol_stub1_section]);
  else
    switch_to_section (darwin_sections[machopic_symbol_stub1_section]);

  if (flag_pic == 2)
    {
      fprintf (file, "\t.align 5\n");

      fprintf (file, "%s:\n", stub);
      fprintf (file, "\t.indirect_symbol %s\n", symbol_name);

      label++;
      local_label_0 = alloca (sizeof ("\"L00000000000$spb\""));
      sprintf (local_label_0, "\"L%011d$spb\"", label);

      fprintf (file, "\tmflr r0\n");
      fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
      fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
      fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtlr r0\n");
      fprintf (file, "\t%s r12,lo16(%s-%s)(r11)\n",
	       (TARGET_64BIT ? "ldu" : "lwzu"),
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtctr r12\n");
      fprintf (file, "\tbctr\n");
    }
  else
    {
      fprintf (file, "\t.align 4\n");

      fprintf (file, "%s:\n", stub);
      fprintf (file, "\t.indirect_symbol %s\n", symbol_name);

      fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
      fprintf (file, "\t%s r12,lo16(%s)(r11)\n",
	       (TARGET_64BIT ? "ldu" : "lwzu"),
	       lazy_ptr_name);
      fprintf (file, "\tmtctr r12\n");
      fprintf (file, "\tbctr\n");
    }

  switch_to_section (darwin_sections[machopic_lazy_symbol_ptr_section]);
  fprintf (file, "%s:\n", lazy_ptr_name);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
  fprintf (file, "%sdyld_stub_binding_helper\n",
	   (TARGET_64BIT ? DOUBLE_INT_ASM_OP : "\t.long\t"));
}

/* Legitimize PIC addresses.  If the address is already
   position-independent, we return ORIG.  Newly generated
   position-independent addresses go into a reg.  This is REG if non
   zero, otherwise we allocate register(s) as necessary.  */

#define SMALL_INT(X) ((UINTVAL (X) + 0x8000) < 0x10000)

rtx
rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
					rtx reg)
{
  rtx base, offset;

  if (reg == NULL && ! reload_in_progress && ! reload_completed)
    reg = gen_reg_rtx (Pmode);

  if (GET_CODE (orig) == CONST)
    {
      rtx reg_temp;

      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
	return orig;

      gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);

      /* Use a different reg for the intermediate value, as
	 it will be marked UNCHANGING.  */
      reg_temp = no_new_pseudos ? reg : gen_reg_rtx (Pmode);
      base = rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
						     Pmode, reg_temp);
      offset =
	rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
						Pmode, reg_temp);

      if (GET_CODE (offset) == CONST_INT)
	{
	  if (SMALL_INT (offset))
	    return plus_constant (base, INTVAL (offset));
	  else if (! reload_in_progress && ! reload_completed)
	    offset = force_reg (Pmode, offset);
	  else
	    {
	      rtx mem = force_const_mem (Pmode, orig);
	      return machopic_legitimize_pic_address (mem, Pmode, reg);
	    }
	}
      return gen_rtx_PLUS (Pmode, base, offset);
    }

  /* Fall back on generic machopic code.  */
  return machopic_legitimize_pic_address (orig, mode, reg);
}

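/* A worked example of SMALL_INT above: UINTVAL of -4 is
   0xfffffffffffffffc; adding 0x8000 wraps to 0x7ffc, which is less
   than 0x10000, so -4 is accepted.  For 0x8000 the sum is exactly
   0x10000 and the test fails.  The macro therefore accepts precisely
   the signed 16-bit offsets (-0x8000 .. 0x7fff) that fit the D field
   of a PowerPC load/store or addi.  */
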
/* Output a .machine directive for the Darwin assembler, and call
   the generic start_file routine.  */

static void
rs6000_darwin_file_start (void)
{
  static const struct
  {
    const char *arg;
    const char *name;
    int if_set;
  } mapping[] = {
    { "ppc64", "ppc64", MASK_64BIT },
    { "970", "ppc970", MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64 },
    { "power4", "ppc970", 0 },
    { "G5", "ppc970", 0 },
    { "7450", "ppc7450", 0 },
    { "7400", "ppc7400", MASK_ALTIVEC },
    { "G4", "ppc7400", 0 },
    { "750", "ppc750", 0 },
    { "740", "ppc750", 0 },
    { "G3", "ppc750", 0 },
    { "604e", "ppc604e", 0 },
    { "604", "ppc604", 0 },
    { "603e", "ppc603", 0 },
    { "603", "ppc603", 0 },
    { "601", "ppc601", 0 },
    { NULL, "ppc", 0 } };
  const char *cpu_id = "";
  size_t i;

  rs6000_file_start ();
  darwin_file_start ();

  /* Determine the argument to -mcpu=.  Default to G3 if not specified.  */
  for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
    if (rs6000_select[i].set_arch_p && rs6000_select[i].string
	&& rs6000_select[i].string[0] != '\0')
      cpu_id = rs6000_select[i].string;

  /* Look through the mapping array.  Pick the first name that either
     matches the argument, has a bit set in IF_SET that is also set
     in the target flags, or has a NULL name.  */

  i = 0;
  while (mapping[i].arg != NULL
	 && strcmp (mapping[i].arg, cpu_id) != 0
	 && (mapping[i].if_set & target_flags) == 0)
    i++;

  fprintf (asm_out_file, "\t.machine %s\n", mapping[i].name);
}

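/* Example of the selection above: "-mcpu=G5" stops the loop at the
   { "G5", "ppc970", 0 } entry and emits "\t.machine ppc970"; with no
   -mcpu and none of the IF_SET bits present in target_flags, the loop
   runs to the { NULL, "ppc", 0 } sentinel and ".machine ppc" is
   emitted.  */
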
#endif /* TARGET_MACHO */

#if TARGET_ELF
static unsigned int
rs6000_elf_section_type_flags (tree decl, const char *name, int reloc)
{
  return default_section_type_flags_1 (decl, name, reloc,
				       flag_pic || DEFAULT_ABI == ABI_AIX);
}

/* Record an element in the table of global constructors.  SYMBOL is
   a SYMBOL_REF of the function to be called; PRIORITY is a number
   between 0 and MAX_INIT_PRIORITY.

   This differs from default_named_section_asm_out_constructor in
   that we have special handling for -mrelocatable.  */

static void
rs6000_elf_asm_out_constructor (rtx symbol, int priority)
{
  const char *section = ".ctors";
  char buf[16];

  if (priority != DEFAULT_INIT_PRIORITY)
    {
      sprintf (buf, ".ctors.%.5u",
	       /* Invert the numbering so the linker puts us in the proper
		  order; constructors are run from right to left, and the
		  linker sorts in increasing order.  */
	       MAX_INIT_PRIORITY - priority);
      section = buf;
    }

  switch_to_section (get_section (section, SECTION_WRITE, NULL));
  assemble_align (POINTER_SIZE);

  if (TARGET_RELOCATABLE)
    {
      fputs ("\t.long (", asm_out_file);
      output_addr_const (asm_out_file, symbol);
      fputs (")@fixup\n", asm_out_file);
    }
  else
    assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
}

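/* For example, a constructor with priority 100 lands in section
   ".ctors.65435", since MAX_INIT_PRIORITY is 65535.  With the
   linker's increasing sort and the right-to-left execution noted
   above, smaller priority numbers therefore run earlier.  */
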
static void
rs6000_elf_asm_out_destructor (rtx symbol, int priority)
{
  const char *section = ".dtors";
  char buf[16];

  if (priority != DEFAULT_INIT_PRIORITY)
    {
      sprintf (buf, ".dtors.%.5u",
	       /* Invert the numbering so the linker puts us in the proper
		  order; constructors are run from right to left, and the
		  linker sorts in increasing order.  */
	       MAX_INIT_PRIORITY - priority);
      section = buf;
    }

  switch_to_section (get_section (section, SECTION_WRITE, NULL));
  assemble_align (POINTER_SIZE);

  if (TARGET_RELOCATABLE)
    {
      fputs ("\t.long (", asm_out_file);
      output_addr_const (asm_out_file, symbol);
      fputs (")@fixup\n", asm_out_file);
    }
  else
    assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
}

static void
rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
{
  if (TARGET_64BIT)
    {
      fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
      ASM_OUTPUT_LABEL (file, name);
      fputs (DOUBLE_INT_ASM_OP, file);
      rs6000_output_function_entry (file, name);
      fputs (",.TOC.@tocbase,0\n\t.previous\n", file);
      if (DOT_SYMBOLS)
	{
	  fputs ("\t.size\t", file);
	  assemble_name (file, name);
	  fputs (",24\n\t.type\t.", file);
	  assemble_name (file, name);
	  fputs (",@function\n", file);
	  if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
	    {
	      fputs ("\t.globl\t.", file);
	      assemble_name (file, name);
	      putc ('\n', file);
	    }
	}
      else
	ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
      ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
      rs6000_output_function_entry (file, name);
      fputs (":\n", file);
      return;
    }

  if (TARGET_RELOCATABLE
      && !TARGET_SECURE_PLT
      && (get_pool_size () != 0 || current_function_profile)
      && uses_TOC ())
    {
      char buf[256];

      (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);

      ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
      fprintf (file, "\t.long ");
      assemble_name (file, buf);
      putc ('-', file);
      ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
      assemble_name (file, buf);
      putc ('\n', file);
    }

  ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
  ASM_DECLARE_RESULT (file, DECL_RESULT (decl));

  if (DEFAULT_ABI == ABI_AIX)
    {
      const char *desc_name, *orig_name;

      orig_name = (*targetm.strip_name_encoding) (name);
      desc_name = orig_name;
      while (*desc_name == '.')
	desc_name++;

      if (TREE_PUBLIC (decl))
	fprintf (file, "\t.globl %s\n", desc_name);

      fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
      fprintf (file, "%s:\n", desc_name);
      fprintf (file, "\t.long %s\n", orig_name);
      fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
      if (DEFAULT_ABI == ABI_AIX)
	fputs ("\t.long 0\n", file);
      fprintf (file, "\t.previous\n");
    }
  ASM_OUTPUT_LABEL (file, name);
}

static void
rs6000_elf_end_indicate_exec_stack (void)
{
  if (TARGET_32BIT)
    file_end_indicate_exec_stack ();
}
#endif /* TARGET_ELF */

#if TARGET_XCOFF

static void
rs6000_xcoff_asm_output_anchor (rtx symbol)
{
  char buffer[100];

  sprintf (buffer, "$ + " HOST_WIDE_INT_PRINT_DEC,
	   SYMBOL_REF_BLOCK_OFFSET (symbol));
  ASM_OUTPUT_DEF (asm_out_file, XSTR (symbol, 0), buffer);
}

static void
rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
{
  fputs (GLOBAL_ASM_OP, stream);
  RS6000_OUTPUT_BASENAME (stream, name);
  putc ('\n', stream);
}

/* A get_unnamed_decl callback, used for read-only sections.  PTR
   points to the section string variable.  */

static void
rs6000_xcoff_output_readonly_section_asm_op (const void *directive)
{
  fprintf (asm_out_file, "\t.csect %s[RO],3\n",
	   *(const char *const *) directive);
}

/* Likewise for read-write sections.  */

static void
rs6000_xcoff_output_readwrite_section_asm_op (const void *directive)
{
  fprintf (asm_out_file, "\t.csect %s[RW],3\n",
	   *(const char *const *) directive);
}

/* A get_unnamed_section callback, used for switching to toc_section.  */

static void
rs6000_xcoff_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
{
  if (TARGET_MINIMAL_TOC)
    {
      /* toc_section is always selected at least once from
	 rs6000_xcoff_file_start, so this is guaranteed to
	 always be defined once and only once in each file.  */
      if (!toc_initialized)
	{
	  fputs ("\t.toc\nLCTOC..1:\n", asm_out_file);
	  fputs ("\t.tc toc_table[TC],toc_table[RW]\n", asm_out_file);
	  toc_initialized = 1;
	}
      fprintf (asm_out_file, "\t.csect toc_table[RW]%s\n",
	       (TARGET_32BIT ? "" : ",3"));
    }
  else
    fputs ("\t.toc\n", asm_out_file);
}

/* Implement TARGET_ASM_INIT_SECTIONS.  */

static void
rs6000_xcoff_asm_init_sections (void)
{
  read_only_data_section
    = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
			   &xcoff_read_only_section_name);

  private_data_section
    = get_unnamed_section (SECTION_WRITE,
			   rs6000_xcoff_output_readwrite_section_asm_op,
			   &xcoff_private_data_section_name);

  read_only_private_data_section
    = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
			   &xcoff_private_data_section_name);

  toc_section
    = get_unnamed_section (0, rs6000_xcoff_output_toc_section_asm_op, NULL);

  readonly_data_section = read_only_data_section;
  exception_section = data_section;
}

static void
rs6000_xcoff_asm_named_section (const char *name, unsigned int flags,
				tree decl ATTRIBUTE_UNUSED)
{
  int smclass;
  static const char * const suffix[3] = { "PR", "RO", "RW" };

  if (flags & SECTION_CODE)
    smclass = 0;
  else if (flags & SECTION_WRITE)
    smclass = 2;
  else
    smclass = 1;

  fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
	   (flags & SECTION_CODE) ? "." : "",
	   name, suffix[smclass], flags & SECTION_ENTSIZE);
}

static section *
rs6000_xcoff_select_section (tree decl, int reloc,
			     unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
{
  if (decl_readonly_section_1 (decl, reloc, 1))
    {
      if (TREE_PUBLIC (decl))
	return read_only_data_section;
      else
	return read_only_private_data_section;
    }
  else
    {
      if (TREE_PUBLIC (decl))
	return data_section;
      else
	return private_data_section;
    }
}

static void
rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
{
  const char *name;

  /* Use select_section for private and uninitialized data.  */
  if (!TREE_PUBLIC (decl)
      || DECL_COMMON (decl)
      || DECL_INITIAL (decl) == NULL_TREE
      || DECL_INITIAL (decl) == error_mark_node
      || (flag_zero_initialized_in_bss
	  && initializer_zerop (DECL_INITIAL (decl))))
    return;

  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
  name = (*targetm.strip_name_encoding) (name);
  DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
}

/* Select section for constant in constant pool.

   On RS/6000, all constants are in the private read-only data area.
   However, if this is being placed in the TOC it must be output as a
   toc entry.  */

static section *
rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
				 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
{
  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
    return toc_section;
  else
    return read_only_private_data_section;
}

/* Remove any trailing [DS] or the like from the symbol name.  */

static const char *
rs6000_xcoff_strip_name_encoding (const char *name)
{
  size_t len;
  if (*name == '*')
    name++;
  len = strlen (name);
  if (name[len - 1] == ']')
    return ggc_alloc_string (name, len - 4);
  else
    return name;
}

/* Section attributes.  AIX is always PIC.  */

static unsigned int
rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
{
  unsigned int align;
  unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);

  /* Align to at least UNIT size.  */
  if (flags & SECTION_CODE)
    align = MIN_UNITS_PER_WORD;
  else
    /* Increase alignment of large objects if not already stricter.  */
    align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
		 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
		 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);

  return flags | (exact_log2 (align) & SECTION_ENTSIZE);
}

/* Output at beginning of assembler file.

   Initialize the section names for the RS/6000 at this point.

   Specify filename, including full path, to assembler.

   We want to go into the TOC section so at least one .toc will be emitted.
   Also, in order to output proper .bs/.es pairs, we need at least one static
   [RW] section emitted.

   Finally, declare mcount when profiling to make the assembler happy.  */

static void
rs6000_xcoff_file_start (void)
{
  rs6000_gen_section_name (&xcoff_bss_section_name,
			   main_input_filename, ".bss_");
  rs6000_gen_section_name (&xcoff_private_data_section_name,
			   main_input_filename, ".rw_");
  rs6000_gen_section_name (&xcoff_read_only_section_name,
			   main_input_filename, ".ro_");

  fputs ("\t.file\t", asm_out_file);
  output_quoted_string (asm_out_file, main_input_filename);
  fputc ('\n', asm_out_file);
  if (write_symbols != NO_DEBUG)
    switch_to_section (private_data_section);
  switch_to_section (text_section);
  if (profile_flag)
    fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
  rs6000_file_start ();
}

/* Output at end of assembler file.
   On the RS/6000, referencing data should automatically pull in text.  */

static void
rs6000_xcoff_file_end (void)
{
  switch_to_section (text_section);
  fputs ("_section_.text:\n", asm_out_file);
  switch_to_section (data_section);
  fputs (TARGET_32BIT
	 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
	 asm_out_file);
}
#endif /* TARGET_XCOFF */

/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.  */

static bool
rs6000_rtx_costs (rtx x, int code, int outer_code, int *total)
{
  enum machine_mode mode = GET_MODE (x);

  switch (code)
    {
    case CONST_INT:
      /* On the RS/6000, if it is valid in the insn, it is free.  */
      if (((outer_code == SET
	    || outer_code == PLUS
	    || outer_code == MINUS)
	   && (satisfies_constraint_I (x)
	       || satisfies_constraint_L (x)))
	  || (outer_code == AND
	      && (satisfies_constraint_K (x)
		  || (mode == SImode
		      ? satisfies_constraint_L (x)
		      : satisfies_constraint_J (x))
		  || mask_operand (x, mode)
		  || (mode == DImode
		      && mask64_operand (x, DImode))))
	  || ((outer_code == IOR || outer_code == XOR)
	      && (satisfies_constraint_K (x)
		  || (mode == SImode
		      ? satisfies_constraint_L (x)
		      : satisfies_constraint_J (x))))
	  || outer_code == ASHIFT
	  || outer_code == ASHIFTRT
	  || outer_code == LSHIFTRT
	  || outer_code == ROTATE
	  || outer_code == ROTATERT
	  || outer_code == ZERO_EXTRACT
	  || (outer_code == MULT
	      && satisfies_constraint_I (x))
	  || ((outer_code == DIV || outer_code == UDIV
	       || outer_code == MOD || outer_code == UMOD)
	      && exact_log2 (INTVAL (x)) >= 0)
	  || (outer_code == COMPARE
	      && (satisfies_constraint_I (x)
		  || satisfies_constraint_K (x)))
	  || (outer_code == EQ
	      && (satisfies_constraint_I (x)
		  || satisfies_constraint_K (x)
		  || (mode == SImode
		      ? satisfies_constraint_L (x)
		      : satisfies_constraint_J (x))))
	  || (outer_code == GTU
	      && satisfies_constraint_I (x))
	  || (outer_code == LTU
	      && satisfies_constraint_P (x)))
	{
	  *total = 0;
	  return true;
	}
      else if ((outer_code == PLUS
		&& reg_or_add_cint_operand (x, VOIDmode))
	       || (outer_code == MINUS
		   && reg_or_sub_cint_operand (x, VOIDmode))
	       || ((outer_code == SET
		    || outer_code == IOR
		    || outer_code == XOR)
		   && (INTVAL (x)
		       & ~ (unsigned HOST_WIDE_INT) 0xffffffff) == 0))
	{
	  *total = COSTS_N_INSNS (1);
	  return true;
	}
      /* FALLTHRU */

    case CONST_DOUBLE:
      if (mode == DImode && code == CONST_DOUBLE)
	{
	  if ((outer_code == IOR || outer_code == XOR)
	      && CONST_DOUBLE_HIGH (x) == 0
	      && (CONST_DOUBLE_LOW (x)
		  & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0)
	    {
	      *total = 0;
	      return true;
	    }
	  else if ((outer_code == AND && and64_2_operand (x, DImode))
		   || ((outer_code == SET
			|| outer_code == IOR
			|| outer_code == XOR)
		       && CONST_DOUBLE_HIGH (x) == 0))
	    {
	      *total = COSTS_N_INSNS (1);
	      return true;
	    }
	}
      /* FALLTHRU */

    case CONST:
    case HIGH:
    case SYMBOL_REF:
    case MEM:
      /* When optimizing for size, MEM should be slightly more expensive
	 than generating address, e.g., (plus (reg) (const)).
	 L1 cache latency is about two instructions.  */
      *total = optimize_size ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (2);
      return true;

    case LABEL_REF:
      *total = 0;
      return true;

    case PLUS:
      if (mode == DFmode)
	{
	  if (GET_CODE (XEXP (x, 0)) == MULT)
	    {
	      /* FNMA accounted in outer NEG.  */
	      if (outer_code == NEG)
		*total = rs6000_cost->dmul - rs6000_cost->fp;
	      else
		*total = rs6000_cost->dmul;
	    }
	  else
	    *total = rs6000_cost->fp;
	}
      else if (mode == SFmode)
	{
	  /* FNMA accounted in outer NEG.  */
	  if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
	    *total = 0;
	  else
	    *total = rs6000_cost->fp;
	}
      else
	*total = COSTS_N_INSNS (1);
      return false;

    case MINUS:
      if (mode == DFmode)
	{
	  if (GET_CODE (XEXP (x, 0)) == MULT
	      || GET_CODE (XEXP (x, 1)) == MULT)
	    {
	      /* FNMA accounted in outer NEG.  */
	      if (outer_code == NEG)
		*total = rs6000_cost->dmul - rs6000_cost->fp;
	      else
		*total = rs6000_cost->dmul;
	    }
	  else
	    *total = rs6000_cost->fp;
	}
      else if (mode == SFmode)
	{
	  /* FNMA accounted in outer NEG.  */
	  if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
	    *total = 0;
	  else
	    *total = rs6000_cost->fp;
	}
      else
	*total = COSTS_N_INSNS (1);
      return false;

    case MULT:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && satisfies_constraint_I (XEXP (x, 1)))
	{
	  if (INTVAL (XEXP (x, 1)) >= -256
	      && INTVAL (XEXP (x, 1)) <= 255)
	    *total = rs6000_cost->mulsi_const9;
	  else
	    *total = rs6000_cost->mulsi_const;
	}
      /* FMA accounted in outer PLUS/MINUS.  */
      else if ((mode == DFmode || mode == SFmode)
	       && (outer_code == PLUS || outer_code == MINUS))
	*total = 0;
      else if (mode == DFmode)
	*total = rs6000_cost->dmul;
      else if (mode == SFmode)
	*total = rs6000_cost->fp;
      else if (mode == DImode)
	*total = rs6000_cost->muldi;
      else
	*total = rs6000_cost->mulsi;
      return false;

    case DIV:
    case MOD:
      if (FLOAT_MODE_P (mode))
	{
	  *total = mode == DFmode ? rs6000_cost->ddiv
				  : rs6000_cost->sdiv;
	  return false;
	}
      /* FALLTHRU */

    case UDIV:
    case UMOD:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
	{
	  if (code == DIV || code == MOD)
	    /* Shift, addze */
	    *total = COSTS_N_INSNS (2);
	  else
	    /* Shift */
	    *total = COSTS_N_INSNS (1);
	}
      else
	{
	  if (GET_MODE (XEXP (x, 1)) == DImode)
	    *total = rs6000_cost->divdi;
	  else
	    *total = rs6000_cost->divsi;
	}
      /* Add in shift and subtract for MOD.  */
      if (code == MOD || code == UMOD)
	*total += COSTS_N_INSNS (2);
      return false;

    case FFS:
      *total = COSTS_N_INSNS (4);
      return false;

    case NOT:
      if (outer_code == AND || outer_code == IOR || outer_code == XOR)
	{
	  *total = 0;
	  return false;
	}
      /* FALLTHRU */

    case AND:
    case IOR:
    case XOR:
    case ZERO_EXTRACT:
      *total = COSTS_N_INSNS (1);
      return false;

    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
    case ROTATE:
    case ROTATERT:
      /* Handle mul_highpart.  */
      if (outer_code == TRUNCATE
	  && GET_CODE (XEXP (x, 0)) == MULT)
	{
	  if (mode == DImode)
	    *total = rs6000_cost->muldi;
	  else
	    *total = rs6000_cost->mulsi;
	  return true;
	}
      else if (outer_code == AND)
	*total = 0;
      else
	*total = COSTS_N_INSNS (1);
      return false;

    case SIGN_EXTEND:
    case ZERO_EXTEND:
      if (GET_CODE (XEXP (x, 0)) == MEM)
	*total = 0;
      else
	*total = COSTS_N_INSNS (1);
      return false;

    case COMPARE:
    case NEG:
    case ABS:
      if (!FLOAT_MODE_P (mode))
	{
	  *total = COSTS_N_INSNS (1);
	  return false;
	}
      /* FALLTHRU */

    case FLOAT:
    case UNSIGNED_FLOAT:
    case FIX:
    case UNSIGNED_FIX:
    case FLOAT_TRUNCATE:
      *total = rs6000_cost->fp;
      return false;

    case FLOAT_EXTEND:
      if (mode == DFmode)
	*total = 0;
      else
	*total = rs6000_cost->fp;
      return false;

    case UNSPEC:
      switch (XINT (x, 1))
	{
	case UNSPEC_FRSP:
	  *total = rs6000_cost->fp;
	  return true;

	default:
	  break;
	}
      break;

    case CALL:
    case IF_THEN_ELSE:
      if (optimize_size)
	{
	  *total = COSTS_N_INSNS (1);
	  return true;
	}
      else if (FLOAT_MODE_P (mode)
	       && TARGET_PPC_GFXOPT && TARGET_HARD_FLOAT && TARGET_FPRS)
	{
	  *total = rs6000_cost->fp;
	  return false;
	}
      break;

    case EQ:
    case GTU:
    case LTU:
      /* Carry bit requires mode == Pmode.
	 NEG or PLUS already counted so only add one.  */
      if (mode == Pmode
	  && (outer_code == NEG || outer_code == PLUS))
	{
	  *total = COSTS_N_INSNS (1);
	  return true;
	}
      if (outer_code == SET)
	{
	  if (XEXP (x, 1) == const0_rtx)
	    {
	      *total = COSTS_N_INSNS (2);
	      return true;
	    }
	  else if (mode == Pmode)
	    {
	      *total = COSTS_N_INSNS (3);
	      return false;
	    }
	}
      /* FALLTHRU */

    case GT:
    case LT:
    case UNORDERED:
      if (outer_code == SET && (XEXP (x, 1) == const0_rtx))
	{
	  *total = COSTS_N_INSNS (2);
	  return true;
	}
      /* CC COMPARE.  */
      if (outer_code == COMPARE)
	{
	  *total = 0;
	  return true;
	}
      break;

    default:
      break;
    }

  return false;
}

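/* Two illustrations of the classification above: in
   (plus:SI (reg) (const_int 12)) the constant satisfies the "I"
   constraint (signed 16 bits), so the CONST_INT case reports it as
   free -- it becomes the immediate of an addi.  A (const_int 100000)
   under MULT fits no immediate form, falls through to the shared
   CONST/.../MEM bucket and is charged roughly two instructions
   (e.g. lis/ori to synthesize it), while the multiply itself is
   charged rs6000_cost->mulsi.  */
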
/* A C expression returning the cost of moving data from a register of class
   CLASS1 to one of CLASS2.  */

int
rs6000_register_move_cost (enum machine_mode mode,
			   enum reg_class from, enum reg_class to)
{
  /* Moves from/to GENERAL_REGS.  */
  if (reg_classes_intersect_p (to, GENERAL_REGS)
      || reg_classes_intersect_p (from, GENERAL_REGS))
    {
      if (! reg_classes_intersect_p (to, GENERAL_REGS))
	from = to;

      if (from == FLOAT_REGS || from == ALTIVEC_REGS)
	return (rs6000_memory_move_cost (mode, from, 0)
		+ rs6000_memory_move_cost (mode, GENERAL_REGS, 0));

      /* It's more expensive to move CR_REGS than CR0_REGS because of the
	 shift.  */
      else if (from == CR_REGS)
	return 4;

      else
	/* A move will cost one instruction per GPR moved.  */
	return 2 * hard_regno_nregs[0][mode];
    }

  /* Moving between two similar registers is just one instruction.  */
  else if (reg_classes_intersect_p (to, from))
    return mode == TFmode ? 4 : 2;

  /* Everything else has to go through GENERAL_REGS.  */
  else
    return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
	    + rs6000_register_move_cost (mode, from, GENERAL_REGS));
}

/* A C expression returning the cost of moving data of MODE from a register
   to or from memory.  */

int
rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
			 int in ATTRIBUTE_UNUSED)
{
  if (reg_classes_intersect_p (class, GENERAL_REGS))
    return 4 * hard_regno_nregs[0][mode];
  else if (reg_classes_intersect_p (class, FLOAT_REGS))
    return 4 * hard_regno_nregs[32][mode];
  else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
    return 4 * hard_regno_nregs[FIRST_ALTIVEC_REGNO][mode];
  else
    return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
}

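/* Example: a GPR-to-GPR SImode move above costs
   2 * hard_regno_nregs[0][SImode] = 2, while FLOAT_REGS <-> GENERAL_REGS
   has no direct path and is priced as a store plus a load,
   rs6000_memory_move_cost (SImode, FLOAT_REGS, 0)
   + rs6000_memory_move_cost (SImode, GENERAL_REGS, 0) = 4 + 4 = 8.  */
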
/* Newton-Raphson approximation of single-precision floating point divide n/d.
   Assumes no trapping math and finite arguments.  */

void
rs6000_emit_swdivsf (rtx res, rtx n, rtx d)
{
  rtx x0, e0, e1, y1, u0, v0, one;

  x0 = gen_reg_rtx (SFmode);
  e0 = gen_reg_rtx (SFmode);
  e1 = gen_reg_rtx (SFmode);
  y1 = gen_reg_rtx (SFmode);
  u0 = gen_reg_rtx (SFmode);
  v0 = gen_reg_rtx (SFmode);
  one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));

  /* x0 = 1./d estimate */
  emit_insn (gen_rtx_SET (VOIDmode, x0,
			  gen_rtx_UNSPEC (SFmode, gen_rtvec (1, d),
					  UNSPEC_FRES)));
  /* e0 = 1. - d * x0 */
  emit_insn (gen_rtx_SET (VOIDmode, e0,
			  gen_rtx_MINUS (SFmode, one,
					 gen_rtx_MULT (SFmode, d, x0))));
  /* e1 = e0 + e0 * e0 */
  emit_insn (gen_rtx_SET (VOIDmode, e1,
			  gen_rtx_PLUS (SFmode,
					gen_rtx_MULT (SFmode, e0, e0), e0)));
  /* y1 = x0 + e1 * x0 */
  emit_insn (gen_rtx_SET (VOIDmode, y1,
			  gen_rtx_PLUS (SFmode,
					gen_rtx_MULT (SFmode, e1, x0), x0)));
  /* u0 = n * y1 */
  emit_insn (gen_rtx_SET (VOIDmode, u0,
			  gen_rtx_MULT (SFmode, n, y1)));
  /* v0 = n - d * u0 */
  emit_insn (gen_rtx_SET (VOIDmode, v0,
			  gen_rtx_MINUS (SFmode, n,
					 gen_rtx_MULT (SFmode, d, u0))));
  /* res = u0 + v0 * y1 */
  emit_insn (gen_rtx_SET (VOIDmode, res,
			  gen_rtx_PLUS (SFmode,
					gen_rtx_MULT (SFmode, v0, y1), u0)));
}

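/* A minimal C sketch of the same refinement (illustrative only, not
   part of the build; the initial reciprocal below stands in for the
   hardware estimate the UNSPEC above requests):  */
#if 0
static float
swdivsf_sketch (float n, float d)
{
  float x0 = 1.0f / d;        /* stand-in for the fres estimate */
  float e0 = 1.0f - d * x0;   /* error of the estimate */
  float e1 = e0 + e0 * e0;    /* fold in a second refinement term */
  float y1 = x0 + e1 * x0;    /* refined reciprocal of d */
  float u0 = n * y1;          /* first quotient approximation */
  float v0 = n - d * u0;      /* residual */
  return u0 + v0 * y1;        /* corrected quotient */
}
#endif
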
/* Newton-Raphson approximation of double-precision floating point divide n/d.
   Assumes no trapping math and finite arguments.  */

void
rs6000_emit_swdivdf (rtx res, rtx n, rtx d)
{
  rtx x0, e0, e1, e2, y1, y2, y3, u0, v0, one;

  x0 = gen_reg_rtx (DFmode);
  e0 = gen_reg_rtx (DFmode);
  e1 = gen_reg_rtx (DFmode);
  e2 = gen_reg_rtx (DFmode);
  y1 = gen_reg_rtx (DFmode);
  y2 = gen_reg_rtx (DFmode);
  y3 = gen_reg_rtx (DFmode);
  u0 = gen_reg_rtx (DFmode);
  v0 = gen_reg_rtx (DFmode);
  one = force_reg (DFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, DFmode));

  /* x0 = 1./d estimate */
  emit_insn (gen_rtx_SET (VOIDmode, x0,
			  gen_rtx_UNSPEC (DFmode, gen_rtvec (1, d),
					  UNSPEC_FRES)));
  /* e0 = 1. - d * x0 */
  emit_insn (gen_rtx_SET (VOIDmode, e0,
			  gen_rtx_MINUS (DFmode, one,
					 gen_rtx_MULT (DFmode, d, x0))));
  /* y1 = x0 + e0 * x0 */
  emit_insn (gen_rtx_SET (VOIDmode, y1,
			  gen_rtx_PLUS (DFmode,
					gen_rtx_MULT (DFmode, e0, x0), x0)));
  /* e1 = e0 * e0 */
  emit_insn (gen_rtx_SET (VOIDmode, e1,
			  gen_rtx_MULT (DFmode, e0, e0)));
  /* y2 = y1 + e1 * y1 */
  emit_insn (gen_rtx_SET (VOIDmode, y2,
			  gen_rtx_PLUS (DFmode,
					gen_rtx_MULT (DFmode, e1, y1), y1)));
  /* e2 = e1 * e1 */
  emit_insn (gen_rtx_SET (VOIDmode, e2,
			  gen_rtx_MULT (DFmode, e1, e1)));
  /* y3 = y2 + e2 * y2 */
  emit_insn (gen_rtx_SET (VOIDmode, y3,
			  gen_rtx_PLUS (DFmode,
					gen_rtx_MULT (DFmode, e2, y2), y2)));
  /* u0 = n * y3 */
  emit_insn (gen_rtx_SET (VOIDmode, u0,
			  gen_rtx_MULT (DFmode, n, y3)));
  /* v0 = n - d * u0 */
  emit_insn (gen_rtx_SET (VOIDmode, v0,
			  gen_rtx_MINUS (DFmode, n,
					 gen_rtx_MULT (DFmode, d, u0))));
  /* res = u0 + v0 * y3 */
  emit_insn (gen_rtx_SET (VOIDmode, res,
			  gen_rtx_PLUS (DFmode,
					gen_rtx_MULT (DFmode, v0, y3), u0)));
}

/* Emit popcount intrinsic on TARGET_POPCNTB targets.  DST is the
   target, and SRC is the argument operand.  */

void
rs6000_emit_popcount (rtx dst, rtx src)
{
  enum machine_mode mode = GET_MODE (dst);
  rtx tmp1, tmp2;

  tmp1 = gen_reg_rtx (mode);

  if (mode == SImode)
    {
      emit_insn (gen_popcntbsi2 (tmp1, src));
      tmp2 = expand_mult (SImode, tmp1, GEN_INT (0x01010101),
			  NULL_RTX, 0);
      tmp2 = force_reg (SImode, tmp2);
      emit_insn (gen_lshrsi3 (dst, tmp2, GEN_INT (24)));
    }
  else
    {
      emit_insn (gen_popcntbdi2 (tmp1, src));
      tmp2 = expand_mult (DImode, tmp1,
			  GEN_INT ((HOST_WIDE_INT)
				   0x01010101 << 32 | 0x01010101),
			  NULL_RTX, 0);
      tmp2 = force_reg (DImode, tmp2);
      emit_insn (gen_lshrdi3 (dst, tmp2, GEN_INT (56)));
    }
}

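/* A C sketch of the SImode arithmetic expanded above (illustrative
   only, not part of the build).  popcntb leaves each byte's
   population count in that byte; multiplying by 0x01010101 sums the
   four byte counts into the most significant byte, which the final
   shift extracts.  */
#if 0
static unsigned int
popcount_sketch (unsigned int byte_counts)
{
  /* byte_counts stands in for the result of popcntb.  */
  return (byte_counts * 0x01010101u) >> 24;
}
#endif
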
/* Emit parity intrinsic on TARGET_POPCNTB targets.  DST is the
   target, and SRC is the argument operand.  */

void
rs6000_emit_parity (rtx dst, rtx src)
{
  enum machine_mode mode = GET_MODE (dst);
  rtx tmp;

  tmp = gen_reg_rtx (mode);
  if (mode == SImode)
    {
      /* Is mult+shift >= shift+xor+shift+xor?  */
      if (rs6000_cost->mulsi_const >= COSTS_N_INSNS (3))
	{
	  rtx tmp1, tmp2, tmp3, tmp4;

	  tmp1 = gen_reg_rtx (SImode);
	  emit_insn (gen_popcntbsi2 (tmp1, src));

	  tmp2 = gen_reg_rtx (SImode);
	  emit_insn (gen_lshrsi3 (tmp2, tmp1, GEN_INT (16)));
	  tmp3 = gen_reg_rtx (SImode);
	  emit_insn (gen_xorsi3 (tmp3, tmp1, tmp2));

	  tmp4 = gen_reg_rtx (SImode);
	  emit_insn (gen_lshrsi3 (tmp4, tmp3, GEN_INT (8)));
	  emit_insn (gen_xorsi3 (tmp, tmp3, tmp4));
	}
      else
	rs6000_emit_popcount (tmp, src);
      emit_insn (gen_andsi3 (dst, tmp, const1_rtx));
    }
  else
    {
      /* Is mult+shift >= shift+xor+shift+xor+shift+xor?  */
      if (rs6000_cost->muldi >= COSTS_N_INSNS (5))
	{
	  rtx tmp1, tmp2, tmp3, tmp4, tmp5, tmp6;

	  tmp1 = gen_reg_rtx (DImode);
	  emit_insn (gen_popcntbdi2 (tmp1, src));

	  tmp2 = gen_reg_rtx (DImode);
	  emit_insn (gen_lshrdi3 (tmp2, tmp1, GEN_INT (32)));
	  tmp3 = gen_reg_rtx (DImode);
	  emit_insn (gen_xordi3 (tmp3, tmp1, tmp2));

	  tmp4 = gen_reg_rtx (DImode);
	  emit_insn (gen_lshrdi3 (tmp4, tmp3, GEN_INT (16)));
	  tmp5 = gen_reg_rtx (DImode);
	  emit_insn (gen_xordi3 (tmp5, tmp3, tmp4));

	  tmp6 = gen_reg_rtx (DImode);
	  emit_insn (gen_lshrdi3 (tmp6, tmp5, GEN_INT (8)));
	  emit_insn (gen_xordi3 (tmp, tmp5, tmp6));
	}
      else
	rs6000_emit_popcount (tmp, src);
      emit_insn (gen_anddi3 (dst, tmp, const1_rtx));
    }
}

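/* A C sketch of the SImode shift/xor alternative above (illustrative
   only, not part of the build).  XOR-folding the popcntb byte counts
   preserves parity, and the low bit of the folded byte is the
   result; byte_counts again stands in for the popcntb result.  */
#if 0
static unsigned int
parity_sketch (unsigned int byte_counts)
{
  unsigned int t = byte_counts ^ (byte_counts >> 16);
  t ^= t >> 8;
  return t & 1;
}
#endif
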
/* Return an RTX representing where to find the function value of a
   function returning MODE.  */

static rtx
rs6000_complex_function_value (enum machine_mode mode)
{
  unsigned int regno;
  rtx r1, r2;
  enum machine_mode inner = GET_MODE_INNER (mode);
  unsigned int inner_bytes = GET_MODE_SIZE (inner);

  if (FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
    regno = FP_ARG_RETURN;
  else
    {
      regno = GP_ARG_RETURN;

      /* 32-bit is OK since it'll go in r3/r4.  */
      if (TARGET_32BIT && inner_bytes >= 4)
	return gen_rtx_REG (mode, regno);
    }

  if (inner_bytes >= 8)
    return gen_rtx_REG (mode, regno);

  r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
			  const0_rtx);
  r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
			  GEN_INT (inner_bytes));
  return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
}

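/* For instance, a complex float (SCmode) returned under a hard-float
   ABI yields a PARALLEL of two SFmode registers at FP_ARG_RETURN and
   FP_ARG_RETURN + 1, with byte offsets 0 and 4 for the real and
   imaginary parts.  */
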
/* Define how to find the value returned by a function.
   VALTYPE is the data type of the value (as a tree).
   If the precise function being called is known, FUNC is its FUNCTION_DECL;
   otherwise, FUNC is 0.

   On the SPE, both FPs and vectors are returned in r3.

   On RS/6000 an integer value is in r3 and a floating-point value is in
   fp1, unless -msoft-float.  */

static rtx
rs6000_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
{
  enum machine_mode mode;
  unsigned int regno;

  /* Special handling for structs in darwin64.  */
  if (rs6000_darwin64_abi
      && TYPE_MODE (valtype) == BLKmode
      && TREE_CODE (valtype) == RECORD_TYPE
      && int_size_in_bytes (valtype) > 0)
    {
      CUMULATIVE_ARGS valcum;
      rtx valret;

      valcum.words = 0;
      valcum.fregno = FP_ARG_MIN_REG;
      valcum.vregno = ALTIVEC_ARG_MIN_REG;
      /* Do a trial code generation as if this were going to be passed as
	 an argument; if any part goes in memory, we return NULL.  */
      valret = rs6000_darwin64_record_arg (&valcum, valtype, 1, true);
      if (valret)
	return valret;
      /* Otherwise fall through to standard ABI rules.  */
    }

  if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
    {
      /* Long long return value needs to be split in -mpowerpc64, 32-bit ABI.  */
      return gen_rtx_PARALLEL (DImode,
	gen_rtvec (2,
		   gen_rtx_EXPR_LIST (VOIDmode,
				      gen_rtx_REG (SImode, GP_ARG_RETURN),
				      const0_rtx),
		   gen_rtx_EXPR_LIST (VOIDmode,
				      gen_rtx_REG (SImode,
						   GP_ARG_RETURN + 1),
				      GEN_INT (4))));
    }
  if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DCmode)
    {
      return gen_rtx_PARALLEL (DCmode,
	gen_rtvec (4,
		   gen_rtx_EXPR_LIST (VOIDmode,
				      gen_rtx_REG (SImode, GP_ARG_RETURN),
				      const0_rtx),
		   gen_rtx_EXPR_LIST (VOIDmode,
				      gen_rtx_REG (SImode,
						   GP_ARG_RETURN + 1),
				      GEN_INT (4)),
		   gen_rtx_EXPR_LIST (VOIDmode,
				      gen_rtx_REG (SImode,
						   GP_ARG_RETURN + 2),
				      GEN_INT (8)),
		   gen_rtx_EXPR_LIST (VOIDmode,
				      gen_rtx_REG (SImode,
						   GP_ARG_RETURN + 3),
				      GEN_INT (12))));
    }

  if ((INTEGRAL_TYPE_P (valtype)
       && TYPE_PRECISION (valtype) < BITS_PER_WORD)
      || POINTER_TYPE_P (valtype))
    mode = TARGET_32BIT ? SImode : DImode;
  else
    mode = TYPE_MODE (valtype);

  if (DECIMAL_FLOAT_MODE_P (mode))
    regno = GP_ARG_RETURN;
  else if (SCALAR_FLOAT_TYPE_P (valtype) && TARGET_HARD_FLOAT && TARGET_FPRS)
    regno = FP_ARG_RETURN;
  else if (TREE_CODE (valtype) == COMPLEX_TYPE
	   && targetm.calls.split_complex_arg)
    return rs6000_complex_function_value (mode);
  else if (TREE_CODE (valtype) == VECTOR_TYPE
	   && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI
	   && ALTIVEC_VECTOR_MODE (mode))
    regno = ALTIVEC_ARG_RETURN;
  else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
	   && (mode == DFmode || mode == DCmode
	       || mode == TFmode || mode == TCmode))
    return spe_build_register_parallel (mode, GP_ARG_RETURN);
  else
    regno = GP_ARG_RETURN;

  return gen_rtx_REG (mode, regno);
}

/* Define how to find the value returned by a library function
   assuming the value has mode MODE.  */

rtx
rs6000_libcall_value (enum machine_mode mode)
{
  unsigned int regno;

  if (TARGET_32BIT && TARGET_POWERPC64 && mode == DImode)
    {
      /* Long long return value needs to be split in -mpowerpc64, 32-bit ABI.  */
      return gen_rtx_PARALLEL (DImode,
	gen_rtvec (2,
		   gen_rtx_EXPR_LIST (VOIDmode,
				      gen_rtx_REG (SImode, GP_ARG_RETURN),
				      const0_rtx),
		   gen_rtx_EXPR_LIST (VOIDmode,
				      gen_rtx_REG (SImode,
						   GP_ARG_RETURN + 1),
				      GEN_INT (4))));
    }

  if (DECIMAL_FLOAT_MODE_P (mode))
    regno = GP_ARG_RETURN;
  else if (SCALAR_FLOAT_MODE_P (mode)
	   && TARGET_HARD_FLOAT && TARGET_FPRS)
    regno = FP_ARG_RETURN;
  else if (ALTIVEC_VECTOR_MODE (mode)
	   && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
    regno = ALTIVEC_ARG_RETURN;
  else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
    return rs6000_complex_function_value (mode);
  else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
	   && (mode == DFmode || mode == DCmode
	       || mode == TFmode || mode == TCmode))
    return spe_build_register_parallel (mode, GP_ARG_RETURN);
  else
    regno = GP_ARG_RETURN;

  return gen_rtx_REG (mode, regno);
}

/* Define the offset between two registers, FROM to be eliminated and its
   replacement TO, at the start of a routine.  */

HOST_WIDE_INT
rs6000_initial_elimination_offset (int from, int to)
{
  rs6000_stack_t *info = rs6000_stack_info ();
  HOST_WIDE_INT offset;

  if (from == HARD_FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    offset = info->push_p ? 0 : -info->total_size;
  else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    {
      offset = info->push_p ? 0 : -info->total_size;
      if (FRAME_GROWS_DOWNWARD)
	offset += info->fixed_size + info->vars_size + info->parm_size;
    }
  else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
    offset = FRAME_GROWS_DOWNWARD
	     ? info->fixed_size + info->vars_size + info->parm_size
	     : 0;
  else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
    offset = info->total_size;
  else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    offset = info->push_p ? info->total_size : 0;
  else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
    offset = 0;
  else
    gcc_unreachable ();

  return offset;
}

/* Return true if TYPE is a SPE or AltiVec opaque type.  */

static bool
rs6000_is_opaque_type (tree type)
{
  return (type == opaque_V2SI_type_node
	  || type == opaque_V2SF_type_node
	  || type == opaque_p_V2SI_type_node
	  || type == opaque_V4SI_type_node);
}

static rtx
rs6000_dwarf_register_span (rtx reg)
{
  unsigned regno;

  if (TARGET_SPE
      && (SPE_VECTOR_MODE (GET_MODE (reg))
	  || (TARGET_E500_DOUBLE && GET_MODE (reg) == DFmode)))
    ;
  else
    return NULL_RTX;

  regno = REGNO (reg);

  /* The duality of the SPE register size wreaks all kinds of havoc.
     This is a way of distinguishing r0 in 32-bits from r0 in
     64-bits.  */
  return
    gen_rtx_PARALLEL (VOIDmode,
		      BYTES_BIG_ENDIAN
		      ? gen_rtvec (2,
				   gen_rtx_REG (SImode, regno + 1200),
				   gen_rtx_REG (SImode, regno))
		      : gen_rtvec (2,
				   gen_rtx_REG (SImode, regno),
				   gen_rtx_REG (SImode, regno + 1200)));
}

/* Map internal gcc register numbers to DWARF2 register numbers.  */

unsigned int
rs6000_dbx_register_number (unsigned int regno)
{
  if (regno <= 63 || write_symbols != DWARF2_DEBUG)
    return regno;
  if (regno == MQ_REGNO)
    return 100;
  if (regno == LINK_REGISTER_REGNUM)
    return 108;
  if (regno == COUNT_REGISTER_REGNUM)
    return 109;
  if (CR_REGNO_P (regno))
    return regno - CR0_REGNO + 86;
  if (regno == XER_REGNO)
    return 101;
  if (ALTIVEC_REGNO_P (regno))
    return regno - FIRST_ALTIVEC_REGNO + 1124;
  if (regno == VRSAVE_REGNO)
    return 356;
  if (regno == VSCR_REGNO)
    return 67;
  if (regno == SPE_ACC_REGNO)
    return 99;
  if (regno == SPEFSCR_REGNO)
    return 612;
  /* SPE high reg number.  We get these values of regno from
     rs6000_dwarf_register_span.  */
  gcc_assert (regno >= 1200 && regno < 1232);
  return regno;
}

/* target hook eh_return_filter_mode */
static enum machine_mode
rs6000_eh_return_filter_mode (void)
{
  return TARGET_32BIT ? SImode : word_mode;
}

/* Target hook for scalar_mode_supported_p.  */
static bool
rs6000_scalar_mode_supported_p (enum machine_mode mode)
{
  if (DECIMAL_FLOAT_MODE_P (mode))
    return true;
  else
    return default_scalar_mode_supported_p (mode);
}

/* Target hook for vector_mode_supported_p.  */
static bool
rs6000_vector_mode_supported_p (enum machine_mode mode)
{
  if (TARGET_SPE && SPE_VECTOR_MODE (mode))
    return true;
  else if (TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
    return true;
  else
    return false;
}

/* Target hook for invalid_arg_for_unprototyped_fn.  */
static const char *
invalid_arg_for_unprototyped_fn (tree typelist, tree funcdecl, tree val)
{
  return (!rs6000_darwin64_abi
	  && typelist == 0
	  && TREE_CODE (TREE_TYPE (val)) == VECTOR_TYPE
	  && (funcdecl == NULL_TREE
	      || (TREE_CODE (funcdecl) == FUNCTION_DECL
		  && DECL_BUILT_IN_CLASS (funcdecl) != BUILT_IN_MD)))
	 ? N_("AltiVec argument passed to unprototyped function")
	 : NULL;
}

/* For TARGET_SECURE_PLT 32-bit PIC code we can save PIC register
   setup by using __stack_chk_fail_local hidden function instead of
   calling __stack_chk_fail directly.  Otherwise it is better to call
   __stack_chk_fail directly.  */

static tree
rs6000_stack_protect_fail (void)
{
  return (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
	 ? default_hidden_stack_protect_fail ()
	 : default_external_stack_protect_fail ();
}

#include "gt-rs6000.h"