gcc/config/rs6000/rs6000.c
/* Subroutines used for code generation on IBM RS/6000.
   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-attr.h"
#include "flags.h"
#include "recog.h"
#include "obstack.h"
#include "tree.h"
#include "expr.h"
#include "optabs.h"
#include "except.h"
#include "function.h"
#include "output.h"
#include "basic-block.h"
#include "integrate.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"
#include "langhooks.h"
#include "reload.h"
#include "cfglayout.h"
#include "sched-int.h"
#include "gimple.h"
#include "tree-flow.h"
#include "intl.h"
#include "params.h"
#include "tm-constrs.h"
#if TARGET_XCOFF
#include "xcoffout.h"  /* get declarations of xcoff_*_section_name */
#endif
#if TARGET_MACHO
#include "gstab.h"  /* for N_SLINE */
#endif
#ifndef TARGET_NO_PROTOTYPE
#define TARGET_NO_PROTOTYPE 0
#endif

#define min(A,B)	((A) < (B) ? (A) : (B))
#define max(A,B)	((A) > (B) ? (A) : (B))
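/* Note that, as with any function-like macro, both arguments are
   evaluated each time min or max is expanded, so they should only be
   used with side-effect-free operands (e.g. min (i++, n) would
   increment i once or twice depending on the comparison).  */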
/* Structure used to define the rs6000 stack */
typedef struct rs6000_stack {
  int first_gp_reg_save;	/* first callee saved GP register used */
  int first_fp_reg_save;	/* first callee saved FP register used */
  int first_altivec_reg_save;	/* first callee saved AltiVec register used */
  int lr_save_p;		/* true if the link reg needs to be saved */
  int cr_save_p;		/* true if the CR reg needs to be saved */
  unsigned int vrsave_mask;	/* mask of vec registers to save */
  int push_p;			/* true if we need to allocate stack space */
  int calls_p;			/* true if the function makes any calls */
  int world_save_p;		/* true if we're saving *everything*:
				   r13-r31, cr, f14-f31, vrsave, v20-v31  */
  enum rs6000_abi abi;		/* which ABI to use */
  int gp_save_offset;		/* offset to save GP regs from initial SP */
  int fp_save_offset;		/* offset to save FP regs from initial SP */
  int altivec_save_offset;	/* offset to save AltiVec regs from initial SP */
  int lr_save_offset;		/* offset to save LR from initial SP */
  int cr_save_offset;		/* offset to save CR from initial SP */
  int vrsave_save_offset;	/* offset to save VRSAVE from initial SP */
  int spe_gp_save_offset;	/* offset to save spe 64-bit gprs */
  int varargs_save_offset;	/* offset to save the varargs registers */
  int ehrd_offset;		/* offset to EH return data */
  int reg_size;			/* register size (4 or 8) */
  HOST_WIDE_INT vars_size;	/* variable save area size */
  int parm_size;		/* outgoing parameter size */
  int save_size;		/* save area size */
  int fixed_size;		/* fixed size of stack frame */
  int gp_size;			/* size of saved GP registers */
  int fp_size;			/* size of saved FP registers */
  int altivec_size;		/* size of saved AltiVec registers */
  int cr_size;			/* size to hold CR if not in save_size */
  int vrsave_size;		/* size to hold VRSAVE if not in save_size */
  int altivec_padding_size;	/* size of altivec alignment padding if
				   not in save_size */
  int spe_gp_size;		/* size of 64-bit GPR save size for SPE */
  int spe_padding_size;
  HOST_WIDE_INT total_size;	/* total bytes allocated for stack */
  int spe_64bit_regs_used;
} rs6000_stack_t;
/* A C structure for machine-specific, per-function data.
   This is added to the cfun structure.  */
typedef struct machine_function GTY(())
{
  /* Flags if __builtin_return_address (n) with n >= 1 was used.  */
  int ra_needs_full_frame;
  /* Some local-dynamic symbol.  */
  const char *some_ld_name;
  /* Whether the instruction chain has been scanned already.  */
  int insn_chain_scanned_p;
  /* Flags if __builtin_return_address (0) was used.  */
  int ra_need_lr;
  /* Offset from virtual_stack_vars_rtx to the start of the ABI_V4
     varargs save area.  */
  HOST_WIDE_INT varargs_save_offset;
  /* Temporary stack slot to use for SDmode copies.  This slot is
     64-bits wide and is allocated early enough so that the offset
     does not overflow the 16-bit load/store offset field.  */
  rtx sdmode_stack_slot;
} machine_function;
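/* The GTY(()) marker makes this struct known to GCC's garbage
   collector; a fresh, zeroed instance is attached to each function by
   rs6000_init_machine_status (declared below).  */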
/* Target cpu type */

enum processor_type rs6000_cpu;
struct rs6000_cpu_select rs6000_select[3] =
{
  /* switch		name,			tune	arch */
  { (const char *)0,	"--with-cpu=",		1,	1 },
  { (const char *)0,	"-mcpu=",		1,	1 },
  { (const char *)0,	"-mtune=",		1,	0 },
};
/* Always emit branch hint bits.  */
static GTY(()) bool rs6000_always_hint;

/* Schedule instructions for group formation.  */
static GTY(()) bool rs6000_sched_groups;

/* Align branch targets.  */
static GTY(()) bool rs6000_align_branch_targets;

/* Support for -msched-costly-dep option.  */
const char *rs6000_sched_costly_dep_str;
enum rs6000_dependence_cost rs6000_sched_costly_dep;

/* Support for -minsert-sched-nops option.  */
const char *rs6000_sched_insert_nops_str;
enum rs6000_nop_insertion rs6000_sched_insert_nops;

/* Support targetm.vectorize.builtin_mask_for_load.  */
static GTY(()) tree altivec_builtin_mask_for_load;

/* Size of long double.  */
int rs6000_long_double_type_size;

/* IEEE quad extended precision long double.  */
int rs6000_ieeequad;

/* Nonzero to use AltiVec ABI.  */
int rs6000_altivec_abi;

/* Nonzero if we want SPE SIMD instructions.  */
int rs6000_spe;

/* Nonzero if we want SPE ABI extensions.  */
int rs6000_spe_abi;

/* Nonzero to use isel instructions.  */
int rs6000_isel;

/* Nonzero if floating point operations are done in the GPRs.  */
int rs6000_float_gprs = 0;

/* Nonzero if we want Darwin's struct-by-value-in-regs ABI.  */
int rs6000_darwin64_abi;

/* Set to nonzero once AIX common-mode calls have been defined.  */
static GTY(()) int common_mode_defined;

/* Save information from a "cmpxx" operation until the branch or scc is
   emitted.  */
rtx rs6000_compare_op0, rs6000_compare_op1;
int rs6000_compare_fp_p;

/* Label number of label created for -mrelocatable, to call to so we can
   get the address of the GOT section */
int rs6000_pic_labelno;

#ifdef USING_ELFOS_H
/* Which abi to adhere to */
const char *rs6000_abi_name;

/* Semantics of the small data area */
enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;

/* Which small data model to use */
const char *rs6000_sdata_name = (char *)0;

/* Counter for labels which are to be placed in .fixup.  */
int fixuplabelno = 0;
#endif

/* Bit size of immediate TLS offsets and string from which it is decoded.  */
int rs6000_tls_size = 32;
const char *rs6000_tls_size_string;

/* ABI enumeration available for subtarget to use.  */
enum rs6000_abi rs6000_current_abi;

/* Whether to use variant of AIX ABI for PowerPC64 Linux.  */
int dot_symbols;

/* Debug flags */
const char *rs6000_debug_name;
int rs6000_debug_stack;		/* debug stack applications */
int rs6000_debug_arg;		/* debug argument handling */

/* Value is TRUE if register/mode pair is acceptable.  */
bool rs6000_hard_regno_mode_ok_p[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

/* Built in types.  */

tree rs6000_builtin_types[RS6000_BTI_MAX];
tree rs6000_builtin_decls[RS6000_BUILTIN_COUNT];

const char *rs6000_traceback_name;
static enum {
  traceback_default = 0,
  traceback_none,
  traceback_part,
  traceback_full
} rs6000_traceback;

/* Flag to say the TOC is initialized */
int toc_initialized;
char toc_label_name[10];

/* Cached value of rs6000_variable_issue. This is cached in
   rs6000_variable_issue hook and returned from rs6000_sched_reorder2.  */
static short cached_can_issue_more;

static GTY(()) section *read_only_data_section;
static GTY(()) section *private_data_section;
static GTY(()) section *read_only_private_data_section;
static GTY(()) section *sdata2_section;
static GTY(()) section *toc_section;

/* Control alignment for fields within structures.  */
/* String from -malign-XXXXX.  */
int rs6000_alignment_flags;

/* True for any options that were explicitly set.  */
struct {
  bool aix_struct_ret;		/* True if -maix-struct-ret was used.  */
  bool alignment;		/* True if -malign- was used.  */
  bool spe_abi;			/* True if -mabi=spe/no-spe was used.  */
  bool altivec_abi;		/* True if -mabi=altivec/no-altivec used.  */
  bool spe;			/* True if -mspe= was used.  */
  bool float_gprs;		/* True if -mfloat-gprs= was used.  */
  bool isel;			/* True if -misel was used.  */
  bool long_double;		/* True if -mlong-double- was used.  */
  bool ieee;			/* True if -mabi=ieee/ibmlongdouble used.  */
  bool vrsave;			/* True if -mvrsave was used.  */
} rs6000_explicit_options;
struct builtin_description
{
  /* mask is not const because we're going to alter it below.  This
     nonsense will go away when we rewrite the -march infrastructure
     to give us more target flag bits.  */
  unsigned int mask;
  const enum insn_code icode;
  const char *const name;
  const enum rs6000_builtins code;
};
/* Target cpu costs.  */

struct processor_costs {
  const int mulsi;	  /* cost of SImode multiplication.  */
  const int mulsi_const;  /* cost of SImode multiplication by constant.  */
  const int mulsi_const9; /* cost of SImode mult by short constant.  */
  const int muldi;	  /* cost of DImode multiplication.  */
  const int divsi;	  /* cost of SImode division.  */
  const int divdi;	  /* cost of DImode division.  */
  const int fp;		  /* cost of simple SFmode and DFmode insns.  */
  const int dmul;	  /* cost of DFmode multiplication (and fmadd).  */
  const int sdiv;	  /* cost of SFmode division (fdivs).  */
  const int ddiv;	  /* cost of DFmode division (fdiv).  */
  const int cache_line_size;    /* cache line size in bytes.  */
  const int l1_cache_size;	/* size of l1 cache, in kilobytes.  */
  const int l2_cache_size;	/* size of l2 cache, in kilobytes.  */
  const int simultaneous_prefetches; /* number of parallel prefetch
					operations.  */
};

const struct processor_costs *rs6000_cost;
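/* rs6000_cost is pointed at one of the cost tables below, according to
   the processor being tuned for (see rs6000_override_options), and is
   consulted by hooks such as rs6000_rtx_costs when weighing alternative
   instruction sequences.  */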
/* Processor costs (relative to an add) */

/* Instruction size costs on 32bit processors.  */
static const
struct processor_costs size32_cost = {
  COSTS_N_INSNS (1),    /* mulsi */
  COSTS_N_INSNS (1),    /* mulsi_const */
  COSTS_N_INSNS (1),    /* mulsi_const9 */
  COSTS_N_INSNS (1),    /* muldi */
  COSTS_N_INSNS (1),    /* divsi */
  COSTS_N_INSNS (1),    /* divdi */
  COSTS_N_INSNS (1),    /* fp */
  COSTS_N_INSNS (1),    /* dmul */
  COSTS_N_INSNS (1),    /* sdiv */
  COSTS_N_INSNS (1),    /* ddiv */
  32,                   /* cache line size */
  0,                    /* l1 cache */
  0,                    /* l2 cache */
  0,                    /* streams */
};
/* Instruction size costs on 64bit processors.  */
static const
struct processor_costs size64_cost = {
  COSTS_N_INSNS (1),    /* mulsi */
  COSTS_N_INSNS (1),    /* mulsi_const */
  COSTS_N_INSNS (1),    /* mulsi_const9 */
  COSTS_N_INSNS (1),    /* muldi */
  COSTS_N_INSNS (1),    /* divsi */
  COSTS_N_INSNS (1),    /* divdi */
  COSTS_N_INSNS (1),    /* fp */
  COSTS_N_INSNS (1),    /* dmul */
  COSTS_N_INSNS (1),    /* sdiv */
  COSTS_N_INSNS (1),    /* ddiv */
  128,                  /* cache line size */
  0,                    /* l1 cache */
  0,                    /* l2 cache */
  0,                    /* streams */
};
/* Instruction costs on RIOS1 processors.  */
static const
struct processor_costs rios1_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (19),   /* divsi */
  COSTS_N_INSNS (19),   /* divdi */
  COSTS_N_INSNS (2),    /* fp */
  COSTS_N_INSNS (2),    /* dmul */
  COSTS_N_INSNS (19),   /* sdiv */
  COSTS_N_INSNS (19),   /* ddiv */
  128,                  /* cache line size */
  64,                   /* l1 cache */
  512,                  /* l2 cache */
  0,                    /* streams */
};
/* Instruction costs on RIOS2 processors.  */
static const
struct processor_costs rios2_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (13),   /* divsi */
  COSTS_N_INSNS (13),   /* divdi */
  COSTS_N_INSNS (2),    /* fp */
  COSTS_N_INSNS (2),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
  256,                  /* cache line size */
  256,                  /* l1 cache */
  1024,                 /* l2 cache */
  0,                    /* streams */
};
/* Instruction costs on RS64A processors.  */
static const
struct processor_costs rs64a_cost = {
  COSTS_N_INSNS (20),   /* mulsi */
  COSTS_N_INSNS (12),   /* mulsi_const */
  COSTS_N_INSNS (8),    /* mulsi_const9 */
  COSTS_N_INSNS (34),   /* muldi */
  COSTS_N_INSNS (65),   /* divsi */
  COSTS_N_INSNS (67),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (31),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
  128,                  /* cache line size */
  128,                  /* l1 cache */
  2048,                 /* l2 cache */
  1,                    /* streams */
};
/* Instruction costs on MPCCORE processors.  */
static const
struct processor_costs mpccore_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (6),    /* divsi */
  COSTS_N_INSNS (6),    /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (10),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
  32,                   /* cache line size */
  4,                    /* l1 cache */
  16,                   /* l2 cache */
  1,                    /* streams */
};
/* Instruction costs on PPC403 processors.  */
static const
struct processor_costs ppc403_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (33),   /* divsi */
  COSTS_N_INSNS (33),   /* divdi */
  COSTS_N_INSNS (11),   /* fp */
  COSTS_N_INSNS (11),   /* dmul */
  COSTS_N_INSNS (11),   /* sdiv */
  COSTS_N_INSNS (11),   /* ddiv */
  32,                   /* cache line size */
  4,                    /* l1 cache */
  16,                   /* l2 cache */
  1,                    /* streams */
};
/* Instruction costs on PPC405 processors.  */
static const
struct processor_costs ppc405_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (35),   /* divsi */
  COSTS_N_INSNS (35),   /* divdi */
  COSTS_N_INSNS (11),   /* fp */
  COSTS_N_INSNS (11),   /* dmul */
  COSTS_N_INSNS (11),   /* sdiv */
  COSTS_N_INSNS (11),   /* ddiv */
  32,                   /* cache line size */
  16,                   /* l1 cache */
  128,                  /* l2 cache */
  1,                    /* streams */
};
/* Instruction costs on PPC440 processors.  */
static const
struct processor_costs ppc440_cost = {
  COSTS_N_INSNS (3),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (3),    /* muldi */
  COSTS_N_INSNS (34),   /* divsi */
  COSTS_N_INSNS (34),   /* divdi */
  COSTS_N_INSNS (5),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (19),   /* sdiv */
  COSTS_N_INSNS (33),   /* ddiv */
  32,                   /* cache line size */
  32,                   /* l1 cache */
  256,                  /* l2 cache */
  1,                    /* streams */
};
/* Instruction costs on PPC601 processors.  */
static const
struct processor_costs ppc601_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (5),    /* mulsi_const */
  COSTS_N_INSNS (5),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (36),   /* divsi */
  COSTS_N_INSNS (36),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
  32,                   /* cache line size */
  32,                   /* l1 cache */
  256,                  /* l2 cache */
  1,                    /* streams */
};
/* Instruction costs on PPC603 processors.  */
static const
struct processor_costs ppc603_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (37),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (33),   /* ddiv */
  32,                   /* cache line size */
  8,                    /* l1 cache */
  64,                   /* l2 cache */
  1,                    /* streams */
};
/* Instruction costs on PPC604 processors.  */
static const
struct processor_costs ppc604_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (20),   /* divsi */
  COSTS_N_INSNS (20),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
  32,                   /* cache line size */
  16,                   /* l1 cache */
  512,                  /* l2 cache */
  1,                    /* streams */
};
/* Instruction costs on PPC604e processors.  */
static const
struct processor_costs ppc604e_cost = {
  COSTS_N_INSNS (2),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (2),    /* muldi */
  COSTS_N_INSNS (20),   /* divsi */
  COSTS_N_INSNS (20),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
  32,                   /* cache line size */
  32,                   /* l1 cache */
  1024,                 /* l2 cache */
  1,                    /* streams */
};
/* Instruction costs on PPC620 processors.  */
static const
struct processor_costs ppc620_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (7),    /* muldi */
  COSTS_N_INSNS (21),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (32),   /* ddiv */
  128,                  /* cache line size */
  32,                   /* l1 cache */
  1024,                 /* l2 cache */
  1,                    /* streams */
};
/* Instruction costs on PPC630 processors.  */
static const
struct processor_costs ppc630_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (7),    /* muldi */
  COSTS_N_INSNS (21),   /* divsi */
  COSTS_N_INSNS (37),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (21),   /* ddiv */
  128,                  /* cache line size */
  64,                   /* l1 cache */
  1024,                 /* l2 cache */
  1,                    /* streams */
};
/* Instruction costs on Cell processor.  */
/* COSTS_N_INSNS (1) ~ one add.  */
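/* The divisions below are C integer divisions, so COSTS_N_INSNS (9/2)
   is COSTS_N_INSNS (4): raw latencies are halved (presumably to scale
   them to the Cell PPU's dual-issue pipeline) with truncation toward
   zero.  */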
static const
struct processor_costs ppccell_cost = {
  COSTS_N_INSNS (9/2)+2,    /* mulsi */
  COSTS_N_INSNS (6/2),      /* mulsi_const */
  COSTS_N_INSNS (6/2),      /* mulsi_const9 */
  COSTS_N_INSNS (15/2)+2,   /* muldi */
  COSTS_N_INSNS (38/2),     /* divsi */
  COSTS_N_INSNS (70/2),     /* divdi */
  COSTS_N_INSNS (10/2),     /* fp */
  COSTS_N_INSNS (10/2),     /* dmul */
  COSTS_N_INSNS (74/2),     /* sdiv */
  COSTS_N_INSNS (74/2),     /* ddiv */
  128,                  /* cache line size */
  32,                   /* l1 cache */
  512,                  /* l2 cache */
  6,                    /* streams */
};
/* Instruction costs on PPC750 and PPC7400 processors.  */
static const
struct processor_costs ppc750_cost = {
  COSTS_N_INSNS (5),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (5),    /* muldi */
  COSTS_N_INSNS (17),   /* divsi */
  COSTS_N_INSNS (17),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (31),   /* ddiv */
  32,                   /* cache line size */
  32,                   /* l1 cache */
  512,                  /* l2 cache */
  1,                    /* streams */
};
/* Instruction costs on PPC7450 processors.  */
static const
struct processor_costs ppc7450_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (3),    /* mulsi_const */
  COSTS_N_INSNS (3),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (23),   /* divsi */
  COSTS_N_INSNS (23),   /* divdi */
  COSTS_N_INSNS (5),    /* fp */
  COSTS_N_INSNS (5),    /* dmul */
  COSTS_N_INSNS (21),   /* sdiv */
  COSTS_N_INSNS (35),   /* ddiv */
  32,                   /* cache line size */
  32,                   /* l1 cache */
  1024,                 /* l2 cache */
  1,                    /* streams */
};
/* Instruction costs on PPC8540 processors.  */
static const
struct processor_costs ppc8540_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (19),   /* divsi */
  COSTS_N_INSNS (19),   /* divdi */
  COSTS_N_INSNS (4),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (29),   /* sdiv */
  COSTS_N_INSNS (29),   /* ddiv */
  32,                   /* cache line size */
  32,                   /* l1 cache */
  256,                  /* l2 cache */
  1,                    /* prefetch streams */
};
/* Instruction costs on E300C2 and E300C3 cores.  */
static const
struct processor_costs ppce300c2c3_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (19),   /* divsi */
  COSTS_N_INSNS (19),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (4),    /* dmul */
  COSTS_N_INSNS (18),   /* sdiv */
  COSTS_N_INSNS (33),   /* ddiv */
  32,                   /* cache line size */
  16,                   /* l1 cache */
  16,                   /* l2 cache */
  1,                    /* prefetch streams */
};
/* Instruction costs on PPCE500MC processors.  */
static const
struct processor_costs ppce500mc_cost = {
  COSTS_N_INSNS (4),    /* mulsi */
  COSTS_N_INSNS (4),    /* mulsi_const */
  COSTS_N_INSNS (4),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (14),   /* divsi */
  COSTS_N_INSNS (14),   /* divdi */
  COSTS_N_INSNS (8),    /* fp */
  COSTS_N_INSNS (10),   /* dmul */
  COSTS_N_INSNS (36),   /* sdiv */
  COSTS_N_INSNS (66),   /* ddiv */
  64,                   /* cache line size */
  32,                   /* l1 cache */
  128,                  /* l2 cache */
  1,                    /* prefetch streams */
};
/* Instruction costs on POWER4 and POWER5 processors.  */
static const
struct processor_costs power4_cost = {
  COSTS_N_INSNS (3),    /* mulsi */
  COSTS_N_INSNS (2),    /* mulsi_const */
  COSTS_N_INSNS (2),    /* mulsi_const9 */
  COSTS_N_INSNS (4),    /* muldi */
  COSTS_N_INSNS (18),   /* divsi */
  COSTS_N_INSNS (34),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (17),   /* sdiv */
  COSTS_N_INSNS (17),   /* ddiv */
  128,                  /* cache line size */
  32,                   /* l1 cache */
  1024,                 /* l2 cache */
  8,                    /* prefetch streams */
};
/* Instruction costs on POWER6 processors.  */
static const
struct processor_costs power6_cost = {
  COSTS_N_INSNS (8),    /* mulsi */
  COSTS_N_INSNS (8),    /* mulsi_const */
  COSTS_N_INSNS (8),    /* mulsi_const9 */
  COSTS_N_INSNS (8),    /* muldi */
  COSTS_N_INSNS (22),   /* divsi */
  COSTS_N_INSNS (28),   /* divdi */
  COSTS_N_INSNS (3),    /* fp */
  COSTS_N_INSNS (3),    /* dmul */
  COSTS_N_INSNS (13),   /* sdiv */
  COSTS_N_INSNS (16),   /* ddiv */
  128,                  /* cache line size */
  64,                   /* l1 cache */
  2048,                 /* l2 cache */
  16,                   /* prefetch streams */
};
static bool rs6000_function_ok_for_sibcall (tree, tree);
static const char *rs6000_invalid_within_doloop (const_rtx);
static rtx rs6000_generate_compare (enum rtx_code);
static void rs6000_emit_stack_tie (void);
static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
static bool spe_func_has_64bit_regs_p (void);
static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
			     int, HOST_WIDE_INT);
static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int, int);
static unsigned rs6000_hash_constant (rtx);
static unsigned toc_hash_function (const void *);
static int toc_hash_eq (const void *, const void *);
static bool constant_pool_expr_p (rtx);
static bool legitimate_small_data_p (enum machine_mode, rtx);
static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
static struct machine_function * rs6000_init_machine_status (void);
static bool rs6000_assemble_integer (rtx, unsigned int, int);
static bool no_global_regs_above (int, bool);
#ifdef HAVE_GAS_HIDDEN
static void rs6000_assemble_visibility (tree, int);
#endif
static int rs6000_ra_ever_killed (void);
static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
static bool rs6000_ms_bitfield_layout_p (const_tree);
static tree rs6000_handle_struct_attribute (tree *, tree, tree, int, bool *);
static void rs6000_eliminate_indexed_memrefs (rtx operands[2]);
static const char *rs6000_mangle_type (const_tree);
extern const struct attribute_spec rs6000_attribute_table[];
static void rs6000_set_default_type_attributes (tree);
static rtx rs6000_savres_routine_sym (rs6000_stack_t *, bool, bool, bool);
static void rs6000_emit_stack_reset (rs6000_stack_t *, rtx, rtx, int, bool);
static rtx rs6000_make_savres_rtx (rs6000_stack_t *, rtx, int,
				   enum machine_mode, bool, bool, bool);
static bool rs6000_reg_live_or_pic_offset_p (int);
static int rs6000_savres_strategy (rs6000_stack_t *, bool, int, int);
static void rs6000_restore_saved_cr (rtx, int);
static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
				    tree);
static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
static bool rs6000_return_in_memory (const_tree, const_tree);
static void rs6000_file_start (void);
#if TARGET_ELF
static int rs6000_elf_reloc_rw_mask (void);
static void rs6000_elf_asm_out_constructor (rtx, int);
static void rs6000_elf_asm_out_destructor (rtx, int);
static void rs6000_elf_end_indicate_exec_stack (void) ATTRIBUTE_UNUSED;
static void rs6000_elf_asm_init_sections (void);
static section *rs6000_elf_select_rtx_section (enum machine_mode, rtx,
					       unsigned HOST_WIDE_INT);
static void rs6000_elf_encode_section_info (tree, rtx, int)
     ATTRIBUTE_UNUSED;
#endif
static bool rs6000_use_blocks_for_constant_p (enum machine_mode, const_rtx);
static void rs6000_alloc_sdmode_stack_slot (void);
static void rs6000_instantiate_decls (void);
#if TARGET_XCOFF
static void rs6000_xcoff_asm_output_anchor (rtx);
static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
static void rs6000_xcoff_asm_init_sections (void);
static int rs6000_xcoff_reloc_rw_mask (void);
static void rs6000_xcoff_asm_named_section (const char *, unsigned int, tree);
static section *rs6000_xcoff_select_section (tree, int,
					     unsigned HOST_WIDE_INT);
static void rs6000_xcoff_unique_section (tree, int);
static section *rs6000_xcoff_select_rtx_section
  (enum machine_mode, rtx, unsigned HOST_WIDE_INT);
static const char * rs6000_xcoff_strip_name_encoding (const char *);
static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
static void rs6000_xcoff_file_start (void);
static void rs6000_xcoff_file_end (void);
#endif
static int rs6000_variable_issue (FILE *, int, rtx, int);
static bool rs6000_rtx_costs (rtx, int, int, int *, bool);
static int rs6000_adjust_cost (rtx, rtx, rtx, int);
static void rs6000_sched_init (FILE *, int, int);
static bool is_microcoded_insn (rtx);
static bool is_nonpipeline_insn (rtx);
static bool is_cracked_insn (rtx);
static bool is_branch_slot_insn (rtx);
static bool is_load_insn (rtx);
static rtx get_store_dest (rtx pat);
static bool is_store_insn (rtx);
static bool set_to_load_agen (rtx,rtx);
static bool adjacent_mem_locations (rtx,rtx);
static int rs6000_adjust_priority (rtx, int);
static int rs6000_issue_rate (void);
static bool rs6000_is_costly_dependence (dep_t, int, int);
static rtx get_next_active_insn (rtx, rtx);
static bool insn_terminates_group_p (rtx , enum group_termination);
static bool insn_must_be_first_in_group (rtx);
static bool insn_must_be_last_in_group (rtx);
static bool is_costly_group (rtx *, rtx);
static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
static int redefine_groups (FILE *, int, rtx, rtx);
static int pad_groups (FILE *, int, rtx, rtx);
static void rs6000_sched_finish (FILE *, int);
static int rs6000_sched_reorder (FILE *, int, rtx *, int *, int);
static int rs6000_sched_reorder2 (FILE *, int, rtx *, int *, int);
static int rs6000_use_sched_lookahead (void);
static int rs6000_use_sched_lookahead_guard (rtx);
static void * rs6000_alloc_sched_context (void);
static void rs6000_init_sched_context (void *, bool);
static void rs6000_set_sched_context (void *);
static void rs6000_free_sched_context (void *);
static tree rs6000_builtin_reciprocal (unsigned int, bool, bool);
static tree rs6000_builtin_mask_for_load (void);
static tree rs6000_builtin_mul_widen_even (tree);
static tree rs6000_builtin_mul_widen_odd (tree);
static tree rs6000_builtin_conversion (enum tree_code, tree);
static tree rs6000_builtin_vec_perm (tree, tree *);

static void def_builtin (int, const char *, tree, int);
static bool rs6000_vector_alignment_reachable (const_tree, bool);
static void rs6000_init_builtins (void);
static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
static void altivec_init_builtins (void);
static void rs6000_common_init_builtins (void);
static void rs6000_init_libfuncs (void);

static void paired_init_builtins (void);
static rtx paired_expand_builtin (tree, rtx, bool *);
static rtx paired_expand_lv_builtin (enum insn_code, tree, rtx);
static rtx paired_expand_stv_builtin (enum insn_code, tree);
static rtx paired_expand_predicate_builtin (enum insn_code, tree, rtx);

static void enable_mask_for_builtins (struct builtin_description *, int,
				      enum rs6000_builtins,
				      enum rs6000_builtins);
static tree build_opaque_vector_type (tree, int);
static void spe_init_builtins (void);
static rtx spe_expand_builtin (tree, rtx, bool *);
static rtx spe_expand_stv_builtin (enum insn_code, tree);
static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
static rs6000_stack_t *rs6000_stack_info (void);
static void debug_stack_info (rs6000_stack_t *);

static rtx altivec_expand_builtin (tree, rtx, bool *);
static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
static rtx altivec_expand_st_builtin (tree, rtx, bool *);
static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
static rtx altivec_expand_predicate_builtin (enum insn_code,
					     const char *, tree, rtx);
static rtx altivec_expand_stv_builtin (enum insn_code, tree);
static rtx altivec_expand_vec_init_builtin (tree, tree, rtx);
static rtx altivec_expand_vec_set_builtin (tree);
static rtx altivec_expand_vec_ext_builtin (tree, rtx);
static int get_element_number (tree, tree);
static bool rs6000_handle_option (size_t, const char *, int);
static void rs6000_parse_tls_size_option (void);
static void rs6000_parse_yes_no_option (const char *, const char *, int *);
static int first_altivec_reg_to_save (void);
static unsigned int compute_vrsave_mask (void);
static void compute_save_world_info (rs6000_stack_t *info_ptr);
static void is_altivec_return_reg (rtx, void *);
static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
int easy_vector_constant (rtx, enum machine_mode);
static bool rs6000_is_opaque_type (const_tree);
static rtx rs6000_dwarf_register_span (rtx);
static void rs6000_init_dwarf_reg_sizes_extra (tree);
static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
static void rs6000_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
static rtx rs6000_tls_get_addr (void);
static rtx rs6000_got_sym (void);
static int rs6000_tls_symbol_ref_1 (rtx *, void *);
static const char *rs6000_get_some_local_dynamic_name (void);
static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
static rtx rs6000_complex_function_value (enum machine_mode);
static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
				    enum machine_mode, tree);
static void rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *,
						      HOST_WIDE_INT);
static void rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *,
							tree, HOST_WIDE_INT);
static void rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *,
					      HOST_WIDE_INT,
					      rtx[], int *);
static void rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *,
						const_tree, HOST_WIDE_INT,
						rtx[], int *);
static rtx rs6000_darwin64_record_arg (CUMULATIVE_ARGS *, const_tree, int, bool);
static rtx rs6000_mixed_function_arg (enum machine_mode, tree, int);
static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
static void setup_incoming_varargs (CUMULATIVE_ARGS *,
				    enum machine_mode, tree,
				    int *, int);
static bool rs6000_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
				      const_tree, bool);
static int rs6000_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
				     tree, bool);
static const char *invalid_arg_for_unprototyped_fn (const_tree, const_tree, const_tree);
#if TARGET_MACHO
static void macho_branch_islands (void);
static int no_previous_def (tree function_name);
static tree get_prev_label (tree function_name);
static void rs6000_darwin_file_start (void);
#endif

static tree rs6000_build_builtin_va_list (void);
static void rs6000_va_start (tree, rtx);
static tree rs6000_gimplify_va_arg (tree, tree, gimple_seq *, gimple_seq *);
static bool rs6000_must_pass_in_stack (enum machine_mode, const_tree);
static bool rs6000_scalar_mode_supported_p (enum machine_mode);
static bool rs6000_vector_mode_supported_p (enum machine_mode);
static int get_vec_cmp_insn (enum rtx_code, enum machine_mode,
			     enum machine_mode);
static rtx rs6000_emit_vector_compare (enum rtx_code, rtx, rtx,
				       enum machine_mode);
static int get_vsel_insn (enum machine_mode);
static void rs6000_emit_vector_select (rtx, rtx, rtx, rtx);
static tree rs6000_stack_protect_fail (void);
const int INSN_NOT_AVAILABLE = -1;
static enum machine_mode rs6000_eh_return_filter_mode (void);

/* Hash table stuff for keeping track of TOC entries.  */

struct toc_hash_struct GTY(())
{
  /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
     ASM_OUTPUT_SPECIAL_POOL_ENTRY_P.  */
  rtx key;
  enum machine_mode key_mode;
  int labelno;
};

static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
/* Default register names.  */
char rs6000_reg_names[][8] =
{
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
     "mq", "lr", "ctr","ap",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "xer",
      /* AltiVec registers.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
      "vrsave", "vscr",
      /* SPE registers.  */
      "spe_acc", "spefscr",
      /* Soft frame pointer.  */
      "sfp"
};
#ifdef TARGET_REGNAMES
static const char alt_reg_names[][8] =
{
   "%r0",  "%r1",  "%r2",  "%r3",  "%r4",  "%r5",  "%r6",  "%r7",
   "%r8",  "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
  "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
  "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
   "%f0",  "%f1",  "%f2",  "%f3",  "%f4",  "%f5",  "%f6",  "%f7",
   "%f8",  "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
  "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
  "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
    "mq",   "lr",  "ctr",   "ap",
  "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
   "xer",
  /* AltiVec registers.  */
   "%v0",  "%v1",  "%v2",  "%v3",  "%v4",  "%v5",  "%v6",  "%v7",
   "%v8",  "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
  "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
  "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
  "vrsave", "vscr",
  /* SPE registers.  */
  "spe_acc", "spefscr",
  /* Soft frame pointer.  */
  "sfp"
};
#endif
#ifndef MASK_STRICT_ALIGN
#define MASK_STRICT_ALIGN 0
#endif
#ifndef TARGET_PROFILE_KERNEL
#define TARGET_PROFILE_KERNEL 0
#endif

/* The VRSAVE bitmask puts bit %v0 as the most significant bit.  */
#define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
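/* For example, ALTIVEC_REG_BIT (FIRST_ALTIVEC_REGNO) is 0x80000000
   (%v0) and ALTIVEC_REG_BIT (FIRST_ALTIVEC_REGNO + 31) is 0x00000001
   (%v31).  */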
/* Initialize the GCC target structure.  */
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes

#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP

/* Default unaligned ops are only provided for ELF.  Find the ops needed
   for non-ELF systems.  */
#ifndef OBJECT_FORMAT_ELF
#if TARGET_XCOFF
/* For XCOFF.  rs6000_assemble_integer will handle unaligned DIs on
   64-bit targets.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
#else
/* For Darwin.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.quad\t"
#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
#endif
#endif
/* This hook deals with fixups for relocatable code and DI-mode objects
   in 64-bit code.  */
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER rs6000_assemble_integer

#ifdef HAVE_GAS_HIDDEN
#undef TARGET_ASM_ASSEMBLE_VISIBILITY
#define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
#endif

#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS HAVE_AS_TLS

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue

#undef TARGET_SCHED_VARIABLE_ISSUE
#define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
#undef TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
#undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
#define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
#undef TARGET_SCHED_INIT
#define TARGET_SCHED_INIT rs6000_sched_init
#undef TARGET_SCHED_FINISH
#define TARGET_SCHED_FINISH rs6000_sched_finish
#undef TARGET_SCHED_REORDER
#define TARGET_SCHED_REORDER rs6000_sched_reorder
#undef TARGET_SCHED_REORDER2
#define TARGET_SCHED_REORDER2 rs6000_sched_reorder2

#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead

#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD_GUARD rs6000_use_sched_lookahead_guard

#undef TARGET_SCHED_ALLOC_SCHED_CONTEXT
#define TARGET_SCHED_ALLOC_SCHED_CONTEXT rs6000_alloc_sched_context
#undef TARGET_SCHED_INIT_SCHED_CONTEXT
#define TARGET_SCHED_INIT_SCHED_CONTEXT rs6000_init_sched_context
#undef TARGET_SCHED_SET_SCHED_CONTEXT
#define TARGET_SCHED_SET_SCHED_CONTEXT rs6000_set_sched_context
#undef TARGET_SCHED_FREE_SCHED_CONTEXT
#define TARGET_SCHED_FREE_SCHED_CONTEXT rs6000_free_sched_context

#undef TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD
#define TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD rs6000_builtin_mask_for_load
#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN
#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN rs6000_builtin_mul_widen_even
#undef TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD
#define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD rs6000_builtin_mul_widen_odd
#undef TARGET_VECTORIZE_BUILTIN_CONVERSION
#define TARGET_VECTORIZE_BUILTIN_CONVERSION rs6000_builtin_conversion
#undef TARGET_VECTORIZE_BUILTIN_VEC_PERM
#define TARGET_VECTORIZE_BUILTIN_VEC_PERM rs6000_builtin_vec_perm
#undef TARGET_VECTOR_ALIGNMENT_REACHABLE
#define TARGET_VECTOR_ALIGNMENT_REACHABLE rs6000_vector_alignment_reachable

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS rs6000_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN rs6000_expand_builtin

#undef TARGET_MANGLE_TYPE
#define TARGET_MANGLE_TYPE rs6000_mangle_type

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs

#if TARGET_MACHO
#undef TARGET_BINDS_LOCAL_P
#define TARGET_BINDS_LOCAL_P darwin_binds_local_p
#endif

#undef TARGET_MS_BITFIELD_LAYOUT_P
#define TARGET_MS_BITFIELD_LAYOUT_P rs6000_ms_bitfield_layout_p

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk

#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall

#undef TARGET_INVALID_WITHIN_DOLOOP
#define TARGET_INVALID_WITHIN_DOLOOP rs6000_invalid_within_doloop

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS rs6000_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_bool_0

#undef TARGET_VECTOR_OPAQUE_P
#define TARGET_VECTOR_OPAQUE_P rs6000_is_opaque_type

#undef TARGET_DWARF_REGISTER_SPAN
#define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span

#undef TARGET_INIT_DWARF_REG_SIZES_EXTRA
#define TARGET_INIT_DWARF_REG_SIZES_EXTRA rs6000_init_dwarf_reg_sizes_extra

/* On rs6000, function arguments are promoted, as are function return
   values.  */
#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_const_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_const_tree_true

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs

/* Always strict argument naming on rs6000.  */
#undef TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
#undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
#define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
#undef TARGET_SPLIT_COMPLEX_ARG
#define TARGET_SPLIT_COMPLEX_ARG hook_bool_const_tree_true
#undef TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK rs6000_must_pass_in_stack
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE rs6000_pass_by_reference
#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES rs6000_arg_partial_bytes

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list

#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START rs6000_va_start

#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR rs6000_gimplify_va_arg

#undef TARGET_EH_RETURN_FILTER_MODE
#define TARGET_EH_RETURN_FILTER_MODE rs6000_eh_return_filter_mode

#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P rs6000_scalar_mode_supported_p

#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P rs6000_vector_mode_supported_p

#undef TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN
#define TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN invalid_arg_for_unprototyped_fn

#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION rs6000_handle_option

#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS \
  (TARGET_DEFAULT)

#undef TARGET_STACK_PROTECT_FAIL
#define TARGET_STACK_PROTECT_FAIL rs6000_stack_protect_fail

/* MPC604EUM 3.5.2 Weak Consistency between Multiple Processors
   The PowerPC architecture requires only weak consistency among
   processors--that is, memory accesses between processors need not be
   sequentially consistent and memory accesses among processors can occur
   in any order.  The ability to order memory accesses weakly provides
   opportunities for more efficient use of the system bus.  Unless a
   dependency exists, the 604e allows read operations to precede store
   operations.  */
#undef TARGET_RELAXED_ORDERING
#define TARGET_RELAXED_ORDERING true
#ifdef HAVE_AS_TLS
#undef TARGET_ASM_OUTPUT_DWARF_DTPREL
#define TARGET_ASM_OUTPUT_DWARF_DTPREL rs6000_output_dwarf_dtprel
#endif

/* Use a 32-bit anchor range.  This leads to sequences like:

	addis	tmp,anchor,high
	add	dest,tmp,low

   where tmp itself acts as an anchor, and can be shared between
   accesses to the same 64k page.  */
#undef TARGET_MIN_ANCHOR_OFFSET
#define TARGET_MIN_ANCHOR_OFFSET -0x7fffffff - 1
#undef TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET 0x7fffffff
#undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
#define TARGET_USE_BLOCKS_FOR_CONSTANT_P rs6000_use_blocks_for_constant_p

#undef TARGET_BUILTIN_RECIPROCAL
#define TARGET_BUILTIN_RECIPROCAL rs6000_builtin_reciprocal

#undef TARGET_EXPAND_TO_RTL_HOOK
#define TARGET_EXPAND_TO_RTL_HOOK rs6000_alloc_sdmode_stack_slot

#undef TARGET_INSTANTIATE_DECLS
#define TARGET_INSTANTIATE_DECLS rs6000_instantiate_decls
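/* Every #undef/#define pair above replaces a default hook value baked
   into TARGET_INITIALIZER by target-def.h, so this single initializer
   picks up all of the rs6000-specific hooks.  */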
struct gcc_target targetm = TARGET_INITIALIZER;
/* Value is 1 if hard register REGNO can hold a value of machine-mode
   MODE.  */
static int
rs6000_hard_regno_mode_ok (int regno, enum machine_mode mode)
{
  /* The GPRs can hold any mode, but values bigger than one register
     cannot go past R31.  */
  if (INT_REGNO_P (regno))
    return INT_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1);

  /* The float registers can only hold floating modes and DImode.
     This excludes the 32-bit decimal float mode for now.  */
  if (FP_REGNO_P (regno))
    return
      ((SCALAR_FLOAT_MODE_P (mode)
	&& (mode != TDmode || (regno % 2) == 0)
	&& FP_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1))
       || (GET_MODE_CLASS (mode) == MODE_INT
	   && GET_MODE_SIZE (mode) == UNITS_PER_FP_WORD)
       || (PAIRED_SIMD_REGNO_P (regno) && TARGET_PAIRED_FLOAT
	   && PAIRED_VECTOR_MODE (mode)));

  /* The CR register can only hold CC modes.  */
  if (CR_REGNO_P (regno))
    return GET_MODE_CLASS (mode) == MODE_CC;

  if (XER_REGNO_P (regno))
    return mode == PSImode;

  /* AltiVec only in AltiVec registers.  */
  if (ALTIVEC_REGNO_P (regno))
    return ALTIVEC_VECTOR_MODE (mode);

  /* ...but GPRs can hold SIMD data on the SPE in one register.  */
  if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
    return 1;

  /* We cannot put TImode anywhere except general register and it must be
     able to fit within the register set.  */

  return GET_MODE_SIZE (mode) <= UNITS_PER_WORD;
}
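/* The answers are precomputed into the rs6000_hard_regno_mode_ok_p
   table below so that the frequently-consulted HARD_REGNO_MODE_OK
   target macro can be a simple array lookup instead of a function
   call.  */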
/* Initialize rs6000_hard_regno_mode_ok_p table.  */
static void
rs6000_init_hard_regno_mode_ok (void)
{
  int r, m;

  for (r = 0; r < FIRST_PSEUDO_REGISTER; ++r)
    for (m = 0; m < NUM_MACHINE_MODES; ++m)
      if (rs6000_hard_regno_mode_ok (r, m))
	rs6000_hard_regno_mode_ok_p[m][r] = true;
}
#if TARGET_MACHO
/* The Darwin version of SUBTARGET_OVERRIDE_OPTIONS.  */

static void
darwin_rs6000_override_options (void)
{
  /* The Darwin ABI always includes AltiVec, can't be (validly) turned
     off.  */
  rs6000_altivec_abi = 1;
  TARGET_ALTIVEC_VRSAVE = 1;
  if (DEFAULT_ABI == ABI_DARWIN)
    {
      if (MACHO_DYNAMIC_NO_PIC_P)
	{
	  if (flag_pic)
	    warning (0, "-mdynamic-no-pic overrides -fpic or -fPIC");
	  flag_pic = 0;
	}
      else if (flag_pic == 1)
	{
	  flag_pic = 2;
	}
    }
  if (TARGET_64BIT && ! TARGET_POWERPC64)
    {
      target_flags |= MASK_POWERPC64;
      warning (0, "-m64 requires PowerPC64 architecture, enabling");
    }
  if (flag_mkernel)
    {
      rs6000_default_long_calls = 1;
      target_flags |= MASK_SOFT_FLOAT;
    }

  /* Make -m64 imply -maltivec.  Darwin's 64-bit ABI includes
     Altivec.  */
  if (!flag_mkernel && !flag_apple_kext
      && TARGET_64BIT
      && ! (target_flags_explicit & MASK_ALTIVEC))
    target_flags |= MASK_ALTIVEC;

  /* Unless the user (not the configurer) has explicitly overridden
     it with -mcpu=G3 or -mno-altivec, then 10.5+ targets default to
     G4 unless targeting the kernel.  */
  if (!flag_mkernel
      && !flag_apple_kext
      && strverscmp (darwin_macosx_version_min, "10.5") >= 0
      && ! (target_flags_explicit & MASK_ALTIVEC)
      && ! rs6000_select[1].string)
    {
      target_flags |= MASK_ALTIVEC;
    }
}
#endif
/* If not otherwise specified by a target, make 'long double' equivalent to
   'double'.  */

#ifndef RS6000_DEFAULT_LONG_DOUBLE_SIZE
#define RS6000_DEFAULT_LONG_DOUBLE_SIZE 64
#endif

/* Override command line options.  Mostly we process the processor
   type and sometimes adjust other TARGET_ options.  */

void
rs6000_override_options (const char *default_cpu)
{
  size_t i, j;
  struct rs6000_cpu_select *ptr;
  int set_masks;

  /* Simplifications for entries below.  */

  enum {
    POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
    POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
  };
  /* This table occasionally claims that a processor does not support
     a particular feature even though it does, but the feature is slower
     than the alternative.  Thus, it shouldn't be relied on as a
     complete description of the processor's support.

     Please keep this list in order, and don't forget to update the
     documentation in invoke.texi when adding a new processor or
     flag.  */
  static struct ptt
    {
      const char *const name;		/* Canonical processor name.  */
      const enum processor_type processor; /* Processor type enum value.  */
      const int target_enable;		/* Target flags to enable.  */
    } const processor_target_table[]
1451 = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1452 {"403", PROCESSOR_PPC403,
1453 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
1454 {"405", PROCESSOR_PPC405,
1455 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1456 {"405fp", PROCESSOR_PPC405,
1457 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
1458 {"440", PROCESSOR_PPC440,
1459 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1460 {"440fp", PROCESSOR_PPC440,
1461 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
1462 {"464", PROCESSOR_PPC440,
1463 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1464 {"464fp", PROCESSOR_PPC440,
1465 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
1466 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
1467 {"601", PROCESSOR_PPC601,
1468 MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
1469 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1470 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1471 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1472 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1473 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1474 {"620", PROCESSOR_PPC620,
1475 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1476 {"630", PROCESSOR_PPC630,
1477 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1478 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1479 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
1480 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
1481 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1482 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1483 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1484 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1485 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN},
1486 /* 8548 has a dummy entry for now. */
1487 {"8548", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN},
1488 {"e300c2", PROCESSOR_PPCE300C2, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1489 {"e300c3", PROCESSOR_PPCE300C3, POWERPC_BASE_MASK},
1490 {"e500mc", PROCESSOR_PPCE500MC, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1491 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1492 {"970", PROCESSOR_POWER4,
1493 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1494 {"cell", PROCESSOR_CELL,
1495 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1496 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
1497 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1498 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1499 {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
1500 {"G5", PROCESSOR_POWER4,
1501 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1502 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1503 {"power2", PROCESSOR_POWER,
1504 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
1505 {"power3", PROCESSOR_PPC630,
1506 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1507 {"power4", PROCESSOR_POWER4,
1508 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
1509 | MASK_MFCRF},
1510 {"power5", PROCESSOR_POWER5,
1511 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
1512 | MASK_MFCRF | MASK_POPCNTB},
1513 {"power5+", PROCESSOR_POWER5,
1514 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
1515 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND},
1516 {"power6", PROCESSOR_POWER6,
1517 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
1518 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_CMPB | MASK_DFP},
1519 {"power6x", PROCESSOR_POWER6,
1520 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
1521 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_CMPB | MASK_DFP
1522 | MASK_MFPGPR},
1523 {"power7", PROCESSOR_POWER5,
1524 POWERPC_7400_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_MFCRF
1525 | MASK_POPCNTB | MASK_FPRND | MASK_CMPB | MASK_DFP},
1526 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
1527 {"powerpc64", PROCESSOR_POWERPC64,
1528 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1529 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1530 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1531 {"rios2", PROCESSOR_RIOS2,
1532 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
1533 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1534 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1535 {"rs64", PROCESSOR_RS64A,
1536 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64}
1537 };
1539 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
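/* As a worked example of how the table above is consumed: a command
   line such as "gcc -mcpu=power5" matches the "power5" entry in the
   lookup loop below, so rs6000_cpu becomes PROCESSOR_POWER5 and
   target_flags gains POWERPC_BASE_MASK, MASK_POWERPC64,
   MASK_PPC_GPOPT, MASK_PPC_GFXOPT, MASK_MFCRF and MASK_POPCNTB,
   subject to the set_masks filtering applied there.  */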
1541 /* Some OSs don't support saving the high part of 64-bit registers on
1542 context switch. Other OSs don't support saving Altivec registers.
1543 On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
1544 settings; if the user wants either, the user must explicitly specify
1545 them and we won't interfere with the user's specification. */
1547 enum {
1548 POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
1549 POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT | MASK_STRICT_ALIGN
1550 | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
1551 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_MULHW
1552 | MASK_DLMZB | MASK_CMPB | MASK_MFPGPR | MASK_DFP)
1553 };
1555 set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
1556 #ifdef OS_MISSING_POWERPC64
1557 if (OS_MISSING_POWERPC64)
1558 set_masks &= ~MASK_POWERPC64;
1559 #endif
1560 #ifdef OS_MISSING_ALTIVEC
1561 if (OS_MISSING_ALTIVEC)
1562 set_masks &= ~MASK_ALTIVEC;
1563 #endif
1565 /* Don't let the processor default override flags the user gave explicitly. */
1566 set_masks &= ~target_flags_explicit;
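/* For instance, with "-mcpu=G5 -mno-altivec" the user has set
   MASK_ALTIVEC explicitly, so it is removed from set_masks here and
   the G5 entry's POWERPC_7400_MASK (which includes MASK_ALTIVEC)
   cannot re-enable it in the loop below.  */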
1568 /* Identify the processor type. */
1569 rs6000_select[0].string = default_cpu;
1570 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
1572 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
1574 ptr = &rs6000_select[i];
1575 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
1577 for (j = 0; j < ptt_size; j++)
1578 if (! strcmp (ptr->string, processor_target_table[j].name))
1580 if (ptr->set_tune_p)
1581 rs6000_cpu = processor_target_table[j].processor;
1583 if (ptr->set_arch_p)
1585 target_flags &= ~set_masks;
1586 target_flags |= (processor_target_table[j].target_enable
1587 & set_masks);
1589 break;
1592 if (j == ptt_size)
1593 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
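/* Example: "gcc -mcpu=750 -mtune=power4" walks rs6000_select with the
   configured default cpu in slot 0, the -mcpu string in slot 1 and
   the -mtune string in slot 2.  The -mcpu entry sets both the
   architecture flags and rs6000_cpu; the -mtune entry is tune-only
   (set_arch_p is clear), so it merely retargets rs6000_cpu to
   PROCESSOR_POWER4 for scheduling purposes.  */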
1597 if ((TARGET_E500 || rs6000_cpu == PROCESSOR_PPCE500MC)
1598 && !rs6000_explicit_options.isel)
1599 rs6000_isel = 1;
1601 if (rs6000_cpu == PROCESSOR_PPCE300C2 || rs6000_cpu == PROCESSOR_PPCE300C3
1602 || rs6000_cpu == PROCESSOR_PPCE500MC)
1604 if (TARGET_ALTIVEC)
1605 error ("AltiVec not supported in this target");
1606 if (TARGET_SPE)
1607 error ("SPE not supported in this target");
1610 /* Disable Cell microcode if we are optimizing for the Cell
1611 and not optimizing for size. */
1612 if (rs6000_gen_cell_microcode == -1)
1613 rs6000_gen_cell_microcode = !(rs6000_cpu == PROCESSOR_CELL
1614 && !optimize_size);
1616 /* If we are optimizing big endian systems for space, use the
1617 load/store multiple and string instructions, but only when we are
1618 allowed to generate Cell microcode (they are microcoded on Cell). */
1619 if (BYTES_BIG_ENDIAN && optimize_size && rs6000_gen_cell_microcode)
1620 target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);
1622 /* Don't allow -mmultiple or -mstring on little endian systems
1623 unless the cpu is a 750, because the hardware doesn't support the
1624 instructions used in little endian mode and they cause an alignment
1625 trap. The 750 does not trap (except when the operand being
1626 accessed is unaligned). */
1628 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
1630 if (TARGET_MULTIPLE)
1632 target_flags &= ~MASK_MULTIPLE;
1633 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
1634 warning (0, "-mmultiple is not supported on little endian systems");
1637 if (TARGET_STRING)
1639 target_flags &= ~MASK_STRING;
1640 if ((target_flags_explicit & MASK_STRING) != 0)
1641 warning (0, "-mstring is not supported on little endian systems");
1645 /* Set debug flags */
1646 if (rs6000_debug_name)
1648 if (! strcmp (rs6000_debug_name, "all"))
1649 rs6000_debug_stack = rs6000_debug_arg = 1;
1650 else if (! strcmp (rs6000_debug_name, "stack"))
1651 rs6000_debug_stack = 1;
1652 else if (! strcmp (rs6000_debug_name, "arg"))
1653 rs6000_debug_arg = 1;
1654 else
1655 error ("unknown -mdebug-%s switch", rs6000_debug_name);
1658 if (rs6000_traceback_name)
1660 if (! strncmp (rs6000_traceback_name, "full", 4))
1661 rs6000_traceback = traceback_full;
1662 else if (! strncmp (rs6000_traceback_name, "part", 4))
1663 rs6000_traceback = traceback_part;
1664 else if (! strncmp (rs6000_traceback_name, "no", 2))
1665 rs6000_traceback = traceback_none;
1666 else
1667 error ("unknown -mtraceback arg %qs; expecting %<full%>, %<partial%> or %<none%>",
1668 rs6000_traceback_name);
1671 if (!rs6000_explicit_options.long_double)
1672 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
1674 #ifndef POWERPC_LINUX
1675 if (!rs6000_explicit_options.ieee)
1676 rs6000_ieeequad = 1;
1677 #endif
1679 /* Enable Altivec ABI for AIX -maltivec. */
1680 if (TARGET_XCOFF && TARGET_ALTIVEC)
1681 rs6000_altivec_abi = 1;
1683 /* The AltiVec ABI is the default for PowerPC-64 GNU/Linux. For
1684 PowerPC-32 GNU/Linux, -maltivec implies the AltiVec ABI. It can
1685 be explicitly overridden in either case. */
1686 if (TARGET_ELF)
1688 if (!rs6000_explicit_options.altivec_abi
1689 && (TARGET_64BIT || TARGET_ALTIVEC))
1690 rs6000_altivec_abi = 1;
1692 /* Enable VRSAVE for AltiVec ABI, unless explicitly overridden. */
1693 if (!rs6000_explicit_options.vrsave)
1694 TARGET_ALTIVEC_VRSAVE = rs6000_altivec_abi;
1697 /* Set the Darwin64 ABI as default for 64-bit Darwin. */
1698 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
1700 rs6000_darwin64_abi = 1;
1701 #if TARGET_MACHO
1702 darwin_one_byte_bool = 1;
1703 #endif
1704 /* Default to natural alignment, for better performance. */
1705 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
1708 /* Place FP constants in the constant pool instead of the TOC
1709 if section anchors are enabled. */
1710 if (flag_section_anchors)
1711 TARGET_NO_FP_IN_TOC = 1;
1713 /* Handle -mtls-size option. */
1714 rs6000_parse_tls_size_option ();
1716 #ifdef SUBTARGET_OVERRIDE_OPTIONS
1717 SUBTARGET_OVERRIDE_OPTIONS;
1718 #endif
1719 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
1720 SUBSUBTARGET_OVERRIDE_OPTIONS;
1721 #endif
1722 #ifdef SUB3TARGET_OVERRIDE_OPTIONS
1723 SUB3TARGET_OVERRIDE_OPTIONS;
1724 #endif
1726 if (TARGET_E500 || rs6000_cpu == PROCESSOR_PPCE500MC)
1728 /* The e500 and e500mc do not have string instructions, and we set
1729 MASK_STRING above when optimizing for size. */
1730 if ((target_flags & MASK_STRING) != 0)
1731 target_flags = target_flags & ~MASK_STRING;
1733 else if (rs6000_select[1].string != NULL)
1735 /* For the powerpc-eabispe configuration, we set all these by
1736 default, so let's unset them if we manually set another
1737 CPU that is not the E500. */
1738 if (!rs6000_explicit_options.spe_abi)
1739 rs6000_spe_abi = 0;
1740 if (!rs6000_explicit_options.spe)
1741 rs6000_spe = 0;
1742 if (!rs6000_explicit_options.float_gprs)
1743 rs6000_float_gprs = 0;
1744 if (!rs6000_explicit_options.isel)
1745 rs6000_isel = 0;
1748 /* Detect invalid option combinations with E500. */
1749 CHECK_E500_OPTIONS;
1751 rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
1752 && rs6000_cpu != PROCESSOR_POWER5
1753 && rs6000_cpu != PROCESSOR_POWER6
1754 && rs6000_cpu != PROCESSOR_CELL);
1755 rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
1756 || rs6000_cpu == PROCESSOR_POWER5);
1757 rs6000_align_branch_targets = (rs6000_cpu == PROCESSOR_POWER4
1758 || rs6000_cpu == PROCESSOR_POWER5
1759 || rs6000_cpu == PROCESSOR_POWER6);
1761 rs6000_sched_restricted_insns_priority
1762 = (rs6000_sched_groups ? 1 : 0);
1764 /* Handle -msched-costly-dep option. */
1765 rs6000_sched_costly_dep
1766 = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);
1768 if (rs6000_sched_costly_dep_str)
1770 if (! strcmp (rs6000_sched_costly_dep_str, "no"))
1771 rs6000_sched_costly_dep = no_dep_costly;
1772 else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
1773 rs6000_sched_costly_dep = all_deps_costly;
1774 else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
1775 rs6000_sched_costly_dep = true_store_to_load_dep_costly;
1776 else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
1777 rs6000_sched_costly_dep = store_to_load_dep_costly;
1778 else
1779 rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
1782 /* Handle -minsert-sched-nops option. */
1783 rs6000_sched_insert_nops
1784 = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);
1786 if (rs6000_sched_insert_nops_str)
1788 if (! strcmp (rs6000_sched_insert_nops_str, "no"))
1789 rs6000_sched_insert_nops = sched_finish_none;
1790 else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
1791 rs6000_sched_insert_nops = sched_finish_pad_groups;
1792 else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
1793 rs6000_sched_insert_nops = sched_finish_regroup_exact;
1794 else
1795 rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
1798 #ifdef TARGET_REGNAMES
1799 /* If the user desires alternate register names, copy in the
1800 alternate names now. */
1801 if (TARGET_REGNAMES)
1802 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
1803 #endif
1805 /* Set aix_struct_return last, after the ABI is determined.
1806 If -maix-struct-return or -msvr4-struct-return was explicitly
1807 used, don't override with the ABI default. */
1808 if (!rs6000_explicit_options.aix_struct_ret)
1809 aix_struct_return = (DEFAULT_ABI != ABI_V4 || DRAFT_V4_STRUCT_RET);
1811 if (TARGET_LONG_DOUBLE_128 && !TARGET_IEEEQUAD)
1812 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
1814 if (TARGET_TOC)
1815 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
1817 /* We can only guarantee the availability of DI pseudo-ops when
1818 assembling for 64-bit targets. */
1819 if (!TARGET_64BIT)
1821 targetm.asm_out.aligned_op.di = NULL;
1822 targetm.asm_out.unaligned_op.di = NULL;
1825 /* Set branch target alignment, if not optimizing for size. */
1826 if (!optimize_size)
1828 /* Cell wants branch targets 8-byte aligned for dual issue. */
1829 if (rs6000_cpu == PROCESSOR_CELL)
1831 if (align_functions <= 0)
1832 align_functions = 8;
1833 if (align_jumps <= 0)
1834 align_jumps = 8;
1835 if (align_loops <= 0)
1836 align_loops = 8;
1838 if (rs6000_align_branch_targets)
1840 if (align_functions <= 0)
1841 align_functions = 16;
1842 if (align_jumps <= 0)
1843 align_jumps = 16;
1844 if (align_loops <= 0)
1845 align_loops = 16;
1847 if (align_jumps_max_skip <= 0)
1848 align_jumps_max_skip = 15;
1849 if (align_loops_max_skip <= 0)
1850 align_loops_max_skip = 15;
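/* Net effect when not optimizing for size: on power4/5/6 functions,
   jump targets and loop heads default to 16-byte alignment with at
   most 15 bytes of padding skipped, while Cell uses the smaller
   8-byte alignment to suit its dual-issue grouping.  */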
1853 /* Arrange to save and restore machine status around nested functions. */
1854 init_machine_status = rs6000_init_machine_status;
1856 /* We should always be splitting complex arguments, but we can't break
1857 Linux and Darwin ABIs at the moment. For now, only AIX is fixed. */
1858 if (DEFAULT_ABI != ABI_AIX)
1859 targetm.calls.split_complex_arg = NULL;
1861 /* Initialize rs6000_cost with the appropriate target costs. */
1862 if (optimize_size)
1863 rs6000_cost = TARGET_POWERPC64 ? &size64_cost : &size32_cost;
1864 else
1865 switch (rs6000_cpu)
1867 case PROCESSOR_RIOS1:
1868 rs6000_cost = &rios1_cost;
1869 break;
1871 case PROCESSOR_RIOS2:
1872 rs6000_cost = &rios2_cost;
1873 break;
1875 case PROCESSOR_RS64A:
1876 rs6000_cost = &rs64a_cost;
1877 break;
1879 case PROCESSOR_MPCCORE:
1880 rs6000_cost = &mpccore_cost;
1881 break;
1883 case PROCESSOR_PPC403:
1884 rs6000_cost = &ppc403_cost;
1885 break;
1887 case PROCESSOR_PPC405:
1888 rs6000_cost = &ppc405_cost;
1889 break;
1891 case PROCESSOR_PPC440:
1892 rs6000_cost = &ppc440_cost;
1893 break;
1895 case PROCESSOR_PPC601:
1896 rs6000_cost = &ppc601_cost;
1897 break;
1899 case PROCESSOR_PPC603:
1900 rs6000_cost = &ppc603_cost;
1901 break;
1903 case PROCESSOR_PPC604:
1904 rs6000_cost = &ppc604_cost;
1905 break;
1907 case PROCESSOR_PPC604e:
1908 rs6000_cost = &ppc604e_cost;
1909 break;
1911 case PROCESSOR_PPC620:
1912 rs6000_cost = &ppc620_cost;
1913 break;
1915 case PROCESSOR_PPC630:
1916 rs6000_cost = &ppc630_cost;
1917 break;
1919 case PROCESSOR_CELL:
1920 rs6000_cost = &ppccell_cost;
1921 break;
1923 case PROCESSOR_PPC750:
1924 case PROCESSOR_PPC7400:
1925 rs6000_cost = &ppc750_cost;
1926 break;
1928 case PROCESSOR_PPC7450:
1929 rs6000_cost = &ppc7450_cost;
1930 break;
1932 case PROCESSOR_PPC8540:
1933 rs6000_cost = &ppc8540_cost;
1934 break;
1936 case PROCESSOR_PPCE300C2:
1937 case PROCESSOR_PPCE300C3:
1938 rs6000_cost = &ppce300c2c3_cost;
1939 break;
1941 case PROCESSOR_PPCE500MC:
1942 rs6000_cost = &ppce500mc_cost;
1943 break;
1945 case PROCESSOR_POWER4:
1946 case PROCESSOR_POWER5:
1947 rs6000_cost = &power4_cost;
1948 break;
1950 case PROCESSOR_POWER6:
1951 rs6000_cost = &power6_cost;
1952 break;
1954 default:
1955 gcc_unreachable ();
1958 if (!PARAM_SET_P (PARAM_SIMULTANEOUS_PREFETCHES))
1959 set_param_value ("simultaneous-prefetches",
1960 rs6000_cost->simultaneous_prefetches);
1961 if (!PARAM_SET_P (PARAM_L1_CACHE_SIZE))
1962 set_param_value ("l1-cache-size", rs6000_cost->l1_cache_size);
1963 if (!PARAM_SET_P (PARAM_L1_CACHE_LINE_SIZE))
1964 set_param_value ("l1-cache-line-size", rs6000_cost->cache_line_size);
1965 if (!PARAM_SET_P (PARAM_L2_CACHE_SIZE))
1966 set_param_value ("l2-cache-size", rs6000_cost->l2_cache_size);
1968 /* If using typedef char *va_list, signal that __builtin_va_start (&ap, 0)
1969 can be optimized to ap = __builtin_next_arg (0). */
1970 if (DEFAULT_ABI != ABI_V4)
1971 targetm.expand_builtin_va_start = NULL;
1973 /* Set up single/double float flags.
1974 If TARGET_HARD_FLOAT is set, but neither single nor double is set,
1975 then set both flags. */
1976 if (TARGET_HARD_FLOAT && TARGET_FPRS
1977 && rs6000_single_float == 0 && rs6000_double_float == 0)
1978 rs6000_single_float = rs6000_double_float = 1;
1980 /* Reset single and double FP flags if target is E500. */
1981 if (TARGET_E500)
1983 rs6000_single_float = rs6000_double_float = 0;
1984 if (TARGET_E500_SINGLE)
1985 rs6000_single_float = 1;
1986 if (TARGET_E500_DOUBLE)
1987 rs6000_single_float = rs6000_double_float = 1;
1990 /* If not explicitly specified via option, decide whether to generate indexed
1991 load/store instructions. */
1992 if (TARGET_AVOID_XFORM == -1)
1993 /* Avoid indexed addressing when targeting Power6 in order to avoid
1994 the DERAT mispredict penalty. */
1995 TARGET_AVOID_XFORM = (rs6000_cpu == PROCESSOR_POWER6 && TARGET_CMPB);
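/* Concretely, avoiding X-form addressing means that instead of an
   indexed load such as "lwzx r9,r3,r4" the compiler prefers
   "add r9,r3,r4" followed by the D-form "lwz r9,0(r9)", trading an
   extra add for a smaller chance of a DERAT mispredict.  */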
1997 rs6000_init_hard_regno_mode_ok ();
2000 /* Implement targetm.vectorize.builtin_mask_for_load. */
2001 static tree
2002 rs6000_builtin_mask_for_load (void)
2004 if (TARGET_ALTIVEC)
2005 return altivec_builtin_mask_for_load;
2006 else
2007 return 0;
2010 /* Implement targetm.vectorize.builtin_conversion.
2011 Returns a decl of a function that implements conversion of an integer vector
2012 into a floating-point vector, or vice versa. TYPE is the type of the integer
2013 side of the conversion.
2014 Return NULL_TREE if it is not available. */
2015 static tree
2016 rs6000_builtin_conversion (enum tree_code code, tree type)
2018 if (!TARGET_ALTIVEC)
2019 return NULL_TREE;
2021 switch (code)
2023 case FIX_TRUNC_EXPR:
2024 switch (TYPE_MODE (type))
2026 case V4SImode:
2027 return TYPE_UNSIGNED (type)
2028 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VCTUXS]
2029 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VCTSXS];
2030 default:
2031 return NULL_TREE;
2034 case FLOAT_EXPR:
2035 switch (TYPE_MODE (type))
2037 case V4SImode:
2038 return TYPE_UNSIGNED (type)
2039 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VCFUX]
2040 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VCFSX];
2041 default:
2042 return NULL_TREE;
2045 default:
2046 return NULL_TREE;
2050 /* Implement targetm.vectorize.builtin_mul_widen_even. */
2051 static tree
2052 rs6000_builtin_mul_widen_even (tree type)
2054 if (!TARGET_ALTIVEC)
2055 return NULL_TREE;
2057 switch (TYPE_MODE (type))
2059 case V8HImode:
2060 return TYPE_UNSIGNED (type)
2061 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUH]
2062 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESH];
2064 case V16QImode:
2065 return TYPE_UNSIGNED (type)
2066 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULEUB]
2067 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULESB];
2068 default:
2069 return NULL_TREE;
2073 /* Implement targetm.vectorize.builtin_mul_widen_odd. */
2074 static tree
2075 rs6000_builtin_mul_widen_odd (tree type)
2077 if (!TARGET_ALTIVEC)
2078 return NULL_TREE;
2080 switch (TYPE_MODE (type))
2082 case V8HImode:
2083 return TYPE_UNSIGNED (type)
2084 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUH]
2085 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSH];
2087 case V16QImode:
2088 return TYPE_UNSIGNED (type)
2089 ? rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOUB]
2090 : rs6000_builtin_decls[ALTIVEC_BUILTIN_VMULOSB];
2091 default:
2092 return NULL_TREE;
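/* Together, the even/odd hooks let the vectorizer widen a V8HImode
   multiply: vmulesh multiplies halfword elements 0,2,4,6 and vmulosh
   elements 1,3,5,7, each producing a V4SImode result, which the
   vectorizer then merges back into element order.  */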
2097 /* Return true iff a data reference of TYPE can reach vector alignment (16)
2098 after some number of loop iterations. This routine does not determine
2099 how many iterations are required, only whether the alignment is reachable. */
2101 static bool
2102 rs6000_vector_alignment_reachable (const_tree type ATTRIBUTE_UNUSED, bool is_packed)
2104 if (is_packed)
2105 return false;
2107 if (TARGET_32BIT)
2109 if (rs6000_alignment_flags == MASK_ALIGN_NATURAL)
2110 return true;
2112 if (rs6000_alignment_flags == MASK_ALIGN_POWER)
2113 return true;
2115 return false;
2117 else
2119 if (TARGET_MACHO)
2120 return false;
2122 /* We assume all other types are naturally aligned. CHECKME! */
2123 return true;
2127 /* Implement targetm.vectorize.builtin_vec_perm. */
2128 tree
2129 rs6000_builtin_vec_perm (tree type, tree *mask_element_type)
2131 tree d;
2133 *mask_element_type = unsigned_char_type_node;
2135 switch (TYPE_MODE (type))
2137 case V16QImode:
2138 d = rs6000_builtin_decls[ALTIVEC_BUILTIN_VPERM_16QI];
2139 break;
2141 case V8HImode:
2142 d = rs6000_builtin_decls[ALTIVEC_BUILTIN_VPERM_8HI];
2143 break;
2145 case V4SImode:
2146 d = rs6000_builtin_decls[ALTIVEC_BUILTIN_VPERM_4SI];
2147 break;
2149 case V4SFmode:
2150 d = rs6000_builtin_decls[ALTIVEC_BUILTIN_VPERM_4SF];
2151 break;
2153 default:
2154 return NULL_TREE;
2157 gcc_assert (d);
2158 return d;
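/* vperm treats its two input vectors as one 32-byte array and the
   mask as 16 byte indices into that array, which is why the mask
   element type reported above is always unsigned char no matter
   which vector mode is being permuted.  */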
2161 /* Handle generic options of the form -mfoo=yes/no.
2162 NAME is the option name.
2163 VALUE is the option value.
2164 FLAG is the pointer to the flag where to store a 1 or 0, depending on
2165 whether the option value is 'yes' or 'no' respectively. */
2166 static void
2167 rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
2169 if (value == 0)
2170 return;
2171 else if (!strcmp (value, "yes"))
2172 *flag = 1;
2173 else if (!strcmp (value, "no"))
2174 *flag = 0;
2175 else
2176 error ("unknown -m%s= option specified: '%s'", name, value);
2179 /* Validate and record the size specified with the -mtls-size option. */
2181 static void
2182 rs6000_parse_tls_size_option (void)
2184 if (rs6000_tls_size_string == 0)
2185 return;
2186 else if (strcmp (rs6000_tls_size_string, "16") == 0)
2187 rs6000_tls_size = 16;
2188 else if (strcmp (rs6000_tls_size_string, "32") == 0)
2189 rs6000_tls_size = 32;
2190 else if (strcmp (rs6000_tls_size_string, "64") == 0)
2191 rs6000_tls_size = 64;
2192 else
2193 error ("bad value %qs for -mtls-size switch", rs6000_tls_size_string);
2196 void
2197 optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
2199 if (DEFAULT_ABI == ABI_DARWIN)
2200 /* The Darwin libraries never set errno, so we might as well
2201 avoid calling them when that's the only reason we would. */
2202 flag_errno_math = 0;
2204 /* Double growth factor to counter reduced min jump length. */
2205 set_param_value ("max-grow-copy-bb-insns", 16);
2207 /* Enable section anchors by default.
2208 Skip section anchors for Objective C and Objective C++
2209 until the front ends are fixed. */
2210 if (!TARGET_MACHO && lang_hooks.name[4] != 'O')
2211 flag_section_anchors = 2;
2214 static enum fpu_type_t
2215 rs6000_parse_fpu_option (const char *option)
2217 if (!strcmp("none", option)) return FPU_NONE;
2218 if (!strcmp("sp_lite", option)) return FPU_SF_LITE;
2219 if (!strcmp("dp_lite", option)) return FPU_DF_LITE;
2220 if (!strcmp("sp_full", option)) return FPU_SF_FULL;
2221 if (!strcmp("dp_full", option)) return FPU_DF_FULL;
2222 error("unknown value %s for -mfpu", option);
2223 return FPU_NONE;
2226 /* Implement TARGET_HANDLE_OPTION. */
2228 static bool
2229 rs6000_handle_option (size_t code, const char *arg, int value)
2231 enum fpu_type_t fpu_type = FPU_NONE;
2233 switch (code)
2235 case OPT_mno_power:
2236 target_flags &= ~(MASK_POWER | MASK_POWER2
2237 | MASK_MULTIPLE | MASK_STRING);
2238 target_flags_explicit |= (MASK_POWER | MASK_POWER2
2239 | MASK_MULTIPLE | MASK_STRING);
2240 break;
2241 case OPT_mno_powerpc:
2242 target_flags &= ~(MASK_POWERPC | MASK_PPC_GPOPT
2243 | MASK_PPC_GFXOPT | MASK_POWERPC64);
2244 target_flags_explicit |= (MASK_POWERPC | MASK_PPC_GPOPT
2245 | MASK_PPC_GFXOPT | MASK_POWERPC64);
2246 break;
2247 case OPT_mfull_toc:
2248 target_flags &= ~MASK_MINIMAL_TOC;
2249 TARGET_NO_FP_IN_TOC = 0;
2250 TARGET_NO_SUM_IN_TOC = 0;
2251 target_flags_explicit |= MASK_MINIMAL_TOC;
2252 #ifdef TARGET_USES_SYSV4_OPT
2253 /* Note, V.4 no longer uses a normal TOC, so make -mfull-toc behave
2254 just the same as -mminimal-toc. */
2255 target_flags |= MASK_MINIMAL_TOC;
2256 target_flags_explicit |= MASK_MINIMAL_TOC;
2257 #endif
2258 break;
2260 #ifdef TARGET_USES_SYSV4_OPT
2261 case OPT_mtoc:
2262 /* Make -mtoc behave like -mminimal-toc. */
2263 target_flags |= MASK_MINIMAL_TOC;
2264 target_flags_explicit |= MASK_MINIMAL_TOC;
2265 break;
2266 #endif
2268 #ifdef TARGET_USES_AIX64_OPT
2269 case OPT_maix64:
2270 #else
2271 case OPT_m64:
2272 #endif
2273 target_flags |= MASK_POWERPC64 | MASK_POWERPC;
2274 target_flags |= ~target_flags_explicit & MASK_PPC_GFXOPT;
2275 target_flags_explicit |= MASK_POWERPC64 | MASK_POWERPC;
2276 break;
2278 #ifdef TARGET_USES_AIX64_OPT
2279 case OPT_maix32:
2280 #else
2281 case OPT_m32:
2282 #endif
2283 target_flags &= ~MASK_POWERPC64;
2284 target_flags_explicit |= MASK_POWERPC64;
2285 break;
2287 case OPT_minsert_sched_nops_:
2288 rs6000_sched_insert_nops_str = arg;
2289 break;
2291 case OPT_mminimal_toc:
2292 if (value == 1)
2294 TARGET_NO_FP_IN_TOC = 0;
2295 TARGET_NO_SUM_IN_TOC = 0;
2297 break;
2299 case OPT_mpower:
2300 if (value == 1)
2302 target_flags |= (MASK_MULTIPLE | MASK_STRING);
2303 target_flags_explicit |= (MASK_MULTIPLE | MASK_STRING);
2305 break;
2307 case OPT_mpower2:
2308 if (value == 1)
2310 target_flags |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
2311 target_flags_explicit |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
2313 break;
2315 case OPT_mpowerpc_gpopt:
2316 case OPT_mpowerpc_gfxopt:
2317 if (value == 1)
2319 target_flags |= MASK_POWERPC;
2320 target_flags_explicit |= MASK_POWERPC;
2322 break;
2324 case OPT_maix_struct_return:
2325 case OPT_msvr4_struct_return:
2326 rs6000_explicit_options.aix_struct_ret = true;
2327 break;
2329 case OPT_mvrsave_:
2330 rs6000_explicit_options.vrsave = true;
2331 rs6000_parse_yes_no_option ("vrsave", arg, &(TARGET_ALTIVEC_VRSAVE));
2332 break;
2334 case OPT_misel:
2335 rs6000_explicit_options.isel = true;
2336 rs6000_isel = value;
2337 break;
2339 case OPT_misel_:
2340 rs6000_explicit_options.isel = true;
2341 rs6000_parse_yes_no_option ("isel", arg, &(rs6000_isel));
2342 break;
2344 case OPT_mspe:
2345 rs6000_explicit_options.spe = true;
2346 rs6000_spe = value;
2347 break;
2349 case OPT_mspe_:
2350 rs6000_explicit_options.spe = true;
2351 rs6000_parse_yes_no_option ("spe", arg, &(rs6000_spe));
2352 break;
2354 case OPT_mdebug_:
2355 rs6000_debug_name = arg;
2356 break;
2358 #ifdef TARGET_USES_SYSV4_OPT
2359 case OPT_mcall_:
2360 rs6000_abi_name = arg;
2361 break;
2363 case OPT_msdata_:
2364 rs6000_sdata_name = arg;
2365 break;
2367 case OPT_mtls_size_:
2368 rs6000_tls_size_string = arg;
2369 break;
2371 case OPT_mrelocatable:
2372 if (value == 1)
2374 target_flags |= MASK_MINIMAL_TOC;
2375 target_flags_explicit |= MASK_MINIMAL_TOC;
2376 TARGET_NO_FP_IN_TOC = 1;
2378 break;
2380 case OPT_mrelocatable_lib:
2381 if (value == 1)
2383 target_flags |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
2384 target_flags_explicit |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
2385 TARGET_NO_FP_IN_TOC = 1;
2387 else
2389 target_flags &= ~MASK_RELOCATABLE;
2390 target_flags_explicit |= MASK_RELOCATABLE;
2392 break;
2393 #endif
2395 case OPT_mabi_:
2396 if (!strcmp (arg, "altivec"))
2398 rs6000_explicit_options.altivec_abi = true;
2399 rs6000_altivec_abi = 1;
2401 /* Enabling the AltiVec ABI turns off the SPE ABI. */
2402 rs6000_spe_abi = 0;
2404 else if (! strcmp (arg, "no-altivec"))
2406 rs6000_explicit_options.altivec_abi = true;
2407 rs6000_altivec_abi = 0;
2409 else if (! strcmp (arg, "spe"))
2411 rs6000_explicit_options.spe_abi = true;
2412 rs6000_spe_abi = 1;
2413 rs6000_altivec_abi = 0;
2414 if (!TARGET_SPE_ABI)
2415 error ("not configured for ABI: '%s'", arg);
2417 else if (! strcmp (arg, "no-spe"))
2419 rs6000_explicit_options.spe_abi = true;
2420 rs6000_spe_abi = 0;
2423 /* These are here for testing during development only; please do
2424 not document them in the manual. */
2425 else if (! strcmp (arg, "d64"))
2427 rs6000_darwin64_abi = 1;
2428 warning (0, "Using darwin64 ABI");
2430 else if (! strcmp (arg, "d32"))
2432 rs6000_darwin64_abi = 0;
2433 warning (0, "Using old darwin ABI");
2436 else if (! strcmp (arg, "ibmlongdouble"))
2438 rs6000_explicit_options.ieee = true;
2439 rs6000_ieeequad = 0;
2440 warning (0, "Using IBM extended precision long double");
2442 else if (! strcmp (arg, "ieeelongdouble"))
2444 rs6000_explicit_options.ieee = true;
2445 rs6000_ieeequad = 1;
2446 warning (0, "Using IEEE extended precision long double");
2449 else
2451 error ("unknown ABI specified: '%s'", arg);
2452 return false;
2454 break;
2456 case OPT_mcpu_:
2457 rs6000_select[1].string = arg;
2458 break;
2460 case OPT_mtune_:
2461 rs6000_select[2].string = arg;
2462 break;
2464 case OPT_mtraceback_:
2465 rs6000_traceback_name = arg;
2466 break;
2468 case OPT_mfloat_gprs_:
2469 rs6000_explicit_options.float_gprs = true;
2470 if (! strcmp (arg, "yes") || ! strcmp (arg, "single"))
2471 rs6000_float_gprs = 1;
2472 else if (! strcmp (arg, "double"))
2473 rs6000_float_gprs = 2;
2474 else if (! strcmp (arg, "no"))
2475 rs6000_float_gprs = 0;
2476 else
2478 error ("invalid option for -mfloat-gprs: '%s'", arg);
2479 return false;
2481 break;
2483 case OPT_mlong_double_:
2484 rs6000_explicit_options.long_double = true;
2485 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
2486 if (value != 64 && value != 128)
2488 error ("Unknown switch -mlong-double-%s", arg);
2489 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
2490 return false;
2492 else
2493 rs6000_long_double_type_size = value;
2494 break;
2496 case OPT_msched_costly_dep_:
2497 rs6000_sched_costly_dep_str = arg;
2498 break;
2500 case OPT_malign_:
2501 rs6000_explicit_options.alignment = true;
2502 if (! strcmp (arg, "power"))
2504 /* On 64-bit Darwin, power alignment is ABI-incompatible with
2505 some C library functions, so warn about it. The flag may be
2506 useful for performance studies from time to time though, so
2507 don't disable it entirely. */
2508 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
2509 warning (0, "-malign-power is not supported for 64-bit Darwin;"
2510 " it is incompatible with the installed C and C++ libraries");
2511 rs6000_alignment_flags = MASK_ALIGN_POWER;
2513 else if (! strcmp (arg, "natural"))
2514 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
2515 else
2517 error ("unknown -malign-XXXXX option specified: '%s'", arg);
2518 return false;
2520 break;
2522 case OPT_msingle_float:
2523 if (!TARGET_SINGLE_FPU)
2524 warning (0, "-msingle-float option equivalent to -mhard-float");
2525 /* -msingle-float implies -mno-double-float and TARGET_HARD_FLOAT. */
2526 rs6000_double_float = 0;
2527 target_flags &= ~MASK_SOFT_FLOAT;
2528 target_flags_explicit |= MASK_SOFT_FLOAT;
2529 break;
2531 case OPT_mdouble_float:
2532 /* -mdouble-float implies -msingle-float and TARGET_HARD_FLOAT. */
2533 rs6000_single_float = 1;
2534 target_flags &= ~MASK_SOFT_FLOAT;
2535 target_flags_explicit |= MASK_SOFT_FLOAT;
2536 break;
2538 case OPT_msimple_fpu:
2539 if (!TARGET_SINGLE_FPU)
2540 warning (0, "-msimple-fpu option ignored");
2541 break;
2543 case OPT_mhard_float:
2544 /* -mhard-float implies -msingle-float and -mdouble-float. */
2545 rs6000_single_float = rs6000_double_float = 1;
2546 break;
2548 case OPT_msoft_float:
2549 /* -msoft-float implies -mno-single-float and -mno-double-float. */
2550 rs6000_single_float = rs6000_double_float = 0;
2551 break;
2553 case OPT_mfpu_:
2554 fpu_type = rs6000_parse_fpu_option(arg);
2555 if (fpu_type != FPU_NONE)
2556 /* If -mfpu is not none, then turn off SOFT_FLOAT, turn on HARD_FLOAT. */
2558 target_flags &= ~MASK_SOFT_FLOAT;
2559 target_flags_explicit |= MASK_SOFT_FLOAT;
2560 rs6000_xilinx_fpu = 1;
2561 if (fpu_type == FPU_SF_LITE || fpu_type == FPU_SF_FULL)
2562 rs6000_single_float = 1;
2563 if (fpu_type == FPU_DF_LITE || fpu_type == FPU_DF_FULL)
2564 rs6000_single_float = rs6000_double_float = 1;
2565 if (fpu_type == FPU_SF_LITE || fpu_type == FPU_DF_LITE)
2566 rs6000_simple_fpu = 1;
2568 else
2570 /* -mfpu=none is equivalent to -msoft-float. */
2571 target_flags |= MASK_SOFT_FLOAT;
2572 target_flags_explicit |= MASK_SOFT_FLOAT;
2573 rs6000_single_float = rs6000_double_float = 0;
2575 break;
2577 return true;
2580 /* Do anything needed at the start of the asm file. */
2582 static void
2583 rs6000_file_start (void)
2585 size_t i;
2586 char buffer[80];
2587 const char *start = buffer;
2588 struct rs6000_cpu_select *ptr;
2589 const char *default_cpu = TARGET_CPU_DEFAULT;
2590 FILE *file = asm_out_file;
2592 default_file_start ();
2594 #ifdef TARGET_BI_ARCH
2595 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
2596 default_cpu = 0;
2597 #endif
2599 if (flag_verbose_asm)
2601 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
2602 rs6000_select[0].string = default_cpu;
2604 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
2606 ptr = &rs6000_select[i];
2607 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
2609 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
2610 start = "";
2614 if (PPC405_ERRATUM77)
2616 fprintf (file, "%s PPC405CR_ERRATUM77", start);
2617 start = "";
2620 #ifdef USING_ELFOS_H
2621 switch (rs6000_sdata)
2623 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
2624 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
2625 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
2626 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
2629 if (rs6000_sdata && g_switch_value)
2631 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
2632 g_switch_value);
2633 start = "";
2635 #endif
2637 if (*start == '\0')
2638 putc ('\n', file);
2641 #ifdef HAVE_AS_GNU_ATTRIBUTE
2642 if (TARGET_32BIT && DEFAULT_ABI == ABI_V4)
2644 fprintf (file, "\t.gnu_attribute 4, %d\n",
2645 ((TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT) ? 1
2646 : (TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_SINGLE_FLOAT) ? 3
2647 : 2));
2648 fprintf (file, "\t.gnu_attribute 8, %d\n",
2649 (TARGET_ALTIVEC_ABI ? 2
2650 : TARGET_SPE_ABI ? 3
2651 : 1));
2652 fprintf (file, "\t.gnu_attribute 12, %d\n",
2653 aix_struct_return ? 2 : 1);
2656 #endif
2658 if (DEFAULT_ABI == ABI_AIX || (TARGET_ELF && flag_pic == 2))
2660 switch_to_section (toc_section);
2661 switch_to_section (text_section);
2666 /* Return nonzero if this function is known to have a null epilogue. */
2668 int
2669 direct_return (void)
2671 if (reload_completed)
2673 rs6000_stack_t *info = rs6000_stack_info ();
2675 if (info->first_gp_reg_save == 32
2676 && info->first_fp_reg_save == 64
2677 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
2678 && ! info->lr_save_p
2679 && ! info->cr_save_p
2680 && info->vrsave_mask == 0
2681 && ! info->push_p)
2682 return 1;
2685 return 0;
2688 /* Return the number of instructions it takes to form a constant in an
2689 integer register. */
2691 static int
2692 num_insns_constant_wide (HOST_WIDE_INT value)
2694 /* signed constant loadable with {cal|addi} */
2695 if ((unsigned HOST_WIDE_INT) (value + 0x8000) < 0x10000)
2696 return 1;
2698 /* constant loadable with {cau|addis} */
2699 else if ((value & 0xffff) == 0
2700 && (value >> 31 == -1 || value >> 31 == 0))
2701 return 1;
2703 #if HOST_BITS_PER_WIDE_INT == 64
2704 else if (TARGET_POWERPC64)
2706 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
2707 HOST_WIDE_INT high = value >> 31;
2709 if (high == 0 || high == -1)
2710 return 2;
2712 high >>= 1;
2714 if (low == 0)
2715 return num_insns_constant_wide (high) + 1;
2716 else
2717 return (num_insns_constant_wide (high)
2718 + num_insns_constant_wide (low) + 1);
2720 #endif
2722 else
2723 return 2;
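/* Worked examples for the routine above: 0x7fff loads in one insn
   (li), 0x12340000 in one (lis), and 0x12345678 needs the full
   lis+ori pair.  A 64-bit constant such as 0x1234567800000000 has a
   zero low word, so it costs the two insns for its high word plus
   one shift: three in total.  */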
2726 int
2727 num_insns_constant (rtx op, enum machine_mode mode)
2729 HOST_WIDE_INT low, high;
2731 switch (GET_CODE (op))
2733 case CONST_INT:
2734 #if HOST_BITS_PER_WIDE_INT == 64
2735 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
2736 && mask64_operand (op, mode))
2737 return 2;
2738 else
2739 #endif
2740 return num_insns_constant_wide (INTVAL (op));
2742 case CONST_DOUBLE:
2743 if (mode == SFmode || mode == SDmode)
2745 long l;
2746 REAL_VALUE_TYPE rv;
2748 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2749 if (DECIMAL_FLOAT_MODE_P (mode))
2750 REAL_VALUE_TO_TARGET_DECIMAL32 (rv, l);
2751 else
2752 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
2753 return num_insns_constant_wide ((HOST_WIDE_INT) l);
2756 if (mode == VOIDmode || mode == DImode)
2758 high = CONST_DOUBLE_HIGH (op);
2759 low = CONST_DOUBLE_LOW (op);
2761 else
2763 long l[2];
2764 REAL_VALUE_TYPE rv;
2766 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2767 if (DECIMAL_FLOAT_MODE_P (mode))
2768 REAL_VALUE_TO_TARGET_DECIMAL64 (rv, l);
2769 else
2770 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
2771 high = l[WORDS_BIG_ENDIAN == 0];
2772 low = l[WORDS_BIG_ENDIAN != 0];
2775 if (TARGET_32BIT)
2776 return (num_insns_constant_wide (low)
2777 + num_insns_constant_wide (high));
2778 else
2780 if ((high == 0 && low >= 0)
2781 || (high == -1 && low < 0))
2782 return num_insns_constant_wide (low);
2784 else if (mask64_operand (op, mode))
2785 return 2;
2787 else if (low == 0)
2788 return num_insns_constant_wide (high) + 1;
2790 else
2791 return (num_insns_constant_wide (high)
2792 + num_insns_constant_wide (low) + 1);
2795 default:
2796 gcc_unreachable ();
2800 /* Interpret element ELT of the CONST_VECTOR OP as an integer value.
2801 If the mode of OP is MODE_VECTOR_INT, this simply returns the
2802 corresponding element of the vector, but for V4SFmode and V2SFmode,
2803 the corresponding "float" is interpreted as an SImode integer. */
2805 HOST_WIDE_INT
2806 const_vector_elt_as_int (rtx op, unsigned int elt)
2808 rtx tmp = CONST_VECTOR_ELT (op, elt);
2809 if (GET_MODE (op) == V4SFmode
2810 || GET_MODE (op) == V2SFmode)
2811 tmp = gen_lowpart (SImode, tmp);
2812 return INTVAL (tmp);
2815 /* Return true if OP can be synthesized with a particular vspltisb, vspltish
2816 or vspltisw instruction. OP is a CONST_VECTOR. Which instruction is used
2817 depends on STEP and COPIES, one of which will be 1. If COPIES > 1, all
2818 elements are set to the same value and each contains COPIES replicas of
2819 the vsplt's operand; if STEP > 1, every STEP-th element is set to the
2820 vsplt's operand and the others are set to the operand's msb. */
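/* Worked example: the V8HImode constant {0x0303, ..., 0x0303} is out
   of range for vspltish (0x0303 > 15), but with STEP == 1 and
   COPIES == 2 each halfword splits into two identical byte copies of
   the value 3, so the constant is matched as "vspltisb %0,3".  */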
2822 static bool
2823 vspltis_constant (rtx op, unsigned step, unsigned copies)
2825 enum machine_mode mode = GET_MODE (op);
2826 enum machine_mode inner = GET_MODE_INNER (mode);
2828 unsigned i;
2829 unsigned nunits = GET_MODE_NUNITS (mode);
2830 unsigned bitsize = GET_MODE_BITSIZE (inner);
2831 unsigned mask = GET_MODE_MASK (inner);
2833 HOST_WIDE_INT val = const_vector_elt_as_int (op, nunits - 1);
2834 HOST_WIDE_INT splat_val = val;
2835 HOST_WIDE_INT msb_val = val > 0 ? 0 : -1;
2837 /* Construct the value to be splatted, if possible. If not, return 0. */
2838 for (i = 2; i <= copies; i *= 2)
2840 HOST_WIDE_INT small_val;
2841 bitsize /= 2;
2842 small_val = splat_val >> bitsize;
2843 mask >>= bitsize;
2844 if (splat_val != ((small_val << bitsize) | (small_val & mask)))
2845 return false;
2846 splat_val = small_val;
2849 /* Check if SPLAT_VAL can really be the operand of a vspltis[bhw]. */
2850 if (EASY_VECTOR_15 (splat_val))
2853 /* Also check if we can splat, and then add the result to itself. Do so if
2854 the value is positive, or if the splat instruction is using OP's mode;
2855 for splat_val < 0, the splat and the add should use the same mode. */
2856 else if (EASY_VECTOR_15_ADD_SELF (splat_val)
2857 && (splat_val >= 0 || (step == 1 && copies == 1)))
2860 else
2861 return false;
2863 /* Check if VAL is present in every STEP-th element, and the
2864 other elements are filled with its most significant bit. */
2865 for (i = 0; i < nunits - 1; ++i)
2867 HOST_WIDE_INT desired_val;
2868 if (((i + 1) & (step - 1)) == 0)
2869 desired_val = val;
2870 else
2871 desired_val = msb_val;
2873 if (desired_val != const_vector_elt_as_int (op, i))
2874 return false;
2877 return true;
2881 /* Return true if OP is of the given MODE and can be synthesized
2882 with a vspltisb, vspltish or vspltisw. */
2884 bool
2885 easy_altivec_constant (rtx op, enum machine_mode mode)
2887 unsigned step, copies;
2889 if (mode == VOIDmode)
2890 mode = GET_MODE (op);
2891 else if (mode != GET_MODE (op))
2892 return false;
2894 /* Start with a vspltisw. */
2895 step = GET_MODE_NUNITS (mode) / 4;
2896 copies = 1;
2898 if (vspltis_constant (op, step, copies))
2899 return true;
2901 /* Then try with a vspltish. */
2902 if (step == 1)
2903 copies <<= 1;
2904 else
2905 step >>= 1;
2907 if (vspltis_constant (op, step, copies))
2908 return true;
2910 /* And finally a vspltisb. */
2911 if (step == 1)
2912 copies <<= 1;
2913 else
2914 step >>= 1;
2916 if (vspltis_constant (op, step, copies))
2917 return true;
2919 return false;
2922 /* Generate a VEC_DUPLICATE representing a vspltis[bhw] instruction whose
2923 result is OP. Abort if it is not possible. */
2925 rtx
2926 gen_easy_altivec_constant (rtx op)
2928 enum machine_mode mode = GET_MODE (op);
2929 int nunits = GET_MODE_NUNITS (mode);
2930 rtx last = CONST_VECTOR_ELT (op, nunits - 1);
2931 unsigned step = nunits / 4;
2932 unsigned copies = 1;
2934 /* Start with a vspltisw. */
2935 if (vspltis_constant (op, step, copies))
2936 return gen_rtx_VEC_DUPLICATE (V4SImode, gen_lowpart (SImode, last));
2938 /* Then try with a vspltish. */
2939 if (step == 1)
2940 copies <<= 1;
2941 else
2942 step >>= 1;
2944 if (vspltis_constant (op, step, copies))
2945 return gen_rtx_VEC_DUPLICATE (V8HImode, gen_lowpart (HImode, last));
2947 /* And finally a vspltisb. */
2948 if (step == 1)
2949 copies <<= 1;
2950 else
2951 step >>= 1;
2953 if (vspltis_constant (op, step, copies))
2954 return gen_rtx_VEC_DUPLICATE (V16QImode, gen_lowpart (QImode, last));
2956 gcc_unreachable ();
2959 const char *
2960 output_vec_const_move (rtx *operands)
2962 int cst, cst2;
2963 enum machine_mode mode;
2964 rtx dest, vec;
2966 dest = operands[0];
2967 vec = operands[1];
2968 mode = GET_MODE (dest);
2970 if (TARGET_ALTIVEC)
2972 rtx splat_vec;
2973 if (zero_constant (vec, mode))
2974 return "vxor %0,%0,%0";
2976 splat_vec = gen_easy_altivec_constant (vec);
2977 gcc_assert (GET_CODE (splat_vec) == VEC_DUPLICATE);
2978 operands[1] = XEXP (splat_vec, 0);
2979 if (!EASY_VECTOR_15 (INTVAL (operands[1])))
2980 return "#";
2982 switch (GET_MODE (splat_vec))
2984 case V4SImode:
2985 return "vspltisw %0,%1";
2987 case V8HImode:
2988 return "vspltish %0,%1";
2990 case V16QImode:
2991 return "vspltisb %0,%1";
2993 default:
2994 gcc_unreachable ();
2998 gcc_assert (TARGET_SPE);
3000 /* Vector constant 0 is handled as a splitter of V2SI, and in the
3001 pattern of V1DI, V4HI, and V2SF.
3003 FIXME: We should probably return # and add post reload
3004 splitters for these, but this way is so easy ;-). */
3005 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
3006 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
3007 operands[1] = CONST_VECTOR_ELT (vec, 0);
3008 operands[2] = CONST_VECTOR_ELT (vec, 1);
3009 if (cst == cst2)
3010 return "li %0,%1\n\tevmergelo %0,%0,%0";
3011 else
3012 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
3015 /* Initialize TARGET of vector PAIRED to VALS. */
3017 void
3018 paired_expand_vector_init (rtx target, rtx vals)
3020 enum machine_mode mode = GET_MODE (target);
3021 int n_elts = GET_MODE_NUNITS (mode);
3022 int n_var = 0;
3023 rtx x, new_rtx, tmp, constant_op, op1, op2;
3024 int i;
3026 for (i = 0; i < n_elts; ++i)
3028 x = XVECEXP (vals, 0, i);
3029 if (!CONSTANT_P (x))
3030 ++n_var;
3032 if (n_var == 0)
3034 /* Load from constant pool. */
3035 emit_move_insn (target, gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0)));
3036 return;
3039 if (n_var == 2)
3041 /* The vector is initialized only with non-constants. */
3042 new_rtx = gen_rtx_VEC_CONCAT (V2SFmode, XVECEXP (vals, 0, 0),
3043 XVECEXP (vals, 0, 1));
3045 emit_move_insn (target, new_rtx);
3046 return;
3049 /* One field is non-constant and the other one is a constant. Load the
3050 constant from the constant pool and use the ps_merge instruction to
3051 construct the whole vector. */
3052 op1 = XVECEXP (vals, 0, 0);
3053 op2 = XVECEXP (vals, 0, 1);
3055 constant_op = (CONSTANT_P (op1)) ? op1 : op2;
3057 tmp = gen_reg_rtx (GET_MODE (constant_op));
3058 emit_move_insn (tmp, constant_op);
3060 if (CONSTANT_P (op1))
3061 new_rtx = gen_rtx_VEC_CONCAT (V2SFmode, tmp, op2);
3062 else
3063 new_rtx = gen_rtx_VEC_CONCAT (V2SFmode, op1, tmp);
3065 emit_move_insn (target, new_rtx);
3068 void
3069 paired_expand_vector_move (rtx operands[])
3071 rtx op0 = operands[0], op1 = operands[1];
3073 emit_move_insn (op0, op1);
3076 /* Emit vector compare for code RCODE. DEST is destination, OP1 and
3077 OP2 are two VEC_COND_EXPR operands, CC_OP0 and CC_OP1 are the two
3078 operands of the comparison RCODE. This is a recursive
3079 function. */
3081 static void
3082 paired_emit_vector_compare (enum rtx_code rcode,
3083 rtx dest, rtx op0, rtx op1,
3084 rtx cc_op0, rtx cc_op1)
3086 rtx tmp = gen_reg_rtx (V2SFmode);
3087 rtx tmp1, max, min, equal_zero;
3089 gcc_assert (TARGET_PAIRED_FLOAT);
3090 gcc_assert (GET_MODE (op0) == GET_MODE (op1));
3092 switch (rcode)
3094 case LT:
3095 case LTU:
3096 paired_emit_vector_compare (GE, dest, op1, op0, cc_op0, cc_op1);
3097 return;
3098 case GE:
3099 case GEU:
3100 emit_insn (gen_subv2sf3 (tmp, cc_op0, cc_op1));
3101 emit_insn (gen_selv2sf4 (dest, tmp, op0, op1, CONST0_RTX (SFmode)));
3102 return;
3103 case LE:
3104 case LEU:
3105 paired_emit_vector_compare (GE, dest, op0, op1, cc_op1, cc_op0);
3106 return;
3107 case GT:
3108 paired_emit_vector_compare (LE, dest, op1, op0, cc_op0, cc_op1);
3109 return;
3110 case EQ:
3111 tmp1 = gen_reg_rtx (V2SFmode);
3112 max = gen_reg_rtx (V2SFmode);
3113 min = gen_reg_rtx (V2SFmode);
3114 equal_zero = gen_reg_rtx (V2SFmode);
3116 emit_insn (gen_subv2sf3 (tmp, cc_op0, cc_op1));
3117 emit_insn (gen_selv2sf4
3118 (max, tmp, cc_op0, cc_op1, CONST0_RTX (SFmode)));
3119 emit_insn (gen_subv2sf3 (tmp, cc_op1, cc_op0));
3120 emit_insn (gen_selv2sf4
3121 (min, tmp, cc_op0, cc_op1, CONST0_RTX (SFmode)));
3122 emit_insn (gen_subv2sf3 (tmp1, min, max));
3123 emit_insn (gen_selv2sf4 (dest, tmp1, op0, op1, CONST0_RTX (SFmode)));
3124 return;
3125 case NE:
3126 paired_emit_vector_compare (EQ, dest, op1, op0, cc_op0, cc_op1);
3127 return;
3128 case UNLE:
3129 paired_emit_vector_compare (LE, dest, op1, op0, cc_op0, cc_op1);
3130 return;
3131 case UNLT:
3132 paired_emit_vector_compare (LT, dest, op1, op0, cc_op0, cc_op1);
3133 return;
3134 case UNGE:
3135 paired_emit_vector_compare (GE, dest, op1, op0, cc_op0, cc_op1);
3136 return;
3137 case UNGT:
3138 paired_emit_vector_compare (GT, dest, op1, op0, cc_op0, cc_op1);
3139 return;
3140 default:
3141 gcc_unreachable ();
3144 return;
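/* Illustration of the EQ case above, assuming ps_sel picks its second
   operand when the condition element is >= 0: MAX = max(a,b) and
   MIN = min(a,b) are formed from a-b and b-a, so MIN - MAX is >= 0
   exactly when the elements are equal, and the final sel then picks
   OP0 on equality and OP1 otherwise.  */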
3147 /* Emit vector conditional expression.
3148 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
3149 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
3151 int
3152 paired_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
3153 rtx cond, rtx cc_op0, rtx cc_op1)
3155 enum rtx_code rcode = GET_CODE (cond);
3157 if (!TARGET_PAIRED_FLOAT)
3158 return 0;
3160 paired_emit_vector_compare (rcode, dest, op1, op2, cc_op0, cc_op1);
3162 return 1;
3165 /* Initialize vector TARGET to VALS. */
3167 void
3168 rs6000_expand_vector_init (rtx target, rtx vals)
3170 enum machine_mode mode = GET_MODE (target);
3171 enum machine_mode inner_mode = GET_MODE_INNER (mode);
3172 int n_elts = GET_MODE_NUNITS (mode);
3173 int n_var = 0, one_var = -1;
3174 bool all_same = true, all_const_zero = true;
3175 rtx x, mem;
3176 int i;
3178 for (i = 0; i < n_elts; ++i)
3180 x = XVECEXP (vals, 0, i);
3181 if (!CONSTANT_P (x))
3182 ++n_var, one_var = i;
3183 else if (x != CONST0_RTX (inner_mode))
3184 all_const_zero = false;
3186 if (i > 0 && !rtx_equal_p (x, XVECEXP (vals, 0, 0)))
3187 all_same = false;
3190 if (n_var == 0)
3192 rtx const_vec = gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0));
3193 if (mode != V4SFmode && all_const_zero)
3195 /* Zero register. */
3196 emit_insn (gen_rtx_SET (VOIDmode, target,
3197 gen_rtx_XOR (mode, target, target)));
3198 return;
3200 else if (mode != V4SFmode && easy_vector_constant (const_vec, mode))
3202 /* Splat immediate. */
3203 emit_insn (gen_rtx_SET (VOIDmode, target, const_vec));
3204 return;
3206 else if (all_same)
3207 ; /* Splat vector element. */
3208 else
3210 /* Load from constant pool. */
3211 emit_move_insn (target, const_vec);
3212 return;
3216 /* Store value to stack temp. Load vector element. Splat. */
3217 if (all_same)
3219 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
3220 emit_move_insn (adjust_address_nv (mem, inner_mode, 0),
3221 XVECEXP (vals, 0, 0));
3222 x = gen_rtx_UNSPEC (VOIDmode,
3223 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
3224 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3225 gen_rtvec (2,
3226 gen_rtx_SET (VOIDmode,
3227 target, mem),
3228 x)));
3229 x = gen_rtx_VEC_SELECT (inner_mode, target,
3230 gen_rtx_PARALLEL (VOIDmode,
3231 gen_rtvec (1, const0_rtx)));
3232 emit_insn (gen_rtx_SET (VOIDmode, target,
3233 gen_rtx_VEC_DUPLICATE (mode, x)));
3234 return;
3237 /* One field is non-constant. Load constant then overwrite
3238 varying field. */
3239 if (n_var == 1)
3241 rtx copy = copy_rtx (vals);
3243 /* Load constant part of vector, substitute neighboring value for
3244 varying element. */
3245 XVECEXP (copy, 0, one_var) = XVECEXP (vals, 0, (one_var + 1) % n_elts);
3246 rs6000_expand_vector_init (target, copy);
3248 /* Insert variable. */
3249 rs6000_expand_vector_set (target, XVECEXP (vals, 0, one_var), one_var);
3250 return;
3253 /* Construct the vector in memory one field at a time
3254 and load the whole vector. */
3255 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3256 for (i = 0; i < n_elts; i++)
3257 emit_move_insn (adjust_address_nv (mem, inner_mode,
3258 i * GET_MODE_SIZE (inner_mode)),
3259 XVECEXP (vals, 0, i));
3260 emit_move_insn (target, mem);
3263 /* Set field ELT of TARGET to VAL. */
3265 void
3266 rs6000_expand_vector_set (rtx target, rtx val, int elt)
3268 enum machine_mode mode = GET_MODE (target);
3269 enum machine_mode inner_mode = GET_MODE_INNER (mode);
3270 rtx reg = gen_reg_rtx (mode);
3271 rtx mask, mem, x;
3272 int width = GET_MODE_SIZE (inner_mode);
3273 int i;
3275 /* Load single variable value. */
3276 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
3277 emit_move_insn (adjust_address_nv (mem, inner_mode, 0), val);
3278 x = gen_rtx_UNSPEC (VOIDmode,
3279 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
3280 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3281 gen_rtvec (2,
3282 gen_rtx_SET (VOIDmode,
3283 reg, mem),
3284 x)));
3286 /* Linear sequence. */
3287 mask = gen_rtx_PARALLEL (V16QImode, rtvec_alloc (16));
3288 for (i = 0; i < 16; ++i)
3289 XVECEXP (mask, 0, i) = GEN_INT (i);
3291 /* Set permute mask to insert element into target. */
3292 for (i = 0; i < width; ++i)
3293 XVECEXP (mask, 0, elt*width + i)
3294 = GEN_INT (i + 0x10);
3295 x = gen_rtx_CONST_VECTOR (V16QImode, XVEC (mask, 0));
3296 x = gen_rtx_UNSPEC (mode,
3297 gen_rtvec (3, target, reg,
3298 force_reg (V16QImode, x)),
3299 UNSPEC_VPERM);
3300 emit_insn (gen_rtx_SET (VOIDmode, target, x));
3303 /* Extract field ELT from VEC into TARGET. */
3305 void
3306 rs6000_expand_vector_extract (rtx target, rtx vec, int elt)
3308 enum machine_mode mode = GET_MODE (vec);
3309 enum machine_mode inner_mode = GET_MODE_INNER (mode);
3310 rtx mem, x;
3312 /* Allocate mode-sized buffer. */
3313 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3315 /* Add offset to field within buffer matching vector element. */
3316 mem = adjust_address_nv (mem, mode, elt * GET_MODE_SIZE (inner_mode));
3318 /* Store single field into mode-sized buffer. */
3319 x = gen_rtx_UNSPEC (VOIDmode,
3320 gen_rtvec (1, const0_rtx), UNSPEC_STVE);
3321 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3322 gen_rtvec (2,
3323 gen_rtx_SET (VOIDmode,
3324 mem, vec),
3325 x)));
3326 emit_move_insn (target, adjust_address_nv (mem, inner_mode, 0));
3329 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
3330 implement ANDing by the mask IN. */
3331 void
3332 build_mask64_2_operands (rtx in, rtx *out)
3334 #if HOST_BITS_PER_WIDE_INT >= 64
3335 unsigned HOST_WIDE_INT c, lsb, m1, m2;
3336 int shift;
3338 gcc_assert (GET_CODE (in) == CONST_INT);
3340 c = INTVAL (in);
3341 if (c & 1)
3343 /* Assume c initially something like 0x00fff000000fffff. The idea
3344 is to rotate the word so that the middle ^^^^^^ group of zeros
3345 is at the MS end and can be cleared with an rldicl mask. We then
3346 rotate back and clear off the MS ^^ group of zeros with a
3347 second rldicl. */
3348 c = ~c; /* c == 0xff000ffffff00000 */
3349 lsb = c & -c; /* lsb == 0x0000000000100000 */
3350 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
3351 c = ~c; /* c == 0x00fff000000fffff */
3352 c &= -lsb; /* c == 0x00fff00000000000 */
3353 lsb = c & -c; /* lsb == 0x0000100000000000 */
3354 c = ~c; /* c == 0xff000fffffffffff */
3355 c &= -lsb; /* c == 0xff00000000000000 */
3356 shift = 0;
3357 while ((lsb >>= 1) != 0)
3358 shift++; /* shift == 44 on exit from loop */
3359 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
3360 m1 = ~m1; /* m1 == 0x000000ffffffffff */
3361 m2 = ~c; /* m2 == 0x00ffffffffffffff */
3363 else
3365 /* Assume c initially something like 0xff000f0000000000. The idea
3366 is to rotate the word so that the ^^^ middle group of zeros
3367 is at the LS end and can be cleared with an rldicr mask. We then
3368 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
3369 a second rldicr. */
3370 lsb = c & -c; /* lsb == 0x0000010000000000 */
3371 m2 = -lsb; /* m2 == 0xffffff0000000000 */
3372 c = ~c; /* c == 0x00fff0ffffffffff */
3373 c &= -lsb; /* c == 0x00fff00000000000 */
3374 lsb = c & -c; /* lsb == 0x0000100000000000 */
3375 c = ~c; /* c == 0xff000fffffffffff */
3376 c &= -lsb; /* c == 0xff00000000000000 */
3377 shift = 0;
3378 while ((lsb >>= 1) != 0)
3379 shift++; /* shift == 44 on exit from loop */
3380 m1 = ~c; /* m1 == 0x00ffffffffffffff */
3381 m1 >>= shift; /* m1 == 0x0000000000000fff */
3382 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
3385 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
3386 masks will be all 1's. We are guaranteed more than one transition. */
3387 out[0] = GEN_INT (64 - shift);
3388 out[1] = GEN_INT (m1);
3389 out[2] = GEN_INT (shift);
3390 out[3] = GEN_INT (m2);
3391 #else
3392 (void)in;
3393 (void)out;
3394 gcc_unreachable ();
3395 #endif
3398 /* Return TRUE if OP is an invalid SUBREG operation on the e500. */
3400 bool
3401 invalid_e500_subreg (rtx op, enum machine_mode mode)
3403 if (TARGET_E500_DOUBLE)
3405 /* Reject (subreg:SI (reg:DF)); likewise with subreg:DI or
3406 subreg:TI and reg:TF. Decimal float modes are like integer
3407 modes (only low part of each register used) for this
3408 purpose. */
3409 if (GET_CODE (op) == SUBREG
3410 && (mode == SImode || mode == DImode || mode == TImode
3411 || mode == DDmode || mode == TDmode)
3412 && REG_P (SUBREG_REG (op))
3413 && (GET_MODE (SUBREG_REG (op)) == DFmode
3414 || GET_MODE (SUBREG_REG (op)) == TFmode))
3415 return true;
3417 /* Reject (subreg:DF (reg:DI)); likewise with subreg:TF and
3418 reg:TI. */
3419 if (GET_CODE (op) == SUBREG
3420 && (mode == DFmode || mode == TFmode)
3421 && REG_P (SUBREG_REG (op))
3422 && (GET_MODE (SUBREG_REG (op)) == DImode
3423 || GET_MODE (SUBREG_REG (op)) == TImode
3424 || GET_MODE (SUBREG_REG (op)) == DDmode
3425 || GET_MODE (SUBREG_REG (op)) == TDmode))
3426 return true;
3429 if (TARGET_SPE
3430 && GET_CODE (op) == SUBREG
3431 && mode == SImode
3432 && REG_P (SUBREG_REG (op))
3433 && SPE_VECTOR_MODE (GET_MODE (SUBREG_REG (op))))
3434 return true;
3436 return false;
3439 /* AIX increases natural record alignment to doubleword if the first
3440 field is an FP double, while the FP fields themselves remain word aligned. */
3442 unsigned int
3443 rs6000_special_round_type_align (tree type, unsigned int computed,
3444 unsigned int specified)
3446 unsigned int align = MAX (computed, specified);
3447 tree field = TYPE_FIELDS (type);
3449 /* Skip all non field decls */
3450 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
3451 field = TREE_CHAIN (field);
3453 if (field != NULL && field != type)
3455 type = TREE_TYPE (field);
3456 while (TREE_CODE (type) == ARRAY_TYPE)
3457 type = TREE_TYPE (type);
3459 if (type != error_mark_node && TYPE_MODE (type) == DFmode)
3460 align = MAX (align, 64);
3463 return align;
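/* Example of the AIX rule above: "struct { double d; int i; }" is
   rounded up to 64-bit alignment because its first field is a double,
   whereas "struct { int i; double d; }" keeps word alignment.  */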
3466 /* Darwin increases record alignment to the natural alignment of
3467 the first field. */
3469 unsigned int
3470 darwin_rs6000_special_round_type_align (tree type, unsigned int computed,
3471 unsigned int specified)
3473 unsigned int align = MAX (computed, specified);
3475 if (TYPE_PACKED (type))
3476 return align;
3478 /* Find the first field, looking down into aggregates. */
3479 do {
3480 tree field = TYPE_FIELDS (type);
3481 /* Skip all non field decls */
3482 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
3483 field = TREE_CHAIN (field);
3484 if (! field)
3485 break;
3486 type = TREE_TYPE (field);
3487 while (TREE_CODE (type) == ARRAY_TYPE)
3488 type = TREE_TYPE (type);
3489 } while (AGGREGATE_TYPE_P (type));
3491 if (! AGGREGATE_TYPE_P (type) && type != error_mark_node)
3492 align = MAX (align, TYPE_ALIGN (type));
3494 return align;
3497 /* Return 1 for an operand in small memory on V.4/eabi. */
3499 int
3500 small_data_operand (rtx op ATTRIBUTE_UNUSED,
3501 enum machine_mode mode ATTRIBUTE_UNUSED)
3503 #if TARGET_ELF
3504 rtx sym_ref;
3506 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
3507 return 0;
3509 if (DEFAULT_ABI != ABI_V4)
3510 return 0;
3512 /* Vector and float memory instructions have a limited offset on the
3513 SPE, so using a vector or float variable directly as an operand is
3514 not useful. */
3515 if (TARGET_SPE
3516 && (SPE_VECTOR_MODE (mode) || FLOAT_MODE_P (mode)))
3517 return 0;
3519 if (GET_CODE (op) == SYMBOL_REF)
3520 sym_ref = op;
3522 else if (GET_CODE (op) != CONST
3523 || GET_CODE (XEXP (op, 0)) != PLUS
3524 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
3525 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
3526 return 0;
3528 else
3530 rtx sum = XEXP (op, 0);
3531 HOST_WIDE_INT summand;
3533 /* We have to be careful here, because it is the referenced address
3534 that must be within 32k of _SDA_BASE_, not just the symbol. */
3535 summand = INTVAL (XEXP (sum, 1));
3536 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
3537 return 0;
3539 sym_ref = XEXP (sum, 0);
3542 return SYMBOL_REF_SMALL_P (sym_ref);
3543 #else
3544 return 0;
3545 #endif
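/* Editorial sketch (hypothetical helper, plain C types) of the summand
   test above: the whole referenced byte, symbol plus summand, must lie
   within LIMIT (the role played by g_switch_value) of _SDA_BASE_, so a
   negative or over-large summand disqualifies the reference.  */

static int
small_data_summand_ok_sketch (long long summand, unsigned long long limit)
{
  return summand >= 0 && (unsigned long long) summand <= limit;
}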
3548 /* Return true if either operand is a general purpose register. */
3550 bool
3551 gpr_or_gpr_p (rtx op0, rtx op1)
3553 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
3554 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
3558 /* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
3560 static bool
3561 constant_pool_expr_p (rtx op)
3563 rtx base, offset;
3565 split_const (op, &base, &offset);
3566 return (GET_CODE (base) == SYMBOL_REF
3567 && CONSTANT_POOL_ADDRESS_P (base)
3568 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (base), Pmode));
3571 bool
3572 toc_relative_expr_p (rtx op)
3574 rtx base, offset;
3576 if (GET_CODE (op) != CONST)
3577 return false;
3579 split_const (op, &base, &offset);
3580 return (GET_CODE (base) == UNSPEC
3581 && XINT (base, 1) == UNSPEC_TOCREL);
3584 bool
3585 legitimate_constant_pool_address_p (rtx x)
3587 return (TARGET_TOC
3588 && GET_CODE (x) == PLUS
3589 && GET_CODE (XEXP (x, 0)) == REG
3590 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
3591 && toc_relative_expr_p (XEXP (x, 1)));
3594 static bool
3595 legitimate_small_data_p (enum machine_mode mode, rtx x)
3597 return (DEFAULT_ABI == ABI_V4
3598 && !flag_pic && !TARGET_TOC
3599 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
3600 && small_data_operand (x, mode));
3603 /* SPE offset addressing is limited to 5-bits worth of double words. */
3604 #define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
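/* Editorial note on the mask above: ~0xf8 clears bits 3..7, so an
   offset passes only when it is doubleword aligned and its doubleword
   count fits in 5 bits, i.e. exactly the bytes 0, 8, 16, ..., 248.
   For instance SPE_CONST_OFFSET_OK (248) holds, while
   SPE_CONST_OFFSET_OK (4) fails (misaligned) and
   SPE_CONST_OFFSET_OK (256) fails (needs a sixth bit).  */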
3606 bool
3607 rs6000_legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
3609 unsigned HOST_WIDE_INT offset, extra;
3611 if (GET_CODE (x) != PLUS)
3612 return false;
3613 if (GET_CODE (XEXP (x, 0)) != REG)
3614 return false;
3615 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3616 return false;
3617 if (legitimate_constant_pool_address_p (x))
3618 return true;
3619 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
3620 return false;
3622 offset = INTVAL (XEXP (x, 1));
3623 extra = 0;
3624 switch (mode)
3626 case V16QImode:
3627 case V8HImode:
3628 case V4SFmode:
3629 case V4SImode:
3630 /* AltiVec vector modes. Only reg+reg addressing is valid and
3631 constant offset zero should not occur due to canonicalization. */
3632 return false;
3634 case V4HImode:
3635 case V2SImode:
3636 case V1DImode:
3637 case V2SFmode:
3638 /* Paired vector modes. Only reg+reg addressing is valid and
3639 constant offset zero should not occur due to canonicalization. */
3640 if (TARGET_PAIRED_FLOAT)
3641 return false;
3642 /* SPE vector modes. */
3643 return SPE_CONST_OFFSET_OK (offset);
3645 case DFmode:
3646 if (TARGET_E500_DOUBLE)
3647 return SPE_CONST_OFFSET_OK (offset);
3649 case DDmode:
3650 case DImode:
3651 /* On e500v2, we may have:
3653 (subreg:DF (mem:DI (plus (reg) (const_int))) 0),
3655 which is addressed with evldd instructions. */
3656 if (TARGET_E500_DOUBLE)
3657 return SPE_CONST_OFFSET_OK (offset);
3659 if (mode == DFmode || mode == DDmode || !TARGET_POWERPC64)
3660 extra = 4;
3661 else if (offset & 3)
3662 return false;
3663 break;
3665 case TFmode:
3666 if (TARGET_E500_DOUBLE)
3667 return (SPE_CONST_OFFSET_OK (offset)
3668 && SPE_CONST_OFFSET_OK (offset + 8));
3670 case TDmode:
3671 case TImode:
3672 if (mode == TFmode || mode == TDmode || !TARGET_POWERPC64)
3673 extra = 12;
3674 else if (offset & 3)
3675 return false;
3676 else
3677 extra = 8;
3678 break;
3680 default:
3681 break;
3684 offset += 0x8000;
3685 return (offset < 0x10000) && (offset + extra < 0x10000);
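/* Editorial sketch (hypothetical name, plain C types): the bias in the
   final test above folds the signed range check
   -0x8000 <= offset && offset + extra <= 0x7fff into two unsigned
   compares.  E.g. offset 0x7fff with extra 0 passes (0x7fff + 0x8000 =
   0xffff < 0x10000), while offset 0x8000 is rejected (0x10000 is not
   < 0x10000).  */

static int
d_form_offset_ok_sketch (long long offset, unsigned long long extra)
{
  unsigned long long biased = (unsigned long long) offset + 0x8000;
  return biased < 0x10000 && biased + extra < 0x10000;
}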
3688 bool
3689 legitimate_indexed_address_p (rtx x, int strict)
3691 rtx op0, op1;
3693 if (GET_CODE (x) != PLUS)
3694 return false;
3696 op0 = XEXP (x, 0);
3697 op1 = XEXP (x, 1);
3699 /* Recognize the rtl generated by reload which we know will later be
3700 replaced with proper base and index regs. */
3701 if (!strict
3702 && reload_in_progress
3703 && (REG_P (op0) || GET_CODE (op0) == PLUS)
3704 && REG_P (op1))
3705 return true;
3707 return (REG_P (op0) && REG_P (op1)
3708 && ((INT_REG_OK_FOR_BASE_P (op0, strict)
3709 && INT_REG_OK_FOR_INDEX_P (op1, strict))
3710 || (INT_REG_OK_FOR_BASE_P (op1, strict)
3711 && INT_REG_OK_FOR_INDEX_P (op0, strict))));
3714 bool
3715 avoiding_indexed_address_p (enum machine_mode mode)
3717 /* Avoid indexed addressing for modes that have non-indexed
3718 load/store instruction forms. */
3719 return TARGET_AVOID_XFORM && !ALTIVEC_VECTOR_MODE (mode);
3722 inline bool
3723 legitimate_indirect_address_p (rtx x, int strict)
3725 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
3728 bool
3729 macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
3731 if (!TARGET_MACHO || !flag_pic
3732 || mode != SImode || GET_CODE (x) != MEM)
3733 return false;
3734 x = XEXP (x, 0);
3736 if (GET_CODE (x) != LO_SUM)
3737 return false;
3738 if (GET_CODE (XEXP (x, 0)) != REG)
3739 return false;
3740 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
3741 return false;
3742 x = XEXP (x, 1);
3744 return CONSTANT_P (x);
3747 static bool
3748 legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
3750 if (GET_CODE (x) != LO_SUM)
3751 return false;
3752 if (GET_CODE (XEXP (x, 0)) != REG)
3753 return false;
3754 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
3755 return false;
3756 /* Restrict addressing for DI because of our SUBREG hackery. */
3757 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
3758 || mode == DDmode || mode == TDmode
3759 || mode == DImode))
3760 return false;
3761 x = XEXP (x, 1);
3763 if (TARGET_ELF || TARGET_MACHO)
3765 if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
3766 return false;
3767 if (TARGET_TOC)
3768 return false;
3769 if (GET_MODE_NUNITS (mode) != 1)
3770 return false;
3771 if (GET_MODE_BITSIZE (mode) > 64
3772 || (GET_MODE_BITSIZE (mode) > 32 && !TARGET_POWERPC64
3773 && !(TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT
3774 && (mode == DFmode || mode == DDmode))))
3775 return false;
3777 return CONSTANT_P (x);
3780 return false;
3784 /* Try machine-dependent ways of modifying an illegitimate address
3785 to be legitimate. If we find one, return the new, valid address.
3786 This is used from only one place: `memory_address' in explow.c.
3788 OLDX is the address as it was before break_out_memory_refs was
3789 called. In some cases it is useful to look at this to decide what
3790 needs to be done.
3792 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
3794 It is always safe for this function to do nothing. It exists to
3795 recognize opportunities to optimize the output.
3797 On RS/6000, first check for the sum of a register with a constant
3798 integer that is out of range. If so, generate code to add the
3799 constant with the low-order 16 bits masked to the register and force
3800 this result into another register (this can be done with `cau').
3801 Then generate an address of REG+(CONST&0xffff), allowing for the
3802 possibility of bit 16 being a one.
3804 Then check for the sum of a register and something not constant, try to
3805 load the other things into a register and return the sum. */
3808 rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
3809 enum machine_mode mode)
3811 if (GET_CODE (x) == SYMBOL_REF)
3813 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
3814 if (model != 0)
3815 return rs6000_legitimize_tls_address (x, model);
3818 if (GET_CODE (x) == PLUS
3819 && GET_CODE (XEXP (x, 0)) == REG
3820 && GET_CODE (XEXP (x, 1)) == CONST_INT
3821 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000
3822 && !((TARGET_POWERPC64
3823 && (mode == DImode || mode == TImode)
3824 && (INTVAL (XEXP (x, 1)) & 3) != 0)
3825 || SPE_VECTOR_MODE (mode)
3826 || ALTIVEC_VECTOR_MODE (mode)
3827 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
3828 || mode == DImode || mode == DDmode
3829 || mode == TDmode))))
3831 HOST_WIDE_INT high_int, low_int;
3832 rtx sum;
3833 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
3834 high_int = INTVAL (XEXP (x, 1)) - low_int;
3835 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
3836 GEN_INT (high_int)), 0);
3837 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
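  /* Editorial note on the split above: LOW_INT is the sign-extended
     bottom 16 bits and HIGH_INT absorbs the carry, so for example
     0x12345678 splits as 0x12340000 + 0x5678 while 0x1234abcd splits
     as 0x12350000 + (-0x5433); in both cases the two parts sum back
     to the original displacement and each part fits its instruction
     field.  */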
3839 else if (GET_CODE (x) == PLUS
3840 && GET_CODE (XEXP (x, 0)) == REG
3841 && GET_CODE (XEXP (x, 1)) != CONST_INT
3842 && GET_MODE_NUNITS (mode) == 1
3843 && ((TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT)
3844 || TARGET_POWERPC64
3845 || ((mode != DImode && mode != DFmode && mode != DDmode)
3846 || (TARGET_E500_DOUBLE && mode != DDmode)))
3847 && (TARGET_POWERPC64 || mode != DImode)
3848 && !avoiding_indexed_address_p (mode)
3849 && mode != TImode
3850 && mode != TFmode
3851 && mode != TDmode)
3853 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
3854 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
3856 else if (ALTIVEC_VECTOR_MODE (mode))
3858 rtx reg;
3860 /* Make sure both operands are registers. */
3861 if (GET_CODE (x) == PLUS)
3862 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
3863 force_reg (Pmode, XEXP (x, 1)));
3865 reg = force_reg (Pmode, x);
3866 return reg;
3868 else if (SPE_VECTOR_MODE (mode)
3869 || (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
3870 || mode == DDmode || mode == TDmode
3871 || mode == DImode)))
3873 if (mode == DImode)
3874 return NULL_RTX;
3875 /* We accept [reg + reg] and [reg + OFFSET]. */
3877 if (GET_CODE (x) == PLUS)
3879 rtx op1 = XEXP (x, 0);
3880 rtx op2 = XEXP (x, 1);
3881 rtx y;
3883 op1 = force_reg (Pmode, op1);
3885 if (GET_CODE (op2) != REG
3886 && (GET_CODE (op2) != CONST_INT
3887 || !SPE_CONST_OFFSET_OK (INTVAL (op2))
3888 || (GET_MODE_SIZE (mode) > 8
3889 && !SPE_CONST_OFFSET_OK (INTVAL (op2) + 8))))
3890 op2 = force_reg (Pmode, op2);
3892 /* We can't always do [reg + reg] for these, because [reg +
3893 reg + offset] is not a legitimate addressing mode. */
3894 y = gen_rtx_PLUS (Pmode, op1, op2);
3896 if ((GET_MODE_SIZE (mode) > 8 || mode == DDmode) && REG_P (op2))
3897 return force_reg (Pmode, y);
3898 else
3899 return y;
3902 return force_reg (Pmode, x);
3904 else if (TARGET_ELF
3905 && TARGET_32BIT
3906 && TARGET_NO_TOC
3907 && ! flag_pic
3908 && GET_CODE (x) != CONST_INT
3909 && GET_CODE (x) != CONST_DOUBLE
3910 && CONSTANT_P (x)
3911 && GET_MODE_NUNITS (mode) == 1
3912 && (GET_MODE_BITSIZE (mode) <= 32
3913 || ((TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT)
3914 && (mode == DFmode || mode == DDmode))))
3916 rtx reg = gen_reg_rtx (Pmode);
3917 emit_insn (gen_elf_high (reg, x));
3918 return gen_rtx_LO_SUM (Pmode, reg, x);
3920 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
3921 && ! flag_pic
3922 #if TARGET_MACHO
3923 && ! MACHO_DYNAMIC_NO_PIC_P
3924 #endif
3925 && GET_CODE (x) != CONST_INT
3926 && GET_CODE (x) != CONST_DOUBLE
3927 && CONSTANT_P (x)
3928 && GET_MODE_NUNITS (mode) == 1
3929 && ((TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT)
3930 || (mode != DFmode && mode != DDmode))
3931 && mode != DImode
3932 && mode != TImode)
3934 rtx reg = gen_reg_rtx (Pmode);
3935 emit_insn (gen_macho_high (reg, x));
3936 return gen_rtx_LO_SUM (Pmode, reg, x);
3938 else if (TARGET_TOC
3939 && GET_CODE (x) == SYMBOL_REF
3940 && constant_pool_expr_p (x)
3941 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
3943 return create_TOC_reference (x);
3945 else
3946 return NULL_RTX;
3949 /* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
3950 We need to emit DTP-relative relocations. */
3952 static void
3953 rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
3955 switch (size)
3957 case 4:
3958 fputs ("\t.long\t", file);
3959 break;
3960 case 8:
3961 fputs (DOUBLE_INT_ASM_OP, file);
3962 break;
3963 default:
3964 gcc_unreachable ();
3966 output_addr_const (file, x);
3967 fputs ("@dtprel+0x8000", file);
3970 /* Construct the SYMBOL_REF for the tls_get_addr function. */
3972 static GTY(()) rtx rs6000_tls_symbol;
3973 static rtx
3974 rs6000_tls_get_addr (void)
3976 if (!rs6000_tls_symbol)
3977 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
3979 return rs6000_tls_symbol;
3982 /* Construct the SYMBOL_REF for TLS GOT references. */
3984 static GTY(()) rtx rs6000_got_symbol;
3985 static rtx
3986 rs6000_got_sym (void)
3988 if (!rs6000_got_symbol)
3990 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
3991 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
3992 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
3995 return rs6000_got_symbol;
3998 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
3999 this (thread-local) address. */
4001 static rtx
4002 rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
4004 rtx dest, insn;
4006 dest = gen_reg_rtx (Pmode);
4007 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
4009 rtx tlsreg;
4011 if (TARGET_64BIT)
4013 tlsreg = gen_rtx_REG (Pmode, 13);
4014 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
4016 else
4018 tlsreg = gen_rtx_REG (Pmode, 2);
4019 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
4021 emit_insn (insn);
4023 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
4025 rtx tlsreg, tmp;
4027 tmp = gen_reg_rtx (Pmode);
4028 if (TARGET_64BIT)
4030 tlsreg = gen_rtx_REG (Pmode, 13);
4031 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
4033 else
4035 tlsreg = gen_rtx_REG (Pmode, 2);
4036 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
4038 emit_insn (insn);
4039 if (TARGET_64BIT)
4040 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
4041 else
4042 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
4043 emit_insn (insn);
4045 else
4047 rtx r3, got, tga, tmp1, tmp2, eqv;
4049 /* We currently use relocations like @got@tlsgd for tls, which
4050 means the linker will handle allocation of tls entries, placing
4051 them in the .got section. So use a pointer to the .got section,
4052 not one to secondary TOC sections used by 64-bit -mminimal-toc,
4053 or to secondary GOT sections used by 32-bit -fPIC. */
4054 if (TARGET_64BIT)
4055 got = gen_rtx_REG (Pmode, 2);
4056 else
4058 if (flag_pic == 1)
4059 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
4060 else
4062 rtx gsym = rs6000_got_sym ();
4063 got = gen_reg_rtx (Pmode);
4064 if (flag_pic == 0)
4065 rs6000_emit_move (got, gsym, Pmode);
4066 else
4068 rtx tmp3, mem;
4069 rtx first, last;
4071 tmp1 = gen_reg_rtx (Pmode);
4072 tmp2 = gen_reg_rtx (Pmode);
4073 tmp3 = gen_reg_rtx (Pmode);
4074 mem = gen_const_mem (Pmode, tmp1);
4076 first = emit_insn (gen_load_toc_v4_PIC_1b (gsym));
4077 emit_move_insn (tmp1,
4078 gen_rtx_REG (Pmode, LR_REGNO));
4079 emit_move_insn (tmp2, mem);
4080 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
4081 last = emit_move_insn (got, tmp3);
4082 set_unique_reg_note (last, REG_EQUAL, gsym);
4087 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
4089 r3 = gen_rtx_REG (Pmode, 3);
4090 tga = rs6000_tls_get_addr ();
4092 if (DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
4093 insn = gen_tls_gd_aix64 (r3, got, addr, tga, const0_rtx);
4094 else if (DEFAULT_ABI == ABI_AIX && !TARGET_64BIT)
4095 insn = gen_tls_gd_aix32 (r3, got, addr, tga, const0_rtx);
4096 else if (DEFAULT_ABI == ABI_V4)
4097 insn = gen_tls_gd_sysvsi (r3, got, addr, tga, const0_rtx);
4098 else
4099 gcc_unreachable ();
4101 start_sequence ();
4102 insn = emit_call_insn (insn);
4103 RTL_CONST_CALL_P (insn) = 1;
4104 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
4105 if (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
4106 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), pic_offset_table_rtx);
4107 insn = get_insns ();
4108 end_sequence ();
4109 emit_libcall_block (insn, dest, r3, addr);
4111 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
4113 r3 = gen_rtx_REG (Pmode, 3);
4114 tga = rs6000_tls_get_addr ();
4116 if (DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
4117 insn = gen_tls_ld_aix64 (r3, got, tga, const0_rtx);
4118 else if (DEFAULT_ABI == ABI_AIX && !TARGET_64BIT)
4119 insn = gen_tls_ld_aix32 (r3, got, tga, const0_rtx);
4120 else if (DEFAULT_ABI == ABI_V4)
4121 insn = gen_tls_ld_sysvsi (r3, got, tga, const0_rtx);
4122 else
4123 gcc_unreachable ();
4125 start_sequence ();
4126 insn = emit_call_insn (insn);
4127 RTL_CONST_CALL_P (insn) = 1;
4128 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
4129 if (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
4130 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), pic_offset_table_rtx);
4131 insn = get_insns ();
4132 end_sequence ();
4133 tmp1 = gen_reg_rtx (Pmode);
4134 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
4135 UNSPEC_TLSLD);
4136 emit_libcall_block (insn, tmp1, r3, eqv);
4137 if (rs6000_tls_size == 16)
4139 if (TARGET_64BIT)
4140 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
4141 else
4142 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
4144 else if (rs6000_tls_size == 32)
4146 tmp2 = gen_reg_rtx (Pmode);
4147 if (TARGET_64BIT)
4148 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
4149 else
4150 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
4151 emit_insn (insn);
4152 if (TARGET_64BIT)
4153 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
4154 else
4155 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
4157 else
4159 tmp2 = gen_reg_rtx (Pmode);
4160 if (TARGET_64BIT)
4161 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
4162 else
4163 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
4164 emit_insn (insn);
4165 insn = gen_rtx_SET (Pmode, dest,
4166 gen_rtx_PLUS (Pmode, tmp2, tmp1));
4168 emit_insn (insn);
4170 else
4172 /* IE, or 64-bit offset LE. */
4173 tmp2 = gen_reg_rtx (Pmode);
4174 if (TARGET_64BIT)
4175 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
4176 else
4177 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
4178 emit_insn (insn);
4179 if (TARGET_64BIT)
4180 insn = gen_tls_tls_64 (dest, tmp2, addr);
4181 else
4182 insn = gen_tls_tls_32 (dest, tmp2, addr);
4183 emit_insn (insn);
4187 return dest;
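/* Editorial note: for the small local-exec case above
   (rs6000_tls_size == 16), the generated code reduces to a single add
   from the thread pointer, e.g. on 64-bit

	addi	r3,r13,var@tprel

   while the 32-bit-offset variant uses an @tprel@ha / @tprel@l
   addis/addi pair (relocation spellings per the PowerPC TLS ABI;
   register names illustrative).  */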
4190 /* Return 1 if X contains a thread-local symbol. */
4192 bool
4193 rs6000_tls_referenced_p (rtx x)
4195 if (! TARGET_HAVE_TLS)
4196 return false;
4198 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
4201 /* Return 1 if *X is a thread-local symbol. This is the same as
4202 rs6000_tls_symbol_ref except for the type of the unused argument. */
4204 static int
4205 rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
4207 return RS6000_SYMBOL_REF_TLS_P (*x);
4210 /* The convention appears to be to define this wherever it is used.
4211 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
4212 is needed here as well. */
4213 #ifndef REG_MODE_OK_FOR_BASE_P
4214 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
4215 #endif
4217 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
4218 replace the input X, or the original X if no replacement is called for.
4219 The output parameter *WIN is 1 if the calling macro should goto WIN,
4220 0 if it should not.
4222 For RS/6000, we wish to handle large displacements off a base
4223 register by splitting the addend across an addi/addis pair and the mem insn.
4224 This cuts the number of extra insns needed from 3 to 1.
4226 On Darwin, we use this to generate code for floating point constants.
4227 A movsf_low is generated so we wind up with 2 instructions rather than 3.
4228 The Darwin code is inside #if TARGET_MACHO because only then are the
4229 machopic_* functions defined. */
4231 rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
4232 int opnum, int type,
4233 int ind_levels ATTRIBUTE_UNUSED, int *win)
4235 /* We must recognize output that we have already generated ourselves. */
4236 if (GET_CODE (x) == PLUS
4237 && GET_CODE (XEXP (x, 0)) == PLUS
4238 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
4239 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4240 && GET_CODE (XEXP (x, 1)) == CONST_INT)
4242 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
4243 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
4244 opnum, (enum reload_type)type);
4245 *win = 1;
4246 return x;
4249 #if TARGET_MACHO
4250 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
4251 && GET_CODE (x) == LO_SUM
4252 && GET_CODE (XEXP (x, 0)) == PLUS
4253 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
4254 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
4255 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
4256 && machopic_operand_p (XEXP (x, 1)))
4258 /* Result of previous invocation of this function on Darwin
4259 floating point constant. */
4260 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
4261 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
4262 opnum, (enum reload_type)type);
4263 *win = 1;
4264 return x;
4266 #endif
4268 /* Force a non-word-aligned ld/std offset into the base register by
4269 wrapping the address in an outer offset of 0. */
4270 if (GET_CODE (x) == PLUS
4271 && GET_CODE (XEXP (x, 0)) == REG
4272 && REGNO (XEXP (x, 0)) < 32
4273 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
4274 && GET_CODE (XEXP (x, 1)) == CONST_INT
4275 && (INTVAL (XEXP (x, 1)) & 3) != 0
4276 && !ALTIVEC_VECTOR_MODE (mode)
4277 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
4278 && TARGET_POWERPC64)
4280 x = gen_rtx_PLUS (GET_MODE (x), x, GEN_INT (0));
4281 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
4282 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
4283 opnum, (enum reload_type) type);
4284 *win = 1;
4285 return x;
4288 if (GET_CODE (x) == PLUS
4289 && GET_CODE (XEXP (x, 0)) == REG
4290 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
4291 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
4292 && GET_CODE (XEXP (x, 1)) == CONST_INT
4293 && !SPE_VECTOR_MODE (mode)
4294 && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
4295 || mode == DDmode || mode == TDmode
4296 || mode == DImode))
4297 && !ALTIVEC_VECTOR_MODE (mode))
4299 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
4300 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
4301 HOST_WIDE_INT high
4302 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
4304 /* Check for 32-bit overflow. */
4305 if (high + low != val)
4307 *win = 0;
4308 return x;
4311 /* Reload the high part into a base reg; leave the low part
4312 in the mem directly. */
4314 x = gen_rtx_PLUS (GET_MODE (x),
4315 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
4316 GEN_INT (high)),
4317 GEN_INT (low));
4319 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
4320 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
4321 opnum, (enum reload_type)type);
4322 *win = 1;
4323 return x;
4326 if (GET_CODE (x) == SYMBOL_REF
4327 && !ALTIVEC_VECTOR_MODE (mode)
4328 && !SPE_VECTOR_MODE (mode)
4329 #if TARGET_MACHO
4330 && DEFAULT_ABI == ABI_DARWIN
4331 && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
4332 #else
4333 && DEFAULT_ABI == ABI_V4
4334 && !flag_pic
4335 #endif
4336 /* Don't do this for TFmode or TDmode, since the result isn't offsettable.
4337 The same goes for DImode without 64-bit gprs and DFmode and DDmode
4338 without fprs. */
4339 && mode != TFmode
4340 && mode != TDmode
4341 && (mode != DImode || TARGET_POWERPC64)
4342 && ((mode != DFmode && mode != DDmode) || TARGET_POWERPC64
4343 || (TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT)))
4345 #if TARGET_MACHO
4346 if (flag_pic)
4348 rtx offset = machopic_gen_offset (x);
4349 x = gen_rtx_LO_SUM (GET_MODE (x),
4350 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
4351 gen_rtx_HIGH (Pmode, offset)), offset);
4353 else
4354 #endif
4355 x = gen_rtx_LO_SUM (GET_MODE (x),
4356 gen_rtx_HIGH (Pmode, x), x);
4358 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
4359 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
4360 opnum, (enum reload_type)type);
4361 *win = 1;
4362 return x;
4365 /* Reload an offset address wrapped by an AND that represents the
4366 masking of the lower bits. Strip the outer AND and let reload
4367 convert the offset address into an indirect address. */
4368 if (TARGET_ALTIVEC
4369 && ALTIVEC_VECTOR_MODE (mode)
4370 && GET_CODE (x) == AND
4371 && GET_CODE (XEXP (x, 0)) == PLUS
4372 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
4373 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4374 && GET_CODE (XEXP (x, 1)) == CONST_INT
4375 && INTVAL (XEXP (x, 1)) == -16)
4377 x = XEXP (x, 0);
4378 *win = 1;
4379 return x;
4382 if (TARGET_TOC
4383 && GET_CODE (x) == SYMBOL_REF
4384 && constant_pool_expr_p (x)
4385 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
4387 x = create_TOC_reference (x);
4388 *win = 1;
4389 return x;
4391 *win = 0;
4392 return x;
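/* Editorial sketch (hypothetical helper, plain 64-bit types) of the
   displacement split used above: returns nonzero and fills *HIGH/*LOW
   when VAL can be rebuilt as HIGH + LOW with LOW a signed 16-bit value
   and HIGH a 32-bit-safe addis operand; e.g. 0x12348000 yields
   HIGH 0x12350000, LOW -0x8000.  */

static int
split_reload_displacement_sketch (long long val, long long *high,
                                  long long *low)
{
  *low = ((val & 0xffff) ^ 0x8000) - 0x8000;
  *high = (((val - *low) & 0xffffffffLL) ^ 0x80000000LL) - 0x80000000LL;
  return *high + *low == val;   /* False on 32-bit overflow.  */
}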
4395 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
4396 that is a valid memory address for an instruction.
4397 The MODE argument is the machine mode for the MEM expression
4398 that wants to use this address.
4400 On the RS/6000, there are four valid addresses: a SYMBOL_REF that
4401 refers to a constant pool entry of an address (or the sum of it
4402 plus a constant), a short (16-bit signed) constant plus a register,
4403 the sum of two registers, or a register indirect, possibly with an
4404 auto-increment. For DFmode, DDmode and DImode with a constant plus
4405 register, we must ensure that both words are addressable or PowerPC64
4406 with offset word aligned.
4408 For modes spanning multiple registers (DFmode and DDmode in 32-bit GPRs,
4409 32-bit DImode, TImode, TFmode, TDmode), indexed addressing cannot be used
4410 because adjacent memory cells are accessed by adding word-sized offsets
4411 during assembly output. */
4413 rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
4415 /* If this is an unaligned stvx/ldvx type address, discard the outer AND. */
4416 if (TARGET_ALTIVEC
4417 && ALTIVEC_VECTOR_MODE (mode)
4418 && GET_CODE (x) == AND
4419 && GET_CODE (XEXP (x, 1)) == CONST_INT
4420 && INTVAL (XEXP (x, 1)) == -16)
4421 x = XEXP (x, 0);
4423 if (RS6000_SYMBOL_REF_TLS_P (x))
4424 return 0;
4425 if (legitimate_indirect_address_p (x, reg_ok_strict))
4426 return 1;
4427 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
4428 && !ALTIVEC_VECTOR_MODE (mode)
4429 && !SPE_VECTOR_MODE (mode)
4430 && mode != TFmode
4431 && mode != TDmode
4432 /* Restrict addressing for DI because of our SUBREG hackery. */
4433 && !(TARGET_E500_DOUBLE
4434 && (mode == DFmode || mode == DDmode || mode == DImode))
4435 && TARGET_UPDATE
4436 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
4437 return 1;
4438 if (legitimate_small_data_p (mode, x))
4439 return 1;
4440 if (legitimate_constant_pool_address_p (x))
4441 return 1;
4442 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
4443 if (! reg_ok_strict
4444 && GET_CODE (x) == PLUS
4445 && GET_CODE (XEXP (x, 0)) == REG
4446 && (XEXP (x, 0) == virtual_stack_vars_rtx
4447 || XEXP (x, 0) == arg_pointer_rtx)
4448 && GET_CODE (XEXP (x, 1)) == CONST_INT)
4449 return 1;
4450 if (rs6000_legitimate_offset_address_p (mode, x, reg_ok_strict))
4451 return 1;
4452 if (mode != TImode
4453 && mode != TFmode
4454 && mode != TDmode
4455 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
4456 || TARGET_POWERPC64
4457 || (mode != DFmode && mode != DDmode)
4458 || (TARGET_E500_DOUBLE && mode != DDmode))
4459 && (TARGET_POWERPC64 || mode != DImode)
4460 && !avoiding_indexed_address_p (mode)
4461 && legitimate_indexed_address_p (x, reg_ok_strict))
4462 return 1;
4463 if (GET_CODE (x) == PRE_MODIFY
4464 && mode != TImode
4465 && mode != TFmode
4466 && mode != TDmode
4467 && ((TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT)
4468 || TARGET_POWERPC64
4469 || ((mode != DFmode && mode != DDmode) || TARGET_E500_DOUBLE))
4470 && (TARGET_POWERPC64 || mode != DImode)
4471 && !ALTIVEC_VECTOR_MODE (mode)
4472 && !SPE_VECTOR_MODE (mode)
4473 /* Restrict addressing for DI because of our SUBREG hackery. */
4474 && !(TARGET_E500_DOUBLE
4475 && (mode == DFmode || mode == DDmode || mode == DImode))
4476 && TARGET_UPDATE
4477 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict)
4478 && (rs6000_legitimate_offset_address_p (mode, XEXP (x, 1), reg_ok_strict)
4479 || (!avoiding_indexed_address_p (mode)
4480 && legitimate_indexed_address_p (XEXP (x, 1), reg_ok_strict)))
4481 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
4482 return 1;
4483 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
4484 return 1;
4485 return 0;
4488 /* Go to LABEL if ADDR (a legitimate address expression)
4489 has an effect that depends on the machine mode it is used for.
4491 On the RS/6000 this is true of all integral offsets (since AltiVec
4492 modes don't allow them) and of pre-increment or decrement addresses.
4494 ??? Except that due to conceptual problems in offsettable_address_p
4495 we can't really report the problems of integral offsets. So leave
4496 this assuming that the adjustable offset must be valid for the
4497 sub-words of a TFmode operand, which is what we had before. */
4499 bool
4500 rs6000_mode_dependent_address (rtx addr)
4502 switch (GET_CODE (addr))
4504 case PLUS:
4505 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
4507 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
4508 return val + 12 + 0x8000 >= 0x10000;
4510 break;
4512 case LO_SUM:
4513 return true;
4515 /* Auto-increment cases are now treated generically in recog.c. */
4516 case PRE_MODIFY:
4517 return TARGET_UPDATE;
4519 default:
4520 break;
4523 return false;
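/* Editorial note on the "+ 12" above: 12 is the largest sub-word
   displacement added when a 16-byte (TFmode) access is split into
   word-sized moves, so an offset is treated as mode-independent only
   when offset + 12 still fits the signed 16-bit D field; e.g. 32760
   fails because 32760 + 12 exceeds 32767.  */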
4526 /* Implement FIND_BASE_TERM. */
4529 rs6000_find_base_term (rtx op)
4531 rtx base, offset;
4533 split_const (op, &base, &offset);
4534 if (GET_CODE (base) == UNSPEC)
4535 switch (XINT (base, 1))
4537 case UNSPEC_TOCREL:
4538 case UNSPEC_MACHOPIC_OFFSET:
4539 /* OP represents SYM [+ OFFSET] - ANCHOR. SYM is the base term
4540 for aliasing purposes. */
4541 return XVECEXP (base, 0, 0);
4544 return op;
4547 /* More elaborate version of recog's offsettable_memref_p predicate
4548 that works around the ??? note of rs6000_mode_dependent_address.
4549 In particular it accepts
4551 (mem:DI (plus:SI (reg/f:SI 31 31) (const_int 32760 [0x7ff8])))
4553 in 32-bit mode, which the recog predicate rejects. */
4555 bool
4556 rs6000_offsettable_memref_p (rtx op)
4558 if (!MEM_P (op))
4559 return false;
4561 /* First mimic offsettable_memref_p. */
4562 if (offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)))
4563 return true;
4565 /* offsettable_address_p invokes rs6000_mode_dependent_address, but
4566 the latter predicate knows nothing about the mode of the memory
4567 reference and, therefore, assumes that it is the largest supported
4568 mode (TFmode). As a consequence, legitimate offsettable memory
4569 references are rejected. rs6000_legitimate_offset_address_p contains
4570 the correct logic for the PLUS case of rs6000_mode_dependent_address. */
4571 return rs6000_legitimate_offset_address_p (GET_MODE (op), XEXP (op, 0), 1);
4574 /* Return number of consecutive hard regs needed starting at reg REGNO
4575 to hold something of mode MODE.
4576 This is ordinarily the length in words of a value of mode MODE
4577 but can be less for certain modes in special long registers.
4579 For the SPE, GPRs are 64 bits but only 32 bits are visible in
4580 scalar instructions. The upper 32 bits are only available to the
4581 SIMD instructions.
4583 POWER and PowerPC GPRs hold 32 bits worth;
4584 PowerPC64 GPRs and FPRs hold 64 bits worth. */
4587 rs6000_hard_regno_nregs (int regno, enum machine_mode mode)
4589 if (FP_REGNO_P (regno))
4590 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
4592 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
4593 return (GET_MODE_SIZE (mode) + UNITS_PER_SPE_WORD - 1) / UNITS_PER_SPE_WORD;
4595 if (ALTIVEC_REGNO_P (regno))
4596 return
4597 (GET_MODE_SIZE (mode) + UNITS_PER_ALTIVEC_WORD - 1) / UNITS_PER_ALTIVEC_WORD;
4599 /* The value returned for SCmode in the E500 double case is 2 for
4600 ABI compatibility; storing an SCmode value in a single register
4601 would require function_arg and rs6000_spe_function_arg to handle
4602 SCmode so as to pass the value correctly in a pair of
4603 registers. */
4604 if (TARGET_E500_DOUBLE && FLOAT_MODE_P (mode) && mode != SCmode
4605 && !DECIMAL_FLOAT_MODE_P (mode))
4606 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
4608 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
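/* Editorial sketch: every case above is the same round-up division,
   e.g. a 16-byte mode in 4-byte units needs 4 registers, and a 10-byte
   value in 8-byte units still needs 2.  Hypothetical helper: */

static int
regs_needed_sketch (int mode_size, int unit_size)
{
  return (mode_size + unit_size - 1) / unit_size;
}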
4611 /* Change register usage conditional on target flags. */
4612 void
4613 rs6000_conditional_register_usage (void)
4615 int i;
4617 /* Set MQ register fixed (already call_used) if not POWER
4618 architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
4619 be allocated. */
4620 if (! TARGET_POWER)
4621 fixed_regs[64] = 1;
4623 /* 64-bit AIX and Linux reserve GPR13 for thread-private data. */
4624 if (TARGET_64BIT)
4625 fixed_regs[13] = call_used_regs[13]
4626 = call_really_used_regs[13] = 1;
4628 /* Conditionally disable FPRs. */
4629 if (TARGET_SOFT_FLOAT || !TARGET_FPRS)
4630 for (i = 32; i < 64; i++)
4631 fixed_regs[i] = call_used_regs[i]
4632 = call_really_used_regs[i] = 1;
4634 /* The TOC register is not killed across calls in a way that is
4635 visible to the compiler. */
4636 if (DEFAULT_ABI == ABI_AIX)
4637 call_really_used_regs[2] = 0;
4639 if (DEFAULT_ABI == ABI_V4
4640 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4641 && flag_pic == 2)
4642 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4644 if (DEFAULT_ABI == ABI_V4
4645 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4646 && flag_pic == 1)
4647 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4648 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4649 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4651 if (DEFAULT_ABI == ABI_DARWIN
4652 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
4653 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4654 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4655 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4657 if (TARGET_TOC && TARGET_MINIMAL_TOC)
4658 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
4659 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
4661 if (TARGET_SPE)
4663 global_regs[SPEFSCR_REGNO] = 1;
4664 /* We used to use r14 as FIXED_SCRATCH to address SPE 64-bit
4665 registers in prologues and epilogues. We no longer use r14
4666 for FIXED_SCRATCH, but we're keeping r14 out of the allocation
4667 pool for link-compatibility with older versions of GCC. Once
4668 "old" code has died out, we can return r14 to the allocation
4669 pool. */
4670 fixed_regs[14]
4671 = call_used_regs[14]
4672 = call_really_used_regs[14] = 1;
4675 if (!TARGET_ALTIVEC)
4677 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
4678 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
4679 call_really_used_regs[VRSAVE_REGNO] = 1;
4682 if (TARGET_ALTIVEC)
4683 global_regs[VSCR_REGNO] = 1;
4685 if (TARGET_ALTIVEC_ABI)
4687 for (i = FIRST_ALTIVEC_REGNO; i < FIRST_ALTIVEC_REGNO + 20; ++i)
4688 call_used_regs[i] = call_really_used_regs[i] = 1;
4690 /* AIX reserves VR20:31 in non-extended ABI mode. */
4691 if (TARGET_XCOFF)
4692 for (i = FIRST_ALTIVEC_REGNO + 20; i < FIRST_ALTIVEC_REGNO + 32; ++i)
4693 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
4697 /* Try to output insns to set DEST equal to the constant SOURCE if it can
4698 be done in less than N insns. Do all computations in MODE.
4699 Returns the place where the output has been placed if it can be
4700 done and the insns have been emitted. If it would take more than N
4701 insns, zero is returned and no insns are emitted. */
4704 rs6000_emit_set_const (rtx dest, enum machine_mode mode,
4705 rtx source, int n ATTRIBUTE_UNUSED)
4707 rtx result, insn, set;
4708 HOST_WIDE_INT c0, c1;
4710 switch (mode)
4712 case QImode:
4713 case HImode:
4714 if (dest == NULL)
4715 dest = gen_reg_rtx (mode);
4716 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
4717 return dest;
4719 case SImode:
4720 result = !can_create_pseudo_p () ? dest : gen_reg_rtx (SImode);
4722 emit_insn (gen_rtx_SET (VOIDmode, copy_rtx (result),
4723 GEN_INT (INTVAL (source)
4724 & (~ (HOST_WIDE_INT) 0xffff))));
4725 emit_insn (gen_rtx_SET (VOIDmode, dest,
4726 gen_rtx_IOR (SImode, copy_rtx (result),
4727 GEN_INT (INTVAL (source) & 0xffff))));
4728 result = dest;
4729 break;
4731 case DImode:
4732 switch (GET_CODE (source))
4734 case CONST_INT:
4735 c0 = INTVAL (source);
4736 c1 = -(c0 < 0);
4737 break;
4739 case CONST_DOUBLE:
4740 #if HOST_BITS_PER_WIDE_INT >= 64
4741 c0 = CONST_DOUBLE_LOW (source);
4742 c1 = -(c0 < 0);
4743 #else
4744 c0 = CONST_DOUBLE_LOW (source);
4745 c1 = CONST_DOUBLE_HIGH (source);
4746 #endif
4747 break;
4749 default:
4750 gcc_unreachable ();
4753 result = rs6000_emit_set_long_const (dest, c0, c1);
4754 break;
4756 default:
4757 gcc_unreachable ();
4760 insn = get_last_insn ();
4761 set = single_set (insn);
4762 if (! CONSTANT_P (SET_SRC (set)))
4763 set_unique_reg_note (insn, REG_EQUAL, source);
4765 return result;
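/* Editorial note on the SImode case above: the two emitted sets are
   the classic load-high / or-low pair, e.g. for 0x12345678
   (hypothetical register names)

	lis	r0,0x1234	# r0 = 0x12340000
	ori	r3,r0,0x5678	# r3 = 0x12345678

   which is why the high half is masked with ~0xffff first.  */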
4768 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
4769 fall back to a straightforward decomposition. We do this to avoid
4770 exponential run times encountered when looking for longer sequences
4771 with rs6000_emit_set_const. */
4772 static rtx
4773 rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
4775 if (!TARGET_POWERPC64)
4777 rtx operand1, operand2;
4779 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
4780 DImode);
4781 operand2 = operand_subword_force (copy_rtx (dest), WORDS_BIG_ENDIAN != 0,
4782 DImode);
4783 emit_move_insn (operand1, GEN_INT (c1));
4784 emit_move_insn (operand2, GEN_INT (c2));
4786 else
4788 HOST_WIDE_INT ud1, ud2, ud3, ud4;
4790 ud1 = c1 & 0xffff;
4791 ud2 = (c1 & 0xffff0000) >> 16;
4792 #if HOST_BITS_PER_WIDE_INT >= 64
4793 c2 = c1 >> 32;
4794 #endif
4795 ud3 = c2 & 0xffff;
4796 ud4 = (c2 & 0xffff0000) >> 16;
4798 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
4799 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
4801 if (ud1 & 0x8000)
4802 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
4803 else
4804 emit_move_insn (dest, GEN_INT (ud1));
4807 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
4808 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
4810 if (ud2 & 0x8000)
4811 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
4812 - 0x80000000));
4813 else
4814 emit_move_insn (dest, GEN_INT (ud2 << 16));
4815 if (ud1 != 0)
4816 emit_move_insn (copy_rtx (dest),
4817 gen_rtx_IOR (DImode, copy_rtx (dest),
4818 GEN_INT (ud1)));
4820 else if ((ud4 == 0xffff && (ud3 & 0x8000))
4821 || (ud4 == 0 && ! (ud3 & 0x8000)))
4823 if (ud3 & 0x8000)
4824 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
4825 - 0x80000000));
4826 else
4827 emit_move_insn (dest, GEN_INT (ud3 << 16));
4829 if (ud2 != 0)
4830 emit_move_insn (copy_rtx (dest),
4831 gen_rtx_IOR (DImode, copy_rtx (dest),
4832 GEN_INT (ud2)));
4833 emit_move_insn (copy_rtx (dest),
4834 gen_rtx_ASHIFT (DImode, copy_rtx (dest),
4835 GEN_INT (16)));
4836 if (ud1 != 0)
4837 emit_move_insn (copy_rtx (dest),
4838 gen_rtx_IOR (DImode, copy_rtx (dest),
4839 GEN_INT (ud1)));
4841 else
4843 if (ud4 & 0x8000)
4844 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
4845 - 0x80000000));
4846 else
4847 emit_move_insn (dest, GEN_INT (ud4 << 16));
4849 if (ud3 != 0)
4850 emit_move_insn (copy_rtx (dest),
4851 gen_rtx_IOR (DImode, copy_rtx (dest),
4852 GEN_INT (ud3)));
4854 emit_move_insn (copy_rtx (dest),
4855 gen_rtx_ASHIFT (DImode, copy_rtx (dest),
4856 GEN_INT (32)));
4857 if (ud2 != 0)
4858 emit_move_insn (copy_rtx (dest),
4859 gen_rtx_IOR (DImode, copy_rtx (dest),
4860 GEN_INT (ud2 << 16)));
4861 if (ud1 != 0)
4862 emit_move_insn (copy_rtx (dest),
4863 gen_rtx_IOR (DImode, copy_rtx (dest), GEN_INT (ud1)));
4866 return dest;
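/* Editorial illustration of the worst case of the decomposition above,
   a constant needing all four halfwords, e.g. 0x123456789abcdef0
   (hypothetical register name):

	lis	r3,0x1234	# ud4 << 16
	ori	r3,r3,0x5678	# | ud3
	sldi	r3,r3,32	# move into the high half
	oris	r3,r3,0x9abc	# | ud2 << 16
	ori	r3,r3,0xdef0	# | ud1

   Five insns, matching the emitted move/IOR/ASHIFT/IOR/IOR RTL.  */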
4869 /* Helper for the following. Get rid of [r+r] memory refs
4870 in cases where it won't work (TImode, TFmode, TDmode). */
4872 static void
4873 rs6000_eliminate_indexed_memrefs (rtx operands[2])
4875 if (GET_CODE (operands[0]) == MEM
4876 && GET_CODE (XEXP (operands[0], 0)) != REG
4877 && ! legitimate_constant_pool_address_p (XEXP (operands[0], 0))
4878 && ! reload_in_progress)
4879 operands[0]
4880 = replace_equiv_address (operands[0],
4881 copy_addr_to_reg (XEXP (operands[0], 0)));
4883 if (GET_CODE (operands[1]) == MEM
4884 && GET_CODE (XEXP (operands[1], 0)) != REG
4885 && ! legitimate_constant_pool_address_p (XEXP (operands[1], 0))
4886 && ! reload_in_progress)
4887 operands[1]
4888 = replace_equiv_address (operands[1],
4889 copy_addr_to_reg (XEXP (operands[1], 0)));
4892 /* Emit a move from SOURCE to DEST in mode MODE. */
4893 void
4894 rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
4896 rtx operands[2];
4897 operands[0] = dest;
4898 operands[1] = source;
4900 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
4901 if (GET_CODE (operands[1]) == CONST_DOUBLE
4902 && ! FLOAT_MODE_P (mode)
4903 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4905 /* FIXME. This should never happen. */
4906 /* Since it seems that it does, do the safe thing and convert
4907 to a CONST_INT. */
4908 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
4910 gcc_assert (GET_CODE (operands[1]) != CONST_DOUBLE
4911 || FLOAT_MODE_P (mode)
4912 || ((CONST_DOUBLE_HIGH (operands[1]) != 0
4913 || CONST_DOUBLE_LOW (operands[1]) < 0)
4914 && (CONST_DOUBLE_HIGH (operands[1]) != -1
4915 || CONST_DOUBLE_LOW (operands[1]) >= 0)));
4917 /* Check if GCC is setting up a block move that will end up using FP
4918 registers as temporaries. We must make sure this is acceptable. */
4919 if (GET_CODE (operands[0]) == MEM
4920 && GET_CODE (operands[1]) == MEM
4921 && mode == DImode
4922 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
4923 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
4924 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
4925 ? 32 : MEM_ALIGN (operands[0])))
4926 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
4927 ? 32
4928 : MEM_ALIGN (operands[1]))))
4929 && ! MEM_VOLATILE_P (operands [0])
4930 && ! MEM_VOLATILE_P (operands [1]))
4932 emit_move_insn (adjust_address (operands[0], SImode, 0),
4933 adjust_address (operands[1], SImode, 0));
4934 emit_move_insn (adjust_address (copy_rtx (operands[0]), SImode, 4),
4935 adjust_address (copy_rtx (operands[1]), SImode, 4));
4936 return;
4939 if (can_create_pseudo_p () && GET_CODE (operands[0]) == MEM
4940 && !gpc_reg_operand (operands[1], mode))
4941 operands[1] = force_reg (mode, operands[1]);
4943 if (mode == SFmode && ! TARGET_POWERPC
4944 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT
4945 && GET_CODE (operands[0]) == MEM)
4947 int regnum;
4949 if (reload_in_progress || reload_completed)
4950 regnum = true_regnum (operands[1]);
4951 else if (GET_CODE (operands[1]) == REG)
4952 regnum = REGNO (operands[1]);
4953 else
4954 regnum = -1;
4956 /* If operands[1] is a register, on POWER it may have
4957 double-precision data in it, so truncate it to single
4958 precision. */
4959 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
4961 rtx newreg;
4962 newreg = (!can_create_pseudo_p () ? copy_rtx (operands[1])
4963 : gen_reg_rtx (mode));
4964 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
4965 operands[1] = newreg;
4969 /* Recognize the case where operand[1] is a reference to thread-local
4970 data and load its address to a register. */
4971 if (rs6000_tls_referenced_p (operands[1]))
4973 enum tls_model model;
4974 rtx tmp = operands[1];
4975 rtx addend = NULL;
4977 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
4979 addend = XEXP (XEXP (tmp, 0), 1);
4980 tmp = XEXP (XEXP (tmp, 0), 0);
4983 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
4984 model = SYMBOL_REF_TLS_MODEL (tmp);
4985 gcc_assert (model != 0);
4987 tmp = rs6000_legitimize_tls_address (tmp, model);
4988 if (addend)
4990 tmp = gen_rtx_PLUS (mode, tmp, addend);
4991 tmp = force_operand (tmp, operands[0]);
4993 operands[1] = tmp;
4996 /* Handle the case where reload calls us with an invalid address. */
4997 if (reload_in_progress && mode == Pmode
4998 && (! general_operand (operands[1], mode)
4999 || ! nonimmediate_operand (operands[0], mode)))
5000 goto emit_set;
5002 /* 128-bit constant floating-point values on Darwin should really be
5003 loaded as two parts. */
5004 if (!TARGET_IEEEQUAD && TARGET_LONG_DOUBLE_128
5005 && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
5007 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
5008 know how to get a DFmode SUBREG of a TFmode. */
5009 enum machine_mode imode = (TARGET_E500_DOUBLE ? DFmode : DImode);
5010 rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode, 0),
5011 simplify_gen_subreg (imode, operands[1], mode, 0),
5012 imode);
5013 rs6000_emit_move (simplify_gen_subreg (imode, operands[0], mode,
5014 GET_MODE_SIZE (imode)),
5015 simplify_gen_subreg (imode, operands[1], mode,
5016 GET_MODE_SIZE (imode)),
5017 imode);
5018 return;
5021 if (reload_in_progress && cfun->machine->sdmode_stack_slot != NULL_RTX)
5022 cfun->machine->sdmode_stack_slot =
5023 eliminate_regs (cfun->machine->sdmode_stack_slot, VOIDmode, NULL_RTX);
5025 if (reload_in_progress
5026 && mode == SDmode
5027 && MEM_P (operands[0])
5028 && rtx_equal_p (operands[0], cfun->machine->sdmode_stack_slot)
5029 && REG_P (operands[1]))
5031 if (FP_REGNO_P (REGNO (operands[1])))
5033 rtx mem = adjust_address_nv (operands[0], DDmode, 0);
5034 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
5035 emit_insn (gen_movsd_store (mem, operands[1]));
5037 else if (INT_REGNO_P (REGNO (operands[1])))
5039 rtx mem = adjust_address_nv (operands[0], mode, 4);
5040 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
5041 emit_insn (gen_movsd_hardfloat (mem, operands[1]));
5043 else
5044 gcc_unreachable();
5045 return;
5047 if (reload_in_progress
5048 && mode == SDmode
5049 && REG_P (operands[0])
5050 && MEM_P (operands[1])
5051 && rtx_equal_p (operands[1], cfun->machine->sdmode_stack_slot))
5053 if (FP_REGNO_P (REGNO (operands[0])))
5055 rtx mem = adjust_address_nv (operands[1], DDmode, 0);
5056 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
5057 emit_insn (gen_movsd_load (operands[0], mem));
5059 else if (INT_REGNO_P (REGNO (operands[0])))
5061 rtx mem = adjust_address_nv (operands[1], mode, 4);
5062 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
5063 emit_insn (gen_movsd_hardfloat (operands[0], mem));
5065 else
5066 gcc_unreachable();
5067 return;
5070 /* FIXME: In the long term, this switch statement should go away
5071 and be replaced by a sequence of tests based on things like
5072 mode == Pmode. */
5073 switch (mode)
5075 case HImode:
5076 case QImode:
5077 if (CONSTANT_P (operands[1])
5078 && GET_CODE (operands[1]) != CONST_INT)
5079 operands[1] = force_const_mem (mode, operands[1]);
5080 break;
5082 case TFmode:
5083 case TDmode:
5084 rs6000_eliminate_indexed_memrefs (operands);
5085 /* fall through */
5087 case DFmode:
5088 case DDmode:
5089 case SFmode:
5090 case SDmode:
5091 if (CONSTANT_P (operands[1])
5092 && ! easy_fp_constant (operands[1], mode))
5093 operands[1] = force_const_mem (mode, operands[1]);
5094 break;
5096 case V16QImode:
5097 case V8HImode:
5098 case V4SFmode:
5099 case V4SImode:
5100 case V4HImode:
5101 case V2SFmode:
5102 case V2SImode:
5103 case V1DImode:
5104 if (CONSTANT_P (operands[1])
5105 && !easy_vector_constant (operands[1], mode))
5106 operands[1] = force_const_mem (mode, operands[1]);
5107 break;
5109 case SImode:
5110 case DImode:
5111 /* Use the default pattern for the address of ELF small data. */
5112 if (TARGET_ELF
5113 && mode == Pmode
5114 && DEFAULT_ABI == ABI_V4
5115 && (GET_CODE (operands[1]) == SYMBOL_REF
5116 || GET_CODE (operands[1]) == CONST)
5117 && small_data_operand (operands[1], mode))
5119 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
5120 return;
5123 if (DEFAULT_ABI == ABI_V4
5124 && mode == Pmode && mode == SImode
5125 && flag_pic == 1 && got_operand (operands[1], mode))
5127 emit_insn (gen_movsi_got (operands[0], operands[1]));
5128 return;
5131 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
5132 && TARGET_NO_TOC
5133 && ! flag_pic
5134 && mode == Pmode
5135 && CONSTANT_P (operands[1])
5136 && GET_CODE (operands[1]) != HIGH
5137 && GET_CODE (operands[1]) != CONST_INT)
5139 rtx target = (!can_create_pseudo_p ()
5140 ? operands[0]
5141 : gen_reg_rtx (mode));
5143 /* If this is a function address on -mcall-aixdesc,
5144 convert it to the address of the descriptor. */
5145 if (DEFAULT_ABI == ABI_AIX
5146 && GET_CODE (operands[1]) == SYMBOL_REF
5147 && XSTR (operands[1], 0)[0] == '.')
5149 const char *name = XSTR (operands[1], 0);
5150 rtx new_ref;
5151 while (*name == '.')
5152 name++;
5153 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
5154 CONSTANT_POOL_ADDRESS_P (new_ref)
5155 = CONSTANT_POOL_ADDRESS_P (operands[1]);
5156 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
5157 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
5158 SYMBOL_REF_DATA (new_ref) = SYMBOL_REF_DATA (operands[1]);
5159 operands[1] = new_ref;
5162 if (DEFAULT_ABI == ABI_DARWIN)
5164 #if TARGET_MACHO
5165 if (MACHO_DYNAMIC_NO_PIC_P)
5167 /* Take care of any required data indirection. */
5168 operands[1] = rs6000_machopic_legitimize_pic_address (
5169 operands[1], mode, operands[0]);
5170 if (operands[0] != operands[1])
5171 emit_insn (gen_rtx_SET (VOIDmode,
5172 operands[0], operands[1]));
5173 return;
5175 #endif
5176 emit_insn (gen_macho_high (target, operands[1]));
5177 emit_insn (gen_macho_low (operands[0], target, operands[1]));
5178 return;
5181 emit_insn (gen_elf_high (target, operands[1]));
5182 emit_insn (gen_elf_low (operands[0], target, operands[1]));
5183 return;
5186 /* If this is a SYMBOL_REF that refers to a constant pool entry,
5187 and we have put it in the TOC, we just need to make a TOC-relative
5188 reference to it. */
5189 if (TARGET_TOC
5190 && GET_CODE (operands[1]) == SYMBOL_REF
5191 && constant_pool_expr_p (operands[1])
5192 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
5193 get_pool_mode (operands[1])))
5195 operands[1] = create_TOC_reference (operands[1]);
5197 else if (mode == Pmode
5198 && CONSTANT_P (operands[1])
5199 && ((GET_CODE (operands[1]) != CONST_INT
5200 && ! easy_fp_constant (operands[1], mode))
5201 || (GET_CODE (operands[1]) == CONST_INT
5202 && num_insns_constant (operands[1], mode) > 2)
5203 || (GET_CODE (operands[0]) == REG
5204 && FP_REGNO_P (REGNO (operands[0]))))
5205 && GET_CODE (operands[1]) != HIGH
5206 && ! legitimate_constant_pool_address_p (operands[1])
5207 && ! toc_relative_expr_p (operands[1]))
5209 /* Emit a USE operation so that the constant isn't deleted if
5210 expensive optimizations are turned on because nobody
5211 references it. This should only be done for operands that
5212 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
5213 This should not be done for operands that contain LABEL_REFs.
5214 For now, we just handle the obvious case. */
5215 if (GET_CODE (operands[1]) != LABEL_REF)
5216 emit_use (operands[1]);
5218 #if TARGET_MACHO
5219 /* Darwin uses a special PIC legitimizer. */
5220 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
5222 operands[1] =
5223 rs6000_machopic_legitimize_pic_address (operands[1], mode,
5224 operands[0]);
5225 if (operands[0] != operands[1])
5226 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
5227 return;
5229 #endif
5231 /* If we are to limit the number of things we put in the TOC and
5232 this is a symbol plus a constant we can add in one insn,
5233 just put the symbol in the TOC and add the constant. Don't do
5234 this if reload is in progress. */
5235 if (GET_CODE (operands[1]) == CONST
5236 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
5237 && GET_CODE (XEXP (operands[1], 0)) == PLUS
5238 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
5239 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
5240 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
5241 && ! side_effects_p (operands[0]))
5243 rtx sym =
5244 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
5245 rtx other = XEXP (XEXP (operands[1], 0), 1);
5247 sym = force_reg (mode, sym);
5248 if (mode == SImode)
5249 emit_insn (gen_addsi3 (operands[0], sym, other));
5250 else
5251 emit_insn (gen_adddi3 (operands[0], sym, other));
5252 return;
5255 operands[1] = force_const_mem (mode, operands[1]);
5257 if (TARGET_TOC
5258 && GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
5259 && constant_pool_expr_p (XEXP (operands[1], 0))
5260 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
5261 get_pool_constant (XEXP (operands[1], 0)),
5262 get_pool_mode (XEXP (operands[1], 0))))
5264 operands[1]
5265 = gen_const_mem (mode,
5266 create_TOC_reference (XEXP (operands[1], 0)));
5267 set_mem_alias_set (operands[1], get_TOC_alias_set ());
5270 break;
5272 case TImode:
5273 rs6000_eliminate_indexed_memrefs (operands);
5275 if (TARGET_POWER)
5277 emit_insn (gen_rtx_PARALLEL (VOIDmode,
5278 gen_rtvec (2,
5279 gen_rtx_SET (VOIDmode,
5280 operands[0], operands[1]),
5281 gen_rtx_CLOBBER (VOIDmode,
5282 gen_rtx_SCRATCH (SImode)))));
5283 return;
5285 break;
5287 default:
5288 gcc_unreachable ();
5291 /* Above, we may have called force_const_mem which may have returned
5292 an invalid address. If we can, fix this up; otherwise, reload will
5293 have to deal with it. */
5294 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
5295 operands[1] = validize_mem (operands[1]);
5297 emit_set:
5298 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
5301 /* Nonzero if we can use a floating-point register to pass this arg. */
5302 #define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
5303 (SCALAR_FLOAT_MODE_P (MODE) \
5304 && (CUM)->fregno <= FP_ARG_MAX_REG \
5305 && TARGET_HARD_FLOAT && TARGET_FPRS)
5307 /* Nonzero if we can use an AltiVec register to pass this arg. */
5308 #define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
5309 (ALTIVEC_VECTOR_MODE (MODE) \
5310 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
5311 && TARGET_ALTIVEC_ABI \
5312 && (NAMED))
5314 /* Return a nonzero value to say to return the function value in
5315 memory, just as large structures are always returned. TYPE will be
5316 the data type of the value, and FNTYPE will be the type of the
5317 function doing the returning, or @code{NULL} for libcalls.
5319 The AIX ABI for the RS/6000 specifies that all structures are
5320 returned in memory. The Darwin ABI does the same. The SVR4 ABI
5321 specifies that structures <= 8 bytes are returned in r3/r4, but a
5322 draft put them in memory, and GCC used to implement the draft
5323 instead of the final standard. Therefore, aix_struct_return
5324 controls this instead of DEFAULT_ABI; V.4 targets needing backward
5325 compatibility can change DRAFT_V4_STRUCT_RET to override the
5326 default, and -m switches get the final word. See
5327 rs6000_override_options for more details.
5329 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
5330 long double support is enabled. These values are returned in memory.
5332 int_size_in_bytes returns -1 for variable size objects, which go in
5333 memory always. The cast to unsigned makes -1 > 8. */
5335 static bool
5336 rs6000_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
5338 /* In the darwin64 abi, try to use registers for larger structs
5339 if possible. */
5340 if (rs6000_darwin64_abi
5341 && TREE_CODE (type) == RECORD_TYPE
5342 && int_size_in_bytes (type) > 0)
5344 CUMULATIVE_ARGS valcum;
5345 rtx valret;
5347 valcum.words = 0;
5348 valcum.fregno = FP_ARG_MIN_REG;
5349 valcum.vregno = ALTIVEC_ARG_MIN_REG;
5350 /* Do a trial code generation as if this were going to be passed
5351 as an argument; if any part goes in memory, we return NULL. */
5352 valret = rs6000_darwin64_record_arg (&valcum, type, 1, true);
5353 if (valret)
5354 return false;
5355 /* Otherwise fall through to more conventional ABI rules. */
5358 if (AGGREGATE_TYPE_P (type)
5359 && (aix_struct_return
5360 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
5361 return true;
5363 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
5364 modes only exist for GCC vector types if -maltivec. */
5365 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI
5366 && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
5367 return false;
5369 /* Return synthetic vectors in memory. */
5370 if (TREE_CODE (type) == VECTOR_TYPE
5371 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
5373 static bool warned_for_return_big_vectors = false;
5374 if (!warned_for_return_big_vectors)
5376 warning (0, "GCC vector returned by reference: "
5377 "non-standard ABI extension with no compatibility guarantee");
5378 warned_for_return_big_vectors = true;
5380 return true;
5383 if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && TYPE_MODE (type) == TFmode)
5384 return true;
5386 return false;
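/* A self-contained sketch (illustrative only, not used by the compiler)
   of the SVR4 size test above, isolating the cast-to-unsigned trick that
   maps the variable-size marker -1 above the 8-byte cutoff.  The helper
   name is made up for the example.  */
#if 0
static int
svr4_struct_returns_in_memory (long long size_in_bytes)
{
  /* int_size_in_bytes reports -1 for variable-size objects; the unsigned
     cast turns -1 into a huge value, so it compares greater than 8.  */
  return (unsigned long long) size_in_bytes > 8;
}
/* svr4_struct_returns_in_memory (4)  == 0   returned in r3/r4
   svr4_struct_returns_in_memory (16) == 1   returned in memory
   svr4_struct_returns_in_memory (-1) == 1   variable size, memory  */
#endif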
5389 /* Initialize a variable CUM of type CUMULATIVE_ARGS
5390 for a call to a function whose data type is FNTYPE.
5391 For a library call, FNTYPE is 0.
5393 For incoming args we set the prototype's argument count high enough
5394 that we never return a PARALLEL. */
5396 void
5397 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
5398 rtx libname ATTRIBUTE_UNUSED, int incoming,
5399 int libcall, int n_named_args)
5401 static CUMULATIVE_ARGS zero_cumulative;
5403 *cum = zero_cumulative;
5404 cum->words = 0;
5405 cum->fregno = FP_ARG_MIN_REG;
5406 cum->vregno = ALTIVEC_ARG_MIN_REG;
5407 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
5408 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
5409 ? CALL_LIBCALL : CALL_NORMAL);
5410 cum->sysv_gregno = GP_ARG_MIN_REG;
5411 cum->stdarg = fntype
5412 && (TYPE_ARG_TYPES (fntype) != 0
5413 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5414 != void_type_node));
5416 cum->nargs_prototype = 0;
5417 if (incoming || cum->prototype)
5418 cum->nargs_prototype = n_named_args;
5420 /* Check for a longcall attribute. */
5421 if ((!fntype && rs6000_default_long_calls)
5422 || (fntype
5423 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
5424 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype))))
5425 cum->call_cookie |= CALL_LONG;
5427 if (TARGET_DEBUG_ARG)
5429 fprintf (stderr, "\ninit_cumulative_args:");
5430 if (fntype)
5432 tree ret_type = TREE_TYPE (fntype);
5433 fprintf (stderr, " ret code = %s,",
5434 tree_code_name[ (int)TREE_CODE (ret_type) ]);
5437 if (cum->call_cookie & CALL_LONG)
5438 fprintf (stderr, " longcall,");
5440 fprintf (stderr, " proto = %d, nargs = %d\n",
5441 cum->prototype, cum->nargs_prototype);
5444 if (fntype
5445 && !TARGET_ALTIVEC
5446 && TARGET_ALTIVEC_ABI
5447 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
5449 error ("cannot return value in vector register because"
5450 " altivec instructions are disabled, use -maltivec"
5451 " to enable them");
5455 /* Return true if TYPE must be passed on the stack and not in registers. */
5457 static bool
5458 rs6000_must_pass_in_stack (enum machine_mode mode, const_tree type)
5460 if (DEFAULT_ABI == ABI_AIX || TARGET_64BIT)
5461 return must_pass_in_stack_var_size (mode, type);
5462 else
5463 return must_pass_in_stack_var_size_or_pad (mode, type);
5466 /* If defined, a C expression which determines whether, and in which
5467 direction, to pad out an argument with extra space. The value
5468 should be of type `enum direction': either `upward' to pad above
5469 the argument, `downward' to pad below, or `none' to inhibit
5470 padding.
5472 For the AIX ABI, structs are always stored left-shifted in their
5473 argument slot. */
5475 enum direction
5476 function_arg_padding (enum machine_mode mode, const_tree type)
5478 #ifndef AGGREGATE_PADDING_FIXED
5479 #define AGGREGATE_PADDING_FIXED 0
5480 #endif
5481 #ifndef AGGREGATES_PAD_UPWARD_ALWAYS
5482 #define AGGREGATES_PAD_UPWARD_ALWAYS 0
5483 #endif
5485 if (!AGGREGATE_PADDING_FIXED)
5487 /* GCC used to pass structures of the same size as integer types as
5488 if they were in fact integers, ignoring FUNCTION_ARG_PADDING;
5489 i.e. structures of size 1 or 2 (or 4 when TARGET_64BIT) were
5490 passed padded downward, except that -mstrict-align further
5491 muddied the water in that multi-component structures of 2 and 4
5492 bytes in size were passed padded upward.
5494 The following arranges for best compatibility with previous
5495 versions of gcc, but removes the -mstrict-align dependency. */
5496 if (BYTES_BIG_ENDIAN)
5498 HOST_WIDE_INT size = 0;
5500 if (mode == BLKmode)
5502 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
5503 size = int_size_in_bytes (type);
5505 else
5506 size = GET_MODE_SIZE (mode);
5508 if (size == 1 || size == 2 || size == 4)
5509 return downward;
5511 return upward;
5514 if (AGGREGATES_PAD_UPWARD_ALWAYS)
5516 if (type != 0 && AGGREGATE_TYPE_P (type))
5517 return upward;
5520 /* Fall back to the default. */
5521 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
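/* An illustrative sketch of the big-endian branch above, assuming both
   override macros keep their default value of 0; the helper name is
   hypothetical.  Sizes 1, 2 and 4 sit at the low end of their slot,
   everything else pads upward.  */
#if 0
static enum direction
example_bigendian_padding (long long size)
{
  if (size == 1 || size == 2 || size == 4)
    return downward;   /* e.g. char, short, 4-byte struct */
  return upward;       /* e.g. 3-byte packed struct, double */
}
#endif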
5524 /* If defined, a C expression that gives the alignment boundary, in bits,
5525 of an argument with the specified mode and type. If it is not defined,
5526 PARM_BOUNDARY is used for all arguments.
5528 V.4 wants long longs and doubles to be double word aligned. Just
5529 testing the mode size is a boneheaded way to do this as it means
5530 that other types such as complex int are also double word aligned.
5531 However, we're stuck with this because changing the ABI might break
5532 existing library interfaces.
5534 Doubleword align SPE vectors.
5535 Quadword align Altivec vectors.
5536 Quadword align large synthetic vector types. */
5538 int
5539 function_arg_boundary (enum machine_mode mode, tree type)
5541 if (DEFAULT_ABI == ABI_V4
5542 && (GET_MODE_SIZE (mode) == 8
5543 || (TARGET_HARD_FLOAT
5544 && TARGET_FPRS
5545 && (mode == TFmode || mode == TDmode))))
5546 return 64;
5547 else if (SPE_VECTOR_MODE (mode)
5548 || (type && TREE_CODE (type) == VECTOR_TYPE
5549 && int_size_in_bytes (type) >= 8
5550 && int_size_in_bytes (type) < 16))
5551 return 64;
5552 else if (ALTIVEC_VECTOR_MODE (mode)
5553 || (type && TREE_CODE (type) == VECTOR_TYPE
5554 && int_size_in_bytes (type) >= 16))
5555 return 128;
5556 else if (rs6000_darwin64_abi && mode == BLKmode
5557 && type && TYPE_ALIGN (type) > 64)
5558 return 128;
5559 else
5560 return PARM_BOUNDARY;
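/* Sample boundaries from the rules above, assuming DEFAULT_ABI == ABI_V4
   with hard float on a 32-bit target (PARM_BOUNDARY == 32):
     double / long long (8 bytes)            -> 64
     TFmode / TDmode long double             -> 64
     SPE or 8..15 byte synthetic vector      -> 64
     AltiVec or >= 16 byte synthetic vector  -> 128
     plain int                               -> PARM_BOUNDARY (32)  */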
5563 /* For a function parm of MODE and TYPE, return the starting word in
5564 the parameter area. NWORDS of the parameter area are already used. */
5566 static unsigned int
5567 rs6000_parm_start (enum machine_mode mode, tree type, unsigned int nwords)
5569 unsigned int align;
5570 unsigned int parm_offset;
5572 align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
5573 parm_offset = DEFAULT_ABI == ABI_V4 ? 2 : 6;
5574 return nwords + (-(parm_offset + nwords) & align);
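/* Worked example of the expression above, under the 32-bit V.4
   parm_offset of 2: for a quadword-aligned arg (align == 3) with
   nwords == 1 already used, 1 + (-(2 + 1) & 3) == 2, so the arg
   starts at word 2; parm_offset + 2 == 4 words from the start of
   the frame, i.e. 16-byte aligned as required.  */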
5577 /* Compute the size (in words) of a function argument. */
5579 static unsigned long
5580 rs6000_arg_size (enum machine_mode mode, tree type)
5582 unsigned long size;
5584 if (mode != BLKmode)
5585 size = GET_MODE_SIZE (mode);
5586 else
5587 size = int_size_in_bytes (type);
5589 if (TARGET_32BIT)
5590 return (size + 3) >> 2;
5591 else
5592 return (size + 7) >> 3;
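/* E.g. a 10-byte BLKmode arg occupies (10 + 3) >> 2 == 3 words on
   32-bit targets and (10 + 7) >> 3 == 2 words on 64-bit targets.  */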
5595 /* Use this to flush pending int fields. */
5597 static void
5598 rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *cum,
5599 HOST_WIDE_INT bitpos)
5601 unsigned int startbit, endbit;
5602 int intregs, intoffset;
5603 enum machine_mode mode;
5605 if (cum->intoffset == -1)
5606 return;
5608 intoffset = cum->intoffset;
5609 cum->intoffset = -1;
5611 if (intoffset % BITS_PER_WORD != 0)
5613 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
5614 MODE_INT, 0);
5615 if (mode == BLKmode)
5617 /* We couldn't find an appropriate mode, which happens,
5618 e.g., in packed structs when there are 3 bytes to load.
5619 Move intoffset back to the beginning of the word in this
5620 case. */
5621 intoffset = intoffset & -BITS_PER_WORD;
5625 startbit = intoffset & -BITS_PER_WORD;
5626 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
5627 intregs = (endbit - startbit) / BITS_PER_WORD;
5628 cum->words += intregs;
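/* Worked example, assuming BITS_PER_WORD == 64: with cum->intoffset == 32
   and bitpos == 128, startbit is 0 and endbit is 128, so the pending int
   fields span intregs == 2 words and cum->words advances by 2.  */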
5631 /* The darwin64 ABI calls for us to recurse down through structs,
5632 looking for elements passed in registers. Unfortunately, we have
5633 to track int register count here also because of misalignments
5634 in powerpc alignment mode. */
5636 static void
5637 rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *cum,
5638 tree type,
5639 HOST_WIDE_INT startbitpos)
5641 tree f;
5643 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
5644 if (TREE_CODE (f) == FIELD_DECL)
5646 HOST_WIDE_INT bitpos = startbitpos;
5647 tree ftype = TREE_TYPE (f);
5648 enum machine_mode mode;
5649 if (ftype == error_mark_node)
5650 continue;
5651 mode = TYPE_MODE (ftype);
5653 if (DECL_SIZE (f) != 0
5654 && host_integerp (bit_position (f), 1))
5655 bitpos += int_bit_position (f);
5657 /* ??? FIXME: else assume zero offset. */
5659 if (TREE_CODE (ftype) == RECORD_TYPE)
5660 rs6000_darwin64_record_arg_advance_recurse (cum, ftype, bitpos);
5661 else if (USE_FP_FOR_ARG_P (cum, mode, ftype))
5663 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
5664 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
5665 cum->words += (GET_MODE_SIZE (mode) + 7) >> 3;
5667 else if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, 1))
5669 rs6000_darwin64_record_arg_advance_flush (cum, bitpos);
5670 cum->vregno++;
5671 cum->words += 2;
5673 else if (cum->intoffset == -1)
5674 cum->intoffset = bitpos;
5678 /* Update the data in CUM to advance over an argument
5679 of mode MODE and data type TYPE.
5680 (TYPE is null for libcalls where that information may not be available.)
5682 Note that for args passed by reference, function_arg will be called
5683 with MODE and TYPE set to that of the pointer to the arg, not the arg
5684 itself. */
5686 void
5687 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
5688 tree type, int named, int depth)
5690 int size;
5692 /* Only tick off an argument if we're not recursing. */
5693 if (depth == 0)
5694 cum->nargs_prototype--;
5696 if (TARGET_ALTIVEC_ABI
5697 && (ALTIVEC_VECTOR_MODE (mode)
5698 || (type && TREE_CODE (type) == VECTOR_TYPE
5699 && int_size_in_bytes (type) == 16)))
5701 bool stack = false;
5703 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
5705 cum->vregno++;
5706 if (!TARGET_ALTIVEC)
5707 error ("cannot pass argument in vector register because"
5708 " altivec instructions are disabled, use -maltivec"
5709 " to enable them");
5711 /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
5712 even if it is going to be passed in a vector register.
5713 Darwin does the same for variable-argument functions. */
5714 if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
5715 || (cum->stdarg && DEFAULT_ABI != ABI_V4))
5716 stack = true;
5718 else
5719 stack = true;
5721 if (stack)
5723 int align;
5725 /* Vector parameters must be 16-byte aligned. This places
5726 them at 2 mod 4 in terms of words in 32-bit mode, since
5727 the parameter save area starts at offset 24 from the
5728 stack. In 64-bit mode, they just have to start on an
5729 even word, since the parameter save area is 16-byte
5730 aligned. Space for GPRs is reserved even if the argument
5731 will be passed in memory. */
5732 if (TARGET_32BIT)
5733 align = (2 - cum->words) & 3;
5734 else
5735 align = cum->words & 1;
5736 cum->words += align + rs6000_arg_size (mode, type);
5738 if (TARGET_DEBUG_ARG)
5740 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
5741 cum->words, align);
5742 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
5743 cum->nargs_prototype, cum->prototype,
5744 GET_MODE_NAME (mode));
5748 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
5749 && !cum->stdarg
5750 && cum->sysv_gregno <= GP_ARG_MAX_REG)
5751 cum->sysv_gregno++;
5753 else if (rs6000_darwin64_abi
5754 && mode == BLKmode
5755 && TREE_CODE (type) == RECORD_TYPE
5756 && (size = int_size_in_bytes (type)) > 0)
5758 /* Variable sized types have size == -1 and are
5759 treated as if consisting entirely of ints.
5760 Pad to 16 byte boundary if needed. */
5761 if (TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
5762 && (cum->words % 2) != 0)
5763 cum->words++;
5764 /* For varargs, we can just go up by the size of the struct. */
5765 if (!named)
5766 cum->words += (size + 7) / 8;
5767 else
5769 /* It is tempting to say int register count just goes up by
5770 sizeof(type)/8, but this is wrong in a case such as
5771 { int; double; int; } [powerpc alignment]. We have to
5772 grovel through the fields for these too. */
5773 cum->intoffset = 0;
5774 rs6000_darwin64_record_arg_advance_recurse (cum, type, 0);
5775 rs6000_darwin64_record_arg_advance_flush (cum,
5776 size * BITS_PER_UNIT);
5779 else if (DEFAULT_ABI == ABI_V4)
5781 if (TARGET_HARD_FLOAT && TARGET_FPRS
5782 && ((TARGET_SINGLE_FLOAT && mode == SFmode)
5783 || (TARGET_DOUBLE_FLOAT && mode == DFmode)
5784 || (mode == TFmode && !TARGET_IEEEQUAD)
5785 || mode == SDmode || mode == DDmode || mode == TDmode))
5787 /* _Decimal128 must use an even/odd register pair. This assumes
5788 that the register number is odd when fregno is odd. */
5789 if (mode == TDmode && (cum->fregno % 2) == 1)
5790 cum->fregno++;
5792 if (cum->fregno + (mode == TFmode || mode == TDmode ? 1 : 0)
5793 <= FP_ARG_V4_MAX_REG)
5794 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
5795 else
5797 cum->fregno = FP_ARG_V4_MAX_REG + 1;
5798 if (mode == DFmode || mode == TFmode
5799 || mode == DDmode || mode == TDmode)
5800 cum->words += cum->words & 1;
5801 cum->words += rs6000_arg_size (mode, type);
5804 else
5806 int n_words = rs6000_arg_size (mode, type);
5807 int gregno = cum->sysv_gregno;
5809 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
5810 (r7,r8) or (r9,r10). So is any other 2-word item such
5811 as complex int due to a historical mistake. */
5812 if (n_words == 2)
5813 gregno += (1 - gregno) & 1;
5815 /* Multi-reg args are not split between registers and stack. */
5816 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
5818 /* Long long and SPE vectors are aligned on the stack.
5819 So are other 2 word items such as complex int due to
5820 a historical mistake. */
5821 if (n_words == 2)
5822 cum->words += cum->words & 1;
5823 cum->words += n_words;
5826 /* Note: we keep advancing gregno even after we've started spilling
5827 to the stack; gregno passing GP_ARG_MAX_REG is what tells
5828 expand_builtin_saveregs that spilling has begun. */
5829 cum->sysv_gregno = gregno + n_words;
5832 if (TARGET_DEBUG_ARG)
5834 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
5835 cum->words, cum->fregno);
5836 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
5837 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
5838 fprintf (stderr, "mode = %4s, named = %d\n",
5839 GET_MODE_NAME (mode), named);
5842 else
5844 int n_words = rs6000_arg_size (mode, type);
5845 int start_words = cum->words;
5846 int align_words = rs6000_parm_start (mode, type, start_words);
5848 cum->words = align_words + n_words;
5850 if (SCALAR_FLOAT_MODE_P (mode)
5851 && TARGET_HARD_FLOAT && TARGET_FPRS)
5853 /* _Decimal128 must be passed in an even/odd float register pair.
5854 This assumes that the register number is odd when fregno is
5855 odd. */
5856 if (mode == TDmode && (cum->fregno % 2) == 1)
5857 cum->fregno++;
5858 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
5861 if (TARGET_DEBUG_ARG)
5863 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
5864 cum->words, cum->fregno);
5865 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
5866 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
5867 fprintf (stderr, "named = %d, align = %d, depth = %d\n",
5868 named, align_words - start_words, depth);
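/* Worked example of the V.4 pairing rule used above: for a 2-word arg,
   gregno += (1 - gregno) & 1 rounds the register number up to the next
   odd value, so pairs land in (r3,r4), (r5,r6), (r7,r8) or (r9,r10).
   Starting from r4, (1 - 4) & 1 == 1 and the pair begins at r5; starting
   from r3, (1 - 3) & 1 == 0 and it stays at r3.  */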
5873 static rtx
5874 spe_build_register_parallel (enum machine_mode mode, int gregno)
5876 rtx r1, r3, r5, r7;
5878 switch (mode)
5880 case DFmode:
5881 r1 = gen_rtx_REG (DImode, gregno);
5882 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5883 return gen_rtx_PARALLEL (mode, gen_rtvec (1, r1));
5885 case DCmode:
5886 case TFmode:
5887 r1 = gen_rtx_REG (DImode, gregno);
5888 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5889 r3 = gen_rtx_REG (DImode, gregno + 2);
5890 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
5891 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r3));
5893 case TCmode:
5894 r1 = gen_rtx_REG (DImode, gregno);
5895 r1 = gen_rtx_EXPR_LIST (VOIDmode, r1, const0_rtx);
5896 r3 = gen_rtx_REG (DImode, gregno + 2);
5897 r3 = gen_rtx_EXPR_LIST (VOIDmode, r3, GEN_INT (8));
5898 r5 = gen_rtx_REG (DImode, gregno + 4);
5899 r5 = gen_rtx_EXPR_LIST (VOIDmode, r5, GEN_INT (16));
5900 r7 = gen_rtx_REG (DImode, gregno + 6);
5901 r7 = gen_rtx_EXPR_LIST (VOIDmode, r7, GEN_INT (24));
5902 return gen_rtx_PARALLEL (mode, gen_rtvec (4, r1, r3, r5, r7));
5904 default:
5905 gcc_unreachable ();
5909 /* Determine where to put a SIMD argument on the SPE. */
5910 static rtx
5911 rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
5912 tree type)
5914 int gregno = cum->sysv_gregno;
5916 /* On E500 v2, double arithmetic is done on the full 64-bit GPR, but
5917 doubles are passed and returned in a pair of GPRs for ABI compatibility. */
5918 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode
5919 || mode == DCmode || mode == TCmode))
5921 int n_words = rs6000_arg_size (mode, type);
5923 /* Doubles go in an odd/even register pair (r5/r6, etc). */
5924 if (mode == DFmode)
5925 gregno += (1 - gregno) & 1;
5927 /* Multi-reg args are not split between registers and stack. */
5928 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
5929 return NULL_RTX;
5931 return spe_build_register_parallel (mode, gregno);
5933 if (cum->stdarg)
5935 int n_words = rs6000_arg_size (mode, type);
5937 /* SPE vectors are put in odd registers. */
5938 if (n_words == 2 && (gregno & 1) == 0)
5939 gregno += 1;
5941 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
5943 rtx r1, r2;
5944 enum machine_mode m = SImode;
5946 r1 = gen_rtx_REG (m, gregno);
5947 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
5948 r2 = gen_rtx_REG (m, gregno + 1);
5949 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
5950 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
5952 else
5953 return NULL_RTX;
5955 else
5957 if (gregno <= GP_ARG_MAX_REG)
5958 return gen_rtx_REG (mode, gregno);
5959 else
5960 return NULL_RTX;
5964 /* A subroutine of rs6000_darwin64_record_arg. Assign the bits of the
5965 structure between cum->intoffset and bitpos to integer registers. */
5967 static void
5968 rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *cum,
5969 HOST_WIDE_INT bitpos, rtx rvec[], int *k)
5971 enum machine_mode mode;
5972 unsigned int regno;
5973 unsigned int startbit, endbit;
5974 int this_regno, intregs, intoffset;
5975 rtx reg;
5977 if (cum->intoffset == -1)
5978 return;
5980 intoffset = cum->intoffset;
5981 cum->intoffset = -1;
5983 /* If this is the trailing part of a word, try to only load that
5984 much into the register. Otherwise load the whole register. Note
5985 that in the latter case we may pick up unwanted bits. It's not a
5986 problem at the moment, but we may wish to revisit this. */
5988 if (intoffset % BITS_PER_WORD != 0)
5990 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
5991 MODE_INT, 0);
5992 if (mode == BLKmode)
5994 /* We couldn't find an appropriate mode, which happens,
5995 e.g., in packed structs when there are 3 bytes to load.
5996 Move intoffset back to the beginning of the word in this
5997 case. */
5998 intoffset = intoffset & -BITS_PER_WORD;
5999 mode = word_mode;
6002 else
6003 mode = word_mode;
6005 startbit = intoffset & -BITS_PER_WORD;
6006 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
6007 intregs = (endbit - startbit) / BITS_PER_WORD;
6008 this_regno = cum->words + intoffset / BITS_PER_WORD;
6010 if (intregs > 0 && intregs > GP_ARG_NUM_REG - this_regno)
6011 cum->use_stack = 1;
6013 intregs = MIN (intregs, GP_ARG_NUM_REG - this_regno);
6014 if (intregs <= 0)
6015 return;
6017 intoffset /= BITS_PER_UNIT;
6020 regno = GP_ARG_MIN_REG + this_regno;
6021 reg = gen_rtx_REG (mode, regno);
6022 rvec[(*k)++] =
6023 gen_rtx_EXPR_LIST (VOIDmode, reg, GEN_INT (intoffset));
6025 this_regno += 1;
6026 intoffset = (intoffset | (UNITS_PER_WORD-1)) + 1;
6027 mode = word_mode;
6028 intregs -= 1;
6030 while (intregs > 0);
6033 /* Recursive workhorse for the following. */
6035 static void
6036 rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *cum, const_tree type,
6037 HOST_WIDE_INT startbitpos, rtx rvec[],
6038 int *k)
6040 tree f;
6042 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
6043 if (TREE_CODE (f) == FIELD_DECL)
6045 HOST_WIDE_INT bitpos = startbitpos;
6046 tree ftype = TREE_TYPE (f);
6047 enum machine_mode mode;
6048 if (ftype == error_mark_node)
6049 continue;
6050 mode = TYPE_MODE (ftype);
6052 if (DECL_SIZE (f) != 0
6053 && host_integerp (bit_position (f), 1))
6054 bitpos += int_bit_position (f);
6056 /* ??? FIXME: else assume zero offset. */
6058 if (TREE_CODE (ftype) == RECORD_TYPE)
6059 rs6000_darwin64_record_arg_recurse (cum, ftype, bitpos, rvec, k);
6060 else if (cum->named && USE_FP_FOR_ARG_P (cum, mode, ftype))
6062 #if 0
6063 switch (mode)
6065 case SCmode: mode = SFmode; break;
6066 case DCmode: mode = DFmode; break;
6067 case TCmode: mode = TFmode; break;
6068 default: break;
6070 #endif
6071 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
6072 rvec[(*k)++]
6073 = gen_rtx_EXPR_LIST (VOIDmode,
6074 gen_rtx_REG (mode, cum->fregno++),
6075 GEN_INT (bitpos / BITS_PER_UNIT));
6076 if (mode == TFmode || mode == TDmode)
6077 cum->fregno++;
6079 else if (cum->named && USE_ALTIVEC_FOR_ARG_P (cum, mode, ftype, 1))
6081 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
6082 rvec[(*k)++]
6083 = gen_rtx_EXPR_LIST (VOIDmode,
6084 gen_rtx_REG (mode, cum->vregno++),
6085 GEN_INT (bitpos / BITS_PER_UNIT));
6087 else if (cum->intoffset == -1)
6088 cum->intoffset = bitpos;
6092 /* For the darwin64 ABI, we want to construct a PARALLEL consisting of
6093 the register(s) to be used for each field and subfield of a struct
6094 being passed by value, along with the offset of where the
6095 register's value may be found in the block. FP fields go in FP
6096 register, vector fields go in vector registers, and everything
6097 else goes in int registers, packed as in memory.
6099 This code is also used for function return values. RETVAL indicates
6100 whether this is the case.
6102 Much of this is taken from the SPARC V9 port, which has a similar
6103 calling convention. */
6105 static rtx
6106 rs6000_darwin64_record_arg (CUMULATIVE_ARGS *orig_cum, const_tree type,
6107 int named, bool retval)
6109 rtx rvec[FIRST_PSEUDO_REGISTER];
6110 int k = 1, kbase = 1;
6111 HOST_WIDE_INT typesize = int_size_in_bytes (type);
6112 /* This is a copy; modifications are not visible to our caller. */
6113 CUMULATIVE_ARGS copy_cum = *orig_cum;
6114 CUMULATIVE_ARGS *cum = &copy_cum;
6116 /* Pad to 16 byte boundary if needed. */
6117 if (!retval && TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
6118 && (cum->words % 2) != 0)
6119 cum->words++;
6121 cum->intoffset = 0;
6122 cum->use_stack = 0;
6123 cum->named = named;
6125 /* Put entries into rvec[] for individual FP and vector fields, and
6126 for the chunks of memory that go in int regs. Note we start at
6127 element 1; 0 is reserved for an indication of using memory, and
6128 may or may not be filled in below. */
6129 rs6000_darwin64_record_arg_recurse (cum, type, 0, rvec, &k);
6130 rs6000_darwin64_record_arg_flush (cum, typesize * BITS_PER_UNIT, rvec, &k);
6132 /* If any part of the struct went on the stack put all of it there.
6133 This hack is because the generic code for
6134 FUNCTION_ARG_PARTIAL_NREGS cannot handle cases where the register
6135 parts of the struct are not at the beginning. */
6136 if (cum->use_stack)
6138 if (retval)
6139 return NULL_RTX; /* doesn't go in registers at all */
6140 kbase = 0;
6141 rvec[0] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
6143 if (k > 1 || cum->use_stack)
6144 return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (k - kbase, &rvec[kbase]));
6145 else
6146 return NULL_RTX;
6149 /* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
6151 static rtx
6152 rs6000_mixed_function_arg (enum machine_mode mode, tree type, int align_words)
6154 int n_units;
6155 int i, k;
6156 rtx rvec[GP_ARG_NUM_REG + 1];
6158 if (align_words >= GP_ARG_NUM_REG)
6159 return NULL_RTX;
6161 n_units = rs6000_arg_size (mode, type);
6163 /* Optimize the simple case where the arg fits in one gpr, except in
6164 the case of BLKmode due to assign_parms assuming that registers are
6165 BITS_PER_WORD wide. */
6166 if (n_units == 0
6167 || (n_units == 1 && mode != BLKmode))
6168 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6170 k = 0;
6171 if (align_words + n_units > GP_ARG_NUM_REG)
6172 /* Not all of the arg fits in gprs. Say that it goes in memory too,
6173 using a magic NULL_RTX component.
6174 This is not strictly correct. Only some of the arg belongs in
6175 memory, not all of it. However, the normal scheme using
6176 function_arg_partial_nregs can result in unusual subregs, e.g.
6177 (subreg:SI (reg:DF) 4), which are not handled well. The code to
6178 store the whole arg to memory is often more efficient than code
6179 to store pieces, and we know that space is available in the right
6180 place for the whole arg. */
6181 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
6183 i = 0;
6186 rtx r = gen_rtx_REG (SImode, GP_ARG_MIN_REG + align_words);
6187 rtx off = GEN_INT (i++ * 4);
6188 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
6190 while (++align_words < GP_ARG_NUM_REG && --n_units != 0);
6192 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
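/* E.g. an 8-byte arg whose first word lands in the last GPR
   (align_words == 7) produces
     (parallel [(expr_list (nil) (const_int 0))
                (expr_list (reg:SI r10) (const_int 0))])
   i.e. half the arg in r10 and, via the magic NULL_RTX element, the
   whole arg in memory as well.  */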
6195 /* Determine where to put an argument to a function.
6196 Value is zero to push the argument on the stack,
6197 or a hard register in which to store the argument.
6199 MODE is the argument's machine mode.
6200 TYPE is the data type of the argument (as a tree).
6201 This is null for libcalls where that information may
6202 not be available.
6203 CUM is a variable of type CUMULATIVE_ARGS which gives info about
6204 the preceding args and about the function being called. It is
6205 not modified in this routine.
6206 NAMED is nonzero if this argument is a named parameter
6207 (otherwise it is an extra parameter matching an ellipsis).
6209 On RS/6000 the first eight words of non-FP are normally in registers
6210 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
6211 Under V.4, the first 8 FP args are in registers.
6213 If this is floating-point and no prototype is specified, we use
6214 both an FP and integer register (or possibly FP reg and stack). Library
6215 functions (when CALL_LIBCALL is set) always have the proper types for args,
6216 so we can pass the FP value just in one register. emit_library_function
6217 doesn't support PARALLEL anyway.
6219 Note that for args passed by reference, function_arg will be called
6220 with MODE and TYPE set to that of the pointer to the arg, not the arg
6221 itself. */
6223 rtx
6224 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
6225 tree type, int named)
6227 enum rs6000_abi abi = DEFAULT_ABI;
6229 /* Return a marker indicating whether the CR1 bit that V.4 uses to
6230 say fp args were passed in registers needs to be set or cleared.
6231 Assume that we don't need the marker for software floating point,
6232 or compiler generated library calls. */
6233 if (mode == VOIDmode)
6235 if (abi == ABI_V4
6236 && (cum->call_cookie & CALL_LIBCALL) == 0
6237 && (cum->stdarg
6238 || (cum->nargs_prototype < 0
6239 && (cum->prototype || TARGET_NO_PROTOTYPE))))
6241 /* For the SPE, we need to crxor CR6 always. */
6242 if (TARGET_SPE_ABI)
6243 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
6244 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
6245 return GEN_INT (cum->call_cookie
6246 | ((cum->fregno == FP_ARG_MIN_REG)
6247 ? CALL_V4_SET_FP_ARGS
6248 : CALL_V4_CLEAR_FP_ARGS));
6251 return GEN_INT (cum->call_cookie);
6254 if (rs6000_darwin64_abi && mode == BLKmode
6255 && TREE_CODE (type) == RECORD_TYPE)
6257 rtx rslt = rs6000_darwin64_record_arg (cum, type, named, false);
6258 if (rslt != NULL_RTX)
6259 return rslt;
6260 /* Else fall through to usual handling. */
6263 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
6264 if (TARGET_64BIT && ! cum->prototype)
6266 /* Vector parameters get passed in a vector register and
6267 also in GPRs or memory, in the absence of a prototype. */
6268 int align_words;
6269 rtx slot;
6270 align_words = (cum->words + 1) & ~1;
6272 if (align_words >= GP_ARG_NUM_REG)
6274 slot = NULL_RTX;
6276 else
6278 slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6280 return gen_rtx_PARALLEL (mode,
6281 gen_rtvec (2,
6282 gen_rtx_EXPR_LIST (VOIDmode,
6283 slot, const0_rtx),
6284 gen_rtx_EXPR_LIST (VOIDmode,
6285 gen_rtx_REG (mode, cum->vregno),
6286 const0_rtx)));
6288 else
6289 return gen_rtx_REG (mode, cum->vregno);
6290 else if (TARGET_ALTIVEC_ABI
6291 && (ALTIVEC_VECTOR_MODE (mode)
6292 || (type && TREE_CODE (type) == VECTOR_TYPE
6293 && int_size_in_bytes (type) == 16)))
6295 if (named || abi == ABI_V4)
6296 return NULL_RTX;
6297 else
6299 /* Vector parameters to varargs functions under AIX or Darwin
6300 get passed in memory and possibly also in GPRs. */
6301 int align, align_words, n_words;
6302 enum machine_mode part_mode;
6304 /* Vector parameters must be 16-byte aligned. This places them at
6305 2 mod 4 in terms of words in 32-bit mode, since the parameter
6306 save area starts at offset 24 from the stack. In 64-bit mode,
6307 they just have to start on an even word, since the parameter
6308 save area is 16-byte aligned. */
6309 if (TARGET_32BIT)
6310 align = (2 - cum->words) & 3;
6311 else
6312 align = cum->words & 1;
6313 align_words = cum->words + align;
6315 /* Out of registers? Memory, then. */
6316 if (align_words >= GP_ARG_NUM_REG)
6317 return NULL_RTX;
6319 if (TARGET_32BIT && TARGET_POWERPC64)
6320 return rs6000_mixed_function_arg (mode, type, align_words);
6322 /* The vector value goes in GPRs. Only the part of the
6323 value in GPRs is reported here. */
6324 part_mode = mode;
6325 n_words = rs6000_arg_size (mode, type);
6326 if (align_words + n_words > GP_ARG_NUM_REG)
6327 /* Fortunately, there are only two possibilities, the value
6328 is either wholly in GPRs or half in GPRs and half not. */
6329 part_mode = DImode;
6331 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
6334 else if (TARGET_SPE_ABI && TARGET_SPE
6335 && (SPE_VECTOR_MODE (mode)
6336 || (TARGET_E500_DOUBLE && (mode == DFmode
6337 || mode == DCmode
6338 || mode == TFmode
6339 || mode == TCmode))))
6340 return rs6000_spe_function_arg (cum, mode, type);
6342 else if (abi == ABI_V4)
6344 if (TARGET_HARD_FLOAT && TARGET_FPRS
6345 && ((TARGET_SINGLE_FLOAT && mode == SFmode)
6346 || (TARGET_DOUBLE_FLOAT && mode == DFmode)
6347 || (mode == TFmode && !TARGET_IEEEQUAD)
6348 || mode == SDmode || mode == DDmode || mode == TDmode))
6350 /* _Decimal128 must use an even/odd register pair. This assumes
6351 that the register number is odd when fregno is odd. */
6352 if (mode == TDmode && (cum->fregno % 2) == 1)
6353 cum->fregno++;
6355 if (cum->fregno + (mode == TFmode || mode == TDmode ? 1 : 0)
6356 <= FP_ARG_V4_MAX_REG)
6357 return gen_rtx_REG (mode, cum->fregno);
6358 else
6359 return NULL_RTX;
6361 else
6363 int n_words = rs6000_arg_size (mode, type);
6364 int gregno = cum->sysv_gregno;
6366 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
6367 (r7,r8) or (r9,r10). So is any other 2-word item such
6368 as complex int due to a historical mistake. */
6369 if (n_words == 2)
6370 gregno += (1 - gregno) & 1;
6372 /* Multi-reg args are not split between registers and stack. */
6373 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
6374 return NULL_RTX;
6376 if (TARGET_32BIT && TARGET_POWERPC64)
6377 return rs6000_mixed_function_arg (mode, type,
6378 gregno - GP_ARG_MIN_REG);
6379 return gen_rtx_REG (mode, gregno);
6382 else
6384 int align_words = rs6000_parm_start (mode, type, cum->words);
6386 /* _Decimal128 must be passed in an even/odd float register pair.
6387 This assumes that the register number is odd when fregno is odd. */
6388 if (mode == TDmode && (cum->fregno % 2) == 1)
6389 cum->fregno++;
6391 if (USE_FP_FOR_ARG_P (cum, mode, type))
6393 rtx rvec[GP_ARG_NUM_REG + 1];
6394 rtx r;
6395 int k;
6396 bool needs_psave;
6397 enum machine_mode fmode = mode;
6398 unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;
6400 if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
6402 /* Currently, we only ever need one reg here because complex
6403 doubles are split. */
6404 gcc_assert (cum->fregno == FP_ARG_MAX_REG
6405 && (fmode == TFmode || fmode == TDmode));
6407 /* Long double or _Decimal128 split over regs and memory. */
6408 fmode = DECIMAL_FLOAT_MODE_P (fmode) ? DDmode : DFmode;
6411 /* Do we also need to pass this arg in the parameter save
6412 area? */
6413 needs_psave = (type
6414 && (cum->nargs_prototype <= 0
6415 || (DEFAULT_ABI == ABI_AIX
6416 && TARGET_XL_COMPAT
6417 && align_words >= GP_ARG_NUM_REG)));
6419 if (!needs_psave && mode == fmode)
6420 return gen_rtx_REG (fmode, cum->fregno);
6422 k = 0;
6423 if (needs_psave)
6425 /* Describe the part that goes in gprs or the stack.
6426 This piece must come first, before the fprs. */
6427 if (align_words < GP_ARG_NUM_REG)
6429 unsigned long n_words = rs6000_arg_size (mode, type);
6431 if (align_words + n_words > GP_ARG_NUM_REG
6432 || (TARGET_32BIT && TARGET_POWERPC64))
6434 /* If this is partially on the stack, then we only
6435 include the portion actually in registers here. */
6436 enum machine_mode rmode = TARGET_32BIT ? SImode : DImode;
6437 rtx off;
6438 int i = 0;
6439 if (align_words + n_words > GP_ARG_NUM_REG)
6440 /* Not all of the arg fits in gprs. Say that it
6441 goes in memory too, using a magic NULL_RTX
6442 component. Also see comment in
6443 rs6000_mixed_function_arg for why the normal
6444 function_arg_partial_nregs scheme doesn't work
6445 in this case. */
6446 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX,
6447 const0_rtx);
6450 r = gen_rtx_REG (rmode,
6451 GP_ARG_MIN_REG + align_words);
6452 off = GEN_INT (i++ * GET_MODE_SIZE (rmode));
6453 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, off);
6455 while (++align_words < GP_ARG_NUM_REG && --n_words != 0);
6457 else
6459 /* The whole arg fits in gprs. */
6460 r = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6461 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
6464 else
6465 /* It's entirely in memory. */
6466 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, NULL_RTX, const0_rtx);
6469 /* Describe where this piece goes in the fprs. */
6470 r = gen_rtx_REG (fmode, cum->fregno);
6471 rvec[k++] = gen_rtx_EXPR_LIST (VOIDmode, r, const0_rtx);
6473 return gen_rtx_PARALLEL (mode, gen_rtvec_v (k, rvec));
6475 else if (align_words < GP_ARG_NUM_REG)
6477 if (TARGET_32BIT && TARGET_POWERPC64)
6478 return rs6000_mixed_function_arg (mode, type, align_words);
6480 if (mode == BLKmode)
6481 mode = Pmode;
6483 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
6485 else
6486 return NULL_RTX;
6490 /* For an arg passed partly in registers and partly in memory, this is
6491 the number of bytes passed in registers. For args passed entirely in
6492 registers or entirely in memory, zero. When an arg is described by a
6493 PARALLEL, perhaps using more than one register type, this function
6494 returns the number of bytes used by the first element of the PARALLEL. */
6496 static int
6497 rs6000_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
6498 tree type, bool named)
6500 int ret = 0;
6501 int align_words;
6503 if (DEFAULT_ABI == ABI_V4)
6504 return 0;
6506 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
6507 && cum->nargs_prototype >= 0)
6508 return 0;
6510 /* In this complicated case we just disable the partial_nregs code. */
6511 if (rs6000_darwin64_abi && mode == BLKmode
6512 && TREE_CODE (type) == RECORD_TYPE
6513 && int_size_in_bytes (type) > 0)
6514 return 0;
6516 align_words = rs6000_parm_start (mode, type, cum->words);
6518 if (USE_FP_FOR_ARG_P (cum, mode, type))
6520 /* If we are passing this arg in the fixed parameter save area
6521 (gprs or memory) as well as fprs, then this function should
6522 return the number of partial bytes passed in the parameter
6523 save area rather than partial bytes passed in fprs. */
6524 if (type
6525 && (cum->nargs_prototype <= 0
6526 || (DEFAULT_ABI == ABI_AIX
6527 && TARGET_XL_COMPAT
6528 && align_words >= GP_ARG_NUM_REG)))
6529 return 0;
6530 else if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3)
6531 > FP_ARG_MAX_REG + 1)
6532 ret = (FP_ARG_MAX_REG + 1 - cum->fregno) * 8;
6533 else if (cum->nargs_prototype >= 0)
6534 return 0;
6537 if (align_words < GP_ARG_NUM_REG
6538 && GP_ARG_NUM_REG < align_words + rs6000_arg_size (mode, type))
6539 ret = (GP_ARG_NUM_REG - align_words) * (TARGET_32BIT ? 4 : 8);
6541 if (ret != 0 && TARGET_DEBUG_ARG)
6542 fprintf (stderr, "rs6000_arg_partial_bytes: %d\n", ret);
6544 return ret;
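/* E.g. on a 32-bit AIX-style target, a 16-byte aggregate whose first
   word falls at align_words == 7 keeps one word in r10 and puts the
   remaining three in memory, so this returns (8 - 7) * 4 == 4 bytes
   passed in registers.  */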
6547 /* A C expression that indicates when an argument must be passed by
6548 reference. If nonzero for an argument, a copy of that argument is
6549 made in memory and a pointer to the argument is passed instead of
6550 the argument itself. The pointer is passed in whatever way is
6551 appropriate for passing a pointer to that type.
6553 Under V.4, aggregates and long double are passed by reference.
6555 As an extension to all 32-bit ABIs, AltiVec vectors are passed by
6556 reference unless the AltiVec vector extension ABI is in force.
6558 As an extension to all ABIs, variable sized types are passed by
6559 reference. */
6561 static bool
6562 rs6000_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
6563 enum machine_mode mode, const_tree type,
6564 bool named ATTRIBUTE_UNUSED)
6566 if (DEFAULT_ABI == ABI_V4 && TARGET_IEEEQUAD && mode == TFmode)
6568 if (TARGET_DEBUG_ARG)
6569 fprintf (stderr, "function_arg_pass_by_reference: V4 long double\n");
6570 return 1;
6573 if (!type)
6574 return 0;
6576 if (DEFAULT_ABI == ABI_V4 && AGGREGATE_TYPE_P (type))
6578 if (TARGET_DEBUG_ARG)
6579 fprintf (stderr, "function_arg_pass_by_reference: V4 aggregate\n");
6580 return 1;
6583 if (int_size_in_bytes (type) < 0)
6585 if (TARGET_DEBUG_ARG)
6586 fprintf (stderr, "function_arg_pass_by_reference: variable size\n");
6587 return 1;
6590 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
6591 modes only exist for GCC vector types if -maltivec. */
6592 if (TARGET_32BIT && !TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
6594 if (TARGET_DEBUG_ARG)
6595 fprintf (stderr, "function_arg_pass_by_reference: AltiVec\n");
6596 return 1;
6599 /* Pass synthetic vectors in memory. */
6600 if (TREE_CODE (type) == VECTOR_TYPE
6601 && int_size_in_bytes (type) > (TARGET_ALTIVEC_ABI ? 16 : 8))
6603 static bool warned_for_pass_big_vectors = false;
6604 if (TARGET_DEBUG_ARG)
6605 fprintf (stderr, "function_arg_pass_by_reference: synthetic vector\n");
6606 if (!warned_for_pass_big_vectors)
6608 warning (0, "GCC vector passed by reference: "
6609 "non-standard ABI extension with no compatibility guarantee");
6610 warned_for_pass_big_vectors = true;
6612 return 1;
6615 return 0;
6618 static void
6619 rs6000_move_block_from_reg (int regno, rtx x, int nregs)
6621 int i;
6622 enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
6624 if (nregs == 0)
6625 return;
6627 for (i = 0; i < nregs; i++)
6629 rtx tem = adjust_address_nv (x, reg_mode, i * GET_MODE_SIZE (reg_mode));
6630 if (reload_completed)
6632 if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
6633 tem = NULL_RTX;
6634 else
6635 tem = simplify_gen_subreg (reg_mode, x, BLKmode,
6636 i * GET_MODE_SIZE (reg_mode));
6638 else
6639 tem = replace_equiv_address (tem, XEXP (tem, 0));
6641 gcc_assert (tem);
6643 emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
6647 /* Perform any actions needed for a function that is receiving a
6648 variable number of arguments.
6650 CUM is as above.
6652 MODE and TYPE are the mode and type of the current parameter.
6654 PRETEND_SIZE is a variable that should be set to the amount of stack
6655 that must be pushed by the prolog to pretend that our caller pushed
6656 it.
6658 Normally, this macro will push all remaining incoming registers on the
6659 stack and set PRETEND_SIZE to the length of the registers pushed. */
6661 static void
6662 setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
6663 tree type, int *pretend_size ATTRIBUTE_UNUSED,
6664 int no_rtl)
6666 CUMULATIVE_ARGS next_cum;
6667 int reg_size = TARGET_32BIT ? 4 : 8;
6668 rtx save_area = NULL_RTX, mem;
6669 int first_reg_offset;
6670 alias_set_type set;
6672 /* Skip the last named argument. */
6673 next_cum = *cum;
6674 function_arg_advance (&next_cum, mode, type, 1, 0);
6676 if (DEFAULT_ABI == ABI_V4)
6678 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
6680 if (! no_rtl)
6682 int gpr_reg_num = 0, gpr_size = 0, fpr_size = 0;
6683 HOST_WIDE_INT offset = 0;
6685 /* Try to optimize the size of the varargs save area.
6686 The ABI requires that ap.reg_save_area is doubleword
6687 aligned, but we don't need to allocate space for all
6688 the bytes, only those to which we actually will save
6689 anything. */
6690 if (cfun->va_list_gpr_size && first_reg_offset < GP_ARG_NUM_REG)
6691 gpr_reg_num = GP_ARG_NUM_REG - first_reg_offset;
6692 if (TARGET_HARD_FLOAT && TARGET_FPRS
6693 && next_cum.fregno <= FP_ARG_V4_MAX_REG
6694 && cfun->va_list_fpr_size)
6696 if (gpr_reg_num)
6697 fpr_size = (next_cum.fregno - FP_ARG_MIN_REG)
6698 * UNITS_PER_FP_WORD;
6699 if (cfun->va_list_fpr_size
6700 < FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
6701 fpr_size += cfun->va_list_fpr_size * UNITS_PER_FP_WORD;
6702 else
6703 fpr_size += (FP_ARG_V4_MAX_REG + 1 - next_cum.fregno)
6704 * UNITS_PER_FP_WORD;
6706 if (gpr_reg_num)
6708 offset = -((first_reg_offset * reg_size) & ~7);
6709 if (!fpr_size && gpr_reg_num > cfun->va_list_gpr_size)
6711 gpr_reg_num = cfun->va_list_gpr_size;
6712 if (reg_size == 4 && (first_reg_offset & 1))
6713 gpr_reg_num++;
6715 gpr_size = (gpr_reg_num * reg_size + 7) & ~7;
6717 else if (fpr_size)
6718 offset = - (int) (next_cum.fregno - FP_ARG_MIN_REG)
6719 * UNITS_PER_FP_WORD
6720 - (int) (GP_ARG_NUM_REG * reg_size);
6722 if (gpr_size + fpr_size)
6724 rtx reg_save_area
6725 = assign_stack_local (BLKmode, gpr_size + fpr_size, 64);
6726 gcc_assert (GET_CODE (reg_save_area) == MEM);
6727 reg_save_area = XEXP (reg_save_area, 0);
6728 if (GET_CODE (reg_save_area) == PLUS)
6730 gcc_assert (XEXP (reg_save_area, 0)
6731 == virtual_stack_vars_rtx);
6732 gcc_assert (GET_CODE (XEXP (reg_save_area, 1)) == CONST_INT);
6733 offset += INTVAL (XEXP (reg_save_area, 1));
6735 else
6736 gcc_assert (reg_save_area == virtual_stack_vars_rtx);
6739 cfun->machine->varargs_save_offset = offset;
6740 save_area = plus_constant (virtual_stack_vars_rtx, offset);
6743 else
6745 first_reg_offset = next_cum.words;
6746 save_area = virtual_incoming_args_rtx;
6748 if (targetm.calls.must_pass_in_stack (mode, type))
6749 first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
6752 set = get_varargs_alias_set ();
6753 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG
6754 && cfun->va_list_gpr_size)
6756 int nregs = GP_ARG_NUM_REG - first_reg_offset;
6758 if (va_list_gpr_counter_field)
6760 /* V4 va_list_gpr_size counts the number of registers needed. */
6761 if (nregs > cfun->va_list_gpr_size)
6762 nregs = cfun->va_list_gpr_size;
6764 else
6766 /* char * va_list instead counts the number of bytes needed. */
6767 if (nregs > cfun->va_list_gpr_size / reg_size)
6768 nregs = cfun->va_list_gpr_size / reg_size;
6771 mem = gen_rtx_MEM (BLKmode,
6772 plus_constant (save_area,
6773 first_reg_offset * reg_size));
6774 MEM_NOTRAP_P (mem) = 1;
6775 set_mem_alias_set (mem, set);
6776 set_mem_align (mem, BITS_PER_WORD);
6778 rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
6779 nregs);
6782 /* Save FP registers if needed. */
6783 if (DEFAULT_ABI == ABI_V4
6784 && TARGET_HARD_FLOAT && TARGET_FPRS
6785 && ! no_rtl
6786 && next_cum.fregno <= FP_ARG_V4_MAX_REG
6787 && cfun->va_list_fpr_size)
6789 int fregno = next_cum.fregno, nregs;
6790 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
6791 rtx lab = gen_label_rtx ();
6792 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG)
6793 * UNITS_PER_FP_WORD);
6795 emit_jump_insn
6796 (gen_rtx_SET (VOIDmode,
6797 pc_rtx,
6798 gen_rtx_IF_THEN_ELSE (VOIDmode,
6799 gen_rtx_NE (VOIDmode, cr1,
6800 const0_rtx),
6801 gen_rtx_LABEL_REF (VOIDmode, lab),
6802 pc_rtx)));
6804 for (nregs = 0;
6805 fregno <= FP_ARG_V4_MAX_REG && nregs < cfun->va_list_fpr_size;
6806 fregno++, off += UNITS_PER_FP_WORD, nregs++)
6808 mem = gen_rtx_MEM ((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
6809 ? DFmode : SFmode,
6810 plus_constant (save_area, off));
6811 MEM_NOTRAP_P (mem) = 1;
6812 set_mem_alias_set (mem, set);
6813 set_mem_align (mem, GET_MODE_ALIGNMENT (
6814 (TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
6815 ? DFmode : SFmode));
6816 emit_move_insn (mem, gen_rtx_REG (
6817 (TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
6818 ? DFmode : SFmode, fregno));
6821 emit_label (lab);
6825 /* Create the va_list data type. */
6827 static tree
6828 rs6000_build_builtin_va_list (void)
6830 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
6832 /* For AIX, prefer 'char *' because that's what the system
6833 header files like. */
6834 if (DEFAULT_ABI != ABI_V4)
6835 return build_pointer_type (char_type_node);
6837 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
6838 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
6840 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
6841 unsigned_char_type_node);
6842 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
6843 unsigned_char_type_node);
6844 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
6845 every user file. */
6846 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
6847 short_unsigned_type_node);
6848 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
6849 ptr_type_node);
6850 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
6851 ptr_type_node);
6853 va_list_gpr_counter_field = f_gpr;
6854 va_list_fpr_counter_field = f_fpr;
6856 DECL_FIELD_CONTEXT (f_gpr) = record;
6857 DECL_FIELD_CONTEXT (f_fpr) = record;
6858 DECL_FIELD_CONTEXT (f_res) = record;
6859 DECL_FIELD_CONTEXT (f_ovf) = record;
6860 DECL_FIELD_CONTEXT (f_sav) = record;
6862 TREE_CHAIN (record) = type_decl;
6863 TYPE_NAME (record) = type_decl;
6864 TYPE_FIELDS (record) = f_gpr;
6865 TREE_CHAIN (f_gpr) = f_fpr;
6866 TREE_CHAIN (f_fpr) = f_res;
6867 TREE_CHAIN (f_res) = f_ovf;
6868 TREE_CHAIN (f_ovf) = f_sav;
6870 layout_type (record);
6872 /* The correct type is an array type of one element. */
6873 return build_array_type (record, build_index_type (size_zero_node));
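/* A C-level sketch of the record built above for V.4 (illustrative;
   the field names match the FIELD_DECLs created here).  */
#if 0
typedef struct __va_list_tag
{
  unsigned char gpr;            /* next GP arg register, counts 0..8 */
  unsigned char fpr;            /* next FP arg register, counts 0..8 */
  unsigned short reserved;      /* the named padding, keeps -Wpadded quiet */
  void *overflow_arg_area;      /* next argument passed on the stack */
  void *reg_save_area;          /* base of the dumped register block */
} __va_list[1];
#endif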
6876 /* Implement va_start. */
6878 static void
6879 rs6000_va_start (tree valist, rtx nextarg)
6881 HOST_WIDE_INT words, n_gpr, n_fpr;
6882 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
6883 tree gpr, fpr, ovf, sav, t;
6885 /* Only SVR4 needs something special. */
6886 if (DEFAULT_ABI != ABI_V4)
6888 std_expand_builtin_va_start (valist, nextarg);
6889 return;
6892 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
6893 f_fpr = TREE_CHAIN (f_gpr);
6894 f_res = TREE_CHAIN (f_fpr);
6895 f_ovf = TREE_CHAIN (f_res);
6896 f_sav = TREE_CHAIN (f_ovf);
6898 valist = build_va_arg_indirect_ref (valist);
6899 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
6900 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), unshare_expr (valist),
6901 f_fpr, NULL_TREE);
6902 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), unshare_expr (valist),
6903 f_ovf, NULL_TREE);
6904 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), unshare_expr (valist),
6905 f_sav, NULL_TREE);
6907 /* Count number of gp and fp argument registers used. */
6908 words = crtl->args.info.words;
6909 n_gpr = MIN (crtl->args.info.sysv_gregno - GP_ARG_MIN_REG,
6910 GP_ARG_NUM_REG);
6911 n_fpr = MIN (crtl->args.info.fregno - FP_ARG_MIN_REG,
6912 FP_ARG_NUM_REG);
6914 if (TARGET_DEBUG_ARG)
6915 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
6916 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
6917 words, n_gpr, n_fpr);
6919 if (cfun->va_list_gpr_size)
6921 t = build2 (MODIFY_EXPR, TREE_TYPE (gpr), gpr,
6922 build_int_cst (NULL_TREE, n_gpr));
6923 TREE_SIDE_EFFECTS (t) = 1;
6924 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6927 if (cfun->va_list_fpr_size)
6929 t = build2 (MODIFY_EXPR, TREE_TYPE (fpr), fpr,
6930 build_int_cst (NULL_TREE, n_fpr));
6931 TREE_SIDE_EFFECTS (t) = 1;
6932 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6935 /* Find the overflow area. */
6936 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
6937 if (words != 0)
6938 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovf), t,
6939 size_int (words * UNITS_PER_WORD));
6940 t = build2 (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
6941 TREE_SIDE_EFFECTS (t) = 1;
6942 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6944 /* If there were no va_arg invocations, don't set up the register
6945 save area. */
6946 if (!cfun->va_list_gpr_size
6947 && !cfun->va_list_fpr_size
6948 && n_gpr < GP_ARG_NUM_REG
6949 && n_fpr < FP_ARG_V4_MAX_REG)
6950 return;
6952 /* Find the register save area. */
6953 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
6954 if (cfun->machine->varargs_save_offset)
6955 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (sav), t,
6956 size_int (cfun->machine->varargs_save_offset));
6957 t = build2 (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
6958 TREE_SIDE_EFFECTS (t) = 1;
6959 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6962 /* Implement va_arg. */
6964 tree
6965 rs6000_gimplify_va_arg (tree valist, tree type, gimple_seq *pre_p,
6966 gimple_seq *post_p)
6968 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
6969 tree gpr, fpr, ovf, sav, reg, t, u;
6970 int size, rsize, n_reg, sav_ofs, sav_scale;
6971 tree lab_false, lab_over, addr;
6972 int align;
6973 tree ptrtype = build_pointer_type (type);
6974 int regalign = 0;
6975 gimple stmt;
6977 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
6979 t = rs6000_gimplify_va_arg (valist, ptrtype, pre_p, post_p);
6980 return build_va_arg_indirect_ref (t);
6983 if (DEFAULT_ABI != ABI_V4)
6985 if (targetm.calls.split_complex_arg && TREE_CODE (type) == COMPLEX_TYPE)
6987 tree elem_type = TREE_TYPE (type);
6988 enum machine_mode elem_mode = TYPE_MODE (elem_type);
6989 int elem_size = GET_MODE_SIZE (elem_mode);
6991 if (elem_size < UNITS_PER_WORD)
6993 tree real_part, imag_part;
6994 gimple_seq post = NULL;
6996 real_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
6997 &post);
6998 /* Copy the value into a temporary, lest the formal temporary
6999 be reused out from under us. */
7000 real_part = get_initialized_tmp_var (real_part, pre_p, &post);
7001 gimple_seq_add_seq (pre_p, post);
7003 imag_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
7004 post_p);
7006 return build2 (COMPLEX_EXPR, type, real_part, imag_part);
7010 return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
7013 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
7014 f_fpr = TREE_CHAIN (f_gpr);
7015 f_res = TREE_CHAIN (f_fpr);
7016 f_ovf = TREE_CHAIN (f_res);
7017 f_sav = TREE_CHAIN (f_ovf);
7019 valist = build_va_arg_indirect_ref (valist);
7020 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
7021 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), unshare_expr (valist),
7022 f_fpr, NULL_TREE);
7023 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), unshare_expr (valist),
7024 f_ovf, NULL_TREE);
7025 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), unshare_expr (valist),
7026 f_sav, NULL_TREE);
7028 size = int_size_in_bytes (type);
7029 rsize = (size + 3) / 4;
7030 align = 1;
7032 if (TARGET_HARD_FLOAT && TARGET_FPRS
7033 && ((TARGET_SINGLE_FLOAT && TYPE_MODE (type) == SFmode)
7034 || (TARGET_DOUBLE_FLOAT
7035 && (TYPE_MODE (type) == DFmode
7036 || TYPE_MODE (type) == TFmode
7037 || TYPE_MODE (type) == SDmode
7038 || TYPE_MODE (type) == DDmode
7039 || TYPE_MODE (type) == TDmode))))
7041 /* FP args go in FP registers, if present. */
7042 reg = fpr;
7043 n_reg = (size + 7) / 8;
7044 sav_ofs = ((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT) ? 8 : 4) * 4;
7045 sav_scale = ((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT) ? 8 : 4);
7046 if (TYPE_MODE (type) != SFmode && TYPE_MODE (type) != SDmode)
7047 align = 8;
7049 else
7051 /* Otherwise into GP registers. */
7052 reg = gpr;
7053 n_reg = rsize;
7054 sav_ofs = 0;
7055 sav_scale = 4;
7056 if (n_reg == 2)
7057 align = 8;
7060 /* Pull the value out of the saved registers.... */
7062 lab_over = NULL;
7063 addr = create_tmp_var (ptr_type_node, "addr");
7064 DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
7066 /* AltiVec vectors never go in registers when -mabi=altivec. */
7067 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
7068 align = 16;
7069 else
7071 lab_false = create_artificial_label ();
7072 lab_over = create_artificial_label ();
7074 /* Long long and SPE vectors are aligned in the registers.
7075 So is any other 2-gpr item such as complex int, due to a
7076 historical mistake. */
7077 u = reg;
7078 if (n_reg == 2 && reg == gpr)
7080 regalign = 1;
7081 u = build2 (BIT_AND_EXPR, TREE_TYPE (reg), unshare_expr (reg),
7082 build_int_cst (TREE_TYPE (reg), n_reg - 1));
7083 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg),
7084 unshare_expr (reg), u);
7086 /* _Decimal128 is passed in even/odd fpr pairs; the stored
7087 reg number is 0 for f1, so we want to make it odd. */
7088 else if (reg == fpr && TYPE_MODE (type) == TDmode)
7090 regalign = 1;
7091 t = build2 (BIT_IOR_EXPR, TREE_TYPE (reg), unshare_expr (reg),
7092 build_int_cst (TREE_TYPE (reg), 1));
7093 u = build2 (MODIFY_EXPR, void_type_node, unshare_expr (reg), t);
7096 t = fold_convert (TREE_TYPE (reg), size_int (8 - n_reg + 1));
7097 t = build2 (GE_EXPR, boolean_type_node, u, t);
7098 u = build1 (GOTO_EXPR, void_type_node, lab_false);
7099 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
7100 gimplify_and_add (t, pre_p);
7102 t = sav;
7103 if (sav_ofs)
7104 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, sav, size_int (sav_ofs));
7106 u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), unshare_expr (reg),
7107 build_int_cst (TREE_TYPE (reg), n_reg));
7108 u = fold_convert (sizetype, u);
7109 u = build2 (MULT_EXPR, sizetype, u, size_int (sav_scale));
7110 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t, u);
7112 /* _Decimal32 varargs are located in the second word of the 64-bit
7113 FP register for 32-bit binaries. */
7114 if (!TARGET_POWERPC64
7115 && TARGET_HARD_FLOAT && TARGET_FPRS
7116 && TYPE_MODE (type) == SDmode)
7117 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
7119 gimplify_assign (addr, t, pre_p);
7121 gimple_seq_add_stmt (pre_p, gimple_build_goto (lab_over));
7123 stmt = gimple_build_label (lab_false);
7124 gimple_seq_add_stmt (pre_p, stmt);
7126 if ((n_reg == 2 && !regalign) || n_reg > 2)
7128 /* Ensure that we don't find any more args in regs.
7129 Alignment has been taken care of for the special cases. */
7130 gimplify_assign (reg, build_int_cst (TREE_TYPE (reg), 8), pre_p);
7134 /* ... otherwise out of the overflow area. */
7136 /* Care for on-stack alignment if needed. */
7137 t = ovf;
7138 if (align != 1)
7140 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (align - 1));
7141 t = fold_convert (sizetype, t);
7142 t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t,
7143 size_int (-align));
7144 t = fold_convert (TREE_TYPE (ovf), t);
7146 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
7148 gimplify_assign (unshare_expr (addr), t, pre_p);
7150 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
7151 gimplify_assign (unshare_expr (ovf), t, pre_p);
7153 if (lab_over)
7155 stmt = gimple_build_label (lab_over);
7156 gimple_seq_add_stmt (pre_p, stmt);
7159 if (STRICT_ALIGNMENT
7160 && (TYPE_ALIGN (type)
7161 > (unsigned) BITS_PER_UNIT * (align < 4 ? 4 : align)))
7163 /* The value (of type complex double, for example) may not be
7164 aligned in memory in the saved registers, so copy via a
7165 temporary. (This is the same code as used for SPARC.) */
7166 tree tmp = create_tmp_var (type, "va_arg_tmp");
7167 tree dest_addr = build_fold_addr_expr (tmp);
7169 tree copy = build_call_expr (implicit_built_in_decls[BUILT_IN_MEMCPY],
7170 3, dest_addr, addr, size_int (rsize * 4));
7172 gimplify_and_add (copy, pre_p);
7173 addr = dest_addr;
7176 addr = fold_convert (ptrtype, addr);
7177 return build_va_arg_indirect_ref (addr);
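/* A hedged C-level sketch of what the V.4 path above gimplifies for a
   plain 4-byte int (reg == gpr, n_reg == 1, sav_ofs == 0, sav_scale == 4);
   the labels and the ap variable are illustrative names only.  */
#if 0
  if (ap->gpr >= 8)
    goto on_stack;
  addr = (char *) ap->reg_save_area + ap->gpr++ * 4;
  goto done;
on_stack:
  addr = ap->overflow_arg_area;              /* align == 1 for int */
  ap->overflow_arg_area = (char *) addr + 4;
done:
  value = *(int *) addr;
#endif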
7180 /* Builtins. */
7182 static void
7183 def_builtin (int mask, const char *name, tree type, int code)
7185 if ((mask & target_flags) || TARGET_PAIRED_FLOAT)
7187 if (rs6000_builtin_decls[code])
7188 abort ();
7190 rs6000_builtin_decls[code] =
7191 add_builtin_function (name, type, code, BUILT_IN_MD,
7192 NULL, NULL_TREE);
7196 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
7198 static const struct builtin_description bdesc_3arg[] =
7200 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
7201 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
7202 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
7203 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
7204 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
7205 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
7206 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
7207 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
7208 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
7209 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
7210 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
7211 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
7212 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
7213 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
7214 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
7215 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
7216 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
7217 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
7218 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_v16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
7219 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
7220 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
7221 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
7222 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_v4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
7224 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madd", ALTIVEC_BUILTIN_VEC_MADD },
7225 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_madds", ALTIVEC_BUILTIN_VEC_MADDS },
7226 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mladd", ALTIVEC_BUILTIN_VEC_MLADD },
7227 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mradds", ALTIVEC_BUILTIN_VEC_MRADDS },
7228 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msum", ALTIVEC_BUILTIN_VEC_MSUM },
7229 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshm", ALTIVEC_BUILTIN_VEC_VMSUMSHM },
7230 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhm", ALTIVEC_BUILTIN_VEC_VMSUMUHM },
7231 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsummbm", ALTIVEC_BUILTIN_VEC_VMSUMMBM },
7232 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumubm", ALTIVEC_BUILTIN_VEC_VMSUMUBM },
7233 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msums", ALTIVEC_BUILTIN_VEC_MSUMS },
7234 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumshs", ALTIVEC_BUILTIN_VEC_VMSUMSHS },
7235 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhs", ALTIVEC_BUILTIN_VEC_VMSUMUHS },
7236 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nmsub", ALTIVEC_BUILTIN_VEC_NMSUB },
7237 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_perm", ALTIVEC_BUILTIN_VEC_PERM },
7238 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sel", ALTIVEC_BUILTIN_VEC_SEL },
7240 { 0, CODE_FOR_paired_msub, "__builtin_paired_msub", PAIRED_BUILTIN_MSUB },
7241 { 0, CODE_FOR_paired_madd, "__builtin_paired_madd", PAIRED_BUILTIN_MADD },
7242 { 0, CODE_FOR_paired_madds0, "__builtin_paired_madds0", PAIRED_BUILTIN_MADDS0 },
7243 { 0, CODE_FOR_paired_madds1, "__builtin_paired_madds1", PAIRED_BUILTIN_MADDS1 },
7244 { 0, CODE_FOR_paired_nmsub, "__builtin_paired_nmsub", PAIRED_BUILTIN_NMSUB },
7245 { 0, CODE_FOR_paired_nmadd, "__builtin_paired_nmadd", PAIRED_BUILTIN_NMADD },
7246 { 0, CODE_FOR_paired_sum0, "__builtin_paired_sum0", PAIRED_BUILTIN_SUM0 },
7247 { 0, CODE_FOR_paired_sum1, "__builtin_paired_sum1", PAIRED_BUILTIN_SUM1 },
7248 { 0, CODE_FOR_selv2sf4, "__builtin_paired_selv2sf4", PAIRED_BUILTIN_SELV2SF4 },
7249 };
7251 /* DST operations: void foo (void *, const int, const char). */
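/* At the user level these become the data-stream touch intrinsics:
   e.g. vec_dst (p, ctrl, 0) starts a prefetch of the stream described
   by CTRL on stream tag 0; the ...t forms mark the data as transient
   and the ...st forms describe a store stream. */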
7253 static const struct builtin_description bdesc_dst[] =
7254 {
7255 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
7256 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
7257 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
7258 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT },
7260 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dst", ALTIVEC_BUILTIN_VEC_DST },
7261 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstt", ALTIVEC_BUILTIN_VEC_DSTT },
7262 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dstst", ALTIVEC_BUILTIN_VEC_DSTST },
7263 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_dststt", ALTIVEC_BUILTIN_VEC_DSTSTT }
7264 };
7266 /* Simple binary operations: VECc = foo (VECa, VECb). */
7268 static struct builtin_description bdesc_2arg[] =
7269 {
7270 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
7271 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
7272 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
7273 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
7274 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
7275 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
7276 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
7277 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
7278 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
7279 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
7280 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
7281 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
7282 { MASK_ALTIVEC, CODE_FOR_andcv4si3, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
7283 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
7284 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
7285 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
7286 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
7287 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
7288 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
7289 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
7290 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
7291 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
7292 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
7293 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
7294 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
7295 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
7296 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
7297 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
7298 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
7299 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
7300 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
7301 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
7302 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
7303 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
7304 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
7305 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
7306 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
7307 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
7308 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
7309 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
7310 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
7311 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
7312 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
7313 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
7314 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
7315 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
7316 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
7317 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
7318 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
7319 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
7320 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
7321 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
7322 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
7323 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
7324 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
7325 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
7326 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
7327 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
7328 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
7329 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
7330 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
7331 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
7332 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
7333 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
7334 { MASK_ALTIVEC, CODE_FOR_altivec_norv4si3, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
7335 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
7336 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
7337 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
7338 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
7339 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
7340 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
7341 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
7342 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
7343 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
7344 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
7345 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
7346 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
7347 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
7348 { MASK_ALTIVEC, CODE_FOR_vashlv16qi3, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
7349 { MASK_ALTIVEC, CODE_FOR_vashlv8hi3, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
7350 { MASK_ALTIVEC, CODE_FOR_vashlv4si3, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
7351 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
7352 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
7353 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
7354 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
7355 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
7356 { MASK_ALTIVEC, CODE_FOR_vlshrv16qi3, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
7357 { MASK_ALTIVEC, CODE_FOR_vlshrv8hi3, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
7358 { MASK_ALTIVEC, CODE_FOR_vlshrv4si3, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
7359 { MASK_ALTIVEC, CODE_FOR_vashrv16qi3, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
7360 { MASK_ALTIVEC, CODE_FOR_vashrv8hi3, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
7361 { MASK_ALTIVEC, CODE_FOR_vashrv4si3, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
7362 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
7363 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
7364 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
7365 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
7366 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
7367 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
7368 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
7369 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
7370 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
7371 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
7372 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
7373 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
7374 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
7375 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
7376 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
7377 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
7378 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
7379 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
7380 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
7382 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_add", ALTIVEC_BUILTIN_VEC_ADD },
7383 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddfp", ALTIVEC_BUILTIN_VEC_VADDFP },
7384 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduwm", ALTIVEC_BUILTIN_VEC_VADDUWM },
7385 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhm", ALTIVEC_BUILTIN_VEC_VADDUHM },
7386 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubm", ALTIVEC_BUILTIN_VEC_VADDUBM },
7387 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_addc", ALTIVEC_BUILTIN_VEC_ADDC },
7388 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_adds", ALTIVEC_BUILTIN_VEC_ADDS },
7389 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsws", ALTIVEC_BUILTIN_VEC_VADDSWS },
7390 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduws", ALTIVEC_BUILTIN_VEC_VADDUWS },
7391 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddshs", ALTIVEC_BUILTIN_VEC_VADDSHS },
7392 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vadduhs", ALTIVEC_BUILTIN_VEC_VADDUHS },
7393 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddsbs", ALTIVEC_BUILTIN_VEC_VADDSBS },
7394 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vaddubs", ALTIVEC_BUILTIN_VEC_VADDUBS },
7395 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_and", ALTIVEC_BUILTIN_VEC_AND },
7396 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_andc", ALTIVEC_BUILTIN_VEC_ANDC },
7397 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_avg", ALTIVEC_BUILTIN_VEC_AVG },
7398 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsw", ALTIVEC_BUILTIN_VEC_VAVGSW },
7399 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguw", ALTIVEC_BUILTIN_VEC_VAVGUW },
7400 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsh", ALTIVEC_BUILTIN_VEC_VAVGSH },
7401 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavguh", ALTIVEC_BUILTIN_VEC_VAVGUH },
7402 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgsb", ALTIVEC_BUILTIN_VEC_VAVGSB },
7403 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vavgub", ALTIVEC_BUILTIN_VEC_VAVGUB },
7404 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpb", ALTIVEC_BUILTIN_VEC_CMPB },
7405 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpeq", ALTIVEC_BUILTIN_VEC_CMPEQ },
7406 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpeqfp", ALTIVEC_BUILTIN_VEC_VCMPEQFP },
7407 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequw", ALTIVEC_BUILTIN_VEC_VCMPEQUW },
7408 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequh", ALTIVEC_BUILTIN_VEC_VCMPEQUH },
7409 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpequb", ALTIVEC_BUILTIN_VEC_VCMPEQUB },
7410 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpge", ALTIVEC_BUILTIN_VEC_CMPGE },
7411 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmpgt", ALTIVEC_BUILTIN_VEC_CMPGT },
7412 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtfp", ALTIVEC_BUILTIN_VEC_VCMPGTFP },
7413 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsw", ALTIVEC_BUILTIN_VEC_VCMPGTSW },
7414 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuw", ALTIVEC_BUILTIN_VEC_VCMPGTUW },
7415 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsh", ALTIVEC_BUILTIN_VEC_VCMPGTSH },
7416 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtuh", ALTIVEC_BUILTIN_VEC_VCMPGTUH },
7417 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtsb", ALTIVEC_BUILTIN_VEC_VCMPGTSB },
7418 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vcmpgtub", ALTIVEC_BUILTIN_VEC_VCMPGTUB },
7419 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmple", ALTIVEC_BUILTIN_VEC_CMPLE },
7420 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_cmplt", ALTIVEC_BUILTIN_VEC_CMPLT },
7421 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_max", ALTIVEC_BUILTIN_VEC_MAX },
7422 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxfp", ALTIVEC_BUILTIN_VEC_VMAXFP },
7423 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsw", ALTIVEC_BUILTIN_VEC_VMAXSW },
7424 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuw", ALTIVEC_BUILTIN_VEC_VMAXUW },
7425 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsh", ALTIVEC_BUILTIN_VEC_VMAXSH },
7426 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxuh", ALTIVEC_BUILTIN_VEC_VMAXUH },
7427 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxsb", ALTIVEC_BUILTIN_VEC_VMAXSB },
7428 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmaxub", ALTIVEC_BUILTIN_VEC_VMAXUB },
7429 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergeh", ALTIVEC_BUILTIN_VEC_MERGEH },
7430 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghw", ALTIVEC_BUILTIN_VEC_VMRGHW },
7431 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghh", ALTIVEC_BUILTIN_VEC_VMRGHH },
7432 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrghb", ALTIVEC_BUILTIN_VEC_VMRGHB },
7433 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mergel", ALTIVEC_BUILTIN_VEC_MERGEL },
7434 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglw", ALTIVEC_BUILTIN_VEC_VMRGLW },
7435 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglh", ALTIVEC_BUILTIN_VEC_VMRGLH },
7436 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmrglb", ALTIVEC_BUILTIN_VEC_VMRGLB },
7437 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_min", ALTIVEC_BUILTIN_VEC_MIN },
7438 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminfp", ALTIVEC_BUILTIN_VEC_VMINFP },
7439 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsw", ALTIVEC_BUILTIN_VEC_VMINSW },
7440 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuw", ALTIVEC_BUILTIN_VEC_VMINUW },
7441 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsh", ALTIVEC_BUILTIN_VEC_VMINSH },
7442 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminuh", ALTIVEC_BUILTIN_VEC_VMINUH },
7443 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminsb", ALTIVEC_BUILTIN_VEC_VMINSB },
7444 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vminub", ALTIVEC_BUILTIN_VEC_VMINUB },
7445 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mule", ALTIVEC_BUILTIN_VEC_MULE },
7446 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleub", ALTIVEC_BUILTIN_VEC_VMULEUB },
7447 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesb", ALTIVEC_BUILTIN_VEC_VMULESB },
7448 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuleuh", ALTIVEC_BUILTIN_VEC_VMULEUH },
7449 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulesh", ALTIVEC_BUILTIN_VEC_VMULESH },
7450 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mulo", ALTIVEC_BUILTIN_VEC_MULO },
7451 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosh", ALTIVEC_BUILTIN_VEC_VMULOSH },
7452 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulouh", ALTIVEC_BUILTIN_VEC_VMULOUH },
7453 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmulosb", ALTIVEC_BUILTIN_VEC_VMULOSB },
7454 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmuloub", ALTIVEC_BUILTIN_VEC_VMULOUB },
7455 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nor", ALTIVEC_BUILTIN_VEC_NOR },
7456 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_or", ALTIVEC_BUILTIN_VEC_OR },
7457 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_pack", ALTIVEC_BUILTIN_VEC_PACK },
7458 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwum", ALTIVEC_BUILTIN_VEC_VPKUWUM },
7459 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhum", ALTIVEC_BUILTIN_VEC_VPKUHUM },
7460 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packpx", ALTIVEC_BUILTIN_VEC_PACKPX },
7461 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packs", ALTIVEC_BUILTIN_VEC_PACKS },
7462 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswss", ALTIVEC_BUILTIN_VEC_VPKSWSS },
7463 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuwus", ALTIVEC_BUILTIN_VEC_VPKUWUS },
7464 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshss", ALTIVEC_BUILTIN_VEC_VPKSHSS },
7465 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhus", ALTIVEC_BUILTIN_VEC_VPKUHUS },
7466 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packsu", ALTIVEC_BUILTIN_VEC_PACKSU },
7467 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswus", ALTIVEC_BUILTIN_VEC_VPKSWUS },
7468 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshus", ALTIVEC_BUILTIN_VEC_VPKSHUS },
7469 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rl", ALTIVEC_BUILTIN_VEC_RL },
7470 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlw", ALTIVEC_BUILTIN_VEC_VRLW },
7471 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlh", ALTIVEC_BUILTIN_VEC_VRLH },
7472 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlb", ALTIVEC_BUILTIN_VEC_VRLB },
7473 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sl", ALTIVEC_BUILTIN_VEC_SL },
7474 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslw", ALTIVEC_BUILTIN_VEC_VSLW },
7475 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslh", ALTIVEC_BUILTIN_VEC_VSLH },
7476 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vslb", ALTIVEC_BUILTIN_VEC_VSLB },
7477 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sll", ALTIVEC_BUILTIN_VEC_SLL },
7478 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_slo", ALTIVEC_BUILTIN_VEC_SLO },
7479 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sr", ALTIVEC_BUILTIN_VEC_SR },
7480 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrw", ALTIVEC_BUILTIN_VEC_VSRW },
7481 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrh", ALTIVEC_BUILTIN_VEC_VSRH },
7482 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrb", ALTIVEC_BUILTIN_VEC_VSRB },
7483 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sra", ALTIVEC_BUILTIN_VEC_SRA },
7484 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsraw", ALTIVEC_BUILTIN_VEC_VSRAW },
7485 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrah", ALTIVEC_BUILTIN_VEC_VSRAH },
7486 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsrab", ALTIVEC_BUILTIN_VEC_VSRAB },
7487 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_srl", ALTIVEC_BUILTIN_VEC_SRL },
7488 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sro", ALTIVEC_BUILTIN_VEC_SRO },
7489 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sub", ALTIVEC_BUILTIN_VEC_SUB },
7490 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubfp", ALTIVEC_BUILTIN_VEC_VSUBFP },
7491 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuwm", ALTIVEC_BUILTIN_VEC_VSUBUWM },
7492 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhm", ALTIVEC_BUILTIN_VEC_VSUBUHM },
7493 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububm", ALTIVEC_BUILTIN_VEC_VSUBUBM },
7494 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subc", ALTIVEC_BUILTIN_VEC_SUBC },
7495 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_subs", ALTIVEC_BUILTIN_VEC_SUBS },
7496 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsws", ALTIVEC_BUILTIN_VEC_VSUBSWS },
7497 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuws", ALTIVEC_BUILTIN_VEC_VSUBUWS },
7498 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubshs", ALTIVEC_BUILTIN_VEC_VSUBSHS },
7499 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubuhs", ALTIVEC_BUILTIN_VEC_VSUBUHS },
7500 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsubsbs", ALTIVEC_BUILTIN_VEC_VSUBSBS },
7501 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsububs", ALTIVEC_BUILTIN_VEC_VSUBUBS },
7502 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum4s", ALTIVEC_BUILTIN_VEC_SUM4S },
7503 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4shs", ALTIVEC_BUILTIN_VEC_VSUM4SHS },
7504 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4sbs", ALTIVEC_BUILTIN_VEC_VSUM4SBS },
7505 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vsum4ubs", ALTIVEC_BUILTIN_VEC_VSUM4UBS },
7506 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sum2s", ALTIVEC_BUILTIN_VEC_SUM2S },
7507 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sums", ALTIVEC_BUILTIN_VEC_SUMS },
7508 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_xor", ALTIVEC_BUILTIN_VEC_XOR },
7510 { 0, CODE_FOR_divv2sf3, "__builtin_paired_divv2sf3", PAIRED_BUILTIN_DIVV2SF3 },
7511 { 0, CODE_FOR_addv2sf3, "__builtin_paired_addv2sf3", PAIRED_BUILTIN_ADDV2SF3 },
7512 { 0, CODE_FOR_subv2sf3, "__builtin_paired_subv2sf3", PAIRED_BUILTIN_SUBV2SF3 },
7513 { 0, CODE_FOR_mulv2sf3, "__builtin_paired_mulv2sf3", PAIRED_BUILTIN_MULV2SF3 },
7514 { 0, CODE_FOR_paired_muls0, "__builtin_paired_muls0", PAIRED_BUILTIN_MULS0 },
7515 { 0, CODE_FOR_paired_muls1, "__builtin_paired_muls1", PAIRED_BUILTIN_MULS1 },
7516 { 0, CODE_FOR_paired_merge00, "__builtin_paired_merge00", PAIRED_BUILTIN_MERGE00 },
7517 { 0, CODE_FOR_paired_merge01, "__builtin_paired_merge01", PAIRED_BUILTIN_MERGE01 },
7518 { 0, CODE_FOR_paired_merge10, "__builtin_paired_merge10", PAIRED_BUILTIN_MERGE10 },
7519 { 0, CODE_FOR_paired_merge11, "__builtin_paired_merge11", PAIRED_BUILTIN_MERGE11 },
7521 /* Place-holder. Leave as first SPE builtin. */
7522 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
7523 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
7524 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
7525 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
7526 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
7527 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
7528 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
7529 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
7530 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
7531 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
7532 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
7533 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
7534 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
7535 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
7536 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
7537 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
7538 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
7539 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
7540 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
7541 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
7542 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
7543 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
7544 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
7545 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
7546 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
7547 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
7548 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
7549 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
7550 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
7551 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
7552 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
7553 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
7554 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
7555 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
7556 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
7557 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
7558 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
7559 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
7560 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
7561 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
7562 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
7563 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
7564 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
7565 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
7566 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
7567 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
7568 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
7569 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
7570 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
7571 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
7572 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
7573 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
7574 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
7575 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
7576 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
7577 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
7578 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
7579 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
7580 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
7581 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
7582 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
7583 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
7584 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
7585 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
7586 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
7587 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
7588 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
7589 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
7590 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
7591 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
7592 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
7593 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
7594 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
7595 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
7596 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
7597 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
7598 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
7599 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
7600 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
7601 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
7602 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
7603 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
7604 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
7605 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
7606 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
7607 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
7608 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
7609 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
7610 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
7611 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
7612 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
7613 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
7614 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
7615 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
7616 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
7617 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
7618 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
7619 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
7620 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
7621 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
7622 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
7623 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
7624 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
7625 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
7626 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
7627 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
7628 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
7629 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
7630 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
7632 /* SPE binary operations expecting a 5-bit unsigned literal. */
7633 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
7635 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
7636 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
7637 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
7638 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
7639 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
7640 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
7641 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
7642 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
7643 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
7644 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
7645 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
7646 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
7647 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
7648 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
7649 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
7650 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
7651 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
7652 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
7653 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
7654 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
7655 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
7656 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
7657 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
7658 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
7659 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
7660 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
7662 /* Place-holder. Leave as last binary SPE builtin. */
7663 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR }
7664 };
7666 /* AltiVec predicates. */
7668 struct builtin_description_predicates
7669 {
7670 const unsigned int mask;
7671 const enum insn_code icode;
7672 const char *opcode;
7673 const char *const name;
7674 const enum rs6000_builtins code;
7675 };
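/* Note: OPCODE names the record-form ("dot") compare mnemonic; the
   trailing '.' denotes the instruction variant that also sets CR6.
   altivec_expand_predicate_builtin below hands it to the predicate
   insn as a SYMBOL_REF. */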
7677 static const struct builtin_description_predicates bdesc_altivec_preds[] =
7678 {
7679 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
7680 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
7681 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
7682 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
7683 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
7684 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
7685 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
7686 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
7687 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
7688 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
7689 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
7690 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
7691 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P },
7693 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpeq_p", ALTIVEC_BUILTIN_VCMPEQ_P },
7694 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpgt_p", ALTIVEC_BUILTIN_VCMPGT_P },
7695 { MASK_ALTIVEC, 0, NULL, "__builtin_vec_vcmpge_p", ALTIVEC_BUILTIN_VCMPGE_P }
7696 };
7698 /* SPE predicates. */
7699 static struct builtin_description bdesc_spe_predicates[] =
7700 {
7701 /* Place-holder. Leave as first. */
7702 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
7703 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
7704 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
7705 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
7706 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
7707 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
7708 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
7709 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
7710 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
7711 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
7712 /* Place-holder. Leave as last. */
7713 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
7714 };
7716 /* SPE evsel predicates. */
7717 static struct builtin_description bdesc_spe_evsel[] =
7718 {
7719 /* Place-holder. Leave as first. */
7720 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
7721 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
7722 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
7723 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
7724 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
7725 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
7726 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
7727 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
7728 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
7729 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
7730 /* Place-holder. Leave as last. */
7731 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
7732 };
7734 /* PAIRED predicates. */
7735 static const struct builtin_description bdesc_paired_preds[] =
7736 {
7737 /* Place-holder. Leave as first. */
7738 { 0, CODE_FOR_paired_cmpu0, "__builtin_paired_cmpu0", PAIRED_BUILTIN_CMPU0 },
7739 /* Place-holder. Leave as last. */
7740 { 0, CODE_FOR_paired_cmpu1, "__builtin_paired_cmpu1", PAIRED_BUILTIN_CMPU1 },
7741 };
7743 /* ABS* operations. */
7745 static const struct builtin_description bdesc_abs[] =
7746 {
7747 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
7748 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
7749 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
7750 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
7751 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
7752 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
7753 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
7754 };
7756 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
7757 foo (VECa). */
7759 static struct builtin_description bdesc_1arg[] =
7760 {
7761 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
7762 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
7763 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
7764 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
7765 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
7766 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
7767 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
7768 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
7769 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
7770 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
7771 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
7772 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
7773 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
7774 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
7775 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
7776 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
7777 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
7779 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abs", ALTIVEC_BUILTIN_VEC_ABS },
7780 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_abss", ALTIVEC_BUILTIN_VEC_ABSS },
7781 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_ceil", ALTIVEC_BUILTIN_VEC_CEIL },
7782 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_expte", ALTIVEC_BUILTIN_VEC_EXPTE },
7783 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_floor", ALTIVEC_BUILTIN_VEC_FLOOR },
7784 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_loge", ALTIVEC_BUILTIN_VEC_LOGE },
7785 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mtvscr", ALTIVEC_BUILTIN_VEC_MTVSCR },
7786 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_re", ALTIVEC_BUILTIN_VEC_RE },
7787 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_round", ALTIVEC_BUILTIN_VEC_ROUND },
7788 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rsqrte", ALTIVEC_BUILTIN_VEC_RSQRTE },
7789 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_trunc", ALTIVEC_BUILTIN_VEC_TRUNC },
7790 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackh", ALTIVEC_BUILTIN_VEC_UNPACKH },
7791 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsh", ALTIVEC_BUILTIN_VEC_VUPKHSH },
7792 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhpx", ALTIVEC_BUILTIN_VEC_VUPKHPX },
7793 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsb", ALTIVEC_BUILTIN_VEC_VUPKHSB },
7794 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackl", ALTIVEC_BUILTIN_VEC_UNPACKL },
7795 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklpx", ALTIVEC_BUILTIN_VEC_VUPKLPX },
7796 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsh", ALTIVEC_BUILTIN_VEC_VUPKLSH },
7797 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupklsb", ALTIVEC_BUILTIN_VEC_VUPKLSB },
7799 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
7800 end with SPE_BUILTIN_EVSUBFUSIAAW. */
7801 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
7802 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
7803 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
7804 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
7805 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
7806 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
7807 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
7808 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
7809 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
7810 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
7811 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
7812 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
7813 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
7814 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
7815 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
7816 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
7817 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
7818 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
7819 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
7820 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
7821 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
7822 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
7823 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
7824 { 0, CODE_FOR_negv2si2, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
7825 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
7826 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
7827 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
7828 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
7830 /* Place-holder. Leave as last unary SPE builtin. */
7831 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
7833 { 0, CODE_FOR_absv2sf2, "__builtin_paired_absv2sf2", PAIRED_BUILTIN_ABSV2SF2 },
7834 { 0, CODE_FOR_nabsv2sf2, "__builtin_paired_nabsv2sf2", PAIRED_BUILTIN_NABSV2SF2 },
7835 { 0, CODE_FOR_negv2sf2, "__builtin_paired_negv2sf2", PAIRED_BUILTIN_NEGV2SF2 },
7836 { 0, CODE_FOR_sqrtv2sf2, "__builtin_paired_sqrtv2sf2", PAIRED_BUILTIN_SQRTV2SF2 },
7837 { 0, CODE_FOR_resv2sf2, "__builtin_paired_resv2sf2", PAIRED_BUILTIN_RESV2SF2 }
7838 };
7840 static rtx
7841 rs6000_expand_unop_builtin (enum insn_code icode, tree exp, rtx target)
7842 {
7843 rtx pat;
7844 tree arg0 = CALL_EXPR_ARG (exp, 0);
7845 rtx op0 = expand_normal (arg0);
7846 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7847 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7849 if (icode == CODE_FOR_nothing)
7850 /* Builtin not supported on this processor. */
7851 return 0;
7853 /* If we got invalid arguments, bail out before generating bad rtl. */
7854 if (arg0 == error_mark_node)
7855 return const0_rtx;
7857 if (icode == CODE_FOR_altivec_vspltisb
7858 || icode == CODE_FOR_altivec_vspltish
7859 || icode == CODE_FOR_altivec_vspltisw
7860 || icode == CODE_FOR_spe_evsplatfi
7861 || icode == CODE_FOR_spe_evsplati)
7862 {
7863 /* Only allow 5-bit *signed* literals. */
7864 if (GET_CODE (op0) != CONST_INT
7865 || INTVAL (op0) > 15
7866 || INTVAL (op0) < -16)
7867 {
7868 error ("argument 1 must be a 5-bit signed literal");
7869 return const0_rtx;
7870 }
7871 }
7873 if (target == 0
7874 || GET_MODE (target) != tmode
7875 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7876 target = gen_reg_rtx (tmode);
7878 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7879 op0 = copy_to_mode_reg (mode0, op0);
7881 pat = GEN_FCN (icode) (target, op0);
7882 if (! pat)
7883 return 0;
7884 emit_insn (pat);
7886 return target;
7887 }
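/* For example, the signed-literal check above is what rejects
   out-of-range splat immediates at the user level, e.g. (assuming the
   usual <altivec.h> mapping of vec_splat_s8 onto
   CODE_FOR_altivec_vspltisb):

       vector signed char a = vec_splat_s8 (15);  // ok: -16 <= 15 <= 15
       vector signed char b = vec_splat_s8 (16);  // error: not a 5-bit
                                                  // signed literal
*/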
7889 static rtx
7890 altivec_expand_abs_builtin (enum insn_code icode, tree exp, rtx target)
7891 {
7892 rtx pat, scratch1, scratch2;
7893 tree arg0 = CALL_EXPR_ARG (exp, 0);
7894 rtx op0 = expand_normal (arg0);
7895 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7896 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7898 /* If we have invalid arguments, bail out before generating bad rtl. */
7899 if (arg0 == error_mark_node)
7900 return const0_rtx;
7902 if (target == 0
7903 || GET_MODE (target) != tmode
7904 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7905 target = gen_reg_rtx (tmode);
7907 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7908 op0 = copy_to_mode_reg (mode0, op0);
7910 scratch1 = gen_reg_rtx (mode0);
7911 scratch2 = gen_reg_rtx (mode0);
7913 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
7914 if (! pat)
7915 return 0;
7916 emit_insn (pat);
7918 return target;
7919 }
7921 static rtx
7922 rs6000_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
7923 {
7924 rtx pat;
7925 tree arg0 = CALL_EXPR_ARG (exp, 0);
7926 tree arg1 = CALL_EXPR_ARG (exp, 1);
7927 rtx op0 = expand_normal (arg0);
7928 rtx op1 = expand_normal (arg1);
7929 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7930 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
7931 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
7933 if (icode == CODE_FOR_nothing)
7934 /* Builtin not supported on this processor. */
7935 return 0;
7937 /* If we got invalid arguments, bail out before generating bad rtl. */
7938 if (arg0 == error_mark_node || arg1 == error_mark_node)
7939 return const0_rtx;
7941 if (icode == CODE_FOR_altivec_vcfux
7942 || icode == CODE_FOR_altivec_vcfsx
7943 || icode == CODE_FOR_altivec_vctsxs
7944 || icode == CODE_FOR_altivec_vctuxs
7945 || icode == CODE_FOR_altivec_vspltb
7946 || icode == CODE_FOR_altivec_vsplth
7947 || icode == CODE_FOR_altivec_vspltw
7948 || icode == CODE_FOR_spe_evaddiw
7949 || icode == CODE_FOR_spe_evldd
7950 || icode == CODE_FOR_spe_evldh
7951 || icode == CODE_FOR_spe_evldw
7952 || icode == CODE_FOR_spe_evlhhesplat
7953 || icode == CODE_FOR_spe_evlhhossplat
7954 || icode == CODE_FOR_spe_evlhhousplat
7955 || icode == CODE_FOR_spe_evlwhe
7956 || icode == CODE_FOR_spe_evlwhos
7957 || icode == CODE_FOR_spe_evlwhou
7958 || icode == CODE_FOR_spe_evlwhsplat
7959 || icode == CODE_FOR_spe_evlwwsplat
7960 || icode == CODE_FOR_spe_evrlwi
7961 || icode == CODE_FOR_spe_evslwi
7962 || icode == CODE_FOR_spe_evsrwis
7963 || icode == CODE_FOR_spe_evsubifw
7964 || icode == CODE_FOR_spe_evsrwiu)
7966 /* Only allow 5-bit unsigned literals. */
7967 STRIP_NOPS (arg1);
7968 if (TREE_CODE (arg1) != INTEGER_CST
7969 || TREE_INT_CST_LOW (arg1) & ~0x1f)
7971 error ("argument 2 must be a 5-bit unsigned literal");
7972 return const0_rtx;
7976 if (target == 0
7977 || GET_MODE (target) != tmode
7978 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
7979 target = gen_reg_rtx (tmode);
7981 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
7982 op0 = copy_to_mode_reg (mode0, op0);
7983 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
7984 op1 = copy_to_mode_reg (mode1, op1);
7986 pat = GEN_FCN (icode) (target, op0, op1);
7987 if (! pat)
7988 return 0;
7989 emit_insn (pat);
7991 return target;
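/* Illustrative sketch, not compiler code: assuming vec_ctf from
   altivec.h reaches the vcfsx/vcfux patterns listed above,

     vector float a = vec_ctf (vsi, 1);    scale 1 is in 0..31, accepted
     vector float b = vec_ctf (vsi, 32);   rejected with
                     "argument 2 must be a 5-bit unsigned literal"  */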
7994 static rtx
7995 altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
7996 tree exp, rtx target)
7998 rtx pat, scratch;
7999 tree cr6_form = CALL_EXPR_ARG (exp, 0);
8000 tree arg0 = CALL_EXPR_ARG (exp, 1);
8001 tree arg1 = CALL_EXPR_ARG (exp, 2);
8002 rtx op0 = expand_normal (arg0);
8003 rtx op1 = expand_normal (arg1);
8004 enum machine_mode tmode = SImode;
8005 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8006 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8007 int cr6_form_int;
8009 if (TREE_CODE (cr6_form) != INTEGER_CST)
8011 error ("argument 1 of __builtin_altivec_predicate must be a constant");
8012 return const0_rtx;
8014 else
8015 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
8017 gcc_assert (mode0 == mode1);
8019 /* If we have invalid arguments, bail out before generating bad rtl. */
8020 if (arg0 == error_mark_node || arg1 == error_mark_node)
8021 return const0_rtx;
8023 if (target == 0
8024 || GET_MODE (target) != tmode
8025 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8026 target = gen_reg_rtx (tmode);
8028 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8029 op0 = copy_to_mode_reg (mode0, op0);
8030 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
8031 op1 = copy_to_mode_reg (mode1, op1);
8033 scratch = gen_reg_rtx (mode0);
8035 pat = GEN_FCN (icode) (scratch, op0, op1,
8036 gen_rtx_SYMBOL_REF (Pmode, opcode));
8037 if (! pat)
8038 return 0;
8039 emit_insn (pat);
8041 /* The vec_any* and vec_all* predicates use the same opcodes for two
8042 different operations, but the bits in CR6 will be different
8043 depending on what information we want. So we have to play tricks
8044 with CR6 to get the right bits out.
8046 If you think this is disgusting, look at the specs for the
8047 AltiVec predicates. */
8049 switch (cr6_form_int)
8051 case 0:
8052 emit_insn (gen_cr6_test_for_zero (target));
8053 break;
8054 case 1:
8055 emit_insn (gen_cr6_test_for_zero_reverse (target));
8056 break;
8057 case 2:
8058 emit_insn (gen_cr6_test_for_lt (target));
8059 break;
8060 case 3:
8061 emit_insn (gen_cr6_test_for_lt_reverse (target));
8062 break;
8063 default:
8064 error ("argument 1 of __builtin_altivec_predicate is out of range");
8065 break;
8068 return target;
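/* Sketch of how CR6_FORM is chosen (the __CR6_* encodings below are
   the usual altivec.h values, stated here as an assumption rather than
   restated from this file):

     vec_all_eq (a, b)  ->  ..._vcmpequw_p (__CR6_LT, a, b)      form 2
     vec_any_eq (a, b)  ->  ..._vcmpequw_p (__CR6_EQ_REV, a, b)  form 1

   so forms 0..3 select the gen_cr6_test_for_{zero, zero_reverse, lt,
   lt_reverse} cases above.  */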
8071 static rtx
8072 paired_expand_lv_builtin (enum insn_code icode, tree exp, rtx target)
8074 rtx pat, addr;
8075 tree arg0 = CALL_EXPR_ARG (exp, 0);
8076 tree arg1 = CALL_EXPR_ARG (exp, 1);
8077 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8078 enum machine_mode mode0 = Pmode;
8079 enum machine_mode mode1 = Pmode;
8080 rtx op0 = expand_normal (arg0);
8081 rtx op1 = expand_normal (arg1);
8083 if (icode == CODE_FOR_nothing)
8084 /* Builtin not supported on this processor. */
8085 return 0;
8087   /* If we got invalid arguments, bail out before generating bad rtl.  */
8088 if (arg0 == error_mark_node || arg1 == error_mark_node)
8089 return const0_rtx;
8091 if (target == 0
8092 || GET_MODE (target) != tmode
8093 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8094 target = gen_reg_rtx (tmode);
8096 op1 = copy_to_mode_reg (mode1, op1);
8098 if (op0 == const0_rtx)
8100 addr = gen_rtx_MEM (tmode, op1);
8102 else
8104 op0 = copy_to_mode_reg (mode0, op0);
8105 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
8108 pat = GEN_FCN (icode) (target, addr);
8110 if (! pat)
8111 return 0;
8112 emit_insn (pat);
8114 return target;
8117 static rtx
8118 altivec_expand_lv_builtin (enum insn_code icode, tree exp, rtx target, bool blk)
8120 rtx pat, addr;
8121 tree arg0 = CALL_EXPR_ARG (exp, 0);
8122 tree arg1 = CALL_EXPR_ARG (exp, 1);
8123 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8124 enum machine_mode mode0 = Pmode;
8125 enum machine_mode mode1 = Pmode;
8126 rtx op0 = expand_normal (arg0);
8127 rtx op1 = expand_normal (arg1);
8129 if (icode == CODE_FOR_nothing)
8130 /* Builtin not supported on this processor. */
8131 return 0;
8133   /* If we got invalid arguments, bail out before generating bad rtl.  */
8134 if (arg0 == error_mark_node || arg1 == error_mark_node)
8135 return const0_rtx;
8137 if (target == 0
8138 || GET_MODE (target) != tmode
8139 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8140 target = gen_reg_rtx (tmode);
8142 op1 = copy_to_mode_reg (mode1, op1);
8144 if (op0 == const0_rtx)
8146 addr = gen_rtx_MEM (blk ? BLKmode : tmode, op1);
8148 else
8150 op0 = copy_to_mode_reg (mode0, op0);
8151 addr = gen_rtx_MEM (blk ? BLKmode : tmode, gen_rtx_PLUS (Pmode, op0, op1));
8154 pat = GEN_FCN (icode) (target, addr);
8156 if (! pat)
8157 return 0;
8158 emit_insn (pat);
8160 return target;
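/* Note on the BLK parameter handled above: the lvlx/lvrx callers below
   pass blk=true so the MEM gets BLKmode.  Presumably this is because
   those instructions read a partial, boundary-dependent number of
   bytes, and a tmode MEM would let the optimizers assume a full
   16-byte access; the rationale is inferred, not stated here.  */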
8163 static rtx
8164 spe_expand_stv_builtin (enum insn_code icode, tree exp)
8166 tree arg0 = CALL_EXPR_ARG (exp, 0);
8167 tree arg1 = CALL_EXPR_ARG (exp, 1);
8168 tree arg2 = CALL_EXPR_ARG (exp, 2);
8169 rtx op0 = expand_normal (arg0);
8170 rtx op1 = expand_normal (arg1);
8171 rtx op2 = expand_normal (arg2);
8172 rtx pat;
8173 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
8174 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
8175 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
8177   /* Invalid arguments; bail out before generating bad rtl.  */
8178 if (arg0 == error_mark_node
8179 || arg1 == error_mark_node
8180 || arg2 == error_mark_node)
8181 return const0_rtx;
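/* Note, added for clarity: the builtin's arguments arrive as
   (value, base, offset) while the store pattern takes its operands in
   the order (base, offset, value), so op0 is validated against insn
   operand 2 and passed last below.  */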
8183 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
8184 op0 = copy_to_mode_reg (mode2, op0);
8185 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
8186 op1 = copy_to_mode_reg (mode0, op1);
8187 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
8188 op2 = copy_to_mode_reg (mode1, op2);
8190 pat = GEN_FCN (icode) (op1, op2, op0);
8191 if (pat)
8192 emit_insn (pat);
8193 return NULL_RTX;
8196 static rtx
8197 paired_expand_stv_builtin (enum insn_code icode, tree exp)
8199 tree arg0 = CALL_EXPR_ARG (exp, 0);
8200 tree arg1 = CALL_EXPR_ARG (exp, 1);
8201 tree arg2 = CALL_EXPR_ARG (exp, 2);
8202 rtx op0 = expand_normal (arg0);
8203 rtx op1 = expand_normal (arg1);
8204 rtx op2 = expand_normal (arg2);
8205 rtx pat, addr;
8206 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8207 enum machine_mode mode1 = Pmode;
8208 enum machine_mode mode2 = Pmode;
8210   /* Invalid arguments; bail out before generating bad rtl.  */
8211 if (arg0 == error_mark_node
8212 || arg1 == error_mark_node
8213 || arg2 == error_mark_node)
8214 return const0_rtx;
8216 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
8217 op0 = copy_to_mode_reg (tmode, op0);
8219 op2 = copy_to_mode_reg (mode2, op2);
8221 if (op1 == const0_rtx)
8223 addr = gen_rtx_MEM (tmode, op2);
8225 else
8227 op1 = copy_to_mode_reg (mode1, op1);
8228 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
8231 pat = GEN_FCN (icode) (addr, op0);
8232 if (pat)
8233 emit_insn (pat);
8234 return NULL_RTX;
8237 static rtx
8238 altivec_expand_stv_builtin (enum insn_code icode, tree exp)
8240 tree arg0 = CALL_EXPR_ARG (exp, 0);
8241 tree arg1 = CALL_EXPR_ARG (exp, 1);
8242 tree arg2 = CALL_EXPR_ARG (exp, 2);
8243 rtx op0 = expand_normal (arg0);
8244 rtx op1 = expand_normal (arg1);
8245 rtx op2 = expand_normal (arg2);
8246 rtx pat, addr;
8247 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8248 enum machine_mode mode1 = Pmode;
8249 enum machine_mode mode2 = Pmode;
8251   /* Invalid arguments; bail out before generating bad rtl.  */
8252 if (arg0 == error_mark_node
8253 || arg1 == error_mark_node
8254 || arg2 == error_mark_node)
8255 return const0_rtx;
8257 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
8258 op0 = copy_to_mode_reg (tmode, op0);
8260 op2 = copy_to_mode_reg (mode2, op2);
8262 if (op1 == const0_rtx)
8264 addr = gen_rtx_MEM (tmode, op2);
8266 else
8268 op1 = copy_to_mode_reg (mode1, op1);
8269 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
8272 pat = GEN_FCN (icode) (addr, op0);
8273 if (pat)
8274 emit_insn (pat);
8275 return NULL_RTX;
8278 static rtx
8279 rs6000_expand_ternop_builtin (enum insn_code icode, tree exp, rtx target)
8281 rtx pat;
8282 tree arg0 = CALL_EXPR_ARG (exp, 0);
8283 tree arg1 = CALL_EXPR_ARG (exp, 1);
8284 tree arg2 = CALL_EXPR_ARG (exp, 2);
8285 rtx op0 = expand_normal (arg0);
8286 rtx op1 = expand_normal (arg1);
8287 rtx op2 = expand_normal (arg2);
8288 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8289 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
8290 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
8291 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
8293 if (icode == CODE_FOR_nothing)
8294 /* Builtin not supported on this processor. */
8295 return 0;
8297 /* If we got invalid arguments bail out before generating bad rtl. */
8298 if (arg0 == error_mark_node
8299 || arg1 == error_mark_node
8300 || arg2 == error_mark_node)
8301 return const0_rtx;
8303 if (icode == CODE_FOR_altivec_vsldoi_v4sf
8304 || icode == CODE_FOR_altivec_vsldoi_v4si
8305 || icode == CODE_FOR_altivec_vsldoi_v8hi
8306 || icode == CODE_FOR_altivec_vsldoi_v16qi)
8308 /* Only allow 4-bit unsigned literals. */
8309 STRIP_NOPS (arg2);
8310 if (TREE_CODE (arg2) != INTEGER_CST
8311 || TREE_INT_CST_LOW (arg2) & ~0xf)
8313 error ("argument 3 must be a 4-bit unsigned literal");
8314 return const0_rtx;
8318 if (target == 0
8319 || GET_MODE (target) != tmode
8320 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8321 target = gen_reg_rtx (tmode);
8323 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8324 op0 = copy_to_mode_reg (mode0, op0);
8325 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
8326 op1 = copy_to_mode_reg (mode1, op1);
8327 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
8328 op2 = copy_to_mode_reg (mode2, op2);
8330 if (TARGET_PAIRED_FLOAT && icode == CODE_FOR_selv2sf4)
8331 pat = GEN_FCN (icode) (target, op0, op1, op2, CONST0_RTX (SFmode));
8332 else
8333 pat = GEN_FCN (icode) (target, op0, op1, op2);
8334 if (! pat)
8335 return 0;
8336 emit_insn (pat);
8338 return target;
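/* Illustrative sketch, not compiler code: assuming vec_sld from
   altivec.h reaches the vsldoi patterns above,

     vector int r = vec_sld (a, b, 3);    shift 3 is in 0..15, accepted
     vector int s = vec_sld (a, b, 16);   rejected with
                     "argument 3 must be a 4-bit unsigned literal"  */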
8341 /* Expand the lvx builtins. */
8342 static rtx
8343 altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
8345 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
8346 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8347 tree arg0;
8348 enum machine_mode tmode, mode0;
8349 rtx pat, op0;
8350 enum insn_code icode;
8352 switch (fcode)
8354 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
8355 icode = CODE_FOR_altivec_lvx_v16qi;
8356 break;
8357 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
8358 icode = CODE_FOR_altivec_lvx_v8hi;
8359 break;
8360 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
8361 icode = CODE_FOR_altivec_lvx_v4si;
8362 break;
8363 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
8364 icode = CODE_FOR_altivec_lvx_v4sf;
8365 break;
8366 default:
8367 *expandedp = false;
8368 return NULL_RTX;
8371 *expandedp = true;
8373 arg0 = CALL_EXPR_ARG (exp, 0);
8374 op0 = expand_normal (arg0);
8375 tmode = insn_data[icode].operand[0].mode;
8376 mode0 = insn_data[icode].operand[1].mode;
8378 if (target == 0
8379 || GET_MODE (target) != tmode
8380 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8381 target = gen_reg_rtx (tmode);
8383 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8384 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
8386 pat = GEN_FCN (icode) (target, op0);
8387 if (! pat)
8388 return 0;
8389 emit_insn (pat);
8390 return target;
8393 /* Expand the stvx builtins. */
8394 static rtx
8395 altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
8396 bool *expandedp)
8398 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
8399 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8400 tree arg0, arg1;
8401 enum machine_mode mode0, mode1;
8402 rtx pat, op0, op1;
8403 enum insn_code icode;
8405 switch (fcode)
8407 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
8408 icode = CODE_FOR_altivec_stvx_v16qi;
8409 break;
8410 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
8411 icode = CODE_FOR_altivec_stvx_v8hi;
8412 break;
8413 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
8414 icode = CODE_FOR_altivec_stvx_v4si;
8415 break;
8416 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
8417 icode = CODE_FOR_altivec_stvx_v4sf;
8418 break;
8419 default:
8420 *expandedp = false;
8421 return NULL_RTX;
8424 arg0 = CALL_EXPR_ARG (exp, 0);
8425 arg1 = CALL_EXPR_ARG (exp, 1);
8426 op0 = expand_normal (arg0);
8427 op1 = expand_normal (arg1);
8428 mode0 = insn_data[icode].operand[0].mode;
8429 mode1 = insn_data[icode].operand[1].mode;
8431 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8432 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
8433 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
8434 op1 = copy_to_mode_reg (mode1, op1);
8436 pat = GEN_FCN (icode) (op0, op1);
8437 if (pat)
8438 emit_insn (pat);
8440 *expandedp = true;
8441 return NULL_RTX;
8444 /* Expand the dst builtins. */
8445 static rtx
8446 altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
8447 bool *expandedp)
8449 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
8450 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8451 tree arg0, arg1, arg2;
8452 enum machine_mode mode0, mode1, mode2;
8453 rtx pat, op0, op1, op2;
8454 const struct builtin_description *d;
8455 size_t i;
8457 *expandedp = false;
8459 /* Handle DST variants. */
8460 d = bdesc_dst;
8461 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
8462 if (d->code == fcode)
8464 arg0 = CALL_EXPR_ARG (exp, 0);
8465 arg1 = CALL_EXPR_ARG (exp, 1);
8466 arg2 = CALL_EXPR_ARG (exp, 2);
8467 op0 = expand_normal (arg0);
8468 op1 = expand_normal (arg1);
8469 op2 = expand_normal (arg2);
8470 mode0 = insn_data[d->icode].operand[0].mode;
8471 mode1 = insn_data[d->icode].operand[1].mode;
8472 mode2 = insn_data[d->icode].operand[2].mode;
8474 	/* Invalid arguments; bail out before generating bad rtl.  */
8475 if (arg0 == error_mark_node
8476 || arg1 == error_mark_node
8477 || arg2 == error_mark_node)
8478 return const0_rtx;
8480 *expandedp = true;
8481 STRIP_NOPS (arg2);
8482 if (TREE_CODE (arg2) != INTEGER_CST
8483 || TREE_INT_CST_LOW (arg2) & ~0x3)
8485 error ("argument to %qs must be a 2-bit unsigned literal", d->name);
8486 return const0_rtx;
8489 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
8490 op0 = copy_to_mode_reg (Pmode, op0);
8491 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
8492 op1 = copy_to_mode_reg (mode1, op1);
8494 pat = GEN_FCN (d->icode) (op0, op1, op2);
8495 if (pat != 0)
8496 emit_insn (pat);
8498 return NULL_RTX;
8501 return NULL_RTX;
8504 /* Expand vec_init builtin. */
8505 static rtx
8506 altivec_expand_vec_init_builtin (tree type, tree exp, rtx target)
8508 enum machine_mode tmode = TYPE_MODE (type);
8509 enum machine_mode inner_mode = GET_MODE_INNER (tmode);
8510 int i, n_elt = GET_MODE_NUNITS (tmode);
8511 rtvec v = rtvec_alloc (n_elt);
8513 gcc_assert (VECTOR_MODE_P (tmode));
8514 gcc_assert (n_elt == call_expr_nargs (exp));
8516 for (i = 0; i < n_elt; ++i)
8518 rtx x = expand_normal (CALL_EXPR_ARG (exp, i));
8519 RTVEC_ELT (v, i) = gen_lowpart (inner_mode, x);
8522 if (!target || !register_operand (target, tmode))
8523 target = gen_reg_rtx (tmode);
8525 rs6000_expand_vector_init (target, gen_rtx_PARALLEL (tmode, v));
8526 return target;
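/* Illustrative call shape (assuming the __builtin_vec_init_* spelling
   under which these builtins are registered); note one scalar argument
   per lane, matching the n_elt assertion above:

     vector int v = __builtin_vec_init_v4si (a, b, c, d);  */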
8529 /* Return the integer constant in ARG. Constrain it to be in the range
8530 of the subparts of VEC_TYPE; issue an error if not. */
8532 static int
8533 get_element_number (tree vec_type, tree arg)
8535 unsigned HOST_WIDE_INT elt, max = TYPE_VECTOR_SUBPARTS (vec_type) - 1;
8537 if (!host_integerp (arg, 1)
8538 || (elt = tree_low_cst (arg, 1), elt > max))
8540 error ("selector must be an integer constant in the range 0..%wi", max);
8541 return 0;
8544 return elt;
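/* For example, for a V4SI vector TYPE_VECTOR_SUBPARTS is 4, so MAX is 3
   and a selector of 5 draws "selector must be an integer constant in
   the range 0..3", with 0 returned as a safe fallback.  */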
8547 /* Expand vec_set builtin. */
8548 static rtx
8549 altivec_expand_vec_set_builtin (tree exp)
8551 enum machine_mode tmode, mode1;
8552 tree arg0, arg1, arg2;
8553 int elt;
8554 rtx op0, op1;
8556 arg0 = CALL_EXPR_ARG (exp, 0);
8557 arg1 = CALL_EXPR_ARG (exp, 1);
8558 arg2 = CALL_EXPR_ARG (exp, 2);
8560 tmode = TYPE_MODE (TREE_TYPE (arg0));
8561 mode1 = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
8562 gcc_assert (VECTOR_MODE_P (tmode));
8564 op0 = expand_expr (arg0, NULL_RTX, tmode, 0);
8565 op1 = expand_expr (arg1, NULL_RTX, mode1, 0);
8566 elt = get_element_number (TREE_TYPE (arg0), arg2);
8568 if (GET_MODE (op1) != mode1 && GET_MODE (op1) != VOIDmode)
8569 op1 = convert_modes (mode1, GET_MODE (op1), op1, true);
8571 op0 = force_reg (tmode, op0);
8572 op1 = force_reg (mode1, op1);
8574 rs6000_expand_vector_set (op0, op1, elt);
8576 return op0;
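/* Illustrative call shape (assuming the __builtin_vec_set_* spelling
   under which these builtins are registered):
   v = __builtin_vec_set_v4si (v, x, 3) stores scalar X into lane 3 and
   yields the updated vector.  */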
8579 /* Expand vec_ext builtin. */
8580 static rtx
8581 altivec_expand_vec_ext_builtin (tree exp, rtx target)
8583 enum machine_mode tmode, mode0;
8584 tree arg0, arg1;
8585 int elt;
8586 rtx op0;
8588 arg0 = CALL_EXPR_ARG (exp, 0);
8589 arg1 = CALL_EXPR_ARG (exp, 1);
8591 op0 = expand_normal (arg0);
8592 elt = get_element_number (TREE_TYPE (arg0), arg1);
8594 tmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0)));
8595 mode0 = TYPE_MODE (TREE_TYPE (arg0));
8596 gcc_assert (VECTOR_MODE_P (mode0));
8598 op0 = force_reg (mode0, op0);
8600 if (optimize || !target || !register_operand (target, tmode))
8601 target = gen_reg_rtx (tmode);
8603 rs6000_expand_vector_extract (target, op0, elt);
8605 return target;
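/* Illustrative call shape (assuming the __builtin_vec_ext_* spelling
   under which these builtins are registered):
   int x = __builtin_vec_ext_v4si (v, 2) extracts lane 2; the lane
   number is validated by get_element_number above.  */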
8608 /* Expand the builtin in EXP and store the result in TARGET. Store
8609 true in *EXPANDEDP if we found a builtin to expand. */
8610 static rtx
8611 altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
8613 const struct builtin_description *d;
8614 const struct builtin_description_predicates *dp;
8615 size_t i;
8616 enum insn_code icode;
8617 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
8618 tree arg0;
8619 rtx op0, pat;
8620 enum machine_mode tmode, mode0;
8621 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8623 if (fcode >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8624 && fcode <= ALTIVEC_BUILTIN_OVERLOADED_LAST)
8626 *expandedp = true;
8627 error ("unresolved overload for Altivec builtin %qF", fndecl);
8628 return const0_rtx;
8631 target = altivec_expand_ld_builtin (exp, target, expandedp);
8632 if (*expandedp)
8633 return target;
8635 target = altivec_expand_st_builtin (exp, target, expandedp);
8636 if (*expandedp)
8637 return target;
8639 target = altivec_expand_dst_builtin (exp, target, expandedp);
8640 if (*expandedp)
8641 return target;
8643 *expandedp = true;
8645 switch (fcode)
8647 case ALTIVEC_BUILTIN_STVX:
8648 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, exp);
8649 case ALTIVEC_BUILTIN_STVEBX:
8650 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, exp);
8651 case ALTIVEC_BUILTIN_STVEHX:
8652 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, exp);
8653 case ALTIVEC_BUILTIN_STVEWX:
8654 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, exp);
8655 case ALTIVEC_BUILTIN_STVXL:
8656 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, exp);
8658 case ALTIVEC_BUILTIN_STVLX:
8659 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvlx, exp);
8660 case ALTIVEC_BUILTIN_STVLXL:
8661 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvlxl, exp);
8662 case ALTIVEC_BUILTIN_STVRX:
8663 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvrx, exp);
8664 case ALTIVEC_BUILTIN_STVRXL:
8665 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvrxl, exp);
8667 case ALTIVEC_BUILTIN_MFVSCR:
8668 icode = CODE_FOR_altivec_mfvscr;
8669 tmode = insn_data[icode].operand[0].mode;
8671 if (target == 0
8672 || GET_MODE (target) != tmode
8673 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8674 target = gen_reg_rtx (tmode);
8676 pat = GEN_FCN (icode) (target);
8677 if (! pat)
8678 return 0;
8679 emit_insn (pat);
8680 return target;
8682 case ALTIVEC_BUILTIN_MTVSCR:
8683 icode = CODE_FOR_altivec_mtvscr;
8684 arg0 = CALL_EXPR_ARG (exp, 0);
8685 op0 = expand_normal (arg0);
8686 mode0 = insn_data[icode].operand[0].mode;
8688       /* If we got invalid arguments, bail out before generating bad rtl.  */
8689 if (arg0 == error_mark_node)
8690 return const0_rtx;
8692 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8693 op0 = copy_to_mode_reg (mode0, op0);
8695 pat = GEN_FCN (icode) (op0);
8696 if (pat)
8697 emit_insn (pat);
8698 return NULL_RTX;
8700 case ALTIVEC_BUILTIN_DSSALL:
8701 emit_insn (gen_altivec_dssall ());
8702 return NULL_RTX;
8704 case ALTIVEC_BUILTIN_DSS:
8705 icode = CODE_FOR_altivec_dss;
8706 arg0 = CALL_EXPR_ARG (exp, 0);
8707 STRIP_NOPS (arg0);
8708 op0 = expand_normal (arg0);
8709 mode0 = insn_data[icode].operand[0].mode;
8711       /* If we got invalid arguments, bail out before generating bad rtl.  */
8712 if (arg0 == error_mark_node)
8713 return const0_rtx;
8715 if (TREE_CODE (arg0) != INTEGER_CST
8716 || TREE_INT_CST_LOW (arg0) & ~0x3)
8718 error ("argument to dss must be a 2-bit unsigned literal");
8719 return const0_rtx;
8722 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8723 op0 = copy_to_mode_reg (mode0, op0);
8725 emit_insn (gen_altivec_dss (op0));
8726 return NULL_RTX;
8728 case ALTIVEC_BUILTIN_VEC_INIT_V4SI:
8729 case ALTIVEC_BUILTIN_VEC_INIT_V8HI:
8730 case ALTIVEC_BUILTIN_VEC_INIT_V16QI:
8731 case ALTIVEC_BUILTIN_VEC_INIT_V4SF:
8732 return altivec_expand_vec_init_builtin (TREE_TYPE (exp), exp, target);
8734 case ALTIVEC_BUILTIN_VEC_SET_V4SI:
8735 case ALTIVEC_BUILTIN_VEC_SET_V8HI:
8736 case ALTIVEC_BUILTIN_VEC_SET_V16QI:
8737 case ALTIVEC_BUILTIN_VEC_SET_V4SF:
8738 return altivec_expand_vec_set_builtin (exp);
8740 case ALTIVEC_BUILTIN_VEC_EXT_V4SI:
8741 case ALTIVEC_BUILTIN_VEC_EXT_V8HI:
8742 case ALTIVEC_BUILTIN_VEC_EXT_V16QI:
8743 case ALTIVEC_BUILTIN_VEC_EXT_V4SF:
8744 return altivec_expand_vec_ext_builtin (exp, target);
8746 default:
8747 break;
8748       /* Fall through to the table-driven expansion below.  */
8751 /* Expand abs* operations. */
8752 d = bdesc_abs;
8753 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
8754 if (d->code == fcode)
8755 return altivec_expand_abs_builtin (d->icode, exp, target);
8757 /* Expand the AltiVec predicates. */
8758 dp = bdesc_altivec_preds;
8759 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
8760 if (dp->code == fcode)
8761 return altivec_expand_predicate_builtin (dp->icode, dp->opcode,
8762 exp, target);
8764   /* LV* are funky; we initialized them differently, so expand them by hand here.  */
8765 switch (fcode)
8767 case ALTIVEC_BUILTIN_LVSL:
8768 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
8769 exp, target, false);
8770 case ALTIVEC_BUILTIN_LVSR:
8771 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
8772 exp, target, false);
8773 case ALTIVEC_BUILTIN_LVEBX:
8774 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
8775 exp, target, false);
8776 case ALTIVEC_BUILTIN_LVEHX:
8777 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
8778 exp, target, false);
8779 case ALTIVEC_BUILTIN_LVEWX:
8780 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
8781 exp, target, false);
8782 case ALTIVEC_BUILTIN_LVXL:
8783 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
8784 exp, target, false);
8785 case ALTIVEC_BUILTIN_LVX:
8786 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
8787 exp, target, false);
8788 case ALTIVEC_BUILTIN_LVLX:
8789 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvlx,
8790 exp, target, true);
8791 case ALTIVEC_BUILTIN_LVLXL:
8792 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvlxl,
8793 exp, target, true);
8794 case ALTIVEC_BUILTIN_LVRX:
8795 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvrx,
8796 exp, target, true);
8797 case ALTIVEC_BUILTIN_LVRXL:
8798 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvrxl,
8799 exp, target, true);
8800 default:
8801 break;
8802       /* Fall through; nothing matched, so report the builtin as not expanded.  */
8805 *expandedp = false;
8806 return NULL_RTX;
8809 /* Expand the builtin in EXP and store the result in TARGET. Store
8810 true in *EXPANDEDP if we found a builtin to expand. */
8811 static rtx
8812 paired_expand_builtin (tree exp, rtx target, bool * expandedp)
8814 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
8815 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8816 const struct builtin_description *d;
8817 size_t i;
8819 *expandedp = true;
8821 switch (fcode)
8823 case PAIRED_BUILTIN_STX:
8824 return paired_expand_stv_builtin (CODE_FOR_paired_stx, exp);
8825 case PAIRED_BUILTIN_LX:
8826 return paired_expand_lv_builtin (CODE_FOR_paired_lx, exp, target);
8827 default:
8828 break;
8829       /* Fall through to the paired predicate table below.  */
8832 /* Expand the paired predicates. */
8833 d = bdesc_paired_preds;
8834 for (i = 0; i < ARRAY_SIZE (bdesc_paired_preds); i++, d++)
8835 if (d->code == fcode)
8836 return paired_expand_predicate_builtin (d->icode, exp, target);
8838 *expandedp = false;
8839 return NULL_RTX;
8842 /* Binops that need to be initialized manually, but can be expanded
8843 automagically by rs6000_expand_binop_builtin. */
8844 static struct builtin_description bdesc_2arg_spe[] =
8846 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
8847 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
8848 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
8849 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
8850 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
8851 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
8852 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
8853 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
8854 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
8855 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
8856 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
8857 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
8858 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
8859 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
8860 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
8861 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
8862 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
8863 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
8864 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
8865 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
8866 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
8867 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
8870 /* Expand the builtin in EXP and store the result in TARGET. Store
8871 true in *EXPANDEDP if we found a builtin to expand.
8873 This expands the SPE builtins that are not simple unary and binary
8874 operations. */
8875 static rtx
8876 spe_expand_builtin (tree exp, rtx target, bool *expandedp)
8878 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
8879 tree arg1, arg0;
8880 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8881 enum insn_code icode;
8882 enum machine_mode tmode, mode0;
8883 rtx pat, op0;
8884 struct builtin_description *d;
8885 size_t i;
8887 *expandedp = true;
8889 /* Syntax check for a 5-bit unsigned immediate. */
8890 switch (fcode)
8892 case SPE_BUILTIN_EVSTDD:
8893 case SPE_BUILTIN_EVSTDH:
8894 case SPE_BUILTIN_EVSTDW:
8895 case SPE_BUILTIN_EVSTWHE:
8896 case SPE_BUILTIN_EVSTWHO:
8897 case SPE_BUILTIN_EVSTWWE:
8898 case SPE_BUILTIN_EVSTWWO:
8899 arg1 = CALL_EXPR_ARG (exp, 2);
8900 if (TREE_CODE (arg1) != INTEGER_CST
8901 || TREE_INT_CST_LOW (arg1) & ~0x1f)
8903 error ("argument 2 must be a 5-bit unsigned literal");
8904 return const0_rtx;
8906 break;
8907 default:
8908 break;
8911 /* The evsplat*i instructions are not quite generic. */
8912 switch (fcode)
8914 case SPE_BUILTIN_EVSPLATFI:
8915 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
8916 exp, target);
8917 case SPE_BUILTIN_EVSPLATI:
8918 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
8919 exp, target);
8920 default:
8921 break;
8924 d = (struct builtin_description *) bdesc_2arg_spe;
8925 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
8926 if (d->code == fcode)
8927 return rs6000_expand_binop_builtin (d->icode, exp, target);
8929 d = (struct builtin_description *) bdesc_spe_predicates;
8930 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
8931 if (d->code == fcode)
8932 return spe_expand_predicate_builtin (d->icode, exp, target);
8934 d = (struct builtin_description *) bdesc_spe_evsel;
8935 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
8936 if (d->code == fcode)
8937 return spe_expand_evsel_builtin (d->icode, exp, target);
8939 switch (fcode)
8941 case SPE_BUILTIN_EVSTDDX:
8942 return spe_expand_stv_builtin (CODE_FOR_spe_evstddx, exp);
8943 case SPE_BUILTIN_EVSTDHX:
8944 return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx, exp);
8945 case SPE_BUILTIN_EVSTDWX:
8946 return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx, exp);
8947 case SPE_BUILTIN_EVSTWHEX:
8948 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex, exp);
8949 case SPE_BUILTIN_EVSTWHOX:
8950 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox, exp);
8951 case SPE_BUILTIN_EVSTWWEX:
8952 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex, exp);
8953 case SPE_BUILTIN_EVSTWWOX:
8954 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox, exp);
8955 case SPE_BUILTIN_EVSTDD:
8956 return spe_expand_stv_builtin (CODE_FOR_spe_evstdd, exp);
8957 case SPE_BUILTIN_EVSTDH:
8958 return spe_expand_stv_builtin (CODE_FOR_spe_evstdh, exp);
8959 case SPE_BUILTIN_EVSTDW:
8960 return spe_expand_stv_builtin (CODE_FOR_spe_evstdw, exp);
8961 case SPE_BUILTIN_EVSTWHE:
8962 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe, exp);
8963 case SPE_BUILTIN_EVSTWHO:
8964 return spe_expand_stv_builtin (CODE_FOR_spe_evstwho, exp);
8965 case SPE_BUILTIN_EVSTWWE:
8966 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe, exp);
8967 case SPE_BUILTIN_EVSTWWO:
8968 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo, exp);
8969 case SPE_BUILTIN_MFSPEFSCR:
8970 icode = CODE_FOR_spe_mfspefscr;
8971 tmode = insn_data[icode].operand[0].mode;
8973 if (target == 0
8974 || GET_MODE (target) != tmode
8975 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8976 target = gen_reg_rtx (tmode);
8978 pat = GEN_FCN (icode) (target);
8979 if (! pat)
8980 return 0;
8981 emit_insn (pat);
8982 return target;
8983 case SPE_BUILTIN_MTSPEFSCR:
8984 icode = CODE_FOR_spe_mtspefscr;
8985 arg0 = CALL_EXPR_ARG (exp, 0);
8986 op0 = expand_normal (arg0);
8987 mode0 = insn_data[icode].operand[0].mode;
8989 if (arg0 == error_mark_node)
8990 return const0_rtx;
8992 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
8993 op0 = copy_to_mode_reg (mode0, op0);
8995 pat = GEN_FCN (icode) (op0);
8996 if (pat)
8997 emit_insn (pat);
8998 return NULL_RTX;
8999 default:
9000 break;
9003 *expandedp = false;
9004 return NULL_RTX;
9007 static rtx
9008 paired_expand_predicate_builtin (enum insn_code icode, tree exp, rtx target)
9010 rtx pat, scratch, tmp;
9011 tree form = CALL_EXPR_ARG (exp, 0);
9012 tree arg0 = CALL_EXPR_ARG (exp, 1);
9013 tree arg1 = CALL_EXPR_ARG (exp, 2);
9014 rtx op0 = expand_normal (arg0);
9015 rtx op1 = expand_normal (arg1);
9016 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
9017 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
9018 int form_int;
9019 enum rtx_code code;
9021 if (TREE_CODE (form) != INTEGER_CST)
9023 error ("argument 1 of __builtin_paired_predicate must be a constant");
9024 return const0_rtx;
9026 else
9027 form_int = TREE_INT_CST_LOW (form);
9029 gcc_assert (mode0 == mode1);
9031 if (arg0 == error_mark_node || arg1 == error_mark_node)
9032 return const0_rtx;
9034 if (target == 0
9035 || GET_MODE (target) != SImode
9036 || !(*insn_data[icode].operand[0].predicate) (target, SImode))
9037 target = gen_reg_rtx (SImode);
9038 if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
9039 op0 = copy_to_mode_reg (mode0, op0);
9040 if (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
9041 op1 = copy_to_mode_reg (mode1, op1);
9043 scratch = gen_reg_rtx (CCFPmode);
9045 pat = GEN_FCN (icode) (scratch, op0, op1);
9046 if (!pat)
9047 return const0_rtx;
9049 emit_insn (pat);
9051 switch (form_int)
9053 /* LT bit. */
9054 case 0:
9055 code = LT;
9056 break;
9057 /* GT bit. */
9058 case 1:
9059 code = GT;
9060 break;
9061 /* EQ bit. */
9062 case 2:
9063 code = EQ;
9064 break;
9065 /* UN bit. */
9066 case 3:
9067 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
9068 return target;
9069 default:
9070 error ("argument 1 of __builtin_paired_predicate is out of range");
9071 return const0_rtx;
9074 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
9075 emit_move_insn (target, tmp);
9076 return target;
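/* Sketch of the FORM encoding handled above, inferred from the switch:
   0 reads the LT bit, 1 the GT bit, 2 the EQ bit, and 3 the UN bit
   (via the CR ov-bit move pattern); the builtin is called as
   (form, a, b), matching the int_ftype_int_v2sf_v2sf signature built
   in paired_init_builtins below.  */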
9079 static rtx
9080 spe_expand_predicate_builtin (enum insn_code icode, tree exp, rtx target)
9082 rtx pat, scratch, tmp;
9083 tree form = CALL_EXPR_ARG (exp, 0);
9084 tree arg0 = CALL_EXPR_ARG (exp, 1);
9085 tree arg1 = CALL_EXPR_ARG (exp, 2);
9086 rtx op0 = expand_normal (arg0);
9087 rtx op1 = expand_normal (arg1);
9088 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
9089 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
9090 int form_int;
9091 enum rtx_code code;
9093 if (TREE_CODE (form) != INTEGER_CST)
9095 error ("argument 1 of __builtin_spe_predicate must be a constant");
9096 return const0_rtx;
9098 else
9099 form_int = TREE_INT_CST_LOW (form);
9101 gcc_assert (mode0 == mode1);
9103 if (arg0 == error_mark_node || arg1 == error_mark_node)
9104 return const0_rtx;
9106 if (target == 0
9107 || GET_MODE (target) != SImode
9108 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
9109 target = gen_reg_rtx (SImode);
9111 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
9112 op0 = copy_to_mode_reg (mode0, op0);
9113 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
9114 op1 = copy_to_mode_reg (mode1, op1);
9116 scratch = gen_reg_rtx (CCmode);
9118 pat = GEN_FCN (icode) (scratch, op0, op1);
9119 if (! pat)
9120 return const0_rtx;
9121 emit_insn (pat);
9123 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
9124 _lower_. We use one compare, but look in different bits of the
9125 CR for each variant.
9127 There are 2 elements in each SPE simd type (upper/lower). The CR
9128 bits are set as follows:
9130 BIT0 | BIT 1 | BIT 2 | BIT 3
9131 U | L | (U | L) | (U & L)
9133 So, for an "all" relationship, BIT 3 would be set.
9134 For an "any" relationship, BIT 2 would be set. Etc.
9136 Following traditional nomenclature, these bits map to:
9138 BIT0 | BIT 1 | BIT 2 | BIT 3
9139 LT | GT | EQ | OV
9141      Later, we will generate rtl to look in the OV, EQ, LT or GT bit,
     as selected by FORM_INT.  */
9144 switch (form_int)
9146 /* All variant. OV bit. */
9147 case 0:
9148 /* We need to get to the OV bit, which is the ORDERED bit. We
9149 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
9150 that's ugly and will make validate_condition_mode die.
9151 So let's just use another pattern. */
9152 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
9153 return target;
9154 /* Any variant. EQ bit. */
9155 case 1:
9156 code = EQ;
9157 break;
9158 /* Upper variant. LT bit. */
9159 case 2:
9160 code = LT;
9161 break;
9162 /* Lower variant. GT bit. */
9163 case 3:
9164 code = GT;
9165 break;
9166 default:
9167 error ("argument 1 of __builtin_spe_predicate is out of range");
9168 return const0_rtx;
9171 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
9172 emit_move_insn (target, tmp);
9174 return target;
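/* Worked example of the table above: an SPE "greater than" compare of
   {1, 5} against {2, 3} gives U = (1 > 2) = 0 and L = (5 > 3) = 1, so
   LT=0, GT=1, EQ=(U|L)=1, OV=(U&L)=0.  The "any" form (1) therefore
   returns 1, the "all" form (0) returns 0, "upper" (2) returns 0 and
   "lower" (3) returns 1.  */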
9177 /* The evsel builtins look like this:
9179 e = __builtin_spe_evsel_OP (a, b, c, d);
9181 and work like this:
9183 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
9184 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
9187 static rtx
9188 spe_expand_evsel_builtin (enum insn_code icode, tree exp, rtx target)
9190 rtx pat, scratch;
9191 tree arg0 = CALL_EXPR_ARG (exp, 0);
9192 tree arg1 = CALL_EXPR_ARG (exp, 1);
9193 tree arg2 = CALL_EXPR_ARG (exp, 2);
9194 tree arg3 = CALL_EXPR_ARG (exp, 3);
9195 rtx op0 = expand_normal (arg0);
9196 rtx op1 = expand_normal (arg1);
9197 rtx op2 = expand_normal (arg2);
9198 rtx op3 = expand_normal (arg3);
9199 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
9200 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
9202 gcc_assert (mode0 == mode1);
9204 if (arg0 == error_mark_node || arg1 == error_mark_node
9205 || arg2 == error_mark_node || arg3 == error_mark_node)
9206 return const0_rtx;
9208 if (target == 0
9209 || GET_MODE (target) != mode0
9210 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
9211 target = gen_reg_rtx (mode0);
9213 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
9214 op0 = copy_to_mode_reg (mode0, op0);
9215 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
9216 op1 = copy_to_mode_reg (mode0, op1);
9217 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
9218 op2 = copy_to_mode_reg (mode0, op2);
9219 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
9220 op3 = copy_to_mode_reg (mode0, op3);
9222 /* Generate the compare. */
9223 scratch = gen_reg_rtx (CCmode);
9224 pat = GEN_FCN (icode) (scratch, op0, op1);
9225 if (! pat)
9226 return const0_rtx;
9227 emit_insn (pat);
9229 if (mode0 == V2SImode)
9230 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
9231 else
9232 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
9234 return target;
9237 /* Expand an expression EXP that calls a built-in function,
9238 with result going to TARGET if that's convenient
9239 (and in mode MODE if that's convenient).
9240 SUBTARGET may be used as the target for computing one of EXP's operands.
9241 IGNORE is nonzero if the value is to be ignored. */
9243 static rtx
9244 rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
9245 enum machine_mode mode ATTRIBUTE_UNUSED,
9246 int ignore ATTRIBUTE_UNUSED)
9248 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
9249 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
9250 const struct builtin_description *d;
9251 size_t i;
9252 rtx ret;
9253 bool success;
9255 if (fcode == RS6000_BUILTIN_RECIP)
9256 return rs6000_expand_binop_builtin (CODE_FOR_recipdf3, exp, target);
9258 if (fcode == RS6000_BUILTIN_RECIPF)
9259 return rs6000_expand_binop_builtin (CODE_FOR_recipsf3, exp, target);
9261 if (fcode == RS6000_BUILTIN_RSQRTF)
9262 return rs6000_expand_unop_builtin (CODE_FOR_rsqrtsf2, exp, target);
9264 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_LOAD
9265 || fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
9267 int icode = (int) CODE_FOR_altivec_lvsr;
9268 enum machine_mode tmode = insn_data[icode].operand[0].mode;
9269 enum machine_mode mode = insn_data[icode].operand[1].mode;
9270 tree arg;
9271 rtx op, addr, pat;
9273 gcc_assert (TARGET_ALTIVEC);
9275 arg = CALL_EXPR_ARG (exp, 0);
9276 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == POINTER_TYPE);
9277 op = expand_expr (arg, NULL_RTX, Pmode, EXPAND_NORMAL);
9278 addr = memory_address (mode, op);
9279 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
9280 op = addr;
9281 else
9283 	  /* For the load case, we need to negate the address.  */
9284 op = gen_reg_rtx (GET_MODE (addr));
9285 emit_insn (gen_rtx_SET (VOIDmode, op,
9286 gen_rtx_NEG (GET_MODE (addr), addr)));
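/* Added note: negating the address here makes the lvsr below compute
   the permute control used to realign a misaligned load, whereas the
   store case uses the plain address.  This matches the usual lvsl/lvsr
   realignment scheme; the original comment does not spell it out.  */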
9288 op = gen_rtx_MEM (mode, op);
9290 if (target == 0
9291 || GET_MODE (target) != tmode
9292 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
9293 target = gen_reg_rtx (tmode);
9296 pat = GEN_FCN (icode) (target, op);
9297 if (!pat)
9298 return 0;
9299 emit_insn (pat);
9301 return target;
9304 /* FIXME: There's got to be a nicer way to handle this case than
9305 constructing a new CALL_EXPR. */
9306 if (fcode == ALTIVEC_BUILTIN_VCFUX
9307 || fcode == ALTIVEC_BUILTIN_VCFSX
9308 || fcode == ALTIVEC_BUILTIN_VCTUXS
9309 || fcode == ALTIVEC_BUILTIN_VCTSXS)
9311 if (call_expr_nargs (exp) == 1)
9312 exp = build_call_nary (TREE_TYPE (exp), CALL_EXPR_FN (exp),
9313 2, CALL_EXPR_ARG (exp, 0), integer_zero_node);
9316 if (TARGET_ALTIVEC)
9318 ret = altivec_expand_builtin (exp, target, &success);
9320 if (success)
9321 return ret;
9323 if (TARGET_SPE)
9325 ret = spe_expand_builtin (exp, target, &success);
9327 if (success)
9328 return ret;
9330 if (TARGET_PAIRED_FLOAT)
9332 ret = paired_expand_builtin (exp, target, &success);
9334 if (success)
9335 return ret;
9338 gcc_assert (TARGET_ALTIVEC || TARGET_SPE || TARGET_PAIRED_FLOAT);
9340 /* Handle simple unary operations. */
9341 d = (struct builtin_description *) bdesc_1arg;
9342 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
9343 if (d->code == fcode)
9344 return rs6000_expand_unop_builtin (d->icode, exp, target);
9346 /* Handle simple binary operations. */
9347 d = (struct builtin_description *) bdesc_2arg;
9348 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
9349 if (d->code == fcode)
9350 return rs6000_expand_binop_builtin (d->icode, exp, target);
9352 /* Handle simple ternary operations. */
9353 d = bdesc_3arg;
9354 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
9355 if (d->code == fcode)
9356 return rs6000_expand_ternop_builtin (d->icode, exp, target);
9358 gcc_unreachable ();
9361 static tree
9362 build_opaque_vector_type (tree node, int nunits)
9364 node = copy_node (node);
9365 TYPE_MAIN_VARIANT (node) = node;
9366 TYPE_CANONICAL (node) = node;
9367 return build_vector_type (node, nunits);
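/* Added note: giving the copied node its own TYPE_MAIN_VARIANT and
   TYPE_CANONICAL presumably keeps vector types built from it distinct
   from the ordinary vector types for type-identity purposes; this is
   an inference, as the original carries no comment here.  */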
9370 static void
9371 rs6000_init_builtins (void)
9373 V2SI_type_node = build_vector_type (intSI_type_node, 2);
9374 V2SF_type_node = build_vector_type (float_type_node, 2);
9375 V4HI_type_node = build_vector_type (intHI_type_node, 4);
9376 V4SI_type_node = build_vector_type (intSI_type_node, 4);
9377 V4SF_type_node = build_vector_type (float_type_node, 4);
9378 V8HI_type_node = build_vector_type (intHI_type_node, 8);
9379 V16QI_type_node = build_vector_type (intQI_type_node, 16);
9381 unsigned_V16QI_type_node = build_vector_type (unsigned_intQI_type_node, 16);
9382 unsigned_V8HI_type_node = build_vector_type (unsigned_intHI_type_node, 8);
9383 unsigned_V4SI_type_node = build_vector_type (unsigned_intSI_type_node, 4);
9385 opaque_V2SF_type_node = build_opaque_vector_type (float_type_node, 2);
9386 opaque_V2SI_type_node = build_opaque_vector_type (intSI_type_node, 2);
9387 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
9388 opaque_V4SI_type_node = copy_node (V4SI_type_node);
9390 /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
9391 types, especially in C++ land. Similarly, 'vector pixel' is distinct from
9392 'vector unsigned short'. */
9394 bool_char_type_node = build_distinct_type_copy (unsigned_intQI_type_node);
9395 bool_short_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
9396 bool_int_type_node = build_distinct_type_copy (unsigned_intSI_type_node);
9397 pixel_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
9399 long_integer_type_internal_node = long_integer_type_node;
9400 long_unsigned_type_internal_node = long_unsigned_type_node;
9401 intQI_type_internal_node = intQI_type_node;
9402 uintQI_type_internal_node = unsigned_intQI_type_node;
9403 intHI_type_internal_node = intHI_type_node;
9404 uintHI_type_internal_node = unsigned_intHI_type_node;
9405 intSI_type_internal_node = intSI_type_node;
9406 uintSI_type_internal_node = unsigned_intSI_type_node;
9407 float_type_internal_node = float_type_node;
9408 void_type_internal_node = void_type_node;
9410 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9411 get_identifier ("__bool char"),
9412 bool_char_type_node));
9413 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9414 get_identifier ("__bool short"),
9415 bool_short_type_node));
9416 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9417 get_identifier ("__bool int"),
9418 bool_int_type_node));
9419 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9420 get_identifier ("__pixel"),
9421 pixel_type_node));
9423 bool_V16QI_type_node = build_vector_type (bool_char_type_node, 16);
9424 bool_V8HI_type_node = build_vector_type (bool_short_type_node, 8);
9425 bool_V4SI_type_node = build_vector_type (bool_int_type_node, 4);
9426 pixel_V8HI_type_node = build_vector_type (pixel_type_node, 8);
9428 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9429 get_identifier ("__vector unsigned char"),
9430 unsigned_V16QI_type_node));
9431 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9432 get_identifier ("__vector signed char"),
9433 V16QI_type_node));
9434 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9435 get_identifier ("__vector __bool char"),
9436 bool_V16QI_type_node));
9438 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9439 get_identifier ("__vector unsigned short"),
9440 unsigned_V8HI_type_node));
9441 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9442 get_identifier ("__vector signed short"),
9443 V8HI_type_node));
9444 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9445 get_identifier ("__vector __bool short"),
9446 bool_V8HI_type_node));
9448 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9449 get_identifier ("__vector unsigned int"),
9450 unsigned_V4SI_type_node));
9451 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9452 get_identifier ("__vector signed int"),
9453 V4SI_type_node));
9454 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9455 get_identifier ("__vector __bool int"),
9456 bool_V4SI_type_node));
9458 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9459 get_identifier ("__vector float"),
9460 V4SF_type_node));
9461 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
9462 get_identifier ("__vector __pixel"),
9463 pixel_V8HI_type_node));
9465 if (TARGET_PAIRED_FLOAT)
9466 paired_init_builtins ();
9467 if (TARGET_SPE)
9468 spe_init_builtins ();
9469 if (TARGET_ALTIVEC)
9470 altivec_init_builtins ();
9471 if (TARGET_ALTIVEC || TARGET_SPE || TARGET_PAIRED_FLOAT)
9472 rs6000_common_init_builtins ();
9473 if (TARGET_PPC_GFXOPT)
9475 tree ftype = build_function_type_list (float_type_node,
9476 float_type_node,
9477 float_type_node,
9478 NULL_TREE);
9479 def_builtin (MASK_PPC_GFXOPT, "__builtin_recipdivf", ftype,
9480 RS6000_BUILTIN_RECIPF);
9482 ftype = build_function_type_list (float_type_node,
9483 float_type_node,
9484 NULL_TREE);
9485 def_builtin (MASK_PPC_GFXOPT, "__builtin_rsqrtf", ftype,
9486 RS6000_BUILTIN_RSQRTF);
9488 if (TARGET_POPCNTB)
9490 tree ftype = build_function_type_list (double_type_node,
9491 double_type_node,
9492 double_type_node,
9493 NULL_TREE);
9494 def_builtin (MASK_POPCNTB, "__builtin_recipdiv", ftype,
9495 RS6000_BUILTIN_RECIP);
9499 #if TARGET_XCOFF
9500 /* AIX libm provides clog as __clog. */
9501 if (built_in_decls [BUILT_IN_CLOG])
9502 set_user_assembler_name (built_in_decls [BUILT_IN_CLOG], "__clog");
9503 #endif
9505 #ifdef SUBTARGET_INIT_BUILTINS
9506 SUBTARGET_INIT_BUILTINS;
9507 #endif
9510 /* Search through a set of builtins and enable the mask bits.
9511 DESC is an array of builtins.
9512 SIZE is the total number of builtins.
9513 START is the builtin enum at which to start.
9514 END is the builtin enum at which to end. */
9515 static void
9516 enable_mask_for_builtins (struct builtin_description *desc, int size,
9517 enum rs6000_builtins start,
9518 enum rs6000_builtins end)
9520 int i;
9522 for (i = 0; i < size; ++i)
9523 if (desc[i].code == start)
9524 break;
9526 if (i == size)
9527 return;
9529 for (; i < size; ++i)
9531       /* Flip the bits on: make the mask match the current target_flags.  */
9532 desc[i].mask = target_flags;
9533 if (desc[i].code == end)
9534 break;
9538 static void
9539 spe_init_builtins (void)
9541 tree endlink = void_list_node;
9542 tree puint_type_node = build_pointer_type (unsigned_type_node);
9543 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
9544 struct builtin_description *d;
9545 size_t i;
9547 tree v2si_ftype_4_v2si
9548 = build_function_type
9549 (opaque_V2SI_type_node,
9550 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9551 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9552 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9553 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9554 endlink)))));
9556 tree v2sf_ftype_4_v2sf
9557 = build_function_type
9558 (opaque_V2SF_type_node,
9559 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9560 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9561 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9562 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9563 endlink)))));
9565 tree int_ftype_int_v2si_v2si
9566 = build_function_type
9567 (integer_type_node,
9568 tree_cons (NULL_TREE, integer_type_node,
9569 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9570 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9571 endlink))));
9573 tree int_ftype_int_v2sf_v2sf
9574 = build_function_type
9575 (integer_type_node,
9576 tree_cons (NULL_TREE, integer_type_node,
9577 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9578 tree_cons (NULL_TREE, opaque_V2SF_type_node,
9579 endlink))));
9581 tree void_ftype_v2si_puint_int
9582 = build_function_type (void_type_node,
9583 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9584 tree_cons (NULL_TREE, puint_type_node,
9585 tree_cons (NULL_TREE,
9586 integer_type_node,
9587 endlink))));
9589 tree void_ftype_v2si_puint_char
9590 = build_function_type (void_type_node,
9591 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9592 tree_cons (NULL_TREE, puint_type_node,
9593 tree_cons (NULL_TREE,
9594 char_type_node,
9595 endlink))));
9597 tree void_ftype_v2si_pv2si_int
9598 = build_function_type (void_type_node,
9599 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9600 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
9601 tree_cons (NULL_TREE,
9602 integer_type_node,
9603 endlink))));
9605 tree void_ftype_v2si_pv2si_char
9606 = build_function_type (void_type_node,
9607 tree_cons (NULL_TREE, opaque_V2SI_type_node,
9608 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
9609 tree_cons (NULL_TREE,
9610 char_type_node,
9611 endlink))));
9613 tree void_ftype_int
9614 = build_function_type (void_type_node,
9615 tree_cons (NULL_TREE, integer_type_node, endlink));
9617 tree int_ftype_void
9618 = build_function_type (integer_type_node, endlink);
9620 tree v2si_ftype_pv2si_int
9621 = build_function_type (opaque_V2SI_type_node,
9622 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
9623 tree_cons (NULL_TREE, integer_type_node,
9624 endlink)));
9626 tree v2si_ftype_puint_int
9627 = build_function_type (opaque_V2SI_type_node,
9628 tree_cons (NULL_TREE, puint_type_node,
9629 tree_cons (NULL_TREE, integer_type_node,
9630 endlink)));
9632 tree v2si_ftype_pushort_int
9633 = build_function_type (opaque_V2SI_type_node,
9634 tree_cons (NULL_TREE, pushort_type_node,
9635 tree_cons (NULL_TREE, integer_type_node,
9636 endlink)));
9638 tree v2si_ftype_signed_char
9639 = build_function_type (opaque_V2SI_type_node,
9640 tree_cons (NULL_TREE, signed_char_type_node,
9641 endlink));
9643 /* The initialization of the simple binary and unary builtins is
9644 done in rs6000_common_init_builtins, but we have to enable the
9645 mask bits here manually because we have run out of `target_flags'
9646 bits. We really need to redesign this mask business. */
9648 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
9649 ARRAY_SIZE (bdesc_2arg),
9650 SPE_BUILTIN_EVADDW,
9651 SPE_BUILTIN_EVXOR);
9652 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
9653 ARRAY_SIZE (bdesc_1arg),
9654 SPE_BUILTIN_EVABS,
9655 SPE_BUILTIN_EVSUBFUSIAAW);
9656 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
9657 ARRAY_SIZE (bdesc_spe_predicates),
9658 SPE_BUILTIN_EVCMPEQ,
9659 SPE_BUILTIN_EVFSTSTLT);
9660 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
9661 ARRAY_SIZE (bdesc_spe_evsel),
9662 SPE_BUILTIN_EVSEL_CMPGTS,
9663 SPE_BUILTIN_EVSEL_FSTSTEQ);
9665 (*lang_hooks.decls.pushdecl)
9666 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
9667 opaque_V2SI_type_node));
9669 /* Initialize irregular SPE builtins. */
9671 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
9672 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
9673 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
9674 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
9675 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
9676 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
9677 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
9678 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
9679 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
9680 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
9681 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
9682 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
9683 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
9684 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
9685 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
9686 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
9687 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
9688 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
9690 /* Loads. */
9691 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
9692 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
9693 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
9694 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
9695 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
9696 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
9697 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
9698 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
9699 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
9700 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
9701 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
9702 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
9703 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
9704 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
9705 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
9706 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
9707 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
9708 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
9709 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
9710 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
9711 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
9712 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
9714 /* Predicates. */
9715 d = (struct builtin_description *) bdesc_spe_predicates;
9716 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
9718 tree type;
9720 switch (insn_data[d->icode].operand[1].mode)
9722 case V2SImode:
9723 type = int_ftype_int_v2si_v2si;
9724 break;
9725 case V2SFmode:
9726 type = int_ftype_int_v2sf_v2sf;
9727 break;
9728 default:
9729 gcc_unreachable ();
9732 def_builtin (d->mask, d->name, type, d->code);
9735 /* Evsel predicates. */
9736 d = (struct builtin_description *) bdesc_spe_evsel;
9737 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
9739 tree type;
9741 switch (insn_data[d->icode].operand[1].mode)
9743 case V2SImode:
9744 type = v2si_ftype_4_v2si;
9745 break;
9746 case V2SFmode:
9747 type = v2sf_ftype_4_v2sf;
9748 break;
9749 default:
9750 gcc_unreachable ();
9753 def_builtin (d->mask, d->name, type, d->code);
9757 static void
9758 paired_init_builtins (void)
9760 const struct builtin_description *d;
9761 size_t i;
9762 tree endlink = void_list_node;
9764 tree int_ftype_int_v2sf_v2sf
9765 = build_function_type
9766 (integer_type_node,
9767 tree_cons (NULL_TREE, integer_type_node,
9768 tree_cons (NULL_TREE, V2SF_type_node,
9769 tree_cons (NULL_TREE, V2SF_type_node,
9770 endlink))));
9771 tree pcfloat_type_node =
9772 build_pointer_type (build_qualified_type
9773 (float_type_node, TYPE_QUAL_CONST));
9775 tree v2sf_ftype_long_pcfloat = build_function_type_list (V2SF_type_node,
9776 long_integer_type_node,
9777 pcfloat_type_node,
9778 NULL_TREE);
9779 tree void_ftype_v2sf_long_pcfloat =
9780 build_function_type_list (void_type_node,
9781 V2SF_type_node,
9782 long_integer_type_node,
9783 pcfloat_type_node,
9784 NULL_TREE);
9787 def_builtin (0, "__builtin_paired_lx", v2sf_ftype_long_pcfloat,
9788 PAIRED_BUILTIN_LX);
9791 def_builtin (0, "__builtin_paired_stx", void_ftype_v2sf_long_pcfloat,
9792 PAIRED_BUILTIN_STX);
9794 /* Predicates. */
9795 d = bdesc_paired_preds;
9796 for (i = 0; i < ARRAY_SIZE (bdesc_paired_preds); ++i, d++)
9798 tree type;
9800 switch (insn_data[d->icode].operand[1].mode)
9802 case V2SFmode:
9803 type = int_ftype_int_v2sf_v2sf;
9804 break;
9805 default:
9806 gcc_unreachable ();
9809 def_builtin (d->mask, d->name, type, d->code);
9813 static void
9814 altivec_init_builtins (void)
9816 const struct builtin_description *d;
9817 const struct builtin_description_predicates *dp;
9818 size_t i;
9819 tree ftype;
9821 tree pfloat_type_node = build_pointer_type (float_type_node);
9822 tree pint_type_node = build_pointer_type (integer_type_node);
9823 tree pshort_type_node = build_pointer_type (short_integer_type_node);
9824 tree pchar_type_node = build_pointer_type (char_type_node);
9826 tree pvoid_type_node = build_pointer_type (void_type_node);
9828 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
9829 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
9830 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
9831 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
9833 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
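 /* Editorial note (illustrative, not in the original source): the "pc"
    nodes above are simply pointer-to-const types; e.g. pcfloat_type_node
    is the tree for the C type "const float *" and pcvoid_type_node for
    "const void *".  */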
9835 tree int_ftype_opaque
9836 = build_function_type_list (integer_type_node,
9837 opaque_V4SI_type_node, NULL_TREE);
9838 tree opaque_ftype_opaque
9839 = build_function_type (integer_type_node,
9840 NULL_TREE);
9841 tree opaque_ftype_opaque_int
9842 = build_function_type_list (opaque_V4SI_type_node,
9843 opaque_V4SI_type_node, integer_type_node, NULL_TREE);
9844 tree opaque_ftype_opaque_opaque_int
9845 = build_function_type_list (opaque_V4SI_type_node,
9846 opaque_V4SI_type_node, opaque_V4SI_type_node,
9847 integer_type_node, NULL_TREE);
9848 tree int_ftype_int_opaque_opaque
9849 = build_function_type_list (integer_type_node,
9850 integer_type_node, opaque_V4SI_type_node,
9851 opaque_V4SI_type_node, NULL_TREE);
9852 tree int_ftype_int_v4si_v4si
9853 = build_function_type_list (integer_type_node,
9854 integer_type_node, V4SI_type_node,
9855 V4SI_type_node, NULL_TREE);
9856 tree v4sf_ftype_pcfloat
9857 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
9858 tree void_ftype_pfloat_v4sf
9859 = build_function_type_list (void_type_node,
9860 pfloat_type_node, V4SF_type_node, NULL_TREE);
9861 tree v4si_ftype_pcint
9862 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
9863 tree void_ftype_pint_v4si
9864 = build_function_type_list (void_type_node,
9865 pint_type_node, V4SI_type_node, NULL_TREE);
9866 tree v8hi_ftype_pcshort
9867 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
9868 tree void_ftype_pshort_v8hi
9869 = build_function_type_list (void_type_node,
9870 pshort_type_node, V8HI_type_node, NULL_TREE);
9871 tree v16qi_ftype_pcchar
9872 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
9873 tree void_ftype_pchar_v16qi
9874 = build_function_type_list (void_type_node,
9875 pchar_type_node, V16QI_type_node, NULL_TREE);
9876 tree void_ftype_v4si
9877 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
9878 tree v8hi_ftype_void
9879 = build_function_type (V8HI_type_node, void_list_node);
9880 tree void_ftype_void
9881 = build_function_type (void_type_node, void_list_node);
9882 tree void_ftype_int
9883 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
9885 tree opaque_ftype_long_pcvoid
9886 = build_function_type_list (opaque_V4SI_type_node,
9887 long_integer_type_node, pcvoid_type_node, NULL_TREE);
9888 tree v16qi_ftype_long_pcvoid
9889 = build_function_type_list (V16QI_type_node,
9890 long_integer_type_node, pcvoid_type_node, NULL_TREE);
9891 tree v8hi_ftype_long_pcvoid
9892 = build_function_type_list (V8HI_type_node,
9893 long_integer_type_node, pcvoid_type_node, NULL_TREE);
9894 tree v4si_ftype_long_pcvoid
9895 = build_function_type_list (V4SI_type_node,
9896 long_integer_type_node, pcvoid_type_node, NULL_TREE);
9898 tree void_ftype_opaque_long_pvoid
9899 = build_function_type_list (void_type_node,
9900 opaque_V4SI_type_node, long_integer_type_node,
9901 pvoid_type_node, NULL_TREE);
9902 tree void_ftype_v4si_long_pvoid
9903 = build_function_type_list (void_type_node,
9904 V4SI_type_node, long_integer_type_node,
9905 pvoid_type_node, NULL_TREE);
9906 tree void_ftype_v16qi_long_pvoid
9907 = build_function_type_list (void_type_node,
9908 V16QI_type_node, long_integer_type_node,
9909 pvoid_type_node, NULL_TREE);
9910 tree void_ftype_v8hi_long_pvoid
9911 = build_function_type_list (void_type_node,
9912 V8HI_type_node, long_integer_type_node,
9913 pvoid_type_node, NULL_TREE);
9914 tree int_ftype_int_v8hi_v8hi
9915 = build_function_type_list (integer_type_node,
9916 integer_type_node, V8HI_type_node,
9917 V8HI_type_node, NULL_TREE);
9918 tree int_ftype_int_v16qi_v16qi
9919 = build_function_type_list (integer_type_node,
9920 integer_type_node, V16QI_type_node,
9921 V16QI_type_node, NULL_TREE);
9922 tree int_ftype_int_v4sf_v4sf
9923 = build_function_type_list (integer_type_node,
9924 integer_type_node, V4SF_type_node,
9925 V4SF_type_node, NULL_TREE);
9926 tree v4si_ftype_v4si
9927 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
9928 tree v8hi_ftype_v8hi
9929 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
9930 tree v16qi_ftype_v16qi
9931 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
9932 tree v4sf_ftype_v4sf
9933 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
9934 tree void_ftype_pcvoid_int_int
9935 = build_function_type_list (void_type_node,
9936 pcvoid_type_node, integer_type_node,
9937 integer_type_node, NULL_TREE);
9939 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
9940 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
9941 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
9942 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
9943 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
9944 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
9945 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
9946 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
9947 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
9948 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
9949 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
9950 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
9951 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
9952 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
9953 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
9954 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
9955 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
9956 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
9957 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
9958 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_int, ALTIVEC_BUILTIN_DSS);
9959 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
9960 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
9961 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
9962 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
9963 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
9964 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
9965 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
9966 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
9967 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
9968 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
9969 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
9970 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
9971 def_builtin (MASK_ALTIVEC, "__builtin_vec_ld", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LD);
9972 def_builtin (MASK_ALTIVEC, "__builtin_vec_lde", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDE);
9973 def_builtin (MASK_ALTIVEC, "__builtin_vec_ldl", opaque_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LDL);
9974 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSL);
9975 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVSR);
9976 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEBX);
9977 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEHX);
9978 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVEWX);
9979 def_builtin (MASK_ALTIVEC, "__builtin_vec_st", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_ST);
9980 def_builtin (MASK_ALTIVEC, "__builtin_vec_ste", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STE);
9981 def_builtin (MASK_ALTIVEC, "__builtin_vec_stl", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STL);
9982 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvewx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEWX);
9983 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvebx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEBX);
9984 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvehx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEHX);
9986 if (rs6000_cpu == PROCESSOR_CELL)
9988 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvlx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVLX);
9989 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvlxl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVLXL);
9990 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvrx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVRX);
9991 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvrxl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVRXL);
9993 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvlx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVLX);
9994 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvlxl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVLXL);
9995 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvrx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVRX);
9996 def_builtin (MASK_ALTIVEC, "__builtin_vec_lvrxl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_VEC_LVRXL);
9998 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvlx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVLX);
9999 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvlxl", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVLXL);
10000 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvrx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVRX);
10001 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvrxl", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVRXL);
10003 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvlx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_VEC_STVLX);
10004 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvlxl", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_VEC_STVLXL);
10005 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvrx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_VEC_STVRX);
10006 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvrxl", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_VEC_STVRXL);
10008 def_builtin (MASK_ALTIVEC, "__builtin_vec_step", int_ftype_opaque, ALTIVEC_BUILTIN_VEC_STEP);
10009 def_builtin (MASK_ALTIVEC, "__builtin_vec_splats", opaque_ftype_opaque, ALTIVEC_BUILTIN_VEC_SPLATS);
10010 def_builtin (MASK_ALTIVEC, "__builtin_vec_promote", opaque_ftype_opaque, ALTIVEC_BUILTIN_VEC_PROMOTE);
10012 def_builtin (MASK_ALTIVEC, "__builtin_vec_sld", opaque_ftype_opaque_opaque_int, ALTIVEC_BUILTIN_VEC_SLD);
10013 def_builtin (MASK_ALTIVEC, "__builtin_vec_splat", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_SPLAT);
10014 def_builtin (MASK_ALTIVEC, "__builtin_vec_extract", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_EXTRACT);
10015 def_builtin (MASK_ALTIVEC, "__builtin_vec_insert", opaque_ftype_opaque_opaque_int, ALTIVEC_BUILTIN_VEC_INSERT);
10016 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltw", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTW);
10017 def_builtin (MASK_ALTIVEC, "__builtin_vec_vsplth", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTH);
10018 def_builtin (MASK_ALTIVEC, "__builtin_vec_vspltb", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VSPLTB);
10019 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctf", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTF);
10020 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfsx", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFSX);
10021 def_builtin (MASK_ALTIVEC, "__builtin_vec_vcfux", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_VCFUX);
10022 def_builtin (MASK_ALTIVEC, "__builtin_vec_cts", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTS);
10023 def_builtin (MASK_ALTIVEC, "__builtin_vec_ctu", opaque_ftype_opaque_int, ALTIVEC_BUILTIN_VEC_CTU);
10025 /* Add the DST variants. */
10026 d = bdesc_dst;
10027 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
10028 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);
10030 /* Initialize the predicates. */
10031 dp = bdesc_altivec_preds;
10032 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
10034 enum machine_mode mode1;
10035 tree type;
10036 bool is_overloaded = dp->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10037 && dp->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
10039 if (is_overloaded)
10040 mode1 = VOIDmode;
10041 else
10042 mode1 = insn_data[dp->icode].operand[1].mode;
10044 switch (mode1)
10046 case VOIDmode:
10047 type = int_ftype_int_opaque_opaque;
10048 break;
10049 case V4SImode:
10050 type = int_ftype_int_v4si_v4si;
10051 break;
10052 case V8HImode:
10053 type = int_ftype_int_v8hi_v8hi;
10054 break;
10055 case V16QImode:
10056 type = int_ftype_int_v16qi_v16qi;
10057 break;
10058 case V4SFmode:
10059 type = int_ftype_int_v4sf_v4sf;
10060 break;
10061 default:
10062 gcc_unreachable ();
10065 def_builtin (dp->mask, dp->name, type, dp->code);
10068 /* Initialize the abs* operators. */
10069 d = bdesc_abs;
10070 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
10072 enum machine_mode mode0;
10073 tree type;
10075 mode0 = insn_data[d->icode].operand[0].mode;
10077 switch (mode0)
10079 case V4SImode:
10080 type = v4si_ftype_v4si;
10081 break;
10082 case V8HImode:
10083 type = v8hi_ftype_v8hi;
10084 break;
10085 case V16QImode:
10086 type = v16qi_ftype_v16qi;
10087 break;
10088 case V4SFmode:
10089 type = v4sf_ftype_v4sf;
10090 break;
10091 default:
10092 gcc_unreachable ();
10095 def_builtin (d->mask, d->name, type, d->code);
10098 if (TARGET_ALTIVEC)
10100 tree decl;
10102 /* Initialize target builtin that implements
10103 targetm.vectorize.builtin_mask_for_load. */
10105 decl = add_builtin_function ("__builtin_altivec_mask_for_load",
10106 v16qi_ftype_long_pcvoid,
10107 ALTIVEC_BUILTIN_MASK_FOR_LOAD,
10108 BUILT_IN_MD, NULL, NULL_TREE);
10109 TREE_READONLY (decl) = 1;
10110 /* Record the decl. Will be used by rs6000_builtin_mask_for_load. */
10111 altivec_builtin_mask_for_load = decl;
10114 /* Access to the vec_init patterns. */
10115 ftype = build_function_type_list (V4SI_type_node, integer_type_node,
10116 integer_type_node, integer_type_node,
10117 integer_type_node, NULL_TREE);
10118 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4si", ftype,
10119 ALTIVEC_BUILTIN_VEC_INIT_V4SI);
10121 ftype = build_function_type_list (V8HI_type_node, short_integer_type_node,
10122 short_integer_type_node,
10123 short_integer_type_node,
10124 short_integer_type_node,
10125 short_integer_type_node,
10126 short_integer_type_node,
10127 short_integer_type_node,
10128 short_integer_type_node, NULL_TREE);
10129 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v8hi", ftype,
10130 ALTIVEC_BUILTIN_VEC_INIT_V8HI);
10132 ftype = build_function_type_list (V16QI_type_node, char_type_node,
10133 char_type_node, char_type_node,
10134 char_type_node, char_type_node,
10135 char_type_node, char_type_node,
10136 char_type_node, char_type_node,
10137 char_type_node, char_type_node,
10138 char_type_node, char_type_node,
10139 char_type_node, char_type_node,
10140 char_type_node, NULL_TREE);
10141 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v16qi", ftype,
10142 ALTIVEC_BUILTIN_VEC_INIT_V16QI);
10144 ftype = build_function_type_list (V4SF_type_node, float_type_node,
10145 float_type_node, float_type_node,
10146 float_type_node, NULL_TREE);
10147 def_builtin (MASK_ALTIVEC, "__builtin_vec_init_v4sf", ftype,
10148 ALTIVEC_BUILTIN_VEC_INIT_V4SF);
10150 /* Access to the vec_set patterns. */
10151 ftype = build_function_type_list (V4SI_type_node, V4SI_type_node,
10152 intSI_type_node,
10153 integer_type_node, NULL_TREE);
10154 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4si", ftype,
10155 ALTIVEC_BUILTIN_VEC_SET_V4SI);
10157 ftype = build_function_type_list (V8HI_type_node, V8HI_type_node,
10158 intHI_type_node,
10159 integer_type_node, NULL_TREE);
10160 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v8hi", ftype,
10161 ALTIVEC_BUILTIN_VEC_SET_V8HI);
10163   ftype = build_function_type_list (V16QI_type_node, V16QI_type_node,
10164 intQI_type_node,
10165 integer_type_node, NULL_TREE);
10166 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v16qi", ftype,
10167 ALTIVEC_BUILTIN_VEC_SET_V16QI);
10169 ftype = build_function_type_list (V4SF_type_node, V4SF_type_node,
10170 float_type_node,
10171 integer_type_node, NULL_TREE);
10172 def_builtin (MASK_ALTIVEC, "__builtin_vec_set_v4sf", ftype,
10173 ALTIVEC_BUILTIN_VEC_SET_V4SF);
10175 /* Access to the vec_extract patterns. */
10176 ftype = build_function_type_list (intSI_type_node, V4SI_type_node,
10177 integer_type_node, NULL_TREE);
10178 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4si", ftype,
10179 ALTIVEC_BUILTIN_VEC_EXT_V4SI);
10181 ftype = build_function_type_list (intHI_type_node, V8HI_type_node,
10182 integer_type_node, NULL_TREE);
10183 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v8hi", ftype,
10184 ALTIVEC_BUILTIN_VEC_EXT_V8HI);
10186 ftype = build_function_type_list (intQI_type_node, V16QI_type_node,
10187 integer_type_node, NULL_TREE);
10188 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v16qi", ftype,
10189 ALTIVEC_BUILTIN_VEC_EXT_V16QI);
10191 ftype = build_function_type_list (float_type_node, V4SF_type_node,
10192 integer_type_node, NULL_TREE);
10193 def_builtin (MASK_ALTIVEC, "__builtin_vec_ext_v4sf", ftype,
10194 ALTIVEC_BUILTIN_VEC_EXT_V4SF);
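 /* Editorial usage sketch (an assumption, not from this file): with
    -maltivec, user code can reach the vec_init/vec_ext builtins defined
    above directly, roughly:

      vector int v = __builtin_vec_init_v4si (1, 2, 3, 4);
      int elt2 = __builtin_vec_ext_v4si (v, 2);   // elt2 == 3

    where the final integer argument of the ext builtins selects the
    element index.  */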
10197 static void
10198 rs6000_common_init_builtins (void)
10200 const struct builtin_description *d;
10201 size_t i;
10203 tree v2sf_ftype_v2sf_v2sf_v2sf
10204 = build_function_type_list (V2SF_type_node,
10205 V2SF_type_node, V2SF_type_node,
10206 V2SF_type_node, NULL_TREE);
10208 tree v4sf_ftype_v4sf_v4sf_v16qi
10209 = build_function_type_list (V4SF_type_node,
10210 V4SF_type_node, V4SF_type_node,
10211 V16QI_type_node, NULL_TREE);
10212 tree v4si_ftype_v4si_v4si_v16qi
10213 = build_function_type_list (V4SI_type_node,
10214 V4SI_type_node, V4SI_type_node,
10215 V16QI_type_node, NULL_TREE);
10216 tree v8hi_ftype_v8hi_v8hi_v16qi
10217 = build_function_type_list (V8HI_type_node,
10218 V8HI_type_node, V8HI_type_node,
10219 V16QI_type_node, NULL_TREE);
10220 tree v16qi_ftype_v16qi_v16qi_v16qi
10221 = build_function_type_list (V16QI_type_node,
10222 V16QI_type_node, V16QI_type_node,
10223 V16QI_type_node, NULL_TREE);
10224 tree v4si_ftype_int
10225 = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
10226 tree v8hi_ftype_int
10227 = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
10228 tree v16qi_ftype_int
10229 = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
10230 tree v8hi_ftype_v16qi
10231 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
10232 tree v4sf_ftype_v4sf
10233 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
10235 tree v2si_ftype_v2si_v2si
10236 = build_function_type_list (opaque_V2SI_type_node,
10237 opaque_V2SI_type_node,
10238 opaque_V2SI_type_node, NULL_TREE);
10240 tree v2sf_ftype_v2sf_v2sf_spe
10241 = build_function_type_list (opaque_V2SF_type_node,
10242 opaque_V2SF_type_node,
10243 opaque_V2SF_type_node, NULL_TREE);
10245 tree v2sf_ftype_v2sf_v2sf
10246 = build_function_type_list (V2SF_type_node,
10247 V2SF_type_node,
10248 V2SF_type_node, NULL_TREE);
10251 tree v2si_ftype_int_int
10252 = build_function_type_list (opaque_V2SI_type_node,
10253 integer_type_node, integer_type_node,
10254 NULL_TREE);
10256 tree opaque_ftype_opaque
10257 = build_function_type_list (opaque_V4SI_type_node,
10258 opaque_V4SI_type_node, NULL_TREE);
10260 tree v2si_ftype_v2si
10261 = build_function_type_list (opaque_V2SI_type_node,
10262 opaque_V2SI_type_node, NULL_TREE);
10264 tree v2sf_ftype_v2sf_spe
10265 = build_function_type_list (opaque_V2SF_type_node,
10266 opaque_V2SF_type_node, NULL_TREE);
10268 tree v2sf_ftype_v2sf
10269 = build_function_type_list (V2SF_type_node,
10270 V2SF_type_node, NULL_TREE);
10272 tree v2sf_ftype_v2si
10273 = build_function_type_list (opaque_V2SF_type_node,
10274 opaque_V2SI_type_node, NULL_TREE);
10276 tree v2si_ftype_v2sf
10277 = build_function_type_list (opaque_V2SI_type_node,
10278 opaque_V2SF_type_node, NULL_TREE);
10280 tree v2si_ftype_v2si_char
10281 = build_function_type_list (opaque_V2SI_type_node,
10282 opaque_V2SI_type_node,
10283 char_type_node, NULL_TREE);
10285 tree v2si_ftype_int_char
10286 = build_function_type_list (opaque_V2SI_type_node,
10287 integer_type_node, char_type_node, NULL_TREE);
10289 tree v2si_ftype_char
10290 = build_function_type_list (opaque_V2SI_type_node,
10291 char_type_node, NULL_TREE);
10293 tree int_ftype_int_int
10294 = build_function_type_list (integer_type_node,
10295 integer_type_node, integer_type_node,
10296 NULL_TREE);
10298 tree opaque_ftype_opaque_opaque
10299 = build_function_type_list (opaque_V4SI_type_node,
10300 opaque_V4SI_type_node, opaque_V4SI_type_node, NULL_TREE);
10301 tree v4si_ftype_v4si_v4si
10302 = build_function_type_list (V4SI_type_node,
10303 V4SI_type_node, V4SI_type_node, NULL_TREE);
10304 tree v4sf_ftype_v4si_int
10305 = build_function_type_list (V4SF_type_node,
10306 V4SI_type_node, integer_type_node, NULL_TREE);
10307 tree v4si_ftype_v4sf_int
10308 = build_function_type_list (V4SI_type_node,
10309 V4SF_type_node, integer_type_node, NULL_TREE);
10310 tree v4si_ftype_v4si_int
10311 = build_function_type_list (V4SI_type_node,
10312 V4SI_type_node, integer_type_node, NULL_TREE);
10313 tree v8hi_ftype_v8hi_int
10314 = build_function_type_list (V8HI_type_node,
10315 V8HI_type_node, integer_type_node, NULL_TREE);
10316 tree v16qi_ftype_v16qi_int
10317 = build_function_type_list (V16QI_type_node,
10318 V16QI_type_node, integer_type_node, NULL_TREE);
10319 tree v16qi_ftype_v16qi_v16qi_int
10320 = build_function_type_list (V16QI_type_node,
10321 V16QI_type_node, V16QI_type_node,
10322 integer_type_node, NULL_TREE);
10323 tree v8hi_ftype_v8hi_v8hi_int
10324 = build_function_type_list (V8HI_type_node,
10325 V8HI_type_node, V8HI_type_node,
10326 integer_type_node, NULL_TREE);
10327 tree v4si_ftype_v4si_v4si_int
10328 = build_function_type_list (V4SI_type_node,
10329 V4SI_type_node, V4SI_type_node,
10330 integer_type_node, NULL_TREE);
10331 tree v4sf_ftype_v4sf_v4sf_int
10332 = build_function_type_list (V4SF_type_node,
10333 V4SF_type_node, V4SF_type_node,
10334 integer_type_node, NULL_TREE);
10335 tree v4sf_ftype_v4sf_v4sf
10336 = build_function_type_list (V4SF_type_node,
10337 V4SF_type_node, V4SF_type_node, NULL_TREE);
10338 tree opaque_ftype_opaque_opaque_opaque
10339 = build_function_type_list (opaque_V4SI_type_node,
10340 opaque_V4SI_type_node, opaque_V4SI_type_node,
10341 opaque_V4SI_type_node, NULL_TREE);
10342 tree v4sf_ftype_v4sf_v4sf_v4si
10343 = build_function_type_list (V4SF_type_node,
10344 V4SF_type_node, V4SF_type_node,
10345 V4SI_type_node, NULL_TREE);
10346 tree v4sf_ftype_v4sf_v4sf_v4sf
10347 = build_function_type_list (V4SF_type_node,
10348 V4SF_type_node, V4SF_type_node,
10349 V4SF_type_node, NULL_TREE);
10350 tree v4si_ftype_v4si_v4si_v4si
10351 = build_function_type_list (V4SI_type_node,
10352 V4SI_type_node, V4SI_type_node,
10353 V4SI_type_node, NULL_TREE);
10354 tree v8hi_ftype_v8hi_v8hi
10355 = build_function_type_list (V8HI_type_node,
10356 V8HI_type_node, V8HI_type_node, NULL_TREE);
10357 tree v8hi_ftype_v8hi_v8hi_v8hi
10358 = build_function_type_list (V8HI_type_node,
10359 V8HI_type_node, V8HI_type_node,
10360 V8HI_type_node, NULL_TREE);
10361 tree v4si_ftype_v8hi_v8hi_v4si
10362 = build_function_type_list (V4SI_type_node,
10363 V8HI_type_node, V8HI_type_node,
10364 V4SI_type_node, NULL_TREE);
10365 tree v4si_ftype_v16qi_v16qi_v4si
10366 = build_function_type_list (V4SI_type_node,
10367 V16QI_type_node, V16QI_type_node,
10368 V4SI_type_node, NULL_TREE);
10369 tree v16qi_ftype_v16qi_v16qi
10370 = build_function_type_list (V16QI_type_node,
10371 V16QI_type_node, V16QI_type_node, NULL_TREE);
10372 tree v4si_ftype_v4sf_v4sf
10373 = build_function_type_list (V4SI_type_node,
10374 V4SF_type_node, V4SF_type_node, NULL_TREE);
10375 tree v8hi_ftype_v16qi_v16qi
10376 = build_function_type_list (V8HI_type_node,
10377 V16QI_type_node, V16QI_type_node, NULL_TREE);
10378 tree v4si_ftype_v8hi_v8hi
10379 = build_function_type_list (V4SI_type_node,
10380 V8HI_type_node, V8HI_type_node, NULL_TREE);
10381 tree v8hi_ftype_v4si_v4si
10382 = build_function_type_list (V8HI_type_node,
10383 V4SI_type_node, V4SI_type_node, NULL_TREE);
10384 tree v16qi_ftype_v8hi_v8hi
10385 = build_function_type_list (V16QI_type_node,
10386 V8HI_type_node, V8HI_type_node, NULL_TREE);
10387 tree v4si_ftype_v16qi_v4si
10388 = build_function_type_list (V4SI_type_node,
10389 V16QI_type_node, V4SI_type_node, NULL_TREE);
10390 tree v4si_ftype_v16qi_v16qi
10391 = build_function_type_list (V4SI_type_node,
10392 V16QI_type_node, V16QI_type_node, NULL_TREE);
10393 tree v4si_ftype_v8hi_v4si
10394 = build_function_type_list (V4SI_type_node,
10395 V8HI_type_node, V4SI_type_node, NULL_TREE);
10396 tree v4si_ftype_v8hi
10397 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
10398 tree int_ftype_v4si_v4si
10399 = build_function_type_list (integer_type_node,
10400 V4SI_type_node, V4SI_type_node, NULL_TREE);
10401 tree int_ftype_v4sf_v4sf
10402 = build_function_type_list (integer_type_node,
10403 V4SF_type_node, V4SF_type_node, NULL_TREE);
10404 tree int_ftype_v16qi_v16qi
10405 = build_function_type_list (integer_type_node,
10406 V16QI_type_node, V16QI_type_node, NULL_TREE);
10407 tree int_ftype_v8hi_v8hi
10408 = build_function_type_list (integer_type_node,
10409 V8HI_type_node, V8HI_type_node, NULL_TREE);
10411 /* Add the simple ternary operators. */
10412 d = bdesc_3arg;
10413 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
10415 enum machine_mode mode0, mode1, mode2, mode3;
10416 tree type;
10417 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10418 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
10420 if (is_overloaded)
10422 mode0 = VOIDmode;
10423 mode1 = VOIDmode;
10424 mode2 = VOIDmode;
10425 mode3 = VOIDmode;
10427 else
10429 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10430 continue;
10432 mode0 = insn_data[d->icode].operand[0].mode;
10433 mode1 = insn_data[d->icode].operand[1].mode;
10434 mode2 = insn_data[d->icode].operand[2].mode;
10435 mode3 = insn_data[d->icode].operand[3].mode;
10438 /* When all four are of the same mode. */
10439 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
10441 switch (mode0)
10443 case VOIDmode:
10444 type = opaque_ftype_opaque_opaque_opaque;
10445 break;
10446 case V4SImode:
10447 type = v4si_ftype_v4si_v4si_v4si;
10448 break;
10449 case V4SFmode:
10450 type = v4sf_ftype_v4sf_v4sf_v4sf;
10451 break;
10452 case V8HImode:
10453 type = v8hi_ftype_v8hi_v8hi_v8hi;
10454 break;
10455 case V16QImode:
10456 type = v16qi_ftype_v16qi_v16qi_v16qi;
10457 break;
10458 case V2SFmode:
10459 type = v2sf_ftype_v2sf_v2sf_v2sf;
10460 break;
10461 default:
10462 gcc_unreachable ();
10465 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
10467 switch (mode0)
10469 case V4SImode:
10470 type = v4si_ftype_v4si_v4si_v16qi;
10471 break;
10472 case V4SFmode:
10473 type = v4sf_ftype_v4sf_v4sf_v16qi;
10474 break;
10475 case V8HImode:
10476 type = v8hi_ftype_v8hi_v8hi_v16qi;
10477 break;
10478 case V16QImode:
10479 type = v16qi_ftype_v16qi_v16qi_v16qi;
10480 break;
10481 default:
10482 gcc_unreachable ();
10485 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
10486 && mode3 == V4SImode)
10487 type = v4si_ftype_v16qi_v16qi_v4si;
10488 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
10489 && mode3 == V4SImode)
10490 type = v4si_ftype_v8hi_v8hi_v4si;
10491 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
10492 && mode3 == V4SImode)
10493 type = v4sf_ftype_v4sf_v4sf_v4si;
10495 /* vchar, vchar, vchar, 4-bit literal. */
10496 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
10497 && mode3 == QImode)
10498 type = v16qi_ftype_v16qi_v16qi_int;
10500 /* vshort, vshort, vshort, 4-bit literal. */
10501 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
10502 && mode3 == QImode)
10503 type = v8hi_ftype_v8hi_v8hi_int;
10505 /* vint, vint, vint, 4-bit literal. */
10506 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
10507 && mode3 == QImode)
10508 type = v4si_ftype_v4si_v4si_int;
10510 /* vfloat, vfloat, vfloat, 4-bit literal. */
10511 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
10512 && mode3 == QImode)
10513 type = v4sf_ftype_v4sf_v4sf_int;
10515 else
10516 gcc_unreachable ();
10518 def_builtin (d->mask, d->name, type, d->code);
10521 /* Add the simple binary operators. */
10522 d = (struct builtin_description *) bdesc_2arg;
10523 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
10525 enum machine_mode mode0, mode1, mode2;
10526 tree type;
10527 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10528 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
10530 if (is_overloaded)
10532 mode0 = VOIDmode;
10533 mode1 = VOIDmode;
10534 mode2 = VOIDmode;
10536 else
10538 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10539 continue;
10541 mode0 = insn_data[d->icode].operand[0].mode;
10542 mode1 = insn_data[d->icode].operand[1].mode;
10543 mode2 = insn_data[d->icode].operand[2].mode;
10546 /* When all three operands are of the same mode. */
10547 if (mode0 == mode1 && mode1 == mode2)
10549 switch (mode0)
10551 case VOIDmode:
10552 type = opaque_ftype_opaque_opaque;
10553 break;
10554 case V4SFmode:
10555 type = v4sf_ftype_v4sf_v4sf;
10556 break;
10557 case V4SImode:
10558 type = v4si_ftype_v4si_v4si;
10559 break;
10560 case V16QImode:
10561 type = v16qi_ftype_v16qi_v16qi;
10562 break;
10563 case V8HImode:
10564 type = v8hi_ftype_v8hi_v8hi;
10565 break;
10566 case V2SImode:
10567 type = v2si_ftype_v2si_v2si;
10568 break;
10569 case V2SFmode:
10570 if (TARGET_PAIRED_FLOAT)
10571 type = v2sf_ftype_v2sf_v2sf;
10572 else
10573 type = v2sf_ftype_v2sf_v2sf_spe;
10574 break;
10575 case SImode:
10576 type = int_ftype_int_int;
10577 break;
10578 default:
10579 gcc_unreachable ();
10583 /* A few other combos we really don't want to do manually. */
10585 /* vint, vfloat, vfloat. */
10586 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
10587 type = v4si_ftype_v4sf_v4sf;
10589 /* vshort, vchar, vchar. */
10590 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
10591 type = v8hi_ftype_v16qi_v16qi;
10593 /* vint, vshort, vshort. */
10594 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
10595 type = v4si_ftype_v8hi_v8hi;
10597 /* vshort, vint, vint. */
10598 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
10599 type = v8hi_ftype_v4si_v4si;
10601 /* vchar, vshort, vshort. */
10602 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
10603 type = v16qi_ftype_v8hi_v8hi;
10605 /* vint, vchar, vint. */
10606 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
10607 type = v4si_ftype_v16qi_v4si;
10609 /* vint, vchar, vchar. */
10610 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
10611 type = v4si_ftype_v16qi_v16qi;
10613 /* vint, vshort, vint. */
10614 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
10615 type = v4si_ftype_v8hi_v4si;
10617 /* vint, vint, 5-bit literal. */
10618 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
10619 type = v4si_ftype_v4si_int;
10621 /* vshort, vshort, 5-bit literal. */
10622 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
10623 type = v8hi_ftype_v8hi_int;
10625 /* vchar, vchar, 5-bit literal. */
10626 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
10627 type = v16qi_ftype_v16qi_int;
10629 /* vfloat, vint, 5-bit literal. */
10630 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
10631 type = v4sf_ftype_v4si_int;
10633 /* vint, vfloat, 5-bit literal. */
10634 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
10635 type = v4si_ftype_v4sf_int;
10637 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
10638 type = v2si_ftype_int_int;
10640 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
10641 type = v2si_ftype_v2si_char;
10643 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
10644 type = v2si_ftype_int_char;
10646 else
10648 /* int, x, x. */
10649 gcc_assert (mode0 == SImode);
10650 switch (mode1)
10652 case V4SImode:
10653 type = int_ftype_v4si_v4si;
10654 break;
10655 case V4SFmode:
10656 type = int_ftype_v4sf_v4sf;
10657 break;
10658 case V16QImode:
10659 type = int_ftype_v16qi_v16qi;
10660 break;
10661 case V8HImode:
10662 type = int_ftype_v8hi_v8hi;
10663 break;
10664 default:
10665 gcc_unreachable ();
10669 def_builtin (d->mask, d->name, type, d->code);
10672 /* Add the simple unary operators. */
10673 d = (struct builtin_description *) bdesc_1arg;
10674 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
10676 enum machine_mode mode0, mode1;
10677 tree type;
10678 bool is_overloaded = d->code >= ALTIVEC_BUILTIN_OVERLOADED_FIRST
10679 && d->code <= ALTIVEC_BUILTIN_OVERLOADED_LAST;
10681 if (is_overloaded)
10683 mode0 = VOIDmode;
10684 mode1 = VOIDmode;
10686 else
10688 if (d->name == 0 || d->icode == CODE_FOR_nothing)
10689 continue;
10691 mode0 = insn_data[d->icode].operand[0].mode;
10692 mode1 = insn_data[d->icode].operand[1].mode;
10695 if (mode0 == V4SImode && mode1 == QImode)
10696 type = v4si_ftype_int;
10697 else if (mode0 == V8HImode && mode1 == QImode)
10698 type = v8hi_ftype_int;
10699 else if (mode0 == V16QImode && mode1 == QImode)
10700 type = v16qi_ftype_int;
10701 else if (mode0 == VOIDmode && mode1 == VOIDmode)
10702 type = opaque_ftype_opaque;
10703 else if (mode0 == V4SFmode && mode1 == V4SFmode)
10704 type = v4sf_ftype_v4sf;
10705 else if (mode0 == V8HImode && mode1 == V16QImode)
10706 type = v8hi_ftype_v16qi;
10707 else if (mode0 == V4SImode && mode1 == V8HImode)
10708 type = v4si_ftype_v8hi;
10709 else if (mode0 == V2SImode && mode1 == V2SImode)
10710 type = v2si_ftype_v2si;
10711 else if (mode0 == V2SFmode && mode1 == V2SFmode)
10713 if (TARGET_PAIRED_FLOAT)
10714 type = v2sf_ftype_v2sf;
10715 else
10716 type = v2sf_ftype_v2sf_spe;
10718 else if (mode0 == V2SFmode && mode1 == V2SImode)
10719 type = v2sf_ftype_v2si;
10720 else if (mode0 == V2SImode && mode1 == V2SFmode)
10721 type = v2si_ftype_v2sf;
10722 else if (mode0 == V2SImode && mode1 == QImode)
10723 type = v2si_ftype_char;
10724 else
10725 gcc_unreachable ();
10727 def_builtin (d->mask, d->name, type, d->code);
10731 static void
10732 rs6000_init_libfuncs (void)
10734 if (DEFAULT_ABI != ABI_V4 && TARGET_XCOFF
10735 && !TARGET_POWER2 && !TARGET_POWERPC)
10737 /* AIX library routines for float->int conversion. */
10738 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
10739 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
10740 set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
10741 set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
10744 if (!TARGET_IEEEQUAD)
10745 /* AIX/Darwin/64-bit Linux quad floating point routines. */
10746 if (!TARGET_XL_COMPAT)
10748 set_optab_libfunc (add_optab, TFmode, "__gcc_qadd");
10749 set_optab_libfunc (sub_optab, TFmode, "__gcc_qsub");
10750 set_optab_libfunc (smul_optab, TFmode, "__gcc_qmul");
10751 set_optab_libfunc (sdiv_optab, TFmode, "__gcc_qdiv");
10753 if (!(TARGET_HARD_FLOAT && (TARGET_FPRS || TARGET_E500_DOUBLE)))
10755 set_optab_libfunc (neg_optab, TFmode, "__gcc_qneg");
10756 set_optab_libfunc (eq_optab, TFmode, "__gcc_qeq");
10757 set_optab_libfunc (ne_optab, TFmode, "__gcc_qne");
10758 set_optab_libfunc (gt_optab, TFmode, "__gcc_qgt");
10759 set_optab_libfunc (ge_optab, TFmode, "__gcc_qge");
10760 set_optab_libfunc (lt_optab, TFmode, "__gcc_qlt");
10761 set_optab_libfunc (le_optab, TFmode, "__gcc_qle");
10763 set_conv_libfunc (sext_optab, TFmode, SFmode, "__gcc_stoq");
10764 set_conv_libfunc (sext_optab, TFmode, DFmode, "__gcc_dtoq");
10765 set_conv_libfunc (trunc_optab, SFmode, TFmode, "__gcc_qtos");
10766 set_conv_libfunc (trunc_optab, DFmode, TFmode, "__gcc_qtod");
10767 set_conv_libfunc (sfix_optab, SImode, TFmode, "__gcc_qtoi");
10768 set_conv_libfunc (ufix_optab, SImode, TFmode, "__gcc_qtou");
10769 set_conv_libfunc (sfloat_optab, TFmode, SImode, "__gcc_itoq");
10770 set_conv_libfunc (ufloat_optab, TFmode, SImode, "__gcc_utoq");
10773 if (!(TARGET_HARD_FLOAT && TARGET_FPRS))
10774 set_optab_libfunc (unord_optab, TFmode, "__gcc_qunord");
10776 else
10778 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
10779 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
10780 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
10781 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
10783 else
10785 /* 32-bit SVR4 quad floating point routines. */
10787 set_optab_libfunc (add_optab, TFmode, "_q_add");
10788 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
10789 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
10790 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
10791 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
10792 if (TARGET_PPC_GPOPT || TARGET_POWER2)
10793 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
10795 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
10796 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
10797 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
10798 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
10799 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
10800 set_optab_libfunc (le_optab, TFmode, "_q_fle");
10802 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
10803 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
10804 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
10805 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
10806 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
10807 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
10808 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
10809 set_conv_libfunc (ufloat_optab, TFmode, SImode, "_q_utoq");
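 /* Editorial note (illustrative): the net effect of the SVR4 table above
    is that, e.g., a 32-bit SVR4 TFmode multiply "a * b" is emitted as a
    call to _q_mul, and a TFmode-to-signed-SImode conversion as a call
    to _q_qtoi.  */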
10814 /* Expand a block clear operation, and return 1 if successful. Return 0
10815 if we should let the compiler generate normal code.
10817 operands[0] is the destination
10818 operands[1] is the length
10819 operands[3] is the alignment */
10822 expand_block_clear (rtx operands[])
10824 rtx orig_dest = operands[0];
10825 rtx bytes_rtx = operands[1];
10826 rtx align_rtx = operands[3];
10827 bool constp = (GET_CODE (bytes_rtx) == CONST_INT);
10828 HOST_WIDE_INT align;
10829 HOST_WIDE_INT bytes;
10830 int offset;
10831 int clear_bytes;
10832 int clear_step;
10834   /* If this is not a fixed size clear, just call memset */
10835 if (! constp)
10836 return 0;
10838   /* The alignment must be a compile-time constant */
10839 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
10840 align = INTVAL (align_rtx) * BITS_PER_UNIT;
10842 /* Anything to clear? */
10843 bytes = INTVAL (bytes_rtx);
10844 if (bytes <= 0)
10845 return 1;
10847   /* Past a certain size, fall back to the memset libcall to avoid huge
10848      code bloat.  When optimizing for size, avoid any significant bloat:
10849      a memset call costs about 4 instructions, so allow one instruction
10850      to load zero and three to do the clearing.  */
10851 if (TARGET_ALTIVEC && align >= 128)
10852 clear_step = 16;
10853 else if (TARGET_POWERPC64 && align >= 32)
10854 clear_step = 8;
10855 else if (TARGET_SPE && align >= 64)
10856 clear_step = 8;
10857 else
10858 clear_step = 4;
10860 if (optimize_size && bytes > 3 * clear_step)
10861 return 0;
10862 if (! optimize_size && bytes > 8 * clear_step)
10863 return 0;
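   /* Editorial example (illustrative): with AltiVec and 128-bit alignment,
      clear_step is 16, so the size-optimized path falls back to memset
      past 3*16 = 48 bytes, while the normal path clears up to
      8*16 = 128 bytes inline.  */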
10865 for (offset = 0; bytes > 0; offset += clear_bytes, bytes -= clear_bytes)
10867 enum machine_mode mode = BLKmode;
10868 rtx dest;
10870 if (bytes >= 16 && TARGET_ALTIVEC && align >= 128)
10872 clear_bytes = 16;
10873 mode = V4SImode;
10875 else if (bytes >= 8 && TARGET_SPE && align >= 64)
10877 clear_bytes = 8;
10878 mode = V2SImode;
10880 else if (bytes >= 8 && TARGET_POWERPC64
10881 /* 64-bit loads and stores require word-aligned
10882 displacements. */
10883 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
10885 clear_bytes = 8;
10886 mode = DImode;
10888 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
10889 { /* move 4 bytes */
10890 clear_bytes = 4;
10891 mode = SImode;
10893 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
10894 { /* move 2 bytes */
10895 clear_bytes = 2;
10896 mode = HImode;
10898 else /* move 1 byte at a time */
10900 clear_bytes = 1;
10901 mode = QImode;
10904 dest = adjust_address (orig_dest, mode, offset);
10906 emit_move_insn (dest, CONST0_RTX (mode));
10909 return 1;
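 /* Editorial trace (illustrative): clearing 20 bytes with AltiVec and
    16-byte alignment iterates twice -- one V4SImode store of zero for
    the first 16 bytes, then one SImode store of zero for the 4-byte
    tail.  */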
10913 /* Expand a block move operation, and return 1 if successful. Return 0
10914 if we should let the compiler generate normal code.
10916 operands[0] is the destination
10917 operands[1] is the source
10918 operands[2] is the length
10919 operands[3] is the alignment */
10921 #define MAX_MOVE_REG 4
10924 expand_block_move (rtx operands[])
10926 rtx orig_dest = operands[0];
10927 rtx orig_src = operands[1];
10928 rtx bytes_rtx = operands[2];
10929 rtx align_rtx = operands[3];
10930 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
10931 int align;
10932 int bytes;
10933 int offset;
10934 int move_bytes;
10935 rtx stores[MAX_MOVE_REG];
10936 int num_reg = 0;
10938 /* If this is not a fixed size move, just call memcpy */
10939 if (! constp)
10940 return 0;
10942   /* The alignment must be a compile-time constant */
10943 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
10944 align = INTVAL (align_rtx) * BITS_PER_UNIT;
10946 /* Anything to move? */
10947 bytes = INTVAL (bytes_rtx);
10948 if (bytes <= 0)
10949 return 1;
10951 /* store_one_arg depends on expand_block_move to handle at least the size of
10952 reg_parm_stack_space. */
10953 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
10954 return 0;
10956 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
10958 union {
10959 rtx (*movmemsi) (rtx, rtx, rtx, rtx);
10960 rtx (*mov) (rtx, rtx);
10961 } gen_func;
10962 enum machine_mode mode = BLKmode;
10963 rtx src, dest;
10965 /* Altivec first, since it will be faster than a string move
10966 when it applies, and usually not significantly larger. */
10967 if (TARGET_ALTIVEC && bytes >= 16 && align >= 128)
10969 move_bytes = 16;
10970 mode = V4SImode;
10971 gen_func.mov = gen_movv4si;
10973 else if (TARGET_SPE && bytes >= 8 && align >= 64)
10975 move_bytes = 8;
10976 mode = V2SImode;
10977 gen_func.mov = gen_movv2si;
10979 else if (TARGET_STRING
10980 && bytes > 24 /* move up to 32 bytes at a time */
10981 && ! fixed_regs[5]
10982 && ! fixed_regs[6]
10983 && ! fixed_regs[7]
10984 && ! fixed_regs[8]
10985 && ! fixed_regs[9]
10986 && ! fixed_regs[10]
10987 && ! fixed_regs[11]
10988 && ! fixed_regs[12])
10990 move_bytes = (bytes > 32) ? 32 : bytes;
10991 gen_func.movmemsi = gen_movmemsi_8reg;
10993 else if (TARGET_STRING
10994 && bytes > 16 /* move up to 24 bytes at a time */
10995 && ! fixed_regs[5]
10996 && ! fixed_regs[6]
10997 && ! fixed_regs[7]
10998 && ! fixed_regs[8]
10999 && ! fixed_regs[9]
11000 && ! fixed_regs[10])
11002 move_bytes = (bytes > 24) ? 24 : bytes;
11003 gen_func.movmemsi = gen_movmemsi_6reg;
11005 else if (TARGET_STRING
11006 && bytes > 8 /* move up to 16 bytes at a time */
11007 && ! fixed_regs[5]
11008 && ! fixed_regs[6]
11009 && ! fixed_regs[7]
11010 && ! fixed_regs[8])
11012 move_bytes = (bytes > 16) ? 16 : bytes;
11013 gen_func.movmemsi = gen_movmemsi_4reg;
11015 else if (bytes >= 8 && TARGET_POWERPC64
11016 /* 64-bit loads and stores require word-aligned
11017 displacements. */
11018 && (align >= 64 || (!STRICT_ALIGNMENT && align >= 32)))
11020 move_bytes = 8;
11021 mode = DImode;
11022 gen_func.mov = gen_movdi;
11024 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
11025 { /* move up to 8 bytes at a time */
11026 move_bytes = (bytes > 8) ? 8 : bytes;
11027 gen_func.movmemsi = gen_movmemsi_2reg;
11029 else if (bytes >= 4 && (align >= 32 || !STRICT_ALIGNMENT))
11030 { /* move 4 bytes */
11031 move_bytes = 4;
11032 mode = SImode;
11033 gen_func.mov = gen_movsi;
11035 else if (bytes >= 2 && (align >= 16 || !STRICT_ALIGNMENT))
11036 { /* move 2 bytes */
11037 move_bytes = 2;
11038 mode = HImode;
11039 gen_func.mov = gen_movhi;
11041 else if (TARGET_STRING && bytes > 1)
11042 { /* move up to 4 bytes at a time */
11043 move_bytes = (bytes > 4) ? 4 : bytes;
11044 gen_func.movmemsi = gen_movmemsi_1reg;
11046 else /* move 1 byte at a time */
11048 move_bytes = 1;
11049 mode = QImode;
11050 gen_func.mov = gen_movqi;
11053 src = adjust_address (orig_src, mode, offset);
11054 dest = adjust_address (orig_dest, mode, offset);
11056 if (mode != BLKmode)
11058 rtx tmp_reg = gen_reg_rtx (mode);
11060 emit_insn ((*gen_func.mov) (tmp_reg, src));
11061 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
11064 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
11066 int i;
11067 for (i = 0; i < num_reg; i++)
11068 emit_insn (stores[i]);
11069 num_reg = 0;
11072 if (mode == BLKmode)
11074 /* Move the address into scratch registers. The movmemsi
11075 patterns require zero offset. */
11076 if (!REG_P (XEXP (src, 0)))
11078 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
11079 src = replace_equiv_address (src, src_reg);
11081 set_mem_size (src, GEN_INT (move_bytes));
11083 if (!REG_P (XEXP (dest, 0)))
11085 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
11086 dest = replace_equiv_address (dest, dest_reg);
11088 set_mem_size (dest, GEN_INT (move_bytes));
11090 emit_insn ((*gen_func.movmemsi) (dest, src,
11091 GEN_INT (move_bytes & 31),
11092 align_rtx));
11096 return 1;
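 /* Editorial trace (illustrative): with 4-byte alignment, no string
    insns and no AltiVec, a 10-byte move is split into two SImode chunks
    plus one HImode chunk; each load goes into a fresh pseudo immediately,
    while the matching stores are buffered in STORES[] and flushed every
    MAX_MOVE_REG chunks, so loads are emitted ahead of their stores.  */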
11100 /* Return a string to perform a load_multiple operation.
11101 operands[0] is the vector.
11102 operands[1] is the source address.
11103 operands[2] is the first destination register. */
11105 const char *
11106 rs6000_output_load_multiple (rtx operands[3])
11108 /* We have to handle the case where the pseudo used to contain the address
11109 is assigned to one of the output registers. */
11110 int i, j;
11111 int words = XVECLEN (operands[0], 0);
11112 rtx xop[10];
11114 if (XVECLEN (operands[0], 0) == 1)
11115 return "{l|lwz} %2,0(%1)";
11117 for (i = 0; i < words; i++)
11118 if (refers_to_regno_p (REGNO (operands[2]) + i,
11119 REGNO (operands[2]) + i + 1, operands[1], 0))
11121 if (i == words-1)
11123 xop[0] = GEN_INT (4 * (words-1));
11124 xop[1] = operands[1];
11125 xop[2] = operands[2];
11126 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
11127 return "";
11129 else if (i == 0)
11131 xop[0] = GEN_INT (4 * (words-1));
11132 xop[1] = operands[1];
11133 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
11134 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
11135 return "";
11137 else
11139 for (j = 0; j < words; j++)
11140 if (j != i)
11142 xop[0] = GEN_INT (j * 4);
11143 xop[1] = operands[1];
11144 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
11145 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
11147 xop[0] = GEN_INT (i * 4);
11148 xop[1] = operands[1];
11149 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
11150 return "";
11154 return "{lsi|lswi} %2,%1,%N0";
11158 /* A validation routine: say whether CODE, a condition code, and MODE
11159 match. The other alternatives either don't make sense or should
11160 never be generated. */
11162 void
11163 validate_condition_mode (enum rtx_code code, enum machine_mode mode)
11165 gcc_assert ((GET_RTX_CLASS (code) == RTX_COMPARE
11166 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
11167 && GET_MODE_CLASS (mode) == MODE_CC);
11169 /* These don't make sense. */
11170 gcc_assert ((code != GT && code != LT && code != GE && code != LE)
11171 || mode != CCUNSmode);
11173 gcc_assert ((code != GTU && code != LTU && code != GEU && code != LEU)
11174 || mode == CCUNSmode);
11176 gcc_assert (mode == CCFPmode
11177 || (code != ORDERED && code != UNORDERED
11178 && code != UNEQ && code != LTGT
11179 && code != UNGT && code != UNLT
11180 && code != UNGE && code != UNLE));
11182 /* These should never be generated except for
11183 flag_finite_math_only. */
11184 gcc_assert (mode != CCFPmode
11185 || flag_finite_math_only
11186 || (code != LE && code != GE
11187 && code != UNEQ && code != LTGT
11188 && code != UNGT && code != UNLT));
11190 /* These are invalid; the information is not there. */
11191 gcc_assert (mode != CCEQmode || code == EQ || code == NE);
11195 /* Return 1 if ANDOP is a mask with no bits set outside the mask
11196    required to convert the result of a rotate insn into a shift left
11197    insn of SHIFTOP bits.  Both are known to be SImode CONST_INTs.  */
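 /* Editorial example (illustrative): for SHIFTOP == 8, shift_mask is
    0xffffff00, so ANDOP == 0xffff0000 qualifies (no bits outside the
    mask) while ANDOP == 0x000000ff does not.  */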
11200 includes_lshift_p (rtx shiftop, rtx andop)
11202 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
11204 shift_mask <<= INTVAL (shiftop);
11206 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
11209 /* Similar, but for right shift. */
11212 includes_rshift_p (rtx shiftop, rtx andop)
11214 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
11216 shift_mask >>= INTVAL (shiftop);
11218 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
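 /* Editorial sketch (illustrative, not part of GCC): the intent of the
    two checks above, restated on plain 32-bit values for the SImode case
    the comments describe:

      #include <stdint.h>

      // AND_MASK may only keep bits that survive a left shift by SHIFT.
      static int lshift_mask_ok (uint32_t and_mask, int shift)
      {
        return (and_mask & ~(0xffffffffu << shift)) == 0;
      }

      // AND_MASK may only keep bits that survive a right shift by SHIFT.
      static int rshift_mask_ok (uint32_t and_mask, int shift)
      {
        return (and_mask & ~(0xffffffffu >> shift)) == 0;
      }

    e.g. lshift_mask_ok (0xffff0000u, 8) and rshift_mask_ok (0x0000ffffu, 8)
    both return 1.  */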
11221 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
11222 to perform a left shift. It must have exactly SHIFTOP least
11223 significant 0's, then one or more 1's, then zero or more 0's. */
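 /* Editorial example (illustrative): for SHIFTOP == 4, ANDOP ==
    0x00000ff0 fits the required shape -- exactly four trailing 0's, one
    block of 1's, then 0's -- while 0x00000f0f fails because its four low
    bits are not all zero.  */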
11226 includes_rldic_lshift_p (rtx shiftop, rtx andop)
11228 if (GET_CODE (andop) == CONST_INT)
11230 HOST_WIDE_INT c, lsb, shift_mask;
11232 c = INTVAL (andop);
11233 if (c == 0 || c == ~0)
11234 return 0;
11236 shift_mask = ~0;
11237 shift_mask <<= INTVAL (shiftop);
11239 /* Find the least significant one bit. */
11240 lsb = c & -c;
11242 /* It must coincide with the LSB of the shift mask. */
11243 if (-lsb != shift_mask)
11244 return 0;
11246 /* Invert to look for the next transition (if any). */
11247 c = ~c;
11249 /* Remove the low group of ones (originally low group of zeros). */
11250 c &= -lsb;
11252 /* Again find the lsb, and check we have all 1's above. */
11253 lsb = c & -c;
11254 return c == -lsb;
11256 else if (GET_CODE (andop) == CONST_DOUBLE
11257 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
11259 HOST_WIDE_INT low, high, lsb;
11260 HOST_WIDE_INT shift_mask_low, shift_mask_high;
11262 low = CONST_DOUBLE_LOW (andop);
11263 if (HOST_BITS_PER_WIDE_INT < 64)
11264 high = CONST_DOUBLE_HIGH (andop);
11266 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
11267 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
11268 return 0;
11270 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
11272 shift_mask_high = ~0;
11273 if (INTVAL (shiftop) > 32)
11274 shift_mask_high <<= INTVAL (shiftop) - 32;
11276 lsb = high & -high;
11278 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
11279 return 0;
11281 high = ~high;
11282 high &= -lsb;
11284 lsb = high & -high;
11285 return high == -lsb;
11288 shift_mask_low = ~0;
11289 shift_mask_low <<= INTVAL (shiftop);
11291 lsb = low & -low;
11293 if (-lsb != shift_mask_low)
11294 return 0;
11296 if (HOST_BITS_PER_WIDE_INT < 64)
11297 high = ~high;
11298 low = ~low;
11299 low &= -lsb;
11301 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
11303 lsb = high & -high;
11304 return high == -lsb;
11307 lsb = low & -low;
11308 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
11310 else
11311 return 0;
11314 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
11315 to perform a left shift. It must have SHIFTOP or more least
11316 significant 0's, with the remainder of the word 1's. */
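 /* Editorial example (illustrative): for SHIFTOP == 4, the DImode mask
    0xfffffffffffffff0 qualifies (four trailing 0's, all 1's above), as
    does 0xffffffffffffff00 (eight trailing 0's is "SHIFTOP or more");
    0x0000000000000ff0 fails because the bits above its block of 1's are
    not all 1's.  */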
11319 includes_rldicr_lshift_p (rtx shiftop, rtx andop)
11321 if (GET_CODE (andop) == CONST_INT)
11323 HOST_WIDE_INT c, lsb, shift_mask;
11325 shift_mask = ~0;
11326 shift_mask <<= INTVAL (shiftop);
11327 c = INTVAL (andop);
11329 /* Find the least significant one bit. */
11330 lsb = c & -c;
11332 /* It must be covered by the shift mask.
11333 This test also rejects c == 0. */
11334 if ((lsb & shift_mask) == 0)
11335 return 0;
11337 /* Check we have all 1's above the transition, and reject all 1's. */
11338 return c == -lsb && lsb != 1;
11340 else if (GET_CODE (andop) == CONST_DOUBLE
11341 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
11343 HOST_WIDE_INT low, lsb, shift_mask_low;
11345 low = CONST_DOUBLE_LOW (andop);
11347 if (HOST_BITS_PER_WIDE_INT < 64)
11349 HOST_WIDE_INT high, shift_mask_high;
11351 high = CONST_DOUBLE_HIGH (andop);
11353 if (low == 0)
11355 shift_mask_high = ~0;
11356 if (INTVAL (shiftop) > 32)
11357 shift_mask_high <<= INTVAL (shiftop) - 32;
11359 lsb = high & -high;
11361 if ((lsb & shift_mask_high) == 0)
11362 return 0;
11364 return high == -lsb;
11366 if (high != ~0)
11367 return 0;
11370 shift_mask_low = ~0;
11371 shift_mask_low <<= INTVAL (shiftop);
11373 lsb = low & -low;
11375 if ((lsb & shift_mask_low) == 0)
11376 return 0;
11378 return low == -lsb && lsb != 1;
11380 else
11381 return 0;
11384 /* Return 1 if the operands will generate valid arguments to an rlwimi
11385 instruction for an insert with right shift in 64-bit mode.  The mask may
11386 not start on the first bit or stop on the last bit because wrap-around
11387 effects of instruction do not correspond to semantics of RTL insn. */
11390 insvdi_rshift_rlwimi_p (rtx sizeop, rtx startop, rtx shiftop)
11392 if (INTVAL (startop) > 32
11393 && INTVAL (startop) < 64
11394 && INTVAL (sizeop) > 1
11395 && INTVAL (sizeop) + INTVAL (startop) < 64
11396 && INTVAL (shiftop) > 0
11397 && INTVAL (sizeop) + INTVAL (shiftop) < 32
11398 && (64 - (INTVAL (shiftop) & 63)) >= INTVAL (sizeop))
11399 return 1;
11401 return 0;
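/* Example parameters that pass every check above (illustrative):
   SIZE = 8, START = 40, SHIFT = 16.  The inserted field then lies
   entirely within the low 32-bit word, so the 64-bit insert-with-shift
   can be done with a 32-bit rlwimi.  */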
11404 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1, making them candidates
11405 for lfq and stfq insns iff the registers are hard registers. */
11408 registers_ok_for_quad_peep (rtx reg1, rtx reg2)
11410 /* We might have been passed a SUBREG. */
11411 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
11412 return 0;
11414 /* We might have been passed non floating point registers. */
11415 if (!FP_REGNO_P (REGNO (reg1))
11416 || !FP_REGNO_P (REGNO (reg2)))
11417 return 0;
11419 return (REGNO (reg1) == REGNO (reg2) - 1);
11422 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
11423 addr1 and addr2 must be in consecutive memory locations
11424 (addr2 == addr1 + 8). */
11427 mems_ok_for_quad_peep (rtx mem1, rtx mem2)
11429 rtx addr1, addr2;
11430 unsigned int reg1, reg2;
11431 int offset1, offset2;
11433 /* The mems cannot be volatile. */
11434 if (MEM_VOLATILE_P (mem1) || MEM_VOLATILE_P (mem2))
11435 return 0;
11437 addr1 = XEXP (mem1, 0);
11438 addr2 = XEXP (mem2, 0);
11440 /* Extract an offset (if used) from the first addr. */
11441 if (GET_CODE (addr1) == PLUS)
11443 /* If not a REG, return zero. */
11444 if (GET_CODE (XEXP (addr1, 0)) != REG)
11445 return 0;
11446 else
11448 reg1 = REGNO (XEXP (addr1, 0));
11449 /* The offset must be constant! */
11450 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
11451 return 0;
11452 offset1 = INTVAL (XEXP (addr1, 1));
11455 else if (GET_CODE (addr1) != REG)
11456 return 0;
11457 else
11459 reg1 = REGNO (addr1);
11460 /* This was a simple (mem (reg)) expression. Offset is 0. */
11461 offset1 = 0;
11464 /* And now for the second addr. */
11465 if (GET_CODE (addr2) == PLUS)
11467 /* If not a REG, return zero. */
11468 if (GET_CODE (XEXP (addr2, 0)) != REG)
11469 return 0;
11470 else
11472 reg2 = REGNO (XEXP (addr2, 0));
11473 /* The offset must be constant. */
11474 if (GET_CODE (XEXP (addr2, 1)) != CONST_INT)
11475 return 0;
11476 offset2 = INTVAL (XEXP (addr2, 1));
11479 else if (GET_CODE (addr2) != REG)
11480 return 0;
11481 else
11483 reg2 = REGNO (addr2);
11484 /* This was a simple (mem (reg)) expression. Offset is 0. */
11485 offset2 = 0;
11488 /* Both of these must have the same base register. */
11489 if (reg1 != reg2)
11490 return 0;
11492 /* The offset for the second addr must be 8 more than the first addr. */
11493 if (offset2 != offset1 + 8)
11494 return 0;
11496 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
11497 instructions. */
11498 return 1;
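/* Illustrative example (register numbers invented): the peephole pairs
   accesses such as

     (mem:DF (plus:SI (reg:SI 9) (const_int 16)))   -> lfd 30,16(9)
     (mem:DF (plus:SI (reg:SI 9) (const_int 24)))   -> lfd 31,24(9)

   where both share base register r9 and offset2 == offset1 + 8, so the
   pair can be replaced by a single lfq.  */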
11503 rs6000_secondary_memory_needed_rtx (enum machine_mode mode)
11505 static bool eliminated = false;
11506 if (mode != SDmode)
11507 return assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
11508 else
11510 rtx mem = cfun->machine->sdmode_stack_slot;
11511 gcc_assert (mem != NULL_RTX);
11513 if (!eliminated)
11515 mem = eliminate_regs (mem, VOIDmode, NULL_RTX);
11516 cfun->machine->sdmode_stack_slot = mem;
11517 eliminated = true;
11519 return mem;
11523 static tree
11524 rs6000_check_sdmode (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11526 /* Don't walk into types. */
11527 if (*tp == NULL_TREE || *tp == error_mark_node || TYPE_P (*tp))
11529 *walk_subtrees = 0;
11530 return NULL_TREE;
11533 switch (TREE_CODE (*tp))
11535 case VAR_DECL:
11536 case PARM_DECL:
11537 case FIELD_DECL:
11538 case RESULT_DECL:
11539 case REAL_CST:
11540 case INDIRECT_REF:
11541 case ALIGN_INDIRECT_REF:
11542 case MISALIGNED_INDIRECT_REF:
11543 case VIEW_CONVERT_EXPR:
11544 if (TYPE_MODE (TREE_TYPE (*tp)) == SDmode)
11545 return *tp;
11546 break;
11547 default:
11548 break;
11551 return NULL_TREE;
11555 /* Allocate a 64-bit stack slot to be used for copying SDmode
11556 values through if this function has any SDmode references. */
11558 static void
11559 rs6000_alloc_sdmode_stack_slot (void)
11561 tree t;
11562 basic_block bb;
11563 gimple_stmt_iterator gsi;
11565 gcc_assert (cfun->machine->sdmode_stack_slot == NULL_RTX);
11567 FOR_EACH_BB (bb)
11568 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
11570 tree ret = walk_gimple_op (gsi_stmt (gsi), rs6000_check_sdmode, NULL);
11571 if (ret)
11573 rtx stack = assign_stack_local (DDmode, GET_MODE_SIZE (DDmode), 0);
11574 cfun->machine->sdmode_stack_slot = adjust_address_nv (stack,
11575 SDmode, 0);
11576 return;
11580 /* Check for any SDmode parameters of the function. */
11581 for (t = DECL_ARGUMENTS (cfun->decl); t; t = TREE_CHAIN (t))
11583 if (TREE_TYPE (t) == error_mark_node)
11584 continue;
11586 if (TYPE_MODE (TREE_TYPE (t)) == SDmode
11587 || TYPE_MODE (DECL_ARG_TYPE (t)) == SDmode)
11589 rtx stack = assign_stack_local (DDmode, GET_MODE_SIZE (DDmode), 0);
11590 cfun->machine->sdmode_stack_slot = adjust_address_nv (stack,
11591 SDmode, 0);
11592 return;
11597 static void
11598 rs6000_instantiate_decls (void)
11600 if (cfun->machine->sdmode_stack_slot != NULL_RTX)
11601 instantiate_decl_rtl (cfun->machine->sdmode_stack_slot);
11604 /* Return the register class of a scratch register needed to copy IN into
11605 or out of a register in RCLASS in MODE. If it can be done directly,
11606 NO_REGS is returned. */
11608 enum reg_class
11609 rs6000_secondary_reload_class (enum reg_class rclass,
11610 enum machine_mode mode,
11611 rtx in)
11613 int regno;
11615 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
11616 #if TARGET_MACHO
11617 && MACHOPIC_INDIRECT
11618 #endif
11621 /* We cannot copy a symbolic operand directly into anything
11622 other than BASE_REGS for TARGET_ELF. So indicate that a
11623 register from BASE_REGS is needed as an intermediate
11624 register.
11626 On Darwin, pic addresses require a load from memory, which
11627 needs a base register. */
11628 if (rclass != BASE_REGS
11629 && (GET_CODE (in) == SYMBOL_REF
11630 || GET_CODE (in) == HIGH
11631 || GET_CODE (in) == LABEL_REF
11632 || GET_CODE (in) == CONST))
11633 return BASE_REGS;
11636 if (GET_CODE (in) == REG)
11638 regno = REGNO (in);
11639 if (regno >= FIRST_PSEUDO_REGISTER)
11641 regno = true_regnum (in);
11642 if (regno >= FIRST_PSEUDO_REGISTER)
11643 regno = -1;
11646 else if (GET_CODE (in) == SUBREG)
11648 regno = true_regnum (in);
11649 if (regno >= FIRST_PSEUDO_REGISTER)
11650 regno = -1;
11652 else
11653 regno = -1;
11655 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
11656 into anything. */
11657 if (rclass == GENERAL_REGS || rclass == BASE_REGS
11658 || (regno >= 0 && INT_REGNO_P (regno)))
11659 return NO_REGS;
11661 /* Constants, memory, and FP registers can go into FP registers. */
11662 if ((regno == -1 || FP_REGNO_P (regno))
11663 && (rclass == FLOAT_REGS || rclass == NON_SPECIAL_REGS))
11664 return (mode != SDmode) ? NO_REGS : GENERAL_REGS;
11666 /* Memory, and AltiVec registers can go into AltiVec registers. */
11667 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
11668 && rclass == ALTIVEC_REGS)
11669 return NO_REGS;
11671 /* We can copy among the CR registers. */
11672 if ((rclass == CR_REGS || rclass == CR0_REGS)
11673 && regno >= 0 && CR_REGNO_P (regno))
11674 return NO_REGS;
11676 /* Otherwise, we need GENERAL_REGS. */
11677 return GENERAL_REGS;
11680 /* Given a comparison operation, return the bit number in CCR to test. We
11681 know this is a valid comparison.
11683 SCC_P is 1 if this is for an scc. That means that %D will have been
11684 used instead of %C, so the bits will be in different places.
11686 Return -1 if OP isn't a valid comparison for some reason. */
11689 ccr_bit (rtx op, int scc_p)
11691 enum rtx_code code = GET_CODE (op);
11692 enum machine_mode cc_mode;
11693 int cc_regnum;
11694 int base_bit;
11695 rtx reg;
11697 if (!COMPARISON_P (op))
11698 return -1;
11700 reg = XEXP (op, 0);
11702 gcc_assert (GET_CODE (reg) == REG && CR_REGNO_P (REGNO (reg)));
11704 cc_mode = GET_MODE (reg);
11705 cc_regnum = REGNO (reg);
11706 base_bit = 4 * (cc_regnum - CR0_REGNO);
11708 validate_condition_mode (code, cc_mode);
11710 /* When generating an sCOND operation, only positive conditions are
11711 allowed. */
11712 gcc_assert (!scc_p
11713 || code == EQ || code == GT || code == LT || code == UNORDERED
11714 || code == GTU || code == LTU);
11716 switch (code)
11718 case NE:
11719 return scc_p ? base_bit + 3 : base_bit + 2;
11720 case EQ:
11721 return base_bit + 2;
11722 case GT: case GTU: case UNLE:
11723 return base_bit + 1;
11724 case LT: case LTU: case UNGE:
11725 return base_bit;
11726 case ORDERED: case UNORDERED:
11727 return base_bit + 3;
11729 case GE: case GEU:
11730 /* If scc, we will have done a cror to put the bit in the
11731 unordered position. So test that bit. For integer, this is ! LT
11732 unless this is an scc insn. */
11733 return scc_p ? base_bit + 3 : base_bit;
11735 case LE: case LEU:
11736 return scc_p ? base_bit + 3 : base_bit + 1;
11738 default:
11739 gcc_unreachable ();
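/* Worked example (illustrative): for a comparison held in cr2, base_bit
   is 8.  EQ then tests bit 10 (base_bit + 2), LT bit 8, GT bit 9, and an
   scc-style GE -- whose result was cror'ed into the unordered slot --
   bit 11 (base_bit + 3).  */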
11743 /* Return the GOT register. */
11746 rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
11748 /* The second flow pass currently (June 1999) can't update
11749 regs_ever_live without disturbing other parts of the compiler, so
11750 update it here to make the prolog/epilogue code happy. */
11751 if (!can_create_pseudo_p ()
11752 && !df_regs_ever_live_p (RS6000_PIC_OFFSET_TABLE_REGNUM))
11753 df_set_regs_ever_live (RS6000_PIC_OFFSET_TABLE_REGNUM, true);
11755 crtl->uses_pic_offset_table = 1;
11757 return pic_offset_table_rtx;
11760 /* Function to init struct machine_function.
11761 This will be called, via a pointer variable,
11762 from push_function_context. */
11764 static struct machine_function *
11765 rs6000_init_machine_status (void)
11767 return GGC_CNEW (machine_function);
11770 /* These macros test for integers and extract the low-order bits. */
11771 #define INT_P(X) \
11772 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
11773 && GET_MODE (X) == VOIDmode)
11775 #define INT_LOWPART(X) \
11776 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
11779 extract_MB (rtx op)
11781 int i;
11782 unsigned long val = INT_LOWPART (op);
11784 /* If the high bit is zero, the value is the first 1 bit we find
11785 from the left. */
11786 if ((val & 0x80000000) == 0)
11788 gcc_assert (val & 0xffffffff);
11790 i = 1;
11791 while (((val <<= 1) & 0x80000000) == 0)
11792 ++i;
11793 return i;
11796 /* If the high bit is set and the low bit is not, or the mask is all
11797 1's, the value is zero. */
11798 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
11799 return 0;
11801 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
11802 from the right. */
11803 i = 31;
11804 while (((val >>= 1) & 1) != 0)
11805 --i;
11807 return i;
11811 extract_ME (rtx op)
11813 int i;
11814 unsigned long val = INT_LOWPART (op);
11816 /* If the low bit is zero, the value is the first 1 bit we find from
11817 the right. */
11818 if ((val & 1) == 0)
11820 gcc_assert (val & 0xffffffff);
11822 i = 30;
11823 while (((val >>= 1) & 1) == 0)
11824 --i;
11826 return i;
11829 /* If the low bit is set and the high bit is not, or the mask is all
11830 1's, the value is 31. */
11831 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
11832 return 31;
11834 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
11835 from the left. */
11836 i = 0;
11837 while (((val <<= 1) & 0x80000000) != 0)
11838 ++i;
11840 return i;
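/* Worked examples (illustrative): for the mask 0x0ffffff0 (1's in
   big-endian bits 4..27), extract_MB returns 4 and extract_ME returns
   27.  For the wrap-around mask 0xf00000ff (1's in bits 24..31 wrapping
   around to 0..3), extract_MB returns 24 and extract_ME returns 3.  */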
11843 /* Locate some local-dynamic symbol still in use by this function
11844 so that we can print its name in some tls_ld pattern. */
11846 static const char *
11847 rs6000_get_some_local_dynamic_name (void)
11849 rtx insn;
11851 if (cfun->machine->some_ld_name)
11852 return cfun->machine->some_ld_name;
11854 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
11855 if (INSN_P (insn)
11856 && for_each_rtx (&PATTERN (insn),
11857 rs6000_get_some_local_dynamic_name_1, 0))
11858 return cfun->machine->some_ld_name;
11860 gcc_unreachable ();
11863 /* Helper function for rs6000_get_some_local_dynamic_name. */
11865 static int
11866 rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
11868 rtx x = *px;
11870 if (GET_CODE (x) == SYMBOL_REF)
11872 const char *str = XSTR (x, 0);
11873 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
11875 cfun->machine->some_ld_name = str;
11876 return 1;
11880 return 0;
11883 /* Write out a function code label. */
11885 void
11886 rs6000_output_function_entry (FILE *file, const char *fname)
11888 if (fname[0] != '.')
11890 switch (DEFAULT_ABI)
11892 default:
11893 gcc_unreachable ();
11895 case ABI_AIX:
11896 if (DOT_SYMBOLS)
11897 putc ('.', file);
11898 else
11899 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "L.");
11900 break;
11902 case ABI_V4:
11903 case ABI_DARWIN:
11904 break;
11907 if (TARGET_AIX)
11908 RS6000_OUTPUT_BASENAME (file, fname);
11909 else
11910 assemble_name (file, fname);
11913 /* Print an operand. Recognize special options, documented below. */
11915 #if TARGET_ELF
11916 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
11917 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
11918 #else
11919 #define SMALL_DATA_RELOC "sda21"
11920 #define SMALL_DATA_REG 0
11921 #endif
11923 void
11924 print_operand (FILE *file, rtx x, int code)
11926 int i;
11927 HOST_WIDE_INT val;
11928 unsigned HOST_WIDE_INT uval;
11930 switch (code)
11932 case '.':
11933 /* Write out an instruction after the call which may be replaced
11934 with glue code by the loader. This depends on the AIX version. */
11935 asm_fprintf (file, RS6000_CALL_GLUE);
11936 return;
11938 /* %a is output_address. */
11940 case 'A':
11941 /* If X is a constant integer whose low-order 5 bits are zero,
11942 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
11943 in the AIX assembler where "sri" with a zero shift count
11944 writes a trash instruction. */
11945 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
11946 putc ('l', file);
11947 else
11948 putc ('r', file);
11949 return;
11951 case 'b':
11952 /* If constant, low-order 16 bits of constant, unsigned.
11953 Otherwise, write normally. */
11954 if (INT_P (x))
11955 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
11956 else
11957 print_operand (file, x, 0);
11958 return;
11960 case 'B':
11961 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
11962 for 64-bit mask direction. */
11963 putc (((INT_LOWPART (x) & 1) == 0 ? 'r' : 'l'), file);
11964 return;
11966 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
11967 output_operand. */
11969 case 'c':
11970 /* X is a CR register. Print the number of the GT bit of the CR. */
11971 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11972 output_operand_lossage ("invalid %%c value");
11973 else
11974 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 1);
11975 return;
11977 case 'D':
11978 /* Like 'J' but get to the GT bit only. */
11979 gcc_assert (GET_CODE (x) == REG);
11981 /* Bit 1 is GT bit. */
11982 i = 4 * (REGNO (x) - CR0_REGNO) + 1;
11984 /* Add one for shift count in rlinm for scc. */
11985 fprintf (file, "%d", i + 1);
11986 return;
11988 case 'E':
11989 /* X is a CR register. Print the number of the EQ bit of the CR */
11990 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
11991 output_operand_lossage ("invalid %%E value");
11992 else
11993 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
11994 return;
11996 case 'f':
11997 /* X is a CR register. Print the shift count needed to move it
11998 to the high-order four bits. */
11999 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
12000 output_operand_lossage ("invalid %%f value");
12001 else
12002 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
12003 return;
12005 case 'F':
12006 /* Similar, but print the count for the rotate in the opposite
12007 direction. */
12008 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
12009 output_operand_lossage ("invalid %%F value");
12010 else
12011 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
12012 return;
12014 case 'G':
12015 /* X is a constant integer. If it is negative, print "m",
12016 otherwise print "z". This is to make an aze or ame insn. */
12017 if (GET_CODE (x) != CONST_INT)
12018 output_operand_lossage ("invalid %%G value");
12019 else if (INTVAL (x) >= 0)
12020 putc ('z', file);
12021 else
12022 putc ('m', file);
12023 return;
12025 case 'h':
12026 /* If constant, output low-order five bits. Otherwise, write
12027 normally. */
12028 if (INT_P (x))
12029 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
12030 else
12031 print_operand (file, x, 0);
12032 return;
12034 case 'H':
12035 /* If constant, output low-order six bits. Otherwise, write
12036 normally. */
12037 if (INT_P (x))
12038 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
12039 else
12040 print_operand (file, x, 0);
12041 return;
12043 case 'I':
12044 /* Print `i' if this is a constant, else nothing. */
12045 if (INT_P (x))
12046 putc ('i', file);
12047 return;
12049 case 'j':
12050 /* Write the bit number in CCR for jump. */
12051 i = ccr_bit (x, 0);
12052 if (i == -1)
12053 output_operand_lossage ("invalid %%j code");
12054 else
12055 fprintf (file, "%d", i);
12056 return;
12058 case 'J':
12059 /* Similar, but add one for shift count in rlinm for scc and pass
12060 scc flag to `ccr_bit'. */
12061 i = ccr_bit (x, 1);
12062 if (i == -1)
12063 output_operand_lossage ("invalid %%J code");
12064 else
12065 /* If we want bit 31, write a shift count of zero, not 32. */
12066 fprintf (file, "%d", i == 31 ? 0 : i + 1);
12067 return;
12069 case 'k':
12070 /* X must be a constant. Write the 1's complement of the
12071 constant. */
12072 if (! INT_P (x))
12073 output_operand_lossage ("invalid %%k value");
12074 else
12075 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
12076 return;
12078 case 'K':
12079 /* X must be a symbolic constant on ELF. Write an
12080 expression suitable for an 'addi' that adds in the low 16
12081 bits of the MEM. */
12082 if (GET_CODE (x) != CONST)
12084 print_operand_address (file, x);
12085 fputs ("@l", file);
12087 else
12089 if (GET_CODE (XEXP (x, 0)) != PLUS
12090 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
12091 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
12092 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
12093 output_operand_lossage ("invalid %%K value");
12094 print_operand_address (file, XEXP (XEXP (x, 0), 0));
12095 fputs ("@l", file);
12096 /* For GNU as, there must be a non-alphanumeric character
12097 between 'l' and the number. The '-' is added by
12098 print_operand() already. */
12099 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
12100 fputs ("+", file);
12101 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
12103 return;
12105 /* %l is output_asm_label. */
12107 case 'L':
12108 /* Write second word of DImode or DFmode reference. Works on register
12109 or non-indexed memory only. */
12110 if (GET_CODE (x) == REG)
12111 fputs (reg_names[REGNO (x) + 1], file);
12112 else if (GET_CODE (x) == MEM)
12114 /* Handle possible auto-increment. Since it is pre-increment and
12115 we have already done it, we can just use an offset of word. */
12116 if (GET_CODE (XEXP (x, 0)) == PRE_INC
12117 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
12118 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
12119 UNITS_PER_WORD));
12120 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12121 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
12122 UNITS_PER_WORD));
12123 else
12124 output_address (XEXP (adjust_address_nv (x, SImode,
12125 UNITS_PER_WORD),
12126 0));
12128 if (small_data_operand (x, GET_MODE (x)))
12129 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12130 reg_names[SMALL_DATA_REG]);
12132 return;
12134 case 'm':
12135 /* MB value for a mask operand. */
12136 if (! mask_operand (x, SImode))
12137 output_operand_lossage ("invalid %%m value");
12139 fprintf (file, "%d", extract_MB (x));
12140 return;
12142 case 'M':
12143 /* ME value for a mask operand. */
12144 if (! mask_operand (x, SImode))
12145 output_operand_lossage ("invalid %%M value");
12147 fprintf (file, "%d", extract_ME (x));
12148 return;
12150 /* %n outputs the negative of its operand. */
12152 case 'N':
12153 /* Write the number of elements in the vector times 4. */
12154 if (GET_CODE (x) != PARALLEL)
12155 output_operand_lossage ("invalid %%N value");
12156 else
12157 fprintf (file, "%d", XVECLEN (x, 0) * 4);
12158 return;
12160 case 'O':
12161 /* Similar, but subtract 1 first. */
12162 if (GET_CODE (x) != PARALLEL)
12163 output_operand_lossage ("invalid %%O value");
12164 else
12165 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
12166 return;
12168 case 'p':
12169 /* X is a CONST_INT that is a power of two. Output the logarithm. */
12170 if (! INT_P (x)
12171 || INT_LOWPART (x) < 0
12172 || (i = exact_log2 (INT_LOWPART (x))) < 0)
12173 output_operand_lossage ("invalid %%p value");
12174 else
12175 fprintf (file, "%d", i);
12176 return;
12178 case 'P':
12179 /* The operand must be an indirect memory reference. The result
12180 is the register name. */
12181 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
12182 || REGNO (XEXP (x, 0)) >= 32)
12183 output_operand_lossage ("invalid %%P value");
12184 else
12185 fputs (reg_names[REGNO (XEXP (x, 0))], file);
12186 return;
12188 case 'q':
12189 /* This outputs the logical code corresponding to a boolean
12190 expression. The expression may have one or both operands
12191 negated (if one, only the first one). For condition register
12192 logical operations, it will also treat the negated
12193 CR codes as NOTs, but not handle NOTs of them. */
12195 const char *const *t = 0;
12196 const char *s;
12197 enum rtx_code code = GET_CODE (x);
12198 static const char * const tbl[3][3] = {
12199 { "and", "andc", "nor" },
12200 { "or", "orc", "nand" },
12201 { "xor", "eqv", "xor" } };
12203 if (code == AND)
12204 t = tbl[0];
12205 else if (code == IOR)
12206 t = tbl[1];
12207 else if (code == XOR)
12208 t = tbl[2];
12209 else
12210 output_operand_lossage ("invalid %%q value");
12212 if (GET_CODE (XEXP (x, 0)) != NOT)
12213 s = t[0];
12214 else
12216 if (GET_CODE (XEXP (x, 1)) == NOT)
12217 s = t[2];
12218 else
12219 s = t[1];
12222 fputs (s, file);
12224 return;
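/* Example (illustrative): for x = (and (not a) b) the table selects
   "andc"; for (and (not a) (not b)) it selects "nor", since by
   De Morgan's law ~a & ~b == ~(a | b).  */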
12226 case 'Q':
12227 if (TARGET_MFCRF)
12228 fputc (',', file);
12229 /* FALLTHRU to 'R' -- print the mtcrf field mask after the comma.  */
12230 else
12231 return;
12233 case 'R':
12234 /* X is a CR register. Print the mask for `mtcrf'. */
12235 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
12236 output_operand_lossage ("invalid %%R value");
12237 else
12238 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
12239 return;
12241 case 's':
12242 /* Low 5 bits of 32 - value */
12243 if (! INT_P (x))
12244 output_operand_lossage ("invalid %%s value");
12245 else
12246 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
12247 return;
12249 case 'S':
12250 /* PowerPC64 mask position. All 0's is excluded.
12251 CONST_INT 32-bit mask is considered sign-extended so any
12252 transition must occur within the CONST_INT, not on the boundary. */
12253 if (! mask64_operand (x, DImode))
12254 output_operand_lossage ("invalid %%S value");
12256 uval = INT_LOWPART (x);
12258 if (uval & 1) /* Clear Left */
12260 #if HOST_BITS_PER_WIDE_INT > 64
12261 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
12262 #endif
12263 i = 64;
12265 else /* Clear Right */
12267 uval = ~uval;
12268 #if HOST_BITS_PER_WIDE_INT > 64
12269 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
12270 #endif
12271 i = 63;
12273 while (uval != 0)
12274 --i, uval >>= 1;
12275 gcc_assert (i >= 0);
12276 fprintf (file, "%d", i);
12277 return;
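/* Worked example (illustrative): the "clear left" mask
   0x00000000ffffffff yields 32 (an rldicl MB value), while the "clear
   right" mask 0xffffffff00000000 yields 31 (an rldicr ME value).  */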
12279 case 't':
12280 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
12281 gcc_assert (GET_CODE (x) == REG && GET_MODE (x) == CCmode);
12283 /* Bit 3 is OV bit. */
12284 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
12286 /* If we want bit 31, write a shift count of zero, not 32. */
12287 fprintf (file, "%d", i == 31 ? 0 : i + 1);
12288 return;
12290 case 'T':
12291 /* Print the symbolic name of a branch target register. */
12292 if (GET_CODE (x) != REG || (REGNO (x) != LR_REGNO
12293 && REGNO (x) != CTR_REGNO))
12294 output_operand_lossage ("invalid %%T value");
12295 else if (REGNO (x) == LR_REGNO)
12296 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
12297 else
12298 fputs ("ctr", file);
12299 return;
12301 case 'u':
12302 /* High-order 16 bits of constant for use in unsigned operand. */
12303 if (! INT_P (x))
12304 output_operand_lossage ("invalid %%u value");
12305 else
12306 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
12307 (INT_LOWPART (x) >> 16) & 0xffff);
12308 return;
12310 case 'v':
12311 /* High-order 16 bits of constant for use in signed operand. */
12312 if (! INT_P (x))
12313 output_operand_lossage ("invalid %%v value");
12314 else
12315 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
12316 (INT_LOWPART (x) >> 16) & 0xffff);
12317 return;
12319 case 'U':
12320 /* Print `u' if this has an auto-increment or auto-decrement. */
12321 if (GET_CODE (x) == MEM
12322 && (GET_CODE (XEXP (x, 0)) == PRE_INC
12323 || GET_CODE (XEXP (x, 0)) == PRE_DEC
12324 || GET_CODE (XEXP (x, 0)) == PRE_MODIFY))
12325 putc ('u', file);
12326 return;
12328 case 'V':
12329 /* Print the trap code for this operand. */
12330 switch (GET_CODE (x))
12332 case EQ:
12333 fputs ("eq", file); /* 4 */
12334 break;
12335 case NE:
12336 fputs ("ne", file); /* 24 */
12337 break;
12338 case LT:
12339 fputs ("lt", file); /* 16 */
12340 break;
12341 case LE:
12342 fputs ("le", file); /* 20 */
12343 break;
12344 case GT:
12345 fputs ("gt", file); /* 8 */
12346 break;
12347 case GE:
12348 fputs ("ge", file); /* 12 */
12349 break;
12350 case LTU:
12351 fputs ("llt", file); /* 2 */
12352 break;
12353 case LEU:
12354 fputs ("lle", file); /* 6 */
12355 break;
12356 case GTU:
12357 fputs ("lgt", file); /* 1 */
12358 break;
12359 case GEU:
12360 fputs ("lge", file); /* 5 */
12361 break;
12362 default:
12363 gcc_unreachable ();
12365 break;
12367 case 'w':
12368 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
12369 normally. */
12370 if (INT_P (x))
12371 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
12372 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
12373 else
12374 print_operand (file, x, 0);
12375 return;
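/* The ((v & 0xffff) ^ 0x8000) - 0x8000 idiom above sign-extends the
   low 16 bits: 0xfffc prints as -4, while 0x7ffc stays 0x7ffc.  */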
12377 case 'W':
12378 /* MB value for a PowerPC64 rldic operand. */
12379 val = (GET_CODE (x) == CONST_INT
12380 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
12382 if (val < 0)
12383 i = -1;
12384 else
12385 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
12386 if ((val <<= 1) < 0)
12387 break;
12389 #if HOST_BITS_PER_WIDE_INT == 32
12390 if (GET_CODE (x) == CONST_INT && i >= 0)
12391 i += 32; /* zero-extend high-part was all 0's */
12392 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
12394 val = CONST_DOUBLE_LOW (x);
12396 gcc_assert (val);
12397 if (val < 0)
12398 --i;
12399 else
12400 for ( ; i < 64; i++)
12401 if ((val <<= 1) < 0)
12402 break;
12404 #endif
12406 fprintf (file, "%d", i + 1);
12407 return;
12409 case 'X':
12410 if (GET_CODE (x) == MEM
12411 && (legitimate_indexed_address_p (XEXP (x, 0), 0)
12412 || (GET_CODE (XEXP (x, 0)) == PRE_MODIFY
12413 && legitimate_indexed_address_p (XEXP (XEXP (x, 0), 1), 0))))
12414 putc ('x', file);
12415 return;
12417 case 'Y':
12418 /* Like 'L', for third word of TImode */
12419 if (GET_CODE (x) == REG)
12420 fputs (reg_names[REGNO (x) + 2], file);
12421 else if (GET_CODE (x) == MEM)
12423 if (GET_CODE (XEXP (x, 0)) == PRE_INC
12424 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
12425 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
12426 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12427 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
12428 else
12429 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
12430 if (small_data_operand (x, GET_MODE (x)))
12431 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12432 reg_names[SMALL_DATA_REG]);
12434 return;
12436 case 'z':
12437 /* X is a SYMBOL_REF. Write out the name preceded by a
12438 period and without any trailing data in brackets. Used for function
12439 names. If we are configured for System V (or the embedded ABI) on
12440 the PowerPC, do not emit the period, since those systems do not use
12441 TOCs and the like. */
12442 gcc_assert (GET_CODE (x) == SYMBOL_REF);
12444 /* Mark the decl as referenced so that cgraph will output the
12445 function. */
12446 if (SYMBOL_REF_DECL (x))
12447 mark_decl_referenced (SYMBOL_REF_DECL (x));
12449 /* For macho, check to see if we need a stub. */
12450 if (TARGET_MACHO)
12452 const char *name = XSTR (x, 0);
12453 #if TARGET_MACHO
12454 if (MACHOPIC_INDIRECT
12455 && machopic_classify_symbol (x) == MACHOPIC_UNDEFINED_FUNCTION)
12456 name = machopic_indirection_name (x, /*stub_p=*/true);
12457 #endif
12458 assemble_name (file, name);
12460 else if (!DOT_SYMBOLS)
12461 assemble_name (file, XSTR (x, 0));
12462 else
12463 rs6000_output_function_entry (file, XSTR (x, 0));
12464 return;
12466 case 'Z':
12467 /* Like 'L', for last word of TImode. */
12468 if (GET_CODE (x) == REG)
12469 fputs (reg_names[REGNO (x) + 3], file);
12470 else if (GET_CODE (x) == MEM)
12472 if (GET_CODE (XEXP (x, 0)) == PRE_INC
12473 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
12474 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
12475 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12476 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
12477 else
12478 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
12479 if (small_data_operand (x, GET_MODE (x)))
12480 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12481 reg_names[SMALL_DATA_REG]);
12483 return;
12485 /* Print AltiVec or SPE memory operand. */
12486 case 'y':
12488 rtx tmp;
12490 gcc_assert (GET_CODE (x) == MEM);
12492 tmp = XEXP (x, 0);
12494 /* Ugly hack because %y is overloaded. */
12495 if ((TARGET_SPE || TARGET_E500_DOUBLE)
12496 && (GET_MODE_SIZE (GET_MODE (x)) == 8
12497 || GET_MODE (x) == TFmode
12498 || GET_MODE (x) == TImode))
12500 /* Handle [reg]. */
12501 if (GET_CODE (tmp) == REG)
12503 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
12504 break;
12506 /* Handle [reg+UIMM]. */
12507 else if (GET_CODE (tmp) == PLUS &&
12508 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
12510 int x;
12512 gcc_assert (GET_CODE (XEXP (tmp, 0)) == REG);
12514 x = INTVAL (XEXP (tmp, 1));
12515 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
12516 break;
12519 /* Fall through. Must be [reg+reg]. */
12521 if (TARGET_ALTIVEC
12522 && GET_CODE (tmp) == AND
12523 && GET_CODE (XEXP (tmp, 1)) == CONST_INT
12524 && INTVAL (XEXP (tmp, 1)) == -16)
12525 tmp = XEXP (tmp, 0);
12526 if (GET_CODE (tmp) == REG)
12527 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
12528 else
12530 if (GET_CODE (tmp) != PLUS
12531 || !REG_P (XEXP (tmp, 0))
12532 || !REG_P (XEXP (tmp, 1)))
12534 output_operand_lossage ("invalid %%y value, try using the 'Z' constraint");
12535 break;
12538 if (REGNO (XEXP (tmp, 0)) == 0)
12539 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
12540 reg_names[ REGNO (XEXP (tmp, 0)) ]);
12541 else
12542 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
12543 reg_names[ REGNO (XEXP (tmp, 1)) ]);
12545 break;
12548 case 0:
12549 if (GET_CODE (x) == REG)
12550 fprintf (file, "%s", reg_names[REGNO (x)]);
12551 else if (GET_CODE (x) == MEM)
12553 /* We need to handle PRE_INC and PRE_DEC here, since we need to
12554 know the width from the mode. */
12555 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
12556 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
12557 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
12558 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
12559 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
12560 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
12561 else if (GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
12562 output_address (XEXP (XEXP (x, 0), 1));
12563 else
12564 output_address (XEXP (x, 0));
12566 else
12567 output_addr_const (file, x);
12568 return;
12570 case '&':
12571 assemble_name (file, rs6000_get_some_local_dynamic_name ());
12572 return;
12574 default:
12575 output_operand_lossage ("invalid %%xn code");
12579 /* Print the address of an operand. */
12581 void
12582 print_operand_address (FILE *file, rtx x)
12584 if (GET_CODE (x) == REG)
12585 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
12586 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
12587 || GET_CODE (x) == LABEL_REF)
12589 output_addr_const (file, x);
12590 if (small_data_operand (x, GET_MODE (x)))
12591 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
12592 reg_names[SMALL_DATA_REG]);
12593 else
12594 gcc_assert (!TARGET_TOC);
12596 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
12598 gcc_assert (REG_P (XEXP (x, 0)));
12599 if (REGNO (XEXP (x, 0)) == 0)
12600 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
12601 reg_names[ REGNO (XEXP (x, 0)) ]);
12602 else
12603 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
12604 reg_names[ REGNO (XEXP (x, 1)) ]);
12606 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
12607 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
12608 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
12609 #if TARGET_ELF
12610 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
12611 && CONSTANT_P (XEXP (x, 1)))
12613 output_addr_const (file, XEXP (x, 1));
12614 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
12616 #endif
12617 #if TARGET_MACHO
12618 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
12619 && CONSTANT_P (XEXP (x, 1)))
12621 fprintf (file, "lo16(");
12622 output_addr_const (file, XEXP (x, 1));
12623 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
12625 #endif
12626 else if (legitimate_constant_pool_address_p (x))
12628 output_addr_const (file, XEXP (x, 1));
12629 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
12631 else
12632 gcc_unreachable ();
12635 /* Implement OUTPUT_ADDR_CONST_EXTRA for address X. */
12637 bool
12638 rs6000_output_addr_const_extra (FILE *file, rtx x)
12640 if (GET_CODE (x) == UNSPEC)
12641 switch (XINT (x, 1))
12643 case UNSPEC_TOCREL:
12644 x = XVECEXP (x, 0, 0);
12645 gcc_assert (GET_CODE (x) == SYMBOL_REF);
12646 output_addr_const (file, x);
12647 if (!TARGET_AIX || (TARGET_ELF && TARGET_MINIMAL_TOC))
12649 putc ('-', file);
12650 assemble_name (file, toc_label_name);
12652 else if (TARGET_ELF)
12653 fputs ("@toc", file);
12654 return true;
12656 #if TARGET_MACHO
12657 case UNSPEC_MACHOPIC_OFFSET:
12658 output_addr_const (file, XVECEXP (x, 0, 0));
12659 putc ('-', file);
12660 machopic_output_function_base_name (file);
12661 return true;
12662 #endif
12664 return false;
12667 /* Target hook for assembling integer objects. The PowerPC version has
12668 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
12669 is defined. It also needs to handle DI-mode objects on 64-bit
12670 targets. */
12672 static bool
12673 rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
12675 #ifdef RELOCATABLE_NEEDS_FIXUP
12676 /* Special handling for SI values. */
12677 if (RELOCATABLE_NEEDS_FIXUP && size == 4 && aligned_p)
12679 static int recurse = 0;
12681 /* For -mrelocatable, we mark all addresses that need to be fixed up
12682 in the .fixup section. */
12683 if (TARGET_RELOCATABLE
12684 && in_section != toc_section
12685 && in_section != text_section
12686 && !unlikely_text_section_p (in_section)
12687 && !recurse
12688 && GET_CODE (x) != CONST_INT
12689 && GET_CODE (x) != CONST_DOUBLE
12690 && CONSTANT_P (x))
12692 char buf[256];
12694 recurse = 1;
12695 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
12696 fixuplabelno++;
12697 ASM_OUTPUT_LABEL (asm_out_file, buf);
12698 fprintf (asm_out_file, "\t.long\t(");
12699 output_addr_const (asm_out_file, x);
12700 fprintf (asm_out_file, ")@fixup\n");
12701 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
12702 ASM_OUTPUT_ALIGN (asm_out_file, 2);
12703 fprintf (asm_out_file, "\t.long\t");
12704 assemble_name (asm_out_file, buf);
12705 fprintf (asm_out_file, "\n\t.previous\n");
12706 recurse = 0;
12707 return true;
12709 /* Remove initial .'s to turn a -mcall-aixdesc function
12710 address into the address of the descriptor, not the function
12711 itself. */
12712 else if (GET_CODE (x) == SYMBOL_REF
12713 && XSTR (x, 0)[0] == '.'
12714 && DEFAULT_ABI == ABI_AIX)
12716 const char *name = XSTR (x, 0);
12717 while (*name == '.')
12718 name++;
12720 fprintf (asm_out_file, "\t.long\t%s\n", name);
12721 return true;
12724 #endif /* RELOCATABLE_NEEDS_FIXUP */
12725 return default_assemble_integer (x, size, aligned_p);
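/* For reference (illustrative; exact label names vary), with
   -mrelocatable the code above turns ".long sym" into

     .LCP0:
	     .long	(sym)@fixup
	     .section	".fixup","aw"
	     .align	2
	     .long	.LCP0
	     .previous

   so the word at .LCP0 can be relocated at run time.  */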
12728 #ifdef HAVE_GAS_HIDDEN
12729 /* Emit an assembler directive to set symbol visibility for DECL to
12730 VISIBILITY_TYPE. */
12732 static void
12733 rs6000_assemble_visibility (tree decl, int vis)
12735 /* Functions need to have their entry point symbol visibility set as
12736 well as their descriptor symbol visibility. */
12737 if (DEFAULT_ABI == ABI_AIX
12738 && DOT_SYMBOLS
12739 && TREE_CODE (decl) == FUNCTION_DECL)
12741 static const char * const visibility_types[] = {
12742 NULL, "internal", "hidden", "protected"
12745 const char *name, *type;
12747 name = ((* targetm.strip_name_encoding)
12748 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
12749 type = visibility_types[vis];
12751 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
12752 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
12754 else
12755 default_assemble_visibility (decl, vis);
12757 #endif
12759 enum rtx_code
12760 rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
12762 /* Reversal of FP compares takes care -- an ordered compare
12763 becomes an unordered compare and vice versa. */
12764 if (mode == CCFPmode
12765 && (!flag_finite_math_only
12766 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
12767 || code == UNEQ || code == LTGT))
12768 return reverse_condition_maybe_unordered (code);
12769 else
12770 return reverse_condition (code);
12773 /* Generate a compare for CODE. Return a brand-new rtx that
12774 represents the result of the compare. */
12776 static rtx
12777 rs6000_generate_compare (enum rtx_code code)
12779 enum machine_mode comp_mode;
12780 rtx compare_result;
12782 if (rs6000_compare_fp_p)
12783 comp_mode = CCFPmode;
12784 else if (code == GTU || code == LTU
12785 || code == GEU || code == LEU)
12786 comp_mode = CCUNSmode;
12787 else if ((code == EQ || code == NE)
12788 && GET_CODE (rs6000_compare_op0) == SUBREG
12789 && GET_CODE (rs6000_compare_op1) == SUBREG
12790 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op0)
12791 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op1))
12792 /* These are unsigned values, perhaps there will be a later
12793 ordering compare that can be shared with this one.
12794 Unfortunately we cannot detect the signedness of the operands
12795 for non-subregs. */
12796 comp_mode = CCUNSmode;
12797 else
12798 comp_mode = CCmode;
12800 /* First, the compare. */
12801 compare_result = gen_reg_rtx (comp_mode);
12803 /* E500 FP compare instructions on the GPRs. Yuck! */
12804 if ((!TARGET_FPRS && TARGET_HARD_FLOAT)
12805 && rs6000_compare_fp_p)
12807 rtx cmp, or_result, compare_result2;
12808 enum machine_mode op_mode = GET_MODE (rs6000_compare_op0);
12810 if (op_mode == VOIDmode)
12811 op_mode = GET_MODE (rs6000_compare_op1);
12813 /* The E500 FP compare instructions toggle the GT bit (CR bit 1) only.
12814 This explains the following mess. */
12816 switch (code)
12818 case EQ: case UNEQ: case NE: case LTGT:
12819 switch (op_mode)
12821 case SFmode:
12822 cmp = (flag_finite_math_only && !flag_trapping_math)
12823 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
12824 rs6000_compare_op1)
12825 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
12826 rs6000_compare_op1);
12827 break;
12829 case DFmode:
12830 cmp = (flag_finite_math_only && !flag_trapping_math)
12831 ? gen_tstdfeq_gpr (compare_result, rs6000_compare_op0,
12832 rs6000_compare_op1)
12833 : gen_cmpdfeq_gpr (compare_result, rs6000_compare_op0,
12834 rs6000_compare_op1);
12835 break;
12837 case TFmode:
12838 cmp = (flag_finite_math_only && !flag_trapping_math)
12839 ? gen_tsttfeq_gpr (compare_result, rs6000_compare_op0,
12840 rs6000_compare_op1)
12841 : gen_cmptfeq_gpr (compare_result, rs6000_compare_op0,
12842 rs6000_compare_op1);
12843 break;
12845 default:
12846 gcc_unreachable ();
12848 break;
12850 case GT: case GTU: case UNGT: case UNGE: case GE: case GEU:
12851 switch (op_mode)
12853 case SFmode:
12854 cmp = (flag_finite_math_only && !flag_trapping_math)
12855 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
12856 rs6000_compare_op1)
12857 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
12858 rs6000_compare_op1);
12859 break;
12861 case DFmode:
12862 cmp = (flag_finite_math_only && !flag_trapping_math)
12863 ? gen_tstdfgt_gpr (compare_result, rs6000_compare_op0,
12864 rs6000_compare_op1)
12865 : gen_cmpdfgt_gpr (compare_result, rs6000_compare_op0,
12866 rs6000_compare_op1);
12867 break;
12869 case TFmode:
12870 cmp = (flag_finite_math_only && !flag_trapping_math)
12871 ? gen_tsttfgt_gpr (compare_result, rs6000_compare_op0,
12872 rs6000_compare_op1)
12873 : gen_cmptfgt_gpr (compare_result, rs6000_compare_op0,
12874 rs6000_compare_op1);
12875 break;
12877 default:
12878 gcc_unreachable ();
12880 break;
12882 case LT: case LTU: case UNLT: case UNLE: case LE: case LEU:
12883 switch (op_mode)
12885 case SFmode:
12886 cmp = (flag_finite_math_only && !flag_trapping_math)
12887 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
12888 rs6000_compare_op1)
12889 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
12890 rs6000_compare_op1);
12891 break;
12893 case DFmode:
12894 cmp = (flag_finite_math_only && !flag_trapping_math)
12895 ? gen_tstdflt_gpr (compare_result, rs6000_compare_op0,
12896 rs6000_compare_op1)
12897 : gen_cmpdflt_gpr (compare_result, rs6000_compare_op0,
12898 rs6000_compare_op1);
12899 break;
12901 case TFmode:
12902 cmp = (flag_finite_math_only && !flag_trapping_math)
12903 ? gen_tsttflt_gpr (compare_result, rs6000_compare_op0,
12904 rs6000_compare_op1)
12905 : gen_cmptflt_gpr (compare_result, rs6000_compare_op0,
12906 rs6000_compare_op1);
12907 break;
12909 default:
12910 gcc_unreachable ();
12912 break;
12913 default:
12914 gcc_unreachable ();
12917 /* Synthesize LE and GE from LT/GT || EQ. */
12918 if (code == LE || code == GE || code == LEU || code == GEU)
12920 emit_insn (cmp);
12922 switch (code)
12924 case LE: code = LT; break;
12925 case GE: code = GT; break;
12926 case LEU: code = LT; break;
12927 case GEU: code = GT; break;
12928 default: gcc_unreachable ();
12931 compare_result2 = gen_reg_rtx (CCFPmode);
12933 /* Do the EQ. */
12934 switch (op_mode)
12936 case SFmode:
12937 cmp = (flag_finite_math_only && !flag_trapping_math)
12938 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
12939 rs6000_compare_op1)
12940 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
12941 rs6000_compare_op1);
12942 break;
12944 case DFmode:
12945 cmp = (flag_finite_math_only && !flag_trapping_math)
12946 ? gen_tstdfeq_gpr (compare_result2, rs6000_compare_op0,
12947 rs6000_compare_op1)
12948 : gen_cmpdfeq_gpr (compare_result2, rs6000_compare_op0,
12949 rs6000_compare_op1);
12950 break;
12952 case TFmode:
12953 cmp = (flag_finite_math_only && !flag_trapping_math)
12954 ? gen_tsttfeq_gpr (compare_result2, rs6000_compare_op0,
12955 rs6000_compare_op1)
12956 : gen_cmptfeq_gpr (compare_result2, rs6000_compare_op0,
12957 rs6000_compare_op1);
12958 break;
12960 default:
12961 gcc_unreachable ();
12963 emit_insn (cmp);
12965 /* OR them together. */
12966 or_result = gen_reg_rtx (CCFPmode);
12967 cmp = gen_e500_cr_ior_compare (or_result, compare_result,
12968 compare_result2);
12969 compare_result = or_result;
12970 code = EQ;
12972 else
12974 if (code == NE || code == LTGT)
12975 code = NE;
12976 else
12977 code = EQ;
12980 emit_insn (cmp);
12982 else
12984 /* Generate XLC-compatible TFmode compare as PARALLEL with extra
12985 CLOBBERs to match cmptf_internal2 pattern. */
12986 if (comp_mode == CCFPmode && TARGET_XL_COMPAT
12987 && GET_MODE (rs6000_compare_op0) == TFmode
12988 && !TARGET_IEEEQUAD
12989 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128)
12990 emit_insn (gen_rtx_PARALLEL (VOIDmode,
12991 gen_rtvec (9,
12992 gen_rtx_SET (VOIDmode,
12993 compare_result,
12994 gen_rtx_COMPARE (comp_mode,
12995 rs6000_compare_op0,
12996 rs6000_compare_op1)),
12997 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12998 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
12999 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
13000 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
13001 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
13002 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
13003 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
13004 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)))));
13005 else if (GET_CODE (rs6000_compare_op1) == UNSPEC
13006 && XINT (rs6000_compare_op1, 1) == UNSPEC_SP_TEST)
13008 rtx op1 = XVECEXP (rs6000_compare_op1, 0, 0);
13009 comp_mode = CCEQmode;
13010 compare_result = gen_reg_rtx (CCEQmode);
13011 if (TARGET_64BIT)
13012 emit_insn (gen_stack_protect_testdi (compare_result,
13013 rs6000_compare_op0, op1));
13014 else
13015 emit_insn (gen_stack_protect_testsi (compare_result,
13016 rs6000_compare_op0, op1));
13018 else
13019 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
13020 gen_rtx_COMPARE (comp_mode,
13021 rs6000_compare_op0,
13022 rs6000_compare_op1)));
13025 /* Some kinds of FP comparisons need an OR operation;
13026 under flag_finite_math_only we don't bother. */
13027 if (rs6000_compare_fp_p
13028 && !flag_finite_math_only
13029 && !(TARGET_HARD_FLOAT && !TARGET_FPRS)
13030 && (code == LE || code == GE
13031 || code == UNEQ || code == LTGT
13032 || code == UNGT || code == UNLT))
13034 enum rtx_code or1, or2;
13035 rtx or1_rtx, or2_rtx, compare2_rtx;
13036 rtx or_result = gen_reg_rtx (CCEQmode);
13038 switch (code)
13040 case LE: or1 = LT; or2 = EQ; break;
13041 case GE: or1 = GT; or2 = EQ; break;
13042 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
13043 case LTGT: or1 = LT; or2 = GT; break;
13044 case UNGT: or1 = UNORDERED; or2 = GT; break;
13045 case UNLT: or1 = UNORDERED; or2 = LT; break;
13046 default: gcc_unreachable ();
13048 validate_condition_mode (or1, comp_mode);
13049 validate_condition_mode (or2, comp_mode);
13050 or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
13051 or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
13052 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
13053 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
13054 const_true_rtx);
13055 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
13057 compare_result = or_result;
13058 code = EQ;
13061 validate_condition_mode (code, GET_MODE (compare_result));
13063 return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
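/* Illustrative walk-through: for a floating-point "a <= b" the block
   above emits one fcmpu and then a cror combining the LT and EQ bits
   into a CCEQ result, so the eventual branch only has to test a single
   CR bit.  */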
13067 /* Emit the RTL for an sCOND pattern. */
13069 void
13070 rs6000_emit_sCOND (enum rtx_code code, rtx result)
13072 rtx condition_rtx;
13073 enum machine_mode op_mode;
13074 enum rtx_code cond_code;
13076 condition_rtx = rs6000_generate_compare (code);
13077 cond_code = GET_CODE (condition_rtx);
13079 if (rs6000_compare_fp_p
13080 && !TARGET_FPRS && TARGET_HARD_FLOAT)
13082 rtx t;
13084 PUT_MODE (condition_rtx, SImode);
13085 t = XEXP (condition_rtx, 0);
13087 gcc_assert (cond_code == NE || cond_code == EQ);
13089 if (cond_code == NE)
13090 emit_insn (gen_e500_flip_gt_bit (t, t));
13092 emit_insn (gen_move_from_CR_gt_bit (result, t));
13093 return;
13096 if (cond_code == NE
13097 || cond_code == GE || cond_code == LE
13098 || cond_code == GEU || cond_code == LEU
13099 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
13101 rtx not_result = gen_reg_rtx (CCEQmode);
13102 rtx not_op, rev_cond_rtx;
13103 enum machine_mode cc_mode;
13105 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
13107 rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode, cond_code),
13108 SImode, XEXP (condition_rtx, 0), const0_rtx);
13109 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
13110 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
13111 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
13114 op_mode = GET_MODE (rs6000_compare_op0);
13115 if (op_mode == VOIDmode)
13116 op_mode = GET_MODE (rs6000_compare_op1);
13118 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
13120 PUT_MODE (condition_rtx, DImode);
13121 convert_move (result, condition_rtx, 0);
13123 else
13125 PUT_MODE (condition_rtx, SImode);
13126 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
13130 /* Emit a branch of kind CODE to location LOC. */
13132 void
13133 rs6000_emit_cbranch (enum rtx_code code, rtx loc)
13135 rtx condition_rtx, loc_ref;
13137 condition_rtx = rs6000_generate_compare (code);
13138 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
13139 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
13140 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
13141 loc_ref, pc_rtx)));
13144 /* Return the string to output a conditional branch to LABEL, which is
13145 the assembler string for the target label, or NULL if the branch is
13146 really a conditional return.
13148 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
13149 condition code register and its mode specifies what kind of
13150 comparison we made.
13152 REVERSED is nonzero if we should reverse the sense of the comparison.
13154 INSN is the insn. */
13156 char *
13157 output_cbranch (rtx op, const char *label, int reversed, rtx insn)
13159 static char string[64];
13160 enum rtx_code code = GET_CODE (op);
13161 rtx cc_reg = XEXP (op, 0);
13162 enum machine_mode mode = GET_MODE (cc_reg);
13163 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
13164 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
13165 int really_reversed = reversed ^ need_longbranch;
13166 char *s = string;
13167 const char *ccode;
13168 const char *pred;
13169 rtx note;
13171 validate_condition_mode (code, mode);
13173 /* Work out which way this really branches.  We could always use
13174 reverse_condition_maybe_unordered here, but distinguishing the FP
13175 case makes the resulting assembler clearer.  */
13176 if (really_reversed)
13178 /* Reversal of FP compares takes care -- an ordered compare
13179 becomes an unordered compare and vice versa. */
13180 if (mode == CCFPmode)
13181 code = reverse_condition_maybe_unordered (code);
13182 else
13183 code = reverse_condition (code);
13186 if ((!TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
13188 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
13189 to the GT bit. */
13190 switch (code)
13192 case EQ:
13193 /* Opposite of GT. */
13194 code = GT;
13195 break;
13197 case NE:
13198 code = UNLE;
13199 break;
13201 default:
13202 gcc_unreachable ();
13206 switch (code)
13208 /* Not all of these are actually distinct opcodes, but
13209 we distinguish them for clarity of the resulting assembler. */
13210 case NE: case LTGT:
13211 ccode = "ne"; break;
13212 case EQ: case UNEQ:
13213 ccode = "eq"; break;
13214 case GE: case GEU:
13215 ccode = "ge"; break;
13216 case GT: case GTU: case UNGT:
13217 ccode = "gt"; break;
13218 case LE: case LEU:
13219 ccode = "le"; break;
13220 case LT: case LTU: case UNLT:
13221 ccode = "lt"; break;
13222 case UNORDERED: ccode = "un"; break;
13223 case ORDERED: ccode = "nu"; break;
13224 case UNGE: ccode = "nl"; break;
13225 case UNLE: ccode = "ng"; break;
13226 default:
13227 gcc_unreachable ();
13230 /* Maybe we have a guess as to how likely the branch is.
13231 The old mnemonics don't have a way to specify this information. */
13232 pred = "";
13233 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
13234 if (note != NULL_RTX)
13236 /* PROB is the difference from 50%. */
13237 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
13239 /* Only hint for highly probable/improbable branches on newer
13240 cpus as static prediction overrides processor dynamic
13241 prediction. For older cpus we may as well always hint, but
13242 assume not taken for branches that are very close to 50% as a
13243 mispredicted taken branch is more expensive than a
13244 mispredicted not-taken branch. */
13245 if (rs6000_always_hint
13246 || (abs (prob) > REG_BR_PROB_BASE / 100 * 48
13247 && br_prob_note_reliable_p (note)))
13249 if (abs (prob) > REG_BR_PROB_BASE / 20
13250 && ((prob > 0) ^ need_longbranch))
13251 pred = "+";
13252 else
13253 pred = "-";
13257 if (label == NULL)
13258 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
13259 else
13260 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
13262 /* We need to escape any '%' characters in the reg_names string.
13263 Assume they'd only be the first character.... */
13264 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
13265 *s++ = '%';
13266 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
13268 if (label != NULL)
13270 /* If the branch distance was too far, we may have to use an
13271 unconditional branch to go the distance. */
13272 if (need_longbranch)
13273 s += sprintf (s, ",$+8\n\tb %s", label);
13274 else
13275 s += sprintf (s, ",%s", label);
13278 return string;
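/* Illustrative outputs (exact register spelling depends on the
   configuration): a short predicted-taken branch comes out as
   "beq+ cr0,L9"; when the target is out of range the condition is
   reversed and an unconditional hop is emitted instead, as in
   "bne cr0,$+8" followed by "b L9"; a NULL LABEL yields the
   conditional-return form "beqlr".  */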
13281 /* Return the string to flip the GT bit on a CR. */
13282 char *
13283 output_e500_flip_gt_bit (rtx dst, rtx src)
13285 static char string[64];
13286 int a, b;
13288 gcc_assert (GET_CODE (dst) == REG && CR_REGNO_P (REGNO (dst))
13289 && GET_CODE (src) == REG && CR_REGNO_P (REGNO (src)));
13291 /* GT bit. */
13292 a = 4 * (REGNO (dst) - CR0_REGNO) + 1;
13293 b = 4 * (REGNO (src) - CR0_REGNO) + 1;
13295 sprintf (string, "crnot %d,%d", a, b);
13296 return string;
13299 /* Return the insn index of the vector compare instruction for the given
13300 CODE, DEST_MODE and OP_MODE.  Return INSN_NOT_AVAILABLE if no valid
13301 insn exists.  */
13303 static int
13304 get_vec_cmp_insn (enum rtx_code code,
13305 enum machine_mode dest_mode,
13306 enum machine_mode op_mode)
13308 if (!TARGET_ALTIVEC)
13309 return INSN_NOT_AVAILABLE;
13311 switch (code)
13313 case EQ:
13314 if (dest_mode == V16QImode && op_mode == V16QImode)
13315 return UNSPEC_VCMPEQUB;
13316 if (dest_mode == V8HImode && op_mode == V8HImode)
13317 return UNSPEC_VCMPEQUH;
13318 if (dest_mode == V4SImode && op_mode == V4SImode)
13319 return UNSPEC_VCMPEQUW;
13320 if (dest_mode == V4SImode && op_mode == V4SFmode)
13321 return UNSPEC_VCMPEQFP;
13322 break;
13323 case GE:
13324 if (dest_mode == V4SImode && op_mode == V4SFmode)
13325 return UNSPEC_VCMPGEFP;
break;	/* No single vcmpge insn exists for integer modes; without this
	   break an integer GE would wrongly fall through and use a GT
	   compare.  The caller synthesizes integer GE from GT and EQ.  */
13326 case GT:
13327 if (dest_mode == V16QImode && op_mode == V16QImode)
13328 return UNSPEC_VCMPGTSB;
13329 if (dest_mode == V8HImode && op_mode == V8HImode)
13330 return UNSPEC_VCMPGTSH;
13331 if (dest_mode == V4SImode && op_mode == V4SImode)
13332 return UNSPEC_VCMPGTSW;
13333 if (dest_mode == V4SImode && op_mode == V4SFmode)
13334 return UNSPEC_VCMPGTFP;
13335 break;
13336 case GTU:
13337 if (dest_mode == V16QImode && op_mode == V16QImode)
13338 return UNSPEC_VCMPGTUB;
13339 if (dest_mode == V8HImode && op_mode == V8HImode)
13340 return UNSPEC_VCMPGTUH;
13341 if (dest_mode == V4SImode && op_mode == V4SImode)
13342 return UNSPEC_VCMPGTUW;
13343 break;
13344 default:
13345 break;
13347 return INSN_NOT_AVAILABLE;
13350 /* Emit vector compare for operands OP0 and OP1 using code RCODE.
13351 DMODE is expected destination mode. This is a recursive function. */
13353 static rtx
13354 rs6000_emit_vector_compare (enum rtx_code rcode,
13355 rtx op0, rtx op1,
13356 enum machine_mode dmode)
13358 int vec_cmp_insn;
13359 rtx mask;
13360 enum machine_mode dest_mode;
13361 enum machine_mode op_mode = GET_MODE (op1);
13363 gcc_assert (TARGET_ALTIVEC);
13364 gcc_assert (GET_MODE (op0) == GET_MODE (op1));
13366 /* Floating point vector compare instructions use V4SImode as the
13367 destination; move the result to the requested mode afterwards.  */
13368 if (dmode == V4SFmode)
13369 dest_mode = V4SImode;
13370 else
13371 dest_mode = dmode;
13373 mask = gen_reg_rtx (dest_mode);
13374 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
13376 if (vec_cmp_insn == INSN_NOT_AVAILABLE)
13378 bool swap_operands = false;
13379 bool try_again = false;
13380 switch (rcode)
13382 case LT:
13383 rcode = GT;
13384 swap_operands = true;
13385 try_again = true;
13386 break;
13387 case LTU:
13388 rcode = GTU;
13389 swap_operands = true;
13390 try_again = true;
13391 break;
13392 case NE:
13393 case UNLE:
13394 case UNLT:
13395 case UNGE:
13396 case UNGT:
13397 /* Invert the condition and try again;
13398 e.g., A != B becomes ~(A == B). */
13400 enum rtx_code rev_code;
13401 enum insn_code nor_code;
13402 rtx eq_rtx;
13404 rev_code = reverse_condition_maybe_unordered (rcode);
13405 eq_rtx = rs6000_emit_vector_compare (rev_code, op0, op1,
13406 dest_mode);
13408 nor_code = optab_handler (one_cmpl_optab, (int)dest_mode)->insn_code;
13409 gcc_assert (nor_code != CODE_FOR_nothing);
13410 emit_insn (GEN_FCN (nor_code) (mask, eq_rtx));
13412 if (dmode != dest_mode)
13414 rtx temp = gen_reg_rtx (dest_mode);
13415 convert_move (temp, mask, 0);
13416 return temp;
13418 return mask;
13420 break;
13421 case GE:
13422 case GEU:
13423 case LE:
13424 case LEU:
13425 /* Try GT/GTU/LT/LTU OR EQ */
13427 rtx c_rtx, eq_rtx;
13428 enum insn_code ior_code;
13429 enum rtx_code new_code;
13431 switch (rcode)
13433 case GE:
13434 new_code = GT;
13435 break;
13437 case GEU:
13438 new_code = GTU;
13439 break;
13441 case LE:
13442 new_code = LT;
13443 break;
13445 case LEU:
13446 new_code = LTU;
13447 break;
13449 default:
13450 gcc_unreachable ();
13453 c_rtx = rs6000_emit_vector_compare (new_code,
13454 op0, op1, dest_mode);
13455 eq_rtx = rs6000_emit_vector_compare (EQ, op0, op1,
13456 dest_mode);
13458 ior_code = optab_handler (ior_optab, (int)dest_mode)->insn_code;
13459 gcc_assert (ior_code != CODE_FOR_nothing);
13460 emit_insn (GEN_FCN (ior_code) (mask, c_rtx, eq_rtx));
13461 if (dmode != dest_mode)
13463 rtx temp = gen_reg_rtx (dest_mode);
13464 convert_move (temp, mask, 0);
13465 return temp;
13467 return mask;
13469 break;
13470 default:
13471 gcc_unreachable ();
13474 if (try_again)
13476 vec_cmp_insn = get_vec_cmp_insn (rcode, dest_mode, op_mode);
13477 /* You only get two chances. */
13478 gcc_assert (vec_cmp_insn != INSN_NOT_AVAILABLE);
13481 if (swap_operands)
13483 rtx tmp;
13484 tmp = op0;
13485 op0 = op1;
13486 op1 = tmp;
13490 emit_insn (gen_rtx_SET (VOIDmode, mask,
13491 gen_rtx_UNSPEC (dest_mode,
13492 gen_rtvec (2, op0, op1),
13493 vec_cmp_insn)));
13494 if (dmode != dest_mode)
13496 rtx temp = gen_reg_rtx (dest_mode);
13497 convert_move (temp, mask, 0);
13498 return temp;
13500 return mask;
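/* Illustrative sketch (not from the GCC sources): the fallback paths
   above work because the compare results are lane-wide booleans, so NE
   is the one's complement of EQ, and GE/GEU/LE/LEU are the OR of the
   strict compare with EQ.  The GE case over precomputed lane masks;
   the sketch_* name is invented for this note.  */
#if 0
static void
sketch_lane_ge (unsigned int dst[4],
                const unsigned int gt[4], const unsigned int eq[4])
{
  int i;
  for (i = 0; i < 4; i++)
    dst[i] = gt[i] | eq[i];   /* a >= b  <=>  a > b || a == b */
}
#endif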
13503 /* Return the vector select instruction for MODE. Return
13504 INSN_NOT_AVAILABLE if no valid insn exists for the given mode. */
13506 static int
13507 get_vsel_insn (enum machine_mode mode)
13509 switch (mode)
13511 case V4SImode:
13512 return UNSPEC_VSEL4SI;
13513 break;
13514 case V4SFmode:
13515 return UNSPEC_VSEL4SF;
13516 break;
13517 case V8HImode:
13518 return UNSPEC_VSEL8HI;
13519 break;
13520 case V16QImode:
13521 return UNSPEC_VSEL16QI;
13522 break;
13523 default:
13524 return INSN_NOT_AVAILABLE;
13525 break;
13527 return INSN_NOT_AVAILABLE;
13530 /* Emit a vector select insn, selecting between operands OP1 and OP2
13531 under MASK, with DEST as the destination. */
13533 static void
13534 rs6000_emit_vector_select (rtx dest, rtx op1, rtx op2, rtx mask)
13536 rtx t, temp;
13537 enum machine_mode dest_mode = GET_MODE (dest);
13538 int vsel_insn_index = get_vsel_insn (GET_MODE (dest));
13540 temp = gen_reg_rtx (dest_mode);
13542 /* For each vector element, select op1 when the corresponding mask
13543 element is all ones, otherwise select op2. */
13544 t = gen_rtx_SET (VOIDmode, temp,
13545 gen_rtx_UNSPEC (dest_mode,
13546 gen_rtvec (3, op2, op1, mask),
13547 vsel_insn_index));
13548 emit_insn (t);
13549 emit_move_insn (dest, temp);
13550 return;
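/* Illustrative sketch (not from the GCC sources): vsel is a pure
   bit-wise select, dst = (opA & ~mask) | (opB & mask), which is why the
   rtvec above lists op2 first: mask bits of 1 pick op1 and bits of 0
   pick op2.  Word-sized model; the sketch_* name is invented.  */
#if 0
static unsigned int
sketch_vsel (unsigned int op2, unsigned int op1, unsigned int mask)
{
  return (op2 & ~mask) | (op1 & mask);
}
#endif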
13553 /* Emit vector conditional expression.
13554 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
13555 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
13558 rs6000_emit_vector_cond_expr (rtx dest, rtx op1, rtx op2,
13559 rtx cond, rtx cc_op0, rtx cc_op1)
13561 enum machine_mode dest_mode = GET_MODE (dest);
13562 enum rtx_code rcode = GET_CODE (cond);
13563 rtx mask;
13565 if (!TARGET_ALTIVEC)
13566 return 0;
13568 /* Get the vector mask for the given relational operation. */
13569 mask = rs6000_emit_vector_compare (rcode, cc_op0, cc_op1, dest_mode);
13571 rs6000_emit_vector_select (dest, op1, op2, mask);
13573 return 1;
13576 /* Emit a conditional move: move TRUE_COND to DEST if OP applied to the
13577 operands of the last comparison is nonzero/true, FALSE_COND if it
13578 is zero/false. Return 0 if the hardware has no such operation. */
13581 rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
13583 enum rtx_code code = GET_CODE (op);
13584 rtx op0 = rs6000_compare_op0;
13585 rtx op1 = rs6000_compare_op1;
13586 REAL_VALUE_TYPE c1;
13587 enum machine_mode compare_mode = GET_MODE (op0);
13588 enum machine_mode result_mode = GET_MODE (dest);
13589 rtx temp;
13590 bool is_against_zero;
13592 /* These modes should always match. */
13593 if (GET_MODE (op1) != compare_mode
13594 /* In the isel case, however, we can use a compare immediate, so
13595 op1 may be a small constant. */
13596 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
13597 return 0;
13598 if (GET_MODE (true_cond) != result_mode)
13599 return 0;
13600 if (GET_MODE (false_cond) != result_mode)
13601 return 0;
13603 /* First, work out if the hardware can do this at all, or
13604 if it's too slow.... */
13605 if (! rs6000_compare_fp_p)
13607 if (TARGET_ISEL)
13608 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
13609 return 0;
13611 else if (TARGET_HARD_FLOAT && !TARGET_FPRS
13612 && SCALAR_FLOAT_MODE_P (compare_mode))
13613 return 0;
13615 is_against_zero = op1 == CONST0_RTX (compare_mode);
13617 /* A floating-point subtract might overflow, underflow, or produce
13618 an inexact result, thus changing the floating-point flags, so it
13619 can't be generated if we care about that. It's safe if one side
13620 of the construct is zero, since then no subtract will be
13621 generated. */
13622 if (SCALAR_FLOAT_MODE_P (compare_mode)
13623 && flag_trapping_math && ! is_against_zero)
13624 return 0;
13626 /* Eliminate half of the comparisons by switching operands; this
13627 makes the remaining code simpler. */
13628 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
13629 || code == LTGT || code == LT || code == UNLE)
13631 code = reverse_condition_maybe_unordered (code);
13632 temp = true_cond;
13633 true_cond = false_cond;
13634 false_cond = temp;
13637 /* UNEQ and LTGT take four instructions for a comparison with zero,
13638 so it'll probably be faster to use a branch here too. */
13639 if (code == UNEQ && HONOR_NANS (compare_mode))
13640 return 0;
13642 if (GET_CODE (op1) == CONST_DOUBLE)
13643 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
13645 /* We're going to try to implement comparisons by performing
13646 a subtract, then comparing against zero. Unfortunately,
13647 Inf - Inf is NaN which is not zero, and so if we don't
13648 know that the operand is finite and the comparison
13649 would treat EQ differently from UNORDERED, we can't do it. */
13650 if (HONOR_INFINITIES (compare_mode)
13651 && code != GT && code != UNGE
13652 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
13653 /* Constructs of the form (a OP b ? a : b) are safe. */
13654 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
13655 || (! rtx_equal_p (op0, true_cond)
13656 && ! rtx_equal_p (op1, true_cond))))
13657 return 0;
13659 /* At this point we know we can use fsel. */
13661 /* Reduce the comparison to a comparison against zero. */
13662 if (! is_against_zero)
13664 temp = gen_reg_rtx (compare_mode);
13665 emit_insn (gen_rtx_SET (VOIDmode, temp,
13666 gen_rtx_MINUS (compare_mode, op0, op1)));
13667 op0 = temp;
13668 op1 = CONST0_RTX (compare_mode);
13671 /* If we don't care about NaNs we can reduce some of the comparisons
13672 down to faster ones. */
13673 if (! HONOR_NANS (compare_mode))
13674 switch (code)
13676 case GT:
13677 code = LE;
13678 temp = true_cond;
13679 true_cond = false_cond;
13680 false_cond = temp;
13681 break;
13682 case UNGE:
13683 code = GE;
13684 break;
13685 case UNEQ:
13686 code = EQ;
13687 break;
13688 default:
13689 break;
13692 /* Now, reduce everything down to a GE. */
13693 switch (code)
13695 case GE:
13696 break;
13698 case LE:
13699 temp = gen_reg_rtx (compare_mode);
13700 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
13701 op0 = temp;
13702 break;
13704 case ORDERED:
13705 temp = gen_reg_rtx (compare_mode);
13706 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
13707 op0 = temp;
13708 break;
13710 case EQ:
13711 temp = gen_reg_rtx (compare_mode);
13712 emit_insn (gen_rtx_SET (VOIDmode, temp,
13713 gen_rtx_NEG (compare_mode,
13714 gen_rtx_ABS (compare_mode, op0))));
13715 op0 = temp;
13716 break;
13718 case UNGE:
13719 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
13720 temp = gen_reg_rtx (result_mode);
13721 emit_insn (gen_rtx_SET (VOIDmode, temp,
13722 gen_rtx_IF_THEN_ELSE (result_mode,
13723 gen_rtx_GE (VOIDmode,
13724 op0, op1),
13725 true_cond, false_cond)));
13726 false_cond = true_cond;
13727 true_cond = temp;
13729 temp = gen_reg_rtx (compare_mode);
13730 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
13731 op0 = temp;
13732 break;
13734 case GT:
13735 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
13736 temp = gen_reg_rtx (result_mode);
13737 emit_insn (gen_rtx_SET (VOIDmode, temp,
13738 gen_rtx_IF_THEN_ELSE (result_mode,
13739 gen_rtx_GE (VOIDmode,
13740 op0, op1),
13741 true_cond, false_cond)));
13742 true_cond = false_cond;
13743 false_cond = temp;
13745 temp = gen_reg_rtx (compare_mode);
13746 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
13747 op0 = temp;
13748 break;
13750 default:
13751 gcc_unreachable ();
13754 emit_insn (gen_rtx_SET (VOIDmode, dest,
13755 gen_rtx_IF_THEN_ELSE (result_mode,
13756 gen_rtx_GE (VOIDmode,
13757 op0, op1),
13758 true_cond, false_cond)));
13759 return 1;
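/* Illustrative sketch (not from the GCC sources): fsel computes
   dest = (op0 >= 0.0) ? true_cond : false_cond, so the rewriting above
   funnels every relation into a GE against zero: LE negates the
   operand, ORDERED takes its absolute value, EQ uses
   a == 0 <-> -|a| >= 0, and UNGE/GT chain two selects.  A C model of
   the EQ reduction; sketch_* names are invented for this note.  */
#if 0
#include <math.h>

static double
sketch_fsel (double a, double b, double c)
{
  return a >= 0.0 ? b : c;   /* one hardware fsel */
}

static double
sketch_eq_zero_cmove (double a, double t, double f)
{
  return sketch_fsel (-fabs (a), t, f);   /* a == 0.0 ? t : f */
}
#endif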
13762 /* Same as above, but for ints (isel). */
13764 static int
13765 rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
13767 rtx condition_rtx, cr;
13769 /* All isel implementations thus far are 32-bit. */
13770 if (GET_MODE (rs6000_compare_op0) != SImode)
13771 return 0;
13773 /* We still have to do the compare, because isel doesn't do a
13774 compare; it just looks at the CRx bits set by a previous compare
13775 instruction. */
13776 condition_rtx = rs6000_generate_compare (GET_CODE (op));
13777 cr = XEXP (condition_rtx, 0);
13779 if (GET_MODE (cr) == CCmode)
13780 emit_insn (gen_isel_signed (dest, condition_rtx,
13781 true_cond, false_cond, cr));
13782 else
13783 emit_insn (gen_isel_unsigned (dest, condition_rtx,
13784 true_cond, false_cond, cr));
13786 return 1;
13789 const char *
13790 output_isel (rtx *operands)
13792 enum rtx_code code;
13794 code = GET_CODE (operands[1]);
13795 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
13797 PUT_CODE (operands[1], reverse_condition (code));
13798 return "isel %0,%3,%2,%j1";
13800 else
13801 return "isel %0,%2,%3,%j1";
13804 void
13805 rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
13807 enum machine_mode mode = GET_MODE (op0);
13808 enum rtx_code c;
13809 rtx target;
13811 if (code == SMAX || code == SMIN)
13812 c = GE;
13813 else
13814 c = GEU;
13816 if (code == SMAX || code == UMAX)
13817 target = emit_conditional_move (dest, c, op0, op1, mode,
13818 op0, op1, mode, 0);
13819 else
13820 target = emit_conditional_move (dest, c, op0, op1, mode,
13821 op1, op0, mode, 0);
13822 gcc_assert (target);
13823 if (target != dest)
13824 emit_move_insn (dest, target);
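/* Illustrative sketch (not from the GCC sources): SMAX/UMAX map
   directly onto a (U)GE conditional move, and SMIN/UMIN simply swap the
   two move operands, which is all the code above does.  Signed model;
   the sketch_* name is invented.  */
#if 0
static long
sketch_smax (long a, long b)
{
  return a >= b ? a : b;   /* SMIN: a >= b ? b : a */
}
#endif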
13827 /* Emit instructions to perform a load-reserved/store-conditional operation.
13828 The operation performed is an atomic
13829 (set M (CODE:MODE M OP)).
13830 If not NULL, BEFORE is atomically set to M before the operation, and
13831 AFTER is set to M after the operation (that is, (CODE:MODE M OP)).
13832 If SYNC_P then a memory barrier is emitted before the operation.
13833 Either OP or M may be wrapped in a NOT operation. */
13835 void
13836 rs6000_emit_sync (enum rtx_code code, enum machine_mode mode,
13837 rtx m, rtx op, rtx before_param, rtx after_param,
13838 bool sync_p)
13840 enum machine_mode used_mode;
13841 rtx the_op, set_before, set_after, set_atomic, cc_scratch, before, after;
13842 rtx used_m;
13843 rtvec vec;
13844 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
13845 rtx shift = NULL_RTX;
13847 if (sync_p)
13848 emit_insn (gen_lwsync ());
13850 used_m = m;
13852 /* If this is smaller than SImode, we'll have to use SImode with
13853 adjustments. */
13854 if (mode == QImode || mode == HImode)
13856 rtx newop, oldop;
13858 if (MEM_ALIGN (used_m) >= 32)
13860 int ishift = 0;
13861 if (BYTES_BIG_ENDIAN)
13862 ishift = GET_MODE_BITSIZE (SImode) - GET_MODE_BITSIZE (mode);
13864 shift = GEN_INT (ishift);
13865 used_m = change_address (used_m, SImode, 0);
13867 else
13869 rtx addrSI, aligned_addr;
13870 int shift_mask = mode == QImode ? 0x18 : 0x10;
13872 addrSI = gen_lowpart_common (SImode,
13873 force_reg (Pmode, XEXP (used_m, 0)));
13874 addrSI = force_reg (SImode, addrSI);
13875 shift = gen_reg_rtx (SImode);
13877 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
13878 GEN_INT (shift_mask)));
13879 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
13881 aligned_addr = expand_binop (Pmode, and_optab,
13882 XEXP (used_m, 0),
13883 GEN_INT (-4), NULL_RTX,
13884 1, OPTAB_LIB_WIDEN);
13885 used_m = change_address (used_m, SImode, aligned_addr);
13886 set_mem_align (used_m, 32);
13888 /* It's safe to keep the old alias set of USED_M, because
13889 the operation is atomic and only affects the original
13890 USED_M. */
13891 m = used_m;
13893 if (GET_CODE (op) == NOT)
13895 oldop = lowpart_subreg (SImode, XEXP (op, 0), mode);
13896 oldop = gen_rtx_NOT (SImode, oldop);
13898 else
13899 oldop = lowpart_subreg (SImode, op, mode);
13901 switch (code)
13903 case IOR:
13904 case XOR:
13905 newop = expand_binop (SImode, and_optab,
13906 oldop, GEN_INT (imask), NULL_RTX,
13907 1, OPTAB_LIB_WIDEN);
13908 emit_insn (gen_ashlsi3 (newop, newop, shift));
13909 break;
13911 case NOT: /* NAND */
13912 newop = expand_binop (SImode, ior_optab,
13913 oldop, GEN_INT (~imask), NULL_RTX,
13914 1, OPTAB_LIB_WIDEN);
13915 emit_insn (gen_rotlsi3 (newop, newop, shift));
13916 break;
13918 case AND:
13919 newop = expand_binop (SImode, ior_optab,
13920 oldop, GEN_INT (~imask), NULL_RTX,
13921 1, OPTAB_LIB_WIDEN);
13922 emit_insn (gen_rotlsi3 (newop, newop, shift));
13923 break;
13925 case PLUS:
13926 case MINUS:
13928 rtx mask;
13930 newop = expand_binop (SImode, and_optab,
13931 oldop, GEN_INT (imask), NULL_RTX,
13932 1, OPTAB_LIB_WIDEN);
13933 emit_insn (gen_ashlsi3 (newop, newop, shift));
13935 mask = gen_reg_rtx (SImode);
13936 emit_move_insn (mask, GEN_INT (imask));
13937 emit_insn (gen_ashlsi3 (mask, mask, shift));
13939 if (code == PLUS)
13940 newop = gen_rtx_PLUS (SImode, m, newop);
13941 else
13942 newop = gen_rtx_MINUS (SImode, m, newop);
13943 newop = gen_rtx_AND (SImode, newop, mask);
13944 newop = gen_rtx_IOR (SImode, newop,
13945 gen_rtx_AND (SImode,
13946 gen_rtx_NOT (SImode, mask),
13947 m));
13948 break;
13951 default:
13952 gcc_unreachable ();
13955 op = newop;
13956 used_mode = SImode;
13957 before = gen_reg_rtx (used_mode);
13958 after = gen_reg_rtx (used_mode);
13960 else
13962 used_mode = mode;
13963 before = before_param;
13964 after = after_param;
13966 if (before == NULL_RTX)
13967 before = gen_reg_rtx (used_mode);
13968 if (after == NULL_RTX)
13969 after = gen_reg_rtx (used_mode);
13972 if ((code == PLUS || code == MINUS)
13973 && used_mode != mode)
13974 the_op = op; /* Computed above. */
13975 else if (GET_CODE (op) == NOT && GET_CODE (m) != NOT)
13976 the_op = gen_rtx_fmt_ee (code, used_mode, op, m);
13977 else if (code == NOT)
13978 the_op = gen_rtx_fmt_ee (IOR, used_mode,
13979 gen_rtx_NOT (used_mode, m),
13980 gen_rtx_NOT (used_mode, op));
13981 else
13982 the_op = gen_rtx_fmt_ee (code, used_mode, m, op);
13984 set_after = gen_rtx_SET (VOIDmode, after, the_op);
13985 set_before = gen_rtx_SET (VOIDmode, before, used_m);
13986 set_atomic = gen_rtx_SET (VOIDmode, used_m,
13987 gen_rtx_UNSPEC (used_mode,
13988 gen_rtvec (1, the_op),
13989 UNSPEC_SYNC_OP));
13990 cc_scratch = gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (CCmode));
13992 if ((code == PLUS || code == MINUS) && used_mode != mode)
13993 vec = gen_rtvec (5, set_after, set_before, set_atomic, cc_scratch,
13994 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (SImode)));
13995 else
13996 vec = gen_rtvec (4, set_after, set_before, set_atomic, cc_scratch);
13997 emit_insn (gen_rtx_PARALLEL (VOIDmode, vec));
13999 /* Shift and mask the return values properly. */
14000 if (used_mode != mode && before_param)
14002 emit_insn (gen_lshrsi3 (before, before, shift));
14003 convert_move (before_param, before, 1);
14006 if (used_mode != mode && after_param)
14008 emit_insn (gen_lshrsi3 (after, after, shift));
14009 convert_move (after_param, after, 1);
14012 /* The previous sequence will end with a branch that's dependent on
14013 the conditional store, so placing an isync will ensure that no
14014 other instructions (especially no load or store instructions)
14015 can start before the atomic operation completes. */
14016 if (sync_p)
14017 emit_insn (gen_isync ());
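/* Illustrative sketch (not from the GCC sources): the rlwinm/xor pair
   above computes the bit offset of a subword within its aligned SImode
   word; rlwinm extracts (addr & 3) * 8 and the xor converts it to
   big-endian lane order.  C model of the QImode case (shift_mask 0x18);
   the sketch_* name is invented for this note.  */
#if 0
static int
sketch_subword_shift (unsigned long addr)
{
  int shift = (int) ((addr << 3) & 0x18);   /* (addr & 3) * 8 */
  return shift ^ 0x18;   /* byte 0 shifts by 24 on big-endian */
}
#endif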
14020 /* A subroutine of the atomic operation splitters. Jump to LABEL if
14021 COND is true. Mark the jump as unlikely to be taken. */
14023 static void
14024 emit_unlikely_jump (rtx cond, rtx label)
14026 rtx very_unlikely = GEN_INT (REG_BR_PROB_BASE / 100 - 1);
14027 rtx x;
14029 x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, label, pc_rtx);
14030 x = emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx, x));
14031 REG_NOTES (x) = gen_rtx_EXPR_LIST (REG_BR_PROB, very_unlikely, NULL_RTX);
14034 /* A subroutine of the atomic operation splitters. Emit a load-locked
14035 instruction in MODE. */
14037 static void
14038 emit_load_locked (enum machine_mode mode, rtx reg, rtx mem)
14040 rtx (*fn) (rtx, rtx) = NULL;
14041 if (mode == SImode)
14042 fn = gen_load_locked_si;
14043 else if (mode == DImode)
14044 fn = gen_load_locked_di;
14045 emit_insn (fn (reg, mem));
14048 /* A subroutine of the atomic operation splitters. Emit a store-conditional
14049 instruction in MODE. */
14051 static void
14052 emit_store_conditional (enum machine_mode mode, rtx res, rtx mem, rtx val)
14054 rtx (*fn) (rtx, rtx, rtx) = NULL;
14055 if (mode == SImode)
14056 fn = gen_store_conditional_si;
14057 else if (mode == DImode)
14058 fn = gen_store_conditional_di;
14060 /* Emit sync before stwcx. to address PPC405 Erratum. */
14061 if (PPC405_ERRATUM77)
14062 emit_insn (gen_memory_barrier ());
14064 emit_insn (fn (res, mem, val));
14067 /* Expand an atomic fetch-and-operate pattern. CODE is the binary operation
14068 to perform. MEM is the memory on which to operate. VAL is the second
14069 operand of the binary operator. BEFORE and AFTER are optional locations to
14070 return the value of MEM either before or after the operation. SCRATCH is
14071 a scratch register. */
14073 void
14074 rs6000_split_atomic_op (enum rtx_code code, rtx mem, rtx val,
14075 rtx before, rtx after, rtx scratch)
14077 enum machine_mode mode = GET_MODE (mem);
14078 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
14080 emit_insn (gen_lwsync ());
14082 label = gen_label_rtx ();
14083 emit_label (label);
14084 label = gen_rtx_LABEL_REF (VOIDmode, label);
14086 if (before == NULL_RTX)
14087 before = scratch;
14088 emit_load_locked (mode, before, mem);
14090 if (code == NOT)
14091 x = gen_rtx_IOR (mode,
14092 gen_rtx_NOT (mode, before),
14093 gen_rtx_NOT (mode, val));
14094 else if (code == AND)
14095 x = gen_rtx_UNSPEC (mode, gen_rtvec (2, before, val), UNSPEC_AND);
14096 else
14097 x = gen_rtx_fmt_ee (code, mode, before, val);
14099 if (after != NULL_RTX)
14100 emit_insn (gen_rtx_SET (VOIDmode, after, copy_rtx (x)));
14101 emit_insn (gen_rtx_SET (VOIDmode, scratch, x));
14103 emit_store_conditional (mode, cond, mem, scratch);
14105 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14106 emit_unlikely_jump (x, label);
14108 emit_insn (gen_isync ());
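/* Illustrative sketch (not from the GCC sources): the splitter above
   emits the classic load-reserved/store-conditional retry loop
   (lwarx ... stwcx. ... bne-), bracketed by lwsync/isync barriers.  A
   rough C-level model, approximating the reservation with a
   compare-and-swap; the sketch_* name is invented for this note.  */
#if 0
static int
sketch_atomic_fetch_add (volatile int *mem, int val)
{
  int before, after;
  do
    {
      before = *mem;            /* lwarx: load with reservation */
      after = before + val;     /* the <op>, computed into SCRATCH */
    }
  while (__sync_val_compare_and_swap (mem, before, after) != before);
  return before;                /* the BEFORE value */
}
#endif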
14111 /* Expand an atomic compare and swap operation. MEM is the memory on which
14112 to operate. OLDVAL is the old value to be compared. NEWVAL is the new
14113 value to be stored. SCRATCH is a scratch GPR. */
14115 void
14116 rs6000_split_compare_and_swap (rtx retval, rtx mem, rtx oldval, rtx newval,
14117 rtx scratch)
14119 enum machine_mode mode = GET_MODE (mem);
14120 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
14122 emit_insn (gen_lwsync ());
14124 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
14125 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
14126 emit_label (XEXP (label1, 0));
14128 emit_load_locked (mode, retval, mem);
14130 x = gen_rtx_COMPARE (CCmode, retval, oldval);
14131 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
14133 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14134 emit_unlikely_jump (x, label2);
14136 emit_move_insn (scratch, newval);
14137 emit_store_conditional (mode, cond, mem, scratch);
14139 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14140 emit_unlikely_jump (x, label1);
14142 emit_insn (gen_isync ());
14143 emit_label (XEXP (label2, 0));
14146 /* Expand an atomic test and set operation. MEM is the memory on which
14147 to operate. VAL is the value set. SCRATCH is a scratch GPR. */
14149 void
14150 rs6000_split_lock_test_and_set (rtx retval, rtx mem, rtx val, rtx scratch)
14152 enum machine_mode mode = GET_MODE (mem);
14153 rtx label, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
14155 label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
14156 emit_label (XEXP (label, 0));
14158 emit_load_locked (mode, retval, mem);
14159 emit_move_insn (scratch, val);
14160 emit_store_conditional (mode, cond, mem, scratch);
14162 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14163 emit_unlikely_jump (x, label);
14165 emit_insn (gen_isync ());
14168 void
14169 rs6000_expand_compare_and_swapqhi (rtx dst, rtx mem, rtx oldval, rtx newval)
14171 enum machine_mode mode = GET_MODE (mem);
14172 rtx addrSI, align, wdst, shift, mask;
14173 HOST_WIDE_INT shift_mask = mode == QImode ? 0x18 : 0x10;
14174 HOST_WIDE_INT imask = GET_MODE_MASK (mode);
14176 /* Shift amount for subword relative to aligned word. */
14177 addrSI = force_reg (GET_MODE (XEXP (mem, 0)), XEXP (mem, 0));
14178 addrSI = force_reg (SImode, gen_lowpart_common (SImode, addrSI));
14179 shift = gen_reg_rtx (SImode);
14180 emit_insn (gen_rlwinm (shift, addrSI, GEN_INT (3),
14181 GEN_INT (shift_mask)));
14182 emit_insn (gen_xorsi3 (shift, shift, GEN_INT (shift_mask)));
14184 /* Shift and mask old value into position within word. */
14185 oldval = convert_modes (SImode, mode, oldval, 1);
14186 oldval = expand_binop (SImode, and_optab,
14187 oldval, GEN_INT (imask), NULL_RTX,
14188 1, OPTAB_LIB_WIDEN);
14189 emit_insn (gen_ashlsi3 (oldval, oldval, shift));
14191 /* Shift and mask new value into position within word. */
14192 newval = convert_modes (SImode, mode, newval, 1);
14193 newval = expand_binop (SImode, and_optab,
14194 newval, GEN_INT (imask), NULL_RTX,
14195 1, OPTAB_LIB_WIDEN);
14196 emit_insn (gen_ashlsi3 (newval, newval, shift));
14198 /* Mask for insertion. */
14199 mask = gen_reg_rtx (SImode);
14200 emit_move_insn (mask, GEN_INT (imask));
14201 emit_insn (gen_ashlsi3 (mask, mask, shift));
14203 /* Address of aligned word containing subword. */
14204 align = expand_binop (Pmode, and_optab, XEXP (mem, 0), GEN_INT (-4),
14205 NULL_RTX, 1, OPTAB_LIB_WIDEN);
14206 mem = change_address (mem, SImode, align);
14207 set_mem_align (mem, 32);
14208 MEM_VOLATILE_P (mem) = 1;
14210 wdst = gen_reg_rtx (SImode);
14211 emit_insn (gen_sync_compare_and_swapqhi_internal (wdst, mask,
14212 oldval, newval, mem));
14214 /* Shift the result back. */
14215 emit_insn (gen_lshrsi3 (wdst, wdst, shift));
14217 emit_move_insn (dst, gen_lowpart (mode, wdst));
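/* Illustrative sketch (not from the GCC sources): the expansion above
   reduces a QImode/HImode compare-and-swap to a word-sized one.  OLDVAL
   and NEWVAL are shifted into lane position and MASK marks which bits
   of the aligned word belong to the subword; the splitter then splices
   the new lanes into the loaded word.  The sketch_* name is invented
   for this note.  */
#if 0
static unsigned int
sketch_splice_subword (unsigned int word, unsigned int newval_shifted,
                       unsigned int mask)
{
  return (word & ~mask) | newval_shifted;   /* replace masked lanes */
}
#endif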
14220 void
14221 rs6000_split_compare_and_swapqhi (rtx dest, rtx mask,
14222 rtx oldval, rtx newval, rtx mem,
14223 rtx scratch)
14225 rtx label1, label2, x, cond = gen_rtx_REG (CCmode, CR0_REGNO);
14227 emit_insn (gen_lwsync ());
14228 label1 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
14229 label2 = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
14230 emit_label (XEXP (label1, 0));
14232 emit_load_locked (SImode, scratch, mem);
14234 /* Mask subword within loaded value for comparison with oldval.
14235 Use UNSPEC_AND to avoid a clobber. */
14236 emit_insn (gen_rtx_SET (SImode, dest,
14237 gen_rtx_UNSPEC (SImode,
14238 gen_rtvec (2, scratch, mask),
14239 UNSPEC_AND)));
14241 x = gen_rtx_COMPARE (CCmode, dest, oldval);
14242 emit_insn (gen_rtx_SET (VOIDmode, cond, x));
14244 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14245 emit_unlikely_jump (x, label2);
14247 /* Clear subword within loaded value for insertion of new value. */
14248 emit_insn (gen_rtx_SET (SImode, scratch,
14249 gen_rtx_AND (SImode,
14250 gen_rtx_NOT (SImode, mask), scratch)));
14251 emit_insn (gen_iorsi3 (scratch, scratch, newval));
14252 emit_store_conditional (SImode, cond, mem, scratch);
14254 x = gen_rtx_NE (VOIDmode, cond, const0_rtx);
14255 emit_unlikely_jump (x, label1);
14257 emit_insn (gen_isync ());
14258 emit_label (XEXP (label2, 0));
14262 /* Emit instructions to move SRC to DST. Called by splitters for
14263 multi-register moves. It will emit at most one instruction for
14264 each register that is accessed; that is, it won't emit li/lis pairs
14265 (or equivalent for 64-bit code). One of SRC or DST must be a hard
14266 register. */
14268 void
14269 rs6000_split_multireg_move (rtx dst, rtx src)
14271 /* The register number of the first register being moved. */
14272 int reg;
14273 /* The mode that is to be moved. */
14274 enum machine_mode mode;
14275 /* The mode that the move is being done in, and its size. */
14276 enum machine_mode reg_mode;
14277 int reg_mode_size;
14278 /* The number of registers that will be moved. */
14279 int nregs;
14281 reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
14282 mode = GET_MODE (dst);
14283 nregs = hard_regno_nregs[reg][mode];
14284 if (FP_REGNO_P (reg))
14285 reg_mode = DECIMAL_FLOAT_MODE_P (mode) ? DDmode :
14286 ((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT) ? DFmode : SFmode);
14287 else if (ALTIVEC_REGNO_P (reg))
14288 reg_mode = V16QImode;
14289 else if (TARGET_E500_DOUBLE && mode == TFmode)
14290 reg_mode = DFmode;
14291 else
14292 reg_mode = word_mode;
14293 reg_mode_size = GET_MODE_SIZE (reg_mode);
14295 gcc_assert (reg_mode_size * nregs == GET_MODE_SIZE (mode));
14297 if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
14299 /* Move register range backwards, if we might have destructive
14300 overlap. */
14301 int i;
14302 for (i = nregs - 1; i >= 0; i--)
14303 emit_insn (gen_rtx_SET (VOIDmode,
14304 simplify_gen_subreg (reg_mode, dst, mode,
14305 i * reg_mode_size),
14306 simplify_gen_subreg (reg_mode, src, mode,
14307 i * reg_mode_size)));
14309 else
14311 int i;
14312 int j = -1;
14313 bool used_update = false;
14315 if (MEM_P (src) && INT_REGNO_P (reg))
14317 rtx breg;
14319 if (GET_CODE (XEXP (src, 0)) == PRE_INC
14320 || GET_CODE (XEXP (src, 0)) == PRE_DEC)
14322 rtx delta_rtx;
14323 breg = XEXP (XEXP (src, 0), 0);
14324 delta_rtx = (GET_CODE (XEXP (src, 0)) == PRE_INC
14325 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
14326 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src))));
14327 emit_insn (TARGET_32BIT
14328 ? gen_addsi3 (breg, breg, delta_rtx)
14329 : gen_adddi3 (breg, breg, delta_rtx));
14330 src = replace_equiv_address (src, breg);
14332 else if (! rs6000_offsettable_memref_p (src))
14334 rtx basereg;
14335 basereg = gen_rtx_REG (Pmode, reg);
14336 emit_insn (gen_rtx_SET (VOIDmode, basereg, XEXP (src, 0)));
14337 src = replace_equiv_address (src, basereg);
14340 breg = XEXP (src, 0);
14341 if (GET_CODE (breg) == PLUS || GET_CODE (breg) == LO_SUM)
14342 breg = XEXP (breg, 0);
14344 /* If the base register we are using to address memory is
14345 also a destination reg, then change that register last. */
14346 if (REG_P (breg)
14347 && REGNO (breg) >= REGNO (dst)
14348 && REGNO (breg) < REGNO (dst) + nregs)
14349 j = REGNO (breg) - REGNO (dst);
14352 if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
14354 rtx breg;
14356 if (GET_CODE (XEXP (dst, 0)) == PRE_INC
14357 || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
14359 rtx delta_rtx;
14360 breg = XEXP (XEXP (dst, 0), 0);
14361 delta_rtx = (GET_CODE (XEXP (dst, 0)) == PRE_INC
14362 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
14363 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst))));
14365 /* We have to update the breg before doing the store.
14366 Use store with update, if available. */
14368 if (TARGET_UPDATE)
14370 rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
14371 emit_insn (TARGET_32BIT
14372 ? (TARGET_POWERPC64
14373 ? gen_movdi_si_update (breg, breg, delta_rtx, nsrc)
14374 : gen_movsi_update (breg, breg, delta_rtx, nsrc))
14375 : gen_movdi_di_update (breg, breg, delta_rtx, nsrc));
14376 used_update = true;
14378 else
14379 emit_insn (TARGET_32BIT
14380 ? gen_addsi3 (breg, breg, delta_rtx)
14381 : gen_adddi3 (breg, breg, delta_rtx));
14382 dst = replace_equiv_address (dst, breg);
14384 else
14385 gcc_assert (rs6000_offsettable_memref_p (dst));
14388 for (i = 0; i < nregs; i++)
14390 /* Calculate index to next subword. */
14391 ++j;
14392 if (j == nregs)
14393 j = 0;
14395 /* If the compiler already emitted the move of the first word by
14396 a store with update, there is no need to do anything. */
14397 if (j == 0 && used_update)
14398 continue;
14400 emit_insn (gen_rtx_SET (VOIDmode,
14401 simplify_gen_subreg (reg_mode, dst, mode,
14402 j * reg_mode_size),
14403 simplify_gen_subreg (reg_mode, src, mode,
14404 j * reg_mode_size)));
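/* Illustrative sketch (not from the GCC sources): the backwards loop
   above is the standard overlap rule.  When the destination register
   range starts above the source range, copying from the high end down
   ensures no source register is overwritten before it has been read,
   the same reasoning as memmove.  Word-array model; the sketch_* name
   is invented for this note.  */
#if 0
static void
sketch_overlapping_copy (long *dst, const long *src, int n)
{
  int i;
  if (dst > src)   /* dst overlaps the top of src: go backwards */
    for (i = n - 1; i >= 0; i--)
      dst[i] = src[i];
  else
    for (i = 0; i < n; i++)
      dst[i] = src[i];
}
#endif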
14410 /* This page contains routines that are used to determine what the
14411 function prologue and epilogue code will do and write them out. */
14413 /* Return the first fixed-point register that is required to be
14414 saved. 32 if none. */
14417 first_reg_to_save (void)
14419 int first_reg;
14421 /* Find lowest numbered live register. */
14422 for (first_reg = 13; first_reg <= 31; first_reg++)
14423 if (df_regs_ever_live_p (first_reg)
14424 && (! call_used_regs[first_reg]
14425 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
14426 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
14427 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)
14428 || (TARGET_TOC && TARGET_MINIMAL_TOC)))))
14429 break;
14431 #if TARGET_MACHO
14432 if (flag_pic
14433 && crtl->uses_pic_offset_table
14434 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
14435 return RS6000_PIC_OFFSET_TABLE_REGNUM;
14436 #endif
14438 return first_reg;
14441 /* Similar, for FP regs. */
14444 first_fp_reg_to_save (void)
14446 int first_reg;
14448 /* Find lowest numbered live register. */
14449 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
14450 if (df_regs_ever_live_p (first_reg))
14451 break;
14453 return first_reg;
14456 /* Similar, for AltiVec regs. */
14458 static int
14459 first_altivec_reg_to_save (void)
14461 int i;
14463 /* Stack frame remains as is unless we are in AltiVec ABI. */
14464 if (! TARGET_ALTIVEC_ABI)
14465 return LAST_ALTIVEC_REGNO + 1;
14467 /* On Darwin, the unwind routines are compiled without
14468 TARGET_ALTIVEC, and use save_world to save/restore the
14469 altivec registers when necessary. */
14470 if (DEFAULT_ABI == ABI_DARWIN && crtl->calls_eh_return
14471 && ! TARGET_ALTIVEC)
14472 return FIRST_ALTIVEC_REGNO + 20;
14474 /* Find lowest numbered live register. */
14475 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
14476 if (df_regs_ever_live_p (i))
14477 break;
14479 return i;
14482 /* Return a 32-bit mask of the AltiVec registers we need to set in
14483 VRSAVE. Bit n of the return value is 1 if Vn is live; bits are
14484 numbered from the MSB, so V0 maps to the MSB of the 32-bit word. */
14486 static unsigned int
14487 compute_vrsave_mask (void)
14489 unsigned int i, mask = 0;
14491 /* On Darwin, the unwind routines are compiled without
14492 TARGET_ALTIVEC, and use save_world to save/restore the
14493 call-saved altivec registers when necessary. */
14494 if (DEFAULT_ABI == ABI_DARWIN && crtl->calls_eh_return
14495 && ! TARGET_ALTIVEC)
14496 mask |= 0xFFF;
14498 /* First, find out if we use _any_ altivec registers. */
14499 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
14500 if (df_regs_ever_live_p (i))
14501 mask |= ALTIVEC_REG_BIT (i);
14503 if (mask == 0)
14504 return mask;
14506 /* Next, remove the argument registers from the set. These must
14507 be in the VRSAVE mask set by the caller, so we don't need to add
14508 them in again. More importantly, the mask we compute here is
14509 used to generate CLOBBERs in the set_vrsave insn, and we do not
14510 wish the argument registers to die. */
14511 for (i = crtl->args.info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
14512 mask &= ~ALTIVEC_REG_BIT (i);
14514 /* Similarly, remove the return value from the set. */
14516 bool yes = false;
14517 diddle_return_value (is_altivec_return_reg, &yes);
14518 if (yes)
14519 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
14522 return mask;
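/* Illustrative sketch (not from the GCC sources): ALTIVEC_REG_BIT
   follows VRSAVE's big-endian bit numbering, with V0 in the most
   significant bit of the 32-bit word and V31 in the least significant.
   Model of the mapping; the sketch_* name is invented.  */
#if 0
static unsigned int
sketch_altivec_reg_bit (int vn)   /* vn in 0 .. 31 */
{
  return 0x80000000u >> vn;   /* V0 -> MSB, V31 -> LSB */
}
#endif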
14525 /* For a very restricted set of circumstances, we can cut down the
14526 size of prologues/epilogues by calling our own save/restore-the-world
14527 routines. */
14529 static void
14530 compute_save_world_info (rs6000_stack_t *info_ptr)
14532 info_ptr->world_save_p = 1;
14533 info_ptr->world_save_p
14534 = (WORLD_SAVE_P (info_ptr)
14535 && DEFAULT_ABI == ABI_DARWIN
14536 && ! (cfun->calls_setjmp && flag_exceptions)
14537 && info_ptr->first_fp_reg_save == FIRST_SAVED_FP_REGNO
14538 && info_ptr->first_gp_reg_save == FIRST_SAVED_GP_REGNO
14539 && info_ptr->first_altivec_reg_save == FIRST_SAVED_ALTIVEC_REGNO
14540 && info_ptr->cr_save_p);
14542 /* This will not work in conjunction with sibcalls. Make sure there
14543 are none. (This check is expensive, but seldom executed.) */
14544 if (WORLD_SAVE_P (info_ptr))
14546 rtx insn;
14547 for ( insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
14548 if ( GET_CODE (insn) == CALL_INSN
14549 && SIBLING_CALL_P (insn))
14551 info_ptr->world_save_p = 0;
14552 break;
14556 if (WORLD_SAVE_P (info_ptr))
14558 /* Even if we're not touching VRsave, make sure there's room on the
14559 stack for it, if it looks like we're calling SAVE_WORLD, which
14560 will attempt to save it. */
14561 info_ptr->vrsave_size = 4;
14563 /* If we are going to save the world, we need to save the link register too. */
14564 info_ptr->lr_save_p = 1;
14566 /* "Save" the VRsave register too if we're saving the world. */
14567 if (info_ptr->vrsave_mask == 0)
14568 info_ptr->vrsave_mask = compute_vrsave_mask ();
14570 /* Because the Darwin register save/restore routines only handle
14571 F14 .. F31 and V20 .. V31 as per the ABI, perform a consistency
14572 check. */
14573 gcc_assert (info_ptr->first_fp_reg_save >= FIRST_SAVED_FP_REGNO
14574 && (info_ptr->first_altivec_reg_save
14575 >= FIRST_SAVED_ALTIVEC_REGNO));
14577 return;
14581 static void
14582 is_altivec_return_reg (rtx reg, void *xyes)
14584 bool *yes = (bool *) xyes;
14585 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
14586 *yes = true;
14590 /* Calculate the stack information for the current function. This is
14591 complicated by having two separate calling sequences, the AIX calling
14592 sequence and the V.4 calling sequence.
14594 AIX (and Darwin/Mac OS X) stack frames look like:
14595 32-bit 64-bit
14596 SP----> +---------------------------------------+
14597 | back chain to caller | 0 0
14598 +---------------------------------------+
14599 | saved CR | 4 8 (8-11)
14600 +---------------------------------------+
14601 | saved LR | 8 16
14602 +---------------------------------------+
14603 | reserved for compilers | 12 24
14604 +---------------------------------------+
14605 | reserved for binders | 16 32
14606 +---------------------------------------+
14607 | saved TOC pointer | 20 40
14608 +---------------------------------------+
14609 | Parameter save area (P) | 24 48
14610 +---------------------------------------+
14611 | Alloca space (A) | 24+P etc.
14612 +---------------------------------------+
14613 | Local variable space (L) | 24+P+A
14614 +---------------------------------------+
14615 | Float/int conversion temporary (X) | 24+P+A+L
14616 +---------------------------------------+
14617 | Save area for AltiVec registers (W) | 24+P+A+L+X
14618 +---------------------------------------+
14619 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
14620 +---------------------------------------+
14621 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
14622 +---------------------------------------+
14623 | Save area for GP registers (G) | 24+P+A+L+X+W+Y+Z
14624 +---------------------------------------+
14625 | Save area for FP registers (F) | 24+P+A+L+X+W+Y+Z+G
14626 +---------------------------------------+
14627 old SP->| back chain to caller's caller |
14628 +---------------------------------------+
14630 The required alignment for AIX configurations is two words (i.e., 8
14631 or 16 bytes).
14634 V.4 stack frames look like:
14636 SP----> +---------------------------------------+
14637 | back chain to caller | 0
14638 +---------------------------------------+
14639 | caller's saved LR | 4
14640 +---------------------------------------+
14641 | Parameter save area (P) | 8
14642 +---------------------------------------+
14643 | Alloca space (A) | 8+P
14644 +---------------------------------------+
14645 | Varargs save area (V) | 8+P+A
14646 +---------------------------------------+
14647 | Local variable space (L) | 8+P+A+V
14648 +---------------------------------------+
14649 | Float/int conversion temporary (X) | 8+P+A+V+L
14650 +---------------------------------------+
14651 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
14652 +---------------------------------------+
14653 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
14654 +---------------------------------------+
14655 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
14656 +---------------------------------------+
14657 | SPE: area for 64-bit GP registers |
14658 +---------------------------------------+
14659 | SPE alignment padding |
14660 +---------------------------------------+
14661 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
14662 +---------------------------------------+
14663 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
14664 +---------------------------------------+
14665 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
14666 +---------------------------------------+
14667 old SP->| back chain to caller's caller |
14668 +---------------------------------------+
14670 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
14671 given. (But note below and in sysv4.h that we require only 8 and
14672 may round up the size of our stack frame anyway. The historical
14673 reason is early versions of powerpc-linux which didn't properly
14674 align the stack at program startup. A happy side-effect is that
14675 -mno-eabi libraries can be used with -meabi programs.)
14677 The EABI configuration defaults to the V.4 layout. However,
14678 the stack alignment requirements may differ. If -mno-eabi is not
14679 given, the required stack alignment is 8 bytes; if -mno-eabi is
14680 given, the required alignment is 16 bytes. (But see V.4 comment
14681 above.) */
14683 #ifndef ABI_STACK_BOUNDARY
14684 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
14685 #endif
14687 static rs6000_stack_t *
14688 rs6000_stack_info (void)
14690 static rs6000_stack_t info;
14691 rs6000_stack_t *info_ptr = &info;
14692 int reg_size = TARGET_32BIT ? 4 : 8;
14693 int ehrd_size;
14694 int save_align;
14695 int first_gp;
14696 HOST_WIDE_INT non_fixed_size;
14698 memset (&info, 0, sizeof (info));
14700 if (TARGET_SPE)
14702 /* Cache value so we don't rescan instruction chain over and over. */
14703 if (cfun->machine->insn_chain_scanned_p == 0)
14704 cfun->machine->insn_chain_scanned_p
14705 = spe_func_has_64bit_regs_p () + 1;
14706 info_ptr->spe_64bit_regs_used = cfun->machine->insn_chain_scanned_p - 1;
14709 /* Select which calling sequence. */
14710 info_ptr->abi = DEFAULT_ABI;
14712 /* Calculate which registers need to be saved & save area size. */
14713 info_ptr->first_gp_reg_save = first_reg_to_save ();
14714 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
14715 even if it currently looks like we won't. Reload may need it to
14716 get at a constant; if so, it will have already created a constant
14717 pool entry for it. */
14718 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
14719 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
14720 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
14721 && crtl->uses_const_pool
14722 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
14723 first_gp = RS6000_PIC_OFFSET_TABLE_REGNUM;
14724 else
14725 first_gp = info_ptr->first_gp_reg_save;
14727 info_ptr->gp_size = reg_size * (32 - first_gp);
14729 /* For the SPE, we have an additional upper 32-bits on each GPR.
14730 Ideally we should save the entire 64-bits only when the upper
14731 half is used in SIMD instructions. Since we only record
14732 registers live (not the size they are used in), this proves
14733 difficult because we'd have to traverse the instruction chain at
14734 the right time, taking reload into account. This is a real pain,
14735 so we opt to always save the GPRs in 64 bits if even one register
14736 is used in 64 bits. Otherwise, all the registers in the frame
14737 get saved in 32 bits.
14739 So... whenever we save all GPRs (except the SP) in 64 bits, the
14740 traditional GP save area will be empty. */
14741 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
14742 info_ptr->gp_size = 0;
14744 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
14745 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
14747 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
14748 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
14749 - info_ptr->first_altivec_reg_save);
14751 /* Does this function call anything? */
14752 info_ptr->calls_p = (! current_function_is_leaf
14753 || cfun->machine->ra_needs_full_frame);
14755 /* Determine if we need to save the link register. */
14756 if ((DEFAULT_ABI == ABI_AIX
14757 && crtl->profile
14758 && !TARGET_PROFILE_KERNEL)
14759 #ifdef TARGET_RELOCATABLE
14760 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
14761 #endif
14762 || (info_ptr->first_fp_reg_save != 64
14763 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
14764 || (DEFAULT_ABI == ABI_V4 && cfun->calls_alloca)
14765 || info_ptr->calls_p
14766 || rs6000_ra_ever_killed ())
14768 info_ptr->lr_save_p = 1;
14769 df_set_regs_ever_live (LR_REGNO, true);
14772 /* Determine if we need to save the condition code registers. */
14773 if (df_regs_ever_live_p (CR2_REGNO)
14774 || df_regs_ever_live_p (CR3_REGNO)
14775 || df_regs_ever_live_p (CR4_REGNO))
14777 info_ptr->cr_save_p = 1;
14778 if (DEFAULT_ABI == ABI_V4)
14779 info_ptr->cr_size = reg_size;
14782 /* If the current function calls __builtin_eh_return, then we need
14783 to allocate stack space for registers that will hold data for
14784 the exception handler. */
14785 if (crtl->calls_eh_return)
14787 unsigned int i;
14788 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
14789 continue;
14791 /* SPE saves EH registers in 64 bits. */
14792 ehrd_size = i * (TARGET_SPE_ABI
14793 && info_ptr->spe_64bit_regs_used != 0
14794 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
14796 else
14797 ehrd_size = 0;
14799 /* Determine various sizes. */
14800 info_ptr->reg_size = reg_size;
14801 info_ptr->fixed_size = RS6000_SAVE_AREA;
14802 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
14803 info_ptr->parm_size = RS6000_ALIGN (crtl->outgoing_args_size,
14804 TARGET_ALTIVEC ? 16 : 8);
14805 if (FRAME_GROWS_DOWNWARD)
14806 info_ptr->vars_size
14807 += RS6000_ALIGN (info_ptr->fixed_size + info_ptr->vars_size
14808 + info_ptr->parm_size,
14809 ABI_STACK_BOUNDARY / BITS_PER_UNIT)
14810 - (info_ptr->fixed_size + info_ptr->vars_size
14811 + info_ptr->parm_size);
14813 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
14814 info_ptr->spe_gp_size = 8 * (32 - first_gp);
14815 else
14816 info_ptr->spe_gp_size = 0;
14818 if (TARGET_ALTIVEC_ABI)
14819 info_ptr->vrsave_mask = compute_vrsave_mask ();
14820 else
14821 info_ptr->vrsave_mask = 0;
14823 if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
14824 info_ptr->vrsave_size = 4;
14825 else
14826 info_ptr->vrsave_size = 0;
14828 compute_save_world_info (info_ptr);
14830 /* Calculate the offsets. */
14831 switch (DEFAULT_ABI)
14833 case ABI_NONE:
14834 default:
14835 gcc_unreachable ();
14837 case ABI_AIX:
14838 case ABI_DARWIN:
14839 info_ptr->fp_save_offset = - info_ptr->fp_size;
14840 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
14842 if (TARGET_ALTIVEC_ABI)
14844 info_ptr->vrsave_save_offset
14845 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
14847 /* Align stack so vector save area is on a quadword boundary.
14848 The padding goes above the vectors. */
14849 if (info_ptr->altivec_size != 0)
14850 info_ptr->altivec_padding_size
14851 = info_ptr->vrsave_save_offset & 0xF;
14852 else
14853 info_ptr->altivec_padding_size = 0;
14855 info_ptr->altivec_save_offset
14856 = info_ptr->vrsave_save_offset
14857 - info_ptr->altivec_padding_size
14858 - info_ptr->altivec_size;
14859 gcc_assert (info_ptr->altivec_size == 0
14860 || info_ptr->altivec_save_offset % 16 == 0);
14862 /* Adjust for AltiVec case. */
14863 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
14865 else
14866 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
14867 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
14868 info_ptr->lr_save_offset = 2*reg_size;
14869 break;
14871 case ABI_V4:
14872 info_ptr->fp_save_offset = - info_ptr->fp_size;
14873 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
14874 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
14876 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
14878 /* Align stack so SPE GPR save area is aligned on a
14879 double-word boundary. */
14880 if (info_ptr->spe_gp_size != 0 && info_ptr->cr_save_offset != 0)
14881 info_ptr->spe_padding_size
14882 = 8 - (-info_ptr->cr_save_offset % 8);
14883 else
14884 info_ptr->spe_padding_size = 0;
14886 info_ptr->spe_gp_save_offset
14887 = info_ptr->cr_save_offset
14888 - info_ptr->spe_padding_size
14889 - info_ptr->spe_gp_size;
14891 /* Adjust for SPE case. */
14892 info_ptr->ehrd_offset = info_ptr->spe_gp_save_offset;
14894 else if (TARGET_ALTIVEC_ABI)
14896 info_ptr->vrsave_save_offset
14897 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
14899 /* Align stack so vector save area is on a quadword boundary. */
14900 if (info_ptr->altivec_size != 0)
14901 info_ptr->altivec_padding_size
14902 = 16 - (-info_ptr->vrsave_save_offset % 16);
14903 else
14904 info_ptr->altivec_padding_size = 0;
14906 info_ptr->altivec_save_offset
14907 = info_ptr->vrsave_save_offset
14908 - info_ptr->altivec_padding_size
14909 - info_ptr->altivec_size;
14911 /* Adjust for AltiVec case. */
14912 info_ptr->ehrd_offset = info_ptr->altivec_save_offset;
14914 else
14915 info_ptr->ehrd_offset = info_ptr->cr_save_offset;
14916 info_ptr->ehrd_offset -= ehrd_size;
14917 info_ptr->lr_save_offset = reg_size;
14918 break;
14921 save_align = (TARGET_ALTIVEC_ABI || DEFAULT_ABI == ABI_DARWIN) ? 16 : 8;
14922 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
14923 + info_ptr->gp_size
14924 + info_ptr->altivec_size
14925 + info_ptr->altivec_padding_size
14926 + info_ptr->spe_gp_size
14927 + info_ptr->spe_padding_size
14928 + ehrd_size
14929 + info_ptr->cr_size
14930 + info_ptr->vrsave_size,
14931 save_align);
14933 non_fixed_size = (info_ptr->vars_size
14934 + info_ptr->parm_size
14935 + info_ptr->save_size);
14937 info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
14938 ABI_STACK_BOUNDARY / BITS_PER_UNIT);
14940 /* Determine if we need to allocate any stack frame:
14942 For AIX we need to push the stack if a frame pointer is needed
14943 (because the stack might be dynamically adjusted), if we are
14944 debugging, if we make calls, or if the sum of fp_save, gp_save,
14945 and local variables are more than the space needed to save all
14946 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
14947 + 18*8 = 288 (GPR13 reserved).
14949 For V.4 we don't have the stack cushion that AIX uses, but assume
14950 that the debugger can handle stackless frames. */
14952 if (info_ptr->calls_p)
14953 info_ptr->push_p = 1;
14955 else if (DEFAULT_ABI == ABI_V4)
14956 info_ptr->push_p = non_fixed_size != 0;
14958 else if (frame_pointer_needed)
14959 info_ptr->push_p = 1;
14961 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
14962 info_ptr->push_p = 1;
14964 else
14965 info_ptr->push_p = non_fixed_size > (TARGET_32BIT ? 220 : 288);
14967 /* Zero offsets if we're not saving those registers. */
14968 if (info_ptr->fp_size == 0)
14969 info_ptr->fp_save_offset = 0;
14971 if (info_ptr->gp_size == 0)
14972 info_ptr->gp_save_offset = 0;
14974 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
14975 info_ptr->altivec_save_offset = 0;
14977 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
14978 info_ptr->vrsave_save_offset = 0;
14980 if (! TARGET_SPE_ABI
14981 || info_ptr->spe_64bit_regs_used == 0
14982 || info_ptr->spe_gp_size == 0)
14983 info_ptr->spe_gp_save_offset = 0;
14985 if (! info_ptr->lr_save_p)
14986 info_ptr->lr_save_offset = 0;
14988 if (! info_ptr->cr_save_p)
14989 info_ptr->cr_save_offset = 0;
14991 return info_ptr;
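/* Illustrative sketch (not from the GCC sources): the save_size and
   total_size computations above repeatedly round a byte count up to an
   ABI boundary; RS6000_ALIGN is the usual power-of-two round-up.  C
   model; the sketch_* name is invented for this note.  */
#if 0
static int
sketch_align_up (int n, int align)   /* align: a power of two */
{
  return (n + align - 1) & ~(align - 1);   /* e.g. (220, 16) -> 224 */
}
#endif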
14994 /* Return true if the current function uses any GPRs in 64-bit SIMD
14995 mode. */
14997 static bool
14998 spe_func_has_64bit_regs_p (void)
15000 rtx insns, insn;
15002 /* Functions that save and restore all the call-saved registers will
15003 need to save/restore the registers in 64-bits. */
15004 if (crtl->calls_eh_return
15005 || cfun->calls_setjmp
15006 || crtl->has_nonlocal_goto)
15007 return true;
15009 insns = get_insns ();
15011 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
15013 if (INSN_P (insn))
15015 rtx i;
15017 /* FIXME: This should be implemented with attributes, e.g.
15019 (set_attr "spe64" "true") ... then
15020 if (get_spe64 (insn)) return true;
15022 That's the only reliable way to do the stuff below. */
15024 i = PATTERN (insn);
15025 if (GET_CODE (i) == SET)
15027 enum machine_mode mode = GET_MODE (SET_SRC (i));
15029 if (SPE_VECTOR_MODE (mode))
15030 return true;
15031 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == TFmode))
15032 return true;
15037 return false;
15040 static void
15041 debug_stack_info (rs6000_stack_t *info)
15043 const char *abi_string;
15045 if (! info)
15046 info = rs6000_stack_info ();
15048 fprintf (stderr, "\nStack information for function %s:\n",
15049 ((current_function_decl && DECL_NAME (current_function_decl))
15050 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
15051 : "<unknown>"));
15053 switch (info->abi)
15055 default: abi_string = "Unknown"; break;
15056 case ABI_NONE: abi_string = "NONE"; break;
15057 case ABI_AIX: abi_string = "AIX"; break;
15058 case ABI_DARWIN: abi_string = "Darwin"; break;
15059 case ABI_V4: abi_string = "V.4"; break;
15062 fprintf (stderr, "\tABI = %5s\n", abi_string);
15064 if (TARGET_ALTIVEC_ABI)
15065 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
15067 if (TARGET_SPE_ABI)
15068 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
15070 if (info->first_gp_reg_save != 32)
15071 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
15073 if (info->first_fp_reg_save != 64)
15074 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
15076 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
15077 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
15078 info->first_altivec_reg_save);
15080 if (info->lr_save_p)
15081 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
15083 if (info->cr_save_p)
15084 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
15086 if (info->vrsave_mask)
15087 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
15089 if (info->push_p)
15090 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
15092 if (info->calls_p)
15093 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
15095 if (info->gp_save_offset)
15096 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
15098 if (info->fp_save_offset)
15099 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
15101 if (info->altivec_save_offset)
15102 fprintf (stderr, "\taltivec_save_offset = %5d\n",
15103 info->altivec_save_offset);
15105 if (info->spe_gp_save_offset)
15106 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
15107 info->spe_gp_save_offset);
15109 if (info->vrsave_save_offset)
15110 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
15111 info->vrsave_save_offset);
15113 if (info->lr_save_offset)
15114 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
15116 if (info->cr_save_offset)
15117 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
15119 if (info->varargs_save_offset)
15120 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
15122 if (info->total_size)
15123 fprintf (stderr, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC"\n",
15124 info->total_size);
15126 if (info->vars_size)
15127 fprintf (stderr, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC"\n",
15128 info->vars_size);
15130 if (info->parm_size)
15131 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
15133 if (info->fixed_size)
15134 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
15136 if (info->gp_size)
15137 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
15139 if (info->spe_gp_size)
15140 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
15142 if (info->fp_size)
15143 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
15145 if (info->altivec_size)
15146 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
15148 if (info->vrsave_size)
15149 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
15151 if (info->altivec_padding_size)
15152 fprintf (stderr, "\taltivec_padding_size= %5d\n",
15153 info->altivec_padding_size);
15155 if (info->spe_padding_size)
15156 fprintf (stderr, "\tspe_padding_size = %5d\n",
15157 info->spe_padding_size);
15159 if (info->cr_size)
15160 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
15162 if (info->save_size)
15163 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
15165 if (info->reg_size != 4)
15166 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
15168 fprintf (stderr, "\n");
15172 rs6000_return_addr (int count, rtx frame)
15174 /* Currently we don't optimize very well between the prologue and body
15175 code, and for PIC code the result can actually be quite bad, so
15176 don't try to be too clever here. */
15177 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
15179 cfun->machine->ra_needs_full_frame = 1;
15181 return
15182 gen_rtx_MEM
15183 (Pmode,
15184 memory_address
15185 (Pmode,
15186 plus_constant (copy_to_reg
15187 (gen_rtx_MEM (Pmode,
15188 memory_address (Pmode, frame))),
15189 RETURN_ADDRESS_OFFSET)));
15192 cfun->machine->ra_need_lr = 1;
15193 return get_hard_reg_initial_val (Pmode, LR_REGNO);
15196 /* Say whether a function is a candidate for sibcall handling or not.
15197 We do not allow indirect calls to be optimized into sibling calls.
15198 Also, we can't do it if there are any vector parameters; there's
15199 nowhere to put the VRsave code so that it works; note that functions with
15200 vector parameters are required to have a prototype, so the argument
15201 type info must be available here. (The tail recursion case can work
15202 with vector parameters, but there's no way to distinguish here.) */
15203 static bool
15204 rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
15206 tree type;
15207 if (decl)
15209 if (TARGET_ALTIVEC_VRSAVE)
15211 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
15212 type; type = TREE_CHAIN (type))
15214 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
15215 return false;
15218 if (DEFAULT_ABI == ABI_DARWIN
15219 || ((*targetm.binds_local_p) (decl)
15220 && (DEFAULT_ABI != ABI_AIX || !DECL_EXTERNAL (decl))))
15222 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
15224 if (!lookup_attribute ("longcall", attr_list)
15225 || lookup_attribute ("shortcall", attr_list))
15226 return true;
15229 return false;
15232 /* Return NULL if INSN is valid within a low-overhead loop.
15233 Otherwise return a string describing why doloop cannot be applied.
15234 PowerPC uses the COUNT register for branch-on-table instructions.  */
15236 static const char *
15237 rs6000_invalid_within_doloop (const_rtx insn)
15239 if (CALL_P (insn))
15240 return "Function call in the loop.";
15242 if (JUMP_P (insn)
15243 && (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
15244 || GET_CODE (PATTERN (insn)) == ADDR_VEC))
15245 return "Computed branch in the loop.";
15247 return NULL;
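/* Illustrative only: a low-overhead loop owns the CTR register,
   roughly

       mtctr rN          # load the trip count
   .L1:  ...
       bdnz .L1          # decrement CTR, branch while nonzero

   so anything else inside the loop that needs CTR -- a call, or a
   jump table dispatched through bctr -- disqualifies it.  */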
15250 static int
15251 rs6000_ra_ever_killed (void)
15253 rtx top;
15254 rtx reg;
15255 rtx insn;
15257 if (crtl->is_thunk)
15258 return 0;
15260 /* regs_ever_live has LR marked as used if any sibcalls are present,
15261 but this should not force saving and restoring in the
15262 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
15263 clobbers LR, so that is inappropriate. */
15265 /* Also, the prologue can generate a store into LR that
15266 doesn't really count, like this:
15268 move LR->R0
15269 bcl to set PIC register
15270 move LR->R31
15271 move R0->LR
15273 When we're called from the epilogue, we need to avoid counting
15274 this as a store. */
15276 push_topmost_sequence ();
15277 top = get_insns ();
15278 pop_topmost_sequence ();
15279 reg = gen_rtx_REG (Pmode, LR_REGNO);
15281 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
15283 if (INSN_P (insn))
15285 if (CALL_P (insn))
15287 if (!SIBLING_CALL_P (insn))
15288 return 1;
15290 else if (find_regno_note (insn, REG_INC, LR_REGNO))
15291 return 1;
15292 else if (set_of (reg, insn) != NULL_RTX
15293 && !prologue_epilogue_contains (insn))
15294 return 1;
15297 return 0;
15300 /* Emit instructions needed to load the TOC register.
15301 This is only needed when TARGET_TOC and TARGET_MINIMAL_TOC are set
15302 and there is a constant pool; or for SVR4 -fpic.  */
15304 void
15305 rs6000_emit_load_toc_table (int fromprolog)
15307 rtx dest;
15308 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
15310 if (TARGET_ELF && TARGET_SECURE_PLT && DEFAULT_ABI != ABI_AIX && flag_pic)
15312 char buf[30];
15313 rtx lab, tmp1, tmp2, got;
15315 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
15316 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
15317 if (flag_pic == 2)
15318 got = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
15319 else
15320 got = rs6000_got_sym ();
15321 tmp1 = tmp2 = dest;
15322 if (!fromprolog)
15324 tmp1 = gen_reg_rtx (Pmode);
15325 tmp2 = gen_reg_rtx (Pmode);
15327 emit_insn (gen_load_toc_v4_PIC_1 (lab));
15328 emit_move_insn (tmp1,
15329 gen_rtx_REG (Pmode, LR_REGNO));
15330 emit_insn (gen_load_toc_v4_PIC_3b (tmp2, tmp1, got, lab));
15331 emit_insn (gen_load_toc_v4_PIC_3c (dest, tmp2, got, lab));
15333 else if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
15335 emit_insn (gen_load_toc_v4_pic_si ());
15336 emit_move_insn (dest, gen_rtx_REG (Pmode, LR_REGNO));
15338 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
15340 char buf[30];
15341 rtx temp0 = (fromprolog
15342 ? gen_rtx_REG (Pmode, 0)
15343 : gen_reg_rtx (Pmode));
15345 if (fromprolog)
15347 rtx symF, symL;
15349 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
15350 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
15352 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
15353 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
15355 emit_insn (gen_load_toc_v4_PIC_1 (symF));
15356 emit_move_insn (dest,
15357 gen_rtx_REG (Pmode, LR_REGNO));
15358 emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest, symL, symF));
15360 else
15362 rtx tocsym;
15364 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
15365 emit_insn (gen_load_toc_v4_PIC_1b (tocsym));
15366 emit_move_insn (dest,
15367 gen_rtx_REG (Pmode, LR_REGNO));
15368 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
15370 emit_insn (gen_addsi3 (dest, temp0, dest));
15372 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
15374 /* This is for AIX code running in non-PIC ELF32. */
15375 char buf[30];
15376 rtx realsym;
15377 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
15378 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
15380 emit_insn (gen_elf_high (dest, realsym));
15381 emit_insn (gen_elf_low (dest, dest, realsym));
15383 else
15385 gcc_assert (DEFAULT_ABI == ABI_AIX);
15387 if (TARGET_32BIT)
15388 emit_insn (gen_load_toc_aix_si (dest));
15389 else
15390 emit_insn (gen_load_toc_aix_di (dest));
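/* Illustrative only: for SVR4 -fPIC the sequence emitted above
   typically assembles to something like (labels made up):

       bcl 20,31,.LCF0           # branch-and-link to obtain the PC
   .LCF0:
       mflr 30
       addis 30,30,.LCTOC1-.LCF0@ha
       addi 30,30,.LCTOC1-.LCF0@l

   leaving the GOT/TOC base in the PIC register.  */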
15394 /* Emit instructions to restore the link register after determining where
15395 its value has been stored. */
15397 void
15398 rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
15400 rs6000_stack_t *info = rs6000_stack_info ();
15401 rtx operands[2];
15403 operands[0] = source;
15404 operands[1] = scratch;
15406 if (info->lr_save_p)
15408 rtx frame_rtx = stack_pointer_rtx;
15409 HOST_WIDE_INT sp_offset = 0;
15410 rtx tmp;
15412 if (frame_pointer_needed
15413 || cfun->calls_alloca
15414 || info->total_size > 32767)
15416 tmp = gen_frame_mem (Pmode, frame_rtx);
15417 emit_move_insn (operands[1], tmp);
15418 frame_rtx = operands[1];
15420 else if (info->push_p)
15421 sp_offset = info->total_size;
15423 tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
15424 tmp = gen_frame_mem (Pmode, tmp);
15425 emit_move_insn (tmp, operands[0]);
15427 else
15428 emit_move_insn (gen_rtx_REG (Pmode, LR_REGNO), operands[0]);
15431 static GTY(()) alias_set_type set = -1;
15433 alias_set_type
15434 get_TOC_alias_set (void)
15436 if (set == -1)
15437 set = new_alias_set ();
15438 return set;
15441 /* This returns nonzero if the current function uses the TOC. This is
15442 determined by the presence of (use (unspec ... UNSPEC_TOC)), which
15443 is generated by the ABI_V4 load_toc_* patterns. */
15444 #if TARGET_ELF
15445 static int
15446 uses_TOC (void)
15448 rtx insn;
15450 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
15451 if (INSN_P (insn))
15453 rtx pat = PATTERN (insn);
15454 int i;
15456 if (GET_CODE (pat) == PARALLEL)
15457 for (i = 0; i < XVECLEN (pat, 0); i++)
15459 rtx sub = XVECEXP (pat, 0, i);
15460 if (GET_CODE (sub) == USE)
15462 sub = XEXP (sub, 0);
15463 if (GET_CODE (sub) == UNSPEC
15464 && XINT (sub, 1) == UNSPEC_TOC)
15465 return 1;
15469 return 0;
15471 #endif
15473 rtx
15474 create_TOC_reference (rtx symbol)
15476 if (!can_create_pseudo_p ())
15477 df_set_regs_ever_live (TOC_REGISTER, true);
15478 return gen_rtx_PLUS (Pmode,
15479 gen_rtx_REG (Pmode, TOC_REGISTER),
15480 gen_rtx_CONST (Pmode,
15481 gen_rtx_UNSPEC (Pmode, gen_rtvec (1, symbol), UNSPEC_TOCREL)));
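/* The returned RTX has the shape

       (plus (reg TOC_REGISTER)
             (const (unspec [(symbol_ref ...)] UNSPEC_TOCREL)))

   i.e. a TOC-relative reference anchored at the TOC register.  */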
15484 /* If _Unwind_* has been called from within the same module,
15485 the TOC register is not guaranteed to have been saved to 40(1)
15486 on function entry.  Save it there in that case.  */
15488 void
15489 rs6000_aix_emit_builtin_unwind_init (void)
15491 rtx mem;
15492 rtx stack_top = gen_reg_rtx (Pmode);
15493 rtx opcode_addr = gen_reg_rtx (Pmode);
15494 rtx opcode = gen_reg_rtx (SImode);
15495 rtx tocompare = gen_reg_rtx (SImode);
15496 rtx no_toc_save_needed = gen_label_rtx ();
15498 mem = gen_frame_mem (Pmode, hard_frame_pointer_rtx);
15499 emit_move_insn (stack_top, mem);
15501 mem = gen_frame_mem (Pmode,
15502 gen_rtx_PLUS (Pmode, stack_top,
15503 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
15504 emit_move_insn (opcode_addr, mem);
15505 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
15506 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
15507 : 0xE8410028, SImode));
15509 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
15510 SImode, NULL_RTX, NULL_RTX,
15511 no_toc_save_needed);
15513 mem = gen_frame_mem (Pmode,
15514 gen_rtx_PLUS (Pmode, stack_top,
15515 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
15516 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
15517 emit_label (no_toc_save_needed);
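/* The magic constants compared above should decode (assuming my
   PowerPC opcode arithmetic is right) to the TOC reload insn found
   after a cross-module call:

       0x80410014    lwz r2,20(r1)    (32-bit)
       0xE8410028    ld  r2,40(r1)    (64-bit)

   If the insn at the return address is that reload, the call sequence
   already maintained the TOC save slot; otherwise we store r2 there
   ourselves.  */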
15520 /* This ties together stack memory (MEM with an alias set of frame_alias_set)
15521 and the change to the stack pointer. */
15523 static void
15524 rs6000_emit_stack_tie (void)
15526 rtx mem = gen_frame_mem (BLKmode,
15527 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
15529 emit_insn (gen_stack_tie (mem));
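/* The tie insn does nothing at run time; the BLKmode MEM based on r1
   makes the scheduler treat it as touching the whole frame, so the
   stack pointer adjustment cannot be reordered past the register
   saves and restores that reference the frame.  */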
15532 /* Emit the correct code for allocating stack space, as insns.
15533 If COPY_R12, make sure a copy of the old frame is left in r12.
15534 If COPY_R11, make sure a copy of the old frame is left in r11,
15535 in preference to r12 if COPY_R12.
15536 The generated code may use hard register 0 as a temporary. */
15538 static void
15539 rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12, int copy_r11)
15541 rtx insn;
15542 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
15543 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
15544 rtx todec = gen_int_mode (-size, Pmode);
15545 rtx par, set, mem;
15547 if (INTVAL (todec) != -size)
15549 warning (0, "stack frame too large");
15550 emit_insn (gen_trap ());
15551 return;
15554 if (crtl->limit_stack)
15556 if (REG_P (stack_limit_rtx)
15557 && REGNO (stack_limit_rtx) > 1
15558 && REGNO (stack_limit_rtx) <= 31)
15560 emit_insn (TARGET_32BIT
15561 ? gen_addsi3 (tmp_reg,
15562 stack_limit_rtx,
15563 GEN_INT (size))
15564 : gen_adddi3 (tmp_reg,
15565 stack_limit_rtx,
15566 GEN_INT (size)));
15568 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
15569 const0_rtx));
15571 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
15572 && TARGET_32BIT
15573 && DEFAULT_ABI == ABI_V4)
15575 rtx toload = gen_rtx_CONST (VOIDmode,
15576 gen_rtx_PLUS (Pmode,
15577 stack_limit_rtx,
15578 GEN_INT (size)));
15580 emit_insn (gen_elf_high (tmp_reg, toload));
15581 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
15582 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
15583 const0_rtx));
15585 else
15586 warning (0, "stack limit expression is not supported");
15589 if (copy_r12 || copy_r11)
15590 emit_move_insn (copy_r11
15591 ? gen_rtx_REG (Pmode, 11)
15592 : gen_rtx_REG (Pmode, 12),
15593 stack_reg);
15595 if (size > 32767)
15597 /* Need a note here so that try_split doesn't get confused. */
15598 if (get_last_insn () == NULL_RTX)
15599 emit_note (NOTE_INSN_DELETED);
15600 insn = emit_move_insn (tmp_reg, todec);
15601 try_split (PATTERN (insn), insn, 0);
15602 todec = tmp_reg;
15605 insn = emit_insn (TARGET_32BIT
15606 ? gen_movsi_update_stack (stack_reg, stack_reg,
15607 todec, stack_reg)
15608 : gen_movdi_di_update_stack (stack_reg, stack_reg,
15609 todec, stack_reg));
15610 /* Since we didn't use gen_frame_mem to generate the MEM, grab
15611 it now and set the alias set/attributes. The above gen_*_update
15612 calls will generate a PARALLEL with the MEM set being the first
15613 operation. */
15614 par = PATTERN (insn);
15615 gcc_assert (GET_CODE (par) == PARALLEL);
15616 set = XVECEXP (par, 0, 0);
15617 gcc_assert (GET_CODE (set) == SET);
15618 mem = SET_DEST (set);
15619 gcc_assert (MEM_P (mem));
15620 MEM_NOTRAP_P (mem) = 1;
15621 set_mem_alias_set (mem, get_frame_alias_set ());
15623 RTX_FRAME_RELATED_P (insn) = 1;
15624 REG_NOTES (insn) =
15625 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
15626 gen_rtx_SET (VOIDmode, stack_reg,
15627 gen_rtx_PLUS (Pmode, stack_reg,
15628 GEN_INT (-size))),
15629 REG_NOTES (insn));
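/* Illustrative only, 32-bit mnemonics approximate: the update insn
   emitted above is the classic allocate-and-store-back-chain idiom,

       stwu 1,-SIZE(1)           # SIZE representable in 16 bits
   or
       lis 0,...                 # otherwise materialize -SIZE in r0
       ori 0,0,...               # (split from the move emitted above)
       stwux 1,1,0

   with stdu/stdux on 64-bit targets.  */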
15632 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
15633 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
15634 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
15635 deduce these equivalences by itself so it wasn't necessary to hold
15636 its hand so much. */
15638 static void
15639 rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
15640 rtx reg2, rtx rreg)
15642 rtx real, temp;
15644 /* copy_rtx will not make unique copies of registers, so we need to
15645 ensure we don't have unwanted sharing here. */
15646 if (reg == reg2)
15647 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
15649 if (reg == rreg)
15650 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
15652 real = copy_rtx (PATTERN (insn));
15654 if (reg2 != NULL_RTX)
15655 real = replace_rtx (real, reg2, rreg);
15657 real = replace_rtx (real, reg,
15658 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
15659 STACK_POINTER_REGNUM),
15660 GEN_INT (val)));
15662 /* We expect that 'real' is either a SET or a PARALLEL containing
15663 SETs (and possibly other stuff). In a PARALLEL, all the SETs
15664 are important so they all have to be marked RTX_FRAME_RELATED_P. */
15666 if (GET_CODE (real) == SET)
15668 rtx set = real;
15670 temp = simplify_rtx (SET_SRC (set));
15671 if (temp)
15672 SET_SRC (set) = temp;
15673 temp = simplify_rtx (SET_DEST (set));
15674 if (temp)
15675 SET_DEST (set) = temp;
15676 if (GET_CODE (SET_DEST (set)) == MEM)
15678 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
15679 if (temp)
15680 XEXP (SET_DEST (set), 0) = temp;
15683 else
15685 int i;
15687 gcc_assert (GET_CODE (real) == PARALLEL);
15688 for (i = 0; i < XVECLEN (real, 0); i++)
15689 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
15691 rtx set = XVECEXP (real, 0, i);
15693 temp = simplify_rtx (SET_SRC (set));
15694 if (temp)
15695 SET_SRC (set) = temp;
15696 temp = simplify_rtx (SET_DEST (set));
15697 if (temp)
15698 SET_DEST (set) = temp;
15699 if (GET_CODE (SET_DEST (set)) == MEM)
15701 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
15702 if (temp)
15703 XEXP (SET_DEST (set), 0) = temp;
15705 RTX_FRAME_RELATED_P (set) = 1;
15709 RTX_FRAME_RELATED_P (insn) = 1;
15710 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
15711 real,
15712 REG_NOTES (insn));
15715 /* Returns an insn that has a vrsave set operation with the
15716 appropriate CLOBBERs. */
15718 static rtx
15719 generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
15721 int nclobs, i;
15722 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
15723 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
15725 clobs[0]
15726 = gen_rtx_SET (VOIDmode,
15727 vrsave,
15728 gen_rtx_UNSPEC_VOLATILE (SImode,
15729 gen_rtvec (2, reg, vrsave),
15730 UNSPECV_SET_VRSAVE));
15732 nclobs = 1;
15734 /* We need to clobber the registers in the mask so the scheduler
15735 does not move sets to VRSAVE before sets of AltiVec registers.
15737 However, if the function receives nonlocal gotos, reload will set
15738 all call saved registers live. We will end up with:
15740 (set (reg 999) (mem))
15741 (parallel [ (set (reg vrsave) (unspec blah))
15742 (clobber (reg 999))])
15744 The clobber will cause the store into reg 999 to be dead, and
15745 flow will attempt to delete an epilogue insn. In this case, we
15746 need an unspec use/set of the register. */
15748 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
15749 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
15751 if (!epiloguep || call_used_regs [i])
15752 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
15753 gen_rtx_REG (V4SImode, i));
15754 else
15756 rtx reg = gen_rtx_REG (V4SImode, i);
15758 clobs[nclobs++]
15759 = gen_rtx_SET (VOIDmode,
15760 reg,
15761 gen_rtx_UNSPEC (V4SImode,
15762 gen_rtvec (1, reg), 27));
15766 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
15768 for (i = 0; i < nclobs; ++i)
15769 XVECEXP (insn, 0, i) = clobs[i];
15771 return insn;
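/* Sketch of the PARALLEL built above:

       (parallel [(set (reg VRSAVE) (unspec_volatile [...]))
                  (clobber (reg:V4SI ...))
                  ... one clobber, or use/set, per register
                      in the VRSAVE mask ... ])  */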
15774 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
15775 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
15777 static void
15778 emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
15779 unsigned int regno, int offset, HOST_WIDE_INT total_size)
15781 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
15782 rtx replacea, replaceb;
15784 int_rtx = GEN_INT (offset);
15786 /* Some cases that need register indexed addressing. */
15787 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
15788 || (TARGET_E500_DOUBLE && mode == DFmode)
15789 || (TARGET_SPE_ABI
15790 && SPE_VECTOR_MODE (mode)
15791 && !SPE_CONST_OFFSET_OK (offset)))
15793 /* Whoever calls us must make sure r11 is available in the
15794 flow path of instructions in the prologue.  */
15795 offset_rtx = gen_rtx_REG (Pmode, 11);
15796 emit_move_insn (offset_rtx, int_rtx);
15798 replacea = offset_rtx;
15799 replaceb = int_rtx;
15801 else
15803 offset_rtx = int_rtx;
15804 replacea = NULL_RTX;
15805 replaceb = NULL_RTX;
15808 reg = gen_rtx_REG (mode, regno);
15809 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
15810 mem = gen_frame_mem (mode, addr);
15812 insn = emit_move_insn (mem, reg);
15814 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
15817 /* Emit an offset memory reference suitable for a frame store, while
15818 converting to a valid addressing mode. */
15820 static rtx
15821 gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
15823 rtx int_rtx, offset_rtx;
15825 int_rtx = GEN_INT (offset);
15827 if ((TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
15828 || (TARGET_E500_DOUBLE && mode == DFmode))
15830 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
15831 emit_move_insn (offset_rtx, int_rtx);
15833 else
15834 offset_rtx = int_rtx;
15836 return gen_frame_mem (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
15839 /* Look for user-defined global regs.  We should not save and restore
15840 these, and cannot use stmw/lmw if there are any in their range.  */
15842 static bool
15843 no_global_regs_above (int first, bool gpr)
15845 int i;
15846 for (i = first; i < (gpr ? 32 : 64); i++)
15847 if (global_regs[i])
15848 return false;
15849 return true;
15852 #ifndef TARGET_FIX_AND_CONTINUE
15853 #define TARGET_FIX_AND_CONTINUE 0
15854 #endif
15856 /* It's really GPR 13 and FPR 14, but we need the smaller of the two. */
15857 #define FIRST_SAVRES_REGISTER FIRST_SAVED_GP_REGNO
15858 #define LAST_SAVRES_REGISTER 31
15859 #define N_SAVRES_REGISTERS (LAST_SAVRES_REGISTER - FIRST_SAVRES_REGISTER + 1)
15861 static GTY(()) rtx savres_routine_syms[N_SAVRES_REGISTERS][8];
15863 /* Return the symbol for an out-of-line register save/restore routine.
15864 We are saving/restoring GPRs if GPR is true. */
15866 static rtx
15867 rs6000_savres_routine_sym (rs6000_stack_t *info, bool savep, bool gpr, bool exitp)
15869 int regno = gpr ? info->first_gp_reg_save : (info->first_fp_reg_save - 32);
15870 rtx sym;
15871 int select = ((savep ? 1 : 0) << 2
15872 | (gpr
15873 /* On the SPE, we never have any FPRs, but we do have
15874 32/64-bit versions of the routines. */
15875 ? (TARGET_SPE_ABI && info->spe_64bit_regs_used ? 1 : 0)
15876 : 0) << 1
15877 | (exitp ? 1 : 0));
15879 /* Don't generate bogus routine names. */
15880 gcc_assert (FIRST_SAVRES_REGISTER <= regno && regno <= LAST_SAVRES_REGISTER);
15882 sym = savres_routine_syms[regno-FIRST_SAVRES_REGISTER][select];
15884 if (sym == NULL)
15886 char name[30];
15887 const char *action;
15888 const char *regkind;
15889 const char *exit_suffix;
15891 action = savep ? "save" : "rest";
15893 /* SPE has slightly different names for its routines depending on
15894 whether we are saving 32-bit or 64-bit registers. */
15895 if (TARGET_SPE_ABI)
15897 /* No floating point saves on the SPE. */
15898 gcc_assert (gpr);
15900 regkind = info->spe_64bit_regs_used ? "64gpr" : "32gpr";
15902 else
15903 regkind = gpr ? "gpr" : "fpr";
15905 exit_suffix = exitp ? "_x" : "";
15907 sprintf (name, "_%s%s_%d%s", action, regkind, regno, exit_suffix);
15909 sym = savres_routine_syms[regno-FIRST_SAVRES_REGISTER][select]
15910 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (name));
15913 return sym;
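/* Examples of the names built above, derived from the sprintf format:
   "_savegpr_29", "_restfpr_14_x" (the "_x" exit variant also tears
   down the frame), and on SPE "_save64gpr_20".  The routines
   themselves are expected to come from libgcc/crtsavres.  */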
15916 /* Emit a sequence of insns, including a stack tie if needed, for
15917 resetting the stack pointer. If SAVRES is true, then don't reset the
15918 stack pointer, but move the base of the frame into r11 for use by
15919 out-of-line register restore routines. */
15921 static void
15922 rs6000_emit_stack_reset (rs6000_stack_t *info,
15923 rtx sp_reg_rtx, rtx frame_reg_rtx,
15924 int sp_offset, bool savres)
15926 /* This blockage is needed so that sched doesn't decide to move
15927 the sp change before the register restores. */
15928 if (frame_reg_rtx != sp_reg_rtx
15929 || (TARGET_SPE_ABI
15930 && info->spe_64bit_regs_used != 0
15931 && info->first_gp_reg_save != 32))
15932 rs6000_emit_stack_tie ();
15934 if (frame_reg_rtx != sp_reg_rtx)
15936 rs6000_emit_stack_tie ();
15937 if (sp_offset != 0)
15938 emit_insn (gen_addsi3 (sp_reg_rtx, frame_reg_rtx,
15939 GEN_INT (sp_offset)));
15940 else if (!savres)
15941 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
15943 else if (sp_offset != 0)
15945 /* If we are restoring registers out-of-line, we will be using the
15946 "exit" variants of the restore routines, which will reset the
15947 stack for us. But we do need to point r11 into the right place
15948 for those routines. */
15949 rtx dest_reg = (savres
15950 ? gen_rtx_REG (Pmode, 11)
15951 : sp_reg_rtx);
15953 emit_insn (TARGET_32BIT
15954 ? gen_addsi3 (dest_reg, sp_reg_rtx,
15955 GEN_INT (sp_offset))
15956 : gen_adddi3 (dest_reg, sp_reg_rtx,
15957 GEN_INT (sp_offset)));
15961 /* Construct a parallel rtx describing the effect of a call to an
15962 out-of-line register save/restore routine. */
15964 static rtx
15965 rs6000_make_savres_rtx (rs6000_stack_t *info,
15966 rtx frame_reg_rtx, int save_area_offset,
15967 enum machine_mode reg_mode,
15968 bool savep, bool gpr, bool exitp)
15970 int i;
15971 int offset, start_reg, end_reg, n_regs;
15972 int reg_size = GET_MODE_SIZE (reg_mode);
15973 rtx sym;
15974 rtvec p;
15976 offset = 0;
15977 start_reg = (gpr
15978 ? info->first_gp_reg_save
15979 : info->first_fp_reg_save);
15980 end_reg = gpr ? 32 : 64;
15981 n_regs = end_reg - start_reg;
15982 p = rtvec_alloc ((exitp ? 4 : 3) + n_regs);
15984 /* If we're saving registers, then we should never say we're exiting. */
15985 gcc_assert ((savep && !exitp) || !savep);
15987 if (exitp)
15988 RTVEC_ELT (p, offset++) = gen_rtx_RETURN (VOIDmode);
15990 RTVEC_ELT (p, offset++)
15991 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 65));
15993 sym = rs6000_savres_routine_sym (info, savep, gpr, exitp);
15994 RTVEC_ELT (p, offset++) = gen_rtx_USE (VOIDmode, sym);
15995 RTVEC_ELT (p, offset++) = gen_rtx_USE (VOIDmode, gen_rtx_REG (Pmode, 11));
15997 for (i = 0; i < end_reg - start_reg; i++)
15999 rtx addr, reg, mem;
16000 reg = gen_rtx_REG (reg_mode, start_reg + i);
16001 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16002 GEN_INT (save_area_offset + reg_size*i));
16003 mem = gen_frame_mem (reg_mode, addr);
16005 RTVEC_ELT (p, i + offset) = gen_rtx_SET (VOIDmode,
16006 savep ? mem : reg,
16007 savep ? reg : mem);
16010 return gen_rtx_PARALLEL (VOIDmode, p);
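/* Sketch of the result for a restore with EXITP true:

       (parallel [(return)
                  (clobber (reg LR))
                  (use (symbol_ref "_restgpr_29_x"))
                  (use (reg 11))
                  (set (reg 29) (mem ...)) ...])

   A save flips each SET around and omits the RETURN.  */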
16013 /* Determine whether GP register REG is really used.  */
16015 static bool
16016 rs6000_reg_live_or_pic_offset_p (int reg)
16018 return ((df_regs_ever_live_p (reg)
16019 && (!call_used_regs[reg]
16020 || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
16021 && TARGET_TOC && TARGET_MINIMAL_TOC)))
16022 || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
16023 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
16024 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))));
16027 enum {
16028 SAVRES_MULTIPLE = 0x1,
16029 SAVRES_INLINE_FPRS = 0x2,
16030 SAVRES_INLINE_GPRS = 0x4
16031 };
16033 /* Determine the strategy for saving/restoring registers.  */
16035 static int
16036 rs6000_savres_strategy (rs6000_stack_t *info, bool savep,
16037 int using_static_chain_p, int sibcall)
16039 bool using_multiple_p;
16040 bool common;
16041 bool savres_fprs_inline;
16042 bool savres_gprs_inline;
16043 bool noclobber_global_gprs
16044 = no_global_regs_above (info->first_gp_reg_save, /*gpr=*/true);
16046 using_multiple_p = (TARGET_MULTIPLE && ! TARGET_POWERPC64
16047 && (!TARGET_SPE_ABI
16048 || info->spe_64bit_regs_used == 0)
16049 && info->first_gp_reg_save < 31
16050 && noclobber_global_gprs);
16051 /* Don't bother to try to save things out-of-line if r11 is occupied
16052 by the static chain. It would require too much fiddling and the
16053 static chain is rarely used anyway. */
16054 common = (using_static_chain_p
16055 || sibcall
16056 || crtl->calls_eh_return
16057 || !info->lr_save_p
16058 || cfun->machine->ra_need_lr
16059 || info->total_size > 32767);
16060 savres_fprs_inline = (common
16061 || info->first_fp_reg_save == 64
16062 || !no_global_regs_above (info->first_fp_reg_save,
16063 /*gpr=*/false)
16064 || FP_SAVE_INLINE (info->first_fp_reg_save));
16065 savres_gprs_inline = (common
16066 /* Saving CR interferes with the exit routines
16067 used on the SPE, so just punt here. */
16068 || (!savep
16069 && TARGET_SPE_ABI
16070 && info->spe_64bit_regs_used != 0
16071 && info->cr_save_p != 0)
16072 || info->first_gp_reg_save == 32
16073 || !noclobber_global_gprs
16074 || GP_SAVE_INLINE (info->first_gp_reg_save));
16076 if (savep)
16077 /* If we are going to use store multiple, then don't even bother
16078 with the out-of-line routines, since the store-multiple instruction
16079 will always be smaller. */
16080 savres_gprs_inline = savres_gprs_inline || using_multiple_p;
16081 else
16083 /* The situation is more complicated with load multiple. We'd
16084 prefer to use the out-of-line routines for restores, since the
16085 "exit" out-of-line routines can handle the restore of LR and
16086 the frame teardown. But we can only use the out-of-line
16087 routines if we know that we've used store multiple or
16088 out-of-line routines in the prologue, i.e. if we've saved all
16089 the registers from first_gp_reg_save. Otherwise, we risk
16090 loading garbage from the stack. Furthermore, we can only use
16091 the "exit" out-of-line gpr restore if we haven't saved any
16092 fprs. */
16093 bool saved_all = !savres_gprs_inline || using_multiple_p;
16095 if (saved_all && info->first_fp_reg_save != 64)
16096 /* We can't use the exit routine; use load multiple if it's
16097 available. */
16098 savres_gprs_inline = savres_gprs_inline || using_multiple_p;
16101 return (using_multiple_p
16102 | (savres_fprs_inline << 1)
16103 | (savres_gprs_inline << 2));
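/* Callers decode the packed return value with the SAVRES_* bits, as
   in rs6000_emit_prologue below:

       strategy = rs6000_savres_strategy (info, true, 0, 0);
       using_store_multiple = strategy & SAVRES_MULTIPLE;
       saving_GPRs_inline = strategy & SAVRES_INLINE_GPRS;  */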
16106 /* Emit function prologue as insns. */
16108 void
16109 rs6000_emit_prologue (void)
16111 rs6000_stack_t *info = rs6000_stack_info ();
16112 enum machine_mode reg_mode = Pmode;
16113 int reg_size = TARGET_32BIT ? 4 : 8;
16114 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
16115 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
16116 rtx frame_reg_rtx = sp_reg_rtx;
16117 rtx cr_save_rtx = NULL_RTX;
16118 rtx insn;
16119 int strategy;
16120 int saving_FPRs_inline;
16121 int saving_GPRs_inline;
16122 int using_store_multiple;
16123 int using_static_chain_p = (cfun->static_chain_decl != NULL_TREE
16124 && df_regs_ever_live_p (STATIC_CHAIN_REGNUM)
16125 && !call_used_regs[STATIC_CHAIN_REGNUM]);
16126 HOST_WIDE_INT sp_offset = 0;
16128 if (TARGET_FIX_AND_CONTINUE)
16130 /* gdb on darwin arranges to forward a function from the old
16131 address by modifying the first 5 instructions of the function
16132 to branch to the overriding function. This is necessary to
16133 permit function pointers that point to the old function to
16134 actually forward to the new function. */
16135 emit_insn (gen_nop ());
16136 emit_insn (gen_nop ());
16137 emit_insn (gen_nop ());
16138 emit_insn (gen_nop ());
16139 emit_insn (gen_nop ());
16142 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
16144 reg_mode = V2SImode;
16145 reg_size = 8;
16148 strategy = rs6000_savres_strategy (info, /*savep=*/true,
16149 /*static_chain_p=*/using_static_chain_p,
16150 /*sibcall=*/0);
16151 using_store_multiple = strategy & SAVRES_MULTIPLE;
16152 saving_FPRs_inline = strategy & SAVRES_INLINE_FPRS;
16153 saving_GPRs_inline = strategy & SAVRES_INLINE_GPRS;
16155 /* For V.4, update stack before we do any saving and set back pointer. */
16156 if (! WORLD_SAVE_P (info)
16157 && info->push_p
16158 && (DEFAULT_ABI == ABI_V4
16159 || crtl->calls_eh_return))
16161 bool need_r11 = (TARGET_SPE
16162 ? (!saving_GPRs_inline
16163 && info->spe_64bit_regs_used == 0)
16164 : (!saving_FPRs_inline || !saving_GPRs_inline));
16165 if (info->total_size < 32767)
16166 sp_offset = info->total_size;
16167 else
16168 frame_reg_rtx = (need_r11
16169 ? gen_rtx_REG (Pmode, 11)
16170 : frame_ptr_rtx);
16171 rs6000_emit_allocate_stack (info->total_size,
16172 (frame_reg_rtx != sp_reg_rtx
16173 && (info->cr_save_p
16174 || info->lr_save_p
16175 || info->first_fp_reg_save < 64
16176 || info->first_gp_reg_save < 32
16177 )),
16178 need_r11);
16179 if (frame_reg_rtx != sp_reg_rtx)
16180 rs6000_emit_stack_tie ();
16183 /* Handle world saves specially here. */
16184 if (WORLD_SAVE_P (info))
16186 int i, j, sz;
16187 rtx treg;
16188 rtvec p;
16189 rtx reg0;
16191 /* save_world expects lr in r0. */
16192 reg0 = gen_rtx_REG (Pmode, 0);
16193 if (info->lr_save_p)
16195 insn = emit_move_insn (reg0,
16196 gen_rtx_REG (Pmode, LR_REGNO));
16197 RTX_FRAME_RELATED_P (insn) = 1;
16200 /* The SAVE_WORLD and RESTORE_WORLD routines make a number of
16201 assumptions about the offsets of various bits of the stack
16202 frame. */
16203 gcc_assert (info->gp_save_offset == -220
16204 && info->fp_save_offset == -144
16205 && info->lr_save_offset == 8
16206 && info->cr_save_offset == 4
16207 && info->push_p
16208 && info->lr_save_p
16209 && (!crtl->calls_eh_return
16210 || info->ehrd_offset == -432)
16211 && info->vrsave_save_offset == -224
16212 && info->altivec_save_offset == -416);
16214 treg = gen_rtx_REG (SImode, 11);
16215 emit_move_insn (treg, GEN_INT (-info->total_size));
16217 /* SAVE_WORLD takes the caller's LR in R0 and the frame size
16218 in R11. It also clobbers R12, so beware! */
16220 /* Preserve CR2 for save_world prologues.  */
16221 sz = 5;
16222 sz += 32 - info->first_gp_reg_save;
16223 sz += 64 - info->first_fp_reg_save;
16224 sz += LAST_ALTIVEC_REGNO - info->first_altivec_reg_save + 1;
16225 p = rtvec_alloc (sz);
16226 j = 0;
16227 RTVEC_ELT (p, j++) = gen_rtx_CLOBBER (VOIDmode,
16228 gen_rtx_REG (SImode,
16229 LR_REGNO));
16230 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
16231 gen_rtx_SYMBOL_REF (Pmode,
16232 "*save_world"));
16233 /* We do floats first so that the instruction pattern matches
16234 properly. */
16235 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
16237 rtx reg = gen_rtx_REG (((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
16238 ? DFmode : SFmode),
16239 info->first_fp_reg_save + i);
16240 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16241 GEN_INT (info->fp_save_offset
16242 + sp_offset + 8 * i));
16243 rtx mem = gen_frame_mem (((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
16244 ? DFmode : SFmode), addr);
16246 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
16248 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
16250 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
16251 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16252 GEN_INT (info->altivec_save_offset
16253 + sp_offset + 16 * i));
16254 rtx mem = gen_frame_mem (V4SImode, addr);
16256 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
16258 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
16260 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
16261 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16262 GEN_INT (info->gp_save_offset
16263 + sp_offset + reg_size * i));
16264 rtx mem = gen_frame_mem (reg_mode, addr);
16266 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
16270 /* CR register traditionally saved as CR2. */
16271 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
16272 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16273 GEN_INT (info->cr_save_offset
16274 + sp_offset));
16275 rtx mem = gen_frame_mem (reg_mode, addr);
16277 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg);
16279 /* Record that R0 (holding the old LR value) goes into the LR save slot.  */
16280 if (info->lr_save_p)
16282 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16283 GEN_INT (info->lr_save_offset
16284 + sp_offset));
16285 rtx mem = gen_frame_mem (reg_mode, addr);
16287 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, mem, reg0);
16289 /* Explain what happens to the stack pointer. */
16291 rtx newval = gen_rtx_PLUS (Pmode, sp_reg_rtx, treg);
16292 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, sp_reg_rtx, newval);
16295 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
16296 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16297 treg, GEN_INT (-info->total_size));
16298 sp_offset = info->total_size;
16301 /* If we use the link register, get it into r0. */
16302 if (!WORLD_SAVE_P (info) && info->lr_save_p)
16304 rtx addr, reg, mem;
16306 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
16307 gen_rtx_REG (Pmode, LR_REGNO));
16308 RTX_FRAME_RELATED_P (insn) = 1;
16310 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16311 GEN_INT (info->lr_save_offset + sp_offset));
16312 reg = gen_rtx_REG (Pmode, 0);
16313 mem = gen_rtx_MEM (Pmode, addr);
16314 /* This should not use rs6000_sr_alias_set, because of
16315 __builtin_return_address.  */
16317 insn = emit_move_insn (mem, reg);
16318 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16319 NULL_RTX, NULL_RTX);
16322 /* If we need to save CR, put it into r12. */
16323 if (!WORLD_SAVE_P (info) && info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
16325 rtx set;
16327 cr_save_rtx = gen_rtx_REG (SImode, 12);
16328 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
16329 RTX_FRAME_RELATED_P (insn) = 1;
16330 /* Now, there's no way that dwarf2out_frame_debug_expr is going
16331 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
16332 But that's OK. All we have to do is specify that _one_ condition
16333 code register is saved in this stack slot. The thrower's epilogue
16334 will then restore all the call-saved registers.
16335 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
16336 set = gen_rtx_SET (VOIDmode, cr_save_rtx,
16337 gen_rtx_REG (SImode, CR2_REGNO));
16338 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
16339 set,
16340 REG_NOTES (insn));
16343 /* Do any required saving of fpr's. If only one or two to save, do
16344 it ourselves. Otherwise, call function. */
16345 if (!WORLD_SAVE_P (info) && saving_FPRs_inline)
16347 int i;
16348 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
16349 if ((df_regs_ever_live_p (info->first_fp_reg_save+i)
16350 && ! call_used_regs[info->first_fp_reg_save+i]))
16351 emit_frame_save (frame_reg_rtx, frame_ptr_rtx,
16352 (TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
16353 ? DFmode : SFmode,
16354 info->first_fp_reg_save + i,
16355 info->fp_save_offset + sp_offset + 8 * i,
16356 info->total_size);
16358 else if (!WORLD_SAVE_P (info) && info->first_fp_reg_save != 64)
16360 rtx par;
16362 par = rs6000_make_savres_rtx (info, frame_reg_rtx,
16363 info->fp_save_offset + sp_offset,
16364 DFmode,
16365 /*savep=*/true, /*gpr=*/false,
16366 /*exitp=*/false);
16367 insn = emit_insn (par);
16368 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16369 NULL_RTX, NULL_RTX);
16372 /* Save GPRs. This is done as a PARALLEL if we are using
16373 the store-multiple instructions. */
16374 if (!WORLD_SAVE_P (info)
16375 && TARGET_SPE_ABI
16376 && info->spe_64bit_regs_used != 0
16377 && info->first_gp_reg_save != 32)
16379 int i;
16380 rtx spe_save_area_ptr;
16382 /* Determine whether we can address all of the registers that need
16383 to be saved with an offset from the stack pointer that fits in
16384 the small const field for SPE memory instructions. */
16385 int spe_regs_addressable_via_sp
16386 = (SPE_CONST_OFFSET_OK(info->spe_gp_save_offset + sp_offset
16387 + (32 - info->first_gp_reg_save - 1) * reg_size)
16388 && saving_GPRs_inline);
16389 int spe_offset;
16391 if (spe_regs_addressable_via_sp)
16393 spe_save_area_ptr = frame_reg_rtx;
16394 spe_offset = info->spe_gp_save_offset + sp_offset;
16396 else
16398 /* Make r11 point to the start of the SPE save area. We need
16399 to be careful here if r11 is holding the static chain. If
16400 it is, then temporarily save it in r0. We would use r0 as
16401 our base register here, but using r0 as a base register in
16402 loads and stores means something different from what we
16403 would like. */
16404 int ool_adjust = (saving_GPRs_inline
16405 ? 0
16406 : (info->first_gp_reg_save
16407 - (FIRST_SAVRES_REGISTER+1))*8);
16408 HOST_WIDE_INT offset = (info->spe_gp_save_offset
16409 + sp_offset - ool_adjust);
16411 if (using_static_chain_p)
16413 rtx r0 = gen_rtx_REG (Pmode, 0);
16414 gcc_assert (info->first_gp_reg_save > 11);
16416 emit_move_insn (r0, gen_rtx_REG (Pmode, 11));
16419 spe_save_area_ptr = gen_rtx_REG (Pmode, 11);
16420 insn = emit_insn (gen_addsi3 (spe_save_area_ptr,
16421 frame_reg_rtx,
16422 GEN_INT (offset)));
16423 /* The move to r11 needs a frame-related note so that unwind
16424 information is output properly.  */
16425 if (!saving_GPRs_inline)
16426 rs6000_frame_related (insn, frame_reg_rtx, offset,
16427 NULL_RTX, NULL_RTX);
16428 spe_offset = 0;
16431 if (saving_GPRs_inline)
16433 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
16434 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
16436 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
16437 rtx offset, addr, mem;
16439 /* We're doing all this to ensure that the offset fits into
16440 the immediate offset of 'evstdd'. */
16441 gcc_assert (SPE_CONST_OFFSET_OK (reg_size * i + spe_offset));
16443 offset = GEN_INT (reg_size * i + spe_offset);
16444 addr = gen_rtx_PLUS (Pmode, spe_save_area_ptr, offset);
16445 mem = gen_rtx_MEM (V2SImode, addr);
16447 insn = emit_move_insn (mem, reg);
16449 rs6000_frame_related (insn, spe_save_area_ptr,
16450 info->spe_gp_save_offset
16451 + sp_offset + reg_size * i,
16452 offset, const0_rtx);
16455 else
16457 rtx par;
16459 par = rs6000_make_savres_rtx (info, gen_rtx_REG (Pmode, 11),
16460 0, reg_mode,
16461 /*savep=*/true, /*gpr=*/true,
16462 /*exitp=*/false);
16463 insn = emit_insn (par);
16464 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16465 NULL_RTX, NULL_RTX);
16469 /* Move the static chain pointer back. */
16470 if (using_static_chain_p && !spe_regs_addressable_via_sp)
16471 emit_move_insn (gen_rtx_REG (Pmode, 11), gen_rtx_REG (Pmode, 0));
16473 else if (!WORLD_SAVE_P (info) && !saving_GPRs_inline)
16475 rtx par;
16477 /* Need to adjust r11 if we saved any FPRs. */
16478 if (info->first_fp_reg_save != 64)
16480 rtx r11 = gen_rtx_REG (reg_mode, 11);
16481 rtx offset = GEN_INT (info->total_size
16482 + (-8 * (64-info->first_fp_reg_save)));
16483 rtx ptr_reg = (sp_reg_rtx == frame_reg_rtx
16484 ? sp_reg_rtx : r11);
16486 emit_insn (TARGET_32BIT
16487 ? gen_addsi3 (r11, ptr_reg, offset)
16488 : gen_adddi3 (r11, ptr_reg, offset));
16491 par = rs6000_make_savres_rtx (info, frame_reg_rtx,
16492 info->gp_save_offset + sp_offset,
16493 reg_mode,
16494 /*savep=*/true, /*gpr=*/true,
16495 /*exitp=*/false);
16496 insn = emit_insn (par);
16497 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16498 NULL_RTX, NULL_RTX);
16500 else if (!WORLD_SAVE_P (info) && using_store_multiple)
16502 rtvec p;
16503 int i;
16504 p = rtvec_alloc (32 - info->first_gp_reg_save);
16505 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
16507 rtx addr, reg, mem;
16508 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
16509 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16510 GEN_INT (info->gp_save_offset
16511 + sp_offset
16512 + reg_size * i));
16513 mem = gen_frame_mem (reg_mode, addr);
16515 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
16517 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
16518 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16519 NULL_RTX, NULL_RTX);
16521 else if (!WORLD_SAVE_P (info))
16523 int i;
16524 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
16525 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
16527 rtx addr, reg, mem;
16528 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
16530 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16531 GEN_INT (info->gp_save_offset
16532 + sp_offset
16533 + reg_size * i));
16534 mem = gen_frame_mem (reg_mode, addr);
16536 insn = emit_move_insn (mem, reg);
16537 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16538 NULL_RTX, NULL_RTX);
16542 /* ??? There's no need to emit actual instructions here, but it's the
16543 easiest way to get the frame unwind information emitted. */
16544 if (crtl->calls_eh_return)
16546 unsigned int i, regno;
16548 /* In the AIX ABI we need to pretend we save r2 here.  */
16549 if (TARGET_AIX)
16551 rtx addr, reg, mem;
16553 reg = gen_rtx_REG (reg_mode, 2);
16554 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16555 GEN_INT (sp_offset + 5 * reg_size));
16556 mem = gen_frame_mem (reg_mode, addr);
16558 insn = emit_move_insn (mem, reg);
16559 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16560 NULL_RTX, NULL_RTX);
16561 PATTERN (insn) = gen_blockage ();
16564 for (i = 0; ; ++i)
16566 regno = EH_RETURN_DATA_REGNO (i);
16567 if (regno == INVALID_REGNUM)
16568 break;
16570 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
16571 info->ehrd_offset + sp_offset
16572 + reg_size * (int) i,
16573 info->total_size);
16577 /* Save CR if we use any that must be preserved. */
16578 if (!WORLD_SAVE_P (info) && info->cr_save_p)
16580 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16581 GEN_INT (info->cr_save_offset + sp_offset));
16582 rtx mem = gen_frame_mem (SImode, addr);
16583 /* See the large comment above about why CR2_REGNO is used. */
16584 rtx magic_eh_cr_reg = gen_rtx_REG (SImode, CR2_REGNO);
16586 /* If r12 was used to hold the original sp, copy cr into r0 now
16587 that it's free. */
16588 if (REGNO (frame_reg_rtx) == 12)
16590 rtx set;
16592 cr_save_rtx = gen_rtx_REG (SImode, 0);
16593 insn = emit_insn (gen_movesi_from_cr (cr_save_rtx));
16594 RTX_FRAME_RELATED_P (insn) = 1;
16595 set = gen_rtx_SET (VOIDmode, cr_save_rtx, magic_eh_cr_reg);
16596 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
16597 set,
16598 REG_NOTES (insn));
16601 insn = emit_move_insn (mem, cr_save_rtx);
16603 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16604 NULL_RTX, NULL_RTX);
16607 /* Update stack and set back pointer unless this is V.4,
16608 for which it was done previously. */
16609 if (!WORLD_SAVE_P (info) && info->push_p
16610 && !(DEFAULT_ABI == ABI_V4 || crtl->calls_eh_return))
16612 if (info->total_size < 32767)
16613 sp_offset = info->total_size;
16614 else
16615 frame_reg_rtx = frame_ptr_rtx;
16616 rs6000_emit_allocate_stack (info->total_size,
16617 (frame_reg_rtx != sp_reg_rtx
16618 && ((info->altivec_size != 0)
16619 || (info->vrsave_mask != 0)
16620 )),
16621 FALSE);
16622 if (frame_reg_rtx != sp_reg_rtx)
16623 rs6000_emit_stack_tie ();
16626 /* Set frame pointer, if needed. */
16627 if (frame_pointer_needed)
16629 insn = emit_move_insn (gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
16630 sp_reg_rtx);
16631 RTX_FRAME_RELATED_P (insn) = 1;
16634 /* Save AltiVec registers if needed. Save here because the red zone does
16635 not include AltiVec registers. */
16636 if (!WORLD_SAVE_P (info) && TARGET_ALTIVEC_ABI && info->altivec_size != 0)
16638 int i;
16640 /* There should be a non-inline version of this, for when we
16641 are saving lots of vector registers.  */
16642 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
16643 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
16645 rtx areg, savereg, mem;
16646 int offset;
16648 offset = info->altivec_save_offset + sp_offset
16649 + 16 * (i - info->first_altivec_reg_save);
16651 savereg = gen_rtx_REG (V4SImode, i);
16653 areg = gen_rtx_REG (Pmode, 0);
16654 emit_move_insn (areg, GEN_INT (offset));
16656 /* AltiVec addressing mode is [reg+reg]. */
16657 mem = gen_frame_mem (V4SImode,
16658 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
16660 insn = emit_move_insn (mem, savereg);
16662 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
16663 areg, GEN_INT (offset));
16667 /* VRSAVE is a bit vector representing which AltiVec registers
16668 are used. The OS uses this to determine which vector
16669 registers to save on a context switch. We need to save
16670 VRSAVE on the stack frame, add whatever AltiVec registers we
16671 used in this function, and do the corresponding magic in the
16672 epilogue. */
16674 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
16675 && info->vrsave_mask != 0)
16677 rtx reg, mem, vrsave;
16678 int offset;
16680 /* Get VRSAVE into a GPR.  Note that ABI_V4 might be using r12
16681 as frame_reg_rtx and r11 as the static chain pointer for
16682 nested functions. */
16683 reg = gen_rtx_REG (SImode, 0);
16684 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
16685 if (TARGET_MACHO)
16686 emit_insn (gen_get_vrsave_internal (reg));
16687 else
16688 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
16690 if (!WORLD_SAVE_P (info))
16692 /* Save VRSAVE. */
16693 offset = info->vrsave_save_offset + sp_offset;
16694 mem = gen_frame_mem (SImode,
16695 gen_rtx_PLUS (Pmode, frame_reg_rtx,
16696 GEN_INT (offset)));
16697 insn = emit_move_insn (mem, reg);
16700 /* Include the registers in the mask. */
16701 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
16703 insn = emit_insn (generate_set_vrsave (reg, info, 0));
16706 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
16707 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
16708 || (DEFAULT_ABI == ABI_V4
16709 && (flag_pic == 1 || (flag_pic && TARGET_SECURE_PLT))
16710 && df_regs_ever_live_p (RS6000_PIC_OFFSET_TABLE_REGNUM)))
16712 /* If emit_load_toc_table will use the link register, we need to save
16713 it. We use R12 for this purpose because emit_load_toc_table
16714 can use register 0. This allows us to use a plain 'blr' to return
16715 from the procedure more often. */
16716 int save_LR_around_toc_setup = (TARGET_ELF
16717 && DEFAULT_ABI != ABI_AIX
16718 && flag_pic
16719 && ! info->lr_save_p
16720 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0);
16721 if (save_LR_around_toc_setup)
16723 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
16725 insn = emit_move_insn (frame_ptr_rtx, lr);
16726 RTX_FRAME_RELATED_P (insn) = 1;
16728 rs6000_emit_load_toc_table (TRUE);
16730 insn = emit_move_insn (lr, frame_ptr_rtx);
16731 RTX_FRAME_RELATED_P (insn) = 1;
16733 else
16734 rs6000_emit_load_toc_table (TRUE);
16737 #if TARGET_MACHO
16738 if (DEFAULT_ABI == ABI_DARWIN
16739 && flag_pic && crtl->uses_pic_offset_table)
16741 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
16742 rtx src = gen_rtx_SYMBOL_REF (Pmode, MACHOPIC_FUNCTION_BASE_NAME);
16744 /* Save and restore LR locally around this call (in R0). */
16745 if (!info->lr_save_p)
16746 emit_move_insn (gen_rtx_REG (Pmode, 0), lr);
16748 emit_insn (gen_load_macho_picbase (src));
16750 emit_move_insn (gen_rtx_REG (Pmode,
16751 RS6000_PIC_OFFSET_TABLE_REGNUM),
16752 lr);
16754 if (!info->lr_save_p)
16755 emit_move_insn (lr, gen_rtx_REG (Pmode, 0));
16757 #endif
16760 /* Write function prologue. */
16762 static void
16763 rs6000_output_function_prologue (FILE *file,
16764 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
16766 rs6000_stack_t *info = rs6000_stack_info ();
16768 if (TARGET_DEBUG_STACK)
16769 debug_stack_info (info);
16771 /* Write .extern for any function we will call to save and restore
16772 fp values. */
16773 if (info->first_fp_reg_save < 64
16774 && !FP_SAVE_INLINE (info->first_fp_reg_save))
16775 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
16776 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
16777 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
16779 /* Write .extern for AIX common mode routines, if needed. */
16780 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
16782 fputs ("\t.extern __mulh\n", file);
16783 fputs ("\t.extern __mull\n", file);
16784 fputs ("\t.extern __divss\n", file);
16785 fputs ("\t.extern __divus\n", file);
16786 fputs ("\t.extern __quoss\n", file);
16787 fputs ("\t.extern __quous\n", file);
16788 common_mode_defined = 1;
16791 if (! HAVE_prologue)
16793 start_sequence ();
16795 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
16796 the "toplevel" insn chain. */
16797 emit_note (NOTE_INSN_DELETED);
16798 rs6000_emit_prologue ();
16799 emit_note (NOTE_INSN_DELETED);
16801 /* Expand INSN_ADDRESSES so final() doesn't crash. */
16803 rtx insn;
16804 unsigned addr = 0;
16805 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
16807 INSN_ADDRESSES_NEW (insn, addr);
16808 addr += 4;
16812 if (TARGET_DEBUG_STACK)
16813 debug_rtx_list (get_insns (), 100);
16814 final (get_insns (), file, FALSE);
16815 end_sequence ();
16818 rs6000_pic_labelno++;
16821 /* Nonzero if VMX regs are restored before the frame pop, zero if
16822 we restore after the pop when possible.  */
16823 #define ALWAYS_RESTORE_ALTIVEC_BEFORE_POP 0
16825 /* Reload CR from REG. */
16827 static void
16828 rs6000_restore_saved_cr (rtx reg, int using_mfcr_multiple)
16830 int count = 0;
16831 int i;
16833 if (using_mfcr_multiple)
16835 for (i = 0; i < 8; i++)
16836 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
16837 count++;
16838 gcc_assert (count);
16841 if (using_mfcr_multiple && count > 1)
16843 rtvec p;
16844 int ndx;
16846 p = rtvec_alloc (count);
16848 ndx = 0;
16849 for (i = 0; i < 8; i++)
16850 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
16852 rtvec r = rtvec_alloc (2);
16853 RTVEC_ELT (r, 0) = reg;
16854 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
16855 RTVEC_ELT (p, ndx) =
16856 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
16857 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
16858 ndx++;
16860 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
16861 gcc_assert (ndx == count);
16863 else
16864 for (i = 0; i < 8; i++)
16865 if (df_regs_ever_live_p (CR0_REGNO+i) && ! call_used_regs[CR0_REGNO+i])
16867 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
16868 CR0_REGNO+i),
16869 reg));
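/* Illustrative only: each SET above typically assembles to an mtcrf;
   in the USING_MFCR_MULTIPLE case the PARALLEL should combine into a
   single

       mtcrf MASK,rN

   with one mask bit (1 << (7-i)) per live CR field.  That form is
   cheap on the 601/603/750, hence the processor test in the
   caller.  */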
16873 /* Emit function epilogue as insns.
16875 At present, dwarf2out_frame_debug_expr doesn't understand
16876 register restores, so we don't bother setting RTX_FRAME_RELATED_P
16877 anywhere in the epilogue. Most of the insns below would in any case
16878 need special notes to explain where r11 is in relation to the stack. */
16880 void
16881 rs6000_emit_epilogue (int sibcall)
16883 rs6000_stack_t *info;
16884 int restoring_GPRs_inline;
16885 int restoring_FPRs_inline;
16886 int using_load_multiple;
16887 int using_mtcr_multiple;
16888 int use_backchain_to_restore_sp;
16889 int restore_lr;
16890 int strategy;
16891 int sp_offset = 0;
16892 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
16893 rtx frame_reg_rtx = sp_reg_rtx;
16894 enum machine_mode reg_mode = Pmode;
16895 int reg_size = TARGET_32BIT ? 4 : 8;
16896 int i;
16898 info = rs6000_stack_info ();
16900 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
16902 reg_mode = V2SImode;
16903 reg_size = 8;
16906 strategy = rs6000_savres_strategy (info, /*savep=*/false,
16907 /*static_chain_p=*/0, sibcall);
16908 using_load_multiple = strategy & SAVRES_MULTIPLE;
16909 restoring_FPRs_inline = strategy & SAVRES_INLINE_FPRS;
16910 restoring_GPRs_inline = strategy & SAVRES_INLINE_GPRS;
16911 using_mtcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
16912 || rs6000_cpu == PROCESSOR_PPC603
16913 || rs6000_cpu == PROCESSOR_PPC750
16914 || optimize_size);
16915 /* Restore via the backchain when we have a large frame, since this
16916 is more efficient than an addis, addi pair. The second condition
16917 here will not trigger at the moment; we don't actually need a
16918 frame pointer for alloca, but the generic parts of the compiler
16919 give us one anyway. */
16920 use_backchain_to_restore_sp = (info->total_size > 32767
16921 || info->total_size
16922 + (info->lr_save_p ? info->lr_save_offset : 0)
16923 > 32767
16924 || (cfun->calls_alloca
16925 && !frame_pointer_needed));
16926 restore_lr = (info->lr_save_p
16927 && restoring_GPRs_inline
16928 && restoring_FPRs_inline);
16930 if (WORLD_SAVE_P (info))
16932 int i, j;
16933 char rname[30];
16934 const char *alloc_rname;
16935 rtvec p;
16937 /* eh_rest_world_r10 will return to the location saved in the LR
16938 stack slot (which is not likely to be our caller).
16939 Input: R10 -- stack adjustment. Clobbers R0, R11, R12, R7, R8.
16940 rest_world is similar, except any R10 parameter is ignored.
16941 The exception-handling stuff that was here in 2.95 is no
16942 longer necessary. */
16944 p = rtvec_alloc (9
16946 + 32 - info->first_gp_reg_save
16947 + LAST_ALTIVEC_REGNO + 1 - info->first_altivec_reg_save
16948 + 63 + 1 - info->first_fp_reg_save);
16950 strcpy (rname, ((crtl->calls_eh_return) ?
16951 "*eh_rest_world_r10" : "*rest_world"));
16952 alloc_rname = ggc_strdup (rname);
16954 j = 0;
16955 RTVEC_ELT (p, j++) = gen_rtx_RETURN (VOIDmode);
16956 RTVEC_ELT (p, j++) = gen_rtx_USE (VOIDmode,
16957 gen_rtx_REG (Pmode,
16958 LR_REGNO));
16959 RTVEC_ELT (p, j++)
16960 = gen_rtx_USE (VOIDmode, gen_rtx_SYMBOL_REF (Pmode, alloc_rname));
16961 /* The instruction pattern requires a clobber here;
16962 it is shared with the restVEC helper. */
16963 RTVEC_ELT (p, j++)
16964 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
16967 /* CR register traditionally saved as CR2. */
16968 rtx reg = gen_rtx_REG (reg_mode, CR2_REGNO);
16969 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16970 GEN_INT (info->cr_save_offset));
16971 rtx mem = gen_frame_mem (reg_mode, addr);
16973 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16976 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
16978 rtx reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
16979 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16980 GEN_INT (info->gp_save_offset
16981 + reg_size * i));
16982 rtx mem = gen_frame_mem (reg_mode, addr);
16984 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16986 for (i = 0; info->first_altivec_reg_save + i <= LAST_ALTIVEC_REGNO; i++)
16988 rtx reg = gen_rtx_REG (V4SImode, info->first_altivec_reg_save + i);
16989 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
16990 GEN_INT (info->altivec_save_offset
16991 + 16 * i));
16992 rtx mem = gen_frame_mem (V4SImode, addr);
16994 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
16996 for (i = 0; info->first_fp_reg_save + i <= 63; i++)
16998 rtx reg = gen_rtx_REG (((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
16999 ? DFmode : SFmode),
17000 info->first_fp_reg_save + i);
17001 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17002 GEN_INT (info->fp_save_offset
17003 + 8 * i));
17004 rtx mem = gen_frame_mem (((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
17005 ? DFmode : SFmode), addr);
17007 RTVEC_ELT (p, j++) = gen_rtx_SET (VOIDmode, reg, mem);
17009 RTVEC_ELT (p, j++)
17010 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 0));
17011 RTVEC_ELT (p, j++)
17012 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 12));
17013 RTVEC_ELT (p, j++)
17014 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 7));
17015 RTVEC_ELT (p, j++)
17016 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 8));
17017 RTVEC_ELT (p, j++)
17018 = gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, 10));
17019 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
17021 return;
17024 /* frame_reg_rtx + sp_offset points to the top of this stack frame. */
17025 if (info->push_p)
17026 sp_offset = info->total_size;
17028 /* Restore AltiVec registers if we must do so before adjusting the
17029 stack. */
17030 if (TARGET_ALTIVEC_ABI
17031 && info->altivec_size != 0
17032 && (ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
17033 || (DEFAULT_ABI != ABI_V4
17034 && info->altivec_save_offset < (TARGET_32BIT ? -220 : -288))))
17036 int i;
17038 if (use_backchain_to_restore_sp)
17040 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
17041 emit_move_insn (frame_reg_rtx,
17042 gen_rtx_MEM (Pmode, sp_reg_rtx));
17043 sp_offset = 0;
17045 else if (frame_pointer_needed)
17046 frame_reg_rtx = hard_frame_pointer_rtx;
17048 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
17049 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
17051 rtx addr, areg, mem;
17053 areg = gen_rtx_REG (Pmode, 0);
17054 emit_move_insn
17055 (areg, GEN_INT (info->altivec_save_offset
17056 + sp_offset
17057 + 16 * (i - info->first_altivec_reg_save)));
17059 /* AltiVec addressing mode is [reg+reg]. */
17060 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
17061 mem = gen_frame_mem (V4SImode, addr);
17063 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
17067 /* Restore VRSAVE if we must do so before adjusting the stack. */
17068 if (TARGET_ALTIVEC
17069 && TARGET_ALTIVEC_VRSAVE
17070 && info->vrsave_mask != 0
17071 && (ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
17072 || (DEFAULT_ABI != ABI_V4
17073 && info->vrsave_save_offset < (TARGET_32BIT ? -220 : -288))))
17075 rtx addr, mem, reg;
17077 if (frame_reg_rtx == sp_reg_rtx)
17079 if (use_backchain_to_restore_sp)
17081 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
17082 emit_move_insn (frame_reg_rtx,
17083 gen_rtx_MEM (Pmode, sp_reg_rtx));
17084 sp_offset = 0;
17086 else if (frame_pointer_needed)
17087 frame_reg_rtx = hard_frame_pointer_rtx;
17090 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17091 GEN_INT (info->vrsave_save_offset + sp_offset));
17092 mem = gen_frame_mem (SImode, addr);
17093 reg = gen_rtx_REG (SImode, 12);
17094 emit_move_insn (reg, mem);
17096 emit_insn (generate_set_vrsave (reg, info, 1));
17099 /* If we have a large stack frame, restore the old stack pointer
17100 using the backchain. */
17101 if (use_backchain_to_restore_sp)
17103 if (frame_reg_rtx == sp_reg_rtx)
17105 /* Under V.4, don't reset the stack pointer until after we're done
17106 loading the saved registers. */
17107 if (DEFAULT_ABI == ABI_V4)
17108 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
17110 emit_move_insn (frame_reg_rtx,
17111 gen_rtx_MEM (Pmode, sp_reg_rtx));
17112 sp_offset = 0;
17114 else if (ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
17115 && DEFAULT_ABI == ABI_V4)
17116 /* frame_reg_rtx has been set up by the altivec restore. */
17117 ;
17118 else
17120 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
17121 frame_reg_rtx = sp_reg_rtx;
17124 /* If we have a frame pointer, we can restore the old stack pointer
17125 from it. */
17126 else if (frame_pointer_needed)
17128 frame_reg_rtx = sp_reg_rtx;
17129 if (DEFAULT_ABI == ABI_V4)
17130 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
17132 emit_insn (TARGET_32BIT
17133 ? gen_addsi3 (frame_reg_rtx, hard_frame_pointer_rtx,
17134 GEN_INT (info->total_size))
17135 : gen_adddi3 (frame_reg_rtx, hard_frame_pointer_rtx,
17136 GEN_INT (info->total_size)));
17137 sp_offset = 0;
17139 else if (info->push_p
17140 && DEFAULT_ABI != ABI_V4
17141 && !crtl->calls_eh_return)
17143 emit_insn (TARGET_32BIT
17144 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
17145 GEN_INT (info->total_size))
17146 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
17147 GEN_INT (info->total_size)));
17148 sp_offset = 0;
17151 /* Restore AltiVec registers if we have not done so already. */
17152 if (!ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
17153 && TARGET_ALTIVEC_ABI
17154 && info->altivec_size != 0
17155 && (DEFAULT_ABI == ABI_V4
17156 || info->altivec_save_offset >= (TARGET_32BIT ? -220 : -288)))
17158 int i;
17160 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
17161 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
17163 rtx addr, areg, mem;
17165 areg = gen_rtx_REG (Pmode, 0);
17166 emit_move_insn
17167 (areg, GEN_INT (info->altivec_save_offset
17168 + sp_offset
17169 + 16 * (i - info->first_altivec_reg_save)));
17171 /* AltiVec addressing mode is [reg+reg]. */
17172 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
17173 mem = gen_frame_mem (V4SImode, addr);
17175 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
17179 /* Restore VRSAVE if we have not done so already. */
17180 if (!ALWAYS_RESTORE_ALTIVEC_BEFORE_POP
17181 && TARGET_ALTIVEC
17182 && TARGET_ALTIVEC_VRSAVE
17183 && info->vrsave_mask != 0
17184 && (DEFAULT_ABI == ABI_V4
17185 || info->vrsave_save_offset >= (TARGET_32BIT ? -220 : -288)))
17187 rtx addr, mem, reg;
17189 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17190 GEN_INT (info->vrsave_save_offset + sp_offset));
17191 mem = gen_frame_mem (SImode, addr);
17192 reg = gen_rtx_REG (SImode, 12);
17193 emit_move_insn (reg, mem);
17195 emit_insn (generate_set_vrsave (reg, info, 1));
17198 /* Get the old lr if we saved it. If we are restoring registers
17199 out-of-line, then the out-of-line routines can do this for us. */
17200 if (restore_lr)
17202 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
17203 info->lr_save_offset + sp_offset);
17205 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
17208 /* Get the old cr if we saved it. */
17209 if (info->cr_save_p)
17211 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17212 GEN_INT (info->cr_save_offset + sp_offset));
17213 rtx mem = gen_frame_mem (SImode, addr);
17215 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
17218 /* Set LR here to try to overlap restores below. */
17219 if (restore_lr)
17220 emit_move_insn (gen_rtx_REG (Pmode, LR_REGNO),
17221 gen_rtx_REG (Pmode, 0));
17223 /* Load exception handler data registers, if needed. */
17224 if (crtl->calls_eh_return)
17226 unsigned int i, regno;
17228 if (TARGET_AIX)
17230 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17231 GEN_INT (sp_offset + 5 * reg_size));
17232 rtx mem = gen_frame_mem (reg_mode, addr);
17234 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
17237 for (i = 0; ; ++i)
17239 rtx mem;
17241 regno = EH_RETURN_DATA_REGNO (i);
17242 if (regno == INVALID_REGNUM)
17243 break;
17245 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
17246 info->ehrd_offset + sp_offset
17247 + reg_size * (int) i);
17249 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
17253 /* Restore GPRs. This is done as a PARALLEL if we are using
17254 the load-multiple instructions. */
17255 if (TARGET_SPE_ABI
17256 && info->spe_64bit_regs_used != 0
17257 && info->first_gp_reg_save != 32)
17259 /* Determine whether we can address all of the registers that need
17260 to be saved with an offset from the stack pointer that fits in
17261 the small const field for SPE memory instructions. */
17262 int spe_regs_addressable_via_sp
17263 = (SPE_CONST_OFFSET_OK(info->spe_gp_save_offset + sp_offset
17264 + (32 - info->first_gp_reg_save - 1) * reg_size)
17265 && restoring_GPRs_inline);
17266 int spe_offset;
17268 if (spe_regs_addressable_via_sp)
17269 spe_offset = info->spe_gp_save_offset + sp_offset;
17270 else
17272 rtx old_frame_reg_rtx = frame_reg_rtx;
17273 /* Make r11 point to the start of the SPE save area. We worried about
17274 not clobbering it when we were saving registers in the prologue.
17275 There's no need to worry here because the static chain is passed
17276 anew to every function. */
17277 int ool_adjust = (restoring_GPRs_inline
17278 ? 0
17279 : (info->first_gp_reg_save
17280 - (FIRST_SAVRES_REGISTER+1))*8);
17282 if (frame_reg_rtx == sp_reg_rtx)
17283 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
17284 emit_insn (gen_addsi3 (frame_reg_rtx, old_frame_reg_rtx,
17285 GEN_INT (info->spe_gp_save_offset
17286 + sp_offset
17287 - ool_adjust)));
17288 /* Keep the invariant that frame_reg_rtx + sp_offset points
17289 at the top of the stack frame. */
17290 sp_offset = -info->spe_gp_save_offset;
17292 spe_offset = 0;
17295 if (restoring_GPRs_inline)
17297 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
17298 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
17300 rtx offset, addr, mem;
17302 /* We're doing all this to ensure that the immediate offset
17303 fits into the immediate field of 'evldd'. */
17304 gcc_assert (SPE_CONST_OFFSET_OK (spe_offset + reg_size * i));
17306 offset = GEN_INT (spe_offset + reg_size * i);
17307 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, offset);
17308 mem = gen_rtx_MEM (V2SImode, addr);
17310 emit_move_insn (gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
17311 mem);
17314 else
17316 rtx par;
17318 par = rs6000_make_savres_rtx (info, gen_rtx_REG (Pmode, 11),
17319 0, reg_mode,
17320 /*savep=*/false, /*gpr=*/true,
17321 /*exitp=*/true);
17322 emit_jump_insn (par);
17324 /* We don't want anybody else emitting things after we jumped
17325 back. */
17326 return;
17329 else if (!restoring_GPRs_inline)
17331 /* We are jumping to an out-of-line function. */
17332 bool can_use_exit = info->first_fp_reg_save == 64;
17333 rtx par;
17335 /* Emit stack reset code if we need it. */
17336 if (can_use_exit)
17337 rs6000_emit_stack_reset (info, sp_reg_rtx, frame_reg_rtx,
17338 sp_offset, can_use_exit);
17339 else
17340 emit_insn (gen_addsi3 (gen_rtx_REG (Pmode, 11),
17341 sp_reg_rtx,
17342 GEN_INT (sp_offset - info->fp_size)));
17344 par = rs6000_make_savres_rtx (info, frame_reg_rtx,
17345 info->gp_save_offset, reg_mode,
17346 /*savep=*/false, /*gpr=*/true,
17347 /*exitp=*/can_use_exit);
17349 if (can_use_exit)
17351 if (info->cr_save_p)
17352 rs6000_restore_saved_cr (gen_rtx_REG (SImode, 12),
17353 using_mtcr_multiple);
17355 emit_jump_insn (par);
17357 /* We don't want anybody else emitting things after we jumped
17358 back. */
17359 return;
17361 else
17362 emit_insn (par);
17364 else if (using_load_multiple)
17366 rtvec p;
17367 p = rtvec_alloc (32 - info->first_gp_reg_save);
17368 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
17370 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17371 GEN_INT (info->gp_save_offset
17372 + sp_offset
17373 + reg_size * i));
17374 rtx mem = gen_frame_mem (reg_mode, addr);
17376 RTVEC_ELT (p, i) =
17377 gen_rtx_SET (VOIDmode,
17378 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
17379 mem);
17381 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
17383 else
17385 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
17386 if (rs6000_reg_live_or_pic_offset_p (info->first_gp_reg_save + i))
17388 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17389 GEN_INT (info->gp_save_offset
17390 + sp_offset
17391 + reg_size * i));
17392 rtx mem = gen_frame_mem (reg_mode, addr);
17394 emit_move_insn (gen_rtx_REG (reg_mode,
17395 info->first_gp_reg_save + i), mem);
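/* Illustrative note (not from the original source): when
   using_load_multiple is true, the PARALLEL built above matches the
   load-multiple pattern, so with first_gp_reg_save == 29 the whole
   restore collapses to a single
       lmw r29,OFFSET(r1)
   bringing back r29-r31 in one instruction. */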
17399 /* Restore fpr's if we need to do it without calling a function. */
17400 if (restoring_FPRs_inline)
17401 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
17402 if ((df_regs_ever_live_p (info->first_fp_reg_save+i)
17403 && ! call_used_regs[info->first_fp_reg_save+i]))
17405 rtx addr, mem;
17406 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
17407 GEN_INT (info->fp_save_offset
17408 + sp_offset
17409 + 8 * i));
17410 mem = gen_frame_mem (((TARGET_HARD_FLOAT && TARGET_DOUBLE_FLOAT)
17411 ? DFmode : SFmode), addr);
17413 emit_move_insn (gen_rtx_REG (((TARGET_HARD_FLOAT
17414 && TARGET_DOUBLE_FLOAT)
17415 ? DFmode : SFmode),
17416 info->first_fp_reg_save + i),
17417 mem);
17420 /* If we saved the CR, restore it here; just the fields that were used. */
17421 if (info->cr_save_p)
17422 rs6000_restore_saved_cr (gen_rtx_REG (SImode, 12), using_mtcr_multiple);
17424 /* If this is V.4, unwind the stack pointer after all of the loads
17425 have been done. */
17426 rs6000_emit_stack_reset (info, sp_reg_rtx, frame_reg_rtx,
17427 sp_offset, !restoring_FPRs_inline);
17429 if (crtl->calls_eh_return)
17431 rtx sa = EH_RETURN_STACKADJ_RTX;
17432 emit_insn (TARGET_32BIT
17433 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
17434 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
17437 if (!sibcall)
17439 rtvec p;
17440 if (! restoring_FPRs_inline)
17441 p = rtvec_alloc (4 + 64 - info->first_fp_reg_save);
17442 else
17443 p = rtvec_alloc (2);
17445 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
17446 RTVEC_ELT (p, 1) = (restoring_FPRs_inline
17447 ? gen_rtx_USE (VOIDmode, gen_rtx_REG (Pmode, 65))
17448 : gen_rtx_CLOBBER (VOIDmode,
17449 gen_rtx_REG (Pmode, 65)));
17451 /* If we have to restore more than two FP registers, branch to the
17452 restore function. It will return to our caller. */
17453 if (! restoring_FPRs_inline)
17455 int i;
17456 rtx sym;
17458 sym = rs6000_savres_routine_sym (info,
17459 /*savep=*/false,
17460 /*gpr=*/false,
17461 /*exitp=*/true);
17462 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode, sym);
17463 RTVEC_ELT (p, 3) = gen_rtx_USE (VOIDmode,
17464 gen_rtx_REG (Pmode, 11));
17465 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
17467 rtx addr, mem;
17468 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
17469 GEN_INT (info->fp_save_offset + 8*i));
17470 mem = gen_frame_mem (DFmode, addr);
17472 RTVEC_ELT (p, i+4) =
17473 gen_rtx_SET (VOIDmode,
17474 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
17475 mem);
17479 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
17483 /* Write function epilogue. */
17485 static void
17486 rs6000_output_function_epilogue (FILE *file,
17487 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
17489 if (! HAVE_epilogue)
17491 rtx insn = get_last_insn ();
17492 /* If the last insn was a BARRIER, we don't have to write anything except
17493 the trace table. */
17494 if (GET_CODE (insn) == NOTE)
17495 insn = prev_nonnote_insn (insn);
17496 if (insn == 0 || GET_CODE (insn) != BARRIER)
17498 /* This is slightly ugly, but at least we don't have two
17499 copies of the epilogue-emitting code. */
17500 start_sequence ();
17502 /* A NOTE_INSN_DELETED is supposed to be at the start
17503 and end of the "toplevel" insn chain. */
17504 emit_note (NOTE_INSN_DELETED);
17505 rs6000_emit_epilogue (FALSE);
17506 emit_note (NOTE_INSN_DELETED);
17508 /* Expand INSN_ADDRESSES so final() doesn't crash. */
17510 rtx insn;
17511 unsigned addr = 0;
17512 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
17514 INSN_ADDRESSES_NEW (insn, addr);
17515 addr += 4;
17519 if (TARGET_DEBUG_STACK)
17520 debug_rtx_list (get_insns (), 100);
17521 final (get_insns (), file, FALSE);
17522 end_sequence ();
17526 #if TARGET_MACHO
17527 macho_branch_islands ();
17528 /* Mach-O doesn't support labels at the end of objects, so if
17529 it looks like we might want one, insert a NOP. */
17531 rtx insn = get_last_insn ();
17532 while (insn
17533 && NOTE_P (insn)
17534 && NOTE_KIND (insn) != NOTE_INSN_DELETED_LABEL)
17535 insn = PREV_INSN (insn);
17536 if (insn
17537 && (LABEL_P (insn)
17538 || (NOTE_P (insn)
17539 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL)))
17540 fputs ("\tnop\n", file);
17542 #endif
17544 /* Output a traceback table here. See /usr/include/sys/debug.h for info
17545 on its format.
17547 We don't output a traceback table if -finhibit-size-directive was
17548 used. The documentation for -finhibit-size-directive reads
17549 ``don't output a @code{.size} assembler directive, or anything
17550 else that would cause trouble if the function is split in the
17551 middle, and the two halves are placed at locations far apart in
17552 memory.'' The traceback table has this property, since it
17553 includes the offset from the start of the function to the
17554 traceback table itself.
17556 System V.4 PowerPC (and the embedded ABI derived from it) uses a
17557 different traceback table. */
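/* Illustrative sketch (not from the original source) of the fixed
   part of the table emitted below, which all ends up on one .byte
   directive:
       .long 0                    all-zero word marking the tbtab start
       .byte 0,LANG,B1,B2,B3,GPRS,FIXED,LAST
   where LANG is the language code and B1-B3 are the flag bytes
   documented field by field below; the optional, variable-length
   fields follow only when optional_tbtab is nonzero. */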
17558 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
17559 && rs6000_traceback != traceback_none && !crtl->is_thunk)
17561 const char *fname = NULL;
17562 const char *language_string = lang_hooks.name;
17563 int fixed_parms = 0, float_parms = 0, parm_info = 0;
17564 int i;
17565 int optional_tbtab;
17566 rs6000_stack_t *info = rs6000_stack_info ();
17568 if (rs6000_traceback == traceback_full)
17569 optional_tbtab = 1;
17570 else if (rs6000_traceback == traceback_part)
17571 optional_tbtab = 0;
17572 else
17573 optional_tbtab = !optimize_size && !TARGET_ELF;
17575 if (optional_tbtab)
17577 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
17578 while (*fname == '.') /* V.4 encodes . in the name */
17579 fname++;
17581 /* Need label immediately before tbtab, so we can compute
17582 its offset from the function start. */
17583 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
17584 ASM_OUTPUT_LABEL (file, fname);
17587 /* The .tbtab pseudo-op can only be used for the first eight
17588 expressions, since it can't handle the possibly variable
17589 length fields that follow. However, if you omit the optional
17590 fields, the assembler outputs zeros for all optional fields
17591 anyway, giving each variable-length field its minimum length
17592 (as defined in sys/debug.h). Thus we cannot use the .tbtab
17593 pseudo-op at all. */
17595 /* An all-zero word flags the start of the tbtab, for debuggers
17596 that have to find it by searching forward from the entry
17597 point or from the current pc. */
17598 fputs ("\t.long 0\n", file);
17600 /* Tbtab format type. Use format type 0. */
17601 fputs ("\t.byte 0,", file);
17603 /* Language type. Unfortunately, there does not seem to be any
17604 official way to discover the language being compiled, so we
17605 use language_string.
17606 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
17607 Java is 13. Objective-C is 14. Objective-C++ isn't assigned
17608 a number, so for now use 9. */
17609 if (! strcmp (language_string, "GNU C"))
17610 i = 0;
17611 else if (! strcmp (language_string, "GNU F77")
17612 || ! strcmp (language_string, "GNU Fortran"))
17613 i = 1;
17614 else if (! strcmp (language_string, "GNU Pascal"))
17615 i = 2;
17616 else if (! strcmp (language_string, "GNU Ada"))
17617 i = 3;
17618 else if (! strcmp (language_string, "GNU C++")
17619 || ! strcmp (language_string, "GNU Objective-C++"))
17620 i = 9;
17621 else if (! strcmp (language_string, "GNU Java"))
17622 i = 13;
17623 else if (! strcmp (language_string, "GNU Objective-C"))
17624 i = 14;
17625 else
17626 gcc_unreachable ();
17627 fprintf (file, "%d,", i);
17629 /* 8 single bit fields: global linkage (not set for C extern linkage,
17630 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
17631 from start of procedure stored in tbtab, internal function, function
17632 has controlled storage, function has no toc, function uses fp,
17633 function logs/aborts fp operations. */
17634 /* Assume that fp operations are used if any fp reg must be saved. */
17635 fprintf (file, "%d,",
17636 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
17638 /* 6 bitfields: function is interrupt handler, name present in
17639 proc table, function calls alloca, on condition directives
17640 (controls stack walks, 3 bits), saves condition reg, saves
17641 link reg. */
17642 /* The `function calls alloca' bit seems to be set whenever reg 31 is
17643 set up as a frame pointer, even when there is no alloca call. */
17644 fprintf (file, "%d,",
17645 ((optional_tbtab << 6)
17646 | ((optional_tbtab & frame_pointer_needed) << 5)
17647 | (info->cr_save_p << 1)
17648 | (info->lr_save_p)));
17650 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
17651 (6 bits). */
17652 fprintf (file, "%d,",
17653 (info->push_p << 7) | (64 - info->first_fp_reg_save));
17655 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
17656 fprintf (file, "%d,", (32 - first_reg_to_save ()));
17658 if (optional_tbtab)
17660 /* Compute the parameter info from the function decl argument
17661 list. */
17662 tree decl;
17663 int next_parm_info_bit = 31;
17665 for (decl = DECL_ARGUMENTS (current_function_decl);
17666 decl; decl = TREE_CHAIN (decl))
17668 rtx parameter = DECL_INCOMING_RTL (decl);
17669 enum machine_mode mode = GET_MODE (parameter);
17671 if (GET_CODE (parameter) == REG)
17673 if (SCALAR_FLOAT_MODE_P (mode))
17675 int bits;
17677 float_parms++;
17679 switch (mode)
17681 case SFmode:
17682 case SDmode:
17683 bits = 0x2;
17684 break;
17686 case DFmode:
17687 case DDmode:
17688 case TFmode:
17689 case TDmode:
17690 bits = 0x3;
17691 break;
17693 default:
17694 gcc_unreachable ();
17697 /* If only one bit will fit, don't or in this entry. */
17698 if (next_parm_info_bit > 0)
17699 parm_info |= (bits << (next_parm_info_bit - 1));
17700 next_parm_info_bit -= 2;
17702 else
17704 fixed_parms += ((GET_MODE_SIZE (mode)
17705 + (UNITS_PER_WORD - 1))
17706 / UNITS_PER_WORD);
17707 next_parm_info_bit -= 1;
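/* Worked example (illustrative): for a function taking (double, int),
   the double stores 0b11 into bits 31-30 of parm_info, and the int
   then occupies bit 29 as a zero, leaving parm_info == 0xc0000000
   with fixed_parms == 1 and float_parms == 1. */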
17713 /* Number of fixed point parameters. */
17714 /* This is actually the number of words of fixed-point parameters;
17715 an 8-byte struct counts as 2, so the maximum value is 8. */
17716 fprintf (file, "%d,", fixed_parms);
17718 /* 2 bitfields: number of floating point parameters (7 bits), parameters
17719 all on stack. */
17720 /* This is actually the number of fp registers that hold parameters,
17721 so the maximum value is 13. */
17722 /* Set parameters on stack bit if parameters are not in their original
17723 registers, regardless of whether they are on the stack? Xlc
17724 seems to set the bit when not optimizing. */
17725 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
17727 if (! optional_tbtab)
17728 return;
17730 /* Optional fields follow. Some are variable length. */
17732 /* Parameter types, left-adjusted bit fields: 0 fixed, 10 single float,
17733 11 double float. */
17734 /* There is an entry for each parameter in a register, in the order that
17735 they occur in the parameter list. Any intervening arguments on the
17736 stack are ignored. If the list overflows a long (max possible length
17737 34 bits) then completely leave off all elements that don't fit. */
17738 /* Only emit this long if there was at least one parameter. */
17739 if (fixed_parms || float_parms)
17740 fprintf (file, "\t.long %d\n", parm_info);
17742 /* Offset from start of code to tb table. */
17743 fputs ("\t.long ", file);
17744 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
17745 if (TARGET_AIX)
17746 RS6000_OUTPUT_BASENAME (file, fname);
17747 else
17748 assemble_name (file, fname);
17749 putc ('-', file);
17750 rs6000_output_function_entry (file, fname);
17751 putc ('\n', file);
17753 /* Interrupt handler mask. */
17754 /* Omit this long, since we never set the interrupt handler bit
17755 above. */
17757 /* Number of CTL (controlled storage) anchors. */
17758 /* Omit this long, since the has_ctl bit is never set above. */
17760 /* Displacement into stack of each CTL anchor. */
17761 /* Omit this list of longs, because there are no CTL anchors. */
17763 /* Length of function name. */
17764 if (*fname == '*')
17765 ++fname;
17766 fprintf (file, "\t.short %d\n", (int) strlen (fname));
17768 /* Function name. */
17769 assemble_string (fname, strlen (fname));
17771 /* Register for alloca automatic storage; this is always reg 31.
17772 Only emit this if the alloca bit was set above. */
17773 if (frame_pointer_needed)
17774 fputs ("\t.byte 31\n", file);
17776 fputs ("\t.align 2\n", file);
17780 /* A C compound statement that outputs the assembler code for a thunk
17781 function, used to implement C++ virtual function calls with
17782 multiple inheritance. The thunk acts as a wrapper around a virtual
17783 function, adjusting the implicit object parameter before handing
17784 control off to the real function.
17786 First, emit code to add the integer DELTA to the location that
17787 contains the incoming first argument. Assume that this argument
17788 contains a pointer, and is the one used to pass the `this' pointer
17789 in C++. This is the incoming argument *before* the function
17790 prologue, e.g. `%o0' on a sparc. The addition must preserve the
17791 values of all other incoming arguments.
17793 After the addition, emit code to jump to FUNCTION, which is a
17794 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
17795 not touch the return address. Hence returning from FUNCTION will
17796 return to whoever called the current `thunk'.
17798 The effect must be as if FUNCTION had been called directly with the
17799 adjusted first argument. This macro is responsible for emitting
17800 all of the code for a thunk function; output_function_prologue()
17801 and output_function_epilogue() are not invoked.
17803 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
17804 been extracted from it.) It might possibly be useful on some
17805 targets, but probably not.
17807 If you do not define this macro, the target-independent code in the
17808 C++ frontend will generate a less efficient heavyweight thunk that
17809 calls FUNCTION instead of jumping to it. The generic approach does
17810 not support varargs. */
17812 static void
17813 rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
17814 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
17815 tree function)
17817 rtx this_rtx, insn, funexp;
17819 reload_completed = 1;
17820 epilogue_completed = 1;
17822 /* Mark the end of the (empty) prologue. */
17823 emit_note (NOTE_INSN_PROLOGUE_END);
17825 /* Find the "this" pointer. If the function returns a structure,
17826 the structure return pointer is in r3. */
17827 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
17828 this_rtx = gen_rtx_REG (Pmode, 4);
17829 else
17830 this_rtx = gen_rtx_REG (Pmode, 3);
17832 /* Apply the constant offset, if required. */
17833 if (delta)
17835 rtx delta_rtx = GEN_INT (delta);
17836 emit_insn (TARGET_32BIT
17837 ? gen_addsi3 (this_rtx, this_rtx, delta_rtx)
17838 : gen_adddi3 (this_rtx, this_rtx, delta_rtx));
17841 /* Apply the offset from the vtable, if required. */
17842 if (vcall_offset)
17844 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
17845 rtx tmp = gen_rtx_REG (Pmode, 12);
17847 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this_rtx));
17848 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
17850 emit_insn (TARGET_32BIT
17851 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
17852 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
17853 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
17855 else
17857 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
17859 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
17861 emit_insn (TARGET_32BIT
17862 ? gen_addsi3 (this_rtx, this_rtx, tmp)
17863 : gen_adddi3 (this_rtx, this_rtx, tmp));
17866 /* Generate a tail call to the target function. */
17867 if (!TREE_USED (function))
17869 assemble_external (function);
17870 TREE_USED (function) = 1;
17872 funexp = XEXP (DECL_RTL (function), 0);
17873 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
17875 #if TARGET_MACHO
17876 if (MACHOPIC_INDIRECT)
17877 funexp = machopic_indirect_call_target (funexp);
17878 #endif
17880 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
17881 generate sibcall RTL explicitly. */
17882 insn = emit_call_insn (
17883 gen_rtx_PARALLEL (VOIDmode,
17884 gen_rtvec (4,
17885 gen_rtx_CALL (VOIDmode,
17886 funexp, const0_rtx),
17887 gen_rtx_USE (VOIDmode, const0_rtx),
17888 gen_rtx_USE (VOIDmode,
17889 gen_rtx_REG (SImode,
17890 LR_REGNO)),
17891 gen_rtx_RETURN (VOIDmode))));
17892 SIBLING_CALL_P (insn) = 1;
17893 emit_barrier ();
17895 /* Run just enough of rest_of_compilation to get the insns emitted.
17896 There's not really enough bulk here to make other passes such as
17897 instruction scheduling worthwhile. Note that use_thunk calls
17898 assemble_start_function and assemble_end_function. */
17899 insn = get_insns ();
17900 insn_locators_alloc ();
17901 shorten_branches (insn);
17902 final_start_function (insn, file, 1);
17903 final (insn, file, 1);
17904 final_end_function ();
17905 free_after_compilation (cfun);
17907 reload_completed = 0;
17908 epilogue_completed = 0;
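/* Illustrative result (not from the original source): for a 32-bit
   thunk with delta == 8 and vcall_offset == 0, the RTL above boils
   down to
       addi r3,r3,8
       b    <function>
   one adjustment of the incoming `this' pointer, then a tail call. */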
17911 /* A quick summary of the various types of 'constant-pool tables'
17912 under PowerPC:
17914 Target      Flags           Name             One table per
17915 AIX         (none)          AIX TOC          object file
17916 AIX         -mfull-toc      AIX TOC          object file
17917 AIX         -mminimal-toc   AIX minimal TOC  translation unit
17918 SVR4/EABI   (none)          SVR4 SDATA       object file
17919 SVR4/EABI   -fpic           SVR4 pic         object file
17920 SVR4/EABI   -fPIC           SVR4 PIC         translation unit
17921 SVR4/EABI   -mrelocatable   EABI TOC         function
17922 SVR4/EABI   -maix           AIX TOC          object file
17923 SVR4/EABI   -maix -mminimal-toc
17924                             AIX minimal TOC  translation unit
17926 Name             Reg.  Set by  Entries  ----- contains: -----
17927                                made by  addrs?  fp?      sum?
17929 AIX TOC           2    crt0    as       Y       option   option
17930 AIX minimal TOC  30    prolog  gcc      Y       Y        option
17931 SVR4 SDATA       13    crt0    gcc      N       Y        N
17932 SVR4 pic         30    prolog  ld       Y       not yet  N
17933 SVR4 PIC         30    prolog  gcc      Y       option   option
17934 EABI TOC         30    prolog  gcc      Y       option   option
17935 */
17938 /* Hash functions for the TOC hash table. */
17940 static unsigned
17941 rs6000_hash_constant (rtx k)
17943 enum rtx_code code = GET_CODE (k);
17944 enum machine_mode mode = GET_MODE (k);
17945 unsigned result = (code << 3) ^ mode;
17946 const char *format;
17947 int flen, fidx;
17949 format = GET_RTX_FORMAT (code);
17950 flen = strlen (format);
17951 fidx = 0;
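/* Note (not from the original source): 613 and 1231 below are small
   primes used purely as multiplicative mixing constants for the hash. */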
17953 switch (code)
17955 case LABEL_REF:
17956 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
17958 case CONST_DOUBLE:
17959 if (mode != VOIDmode)
17960 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
17961 flen = 2;
17962 break;
17964 case CODE_LABEL:
17965 fidx = 3;
17966 break;
17968 default:
17969 break;
17972 for (; fidx < flen; fidx++)
17973 switch (format[fidx])
17975 case 's':
17977 unsigned i, len;
17978 const char *str = XSTR (k, fidx);
17979 len = strlen (str);
17980 result = result * 613 + len;
17981 for (i = 0; i < len; i++)
17982 result = result * 613 + (unsigned) str[i];
17983 break;
17985 case 'u':
17986 case 'e':
17987 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
17988 break;
17989 case 'i':
17990 case 'n':
17991 result = result * 613 + (unsigned) XINT (k, fidx);
17992 break;
17993 case 'w':
17994 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
17995 result = result * 613 + (unsigned) XWINT (k, fidx);
17996 else
17998 size_t i;
17999 for (i = 0; i < sizeof (HOST_WIDE_INT) / sizeof (unsigned); i++)
18000 result = result * 613 + (unsigned) (XWINT (k, fidx)
18001 >> CHAR_BIT * i);
18003 break;
18004 case '0':
18005 break;
18006 default:
18007 gcc_unreachable ();
18010 return result;
18013 static unsigned
18014 toc_hash_function (const void *hash_entry)
18016 const struct toc_hash_struct *thc =
18017 (const struct toc_hash_struct *) hash_entry;
18018 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
18021 /* Compare H1 and H2 for equivalence. */
18023 static int
18024 toc_hash_eq (const void *h1, const void *h2)
18026 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
18027 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
18029 if (((const struct toc_hash_struct *) h1)->key_mode
18030 != ((const struct toc_hash_struct *) h2)->key_mode)
18031 return 0;
18033 return rtx_equal_p (r1, r2);
18036 /* These are the names given by the C++ front-end to vtables, and
18037 vtable-like objects. Ideally, this logic should not be here;
18038 instead, there should be some programmatic way of inquiring as
18039 to whether or not an object is a vtable. */
18041 #define VTABLE_NAME_P(NAME) \
18042 (strncmp ("_vt.", (NAME), strlen ("_vt.")) == 0 \
18043 || strncmp ("_ZTV", (NAME), strlen ("_ZTV")) == 0 \
18044 || strncmp ("_ZTT", (NAME), strlen ("_ZTT")) == 0 \
18045 || strncmp ("_ZTI", (NAME), strlen ("_ZTI")) == 0 \
18046 || strncmp ("_ZTC", (NAME), strlen ("_ZTC")) == 0)
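/* For example, the Itanium C++ ABI mangles the vtable of a class Foo
   as "_ZTV3Foo", its VTT as "_ZTT3Foo", its typeinfo as "_ZTI3Foo",
   and its construction vtables as names beginning "_ZTC", all of
   which the prefix tests above accept. */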
18048 #ifdef NO_DOLLAR_IN_LABEL
18049 /* Return a GGC-allocated character string translating dollar signs in
18050 input NAME to underscores. Used by XCOFF ASM_OUTPUT_LABELREF. */
18052 const char *
18053 rs6000_xcoff_strip_dollar (const char *name)
18055 char *strip, *p;
18056 int len;
18058 p = strchr (name, '$');
18060 if (p == 0 || p == name)
18061 return name;
18063 len = strlen (name);
18064 strip = (char *) alloca (len + 1);
18065 strcpy (strip, name);
18066 p = strchr (strip, '$');
18067 while (p)
18069 *p = '_';
18070 p = strchr (p + 1, '$');
18073 return ggc_alloc_string (strip, len);
18075 #endif
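/* For example, rs6000_xcoff_strip_dollar ("f$g$h") returns "f_g_h".
   A name whose first character is '$' is returned unchanged because
   of the p == name check above. */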
18077 void
18078 rs6000_output_symbol_ref (FILE *file, rtx x)
18080 /* Currently C++ toc references to vtables can be emitted before it
18081 is decided whether the vtable is public or private. If this is
18082 the case, then the linker will eventually complain that there is
18083 a reference to an unknown section. Thus, for vtables only,
18084 we emit the TOC reference to reference the symbol and not the
18085 section. */
18086 const char *name = XSTR (x, 0);
18088 if (VTABLE_NAME_P (name))
18090 RS6000_OUTPUT_BASENAME (file, name);
18092 else
18093 assemble_name (file, name);
18096 /* Output a TOC entry. We derive the entry name from what is being
18097 written. */
18099 void
18100 output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
18102 char buf[256];
18103 const char *name = buf;
18104 rtx base = x;
18105 HOST_WIDE_INT offset = 0;
18107 gcc_assert (!TARGET_NO_TOC);
18109 /* When the linker won't eliminate them, don't output duplicate
18110 TOC entries (this happens on AIX if there is any kind of TOC,
18111 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
18112 CODE_LABELs. */
18113 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
18115 struct toc_hash_struct *h;
18116 void * * found;
18118 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
18119 time because GGC is not initialized at that point. */
18120 if (toc_hash_table == NULL)
18121 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
18122 toc_hash_eq, NULL);
18124 h = GGC_NEW (struct toc_hash_struct);
18125 h->key = x;
18126 h->key_mode = mode;
18127 h->labelno = labelno;
18129 found = htab_find_slot (toc_hash_table, h, 1);
18130 if (*found == NULL)
18131 *found = h;
18132 else /* This is indeed a duplicate.
18133 Set this label equal to that label. */
18135 fputs ("\t.set ", file);
18136 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
18137 fprintf (file, "%d,", labelno);
18138 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
18139 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
18140 found)->labelno));
18141 return;
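/* Illustrative output for the duplicate case above (XCOFF syntax,
   where the internal label prefix expands to "LC.."):
       .set LC..42,LC..7
   i.e. this entry's label is simply aliased to the label of the
   entry emitted earlier. */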
18145 /* If we're going to put a double constant in the TOC, make sure it's
18146 aligned properly when strict alignment is on. */
18147 if (GET_CODE (x) == CONST_DOUBLE
18148 && STRICT_ALIGNMENT
18149 && GET_MODE_BITSIZE (mode) >= 64
18150 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC))
18151 ASM_OUTPUT_ALIGN (file, 3);
18154 (*targetm.asm_out.internal_label) (file, "LC", labelno);
18156 /* Handle FP constants specially. Note that if we have a minimal
18157 TOC, things we put here aren't actually in the TOC, so we can allow
18158 FP constants. */
18159 if (GET_CODE (x) == CONST_DOUBLE
18160 && (GET_MODE (x) == TFmode || GET_MODE (x) == TDmode))
18162 REAL_VALUE_TYPE rv;
18163 long k[4];
18165 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
18166 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
18167 REAL_VALUE_TO_TARGET_DECIMAL128 (rv, k);
18168 else
18169 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
18171 if (TARGET_64BIT)
18173 if (TARGET_MINIMAL_TOC)
18174 fputs (DOUBLE_INT_ASM_OP, file);
18175 else
18176 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
18177 k[0] & 0xffffffff, k[1] & 0xffffffff,
18178 k[2] & 0xffffffff, k[3] & 0xffffffff);
18179 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
18180 k[0] & 0xffffffff, k[1] & 0xffffffff,
18181 k[2] & 0xffffffff, k[3] & 0xffffffff);
18182 return;
18184 else
18186 if (TARGET_MINIMAL_TOC)
18187 fputs ("\t.long ", file);
18188 else
18189 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
18190 k[0] & 0xffffffff, k[1] & 0xffffffff,
18191 k[2] & 0xffffffff, k[3] & 0xffffffff);
18192 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
18193 k[0] & 0xffffffff, k[1] & 0xffffffff,
18194 k[2] & 0xffffffff, k[3] & 0xffffffff);
18195 return;
18198 else if (GET_CODE (x) == CONST_DOUBLE
18199 && (GET_MODE (x) == DFmode || GET_MODE (x) == DDmode))
18201 REAL_VALUE_TYPE rv;
18202 long k[2];
18204 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
18206 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
18207 REAL_VALUE_TO_TARGET_DECIMAL64 (rv, k);
18208 else
18209 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
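/* Illustrative note (not from the original source): for the double
   constant 1.0, whose image is 0x3ff0000000000000, the 64-bit branch
   below emits
       .tc FD_3ff00000_0[TC],0x3ff0000000000000
   while -mminimal-toc emits just the raw 64-bit value, since minimal
   TOC entries never reach a real TOC. */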
18211 if (TARGET_64BIT)
18213 if (TARGET_MINIMAL_TOC)
18214 fputs (DOUBLE_INT_ASM_OP, file);
18215 else
18216 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
18217 k[0] & 0xffffffff, k[1] & 0xffffffff);
18218 fprintf (file, "0x%lx%08lx\n",
18219 k[0] & 0xffffffff, k[1] & 0xffffffff);
18220 return;
18222 else
18224 if (TARGET_MINIMAL_TOC)
18225 fputs ("\t.long ", file);
18226 else
18227 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
18228 k[0] & 0xffffffff, k[1] & 0xffffffff);
18229 fprintf (file, "0x%lx,0x%lx\n",
18230 k[0] & 0xffffffff, k[1] & 0xffffffff);
18231 return;
18234 else if (GET_CODE (x) == CONST_DOUBLE
18235 && (GET_MODE (x) == SFmode || GET_MODE (x) == SDmode))
18237 REAL_VALUE_TYPE rv;
18238 long l;
18240 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
18241 if (DECIMAL_FLOAT_MODE_P (GET_MODE (x)))
18242 REAL_VALUE_TO_TARGET_DECIMAL32 (rv, l);
18243 else
18244 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
18246 if (TARGET_64BIT)
18248 if (TARGET_MINIMAL_TOC)
18249 fputs (DOUBLE_INT_ASM_OP, file);
18250 else
18251 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
18252 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
18253 return;
18255 else
18257 if (TARGET_MINIMAL_TOC)
18258 fputs ("\t.long ", file);
18259 else
18260 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
18261 fprintf (file, "0x%lx\n", l & 0xffffffff);
18262 return;
18265 else if (GET_MODE (x) == VOIDmode
18266 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
18268 unsigned HOST_WIDE_INT low;
18269 HOST_WIDE_INT high;
18271 if (GET_CODE (x) == CONST_DOUBLE)
18273 low = CONST_DOUBLE_LOW (x);
18274 high = CONST_DOUBLE_HIGH (x);
18276 else
18277 #if HOST_BITS_PER_WIDE_INT == 32
18279 low = INTVAL (x);
18280 high = (low & 0x80000000) ? ~0 : 0;
18282 #else
18284 low = INTVAL (x) & 0xffffffff;
18285 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
18287 #endif
18289 /* TOC entries are always Pmode-sized, and since this
18290 is a big-endian machine, smaller integer constants
18291 placed in the TOC have to be left-justified within the entry.
18292 (This is still a win over putting the constants in
18293 a separate constant pool, because then we'd have
18294 to have both a TOC entry _and_ the actual constant.)
18296 For a 32-bit target, CONST_INT values are loaded and shifted
18297 entirely within `low' and can be stored in one TOC entry. */
18299 /* Modes wider than a pointer on a 64-bit target would be easy to support, but aren't handled now. */
18300 gcc_assert (!TARGET_64BIT || POINTER_SIZE >= GET_MODE_BITSIZE (mode));
18302 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
18304 #if HOST_BITS_PER_WIDE_INT == 32
18305 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
18306 POINTER_SIZE, &low, &high, 0);
18307 #else
18308 low |= high << 32;
18309 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
18310 high = (HOST_WIDE_INT) low >> 32;
18311 low &= 0xffffffff;
18312 #endif
18315 if (TARGET_64BIT)
18317 if (TARGET_MINIMAL_TOC)
18318 fputs (DOUBLE_INT_ASM_OP, file);
18319 else
18320 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
18321 (long) high & 0xffffffff, (long) low & 0xffffffff);
18322 fprintf (file, "0x%lx%08lx\n",
18323 (long) high & 0xffffffff, (long) low & 0xffffffff);
18324 return;
18326 else
18328 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
18330 if (TARGET_MINIMAL_TOC)
18331 fputs ("\t.long ", file);
18332 else
18333 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
18334 (long) high & 0xffffffff, (long) low & 0xffffffff);
18335 fprintf (file, "0x%lx,0x%lx\n",
18336 (long) high & 0xffffffff, (long) low & 0xffffffff);
18338 else
18340 if (TARGET_MINIMAL_TOC)
18341 fputs ("\t.long ", file);
18342 else
18343 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
18344 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
18346 return;
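/* Illustrative note (hypothetical values): on a 32-bit target without
   -mminimal-toc, an HImode constant 0x1234 is left-justified to
   pointer width by the shift above and is emitted as
       .tc IS_12340000[TC],0x12340000  */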
18350 if (GET_CODE (x) == CONST)
18352 gcc_assert (GET_CODE (XEXP (x, 0)) == PLUS);
18354 base = XEXP (XEXP (x, 0), 0);
18355 offset = INTVAL (XEXP (XEXP (x, 0), 1));
18358 switch (GET_CODE (base))
18360 case SYMBOL_REF:
18361 name = XSTR (base, 0);
18362 break;
18364 case LABEL_REF:
18365 ASM_GENERATE_INTERNAL_LABEL (buf, "L",
18366 CODE_LABEL_NUMBER (XEXP (base, 0)));
18367 break;
18369 case CODE_LABEL:
18370 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
18371 break;
18373 default:
18374 gcc_unreachable ();
18377 if (TARGET_MINIMAL_TOC)
18378 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
18379 else
18381 fputs ("\t.tc ", file);
18382 RS6000_OUTPUT_BASENAME (file, name);
18384 if (offset < 0)
18385 fprintf (file, ".N" HOST_WIDE_INT_PRINT_UNSIGNED, - offset);
18386 else if (offset)
18387 fprintf (file, ".P" HOST_WIDE_INT_PRINT_UNSIGNED, offset);
18389 fputs ("[TC],", file);
18392 /* Currently C++ toc references to vtables can be emitted before it
18393 is decided whether the vtable is public or private. If this is
18394 the case, then the linker will eventually complain that there is
18395 a TOC reference to an unknown section. Thus, for vtables only,
18396 we emit the TOC reference to reference the symbol and not the
18397 section. */
18398 if (VTABLE_NAME_P (name))
18400 RS6000_OUTPUT_BASENAME (file, name);
18401 if (offset < 0)
18402 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset);
18403 else if (offset > 0)
18404 fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC, offset);
18406 else
18407 output_addr_const (file, x);
18408 putc ('\n', file);
18411 /* Output an assembler pseudo-op to write an ASCII string of N characters
18412 starting at P to FILE.
18414 On the RS/6000, we have to do this using the .byte operation and
18415 write out special characters outside the quoted string.
18416 Also, the assembler is broken; very long strings are truncated,
18417 so we must artificially break them up early. */
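/* For example (illustrative), output_ascii (file, "hi\n", 3) emits
       .byte "hi"
       .byte 10
   printable runs are grouped into quoted strings, and everything
   else is written out as decimal byte values. */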
18419 void
18420 output_ascii (FILE *file, const char *p, int n)
18422 char c;
18423 int i, count_string;
18424 const char *for_string = "\t.byte \"";
18425 const char *for_decimal = "\t.byte ";
18426 const char *to_close = NULL;
18428 count_string = 0;
18429 for (i = 0; i < n; i++)
18431 c = *p++;
18432 if (c >= ' ' && c < 0177)
18434 if (for_string)
18435 fputs (for_string, file);
18436 putc (c, file);
18438 /* Write two quotes to get one. */
18439 if (c == '"')
18441 putc (c, file);
18442 ++count_string;
18445 for_string = NULL;
18446 for_decimal = "\"\n\t.byte ";
18447 to_close = "\"\n";
18448 ++count_string;
18450 if (count_string >= 512)
18452 fputs (to_close, file);
18454 for_string = "\t.byte \"";
18455 for_decimal = "\t.byte ";
18456 to_close = NULL;
18457 count_string = 0;
18460 else
18462 if (for_decimal)
18463 fputs (for_decimal, file);
18464 fprintf (file, "%d", c);
18466 for_string = "\n\t.byte \"";
18467 for_decimal = ", ";
18468 to_close = "\n";
18469 count_string = 0;
18473 /* Now close the string if we have written one. Then end the line. */
18474 if (to_close)
18475 fputs (to_close, file);
18478 /* Generate a unique section name for FILENAME for a section type
18479 represented by SECTION_DESC. Output goes into BUF.
18481 SECTION_DESC can be any string, as long as it is different for each
18482 possible section type.
18484 We name the section in the same manner as xlc. The name begins with an
18485 underscore followed by the filename (after stripping any leading directory
18486 names) with the last period replaced by the string SECTION_DESC. If
18487 FILENAME does not contain a period, SECTION_DESC is appended to the end of
18488 the name. */
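/* For example (illustrative), rs6000_gen_section_name (&buf,
   "src/foo.c", "bss_") yields "_foobss_"; note that everything after
   the last period, not just the period itself, is dropped. */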
18490 void
18491 rs6000_gen_section_name (char **buf, const char *filename,
18492 const char *section_desc)
18494 const char *q, *after_last_slash, *last_period = 0;
18495 char *p;
18496 int len;
18498 after_last_slash = filename;
18499 for (q = filename; *q; q++)
18501 if (*q == '/')
18502 after_last_slash = q + 1;
18503 else if (*q == '.')
18504 last_period = q;
18507 len = strlen (after_last_slash) + strlen (section_desc) + 2;
18508 *buf = (char *) xmalloc (len);
18510 p = *buf;
18511 *p++ = '_';
18513 for (q = after_last_slash; *q; q++)
18515 if (q == last_period)
18517 strcpy (p, section_desc);
18518 p += strlen (section_desc);
18519 break;
18522 else if (ISALNUM (*q))
18523 *p++ = *q;
18526 if (last_period == 0)
18527 strcpy (p, section_desc);
18528 else
18529 *p = '\0';
18532 /* Emit profile function. */
18534 void
18535 output_profile_hook (int labelno ATTRIBUTE_UNUSED)
18537 /* Non-standard profiling for kernels, which just saves LR then calls
18538 _mcount without worrying about arg saves. The idea is to change
18539 the function prologue as little as possible as it isn't easy to
18540 account for arg save/restore code added just for _mcount. */
18541 if (TARGET_PROFILE_KERNEL)
18542 return;
18544 if (DEFAULT_ABI == ABI_AIX)
18546 #ifndef NO_PROFILE_COUNTERS
18547 # define NO_PROFILE_COUNTERS 0
18548 #endif
18549 if (NO_PROFILE_COUNTERS)
18550 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
18551 else
18553 char buf[30];
18554 const char *label_name;
18555 rtx fun;
18557 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
18558 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
18559 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
18561 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
18562 fun, Pmode);
18565 else if (DEFAULT_ABI == ABI_DARWIN)
18567 const char *mcount_name = RS6000_MCOUNT;
18568 int caller_addr_regno = LR_REGNO;
18570 /* Be conservative and always set this, at least for now. */
18571 crtl->uses_pic_offset_table = 1;
18573 #if TARGET_MACHO
18574 /* For PIC code, set up a stub and collect the caller's address
18575 from r0, which is where the prologue puts it. */
18576 if (MACHOPIC_INDIRECT
18577 && crtl->uses_pic_offset_table)
18578 caller_addr_regno = 0;
18579 #endif
18580 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
18581 0, VOIDmode, 1,
18582 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
18586 /* Write function profiler code. */
18588 void
18589 output_function_profiler (FILE *file, int labelno)
18591 char buf[100];
18593 switch (DEFAULT_ABI)
18595 default:
18596 gcc_unreachable ();
18598 case ABI_V4:
18599 if (!TARGET_32BIT)
18601 warning (0, "no profiling of 64-bit code for this ABI");
18602 return;
18604 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
18605 fprintf (file, "\tmflr %s\n", reg_names[0]);
18606 if (NO_PROFILE_COUNTERS)
18608 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
18609 reg_names[0], reg_names[1]);
18611 else if (TARGET_SECURE_PLT && flag_pic)
18613 asm_fprintf (file, "\tbcl 20,31,1f\n1:\n\t{st|stw} %s,4(%s)\n",
18614 reg_names[0], reg_names[1]);
18615 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
18616 asm_fprintf (file, "\t{cau|addis} %s,%s,",
18617 reg_names[12], reg_names[12]);
18618 assemble_name (file, buf);
18619 asm_fprintf (file, "-1b@ha\n\t{cal|la} %s,", reg_names[0]);
18620 assemble_name (file, buf);
18621 asm_fprintf (file, "-1b@l(%s)\n", reg_names[12]);
18623 else if (flag_pic == 1)
18625 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
18626 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
18627 reg_names[0], reg_names[1]);
18628 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
18629 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
18630 assemble_name (file, buf);
18631 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
18633 else if (flag_pic > 1)
18635 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
18636 reg_names[0], reg_names[1]);
18637 /* Now, we need to get the address of the label. */
18638 fputs ("\tbcl 20,31,1f\n\t.long ", file);
18639 assemble_name (file, buf);
18640 fputs ("-.\n1:", file);
18641 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
18642 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
18643 reg_names[0], reg_names[11]);
18644 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
18645 reg_names[0], reg_names[0], reg_names[11]);
18647 else
18649 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
18650 assemble_name (file, buf);
18651 fputs ("@ha\n", file);
18652 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
18653 reg_names[0], reg_names[1]);
18654 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
18655 assemble_name (file, buf);
18656 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
18659 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
18660 fprintf (file, "\tbl %s%s\n",
18661 RS6000_MCOUNT, flag_pic ? "@plt" : "");
18662 break;
18664 case ABI_AIX:
18665 case ABI_DARWIN:
18666 if (!TARGET_PROFILE_KERNEL)
18668 /* Don't do anything; it is all done in output_profile_hook (). */
18670 else
18672 gcc_assert (!TARGET_32BIT);
18674 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
18675 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
18677 if (cfun->static_chain_decl != NULL)
18679 asm_fprintf (file, "\tstd %s,24(%s)\n",
18680 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
18681 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
18682 asm_fprintf (file, "\tld %s,24(%s)\n",
18683 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
18685 else
18686 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
18688 break;
18694 /* The following variable holds the last issued insn. */
18696 static rtx last_scheduled_insn;
18698 /* The following variable helps to balance the issuing of load and
18699 store instructions. */
18701 static int load_store_pendulum;
18703 /* Power4 load update and store update instructions are cracked into a
18704 load or store and an integer insn which are executed in the same cycle.
18705 Branches have their own dispatch slot which does not count against the
18706 GCC issue rate, but it changes the program flow so there are no other
18707 instructions to issue in this cycle. */
18709 static int
18710 rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
18711 int verbose ATTRIBUTE_UNUSED,
18712 rtx insn, int more)
18714 last_scheduled_insn = insn;
18715 if (GET_CODE (PATTERN (insn)) == USE
18716 || GET_CODE (PATTERN (insn)) == CLOBBER)
18718 cached_can_issue_more = more;
18719 return cached_can_issue_more;
18722 if (insn_terminates_group_p (insn, current_group))
18724 cached_can_issue_more = 0;
18725 return cached_can_issue_more;
18728 /* If the insn has no reservation but we reach here anyway, treat it as consuming no issue slot. */
18729 if (recog_memoized (insn) < 0)
18730 return more;
18732 if (rs6000_sched_groups)
18734 if (is_microcoded_insn (insn))
18735 cached_can_issue_more = 0;
18736 else if (is_cracked_insn (insn))
18737 cached_can_issue_more = more > 2 ? more - 2 : 0;
18738 else
18739 cached_can_issue_more = more - 1;
18741 return cached_can_issue_more;
18744 if (rs6000_cpu_attr == CPU_CELL && is_nonpipeline_insn (insn))
18745 return 0;
18747 cached_can_issue_more = more - 1;
18748 return cached_can_issue_more;
18751 /* Adjust the cost of a scheduling dependency. Return the new cost of
18752 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
18754 static int
18755 rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
18757 enum attr_type attr_type;
18759 if (! recog_memoized (insn))
18760 return 0;
18762 switch (REG_NOTE_KIND (link))
18764 case REG_DEP_TRUE:
18766 /* Data dependency; DEP_INSN writes a register that INSN reads
18767 some cycles later. */
18769 /* Separate a load from a narrower, dependent store. */
18770 if (rs6000_sched_groups
18771 && GET_CODE (PATTERN (insn)) == SET
18772 && GET_CODE (PATTERN (dep_insn)) == SET
18773 && GET_CODE (XEXP (PATTERN (insn), 1)) == MEM
18774 && GET_CODE (XEXP (PATTERN (dep_insn), 0)) == MEM
18775 && (GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (insn), 1)))
18776 > GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (dep_insn), 0)))))
18777 return cost + 14;
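/* Illustrative note (our reading, not from the original comment):
   the +14 approximates the flush/refetch penalty these
   group-dispatching cores pay when a load overlaps a narrower store
   still in flight, so the scheduler is pushed to separate the pair. */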
18779 attr_type = get_attr_type (insn);
18781 switch (attr_type)
18783 case TYPE_JMPREG:
18784 /* Tell the first scheduling pass about the latency between
18785 a mtctr and bctr (and mtlr and br/blr). The first
18786 scheduling pass will not know about this latency since
18787 the mtctr instruction, which has the latency associated
18788 to it, will be generated by reload. */
18789 return TARGET_POWER ? 5 : 4;
18790 case TYPE_BRANCH:
18791 /* Leave some extra cycles between a compare and its
18792 dependent branch, to inhibit expensive mispredicts. */
18793 if ((rs6000_cpu_attr == CPU_PPC603
18794 || rs6000_cpu_attr == CPU_PPC604
18795 || rs6000_cpu_attr == CPU_PPC604E
18796 || rs6000_cpu_attr == CPU_PPC620
18797 || rs6000_cpu_attr == CPU_PPC630
18798 || rs6000_cpu_attr == CPU_PPC750
18799 || rs6000_cpu_attr == CPU_PPC7400
18800 || rs6000_cpu_attr == CPU_PPC7450
18801 || rs6000_cpu_attr == CPU_POWER4
18802 || rs6000_cpu_attr == CPU_POWER5
18803 || rs6000_cpu_attr == CPU_CELL)
18804 && recog_memoized (dep_insn)
18805 && (INSN_CODE (dep_insn) >= 0))
18807 switch (get_attr_type (dep_insn))
18809 case TYPE_CMP:
18810 case TYPE_COMPARE:
18811 case TYPE_DELAYED_COMPARE:
18812 case TYPE_IMUL_COMPARE:
18813 case TYPE_LMUL_COMPARE:
18814 case TYPE_FPCOMPARE:
18815 case TYPE_CR_LOGICAL:
18816 case TYPE_DELAYED_CR:
18817 return cost + 2;
18818 default:
18819 break;
18821 break;
18823 case TYPE_STORE:
18824 case TYPE_STORE_U:
18825 case TYPE_STORE_UX:
18826 case TYPE_FPSTORE:
18827 case TYPE_FPSTORE_U:
18828 case TYPE_FPSTORE_UX:
18829 if ((rs6000_cpu == PROCESSOR_POWER6)
18830 && recog_memoized (dep_insn)
18831 && (INSN_CODE (dep_insn) >= 0))
18834 if (GET_CODE (PATTERN (insn)) != SET)
18835 /* If this happens, we have to extend this to schedule
18836 optimally. Return default for now. */
18837 return cost;
18839 /* Adjust the cost for the case where the value written
18840 by a fixed point operation is used as the address
18841 gen value on a store. */
18842 switch (get_attr_type (dep_insn))
18844 case TYPE_LOAD:
18845 case TYPE_LOAD_U:
18846 case TYPE_LOAD_UX:
18847 case TYPE_CNTLZ:
18849 if (! store_data_bypass_p (dep_insn, insn))
18850 return 4;
18851 break;
18853 case TYPE_LOAD_EXT:
18854 case TYPE_LOAD_EXT_U:
18855 case TYPE_LOAD_EXT_UX:
18856 case TYPE_VAR_SHIFT_ROTATE:
18857 case TYPE_VAR_DELAYED_COMPARE:
18859 if (! store_data_bypass_p (dep_insn, insn))
18860 return 6;
18861 break;
18863 case TYPE_INTEGER:
18864 case TYPE_COMPARE:
18865 case TYPE_FAST_COMPARE:
18866 case TYPE_EXTS:
18867 case TYPE_SHIFT:
18868 case TYPE_INSERT_WORD:
18869 case TYPE_INSERT_DWORD:
18870 case TYPE_FPLOAD_U:
18871 case TYPE_FPLOAD_UX:
18872 case TYPE_STORE_U:
18873 case TYPE_STORE_UX:
18874 case TYPE_FPSTORE_U:
18875 case TYPE_FPSTORE_UX:
18877 if (! store_data_bypass_p (dep_insn, insn))
18878 return 3;
18879 break;
18881 case TYPE_IMUL:
18882 case TYPE_IMUL2:
18883 case TYPE_IMUL3:
18884 case TYPE_LMUL:
18885 case TYPE_IMUL_COMPARE:
18886 case TYPE_LMUL_COMPARE:
18888 if (! store_data_bypass_p (dep_insn, insn))
18889 return 17;
18890 break;
18892 case TYPE_IDIV:
18894 if (! store_data_bypass_p (dep_insn, insn))
18895 return 45;
18896 break;
18898 case TYPE_LDIV:
18900 if (! store_data_bypass_p (dep_insn, insn))
18901 return 57;
18902 break;
18904 default:
18905 break;
18908 break;
18910 case TYPE_LOAD:
18911 case TYPE_LOAD_U:
18912 case TYPE_LOAD_UX:
18913 case TYPE_LOAD_EXT:
18914 case TYPE_LOAD_EXT_U:
18915 case TYPE_LOAD_EXT_UX:
18916 if ((rs6000_cpu == PROCESSOR_POWER6)
18917 && recog_memoized (dep_insn)
18918 && (INSN_CODE (dep_insn) >= 0))
18921 /* Adjust the cost for the case where the value written
18922 by a fixed point instruction is used within the address
18923 gen portion of a subsequent load(u)(x) */
18924 switch (get_attr_type (dep_insn))
18926 case TYPE_LOAD:
18927 case TYPE_LOAD_U:
18928 case TYPE_LOAD_UX:
18929 case TYPE_CNTLZ:
18931 if (set_to_load_agen (dep_insn, insn))
18932 return 4;
18933 break;
18935 case TYPE_LOAD_EXT:
18936 case TYPE_LOAD_EXT_U:
18937 case TYPE_LOAD_EXT_UX:
18938 case TYPE_VAR_SHIFT_ROTATE:
18939 case TYPE_VAR_DELAYED_COMPARE:
18941 if (set_to_load_agen (dep_insn, insn))
18942 return 6;
18943 break;
18945 case TYPE_INTEGER:
18946 case TYPE_COMPARE:
18947 case TYPE_FAST_COMPARE:
18948 case TYPE_EXTS:
18949 case TYPE_SHIFT:
18950 case TYPE_INSERT_WORD:
18951 case TYPE_INSERT_DWORD:
18952 case TYPE_FPLOAD_U:
18953 case TYPE_FPLOAD_UX:
18954 case TYPE_STORE_U:
18955 case TYPE_STORE_UX:
18956 case TYPE_FPSTORE_U:
18957 case TYPE_FPSTORE_UX:
18959 if (set_to_load_agen (dep_insn, insn))
18960 return 3;
18961 break;
18963 case TYPE_IMUL:
18964 case TYPE_IMUL2:
18965 case TYPE_IMUL3:
18966 case TYPE_LMUL:
18967 case TYPE_IMUL_COMPARE:
18968 case TYPE_LMUL_COMPARE:
18970 if (set_to_load_agen (dep_insn, insn))
18971 return 17;
18972 break;
18974 case TYPE_IDIV:
18976 if (set_to_load_agen (dep_insn, insn))
18977 return 45;
18978 break;
18980 case TYPE_LDIV:
18982 if (set_to_load_agen (dep_insn, insn))
18983 return 57;
18984 break;
18986 default:
18987 break;
18990 break;
18992 case TYPE_FPLOAD:
18993 if ((rs6000_cpu == PROCESSOR_POWER6)
18994 && recog_memoized (dep_insn)
18995 && (INSN_CODE (dep_insn) >= 0)
18996 && (get_attr_type (dep_insn) == TYPE_MFFGPR))
18997 return 2;
18999 default:
19000 break;
19003 /* Fall out to return default cost. */
19005 break;
19007 case REG_DEP_OUTPUT:
19008 /* Output dependency; DEP_INSN writes a register that INSN writes some
19009 cycles later. */
19010 if ((rs6000_cpu == PROCESSOR_POWER6)
19011 && recog_memoized (dep_insn)
19012 && (INSN_CODE (dep_insn) >= 0))
19014 attr_type = get_attr_type (insn);
19016 switch (attr_type)
19018 case TYPE_FP:
19019 if (get_attr_type (dep_insn) == TYPE_FP)
19020 return 1;
19021 break;
19022 case TYPE_FPLOAD:
19023 if (get_attr_type (dep_insn) == TYPE_MFFGPR)
19024 return 2;
19025 break;
19026 default:
19027 break;
19030 case REG_DEP_ANTI:
19031 /* Anti dependency; DEP_INSN reads a register that INSN writes some
19032 cycles later. */
19033 return 0;
19035 default:
19036 gcc_unreachable ();
19039 return cost;
19042 /* Return true if INSN is microcoded.
19043 Return false otherwise. */
19045 static bool
19046 is_microcoded_insn (rtx insn)
19048 if (!insn || !INSN_P (insn)
19049 || GET_CODE (PATTERN (insn)) == USE
19050 || GET_CODE (PATTERN (insn)) == CLOBBER)
19051 return false;
19053 if (rs6000_cpu_attr == CPU_CELL)
19054 return get_attr_cell_micro (insn) == CELL_MICRO_ALWAYS;
19056 if (rs6000_sched_groups)
19058 enum attr_type type = get_attr_type (insn);
19059 if (type == TYPE_LOAD_EXT_U
19060 || type == TYPE_LOAD_EXT_UX
19061 || type == TYPE_LOAD_UX
19062 || type == TYPE_STORE_UX
19063 || type == TYPE_MFCR)
19064 return true;
19067 return false;
19070 /* The function returns true if INSN is cracked into 2 instructions
19071 by the processor (and therefore occupies 2 issue slots). */
19073 static bool
19074 is_cracked_insn (rtx insn)
19076 if (!insn || !INSN_P (insn)
19077 || GET_CODE (PATTERN (insn)) == USE
19078 || GET_CODE (PATTERN (insn)) == CLOBBER)
19079 return false;
19081 if (rs6000_sched_groups)
19083 enum attr_type type = get_attr_type (insn);
19084 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
19085 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
19086 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
19087 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
19088 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
19089 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
19090 || type == TYPE_IDIV || type == TYPE_LDIV
19091 || type == TYPE_INSERT_WORD)
19092 return true;
19095 return false;
19098 /* The function returns true if INSN can be issued only from
19099 the branch slot. */
19101 static bool
19102 is_branch_slot_insn (rtx insn)
19104 if (!insn || !INSN_P (insn)
19105 || GET_CODE (PATTERN (insn)) == USE
19106 || GET_CODE (PATTERN (insn)) == CLOBBER)
19107 return false;
19109 if (rs6000_sched_groups)
19111 enum attr_type type = get_attr_type (insn);
19112 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
19113 return true;
19114 return false;
19117 return false;
19120 /* The function returns true if out_insn sets a value that is
19121    used in the address generation computation of in_insn.  */
19122 static bool
19123 set_to_load_agen (rtx out_insn, rtx in_insn)
19125 rtx out_set, in_set;
19127 /* For performance reasons, only handle the simple case where
19128 both loads are a single_set. */
19129 out_set = single_set (out_insn);
19130 if (out_set)
19132 in_set = single_set (in_insn);
19133 if (in_set)
19134 return reg_mentioned_p (SET_DEST (out_set), SET_SRC (in_set));
19137 return false;
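/* Illustration (a sketch, not part of the compiler): for the Power6
   cost adjustments above, a dependence shaped like

       (set (reg 9) (mult ...))                 ;; dep_insn, e.g. TYPE_LMUL
       (set (reg 5) (mem (plus (reg 9) ...)))   ;; insn, agen uses reg 9

   makes set_to_load_agen return true, so rs6000_adjust_cost raises the
   dependence cost to 17 cycles.  */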
19140 /* The function returns true if the target storage location of
19141    out_insn is adjacent to the target storage location of in_insn.  */
19144 static bool
19145 adjacent_mem_locations (rtx insn1, rtx insn2)
19148 rtx a = get_store_dest (PATTERN (insn1));
19149 rtx b = get_store_dest (PATTERN (insn2));
19151 if ((GET_CODE (XEXP (a, 0)) == REG
19152 || (GET_CODE (XEXP (a, 0)) == PLUS
19153 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
19154 && (GET_CODE (XEXP (b, 0)) == REG
19155 || (GET_CODE (XEXP (b, 0)) == PLUS
19156 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
19158 HOST_WIDE_INT val0 = 0, val1 = 0, val_diff;
19159 rtx reg0, reg1;
19161 if (GET_CODE (XEXP (a, 0)) == PLUS)
19163 reg0 = XEXP (XEXP (a, 0), 0);
19164 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
19166 else
19167 reg0 = XEXP (a, 0);
19169 if (GET_CODE (XEXP (b, 0)) == PLUS)
19171 reg1 = XEXP (XEXP (b, 0), 0);
19172 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
19174 else
19175 reg1 = XEXP (b, 0);
19177 val_diff = val1 - val0;
19179 return ((REGNO (reg0) == REGNO (reg1))
19180 && ((MEM_SIZE (a) && val_diff == INTVAL (MEM_SIZE (a)))
19181 || (MEM_SIZE (b) && val_diff == -INTVAL (MEM_SIZE (b)))));
19184 return false;
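/* A minimal standalone sketch (hypothetical helper, not used by the
   compiler) of the offset arithmetic above, assuming both addresses
   share the same base register:  */
#if 0
static int
offsets_adjacent_p (long off_a, long size_a, long off_b, long size_b)
{
  long diff = off_b - off_a;
  /* B starts exactly where A ends, or A starts exactly where B ends.  */
  return diff == size_a || diff == -size_b;
}
#endif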
19187 /* A C statement (sans semicolon) to update the integer scheduling
19188 priority INSN_PRIORITY (INSN). Increase the priority to execute the
19189 INSN earlier, reduce the priority to execute INSN later. Do not
19190 define this macro if you do not need to adjust the scheduling
19191 priorities of insns. */
19193 static int
19194 rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
19196 /* On machines (like the 750) which have asymmetric integer units,
19197 where one integer unit can do multiply and divides and the other
19198 can't, reduce the priority of multiply/divide so it is scheduled
19199 before other integer operations. */
19201 #if 0
19202 if (! INSN_P (insn))
19203 return priority;
19205 if (GET_CODE (PATTERN (insn)) == USE)
19206 return priority;
19208 switch (rs6000_cpu_attr) {
19209 case CPU_PPC750:
19210 switch (get_attr_type (insn))
19212 default:
19213 break;
19215 case TYPE_IMUL:
19216 case TYPE_IDIV:
19217 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
19218 priority, priority);
19219 if (priority >= 0 && priority < 0x01000000)
19220 priority >>= 3;
19221 break;
19224 #endif
19226 if (insn_must_be_first_in_group (insn)
19227 && reload_completed
19228 && current_sched_info->sched_max_insns_priority
19229 && rs6000_sched_restricted_insns_priority)
19232 /* Prioritize insns that can be dispatched only in the first
19233 dispatch slot. */
19234 if (rs6000_sched_restricted_insns_priority == 1)
19235 /* Attach highest priority to insn. This means that in
19236 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
19237 precede 'priority' (critical path) considerations. */
19238 return current_sched_info->sched_max_insns_priority;
19239 else if (rs6000_sched_restricted_insns_priority == 2)
19240 /* Increase priority of insn by a minimal amount. This means that in
19241 haifa-sched.c:ready_sort(), only 'priority' (critical path)
19242 considerations precede dispatch-slot restriction considerations. */
19243 return (priority + 1);
19246 if (rs6000_cpu == PROCESSOR_POWER6
19247 && ((load_store_pendulum == -2 && is_load_insn (insn))
19248 || (load_store_pendulum == 2 && is_store_insn (insn))))
19249 /* Attach highest priority to insn if the scheduler has just issued two
19250 stores and this instruction is a load, or two loads and this instruction
19251 is a store. Power6 wants loads and stores scheduled alternately
19252 when possible */
19253 return current_sched_info->sched_max_insns_priority;
19255 return priority;
19258 /* Return true if the instruction is nonpipelined on the Cell. */
19259 static bool
19260 is_nonpipeline_insn (rtx insn)
19262 enum attr_type type;
19263 if (!insn || !INSN_P (insn)
19264 || GET_CODE (PATTERN (insn)) == USE
19265 || GET_CODE (PATTERN (insn)) == CLOBBER)
19266 return false;
19268 type = get_attr_type (insn);
19269 if (type == TYPE_IMUL
19270 || type == TYPE_IMUL2
19271 || type == TYPE_IMUL3
19272 || type == TYPE_LMUL
19273 || type == TYPE_IDIV
19274 || type == TYPE_LDIV
19275 || type == TYPE_SDIV
19276 || type == TYPE_DDIV
19277 || type == TYPE_SSQRT
19278 || type == TYPE_DSQRT
19279 || type == TYPE_MFCR
19280 || type == TYPE_MFCRF
19281 || type == TYPE_MFJMPR)
19283 return true;
19285 return false;
19289 /* Return how many instructions the machine can issue per cycle. */
19291 static int
19292 rs6000_issue_rate (void)
19294 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
19295 if (!reload_completed)
19296 return 1;
19298 switch (rs6000_cpu_attr) {
19299 case CPU_RIOS1: /* ? */
19300 case CPU_RS64A:
19301 case CPU_PPC601: /* ? */
19302 case CPU_PPC7450:
19303 return 3;
19304 case CPU_PPC440:
19305 case CPU_PPC603:
19306 case CPU_PPC750:
19307 case CPU_PPC7400:
19308 case CPU_PPC8540:
19309 case CPU_CELL:
19310 case CPU_PPCE300C2:
19311 case CPU_PPCE300C3:
19312 case CPU_PPCE500MC:
19313 return 2;
19314 case CPU_RIOS2:
19315 case CPU_PPC604:
19316 case CPU_PPC604E:
19317 case CPU_PPC620:
19318 case CPU_PPC630:
19319 return 4;
19320 case CPU_POWER4:
19321 case CPU_POWER5:
19322 case CPU_POWER6:
19323 return 5;
19324 default:
19325 return 1;
19329 /* Return how many instructions to look ahead for better insn
19330 scheduling. */
19332 static int
19333 rs6000_use_sched_lookahead (void)
19335 if (rs6000_cpu_attr == CPU_PPC8540)
19336 return 4;
19337 if (rs6000_cpu_attr == CPU_CELL)
19338 return (reload_completed ? 8 : 0);
19339 return 0;
19342 /* We are choosing an insn from the ready queue.  Return nonzero if INSN can be chosen.  */
19343 static int
19344 rs6000_use_sched_lookahead_guard (rtx insn)
19346 if (rs6000_cpu_attr != CPU_CELL)
19347 return 1;
19349 if (insn == NULL_RTX || !INSN_P (insn))
19350 abort ();
19352 if (!reload_completed
19353 || is_nonpipeline_insn (insn)
19354 || is_microcoded_insn (insn))
19355 return 0;
19357 return 1;
19360 /* Determine if PAT refers to memory.  */
19362 static bool
19363 is_mem_ref (rtx pat)
19365 const char * fmt;
19366 int i, j;
19367 bool ret = false;
19369 /* stack_tie does not produce any real memory traffic. */
19370 if (GET_CODE (pat) == UNSPEC
19371 && XINT (pat, 1) == UNSPEC_TIE)
19372 return false;
19374 if (GET_CODE (pat) == MEM)
19375 return true;
19377 /* Recursively process the pattern. */
19378 fmt = GET_RTX_FORMAT (GET_CODE (pat));
19380 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
19382 if (fmt[i] == 'e')
19383 ret |= is_mem_ref (XEXP (pat, i));
19384 else if (fmt[i] == 'E')
19385 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
19386 ret |= is_mem_ref (XVECEXP (pat, i, j));
19389 return ret;
19392 /* Determine if PAT is a PATTERN of a load insn. */
19394 static bool
19395 is_load_insn1 (rtx pat)
19397 if (!pat || pat == NULL_RTX)
19398 return false;
19400 if (GET_CODE (pat) == SET)
19401 return is_mem_ref (SET_SRC (pat));
19403 if (GET_CODE (pat) == PARALLEL)
19405 int i;
19407 for (i = 0; i < XVECLEN (pat, 0); i++)
19408 if (is_load_insn1 (XVECEXP (pat, 0, i)))
19409 return true;
19412 return false;
19415 /* Determine if INSN loads from memory. */
19417 static bool
19418 is_load_insn (rtx insn)
19420 if (!insn || !INSN_P (insn))
19421 return false;
19423 if (GET_CODE (insn) == CALL_INSN)
19424 return false;
19426 return is_load_insn1 (PATTERN (insn));
19429 /* Determine if PAT is a PATTERN of a store insn. */
19431 static bool
19432 is_store_insn1 (rtx pat)
19434 if (!pat || pat == NULL_RTX)
19435 return false;
19437 if (GET_CODE (pat) == SET)
19438 return is_mem_ref (SET_DEST (pat));
19440 if (GET_CODE (pat) == PARALLEL)
19442 int i;
19444 for (i = 0; i < XVECLEN (pat, 0); i++)
19445 if (is_store_insn1 (XVECEXP (pat, 0, i)))
19446 return true;
19449 return false;
19452 /* Determine if INSN stores to memory. */
19454 static bool
19455 is_store_insn (rtx insn)
19457 if (!insn || !INSN_P (insn))
19458 return false;
19460 return is_store_insn1 (PATTERN (insn));
19463 /* Return the dest of a store insn. */
19465 static rtx
19466 get_store_dest (rtx pat)
19468 gcc_assert (is_store_insn1 (pat));
19470 if (GET_CODE (pat) == SET)
19471 return SET_DEST (pat);
19472 else if (GET_CODE (pat) == PARALLEL)
19474 int i;
19476 for (i = 0; i < XVECLEN (pat, 0); i++)
19478 rtx inner_pat = XVECEXP (pat, 0, i);
19479 if (GET_CODE (inner_pat) == SET
19480 && is_mem_ref (SET_DEST (inner_pat)))
19481 return inner_pat;
19484 /* We shouldn't get here, because we should have either a simple
19485    store insn or a store with update, which are covered above.  */
19486 gcc_unreachable ();
19489 /* Returns whether the dependence between INSN and NEXT is considered
19490 costly by the given target. */
19492 static bool
19493 rs6000_is_costly_dependence (dep_t dep, int cost, int distance)
19495 rtx insn;
19496 rtx next;
19498 /* If the flag is not enabled - no dependence is considered costly;
19499 allow all dependent insns in the same group.
19500 This is the most aggressive option. */
19501 if (rs6000_sched_costly_dep == no_dep_costly)
19502 return false;
19504 /* If the flag is set to 1 - a dependence is always considered costly;
19505 do not allow dependent instructions in the same group.
19506 This is the most conservative option. */
19507 if (rs6000_sched_costly_dep == all_deps_costly)
19508 return true;
19510 insn = DEP_PRO (dep);
19511 next = DEP_CON (dep);
19513 if (rs6000_sched_costly_dep == store_to_load_dep_costly
19514 && is_load_insn (next)
19515 && is_store_insn (insn))
19516 /* Prevent load after store in the same group. */
19517 return true;
19519 if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
19520 && is_load_insn (next)
19521 && is_store_insn (insn)
19522 && DEP_TYPE (dep) == REG_DEP_TRUE)
19523 /* Prevent load after store in the same group if it is a true
19524 dependence. */
19525 return true;
19527 /* The flag is set to X; dependences with latency >= X are considered costly,
19528 and will not be scheduled in the same group. */
19529 if (rs6000_sched_costly_dep <= max_dep_latency
19530 && ((cost - distance) >= (int)rs6000_sched_costly_dep))
19531 return true;
19533 return false;
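/* Worked example (sketch): when rs6000_sched_costly_dep holds a latency
   bound N (N <= max_dep_latency), a dependence of cost C at distance D
   is costly when C - D >= N.  E.g. N == 2, C == 3, D == 0 is costly, so
   the two insns are kept in separate dispatch groups.  */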
19536 /* Return the next insn after INSN that is found before TAIL is reached,
19537 skipping any "non-active" insns - insns that will not actually occupy
19538 an issue slot. Return NULL_RTX if such an insn is not found. */
19540 static rtx
19541 get_next_active_insn (rtx insn, rtx tail)
19543 if (insn == NULL_RTX || insn == tail)
19544 return NULL_RTX;
19546 while (1)
19548 insn = NEXT_INSN (insn);
19549 if (insn == NULL_RTX || insn == tail)
19550 return NULL_RTX;
19552 if (CALL_P (insn)
19553 || JUMP_P (insn)
19554 || (NONJUMP_INSN_P (insn)
19555 && GET_CODE (PATTERN (insn)) != USE
19556 && GET_CODE (PATTERN (insn)) != CLOBBER
19557 && INSN_CODE (insn) != CODE_FOR_stack_tie))
19558 break;
19560 return insn;
19563 /* We are about to begin issuing insns for this clock cycle. */
19565 static int
19566 rs6000_sched_reorder (FILE *dump ATTRIBUTE_UNUSED, int sched_verbose,
19567 rtx *ready ATTRIBUTE_UNUSED,
19568 int *pn_ready ATTRIBUTE_UNUSED,
19569 int clock_var ATTRIBUTE_UNUSED)
19571 int n_ready = *pn_ready;
19573 if (sched_verbose)
19574 fprintf (dump, "// rs6000_sched_reorder :\n");
19576 /* Reorder the ready list, if the second to last ready insn
19577    is a nonpipelined insn.  */
19578 if (rs6000_cpu_attr == CPU_CELL && n_ready > 1)
19580 if (is_nonpipeline_insn (ready[n_ready - 1])
19581 && (recog_memoized (ready[n_ready - 2]) > 0))
19582 /* Simply swap first two insns. */
19584 rtx tmp = ready[n_ready - 1];
19585 ready[n_ready - 1] = ready[n_ready - 2];
19586 ready[n_ready - 2] = tmp;
19590 if (rs6000_cpu == PROCESSOR_POWER6)
19591 load_store_pendulum = 0;
19593 return rs6000_issue_rate ();
19596 /* Like rs6000_sched_reorder, but called after issuing each insn. */
19598 static int
19599 rs6000_sched_reorder2 (FILE *dump, int sched_verbose, rtx *ready,
19600 int *pn_ready, int clock_var ATTRIBUTE_UNUSED)
19602 if (sched_verbose)
19603 fprintf (dump, "// rs6000_sched_reorder2 :\n");
19605 /* For Power6, we need to handle some special cases to try and keep the
19606 store queue from overflowing and triggering expensive flushes.
19608 This code monitors how load and store instructions are being issued
19609 and skews the ready list one way or the other to increase the likelihood
19610 that a desired instruction is issued at the proper time.
19612 A couple of things are done. First, we maintain a "load_store_pendulum"
19613 to track the current state of load/store issue.
19615 - If the pendulum is at zero, then no loads or stores have been
19616 issued in the current cycle so we do nothing.
19618 - If the pendulum is 1, then a single load has been issued in this
19619 cycle and we attempt to locate another load in the ready list to
19620 issue with it.
19622 - If the pendulum is -2, then two stores have already been
19623 issued in this cycle, so we increase the priority of the first load
19624 in the ready list to increase its likelihood of being chosen first
19625 in the next cycle.
19627 - If the pendulum is -1, then a single store has been issued in this
19628 cycle and we attempt to locate another store in the ready list to
19629 issue with it, preferring a store to an adjacent memory location to
19630 facilitate store pairing in the store queue.
19632 - If the pendulum is 2, then two loads have already been
19633 issued in this cycle, so we increase the priority of the first store
19634 in the ready list to increase its likelihood of being chosen first
19635 in the next cycle.
19637 - If the pendulum < -2 or > 2, then do nothing.
19639 Note: This code covers the most common scenarios.  There exist
19640 non-load/store instructions which make use of the LSU and which
19641 would need to be accounted for to strictly model the behavior
19642 of the machine.  Those instructions are currently left unaccounted
19643 for in order to minimize the compile time overhead of this code.  */
19645 if (rs6000_cpu == PROCESSOR_POWER6 && last_scheduled_insn)
19647 int pos;
19648 int i;
19649 rtx tmp;
19651 if (is_store_insn (last_scheduled_insn))
19652 /* Issuing a store, swing the load_store_pendulum to the left */
19653 load_store_pendulum--;
19654 else if (is_load_insn (last_scheduled_insn))
19655 /* Issuing a load, swing the load_store_pendulum to the right */
19656 load_store_pendulum++;
19657 else
19658 return cached_can_issue_more;
19660 /* If the pendulum is balanced, or there is only one instruction on
19661 the ready list, then all is well, so return. */
19662 if ((load_store_pendulum == 0) || (*pn_ready <= 1))
19663 return cached_can_issue_more;
19665 if (load_store_pendulum == 1)
19667 /* A load has been issued in this cycle. Scan the ready list
19668 for another load to issue with it */
19669 pos = *pn_ready-1;
19671 while (pos >= 0)
19673 if (is_load_insn (ready[pos]))
19675 /* Found a load. Move it to the head of the ready list,
19676 and adjust its priority so that it is more likely to
19677 stay there */
19678 tmp = ready[pos];
19679 for (i=pos; i<*pn_ready-1; i++)
19680 ready[i] = ready[i + 1];
19681 ready[*pn_ready-1] = tmp;
19683 if (!sel_sched_p () && INSN_PRIORITY_KNOWN (tmp))
19684 INSN_PRIORITY (tmp)++;
19685 break;
19687 pos--;
19690 else if (load_store_pendulum == -2)
19692 /* Two stores have been issued in this cycle. Increase the
19693 priority of the first load in the ready list to favor it for
19694 issuing in the next cycle. */
19695 pos = *pn_ready-1;
19697 while (pos >= 0)
19699 if (is_load_insn (ready[pos])
19700 && !sel_sched_p ()
19701 && INSN_PRIORITY_KNOWN (ready[pos]))
19703 INSN_PRIORITY (ready[pos])++;
19705 /* Adjust the pendulum to account for the fact that a load
19706 was found and increased in priority. This is to prevent
19707 increasing the priority of multiple loads */
19708 load_store_pendulum--;
19710 break;
19712 pos--;
19715 else if (load_store_pendulum == -1)
19717 /* A store has been issued in this cycle. Scan the ready list for
19718 another store to issue with it, preferring a store to an adjacent
19719 memory location */
19720 int first_store_pos = -1;
19722 pos = *pn_ready-1;
19724 while (pos >= 0)
19726 if (is_store_insn (ready[pos]))
19728 /* Maintain the index of the first store found on the
19729 list */
19730 if (first_store_pos == -1)
19731 first_store_pos = pos;
19733 if (is_store_insn (last_scheduled_insn)
19734 && adjacent_mem_locations (last_scheduled_insn,ready[pos]))
19736 /* Found an adjacent store. Move it to the head of the
19737 ready list, and adjust its priority so that it is
19738 more likely to stay there */
19739 tmp = ready[pos];
19740 for (i=pos; i<*pn_ready-1; i++)
19741 ready[i] = ready[i + 1];
19742 ready[*pn_ready-1] = tmp;
19744 if (!sel_sched_p () && INSN_PRIORITY_KNOWN (tmp))
19745 INSN_PRIORITY (tmp)++;
19747 first_store_pos = -1;
19749 break;
19752 pos--;
19755 if (first_store_pos >= 0)
19757 /* An adjacent store wasn't found, but a non-adjacent store was,
19758 so move the non-adjacent store to the front of the ready
19759 list, and adjust its priority so that it is more likely to
19760 stay there. */
19761 tmp = ready[first_store_pos];
19762 for (i=first_store_pos; i<*pn_ready-1; i++)
19763 ready[i] = ready[i + 1];
19764 ready[*pn_ready-1] = tmp;
19765 if (!sel_sched_p () && INSN_PRIORITY_KNOWN (tmp))
19766 INSN_PRIORITY (tmp)++;
19769 else if (load_store_pendulum == 2)
19771 /* Two loads have been issued in this cycle. Increase the priority
19772 of the first store in the ready list to favor it for issuing in
19773 the next cycle. */
19774 pos = *pn_ready-1;
19776 while (pos >= 0)
19778 if (is_store_insn (ready[pos])
19779 && !sel_sched_p ()
19780 && INSN_PRIORITY_KNOWN (ready[pos]))
19782 INSN_PRIORITY (ready[pos])++;
19784 /* Adjust the pendulum to account for the fact that a store
19785 was found and increased in priority. This is to prevent
19786 increasing the priority of multiple stores */
19787 load_store_pendulum++;
19789 break;
19791 pos--;
19796 return cached_can_issue_more;
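/* A toy restatement (not part of the compiler) of the pendulum cases
   handled above, keyed on the value after the update:  */
#if 0
static const char *
pendulum_action (int pendulum)
{
  switch (pendulum)
    {
    case 0:  return "balanced: nothing to do";
    case 1:  return "one load issued: try to pair another load";
    case -1: return "one store issued: try to pair a store, prefer adjacent";
    case 2:  return "two loads issued: boost the first ready store";
    case -2: return "two stores issued: boost the first ready load";
    default: return "out of range: nothing to do";
    }
}
#endif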
19799 /* Return whether the presence of INSN causes a dispatch group termination
19800 of group WHICH_GROUP.
19802 If WHICH_GROUP == current_group, this function will return true if INSN
19803 causes the termination of the current group (i.e, the dispatch group to
19804 which INSN belongs). This means that INSN will be the last insn in the
19805 group it belongs to.
19807 If WHICH_GROUP == previous_group, this function will return true if INSN
19808 causes the termination of the previous group (i.e, the dispatch group that
19809 precedes the group to which INSN belongs). This means that INSN will be
19810 the first insn in the group it belongs to). */
19812 static bool
19813 insn_terminates_group_p (rtx insn, enum group_termination which_group)
19815 bool first, last;
19817 if (! insn)
19818 return false;
19820 first = insn_must_be_first_in_group (insn);
19821 last = insn_must_be_last_in_group (insn);
19823 if (first && last)
19824 return true;
19826 if (which_group == current_group)
19827 return last;
19828 else if (which_group == previous_group)
19829 return first;
19831 return false;
19835 static bool
19836 insn_must_be_first_in_group (rtx insn)
19838 enum attr_type type;
19840 if (!insn
19841 || insn == NULL_RTX
19842 || GET_CODE (insn) == NOTE
19843 || GET_CODE (PATTERN (insn)) == USE
19844 || GET_CODE (PATTERN (insn)) == CLOBBER)
19845 return false;
19847 switch (rs6000_cpu)
19849 case PROCESSOR_POWER5:
19850 if (is_cracked_insn (insn))
19851 return true;
19852 case PROCESSOR_POWER4:
19853 if (is_microcoded_insn (insn))
19854 return true;
19856 if (!rs6000_sched_groups)
19857 return false;
19859 type = get_attr_type (insn);
19861 switch (type)
19863 case TYPE_MFCR:
19864 case TYPE_MFCRF:
19865 case TYPE_MTCR:
19866 case TYPE_DELAYED_CR:
19867 case TYPE_CR_LOGICAL:
19868 case TYPE_MTJMPR:
19869 case TYPE_MFJMPR:
19870 case TYPE_IDIV:
19871 case TYPE_LDIV:
19872 case TYPE_LOAD_L:
19873 case TYPE_STORE_C:
19874 case TYPE_ISYNC:
19875 case TYPE_SYNC:
19876 return true;
19877 default:
19878 break;
19880 break;
19881 case PROCESSOR_POWER6:
19882 type = get_attr_type (insn);
19884 switch (type)
19886 case TYPE_INSERT_DWORD:
19887 case TYPE_EXTS:
19888 case TYPE_CNTLZ:
19889 case TYPE_SHIFT:
19890 case TYPE_VAR_SHIFT_ROTATE:
19891 case TYPE_TRAP:
19892 case TYPE_IMUL:
19893 case TYPE_IMUL2:
19894 case TYPE_IMUL3:
19895 case TYPE_LMUL:
19896 case TYPE_IDIV:
19897 case TYPE_INSERT_WORD:
19898 case TYPE_DELAYED_COMPARE:
19899 case TYPE_IMUL_COMPARE:
19900 case TYPE_LMUL_COMPARE:
19901 case TYPE_FPCOMPARE:
19902 case TYPE_MFCR:
19903 case TYPE_MTCR:
19904 case TYPE_MFJMPR:
19905 case TYPE_MTJMPR:
19906 case TYPE_ISYNC:
19907 case TYPE_SYNC:
19908 case TYPE_LOAD_L:
19909 case TYPE_STORE_C:
19910 case TYPE_LOAD_U:
19911 case TYPE_LOAD_UX:
19912 case TYPE_LOAD_EXT_UX:
19913 case TYPE_STORE_U:
19914 case TYPE_STORE_UX:
19915 case TYPE_FPLOAD_U:
19916 case TYPE_FPLOAD_UX:
19917 case TYPE_FPSTORE_U:
19918 case TYPE_FPSTORE_UX:
19919 return true;
19920 default:
19921 break;
19923 break;
19924 default:
19925 break;
19928 return false;
19931 static bool
19932 insn_must_be_last_in_group (rtx insn)
19934 enum attr_type type;
19936 if (!insn
19937 || insn == NULL_RTX
19938 || GET_CODE (insn) == NOTE
19939 || GET_CODE (PATTERN (insn)) == USE
19940 || GET_CODE (PATTERN (insn)) == CLOBBER)
19941 return false;
19943 switch (rs6000_cpu) {
19944 case PROCESSOR_POWER4:
19945 case PROCESSOR_POWER5:
19946 if (is_microcoded_insn (insn))
19947 return true;
19949 if (is_branch_slot_insn (insn))
19950 return true;
19952 break;
19953 case PROCESSOR_POWER6:
19954 type = get_attr_type (insn);
19956 switch (type)
19958 case TYPE_EXTS:
19959 case TYPE_CNTLZ:
19960 case TYPE_SHIFT:
19961 case TYPE_VAR_SHIFT_ROTATE:
19962 case TYPE_TRAP:
19963 case TYPE_IMUL:
19964 case TYPE_IMUL2:
19965 case TYPE_IMUL3:
19966 case TYPE_LMUL:
19967 case TYPE_IDIV:
19968 case TYPE_DELAYED_COMPARE:
19969 case TYPE_IMUL_COMPARE:
19970 case TYPE_LMUL_COMPARE:
19971 case TYPE_FPCOMPARE:
19972 case TYPE_MFCR:
19973 case TYPE_MTCR:
19974 case TYPE_MFJMPR:
19975 case TYPE_MTJMPR:
19976 case TYPE_ISYNC:
19977 case TYPE_SYNC:
19978 case TYPE_LOAD_L:
19979 case TYPE_STORE_C:
19980 return true;
19981 default:
19982 break;
19984 break;
19985 default:
19986 break;
19989 return false;
19992 /* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
19993 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
19995 static bool
19996 is_costly_group (rtx *group_insns, rtx next_insn)
19998 int i;
19999 int issue_rate = rs6000_issue_rate ();
20001 for (i = 0; i < issue_rate; i++)
20003 sd_iterator_def sd_it;
20004 dep_t dep;
20005 rtx insn = group_insns[i];
20007 if (!insn)
20008 continue;
20010 FOR_EACH_DEP (insn, SD_LIST_FORW, sd_it, dep)
20012 rtx next = DEP_CON (dep);
20014 if (next == next_insn
20015 && rs6000_is_costly_dependence (dep, dep_cost (dep), 0))
20016 return true;
20020 return false;
20023 /* Utility of the function redefine_groups.
20024 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
20025 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
20026 to keep it "far" (in a separate group) from GROUP_INSNS, following
20027 one of the following schemes, depending on the value of the flag
20028 -minsert_sched_nops = X:
20029 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
20030 in order to force NEXT_INSN into a separate group.
20031 (2) X < sched_finish_regroup_exact: insert exactly X nops.
20032 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
20033 insertion (has a group just ended, how many vacant issue slots remain in the
20034 last group, and how many dispatch groups were encountered so far). */
20036 static int
20037 force_new_group (int sched_verbose, FILE *dump, rtx *group_insns,
20038 rtx next_insn, bool *group_end, int can_issue_more,
20039 int *group_count)
20041 rtx nop;
20042 bool force;
20043 int issue_rate = rs6000_issue_rate ();
20044 bool end = *group_end;
20045 int i;
20047 if (next_insn == NULL_RTX)
20048 return can_issue_more;
20050 if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
20051 return can_issue_more;
20053 force = is_costly_group (group_insns, next_insn);
20054 if (!force)
20055 return can_issue_more;
20057 if (sched_verbose > 6)
20058 fprintf (dump,"force: group count = %d, can_issue_more = %d\n",
20059 *group_count ,can_issue_more);
20061 if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
20063 if (*group_end)
20064 can_issue_more = 0;
20066 /* Since only a branch can be issued in the last issue_slot, it is
20067 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
20068 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
20069 in this case the last nop will start a new group and the branch
20070 will be forced to the new group. */
20071 if (can_issue_more && !is_branch_slot_insn (next_insn))
20072 can_issue_more--;
20074 while (can_issue_more > 0)
20076 nop = gen_nop ();
20077 emit_insn_before (nop, next_insn);
20078 can_issue_more--;
20081 *group_end = true;
20082 return 0;
20085 if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
20087 int n_nops = rs6000_sched_insert_nops;
20089 /* Nops can't be issued from the branch slot, so the effective
20090 issue_rate for nops is 'issue_rate - 1'. */
20091 if (can_issue_more == 0)
20092 can_issue_more = issue_rate;
20093 can_issue_more--;
20094 if (can_issue_more == 0)
20096 can_issue_more = issue_rate - 1;
20097 (*group_count)++;
20098 end = true;
20099 for (i = 0; i < issue_rate; i++)
20101 group_insns[i] = 0;
20105 while (n_nops > 0)
20107 nop = gen_nop ();
20108 emit_insn_before (nop, next_insn);
20109 if (can_issue_more == issue_rate - 1) /* new group begins */
20110 end = false;
20111 can_issue_more--;
20112 if (can_issue_more == 0)
20114 can_issue_more = issue_rate - 1;
20115 (*group_count)++;
20116 end = true;
20117 for (i = 0; i < issue_rate; i++)
20119 group_insns[i] = 0;
20122 n_nops--;
20125 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
20126 can_issue_more++;
20128 /* Is next_insn going to start a new group? */
20129 *group_end
20130 = (end
20131 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
20132 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
20133 || (can_issue_more < issue_rate &&
20134 insn_terminates_group_p (next_insn, previous_group)));
20135 if (*group_end && end)
20136 (*group_count)--;
20138 if (sched_verbose > 6)
20139 fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
20140 *group_count, can_issue_more);
20141 return can_issue_more;
20144 return can_issue_more;
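/* Worked example (sketch): with issue_rate == 5 and
   -minsert-sched-nops=sched_finish_regroup_exact, if 3 issue slots are
   still vacant and next_insn is not a branch, 'can_issue_more - 1' == 2
   nops are emitted, after which next_insn starts a new group.  */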
20147 /* This function tries to synch the dispatch groups that the compiler "sees"
20148 with the dispatch groups that the processor dispatcher is expected to
20149 form in practice. It tries to achieve this synchronization by forcing the
20150 estimated processor grouping on the compiler (as opposed to the function
20151 'pad_groups', which tries to force the scheduler's grouping on the processor).
20153 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
20154 examines the (estimated) dispatch groups that will be formed by the processor
20155 dispatcher. It marks these group boundaries to reflect the estimated
20156 processor grouping, overriding the grouping that the scheduler had marked.
20157 Depending on the value of the flag '-minsert-sched-nops' this function can
20158 force certain insns into separate groups or force a certain distance between
20159 them by inserting nops, for example, if there exists a "costly dependence"
20160 between the insns.
20162 The function estimates the group boundaries that the processor will form as
20163 follows: It keeps track of how many vacant issue slots are available after
20164 each insn. A subsequent insn will start a new group if one of the following
20165 4 cases applies:
20166 - no more vacant issue slots remain in the current dispatch group.
20167 - only the last issue slot, which is the branch slot, is vacant, but the next
20168 insn is not a branch.
20169 - at most the last 2 issue slots, including the branch slot, are vacant,
20170 which means that a cracked insn (which occupies two issue slots) can't be
20171 issued in this group.
20172 - fewer than 'issue_rate' slots are vacant, and the next insn always needs to
20173 start a new group. */
20175 static int
20176 redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
20178 rtx insn, next_insn;
20179 int issue_rate;
20180 int can_issue_more;
20181 int slot, i;
20182 bool group_end;
20183 int group_count = 0;
20184 rtx *group_insns;
20186 /* Initialize. */
20187 issue_rate = rs6000_issue_rate ();
20188 group_insns = XALLOCAVEC (rtx, issue_rate);
20189 for (i = 0; i < issue_rate; i++)
20191 group_insns[i] = 0;
20193 can_issue_more = issue_rate;
20194 slot = 0;
20195 insn = get_next_active_insn (prev_head_insn, tail);
20196 group_end = false;
20198 while (insn != NULL_RTX)
20200 slot = (issue_rate - can_issue_more);
20201 group_insns[slot] = insn;
20202 can_issue_more =
20203 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
20204 if (insn_terminates_group_p (insn, current_group))
20205 can_issue_more = 0;
20207 next_insn = get_next_active_insn (insn, tail);
20208 if (next_insn == NULL_RTX)
20209 return group_count + 1;
20211 /* Is next_insn going to start a new group? */
20212 group_end
20213 = (can_issue_more == 0
20214 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
20215 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
20216 || (can_issue_more < issue_rate &&
20217 insn_terminates_group_p (next_insn, previous_group)));
20219 can_issue_more = force_new_group (sched_verbose, dump, group_insns,
20220 next_insn, &group_end, can_issue_more,
20221 &group_count);
20223 if (group_end)
20225 group_count++;
20226 can_issue_more = 0;
20227 for (i = 0; i < issue_rate; i++)
20229 group_insns[i] = 0;
20233 if (GET_MODE (next_insn) == TImode && can_issue_more)
20234 PUT_MODE (next_insn, VOIDmode);
20235 else if (!can_issue_more && GET_MODE (next_insn) != TImode)
20236 PUT_MODE (next_insn, TImode);
20238 insn = next_insn;
20239 if (can_issue_more == 0)
20240 can_issue_more = issue_rate;
20241 } /* while */
20243 return group_count;
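/* The group-boundary test used above, restated as a standalone sketch
   with hypothetical parameter names:  */
#if 0
static int
starts_new_group_p (int can_issue_more, int issue_rate,
                    int next_is_branch, int next_is_cracked,
                    int next_must_be_first)
{
  return (can_issue_more == 0
          || (can_issue_more == 1 && !next_is_branch)
          || (can_issue_more <= 2 && next_is_cracked)
          || (can_issue_more < issue_rate && next_must_be_first));
}
#endif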
20246 /* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
20247 dispatch group boundaries that the scheduler had marked. Pad with nops
20248 any dispatch groups which have vacant issue slots, in order to force the
20249 scheduler's grouping on the processor dispatcher. The function
20250 returns the number of dispatch groups found. */
20252 static int
20253 pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
20255 rtx insn, next_insn;
20256 rtx nop;
20257 int issue_rate;
20258 int can_issue_more;
20259 int group_end;
20260 int group_count = 0;
20262 /* Initialize issue_rate. */
20263 issue_rate = rs6000_issue_rate ();
20264 can_issue_more = issue_rate;
20266 insn = get_next_active_insn (prev_head_insn, tail);
20267 next_insn = get_next_active_insn (insn, tail);
20269 while (insn != NULL_RTX)
20271 can_issue_more =
20272 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
20274 group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);
20276 if (next_insn == NULL_RTX)
20277 break;
20279 if (group_end)
20281 /* If the scheduler had marked group termination at this location
20282 (between insn and next_insn), and neither insn nor next_insn will
20283 force group termination, pad the group with nops to force group
20284 termination. */
20285 if (can_issue_more
20286 && (rs6000_sched_insert_nops == sched_finish_pad_groups)
20287 && !insn_terminates_group_p (insn, current_group)
20288 && !insn_terminates_group_p (next_insn, previous_group))
20290 if (!is_branch_slot_insn (next_insn))
20291 can_issue_more--;
20293 while (can_issue_more)
20295 nop = gen_nop ();
20296 emit_insn_before (nop, next_insn);
20297 can_issue_more--;
20301 can_issue_more = issue_rate;
20302 group_count++;
20305 insn = next_insn;
20306 next_insn = get_next_active_insn (insn, tail);
20309 return group_count;
20312 /* We're beginning a new block. Initialize data structures as necessary. */
20314 static void
20315 rs6000_sched_init (FILE *dump ATTRIBUTE_UNUSED,
20316 int sched_verbose ATTRIBUTE_UNUSED,
20317 int max_ready ATTRIBUTE_UNUSED)
20319 last_scheduled_insn = NULL_RTX;
20320 load_store_pendulum = 0;
20323 /* The following function is called at the end of scheduling BB.
20324    After reload, it inserts nops to enforce insn group bundling.  */
20326 static void
20327 rs6000_sched_finish (FILE *dump, int sched_verbose)
20329 int n_groups;
20331 if (sched_verbose)
20332 fprintf (dump, "=== Finishing schedule.\n");
20334 if (reload_completed && rs6000_sched_groups)
20336 /* Do not run sched_finish hook when selective scheduling enabled. */
20337 if (sel_sched_p ())
20338 return;
20340 if (rs6000_sched_insert_nops == sched_finish_none)
20341 return;
20343 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
20344 n_groups = pad_groups (dump, sched_verbose,
20345 current_sched_info->prev_head,
20346 current_sched_info->next_tail);
20347 else
20348 n_groups = redefine_groups (dump, sched_verbose,
20349 current_sched_info->prev_head,
20350 current_sched_info->next_tail);
20352 if (sched_verbose >= 6)
20354 fprintf (dump, "ngroups = %d\n", n_groups);
20355 print_rtl (dump, current_sched_info->prev_head);
20356 fprintf (dump, "Done finish_sched\n");
20361 struct _rs6000_sched_context
20363 short cached_can_issue_more;
20364 rtx last_scheduled_insn;
20365 int load_store_pendulum;
20368 typedef struct _rs6000_sched_context rs6000_sched_context_def;
20369 typedef rs6000_sched_context_def *rs6000_sched_context_t;
20371 /* Allocate storage for a new scheduling context.  */
20372 static void *
20373 rs6000_alloc_sched_context (void)
20375 return xmalloc (sizeof (rs6000_sched_context_def));
20378 /* If CLEAN_P is true, initialize _SC with clean data;
20379    otherwise initialize it from the global context.  */
20380 static void
20381 rs6000_init_sched_context (void *_sc, bool clean_p)
20383 rs6000_sched_context_t sc = (rs6000_sched_context_t) _sc;
20385 if (clean_p)
20387 sc->cached_can_issue_more = 0;
20388 sc->last_scheduled_insn = NULL_RTX;
20389 sc->load_store_pendulum = 0;
20391 else
20393 sc->cached_can_issue_more = cached_can_issue_more;
20394 sc->last_scheduled_insn = last_scheduled_insn;
20395 sc->load_store_pendulum = load_store_pendulum;
20399 /* Sets the global scheduling context to the one pointed to by _SC. */
20400 static void
20401 rs6000_set_sched_context (void *_sc)
20403 rs6000_sched_context_t sc = (rs6000_sched_context_t) _sc;
20405 gcc_assert (sc != NULL);
20407 cached_can_issue_more = sc->cached_can_issue_more;
20408 last_scheduled_insn = sc->last_scheduled_insn;
20409 load_store_pendulum = sc->load_store_pendulum;
20412 /* Free _SC. */
20413 static void
20414 rs6000_free_sched_context (void *_sc)
20416 gcc_assert (_sc != NULL);
20418 free (_sc);
20422 /* Length in units of the trampoline for entering a nested function. */
20425 rs6000_trampoline_size (void)
20427 int ret = 0;
20429 switch (DEFAULT_ABI)
20431 default:
20432 gcc_unreachable ();
20434 case ABI_AIX:
20435 ret = (TARGET_32BIT) ? 12 : 24;
20436 break;
20438 case ABI_DARWIN:
20439 case ABI_V4:
20440 ret = (TARGET_32BIT) ? 40 : 48;
20441 break;
20444 return ret;
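/* Worked sizes (sketch): ABI_AIX builds a 3-word function descriptor,
   so 3 * 4 == 12 bytes for 32-bit and 3 * 8 == 24 bytes for 64-bit;
   the V.4/Darwin trampolines are 40 and 48 bytes respectively.  */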
20447 /* Emit RTL insns to initialize the variable parts of a trampoline.
20448 FNADDR is an RTX for the address of the function's pure code.
20449 CXT is an RTX for the static chain value for the function. */
20451 void
20452 rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
20454 int regsize = (TARGET_32BIT) ? 4 : 8;
20455 rtx ctx_reg = force_reg (Pmode, cxt);
20457 switch (DEFAULT_ABI)
20459 default:
20460 gcc_unreachable ();
20462 /* Macros to shorten the code expansions below. */
20463 #define MEM_DEREF(addr) gen_rtx_MEM (Pmode, memory_address (Pmode, addr))
20464 #define MEM_PLUS(addr,offset) \
20465 gen_rtx_MEM (Pmode, memory_address (Pmode, plus_constant (addr, offset)))
20467 /* Under AIX, just build the 3-word function descriptor.  */
20468 case ABI_AIX:
20470 rtx fn_reg = gen_reg_rtx (Pmode);
20471 rtx toc_reg = gen_reg_rtx (Pmode);
20472 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
20473 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
20474 emit_move_insn (MEM_DEREF (addr), fn_reg);
20475 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
20476 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
20478 break;
20480 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
20481 case ABI_DARWIN:
20482 case ABI_V4:
20483 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__trampoline_setup"),
20484 FALSE, VOIDmode, 4,
20485 addr, Pmode,
20486 GEN_INT (rs6000_trampoline_size ()), SImode,
20487 fnaddr, Pmode,
20488 ctx_reg, Pmode);
20489 break;
20492 return;
20496 /* Table of valid machine attributes. */
20498 const struct attribute_spec rs6000_attribute_table[] =
20500 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
20501 { "altivec", 1, 1, false, true, false, rs6000_handle_altivec_attribute },
20502 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
20503 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
20504 { "ms_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
20505 { "gcc_struct", 0, 0, false, false, false, rs6000_handle_struct_attribute },
20506 #ifdef SUBTARGET_ATTRIBUTE_TABLE
20507 SUBTARGET_ATTRIBUTE_TABLE,
20508 #endif
20509 { NULL, 0, 0, false, false, false, NULL }
20512 /* Handle the "altivec" attribute. The attribute may have
20513 arguments as follows:
20515 __attribute__((altivec(vector__)))
20516 __attribute__((altivec(pixel__))) (always followed by 'unsigned short')
20517 __attribute__((altivec(bool__))) (always followed by 'unsigned')
20519 and may appear more than once (e.g., 'vector bool char') in a
20520 given declaration. */
20522 static tree
20523 rs6000_handle_altivec_attribute (tree *node,
20524 tree name ATTRIBUTE_UNUSED,
20525 tree args,
20526 int flags ATTRIBUTE_UNUSED,
20527 bool *no_add_attrs)
20529 tree type = *node, result = NULL_TREE;
20530 enum machine_mode mode;
20531 int unsigned_p;
20532 char altivec_type
20533 = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
20534 && TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
20535 ? *IDENTIFIER_POINTER (TREE_VALUE (args))
20536 : '?');
20538 while (POINTER_TYPE_P (type)
20539 || TREE_CODE (type) == FUNCTION_TYPE
20540 || TREE_CODE (type) == METHOD_TYPE
20541 || TREE_CODE (type) == ARRAY_TYPE)
20542 type = TREE_TYPE (type);
20544 mode = TYPE_MODE (type);
20546 /* Check for invalid AltiVec type qualifiers. */
20547 if (type == long_unsigned_type_node || type == long_integer_type_node)
20549 if (TARGET_64BIT)
20550 error ("use of %<long%> in AltiVec types is invalid for 64-bit code");
20551 else if (rs6000_warn_altivec_long)
20552 warning (0, "use of %<long%> in AltiVec types is deprecated; use %<int%>");
20554 else if (type == long_long_unsigned_type_node
20555 || type == long_long_integer_type_node)
20556 error ("use of %<long long%> in AltiVec types is invalid");
20557 else if (type == double_type_node)
20558 error ("use of %<double%> in AltiVec types is invalid");
20559 else if (type == long_double_type_node)
20560 error ("use of %<long double%> in AltiVec types is invalid");
20561 else if (type == boolean_type_node)
20562 error ("use of boolean types in AltiVec types is invalid");
20563 else if (TREE_CODE (type) == COMPLEX_TYPE)
20564 error ("use of %<complex%> in AltiVec types is invalid");
20565 else if (DECIMAL_FLOAT_MODE_P (mode))
20566 error ("use of decimal floating point types in AltiVec types is invalid");
20568 switch (altivec_type)
20570 case 'v':
20571 unsigned_p = TYPE_UNSIGNED (type);
20572 switch (mode)
20574 case SImode:
20575 result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
20576 break;
20577 case HImode:
20578 result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
20579 break;
20580 case QImode:
20581 result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
20582 break;
20583 case SFmode: result = V4SF_type_node; break;
20584 /* If the user says 'vector int bool', we may be handed the 'bool'
20585 attribute _before_ the 'vector' attribute, and so select the
20586 proper type in the 'b' case below. */
20587 case V4SImode: case V8HImode: case V16QImode: case V4SFmode:
20588 result = type;
20589 default: break;
20591 break;
20592 case 'b':
20593 switch (mode)
20595 case SImode: case V4SImode: result = bool_V4SI_type_node; break;
20596 case HImode: case V8HImode: result = bool_V8HI_type_node; break;
20597 case QImode: case V16QImode: result = bool_V16QI_type_node;
20598 default: break;
20600 break;
20601 case 'p':
20602 switch (mode)
20604 case V8HImode: result = pixel_V8HI_type_node;
20605 default: break;
20607 default: break;
20610 /* Propagate qualifiers attached to the element type
20611 onto the vector type. */
20612 if (result && result != type && TYPE_QUALS (type))
20613 result = build_qualified_type (result, TYPE_QUALS (type));
20615 *no_add_attrs = true; /* No need to hang on to the attribute. */
20617 if (result)
20618 *node = lang_hooks.types.reconstruct_complex_type (*node, result);
20620 return NULL_TREE;
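/* Usage sketch (hypothetical typedef names), using the internal
   attribute forms listed above:  */
#if 0
typedef int v4si_t __attribute__ ((altivec (vector__)));
typedef unsigned short v8pix_t __attribute__ ((altivec (pixel__)));
typedef unsigned int v4bool_t __attribute__ ((altivec (bool__)));
#endif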
20623 /* AltiVec defines four built-in scalar types that serve as vector
20624 elements; we must teach the compiler how to mangle them. */
20626 static const char *
20627 rs6000_mangle_type (const_tree type)
20629 type = TYPE_MAIN_VARIANT (type);
20631 if (TREE_CODE (type) != VOID_TYPE && TREE_CODE (type) != BOOLEAN_TYPE
20632 && TREE_CODE (type) != INTEGER_TYPE && TREE_CODE (type) != REAL_TYPE)
20633 return NULL;
20635 if (type == bool_char_type_node) return "U6__boolc";
20636 if (type == bool_short_type_node) return "U6__bools";
20637 if (type == pixel_type_node) return "u7__pixel";
20638 if (type == bool_int_type_node) return "U6__booli";
20640 /* Mangle IBM extended float long double as `g' (__float128) on
20641 powerpc*-linux where long-double-64 previously was the default. */
20642 if (TYPE_MAIN_VARIANT (type) == long_double_type_node
20643 && TARGET_ELF
20644 && TARGET_LONG_DOUBLE_128
20645 && !TARGET_IEEEQUAD)
20646 return "g";
20648 /* For all other types, use normal C++ mangling. */
20649 return NULL;
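/* E.g. (sketch): per the table above, an AltiVec bool int element type
   mangles as "U6__booli", and on powerpc*-linux with 128-bit IBM long
   double, 'long double' itself mangles as "g".  */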
20652 /* Handle a "longcall" or "shortcall" attribute; arguments as in
20653 struct attribute_spec.handler. */
20655 static tree
20656 rs6000_handle_longcall_attribute (tree *node, tree name,
20657 tree args ATTRIBUTE_UNUSED,
20658 int flags ATTRIBUTE_UNUSED,
20659 bool *no_add_attrs)
20661 if (TREE_CODE (*node) != FUNCTION_TYPE
20662 && TREE_CODE (*node) != FIELD_DECL
20663 && TREE_CODE (*node) != TYPE_DECL)
20665 warning (OPT_Wattributes, "%qs attribute only applies to functions",
20666 IDENTIFIER_POINTER (name));
20667 *no_add_attrs = true;
20670 return NULL_TREE;
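/* Usage sketch (hypothetical declaration):  */
#if 0
extern void far_away_fn (void) __attribute__ ((longcall));
#endif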
20673 /* Set longcall attributes on all functions declared when
20674 rs6000_default_long_calls is true. */
20675 static void
20676 rs6000_set_default_type_attributes (tree type)
20678 if (rs6000_default_long_calls
20679 && (TREE_CODE (type) == FUNCTION_TYPE
20680 || TREE_CODE (type) == METHOD_TYPE))
20681 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
20682 NULL_TREE,
20683 TYPE_ATTRIBUTES (type));
20685 #if TARGET_MACHO
20686 darwin_set_default_type_attributes (type);
20687 #endif
20690 /* Return a reference suitable for calling a function with the
20691 longcall attribute. */
20694 rs6000_longcall_ref (rtx call_ref)
20696 const char *call_name;
20697 tree node;
20699 if (GET_CODE (call_ref) != SYMBOL_REF)
20700 return call_ref;
20702 /* System V adds '.' to the internal name, so skip the leading periods.  */
20703 call_name = XSTR (call_ref, 0);
20704 if (*call_name == '.')
20706 while (*call_name == '.')
20707 call_name++;
20709 node = get_identifier (call_name);
20710 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
20713 return force_reg (Pmode, call_ref);
20716 #ifndef TARGET_USE_MS_BITFIELD_LAYOUT
20717 #define TARGET_USE_MS_BITFIELD_LAYOUT 0
20718 #endif
20720 /* Handle a "ms_struct" or "gcc_struct" attribute; arguments as in
20721 struct attribute_spec.handler. */
20722 static tree
20723 rs6000_handle_struct_attribute (tree *node, tree name,
20724 tree args ATTRIBUTE_UNUSED,
20725 int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
20727 tree *type = NULL;
20728 if (DECL_P (*node))
20730 if (TREE_CODE (*node) == TYPE_DECL)
20731 type = &TREE_TYPE (*node);
20733 else
20734 type = node;
20736 if (!(type && (TREE_CODE (*type) == RECORD_TYPE
20737 || TREE_CODE (*type) == UNION_TYPE)))
20739 warning (OPT_Wattributes, "%qs attribute ignored", IDENTIFIER_POINTER (name));
20740 *no_add_attrs = true;
20743 else if ((is_attribute_p ("ms_struct", name)
20744 && lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (*type)))
20745 || ((is_attribute_p ("gcc_struct", name)
20746 && lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (*type)))))
20748 warning (OPT_Wattributes, "%qs incompatible attribute ignored",
20749 IDENTIFIER_POINTER (name));
20750 *no_add_attrs = true;
20753 return NULL_TREE;
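/* Usage sketch (hypothetical types); as checked above, the two
   attributes are mutually exclusive on a given struct or union:  */
#if 0
struct ms_layout { char c; int i; } __attribute__ ((ms_struct));
struct gcc_layout { char c; int i; } __attribute__ ((gcc_struct));
#endif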
20756 static bool
20757 rs6000_ms_bitfield_layout_p (const_tree record_type)
20759 return (TARGET_USE_MS_BITFIELD_LAYOUT &&
20760 !lookup_attribute ("gcc_struct", TYPE_ATTRIBUTES (record_type)))
20761 || lookup_attribute ("ms_struct", TYPE_ATTRIBUTES (record_type));
20764 #ifdef USING_ELFOS_H
20766 /* A get_unnamed_section callback, used for switching to toc_section. */
20768 static void
20769 rs6000_elf_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
20771 if (DEFAULT_ABI == ABI_AIX
20772 && TARGET_MINIMAL_TOC
20773 && !TARGET_RELOCATABLE)
20775 if (!toc_initialized)
20777 toc_initialized = 1;
20778 fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
20779 (*targetm.asm_out.internal_label) (asm_out_file, "LCTOC", 0);
20780 fprintf (asm_out_file, "\t.tc ");
20781 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1[TC],");
20782 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
20783 fprintf (asm_out_file, "\n");
20785 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
20786 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
20787 fprintf (asm_out_file, " = .+32768\n");
20789 else
20790 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
20792 else if (DEFAULT_ABI == ABI_AIX && !TARGET_RELOCATABLE)
20793 fprintf (asm_out_file, "%s\n", TOC_SECTION_ASM_OP);
20794 else
20796 fprintf (asm_out_file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
20797 if (!toc_initialized)
20799 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (asm_out_file, "LCTOC1");
20800 fprintf (asm_out_file, " = .+32768\n");
20801 toc_initialized = 1;
20806 /* Implement TARGET_ASM_INIT_SECTIONS. */
20808 static void
20809 rs6000_elf_asm_init_sections (void)
20811 toc_section
20812 = get_unnamed_section (0, rs6000_elf_output_toc_section_asm_op, NULL);
20814 sdata2_section
20815 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
20816 SDATA2_SECTION_ASM_OP);
20819 /* Implement TARGET_SELECT_RTX_SECTION. */
20821 static section *
20822 rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
20823 unsigned HOST_WIDE_INT align)
20825 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
20826 return toc_section;
20827 else
20828 return default_elf_select_rtx_section (mode, x, align);
20831 /* For a SYMBOL_REF, set generic flags and then perform some
20832 target-specific processing.
20834 When the AIX ABI is requested on a non-AIX system, replace the
20835 function name with the real name (with a leading .) rather than the
20836 function descriptor name. This saves a lot of overriding code to
20837 read the prefixes. */
20839 static void
20840 rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
20842 default_encode_section_info (decl, rtl, first);
20844 if (first
20845 && TREE_CODE (decl) == FUNCTION_DECL
20846 && !TARGET_AIX
20847 && DEFAULT_ABI == ABI_AIX)
20849 rtx sym_ref = XEXP (rtl, 0);
20850 size_t len = strlen (XSTR (sym_ref, 0));
20851 char *str = XALLOCAVEC (char, len + 2);
20852 str[0] = '.';
20853 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
20854 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
20858 static inline bool
20859 compare_section_name (const char *section, const char *templ)
20861 int len;
20863 len = strlen (templ);
20864 return (strncmp (section, templ, len) == 0
20865 && (section[len] == 0 || section[len] == '.'));
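/* E.g. compare_section_name (".sdata.foo", ".sdata") is true (exact
   match or a '.'-separated suffix), while ".sdata2" does not match
   ".sdata"; that is why ".sdata2" is tested separately below.  */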
20868 bool
20869 rs6000_elf_in_small_data_p (const_tree decl)
20871 if (rs6000_sdata == SDATA_NONE)
20872 return false;
20874 /* We want to merge strings, so we never consider them small data. */
20875 if (TREE_CODE (decl) == STRING_CST)
20876 return false;
20878 /* Functions are never in the small data area. */
20879 if (TREE_CODE (decl) == FUNCTION_DECL)
20880 return false;
20882 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
20884 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
20885 if (compare_section_name (section, ".sdata")
20886 || compare_section_name (section, ".sdata2")
20887 || compare_section_name (section, ".gnu.linkonce.s")
20888 || compare_section_name (section, ".sbss")
20889 || compare_section_name (section, ".sbss2")
20890 || compare_section_name (section, ".gnu.linkonce.sb")
20891 || strcmp (section, ".PPC.EMB.sdata0") == 0
20892 || strcmp (section, ".PPC.EMB.sbss0") == 0)
20893 return true;
20895 else
20897 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
20899 if (size > 0
20900 && (unsigned HOST_WIDE_INT) size <= g_switch_value
20901 /* If it's not public, and we're not going to reference it there,
20902 there's no need to put it in the small data section. */
20903 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
20904 return true;
20907 return false;
20910 #endif /* USING_ELFOS_H */
20912 /* Implement TARGET_USE_BLOCKS_FOR_CONSTANT_P. */
20914 static bool
20915 rs6000_use_blocks_for_constant_p (enum machine_mode mode, const_rtx x)
20917 return !ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode);
20920 /* Return a REG that occurs in ADDR with coefficient 1.
20921 ADDR can be effectively incremented by incrementing REG.
20923 r0 is special and we must not select it as an address
20924 register by this routine since our caller will try to
20925 increment the returned register via an "la" instruction. */
20928 find_addr_reg (rtx addr)
20930 while (GET_CODE (addr) == PLUS)
20932 if (GET_CODE (XEXP (addr, 0)) == REG
20933 && REGNO (XEXP (addr, 0)) != 0)
20934 addr = XEXP (addr, 0);
20935 else if (GET_CODE (XEXP (addr, 1)) == REG
20936 && REGNO (XEXP (addr, 1)) != 0)
20937 addr = XEXP (addr, 1);
20938 else if (CONSTANT_P (XEXP (addr, 0)))
20939 addr = XEXP (addr, 1);
20940 else if (CONSTANT_P (XEXP (addr, 1)))
20941 addr = XEXP (addr, 0);
20942 else
20943 gcc_unreachable ();
20945 gcc_assert (GET_CODE (addr) == REG && REGNO (addr) != 0);
20946 return addr;
20949 void
20950 rs6000_fatal_bad_address (rtx op)
20952 fatal_insn ("bad address", op);
20955 #if TARGET_MACHO
20957 static tree branch_island_list = 0;
20959 /* Remember to generate a branch island for far calls to the given
20960 function. */
20962 static void
20963 add_compiler_branch_island (tree label_name, tree function_name,
20964 int line_number)
20966 tree branch_island = build_tree_list (function_name, label_name);
20967 TREE_TYPE (branch_island) = build_int_cst (NULL_TREE, line_number);
20968 TREE_CHAIN (branch_island) = branch_island_list;
20969 branch_island_list = branch_island;
20972 #define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND)
20973 #define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND)
20974 #define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \
20975 TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
20977 /* Generate far-jump branch islands for everything on the
20978 branch_island_list. Invoked immediately after the last instruction
20979 of the epilogue has been emitted; the branch-islands must be
20980 appended to, and contiguous with, the function body. Mach-O stubs
20981 are generated in machopic_output_stub(). */
20983 static void
20984 macho_branch_islands (void)
20986 char tmp_buf[512];
20987 tree branch_island;
20989 for (branch_island = branch_island_list;
20990 branch_island;
20991 branch_island = TREE_CHAIN (branch_island))
20993 const char *label =
20994 IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
20995 const char *name =
20996 IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island));
20997 char name_buf[512];
20998 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */
20999 if (name[0] == '*' || name[0] == '&')
21000 strcpy (name_buf, name+1);
21001 else
21003 name_buf[0] = '_';
21004 strcpy (name_buf+1, name);
21006 strcpy (tmp_buf, "\n");
21007 strcat (tmp_buf, label);
21008 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
21009 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
21010 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
21011 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
21012 if (flag_pic)
21014 strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
21015 strcat (tmp_buf, label);
21016 strcat (tmp_buf, "_pic\n");
21017 strcat (tmp_buf, label);
21018 strcat (tmp_buf, "_pic:\n\tmflr r11\n");
21020 strcat (tmp_buf, "\taddis r11,r11,ha16(");
21021 strcat (tmp_buf, name_buf);
21022 strcat (tmp_buf, " - ");
21023 strcat (tmp_buf, label);
21024 strcat (tmp_buf, "_pic)\n");
21026 strcat (tmp_buf, "\tmtlr r0\n");
21028 strcat (tmp_buf, "\taddi r12,r11,lo16(");
21029 strcat (tmp_buf, name_buf);
21030 strcat (tmp_buf, " - ");
21031 strcat (tmp_buf, label);
21032 strcat (tmp_buf, "_pic)\n");
21034 strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
21036 else
21038 strcat (tmp_buf, ":\nlis r12,hi16(");
21039 strcat (tmp_buf, name_buf);
21040 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
21041 strcat (tmp_buf, name_buf);
21042 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
21044 output_asm_insn (tmp_buf, 0);
21045 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
21046 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
21047 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island));
21048 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
21051 branch_island_list = 0;
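/* For illustration, the non-PIC island emitted above for a target
   function '_foo' behind a (hypothetical) label 'L42' is roughly:

       L42:
           lis   r12,hi16(_foo)
           ori   r12,r12,lo16(_foo)
           mtctr r12
           bctr   */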
21054 /* NO_PREVIOUS_DEF checks in the linked list whether the function
21055    name is already there.  */
21057 static int
21058 no_previous_def (tree function_name)
21060 tree branch_island;
21061 for (branch_island = branch_island_list;
21062 branch_island;
21063 branch_island = TREE_CHAIN (branch_island))
21064 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
21065 return 0;
21066 return 1;
21069 /* GET_PREV_LABEL returns the island label recorded for FUNCTION_NAME
21070 by a previous call, or 0 if there is none. */
21072 static tree
21073 get_prev_label (tree function_name)
21075 tree branch_island;
21076 for (branch_island = branch_island_list;
21077 branch_island;
21078 branch_island = TREE_CHAIN (branch_island))
21079 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
21080 return BRANCH_ISLAND_LABEL_NAME (branch_island);
21081 return 0;
21084 #ifndef DARWIN_LINKER_GENERATES_ISLANDS
21085 #define DARWIN_LINKER_GENERATES_ISLANDS 0
21086 #endif
21088 /* KEXTs still need branch islands. */
21089 #define DARWIN_GENERATE_ISLANDS (!DARWIN_LINKER_GENERATES_ISLANDS \
21090 || flag_mkernel || flag_apple_kext)
21092 /* INSN is either a function call or a millicode call. It may have an
21093 unconditional jump in its delay slot.
21095 CALL_DEST is the routine we are calling. */
21097 char *
21098 output_call (rtx insn, rtx *operands, int dest_operand_number,
21099 int cookie_operand_number)
21101 static char buf[256];
21102 if (DARWIN_GENERATE_ISLANDS
21103 && GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
21104 && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
21106 tree labelname;
21107 tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));
21109 if (no_previous_def (funname))
21111 rtx label_rtx = gen_label_rtx ();
21112 char *label_buf, temp_buf[256];
21113 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
21114 CODE_LABEL_NUMBER (label_rtx));
21115 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
21116 labelname = get_identifier (label_buf);
21117 add_compiler_branch_island (labelname, funname, insn_line (insn));
21119 else
21120 labelname = get_prev_label (funname);
21122 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
21123 instruction will reach 'foo', otherwise link as 'bl L42'".
21124 "L42" should be a 'branch island', that will do a far jump to
21125 'foo'. Branch islands are generated in
21126 macho_branch_islands(). */
21127 sprintf (buf, "jbsr %%z%d,%.246s",
21128 dest_operand_number, IDENTIFIER_POINTER (labelname));
21130 else
21131 sprintf (buf, "bl %%z%d", dest_operand_number);
21132 return buf;
21135 /* Generate PIC and indirect symbol stubs. */
21137 void
21138 machopic_output_stub (FILE *file, const char *symb, const char *stub)
21140 unsigned int length;
21141 char *symbol_name, *lazy_ptr_name;
21142 char *local_label_0;
21143 static int label = 0;
21145 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
21146 symb = (*targetm.strip_name_encoding) (symb);
21149 length = strlen (symb);
21150 symbol_name = XALLOCAVEC (char, length + 32);
21151 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
21153 lazy_ptr_name = XALLOCAVEC (char, length + 32);
21154 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
21156 if (flag_pic == 2)
21157 switch_to_section (darwin_sections[machopic_picsymbol_stub1_section]);
21158 else
21159 switch_to_section (darwin_sections[machopic_symbol_stub1_section]);
21161 if (flag_pic == 2)
21163 fprintf (file, "\t.align 5\n");
21165 fprintf (file, "%s:\n", stub);
21166 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
21168 label++;
21169 local_label_0 = XALLOCAVEC (char, sizeof ("\"L00000000000$spb\""));
21170 sprintf (local_label_0, "\"L%011d$spb\"", label);
21172 fprintf (file, "\tmflr r0\n");
21173 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
21174 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
21175 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
21176 lazy_ptr_name, local_label_0);
21177 fprintf (file, "\tmtlr r0\n");
21178 fprintf (file, "\t%s r12,lo16(%s-%s)(r11)\n",
21179 (TARGET_64BIT ? "ldu" : "lwzu"),
21180 lazy_ptr_name, local_label_0);
21181 fprintf (file, "\tmtctr r12\n");
21182 fprintf (file, "\tbctr\n");
21184 else
21186 fprintf (file, "\t.align 4\n");
21188 fprintf (file, "%s:\n", stub);
21189 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
21191 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
21192 fprintf (file, "\t%s r12,lo16(%s)(r11)\n",
21193 (TARGET_64BIT ? "ldu" : "lwzu"),
21194 lazy_ptr_name);
21195 fprintf (file, "\tmtctr r12\n");
21196 fprintf (file, "\tbctr\n");
21199 switch_to_section (darwin_sections[machopic_lazy_symbol_ptr_section]);
21200 fprintf (file, "%s:\n", lazy_ptr_name);
21201 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
21202 fprintf (file, "%sdyld_stub_binding_helper\n",
21203 (TARGET_64BIT ? DOUBLE_INT_ASM_OP : "\t.long\t"));
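/* For illustration, the non-PIC branch above emits roughly the following
   (the stub and lazy-pointer names here are examples; the real names come
   from GEN_SYMBOL_NAME_FOR_SYMBOL and GEN_LAZY_PTR_NAME_FOR_SYMBOL):

	.align 4
	L_foo$stub:
	.indirect_symbol _foo
	lis r11,ha16(L_foo$lazy_ptr)
	lwzu r12,lo16(L_foo$lazy_ptr)(r11)
	mtctr r12
	bctr
	.lazy_symbol_pointer
	L_foo$lazy_ptr:
	.indirect_symbol _foo
	.long	dyld_stub_binding_helper

   The lazy pointer initially targets dyld_stub_binding_helper, and dyld
   rebinds it to the real symbol on first use.  */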
21206 /* Legitimize PIC addresses. If the address is already
21207 position-independent, we return ORIG. Newly generated
21208 position-independent addresses go into a reg. This is REG if
21209 nonzero; otherwise we allocate register(s) as necessary. */
21211 #define SMALL_INT(X) ((UINTVAL (X) + 0x8000) < 0x10000)
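/* That is, SMALL_INT (X) is true iff X fits a signed 16-bit immediate,
   i.e. -0x8000 <= INTVAL (X) < 0x8000; biasing by 0x8000 first turns the
   two-sided range test into a single unsigned comparison.  */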
21213 rtx
21214 rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
21215 rtx reg)
21217 rtx base, offset;
21219 if (reg == NULL && ! reload_in_progress && ! reload_completed)
21220 reg = gen_reg_rtx (Pmode);
21222 if (GET_CODE (orig) == CONST)
21224 rtx reg_temp;
21226 if (GET_CODE (XEXP (orig, 0)) == PLUS
21227 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
21228 return orig;
21230 gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
21232 /* Use a different reg for the intermediate value, as
21233 it will be marked UNCHANGING. */
21234 reg_temp = !can_create_pseudo_p () ? reg : gen_reg_rtx (Pmode);
21235 base = rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
21236 Pmode, reg_temp);
21237 offset =
21238 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
21239 Pmode, reg);
21241 if (GET_CODE (offset) == CONST_INT)
21243 if (SMALL_INT (offset))
21244 return plus_constant (base, INTVAL (offset));
21245 else if (! reload_in_progress && ! reload_completed)
21246 offset = force_reg (Pmode, offset);
21247 else
21249 rtx mem = force_const_mem (Pmode, orig);
21250 return machopic_legitimize_pic_address (mem, Pmode, reg);
21253 return gen_rtx_PLUS (Pmode, base, offset);
21256 /* Fall back on generic machopic code. */
21257 return machopic_legitimize_pic_address (orig, mode, reg);
21260 /* Output a .machine directive for the Darwin assembler, and call
21261 the generic start_file routine. */
21263 static void
21264 rs6000_darwin_file_start (void)
21266 static const struct
21268 const char *arg;
21269 const char *name;
21270 int if_set;
21271 } mapping[] = {
21272 { "ppc64", "ppc64", MASK_64BIT },
21273 { "970", "ppc970", MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64 },
21274 { "power4", "ppc970", 0 },
21275 { "G5", "ppc970", 0 },
21276 { "7450", "ppc7450", 0 },
21277 { "7400", "ppc7400", MASK_ALTIVEC },
21278 { "G4", "ppc7400", 0 },
21279 { "750", "ppc750", 0 },
21280 { "740", "ppc750", 0 },
21281 { "G3", "ppc750", 0 },
21282 { "604e", "ppc604e", 0 },
21283 { "604", "ppc604", 0 },
21284 { "603e", "ppc603", 0 },
21285 { "603", "ppc603", 0 },
21286 { "601", "ppc601", 0 },
21287 { NULL, "ppc", 0 } };
21288 const char *cpu_id = "";
21289 size_t i;
21291 rs6000_file_start ();
21292 darwin_file_start ();
21294 /* Determine the argument to -mcpu=. Default to G3 if not specified. */
21295 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
21296 if (rs6000_select[i].set_arch_p && rs6000_select[i].string
21297 && rs6000_select[i].string[0] != '\0')
21298 cpu_id = rs6000_select[i].string;
21300 /* Look through the mapping array. Pick the first entry whose ARG
21301 matches the -mcpu argument, whose IF_SET bit is also set in the
21302 target flags, or whose ARG is NULL (the terminating default). */
21304 i = 0;
21305 while (mapping[i].arg != NULL
21306 && strcmp (mapping[i].arg, cpu_id) != 0
21307 && (mapping[i].if_set & target_flags) == 0)
21308 i++;
21310 fprintf (asm_out_file, "\t.machine %s\n", mapping[i].name);
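/* For example, -mcpu=G4 ends up emitting ".machine ppc7400" (matched by
   name, or earlier via the MASK_ALTIVEC bit of the "7400" entry), while a
   compilation with no -mcpu and no relevant mask bits falls through to
   the terminating entry and emits ".machine ppc".  */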
21313 #endif /* TARGET_MACHO */
21315 #if TARGET_ELF
21316 static int
21317 rs6000_elf_reloc_rw_mask (void)
21319 if (flag_pic)
21320 return 3;
21321 else if (DEFAULT_ABI == ABI_AIX)
21322 return 2;
21323 else
21324 return 0;
21327 /* Record an element in the table of global constructors. SYMBOL is
21328 a SYMBOL_REF of the function to be called; PRIORITY is a number
21329 between 0 and MAX_INIT_PRIORITY.
21331 This differs from default_named_section_asm_out_constructor in
21332 that we have special handling for -mrelocatable. */
21334 static void
21335 rs6000_elf_asm_out_constructor (rtx symbol, int priority)
21337 const char *section = ".ctors";
21338 char buf[16];
21340 if (priority != DEFAULT_INIT_PRIORITY)
21342 sprintf (buf, ".ctors.%.5u",
21343 /* Invert the numbering so the linker puts us in the proper
21344 order; constructors are run from right to left, and the
21345 linker sorts in increasing order. */
21346 MAX_INIT_PRIORITY - priority);
21347 section = buf;
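/* E.g., assuming the usual MAX_INIT_PRIORITY of 65535, a constructor
   with priority 65100 is placed in section ".ctors.00435".  */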
21350 switch_to_section (get_section (section, SECTION_WRITE, NULL));
21351 assemble_align (POINTER_SIZE);
21353 if (TARGET_RELOCATABLE)
21355 fputs ("\t.long (", asm_out_file);
21356 output_addr_const (asm_out_file, symbol);
21357 fputs (")@fixup\n", asm_out_file);
21359 else
21360 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
21363 static void
21364 rs6000_elf_asm_out_destructor (rtx symbol, int priority)
21366 const char *section = ".dtors";
21367 char buf[16];
21369 if (priority != DEFAULT_INIT_PRIORITY)
21371 sprintf (buf, ".dtors.%.5u",
21372 /* Invert the numbering so the linker puts us in the proper
21373 order; constructors are run from right to left, and the
21374 linker sorts in increasing order. */
21375 MAX_INIT_PRIORITY - priority);
21376 section = buf;
21379 switch_to_section (get_section (section, SECTION_WRITE, NULL));
21380 assemble_align (POINTER_SIZE);
21382 if (TARGET_RELOCATABLE)
21384 fputs ("\t.long (", asm_out_file);
21385 output_addr_const (asm_out_file, symbol);
21386 fputs (")@fixup\n", asm_out_file);
21388 else
21389 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
21392 void
21393 rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
21395 if (TARGET_64BIT)
21397 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
21398 ASM_OUTPUT_LABEL (file, name);
21399 fputs (DOUBLE_INT_ASM_OP, file);
21400 rs6000_output_function_entry (file, name);
21401 fputs (",.TOC.@tocbase,0\n\t.previous\n", file);
21402 if (DOT_SYMBOLS)
21404 fputs ("\t.size\t", file);
21405 assemble_name (file, name);
21406 fputs (",24\n\t.type\t.", file);
21407 assemble_name (file, name);
21408 fputs (",@function\n", file);
21409 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
21411 fputs ("\t.globl\t.", file);
21412 assemble_name (file, name);
21413 putc ('\n', file);
21416 else
21417 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
21418 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
21419 rs6000_output_function_entry (file, name);
21420 fputs (":\n", file);
21421 return;
21424 if (TARGET_RELOCATABLE
21425 && !TARGET_SECURE_PLT
21426 && (get_pool_size () != 0 || crtl->profile)
21427 && uses_TOC ())
21429 char buf[256];
21431 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
21433 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
21434 fprintf (file, "\t.long ");
21435 assemble_name (file, buf);
21436 putc ('-', file);
21437 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
21438 assemble_name (file, buf);
21439 putc ('\n', file);
21442 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
21443 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
21445 if (DEFAULT_ABI == ABI_AIX)
21447 const char *desc_name, *orig_name;
21449 orig_name = (*targetm.strip_name_encoding) (name);
21450 desc_name = orig_name;
21451 while (*desc_name == '.')
21452 desc_name++;
21454 if (TREE_PUBLIC (decl))
21455 fprintf (file, "\t.globl %s\n", desc_name);
21457 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
21458 fprintf (file, "%s:\n", desc_name);
21459 fprintf (file, "\t.long %s\n", orig_name);
21460 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
21461 if (DEFAULT_ABI == ABI_AIX)
21462 fputs ("\t.long 0\n", file);
21463 fprintf (file, "\t.previous\n");
21465 ASM_OUTPUT_LABEL (file, name);
21468 static void
21469 rs6000_elf_end_indicate_exec_stack (void)
21471 if (TARGET_32BIT)
21472 file_end_indicate_exec_stack ();
21474 #endif
21476 #if TARGET_XCOFF
21477 static void
21478 rs6000_xcoff_asm_output_anchor (rtx symbol)
21480 char buffer[100];
21482 sprintf (buffer, "$ + " HOST_WIDE_INT_PRINT_DEC,
21483 SYMBOL_REF_BLOCK_OFFSET (symbol));
21484 ASM_OUTPUT_DEF (asm_out_file, XSTR (symbol, 0), buffer);
21487 static void
21488 rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
21490 fputs (GLOBAL_ASM_OP, stream);
21491 RS6000_OUTPUT_BASENAME (stream, name);
21492 putc ('\n', stream);
21495 /* A get_unnamed_decl callback, used for read-only sections. PTR
21496 points to the section string variable. */
21498 static void
21499 rs6000_xcoff_output_readonly_section_asm_op (const void *directive)
21501 fprintf (asm_out_file, "\t.csect %s[RO],%s\n",
21502 *(const char *const *) directive,
21503 XCOFF_CSECT_DEFAULT_ALIGNMENT_STR);
21506 /* Likewise for read-write sections. */
21508 static void
21509 rs6000_xcoff_output_readwrite_section_asm_op (const void *directive)
21511 fprintf (asm_out_file, "\t.csect %s[RW],%s\n",
21512 *(const char *const *) directive,
21513 XCOFF_CSECT_DEFAULT_ALIGNMENT_STR);
21516 /* A get_unnamed_section callback, used for switching to toc_section. */
21518 static void
21519 rs6000_xcoff_output_toc_section_asm_op (const void *data ATTRIBUTE_UNUSED)
21521 if (TARGET_MINIMAL_TOC)
21523 /* toc_section is always selected at least once from
21524 rs6000_xcoff_file_start, so this is guaranteed to
21525 be defined exactly once in each file. */
21526 if (!toc_initialized)
21528 fputs ("\t.toc\nLCTOC..1:\n", asm_out_file);
21529 fputs ("\t.tc toc_table[TC],toc_table[RW]\n", asm_out_file);
21530 toc_initialized = 1;
21532 fprintf (asm_out_file, "\t.csect toc_table[RW]%s\n",
21533 (TARGET_32BIT ? "" : ",3"));
21535 else
21536 fputs ("\t.toc\n", asm_out_file);
21539 /* Implement TARGET_ASM_INIT_SECTIONS. */
21541 static void
21542 rs6000_xcoff_asm_init_sections (void)
21544 read_only_data_section
21545 = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
21546 &xcoff_read_only_section_name);
21548 private_data_section
21549 = get_unnamed_section (SECTION_WRITE,
21550 rs6000_xcoff_output_readwrite_section_asm_op,
21551 &xcoff_private_data_section_name);
21553 read_only_private_data_section
21554 = get_unnamed_section (0, rs6000_xcoff_output_readonly_section_asm_op,
21555 &xcoff_private_data_section_name);
21557 toc_section
21558 = get_unnamed_section (0, rs6000_xcoff_output_toc_section_asm_op, NULL);
21560 readonly_data_section = read_only_data_section;
21561 exception_section = data_section;
21564 static int
21565 rs6000_xcoff_reloc_rw_mask (void)
21567 return 3;
21570 static void
21571 rs6000_xcoff_asm_named_section (const char *name, unsigned int flags,
21572 tree decl ATTRIBUTE_UNUSED)
21574 int smclass;
21575 static const char * const suffix[3] = { "PR", "RO", "RW" };
21577 if (flags & SECTION_CODE)
21578 smclass = 0;
21579 else if (flags & SECTION_WRITE)
21580 smclass = 2;
21581 else
21582 smclass = 1;
21584 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
21585 (flags & SECTION_CODE) ? "." : "",
21586 name, suffix[smclass], flags & SECTION_ENTSIZE);
21589 static section *
21590 rs6000_xcoff_select_section (tree decl, int reloc,
21591 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
21593 if (decl_readonly_section (decl, reloc))
21595 if (TREE_PUBLIC (decl))
21596 return read_only_data_section;
21597 else
21598 return read_only_private_data_section;
21600 else
21602 if (TREE_PUBLIC (decl))
21603 return data_section;
21604 else
21605 return private_data_section;
21609 static void
21610 rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
21612 const char *name;
21614 /* Use select_section for private and uninitialized data. */
21615 if (!TREE_PUBLIC (decl)
21616 || DECL_COMMON (decl)
21617 || DECL_INITIAL (decl) == NULL_TREE
21618 || DECL_INITIAL (decl) == error_mark_node
21619 || (flag_zero_initialized_in_bss
21620 && initializer_zerop (DECL_INITIAL (decl))))
21621 return;
21623 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
21624 name = (*targetm.strip_name_encoding) (name);
21625 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
21628 /* Select section for constant in constant pool.
21630 On RS/6000, all constants are in the private read-only data area.
21631 However, if this is being placed in the TOC it must be output as a
21632 toc entry. */
21634 static section *
21635 rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
21636 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
21638 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
21639 return toc_section;
21640 else
21641 return read_only_private_data_section;
21644 /* Remove any trailing [DS] or the like from the symbol name. */
21646 static const char *
21647 rs6000_xcoff_strip_name_encoding (const char *name)
21649 size_t len;
21650 if (*name == '*')
21651 name++;
21652 len = strlen (name);
21653 if (name[len - 1] == ']')
21654 return ggc_alloc_string (name, len - 4);
21655 else
21656 return name;
21659 /* Section attributes. AIX is always PIC. */
21661 static unsigned int
21662 rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
21664 unsigned int align;
21665 unsigned int flags = default_section_type_flags (decl, name, reloc);
21667 /* Align to at least UNIT size. */
21668 if (flags & SECTION_CODE)
21669 align = MIN_UNITS_PER_WORD;
21670 else
21671 /* Increase alignment of large objects if not already stricter. */
21672 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
21673 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
21674 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
21676 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
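/* The log2 of the alignment rides out in the SECTION_ENTSIZE bits;
   rs6000_xcoff_asm_named_section prints it back as the trailing .csect
   alignment operand, e.g. ",3" for an 8-byte-aligned csect.  */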
21679 /* Output at beginning of assembler file.
21681 Initialize the section names for the RS/6000 at this point.
21683 Specify filename, including full path, to assembler.
21685 We want to go into the TOC section so at least one .toc will be emitted.
21686 Also, in order to output proper .bs/.es pairs, we need at least one static
21687 [RW] section emitted.
21689 Finally, declare mcount when profiling to make the assembler happy. */
21691 static void
21692 rs6000_xcoff_file_start (void)
21694 rs6000_gen_section_name (&xcoff_bss_section_name,
21695 main_input_filename, ".bss_");
21696 rs6000_gen_section_name (&xcoff_private_data_section_name,
21697 main_input_filename, ".rw_");
21698 rs6000_gen_section_name (&xcoff_read_only_section_name,
21699 main_input_filename, ".ro_");
21701 fputs ("\t.file\t", asm_out_file);
21702 output_quoted_string (asm_out_file, main_input_filename);
21703 fputc ('\n', asm_out_file);
21704 if (write_symbols != NO_DEBUG)
21705 switch_to_section (private_data_section);
21706 switch_to_section (text_section);
21707 if (profile_flag)
21708 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
21709 rs6000_file_start ();
21712 /* Output at end of assembler file.
21713 On the RS/6000, referencing data should automatically pull in text. */
21715 static void
21716 rs6000_xcoff_file_end (void)
21718 switch_to_section (text_section);
21719 fputs ("_section_.text:\n", asm_out_file);
21720 switch_to_section (data_section);
21721 fputs (TARGET_32BIT
21722 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
21723 asm_out_file);
21725 #endif /* TARGET_XCOFF */
21727 /* Compute a (partial) cost for rtx X. Return true if the complete
21728 cost has been computed, and false if subexpressions should be
21729 scanned. In either case, *TOTAL contains the cost result. */
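/* For illustration: in (plus (reg) (const_int 100)) the constant
   satisfies constraint "I" (a signed 16-bit immediate), so the CONST_INT
   case below reports cost 0 because the constant rides along in the
   addi; a larger constant that still matches reg_or_add_cint_operand
   costs COSTS_N_INSNS (1) instead.  */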
21731 static bool
21732 rs6000_rtx_costs (rtx x, int code, int outer_code, int *total,
21733 bool speed)
21735 enum machine_mode mode = GET_MODE (x);
21737 switch (code)
21739 /* On the RS/6000, if it is valid in the insn, it is free. */
21740 case CONST_INT:
21741 if (((outer_code == SET
21742 || outer_code == PLUS
21743 || outer_code == MINUS)
21744 && (satisfies_constraint_I (x)
21745 || satisfies_constraint_L (x)))
21746 || (outer_code == AND
21747 && (satisfies_constraint_K (x)
21748 || (mode == SImode
21749 ? satisfies_constraint_L (x)
21750 : satisfies_constraint_J (x))
21751 || mask_operand (x, mode)
21752 || (mode == DImode
21753 && mask64_operand (x, DImode))))
21754 || ((outer_code == IOR || outer_code == XOR)
21755 && (satisfies_constraint_K (x)
21756 || (mode == SImode
21757 ? satisfies_constraint_L (x)
21758 : satisfies_constraint_J (x))))
21759 || outer_code == ASHIFT
21760 || outer_code == ASHIFTRT
21761 || outer_code == LSHIFTRT
21762 || outer_code == ROTATE
21763 || outer_code == ROTATERT
21764 || outer_code == ZERO_EXTRACT
21765 || (outer_code == MULT
21766 && satisfies_constraint_I (x))
21767 || ((outer_code == DIV || outer_code == UDIV
21768 || outer_code == MOD || outer_code == UMOD)
21769 && exact_log2 (INTVAL (x)) >= 0)
21770 || (outer_code == COMPARE
21771 && (satisfies_constraint_I (x)
21772 || satisfies_constraint_K (x)))
21773 || (outer_code == EQ
21774 && (satisfies_constraint_I (x)
21775 || satisfies_constraint_K (x)
21776 || (mode == SImode
21777 ? satisfies_constraint_L (x)
21778 : satisfies_constraint_J (x))))
21779 || (outer_code == GTU
21780 && satisfies_constraint_I (x))
21781 || (outer_code == LTU
21782 && satisfies_constraint_P (x)))
21784 *total = 0;
21785 return true;
21787 else if ((outer_code == PLUS
21788 && reg_or_add_cint_operand (x, VOIDmode))
21789 || (outer_code == MINUS
21790 && reg_or_sub_cint_operand (x, VOIDmode))
21791 || ((outer_code == SET
21792 || outer_code == IOR
21793 || outer_code == XOR)
21794 && (INTVAL (x)
21795 & ~ (unsigned HOST_WIDE_INT) 0xffffffff) == 0))
21797 *total = COSTS_N_INSNS (1);
21798 return true;
21800 /* FALLTHRU */
21802 case CONST_DOUBLE:
21803 if (mode == DImode && code == CONST_DOUBLE)
21805 if ((outer_code == IOR || outer_code == XOR)
21806 && CONST_DOUBLE_HIGH (x) == 0
21807 && (CONST_DOUBLE_LOW (x)
21808 & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0)
21810 *total = 0;
21811 return true;
21813 else if ((outer_code == AND && and64_2_operand (x, DImode))
21814 || ((outer_code == SET
21815 || outer_code == IOR
21816 || outer_code == XOR)
21817 && CONST_DOUBLE_HIGH (x) == 0))
21819 *total = COSTS_N_INSNS (1);
21820 return true;
21823 /* FALLTHRU */
21825 case CONST:
21826 case HIGH:
21827 case SYMBOL_REF:
21828 case MEM:
21829 /* When optimizing for size, MEM should be slightly more expensive
21830 than generating the address, e.g., (plus (reg) (const)).
21831 L1 cache latency is about two instructions. */
21832 *total = !speed ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (2);
21833 return true;
21835 case LABEL_REF:
21836 *total = 0;
21837 return true;
21839 case PLUS:
21840 if (mode == DFmode)
21842 if (GET_CODE (XEXP (x, 0)) == MULT)
21844 /* FNMA accounted in outer NEG. */
21845 if (outer_code == NEG)
21846 *total = rs6000_cost->dmul - rs6000_cost->fp;
21847 else
21848 *total = rs6000_cost->dmul;
21850 else
21851 *total = rs6000_cost->fp;
21853 else if (mode == SFmode)
21855 /* FNMA accounted in outer NEG. */
21856 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
21857 *total = 0;
21858 else
21859 *total = rs6000_cost->fp;
21861 else
21862 *total = COSTS_N_INSNS (1);
21863 return false;
21865 case MINUS:
21866 if (mode == DFmode)
21868 if (GET_CODE (XEXP (x, 0)) == MULT
21869 || GET_CODE (XEXP (x, 1)) == MULT)
21871 /* FNMA accounted in outer NEG. */
21872 if (outer_code == NEG)
21873 *total = rs6000_cost->dmul - rs6000_cost->fp;
21874 else
21875 *total = rs6000_cost->dmul;
21877 else
21878 *total = rs6000_cost->fp;
21880 else if (mode == SFmode)
21882 /* FNMA accounted in outer NEG. */
21883 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
21884 *total = 0;
21885 else
21886 *total = rs6000_cost->fp;
21888 else
21889 *total = COSTS_N_INSNS (1);
21890 return false;
21892 case MULT:
21893 if (GET_CODE (XEXP (x, 1)) == CONST_INT
21894 && satisfies_constraint_I (XEXP (x, 1)))
21896 if (INTVAL (XEXP (x, 1)) >= -256
21897 && INTVAL (XEXP (x, 1)) <= 255)
21898 *total = rs6000_cost->mulsi_const9;
21899 else
21900 *total = rs6000_cost->mulsi_const;
21902 /* FMA accounted in outer PLUS/MINUS. */
21903 else if ((mode == DFmode || mode == SFmode)
21904 && (outer_code == PLUS || outer_code == MINUS))
21905 *total = 0;
21906 else if (mode == DFmode)
21907 *total = rs6000_cost->dmul;
21908 else if (mode == SFmode)
21909 *total = rs6000_cost->fp;
21910 else if (mode == DImode)
21911 *total = rs6000_cost->muldi;
21912 else
21913 *total = rs6000_cost->mulsi;
21914 return false;
21916 case DIV:
21917 case MOD:
21918 if (FLOAT_MODE_P (mode))
21920 *total = mode == DFmode ? rs6000_cost->ddiv
21921 : rs6000_cost->sdiv;
21922 return false;
21924 /* FALLTHRU */
21926 case UDIV:
21927 case UMOD:
21928 if (GET_CODE (XEXP (x, 1)) == CONST_INT
21929 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
21931 if (code == DIV || code == MOD)
21932 /* Shift, addze */
21933 *total = COSTS_N_INSNS (2);
21934 else
21935 /* Shift */
21936 *total = COSTS_N_INSNS (1);
21938 else
21940 if (GET_MODE (XEXP (x, 1)) == DImode)
21941 *total = rs6000_cost->divdi;
21942 else
21943 *total = rs6000_cost->divsi;
21945 /* Add in shift and subtract for MOD. */
21946 if (code == MOD || code == UMOD)
21947 *total += COSTS_N_INSNS (2);
21948 return false;
21950 case CTZ:
21951 case FFS:
21952 *total = COSTS_N_INSNS (4);
21953 return false;
21955 case POPCOUNT:
21956 *total = COSTS_N_INSNS (6);
21957 return false;
21959 case NOT:
21960 if (outer_code == AND || outer_code == IOR || outer_code == XOR)
21962 *total = 0;
21963 return false;
21965 /* FALLTHRU */
21967 case AND:
21968 case CLZ:
21969 case IOR:
21970 case XOR:
21971 case ZERO_EXTRACT:
21972 *total = COSTS_N_INSNS (1);
21973 return false;
21975 case ASHIFT:
21976 case ASHIFTRT:
21977 case LSHIFTRT:
21978 case ROTATE:
21979 case ROTATERT:
21980 /* Handle mul_highpart. */
21981 if (outer_code == TRUNCATE
21982 && GET_CODE (XEXP (x, 0)) == MULT)
21984 if (mode == DImode)
21985 *total = rs6000_cost->muldi;
21986 else
21987 *total = rs6000_cost->mulsi;
21988 return true;
21990 else if (outer_code == AND)
21991 *total = 0;
21992 else
21993 *total = COSTS_N_INSNS (1);
21994 return false;
21996 case SIGN_EXTEND:
21997 case ZERO_EXTEND:
21998 if (GET_CODE (XEXP (x, 0)) == MEM)
21999 *total = 0;
22000 else
22001 *total = COSTS_N_INSNS (1);
22002 return false;
22004 case COMPARE:
22005 case NEG:
22006 case ABS:
22007 if (!FLOAT_MODE_P (mode))
22009 *total = COSTS_N_INSNS (1);
22010 return false;
22012 /* FALLTHRU */
22014 case FLOAT:
22015 case UNSIGNED_FLOAT:
22016 case FIX:
22017 case UNSIGNED_FIX:
22018 case FLOAT_TRUNCATE:
22019 *total = rs6000_cost->fp;
22020 return false;
22022 case FLOAT_EXTEND:
22023 if (mode == DFmode)
22024 *total = 0;
22025 else
22026 *total = rs6000_cost->fp;
22027 return false;
22029 case UNSPEC:
22030 switch (XINT (x, 1))
22032 case UNSPEC_FRSP:
22033 *total = rs6000_cost->fp;
22034 return true;
22036 default:
22037 break;
22039 break;
22041 case CALL:
22042 case IF_THEN_ELSE:
22043 if (!speed)
22045 *total = COSTS_N_INSNS (1);
22046 return true;
22048 else if (FLOAT_MODE_P (mode)
22049 && TARGET_PPC_GFXOPT && TARGET_HARD_FLOAT && TARGET_FPRS)
22051 *total = rs6000_cost->fp;
22052 return false;
22054 break;
22056 case EQ:
22057 case GTU:
22058 case LTU:
22059 /* Carry bit requires mode == Pmode.
22060 NEG or PLUS already counted so only add one. */
22061 if (mode == Pmode
22062 && (outer_code == NEG || outer_code == PLUS))
22064 *total = COSTS_N_INSNS (1);
22065 return true;
22067 if (outer_code == SET)
22069 if (XEXP (x, 1) == const0_rtx)
22071 *total = COSTS_N_INSNS (2);
22072 return true;
22074 else if (mode == Pmode)
22076 *total = COSTS_N_INSNS (3);
22077 return false;
22080 /* FALLTHRU */
22082 case GT:
22083 case LT:
22084 case UNORDERED:
22085 if (outer_code == SET && (XEXP (x, 1) == const0_rtx))
22087 *total = COSTS_N_INSNS (2);
22088 return true;
22090 /* CC COMPARE. */
22091 if (outer_code == COMPARE)
22093 *total = 0;
22094 return true;
22096 break;
22098 default:
22099 break;
22102 return false;
22105 /* A C expression returning the cost of moving data from a register of class
22106 CLASS1 to one of CLASS2. */
22108 int
22109 rs6000_register_move_cost (enum machine_mode mode,
22110 enum reg_class from, enum reg_class to)
22112 /* Moves from/to GENERAL_REGS. */
22113 if (reg_classes_intersect_p (to, GENERAL_REGS)
22114 || reg_classes_intersect_p (from, GENERAL_REGS))
22116 if (! reg_classes_intersect_p (to, GENERAL_REGS))
22117 from = to;
22119 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
22120 return (rs6000_memory_move_cost (mode, from, 0)
22121 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
22123 /* It's more expensive to move CR_REGS than CR0_REGS because of the
22124 shift. */
22125 else if (from == CR_REGS)
22126 return 4;
22128 /* Power6 has slower LR/CTR moves so make them more expensive than
22129 memory in order to bias spills to memory. */
22130 else if (rs6000_cpu == PROCESSOR_POWER6
22131 && reg_classes_intersect_p (from, LINK_OR_CTR_REGS))
22132 return 6 * hard_regno_nregs[0][mode];
22134 else
22135 /* A move will cost one instruction per GPR moved. */
22136 return 2 * hard_regno_nregs[0][mode];
22139 /* Moving between two similar registers is just one instruction. */
22140 else if (reg_classes_intersect_p (to, from))
22141 return (mode == TFmode || mode == TDmode) ? 4 : 2;
22143 /* Everything else has to go through GENERAL_REGS. */
22144 else
22145 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
22146 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
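/* E.g. these processors have no direct FPR<->GPR copy instruction, so a
   move between FLOAT_REGS and GENERAL_REGS above is priced as a store
   plus a load through memory.  */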
22149 /* A C expression returning the cost of moving data of MODE from a register to
22150 or from memory. */
22152 int
22153 rs6000_memory_move_cost (enum machine_mode mode, enum reg_class rclass,
22154 int in ATTRIBUTE_UNUSED)
22156 if (reg_classes_intersect_p (rclass, GENERAL_REGS))
22157 return 4 * hard_regno_nregs[0][mode];
22158 else if (reg_classes_intersect_p (rclass, FLOAT_REGS))
22159 return 4 * hard_regno_nregs[32][mode];
22160 else if (reg_classes_intersect_p (rclass, ALTIVEC_REGS))
22161 return 4 * hard_regno_nregs[FIRST_ALTIVEC_REGNO][mode];
22162 else
22163 return 4 + rs6000_register_move_cost (mode, rclass, GENERAL_REGS);
22166 /* Returns a code for a target-specific builtin that implements
22167 the reciprocal of the function, or NULL_TREE if not available. */
22169 static tree
22170 rs6000_builtin_reciprocal (unsigned int fn, bool md_fn,
22171 bool sqrt ATTRIBUTE_UNUSED)
22173 if (! (TARGET_RECIP && TARGET_PPC_GFXOPT && !optimize_size
22174 && flag_finite_math_only && !flag_trapping_math
22175 && flag_unsafe_math_optimizations))
22176 return NULL_TREE;
22178 if (md_fn)
22179 return NULL_TREE;
22180 else
22181 switch (fn)
22183 case BUILT_IN_SQRTF:
22184 return rs6000_builtin_decls[RS6000_BUILTIN_RSQRTF];
22186 default:
22187 return NULL_TREE;
22191 /* Newton-Raphson approximation of single-precision floating point divide n/d.
22192 Assumes no trapping math and finite arguments. */
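/* The sequence below sums a truncated geometric series: with x0 ~= 1/d
   and e0 = 1 - d*x0, the exact reciprocal is x0/(1 - e0)
   = x0*(1 + e0 + e0^2 + ...), and y1 = x0 + (e0 + e0*e0)*x0 keeps the
   terms through e0^2.  The final u0 = n*y1, v0 = n - d*u0,
   dst = u0 + v0*y1 step folds the remaining residual back into the
   quotient to reach full single precision.  */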
22194 void
22195 rs6000_emit_swdivsf (rtx dst, rtx n, rtx d)
22197 rtx x0, e0, e1, y1, u0, v0, one;
22199 x0 = gen_reg_rtx (SFmode);
22200 e0 = gen_reg_rtx (SFmode);
22201 e1 = gen_reg_rtx (SFmode);
22202 y1 = gen_reg_rtx (SFmode);
22203 u0 = gen_reg_rtx (SFmode);
22204 v0 = gen_reg_rtx (SFmode);
22205 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));
22207 /* x0 = 1./d estimate */
22208 emit_insn (gen_rtx_SET (VOIDmode, x0,
22209 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, d),
22210 UNSPEC_FRES)));
22211 /* e0 = 1. - d * x0 */
22212 emit_insn (gen_rtx_SET (VOIDmode, e0,
22213 gen_rtx_MINUS (SFmode, one,
22214 gen_rtx_MULT (SFmode, d, x0))));
22215 /* e1 = e0 + e0 * e0 */
22216 emit_insn (gen_rtx_SET (VOIDmode, e1,
22217 gen_rtx_PLUS (SFmode,
22218 gen_rtx_MULT (SFmode, e0, e0), e0)));
22219 /* y1 = x0 + e1 * x0 */
22220 emit_insn (gen_rtx_SET (VOIDmode, y1,
22221 gen_rtx_PLUS (SFmode,
22222 gen_rtx_MULT (SFmode, e1, x0), x0)));
22223 /* u0 = n * y1 */
22224 emit_insn (gen_rtx_SET (VOIDmode, u0,
22225 gen_rtx_MULT (SFmode, n, y1)));
22226 /* v0 = n - d * u0 */
22227 emit_insn (gen_rtx_SET (VOIDmode, v0,
22228 gen_rtx_MINUS (SFmode, n,
22229 gen_rtx_MULT (SFmode, d, u0))));
22230 /* dst = u0 + v0 * y1 */
22231 emit_insn (gen_rtx_SET (VOIDmode, dst,
22232 gen_rtx_PLUS (SFmode,
22233 gen_rtx_MULT (SFmode, v0, y1), u0)));
22236 /* Newton-Raphson approximation of double-precision floating point divide n/d.
22237 Assumes no trapping math and finite arguments. */
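/* Same scheme as rs6000_emit_swdivsf, but the error term is squared at
   each step: e1 = e0^2 and e2 = e1^2 = e0^4, so
   y3 = x0*(1 + e0)*(1 + e0^2)*(1 + e0^4) = x0*(1 + e0 + ... + e0^7),
   leaving a relative error of order e0^8 before the final residual step,
   enough to reach double precision from the initial estimate.  */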
22239 void
22240 rs6000_emit_swdivdf (rtx dst, rtx n, rtx d)
22242 rtx x0, e0, e1, e2, y1, y2, y3, u0, v0, one;
22244 x0 = gen_reg_rtx (DFmode);
22245 e0 = gen_reg_rtx (DFmode);
22246 e1 = gen_reg_rtx (DFmode);
22247 e2 = gen_reg_rtx (DFmode);
22248 y1 = gen_reg_rtx (DFmode);
22249 y2 = gen_reg_rtx (DFmode);
22250 y3 = gen_reg_rtx (DFmode);
22251 u0 = gen_reg_rtx (DFmode);
22252 v0 = gen_reg_rtx (DFmode);
22253 one = force_reg (DFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, DFmode));
22255 /* x0 = 1./d estimate */
22256 emit_insn (gen_rtx_SET (VOIDmode, x0,
22257 gen_rtx_UNSPEC (DFmode, gen_rtvec (1, d),
22258 UNSPEC_FRES)));
22259 /* e0 = 1. - d * x0 */
22260 emit_insn (gen_rtx_SET (VOIDmode, e0,
22261 gen_rtx_MINUS (DFmode, one,
22262 gen_rtx_MULT (DFmode, d, x0))));
22263 /* y1 = x0 + e0 * x0 */
22264 emit_insn (gen_rtx_SET (VOIDmode, y1,
22265 gen_rtx_PLUS (DFmode,
22266 gen_rtx_MULT (DFmode, e0, x0), x0)));
22267 /* e1 = e0 * e0 */
22268 emit_insn (gen_rtx_SET (VOIDmode, e1,
22269 gen_rtx_MULT (DFmode, e0, e0)));
22270 /* y2 = y1 + e1 * y1 */
22271 emit_insn (gen_rtx_SET (VOIDmode, y2,
22272 gen_rtx_PLUS (DFmode,
22273 gen_rtx_MULT (DFmode, e1, y1), y1)));
22274 /* e2 = e1 * e1 */
22275 emit_insn (gen_rtx_SET (VOIDmode, e2,
22276 gen_rtx_MULT (DFmode, e1, e1)));
22277 /* y3 = y2 + e2 * y2 */
22278 emit_insn (gen_rtx_SET (VOIDmode, y3,
22279 gen_rtx_PLUS (DFmode,
22280 gen_rtx_MULT (DFmode, e2, y2), y2)));
22281 /* u0 = n * y3 */
22282 emit_insn (gen_rtx_SET (VOIDmode, u0,
22283 gen_rtx_MULT (DFmode, n, y3)));
22284 /* v0 = n - d * u0 */
22285 emit_insn (gen_rtx_SET (VOIDmode, v0,
22286 gen_rtx_MINUS (DFmode, n,
22287 gen_rtx_MULT (DFmode, d, u0))));
22288 /* dst = u0 + v0 * y3 */
22289 emit_insn (gen_rtx_SET (VOIDmode, dst,
22290 gen_rtx_PLUS (DFmode,
22291 gen_rtx_MULT (DFmode, v0, y3), u0)));
22295 /* Newton-Raphson approximation of single-precision floating point rsqrt.
22296 Assumes no trapping math and finite arguments. */
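/* The x1, x2 and dst steps below are Newton iterations for 1/sqrt(src),
   x' = x*(1.5 - 0.5*src*x*x), with y1 = 1.5*src - src precomputing
   0.5*src.  The src*src == src test bails out early for 0.0, 1.0, Inf
   and NaN, inputs for which the iteration is not valid.  */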
22298 void
22299 rs6000_emit_swrsqrtsf (rtx dst, rtx src)
22301 rtx x0, x1, x2, y1, u0, u1, u2, v0, v1, v2, t0,
22302 half, one, halfthree, c1, cond, label;
22304 x0 = gen_reg_rtx (SFmode);
22305 x1 = gen_reg_rtx (SFmode);
22306 x2 = gen_reg_rtx (SFmode);
22307 y1 = gen_reg_rtx (SFmode);
22308 u0 = gen_reg_rtx (SFmode);
22309 u1 = gen_reg_rtx (SFmode);
22310 u2 = gen_reg_rtx (SFmode);
22311 v0 = gen_reg_rtx (SFmode);
22312 v1 = gen_reg_rtx (SFmode);
22313 v2 = gen_reg_rtx (SFmode);
22314 t0 = gen_reg_rtx (SFmode);
22315 halfthree = gen_reg_rtx (SFmode);
22316 cond = gen_rtx_REG (CCFPmode, CR1_REGNO);
22317 label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
22319 /* Check for 0.0, 1.0, NaN and Inf by testing whether src * src == src. */
22320 emit_insn (gen_rtx_SET (VOIDmode, t0,
22321 gen_rtx_MULT (SFmode, src, src)));
22323 emit_insn (gen_rtx_SET (VOIDmode, cond,
22324 gen_rtx_COMPARE (CCFPmode, t0, src)));
22325 c1 = gen_rtx_EQ (VOIDmode, cond, const0_rtx);
22326 emit_unlikely_jump (c1, label);
22328 half = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconsthalf, SFmode));
22329 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));
22331 /* halfthree = 1.5 = 1.0 + 0.5 */
22332 emit_insn (gen_rtx_SET (VOIDmode, halfthree,
22333 gen_rtx_PLUS (SFmode, one, half)));
22335 /* x0 = rsqrt estimate */
22336 emit_insn (gen_rtx_SET (VOIDmode, x0,
22337 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, src),
22338 UNSPEC_RSQRT)));
22340 /* y1 = 0.5 * src = 1.5 * src - src -> fewer constants */
22341 emit_insn (gen_rtx_SET (VOIDmode, y1,
22342 gen_rtx_MINUS (SFmode,
22343 gen_rtx_MULT (SFmode, src, halfthree),
22344 src)));
22346 /* x1 = x0 * (1.5 - y1 * (x0 * x0)) */
22347 emit_insn (gen_rtx_SET (VOIDmode, u0,
22348 gen_rtx_MULT (SFmode, x0, x0)));
22349 emit_insn (gen_rtx_SET (VOIDmode, v0,
22350 gen_rtx_MINUS (SFmode,
22351 halfthree,
22352 gen_rtx_MULT (SFmode, y1, u0))));
22353 emit_insn (gen_rtx_SET (VOIDmode, x1,
22354 gen_rtx_MULT (SFmode, x0, v0)));
22356 /* x2 = x1 * (1.5 - y1 * (x1 * x1)) */
22357 emit_insn (gen_rtx_SET (VOIDmode, u1,
22358 gen_rtx_MULT (SFmode, x1, x1)));
22359 emit_insn (gen_rtx_SET (VOIDmode, v1,
22360 gen_rtx_MINUS (SFmode,
22361 halfthree,
22362 gen_rtx_MULT (SFmode, y1, u1))));
22363 emit_insn (gen_rtx_SET (VOIDmode, x2,
22364 gen_rtx_MULT (SFmode, x1, v1)));
22366 /* dst = x2 * (1.5 - y1 * (x2 * x2)) */
22367 emit_insn (gen_rtx_SET (VOIDmode, u2,
22368 gen_rtx_MULT (SFmode, x2, x2)));
22369 emit_insn (gen_rtx_SET (VOIDmode, v2,
22370 gen_rtx_MINUS (SFmode,
22371 halfthree,
22372 gen_rtx_MULT (SFmode, y1, u2))));
22373 emit_insn (gen_rtx_SET (VOIDmode, dst,
22374 gen_rtx_MULT (SFmode, x2, v2)));
22376 emit_label (XEXP (label, 0));
22379 /* Emit popcount intrinsic on TARGET_POPCNTB targets. DST is the
22380 target, and SRC is the argument operand. */
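/* popcntb leaves the population count of each byte in the corresponding
   byte of its result; multiplying by 0x01010101 (or the replicated
   64-bit constant) accumulates all of the byte counts into the most
   significant byte, which the final shift right by 24 (or 56)
   extracts.  */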
22382 void
22383 rs6000_emit_popcount (rtx dst, rtx src)
22385 enum machine_mode mode = GET_MODE (dst);
22386 rtx tmp1, tmp2;
22388 tmp1 = gen_reg_rtx (mode);
22390 if (mode == SImode)
22392 emit_insn (gen_popcntbsi2 (tmp1, src));
22393 tmp2 = expand_mult (SImode, tmp1, GEN_INT (0x01010101),
22394 NULL_RTX, 0);
22395 tmp2 = force_reg (SImode, tmp2);
22396 emit_insn (gen_lshrsi3 (dst, tmp2, GEN_INT (24)));
22398 else
22400 emit_insn (gen_popcntbdi2 (tmp1, src));
22401 tmp2 = expand_mult (DImode, tmp1,
22402 GEN_INT ((HOST_WIDE_INT)
22403 0x01010101 << 32 | 0x01010101),
22404 NULL_RTX, 0);
22405 tmp2 = force_reg (DImode, tmp2);
22406 emit_insn (gen_lshrdi3 (dst, tmp2, GEN_INT (56)));
22411 /* Emit parity intrinsic on TARGET_POPCNTB targets. DST is the
22412 target, and SRC is the argument operand. */
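/* Parity is the low bit of the population count, and xor preserves it,
   so the ladders below fold the popcntb result in half repeatedly
   (32->16->8 bits for SImode) and mask with 1.  The cost tests choose
   the shift/xor ladder only when the multiply used by
   rs6000_emit_popcount would be at least as expensive.  */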
22414 void
22415 rs6000_emit_parity (rtx dst, rtx src)
22417 enum machine_mode mode = GET_MODE (dst);
22418 rtx tmp;
22420 tmp = gen_reg_rtx (mode);
22421 if (mode == SImode)
22423 /* Is mult+shift >= shift+xor+shift+xor? */
22424 if (rs6000_cost->mulsi_const >= COSTS_N_INSNS (3))
22426 rtx tmp1, tmp2, tmp3, tmp4;
22428 tmp1 = gen_reg_rtx (SImode);
22429 emit_insn (gen_popcntbsi2 (tmp1, src));
22431 tmp2 = gen_reg_rtx (SImode);
22432 emit_insn (gen_lshrsi3 (tmp2, tmp1, GEN_INT (16)));
22433 tmp3 = gen_reg_rtx (SImode);
22434 emit_insn (gen_xorsi3 (tmp3, tmp1, tmp2));
22436 tmp4 = gen_reg_rtx (SImode);
22437 emit_insn (gen_lshrsi3 (tmp4, tmp3, GEN_INT (8)));
22438 emit_insn (gen_xorsi3 (tmp, tmp3, tmp4));
22440 else
22441 rs6000_emit_popcount (tmp, src);
22442 emit_insn (gen_andsi3 (dst, tmp, const1_rtx));
22444 else
22446 /* Is mult+shift >= shift+xor+shift+xor+shift+xor? */
22447 if (rs6000_cost->muldi >= COSTS_N_INSNS (5))
22449 rtx tmp1, tmp2, tmp3, tmp4, tmp5, tmp6;
22451 tmp1 = gen_reg_rtx (DImode);
22452 emit_insn (gen_popcntbdi2 (tmp1, src));
22454 tmp2 = gen_reg_rtx (DImode);
22455 emit_insn (gen_lshrdi3 (tmp2, tmp1, GEN_INT (32)));
22456 tmp3 = gen_reg_rtx (DImode);
22457 emit_insn (gen_xordi3 (tmp3, tmp1, tmp2));
22459 tmp4 = gen_reg_rtx (DImode);
22460 emit_insn (gen_lshrdi3 (tmp4, tmp3, GEN_INT (16)));
22461 tmp5 = gen_reg_rtx (DImode);
22462 emit_insn (gen_xordi3 (tmp5, tmp3, tmp4));
22464 tmp6 = gen_reg_rtx (DImode);
22465 emit_insn (gen_lshrdi3 (tmp6, tmp5, GEN_INT (8)));
22466 emit_insn (gen_xordi3 (tmp, tmp5, tmp6));
22468 else
22469 rs6000_emit_popcount (tmp, src);
22470 emit_insn (gen_anddi3 (dst, tmp, const1_rtx));
22474 /* Return an RTX representing where to find the function value of a
22475 function returning MODE. */
22476 static rtx
22477 rs6000_complex_function_value (enum machine_mode mode)
22479 unsigned int regno;
22480 rtx r1, r2;
22481 enum machine_mode inner = GET_MODE_INNER (mode);
22482 unsigned int inner_bytes = GET_MODE_SIZE (inner);
22484 if (FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
22485 regno = FP_ARG_RETURN;
22486 else
22488 regno = GP_ARG_RETURN;
22490 /* 32-bit is OK since it'll go in r3/r4. */
22491 if (TARGET_32BIT && inner_bytes >= 4)
22492 return gen_rtx_REG (mode, regno);
22495 if (inner_bytes >= 8)
22496 return gen_rtx_REG (mode, regno);
22498 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
22499 const0_rtx);
22500 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
22501 GEN_INT (inner_bytes));
22502 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
22505 /* Define how to find the value returned by a function.
22506 VALTYPE is the data type of the value (as a tree).
22507 If the precise function being called is known, FUNC is its FUNCTION_DECL;
22508 otherwise, FUNC is 0.
22510 On the SPE, both FPs and vectors are returned in r3.
22512 On RS/6000 an integer value is in r3 and a floating-point value is in
22513 fp1, unless -msoft-float. */
22515 rtx
22516 rs6000_function_value (const_tree valtype, const_tree func ATTRIBUTE_UNUSED)
22518 enum machine_mode mode;
22519 unsigned int regno;
22521 /* Special handling for structs in darwin64. */
22522 if (rs6000_darwin64_abi
22523 && TYPE_MODE (valtype) == BLKmode
22524 && TREE_CODE (valtype) == RECORD_TYPE
22525 && int_size_in_bytes (valtype) > 0)
22527 CUMULATIVE_ARGS valcum;
22528 rtx valret;
22530 valcum.words = 0;
22531 valcum.fregno = FP_ARG_MIN_REG;
22532 valcum.vregno = ALTIVEC_ARG_MIN_REG;
22533 /* Do a trial code generation as if this were going to be passed as
22534 an argument; if any part goes in memory, we return NULL. */
22535 valret = rs6000_darwin64_record_arg (&valcum, valtype, 1, true);
22536 if (valret)
22537 return valret;
22538 /* Otherwise fall through to standard ABI rules. */
22541 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
22543 /* A long long return value needs to be split with -mpowerpc64 under the 32-bit ABI. */
22544 return gen_rtx_PARALLEL (DImode,
22545 gen_rtvec (2,
22546 gen_rtx_EXPR_LIST (VOIDmode,
22547 gen_rtx_REG (SImode, GP_ARG_RETURN),
22548 const0_rtx),
22549 gen_rtx_EXPR_LIST (VOIDmode,
22550 gen_rtx_REG (SImode,
22551 GP_ARG_RETURN + 1),
22552 GEN_INT (4))));
22554 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DCmode)
22556 return gen_rtx_PARALLEL (DCmode,
22557 gen_rtvec (4,
22558 gen_rtx_EXPR_LIST (VOIDmode,
22559 gen_rtx_REG (SImode, GP_ARG_RETURN),
22560 const0_rtx),
22561 gen_rtx_EXPR_LIST (VOIDmode,
22562 gen_rtx_REG (SImode,
22563 GP_ARG_RETURN + 1),
22564 GEN_INT (4)),
22565 gen_rtx_EXPR_LIST (VOIDmode,
22566 gen_rtx_REG (SImode,
22567 GP_ARG_RETURN + 2),
22568 GEN_INT (8)),
22569 gen_rtx_EXPR_LIST (VOIDmode,
22570 gen_rtx_REG (SImode,
22571 GP_ARG_RETURN + 3),
22572 GEN_INT (12))));
22575 mode = TYPE_MODE (valtype);
22576 if ((INTEGRAL_TYPE_P (valtype) && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
22577 || POINTER_TYPE_P (valtype))
22578 mode = TARGET_32BIT ? SImode : DImode;
22580 if (DECIMAL_FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
22581 /* _Decimal128 must use an even/odd register pair. */
22582 regno = (mode == TDmode) ? FP_ARG_RETURN + 1 : FP_ARG_RETURN;
22583 else if (SCALAR_FLOAT_TYPE_P (valtype) && TARGET_HARD_FLOAT && TARGET_FPRS)
22584 regno = FP_ARG_RETURN;
22585 else if (TREE_CODE (valtype) == COMPLEX_TYPE
22586 && targetm.calls.split_complex_arg)
22587 return rs6000_complex_function_value (mode);
22588 else if (TREE_CODE (valtype) == VECTOR_TYPE
22589 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI
22590 && ALTIVEC_VECTOR_MODE (mode))
22591 regno = ALTIVEC_ARG_RETURN;
22592 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
22593 && (mode == DFmode || mode == DCmode
22594 || mode == TFmode || mode == TCmode))
22595 return spe_build_register_parallel (mode, GP_ARG_RETURN);
22596 else
22597 regno = GP_ARG_RETURN;
22599 return gen_rtx_REG (mode, regno);
22602 /* Define how to find the value returned by a library function
22603 assuming the value has mode MODE. */
22604 rtx
22605 rs6000_libcall_value (enum machine_mode mode)
22607 unsigned int regno;
22609 if (TARGET_32BIT && TARGET_POWERPC64 && mode == DImode)
22611 /* A long long return value needs to be split with -mpowerpc64 under the 32-bit ABI. */
22612 return gen_rtx_PARALLEL (DImode,
22613 gen_rtvec (2,
22614 gen_rtx_EXPR_LIST (VOIDmode,
22615 gen_rtx_REG (SImode, GP_ARG_RETURN),
22616 const0_rtx),
22617 gen_rtx_EXPR_LIST (VOIDmode,
22618 gen_rtx_REG (SImode,
22619 GP_ARG_RETURN + 1),
22620 GEN_INT (4))));
22623 if (DECIMAL_FLOAT_MODE_P (mode) && TARGET_HARD_FLOAT && TARGET_FPRS)
22624 /* _Decimal128 must use an even/odd register pair. */
22625 regno = (mode == TDmode) ? FP_ARG_RETURN + 1 : FP_ARG_RETURN;
22626 else if (SCALAR_FLOAT_MODE_P (mode)
22627 && TARGET_HARD_FLOAT && TARGET_FPRS)
22628 regno = FP_ARG_RETURN;
22629 else if (ALTIVEC_VECTOR_MODE (mode)
22630 && TARGET_ALTIVEC && TARGET_ALTIVEC_ABI)
22631 regno = ALTIVEC_ARG_RETURN;
22632 else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
22633 return rs6000_complex_function_value (mode);
22634 else if (TARGET_E500_DOUBLE && TARGET_HARD_FLOAT
22635 && (mode == DFmode || mode == DCmode
22636 || mode == TFmode || mode == TCmode))
22637 return spe_build_register_parallel (mode, GP_ARG_RETURN);
22638 else
22639 regno = GP_ARG_RETURN;
22641 return gen_rtx_REG (mode, regno);
22644 /* Define the offset between two registers, FROM (to be eliminated) and its
22645 replacement TO, at the start of a routine. */
22646 HOST_WIDE_INT
22647 rs6000_initial_elimination_offset (int from, int to)
22649 rs6000_stack_t *info = rs6000_stack_info ();
22650 HOST_WIDE_INT offset;
22652 if (from == HARD_FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
22653 offset = info->push_p ? 0 : -info->total_size;
22654 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
22656 offset = info->push_p ? 0 : -info->total_size;
22657 if (FRAME_GROWS_DOWNWARD)
22658 offset += info->fixed_size + info->vars_size + info->parm_size;
22660 else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
22661 offset = FRAME_GROWS_DOWNWARD
22662 ? info->fixed_size + info->vars_size + info->parm_size
22663 : 0;
22664 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
22665 offset = info->total_size;
22666 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
22667 offset = info->push_p ? info->total_size : 0;
22668 else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
22669 offset = 0;
22670 else
22671 gcc_unreachable ();
22673 return offset;
22676 /* Return true if TYPE is a SPE or AltiVec opaque type. */
22678 static bool
22679 rs6000_is_opaque_type (const_tree type)
22681 return (type == opaque_V2SI_type_node
22682 || type == opaque_V2SF_type_node
22683 || type == opaque_V4SI_type_node);
22686 static rtx
22687 rs6000_dwarf_register_span (rtx reg)
22689 unsigned regno;
22691 if (TARGET_SPE
22692 && (SPE_VECTOR_MODE (GET_MODE (reg))
22693 || (TARGET_E500_DOUBLE
22694 && (GET_MODE (reg) == DFmode || GET_MODE (reg) == DDmode))))
22695 ;
22696 else
22697 return NULL_RTX;
22699 regno = REGNO (reg);
22701 /* The duality of the SPE register size wreaks all kinds of havoc.
22702 This is a way of distinguishing r0 in 32-bits from r0 in
22703 64-bits. */
22704 return
22705 gen_rtx_PARALLEL (VOIDmode,
22706 BYTES_BIG_ENDIAN
22707 ? gen_rtvec (2,
22708 gen_rtx_REG (SImode, regno + 1200),
22709 gen_rtx_REG (SImode, regno))
22710 : gen_rtvec (2,
22711 gen_rtx_REG (SImode, regno),
22712 gen_rtx_REG (SImode, regno + 1200)));
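/* E.g. on a big-endian SPE target, 64-bit r5 is described as the pair
   (reg 1205, reg 5); the invented 1200-based number names the upper
   32-bit half, and rs6000_dbx_register_number passes such numbers
   through to the unwinder unchanged.  */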
22715 /* Fill in sizes for SPE register high parts in the table used by the unwinder. */
22717 static void
22718 rs6000_init_dwarf_reg_sizes_extra (tree address)
22720 if (TARGET_SPE)
22722 int i;
22723 enum machine_mode mode = TYPE_MODE (char_type_node);
22724 rtx addr = expand_expr (address, NULL_RTX, VOIDmode, 0);
22725 rtx mem = gen_rtx_MEM (BLKmode, addr);
22726 rtx value = gen_int_mode (4, mode);
22728 for (i = 1201; i < 1232; i++)
22730 int column = DWARF_REG_TO_UNWIND_COLUMN (i);
22731 HOST_WIDE_INT offset
22732 = DWARF_FRAME_REGNUM (column) * GET_MODE_SIZE (mode);
22734 emit_move_insn (adjust_address (mem, mode, offset), value);
22739 /* Map internal gcc register numbers to DWARF2 register numbers. */
22741 unsigned int
22742 rs6000_dbx_register_number (unsigned int regno)
22744 if (regno <= 63 || write_symbols != DWARF2_DEBUG)
22745 return regno;
22746 if (regno == MQ_REGNO)
22747 return 100;
22748 if (regno == LR_REGNO)
22749 return 108;
22750 if (regno == CTR_REGNO)
22751 return 109;
22752 if (CR_REGNO_P (regno))
22753 return regno - CR0_REGNO + 86;
22754 if (regno == XER_REGNO)
22755 return 101;
22756 if (ALTIVEC_REGNO_P (regno))
22757 return regno - FIRST_ALTIVEC_REGNO + 1124;
22758 if (regno == VRSAVE_REGNO)
22759 return 356;
22760 if (regno == VSCR_REGNO)
22761 return 67;
22762 if (regno == SPE_ACC_REGNO)
22763 return 99;
22764 if (regno == SPEFSCR_REGNO)
22765 return 612;
22766 /* SPE high reg number. We get these values of regno from
22767 rs6000_dwarf_register_span. */
22768 gcc_assert (regno >= 1200 && regno < 1232);
22769 return regno;
22772 /* Target hook for eh_return_filter_mode. */
22773 static enum machine_mode
22774 rs6000_eh_return_filter_mode (void)
22776 return TARGET_32BIT ? SImode : word_mode;
22779 /* Target hook for scalar_mode_supported_p. */
22780 static bool
22781 rs6000_scalar_mode_supported_p (enum machine_mode mode)
22783 if (DECIMAL_FLOAT_MODE_P (mode))
22784 return true;
22785 else
22786 return default_scalar_mode_supported_p (mode);
22789 /* Target hook for vector_mode_supported_p. */
22790 static bool
22791 rs6000_vector_mode_supported_p (enum machine_mode mode)
22794 if (TARGET_PAIRED_FLOAT && PAIRED_VECTOR_MODE (mode))
22795 return true;
22797 if (TARGET_SPE && SPE_VECTOR_MODE (mode))
22798 return true;
22800 else if (TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
22801 return true;
22803 else
22804 return false;
22807 /* Target hook for invalid_arg_for_unprototyped_fn. */
22808 static const char *
22809 invalid_arg_for_unprototyped_fn (const_tree typelist, const_tree funcdecl, const_tree val)
22811 return (!rs6000_darwin64_abi
22812 && typelist == 0
22813 && TREE_CODE (TREE_TYPE (val)) == VECTOR_TYPE
22814 && (funcdecl == NULL_TREE
22815 || (TREE_CODE (funcdecl) == FUNCTION_DECL
22816 && DECL_BUILT_IN_CLASS (funcdecl) != BUILT_IN_MD)))
22817 ? N_("AltiVec argument passed to unprototyped function")
22818 : NULL;
22821 /* For TARGET_SECURE_PLT 32-bit PIC code we can avoid the PIC register
22822 setup by calling the hidden function __stack_chk_fail_local instead
22823 of calling __stack_chk_fail directly. Otherwise it is better to call
22824 __stack_chk_fail directly. */
22826 static tree
22827 rs6000_stack_protect_fail (void)
22829 return (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
22830 ? default_hidden_stack_protect_fail ()
22831 : default_external_stack_protect_fail ();
22834 void
22835 rs6000_final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
22836 int num_operands ATTRIBUTE_UNUSED)
22838 if (rs6000_warn_cell_microcode)
22840 const char *temp;
22841 int insn_code_number = recog_memoized (insn);
22842 location_t location = locator_location (INSN_LOCATOR (insn));
22844 /* Punt on insns we cannot recognize. */
22845 if (insn_code_number < 0)
22846 return;
22848 temp = get_insn_template (insn_code_number, insn);
22850 if (get_attr_cell_micro (insn) == CELL_MICRO_ALWAYS)
22851 warning_at (location, OPT_mwarn_cell_microcode,
22852 "emitting microcode insn %s\t[%s] #%d",
22853 temp, insn_data[INSN_CODE (insn)].name, INSN_UID (insn));
22854 else if (get_attr_cell_micro (insn) == CELL_MICRO_CONDITIONAL)
22855 warning_at (location, OPT_mwarn_cell_microcode,
22856 "emitting conditional microcode insn %s\t[%s] #%d",
22857 temp, insn_data[INSN_CODE (insn)].name, INSN_UID (insn));
22861 #include "gt-rs6000.h"