PR/56490
[official-gcc.git] / gcc / config / tilegx / tilegx.c
blob98f53f1993e922b95d66f0e2ed4dc791827ceb0f
1 /* Subroutines used for code generation on the Tilera TILE-Gx.
2 Copyright (C) 2011-2013 Free Software Foundation, Inc.
3 Contributed by Walter Lee (walt@tilera.com)
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published
9 by the Free Software Foundation; either version 3, or (at your
10 option) any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "rtl.h"
26 #include "regs.h"
27 #include "insn-config.h"
28 #include "output.h"
29 #include "insn-attr.h"
30 #include "recog.h"
31 #include "expr.h"
32 #include "langhooks.h"
33 #include "optabs.h"
34 #include "sched-int.h"
35 #include "tm_p.h"
36 #include "tm-constrs.h"
37 #include "target.h"
38 #include "target-def.h"
39 #include "function.h"
40 #include "dwarf2.h"
41 #include "timevar.h"
42 #include "gimple.h"
43 #include "cfgloop.h"
44 #include "tilegx-builtins.h"
45 #include "tilegx-multiply.h"
46 #include "diagnostic.h"
48 /* SYMBOL_REF for GOT */
49 static GTY(()) rtx g_got_symbol = NULL;
51 /* In case of a POST_INC or POST_DEC memory reference, we must report
52 the mode of the memory reference from TARGET_PRINT_OPERAND to
53 TARGET_PRINT_OPERAND_ADDRESS. */
54 static enum machine_mode output_memory_reference_mode;
56 /* Report whether we're printing out the first address fragment of a
57 POST_INC or POST_DEC memory reference, from TARGET_PRINT_OPERAND to
58 TARGET_PRINT_OPERAND_ADDRESS. */
59 static bool output_memory_autoinc_first;
63 /* Option handling */
65 /* Implement TARGET_OPTION_OVERRIDE. */
66 static void
67 tilegx_option_override (void)
69 if (global_options_set.x_tilegx_cmodel)
71 switch (tilegx_cmodel)
73 case CM_SMALL:
74 case CM_SMALL_PIC:
75 if (flag_pic)
76 tilegx_cmodel = CM_SMALL_PIC;
77 break;
79 case CM_LARGE:
80 case CM_LARGE_PIC:
81 if (flag_pic)
82 tilegx_cmodel = CM_LARGE_PIC;
83 break;
85 default:
86 gcc_unreachable ();
89 else
90 tilegx_cmodel = flag_pic ? CM_SMALL_PIC : CM_SMALL;
92 /* When modulo scheduling is enabled, we still rely on regular
93 scheduler for bundling. */
94 if (flag_modulo_sched)
95 flag_resched_modulo_sched = 1;
100 /* Implement TARGET_SCALAR_MODE_SUPPORTED_P. */
101 static bool
102 tilegx_scalar_mode_supported_p (enum machine_mode mode)
104 switch (mode)
106 case QImode:
107 case HImode:
108 case SImode:
109 case DImode:
110 case TImode:
111 return true;
113 case SFmode:
114 case DFmode:
115 return true;
117 default:
118 return false;
123 /* Implement TARGET_VECTOR_MODE_SUPPORTED_P. */
124 static bool
125 tilegx_vector_mode_supported_p (enum machine_mode mode)
127 return mode == V8QImode || mode == V4HImode || mode == V2SImode;
131 /* Implement TARGET_CANNOT_FORCE_CONST_MEM. */
132 static bool
133 tilegx_cannot_force_const_mem (enum machine_mode mode ATTRIBUTE_UNUSED,
134 rtx x ATTRIBUTE_UNUSED)
136 return true;
140 /* Implement TARGET_FUNCTION_OK_FOR_SIBCALL. */
141 static bool
142 tilegx_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
144 return (tilegx_cmodel != CM_LARGE && tilegx_cmodel != CM_LARGE_PIC
145 && (decl != NULL));
149 /* Implement TARGET_PASS_BY_REFERENCE. Variable sized types are
150 passed by reference. */
151 static bool
152 tilegx_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
153 enum machine_mode mode ATTRIBUTE_UNUSED,
154 const_tree type, bool named ATTRIBUTE_UNUSED)
156 return (type && TYPE_SIZE (type)
157 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST);
161 /* Implement TARGET_RETURN_IN_MEMORY. */
162 static bool
163 tilegx_return_in_memory (const_tree type, const_tree fndecl ATTRIBUTE_UNUSED)
165 return !IN_RANGE (int_size_in_bytes (type),
166 0, TILEGX_NUM_RETURN_REGS * UNITS_PER_WORD);
170 /* Implement TARGET_MODE_REP_EXTENDED. */
171 static int
172 tilegx_mode_rep_extended (enum machine_mode mode, enum machine_mode mode_rep)
174 /* SImode register values are sign-extended to DImode. */
175 if (mode == SImode && mode_rep == DImode)
176 return SIGN_EXTEND;
178 return UNKNOWN;
182 /* Implement TARGET_FUNCTION_ARG_BOUNDARY. */
183 static unsigned int
184 tilegx_function_arg_boundary (enum machine_mode mode, const_tree type)
186 unsigned int alignment;
188 alignment = type ? TYPE_ALIGN (type) : GET_MODE_ALIGNMENT (mode);
189 if (alignment < PARM_BOUNDARY)
190 alignment = PARM_BOUNDARY;
191 if (alignment > STACK_BOUNDARY)
192 alignment = STACK_BOUNDARY;
193 return alignment;
197 /* Implement TARGET_FUNCTION_ARG. */
198 static rtx
199 tilegx_function_arg (cumulative_args_t cum_v,
200 enum machine_mode mode,
201 const_tree type, bool named ATTRIBUTE_UNUSED)
203 CUMULATIVE_ARGS cum = *get_cumulative_args (cum_v);
204 int byte_size = ((mode == BLKmode)
205 ? int_size_in_bytes (type) : GET_MODE_SIZE (mode));
207 if (cum >= TILEGX_NUM_ARG_REGS)
208 return NULL_RTX;
210 /* The ABI does not allow parameters to be passed partially in reg
211 and partially in stack. */
212 if ((cum + (byte_size + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
213 > TILEGX_NUM_ARG_REGS)
214 return NULL_RTX;
216 return gen_rtx_REG (mode, cum);
220 /* Implement TARGET_FUNCTION_ARG_ADVANCE. */
221 static void
222 tilegx_function_arg_advance (cumulative_args_t cum_v,
223 enum machine_mode mode,
224 const_tree type, bool named ATTRIBUTE_UNUSED)
226 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
228 int byte_size = ((mode == BLKmode)
229 ? int_size_in_bytes (type) : GET_MODE_SIZE (mode));
230 int word_size = (byte_size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
232 /* If the current argument does not fit in the pretend_args space,
233 skip over it. */
234 if (*cum < TILEGX_NUM_ARG_REGS
235 && *cum + word_size > TILEGX_NUM_ARG_REGS)
236 *cum = TILEGX_NUM_ARG_REGS;
238 *cum += word_size;
242 /* Implement TARGET_FUNCTION_VALUE. */
243 static rtx
244 tilegx_function_value (const_tree valtype, const_tree fn_decl_or_type,
245 bool outgoing ATTRIBUTE_UNUSED)
247 enum machine_mode mode;
248 int unsigned_p;
250 mode = TYPE_MODE (valtype);
251 unsigned_p = TYPE_UNSIGNED (valtype);
253 mode = promote_function_mode (valtype, mode, &unsigned_p,
254 fn_decl_or_type, 1);
256 return gen_rtx_REG (mode, 0);
260 /* Implement TARGET_LIBCALL_VALUE. */
261 static rtx
262 tilegx_libcall_value (enum machine_mode mode,
263 const_rtx fun ATTRIBUTE_UNUSED)
265 return gen_rtx_REG (mode, 0);
269 /* Implement FUNCTION_VALUE_REGNO_P. */
270 static bool
271 tilegx_function_value_regno_p (const unsigned int regno)
273 return regno < TILEGX_NUM_RETURN_REGS;
277 /* Implement TARGET_BUILD_BUILTIN_VA_LIST. */
278 static tree
279 tilegx_build_builtin_va_list (void)
281 tree f_args, f_skip, record, type_decl;
282 bool owp;
284 record = lang_hooks.types.make_type (RECORD_TYPE);
286 type_decl = build_decl (BUILTINS_LOCATION, TYPE_DECL,
287 get_identifier ("__va_list_tag"), record);
289 f_args = build_decl (BUILTINS_LOCATION, FIELD_DECL,
290 get_identifier ("__args"), ptr_type_node);
291 f_skip = build_decl (BUILTINS_LOCATION, FIELD_DECL,
292 get_identifier ("__skip"), ptr_type_node);
294 DECL_FIELD_CONTEXT (f_args) = record;
296 DECL_FIELD_CONTEXT (f_skip) = record;
298 TREE_CHAIN (record) = type_decl;
299 TYPE_NAME (record) = type_decl;
300 TYPE_FIELDS (record) = f_args;
301 TREE_CHAIN (f_args) = f_skip;
303 /* We know this is being padded and we want it too. It is an
304 internal type so hide the warnings from the user. */
305 owp = warn_padded;
306 warn_padded = false;
308 layout_type (record);
310 warn_padded = owp;
312 /* The correct type is an array type of one element. */
313 return record;
317 /* Implement TARGET_EXPAND_BUILTIN_VA_START. */
318 static void
319 tilegx_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
321 tree f_args, f_skip;
322 tree args, skip, t;
324 f_args = TYPE_FIELDS (TREE_TYPE (valist));
325 f_skip = TREE_CHAIN (f_args);
327 args =
328 build3 (COMPONENT_REF, TREE_TYPE (f_args), valist, f_args, NULL_TREE);
329 skip =
330 build3 (COMPONENT_REF, TREE_TYPE (f_skip), valist, f_skip, NULL_TREE);
332 /* Find the __args area. */
333 t = make_tree (TREE_TYPE (args), virtual_incoming_args_rtx);
334 t = fold_build_pointer_plus_hwi (t,
335 UNITS_PER_WORD *
336 (crtl->args.info - TILEGX_NUM_ARG_REGS));
338 if (crtl->args.pretend_args_size > 0)
339 t = fold_build_pointer_plus_hwi (t, -STACK_POINTER_OFFSET);
341 t = build2 (MODIFY_EXPR, TREE_TYPE (args), args, t);
342 TREE_SIDE_EFFECTS (t) = 1;
343 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
345 /* Find the __skip area. */
346 t = make_tree (TREE_TYPE (skip), virtual_incoming_args_rtx);
347 t = fold_build_pointer_plus_hwi (t, -STACK_POINTER_OFFSET);
348 t = build2 (MODIFY_EXPR, TREE_TYPE (skip), skip, t);
349 TREE_SIDE_EFFECTS (t) = 1;
350 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
354 /* Implement TARGET_SETUP_INCOMING_VARARGS. */
355 static void
356 tilegx_setup_incoming_varargs (cumulative_args_t cum,
357 enum machine_mode mode,
358 tree type, int *pretend_args, int no_rtl)
360 CUMULATIVE_ARGS local_cum = *get_cumulative_args (cum);
361 int first_reg;
363 /* The caller has advanced CUM up to, but not beyond, the last named
364 argument. Advance a local copy of CUM past the last "real" named
365 argument, to find out how many registers are left over. */
366 targetm.calls.function_arg_advance (pack_cumulative_args (&local_cum),
367 mode, type, true);
368 first_reg = local_cum;
370 if (local_cum < TILEGX_NUM_ARG_REGS)
372 *pretend_args = UNITS_PER_WORD * (TILEGX_NUM_ARG_REGS - first_reg);
374 if (!no_rtl)
376 alias_set_type set = get_varargs_alias_set ();
377 rtx tmp =
378 gen_rtx_MEM (BLKmode, plus_constant (Pmode,
379 virtual_incoming_args_rtx,
380 -STACK_POINTER_OFFSET -
381 UNITS_PER_WORD *
382 (TILEGX_NUM_ARG_REGS -
383 first_reg)));
384 MEM_NOTRAP_P (tmp) = 1;
385 set_mem_alias_set (tmp, set);
386 move_block_from_reg (first_reg, tmp,
387 TILEGX_NUM_ARG_REGS - first_reg);
390 else
391 *pretend_args = 0;
395 /* Implement TARGET_GIMPLIFY_VA_ARG_EXPR. Gimplify va_arg by updating
396 the va_list structure VALIST as required to retrieve an argument of
397 type TYPE, and returning that argument.
399 ret = va_arg(VALIST, TYPE);
401 generates code equivalent to:
403 paddedsize = (sizeof(TYPE) + 3) & -4;
404 if ( (VALIST.__args + paddedsize > VALIST.__skip)
405 & (VALIST.__args <= VALIST.__skip))
406 addr = VALIST.__skip + STACK_POINTER_OFFSET;
407 else
408 addr = VALIST.__args;
409 VALIST.__args = addr + paddedsize;
410 ret = *(TYPE *)addr;
412 static tree
413 tilegx_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
414 gimple_seq *post_p ATTRIBUTE_UNUSED)
416 tree f_args, f_skip;
417 tree args, skip;
418 HOST_WIDE_INT size, rsize;
419 tree addr, tmp;
420 bool pass_by_reference_p;
422 f_args = TYPE_FIELDS (va_list_type_node);
423 f_skip = TREE_CHAIN (f_args);
425 args =
426 build3 (COMPONENT_REF, TREE_TYPE (f_args), valist, f_args, NULL_TREE);
427 skip =
428 build3 (COMPONENT_REF, TREE_TYPE (f_skip), valist, f_skip, NULL_TREE);
430 addr = create_tmp_var (ptr_type_node, "va_arg");
432 /* If an object is dynamically sized, a pointer to it is passed
433 instead of the object itself. */
434 pass_by_reference_p = pass_by_reference (NULL, TYPE_MODE (type), type,
435 false);
437 if (pass_by_reference_p)
438 type = build_pointer_type (type);
440 size = int_size_in_bytes (type);
441 rsize = ((size + UNITS_PER_WORD - 1) / UNITS_PER_WORD) * UNITS_PER_WORD;
443 /* Assert alignment assumption. */
444 gcc_assert (STACK_BOUNDARY == PARM_BOUNDARY);
446 /* Build conditional expression to calculate addr. The expression
447 will be gimplified later. */
448 tmp = fold_build_pointer_plus_hwi (unshare_expr (args), rsize);
449 tmp = build2 (TRUTH_AND_EXPR, boolean_type_node,
450 build2 (GT_EXPR, boolean_type_node, tmp, unshare_expr (skip)),
451 build2 (LE_EXPR, boolean_type_node, unshare_expr (args),
452 unshare_expr (skip)));
454 tmp = build3 (COND_EXPR, ptr_type_node, tmp,
455 build2 (POINTER_PLUS_EXPR, ptr_type_node, unshare_expr (skip),
456 size_int (STACK_POINTER_OFFSET)),
457 unshare_expr (args));
459 gimplify_assign (addr, tmp, pre_p);
461 /* Update VALIST.__args. */
462 tmp = fold_build_pointer_plus_hwi (addr, rsize);
463 gimplify_assign (unshare_expr (args), tmp, pre_p);
465 addr = fold_convert (build_pointer_type (type), addr);
467 if (pass_by_reference_p)
468 addr = build_va_arg_indirect_ref (addr);
470 return build_va_arg_indirect_ref (addr);
475 /* Implement TARGET_RTX_COSTS. */
476 static bool
477 tilegx_rtx_costs (rtx x, int code, int outer_code, int opno, int *total,
478 bool speed)
480 switch (code)
482 case CONST_INT:
483 /* If this is an 8-bit constant, return zero since it can be
484 used nearly anywhere with no cost. If it is a valid operand
485 for an ADD or AND, likewise return 0 if we know it will be
486 used in that context. Otherwise, return 2 since it might be
487 used there later. All other constants take at least two
488 insns. */
489 if (satisfies_constraint_I (x))
491 *total = 0;
492 return true;
494 else if (outer_code == PLUS && add_operand (x, VOIDmode))
496 /* Slightly penalize large constants even though we can add
497 them in one instruction, because it forces the use of
498 2-wide bundling mode. */
499 *total = 1;
500 return true;
502 else if (move_operand (x, SImode))
504 /* We can materialize in one move. */
505 *total = COSTS_N_INSNS (1);
506 return true;
508 else
510 /* We can materialize in two moves. */
511 *total = COSTS_N_INSNS (2);
512 return true;
515 return false;
517 case CONST:
518 case LABEL_REF:
519 case SYMBOL_REF:
520 *total = COSTS_N_INSNS (2);
521 return true;
523 case CONST_DOUBLE:
524 *total = COSTS_N_INSNS (4);
525 return true;
527 case HIGH:
528 *total = 0;
529 return true;
531 case MEM:
532 /* If outer-code was a sign or zero extension, a cost of
533 COSTS_N_INSNS (1) was already added in, so account for
534 that. */
535 if (outer_code == ZERO_EXTEND || outer_code == SIGN_EXTEND)
536 *total = COSTS_N_INSNS (1);
537 else
538 *total = COSTS_N_INSNS (2);
539 return true;
541 case PLUS:
542 /* Convey that shl[123]add are efficient. */
543 if (GET_CODE (XEXP (x, 0)) == MULT
544 && cint_248_operand (XEXP (XEXP (x, 0), 1), VOIDmode))
546 *total = (rtx_cost (XEXP (XEXP (x, 0), 0),
547 (enum rtx_code) outer_code, opno, speed)
548 + rtx_cost (XEXP (x, 1),
549 (enum rtx_code) outer_code, opno, speed)
550 + COSTS_N_INSNS (1));
551 return true;
553 return false;
555 case MULT:
556 *total = COSTS_N_INSNS (2);
557 return false;
559 case DIV:
560 case UDIV:
561 case MOD:
562 case UMOD:
563 /* These are handled by software and are very expensive. */
564 *total = COSTS_N_INSNS (100);
565 return false;
567 case UNSPEC:
568 case UNSPEC_VOLATILE:
570 int num = XINT (x, 1);
572 if (num <= TILEGX_LAST_LATENCY_1_INSN)
573 *total = COSTS_N_INSNS (1);
574 else if (num <= TILEGX_LAST_LATENCY_2_INSN)
575 *total = COSTS_N_INSNS (2);
576 else if (num > TILEGX_LAST_LATENCY_INSN)
578 if (num == UNSPEC_NON_TEMPORAL)
580 /* These are basically loads. */
581 if (outer_code == ZERO_EXTEND || outer_code == SIGN_EXTEND)
582 *total = COSTS_N_INSNS (1);
583 else
584 *total = COSTS_N_INSNS (2);
586 else
588 if (outer_code == PLUS)
589 *total = 0;
590 else
591 *total = COSTS_N_INSNS (1);
594 else
596 switch (num)
598 case UNSPEC_BLOCKAGE:
599 case UNSPEC_NETWORK_BARRIER:
600 case UNSPEC_ATOMIC:
601 *total = 0;
602 break;
604 case UNSPEC_LNK_AND_LABEL:
605 case UNSPEC_MF:
606 case UNSPEC_MOV_PCREL_STEP3:
607 case UNSPEC_NETWORK_RECEIVE:
608 case UNSPEC_NETWORK_SEND:
609 case UNSPEC_SPR_MOVE:
610 case UNSPEC_TLS_GD_ADD:
611 *total = COSTS_N_INSNS (1);
612 break;
614 case UNSPEC_TLS_IE_LOAD:
615 case UNSPEC_XCHG:
616 *total = COSTS_N_INSNS (2);
617 break;
619 case UNSPEC_SP_SET:
620 *total = COSTS_N_INSNS (3);
621 break;
623 case UNSPEC_SP_TEST:
624 *total = COSTS_N_INSNS (4);
625 break;
627 case UNSPEC_CMPXCHG:
628 case UNSPEC_INSN_CMPEXCH:
629 case UNSPEC_LATENCY_L2:
630 *total = COSTS_N_INSNS (11);
631 break;
633 case UNSPEC_TLS_GD_CALL:
634 *total = COSTS_N_INSNS (30);
635 break;
637 case UNSPEC_LATENCY_MISS:
638 *total = COSTS_N_INSNS (80);
639 break;
641 default:
642 *total = COSTS_N_INSNS (1);
645 return true;
648 default:
649 return false;
655 /* Rtl lowering. */
657 /* Create a temporary variable to hold a partial result, to enable
658 CSE. */
659 static rtx
660 create_temp_reg_if_possible (enum machine_mode mode, rtx default_reg)
662 return can_create_pseudo_p () ? gen_reg_rtx (mode) : default_reg;
/* Allocate zero-initialized per-function machine data; installed as
   init_machine_status by tilegx_init_expanders.  */
static struct machine_function *
tilegx_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}
674 /* Do anything needed before RTL is emitted for each function. */
675 void
676 tilegx_init_expanders (void)
678 /* Arrange to initialize and mark the machine per-function
679 status. */
680 init_machine_status = tilegx_init_machine_status;
682 if (cfun && cfun->machine && flag_pic)
684 static int label_num = 0;
686 char text_label_name[32];
688 struct machine_function *machine = cfun->machine;
690 ASM_GENERATE_INTERNAL_LABEL (text_label_name, "L_PICLNK", label_num++);
692 machine->text_label_symbol =
693 gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (text_label_name));
695 machine->text_label_rtx =
696 gen_rtx_REG (Pmode, TILEGX_PIC_TEXT_LABEL_REGNUM);
698 machine->got_rtx = gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
700 machine->calls_tls_get_addr = false;
705 /* Implement TARGET_SHIFT_TRUNCATION_MASK. DImode shifts use the mode
706 matching insns and therefore guarantee that the shift count is
707 modulo 64. SImode shifts sometimes use the 64 bit version so do
708 not hold such guarantee. */
709 static unsigned HOST_WIDE_INT
710 tilegx_shift_truncation_mask (enum machine_mode mode)
712 return mode == DImode ? 63 : 0;
716 /* Implement TARGET_INIT_LIBFUNCS. */
717 static void
718 tilegx_init_libfuncs (void)
720 /* We need to explicitly generate these libfunc's to support
721 conversion of divide by constant to multiply (the divide stubs in
722 tilegx.md exist also for this reason). Normally we'd expect gcc
723 to lazily generate them when they are needed, but for some reason
724 it's set up to only generate them if the mode is the word
725 mode. */
726 set_optab_libfunc (sdiv_optab, SImode, "__divsi3");
727 set_optab_libfunc (udiv_optab, SImode, "__udivsi3");
728 set_optab_libfunc (smod_optab, SImode, "__modsi3");
729 set_optab_libfunc (umod_optab, SImode, "__umodsi3");
733 /* Return true if X contains a thread-local symbol. */
734 static bool
735 tilegx_tls_referenced_p (rtx x)
737 if (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == PLUS)
738 x = XEXP (XEXP (x, 0), 0);
740 if (GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (x))
741 return true;
743 /* That's all we handle in tilegx_legitimize_tls_address for
744 now. */
745 return false;
749 /* Return true if X requires a scratch register. It is given that
750 flag_pic is on and that X satisfies CONSTANT_P. */
751 static int
752 tilegx_pic_address_needs_scratch (rtx x)
754 if (GET_CODE (x) == CONST
755 && GET_CODE (XEXP (x, 0)) == PLUS
756 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
757 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
758 && (CONST_INT_P (XEXP (XEXP (x, 0), 1))))
759 return true;
761 return false;
765 /* Implement TARGET_LEGITIMATE_CONSTANT_P. This is all constants for
766 which we are willing to load the value into a register via a move
767 pattern. TLS cannot be treated as a constant because it can
768 include a function call. */
769 static bool
770 tilegx_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
772 switch (GET_CODE (x))
774 case CONST:
775 case SYMBOL_REF:
776 return !tilegx_tls_referenced_p (x);
778 default:
779 return true;
784 /* Return true if the constant value X is a legitimate general operand
785 when generating PIC code. It is given that flag_pic is on and that
786 X satisfies CONSTANT_P. */
787 bool
788 tilegx_legitimate_pic_operand_p (rtx x)
790 if (tilegx_pic_address_needs_scratch (x))
791 return false;
793 if (tilegx_tls_referenced_p (x))
794 return false;
796 return true;
800 /* Return true if the rtx X can be used as an address operand. */
801 static bool
802 tilegx_legitimate_address_p (enum machine_mode ARG_UNUSED (mode), rtx x,
803 bool strict)
805 if (GET_CODE (x) == SUBREG)
806 x = SUBREG_REG (x);
808 switch (GET_CODE (x))
810 case POST_INC:
811 case POST_DEC:
812 if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
813 return false;
815 x = XEXP (x, 0);
816 break;
818 case POST_MODIFY:
819 if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
820 return false;
822 if (GET_CODE (XEXP (x, 1)) != PLUS)
823 return false;
825 if (!rtx_equal_p (XEXP (x, 0), XEXP (XEXP (x, 1), 0)))
826 return false;
828 if (!satisfies_constraint_I (XEXP (XEXP (x, 1), 1)))
829 return false;
831 x = XEXP (x, 0);
832 break;
834 case REG:
835 break;
837 default:
838 return false;
841 /* Check if x is a valid reg. */
842 if (!REG_P (x))
843 return false;
845 if (strict)
846 return REGNO_OK_FOR_BASE_P (REGNO (x));
847 else
848 return true;
852 /* Return the rtx containing SYMBOL_REF to the text label. */
853 static rtx
854 tilegx_text_label_symbol (void)
856 return cfun->machine->text_label_symbol;
860 /* Return the register storing the value of the text label. */
861 static rtx
862 tilegx_text_label_rtx (void)
864 return cfun->machine->text_label_rtx;
868 /* Return the register storing the value of the global offset
869 table. */
870 static rtx
871 tilegx_got_rtx (void)
873 return cfun->machine->got_rtx;
877 /* Return the SYMBOL_REF for _GLOBAL_OFFSET_TABLE_. */
878 static rtx
879 tilegx_got_symbol (void)
881 if (g_got_symbol == NULL)
882 g_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
884 return g_got_symbol;
888 /* Return a reference to the got to be used by tls references. */
889 static rtx
890 tilegx_tls_got (void)
892 rtx temp;
893 if (flag_pic)
895 crtl->uses_pic_offset_table = 1;
896 return tilegx_got_rtx ();
899 temp = gen_reg_rtx (Pmode);
900 emit_move_insn (temp, tilegx_got_symbol ());
902 return temp;
906 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
907 this (thread-local) address. */
908 static rtx
909 tilegx_legitimize_tls_address (rtx addr)
911 rtx ret;
913 gcc_assert (can_create_pseudo_p ());
915 if (GET_CODE (addr) == SYMBOL_REF)
916 switch (SYMBOL_REF_TLS_MODEL (addr))
918 case TLS_MODEL_GLOBAL_DYNAMIC:
919 case TLS_MODEL_LOCAL_DYNAMIC:
921 rtx r0, temp, temp2, temp3, got, last;
923 ret = gen_reg_rtx (Pmode);
924 r0 = gen_rtx_REG (Pmode, 0);
925 temp = gen_reg_rtx (Pmode);
926 temp2 = gen_reg_rtx (Pmode);
927 temp3 = gen_reg_rtx (Pmode);
929 got = tilegx_tls_got ();
930 if (TARGET_32BIT)
932 emit_insn (gen_mov_tls_gd_step1_32bit (temp, addr));
933 emit_insn (gen_mov_tls_gd_step2_32bit (temp2, temp, addr));
934 emit_insn (gen_tls_add_32bit (temp2, got, temp2, addr));
936 else
938 emit_insn (gen_mov_tls_gd_step1 (temp, addr));
939 emit_insn (gen_mov_tls_gd_step2 (temp2, temp, addr));
940 emit_insn (gen_tls_add (temp2, got, temp2, addr));
943 emit_move_insn (r0, temp2);
945 if (TARGET_32BIT)
947 emit_insn (gen_tls_gd_call_32bit (addr));
949 else
951 emit_insn (gen_tls_gd_call (addr));
954 emit_move_insn (temp3, r0);
956 if (TARGET_32BIT)
957 last = emit_insn (gen_tls_gd_add_32bit (ret, temp3, addr));
958 else
959 last = emit_insn (gen_tls_gd_add (ret, temp3, addr));
961 set_unique_reg_note (last, REG_EQUAL, copy_rtx (addr));
962 break;
964 case TLS_MODEL_INITIAL_EXEC:
966 rtx temp, temp2, temp3, got, last;
968 ret = gen_reg_rtx (Pmode);
969 temp = gen_reg_rtx (Pmode);
970 temp2 = gen_reg_rtx (Pmode);
971 temp3 = gen_reg_rtx (Pmode);
973 got = tilegx_tls_got ();
974 if (TARGET_32BIT)
976 emit_insn (gen_mov_tls_ie_step1_32bit (temp, addr));
977 emit_insn (gen_mov_tls_ie_step2_32bit (temp2, temp, addr));
978 emit_insn (gen_tls_add_32bit (temp2, got, temp2, addr));
979 emit_insn (gen_tls_ie_load_32bit (temp3, temp2, addr));
981 else
983 emit_insn (gen_mov_tls_ie_step1 (temp, addr));
984 emit_insn (gen_mov_tls_ie_step2 (temp2, temp, addr));
985 emit_insn (gen_tls_add (temp2, got, temp2, addr));
986 emit_insn (gen_tls_ie_load (temp3, temp2, addr));
989 last =
990 emit_move_insn(ret,
991 gen_rtx_PLUS (Pmode,
992 gen_rtx_REG (Pmode,
993 THREAD_POINTER_REGNUM),
994 temp3));
995 set_unique_reg_note (last, REG_EQUAL, copy_rtx (addr));
996 break;
998 case TLS_MODEL_LOCAL_EXEC:
1000 rtx temp, temp2, last;
1002 ret = gen_reg_rtx (Pmode);
1003 temp = gen_reg_rtx (Pmode);
1004 temp2 = gen_reg_rtx (Pmode);
1006 if (TARGET_32BIT)
1008 emit_insn (gen_mov_tls_le_step1_32bit (temp, addr));
1009 emit_insn (gen_mov_tls_le_step2_32bit (temp2, temp, addr));
1011 else
1013 emit_insn (gen_mov_tls_le_step1 (temp, addr));
1014 emit_insn (gen_mov_tls_le_step2 (temp2, temp, addr));
1017 last =
1018 emit_move_insn (ret,
1019 gen_rtx_PLUS (Pmode,
1020 gen_rtx_REG (Pmode,
1021 THREAD_POINTER_REGNUM),
1022 temp2));
1023 set_unique_reg_note (last, REG_EQUAL, copy_rtx (addr));
1024 break;
1026 default:
1027 gcc_unreachable ();
1029 else if (GET_CODE (addr) == CONST)
1031 rtx base, offset;
1033 gcc_assert (GET_CODE (XEXP (addr, 0)) == PLUS);
1035 base = tilegx_legitimize_tls_address (XEXP (XEXP (addr, 0), 0));
1036 offset = XEXP (XEXP (addr, 0), 1);
1038 base = force_operand (base, NULL_RTX);
1039 ret = force_reg (Pmode, gen_rtx_PLUS (Pmode, base, offset));
1041 else
1042 gcc_unreachable ();
1044 return ret;
1048 /* Returns a register that points to ADDR, a symbolic address, by
1049 computing its address relative to tilegx_text_label_symbol. */
1050 void
1051 tilegx_compute_pcrel_address (rtx result, rtx addr)
1053 rtx text_label_symbol = tilegx_text_label_symbol ();
1054 rtx text_label_rtx = tilegx_text_label_rtx ();
1055 rtx temp, temp2, temp3;
1057 temp = create_temp_reg_if_possible (Pmode, result);
1058 temp2 = create_temp_reg_if_possible (Pmode, result);
1060 if (TARGET_32BIT)
1062 emit_insn (gen_mov_pcrel_step1_32bit (temp, addr, text_label_symbol));
1063 emit_insn (gen_mov_pcrel_step2_32bit (temp2, temp, addr,
1064 text_label_symbol));
1065 emit_insn (gen_mov_pcrel_step3_32bit (result, temp2,
1066 text_label_rtx,
1067 addr, text_label_symbol));
1069 else if (tilegx_cmodel == CM_LARGE_PIC)
1071 temp3 = create_temp_reg_if_possible (Pmode, result);
1072 emit_insn (gen_mov_large_pcrel_step1 (temp, addr, text_label_symbol));
1073 emit_insn (gen_mov_large_pcrel_step2 (temp2, temp, addr,
1074 text_label_symbol));
1075 emit_insn (gen_mov_large_pcrel_step3 (temp3, temp2, addr,
1076 text_label_symbol));
1077 emit_insn (gen_mov_large_pcrel_step4 (result, temp3,
1078 text_label_rtx,
1079 addr, text_label_symbol));
1081 else
1083 emit_insn (gen_mov_pcrel_step1 (temp, addr, text_label_symbol));
1084 emit_insn (gen_mov_pcrel_step2 (temp2, temp, addr, text_label_symbol));
1085 emit_insn (gen_mov_pcrel_step3 (result, temp2,
1086 text_label_rtx,
1087 addr, text_label_symbol));
1092 /* Returns a register that points to the plt entry of ADDR, a symbolic
1093 address, by computing its address relative to
1094 tilegx_text_label_symbol. */
1095 void
1096 tilegx_compute_pcrel_plt_address (rtx result, rtx addr)
1098 rtx text_label_symbol = tilegx_text_label_symbol ();
1099 rtx text_label_rtx = tilegx_text_label_rtx ();
1100 rtx temp, temp2, temp3;
1102 temp = create_temp_reg_if_possible (Pmode, result);
1103 temp2 = create_temp_reg_if_possible (Pmode, result);
1105 if (TARGET_32BIT)
1107 emit_insn (gen_mov_plt_pcrel_step1_32bit (temp, addr,
1108 text_label_symbol));
1109 emit_insn (gen_mov_plt_pcrel_step2_32bit (temp2, temp, addr,
1110 text_label_symbol));
1111 emit_move_insn (result, gen_rtx_PLUS (Pmode, temp2, text_label_rtx));
1113 else
1115 temp3 = create_temp_reg_if_possible (Pmode, result);
1117 emit_insn (gen_mov_plt_pcrel_step1 (temp, addr, text_label_symbol));
1118 emit_insn (gen_mov_plt_pcrel_step2 (temp2, temp, addr,
1119 text_label_symbol));
1120 emit_insn (gen_mov_plt_pcrel_step3 (temp3, temp2, addr,
1121 text_label_symbol));
1122 emit_move_insn (result, gen_rtx_PLUS (Pmode, temp3, text_label_rtx));
1127 /* Legitimize PIC addresses. If the address is already
1128 position-independent, we return ORIG. Newly generated
1129 position-independent addresses go into a reg. This is REG if
1130 nonzero, otherwise we allocate register(s) as necessary. */
1131 static rtx
1132 tilegx_legitimize_pic_address (rtx orig,
1133 enum machine_mode mode ATTRIBUTE_UNUSED,
1134 rtx reg)
1136 if (GET_CODE (orig) == SYMBOL_REF)
1138 rtx address, pic_ref;
1140 if (reg == 0)
1142 gcc_assert (can_create_pseudo_p ());
1143 reg = gen_reg_rtx (Pmode);
1146 if (SYMBOL_REF_LOCAL_P (orig))
1148 /* If not during reload, allocate another temp reg here for
1149 loading in the address, so that these instructions can be
1150 optimized properly. */
1151 rtx temp_reg = create_temp_reg_if_possible (Pmode, reg);
1152 tilegx_compute_pcrel_address (temp_reg, orig);
1154 /* Note: this is conservative. We use the text_label but we
1155 don't use the pic_offset_table. However, in some cases
1156 we may need the pic_offset_table (see
1157 tilegx_fixup_pcrel_references). */
1158 crtl->uses_pic_offset_table = 1;
1160 address = temp_reg;
1162 emit_move_insn (reg, address);
1163 return reg;
1165 else
1167 /* If not during reload, allocate another temp reg here for
1168 loading in the address, so that these instructions can be
1169 optimized properly. */
1170 rtx temp_reg = create_temp_reg_if_possible (Pmode, reg);
1172 gcc_assert (flag_pic);
1173 if (flag_pic == 1)
1175 if (TARGET_32BIT)
1177 emit_insn (gen_add_got16_32bit (temp_reg,
1178 tilegx_got_rtx (),
1179 orig));
1181 else
1183 emit_insn (gen_add_got16 (temp_reg,
1184 tilegx_got_rtx (), orig));
1187 else
1189 rtx temp_reg2 = create_temp_reg_if_possible (Pmode, reg);
1190 rtx temp_reg3 = create_temp_reg_if_possible (Pmode, reg);
1191 if (TARGET_32BIT)
1193 emit_insn (gen_mov_got32_step1_32bit (temp_reg3, orig));
1194 emit_insn (gen_mov_got32_step2_32bit
1195 (temp_reg2, temp_reg3, orig));
1197 else
1199 emit_insn (gen_mov_got32_step1 (temp_reg3, orig));
1200 emit_insn (gen_mov_got32_step2 (temp_reg2, temp_reg3,
1201 orig));
1203 emit_move_insn (temp_reg,
1204 gen_rtx_PLUS (Pmode,
1205 tilegx_got_rtx (), temp_reg2));
1208 address = temp_reg;
1210 pic_ref = gen_const_mem (Pmode, address);
1211 crtl->uses_pic_offset_table = 1;
1212 emit_move_insn (reg, pic_ref);
1213 /* The following put a REG_EQUAL note on this insn, so that
1214 it can be optimized by loop. But it causes the label to
1215 be optimized away. */
1216 /* set_unique_reg_note (insn, REG_EQUAL, orig); */
1217 return reg;
1220 else if (GET_CODE (orig) == CONST)
1222 rtx base, offset;
1224 if (GET_CODE (XEXP (orig, 0)) == PLUS
1225 && XEXP (XEXP (orig, 0), 0) == tilegx_got_rtx ())
1226 return orig;
1228 if (reg == 0)
1230 gcc_assert (can_create_pseudo_p ());
1231 reg = gen_reg_rtx (Pmode);
1234 gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
1235 base = tilegx_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
1236 Pmode, reg);
1237 offset = tilegx_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
1238 base == reg ? 0 : reg);
1240 if (CONST_INT_P (offset))
1242 if (can_create_pseudo_p ())
1243 offset = force_reg (Pmode, offset);
1244 else
1245 /* If we reach here, then something is seriously wrong. */
1246 gcc_unreachable ();
1249 if (can_create_pseudo_p ())
1250 return force_reg (Pmode, gen_rtx_PLUS (Pmode, base, offset));
1251 else
1252 gcc_unreachable ();
1254 else if (GET_CODE (orig) == LABEL_REF)
1256 rtx address;
1257 rtx temp_reg;
1259 if (reg == 0)
1261 gcc_assert (can_create_pseudo_p ());
1262 reg = gen_reg_rtx (Pmode);
1265 /* If not during reload, allocate another temp reg here for
1266 loading in the address, so that these instructions can be
1267 optimized properly. */
1268 temp_reg = create_temp_reg_if_possible (Pmode, reg);
1269 tilegx_compute_pcrel_address (temp_reg, orig);
1271 /* Note: this is conservative. We use the text_label but we
1272 don't use the pic_offset_table. */
1273 crtl->uses_pic_offset_table = 1;
1275 address = temp_reg;
1277 emit_move_insn (reg, address);
1279 return reg;
1282 return orig;
1286 /* Implement TARGET_LEGITIMIZE_ADDRESS. */
1287 static rtx
1288 tilegx_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
1289 enum machine_mode mode)
1291 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
1292 && symbolic_operand (x, Pmode) && tilegx_tls_referenced_p (x))
1294 return tilegx_legitimize_tls_address (x);
1296 else if (flag_pic)
1298 return tilegx_legitimize_pic_address (x, mode, 0);
1300 else
1301 return x;
/* Implement TARGET_DELEGITIMIZE_ADDRESS.  Undo the obfuscation done
   by address legitimization: strip the CONST/UNSPEC wrappers used for
   the various relocation operators (hword selectors, pc-relative,
   PLT, GOT and TLS forms) and return the underlying symbol, so that
   debug output and alias analysis see the original reference.  */
static rtx
tilegx_delegitimize_address (rtx x)
{
  x = delegitimize_mem_from_attrs (x);

  if (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == UNSPEC)
    {
      switch (XINT (XEXP (x, 0), 1))
	{
	case UNSPEC_HW0:
	case UNSPEC_HW1:
	case UNSPEC_HW2:
	case UNSPEC_HW3:
	case UNSPEC_HW0_LAST:
	case UNSPEC_HW1_LAST:
	case UNSPEC_HW2_LAST:
	case UNSPEC_HW0_PCREL:
	case UNSPEC_HW1_PCREL:
	case UNSPEC_HW1_LAST_PCREL:
	case UNSPEC_HW2_LAST_PCREL:
	case UNSPEC_HW0_PLT_PCREL:
	case UNSPEC_HW1_PLT_PCREL:
	case UNSPEC_HW1_LAST_PLT_PCREL:
	case UNSPEC_HW2_LAST_PLT_PCREL:
	case UNSPEC_HW0_GOT:
	case UNSPEC_HW0_LAST_GOT:
	case UNSPEC_HW1_LAST_GOT:
	case UNSPEC_HW0_TLS_GD:
	case UNSPEC_HW1_LAST_TLS_GD:
	case UNSPEC_HW0_TLS_IE:
	case UNSPEC_HW1_LAST_TLS_IE:
	case UNSPEC_HW0_TLS_LE:
	case UNSPEC_HW1_LAST_TLS_LE:
	  /* All of these wrap the real operand as element 0 of the
	     UNSPEC vector; unwrap it.  */
	  x = XVECEXP (XEXP (x, 0), 0, 0);
	  break;
	}
    }

  return x;
}
1348 /* Emit code to load the PIC register. */
1349 static void
1350 load_pic_register (bool delay_pic_helper ATTRIBUTE_UNUSED)
1352 int orig_flag_pic = flag_pic;
1354 rtx got_symbol = tilegx_got_symbol ();
1355 rtx text_label_symbol = tilegx_text_label_symbol ();
1356 rtx text_label_rtx = tilegx_text_label_rtx ();
1357 flag_pic = 0;
1359 if (TARGET_32BIT)
1361 emit_insn (gen_insn_lnk_and_label_32bit (text_label_rtx,
1362 text_label_symbol));
1364 else
1366 emit_insn (gen_insn_lnk_and_label (text_label_rtx, text_label_symbol));
1369 tilegx_compute_pcrel_address (tilegx_got_rtx (), got_symbol);
1371 flag_pic = orig_flag_pic;
1373 /* Need to emit this whether or not we obey regdecls, since
1374 setjmp/longjmp can cause life info to screw up. ??? In the case
1375 where we don't obey regdecls, this is not sufficient since we may
1376 not fall out the bottom. */
1377 emit_use (tilegx_got_rtx ());
1381 /* Return the simd variant of the constant NUM of mode MODE, by
1382 replicating it to fill an interger of mode DImode. NUM is first
1383 truncated to fit in MODE. */
1385 tilegx_simd_int (rtx num, enum machine_mode mode)
1387 HOST_WIDE_INT n = 0;
1389 gcc_assert (CONST_INT_P (num));
1391 n = INTVAL (num);
1393 switch (mode)
1395 case QImode:
1396 n = 0x0101010101010101LL * (n & 0x000000FF);
1397 break;
1398 case HImode:
1399 n = 0x0001000100010001LL * (n & 0x0000FFFF);
1400 break;
1401 case SImode:
1402 n = 0x0000000100000001LL * (n & 0xFFFFFFFF);
1403 break;
1404 case DImode:
1405 break;
1406 default:
1407 gcc_unreachable ();
1410 return GEN_INT (n);
1414 /* Returns true iff VAL can be moved into a register in one
1415 instruction. And if it can, it emits the code to move the constant
1416 into DEST_REG.
1418 If THREE_WIDE_ONLY is true, this insists on an instruction that
1419 works in a bundle containing three instructions. */
1420 static bool
1421 expand_set_cint64_one_inst (rtx dest_reg,
1422 HOST_WIDE_INT val, bool three_wide_only)
1424 if (val == trunc_int_for_mode (val, QImode))
1426 /* Success! */
1427 emit_move_insn (dest_reg, GEN_INT (val));
1428 return true;
1430 else if (!three_wide_only)
1432 rtx imm_op = GEN_INT (val);
1434 if (satisfies_constraint_J (imm_op)
1435 || satisfies_constraint_K (imm_op)
1436 || satisfies_constraint_N (imm_op)
1437 || satisfies_constraint_P (imm_op))
1439 emit_move_insn (dest_reg, imm_op);
1440 return true;
1444 return false;
1448 /* Implement DImode rotatert. */
1449 static HOST_WIDE_INT
1450 rotate_right (HOST_WIDE_INT n, int count)
1452 unsigned HOST_WIDE_INT x = n & 0xFFFFFFFFFFFFFFFFULL;
1453 if (count == 0)
1454 return x;
1455 return ((x >> count) | (x << (64 - count))) & 0xFFFFFFFFFFFFFFFFULL;
/* Return true iff n contains exactly one contiguous sequence of 1
   bits, possibly wrapping around from high bits to low bits.  On
   success, optionally store the positions of the lowest and highest
   bits of the run (mod 64) in *FIRST_BIT and *LAST_BIT.  */
bool
tilegx_bitfield_operand_p (HOST_WIDE_INT n, int *first_bit, int *last_bit)
{
  int i;

  if (n == 0)
    return false;

  /* Try every rotation; for a valid bitfield exactly one rotation
     places the run of ones flush against bit 0.  */
  for (i = 0; i < 64; i++)
    {
      unsigned HOST_WIDE_INT x = rotate_right (n, i);
      if (!(x & 1))
	continue;

      /* See if x is a power of two minus one, i.e. only consecutive 1
	 bits starting from bit 0.  */
      if ((x & (x + 1)) == 0)
	{
	  if (first_bit != NULL)
	    *first_bit = i;
	  if (last_bit != NULL)
	    /* x ^ (x >> 1) isolates the highest set bit of the
	       all-ones-from-zero value x, so exact_log2 of it is the
	       run length minus one; add back the rotation, mod 64.  */
	    *last_bit = (i + exact_log2 (x ^ (x >> 1))) & 63;

	  return true;
	}
    }

  return false;
}
/* Create code to move the CONST_INT value in src_val to dest_reg.
   Tries progressively more expensive strategies: a single
   instruction, a one-instruction constant plus one shift/rotate, a
   recursive build exploiting a run of >= 16 aligned zero bits, and
   finally the general moveli + shl16insli chain.  */
static void
expand_set_cint64 (rtx dest_reg, rtx src_val)
{
  HOST_WIDE_INT val;
  int leading_zeroes, trailing_zeroes;
  int three_wide_only;
  int shift, ins_shift, zero_cluster_shift;
  rtx temp, subreg;

  gcc_assert (CONST_INT_P (src_val));
  val = trunc_int_for_mode (INTVAL (src_val), GET_MODE (dest_reg));

  /* See if we can generate the constant in one instruction.  */
  if (expand_set_cint64_one_inst (dest_reg, val, false))
    return;

  /* Force the destination to DImode so we can use DImode instructions
     to create it.  This both allows instructions like rotl, and
     certain efficient 3-wide instructions.  */
  subreg = simplify_gen_subreg (DImode, dest_reg, GET_MODE (dest_reg), 0);
  gcc_assert (subreg != NULL);
  dest_reg = subreg;

  temp = create_temp_reg_if_possible (DImode, dest_reg);

  leading_zeroes = 63 - floor_log2 (val & 0xFFFFFFFFFFFFFFFFULL);
  trailing_zeroes = exact_log2 (val & -val);

  /* First try all three-wide instructions that generate a constant
     (i.e. movei) followed by various shifts and rotates.  If none of
     those work, try various two-wide ways of generating a constant
     followed by various shifts and rotates.  */
  for (three_wide_only = 1; three_wide_only >= 0; three_wide_only--)
    {
      int count;

      if (expand_set_cint64_one_inst (temp, val >> trailing_zeroes,
				      three_wide_only))
	{
	  /* 0xFFFFFFFFFFFFA500 becomes:
	     movei temp, 0xFFFFFFFFFFFFFFA5
	     shli dest, temp, 8  */
	  emit_move_insn (dest_reg,
			  gen_rtx_ASHIFT (DImode, temp,
					  GEN_INT (trailing_zeroes)));
	  return;
	}

      if (expand_set_cint64_one_inst (temp, val << leading_zeroes,
				      three_wide_only))
	{
	  /* 0x7FFFFFFFFFFFFFFF becomes:
	     movei temp, -2
	     shrui dest, temp, 1  */
	  emit_move_insn (dest_reg,
			  gen_rtx_LSHIFTRT (DImode, temp,
					    GEN_INT (leading_zeroes)));
	  return;
	}

      /* Try rotating a one-instruction immediate.  */
      for (count = 1; count < 64; count++)
	{
	  HOST_WIDE_INT r = rotate_right (val, count);
	  if (expand_set_cint64_one_inst (temp, r, three_wide_only))
	    {
	      /* 0xFFFFFFFFFFA5FFFF becomes:
		 movei temp, 0xFFFFFFFFFFFFFFA5
		 rotli dest, temp, 16  */
	      emit_move_insn (dest_reg,
			      gen_rtx_ROTATE (DImode, temp, GEN_INT (count)));
	      return;
	    }
	}
    }

  /* There are two cases here to produce a large constant.
     In the most general case, we do this:

     moveli x, hw3(NUM)
     shl16insli x, x, hw2(NUM)
     shl16insli x, x, hw1(NUM)
     shl16insli x, x, hw0(NUM)

     However, we can sometimes do better.  shl16insli is a poor way to
     insert 16 zero bits, because simply shifting left by 16 has more
     bundling freedom.  So if we see any contiguous aligned sequence
     of 16 or more zero bits (below the highest set bit), it is always
     more efficient to materialize the bits above the zero bits, then
     left shift to put in the zeroes, then insert whatever bits
     remain.  For example, we might end up with:

     movei x, NUM >> (37 + 16)
     shli x, x, 37
     shl16insli x, x, hw0(NUM) */

  zero_cluster_shift = -1;

  for (shift = 0; shift < 48 - leading_zeroes; shift += 16)
    {
      HOST_WIDE_INT x = val >> shift;

      /* Find the least significant group of 16 aligned zero bits.  */
      if ((x & 0xFFFF) == 0x0000)
	{
	  /* Grab any following zero bits as well.  */
	  zero_cluster_shift = exact_log2 (x & -x);
	  shift += zero_cluster_shift;
	  break;
	}
    }

  if (zero_cluster_shift >= 0)
    {
      unsigned HOST_WIDE_INT leftover;

      /* Recursively create the constant above the lowest 16 zero
	 bits.  */
      expand_set_cint64 (temp, GEN_INT (val >> shift));

      /* See if we can easily insert the remaining bits, or if we need
	 to fall through to the more general case.  */
      leftover = val - ((val >> shift) << shift);
      if (leftover == 0)
	{
	  /* A simple left shift is enough.  */
	  emit_move_insn (dest_reg,
			  gen_rtx_ASHIFT (DImode, temp, GEN_INT (shift)));
	  return;
	}
      else if (leftover <= 32767)
	{
	  /* Left shift into position then add in the leftover.  */
	  rtx temp2 = create_temp_reg_if_possible (DImode, temp);
	  emit_move_insn (temp2,
			  gen_rtx_ASHIFT (DImode, temp, GEN_INT (shift)));
	  emit_move_insn (dest_reg,
			  gen_rtx_PLUS (DImode, temp2, GEN_INT (leftover)));
	  return;
	}
      else
	{
	  /* Shift in the batch of >= 16 zeroes we detected earlier.
	     After this, shift will be aligned mod 16 so the final
	     loop can use shl16insli.  */
	  rtx temp2 = create_temp_reg_if_possible (DImode, temp);
	  rtx shift_count_rtx = GEN_INT (zero_cluster_shift);

	  emit_move_insn (temp2,
			  gen_rtx_ASHIFT (DImode, temp, shift_count_rtx));

	  shift -= zero_cluster_shift;
	  temp = temp2;
	}
    }
  else
    {
      /* Set as many high 16-bit blocks as we can with a single
	 instruction.  We'll insert the remaining 16-bit blocks
	 below.  */
      for (shift = 16;; shift += 16)
	{
	  gcc_assert (shift < 64);
	  if (expand_set_cint64_one_inst (temp, val >> shift, false))
	    break;
	}
    }

  /* At this point, temp == val >> shift, shift % 16 == 0, and we
     still need to insert any bits of 'val' below 'shift'.  Those bits
     are guaranteed to not have 16 contiguous zeroes.  */

  gcc_assert ((shift & 15) == 0);

  for (ins_shift = shift - 16; ins_shift >= 0; ins_shift -= 16)
    {
      rtx result;
      HOST_WIDE_INT bits = (val >> ins_shift) & 0xFFFF;
      gcc_assert (bits != 0);

      /* On the last iteration we need to store into dest_reg.  */
      if (ins_shift == 0)
	result = dest_reg;
      else
	result = create_temp_reg_if_possible (DImode, dest_reg);

      emit_insn (gen_insn_shl16insli (result, temp, GEN_INT (bits)));

      temp = result;
    }
}
1686 /* Load OP1, a 64-bit constant, into OP0, a register. We know it
1687 can't be done in one insn when we get here, the move expander
1688 guarantees this. */
1689 void
1690 tilegx_expand_set_const64 (rtx op0, rtx op1)
1692 if (CONST_INT_P (op1))
1694 /* TODO: I don't know if we want to split large constants
1695 now, or wait until later (with a define_split).
1697 Does splitting early help CSE? Does it harm other
1698 optimizations that might fold loads? */
1699 expand_set_cint64 (op0, op1);
1701 else
1703 rtx temp = create_temp_reg_if_possible (Pmode, op0);
1705 if (TARGET_32BIT)
1707 /* Generate the 2-insn sequence to materialize a symbolic
1708 address. */
1709 emit_insn (gen_mov_address_32bit_step1 (temp, op1));
1710 emit_insn (gen_mov_address_32bit_step2 (op0, temp, op1));
1712 else
1714 /* Generate the 3-insn sequence to materialize a symbolic
1715 address. Note that this assumes that virtual addresses
1716 fit in 48 signed bits, which is currently true. */
1717 rtx temp2 = create_temp_reg_if_possible (Pmode, op0);
1718 emit_insn (gen_mov_address_step1 (temp, op1));
1719 emit_insn (gen_mov_address_step2 (temp2, temp, op1));
1720 emit_insn (gen_mov_address_step3 (op0, temp2, op1));
/* Expand a move instruction.  Return true if all work is done,
   false if the caller should fall through to the standard move
   pattern (possibly with operands rewritten in place).  */
bool
tilegx_expand_mov (enum machine_mode mode, rtx *operands)
{
  /* Handle sets of MEM first.  */
  if (MEM_P (operands[0]))
    {
      if (can_create_pseudo_p ())
	operands[0] = validize_mem (operands[0]);

      /* Storing a register (or zero) to memory needs no fixup.  */
      if (reg_or_0_operand (operands[1], mode))
	return false;

      /* Otherwise force the source into a register; during reload we
	 must not create new pseudos, so leave it as-is then.  */
      if (!reload_in_progress)
	operands[1] = force_reg (mode, operands[1]);
    }

  /* Fixup TLS cases.  */
  if (CONSTANT_P (operands[1]) && tilegx_tls_referenced_p (operands[1]))
    {
      operands[1] = tilegx_legitimize_tls_address (operands[1]);
      return false;
    }

  /* Fixup PIC cases.  */
  if (flag_pic && CONSTANT_P (operands[1]))
    {
      if (tilegx_pic_address_needs_scratch (operands[1]))
	operands[1] = tilegx_legitimize_pic_address (operands[1], mode, 0);

      if (symbolic_operand (operands[1], mode))
	{
	  /* During reload reuse the destination as the scratch
	     register, since we cannot allocate a new one.  */
	  operands[1] = tilegx_legitimize_pic_address (operands[1],
						       mode,
						       (reload_in_progress ?
							operands[0] :
							NULL_RTX));
	  return false;
	}
    }

  /* Accept non-constants and valid constants unmodified.  */
  if (!CONSTANT_P (operands[1]) || move_operand (operands[1], mode))
    return false;

  /* Split large integers.  */
  tilegx_expand_set_const64 (operands[0], operands[1]);
  return true;
}
/* Expand unaligned loads.  Load BITSIZE bits starting at BIT_OFFSET
   within MEM into DEST_REG, sign- or zero-extending per SIGN.  Two
   strategies: a pair of byte loads for 16-bit values, otherwise two
   aligned word loads combined with dblalign.  */
void
tilegx_expand_unaligned_load (rtx dest_reg, rtx mem, HOST_WIDE_INT bitsize,
			      HOST_WIDE_INT bit_offset, bool sign)
{
  enum machine_mode mode;
  rtx addr_lo, addr_hi;
  rtx mem_lo, mem_hi, hi;
  rtx mema, wide_result;
  int last_byte_offset;
  HOST_WIDE_INT byte_offset = bit_offset / BITS_PER_UNIT;

  mode = GET_MODE (dest_reg);

  hi = gen_reg_rtx (mode);

  if (bitsize == 2 * BITS_PER_UNIT && (bit_offset % BITS_PER_UNIT) == 0)
    {
      /* When just loading a two byte value, we can load the two bytes
	 individually and combine them efficiently.  */

      mem_lo = adjust_address (mem, QImode, byte_offset);
      mem_hi = adjust_address (mem, QImode, byte_offset + 1);

      if (sign)
	{
	  /* Do a signed load of the second byte and use bfins to set
	     the high bits of the result.  */
	  emit_insn (gen_zero_extendqidi2 (gen_lowpart (DImode, dest_reg),
					   mem_lo));
	  emit_insn (gen_extendqidi2 (gen_lowpart (DImode, hi), mem_hi));
	  emit_insn (gen_insv (gen_lowpart (DImode, dest_reg),
			       GEN_INT (64 - 8), GEN_INT (8),
			       gen_lowpart (DImode, hi)));
	}
      else
	{
	  /* Do two unsigned loads and use v1int_l to interleave
	     them.  */
	  rtx lo = gen_reg_rtx (mode);
	  emit_insn (gen_zero_extendqidi2 (gen_lowpart (DImode, lo),
					   mem_lo));
	  emit_insn (gen_zero_extendqidi2 (gen_lowpart (DImode, hi),
					   mem_hi));
	  emit_insn (gen_insn_v1int_l (gen_lowpart (DImode, dest_reg),
				       gen_lowpart (DImode, hi),
				       gen_lowpart (DImode, lo)));
	}

      return;
    }

  mema = XEXP (mem, 0);

  /* AND addresses cannot be in any alias set, since they may
     implicitly alias surrounding code.  Ideally we'd have some alias
     set that covered all types except those with alignment 8 or
     higher.  */
  addr_lo = force_reg (Pmode, plus_constant (Pmode, mema, byte_offset));
  mem_lo = change_address (mem, mode,
			   gen_rtx_AND (GET_MODE (mema), addr_lo,
					GEN_INT (-8)));
  set_mem_alias_set (mem_lo, 0);

  /* Load the high word at an address that will not fault if the low
     address is aligned and at the very end of a page.  */
  last_byte_offset = (bit_offset + bitsize - 1) / BITS_PER_UNIT;
  addr_hi = force_reg (Pmode, plus_constant (Pmode, mema, last_byte_offset));
  mem_hi = change_address (mem, mode,
			   gen_rtx_AND (GET_MODE (mema), addr_hi,
					GEN_INT (-8)));
  set_mem_alias_set (mem_hi, 0);

  if (bitsize == 64)
    {
      addr_lo = make_safe_from (addr_lo, dest_reg);
      wide_result = dest_reg;
    }
  else
    {
      wide_result = gen_reg_rtx (mode);
    }

  /* Load hi first in case dest_reg is used in mema.  */
  emit_move_insn (hi, mem_hi);
  emit_move_insn (wide_result, mem_lo);

  /* Merge the two aligned loads into the unaligned value; dblalign
     uses the low bits of addr_lo to select the span.  */
  emit_insn (gen_insn_dblalign (gen_lowpart (DImode, wide_result),
				gen_lowpart (DImode, wide_result),
				gen_lowpart (DImode, hi), addr_lo));

  if (bitsize != 64)
    {
      /* For sub-word sizes, extract the requested field (with the
	 correct extension) from the combined word.  */
      rtx extracted =
	extract_bit_field (gen_lowpart (DImode, wide_result),
			   bitsize, bit_offset % BITS_PER_UNIT,
			   !sign, false, gen_lowpart (DImode, dest_reg),
			   DImode, DImode);

      if (extracted != dest_reg)
	emit_move_insn (dest_reg, gen_lowpart (DImode, extracted));
    }
}
1882 /* Expand unaligned stores. */
1883 static void
1884 tilegx_expand_unaligned_store (rtx mem, rtx src, HOST_WIDE_INT bitsize,
1885 HOST_WIDE_INT bit_offset)
1887 HOST_WIDE_INT byte_offset = bit_offset / BITS_PER_UNIT;
1888 HOST_WIDE_INT bytesize = bitsize / BITS_PER_UNIT;
1889 HOST_WIDE_INT shift_amt;
1890 HOST_WIDE_INT i;
1891 rtx mem_addr;
1892 rtx store_val;
1894 for (i = 0, shift_amt = 0; i < bytesize; i++, shift_amt += BITS_PER_UNIT)
1896 mem_addr = adjust_address (mem, QImode, byte_offset + i);
1898 if (shift_amt)
1900 store_val = expand_simple_binop (DImode, LSHIFTRT,
1901 gen_lowpart (DImode, src),
1902 GEN_INT (shift_amt), NULL, 1,
1903 OPTAB_LIB_WIDEN);
1904 store_val = gen_lowpart (QImode, store_val);
1906 else
1908 store_val = gen_lowpart (QImode, src);
1911 emit_move_insn (mem_addr, store_val);
1916 /* Implement the movmisalign patterns. One of the operands is a
1917 memory that is not naturally aligned. Emit instructions to load
1918 it. */
1919 void
1920 tilegx_expand_movmisalign (enum machine_mode mode, rtx *operands)
1922 if (MEM_P (operands[1]))
1924 rtx tmp;
1926 if (register_operand (operands[0], mode))
1927 tmp = operands[0];
1928 else
1929 tmp = gen_reg_rtx (mode);
1931 tilegx_expand_unaligned_load (tmp, operands[1], GET_MODE_BITSIZE (mode),
1932 0, true);
1934 if (tmp != operands[0])
1935 emit_move_insn (operands[0], tmp);
1937 else if (MEM_P (operands[0]))
1939 if (!reg_or_0_operand (operands[1], mode))
1940 operands[1] = force_reg (mode, operands[1]);
1942 tilegx_expand_unaligned_store (operands[0], operands[1],
1943 GET_MODE_BITSIZE (mode), 0);
1945 else
1946 gcc_unreachable ();
1951 /* Implement the allocate_stack pattern (alloca). */
1952 void
1953 tilegx_allocate_stack (rtx op0, rtx op1)
1955 /* Technically the correct way to initialize chain_loc is with
1956 * gen_frame_mem() instead of gen_rtx_MEM(), but gen_frame_mem()
1957 * sets the alias_set to that of a frame reference. Some of our
1958 * tests rely on some unsafe assumption about when the chaining
1959 * update is done, we need to be conservative about reordering the
1960 * chaining instructions.
1962 rtx fp_addr = gen_reg_rtx (Pmode);
1963 rtx fp_value = gen_reg_rtx (Pmode);
1964 rtx fp_loc;
1966 emit_move_insn (fp_addr, gen_rtx_PLUS (Pmode, stack_pointer_rtx,
1967 GEN_INT (UNITS_PER_WORD)));
1969 fp_loc = gen_frame_mem (Pmode, fp_addr);
1971 emit_move_insn (fp_value, fp_loc);
1973 op1 = force_reg (Pmode, op1);
1975 emit_move_insn (stack_pointer_rtx,
1976 gen_rtx_MINUS (Pmode, stack_pointer_rtx, op1));
1978 emit_move_insn (fp_addr, gen_rtx_PLUS (Pmode, stack_pointer_rtx,
1979 GEN_INT (UNITS_PER_WORD)));
1981 fp_loc = gen_frame_mem (Pmode, fp_addr);
1983 emit_move_insn (fp_loc, fp_value);
1985 emit_move_insn (op0, virtual_stack_dynamic_rtx);
1990 /* Multiplies */
1993 /* Returns the insn_code in ENTRY. */
1994 static enum insn_code
1995 tilegx_multiply_get_opcode (const struct tilegx_multiply_insn_seq_entry
1996 *entry)
1998 return tilegx_multiply_insn_seq_decode_opcode[entry->compressed_opcode];
2002 /* Returns the length of the 'op' array. */
2003 static int
2004 tilegx_multiply_get_num_ops (const struct tilegx_multiply_insn_seq *seq)
2006 /* The array either uses all of its allocated slots or is terminated
2007 by a bogus opcode. Either way, the array size is the index of the
2008 last valid opcode plus one. */
2009 int i;
2010 for (i = tilegx_multiply_insn_seq_MAX_OPERATIONS - 1; i >= 0; i--)
2011 if (tilegx_multiply_get_opcode (&seq->op[i]) != CODE_FOR_nothing)
2012 return i + 1;
2014 /* An empty array is not allowed. */
2015 gcc_unreachable ();
/* We precompute a number of expression trees for multiplying by
   constants.  This generates code for such an expression tree by
   walking through the nodes in the tree (which are conveniently
   pre-linearized) and emitting an instruction for each one.
   RESULT receives SRC multiplied by the constant the sequence
   encodes.  */
static void
tilegx_expand_constant_multiply_given_sequence (rtx result, rtx src,
						const struct
						tilegx_multiply_insn_seq *seq)
{
  int i;
  int num_ops;

  /* Keep track of the subexpressions computed so far, so later
     instructions can refer to them.  We seed the array with zero and
     the value being multiplied.  */
  int num_subexprs = 2;
  rtx subexprs[tilegx_multiply_insn_seq_MAX_OPERATIONS + 2];
  subexprs[0] = const0_rtx;
  subexprs[1] = src;

  /* Determine how many instructions we are going to generate.  */
  num_ops = tilegx_multiply_get_num_ops (seq);
  gcc_assert (num_ops > 0
	      && num_ops <= tilegx_multiply_insn_seq_MAX_OPERATIONS);

  for (i = 0; i < num_ops; i++)
    {
      const struct tilegx_multiply_insn_seq_entry *entry = &seq->op[i];

      /* Figure out where to store the output of this instruction.  */
      const bool is_last_op = (i + 1 == num_ops);
      rtx out = is_last_op ? result : gen_reg_rtx (DImode);

      enum insn_code opcode = tilegx_multiply_get_opcode (entry);
      if (opcode == CODE_FOR_ashldi3)
	{
	  /* Handle shift by immediate.  This is a special case because
	     the meaning of the second operand is a constant shift
	     count rather than an operand index.  */

	  /* Make sure the shift count is in range.  Zero should not
	     happen.  */
	  const int shift_count = entry->rhs;
	  gcc_assert (shift_count > 0 && shift_count < 64);

	  /* Emit the actual instruction.  */
	  emit_insn (GEN_FCN (opcode)
		     (out, subexprs[entry->lhs],
		      gen_rtx_CONST_INT (DImode, shift_count)));
	}
      else
	{
	  /* Handle a normal two-operand instruction, such as add or
	     shl1add.  */

	  /* Make sure we are referring to a previously computed
	     subexpression.  */
	  gcc_assert (entry->rhs < num_subexprs);

	  /* Emit the actual instruction.  */
	  emit_insn (GEN_FCN (opcode)
		     (out, subexprs[entry->lhs], subexprs[entry->rhs]));
	}

      /* Record this subexpression for use by later expressions.  */
      subexprs[num_subexprs++] = out;
    }
}
2089 /* bsearch helper function. */
2090 static int
2091 tilegx_compare_multipliers (const void *key, const void *t)
2093 long long delta =
2094 (*(const long long *) key
2095 - ((const struct tilegx_multiply_insn_seq *) t)->multiplier);
2096 return (delta < 0) ? -1 : (delta > 0);
2100 /* Returns the tilegx_multiply_insn_seq for multiplier, or NULL if none
2101 exists. */
2102 static const struct tilegx_multiply_insn_seq *
2103 tilegx_find_multiply_insn_seq_for_constant (long long multiplier)
2105 return ((const struct tilegx_multiply_insn_seq *)
2106 bsearch (&multiplier, tilegx_multiply_insn_seq_table,
2107 tilegx_multiply_insn_seq_table_size,
2108 sizeof tilegx_multiply_insn_seq_table[0],
2109 tilegx_compare_multipliers));
2113 /* Try to a expand constant multiply in DImode by looking it up in a
2114 precompiled table. OP0 is the result operand, OP1 is the source
2115 operand, and MULTIPLIER is the value of the constant. Return true
2116 if it succeeds. */
2117 static bool
2118 tilegx_expand_const_muldi (rtx op0, rtx op1, long long multiplier)
2120 /* See if we have precomputed an efficient way to multiply by this
2121 constant. */
2122 const struct tilegx_multiply_insn_seq *seq =
2123 tilegx_find_multiply_insn_seq_for_constant (multiplier);
2124 if (seq != NULL)
2126 tilegx_expand_constant_multiply_given_sequence (op0, op1, seq);
2127 return true;
2129 else
2130 return false;
2134 /* Expand the muldi pattern. */
2135 bool
2136 tilegx_expand_muldi (rtx op0, rtx op1, rtx op2)
2138 if (CONST_INT_P (op2))
2140 HOST_WIDE_INT n = trunc_int_for_mode (INTVAL (op2), DImode);
2141 return tilegx_expand_const_muldi (op0, op1, n);
2143 return false;
/* Expand a high multiply pattern in DImode.  RESULT, OP1, OP2 are the
   operands, and SIGN is true if it's a signed multiply, and false if
   it's an unsigned multiply.  Builds the high 64 bits of the 128-bit
   product from four 32x32 partial products, propagating the carries
   out of the low half by hand.  */
static void
tilegx_expand_high_multiply (rtx result, rtx op1, rtx op2, bool sign)
{
  rtx tmp0 = gen_reg_rtx (DImode);
  rtx tmp1 = gen_reg_rtx (DImode);
  rtx tmp2 = gen_reg_rtx (DImode);
  rtx tmp3 = gen_reg_rtx (DImode);
  rtx tmp4 = gen_reg_rtx (DImode);
  rtx tmp5 = gen_reg_rtx (DImode);
  rtx tmp6 = gen_reg_rtx (DImode);
  rtx tmp7 = gen_reg_rtx (DImode);
  rtx tmp8 = gen_reg_rtx (DImode);
  rtx tmp9 = gen_reg_rtx (DImode);
  rtx tmp10 = gen_reg_rtx (DImode);
  rtx tmp11 = gen_reg_rtx (DImode);
  rtx tmp12 = gen_reg_rtx (DImode);
  rtx tmp13 = gen_reg_rtx (DImode);
  rtx result_lo = gen_reg_rtx (DImode);

  /* Compute the cross (hi*lo), low (lo*lo) and high (hi*hi) partial
     products, using the signed-high variants when SIGN.  */
  if (sign)
    {
      emit_insn (gen_insn_mul_hs_lu (tmp0, op1, op2));
      emit_insn (gen_insn_mul_hs_lu (tmp1, op2, op1));
      emit_insn (gen_insn_mul_lu_lu (tmp2, op1, op2));
      emit_insn (gen_insn_mul_hs_hs (tmp3, op1, op2));
    }
  else
    {
      emit_insn (gen_insn_mul_hu_lu (tmp0, op1, op2));
      emit_insn (gen_insn_mul_hu_lu (tmp1, op2, op1));
      emit_insn (gen_insn_mul_lu_lu (tmp2, op1, op2));
      emit_insn (gen_insn_mul_hu_hu (tmp3, op1, op2));
    }

  /* Align the cross products with the low half of the result.  */
  emit_move_insn (tmp4, (gen_rtx_ASHIFT (DImode, tmp0, GEN_INT (32))));

  emit_move_insn (tmp5, (gen_rtx_ASHIFT (DImode, tmp1, GEN_INT (32))));

  /* Accumulate the low 64 bits.  */
  emit_move_insn (tmp6, (gen_rtx_PLUS (DImode, tmp4, tmp5)));
  emit_move_insn (result_lo, (gen_rtx_PLUS (DImode, tmp2, tmp6)));

  /* sum < addend detects unsigned carry out of each addition.  */
  emit_move_insn (tmp7, gen_rtx_LTU (DImode, tmp6, tmp4));
  emit_move_insn (tmp8, gen_rtx_LTU (DImode, result_lo, tmp2));

  /* High halves of the cross products; arithmetic shift keeps the
     sign for the signed case.  */
  if (sign)
    {
      emit_move_insn (tmp9, (gen_rtx_ASHIFTRT (DImode, tmp0, GEN_INT (32))));
      emit_move_insn (tmp10, (gen_rtx_ASHIFTRT (DImode, tmp1, GEN_INT (32))));
    }
  else
    {
      emit_move_insn (tmp9, (gen_rtx_LSHIFTRT (DImode, tmp0, GEN_INT (32))));
      emit_move_insn (tmp10, (gen_rtx_LSHIFTRT (DImode, tmp1, GEN_INT (32))));
    }

  /* Sum the high partial product, the cross-product high halves and
     the two carries into the final high word.  */
  emit_move_insn (tmp11, (gen_rtx_PLUS (DImode, tmp3, tmp7)));
  emit_move_insn (tmp12, (gen_rtx_PLUS (DImode, tmp8, tmp9)));
  emit_move_insn (tmp13, (gen_rtx_PLUS (DImode, tmp11, tmp12)));
  emit_move_insn (result, (gen_rtx_PLUS (DImode, tmp13, tmp10)));
}
/* Implement smuldi3_highpart: OP0 = high 64 bits of the signed
   128-bit product OP1 * OP2.  */
void
tilegx_expand_smuldi3_highpart (rtx op0, rtx op1, rtx op2)
{
  tilegx_expand_high_multiply (op0, op1, op2, true);
}
/* Implement umuldi3_highpart: OP0 = high 64 bits of the unsigned
   128-bit product OP1 * OP2.  */
void
tilegx_expand_umuldi3_highpart (rtx op0, rtx op1, rtx op2)
{
  tilegx_expand_high_multiply (op0, op1, op2, false);
}
/* Compare and branches */

/* Produce the rtx yielding a bool for a floating point
   comparison.  Emits an fsingle_add1 / fdouble_add_flags instruction
   to compute a flags word, then extracts the single flag bit that
   answers CODE into RES.  Always returns true.  */
static bool
tilegx_emit_fp_setcc (rtx res, enum rtx_code code, enum machine_mode mode,
		      rtx op0, rtx op1)
{
  /* TODO: Certain compares against constants can be done using entirely
     integer operations.  But you have to get the special cases right
     e.g. NaN, +0 == -0, etc.  */

  rtx flags;
  int flag_index;
  rtx a = force_reg (DImode, gen_lowpart (DImode, op0));
  rtx b = force_reg (DImode, gen_lowpart (DImode, op1));

  flags = gen_reg_rtx (DImode);

  if (mode == SFmode)
    {
      emit_insn (gen_insn_fsingle_add1 (flags, a, b));
    }
  else
    {
      gcc_assert (mode == DFmode);
      emit_insn (gen_insn_fdouble_add_flags (flags, a, b));
    }

  /* Bit positions of the individual comparison results within the
     flags word (NOTE(review): presumably fixed by the fsingle/fdouble
     flag encoding of the ISA -- confirm against the TILE-Gx manual).  */
  switch (code)
    {
    case EQ: flag_index = 30; break;
    case NE: flag_index = 31; break;
    case LE: flag_index = 27; break;
    case LT: flag_index = 26; break;
    case GE: flag_index = 29; break;
    case GT: flag_index = 28; break;
    default: gcc_unreachable ();
    }

  gcc_assert (GET_MODE (res) == DImode);
  emit_move_insn (res, gen_rtx_ZERO_EXTRACT (DImode, flags, GEN_INT (1),
					     GEN_INT (flag_index)));
  return true;
}
/* Certain simplifications can be done to make invalid setcc
   operations valid.  Emit the comparison of OP0 and OP1 into RES and
   return true on success (this implementation always succeeds for
   the codes it accepts).  */
static bool
tilegx_emit_setcc_internal (rtx res, enum rtx_code code, rtx op0, rtx op1,
			    enum machine_mode cmp_mode)
{
  rtx tmp;
  bool swap = false;

  /* Floating point goes through the flags-word path.  */
  if (cmp_mode == SFmode || cmp_mode == DFmode)
    return tilegx_emit_fp_setcc (res, code, cmp_mode, op0, op1);

  /* The general case: fold the comparison code to the types of
     compares that we have, choosing the branch as necessary.  */

  switch (code)
    {
    case EQ:
    case NE:
    case LE:
    case LT:
    case LEU:
    case LTU:
      /* We have these compares.  */
      break;

    case GE:
    case GT:
    case GEU:
    case GTU:
      /* We do not have these compares, so we reverse the
	 operands.  */
      swap = true;
      break;

    default:
      /* We should not have called this with any other code.  */
      gcc_unreachable ();
    }

  if (swap)
    {
      code = swap_condition (code);
      tmp = op0, op0 = op1, op1 = tmp;
    }

  if (!reg_or_0_operand (op0, cmp_mode))
    op0 = force_reg (cmp_mode, op0);

  if (!CONST_INT_P (op1) && !register_operand (op1, cmp_mode))
    op1 = force_reg (cmp_mode, op1);

  /* Return the setcc comparison.  */
  emit_insn (gen_rtx_SET (VOIDmode, res,
			  gen_rtx_fmt_ee (code, DImode, op0, op1)));

  return true;
}
2337 /* Implement cstore patterns. */
2338 bool
2339 tilegx_emit_setcc (rtx operands[], enum machine_mode cmp_mode)
2341 return
2342 tilegx_emit_setcc_internal (operands[0], GET_CODE (operands[1]),
2343 operands[2], operands[3], cmp_mode);
2347 /* Return whether CODE is a signed comparison. */
2348 static bool
2349 signed_compare_p (enum rtx_code code)
2351 return (code == EQ || code == NE || code == LT || code == LE
2352 || code == GT || code == GE);
/* Generate the comparison for a DImode conditional branch.

   CODE is the comparison operator, OP0/OP1 its operands, and CMP_MODE
   the mode in which the comparison is done.  If EQ_NE_ONLY is true,
   only allow the direct compare-against-zero shortcut for EQ/NE (used
   by the conditional-move expander).  Returns an rtx comparison
   suitable for use as a branch condition, emitting any setup insns as
   a side effect.  */
static rtx
tilegx_emit_cc_test (enum rtx_code code, rtx op0, rtx op1,
		     enum machine_mode cmp_mode, bool eq_ne_only)
{
  enum rtx_code branch_code;
  rtx temp;

  if (cmp_mode == SFmode || cmp_mode == DFmode)
    {
      /* Floating point: no direct branch-on-compare, so compute a
	 boolean saying whether the comparison is true.  */
      temp = gen_reg_rtx (DImode);
      tilegx_emit_setcc_internal (temp, code, op0, op1, cmp_mode);

      /* Test that flag.  */
      return gen_rtx_fmt_ee (NE, VOIDmode, temp, const0_rtx);
    }

  /* Check for a compare against zero using a comparison we can do
     directly.  */
  if (op1 == const0_rtx
      && (code == EQ || code == NE
	  || (!eq_ne_only && signed_compare_p (code))))
    {
      op0 = force_reg (cmp_mode, op0);
      return gen_rtx_fmt_ee (code, VOIDmode, op0, const0_rtx);
    }

  /* The general case: fold the comparison code to the types of
     compares that we have, choosing the branch as necessary.  */
  switch (code)
    {
    case EQ:
    case LE:
    case LT:
    case LEU:
    case LTU:
      /* We have these compares.  */
      branch_code = NE;
      break;

    case NE:
    case GE:
    case GT:
    case GEU:
    case GTU:
      /* These must be reversed (except NE, but let's
	 canonicalize).  */
      code = reverse_condition (code);
      branch_code = EQ;
      break;

    default:
      gcc_unreachable ();
    }

  /* Constant second operand that does not fit an immediate compare
     (or a LEU, which has no immediate form at all): try cheaper
     rewrites before materializing the constant in a register.  */
  if (CONST_INT_P (op1) && (!satisfies_constraint_I (op1) || code == LEU))
    {
      HOST_WIDE_INT n = INTVAL (op1);

      switch (code)
	{
	case EQ:
	  /* Subtract off the value we want to compare against and see
	     if we get zero.  This is cheaper than creating a constant
	     in a register.  Except that subtracting -128 is more
	     expensive than seqi to -128, so we leave that alone.  */
	  /* ??? Don't do this when comparing against symbols,
	     otherwise we'll reduce (&x == 0x1234) to (&x-0x1234 ==
	     0), which will be declared false out of hand (at least
	     for non-weak).  */
	  if (n != -128
	      && add_operand (GEN_INT (-n), DImode)
	      && !(symbolic_operand (op0, VOIDmode)
		   || (REG_P (op0) && REG_POINTER (op0))))
	    {
	      /* TODO: Use a SIMD add immediate to hit zero for tiled
		 constants in a single instruction.  */
	      if (GET_MODE (op0) != DImode)
		{
		  /* Convert to DImode so we can use addli.  Note that
		     this will not actually generate any code because
		     sign extension from SI -> DI is a no-op.  I don't
		     know if it's safe just to make a paradoxical
		     subreg here though.  */
		  rtx temp2 = gen_reg_rtx (DImode);
		  emit_insn (gen_extendsidi2 (temp2, op0));
		  op0 = temp2;
		}
	      else
		{
		  op0 = force_reg (DImode, op0);
		}

	      temp = gen_reg_rtx (DImode);
	      emit_move_insn (temp, gen_rtx_PLUS (DImode, op0, GEN_INT (-n)));
	      /* The sense flips: op0 == n  <=>  op0 - n == 0.  */
	      return gen_rtx_fmt_ee (reverse_condition (branch_code),
				     VOIDmode, temp, const0_rtx);
	    }
	  break;

	case LEU:
	  /* x <=u -1 is always true; no shift trick applies.  */
	  if (n == -1)
	    break;
	  /* FALLTHRU */

	case LTU:
	  /* Change ((unsigned)x < 0x1000) into !((int)x >> 12), etc.
	     We use arithmetic shift right because it's a 3-wide op,
	     while logical shift right is not.  */
	  {
	    int first = exact_log2 (code == LTU ? n : n + 1);
	    if (first != -1)
	      {
		op0 = force_reg (cmp_mode, op0);
		temp = gen_reg_rtx (cmp_mode);
		emit_move_insn (temp,
				gen_rtx_ASHIFTRT (cmp_mode, op0,
						  GEN_INT (first)));
		return gen_rtx_fmt_ee (reverse_condition (branch_code),
				       VOIDmode, temp, const0_rtx);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Compute a flag saying whether we should branch.  */
  temp = gen_reg_rtx (DImode);
  tilegx_emit_setcc_internal (temp, code, op0, op1, cmp_mode);

  /* Return the branch comparison.  */
  return gen_rtx_fmt_ee (branch_code, VOIDmode, temp, const0_rtx);
}
2494 /* Generate the comparison for a conditional branch. */
2495 void
2496 tilegx_emit_conditional_branch (rtx operands[], enum machine_mode cmp_mode)
2498 rtx cmp_rtx =
2499 tilegx_emit_cc_test (GET_CODE (operands[0]), operands[1], operands[2],
2500 cmp_mode, false);
2501 rtx branch_rtx = gen_rtx_SET (VOIDmode, pc_rtx,
2502 gen_rtx_IF_THEN_ELSE (VOIDmode, cmp_rtx,
2503 gen_rtx_LABEL_REF
2504 (VOIDmode,
2505 operands[3]),
2506 pc_rtx));
2507 emit_jump_insn (branch_rtx);
2511 /* Implement the mov<mode>cc pattern. */
2513 tilegx_emit_conditional_move (rtx cmp)
2515 return
2516 tilegx_emit_cc_test (GET_CODE (cmp), XEXP (cmp, 0), XEXP (cmp, 1),
2517 GET_MODE (XEXP (cmp, 0)), true);
2521 /* Return true if INSN is annotated with a REG_BR_PROB note that
2522 indicates it's a branch that's predicted taken. */
2523 static bool
2524 cbranch_predicted_p (rtx insn)
2526 rtx x = find_reg_note (insn, REG_BR_PROB, 0);
2528 if (x)
2530 int pred_val = INTVAL (XEXP (x, 0));
2532 return pred_val >= REG_BR_PROB_BASE / 2;
2535 return false;
2539 /* Output assembly code for a specific branch instruction, appending
2540 the branch prediction flag to the opcode if appropriate. */
2541 static const char *
2542 tilegx_output_simple_cbranch_with_opcode (rtx insn, const char *opcode,
2543 int regop, bool reverse_predicted)
2545 static char buf[64];
2546 sprintf (buf, "%s%s\t%%r%d, %%l0", opcode,
2547 (cbranch_predicted_p (insn) ^ reverse_predicted) ? "t" : "",
2548 regop);
2549 return buf;
/* Output assembly code for a specific branch instruction, appending
   the branch prediction flag to the opcode if appropriate.

   OPCODE is the branch opcode to use when the target is in range;
   REV_OPCODE is its logical inverse, used for the long-branch
   fallback; REGOP is the operand number of the tested register.  */
const char *
tilegx_output_cbranch_with_opcode (rtx insn, rtx *operands,
				   const char *opcode,
				   const char *rev_opcode, int regop)
{
  const char *branch_if_false;
  rtx taken, not_taken;
  bool is_simple_branch;

  gcc_assert (LABEL_P (operands[0]));

  is_simple_branch = true;
  if (INSN_ADDRESSES_SET_P ())
    {
      int from_addr = INSN_ADDRESSES (INSN_UID (insn));
      int to_addr = INSN_ADDRESSES (INSN_UID (operands[0]));
      int delta = to_addr - from_addr;
      /* Reachable range of a direct conditional branch; presumably a
	 17-bit signed bundle offset (+/-2^19 bytes, less one bundle
	 on the positive side) -- TODO confirm against the ISA.  */
      is_simple_branch = IN_RANGE (delta, -524288, 524280);
    }

  if (is_simple_branch)
    {
      /* Just a simple conditional branch.  */
      return
	tilegx_output_simple_cbranch_with_opcode (insn, opcode, regop, false);
    }

  /* Generate a reversed branch around a direct jump.  This fallback
     does not use branch-likely instructions.  */
  not_taken = gen_label_rtx ();
  taken = operands[0];

  /* Generate the reversed branch to NOT_TAKEN.  Note that operands[0]
     is redirected to the new label before the template is formatted.  */
  operands[0] = not_taken;
  branch_if_false =
    tilegx_output_simple_cbranch_with_opcode (insn, rev_opcode, regop, true);
  output_asm_insn (branch_if_false, operands);

  /* Unconditional jump to the real target.  */
  output_asm_insn ("j\t%l0", &taken);

  /* Output NOT_TAKEN.  */
  targetm.asm_out.internal_label (asm_out_file, "L",
				  CODE_LABEL_NUMBER (not_taken));
  return "";
}
2602 /* Output assembly code for a conditional branch instruction. */
2603 const char *
2604 tilegx_output_cbranch (rtx insn, rtx *operands, bool reversed)
2606 enum rtx_code code = GET_CODE (operands[1]);
2607 const char *opcode;
2608 const char *rev_opcode;
2610 if (reversed)
2611 code = reverse_condition (code);
2613 switch (code)
2615 case NE:
2616 opcode = "bnez";
2617 rev_opcode = "beqz";
2618 break;
2619 case EQ:
2620 opcode = "beqz";
2621 rev_opcode = "bnez";
2622 break;
2623 case GE:
2624 opcode = "bgez";
2625 rev_opcode = "bltz";
2626 break;
2627 case GT:
2628 opcode = "bgtz";
2629 rev_opcode = "blez";
2630 break;
2631 case LE:
2632 opcode = "blez";
2633 rev_opcode = "bgtz";
2634 break;
2635 case LT:
2636 opcode = "bltz";
2637 rev_opcode = "bgez";
2638 break;
2639 default:
2640 gcc_unreachable ();
2643 return tilegx_output_cbranch_with_opcode (insn, operands, opcode,
2644 rev_opcode, 2);
/* Implement the tablejump pattern.  OP0 is the index-derived target
   address loaded from the jump table, OP1 the table's label.  When
   generating PIC, table entries are PC-relative, so the label's
   address is computed and added to OP0 before jumping.  */
void
tilegx_expand_tablejump (rtx op0, rtx op1)
{
  if (flag_pic)
    {
      rtx temp = gen_reg_rtx (Pmode);
      rtx temp2 = gen_reg_rtx (Pmode);

      /* temp = address of the jump table itself.  */
      tilegx_compute_pcrel_address (temp, gen_rtx_LABEL_REF (Pmode, op1));
      /* temp2 = table base + (zero-extended) table entry.  */
      emit_move_insn (temp2,
		      gen_rtx_PLUS (Pmode,
				    convert_to_mode (Pmode, op0, false),
				    temp));
      op0 = temp2;
    }

  emit_jump_insn (gen_tablejump_aux (op0, op1));
}
2669 /* Emit barrier before an atomic, as needed for the memory MODEL. */
2670 void
2671 tilegx_pre_atomic_barrier (enum memmodel model)
2673 if (need_atomic_barrier_p (model, true))
2674 emit_insn (gen_memory_barrier ());
2678 /* Emit barrier after an atomic, as needed for the memory MODEL. */
2679 void
2680 tilegx_post_atomic_barrier (enum memmodel model)
2682 if (need_atomic_barrier_p (model, false))
2683 emit_insn (gen_memory_barrier ());
2688 /* Expand a builtin vector binary op, by calling gen function GEN with
2689 operands in the proper modes. DEST is converted to DEST_MODE, and
2690 src0 and src1 (if DO_SRC1 is true) is converted to SRC_MODE. */
2691 void
2692 tilegx_expand_builtin_vector_binop (rtx (*gen) (rtx, rtx, rtx),
2693 enum machine_mode dest_mode,
2694 rtx dest,
2695 enum machine_mode src_mode,
2696 rtx src0, rtx src1, bool do_src1)
2698 dest = gen_lowpart (dest_mode, dest);
2700 if (src0 == const0_rtx)
2701 src0 = CONST0_RTX (src_mode);
2702 else
2703 src0 = gen_lowpart (src_mode, src0);
2705 if (do_src1)
2707 if (src1 == const0_rtx)
2708 src1 = CONST0_RTX (src_mode);
2709 else
2710 src1 = gen_lowpart (src_mode, src1);
2713 emit_insn ((*gen) (dest, src0, src1));
2718 /* Intrinsics */
/* Per-builtin record pairing the insn that implements an intrinsic
   with its eventual FUNCTION_DECL.  */
struct tile_builtin_info
{
  /* Insn pattern implementing the builtin.  */
  enum insn_code icode;
  /* The builtin's declaration; NULL in the static table below and
     presumably filled in when builtins are registered (registration
     code not visible in this chunk).  */
  tree fndecl;
};
/* Insn codes for every TILE-Gx intrinsic, one entry per builtin; the
   trailing comment on each row names the builtin it implements.  The
   row order must presumably match enum tilegx_builtin (see
   tilegx-builtins.h) -- do not reorder entries independently.  */
static struct tile_builtin_info tilegx_builtin_info[TILEGX_BUILTIN_max] = {
  { CODE_FOR_adddi3, NULL }, /* add */
  { CODE_FOR_addsi3, NULL }, /* addx */
  { CODE_FOR_ssaddsi3, NULL }, /* addxsc */
  { CODE_FOR_anddi3, NULL }, /* and */
  { CODE_FOR_insn_bfexts, NULL }, /* bfexts */
  { CODE_FOR_insn_bfextu, NULL }, /* bfextu */
  { CODE_FOR_insn_bfins, NULL }, /* bfins */
  { CODE_FOR_clzdi2, NULL }, /* clz */
  { CODE_FOR_insn_cmoveqz, NULL }, /* cmoveqz */
  { CODE_FOR_insn_cmovnez, NULL }, /* cmovnez */
  { CODE_FOR_insn_cmpeq_didi, NULL }, /* cmpeq */
  { CODE_FOR_insn_cmpexch, NULL }, /* cmpexch */
  { CODE_FOR_insn_cmpexch4, NULL }, /* cmpexch4 */
  { CODE_FOR_insn_cmples_didi, NULL }, /* cmples */
  { CODE_FOR_insn_cmpleu_didi, NULL }, /* cmpleu */
  { CODE_FOR_insn_cmplts_didi, NULL }, /* cmplts */
  { CODE_FOR_insn_cmpltu_didi, NULL }, /* cmpltu */
  { CODE_FOR_insn_cmpne_didi, NULL }, /* cmpne */
  { CODE_FOR_insn_cmul, NULL }, /* cmul */
  { CODE_FOR_insn_cmula, NULL }, /* cmula */
  { CODE_FOR_insn_cmulaf, NULL }, /* cmulaf */
  { CODE_FOR_insn_cmulf, NULL }, /* cmulf */
  { CODE_FOR_insn_cmulfr, NULL }, /* cmulfr */
  { CODE_FOR_insn_cmulh, NULL }, /* cmulh */
  { CODE_FOR_insn_cmulhr, NULL }, /* cmulhr */
  { CODE_FOR_insn_crc32_32, NULL }, /* crc32_32 */
  { CODE_FOR_insn_crc32_8, NULL }, /* crc32_8 */
  { CODE_FOR_ctzdi2, NULL }, /* ctz */
  { CODE_FOR_insn_dblalign, NULL }, /* dblalign */
  { CODE_FOR_insn_dblalign2, NULL }, /* dblalign2 */
  { CODE_FOR_insn_dblalign4, NULL }, /* dblalign4 */
  { CODE_FOR_insn_dblalign6, NULL }, /* dblalign6 */
  { CODE_FOR_insn_drain, NULL }, /* drain */
  { CODE_FOR_insn_dtlbpr, NULL }, /* dtlbpr */
  { CODE_FOR_insn_exch, NULL }, /* exch */
  { CODE_FOR_insn_exch4, NULL }, /* exch4 */
  { CODE_FOR_insn_fdouble_add_flags, NULL }, /* fdouble_add_flags */
  { CODE_FOR_insn_fdouble_addsub, NULL }, /* fdouble_addsub */
  { CODE_FOR_insn_fdouble_mul_flags, NULL }, /* fdouble_mul_flags */
  { CODE_FOR_insn_fdouble_pack1, NULL }, /* fdouble_pack1 */
  { CODE_FOR_insn_fdouble_pack2, NULL }, /* fdouble_pack2 */
  { CODE_FOR_insn_fdouble_sub_flags, NULL }, /* fdouble_sub_flags */
  { CODE_FOR_insn_fdouble_unpack_max, NULL }, /* fdouble_unpack_max */
  { CODE_FOR_insn_fdouble_unpack_min, NULL }, /* fdouble_unpack_min */
  { CODE_FOR_insn_fetchadd, NULL }, /* fetchadd */
  { CODE_FOR_insn_fetchadd4, NULL }, /* fetchadd4 */
  { CODE_FOR_insn_fetchaddgez, NULL }, /* fetchaddgez */
  { CODE_FOR_insn_fetchaddgez4, NULL }, /* fetchaddgez4 */
  { CODE_FOR_insn_fetchand, NULL }, /* fetchand */
  { CODE_FOR_insn_fetchand4, NULL }, /* fetchand4 */
  { CODE_FOR_insn_fetchor, NULL }, /* fetchor */
  { CODE_FOR_insn_fetchor4, NULL }, /* fetchor4 */
  { CODE_FOR_insn_finv, NULL }, /* finv */
  { CODE_FOR_insn_flush, NULL }, /* flush */
  { CODE_FOR_insn_flushwb, NULL }, /* flushwb */
  { CODE_FOR_insn_fnop, NULL }, /* fnop */
  { CODE_FOR_insn_fsingle_add1, NULL }, /* fsingle_add1 */
  { CODE_FOR_insn_fsingle_addsub2, NULL }, /* fsingle_addsub2 */
  { CODE_FOR_insn_fsingle_mul1, NULL }, /* fsingle_mul1 */
  { CODE_FOR_insn_fsingle_mul2, NULL }, /* fsingle_mul2 */
  { CODE_FOR_insn_fsingle_pack1, NULL }, /* fsingle_pack1 */
  { CODE_FOR_insn_fsingle_pack2, NULL }, /* fsingle_pack2 */
  { CODE_FOR_insn_fsingle_sub1, NULL }, /* fsingle_sub1 */
  { CODE_FOR_insn_icoh, NULL }, /* icoh */
  { CODE_FOR_insn_ill, NULL }, /* ill */
  { CODE_FOR_insn_info, NULL }, /* info */
  { CODE_FOR_insn_infol, NULL }, /* infol */
  { CODE_FOR_insn_inv, NULL }, /* inv */
  { CODE_FOR_insn_ld, NULL }, /* ld */
  { CODE_FOR_insn_ld1s, NULL }, /* ld1s */
  { CODE_FOR_insn_ld1u, NULL }, /* ld1u */
  { CODE_FOR_insn_ld2s, NULL }, /* ld2s */
  { CODE_FOR_insn_ld2u, NULL }, /* ld2u */
  { CODE_FOR_insn_ld4s, NULL }, /* ld4s */
  { CODE_FOR_insn_ld4u, NULL }, /* ld4u */
  { CODE_FOR_insn_ldna, NULL }, /* ldna */
  { CODE_FOR_insn_ldnt, NULL }, /* ldnt */
  { CODE_FOR_insn_ldnt1s, NULL }, /* ldnt1s */
  { CODE_FOR_insn_ldnt1u, NULL }, /* ldnt1u */
  { CODE_FOR_insn_ldnt2s, NULL }, /* ldnt2s */
  { CODE_FOR_insn_ldnt2u, NULL }, /* ldnt2u */
  { CODE_FOR_insn_ldnt4s, NULL }, /* ldnt4s */
  { CODE_FOR_insn_ldnt4u, NULL }, /* ldnt4u */
  { CODE_FOR_insn_ld_L2, NULL }, /* ld_L2 */
  { CODE_FOR_insn_ld1s_L2, NULL }, /* ld1s_L2 */
  { CODE_FOR_insn_ld1u_L2, NULL }, /* ld1u_L2 */
  { CODE_FOR_insn_ld2s_L2, NULL }, /* ld2s_L2 */
  { CODE_FOR_insn_ld2u_L2, NULL }, /* ld2u_L2 */
  { CODE_FOR_insn_ld4s_L2, NULL }, /* ld4s_L2 */
  { CODE_FOR_insn_ld4u_L2, NULL }, /* ld4u_L2 */
  { CODE_FOR_insn_ldna_L2, NULL }, /* ldna_L2 */
  { CODE_FOR_insn_ldnt_L2, NULL }, /* ldnt_L2 */
  { CODE_FOR_insn_ldnt1s_L2, NULL }, /* ldnt1s_L2 */
  { CODE_FOR_insn_ldnt1u_L2, NULL }, /* ldnt1u_L2 */
  { CODE_FOR_insn_ldnt2s_L2, NULL }, /* ldnt2s_L2 */
  { CODE_FOR_insn_ldnt2u_L2, NULL }, /* ldnt2u_L2 */
  { CODE_FOR_insn_ldnt4s_L2, NULL }, /* ldnt4s_L2 */
  { CODE_FOR_insn_ldnt4u_L2, NULL }, /* ldnt4u_L2 */
  { CODE_FOR_insn_ld_miss, NULL }, /* ld_miss */
  { CODE_FOR_insn_ld1s_miss, NULL }, /* ld1s_miss */
  { CODE_FOR_insn_ld1u_miss, NULL }, /* ld1u_miss */
  { CODE_FOR_insn_ld2s_miss, NULL }, /* ld2s_miss */
  { CODE_FOR_insn_ld2u_miss, NULL }, /* ld2u_miss */
  { CODE_FOR_insn_ld4s_miss, NULL }, /* ld4s_miss */
  { CODE_FOR_insn_ld4u_miss, NULL }, /* ld4u_miss */
  { CODE_FOR_insn_ldna_miss, NULL }, /* ldna_miss */
  { CODE_FOR_insn_ldnt_miss, NULL }, /* ldnt_miss */
  { CODE_FOR_insn_ldnt1s_miss, NULL }, /* ldnt1s_miss */
  { CODE_FOR_insn_ldnt1u_miss, NULL }, /* ldnt1u_miss */
  { CODE_FOR_insn_ldnt2s_miss, NULL }, /* ldnt2s_miss */
  { CODE_FOR_insn_ldnt2u_miss, NULL }, /* ldnt2u_miss */
  { CODE_FOR_insn_ldnt4s_miss, NULL }, /* ldnt4s_miss */
  { CODE_FOR_insn_ldnt4u_miss, NULL }, /* ldnt4u_miss */
  { CODE_FOR_insn_lnk, NULL }, /* lnk */
  { CODE_FOR_memory_barrier, NULL }, /* mf */
  { CODE_FOR_insn_mfspr, NULL }, /* mfspr */
  { CODE_FOR_insn_mm, NULL }, /* mm */
  { CODE_FOR_insn_mnz, NULL }, /* mnz */
  { CODE_FOR_movdi, NULL }, /* move */
  { CODE_FOR_insn_mtspr, NULL }, /* mtspr */
  { CODE_FOR_insn_mul_hs_hs, NULL }, /* mul_hs_hs */
  { CODE_FOR_insn_mul_hs_hu, NULL }, /* mul_hs_hu */
  { CODE_FOR_insn_mul_hs_ls, NULL }, /* mul_hs_ls */
  { CODE_FOR_insn_mul_hs_lu, NULL }, /* mul_hs_lu */
  { CODE_FOR_insn_mul_hu_hu, NULL }, /* mul_hu_hu */
  { CODE_FOR_insn_mul_hu_ls, NULL }, /* mul_hu_ls */
  { CODE_FOR_insn_mul_hu_lu, NULL }, /* mul_hu_lu */
  { CODE_FOR_insn_mul_ls_ls, NULL }, /* mul_ls_ls */
  { CODE_FOR_insn_mul_ls_lu, NULL }, /* mul_ls_lu */
  { CODE_FOR_insn_mul_lu_lu, NULL }, /* mul_lu_lu */
  { CODE_FOR_insn_mula_hs_hs, NULL }, /* mula_hs_hs */
  { CODE_FOR_insn_mula_hs_hu, NULL }, /* mula_hs_hu */
  { CODE_FOR_insn_mula_hs_ls, NULL }, /* mula_hs_ls */
  { CODE_FOR_insn_mula_hs_lu, NULL }, /* mula_hs_lu */
  { CODE_FOR_insn_mula_hu_hu, NULL }, /* mula_hu_hu */
  { CODE_FOR_insn_mula_hu_ls, NULL }, /* mula_hu_ls */
  { CODE_FOR_insn_mula_hu_lu, NULL }, /* mula_hu_lu */
  { CODE_FOR_insn_mula_ls_ls, NULL }, /* mula_ls_ls */
  { CODE_FOR_insn_mula_ls_lu, NULL }, /* mula_ls_lu */
  { CODE_FOR_insn_mula_lu_lu, NULL }, /* mula_lu_lu */
  { CODE_FOR_insn_mulax, NULL }, /* mulax */
  { CODE_FOR_mulsi3, NULL }, /* mulx */
  { CODE_FOR_insn_mz, NULL }, /* mz */
  { CODE_FOR_insn_nap, NULL }, /* nap */
  { CODE_FOR_nop, NULL }, /* nop */
  { CODE_FOR_insn_nor_di, NULL }, /* nor */
  { CODE_FOR_iordi3, NULL }, /* or */
  { CODE_FOR_popcountdi2, NULL }, /* pcnt */
  { CODE_FOR_insn_prefetch_l1, NULL }, /* prefetch_l1 */
  { CODE_FOR_insn_prefetch_l1_fault, NULL }, /* prefetch_l1_fault */
  { CODE_FOR_insn_prefetch_l2, NULL }, /* prefetch_l2 */
  { CODE_FOR_insn_prefetch_l2_fault, NULL }, /* prefetch_l2_fault */
  { CODE_FOR_insn_prefetch_l3, NULL }, /* prefetch_l3 */
  { CODE_FOR_insn_prefetch_l3_fault, NULL }, /* prefetch_l3_fault */
  { CODE_FOR_insn_revbits, NULL }, /* revbits */
  { CODE_FOR_bswapdi2, NULL }, /* revbytes */
  { CODE_FOR_rotldi3, NULL }, /* rotl */
  { CODE_FOR_ashldi3, NULL }, /* shl */
  { CODE_FOR_insn_shl16insli, NULL }, /* shl16insli */
  { CODE_FOR_insn_shl1add, NULL }, /* shl1add */
  { CODE_FOR_insn_shl1addx, NULL }, /* shl1addx */
  { CODE_FOR_insn_shl2add, NULL }, /* shl2add */
  { CODE_FOR_insn_shl2addx, NULL }, /* shl2addx */
  { CODE_FOR_insn_shl3add, NULL }, /* shl3add */
  { CODE_FOR_insn_shl3addx, NULL }, /* shl3addx */
  { CODE_FOR_ashlsi3, NULL }, /* shlx */
  { CODE_FOR_ashrdi3, NULL }, /* shrs */
  { CODE_FOR_lshrdi3, NULL }, /* shru */
  { CODE_FOR_lshrsi3, NULL }, /* shrux */
  { CODE_FOR_insn_shufflebytes, NULL }, /* shufflebytes */
  { CODE_FOR_insn_st, NULL }, /* st */
  { CODE_FOR_insn_st1, NULL }, /* st1 */
  { CODE_FOR_insn_st2, NULL }, /* st2 */
  { CODE_FOR_insn_st4, NULL }, /* st4 */
  { CODE_FOR_insn_stnt, NULL }, /* stnt */
  { CODE_FOR_insn_stnt1, NULL }, /* stnt1 */
  { CODE_FOR_insn_stnt2, NULL }, /* stnt2 */
  { CODE_FOR_insn_stnt4, NULL }, /* stnt4 */
  { CODE_FOR_subdi3, NULL }, /* sub */
  { CODE_FOR_subsi3, NULL }, /* subx */
  { CODE_FOR_sssubsi3, NULL }, /* subxsc */
  { CODE_FOR_insn_tblidxb0, NULL }, /* tblidxb0 */
  { CODE_FOR_insn_tblidxb1, NULL }, /* tblidxb1 */
  { CODE_FOR_insn_tblidxb2, NULL }, /* tblidxb2 */
  { CODE_FOR_insn_tblidxb3, NULL }, /* tblidxb3 */
  { CODE_FOR_insn_v1add, NULL }, /* v1add */
  { CODE_FOR_insn_v1addi, NULL }, /* v1addi */
  { CODE_FOR_insn_v1adduc, NULL }, /* v1adduc */
  { CODE_FOR_insn_v1adiffu, NULL }, /* v1adiffu */
  { CODE_FOR_insn_v1avgu, NULL }, /* v1avgu */
  { CODE_FOR_insn_v1cmpeq, NULL }, /* v1cmpeq */
  { CODE_FOR_insn_v1cmpeqi, NULL }, /* v1cmpeqi */
  { CODE_FOR_insn_v1cmples, NULL }, /* v1cmples */
  { CODE_FOR_insn_v1cmpleu, NULL }, /* v1cmpleu */
  { CODE_FOR_insn_v1cmplts, NULL }, /* v1cmplts */
  { CODE_FOR_insn_v1cmpltsi, NULL }, /* v1cmpltsi */
  { CODE_FOR_insn_v1cmpltu, NULL }, /* v1cmpltu */
  { CODE_FOR_insn_v1cmpltui, NULL }, /* v1cmpltui */
  { CODE_FOR_insn_v1cmpne, NULL }, /* v1cmpne */
  { CODE_FOR_insn_v1ddotpu, NULL }, /* v1ddotpu */
  { CODE_FOR_insn_v1ddotpua, NULL }, /* v1ddotpua */
  { CODE_FOR_insn_v1ddotpus, NULL }, /* v1ddotpus */
  { CODE_FOR_insn_v1ddotpusa, NULL }, /* v1ddotpusa */
  { CODE_FOR_insn_v1dotp, NULL }, /* v1dotp */
  { CODE_FOR_insn_v1dotpa, NULL }, /* v1dotpa */
  { CODE_FOR_insn_v1dotpu, NULL }, /* v1dotpu */
  { CODE_FOR_insn_v1dotpua, NULL }, /* v1dotpua */
  { CODE_FOR_insn_v1dotpus, NULL }, /* v1dotpus */
  { CODE_FOR_insn_v1dotpusa, NULL }, /* v1dotpusa */
  { CODE_FOR_insn_v1int_h, NULL }, /* v1int_h */
  { CODE_FOR_insn_v1int_l, NULL }, /* v1int_l */
  { CODE_FOR_insn_v1maxu, NULL }, /* v1maxu */
  { CODE_FOR_insn_v1maxui, NULL }, /* v1maxui */
  { CODE_FOR_insn_v1minu, NULL }, /* v1minu */
  { CODE_FOR_insn_v1minui, NULL }, /* v1minui */
  { CODE_FOR_insn_v1mnz, NULL }, /* v1mnz */
  { CODE_FOR_insn_v1multu, NULL }, /* v1multu */
  { CODE_FOR_insn_v1mulu, NULL }, /* v1mulu */
  { CODE_FOR_insn_v1mulus, NULL }, /* v1mulus */
  { CODE_FOR_insn_v1mz, NULL }, /* v1mz */
  { CODE_FOR_insn_v1sadau, NULL }, /* v1sadau */
  { CODE_FOR_insn_v1sadu, NULL }, /* v1sadu */
  { CODE_FOR_insn_v1shl, NULL }, /* v1shl */
  { CODE_FOR_insn_v1shl, NULL }, /* v1shli */
  { CODE_FOR_insn_v1shrs, NULL }, /* v1shrs */
  { CODE_FOR_insn_v1shrs, NULL }, /* v1shrsi */
  { CODE_FOR_insn_v1shru, NULL }, /* v1shru */
  { CODE_FOR_insn_v1shru, NULL }, /* v1shrui */
  { CODE_FOR_insn_v1sub, NULL }, /* v1sub */
  { CODE_FOR_insn_v1subuc, NULL }, /* v1subuc */
  { CODE_FOR_insn_v2add, NULL }, /* v2add */
  { CODE_FOR_insn_v2addi, NULL }, /* v2addi */
  { CODE_FOR_insn_v2addsc, NULL }, /* v2addsc */
  { CODE_FOR_insn_v2adiffs, NULL }, /* v2adiffs */
  { CODE_FOR_insn_v2avgs, NULL }, /* v2avgs */
  { CODE_FOR_insn_v2cmpeq, NULL }, /* v2cmpeq */
  { CODE_FOR_insn_v2cmpeqi, NULL }, /* v2cmpeqi */
  { CODE_FOR_insn_v2cmples, NULL }, /* v2cmples */
  { CODE_FOR_insn_v2cmpleu, NULL }, /* v2cmpleu */
  { CODE_FOR_insn_v2cmplts, NULL }, /* v2cmplts */
  { CODE_FOR_insn_v2cmpltsi, NULL }, /* v2cmpltsi */
  { CODE_FOR_insn_v2cmpltu, NULL }, /* v2cmpltu */
  { CODE_FOR_insn_v2cmpltui, NULL }, /* v2cmpltui */
  { CODE_FOR_insn_v2cmpne, NULL }, /* v2cmpne */
  { CODE_FOR_insn_v2dotp, NULL }, /* v2dotp */
  { CODE_FOR_insn_v2dotpa, NULL }, /* v2dotpa */
  { CODE_FOR_insn_v2int_h, NULL }, /* v2int_h */
  { CODE_FOR_insn_v2int_l, NULL }, /* v2int_l */
  { CODE_FOR_insn_v2maxs, NULL }, /* v2maxs */
  { CODE_FOR_insn_v2maxsi, NULL }, /* v2maxsi */
  { CODE_FOR_insn_v2mins, NULL }, /* v2mins */
  { CODE_FOR_insn_v2minsi, NULL }, /* v2minsi */
  { CODE_FOR_insn_v2mnz, NULL }, /* v2mnz */
  { CODE_FOR_insn_v2mulfsc, NULL }, /* v2mulfsc */
  { CODE_FOR_insn_v2muls, NULL }, /* v2muls */
  { CODE_FOR_insn_v2mults, NULL }, /* v2mults */
  { CODE_FOR_insn_v2mz, NULL }, /* v2mz */
  { CODE_FOR_insn_v2packh, NULL }, /* v2packh */
  { CODE_FOR_insn_v2packl, NULL }, /* v2packl */
  { CODE_FOR_insn_v2packuc, NULL }, /* v2packuc */
  { CODE_FOR_insn_v2sadas, NULL }, /* v2sadas */
  { CODE_FOR_insn_v2sadau, NULL }, /* v2sadau */
  { CODE_FOR_insn_v2sads, NULL }, /* v2sads */
  { CODE_FOR_insn_v2sadu, NULL }, /* v2sadu */
  { CODE_FOR_insn_v2shl, NULL }, /* v2shl */
  { CODE_FOR_insn_v2shl, NULL }, /* v2shli */
  { CODE_FOR_insn_v2shlsc, NULL }, /* v2shlsc */
  { CODE_FOR_insn_v2shrs, NULL }, /* v2shrs */
  { CODE_FOR_insn_v2shrs, NULL }, /* v2shrsi */
  { CODE_FOR_insn_v2shru, NULL }, /* v2shru */
  { CODE_FOR_insn_v2shru, NULL }, /* v2shrui */
  { CODE_FOR_insn_v2sub, NULL }, /* v2sub */
  { CODE_FOR_insn_v2subsc, NULL }, /* v2subsc */
  { CODE_FOR_insn_v4add, NULL }, /* v4add */
  { CODE_FOR_insn_v4addsc, NULL }, /* v4addsc */
  { CODE_FOR_insn_v4int_h, NULL }, /* v4int_h */
  { CODE_FOR_insn_v4int_l, NULL }, /* v4int_l */
  { CODE_FOR_insn_v4packsc, NULL }, /* v4packsc */
  { CODE_FOR_insn_v4shl, NULL }, /* v4shl */
  { CODE_FOR_insn_v4shlsc, NULL }, /* v4shlsc */
  { CODE_FOR_insn_v4shrs, NULL }, /* v4shrs */
  { CODE_FOR_insn_v4shru, NULL }, /* v4shru */
  { CODE_FOR_insn_v4sub, NULL }, /* v4sub */
  { CODE_FOR_insn_v4subsc, NULL }, /* v4subsc */
  { CODE_FOR_insn_wh64, NULL }, /* wh64 */
  { CODE_FOR_xordi3, NULL }, /* xor */
  { CODE_FOR_tilegx_network_barrier, NULL }, /* network_barrier */
  { CODE_FOR_tilegx_idn0_receive, NULL }, /* idn0_receive */
  { CODE_FOR_tilegx_idn1_receive, NULL }, /* idn1_receive */
  { CODE_FOR_tilegx_idn_send, NULL }, /* idn_send */
  { CODE_FOR_tilegx_udn0_receive, NULL }, /* udn0_receive */
  { CODE_FOR_tilegx_udn1_receive, NULL }, /* udn1_receive */
  { CODE_FOR_tilegx_udn2_receive, NULL }, /* udn2_receive */
  { CODE_FOR_tilegx_udn3_receive, NULL }, /* udn3_receive */
  { CODE_FOR_tilegx_udn_send, NULL }, /* udn_send */
};
/* Static description of one user-visible intrinsic: its source-level
   name, which builtin it maps to, whether it is free of side effects,
   and an encoded signature.  */
struct tilegx_builtin_def
{
  /* Source-level name of the intrinsic, e.g. "__insn_add".  */
  const char *name;
  /* The builtin this name maps to (several immediate-operand variants
     can share one builtin code).  */
  enum tilegx_builtin code;
  /* True if the intrinsic has no side effects (can be CSE'd etc.).  */
  bool is_const;
  /* The first character is the return type.  Subsequent characters
     are the argument types.  See char_to_type.  */
  const char *type;
};
3037 static const struct tilegx_builtin_def tilegx_builtins[] = {
3038 { "__insn_add", TILEGX_INSN_ADD, true, "lll" },
3039 { "__insn_addi", TILEGX_INSN_ADD, true, "lll" },
3040 { "__insn_addli", TILEGX_INSN_ADD, true, "lll" },
3041 { "__insn_addx", TILEGX_INSN_ADDX, true, "iii" },
3042 { "__insn_addxi", TILEGX_INSN_ADDX, true, "iii" },
3043 { "__insn_addxli", TILEGX_INSN_ADDX, true, "iii" },
3044 { "__insn_addxsc", TILEGX_INSN_ADDXSC, true, "iii" },
3045 { "__insn_and", TILEGX_INSN_AND, true, "lll" },
3046 { "__insn_andi", TILEGX_INSN_AND, true, "lll" },
3047 { "__insn_bfexts", TILEGX_INSN_BFEXTS, true, "llll" },
3048 { "__insn_bfextu", TILEGX_INSN_BFEXTU, true, "llll" },
3049 { "__insn_bfins", TILEGX_INSN_BFINS, true, "lllll"},
3050 { "__insn_clz", TILEGX_INSN_CLZ, true, "ll" },
3051 { "__insn_cmoveqz", TILEGX_INSN_CMOVEQZ, true, "llll" },
3052 { "__insn_cmovnez", TILEGX_INSN_CMOVNEZ, true, "llll" },
3053 { "__insn_cmpeq", TILEGX_INSN_CMPEQ, true, "lll" },
3054 { "__insn_cmpeqi", TILEGX_INSN_CMPEQ, true, "lll" },
3055 { "__insn_cmpexch", TILEGX_INSN_CMPEXCH, false, "lpl" },
3056 { "__insn_cmpexch4", TILEGX_INSN_CMPEXCH4, false, "ipi" },
3057 { "__insn_cmples", TILEGX_INSN_CMPLES, true, "lll" },
3058 { "__insn_cmpleu", TILEGX_INSN_CMPLEU, true, "lll" },
3059 { "__insn_cmplts", TILEGX_INSN_CMPLTS, true, "lll" },
3060 { "__insn_cmpltsi", TILEGX_INSN_CMPLTS, true, "lll" },
3061 { "__insn_cmpltu", TILEGX_INSN_CMPLTU, true, "lll" },
3062 { "__insn_cmpltui", TILEGX_INSN_CMPLTU, true, "lll" },
3063 { "__insn_cmpne", TILEGX_INSN_CMPNE, true, "lll" },
3064 { "__insn_cmul", TILEGX_INSN_CMUL, true, "lll" },
3065 { "__insn_cmula", TILEGX_INSN_CMULA, true, "llll" },
3066 { "__insn_cmulaf", TILEGX_INSN_CMULAF, true, "llll" },
3067 { "__insn_cmulf", TILEGX_INSN_CMULF, true, "lll" },
3068 { "__insn_cmulfr", TILEGX_INSN_CMULFR, true, "lll" },
3069 { "__insn_cmulh", TILEGX_INSN_CMULH, true, "lll" },
3070 { "__insn_cmulhr", TILEGX_INSN_CMULHR, true, "lll" },
3071 { "__insn_crc32_32", TILEGX_INSN_CRC32_32, true, "lll" },
3072 { "__insn_crc32_8", TILEGX_INSN_CRC32_8, true, "lll" },
3073 { "__insn_ctz", TILEGX_INSN_CTZ, true, "ll" },
3074 { "__insn_dblalign", TILEGX_INSN_DBLALIGN, true, "lllk" },
3075 { "__insn_dblalign2", TILEGX_INSN_DBLALIGN2, true, "lll" },
3076 { "__insn_dblalign4", TILEGX_INSN_DBLALIGN4, true, "lll" },
3077 { "__insn_dblalign6", TILEGX_INSN_DBLALIGN6, true, "lll" },
3078 { "__insn_drain", TILEGX_INSN_DRAIN, false, "v" },
3079 { "__insn_dtlbpr", TILEGX_INSN_DTLBPR, false, "vl" },
3080 { "__insn_exch", TILEGX_INSN_EXCH, false, "lpl" },
3081 { "__insn_exch4", TILEGX_INSN_EXCH4, false, "ipi" },
3082 { "__insn_fdouble_add_flags", TILEGX_INSN_FDOUBLE_ADD_FLAGS, true, "lll" },
3083 { "__insn_fdouble_addsub", TILEGX_INSN_FDOUBLE_ADDSUB, true, "llll" },
3084 { "__insn_fdouble_mul_flags", TILEGX_INSN_FDOUBLE_MUL_FLAGS, true, "lll" },
3085 { "__insn_fdouble_pack1", TILEGX_INSN_FDOUBLE_PACK1, true, "lll" },
3086 { "__insn_fdouble_pack2", TILEGX_INSN_FDOUBLE_PACK2, true, "llll" },
3087 { "__insn_fdouble_sub_flags", TILEGX_INSN_FDOUBLE_SUB_FLAGS, true, "lll" },
3088 { "__insn_fdouble_unpack_max", TILEGX_INSN_FDOUBLE_UNPACK_MAX, true, "lll" },
3089 { "__insn_fdouble_unpack_min", TILEGX_INSN_FDOUBLE_UNPACK_MIN, true, "lll" },
3090 { "__insn_fetchadd", TILEGX_INSN_FETCHADD, false, "lpl" },
3091 { "__insn_fetchadd4", TILEGX_INSN_FETCHADD4, false, "ipi" },
3092 { "__insn_fetchaddgez", TILEGX_INSN_FETCHADDGEZ, false, "lpl" },
3093 { "__insn_fetchaddgez4", TILEGX_INSN_FETCHADDGEZ4, false, "ipi" },
3094 { "__insn_fetchand", TILEGX_INSN_FETCHAND, false, "lpl" },
3095 { "__insn_fetchand4", TILEGX_INSN_FETCHAND4, false, "ipi" },
3096 { "__insn_fetchor", TILEGX_INSN_FETCHOR, false, "lpl" },
3097 { "__insn_fetchor4", TILEGX_INSN_FETCHOR4, false, "ipi" },
3098 { "__insn_finv", TILEGX_INSN_FINV, false, "vk" },
3099 { "__insn_flush", TILEGX_INSN_FLUSH, false, "vk" },
3100 { "__insn_flushwb", TILEGX_INSN_FLUSHWB, false, "v" },
3101 { "__insn_fnop", TILEGX_INSN_FNOP, false, "v" },
3102 { "__insn_fsingle_add1", TILEGX_INSN_FSINGLE_ADD1, true, "lll" },
3103 { "__insn_fsingle_addsub2", TILEGX_INSN_FSINGLE_ADDSUB2, true, "llll" },
3104 { "__insn_fsingle_mul1", TILEGX_INSN_FSINGLE_MUL1, true, "lll" },
3105 { "__insn_fsingle_mul2", TILEGX_INSN_FSINGLE_MUL2, true, "lll" },
3106 { "__insn_fsingle_pack1", TILEGX_INSN_FSINGLE_PACK1, true, "ll" },
3107 { "__insn_fsingle_pack2", TILEGX_INSN_FSINGLE_PACK2, true, "lll" },
3108 { "__insn_fsingle_sub1", TILEGX_INSN_FSINGLE_SUB1, true, "lll" },
3109 { "__insn_icoh", TILEGX_INSN_ICOH, false, "vk" },
3110 { "__insn_ill", TILEGX_INSN_ILL, false, "v" },
3111 { "__insn_info", TILEGX_INSN_INFO, false, "vl" },
3112 { "__insn_infol", TILEGX_INSN_INFOL, false, "vl" },
3113 { "__insn_inv", TILEGX_INSN_INV, false, "vp" },
3114 { "__insn_ld", TILEGX_INSN_LD, false, "lk" },
3115 { "__insn_ld1s", TILEGX_INSN_LD1S, false, "lk" },
3116 { "__insn_ld1u", TILEGX_INSN_LD1U, false, "lk" },
3117 { "__insn_ld2s", TILEGX_INSN_LD2S, false, "lk" },
3118 { "__insn_ld2u", TILEGX_INSN_LD2U, false, "lk" },
3119 { "__insn_ld4s", TILEGX_INSN_LD4S, false, "lk" },
3120 { "__insn_ld4u", TILEGX_INSN_LD4U, false, "lk" },
3121 { "__insn_ldna", TILEGX_INSN_LDNA, false, "lk" },
3122 { "__insn_ldnt", TILEGX_INSN_LDNT, false, "lk" },
3123 { "__insn_ldnt1s", TILEGX_INSN_LDNT1S, false, "lk" },
3124 { "__insn_ldnt1u", TILEGX_INSN_LDNT1U, false, "lk" },
3125 { "__insn_ldnt2s", TILEGX_INSN_LDNT2S, false, "lk" },
3126 { "__insn_ldnt2u", TILEGX_INSN_LDNT2U, false, "lk" },
3127 { "__insn_ldnt4s", TILEGX_INSN_LDNT4S, false, "lk" },
3128 { "__insn_ldnt4u", TILEGX_INSN_LDNT4U, false, "lk" },
3129 { "__insn_ld_L2", TILEGX_INSN_LD_L2, false, "lk" },
3130 { "__insn_ld1s_L2", TILEGX_INSN_LD1S_L2, false, "lk" },
3131 { "__insn_ld1u_L2", TILEGX_INSN_LD1U_L2, false, "lk" },
3132 { "__insn_ld2s_L2", TILEGX_INSN_LD2S_L2, false, "lk" },
3133 { "__insn_ld2u_L2", TILEGX_INSN_LD2U_L2, false, "lk" },
3134 { "__insn_ld4s_L2", TILEGX_INSN_LD4S_L2, false, "lk" },
3135 { "__insn_ld4u_L2", TILEGX_INSN_LD4U_L2, false, "lk" },
3136 { "__insn_ldna_L2", TILEGX_INSN_LDNA_L2, false, "lk" },
3137 { "__insn_ldnt_L2", TILEGX_INSN_LDNT_L2, false, "lk" },
3138 { "__insn_ldnt1s_L2", TILEGX_INSN_LDNT1S_L2, false, "lk" },
3139 { "__insn_ldnt1u_L2", TILEGX_INSN_LDNT1U_L2, false, "lk" },
3140 { "__insn_ldnt2s_L2", TILEGX_INSN_LDNT2S_L2, false, "lk" },
3141 { "__insn_ldnt2u_L2", TILEGX_INSN_LDNT2U_L2, false, "lk" },
3142 { "__insn_ldnt4s_L2", TILEGX_INSN_LDNT4S_L2, false, "lk" },
3143 { "__insn_ldnt4u_L2", TILEGX_INSN_LDNT4U_L2, false, "lk" },
3144 { "__insn_ld_miss", TILEGX_INSN_LD_MISS, false, "lk" },
3145 { "__insn_ld1s_miss", TILEGX_INSN_LD1S_MISS, false, "lk" },
3146 { "__insn_ld1u_miss", TILEGX_INSN_LD1U_MISS, false, "lk" },
3147 { "__insn_ld2s_miss", TILEGX_INSN_LD2S_MISS, false, "lk" },
3148 { "__insn_ld2u_miss", TILEGX_INSN_LD2U_MISS, false, "lk" },
3149 { "__insn_ld4s_miss", TILEGX_INSN_LD4S_MISS, false, "lk" },
3150 { "__insn_ld4u_miss", TILEGX_INSN_LD4U_MISS, false, "lk" },
3151 { "__insn_ldna_miss", TILEGX_INSN_LDNA_MISS, false, "lk" },
3152 { "__insn_ldnt_miss", TILEGX_INSN_LDNT_MISS, false, "lk" },
3153 { "__insn_ldnt1s_miss", TILEGX_INSN_LDNT1S_MISS, false, "lk" },
3154 { "__insn_ldnt1u_miss", TILEGX_INSN_LDNT1U_MISS, false, "lk" },
3155 { "__insn_ldnt2s_miss", TILEGX_INSN_LDNT2S_MISS, false, "lk" },
3156 { "__insn_ldnt2u_miss", TILEGX_INSN_LDNT2U_MISS, false, "lk" },
3157 { "__insn_ldnt4s_miss", TILEGX_INSN_LDNT4S_MISS, false, "lk" },
3158 { "__insn_ldnt4u_miss", TILEGX_INSN_LDNT4U_MISS, false, "lk" },
3159 { "__insn_lnk", TILEGX_INSN_LNK, true, "l" },
3160 { "__insn_mf", TILEGX_INSN_MF, false, "v" },
3161 { "__insn_mfspr", TILEGX_INSN_MFSPR, false, "ll" },
3162 { "__insn_mm", TILEGX_INSN_MM, true, "lllll"},
3163 { "__insn_mnz", TILEGX_INSN_MNZ, true, "lll" },
3164 { "__insn_move", TILEGX_INSN_MOVE, true, "ll" },
3165 { "__insn_movei", TILEGX_INSN_MOVE, true, "ll" },
3166 { "__insn_moveli", TILEGX_INSN_MOVE, true, "ll" },
3167 { "__insn_mtspr", TILEGX_INSN_MTSPR, false, "vll" },
3168 { "__insn_mul_hs_hs", TILEGX_INSN_MUL_HS_HS, true, "lll" },
3169 { "__insn_mul_hs_hu", TILEGX_INSN_MUL_HS_HU, true, "lll" },
3170 { "__insn_mul_hs_ls", TILEGX_INSN_MUL_HS_LS, true, "lll" },
3171 { "__insn_mul_hs_lu", TILEGX_INSN_MUL_HS_LU, true, "lll" },
3172 { "__insn_mul_hu_hu", TILEGX_INSN_MUL_HU_HU, true, "lll" },
3173 { "__insn_mul_hu_ls", TILEGX_INSN_MUL_HU_LS, true, "lll" },
3174 { "__insn_mul_hu_lu", TILEGX_INSN_MUL_HU_LU, true, "lll" },
3175 { "__insn_mul_ls_ls", TILEGX_INSN_MUL_LS_LS, true, "lll" },
3176 { "__insn_mul_ls_lu", TILEGX_INSN_MUL_LS_LU, true, "lll" },
3177 { "__insn_mul_lu_lu", TILEGX_INSN_MUL_LU_LU, true, "lll" },
3178 { "__insn_mula_hs_hs", TILEGX_INSN_MULA_HS_HS, true, "llll" },
3179 { "__insn_mula_hs_hu", TILEGX_INSN_MULA_HS_HU, true, "llll" },
3180 { "__insn_mula_hs_ls", TILEGX_INSN_MULA_HS_LS, true, "llll" },
3181 { "__insn_mula_hs_lu", TILEGX_INSN_MULA_HS_LU, true, "llll" },
3182 { "__insn_mula_hu_hu", TILEGX_INSN_MULA_HU_HU, true, "llll" },
3183 { "__insn_mula_hu_ls", TILEGX_INSN_MULA_HU_LS, true, "llll" },
3184 { "__insn_mula_hu_lu", TILEGX_INSN_MULA_HU_LU, true, "llll" },
3185 { "__insn_mula_ls_ls", TILEGX_INSN_MULA_LS_LS, true, "llll" },
3186 { "__insn_mula_ls_lu", TILEGX_INSN_MULA_LS_LU, true, "llll" },
3187 { "__insn_mula_lu_lu", TILEGX_INSN_MULA_LU_LU, true, "llll" },
3188 { "__insn_mulax", TILEGX_INSN_MULAX, true, "iiii" },
3189 { "__insn_mulx", TILEGX_INSN_MULX, true, "iii" },
3190 { "__insn_mz", TILEGX_INSN_MZ, true, "lll" },
3191 { "__insn_nap", TILEGX_INSN_NAP, false, "v" },
3192 { "__insn_nop", TILEGX_INSN_NOP, true, "v" },
3193 { "__insn_nor", TILEGX_INSN_NOR, true, "lll" },
3194 { "__insn_or", TILEGX_INSN_OR, true, "lll" },
3195 { "__insn_ori", TILEGX_INSN_OR, true, "lll" },
3196 { "__insn_pcnt", TILEGX_INSN_PCNT, true, "ll" },
3197 { "__insn_prefetch", TILEGX_INSN_PREFETCH_L1, false, "vk" },
3198 { "__insn_prefetch_l1", TILEGX_INSN_PREFETCH_L1, false, "vk" },
3199 { "__insn_prefetch_l1_fault", TILEGX_INSN_PREFETCH_L1_FAULT, false, "vk" },
3200 { "__insn_prefetch_l2", TILEGX_INSN_PREFETCH_L2, false, "vk" },
3201 { "__insn_prefetch_l2_fault", TILEGX_INSN_PREFETCH_L2_FAULT, false, "vk" },
3202 { "__insn_prefetch_l3", TILEGX_INSN_PREFETCH_L3, false, "vk" },
3203 { "__insn_prefetch_l3_fault", TILEGX_INSN_PREFETCH_L3_FAULT, false, "vk" },
3204 { "__insn_revbits", TILEGX_INSN_REVBITS, true, "ll" },
3205 { "__insn_revbytes", TILEGX_INSN_REVBYTES, true, "ll" },
3206 { "__insn_rotl", TILEGX_INSN_ROTL, true, "lli" },
3207 { "__insn_rotli", TILEGX_INSN_ROTL, true, "lli" },
3208 { "__insn_shl", TILEGX_INSN_SHL, true, "lli" },
3209 { "__insn_shl16insli", TILEGX_INSN_SHL16INSLI, true, "lll" },
3210 { "__insn_shl1add", TILEGX_INSN_SHL1ADD, true, "lll" },
3211 { "__insn_shl1addx", TILEGX_INSN_SHL1ADDX, true, "iii" },
3212 { "__insn_shl2add", TILEGX_INSN_SHL2ADD, true, "lll" },
3213 { "__insn_shl2addx", TILEGX_INSN_SHL2ADDX, true, "iii" },
3214 { "__insn_shl3add", TILEGX_INSN_SHL3ADD, true, "lll" },
3215 { "__insn_shl3addx", TILEGX_INSN_SHL3ADDX, true, "iii" },
3216 { "__insn_shli", TILEGX_INSN_SHL, true, "lli" },
3217 { "__insn_shlx", TILEGX_INSN_SHLX, true, "iii" },
3218 { "__insn_shlxi", TILEGX_INSN_SHLX, true, "iii" },
3219 { "__insn_shrs", TILEGX_INSN_SHRS, true, "lli" },
3220 { "__insn_shrsi", TILEGX_INSN_SHRS, true, "lli" },
3221 { "__insn_shru", TILEGX_INSN_SHRU, true, "lli" },
3222 { "__insn_shrui", TILEGX_INSN_SHRU, true, "lli" },
3223 { "__insn_shrux", TILEGX_INSN_SHRUX, true, "iii" },
3224 { "__insn_shruxi", TILEGX_INSN_SHRUX, true, "iii" },
3225 { "__insn_shufflebytes", TILEGX_INSN_SHUFFLEBYTES, true, "llll" },
3226 { "__insn_st", TILEGX_INSN_ST, false, "vpl" },
3227 { "__insn_st1", TILEGX_INSN_ST1, false, "vpl" },
3228 { "__insn_st2", TILEGX_INSN_ST2, false, "vpl" },
3229 { "__insn_st4", TILEGX_INSN_ST4, false, "vpl" },
3230 { "__insn_stnt", TILEGX_INSN_STNT, false, "vpl" },
3231 { "__insn_stnt1", TILEGX_INSN_STNT1, false, "vpl" },
3232 { "__insn_stnt2", TILEGX_INSN_STNT2, false, "vpl" },
3233 { "__insn_stnt4", TILEGX_INSN_STNT4, false, "vpl" },
3234 { "__insn_sub", TILEGX_INSN_SUB, true, "lll" },
3235 { "__insn_subx", TILEGX_INSN_SUBX, true, "iii" },
3236 { "__insn_subxsc", TILEGX_INSN_SUBXSC, true, "iii" },
3237 { "__insn_tblidxb0", TILEGX_INSN_TBLIDXB0, true, "lll" },
3238 { "__insn_tblidxb1", TILEGX_INSN_TBLIDXB1, true, "lll" },
3239 { "__insn_tblidxb2", TILEGX_INSN_TBLIDXB2, true, "lll" },
3240 { "__insn_tblidxb3", TILEGX_INSN_TBLIDXB3, true, "lll" },
3241 { "__insn_v1add", TILEGX_INSN_V1ADD, true, "lll" },
3242 { "__insn_v1addi", TILEGX_INSN_V1ADDI, true, "lll" },
3243 { "__insn_v1adduc", TILEGX_INSN_V1ADDUC, true, "lll" },
3244 { "__insn_v1adiffu", TILEGX_INSN_V1ADIFFU, true, "lll" },
3245 { "__insn_v1avgu", TILEGX_INSN_V1AVGU, true, "lll" },
3246 { "__insn_v1cmpeq", TILEGX_INSN_V1CMPEQ, true, "lll" },
3247 { "__insn_v1cmpeqi", TILEGX_INSN_V1CMPEQI, true, "lll" },
3248 { "__insn_v1cmples", TILEGX_INSN_V1CMPLES, true, "lll" },
3249 { "__insn_v1cmpleu", TILEGX_INSN_V1CMPLEU, true, "lll" },
3250 { "__insn_v1cmplts", TILEGX_INSN_V1CMPLTS, true, "lll" },
3251 { "__insn_v1cmpltsi", TILEGX_INSN_V1CMPLTSI, true, "lll" },
3252 { "__insn_v1cmpltu", TILEGX_INSN_V1CMPLTU, true, "lll" },
3253 { "__insn_v1cmpltui", TILEGX_INSN_V1CMPLTUI, true, "lll" },
3254 { "__insn_v1cmpne", TILEGX_INSN_V1CMPNE, true, "lll" },
3255 { "__insn_v1ddotpu", TILEGX_INSN_V1DDOTPU, true, "lll" },
3256 { "__insn_v1ddotpua", TILEGX_INSN_V1DDOTPUA, true, "llll" },
3257 { "__insn_v1ddotpus", TILEGX_INSN_V1DDOTPUS, true, "lll" },
3258 { "__insn_v1ddotpusa", TILEGX_INSN_V1DDOTPUSA, true, "llll" },
3259 { "__insn_v1dotp", TILEGX_INSN_V1DOTP, true, "lll" },
3260 { "__insn_v1dotpa", TILEGX_INSN_V1DOTPA, true, "llll" },
3261 { "__insn_v1dotpu", TILEGX_INSN_V1DOTPU, true, "lll" },
3262 { "__insn_v1dotpua", TILEGX_INSN_V1DOTPUA, true, "llll" },
3263 { "__insn_v1dotpus", TILEGX_INSN_V1DOTPUS, true, "lll" },
3264 { "__insn_v1dotpusa", TILEGX_INSN_V1DOTPUSA, true, "llll" },
3265 { "__insn_v1int_h", TILEGX_INSN_V1INT_H, true, "lll" },
3266 { "__insn_v1int_l", TILEGX_INSN_V1INT_L, true, "lll" },
3267 { "__insn_v1maxu", TILEGX_INSN_V1MAXU, true, "lll" },
3268 { "__insn_v1maxui", TILEGX_INSN_V1MAXUI, true, "lll" },
3269 { "__insn_v1minu", TILEGX_INSN_V1MINU, true, "lll" },
3270 { "__insn_v1minui", TILEGX_INSN_V1MINUI, true, "lll" },
3271 { "__insn_v1mnz", TILEGX_INSN_V1MNZ, true, "lll" },
3272 { "__insn_v1multu", TILEGX_INSN_V1MULTU, true, "lll" },
3273 { "__insn_v1mulu", TILEGX_INSN_V1MULU, true, "lll" },
3274 { "__insn_v1mulus", TILEGX_INSN_V1MULUS, true, "lll" },
3275 { "__insn_v1mz", TILEGX_INSN_V1MZ, true, "lll" },
3276 { "__insn_v1sadau", TILEGX_INSN_V1SADAU, true, "llll" },
3277 { "__insn_v1sadu", TILEGX_INSN_V1SADU, true, "lll" },
3278 { "__insn_v1shl", TILEGX_INSN_V1SHL, true, "lll" },
3279 { "__insn_v1shli", TILEGX_INSN_V1SHLI, true, "lll" },
3280 { "__insn_v1shrs", TILEGX_INSN_V1SHRS, true, "lll" },
3281 { "__insn_v1shrsi", TILEGX_INSN_V1SHRSI, true, "lll" },
3282 { "__insn_v1shru", TILEGX_INSN_V1SHRU, true, "lll" },
3283 { "__insn_v1shrui", TILEGX_INSN_V1SHRUI, true, "lll" },
3284 { "__insn_v1sub", TILEGX_INSN_V1SUB, true, "lll" },
3285 { "__insn_v1subuc", TILEGX_INSN_V1SUBUC, true, "lll" },
3286 { "__insn_v2add", TILEGX_INSN_V2ADD, true, "lll" },
3287 { "__insn_v2addi", TILEGX_INSN_V2ADDI, true, "lll" },
3288 { "__insn_v2addsc", TILEGX_INSN_V2ADDSC, true, "lll" },
3289 { "__insn_v2adiffs", TILEGX_INSN_V2ADIFFS, true, "lll" },
3290 { "__insn_v2avgs", TILEGX_INSN_V2AVGS, true, "lll" },
3291 { "__insn_v2cmpeq", TILEGX_INSN_V2CMPEQ, true, "lll" },
3292 { "__insn_v2cmpeqi", TILEGX_INSN_V2CMPEQI, true, "lll" },
3293 { "__insn_v2cmples", TILEGX_INSN_V2CMPLES, true, "lll" },
3294 { "__insn_v2cmpleu", TILEGX_INSN_V2CMPLEU, true, "lll" },
3295 { "__insn_v2cmplts", TILEGX_INSN_V2CMPLTS, true, "lll" },
3296 { "__insn_v2cmpltsi", TILEGX_INSN_V2CMPLTSI, true, "lll" },
3297 { "__insn_v2cmpltu", TILEGX_INSN_V2CMPLTU, true, "lll" },
3298 { "__insn_v2cmpltui", TILEGX_INSN_V2CMPLTUI, true, "lll" },
3299 { "__insn_v2cmpne", TILEGX_INSN_V2CMPNE, true, "lll" },
3300 { "__insn_v2dotp", TILEGX_INSN_V2DOTP, true, "lll" },
3301 { "__insn_v2dotpa", TILEGX_INSN_V2DOTPA, true, "llll" },
3302 { "__insn_v2int_h", TILEGX_INSN_V2INT_H, true, "lll" },
3303 { "__insn_v2int_l", TILEGX_INSN_V2INT_L, true, "lll" },
3304 { "__insn_v2maxs", TILEGX_INSN_V2MAXS, true, "lll" },
3305 { "__insn_v2maxsi", TILEGX_INSN_V2MAXSI, true, "lll" },
3306 { "__insn_v2mins", TILEGX_INSN_V2MINS, true, "lll" },
3307 { "__insn_v2minsi", TILEGX_INSN_V2MINSI, true, "lll" },
3308 { "__insn_v2mnz", TILEGX_INSN_V2MNZ, true, "lll" },
3309 { "__insn_v2mulfsc", TILEGX_INSN_V2MULFSC, true, "lll" },
3310 { "__insn_v2muls", TILEGX_INSN_V2MULS, true, "lll" },
3311 { "__insn_v2mults", TILEGX_INSN_V2MULTS, true, "lll" },
3312 { "__insn_v2mz", TILEGX_INSN_V2MZ, true, "lll" },
3313 { "__insn_v2packh", TILEGX_INSN_V2PACKH, true, "lll" },
3314 { "__insn_v2packl", TILEGX_INSN_V2PACKL, true, "lll" },
3315 { "__insn_v2packuc", TILEGX_INSN_V2PACKUC, true, "lll" },
3316 { "__insn_v2sadas", TILEGX_INSN_V2SADAS, true, "llll" },
3317 { "__insn_v2sadau", TILEGX_INSN_V2SADAU, true, "llll" },
3318 { "__insn_v2sads", TILEGX_INSN_V2SADS, true, "lll" },
3319 { "__insn_v2sadu", TILEGX_INSN_V2SADU, true, "lll" },
3320 { "__insn_v2shl", TILEGX_INSN_V2SHL, true, "lll" },
3321 { "__insn_v2shli", TILEGX_INSN_V2SHLI, true, "lll" },
3322 { "__insn_v2shlsc", TILEGX_INSN_V2SHLSC, true, "lll" },
3323 { "__insn_v2shrs", TILEGX_INSN_V2SHRS, true, "lll" },
3324 { "__insn_v2shrsi", TILEGX_INSN_V2SHRSI, true, "lll" },
3325 { "__insn_v2shru", TILEGX_INSN_V2SHRU, true, "lll" },
3326 { "__insn_v2shrui", TILEGX_INSN_V2SHRUI, true, "lll" },
3327 { "__insn_v2sub", TILEGX_INSN_V2SUB, true, "lll" },
3328 { "__insn_v2subsc", TILEGX_INSN_V2SUBSC, true, "lll" },
3329 { "__insn_v4add", TILEGX_INSN_V4ADD, true, "lll" },
3330 { "__insn_v4addsc", TILEGX_INSN_V4ADDSC, true, "lll" },
3331 { "__insn_v4int_h", TILEGX_INSN_V4INT_H, true, "lll" },
3332 { "__insn_v4int_l", TILEGX_INSN_V4INT_L, true, "lll" },
3333 { "__insn_v4packsc", TILEGX_INSN_V4PACKSC, true, "lll" },
3334 { "__insn_v4shl", TILEGX_INSN_V4SHL, true, "lll" },
3335 { "__insn_v4shlsc", TILEGX_INSN_V4SHLSC, true, "lll" },
3336 { "__insn_v4shrs", TILEGX_INSN_V4SHRS, true, "lll" },
3337 { "__insn_v4shru", TILEGX_INSN_V4SHRU, true, "lll" },
3338 { "__insn_v4sub", TILEGX_INSN_V4SUB, true, "lll" },
3339 { "__insn_v4subsc", TILEGX_INSN_V4SUBSC, true, "lll" },
3340 { "__insn_wh64", TILEGX_INSN_WH64, false, "vp" },
3341 { "__insn_xor", TILEGX_INSN_XOR, true, "lll" },
3342 { "__insn_xori", TILEGX_INSN_XOR, true, "lll" },
3343 { "__tile_network_barrier", TILEGX_NETWORK_BARRIER, false, "v" },
3344 { "__tile_idn0_receive", TILEGX_IDN0_RECEIVE, false, "l" },
3345 { "__tile_idn1_receive", TILEGX_IDN1_RECEIVE, false, "l" },
3346 { "__tile_idn_send", TILEGX_IDN_SEND, false, "vl" },
3347 { "__tile_udn0_receive", TILEGX_UDN0_RECEIVE, false, "l" },
3348 { "__tile_udn1_receive", TILEGX_UDN1_RECEIVE, false, "l" },
3349 { "__tile_udn2_receive", TILEGX_UDN2_RECEIVE, false, "l" },
3350 { "__tile_udn3_receive", TILEGX_UDN3_RECEIVE, false, "l" },
3351 { "__tile_udn_send", TILEGX_UDN_SEND, false, "vl" },
3355 /* Convert a character in a builtin type string to a tree type. */
3356 static tree
3357 char_to_type (char c)
3359 static tree volatile_ptr_type_node = NULL;
3360 static tree volatile_const_ptr_type_node = NULL;
3362 if (volatile_ptr_type_node == NULL)
3364 volatile_ptr_type_node =
3365 build_pointer_type (build_qualified_type (void_type_node,
3366 TYPE_QUAL_VOLATILE));
3367 volatile_const_ptr_type_node =
3368 build_pointer_type (build_qualified_type (void_type_node,
3369 TYPE_QUAL_CONST
3370 | TYPE_QUAL_VOLATILE));
3373 switch (c)
3375 case 'v':
3376 return void_type_node;
3377 case 'i':
3378 return unsigned_type_node;
3379 case 'l':
3380 return long_long_unsigned_type_node;
3381 case 'p':
3382 return volatile_ptr_type_node;
3383 case 'k':
3384 return volatile_const_ptr_type_node;
3385 default:
3386 gcc_unreachable ();
3391 /* Implement TARGET_INIT_BUILTINS. */
3392 static void
3393 tilegx_init_builtins (void)
3395 size_t i;
3397 for (i = 0; i < ARRAY_SIZE (tilegx_builtins); i++)
3399 const struct tilegx_builtin_def *p = &tilegx_builtins[i];
3400 tree ftype, ret_type, arg_type_list = void_list_node;
3401 tree decl;
3402 int j;
3404 for (j = strlen (p->type) - 1; j > 0; j--)
3406 arg_type_list =
3407 tree_cons (NULL_TREE, char_to_type (p->type[j]), arg_type_list);
3410 ret_type = char_to_type (p->type[0]);
3412 ftype = build_function_type (ret_type, arg_type_list);
3414 decl = add_builtin_function (p->name, ftype, p->code, BUILT_IN_MD,
3415 NULL, NULL);
3417 if (p->is_const)
3418 TREE_READONLY (decl) = 1;
3419 TREE_NOTHROW (decl) = 1;
3421 if (tilegx_builtin_info[p->code].fndecl == NULL)
3422 tilegx_builtin_info[p->code].fndecl = decl;
3427 /* Implement TARGET_EXPAND_BUILTIN. */
3428 static rtx
3429 tilegx_expand_builtin (tree exp,
3430 rtx target,
3431 rtx subtarget ATTRIBUTE_UNUSED,
3432 enum machine_mode mode ATTRIBUTE_UNUSED,
3433 int ignore ATTRIBUTE_UNUSED)
3435 #define MAX_BUILTIN_ARGS 4
3437 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3438 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3439 tree arg;
3440 call_expr_arg_iterator iter;
3441 enum insn_code icode;
3442 rtx op[MAX_BUILTIN_ARGS + 1], pat;
3443 int opnum;
3444 bool nonvoid;
3445 insn_gen_fn fn;
3447 if (fcode >= TILEGX_BUILTIN_max)
3448 internal_error ("bad builtin fcode");
3449 icode = tilegx_builtin_info[fcode].icode;
3450 if (icode == 0)
3451 internal_error ("bad builtin icode");
3453 nonvoid = TREE_TYPE (TREE_TYPE (fndecl)) != void_type_node;
3455 opnum = nonvoid;
3456 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3458 const struct insn_operand_data *insn_op;
3460 if (arg == error_mark_node)
3461 return NULL_RTX;
3462 if (opnum > MAX_BUILTIN_ARGS)
3463 return NULL_RTX;
3465 insn_op = &insn_data[icode].operand[opnum];
3467 op[opnum] = expand_expr (arg, NULL_RTX, insn_op->mode, EXPAND_NORMAL);
3469 if (!(*insn_op->predicate) (op[opnum], insn_op->mode))
3471 enum machine_mode opmode = insn_op->mode;
3473 /* pointer_operand and pmode_register_operand operands do
3474 not specify a mode, so use the operand's mode instead
3475 (which should always be right by the time we get here,
3476 except for constants, which are VOIDmode). */
3477 if (opmode == VOIDmode)
3479 enum machine_mode m = GET_MODE (op[opnum]);
3480 gcc_assert (m == Pmode || m == VOIDmode);
3481 opmode = Pmode;
3484 op[opnum] = copy_to_mode_reg (opmode, op[opnum]);
3487 if (!(*insn_op->predicate) (op[opnum], insn_op->mode))
3489 /* We still failed to meet the predicate even after moving
3490 into a register. Assume we needed an immediate. */
3491 error_at (EXPR_LOCATION (exp),
3492 "operand must be an immediate of the right size");
3493 return const0_rtx;
3496 opnum++;
3499 if (nonvoid)
3501 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3502 if (!target
3503 || GET_MODE (target) != tmode
3504 || !(*insn_data[icode].operand[0].predicate) (target, tmode))
3506 if (tmode == VOIDmode)
3508 /* get the mode from the return type. */
3509 tmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl)));
3511 target = gen_reg_rtx (tmode);
3513 op[0] = target;
3516 fn = GEN_FCN (icode);
3517 switch (opnum)
3519 case 0:
3520 pat = fn (NULL_RTX);
3521 break;
3522 case 1:
3523 pat = fn (op[0]);
3524 break;
3525 case 2:
3526 pat = fn (op[0], op[1]);
3527 break;
3528 case 3:
3529 pat = fn (op[0], op[1], op[2]);
3530 break;
3531 case 4:
3532 pat = fn (op[0], op[1], op[2], op[3]);
3533 break;
3534 case 5:
3535 pat = fn (op[0], op[1], op[2], op[3], op[4]);
3536 break;
3537 default:
3538 gcc_unreachable ();
3540 if (!pat)
3541 return NULL_RTX;
3542 emit_insn (pat);
3544 if (nonvoid)
3545 return target;
3546 else
3547 return const0_rtx;
3551 /* Implement TARGET_BUILTIN_DECL. */
3552 static tree
3553 tilegx_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
3555 if (code >= TILEGX_BUILTIN_max)
3556 return error_mark_node;
3558 return tilegx_builtin_info[code].fndecl;
3563 /* Stack frames */
3565 /* Return whether REGNO needs to be saved in the stack frame. */
3566 static bool
3567 need_to_save_reg (unsigned int regno)
3569 if (!fixed_regs[regno] && !call_used_regs[regno]
3570 && df_regs_ever_live_p (regno))
3571 return true;
3573 if (flag_pic
3574 && (regno == PIC_OFFSET_TABLE_REGNUM
3575 || regno == TILEGX_PIC_TEXT_LABEL_REGNUM)
3576 && (crtl->uses_pic_offset_table || crtl->saves_all_registers))
3577 return true;
3579 if (crtl->calls_eh_return)
3581 unsigned i;
3582 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; i++)
3584 if (regno == EH_RETURN_DATA_REGNO (i))
3585 return true;
3589 return false;
3593 /* Return the size of the register savev area. This function is only
3594 correct starting with local register allocation */
3595 static int
3596 tilegx_saved_regs_size (void)
3598 int reg_save_size = 0;
3599 int regno;
3600 int offset_to_frame;
3601 int align_mask;
3603 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
3604 if (need_to_save_reg (regno))
3605 reg_save_size += UNITS_PER_WORD;
3607 /* Pad out the register save area if necessary to make
3608 frame_pointer_rtx be as aligned as the stack pointer. */
3609 offset_to_frame = crtl->args.pretend_args_size + reg_save_size;
3610 align_mask = (STACK_BOUNDARY / BITS_PER_UNIT) - 1;
3611 reg_save_size += (-offset_to_frame) & align_mask;
3613 return reg_save_size;
3617 /* Round up frame size SIZE. */
3618 static int
3619 round_frame_size (int size)
3621 return ((size + STACK_BOUNDARY / BITS_PER_UNIT - 1)
3622 & -STACK_BOUNDARY / BITS_PER_UNIT);
3626 /* Emit a store in the stack frame to save REGNO at address ADDR, and
3627 emit the corresponding REG_CFA_OFFSET note described by CFA and
3628 CFA_OFFSET. Return the emitted insn. */
3629 static rtx
3630 frame_emit_store (int regno, int regno_note, rtx addr, rtx cfa,
3631 int cfa_offset)
3633 rtx reg = gen_rtx_REG (DImode, regno);
3634 rtx mem = gen_frame_mem (DImode, addr);
3635 rtx mov = gen_movdi (mem, reg);
3637 /* Describe what just happened in a way that dwarf understands. We
3638 use temporary registers to hold the address to make scheduling
3639 easier, and use the REG_CFA_OFFSET to describe the address as an
3640 offset from the CFA. */
3641 rtx reg_note = gen_rtx_REG (DImode, regno_note);
3642 rtx cfa_relative_addr = gen_rtx_PLUS (Pmode, cfa, GEN_INT (cfa_offset));
3643 rtx cfa_relative_mem = gen_frame_mem (DImode, cfa_relative_addr);
3644 rtx real = gen_rtx_SET (VOIDmode, cfa_relative_mem, reg_note);
3645 add_reg_note (mov, REG_CFA_OFFSET, real);
3647 return emit_insn (mov);
3651 /* Emit a load in the stack frame to load REGNO from address ADDR.
3652 Add a REG_CFA_RESTORE note to CFA_RESTORES if CFA_RESTORES is
3653 non-null. Return the emitted insn. */
3654 static rtx
3655 frame_emit_load (int regno, rtx addr, rtx *cfa_restores)
3657 rtx reg = gen_rtx_REG (DImode, regno);
3658 rtx mem = gen_frame_mem (DImode, addr);
3659 if (cfa_restores)
3660 *cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg, *cfa_restores);
3661 return emit_insn (gen_movdi (reg, mem));
3665 /* Helper function to set RTX_FRAME_RELATED_P on instructions,
3666 including sequences. */
3667 static rtx
3668 set_frame_related_p (void)
3670 rtx seq = get_insns ();
3671 rtx insn;
3673 end_sequence ();
3675 if (!seq)
3676 return NULL_RTX;
3678 if (INSN_P (seq))
3680 insn = seq;
3681 while (insn != NULL_RTX)
3683 RTX_FRAME_RELATED_P (insn) = 1;
3684 insn = NEXT_INSN (insn);
3686 seq = emit_insn (seq);
3688 else
3690 seq = emit_insn (seq);
3691 RTX_FRAME_RELATED_P (seq) = 1;
3693 return seq;
3697 #define FRP(exp) (start_sequence (), exp, set_frame_related_p ())
3699 /* This emits code for 'sp += offset'.
3701 The ABI only allows us to modify 'sp' in a single 'addi' or
3702 'addli', so the backtracer understands it. Larger amounts cannot
3703 use those instructions, so are added by placing the offset into a
3704 large register and using 'add'.
3706 This happens after reload, so we need to expand it ourselves. */
3707 static rtx
3708 emit_sp_adjust (int offset, int *next_scratch_regno, bool frame_related,
3709 rtx reg_notes)
3711 rtx to_add;
3712 rtx imm_rtx = GEN_INT (offset);
3714 rtx insn;
3715 if (satisfies_constraint_J (imm_rtx))
3717 /* We can add this using a single immediate add. */
3718 to_add = imm_rtx;
3720 else
3722 rtx tmp = gen_rtx_REG (Pmode, (*next_scratch_regno)--);
3723 tilegx_expand_set_const64 (tmp, imm_rtx);
3724 to_add = tmp;
3727 /* Actually adjust the stack pointer. */
3728 if (TARGET_32BIT)
3729 insn = gen_sp_adjust_32bit (stack_pointer_rtx, stack_pointer_rtx, to_add);
3730 else
3731 insn = gen_sp_adjust (stack_pointer_rtx, stack_pointer_rtx, to_add);
3733 insn = emit_insn (insn);
3734 REG_NOTES (insn) = reg_notes;
3736 /* Describe what just happened in a way that dwarf understands. */
3737 if (frame_related)
3739 rtx real = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
3740 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3741 imm_rtx));
3742 RTX_FRAME_RELATED_P (insn) = 1;
3743 add_reg_note (insn, REG_CFA_ADJUST_CFA, real);
3746 return insn;
3750 /* Return whether the current function is leaf. This takes into
3751 account whether the function calls tls_get_addr. */
3752 static bool
3753 tilegx_current_function_is_leaf (void)
3755 return crtl->is_leaf && !cfun->machine->calls_tls_get_addr;
3759 /* Return the frame size. */
3760 static int
3761 compute_total_frame_size (void)
3763 int total_size = (get_frame_size () + tilegx_saved_regs_size ()
3764 + crtl->outgoing_args_size
3765 + crtl->args.pretend_args_size);
3767 if (!tilegx_current_function_is_leaf () || cfun->calls_alloca)
3769 /* Make room for save area in callee. */
3770 total_size += STACK_POINTER_OFFSET;
3773 return round_frame_size (total_size);
3777 /* Return nonzero if this function is known to have a null epilogue.
3778 This allows the optimizer to omit jumps to jumps if no stack was
3779 created. */
3780 bool
3781 tilegx_can_use_return_insn_p (void)
3783 return (reload_completed
3784 && cfun->static_chain_decl == 0
3785 && compute_total_frame_size () == 0
3786 && tilegx_current_function_is_leaf ()
3787 && !crtl->profile && !df_regs_ever_live_p (TILEGX_LINK_REGNUM));
3791 /* Returns an rtx for a stack slot at 'FP + offset_from_fp'. If there
3792 is a frame pointer, it computes the value relative to
3793 that. Otherwise it uses the stack pointer. */
3794 static rtx
3795 compute_frame_addr (int offset_from_fp, int *next_scratch_regno)
3797 rtx base_reg_rtx, tmp_reg_rtx, offset_rtx;
3798 int offset_from_base;
3800 if (frame_pointer_needed)
3802 base_reg_rtx = hard_frame_pointer_rtx;
3803 offset_from_base = offset_from_fp;
3805 else
3807 int offset_from_sp = compute_total_frame_size () + offset_from_fp;
3808 offset_from_base = offset_from_sp;
3809 base_reg_rtx = stack_pointer_rtx;
3812 if (offset_from_base == 0)
3813 return base_reg_rtx;
3815 /* Compute the new value of the stack pointer. */
3816 tmp_reg_rtx = gen_rtx_REG (Pmode, (*next_scratch_regno)--);
3817 offset_rtx = GEN_INT (offset_from_base);
3819 if (!add_operand (offset_rtx, Pmode))
3821 expand_set_cint64 (tmp_reg_rtx, offset_rtx);
3822 offset_rtx = tmp_reg_rtx;
3825 emit_insn (gen_rtx_SET (VOIDmode, tmp_reg_rtx,
3826 gen_rtx_PLUS (Pmode, base_reg_rtx, offset_rtx)));
3828 return tmp_reg_rtx;
/* The stack frame looks like this:
         +-------------+
         |    ...      |
         |  incoming   |
         | stack args  |
   AP -> +-------------+
         | caller's HFP|
         +-------------+
         | lr save     |
  HFP -> +-------------+
         |  var args   |
         |  reg save   | crtl->args.pretend_args_size bytes
         +-------------+
         |    ...      |
         | saved regs  | tilegx_saved_regs_size() bytes
   FP -> +-------------+
         |    ...      |
         |   vars      | get_frame_size() bytes
         +-------------+
         |    ...      |
         |  outgoing   |
         |  stack args | crtl->outgoing_args_size bytes
         +-------------+
         | HFP         | ptr_size bytes (only here if nonleaf / alloca)
         +-------------+
         | callee lr   | ptr_size bytes (only here if nonleaf / alloca)
         | save        |
   SP -> +-------------+

  HFP == incoming SP.

  For functions with a frame larger than 32767 bytes, or which use
  alloca (), r52 is used as a frame pointer.  Otherwise there is no
  frame pointer.

  FP is saved at SP+ptr_size before calling a subroutine so the callee
  can chain.  */
void
tilegx_expand_prologue (void)
{
#define ROUND_ROBIN_SIZE 4
  /* We round-robin through four scratch registers to hold temporary
     addresses for saving registers, to make instruction scheduling
     easier.  */
  rtx reg_save_addr[ROUND_ROBIN_SIZE] = {
    NULL_RTX, NULL_RTX, NULL_RTX, NULL_RTX
  };
  rtx insn, cfa;
  unsigned int which_scratch;
  int offset, start_offset, regno;

  /* A register that holds a copy of the incoming fp.  */
  int fp_copy_regno = -1;

  /* A register that holds a copy of the incoming sp.  */
  int sp_copy_regno = -1;

  /* Next scratch register number to hand out (postdecrementing).
     Starts at 29 and counts down; presumably r29 and below are
     caller-clobbered at prologue time -- confirm against the ABI.  */
  int next_scratch_regno = 29;

  int total_size = compute_total_frame_size ();

  if (flag_stack_usage_info)
    current_function_static_stack_size = total_size;

  /* Save lr first in its special location because code after this
     might use the link register as a scratch register.  */
  if (df_regs_ever_live_p (TILEGX_LINK_REGNUM) || crtl->calls_eh_return)
    FRP (frame_emit_store (TILEGX_LINK_REGNUM, TILEGX_LINK_REGNUM,
			   stack_pointer_rtx, stack_pointer_rtx, 0));

  if (total_size == 0)
    {
      /* Load the PIC register if needed.  */
      if (flag_pic && crtl->uses_pic_offset_table)
	load_pic_register (false);

      /* Nothing else to do for an empty frame.  */
      return;
    }

  cfa = stack_pointer_rtx;

  if (frame_pointer_needed)
    {
      fp_copy_regno = next_scratch_regno--;

      /* Copy the old frame pointer aside so we can save it later.  */
      insn =
	FRP (emit_move_insn (gen_rtx_REG (word_mode, fp_copy_regno),
			     gen_lowpart (word_mode, hard_frame_pointer_rtx)));
      add_reg_note (insn, REG_CFA_REGISTER, NULL_RTX);

      /* Set up the frame pointer.  */
      insn = FRP (emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx));
      add_reg_note (insn, REG_CFA_DEF_CFA, hard_frame_pointer_rtx);
      cfa = hard_frame_pointer_rtx;
      REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;

      /* fp holds a copy of the incoming sp, in case we need to store
         it.  */
      sp_copy_regno = HARD_FRAME_POINTER_REGNUM;
    }
  else if (!tilegx_current_function_is_leaf ())
    {
      /* Copy the old stack pointer aside so we can save it later.  */
      sp_copy_regno = next_scratch_regno--;
      emit_move_insn (gen_rtx_REG (Pmode, sp_copy_regno),
		      stack_pointer_rtx);
    }

  if (tilegx_current_function_is_leaf ())
    {
      /* No need to store chain pointer to caller's frame.  */
      emit_sp_adjust (-total_size, &next_scratch_regno,
		      !frame_pointer_needed, NULL_RTX);
    }
  else
    {
      /* Save the frame pointer (incoming sp value) to support
         backtracing.  First we need to create an rtx with the store
         address.  */
      rtx chain_addr = gen_rtx_REG (Pmode, next_scratch_regno--);
      rtx size_rtx = GEN_INT (-(total_size - UNITS_PER_WORD));

      if (add_operand (size_rtx, Pmode))
	{
	  /* Expose more parallelism by computing this value from the
	     original stack pointer, not the one after we have pushed
	     the frame.  */
	  rtx p = gen_rtx_PLUS (Pmode, stack_pointer_rtx, size_rtx);
	  emit_insn (gen_rtx_SET (VOIDmode, chain_addr, p));
	  emit_sp_adjust (-total_size, &next_scratch_regno,
			  !frame_pointer_needed, NULL_RTX);
	}
      else
	{
	  /* The stack frame is large, so just store the incoming sp
	     value at *(new_sp + UNITS_PER_WORD).  */
	  rtx p;
	  emit_sp_adjust (-total_size, &next_scratch_regno,
			  !frame_pointer_needed, NULL_RTX);
	  p = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
			    GEN_INT (UNITS_PER_WORD));
	  emit_insn (gen_rtx_SET (VOIDmode, chain_addr, p));
	}

      /* Save our frame pointer for backtrace chaining.  */
      emit_insn (gen_movdi (gen_frame_mem (DImode, chain_addr),
			    gen_rtx_REG (DImode, sp_copy_regno)));
    }

  /* Compute where to start storing registers we need to save.  */
  start_offset = -crtl->args.pretend_args_size - UNITS_PER_WORD;
  offset = start_offset;

  /* Store all registers that need saving.  Registers are visited in
     descending order; the save addresses are held in four scratch
     registers used round-robin so that consecutive stores do not
     depend on one another.  */
  which_scratch = 0;
  for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
    if (need_to_save_reg (regno))
      {
	rtx r = reg_save_addr[which_scratch];
	int from_regno;
	int cfa_offset = frame_pointer_needed ? offset : total_size + offset;

	if (r == NULL_RTX)
	  {
	    /* First use of this scratch slot: compute the address.  */
	    rtx p = compute_frame_addr (offset, &next_scratch_regno);
	    r = gen_rtx_REG (Pmode, next_scratch_regno--);
	    reg_save_addr[which_scratch] = r;

	    emit_insn (gen_rtx_SET (VOIDmode, r, p));
	  }
	else
	  {
	    /* Advance to the next stack slot to store this
	       register.  */
	    int stride = ROUND_ROBIN_SIZE * -UNITS_PER_WORD;
	    rtx p = gen_rtx_PLUS (Pmode, r, GEN_INT (stride));
	    emit_insn (gen_rtx_SET (VOIDmode, r, p));
	  }

	/* Save this register to the stack (but use the old fp value
	   we copied aside if appropriate).  */
	from_regno =
	  (fp_copy_regno >= 0 && regno == HARD_FRAME_POINTER_REGNUM)
	  ? fp_copy_regno : regno;
	FRP (frame_emit_store (from_regno, regno, r, cfa, cfa_offset));

	offset -= UNITS_PER_WORD;
	which_scratch = (which_scratch + 1) % ROUND_ROBIN_SIZE;
      }

  /* If profiling, force that to happen after the frame is set up.  */
  if (crtl->profile)
    emit_insn (gen_blockage ());

  /* Load the PIC register if needed.  */
  if (flag_pic && crtl->uses_pic_offset_table)
    load_pic_register (false);
}
/* Implement the epilogue and sibcall_epilogue patterns.  SIBCALL_P is
   true for a sibcall_epilogue pattern, and false for an epilogue
   pattern.  Restores the callee-saved registers, pops the frame, and
   (for a normal epilogue) emits the return jump.  */
void
tilegx_expand_epilogue (bool sibcall_p)
{
  /* We round-robin through four scratch registers to hold temporary
     addresses for saving registers, to make instruction scheduling
     easier.  */
  rtx reg_save_addr[ROUND_ROBIN_SIZE] = {
    NULL_RTX, NULL_RTX, NULL_RTX, NULL_RTX
  };
  rtx last_insn, insn;
  unsigned int which_scratch;
  int offset, start_offset, regno;
  rtx cfa_restores = NULL_RTX;

  /* A register that holds a copy of the incoming fp.  */
  int fp_copy_regno = -1;

  /* Next scratch register number to hand out (postdecrementing).  */
  int next_scratch_regno = 29;

  int total_size = compute_total_frame_size ();

  /* Remember where we were, so we can mark everything emitted below
     as frame-related at the end.  */
  last_insn = get_last_insn ();

  /* Load lr first since we are going to need it first.  */
  insn = NULL;
  if (df_regs_ever_live_p (TILEGX_LINK_REGNUM))
    {
      insn = frame_emit_load (TILEGX_LINK_REGNUM,
			      compute_frame_addr (0, &next_scratch_regno),
			      &cfa_restores);
    }

  if (total_size == 0)
    {
      if (insn)
	{
	  RTX_FRAME_RELATED_P (insn) = 1;
	  REG_NOTES (insn) = cfa_restores;
	}
      /* Empty frame: nothing to pop, go straight to the return.  */
      goto done;
    }

  /* Compute where to start restoring registers.  */
  start_offset = -crtl->args.pretend_args_size - UNITS_PER_WORD;
  offset = start_offset;

  if (frame_pointer_needed)
    fp_copy_regno = next_scratch_regno--;

  /* Restore all callee-saved registers.  This mirrors the save loop
     in tilegx_expand_prologue, including the round-robin scratch
     address registers.  */
  which_scratch = 0;
  for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
    if (need_to_save_reg (regno))
      {
	rtx r = reg_save_addr[which_scratch];
	if (r == NULL_RTX)
	  {
	    r = compute_frame_addr (offset, &next_scratch_regno);
	    reg_save_addr[which_scratch] = r;
	  }
	else
	  {
	    /* Advance to the next stack slot to store this register.  */
	    int stride = ROUND_ROBIN_SIZE * -UNITS_PER_WORD;
	    rtx p = gen_rtx_PLUS (Pmode, r, GEN_INT (stride));
	    emit_insn (gen_rtx_SET (VOIDmode, r, p));
	  }

	/* The frame pointer is still live (it is our restore base),
	   so its saved value is loaded into a scratch first.  */
	if (fp_copy_regno >= 0 && regno == HARD_FRAME_POINTER_REGNUM)
	  frame_emit_load (fp_copy_regno, r, NULL);
	else
	  frame_emit_load (regno, r, &cfa_restores);

	offset -= UNITS_PER_WORD;
	which_scratch = (which_scratch + 1) % ROUND_ROBIN_SIZE;
      }

  if (!tilegx_current_function_is_leaf ())
    cfa_restores =
      alloc_reg_note (REG_CFA_RESTORE, stack_pointer_rtx, cfa_restores);

  /* Keep the scheduler from moving the restores past the sp pop.  */
  emit_insn (gen_blockage ());

  if (frame_pointer_needed)
    {
      /* Restore the old stack pointer by copying from the frame
	 pointer.  */
      if (TARGET_32BIT)
	{
	  insn = emit_insn (gen_sp_restore_32bit (stack_pointer_rtx,
						  hard_frame_pointer_rtx));
	}
      else
	{
	  insn = emit_insn (gen_sp_restore (stack_pointer_rtx,
					    hard_frame_pointer_rtx));
	}
      RTX_FRAME_RELATED_P (insn) = 1;
      REG_NOTES (insn) = cfa_restores;
      add_reg_note (insn, REG_CFA_DEF_CFA, stack_pointer_rtx);
    }
  else
    {
      insn = emit_sp_adjust (total_size, &next_scratch_regno, true,
			     cfa_restores);
    }

  if (crtl->calls_eh_return)
    {
      if (TARGET_32BIT)
	emit_insn (gen_sp_adjust_32bit (stack_pointer_rtx, stack_pointer_rtx,
					EH_RETURN_STACKADJ_RTX));
      else
	emit_insn (gen_sp_adjust (stack_pointer_rtx, stack_pointer_rtx,
				  EH_RETURN_STACKADJ_RTX));
    }

  /* Restore the old frame pointer.  */
  if (frame_pointer_needed)
    {
      insn = emit_move_insn (gen_lowpart (DImode, hard_frame_pointer_rtx),
			     gen_rtx_REG (DImode, fp_copy_regno));
      add_reg_note (insn, REG_CFA_RESTORE, hard_frame_pointer_rtx);
    }

  /* Mark the pic registers as live outside of the function.  */
  if (flag_pic)
    {
      emit_use (cfun->machine->text_label_rtx);
      emit_use (cfun->machine->got_rtx);
    }

done:
  if (!sibcall_p)
    {
      emit_jump_insn (gen__return ());
    }
  else
    {
      /* The sibcall itself returns; just keep lr live across it.  */
      emit_use (gen_rtx_REG (Pmode, TILEGX_LINK_REGNUM));
    }

  /* Mark all insns we just emitted as frame-related.  */
  for (; last_insn != NULL_RTX; last_insn = next_insn (last_insn))
    RTX_FRAME_RELATED_P (last_insn) = 1;
}

#undef ROUND_ROBIN_SIZE
4188 /* Implement INITIAL_ELIMINATION_OFFSET. */
4190 tilegx_initial_elimination_offset (int from, int to)
4192 int total_size = compute_total_frame_size ();
4194 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
4196 return (total_size - crtl->args.pretend_args_size
4197 - tilegx_saved_regs_size ());
4199 else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
4201 return -(crtl->args.pretend_args_size + tilegx_saved_regs_size ());
4203 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
4205 return STACK_POINTER_OFFSET + total_size;
4207 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
4209 return STACK_POINTER_OFFSET;
4211 else
4212 gcc_unreachable ();
4216 /* Return an RTX indicating where the return address to the calling
4217 function can be found. */
4219 tilegx_return_addr (int count, rtx frame ATTRIBUTE_UNUSED)
4221 if (count != 0)
4222 return const0_rtx;
4224 return get_hard_reg_initial_val (Pmode, TILEGX_LINK_REGNUM);
4228 /* Implement EH_RETURN_HANDLER_RTX. The MEM needs to be volatile to
4229 prevent it from being deleted. */
4231 tilegx_eh_return_handler_rtx (void)
4233 rtx tmp = gen_frame_mem (Pmode, hard_frame_pointer_rtx);
4234 MEM_VOLATILE_P (tmp) = true;
4235 return tmp;
4240 /* Registers */
4242 /* Implemnet TARGET_CONDITIONAL_REGISTER_USAGE. */
4243 static void
4244 tilegx_conditional_register_usage (void)
4246 global_regs[TILEGX_NETORDER_REGNUM] = 1;
4247 /* TILEGX_PIC_TEXT_LABEL_REGNUM is conditionally used. It is a
4248 member of fixed_regs, and therefore must be member of
4249 call_used_regs, but it is not a member of call_really_used_regs[]
4250 because it is not clobbered by a call. */
4251 if (TILEGX_PIC_TEXT_LABEL_REGNUM != INVALID_REGNUM)
4253 fixed_regs[TILEGX_PIC_TEXT_LABEL_REGNUM] = 1;
4254 call_used_regs[TILEGX_PIC_TEXT_LABEL_REGNUM] = 1;
4256 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
4258 fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
4259 call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
4264 /* Implement TARGET_FRAME_POINTER_REQUIRED. */
4265 static bool
4266 tilegx_frame_pointer_required (void)
4268 return crtl->calls_eh_return || cfun->calls_alloca;
4273 /* Scheduling and reorg */
4275 /* Return the length of INSN. LENGTH is the initial length computed
4276 by attributes in the machine-description file. This is where we
4277 account for bundles. */
4279 tilegx_adjust_insn_length (rtx insn, int length)
4281 enum machine_mode mode = GET_MODE (insn);
4283 /* A non-termininating instruction in a bundle has length 0. */
4284 if (mode == SImode)
4285 return 0;
4287 /* By default, there is not length adjustment. */
4288 return length;
/* Implement TARGET_SCHED_ISSUE_RATE.  TILE-Gx bundles hold up to
   three instructions, so the scheduler may issue three per cycle.  */
static int
tilegx_issue_rate (void)
{
  return 3;
}
4300 /* Return the rtx for the jump target. */
4301 static rtx
4302 get_jump_target (rtx branch)
4304 if (CALL_P (branch))
4306 rtx call;
4307 call = PATTERN (branch);
4309 if (GET_CODE (call) == PARALLEL)
4310 call = XVECEXP (call, 0, 0);
4312 if (GET_CODE (call) == SET)
4313 call = SET_SRC (call);
4315 if (GET_CODE (call) == CALL)
4316 return XEXP (XEXP (call, 0), 0);
4318 return 0;
4322 /* Implement TARGET_SCHED_ADJUST_COST. */
4323 static int
4324 tilegx_sched_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
4326 /* If we have a true dependence, INSN is a call, and DEP_INSN
4327 defines a register that is needed by the call (argument or stack
4328 pointer) , set its latency to 0 so that it can be bundled with
4329 the call. Explicitly check for and exclude the case when
4330 DEP_INSN defines the target of the jump. */
4331 if (CALL_P (insn) && REG_NOTE_KIND (link) == REG_DEP_TRUE)
4333 rtx target = get_jump_target (insn);
4334 if (!REG_P (target) || !set_of (target, dep_insn))
4335 return 0;
4338 return cost;
4342 /* Skip over irrelevant NOTEs and such and look for the next insn we
4343 would consider bundling. */
4344 static rtx
4345 next_insn_to_bundle (rtx r, rtx end)
4347 for (; r != end; r = NEXT_INSN (r))
4349 if (NONDEBUG_INSN_P (r)
4350 && GET_CODE (PATTERN (r)) != USE
4351 && GET_CODE (PATTERN (r)) != CLOBBER)
4352 return r;
4355 return NULL_RTX;
4359 /* Go through all insns, and use the information generated during
4360 scheduling to generate SEQUENCEs to represent bundles of
4361 instructions issued simultaneously. */
4362 static void
4363 tilegx_gen_bundles (void)
4365 basic_block bb;
4366 FOR_EACH_BB (bb)
4368 rtx insn, next;
4369 rtx end = NEXT_INSN (BB_END (bb));
4371 for (insn = next_insn_to_bundle (BB_HEAD (bb), end); insn; insn = next)
4373 next = next_insn_to_bundle (NEXT_INSN (insn), end);
4375 /* Never wrap {} around inline asm. */
4376 if (GET_CODE (PATTERN (insn)) != ASM_INPUT)
4378 if (next == NULL_RTX || GET_MODE (next) == TImode
4379 /* NOTE: The scheduler incorrectly believes a call
4380 insn can execute in the same cycle as the insn
4381 after the call. This is of course impossible.
4382 Really we need to fix the scheduler somehow, so
4383 the code after the call gets scheduled
4384 optimally. */
4385 || CALL_P (insn))
4387 /* Mark current insn as the end of a bundle. */
4388 PUT_MODE (insn, QImode);
4390 else
4392 /* Mark it as part of a bundle. */
4393 PUT_MODE (insn, SImode);
4401 /* Replace OLD_INSN with NEW_INSN. */
4402 static void
4403 replace_insns (rtx old_insn, rtx new_insns)
4405 if (new_insns)
4406 emit_insn_before (new_insns, old_insn);
4408 delete_insn (old_insn);
4412 /* Returns true if INSN is the first instruction of a pc-relative
4413 address compuatation. */
4414 static bool
4415 match_pcrel_step1 (rtx insn)
4417 rtx pattern = PATTERN (insn);
4418 rtx src;
4420 if (GET_CODE (pattern) != SET)
4421 return false;
4423 src = SET_SRC (pattern);
4425 return (GET_CODE (src) == CONST
4426 && GET_CODE (XEXP (src, 0)) == UNSPEC
4427 && XINT (XEXP (src, 0), 1) == UNSPEC_HW1_LAST_PCREL);
4431 /* Do the first replacement step in tilegx_fixup_pcrel_references. */
4432 static void
4433 replace_mov_pcrel_step1 (rtx insn)
4435 rtx pattern = PATTERN (insn);
4436 rtx unspec;
4437 rtx opnds[2];
4438 rtx new_insns;
4440 gcc_assert (GET_CODE (pattern) == SET);
4441 opnds[0] = SET_DEST (pattern);
4443 gcc_assert (GET_CODE (SET_SRC (pattern)) == CONST);
4445 unspec = XEXP (SET_SRC (pattern), 0);
4446 gcc_assert (GET_CODE (unspec) == UNSPEC);
4447 gcc_assert (XINT (unspec, 1) == UNSPEC_HW1_LAST_PCREL);
4448 opnds[1] = XVECEXP (unspec, 0, 0);
4450 /* We only need to replace SYMBOL_REFs, not LABEL_REFs. */
4451 if (GET_CODE (opnds[1]) != SYMBOL_REF)
4452 return;
4454 start_sequence ();
4456 if (flag_pic != 1)
4458 if (TARGET_32BIT)
4459 emit_insn (gen_mov_got32_step1_32bit (opnds[0], opnds[1]));
4460 else
4461 emit_insn (gen_mov_got32_step1 (opnds[0], opnds[1]));
4464 new_insns = get_insns ();
4465 end_sequence ();
4467 replace_insns (insn, new_insns);
4471 /* Returns true if INSN is the second instruction of a pc-relative
4472 address compuatation. */
4473 static bool
4474 match_pcrel_step2 (rtx insn)
4476 rtx unspec;
4477 rtx addr;
4479 if (TARGET_32BIT)
4481 if (recog_memoized (insn) != CODE_FOR_insn_addr_shl16insli_32bit)
4482 return false;
4484 else
4486 if (recog_memoized (insn) != CODE_FOR_insn_addr_shl16insli)
4487 return false;
4490 unspec = SET_SRC (PATTERN (insn));
4491 addr = XVECEXP (unspec, 0, 1);
4493 return (GET_CODE (addr) == CONST
4494 && GET_CODE (XEXP (addr, 0)) == UNSPEC
4495 && XINT (XEXP (addr, 0), 1) == UNSPEC_HW0_PCREL);
4499 /* Do the second replacement step in tilegx_fixup_pcrel_references. */
4500 static void
4501 replace_mov_pcrel_step2 (rtx insn)
4503 rtx pattern = PATTERN (insn);
4504 rtx unspec;
4505 rtx addr;
4506 rtx opnds[3];
4507 rtx new_insns;
4508 rtx got_rtx = tilegx_got_rtx ();
4510 gcc_assert (GET_CODE (pattern) == SET);
4511 opnds[0] = SET_DEST (pattern);
4513 unspec = SET_SRC (pattern);
4514 gcc_assert (GET_CODE (unspec) == UNSPEC);
4515 gcc_assert (XINT (unspec, 1) == UNSPEC_INSN_ADDR_SHL16INSLI);
4517 opnds[1] = XVECEXP (unspec, 0, 0);
4519 addr = XVECEXP (unspec, 0, 1);
4520 gcc_assert (GET_CODE (addr) == CONST);
4522 unspec = XEXP (addr, 0);
4523 gcc_assert (GET_CODE (unspec) == UNSPEC);
4524 gcc_assert (XINT (unspec, 1) == UNSPEC_HW0_PCREL);
4525 opnds[2] = XVECEXP (unspec, 0, 0);
4527 /* We only need to replace SYMBOL_REFs, not LABEL_REFs. */
4528 if (GET_CODE (opnds[2]) != SYMBOL_REF)
4529 return;
4531 start_sequence ();
4533 if (flag_pic == 1)
4535 if (TARGET_32BIT)
4536 emit_insn (gen_add_got16_32bit (opnds[0], got_rtx, opnds[2]));
4537 else
4538 emit_insn (gen_add_got16 (opnds[0], got_rtx, opnds[2]));
4540 else
4542 if (TARGET_32BIT)
4543 emit_insn (gen_mov_got32_step2_32bit
4544 (opnds[0], opnds[1], opnds[2]));
4545 else
4546 emit_insn (gen_mov_got32_step2 (opnds[0], opnds[1], opnds[2]));
4549 new_insns = get_insns ();
4550 end_sequence ();
4552 replace_insns (insn, new_insns);
4556 /* Do the third replacement step in tilegx_fixup_pcrel_references. */
4557 static void
4558 replace_mov_pcrel_step3 (rtx insn)
4560 rtx pattern = PATTERN (insn);
4561 rtx unspec;
4562 rtx opnds[4];
4563 rtx new_insns;
4564 rtx got_rtx = tilegx_got_rtx ();
4565 rtx text_label_rtx = tilegx_text_label_rtx ();
4567 gcc_assert (GET_CODE (pattern) == SET);
4568 opnds[0] = SET_DEST (pattern);
4570 unspec = SET_SRC (pattern);
4571 gcc_assert (GET_CODE (unspec) == UNSPEC);
4572 gcc_assert (XINT (unspec, 1) == UNSPEC_MOV_PCREL_STEP3);
4574 opnds[1] = got_rtx;
4576 if (XVECEXP (unspec, 0, 0) == text_label_rtx)
4577 opnds[2] = XVECEXP (unspec, 0, 1);
4578 else
4580 gcc_assert (XVECEXP (unspec, 0, 1) == text_label_rtx);
4581 opnds[2] = XVECEXP (unspec, 0, 0);
4584 opnds[3] = XVECEXP (unspec, 0, 2);
4586 /* We only need to replace SYMBOL_REFs, not LABEL_REFs. */
4587 if (GET_CODE (opnds[3]) != SYMBOL_REF)
4588 return;
4590 start_sequence ();
4592 if (flag_pic == 1)
4594 emit_move_insn (opnds[0], gen_const_mem (Pmode, opnds[2]));
4596 else
4598 emit_move_insn (opnds[0], gen_rtx_PLUS (Pmode, opnds[1], opnds[2]));
4599 emit_move_insn (opnds[0], gen_const_mem (Pmode, opnds[0]));
4602 new_insns = get_insns ();
4603 end_sequence ();
4605 replace_insns (insn, new_insns);
4609 /* We generate PC relative SYMBOL_REFs as an optimization, to avoid
4610 going through the GOT when the symbol is local to the compilation
4611 unit. But such a symbol requires that the common text_label that
4612 we generate at the beginning of the function be in the same section
4613 as the reference to the SYMBOL_REF. This may not be true if we
4614 generate hot/cold sections. This function looks for such cases and
4615 replaces such references with the longer sequence going through the
4616 GOT.
4618 We expect following instruction sequence:
4619 moveli tmp1, hw1_last(x-.L_PICLNK) [1]
4620 shl16insli tmp2, tmp1, hw0(x-.L_PICLNK) [2]
4621 add<x> tmp3, txt_label_reg, tmp2 [3]
4623 If we're compiling -fpic, we replace with the following sequence
4624 (the numbers in brackets match the instructions they're replacing
4625 above).
4627 add<x>li tmp2, got_reg, hw0_last_got(x) [2]
4628 ld<4> tmp3, tmp2 [3]
4630 If we're compiling -fPIC, we replace the first instruction with:
4632 moveli tmp1, hw1_last_got(x) [1]
4633 shl16insli tmp2, tmp1, hw0_got(x) [2]
4634 add<x> tmp3, got_reg, tmp2 [3]
4635 ld<4> tmp3, tmp3 [3]
4637 Note that we're careful to disturb the instruction sequence as
4638 little as possible, since it's very late in the compilation
4639 process. */
4640 static void
4641 tilegx_fixup_pcrel_references (void)
4643 rtx insn, next_insn;
4644 bool same_section_as_entry = true;
4646 for (insn = get_insns (); insn; insn = next_insn)
4648 next_insn = NEXT_INSN (insn);
4650 if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
4652 same_section_as_entry = !same_section_as_entry;
4653 continue;
4656 if (same_section_as_entry)
4657 continue;
4659 if (!(INSN_P (insn)
4660 && GET_CODE (PATTERN (insn)) != USE
4661 && GET_CODE (PATTERN (insn)) != CLOBBER))
4662 continue;
4664 if (TARGET_32BIT)
4666 if (match_pcrel_step1 (insn))
4667 replace_mov_pcrel_step1 (insn);
4668 else if (match_pcrel_step2 (insn))
4669 replace_mov_pcrel_step2 (insn);
4670 else if (recog_memoized (insn) == CODE_FOR_mov_pcrel_step3_32bit)
4671 replace_mov_pcrel_step3 (insn);
4673 else
4675 if (match_pcrel_step1 (insn))
4676 replace_mov_pcrel_step1 (insn);
4677 else if (match_pcrel_step2 (insn))
4678 replace_mov_pcrel_step2 (insn);
4679 else if (recog_memoized (insn) == CODE_FOR_mov_pcrel_step3)
4680 replace_mov_pcrel_step3 (insn);
4686 /* Ensure that no var tracking notes are emitted in the middle of a
4687 three-instruction bundle. */
4688 static void
4689 reorder_var_tracking_notes (void)
4691 basic_block bb;
4692 FOR_EACH_BB (bb)
4694 rtx insn, next;
/* Var-location notes pulled out of the current bundle, chained
   LIFO through their PREV_INSN field; re-emitted after the
   bundle's terminating insn.  */
4695 rtx queue = NULL_RTX;
/* True while scanning insns inside an open bundle (mode SImode,
   as marked by tilegx_gen_bundles).  */
4696 bool in_bundle = false;
4698 for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = next)
4700 next = NEXT_INSN (insn);
4702 if (INSN_P (insn))
4704 /* Emit queued up notes at the last instruction of a
4705 bundle. */
4706 if (GET_MODE (insn) == QImode)
/* Splice each queued note back into the chain immediately
   after INSN, i.e. after the bundle terminator; the LIFO
   queue order restores the original note order.  */
4708 while (queue)
4710 rtx next_queue = PREV_INSN (queue);
4711 PREV_INSN (NEXT_INSN (insn)) = queue;
4712 NEXT_INSN (queue) = NEXT_INSN (insn);
4713 NEXT_INSN (insn) = queue;
4714 PREV_INSN (queue) = insn;
4715 queue = next_queue;
4717 in_bundle = false;
4719 else if (GET_MODE (insn) == SImode)
4720 in_bundle = true;
4722 else if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION)
4724 if (in_bundle)
/* Unlink the note from the insn chain...  */
4726 rtx prev = PREV_INSN (insn);
4727 PREV_INSN (next) = prev;
4728 NEXT_INSN (prev) = next;
/* ...and push it onto the queue via its PREV_INSN field.  */
4730 PREV_INSN (insn) = queue;
4731 queue = insn;
4739 /* Perform machine dependent operations on the rtl chain INSNS. */
4740 static void
4741 tilegx_reorg (void)
4743 /* We are freeing block_for_insn in the toplev to keep compatibility
4744 with old MDEP_REORGS that are not CFG based. Recompute it
4745 now. */
4746 compute_bb_for_insn ();
/* Hot/cold partitioning can separate pc-relative references from
   the text label they are computed against; rewrite those to go
   through the GOT instead.  */
4748 if (flag_reorder_blocks_and_partition)
4750 tilegx_fixup_pcrel_references ();
/* Run the second scheduling pass here so its TImode start-of-cycle
   marks can be turned into bundle marks immediately afterwards.  */
4753 if (flag_schedule_insns_after_reload)
4755 split_all_insns ();
4757 timevar_push (TV_SCHED2);
4758 schedule_insns ();
4759 timevar_pop (TV_SCHED2);
4761 /* Examine the schedule to group into bundles. */
4762 tilegx_gen_bundles ();
4765 df_analyze ();
4767 if (flag_var_tracking)
4769 timevar_push (TV_VAR_TRACKING);
4770 variable_tracking_main ();
/* Keep the var-location notes just emitted out of the middle of
   the bundles created above.  */
4771 reorder_var_tracking_notes ();
4772 timevar_pop (TV_VAR_TRACKING);
4775 df_finish_pass (false);
4780 /* Assembly */
4782 /* Select a format to encode pointers in exception handling data.
4783 CODE is 0 for data, 1 for code labels, 2 for function pointers.
4784 GLOBAL is true if the symbol may be affected by dynamic
4785 relocations. */
4787 tilegx_asm_preferred_eh_data_format (int code ATTRIBUTE_UNUSED, int global)
4789 if (flag_pic)
4791 int type = TARGET_32BIT ? DW_EH_PE_sdata4 : DW_EH_PE_sdata8;
4792 return (global ? DW_EH_PE_indirect : 0) | DW_EH_PE_pcrel | type;
4794 else
4795 return DW_EH_PE_absptr;
4799 /* Implement TARGET_ASM_OUTPUT_MI_THUNK. */
4800 static void
4801 tilegx_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
4802 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
4803 tree function)
4805 rtx this_rtx, insn, funexp, addend;
4807 /* Pretend to be a post-reload pass while generating rtl. */
4808 reload_completed = 1;
4810 /* Mark the end of the (empty) prologue. */
4811 emit_note (NOTE_INSN_PROLOGUE_END);
4813 /* Find the "this" pointer. If the function returns a structure,
4814 the structure return pointer is in $1. */
4815 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
4816 this_rtx = gen_rtx_REG (Pmode, 1);
4817 else
4818 this_rtx = gen_rtx_REG (Pmode, 0);
4820 /* Add DELTA to THIS_RTX. */
4821 if (!(delta >= -32868 && delta <= 32767))
4823 addend = gen_rtx_REG (Pmode, 29);
4824 emit_move_insn (addend, GEN_INT (delta));
4826 else
4827 addend = GEN_INT (delta);
4829 if (TARGET_32BIT)
4830 emit_insn (gen_addsi3 (this_rtx, this_rtx, addend));
4831 else
4832 emit_insn (gen_adddi3 (this_rtx, this_rtx, addend));
4834 /* If needed, add *(*THIS_RTX + VCALL_OFFSET) to THIS_RTX. */
4835 if (vcall_offset)
4837 rtx tmp;
4839 tmp = gen_rtx_REG (Pmode, 29);
4840 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this_rtx));
4842 if (!(vcall_offset >= -32868 && vcall_offset <= 32767))
4844 addend = gen_rtx_REG (Pmode, 28);
4845 emit_move_insn (addend, GEN_INT (vcall_offset));
4847 else
4848 addend = GEN_INT (vcall_offset);
4850 if (TARGET_32BIT)
4851 emit_insn (gen_addsi3 (tmp, tmp, addend));
4852 else
4853 emit_insn (gen_adddi3 (tmp, tmp, addend));
4855 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
4857 if (TARGET_32BIT)
4858 emit_insn (gen_addsi3 (this_rtx, this_rtx, tmp));
4859 else
4860 emit_insn (gen_adddi3 (this_rtx, this_rtx, tmp));
4863 /* Generate a tail call to the target function. */
4864 if (!TREE_USED (function))
4866 assemble_external (function);
4867 TREE_USED (function) = 1;
4869 funexp = XEXP (DECL_RTL (function), 0);
4870 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
4871 insn = emit_call_insn (gen_sibcall (funexp, const0_rtx));
4872 SIBLING_CALL_P (insn) = 1;
4874 /* Run just enough of rest_of_compilation to get the insns emitted.
4875 There's not really enough bulk here to make other passes such as
4876 instruction scheduling worth while. Note that use_thunk calls
4877 assemble_start_function and assemble_end_function.
4879 We don't currently bundle, but the instruciton sequence is all
4880 serial except for the tail call, so we're only wasting one cycle.
4882 insn = get_insns ();
4883 shorten_branches (insn);
4884 final_start_function (insn, file, 1);
4885 final (insn, file, 1);
4886 final_end_function ();
4888 /* Stop pretending to be a post-reload pass. */
4889 reload_completed = 0;
4893 /* Implement TARGET_ASM_TRAMPOLINE_TEMPLATE. */
/* The emitted template computes its own address with lnk, skips
   forward to the two trailing pointer slots, loads the target
   address and static chain value from them (into r11 and r10), and
   jumps to the target.  tilegx_trampoline_init fills in the two
   zero-initialized slots at run time.  */
4894 static void
4895 tilegx_asm_trampoline_template (FILE *file)
4897 int ptr_mode_size = GET_MODE_SIZE (ptr_mode);
4898 if (TARGET_32BIT)
4900 fprintf (file, "\tlnk r10\n");
4901 fprintf (file, "\taddxi r10, r10, 32\n");
4902 fprintf (file, "\tld4s_add r11, r10, %d\n", ptr_mode_size);
4903 fprintf (file, "\tld4s r10, r10\n");
4904 fprintf (file, "\tjr r11\n");
4905 fprintf (file, "\t.word 0 # <function address>\n");
4906 fprintf (file, "\t.word 0 # <static chain value>\n");
4908 else
4910 fprintf (file, "\tlnk r10\n");
4911 fprintf (file, "\taddi r10, r10, 32\n");
4912 fprintf (file, "\tld_add r11, r10, %d\n", ptr_mode_size);
4913 fprintf (file, "\tld r10, r10\n");
4914 fprintf (file, "\tjr r11\n");
4915 fprintf (file, "\t.quad 0 # <function address>\n");
4916 fprintf (file, "\t.quad 0 # <static chain value>\n");
4921 /* Implement TARGET_TRAMPOLINE_INIT. */
4922 static void
4923 tilegx_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
4925 rtx fnaddr, chaddr;
4926 rtx mem;
4927 rtx begin_addr, end_addr;
4928 int ptr_mode_size = GET_MODE_SIZE (ptr_mode);
4930 fnaddr = copy_to_reg (XEXP (DECL_RTL (fndecl), 0));
4931 chaddr = copy_to_reg (static_chain);
4933 emit_block_move (m_tramp, assemble_trampoline_template (),
4934 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
4936 mem = adjust_address (m_tramp, ptr_mode,
4937 TRAMPOLINE_SIZE - 2 * ptr_mode_size);
4938 emit_move_insn (mem, fnaddr);
4939 mem = adjust_address (m_tramp, ptr_mode,
4940 TRAMPOLINE_SIZE - ptr_mode_size);
4941 emit_move_insn (mem, chaddr);
4943 /* Get pointers to the beginning and end of the code block. */
4944 begin_addr = force_reg (Pmode, XEXP (m_tramp, 0));
4945 end_addr = force_reg (Pmode, plus_constant (Pmode, XEXP (m_tramp, 0),
4946 TRAMPOLINE_SIZE));
4948 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__clear_cache"),
4949 LCT_NORMAL, VOIDmode, 2, begin_addr, Pmode,
4950 end_addr, Pmode);
4954 /* Implement TARGET_PRINT_OPERAND. */
/* Operand-code summary (from the cases below):
   'c'/'C'  condition suffix "z"/"nz" for EQ/NE; 'C' is the negation.
   'd'/'D'  condition suffix "eq"/"ne" for EQ/NE; 'D' is the negation.
   'H'      relocation selector (hw0, hw1_last_got, ...) for an
	    UNSPEC-wrapped address; falls through to 'h' otherwise.
   'h'/'j'  low 16 / low 8 bits of a constant.
   'I'/'i'  auto-inc memory operand: first / second address fragment.
   'P'      a constant plus one.
   'm'/'M'  bfextu-style bit range of a constant ('m' inverted).
   'N'      network register name from a constant.
   'p'      symbol, wrapped in plt(...) for PIC non-local symbols.
   'r'      register operand, printing constant zero as "zero".
   0        default: register name, memory address, or constant.  */
4955 static void
4956 tilegx_print_operand (FILE *file, rtx x, int code)
4958 switch (code)
4960 case 'c':
4961 /* Print the compare operator opcode for conditional moves. */
4962 switch (GET_CODE (x))
4964 case EQ:
4965 fputs ("z", file);
4966 break;
4967 case NE:
4968 fputs ("nz", file);
4969 break;
4970 default:
4971 output_operand_lossage ("invalid %%c operand");
4973 return;
4975 case 'C':
4976 /* Print the compare operator opcode for conditional moves. */
4977 switch (GET_CODE (x))
4979 case EQ:
4980 fputs ("nz", file);
4981 break;
4982 case NE:
4983 fputs ("z", file);
4984 break;
4985 default:
4986 output_operand_lossage ("invalid %%C operand");
4988 return;
4990 case 'd':
4992 /* Print the compare operator opcode for conditional moves. */
4993 switch (GET_CODE (x))
4995 case EQ:
4996 fputs ("eq", file);
4997 break;
4998 case NE:
4999 fputs ("ne", file);
5000 break;
5001 default:
5002 output_operand_lossage ("invalid %%d operand");
5004 return;
5007 case 'D':
5009 /* Print the compare operator opcode for conditional moves. */
5010 switch (GET_CODE (x))
5012 case EQ:
5013 fputs ("ne", file);
5014 break;
5015 case NE:
5016 fputs ("eq", file);
5017 break;
5018 default:
5019 output_operand_lossage ("invalid %%D operand");
5021 return;
5024 case 'H':
/* Map the UNSPEC code to its assembler relocation selector and
   print "selector(addr)"; pc-relative forms print the label the
   address is relative to as well.  */
5026 if (GET_CODE (x) == CONST
5027 && GET_CODE (XEXP (x, 0)) == UNSPEC)
5029 rtx addr = XVECEXP (XEXP (x, 0), 0, 0);
5030 int unspec = XINT (XEXP (x, 0), 1);
5031 const char *opstr = NULL;
5032 switch (unspec)
5034 case UNSPEC_HW0:
5035 case UNSPEC_HW0_PCREL:
5036 opstr = "hw0";
5037 break;
5038 case UNSPEC_HW1:
5039 case UNSPEC_HW1_PCREL:
5040 opstr = "hw1";
5041 break;
5042 case UNSPEC_HW2:
5043 opstr = "hw2";
5044 break;
5045 case UNSPEC_HW3:
5046 opstr = "hw3";
5047 break;
5048 case UNSPEC_HW0_LAST:
5049 opstr = "hw0_last";
5050 break;
5051 case UNSPEC_HW1_LAST:
5052 case UNSPEC_HW1_LAST_PCREL:
5053 opstr = "hw1_last";
5054 break;
5055 case UNSPEC_HW2_LAST:
5056 case UNSPEC_HW2_LAST_PCREL:
5057 opstr = "hw2_last";
5058 break;
5059 case UNSPEC_HW0_GOT:
5060 opstr = "hw0_got";
5061 break;
5062 case UNSPEC_HW0_LAST_GOT:
5063 opstr = "hw0_last_got";
5064 break;
5065 case UNSPEC_HW1_LAST_GOT:
5066 opstr = "hw1_last_got";
5067 break;
5068 case UNSPEC_HW0_TLS_GD:
5069 opstr = "hw0_tls_gd";
5070 break;
5071 case UNSPEC_HW1_LAST_TLS_GD:
5072 opstr = "hw1_last_tls_gd";
5073 break;
5074 case UNSPEC_HW0_TLS_IE:
5075 opstr = "hw0_tls_ie";
5076 break;
5077 case UNSPEC_HW1_LAST_TLS_IE:
5078 opstr = "hw1_last_tls_ie";
5079 break;
5080 case UNSPEC_HW0_TLS_LE:
5081 opstr = "hw0_tls_le";
5082 break;
5083 case UNSPEC_HW1_LAST_TLS_LE:
5084 opstr = "hw1_last_tls_le";
5085 break;
5086 case UNSPEC_HW0_PLT_PCREL:
5087 opstr = "hw0_plt";
5088 break;
5089 case UNSPEC_HW1_PLT_PCREL:
5090 opstr = "hw1_plt";
5091 break;
5092 case UNSPEC_HW1_LAST_PLT_PCREL:
5093 opstr = "hw1_last_plt";
5094 break;
5095 case UNSPEC_HW2_LAST_PLT_PCREL:
5096 opstr = "hw2_last_plt";
5097 break;
5098 default:
5099 output_operand_lossage ("invalid %%H specifier");
5102 fputs (opstr, file);
5103 fputc ('(', file);
5104 output_addr_const (file, addr);
/* PC-relative selectors print "addr - label", where the label is
   the unspec's second operand.  */
5106 if (unspec == UNSPEC_HW0_PCREL
5107 || unspec == UNSPEC_HW1_PCREL
5108 || unspec == UNSPEC_HW1_LAST_PCREL
5109 || unspec == UNSPEC_HW2_LAST_PCREL
5110 || unspec == UNSPEC_HW0_PLT_PCREL
5111 || unspec == UNSPEC_HW1_PLT_PCREL
5112 || unspec == UNSPEC_HW1_LAST_PLT_PCREL
5113 || unspec == UNSPEC_HW2_LAST_PLT_PCREL)
5115 rtx addr2 = XVECEXP (XEXP (x, 0), 0, 1);
5116 fputs (" - " , file);
5117 output_addr_const (file, addr2);
5120 fputc (')', file);
5121 return;
5123 else if (symbolic_operand (x, VOIDmode))
5125 output_addr_const (file, x);
5126 return;
5129 /* FALLTHRU */
5131 case 'h':
5133 /* Print the low 16 bits of a constant. */
5134 HOST_WIDE_INT i;
5135 if (CONST_INT_P (x))
5136 i = INTVAL (x);
5137 else if (GET_CODE (x) == CONST_DOUBLE)
5138 i = CONST_DOUBLE_LOW (x);
5139 else
5141 output_operand_lossage ("invalid %%h operand");
5142 return;
5144 i = trunc_int_for_mode (i, HImode);
5145 fprintf (file, HOST_WIDE_INT_PRINT_DEC, i);
5146 return;
5149 case 'I':
5150 /* Print an auto-inc memory operand. */
5151 if (!MEM_P (x))
5153 output_operand_lossage ("invalid %%I operand");
5154 return;
/* Hand the operand's mode and "first fragment" flag to
   tilegx_print_operand_address via the file-scope state.  */
5157 output_memory_reference_mode = GET_MODE (x);
5158 output_memory_autoinc_first = true;
5159 output_address (XEXP (x, 0));
5160 output_memory_reference_mode = VOIDmode;
5161 return;
5163 case 'i':
5164 /* Print an auto-inc memory operand. */
5165 if (!MEM_P (x))
5167 output_operand_lossage ("invalid %%i operand");
5168 return;
5171 output_memory_reference_mode = GET_MODE (x);
5172 output_memory_autoinc_first = false;
5173 output_address (XEXP (x, 0));
5174 output_memory_reference_mode = VOIDmode;
5175 return;
5177 case 'j':
5179 /* Print the low 8 bits of a constant. */
5180 HOST_WIDE_INT i;
5181 if (CONST_INT_P (x))
5182 i = INTVAL (x);
5183 else if (GET_CODE (x) == CONST_DOUBLE)
5184 i = CONST_DOUBLE_LOW (x);
5185 else if (GET_CODE (x) == CONST_VECTOR
5186 && CONST_INT_P (CONST_VECTOR_ELT (x, 0)))
5187 i = INTVAL (CONST_VECTOR_ELT (x, 0));
5188 else
5190 output_operand_lossage ("invalid %%j operand");
5191 return;
5193 i = trunc_int_for_mode (i, QImode);
5194 fprintf (file, HOST_WIDE_INT_PRINT_DEC, i);
5195 return;
5198 case 'P':
5200 /* Print a constant plus one. */
5201 if (!CONST_INT_P (x))
5203 output_operand_lossage ("invalid %%P operand");
5204 return;
5206 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + 1);
5207 return;
5210 case 'm':
5211 case 'M':
5213 /* Print a bfextu-style bit range. */
5214 int first_bit, last_bit;
5215 HOST_WIDE_INT flip = (code == 'm') ? ~0 : 0;
5217 if (!CONST_INT_P (x)
5218 || !tilegx_bitfield_operand_p (INTVAL (x) ^ flip,
5219 &first_bit, &last_bit))
5221 output_operand_lossage ("invalid %%%c operand", code);
5222 return;
5225 fprintf (file, "%d, %d", first_bit, last_bit);
5226 return;
5229 case 'N':
5231 const char *reg = NULL;
5233 /* Print a network register. */
5234 if (!CONST_INT_P (x))
5236 output_operand_lossage ("invalid %%N operand");
5237 return;
5240 switch (INTVAL (x))
5242 case TILEGX_NETREG_IDN0: reg = "idn0"; break;
5243 case TILEGX_NETREG_IDN1: reg = "idn1"; break;
5244 case TILEGX_NETREG_UDN0: reg = "udn0"; break;
5245 case TILEGX_NETREG_UDN1: reg = "udn1"; break;
5246 case TILEGX_NETREG_UDN2: reg = "udn2"; break;
5247 case TILEGX_NETREG_UDN3: reg = "udn3"; break;
5248 default:
5249 gcc_unreachable ();
/* NOTE(review): non-literal format string.  REG never contains a
   '%' here, so this is harmless, but fputs (reg, file) would be
   the safer idiom.  */
5252 fprintf (file, reg);
5253 return;
5256 case 'p':
5257 if (GET_CODE (x) == SYMBOL_REF)
5259 if (flag_pic && !SYMBOL_REF_LOCAL_P (x))
5260 fprintf (file, "plt(");
5261 output_addr_const (file, x);
5262 if (flag_pic && !SYMBOL_REF_LOCAL_P (x))
5263 fprintf (file, ")");
5265 else
5266 output_addr_const (file, x);
5267 return;
5269 case 'r':
5270 /* In this case we need a register. Use 'zero' if the operand
5271 is const0_rtx. */
5272 if (x == const0_rtx
5273 || (GET_MODE (x) != VOIDmode && x == CONST0_RTX (GET_MODE (x))))
5275 fputs ("zero", file);
5276 return;
5278 else if (!REG_P (x))
5280 output_operand_lossage ("invalid operand for 'r' specifier");
5281 return;
5283 /* FALLTHRU */
5285 case 0:
5286 if (REG_P (x))
5288 fprintf (file, "%s", reg_names[REGNO (x)]);
5289 return;
5291 else if (MEM_P (x))
5293 output_memory_reference_mode = VOIDmode;
5294 output_address (XEXP (x, 0));
5295 return;
5297 else
5299 output_addr_const (file, x);
5300 return;
5304 debug_rtx (x);
5305 output_operand_lossage ("unable to print out operand yet; code == %d (%c)",
5306 code, code);
5310 /* Implement TARGET_PRINT_OPERAND_ADDRESS. */
5311 static void
5312 tilegx_print_operand_address (FILE *file, rtx addr)
5314 if (GET_CODE (addr) == POST_DEC
5315 || GET_CODE (addr) == POST_INC)
5317 int offset = GET_MODE_SIZE (output_memory_reference_mode);
5319 gcc_assert (output_memory_reference_mode != VOIDmode);
5321 if (output_memory_autoinc_first)
5322 fprintf (file, "%s", reg_names[REGNO (XEXP (addr, 0))]);
5323 else
5324 fprintf (file, "%d",
5325 GET_CODE (addr) == POST_DEC ? -offset : offset);
5327 else if (GET_CODE (addr) == POST_MODIFY)
5329 gcc_assert (output_memory_reference_mode != VOIDmode);
5331 gcc_assert (GET_CODE (XEXP (addr, 1)) == PLUS);
5333 if (output_memory_autoinc_first)
5334 fprintf (file, "%s", reg_names[REGNO (XEXP (addr, 0))]);
5335 else
5336 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5337 INTVAL (XEXP (XEXP (addr, 1), 1)));
5339 else
5340 tilegx_print_operand (file, addr, 'r');
5344 /* Machine mode of current insn, for determining curly brace
5345 placement. */
/* Set by tilegx_final_prescan_insn and read by
   tilegx_asm_output_opcode: SImode means the insn is inside a
   bundle, QImode means it ends one (see tilegx_gen_bundles).  */
5346 static enum machine_mode insn_mode;
5349 /* Implement FINAL_PRESCAN_INSN. This is used to emit bundles. */
5350 void
5351 tilegx_final_prescan_insn (rtx insn)
5353 /* Record this for tilegx_asm_output_opcode to examine. */
5354 insn_mode = GET_MODE (insn);
5358 /* While emitting asm, are we currently inside '{' for a bundle? */
/* Toggled by tilegx_asm_output_opcode; tilegx_function_profiler also
   resets it after closing any open bundle.  */
5359 static bool tilegx_in_bundle = false;
5361 /* Implement ASM_OUTPUT_OPCODE. Prepend/append curly braces as
5362 appropriate given the bundling information recorded by
5363 tilegx_gen_bundles. */
5364 const char *
5365 tilegx_asm_output_opcode (FILE *stream, const char *code)
5367 bool pseudo = !strcmp (code, "pseudo");
5369 if (!tilegx_in_bundle && insn_mode == SImode)
5371 /* Start a new bundle. */
5372 fprintf (stream, "{\n\t");
5373 tilegx_in_bundle = true;
5376 if (tilegx_in_bundle && insn_mode == QImode)
5378 /* Close an existing bundle. */
5379 static char buf[100];
5381 gcc_assert (strlen (code) + 3 + 1 < sizeof (buf));
5383 strcpy (buf, pseudo ? "" : code);
5384 strcat (buf, "\n\t}");
5385 tilegx_in_bundle = false;
5387 return buf;
5389 else
5391 return pseudo ? "" : code;
5396 /* Output assembler code to FILE to increment profiler label # LABELNO
5397 for profiling a function entry. */
5398 void
5399 tilegx_function_profiler (FILE *file, int labelno ATTRIBUTE_UNUSED)
5401 if (tilegx_in_bundle)
5403 fprintf (file, "\t}\n");
5406 if (flag_pic)
5408 fprintf (file,
5409 "\t{\n"
5410 "\tmove\tr10, lr\n"
5411 "\tjal\tplt(%s)\n"
5412 "\t}\n", MCOUNT_NAME);
5414 else
5416 fprintf (file,
5417 "\t{\n"
5418 "\tmove\tr10, lr\n"
5419 "\tjal\t%s\n"
5420 "\t}\n", MCOUNT_NAME);
5423 tilegx_in_bundle = false;
5427 /* Implement TARGET_ASM_FILE_END. */
5428 static void
5429 tilegx_file_end (void)
5431 if (NEED_INDICATE_EXEC_STACK)
5432 file_end_indicate_exec_stack ();
/* Target hook table.  Each #undef/#define pair below routes a generic
   target hook to its TILE-Gx implementation defined earlier in this
   file (or to a suitable generic default).  */

/* TLS and option handling.  */
#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS HAVE_AS_TLS

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE tilegx_option_override

/* Mode support queries.  */
#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P tilegx_scalar_mode_supported_p

#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P tilegx_vector_mode_supported_p

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM tilegx_cannot_force_const_mem

/* Calling convention: argument passing and return values.  */
#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL tilegx_function_ok_for_sibcall

#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE tilegx_pass_by_reference

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY tilegx_return_in_memory

#undef TARGET_MODE_REP_EXTENDED
#define TARGET_MODE_REP_EXTENDED tilegx_mode_rep_extended

#undef TARGET_FUNCTION_ARG_BOUNDARY
#define TARGET_FUNCTION_ARG_BOUNDARY tilegx_function_arg_boundary

#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG tilegx_function_arg

#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE tilegx_function_arg_advance

#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE tilegx_function_value

#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE tilegx_libcall_value

#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P tilegx_function_value_regno_p

#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote

#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_false

/* Variadic argument (va_list) handling.  */
#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST tilegx_build_builtin_va_list

#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START tilegx_va_start

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS tilegx_setup_incoming_varargs

#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR tilegx_gimplify_va_arg_expr

/* Costs and libfuncs.  */
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS tilegx_rtx_costs

#undef TARGET_SHIFT_TRUNCATION_MASK
#define TARGET_SHIFT_TRUNCATION_MASK tilegx_shift_truncation_mask

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS tilegx_init_libfuncs

/* Limit to what we can reach in one addli. */
#undef TARGET_MIN_ANCHOR_OFFSET
#define TARGET_MIN_ANCHOR_OFFSET -32768
#undef TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET 32767

/* Address and constant legitimacy.  */
#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P tilegx_legitimate_constant_p

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P tilegx_legitimate_address_p

#undef TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS tilegx_legitimize_address

#undef TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS tilegx_delegitimize_address

/* Machine-specific builtins.  */
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS tilegx_init_builtins

#undef TARGET_BUILTIN_DECL
#define TARGET_BUILTIN_DECL tilegx_builtin_decl

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN tilegx_expand_builtin

/* Register usage and scheduling.  */
#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE tilegx_conditional_register_usage

#undef TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED tilegx_frame_pointer_required

#undef TARGET_DELAY_SCHED2
#define TARGET_DELAY_SCHED2 true

#undef TARGET_DELAY_VARTRACK
#define TARGET_DELAY_VARTRACK true

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE tilegx_issue_rate

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST tilegx_sched_adjust_cost

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG tilegx_reorg

/* Thunks and trampolines.  */
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK \
  hook_bool_const_tree_hwi_hwi_const_tree_true

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK tilegx_output_mi_thunk

#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE tilegx_asm_trampoline_template

#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT tilegx_trampoline_init

/* Assembler output.  */
#undef TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND tilegx_print_operand

#undef TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS tilegx_print_operand_address

#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END tilegx_file_end

#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"

/* Instantiate the hook table defined by the macros above.  */
struct gcc_target targetm = TARGET_INITIALIZER;

/* Garbage-collector roots for this file (e.g. g_got_symbol).  */
#include "gt-tilegx.h"