/* Subroutines used for code generation on the Tilera TILE-Gx.
   Copyright (C) 2011-2014 Free Software Foundation, Inc.
   Contributed by Walter Lee (walt@tilera.com)

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "rtl.h"
26 #include "regs.h"
27 #include "insn-config.h"
28 #include "output.h"
29 #include "insn-attr.h"
30 #include "recog.h"
31 #include "expr.h"
32 #include "langhooks.h"
33 #include "optabs.h"
34 #include "sched-int.h"
35 #include "tm_p.h"
36 #include "tm-constrs.h"
37 #include "target.h"
38 #include "target-def.h"
39 #include "function.h"
40 #include "dwarf2.h"
41 #include "timevar.h"
42 #include "tree.h"
43 #include "hash-table.h"
44 #include "vec.h"
45 #include "ggc.h"
46 #include "basic-block.h"
47 #include "tree-ssa-alias.h"
48 #include "internal-fn.h"
49 #include "gimple-fold.h"
50 #include "tree-eh.h"
51 #include "gimple-expr.h"
52 #include "is-a.h"
53 #include "gimple.h"
54 #include "stringpool.h"
55 #include "stor-layout.h"
56 #include "varasm.h"
57 #include "calls.h"
58 #include "gimplify.h"
59 #include "cfgloop.h"
60 #include "tilegx-builtins.h"
61 #include "tilegx-multiply.h"
62 #include "diagnostic.h"
63 #include "builtins.h"
/* SYMBOL_REF for GOT.  */
static GTY(()) rtx g_got_symbol = NULL;

/* In case of a POST_INC or POST_DEC memory reference, we must report
   the mode of the memory reference from TARGET_PRINT_OPERAND to
   TARGET_PRINT_OPERAND_ADDRESS.  */
static enum machine_mode output_memory_reference_mode;

/* Report whether we're printing out the first address fragment of a
   POST_INC or POST_DEC memory reference, from TARGET_PRINT_OPERAND to
   TARGET_PRINT_OPERAND_ADDRESS.  */
static bool output_memory_autoinc_first;
/* Option handling  */

/* Implement TARGET_OPTION_OVERRIDE.  */
static void
tilegx_option_override (void)
{
  if (global_options_set.x_tilegx_cmodel)
    {
      switch (tilegx_cmodel)
	{
	case CM_SMALL:
	case CM_SMALL_PIC:
	  if (flag_pic)
	    tilegx_cmodel = CM_SMALL_PIC;
	  break;

	case CM_LARGE:
	case CM_LARGE_PIC:
	  if (flag_pic)
	    tilegx_cmodel = CM_LARGE_PIC;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    tilegx_cmodel = flag_pic ? CM_SMALL_PIC : CM_SMALL;

  /* When modulo scheduling is enabled, we still rely on the regular
     scheduler for bundling.  */
  if (flag_modulo_sched)
    flag_resched_modulo_sched = 1;
}
/* Implement TARGET_SCALAR_MODE_SUPPORTED_P.  */
static bool
tilegx_scalar_mode_supported_p (enum machine_mode mode)
{
  switch (mode)
    {
    case QImode:
    case HImode:
    case SImode:
    case DImode:
    case TImode:
      return true;

    case SFmode:
    case DFmode:
      return true;

    default:
      return false;
    }
}


/* Implement TARGET_VECTOR_MODE_SUPPORTED_P.  */
static bool
tilegx_vector_mode_supported_p (enum machine_mode mode)
{
  return mode == V8QImode || mode == V4HImode || mode == V2SImode;
}


/* Implement TARGET_CANNOT_FORCE_CONST_MEM.  */
static bool
tilegx_cannot_force_const_mem (enum machine_mode mode ATTRIBUTE_UNUSED,
			       rtx x ATTRIBUTE_UNUSED)
{
  return true;
}


/* Implement TARGET_FUNCTION_OK_FOR_SIBCALL.  */
static bool
tilegx_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
{
  return (tilegx_cmodel != CM_LARGE && tilegx_cmodel != CM_LARGE_PIC
	  && (decl != NULL));
}


/* Implement TARGET_PASS_BY_REFERENCE.  Variable sized types are
   passed by reference.  */
static bool
tilegx_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
			  enum machine_mode mode ATTRIBUTE_UNUSED,
			  const_tree type, bool named ATTRIBUTE_UNUSED)
{
  return (type && TYPE_SIZE (type)
	  && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST);
}


/* Implement TARGET_RETURN_IN_MSB.  We return a value in the most
   significant part of a register if:
   - the target is big-endian; and
   - the value has an aggregate type (e.g., structure or union).  */
static bool
tilegx_return_in_msb (const_tree valtype)
{
  return (TARGET_BIG_ENDIAN && AGGREGATE_TYPE_P (valtype));
}


/* Implement TARGET_RETURN_IN_MEMORY.  */
static bool
tilegx_return_in_memory (const_tree type, const_tree fndecl ATTRIBUTE_UNUSED)
{
  return !IN_RANGE (int_size_in_bytes (type),
		    0, TILEGX_NUM_RETURN_REGS * UNITS_PER_WORD);
}


/* Implement TARGET_MODE_REP_EXTENDED.  */
static int
tilegx_mode_rep_extended (enum machine_mode mode, enum machine_mode mode_rep)
{
  /* SImode register values are sign-extended to DImode.  */
  if (mode == SImode && mode_rep == DImode)
    return SIGN_EXTEND;

  return UNKNOWN;
}
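
/* An illustrative note (not from the original source): because
   SImode values are represented sign-extended to DImode, an SImode
   register holding (int) 0x80000000 reads back as
   0xFFFFFFFF80000000 when viewed in DImode, so a pattern such as

     (set (reg:DI d) (sign_extend:DI (reg:SI s)))

   can often be treated as a plain register copy.  */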
/* Implement TARGET_FUNCTION_ARG_BOUNDARY.  */
static unsigned int
tilegx_function_arg_boundary (enum machine_mode mode, const_tree type)
{
  unsigned int alignment;

  alignment = type ? TYPE_ALIGN (type) : GET_MODE_ALIGNMENT (mode);
  if (alignment < PARM_BOUNDARY)
    alignment = PARM_BOUNDARY;
  if (alignment > STACK_BOUNDARY)
    alignment = STACK_BOUNDARY;
  return alignment;
}
/* Implement TARGET_FUNCTION_ARG.  */
static rtx
tilegx_function_arg (cumulative_args_t cum_v,
		     enum machine_mode mode,
		     const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS cum = *get_cumulative_args (cum_v);
  int byte_size = ((mode == BLKmode)
		   ? int_size_in_bytes (type) : GET_MODE_SIZE (mode));
  bool doubleword_aligned_p;

  if (cum >= TILEGX_NUM_ARG_REGS)
    return NULL_RTX;

  /* See whether the argument has doubleword alignment.  */
  doubleword_aligned_p =
    tilegx_function_arg_boundary (mode, type) > BITS_PER_WORD;

  if (doubleword_aligned_p)
    cum += cum & 1;

  /* The ABI does not allow parameters to be passed partially in reg
     and partially in stack.  */
  if ((cum + (byte_size + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
      > TILEGX_NUM_ARG_REGS)
    return NULL_RTX;

  return gen_rtx_REG (mode, cum);
}
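
/* A worked example of the rules above (illustrative only, assuming
   TILEGX_NUM_ARG_REGS is 10 and UNITS_PER_WORD is 8): a 16-byte
   argument with 16-byte alignment arriving with CUM == 3 has
   doubleword_aligned_p true, so CUM rounds up to 4 and the argument
   lands in registers 4 and 5.  If instead CUM == 9, the two words
   needed would run past the last argument register, so NULL_RTX is
   returned and the whole argument goes on the stack.  */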
/* Implement TARGET_FUNCTION_ARG_ADVANCE.  */
static void
tilegx_function_arg_advance (cumulative_args_t cum_v,
			     enum machine_mode mode,
			     const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);

  int byte_size = ((mode == BLKmode)
		   ? int_size_in_bytes (type) : GET_MODE_SIZE (mode));
  int word_size = (byte_size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
  bool doubleword_aligned_p;

  /* See whether the argument has doubleword alignment.  */
  doubleword_aligned_p =
    tilegx_function_arg_boundary (mode, type) > BITS_PER_WORD;

  if (doubleword_aligned_p)
    *cum += *cum & 1;

  /* If the current argument does not fit in the pretend_args space,
     skip over it.  */
  if (*cum < TILEGX_NUM_ARG_REGS
      && *cum + word_size > TILEGX_NUM_ARG_REGS)
    *cum = TILEGX_NUM_ARG_REGS;

  *cum += word_size;
}
/* Implement TARGET_FUNCTION_VALUE.  */
static rtx
tilegx_function_value (const_tree valtype, const_tree fn_decl_or_type,
		       bool outgoing ATTRIBUTE_UNUSED)
{
  enum machine_mode mode;
  int unsigned_p;

  mode = TYPE_MODE (valtype);
  unsigned_p = TYPE_UNSIGNED (valtype);

  mode = promote_function_mode (valtype, mode, &unsigned_p,
				fn_decl_or_type, 1);

  return gen_rtx_REG (mode, 0);
}


/* Implement TARGET_LIBCALL_VALUE.  */
static rtx
tilegx_libcall_value (enum machine_mode mode,
		      const_rtx fun ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (mode, 0);
}


/* Implement FUNCTION_VALUE_REGNO_P.  */
static bool
tilegx_function_value_regno_p (const unsigned int regno)
{
  return regno < TILEGX_NUM_RETURN_REGS;
}
/* Implement TARGET_BUILD_BUILTIN_VA_LIST.  */
static tree
tilegx_build_builtin_va_list (void)
{
  tree f_args, f_skip, record, type_decl;
  bool owp;

  record = lang_hooks.types.make_type (RECORD_TYPE);

  type_decl = build_decl (BUILTINS_LOCATION, TYPE_DECL,
			  get_identifier ("__va_list_tag"), record);

  f_args = build_decl (BUILTINS_LOCATION, FIELD_DECL,
		       get_identifier ("__args"), ptr_type_node);
  f_skip = build_decl (BUILTINS_LOCATION, FIELD_DECL,
		       get_identifier ("__skip"), ptr_type_node);

  DECL_FIELD_CONTEXT (f_args) = record;

  DECL_FIELD_CONTEXT (f_skip) = record;

  TREE_CHAIN (record) = type_decl;
  TYPE_NAME (record) = type_decl;
  TYPE_FIELDS (record) = f_args;
  TREE_CHAIN (f_args) = f_skip;

  /* We know this is being padded and we want it that way.  It is an
     internal type, so hide the warnings from the user.  */
  owp = warn_padded;
  warn_padded = false;

  layout_type (record);

  warn_padded = owp;

  /* The correct type is an array type of one element.  */
  return record;
}
/* Implement TARGET_EXPAND_BUILTIN_VA_START.  */
static void
tilegx_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
{
  tree f_args, f_skip;
  tree args, skip, t;

  f_args = TYPE_FIELDS (TREE_TYPE (valist));
  f_skip = TREE_CHAIN (f_args);

  args =
    build3 (COMPONENT_REF, TREE_TYPE (f_args), valist, f_args, NULL_TREE);
  skip =
    build3 (COMPONENT_REF, TREE_TYPE (f_skip), valist, f_skip, NULL_TREE);

  /* Find the __args area.  */
  t = make_tree (TREE_TYPE (args), virtual_incoming_args_rtx);
  t = fold_build_pointer_plus_hwi (t,
				   UNITS_PER_WORD *
				   (crtl->args.info - TILEGX_NUM_ARG_REGS));

  if (crtl->args.pretend_args_size > 0)
    t = fold_build_pointer_plus_hwi (t, -STACK_POINTER_OFFSET);

  t = build2 (MODIFY_EXPR, TREE_TYPE (args), args, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the __skip area.  */
  t = make_tree (TREE_TYPE (skip), virtual_incoming_args_rtx);
  t = fold_build_pointer_plus_hwi (t, -STACK_POINTER_OFFSET);
  t = build2 (MODIFY_EXPR, TREE_TYPE (skip), skip, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
/* Implement TARGET_SETUP_INCOMING_VARARGS.  */
static void
tilegx_setup_incoming_varargs (cumulative_args_t cum,
			       enum machine_mode mode,
			       tree type, int *pretend_args, int no_rtl)
{
  CUMULATIVE_ARGS local_cum = *get_cumulative_args (cum);
  int first_reg;

  /* The caller has advanced CUM up to, but not beyond, the last named
     argument.  Advance a local copy of CUM past the last "real" named
     argument, to find out how many registers are left over.  */
  targetm.calls.function_arg_advance (pack_cumulative_args (&local_cum),
				      mode, type, true);
  first_reg = local_cum;

  if (local_cum < TILEGX_NUM_ARG_REGS)
    {
      *pretend_args = UNITS_PER_WORD * (TILEGX_NUM_ARG_REGS - first_reg);

      if (!no_rtl)
	{
	  alias_set_type set = get_varargs_alias_set ();
	  rtx tmp =
	    gen_rtx_MEM (BLKmode, plus_constant (Pmode,
						 virtual_incoming_args_rtx,
						 -STACK_POINTER_OFFSET -
						 UNITS_PER_WORD *
						 (TILEGX_NUM_ARG_REGS -
						  first_reg)));
	  MEM_NOTRAP_P (tmp) = 1;
	  set_mem_alias_set (tmp, set);
	  move_block_from_reg (first_reg, tmp,
			       TILEGX_NUM_ARG_REGS - first_reg);
	}
    }
  else
    *pretend_args = 0;
}
/* Implement TARGET_GIMPLIFY_VA_ARG_EXPR.  Gimplify va_arg by updating
   the va_list structure VALIST as required to retrieve an argument of
   type TYPE, and returning that argument.

   ret = va_arg(VALIST, TYPE);

   generates code equivalent to:

     paddedsize = (sizeof(TYPE) + 7) & -8;
     if (  (VALIST.__args + paddedsize > VALIST.__skip)
	 & (VALIST.__args <= VALIST.__skip))
       addr = VALIST.__skip + STACK_POINTER_OFFSET;
     else
       addr = VALIST.__args;
     VALIST.__args = addr + paddedsize;
     if (BYTES_BIG_ENDIAN)
       ret = *(TYPE *)(addr + paddedsize - sizeof(TYPE));
     else
       ret = *(TYPE *)addr;  */
static tree
tilegx_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
			     gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree f_args, f_skip;
  tree args, skip;
  HOST_WIDE_INT size, rsize;
  tree addr, tmp;
  bool pass_by_reference_p;

  f_args = TYPE_FIELDS (va_list_type_node);
  f_skip = TREE_CHAIN (f_args);

  args =
    build3 (COMPONENT_REF, TREE_TYPE (f_args), valist, f_args, NULL_TREE);
  skip =
    build3 (COMPONENT_REF, TREE_TYPE (f_skip), valist, f_skip, NULL_TREE);

  addr = create_tmp_var (ptr_type_node, "va_arg");

  /* If an object is dynamically sized, a pointer to it is passed
     instead of the object itself.  */
  pass_by_reference_p = pass_by_reference (NULL, TYPE_MODE (type), type,
					   false);

  if (pass_by_reference_p)
    type = build_pointer_type (type);

  size = int_size_in_bytes (type);
  rsize = ((size + UNITS_PER_WORD - 1) / UNITS_PER_WORD) * UNITS_PER_WORD;

  /* If the alignment of the type is greater than the default for a
     parameter, align to the STACK_BOUNDARY.  */
  if (TYPE_ALIGN (type) > PARM_BOUNDARY)
    {
      /* Assert the only case we generate code for: when
	 stack boundary = 2 * parm boundary.  */
      gcc_assert (STACK_BOUNDARY == PARM_BOUNDARY * 2);

      tmp = build2 (BIT_AND_EXPR, sizetype,
		    fold_convert (sizetype, unshare_expr (args)),
		    size_int (PARM_BOUNDARY / 8));
      tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
		    unshare_expr (args), tmp);

      gimplify_assign (unshare_expr (args), tmp, pre_p);
    }

  /* Build conditional expression to calculate addr.  The expression
     will be gimplified later.  */
  tmp = fold_build_pointer_plus_hwi (unshare_expr (args), rsize);
  tmp = build2 (TRUTH_AND_EXPR, boolean_type_node,
		build2 (GT_EXPR, boolean_type_node, tmp, unshare_expr (skip)),
		build2 (LE_EXPR, boolean_type_node, unshare_expr (args),
			unshare_expr (skip)));

  tmp = build3 (COND_EXPR, ptr_type_node, tmp,
		build2 (POINTER_PLUS_EXPR, ptr_type_node, unshare_expr (skip),
			size_int (STACK_POINTER_OFFSET)),
		unshare_expr (args));

  /* Adjust the address of va_arg if it is in big endian mode.  */
  if (BYTES_BIG_ENDIAN && rsize > size)
    tmp = fold_build_pointer_plus_hwi (tmp, rsize - size);
  gimplify_assign (addr, tmp, pre_p);

  /* Update VALIST.__args.  */

  if (BYTES_BIG_ENDIAN && rsize > size)
    tmp = fold_build_pointer_plus_hwi (addr, size);
  else
    tmp = fold_build_pointer_plus_hwi (addr, rsize);
  gimplify_assign (unshare_expr (args), tmp, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  if (pass_by_reference_p)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
}
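
/* To make the conditional above concrete (an illustrative note, not
   from the original source): while arguments still fit in the
   register save area, __args + paddedsize <= __skip, so the first
   conjunct is false and fetches walk __args through the save area.
   At the first fetch that would cross __skip, both conjuncts are
   true, so addr jumps to the stack overflow area at
   __skip + STACK_POINTER_OFFSET.  From then on __args > __skip, the
   second conjunct is false, and fetches walk the stack area
   directly.  */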
/* Implement TARGET_RTX_COSTS.  */
static bool
tilegx_rtx_costs (rtx x, int code, int outer_code, int opno, int *total,
		  bool speed)
{
  switch (code)
    {
    case CONST_INT:
      /* If this is an 8-bit constant, return zero since it can be
	 used nearly anywhere with no cost.  If it is a valid operand
	 for an ADD or AND, likewise return 0 if we know it will be
	 used in that context.  Otherwise, return 2 since it might be
	 used there later.  All other constants take at least two
	 insns.  */
      if (satisfies_constraint_I (x))
	{
	  *total = 0;
	  return true;
	}
      else if (outer_code == PLUS && add_operand (x, VOIDmode))
	{
	  /* Slightly penalize large constants even though we can add
	     them in one instruction, because it forces the use of
	     2-wide bundling mode.  */
	  *total = 1;
	  return true;
	}
      else if (move_operand (x, SImode))
	{
	  /* We can materialize in one move.  */
	  *total = COSTS_N_INSNS (1);
	  return true;
	}
      else
	{
	  /* We can materialize in two moves.  */
	  *total = COSTS_N_INSNS (2);
	  return true;
	}

      return false;

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      *total = COSTS_N_INSNS (2);
      return true;

    case CONST_DOUBLE:
      *total = COSTS_N_INSNS (4);
      return true;

    case HIGH:
      *total = 0;
      return true;

    case MEM:
      /* If outer-code was a sign or zero extension, a cost of
	 COSTS_N_INSNS (1) was already added in, so account for
	 that.  */
      if (outer_code == ZERO_EXTEND || outer_code == SIGN_EXTEND)
	*total = COSTS_N_INSNS (1);
      else
	*total = COSTS_N_INSNS (2);
      return true;

    case PLUS:
      /* Convey that shl[123]add are efficient.  */
      if (GET_CODE (XEXP (x, 0)) == MULT
	  && cint_248_operand (XEXP (XEXP (x, 0), 1), VOIDmode))
	{
	  *total = (rtx_cost (XEXP (XEXP (x, 0), 0),
			      (enum rtx_code) outer_code, opno, speed)
		    + rtx_cost (XEXP (x, 1),
				(enum rtx_code) outer_code, opno, speed)
		    + COSTS_N_INSNS (1));
	  return true;
	}
      return false;

    case MULT:
      *total = COSTS_N_INSNS (2);
      return false;

    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      /* These are handled by software and are very expensive.  */
      *total = COSTS_N_INSNS (100);
      return false;

    case UNSPEC:
    case UNSPEC_VOLATILE:
      {
	int num = XINT (x, 1);

	if (num <= TILEGX_LAST_LATENCY_1_INSN)
	  *total = COSTS_N_INSNS (1);
	else if (num <= TILEGX_LAST_LATENCY_2_INSN)
	  *total = COSTS_N_INSNS (2);
	else if (num > TILEGX_LAST_LATENCY_INSN)
	  {
	    if (num == UNSPEC_NON_TEMPORAL)
	      {
		/* These are basically loads.  */
		if (outer_code == ZERO_EXTEND || outer_code == SIGN_EXTEND)
		  *total = COSTS_N_INSNS (1);
		else
		  *total = COSTS_N_INSNS (2);
	      }
	    else
	      {
		if (outer_code == PLUS)
		  *total = 0;
		else
		  *total = COSTS_N_INSNS (1);
	      }
	  }
	else
	  {
	    switch (num)
	      {
	      case UNSPEC_BLOCKAGE:
	      case UNSPEC_NETWORK_BARRIER:
	      case UNSPEC_ATOMIC:
		*total = 0;
		break;

	      case UNSPEC_LNK_AND_LABEL:
	      case UNSPEC_MF:
	      case UNSPEC_MOV_PCREL_STEP3:
	      case UNSPEC_NETWORK_RECEIVE:
	      case UNSPEC_NETWORK_SEND:
	      case UNSPEC_SPR_MOVE:
	      case UNSPEC_TLS_GD_ADD:
		*total = COSTS_N_INSNS (1);
		break;

	      case UNSPEC_TLS_IE_LOAD:
	      case UNSPEC_XCHG:
		*total = COSTS_N_INSNS (2);
		break;

	      case UNSPEC_SP_SET:
		*total = COSTS_N_INSNS (3);
		break;

	      case UNSPEC_SP_TEST:
		*total = COSTS_N_INSNS (4);
		break;

	      case UNSPEC_CMPXCHG:
	      case UNSPEC_INSN_CMPEXCH:
	      case UNSPEC_LATENCY_L2:
		*total = COSTS_N_INSNS (11);
		break;

	      case UNSPEC_TLS_GD_CALL:
		*total = COSTS_N_INSNS (30);
		break;

	      case UNSPEC_LATENCY_MISS:
		*total = COSTS_N_INSNS (80);
		break;

	      default:
		*total = COSTS_N_INSNS (1);
	      }
	  }
	return true;
      }

    default:
      return false;
    }
}
/* Rtl lowering.  */

/* Create a temporary variable to hold a partial result, to enable
   CSE.  */
static rtx
create_temp_reg_if_possible (enum machine_mode mode, rtx default_reg)
{
  return can_create_pseudo_p () ? gen_reg_rtx (mode) : default_reg;
}


/* Functions to save and restore machine-specific function data.  */
static struct machine_function *
tilegx_init_machine_status (void)
{
  return ggc_cleared_alloc<machine_function> ();
}


/* Do anything needed before RTL is emitted for each function.  */
void
tilegx_init_expanders (void)
{
  /* Arrange to initialize and mark the machine per-function
     status.  */
  init_machine_status = tilegx_init_machine_status;

  if (cfun && cfun->machine && flag_pic)
    {
      static int label_num = 0;

      char text_label_name[32];

      struct machine_function *machine = cfun->machine;

      ASM_GENERATE_INTERNAL_LABEL (text_label_name, "L_PICLNK", label_num++);

      machine->text_label_symbol =
	gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (text_label_name));

      machine->text_label_rtx =
	gen_rtx_REG (Pmode, TILEGX_PIC_TEXT_LABEL_REGNUM);

      machine->got_rtx = gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);

      machine->calls_tls_get_addr = false;
    }
}


/* Implement TARGET_EXPAND_TO_RTL_HOOK.  */
static void
tilegx_expand_to_rtl_hook (void)
{
  /* Exclude earlier sets of crtl->uses_pic_offset_table, because we
     only care about uses actually emitted.  */
  crtl->uses_pic_offset_table = 0;
}


/* Implement TARGET_SHIFT_TRUNCATION_MASK.  DImode shifts use the mode
   matching insns and therefore guarantee that the shift count is
   modulo 64.  SImode shifts sometimes use the 64-bit version, so they
   do not provide such a guarantee.  */
static unsigned HOST_WIDE_INT
tilegx_shift_truncation_mask (enum machine_mode mode)
{
  return mode == DImode ? 63 : 0;
}
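
/* For example (an illustrative note): with a mask of 63, the middle
   end may fold a DImode shift by (n & 63) into a shift by n, because
   the hardware already reduces DImode shift counts modulo 64.
   Returning 0 for SImode blocks that folding, since an SImode shift
   may be carried out by a 64-bit shift instruction that does not
   truncate the count to 5 bits.  */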
/* Implement TARGET_INIT_LIBFUNCS.  */
static void
tilegx_init_libfuncs (void)
{
  /* We need to explicitly generate these libfuncs to support
     conversion of divide by constant to multiply (the divide stubs
     in tilegx.md exist also for this reason).  Normally we'd expect
     gcc to lazily generate them when they are needed, but for some
     reason it's set up to only generate them if the mode is the word
     mode.  */
  set_optab_libfunc (sdiv_optab, SImode, "__divsi3");
  set_optab_libfunc (udiv_optab, SImode, "__udivsi3");
  set_optab_libfunc (smod_optab, SImode, "__modsi3");
  set_optab_libfunc (umod_optab, SImode, "__umodsi3");
}
/* Return true if X contains a thread-local symbol.  */
static bool
tilegx_tls_referenced_p (rtx x)
{
  if (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == PLUS)
    x = XEXP (XEXP (x, 0), 0);

  if (GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (x))
    return true;

  /* That's all we handle in tilegx_legitimize_tls_address for
     now.  */
  return false;
}
/* Return true if X requires a scratch register.  It is given that
   flag_pic is on and that X satisfies CONSTANT_P.  */
static int
tilegx_pic_address_needs_scratch (rtx x)
{
  if (GET_CODE (x) == CONST
      && GET_CODE (XEXP (x, 0)) == PLUS
      && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
	  || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
      && (CONST_INT_P (XEXP (XEXP (x, 0), 1))))
    return true;

  return false;
}
/* Implement TARGET_LEGITIMATE_CONSTANT_P.  This is all constants for
   which we are willing to load the value into a register via a move
   pattern.  TLS cannot be treated as a constant because it can
   include a function call.  */
static bool
tilegx_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
  switch (GET_CODE (x))
    {
    case CONST:
    case SYMBOL_REF:
      return !tilegx_tls_referenced_p (x);

    default:
      return true;
    }
}


/* Return true if the constant value X is a legitimate general operand
   when generating PIC code.  It is given that flag_pic is on and that
   X satisfies CONSTANT_P.  */
bool
tilegx_legitimate_pic_operand_p (rtx x)
{
  if (tilegx_pic_address_needs_scratch (x))
    return false;

  if (tilegx_tls_referenced_p (x))
    return false;

  return true;
}
/* Return true if the rtx X can be used as an address operand.  */
static bool
tilegx_legitimate_address_p (enum machine_mode ARG_UNUSED (mode), rtx x,
			     bool strict)
{
  if (GET_CODE (x) == SUBREG)
    x = SUBREG_REG (x);

  switch (GET_CODE (x))
    {
    case POST_INC:
    case POST_DEC:
      if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
	return false;

      x = XEXP (x, 0);
      break;

    case POST_MODIFY:
      if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
	return false;

      if (GET_CODE (XEXP (x, 1)) != PLUS)
	return false;

      if (!rtx_equal_p (XEXP (x, 0), XEXP (XEXP (x, 1), 0)))
	return false;

      if (!satisfies_constraint_I (XEXP (XEXP (x, 1), 1)))
	return false;

      x = XEXP (x, 0);
      break;

    case REG:
      break;

    default:
      return false;
    }

  /* Check if x is a valid reg.  */
  if (!REG_P (x))
    return false;

  if (strict)
    return REGNO_OK_FOR_BASE_P (REGNO (x));
  else
    return true;
}
/* Return the rtx containing SYMBOL_REF to the text label.  */
static rtx
tilegx_text_label_symbol (void)
{
  return cfun->machine->text_label_symbol;
}


/* Return the register storing the value of the text label.  */
static rtx
tilegx_text_label_rtx (void)
{
  return cfun->machine->text_label_rtx;
}


/* Return the register storing the value of the global offset
   table.  */
static rtx
tilegx_got_rtx (void)
{
  return cfun->machine->got_rtx;
}


/* Return the SYMBOL_REF for _GLOBAL_OFFSET_TABLE_.  */
static rtx
tilegx_got_symbol (void)
{
  if (g_got_symbol == NULL)
    g_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");

  return g_got_symbol;
}


/* Return a reference to the GOT to be used by TLS references.  */
static rtx
tilegx_tls_got (void)
{
  rtx temp;
  if (flag_pic)
    {
      crtl->uses_pic_offset_table = 1;
      return tilegx_got_rtx ();
    }

  temp = gen_reg_rtx (Pmode);
  emit_move_insn (temp, tilegx_got_symbol ());

  return temp;
}
/* ADDR contains a thread-local SYMBOL_REF.  Generate code to compute
   this (thread-local) address.  */
static rtx
tilegx_legitimize_tls_address (rtx addr)
{
  rtx ret;

  gcc_assert (can_create_pseudo_p ());

  if (GET_CODE (addr) == SYMBOL_REF)
    switch (SYMBOL_REF_TLS_MODEL (addr))
      {
      case TLS_MODEL_GLOBAL_DYNAMIC:
      case TLS_MODEL_LOCAL_DYNAMIC:
	{
	  rtx r0, temp, temp2, temp3, got, last;

	  ret = gen_reg_rtx (Pmode);
	  r0 = gen_rtx_REG (Pmode, 0);
	  temp = gen_reg_rtx (Pmode);
	  temp2 = gen_reg_rtx (Pmode);
	  temp3 = gen_reg_rtx (Pmode);

	  got = tilegx_tls_got ();
	  if (TARGET_32BIT)
	    {
	      emit_insn (gen_mov_tls_gd_step1_32bit (temp, addr));
	      emit_insn (gen_mov_tls_gd_step2_32bit (temp2, temp, addr));
	      emit_insn (gen_tls_add_32bit (temp2, got, temp2, addr));
	    }
	  else
	    {
	      emit_insn (gen_mov_tls_gd_step1 (temp, addr));
	      emit_insn (gen_mov_tls_gd_step2 (temp2, temp, addr));
	      emit_insn (gen_tls_add (temp2, got, temp2, addr));
	    }

	  emit_move_insn (r0, temp2);

	  if (TARGET_32BIT)
	    emit_insn (gen_tls_gd_call_32bit (addr));
	  else
	    emit_insn (gen_tls_gd_call (addr));

	  emit_move_insn (temp3, r0);

	  if (TARGET_32BIT)
	    last = emit_insn (gen_tls_gd_add_32bit (ret, temp3, addr));
	  else
	    last = emit_insn (gen_tls_gd_add (ret, temp3, addr));

	  set_unique_reg_note (last, REG_EQUAL, copy_rtx (addr));
	  break;
	}
      case TLS_MODEL_INITIAL_EXEC:
	{
	  rtx temp, temp2, temp3, got, last;

	  ret = gen_reg_rtx (Pmode);
	  temp = gen_reg_rtx (Pmode);
	  temp2 = gen_reg_rtx (Pmode);
	  temp3 = gen_reg_rtx (Pmode);

	  got = tilegx_tls_got ();
	  if (TARGET_32BIT)
	    {
	      emit_insn (gen_mov_tls_ie_step1_32bit (temp, addr));
	      emit_insn (gen_mov_tls_ie_step2_32bit (temp2, temp, addr));
	      emit_insn (gen_tls_add_32bit (temp2, got, temp2, addr));
	      emit_insn (gen_tls_ie_load_32bit (temp3, temp2, addr));
	    }
	  else
	    {
	      emit_insn (gen_mov_tls_ie_step1 (temp, addr));
	      emit_insn (gen_mov_tls_ie_step2 (temp2, temp, addr));
	      emit_insn (gen_tls_add (temp2, got, temp2, addr));
	      emit_insn (gen_tls_ie_load (temp3, temp2, addr));
	    }

	  last =
	    emit_move_insn (ret,
			    gen_rtx_PLUS (Pmode,
					  gen_rtx_REG (Pmode,
						       THREAD_POINTER_REGNUM),
					  temp3));
	  set_unique_reg_note (last, REG_EQUAL, copy_rtx (addr));
	  break;
	}
      case TLS_MODEL_LOCAL_EXEC:
	{
	  rtx temp, temp2, last;

	  ret = gen_reg_rtx (Pmode);
	  temp = gen_reg_rtx (Pmode);
	  temp2 = gen_reg_rtx (Pmode);

	  if (TARGET_32BIT)
	    {
	      emit_insn (gen_mov_tls_le_step1_32bit (temp, addr));
	      emit_insn (gen_mov_tls_le_step2_32bit (temp2, temp, addr));
	    }
	  else
	    {
	      emit_insn (gen_mov_tls_le_step1 (temp, addr));
	      emit_insn (gen_mov_tls_le_step2 (temp2, temp, addr));
	    }

	  last =
	    emit_move_insn (ret,
			    gen_rtx_PLUS (Pmode,
					  gen_rtx_REG (Pmode,
						       THREAD_POINTER_REGNUM),
					  temp2));
	  set_unique_reg_note (last, REG_EQUAL, copy_rtx (addr));
	  break;
	}
      default:
	gcc_unreachable ();
      }
  else if (GET_CODE (addr) == CONST)
    {
      rtx base, offset;

      gcc_assert (GET_CODE (XEXP (addr, 0)) == PLUS);

      base = tilegx_legitimize_tls_address (XEXP (XEXP (addr, 0), 0));
      offset = XEXP (XEXP (addr, 0), 1);

      base = force_operand (base, NULL_RTX);
      ret = force_reg (Pmode, gen_rtx_PLUS (Pmode, base, offset));
    }
  else
    gcc_unreachable ();

  return ret;
}
/* Set RESULT to a register that points to ADDR, a symbolic address,
   by computing its address relative to tilegx_text_label_symbol.  */
void
tilegx_compute_pcrel_address (rtx result, rtx addr)
{
  rtx text_label_symbol = tilegx_text_label_symbol ();
  rtx text_label_rtx = tilegx_text_label_rtx ();
  rtx temp, temp2, temp3;

  temp = create_temp_reg_if_possible (Pmode, result);
  temp2 = create_temp_reg_if_possible (Pmode, result);

  if (TARGET_32BIT)
    {
      emit_insn (gen_mov_pcrel_step1_32bit (temp, addr, text_label_symbol));
      emit_insn (gen_mov_pcrel_step2_32bit (temp2, temp, addr,
					    text_label_symbol));
      emit_insn (gen_mov_pcrel_step3_32bit (result, temp2,
					    text_label_rtx,
					    addr, text_label_symbol));
    }
  else if (tilegx_cmodel == CM_LARGE_PIC)
    {
      temp3 = create_temp_reg_if_possible (Pmode, result);
      emit_insn (gen_mov_large_pcrel_step1 (temp, addr, text_label_symbol));
      emit_insn (gen_mov_large_pcrel_step2 (temp2, temp, addr,
					    text_label_symbol));
      emit_insn (gen_mov_large_pcrel_step3 (temp3, temp2, addr,
					    text_label_symbol));
      emit_insn (gen_mov_large_pcrel_step4 (result, temp3,
					    text_label_rtx,
					    addr, text_label_symbol));
    }
  else
    {
      emit_insn (gen_mov_pcrel_step1 (temp, addr, text_label_symbol));
      emit_insn (gen_mov_pcrel_step2 (temp2, temp, addr, text_label_symbol));
      emit_insn (gen_mov_pcrel_step3 (result, temp2,
				      text_label_rtx,
				      addr, text_label_symbol));
    }
}
/* Set RESULT to a register that points to the PLT entry of ADDR, a
   symbolic address, by computing its address relative to
   tilegx_text_label_symbol.  */
void
tilegx_compute_pcrel_plt_address (rtx result, rtx addr)
{
  rtx text_label_symbol = tilegx_text_label_symbol ();
  rtx text_label_rtx = tilegx_text_label_rtx ();
  rtx temp, temp2, temp3;

  temp = create_temp_reg_if_possible (Pmode, result);
  temp2 = create_temp_reg_if_possible (Pmode, result);

  if (TARGET_32BIT)
    {
      emit_insn (gen_mov_plt_pcrel_step1_32bit (temp, addr,
						text_label_symbol));
      emit_insn (gen_mov_plt_pcrel_step2_32bit (temp2, temp, addr,
						text_label_symbol));
      emit_move_insn (result, gen_rtx_PLUS (Pmode, temp2, text_label_rtx));
    }
  else
    {
      temp3 = create_temp_reg_if_possible (Pmode, result);

      emit_insn (gen_mov_plt_pcrel_step1 (temp, addr, text_label_symbol));
      emit_insn (gen_mov_plt_pcrel_step2 (temp2, temp, addr,
					  text_label_symbol));
      emit_insn (gen_mov_plt_pcrel_step3 (temp3, temp2, addr,
					  text_label_symbol));
      emit_move_insn (result, gen_rtx_PLUS (Pmode, temp3, text_label_rtx));
    }
}
/* Legitimize PIC addresses.  If the address is already
   position-independent, we return ORIG.  Newly generated
   position-independent addresses go into a reg.  This is REG if
   nonzero, otherwise we allocate register(s) as necessary.  */
static rtx
tilegx_legitimize_pic_address (rtx orig,
			       enum machine_mode mode ATTRIBUTE_UNUSED,
			       rtx reg)
{
  if (GET_CODE (orig) == SYMBOL_REF)
    {
      rtx address, pic_ref;

      if (reg == 0)
	{
	  gcc_assert (can_create_pseudo_p ());
	  reg = gen_reg_rtx (Pmode);
	}

      if (SYMBOL_REF_LOCAL_P (orig))
	{
	  /* If not during reload, allocate another temp reg here for
	     loading in the address, so that these instructions can be
	     optimized properly.  */
	  rtx temp_reg = create_temp_reg_if_possible (Pmode, reg);
	  tilegx_compute_pcrel_address (temp_reg, orig);

	  /* Note: this is conservative.  We use the text_label but we
	     don't use the pic_offset_table.  However, in some cases
	     we may need the pic_offset_table (see
	     tilegx_fixup_pcrel_references).  */
	  crtl->uses_pic_offset_table = 1;

	  address = temp_reg;

	  emit_move_insn (reg, address);
	  return reg;
	}
      else
	{
	  /* If not during reload, allocate another temp reg here for
	     loading in the address, so that these instructions can be
	     optimized properly.  */
	  rtx temp_reg = create_temp_reg_if_possible (Pmode, reg);

	  gcc_assert (flag_pic);
	  if (flag_pic == 1)
	    {
	      if (TARGET_32BIT)
		{
		  emit_insn (gen_add_got16_32bit (temp_reg,
						  tilegx_got_rtx (),
						  orig));
		}
	      else
		{
		  emit_insn (gen_add_got16 (temp_reg,
					    tilegx_got_rtx (), orig));
		}
	    }
	  else
	    {
	      rtx temp_reg2 = create_temp_reg_if_possible (Pmode, reg);
	      rtx temp_reg3 = create_temp_reg_if_possible (Pmode, reg);
	      if (TARGET_32BIT)
		{
		  emit_insn (gen_mov_got32_step1_32bit (temp_reg3, orig));
		  emit_insn (gen_mov_got32_step2_32bit
			     (temp_reg2, temp_reg3, orig));
		}
	      else
		{
		  emit_insn (gen_mov_got32_step1 (temp_reg3, orig));
		  emit_insn (gen_mov_got32_step2 (temp_reg2, temp_reg3,
						  orig));
		}

	      emit_move_insn (temp_reg,
			      gen_rtx_PLUS (Pmode,
					    tilegx_got_rtx (), temp_reg2));
	    }

	  address = temp_reg;

	  pic_ref = gen_const_mem (Pmode, address);
	  crtl->uses_pic_offset_table = 1;
	  emit_move_insn (reg, pic_ref);
	  /* The following would put a REG_EQUAL note on this insn, so
	     that it can be optimized by loop.  But it causes the label
	     to be optimized away.  */
	  /* set_unique_reg_note (insn, REG_EQUAL, orig); */
	  return reg;
	}
    }
  else if (GET_CODE (orig) == CONST)
    {
      rtx base, offset;

      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 0) == tilegx_got_rtx ())
	return orig;

      if (reg == 0)
	{
	  gcc_assert (can_create_pseudo_p ());
	  reg = gen_reg_rtx (Pmode);
	}

      gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
      base = tilegx_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
					    Pmode, reg);
      offset = tilegx_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
					      base == reg ? 0 : reg);

      if (CONST_INT_P (offset))
	{
	  if (can_create_pseudo_p ())
	    offset = force_reg (Pmode, offset);
	  else
	    /* If we reach here, then something is seriously wrong.  */
	    gcc_unreachable ();
	}

      if (can_create_pseudo_p ())
	return force_reg (Pmode, gen_rtx_PLUS (Pmode, base, offset));
      else
	gcc_unreachable ();
    }
  else if (GET_CODE (orig) == LABEL_REF)
    {
      rtx address;
      rtx temp_reg;

      if (reg == 0)
	{
	  gcc_assert (can_create_pseudo_p ());
	  reg = gen_reg_rtx (Pmode);
	}

      /* If not during reload, allocate another temp reg here for
	 loading in the address, so that these instructions can be
	 optimized properly.  */
      temp_reg = create_temp_reg_if_possible (Pmode, reg);
      tilegx_compute_pcrel_address (temp_reg, orig);

      /* Note: this is conservative.  We use the text_label but we
	 don't use the pic_offset_table.  */
      crtl->uses_pic_offset_table = 1;

      address = temp_reg;

      emit_move_insn (reg, address);

      return reg;
    }

  return orig;
}
/* Implement TARGET_LEGITIMIZE_ADDRESS.  */
static rtx
tilegx_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
			   enum machine_mode mode)
{
  if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
      && symbolic_operand (x, Pmode) && tilegx_tls_referenced_p (x))
    {
      return tilegx_legitimize_tls_address (x);
    }
  else if (flag_pic)
    {
      return tilegx_legitimize_pic_address (x, mode, 0);
    }
  else
    return x;
}
/* Implement TARGET_DELEGITIMIZE_ADDRESS.  */
static rtx
tilegx_delegitimize_address (rtx x)
{
  x = delegitimize_mem_from_attrs (x);

  if (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == UNSPEC)
    {
      switch (XINT (XEXP (x, 0), 1))
	{
	case UNSPEC_HW0:
	case UNSPEC_HW1:
	case UNSPEC_HW2:
	case UNSPEC_HW3:
	case UNSPEC_HW0_LAST:
	case UNSPEC_HW1_LAST:
	case UNSPEC_HW2_LAST:
	case UNSPEC_HW0_PCREL:
	case UNSPEC_HW1_PCREL:
	case UNSPEC_HW1_LAST_PCREL:
	case UNSPEC_HW2_LAST_PCREL:
	case UNSPEC_HW0_PLT_PCREL:
	case UNSPEC_HW1_PLT_PCREL:
	case UNSPEC_HW1_LAST_PLT_PCREL:
	case UNSPEC_HW2_LAST_PLT_PCREL:
	case UNSPEC_HW0_GOT:
	case UNSPEC_HW0_LAST_GOT:
	case UNSPEC_HW1_LAST_GOT:
	case UNSPEC_HW0_TLS_GD:
	case UNSPEC_HW1_LAST_TLS_GD:
	case UNSPEC_HW0_TLS_IE:
	case UNSPEC_HW1_LAST_TLS_IE:
	case UNSPEC_HW0_TLS_LE:
	case UNSPEC_HW1_LAST_TLS_LE:
	  x = XVECEXP (XEXP (x, 0), 0, 0);
	  break;
	}
    }

  return x;
}
/* Emit code to load the PIC register.  */
static void
load_pic_register (bool delay_pic_helper ATTRIBUTE_UNUSED)
{
  int orig_flag_pic = flag_pic;

  rtx got_symbol = tilegx_got_symbol ();
  rtx text_label_symbol = tilegx_text_label_symbol ();
  rtx text_label_rtx = tilegx_text_label_rtx ();
  flag_pic = 0;

  if (TARGET_32BIT)
    {
      emit_insn (gen_insn_lnk_and_label_32bit (text_label_rtx,
					       text_label_symbol));
    }
  else
    {
      emit_insn (gen_insn_lnk_and_label (text_label_rtx, text_label_symbol));
    }

  tilegx_compute_pcrel_address (tilegx_got_rtx (), got_symbol);

  flag_pic = orig_flag_pic;

  /* Need to emit this whether or not we obey regdecls, since
     setjmp/longjmp can cause life info to screw up.  ??? In the case
     where we don't obey regdecls, this is not sufficient since we may
     not fall out the bottom.  */
  emit_use (tilegx_got_rtx ());
}
/* Return the simd variant of the constant NUM of mode MODE, by
   replicating it to fill an integer of mode DImode.  NUM is first
   truncated to fit in MODE.  */
rtx
tilegx_simd_int (rtx num, enum machine_mode mode)
{
  HOST_WIDE_INT n = 0;

  gcc_assert (CONST_INT_P (num));

  n = INTVAL (num);

  switch (mode)
    {
    case QImode:
      n = 0x0101010101010101LL * (n & 0x000000FF);
      break;
    case HImode:
      n = 0x0001000100010001LL * (n & 0x0000FFFF);
      break;
    case SImode:
      n = 0x0000000100000001LL * (n & 0xFFFFFFFF);
      break;
    case DImode:
      break;
    default:
      gcc_unreachable ();
    }

  return GEN_INT (n);
}
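
/* For example (illustrative): with MODE == QImode and NUM == 0xAB the
   result is 0xABABABABABABABAB, and with MODE == HImode and
   NUM == 0x1234 the result is 0x1234123412341234.  */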
/* Returns true iff VAL can be moved into a register in one
   instruction.  And if it can, it emits the code to move the
   constant into DEST_REG.

   If THREE_WIDE_ONLY is true, this insists on an instruction that
   works in a bundle containing three instructions.  */
static bool
expand_set_cint64_one_inst (rtx dest_reg,
			    HOST_WIDE_INT val, bool three_wide_only)
{
  if (val == trunc_int_for_mode (val, QImode))
    {
      /* Success!  */
      emit_move_insn (dest_reg, GEN_INT (val));
      return true;
    }
  else if (!three_wide_only)
    {
      /* Test for the following constraints: J, K, N, P.  We avoid
	 generating an rtx and using existing predicates because we
	 can be testing and rejecting a lot of constants, and GEN_INT
	 is O(N).  */
      if ((val >= -32768 && val <= 65535)
	  || ((val == (val & 0xFF) * 0x0101010101010101LL))
	  || (val == ((trunc_int_for_mode (val, QImode) & 0xFFFF)
		      * 0x0001000100010001LL)))
	{
	  emit_move_insn (dest_reg, GEN_INT (val));
	  return true;
	}
    }

  return false;
}
/* Implement DImode rotatert.  */
static HOST_WIDE_INT
rotate_right (HOST_WIDE_INT n, int count)
{
  unsigned HOST_WIDE_INT x = n & 0xFFFFFFFFFFFFFFFFULL;
  if (count == 0)
    return x;
  return ((x >> count) | (x << (64 - count))) & 0xFFFFFFFFFFFFFFFFULL;
}
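
/* For example (illustrative): rotate_right (0xFF, 8) yields
   0xFF00000000000000, the low byte having wrapped around to the top
   of the 64-bit value.  */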
/* Return true iff n contains exactly one contiguous sequence of 1
   bits, possibly wrapping around from high bits to low bits.  */
bool
tilegx_bitfield_operand_p (HOST_WIDE_INT n, int *first_bit, int *last_bit)
{
  int i;

  if (n == 0)
    return false;

  for (i = 0; i < 64; i++)
    {
      unsigned HOST_WIDE_INT x = rotate_right (n, i);
      if (!(x & 1))
	continue;

      /* See if x is a power of two minus one, i.e. only consecutive 1
	 bits starting from bit 0.  */
      if ((x & (x + 1)) == 0)
	{
	  if (first_bit != NULL)
	    *first_bit = i;
	  if (last_bit != NULL)
	    *last_bit = (i + exact_log2 (x ^ (x >> 1))) & 63;

	  return true;
	}
    }

  return false;
}
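
/* Two worked examples (illustrative): for n == 0x0000000000FFFF00 the
   function returns true with *first_bit == 8 and *last_bit == 23.
   For n == 0xF00000000000000F the run of 1 bits wraps around, so it
   returns true with *first_bit == 60 and *last_bit == 3.  A value
   such as 0x0F0F has two separate runs and returns false.  */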
/* Create code to move the CONST_INT value in src_val to dest_reg.  */
static void
expand_set_cint64 (rtx dest_reg, rtx src_val)
{
  HOST_WIDE_INT val;
  int leading_zeroes, trailing_zeroes;
  int three_wide_only;
  int shift, ins_shift, zero_cluster_shift;
  rtx temp, subreg;

  gcc_assert (CONST_INT_P (src_val));
  val = trunc_int_for_mode (INTVAL (src_val), GET_MODE (dest_reg));

  /* See if we can generate the constant in one instruction.  */
  if (expand_set_cint64_one_inst (dest_reg, val, false))
    return;

  /* Force the destination to DImode so we can use DImode instructions
     to create it.  This both allows instructions like rotl, and
     certain efficient 3-wide instructions.  */
  subreg = simplify_gen_subreg (DImode, dest_reg, GET_MODE (dest_reg), 0);
  gcc_assert (subreg != NULL);
  dest_reg = subreg;

  temp = create_temp_reg_if_possible (DImode, dest_reg);

  leading_zeroes = 63 - floor_log2 (val & 0xFFFFFFFFFFFFFFFFULL);
  trailing_zeroes = exact_log2 (val & -val);

  /* First try all three-wide instructions that generate a constant
     (i.e. movei) followed by various shifts and rotates.  If none of
     those work, try various two-wide ways of generating a constant
     followed by various shifts and rotates.  */
  for (three_wide_only = 1; three_wide_only >= 0; three_wide_only--)
    {
      int count;

      if (expand_set_cint64_one_inst (temp, val >> trailing_zeroes,
				      three_wide_only))
	{
	  /* 0xFFFFFFFFFFFFA500 becomes:
	     movei temp, 0xFFFFFFFFFFFFFFA5
	     shli dest, temp, 8  */
	  emit_move_insn (dest_reg,
			  gen_rtx_ASHIFT (DImode, temp,
					  GEN_INT (trailing_zeroes)));
	  return;
	}

      if (expand_set_cint64_one_inst (temp, val << leading_zeroes,
				      three_wide_only))
	{
	  /* 0x7FFFFFFFFFFFFFFF becomes:
	     movei temp, -2
	     shrui dest, temp, 1  */
	  emit_move_insn (dest_reg,
			  gen_rtx_LSHIFTRT (DImode, temp,
					    GEN_INT (leading_zeroes)));
	  return;
	}

      /* Try rotating a one-instruction immediate.  */
      for (count = 1; count < 64; count++)
	{
	  HOST_WIDE_INT r = rotate_right (val, count);
	  if (expand_set_cint64_one_inst (temp, r, three_wide_only))
	    {
	      /* 0xFFFFFFFFFFA5FFFF becomes:
		 movei temp, 0xFFFFFFFFFFFFFFA5
		 rotli dest, temp, 16  */
	      emit_move_insn (dest_reg,
			      gen_rtx_ROTATE (DImode, temp, GEN_INT (count)));
	      return;
	    }
	}
    }

  /* There are two cases here to produce a large constant.
     In the most general case, we do this:

     moveli x, hw3(NUM)
     shl16insli x, x, hw2(NUM)
     shl16insli x, x, hw1(NUM)
     shl16insli x, x, hw0(NUM)

     However, we can sometimes do better.  shl16insli is a poor way to
     insert 16 zero bits, because simply shifting left by 16 has more
     bundling freedom.  So if we see any contiguous aligned sequence
     of 16 or more zero bits (below the highest set bit), it is always
     more efficient to materialize the bits above the zero bits, then
     left shift to put in the zeroes, then insert whatever bits
     remain.  For example, we might end up with:

     movei x, NUM >> (37 + 16)
     shli x, x, 37
     shl16insli x, x, hw0(NUM)  */

  zero_cluster_shift = -1;

  for (shift = 0; shift < 48 - leading_zeroes; shift += 16)
    {
      HOST_WIDE_INT x = val >> shift;

      /* Find the least significant group of 16 aligned zero bits.  */
      if ((x & 0xFFFF) == 0x0000)
	{
	  /* Grab any following zero bits as well.  */
	  zero_cluster_shift = exact_log2 (x & -x);
	  shift += zero_cluster_shift;
	  break;
	}
    }

  if (zero_cluster_shift >= 0)
    {
      unsigned HOST_WIDE_INT leftover;

      /* Recursively create the constant above the lowest 16 zero
	 bits.  */
      expand_set_cint64 (temp, GEN_INT (val >> shift));

      /* See if we can easily insert the remaining bits, or if we need
	 to fall through to the more general case.  */
      leftover = val - ((val >> shift) << shift);
      if (leftover == 0)
	{
	  /* A simple left shift is enough.  */
	  emit_move_insn (dest_reg,
			  gen_rtx_ASHIFT (DImode, temp, GEN_INT (shift)));
	  return;
	}
      else if (leftover <= 32767)
	{
	  /* Left shift into position then add in the leftover.  */
	  rtx temp2 = create_temp_reg_if_possible (DImode, temp);
	  emit_move_insn (temp2,
			  gen_rtx_ASHIFT (DImode, temp, GEN_INT (shift)));
	  emit_move_insn (dest_reg,
			  gen_rtx_PLUS (DImode, temp2, GEN_INT (leftover)));
	  return;
	}
      else
	{
	  /* Shift in the batch of >= 16 zeroes we detected earlier.
	     After this, shift will be aligned mod 16 so the final
	     loop can use shl16insli.  */
	  rtx temp2 = create_temp_reg_if_possible (DImode, temp);
	  rtx shift_count_rtx = GEN_INT (zero_cluster_shift);

	  emit_move_insn (temp2,
			  gen_rtx_ASHIFT (DImode, temp, shift_count_rtx));

	  shift -= zero_cluster_shift;
	  temp = temp2;
	}
    }
  else
    {
      /* Set as many high 16-bit blocks as we can with a single
	 instruction.  We'll insert the remaining 16-bit blocks
	 below.  */
      for (shift = 16;; shift += 16)
	{
	  gcc_assert (shift < 64);
	  if (expand_set_cint64_one_inst (temp, val >> shift, false))
	    break;
	}
    }

  /* At this point, temp == val >> shift, shift % 16 == 0, and we
     still need to insert any bits of 'val' below 'shift'.  Those bits
     are guaranteed to not have 16 contiguous zeroes.  */

  gcc_assert ((shift & 15) == 0);

  for (ins_shift = shift - 16; ins_shift >= 0; ins_shift -= 16)
    {
      rtx result;
      HOST_WIDE_INT bits = (val >> ins_shift) & 0xFFFF;
      gcc_assert (bits != 0);

      /* On the last iteration we need to store into dest_reg.  */
      if (ins_shift == 0)
	result = dest_reg;
      else
	result = create_temp_reg_if_possible (DImode, dest_reg);

      emit_insn (gen_insn_shl16insli (result, temp, GEN_INT (bits)));

      temp = result;
    }
}
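
/* A worked example of the zero-cluster path above (illustrative):
   for val == 0x1234000000005678, the scan finds 16 aligned zero bits
   at shift 16 and extends the run up to bit 49.  The bits above the
   run, val >> 50 == 0x48d, fit in one instruction, and the leftover
   0x5678 fits in an add immediate, so the expansion is roughly:

     moveli temp, 0x48d
     shli temp2, temp, 50
     addli dest, temp2, 0x5678  */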
/* Load OP1, a 64-bit constant, into OP0, a register.  We know it
   can't be done in one insn when we get here, the move expander
   guarantees this.  */
void
tilegx_expand_set_const64 (rtx op0, rtx op1)
{
  if (CONST_INT_P (op1))
    {
      /* TODO: I don't know if we want to split large constants
	 now, or wait until later (with a define_split).

	 Does splitting early help CSE?  Does it harm other
	 optimizations that might fold loads?  */
      expand_set_cint64 (op0, op1);
    }
  else
    {
      rtx temp = create_temp_reg_if_possible (Pmode, op0);

      if (TARGET_32BIT)
	{
	  /* Generate the 2-insn sequence to materialize a symbolic
	     address.  */
	  emit_insn (gen_mov_address_32bit_step1 (temp, op1));
	  emit_insn (gen_mov_address_32bit_step2 (op0, temp, op1));
	}
      else
	{
	  /* Generate the 3-insn sequence to materialize a symbolic
	     address.  Note that this assumes that virtual addresses
	     fit in 48 signed bits, which is currently true.  */
	  rtx temp2 = create_temp_reg_if_possible (Pmode, op0);
	  emit_insn (gen_mov_address_step1 (temp, op1));
	  emit_insn (gen_mov_address_step2 (temp2, temp, op1));
	  emit_insn (gen_mov_address_step3 (op0, temp2, op1));
	}
    }
}
/* Expand a move instruction.  Return true if all work is done.  */
bool
tilegx_expand_mov (enum machine_mode mode, rtx *operands)
{
  /* Handle sets of MEM first.  */
  if (MEM_P (operands[0]))
    {
      if (can_create_pseudo_p ())
	operands[0] = validize_mem (operands[0]);

      if (reg_or_0_operand (operands[1], mode))
	return false;

      if (!reload_in_progress)
	operands[1] = force_reg (mode, operands[1]);
    }

  /* Fixup TLS cases.  */
  if (CONSTANT_P (operands[1]) && tilegx_tls_referenced_p (operands[1]))
    {
      operands[1] = tilegx_legitimize_tls_address (operands[1]);
      return false;
    }

  /* Fixup PIC cases.  */
  if (flag_pic && CONSTANT_P (operands[1]))
    {
      if (tilegx_pic_address_needs_scratch (operands[1]))
	operands[1] = tilegx_legitimize_pic_address (operands[1], mode, 0);

      if (symbolic_operand (operands[1], mode))
	{
	  operands[1] = tilegx_legitimize_pic_address (operands[1],
						       mode,
						       (reload_in_progress ?
							operands[0] :
							NULL_RTX));
	  return false;
	}
    }

  /* Accept non-constants and valid constants unmodified.  */
  if (!CONSTANT_P (operands[1]) || move_operand (operands[1], mode))
    return false;

  /* Split large integers.  */
  tilegx_expand_set_const64 (operands[0], operands[1]);
  return true;
}
/* Expand unaligned loads.  */
void
tilegx_expand_unaligned_load (rtx dest_reg, rtx mem, HOST_WIDE_INT bitsize,
			      HOST_WIDE_INT bit_offset, bool sign)
{
  enum machine_mode mode;
  rtx addr_lo, addr_hi;
  rtx mem_lo, mem_hi, hi;
  rtx mema, wide_result;
  int last_byte_offset;
  HOST_WIDE_INT byte_offset = bit_offset / BITS_PER_UNIT;

  mode = GET_MODE (dest_reg);

  if (bitsize == 2 * BITS_PER_UNIT && (bit_offset % BITS_PER_UNIT) == 0)
    {
      rtx mem_left, mem_right;
      rtx left = gen_reg_rtx (mode);

      /* When just loading a two byte value, we can load the two bytes
	 individually and combine them efficiently.  */

      mem_lo = adjust_address (mem, QImode, byte_offset);
      mem_hi = adjust_address (mem, QImode, byte_offset + 1);

      if (BYTES_BIG_ENDIAN)
	{
	  mem_left = mem_lo;
	  mem_right = mem_hi;
	}
      else
	{
	  mem_left = mem_hi;
	  mem_right = mem_lo;
	}

      if (sign)
	{
	  /* Do a signed load of the second byte and use bfins to set
	     the high bits of the result.  */
	  emit_insn (gen_zero_extendqidi2 (gen_lowpart (DImode, dest_reg),
					   mem_right));
	  emit_insn (gen_extendqidi2 (gen_lowpart (DImode, left), mem_left));
	  emit_insn (gen_insv (gen_lowpart (DImode, dest_reg),
			       GEN_INT (64 - 8), GEN_INT (8),
			       gen_lowpart (DImode, left)));
	}
      else
	{
	  /* Do two unsigned loads and use v1int_l to interleave
	     them.  */
	  rtx right = gen_reg_rtx (mode);
	  emit_insn (gen_zero_extendqidi2 (gen_lowpart (DImode, right),
					   mem_right));
	  emit_insn (gen_zero_extendqidi2 (gen_lowpart (DImode, left),
					   mem_left));
	  emit_insn (gen_insn_v1int_l (gen_lowpart (DImode, dest_reg),
				       gen_lowpart (DImode, left),
				       gen_lowpart (DImode, right)));
	}

      return;
    }

  mema = XEXP (mem, 0);

  /* AND addresses cannot be in any alias set, since they may
     implicitly alias surrounding code.  Ideally we'd have some alias
     set that covered all types except those with alignment 8 or
     higher.  */
  addr_lo = force_reg (Pmode, plus_constant (Pmode, mema, byte_offset));
  mem_lo = change_address (mem, mode,
			   gen_rtx_AND (GET_MODE (mema), addr_lo,
					GEN_INT (-8)));
  set_mem_alias_set (mem_lo, 0);

  /* Load the high word at an address that will not fault if the low
     address is aligned and at the very end of a page.  */
  last_byte_offset = (bit_offset + bitsize - 1) / BITS_PER_UNIT;
  addr_hi = force_reg (Pmode, plus_constant (Pmode, mema, last_byte_offset));
  mem_hi = change_address (mem, mode,
			   gen_rtx_AND (GET_MODE (mema), addr_hi,
					GEN_INT (-8)));
  set_mem_alias_set (mem_hi, 0);

  if (bitsize == 64)
    {
      addr_lo = make_safe_from (addr_lo, dest_reg);
      wide_result = dest_reg;
    }
  else
    {
      wide_result = gen_reg_rtx (mode);
    }

  /* Load hi first in case dest_reg is used in mema.  */
  hi = gen_reg_rtx (mode);
  emit_move_insn (hi, mem_hi);
  emit_move_insn (wide_result, mem_lo);

  emit_insn (gen_insn_dblalign (gen_lowpart (DImode, wide_result),
				gen_lowpart (DImode, wide_result),
				gen_lowpart (DImode, hi), addr_lo));

  if (bitsize != 64)
    {
      rtx extracted =
	extract_bit_field (gen_lowpart (DImode, wide_result),
			   bitsize, bit_offset % BITS_PER_UNIT,
			   !sign, gen_lowpart (DImode, dest_reg),
			   DImode, DImode);

      if (extracted != dest_reg)
	emit_move_insn (dest_reg, gen_lowpart (DImode, extracted));
    }
}
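
/* To illustrate the general path above (an assumed example): an
   8-byte load from address 0x1003 becomes an aligned load from
   0x1000, an aligned load from 0x1008 (the doubleword containing the
   last byte, at 0x100a), and a dblalign that funnel-shifts the two
   doublewords by the low bits of 0x1003 to produce the unaligned
   value.  Neither aligned load can fault if the original access was
   to valid memory.  */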
/* Expand unaligned stores.  */
static void
tilegx_expand_unaligned_store (rtx mem, rtx src, HOST_WIDE_INT bitsize,
			       HOST_WIDE_INT bit_offset)
{
  HOST_WIDE_INT byte_offset = bit_offset / BITS_PER_UNIT;
  HOST_WIDE_INT bytesize = bitsize / BITS_PER_UNIT;
  HOST_WIDE_INT shift_init, shift_increment, shift_amt;
  HOST_WIDE_INT i;
  rtx mem_addr;
  rtx store_val;

  shift_init = BYTES_BIG_ENDIAN ? (bitsize - BITS_PER_UNIT) : 0;
  shift_increment = BYTES_BIG_ENDIAN ? -BITS_PER_UNIT : BITS_PER_UNIT;

  for (i = 0, shift_amt = shift_init;
       i < bytesize;
       i++, shift_amt += shift_increment)
    {
      mem_addr = adjust_address (mem, QImode, byte_offset + i);

      if (shift_amt)
	{
	  store_val = expand_simple_binop (DImode, LSHIFTRT,
					   gen_lowpart (DImode, src),
					   GEN_INT (shift_amt), NULL, 1,
					   OPTAB_LIB_WIDEN);
	  store_val = gen_lowpart (QImode, store_val);
	}
      else
	{
	  store_val = gen_lowpart (QImode, src);
	}

      emit_move_insn (mem_addr, store_val);
    }
}
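
/* For example (illustrative, little-endian): a 32-bit unaligned store
   becomes four byte stores of src, src >> 8, src >> 16 and src >> 24
   to successive addresses; big-endian simply walks the shift amounts
   in the opposite order, starting at 24.  */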
/* Implement the movmisalign patterns.  One of the operands is a
   memory that is not naturally aligned.  Emit instructions to load
   or store it.  */
void
tilegx_expand_movmisalign (enum machine_mode mode, rtx *operands)
{
  if (MEM_P (operands[1]))
    {
      rtx tmp;

      if (register_operand (operands[0], mode))
	tmp = operands[0];
      else
	tmp = gen_reg_rtx (mode);

      tilegx_expand_unaligned_load (tmp, operands[1], GET_MODE_BITSIZE (mode),
				    0, true);

      if (tmp != operands[0])
	emit_move_insn (operands[0], tmp);
    }
  else if (MEM_P (operands[0]))
    {
      if (!reg_or_0_operand (operands[1], mode))
	operands[1] = force_reg (mode, operands[1]);

      tilegx_expand_unaligned_store (operands[0], operands[1],
				     GET_MODE_BITSIZE (mode), 0);
    }
  else
    gcc_unreachable ();
}
/* Implement the allocate_stack pattern (alloca).  */
void
tilegx_allocate_stack (rtx op0, rtx op1)
{
  /* Technically the correct way to initialize fp_loc is with
     gen_frame_mem() instead of gen_rtx_MEM(), but gen_frame_mem()
     sets the alias_set to that of a frame reference.  Some of our
     tests rely on some unsafe assumptions about when the chaining
     update is done, so we need to be conservative about reordering
     the chaining instructions.  */
  rtx fp_addr = gen_reg_rtx (Pmode);
  rtx fp_value = gen_reg_rtx (Pmode);
  rtx fp_loc;

  emit_move_insn (fp_addr, gen_rtx_PLUS (Pmode, stack_pointer_rtx,
					 GEN_INT (UNITS_PER_WORD)));

  fp_loc = gen_frame_mem (Pmode, fp_addr);

  emit_move_insn (fp_value, fp_loc);

  op1 = force_reg (Pmode, op1);

  emit_move_insn (stack_pointer_rtx,
		  gen_rtx_MINUS (Pmode, stack_pointer_rtx, op1));

  emit_move_insn (fp_addr, gen_rtx_PLUS (Pmode, stack_pointer_rtx,
					 GEN_INT (UNITS_PER_WORD)));

  fp_loc = gen_frame_mem (Pmode, fp_addr);

  emit_move_insn (fp_loc, fp_value);

  emit_move_insn (op0, virtual_stack_dynamic_rtx);
}
/* Multiplies  */

/* Returns the insn_code in ENTRY.  */
static enum insn_code
tilegx_multiply_get_opcode (const struct tilegx_multiply_insn_seq_entry
			    *entry)
{
  return tilegx_multiply_insn_seq_decode_opcode[entry->compressed_opcode];
}


/* Returns the length of the 'op' array.  */
static int
tilegx_multiply_get_num_ops (const struct tilegx_multiply_insn_seq *seq)
{
  /* The array either uses all of its allocated slots or is terminated
     by a bogus opcode.  Either way, the array size is the index of
     the last valid opcode plus one.  */
  int i;
  for (i = tilegx_multiply_insn_seq_MAX_OPERATIONS - 1; i >= 0; i--)
    if (tilegx_multiply_get_opcode (&seq->op[i]) != CODE_FOR_nothing)
      return i + 1;

  /* An empty array is not allowed.  */
  gcc_unreachable ();
}
2117 /* We precompute a number of expression trees for multiplying by
2118 constants. This generates code for such an expression tree by
2119 walking through the nodes in the tree (which are conveniently
2120 pre-linearized) and emitting an instruction for each one. */
2121 static void
2122 tilegx_expand_constant_multiply_given_sequence (rtx result, rtx src,
2123 const struct
2124 tilegx_multiply_insn_seq *seq)
2126 int i;
2127 int num_ops;
2129 /* Keep track of the subexpressions computed so far, so later
2130 instructions can refer to them. We seed the array with zero and
2131 the value being multiplied. */
2132 int num_subexprs = 2;
2133 rtx subexprs[tilegx_multiply_insn_seq_MAX_OPERATIONS + 2];
2134 subexprs[0] = const0_rtx;
2135 subexprs[1] = src;
2137 /* Determine how many instructions we are going to generate. */
2138 num_ops = tilegx_multiply_get_num_ops (seq);
2139 gcc_assert (num_ops > 0
2140 && num_ops <= tilegx_multiply_insn_seq_MAX_OPERATIONS);
2142 for (i = 0; i < num_ops; i++)
2144 const struct tilegx_multiply_insn_seq_entry *entry = &seq->op[i];
2146 /* Figure out where to store the output of this instruction. */
2147 const bool is_last_op = (i + 1 == num_ops);
2148 rtx out = is_last_op ? result : gen_reg_rtx (DImode);
2150 enum insn_code opcode = tilegx_multiply_get_opcode (entry);
2151 if (opcode == CODE_FOR_ashldi3)
2153 /* Handle shift by immediate. This is a special case because
2154 the meaning of the second operand is a constant shift
2155 count rather than an operand index. */
2157 /* Make sure the shift count is in range. Zero should not
2158 happen. */
2159 const int shift_count = entry->rhs;
2160 gcc_assert (shift_count > 0 && shift_count < 64);
2162 /* Emit the actual instruction. */
2163 emit_insn (GEN_FCN (opcode)
2164 (out, subexprs[entry->lhs],
2165 gen_rtx_CONST_INT (DImode, shift_count)));
2167 else
2169 /* Handle a normal two-operand instruction, such as add or
2170 shl1add. */
2172 /* Make sure we are referring to a previously computed
2173 subexpression. */
2174 gcc_assert (entry->rhs < num_subexprs);
2176 /* Emit the actual instruction. */
2177 emit_insn (GEN_FCN (opcode)
2178 (out, subexprs[entry->lhs], subexprs[entry->rhs]));
2181 /* Record this subexpression for use by later expressions. */
2182 subexprs[num_subexprs++] = out;
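/* As a worked illustration (a sketch, not an actual entry from the
   generated tilegx-multiply table): multiplying by 10 could be
   expressed as two ops over the seeded subexprs[] = { 0, x }:

     op[0]: shl2add, lhs=1, rhs=1  ->  subexprs[2] = x*4 + x      (5*x)
     op[1]: ashldi3, lhs=2, rhs=1  ->  result = subexprs[2] << 1  (10*x)

   For the shift, rhs is the literal shift count, as special-cased in
   the CODE_FOR_ashldi3 branch above.  */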
2187 /* bsearch helper: three-way compare of the multiplier KEY against table entry T. The sign of the 64-bit difference is returned rather than the difference itself, which could overflow an int. */
2188 static int
2189 tilegx_compare_multipliers (const void *key, const void *t)
2191 long long delta =
2192 (*(const long long *) key
2193 - ((const struct tilegx_multiply_insn_seq *) t)->multiplier);
2194 return (delta < 0) ? -1 : (delta > 0);
2198 /* Returns the tilegx_multiply_insn_seq for multiplier, or NULL if none
2199 exists. */
2200 static const struct tilegx_multiply_insn_seq *
2201 tilegx_find_multiply_insn_seq_for_constant (long long multiplier)
2203 return ((const struct tilegx_multiply_insn_seq *)
2204 bsearch (&multiplier, tilegx_multiply_insn_seq_table,
2205 tilegx_multiply_insn_seq_table_size,
2206 sizeof tilegx_multiply_insn_seq_table[0],
2207 tilegx_compare_multipliers));
2211 /* Try to expand a constant multiply in DImode by looking it up in a
2212 precompiled table. OP0 is the result operand, OP1 is the source
2213 operand, and MULTIPLIER is the value of the constant. Return true
2214 if it succeeds. */
2215 static bool
2216 tilegx_expand_const_muldi (rtx op0, rtx op1, long long multiplier)
2218 /* See if we have precomputed an efficient way to multiply by this
2219 constant. */
2220 const struct tilegx_multiply_insn_seq *seq =
2221 tilegx_find_multiply_insn_seq_for_constant (multiplier);
2222 if (seq != NULL)
2224 tilegx_expand_constant_multiply_given_sequence (op0, op1, seq);
2225 return true;
2227 else
2228 return false;
2232 /* Expand the muldi pattern. */
2233 bool
2234 tilegx_expand_muldi (rtx op0, rtx op1, rtx op2)
2236 if (CONST_INT_P (op2))
2238 HOST_WIDE_INT n = trunc_int_for_mode (INTVAL (op2), DImode);
2239 return tilegx_expand_const_muldi (op0, op1, n);
2241 return false;
2245 /* Expand a high multiply pattern in DImode. RESULT, OP1, OP2 are the
2246 operands; SIGN is true for a signed multiply and false for an
2247 unsigned multiply. */
2248 static void
2249 tilegx_expand_high_multiply (rtx result, rtx op1, rtx op2, bool sign)
2251 rtx tmp0 = gen_reg_rtx (DImode);
2252 rtx tmp1 = gen_reg_rtx (DImode);
2253 rtx tmp2 = gen_reg_rtx (DImode);
2254 rtx tmp3 = gen_reg_rtx (DImode);
2255 rtx tmp4 = gen_reg_rtx (DImode);
2256 rtx tmp5 = gen_reg_rtx (DImode);
2257 rtx tmp6 = gen_reg_rtx (DImode);
2258 rtx tmp7 = gen_reg_rtx (DImode);
2259 rtx tmp8 = gen_reg_rtx (DImode);
2260 rtx tmp9 = gen_reg_rtx (DImode);
2261 rtx tmp10 = gen_reg_rtx (DImode);
2262 rtx tmp11 = gen_reg_rtx (DImode);
2263 rtx tmp12 = gen_reg_rtx (DImode);
2264 rtx tmp13 = gen_reg_rtx (DImode);
2265 rtx result_lo = gen_reg_rtx (DImode);
2267 if (sign)
2269 emit_insn (gen_insn_mul_hs_lu (tmp0, op1, op2));
2270 emit_insn (gen_insn_mul_hs_lu (tmp1, op2, op1));
2271 emit_insn (gen_insn_mul_lu_lu (tmp2, op1, op2));
2272 emit_insn (gen_insn_mul_hs_hs (tmp3, op1, op2));
2274 else
2276 emit_insn (gen_insn_mul_hu_lu (tmp0, op1, op2));
2277 emit_insn (gen_insn_mul_hu_lu (tmp1, op2, op1));
2278 emit_insn (gen_insn_mul_lu_lu (tmp2, op1, op2));
2279 emit_insn (gen_insn_mul_hu_hu (tmp3, op1, op2));
2282 emit_move_insn (tmp4, (gen_rtx_ASHIFT (DImode, tmp0, GEN_INT (32))));
2284 emit_move_insn (tmp5, (gen_rtx_ASHIFT (DImode, tmp1, GEN_INT (32))));
2286 emit_move_insn (tmp6, (gen_rtx_PLUS (DImode, tmp4, tmp5)));
2287 emit_move_insn (result_lo, (gen_rtx_PLUS (DImode, tmp2, tmp6)));
2289 emit_move_insn (tmp7, gen_rtx_LTU (DImode, tmp6, tmp4));
2290 emit_move_insn (tmp8, gen_rtx_LTU (DImode, result_lo, tmp2));
2292 if (sign)
2294 emit_move_insn (tmp9, (gen_rtx_ASHIFTRT (DImode, tmp0, GEN_INT (32))));
2295 emit_move_insn (tmp10, (gen_rtx_ASHIFTRT (DImode, tmp1, GEN_INT (32))));
2297 else
2299 emit_move_insn (tmp9, (gen_rtx_LSHIFTRT (DImode, tmp0, GEN_INT (32))));
2300 emit_move_insn (tmp10, (gen_rtx_LSHIFTRT (DImode, tmp1, GEN_INT (32))));
2303 emit_move_insn (tmp11, (gen_rtx_PLUS (DImode, tmp3, tmp7)));
2304 emit_move_insn (tmp12, (gen_rtx_PLUS (DImode, tmp8, tmp9)));
2305 emit_move_insn (tmp13, (gen_rtx_PLUS (DImode, tmp11, tmp12)));
2306 emit_move_insn (result, (gen_rtx_PLUS (DImode, tmp13, tmp10)));
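/* The above is the schoolbook decomposition.  With op1 = a1*2^32 + a0
   and op2 = b1*2^32 + b0 (a0, b0 unsigned):

     op1*op2 = (a1*b1 << 64) + ((a1*b0 + a0*b1) << 32) + a0*b0

   tmp0/tmp1 are the cross products, tmp2 the low product and tmp3 the
   high product; tmp7/tmp8 are the carries out of the low 64 bits (the
   LTU tests detect unsigned wrap-around), and tmp9/tmp10 the high
   halves of the shifted cross products.  Summing tmp3, the two carries
   and the two high halves yields the upper 64 bits of the product.  */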
2310 /* Implement smuldi3_highpart. */
2311 void
2312 tilegx_expand_smuldi3_highpart (rtx op0, rtx op1, rtx op2)
2314 tilegx_expand_high_multiply (op0, op1, op2, true);
2318 /* Implement umuldi3_highpart. */
2319 void
2320 tilegx_expand_umuldi3_highpart (rtx op0, rtx op1, rtx op2)
2322 tilegx_expand_high_multiply (op0, op1, op2, false);
2327 /* Comparisons and branches */
2329 /* Produce the rtx yielding a bool for a floating point
2330 comparison. */
2331 static bool
2332 tilegx_emit_fp_setcc (rtx res, enum rtx_code code, enum machine_mode mode,
2333 rtx op0, rtx op1)
2335 /* TODO: Certain compares against constants can be done using entirely
2336 integer operations, but the special cases (NaN, +0 == -0, etc.)
2337 must be handled correctly. */
2339 rtx flags;
2340 int flag_index;
2341 rtx a = force_reg (DImode, gen_lowpart (DImode, op0));
2342 rtx b = force_reg (DImode, gen_lowpart (DImode, op1));
2344 flags = gen_reg_rtx (DImode);
2346 if (mode == SFmode)
2348 emit_insn (gen_insn_fsingle_add1 (flags, a, b));
2350 else
2352 gcc_assert (mode == DFmode);
2353 emit_insn (gen_insn_fdouble_add_flags (flags, a, b));
2356 switch (code)
2358 case EQ: flag_index = 30; break;
2359 case NE: flag_index = 31; break;
2360 case LE: flag_index = 27; break;
2361 case LT: flag_index = 26; break;
2362 case GE: flag_index = 29; break;
2363 case GT: flag_index = 28; break;
2364 default: gcc_unreachable ();
2367 gcc_assert (GET_MODE (res) == DImode);
2368 emit_move_insn (res, gen_rtx_ZERO_EXTRACT (DImode, flags, GEN_INT (1),
2369 GEN_INT (flag_index)));
2370 return true;
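/* For example, a DFmode "res = (x < y)" setcc comes out as roughly
   (a sketch of the two insns emitted above, not exact RTL):

     fdouble_add_flags  flags, x, y
     res = zero_extract (flags, 1, 26)   -- bit 26 is the LT flag
*/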
2374 /* Certain simplifications can be done to make invalid setcc
2375 operations valid, e.g. swapping the operands when only the
2376 reversed comparison is available. Return true on success. */
2377 static bool
2378 tilegx_emit_setcc_internal (rtx res, enum rtx_code code, rtx op0, rtx op1,
2379 enum machine_mode cmp_mode)
2381 rtx tmp;
2382 bool swap = false;
2384 if (cmp_mode == SFmode || cmp_mode == DFmode)
2385 return tilegx_emit_fp_setcc (res, code, cmp_mode, op0, op1);
2387 /* The general case: fold the comparison code to the types of
2388 compares that we have, swapping the operands as necessary. */
2390 switch (code)
2392 case EQ:
2393 case NE:
2394 case LE:
2395 case LT:
2396 case LEU:
2397 case LTU:
2398 /* We have these compares. */
2399 break;
2401 case GE:
2402 case GT:
2403 case GEU:
2404 case GTU:
2405 /* We do not have these compares, so we reverse the
2406 operands. */
2407 swap = true;
2408 break;
2410 default:
2411 /* We should not have called this with any other code. */
2412 gcc_unreachable ();
2415 if (swap)
2417 code = swap_condition (code);
2418 tmp = op0, op0 = op1, op1 = tmp;
2421 if (!reg_or_0_operand (op0, cmp_mode))
2422 op0 = force_reg (cmp_mode, op0);
2424 if (!CONST_INT_P (op1) && !register_operand (op1, cmp_mode))
2425 op1 = force_reg (cmp_mode, op1);
2427 /* Return the setcc comparison. */
2428 emit_insn (gen_rtx_SET (VOIDmode, res,
2429 gen_rtx_fmt_ee (code, DImode, op0, op1)));
2431 return true;
2435 /* Implement cstore patterns. */
2436 bool
2437 tilegx_emit_setcc (rtx operands[], enum machine_mode cmp_mode)
2439 return
2440 tilegx_emit_setcc_internal (operands[0], GET_CODE (operands[1]),
2441 operands[2], operands[3], cmp_mode);
2445 /* Return whether CODE is a signed comparison. */
2446 static bool
2447 signed_compare_p (enum rtx_code code)
2449 return (code == EQ || code == NE || code == LT || code == LE
2450 || code == GT || code == GE);
2454 /* Generate the comparison for a DImode conditional branch. */
2455 static rtx
2456 tilegx_emit_cc_test (enum rtx_code code, rtx op0, rtx op1,
2457 enum machine_mode cmp_mode, bool eq_ne_only)
2459 enum rtx_code branch_code;
2460 rtx temp;
2462 if (cmp_mode == SFmode || cmp_mode == DFmode)
2464 /* Compute a boolean saying whether the comparison is true. */
2465 temp = gen_reg_rtx (DImode);
2466 tilegx_emit_setcc_internal (temp, code, op0, op1, cmp_mode);
2468 /* Test that flag. */
2469 return gen_rtx_fmt_ee (NE, VOIDmode, temp, const0_rtx);
2472 /* Check for a compare against zero using a comparison we can do
2473 directly. */
2474 if (op1 == const0_rtx
2475 && (code == EQ || code == NE
2476 || (!eq_ne_only && signed_compare_p (code))))
2478 op0 = force_reg (cmp_mode, op0);
2479 return gen_rtx_fmt_ee (code, VOIDmode, op0, const0_rtx);
2482 /* The general case: fold the comparison code to the types of
2483 compares that we have, choosing the branch as necessary. */
2484 switch (code)
2486 case EQ:
2487 case LE:
2488 case LT:
2489 case LEU:
2490 case LTU:
2491 /* We have these compares. */
2492 branch_code = NE;
2493 break;
2495 case NE:
2496 case GE:
2497 case GT:
2498 case GEU:
2499 case GTU:
2500 /* These must be reversed (except NE, but let's
2501 canonicalize). */
2502 code = reverse_condition (code);
2503 branch_code = EQ;
2504 break;
2506 default:
2507 gcc_unreachable ();
2510 if (CONST_INT_P (op1) && (!satisfies_constraint_I (op1) || code == LEU))
2512 HOST_WIDE_INT n = INTVAL (op1);
2514 switch (code)
2516 case EQ:
2517 /* Subtract off the value we want to compare against and see
2518 if we get zero. This is cheaper than creating a constant
2519 in a register. Except that subtracting -128 is more
2520 expensive than a cmpeqi against -128, so we leave that alone. */
2521 /* ??? Don't do this when comparing against symbols,
2522 otherwise we'll reduce (&x == 0x1234) to (&x-0x1234 ==
2523 0), which will be declared false out of hand (at least
2524 for non-weak). */
2525 if (n != -128
2526 && add_operand (GEN_INT (-n), DImode)
2527 && !(symbolic_operand (op0, VOIDmode)
2528 || (REG_P (op0) && REG_POINTER (op0))))
2530 /* TODO: Use a SIMD add immediate to hit zero for tiled
2531 constants in a single instruction. */
2532 if (GET_MODE (op0) != DImode)
2534 /* Convert to DImode so we can use addli. Note that
2535 this will not actually generate any code because
2536 sign extension from SI -> DI is a no-op. I don't
2537 know if it's safe just to make a paradoxical
2538 subreg here though. */
2539 rtx temp2 = gen_reg_rtx (DImode);
2540 emit_insn (gen_extendsidi2 (temp2, op0));
2541 op0 = temp2;
2543 else
2545 op0 = force_reg (DImode, op0);
2547 temp = gen_reg_rtx (DImode);
2548 emit_move_insn (temp, gen_rtx_PLUS (DImode, op0, GEN_INT (-n)));
2549 return gen_rtx_fmt_ee (reverse_condition (branch_code),
2550 VOIDmode, temp, const0_rtx);
2552 break;
2554 case LEU:
2555 if (n == -1)
2556 break;
2557 /* FALLTHRU */
2559 case LTU:
2560 /* Change ((unsigned)x < 0x1000) into !((int)x >> 12), etc.
2561 We use arithmetic shift right because it's a 3-wide op,
2562 while logical shift right is not. */
2564 int first = exact_log2 (code == LTU ? n : n + 1);
2565 if (first != -1)
2567 op0 = force_reg (cmp_mode, op0);
2568 temp = gen_reg_rtx (cmp_mode);
2569 emit_move_insn (temp,
2570 gen_rtx_ASHIFTRT (cmp_mode, op0,
2571 GEN_INT (first)));
2572 return gen_rtx_fmt_ee (reverse_condition (branch_code),
2573 VOIDmode, temp, const0_rtx);
2576 break;
2578 default:
2579 break;
2583 /* Compute a flag saying whether we should branch. */
2584 temp = gen_reg_rtx (DImode);
2585 tilegx_emit_setcc_internal (temp, code, op0, op1, cmp_mode);
2587 /* Return the branch comparison. */
2588 return gen_rtx_fmt_ee (branch_code, VOIDmode, temp, const0_rtx);
2592 /* Generate the comparison for a conditional branch. */
2593 void
2594 tilegx_emit_conditional_branch (rtx operands[], enum machine_mode cmp_mode)
2596 rtx cmp_rtx =
2597 tilegx_emit_cc_test (GET_CODE (operands[0]), operands[1], operands[2],
2598 cmp_mode, false);
2599 rtx branch_rtx = gen_rtx_SET (VOIDmode, pc_rtx,
2600 gen_rtx_IF_THEN_ELSE (VOIDmode, cmp_rtx,
2601 gen_rtx_LABEL_REF
2602 (VOIDmode,
2603 operands[3]),
2604 pc_rtx));
2605 emit_jump_insn (branch_rtx);
2609 /* Implement the mov<mode>cc pattern. */
2610 rtx
2611 tilegx_emit_conditional_move (rtx cmp)
2613 return
2614 tilegx_emit_cc_test (GET_CODE (cmp), XEXP (cmp, 0), XEXP (cmp, 1),
2615 GET_MODE (XEXP (cmp, 0)), true);
2619 /* Return true if INSN is annotated with a REG_BR_PROB note that
2620 indicates it's a branch that's predicted taken. */
2621 static bool
2622 cbranch_predicted_p (rtx insn)
2624 rtx x = find_reg_note (insn, REG_BR_PROB, 0);
2626 if (x)
2628 int pred_val = XINT (x, 0);
2630 return pred_val >= REG_BR_PROB_BASE / 2;
2633 return false;
2637 /* Output assembly code for a specific branch instruction, appending
2638 the branch prediction flag to the opcode if appropriate. */
2639 static const char *
2640 tilegx_output_simple_cbranch_with_opcode (rtx insn, const char *opcode,
2641 int regop, bool reverse_predicted)
2643 static char buf[64];
2644 sprintf (buf, "%s%s\t%%r%d, %%l0", opcode,
2645 (cbranch_predicted_p (insn) ^ reverse_predicted) ? "t" : "",
2646 regop);
2647 return buf;
2651 /* Output assembly code for a conditional branch, falling back to a
2652 reversed branch around an unconditional jump if out of range. */
2653 const char *
2654 tilegx_output_cbranch_with_opcode (rtx insn, rtx *operands,
2655 const char *opcode,
2656 const char *rev_opcode, int regop)
2658 const char *branch_if_false;
2659 rtx taken, not_taken;
2660 bool is_simple_branch;
2662 gcc_assert (LABEL_P (operands[0]));
2664 is_simple_branch = true;
2665 if (INSN_ADDRESSES_SET_P ())
2667 int from_addr = INSN_ADDRESSES (INSN_UID (insn));
2668 int to_addr = INSN_ADDRESSES (INSN_UID (operands[0]));
2669 int delta = to_addr - from_addr;
2670 is_simple_branch = IN_RANGE (delta, -524288, 524280);
2673 if (is_simple_branch)
2675 /* Just a simple conditional branch. */
2676 return
2677 tilegx_output_simple_cbranch_with_opcode (insn, opcode, regop, false);
2680 /* Generate a reversed branch around a direct jump; the reversed
2681 branch's prediction hint is flipped so the hint stays accurate. */
2682 not_taken = gen_label_rtx ();
2683 taken = operands[0];
2685 /* Generate the reversed branch to NOT_TAKEN. */
2686 operands[0] = not_taken;
2687 branch_if_false =
2688 tilegx_output_simple_cbranch_with_opcode (insn, rev_opcode, regop, true);
2689 output_asm_insn (branch_if_false, operands);
2691 output_asm_insn ("j\t%l0", &taken);
2693 /* Output NOT_TAKEN. */
2694 targetm.asm_out.internal_label (asm_out_file, "L",
2695 CODE_LABEL_NUMBER (not_taken));
2696 return "";
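/* For instance, an out-of-range "beqz %r2, target" that is predicted
   taken would be emitted as something like (illustrative only; the
   exact label name and hint suffix depend on the insn's notes):

        bnez  r2, .Lnot_taken    -- reversed test, hint flipped
        j     target
   .Lnot_taken:
*/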
2700 /* Output assembly code for a conditional branch instruction. */
2701 const char *
2702 tilegx_output_cbranch (rtx insn, rtx *operands, bool reversed)
2704 enum rtx_code code = GET_CODE (operands[1]);
2705 const char *opcode;
2706 const char *rev_opcode;
2708 if (reversed)
2709 code = reverse_condition (code);
2711 switch (code)
2713 case NE:
2714 opcode = "bnez";
2715 rev_opcode = "beqz";
2716 break;
2717 case EQ:
2718 opcode = "beqz";
2719 rev_opcode = "bnez";
2720 break;
2721 case GE:
2722 opcode = "bgez";
2723 rev_opcode = "bltz";
2724 break;
2725 case GT:
2726 opcode = "bgtz";
2727 rev_opcode = "blez";
2728 break;
2729 case LE:
2730 opcode = "blez";
2731 rev_opcode = "bgtz";
2732 break;
2733 case LT:
2734 opcode = "bltz";
2735 rev_opcode = "bgez";
2736 break;
2737 default:
2738 gcc_unreachable ();
2741 return tilegx_output_cbranch_with_opcode (insn, operands, opcode,
2742 rev_opcode, 2);
2746 /* Implement the tablejump pattern. */
2747 void
2748 tilegx_expand_tablejump (rtx op0, rtx op1)
2750 if (flag_pic)
2752 rtx temp = gen_reg_rtx (Pmode);
2753 rtx temp2 = gen_reg_rtx (Pmode);
2755 tilegx_compute_pcrel_address (temp, gen_rtx_LABEL_REF (Pmode, op1));
2756 emit_move_insn (temp2,
2757 gen_rtx_PLUS (Pmode,
2758 convert_to_mode (Pmode, op0, false),
2759 temp));
2760 op0 = temp2;
2763 emit_jump_insn (gen_tablejump_aux (op0, op1));
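/* Under -fPIC the jump table holds offsets relative to the table's
   own label rather than absolute addresses, so the PIC path above
   adds the pc-relative address of the table to the fetched entry
   before jumping.  */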
2767 /* Emit barrier before an atomic, as needed for the memory MODEL. */
2768 void
2769 tilegx_pre_atomic_barrier (enum memmodel model)
2771 if (need_atomic_barrier_p (model, true))
2772 emit_insn (gen_memory_barrier ());
2776 /* Emit barrier after an atomic, as needed for the memory MODEL. */
2777 void
2778 tilegx_post_atomic_barrier (enum memmodel model)
2780 if (need_atomic_barrier_p (model, false))
2781 emit_insn (gen_memory_barrier ());
2786 /* Expand a builtin vector binary op by calling gen function GEN with
2787 operands in the proper modes. DEST is converted to DEST_MODE;
2788 SRC0, and SRC1 if DO_SRC1 is true, are converted to SRC_MODE. */
2789 void
2790 tilegx_expand_builtin_vector_binop (rtx (*gen) (rtx, rtx, rtx),
2791 enum machine_mode dest_mode,
2792 rtx dest,
2793 enum machine_mode src_mode,
2794 rtx src0, rtx src1, bool do_src1)
2796 dest = gen_lowpart (dest_mode, dest);
2798 if (src0 == const0_rtx)
2799 src0 = CONST0_RTX (src_mode);
2800 else
2801 src0 = gen_lowpart (src_mode, src0);
2803 if (do_src1)
2805 if (src1 == const0_rtx)
2806 src1 = CONST0_RTX (src_mode);
2807 else
2808 src1 = gen_lowpart (src_mode, src1);
2811 emit_insn ((*gen) (dest, src0, src1));
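/* A typical call from a machine-description expander might look like
   this sketch (gen_addv8qi3 is assumed to be the generator for the
   V8QI add pattern):

     tilegx_expand_builtin_vector_binop (gen_addv8qi3, V8QImode,
                                         operands[0], V8QImode,
                                         operands[1], operands[2],
                                         true);
*/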
2816 /* Intrinsics */
2819 struct tile_builtin_info
2821 enum insn_code icode;
2822 tree fndecl;
2825 static struct tile_builtin_info tilegx_builtin_info[TILEGX_BUILTIN_max] = {
2826 { CODE_FOR_adddi3, NULL }, /* add */
2827 { CODE_FOR_addsi3, NULL }, /* addx */
2828 { CODE_FOR_ssaddsi3, NULL }, /* addxsc */
2829 { CODE_FOR_anddi3, NULL }, /* and */
2830 { CODE_FOR_insn_bfexts, NULL }, /* bfexts */
2831 { CODE_FOR_insn_bfextu, NULL }, /* bfextu */
2832 { CODE_FOR_insn_bfins, NULL }, /* bfins */
2833 { CODE_FOR_clzdi2, NULL }, /* clz */
2834 { CODE_FOR_insn_cmoveqz, NULL }, /* cmoveqz */
2835 { CODE_FOR_insn_cmovnez, NULL }, /* cmovnez */
2836 { CODE_FOR_insn_cmpeq_didi, NULL }, /* cmpeq */
2837 { CODE_FOR_insn_cmpexch, NULL }, /* cmpexch */
2838 { CODE_FOR_insn_cmpexch4, NULL }, /* cmpexch4 */
2839 { CODE_FOR_insn_cmples_didi, NULL }, /* cmples */
2840 { CODE_FOR_insn_cmpleu_didi, NULL }, /* cmpleu */
2841 { CODE_FOR_insn_cmplts_didi, NULL }, /* cmplts */
2842 { CODE_FOR_insn_cmpltu_didi, NULL }, /* cmpltu */
2843 { CODE_FOR_insn_cmpne_didi, NULL }, /* cmpne */
2844 { CODE_FOR_insn_cmul, NULL }, /* cmul */
2845 { CODE_FOR_insn_cmula, NULL }, /* cmula */
2846 { CODE_FOR_insn_cmulaf, NULL }, /* cmulaf */
2847 { CODE_FOR_insn_cmulf, NULL }, /* cmulf */
2848 { CODE_FOR_insn_cmulfr, NULL }, /* cmulfr */
2849 { CODE_FOR_insn_cmulh, NULL }, /* cmulh */
2850 { CODE_FOR_insn_cmulhr, NULL }, /* cmulhr */
2851 { CODE_FOR_insn_crc32_32, NULL }, /* crc32_32 */
2852 { CODE_FOR_insn_crc32_8, NULL }, /* crc32_8 */
2853 { CODE_FOR_ctzdi2, NULL }, /* ctz */
2854 { CODE_FOR_insn_dblalign, NULL }, /* dblalign */
2855 { CODE_FOR_insn_dblalign2, NULL }, /* dblalign2 */
2856 { CODE_FOR_insn_dblalign4, NULL }, /* dblalign4 */
2857 { CODE_FOR_insn_dblalign6, NULL }, /* dblalign6 */
2858 { CODE_FOR_insn_drain, NULL }, /* drain */
2859 { CODE_FOR_insn_dtlbpr, NULL }, /* dtlbpr */
2860 { CODE_FOR_insn_exch, NULL }, /* exch */
2861 { CODE_FOR_insn_exch4, NULL }, /* exch4 */
2862 { CODE_FOR_insn_fdouble_add_flags, NULL }, /* fdouble_add_flags */
2863 { CODE_FOR_insn_fdouble_addsub, NULL }, /* fdouble_addsub */
2864 { CODE_FOR_insn_fdouble_mul_flags, NULL }, /* fdouble_mul_flags */
2865 { CODE_FOR_insn_fdouble_pack1, NULL }, /* fdouble_pack1 */
2866 { CODE_FOR_insn_fdouble_pack2, NULL }, /* fdouble_pack2 */
2867 { CODE_FOR_insn_fdouble_sub_flags, NULL }, /* fdouble_sub_flags */
2868 { CODE_FOR_insn_fdouble_unpack_max, NULL }, /* fdouble_unpack_max */
2869 { CODE_FOR_insn_fdouble_unpack_min, NULL }, /* fdouble_unpack_min */
2870 { CODE_FOR_insn_fetchadd, NULL }, /* fetchadd */
2871 { CODE_FOR_insn_fetchadd4, NULL }, /* fetchadd4 */
2872 { CODE_FOR_insn_fetchaddgez, NULL }, /* fetchaddgez */
2873 { CODE_FOR_insn_fetchaddgez4, NULL }, /* fetchaddgez4 */
2874 { CODE_FOR_insn_fetchand, NULL }, /* fetchand */
2875 { CODE_FOR_insn_fetchand4, NULL }, /* fetchand4 */
2876 { CODE_FOR_insn_fetchor, NULL }, /* fetchor */
2877 { CODE_FOR_insn_fetchor4, NULL }, /* fetchor4 */
2878 { CODE_FOR_insn_finv, NULL }, /* finv */
2879 { CODE_FOR_insn_flush, NULL }, /* flush */
2880 { CODE_FOR_insn_flushwb, NULL }, /* flushwb */
2881 { CODE_FOR_insn_fnop, NULL }, /* fnop */
2882 { CODE_FOR_insn_fsingle_add1, NULL }, /* fsingle_add1 */
2883 { CODE_FOR_insn_fsingle_addsub2, NULL }, /* fsingle_addsub2 */
2884 { CODE_FOR_insn_fsingle_mul1, NULL }, /* fsingle_mul1 */
2885 { CODE_FOR_insn_fsingle_mul2, NULL }, /* fsingle_mul2 */
2886 { CODE_FOR_insn_fsingle_pack1, NULL }, /* fsingle_pack1 */
2887 { CODE_FOR_insn_fsingle_pack2, NULL }, /* fsingle_pack2 */
2888 { CODE_FOR_insn_fsingle_sub1, NULL }, /* fsingle_sub1 */
2889 { CODE_FOR_insn_icoh, NULL }, /* icoh */
2890 { CODE_FOR_insn_ill, NULL }, /* ill */
2891 { CODE_FOR_insn_info, NULL }, /* info */
2892 { CODE_FOR_insn_infol, NULL }, /* infol */
2893 { CODE_FOR_insn_inv, NULL }, /* inv */
2894 { CODE_FOR_insn_ld, NULL }, /* ld */
2895 { CODE_FOR_insn_ld1s, NULL }, /* ld1s */
2896 { CODE_FOR_insn_ld1u, NULL }, /* ld1u */
2897 { CODE_FOR_insn_ld2s, NULL }, /* ld2s */
2898 { CODE_FOR_insn_ld2u, NULL }, /* ld2u */
2899 { CODE_FOR_insn_ld4s, NULL }, /* ld4s */
2900 { CODE_FOR_insn_ld4u, NULL }, /* ld4u */
2901 { CODE_FOR_insn_ldna, NULL }, /* ldna */
2902 { CODE_FOR_insn_ldnt, NULL }, /* ldnt */
2903 { CODE_FOR_insn_ldnt1s, NULL }, /* ldnt1s */
2904 { CODE_FOR_insn_ldnt1u, NULL }, /* ldnt1u */
2905 { CODE_FOR_insn_ldnt2s, NULL }, /* ldnt2s */
2906 { CODE_FOR_insn_ldnt2u, NULL }, /* ldnt2u */
2907 { CODE_FOR_insn_ldnt4s, NULL }, /* ldnt4s */
2908 { CODE_FOR_insn_ldnt4u, NULL }, /* ldnt4u */
2909 { CODE_FOR_insn_ld_L2, NULL }, /* ld_L2 */
2910 { CODE_FOR_insn_ld1s_L2, NULL }, /* ld1s_L2 */
2911 { CODE_FOR_insn_ld1u_L2, NULL }, /* ld1u_L2 */
2912 { CODE_FOR_insn_ld2s_L2, NULL }, /* ld2s_L2 */
2913 { CODE_FOR_insn_ld2u_L2, NULL }, /* ld2u_L2 */
2914 { CODE_FOR_insn_ld4s_L2, NULL }, /* ld4s_L2 */
2915 { CODE_FOR_insn_ld4u_L2, NULL }, /* ld4u_L2 */
2916 { CODE_FOR_insn_ldna_L2, NULL }, /* ldna_L2 */
2917 { CODE_FOR_insn_ldnt_L2, NULL }, /* ldnt_L2 */
2918 { CODE_FOR_insn_ldnt1s_L2, NULL }, /* ldnt1s_L2 */
2919 { CODE_FOR_insn_ldnt1u_L2, NULL }, /* ldnt1u_L2 */
2920 { CODE_FOR_insn_ldnt2s_L2, NULL }, /* ldnt2s_L2 */
2921 { CODE_FOR_insn_ldnt2u_L2, NULL }, /* ldnt2u_L2 */
2922 { CODE_FOR_insn_ldnt4s_L2, NULL }, /* ldnt4s_L2 */
2923 { CODE_FOR_insn_ldnt4u_L2, NULL }, /* ldnt4u_L2 */
2924 { CODE_FOR_insn_ld_miss, NULL }, /* ld_miss */
2925 { CODE_FOR_insn_ld1s_miss, NULL }, /* ld1s_miss */
2926 { CODE_FOR_insn_ld1u_miss, NULL }, /* ld1u_miss */
2927 { CODE_FOR_insn_ld2s_miss, NULL }, /* ld2s_miss */
2928 { CODE_FOR_insn_ld2u_miss, NULL }, /* ld2u_miss */
2929 { CODE_FOR_insn_ld4s_miss, NULL }, /* ld4s_miss */
2930 { CODE_FOR_insn_ld4u_miss, NULL }, /* ld4u_miss */
2931 { CODE_FOR_insn_ldna_miss, NULL }, /* ldna_miss */
2932 { CODE_FOR_insn_ldnt_miss, NULL }, /* ldnt_miss */
2933 { CODE_FOR_insn_ldnt1s_miss, NULL }, /* ldnt1s_miss */
2934 { CODE_FOR_insn_ldnt1u_miss, NULL }, /* ldnt1u_miss */
2935 { CODE_FOR_insn_ldnt2s_miss, NULL }, /* ldnt2s_miss */
2936 { CODE_FOR_insn_ldnt2u_miss, NULL }, /* ldnt2u_miss */
2937 { CODE_FOR_insn_ldnt4s_miss, NULL }, /* ldnt4s_miss */
2938 { CODE_FOR_insn_ldnt4u_miss, NULL }, /* ldnt4u_miss */
2939 { CODE_FOR_insn_lnk, NULL }, /* lnk */
2940 { CODE_FOR_memory_barrier, NULL }, /* mf */
2941 { CODE_FOR_insn_mfspr, NULL }, /* mfspr */
2942 { CODE_FOR_insn_mm, NULL }, /* mm */
2943 { CODE_FOR_insn_mnz, NULL }, /* mnz */
2944 { CODE_FOR_movdi, NULL }, /* move */
2945 { CODE_FOR_insn_mtspr, NULL }, /* mtspr */
2946 { CODE_FOR_insn_mul_hs_hs, NULL }, /* mul_hs_hs */
2947 { CODE_FOR_insn_mul_hs_hu, NULL }, /* mul_hs_hu */
2948 { CODE_FOR_insn_mul_hs_ls, NULL }, /* mul_hs_ls */
2949 { CODE_FOR_insn_mul_hs_lu, NULL }, /* mul_hs_lu */
2950 { CODE_FOR_insn_mul_hu_hu, NULL }, /* mul_hu_hu */
2951 { CODE_FOR_insn_mul_hu_ls, NULL }, /* mul_hu_ls */
2952 { CODE_FOR_insn_mul_hu_lu, NULL }, /* mul_hu_lu */
2953 { CODE_FOR_insn_mul_ls_ls, NULL }, /* mul_ls_ls */
2954 { CODE_FOR_insn_mul_ls_lu, NULL }, /* mul_ls_lu */
2955 { CODE_FOR_insn_mul_lu_lu, NULL }, /* mul_lu_lu */
2956 { CODE_FOR_insn_mula_hs_hs, NULL }, /* mula_hs_hs */
2957 { CODE_FOR_insn_mula_hs_hu, NULL }, /* mula_hs_hu */
2958 { CODE_FOR_insn_mula_hs_ls, NULL }, /* mula_hs_ls */
2959 { CODE_FOR_insn_mula_hs_lu, NULL }, /* mula_hs_lu */
2960 { CODE_FOR_insn_mula_hu_hu, NULL }, /* mula_hu_hu */
2961 { CODE_FOR_insn_mula_hu_ls, NULL }, /* mula_hu_ls */
2962 { CODE_FOR_insn_mula_hu_lu, NULL }, /* mula_hu_lu */
2963 { CODE_FOR_insn_mula_ls_ls, NULL }, /* mula_ls_ls */
2964 { CODE_FOR_insn_mula_ls_lu, NULL }, /* mula_ls_lu */
2965 { CODE_FOR_insn_mula_lu_lu, NULL }, /* mula_lu_lu */
2966 { CODE_FOR_insn_mulax, NULL }, /* mulax */
2967 { CODE_FOR_mulsi3, NULL }, /* mulx */
2968 { CODE_FOR_insn_mz, NULL }, /* mz */
2969 { CODE_FOR_insn_nap, NULL }, /* nap */
2970 { CODE_FOR_nop, NULL }, /* nop */
2971 { CODE_FOR_insn_nor_di, NULL }, /* nor */
2972 { CODE_FOR_iordi3, NULL }, /* or */
2973 { CODE_FOR_popcountdi2, NULL }, /* pcnt */
2974 { CODE_FOR_insn_prefetch_l1, NULL }, /* prefetch_l1 */
2975 { CODE_FOR_insn_prefetch_l1_fault, NULL }, /* prefetch_l1_fault */
2976 { CODE_FOR_insn_prefetch_l2, NULL }, /* prefetch_l2 */
2977 { CODE_FOR_insn_prefetch_l2_fault, NULL }, /* prefetch_l2_fault */
2978 { CODE_FOR_insn_prefetch_l3, NULL }, /* prefetch_l3 */
2979 { CODE_FOR_insn_prefetch_l3_fault, NULL }, /* prefetch_l3_fault */
2980 { CODE_FOR_insn_revbits, NULL }, /* revbits */
2981 { CODE_FOR_bswapdi2, NULL }, /* revbytes */
2982 { CODE_FOR_rotldi3, NULL }, /* rotl */
2983 { CODE_FOR_ashldi3, NULL }, /* shl */
2984 { CODE_FOR_insn_shl16insli, NULL }, /* shl16insli */
2985 { CODE_FOR_insn_shl1add, NULL }, /* shl1add */
2986 { CODE_FOR_insn_shl1addx, NULL }, /* shl1addx */
2987 { CODE_FOR_insn_shl2add, NULL }, /* shl2add */
2988 { CODE_FOR_insn_shl2addx, NULL }, /* shl2addx */
2989 { CODE_FOR_insn_shl3add, NULL }, /* shl3add */
2990 { CODE_FOR_insn_shl3addx, NULL }, /* shl3addx */
2991 { CODE_FOR_ashlsi3, NULL }, /* shlx */
2992 { CODE_FOR_ashrdi3, NULL }, /* shrs */
2993 { CODE_FOR_lshrdi3, NULL }, /* shru */
2994 { CODE_FOR_lshrsi3, NULL }, /* shrux */
2995 { CODE_FOR_insn_shufflebytes, NULL }, /* shufflebytes */
2996 { CODE_FOR_insn_shufflebytes1, NULL }, /* shufflebytes1 */
2997 { CODE_FOR_insn_st, NULL }, /* st */
2998 { CODE_FOR_insn_st1, NULL }, /* st1 */
2999 { CODE_FOR_insn_st2, NULL }, /* st2 */
3000 { CODE_FOR_insn_st4, NULL }, /* st4 */
3001 { CODE_FOR_insn_stnt, NULL }, /* stnt */
3002 { CODE_FOR_insn_stnt1, NULL }, /* stnt1 */
3003 { CODE_FOR_insn_stnt2, NULL }, /* stnt2 */
3004 { CODE_FOR_insn_stnt4, NULL }, /* stnt4 */
3005 { CODE_FOR_subdi3, NULL }, /* sub */
3006 { CODE_FOR_subsi3, NULL }, /* subx */
3007 { CODE_FOR_sssubsi3, NULL }, /* subxsc */
3008 { CODE_FOR_insn_tblidxb0, NULL }, /* tblidxb0 */
3009 { CODE_FOR_insn_tblidxb1, NULL }, /* tblidxb1 */
3010 { CODE_FOR_insn_tblidxb2, NULL }, /* tblidxb2 */
3011 { CODE_FOR_insn_tblidxb3, NULL }, /* tblidxb3 */
3012 { CODE_FOR_insn_v1add, NULL }, /* v1add */
3013 { CODE_FOR_insn_v1addi, NULL }, /* v1addi */
3014 { CODE_FOR_insn_v1adduc, NULL }, /* v1adduc */
3015 { CODE_FOR_insn_v1adiffu, NULL }, /* v1adiffu */
3016 { CODE_FOR_insn_v1avgu, NULL }, /* v1avgu */
3017 { CODE_FOR_insn_v1cmpeq, NULL }, /* v1cmpeq */
3018 { CODE_FOR_insn_v1cmpeqi, NULL }, /* v1cmpeqi */
3019 { CODE_FOR_insn_v1cmples, NULL }, /* v1cmples */
3020 { CODE_FOR_insn_v1cmpleu, NULL }, /* v1cmpleu */
3021 { CODE_FOR_insn_v1cmplts, NULL }, /* v1cmplts */
3022 { CODE_FOR_insn_v1cmpltsi, NULL }, /* v1cmpltsi */
3023 { CODE_FOR_insn_v1cmpltu, NULL }, /* v1cmpltu */
3024 { CODE_FOR_insn_v1cmpltui, NULL }, /* v1cmpltui */
3025 { CODE_FOR_insn_v1cmpne, NULL }, /* v1cmpne */
3026 { CODE_FOR_insn_v1ddotpu, NULL }, /* v1ddotpu */
3027 { CODE_FOR_insn_v1ddotpua, NULL }, /* v1ddotpua */
3028 { CODE_FOR_insn_v1ddotpus, NULL }, /* v1ddotpus */
3029 { CODE_FOR_insn_v1ddotpusa, NULL }, /* v1ddotpusa */
3030 { CODE_FOR_insn_v1dotp, NULL }, /* v1dotp */
3031 { CODE_FOR_insn_v1dotpa, NULL }, /* v1dotpa */
3032 { CODE_FOR_insn_v1dotpu, NULL }, /* v1dotpu */
3033 { CODE_FOR_insn_v1dotpua, NULL }, /* v1dotpua */
3034 { CODE_FOR_insn_v1dotpus, NULL }, /* v1dotpus */
3035 { CODE_FOR_insn_v1dotpusa, NULL }, /* v1dotpusa */
3036 { CODE_FOR_insn_v1int_h, NULL }, /* v1int_h */
3037 { CODE_FOR_insn_v1int_l, NULL }, /* v1int_l */
3038 { CODE_FOR_insn_v1maxu, NULL }, /* v1maxu */
3039 { CODE_FOR_insn_v1maxui, NULL }, /* v1maxui */
3040 { CODE_FOR_insn_v1minu, NULL }, /* v1minu */
3041 { CODE_FOR_insn_v1minui, NULL }, /* v1minui */
3042 { CODE_FOR_insn_v1mnz, NULL }, /* v1mnz */
3043 { CODE_FOR_insn_v1multu, NULL }, /* v1multu */
3044 { CODE_FOR_insn_v1mulu, NULL }, /* v1mulu */
3045 { CODE_FOR_insn_v1mulus, NULL }, /* v1mulus */
3046 { CODE_FOR_insn_v1mz, NULL }, /* v1mz */
3047 { CODE_FOR_insn_v1sadau, NULL }, /* v1sadau */
3048 { CODE_FOR_insn_v1sadu, NULL }, /* v1sadu */
3049 { CODE_FOR_insn_v1shl, NULL }, /* v1shl */
3050 { CODE_FOR_insn_v1shl, NULL }, /* v1shli */
3051 { CODE_FOR_insn_v1shrs, NULL }, /* v1shrs */
3052 { CODE_FOR_insn_v1shrs, NULL }, /* v1shrsi */
3053 { CODE_FOR_insn_v1shru, NULL }, /* v1shru */
3054 { CODE_FOR_insn_v1shru, NULL }, /* v1shrui */
3055 { CODE_FOR_insn_v1sub, NULL }, /* v1sub */
3056 { CODE_FOR_insn_v1subuc, NULL }, /* v1subuc */
3057 { CODE_FOR_insn_v2add, NULL }, /* v2add */
3058 { CODE_FOR_insn_v2addi, NULL }, /* v2addi */
3059 { CODE_FOR_insn_v2addsc, NULL }, /* v2addsc */
3060 { CODE_FOR_insn_v2adiffs, NULL }, /* v2adiffs */
3061 { CODE_FOR_insn_v2avgs, NULL }, /* v2avgs */
3062 { CODE_FOR_insn_v2cmpeq, NULL }, /* v2cmpeq */
3063 { CODE_FOR_insn_v2cmpeqi, NULL }, /* v2cmpeqi */
3064 { CODE_FOR_insn_v2cmples, NULL }, /* v2cmples */
3065 { CODE_FOR_insn_v2cmpleu, NULL }, /* v2cmpleu */
3066 { CODE_FOR_insn_v2cmplts, NULL }, /* v2cmplts */
3067 { CODE_FOR_insn_v2cmpltsi, NULL }, /* v2cmpltsi */
3068 { CODE_FOR_insn_v2cmpltu, NULL }, /* v2cmpltu */
3069 { CODE_FOR_insn_v2cmpltui, NULL }, /* v2cmpltui */
3070 { CODE_FOR_insn_v2cmpne, NULL }, /* v2cmpne */
3071 { CODE_FOR_insn_v2dotp, NULL }, /* v2dotp */
3072 { CODE_FOR_insn_v2dotpa, NULL }, /* v2dotpa */
3073 { CODE_FOR_insn_v2int_h, NULL }, /* v2int_h */
3074 { CODE_FOR_insn_v2int_l, NULL }, /* v2int_l */
3075 { CODE_FOR_insn_v2maxs, NULL }, /* v2maxs */
3076 { CODE_FOR_insn_v2maxsi, NULL }, /* v2maxsi */
3077 { CODE_FOR_insn_v2mins, NULL }, /* v2mins */
3078 { CODE_FOR_insn_v2minsi, NULL }, /* v2minsi */
3079 { CODE_FOR_insn_v2mnz, NULL }, /* v2mnz */
3080 { CODE_FOR_insn_v2mulfsc, NULL }, /* v2mulfsc */
3081 { CODE_FOR_insn_v2muls, NULL }, /* v2muls */
3082 { CODE_FOR_insn_v2mults, NULL }, /* v2mults */
3083 { CODE_FOR_insn_v2mz, NULL }, /* v2mz */
3084 { CODE_FOR_insn_v2packh, NULL }, /* v2packh */
3085 { CODE_FOR_insn_v2packl, NULL }, /* v2packl */
3086 { CODE_FOR_insn_v2packuc, NULL }, /* v2packuc */
3087 { CODE_FOR_insn_v2sadas, NULL }, /* v2sadas */
3088 { CODE_FOR_insn_v2sadau, NULL }, /* v2sadau */
3089 { CODE_FOR_insn_v2sads, NULL }, /* v2sads */
3090 { CODE_FOR_insn_v2sadu, NULL }, /* v2sadu */
3091 { CODE_FOR_insn_v2shl, NULL }, /* v2shl */
3092 { CODE_FOR_insn_v2shl, NULL }, /* v2shli */
3093 { CODE_FOR_insn_v2shlsc, NULL }, /* v2shlsc */
3094 { CODE_FOR_insn_v2shrs, NULL }, /* v2shrs */
3095 { CODE_FOR_insn_v2shrs, NULL }, /* v2shrsi */
3096 { CODE_FOR_insn_v2shru, NULL }, /* v2shru */
3097 { CODE_FOR_insn_v2shru, NULL }, /* v2shrui */
3098 { CODE_FOR_insn_v2sub, NULL }, /* v2sub */
3099 { CODE_FOR_insn_v2subsc, NULL }, /* v2subsc */
3100 { CODE_FOR_insn_v4add, NULL }, /* v4add */
3101 { CODE_FOR_insn_v4addsc, NULL }, /* v4addsc */
3102 { CODE_FOR_insn_v4int_h, NULL }, /* v4int_h */
3103 { CODE_FOR_insn_v4int_l, NULL }, /* v4int_l */
3104 { CODE_FOR_insn_v4packsc, NULL }, /* v4packsc */
3105 { CODE_FOR_insn_v4shl, NULL }, /* v4shl */
3106 { CODE_FOR_insn_v4shlsc, NULL }, /* v4shlsc */
3107 { CODE_FOR_insn_v4shrs, NULL }, /* v4shrs */
3108 { CODE_FOR_insn_v4shru, NULL }, /* v4shru */
3109 { CODE_FOR_insn_v4sub, NULL }, /* v4sub */
3110 { CODE_FOR_insn_v4subsc, NULL }, /* v4subsc */
3111 { CODE_FOR_insn_wh64, NULL }, /* wh64 */
3112 { CODE_FOR_xordi3, NULL }, /* xor */
3113 { CODE_FOR_tilegx_network_barrier, NULL }, /* network_barrier */
3114 { CODE_FOR_tilegx_idn0_receive, NULL }, /* idn0_receive */
3115 { CODE_FOR_tilegx_idn1_receive, NULL }, /* idn1_receive */
3116 { CODE_FOR_tilegx_idn_send, NULL }, /* idn_send */
3117 { CODE_FOR_tilegx_udn0_receive, NULL }, /* udn0_receive */
3118 { CODE_FOR_tilegx_udn1_receive, NULL }, /* udn1_receive */
3119 { CODE_FOR_tilegx_udn2_receive, NULL }, /* udn2_receive */
3120 { CODE_FOR_tilegx_udn3_receive, NULL }, /* udn3_receive */
3121 { CODE_FOR_tilegx_udn_send, NULL }, /* udn_send */
3125 struct tilegx_builtin_def
3127 const char *name;
3128 enum tilegx_builtin code;
3129 bool is_const;
3130 /* The first character is the return type. Subsequent characters
3131 are the argument types. See char_to_type. */
3132 const char *type;
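/* For example, the signature "lpl" of __insn_exch below decodes, via
   char_to_type, to: return type 'l' (unsigned long long), first
   argument 'p' (volatile void *), second argument 'l'.  */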
3136 static const struct tilegx_builtin_def tilegx_builtins[] = {
3137 { "__insn_add", TILEGX_INSN_ADD, true, "lll" },
3138 { "__insn_addi", TILEGX_INSN_ADD, true, "lll" },
3139 { "__insn_addli", TILEGX_INSN_ADD, true, "lll" },
3140 { "__insn_addx", TILEGX_INSN_ADDX, true, "iii" },
3141 { "__insn_addxi", TILEGX_INSN_ADDX, true, "iii" },
3142 { "__insn_addxli", TILEGX_INSN_ADDX, true, "iii" },
3143 { "__insn_addxsc", TILEGX_INSN_ADDXSC, true, "iii" },
3144 { "__insn_and", TILEGX_INSN_AND, true, "lll" },
3145 { "__insn_andi", TILEGX_INSN_AND, true, "lll" },
3146 { "__insn_bfexts", TILEGX_INSN_BFEXTS, true, "llll" },
3147 { "__insn_bfextu", TILEGX_INSN_BFEXTU, true, "llll" },
3148 { "__insn_bfins", TILEGX_INSN_BFINS, true, "lllll"},
3149 { "__insn_clz", TILEGX_INSN_CLZ, true, "ll" },
3150 { "__insn_cmoveqz", TILEGX_INSN_CMOVEQZ, true, "llll" },
3151 { "__insn_cmovnez", TILEGX_INSN_CMOVNEZ, true, "llll" },
3152 { "__insn_cmpeq", TILEGX_INSN_CMPEQ, true, "lll" },
3153 { "__insn_cmpeqi", TILEGX_INSN_CMPEQ, true, "lll" },
3154 { "__insn_cmpexch", TILEGX_INSN_CMPEXCH, false, "lpl" },
3155 { "__insn_cmpexch4", TILEGX_INSN_CMPEXCH4, false, "ipi" },
3156 { "__insn_cmples", TILEGX_INSN_CMPLES, true, "lll" },
3157 { "__insn_cmpleu", TILEGX_INSN_CMPLEU, true, "lll" },
3158 { "__insn_cmplts", TILEGX_INSN_CMPLTS, true, "lll" },
3159 { "__insn_cmpltsi", TILEGX_INSN_CMPLTS, true, "lll" },
3160 { "__insn_cmpltu", TILEGX_INSN_CMPLTU, true, "lll" },
3161 { "__insn_cmpltui", TILEGX_INSN_CMPLTU, true, "lll" },
3162 { "__insn_cmpne", TILEGX_INSN_CMPNE, true, "lll" },
3163 { "__insn_cmul", TILEGX_INSN_CMUL, true, "lll" },
3164 { "__insn_cmula", TILEGX_INSN_CMULA, true, "llll" },
3165 { "__insn_cmulaf", TILEGX_INSN_CMULAF, true, "llll" },
3166 { "__insn_cmulf", TILEGX_INSN_CMULF, true, "lll" },
3167 { "__insn_cmulfr", TILEGX_INSN_CMULFR, true, "lll" },
3168 { "__insn_cmulh", TILEGX_INSN_CMULH, true, "lll" },
3169 { "__insn_cmulhr", TILEGX_INSN_CMULHR, true, "lll" },
3170 { "__insn_crc32_32", TILEGX_INSN_CRC32_32, true, "lll" },
3171 { "__insn_crc32_8", TILEGX_INSN_CRC32_8, true, "lll" },
3172 { "__insn_ctz", TILEGX_INSN_CTZ, true, "ll" },
3173 { "__insn_dblalign", TILEGX_INSN_DBLALIGN, true, "lllk" },
3174 { "__insn_dblalign2", TILEGX_INSN_DBLALIGN2, true, "lll" },
3175 { "__insn_dblalign4", TILEGX_INSN_DBLALIGN4, true, "lll" },
3176 { "__insn_dblalign6", TILEGX_INSN_DBLALIGN6, true, "lll" },
3177 { "__insn_drain", TILEGX_INSN_DRAIN, false, "v" },
3178 { "__insn_dtlbpr", TILEGX_INSN_DTLBPR, false, "vl" },
3179 { "__insn_exch", TILEGX_INSN_EXCH, false, "lpl" },
3180 { "__insn_exch4", TILEGX_INSN_EXCH4, false, "ipi" },
3181 { "__insn_fdouble_add_flags", TILEGX_INSN_FDOUBLE_ADD_FLAGS, true, "lll" },
3182 { "__insn_fdouble_addsub", TILEGX_INSN_FDOUBLE_ADDSUB, true, "llll" },
3183 { "__insn_fdouble_mul_flags", TILEGX_INSN_FDOUBLE_MUL_FLAGS, true, "lll" },
3184 { "__insn_fdouble_pack1", TILEGX_INSN_FDOUBLE_PACK1, true, "lll" },
3185 { "__insn_fdouble_pack2", TILEGX_INSN_FDOUBLE_PACK2, true, "llll" },
3186 { "__insn_fdouble_sub_flags", TILEGX_INSN_FDOUBLE_SUB_FLAGS, true, "lll" },
3187 { "__insn_fdouble_unpack_max", TILEGX_INSN_FDOUBLE_UNPACK_MAX, true, "lll" },
3188 { "__insn_fdouble_unpack_min", TILEGX_INSN_FDOUBLE_UNPACK_MIN, true, "lll" },
3189 { "__insn_fetchadd", TILEGX_INSN_FETCHADD, false, "lpl" },
3190 { "__insn_fetchadd4", TILEGX_INSN_FETCHADD4, false, "ipi" },
3191 { "__insn_fetchaddgez", TILEGX_INSN_FETCHADDGEZ, false, "lpl" },
3192 { "__insn_fetchaddgez4", TILEGX_INSN_FETCHADDGEZ4, false, "ipi" },
3193 { "__insn_fetchand", TILEGX_INSN_FETCHAND, false, "lpl" },
3194 { "__insn_fetchand4", TILEGX_INSN_FETCHAND4, false, "ipi" },
3195 { "__insn_fetchor", TILEGX_INSN_FETCHOR, false, "lpl" },
3196 { "__insn_fetchor4", TILEGX_INSN_FETCHOR4, false, "ipi" },
3197 { "__insn_finv", TILEGX_INSN_FINV, false, "vk" },
3198 { "__insn_flush", TILEGX_INSN_FLUSH, false, "vk" },
3199 { "__insn_flushwb", TILEGX_INSN_FLUSHWB, false, "v" },
3200 { "__insn_fnop", TILEGX_INSN_FNOP, false, "v" },
3201 { "__insn_fsingle_add1", TILEGX_INSN_FSINGLE_ADD1, true, "lll" },
3202 { "__insn_fsingle_addsub2", TILEGX_INSN_FSINGLE_ADDSUB2, true, "llll" },
3203 { "__insn_fsingle_mul1", TILEGX_INSN_FSINGLE_MUL1, true, "lll" },
3204 { "__insn_fsingle_mul2", TILEGX_INSN_FSINGLE_MUL2, true, "lll" },
3205 { "__insn_fsingle_pack1", TILEGX_INSN_FSINGLE_PACK1, true, "ll" },
3206 { "__insn_fsingle_pack2", TILEGX_INSN_FSINGLE_PACK2, true, "lll" },
3207 { "__insn_fsingle_sub1", TILEGX_INSN_FSINGLE_SUB1, true, "lll" },
3208 { "__insn_icoh", TILEGX_INSN_ICOH, false, "vk" },
3209 { "__insn_ill", TILEGX_INSN_ILL, false, "v" },
3210 { "__insn_info", TILEGX_INSN_INFO, false, "vl" },
3211 { "__insn_infol", TILEGX_INSN_INFOL, false, "vl" },
3212 { "__insn_inv", TILEGX_INSN_INV, false, "vp" },
3213 { "__insn_ld", TILEGX_INSN_LD, false, "lk" },
3214 { "__insn_ld1s", TILEGX_INSN_LD1S, false, "lk" },
3215 { "__insn_ld1u", TILEGX_INSN_LD1U, false, "lk" },
3216 { "__insn_ld2s", TILEGX_INSN_LD2S, false, "lk" },
3217 { "__insn_ld2u", TILEGX_INSN_LD2U, false, "lk" },
3218 { "__insn_ld4s", TILEGX_INSN_LD4S, false, "lk" },
3219 { "__insn_ld4u", TILEGX_INSN_LD4U, false, "lk" },
3220 { "__insn_ldna", TILEGX_INSN_LDNA, false, "lk" },
3221 { "__insn_ldnt", TILEGX_INSN_LDNT, false, "lk" },
3222 { "__insn_ldnt1s", TILEGX_INSN_LDNT1S, false, "lk" },
3223 { "__insn_ldnt1u", TILEGX_INSN_LDNT1U, false, "lk" },
3224 { "__insn_ldnt2s", TILEGX_INSN_LDNT2S, false, "lk" },
3225 { "__insn_ldnt2u", TILEGX_INSN_LDNT2U, false, "lk" },
3226 { "__insn_ldnt4s", TILEGX_INSN_LDNT4S, false, "lk" },
3227 { "__insn_ldnt4u", TILEGX_INSN_LDNT4U, false, "lk" },
3228 { "__insn_ld_L2", TILEGX_INSN_LD_L2, false, "lk" },
3229 { "__insn_ld1s_L2", TILEGX_INSN_LD1S_L2, false, "lk" },
3230 { "__insn_ld1u_L2", TILEGX_INSN_LD1U_L2, false, "lk" },
3231 { "__insn_ld2s_L2", TILEGX_INSN_LD2S_L2, false, "lk" },
3232 { "__insn_ld2u_L2", TILEGX_INSN_LD2U_L2, false, "lk" },
3233 { "__insn_ld4s_L2", TILEGX_INSN_LD4S_L2, false, "lk" },
3234 { "__insn_ld4u_L2", TILEGX_INSN_LD4U_L2, false, "lk" },
3235 { "__insn_ldna_L2", TILEGX_INSN_LDNA_L2, false, "lk" },
3236 { "__insn_ldnt_L2", TILEGX_INSN_LDNT_L2, false, "lk" },
3237 { "__insn_ldnt1s_L2", TILEGX_INSN_LDNT1S_L2, false, "lk" },
3238 { "__insn_ldnt1u_L2", TILEGX_INSN_LDNT1U_L2, false, "lk" },
3239 { "__insn_ldnt2s_L2", TILEGX_INSN_LDNT2S_L2, false, "lk" },
3240 { "__insn_ldnt2u_L2", TILEGX_INSN_LDNT2U_L2, false, "lk" },
3241 { "__insn_ldnt4s_L2", TILEGX_INSN_LDNT4S_L2, false, "lk" },
3242 { "__insn_ldnt4u_L2", TILEGX_INSN_LDNT4U_L2, false, "lk" },
3243 { "__insn_ld_miss", TILEGX_INSN_LD_MISS, false, "lk" },
3244 { "__insn_ld1s_miss", TILEGX_INSN_LD1S_MISS, false, "lk" },
3245 { "__insn_ld1u_miss", TILEGX_INSN_LD1U_MISS, false, "lk" },
3246 { "__insn_ld2s_miss", TILEGX_INSN_LD2S_MISS, false, "lk" },
3247 { "__insn_ld2u_miss", TILEGX_INSN_LD2U_MISS, false, "lk" },
3248 { "__insn_ld4s_miss", TILEGX_INSN_LD4S_MISS, false, "lk" },
3249 { "__insn_ld4u_miss", TILEGX_INSN_LD4U_MISS, false, "lk" },
3250 { "__insn_ldna_miss", TILEGX_INSN_LDNA_MISS, false, "lk" },
3251 { "__insn_ldnt_miss", TILEGX_INSN_LDNT_MISS, false, "lk" },
3252 { "__insn_ldnt1s_miss", TILEGX_INSN_LDNT1S_MISS, false, "lk" },
3253 { "__insn_ldnt1u_miss", TILEGX_INSN_LDNT1U_MISS, false, "lk" },
3254 { "__insn_ldnt2s_miss", TILEGX_INSN_LDNT2S_MISS, false, "lk" },
3255 { "__insn_ldnt2u_miss", TILEGX_INSN_LDNT2U_MISS, false, "lk" },
3256 { "__insn_ldnt4s_miss", TILEGX_INSN_LDNT4S_MISS, false, "lk" },
3257 { "__insn_ldnt4u_miss", TILEGX_INSN_LDNT4U_MISS, false, "lk" },
3258 { "__insn_lnk", TILEGX_INSN_LNK, true, "l" },
3259 { "__insn_mf", TILEGX_INSN_MF, false, "v" },
3260 { "__insn_mfspr", TILEGX_INSN_MFSPR, false, "ll" },
3261 { "__insn_mm", TILEGX_INSN_MM, true, "lllll"},
3262 { "__insn_mnz", TILEGX_INSN_MNZ, true, "lll" },
3263 { "__insn_move", TILEGX_INSN_MOVE, true, "ll" },
3264 { "__insn_movei", TILEGX_INSN_MOVE, true, "ll" },
3265 { "__insn_moveli", TILEGX_INSN_MOVE, true, "ll" },
3266 { "__insn_mtspr", TILEGX_INSN_MTSPR, false, "vll" },
3267 { "__insn_mul_hs_hs", TILEGX_INSN_MUL_HS_HS, true, "lll" },
3268 { "__insn_mul_hs_hu", TILEGX_INSN_MUL_HS_HU, true, "lll" },
3269 { "__insn_mul_hs_ls", TILEGX_INSN_MUL_HS_LS, true, "lll" },
3270 { "__insn_mul_hs_lu", TILEGX_INSN_MUL_HS_LU, true, "lll" },
3271 { "__insn_mul_hu_hu", TILEGX_INSN_MUL_HU_HU, true, "lll" },
3272 { "__insn_mul_hu_ls", TILEGX_INSN_MUL_HU_LS, true, "lll" },
3273 { "__insn_mul_hu_lu", TILEGX_INSN_MUL_HU_LU, true, "lll" },
3274 { "__insn_mul_ls_ls", TILEGX_INSN_MUL_LS_LS, true, "lll" },
3275 { "__insn_mul_ls_lu", TILEGX_INSN_MUL_LS_LU, true, "lll" },
3276 { "__insn_mul_lu_lu", TILEGX_INSN_MUL_LU_LU, true, "lll" },
3277 { "__insn_mula_hs_hs", TILEGX_INSN_MULA_HS_HS, true, "llll" },
3278 { "__insn_mula_hs_hu", TILEGX_INSN_MULA_HS_HU, true, "llll" },
3279 { "__insn_mula_hs_ls", TILEGX_INSN_MULA_HS_LS, true, "llll" },
3280 { "__insn_mula_hs_lu", TILEGX_INSN_MULA_HS_LU, true, "llll" },
3281 { "__insn_mula_hu_hu", TILEGX_INSN_MULA_HU_HU, true, "llll" },
3282 { "__insn_mula_hu_ls", TILEGX_INSN_MULA_HU_LS, true, "llll" },
3283 { "__insn_mula_hu_lu", TILEGX_INSN_MULA_HU_LU, true, "llll" },
3284 { "__insn_mula_ls_ls", TILEGX_INSN_MULA_LS_LS, true, "llll" },
3285 { "__insn_mula_ls_lu", TILEGX_INSN_MULA_LS_LU, true, "llll" },
3286 { "__insn_mula_lu_lu", TILEGX_INSN_MULA_LU_LU, true, "llll" },
3287 { "__insn_mulax", TILEGX_INSN_MULAX, true, "iiii" },
3288 { "__insn_mulx", TILEGX_INSN_MULX, true, "iii" },
3289 { "__insn_mz", TILEGX_INSN_MZ, true, "lll" },
3290 { "__insn_nap", TILEGX_INSN_NAP, false, "v" },
3291 { "__insn_nop", TILEGX_INSN_NOP, true, "v" },
3292 { "__insn_nor", TILEGX_INSN_NOR, true, "lll" },
3293 { "__insn_or", TILEGX_INSN_OR, true, "lll" },
3294 { "__insn_ori", TILEGX_INSN_OR, true, "lll" },
3295 { "__insn_pcnt", TILEGX_INSN_PCNT, true, "ll" },
3296 { "__insn_prefetch", TILEGX_INSN_PREFETCH_L1, false, "vk" },
3297 { "__insn_prefetch_l1", TILEGX_INSN_PREFETCH_L1, false, "vk" },
3298 { "__insn_prefetch_l1_fault", TILEGX_INSN_PREFETCH_L1_FAULT, false, "vk" },
3299 { "__insn_prefetch_l2", TILEGX_INSN_PREFETCH_L2, false, "vk" },
3300 { "__insn_prefetch_l2_fault", TILEGX_INSN_PREFETCH_L2_FAULT, false, "vk" },
3301 { "__insn_prefetch_l3", TILEGX_INSN_PREFETCH_L3, false, "vk" },
3302 { "__insn_prefetch_l3_fault", TILEGX_INSN_PREFETCH_L3_FAULT, false, "vk" },
3303 { "__insn_revbits", TILEGX_INSN_REVBITS, true, "ll" },
3304 { "__insn_revbytes", TILEGX_INSN_REVBYTES, true, "ll" },
3305 { "__insn_rotl", TILEGX_INSN_ROTL, true, "lli" },
3306 { "__insn_rotli", TILEGX_INSN_ROTL, true, "lli" },
3307 { "__insn_shl", TILEGX_INSN_SHL, true, "lli" },
3308 { "__insn_shl16insli", TILEGX_INSN_SHL16INSLI, true, "lll" },
3309 { "__insn_shl1add", TILEGX_INSN_SHL1ADD, true, "lll" },
3310 { "__insn_shl1addx", TILEGX_INSN_SHL1ADDX, true, "iii" },
3311 { "__insn_shl2add", TILEGX_INSN_SHL2ADD, true, "lll" },
3312 { "__insn_shl2addx", TILEGX_INSN_SHL2ADDX, true, "iii" },
3313 { "__insn_shl3add", TILEGX_INSN_SHL3ADD, true, "lll" },
3314 { "__insn_shl3addx", TILEGX_INSN_SHL3ADDX, true, "iii" },
3315 { "__insn_shli", TILEGX_INSN_SHL, true, "lli" },
3316 { "__insn_shlx", TILEGX_INSN_SHLX, true, "iii" },
3317 { "__insn_shlxi", TILEGX_INSN_SHLX, true, "iii" },
3318 { "__insn_shrs", TILEGX_INSN_SHRS, true, "lli" },
3319 { "__insn_shrsi", TILEGX_INSN_SHRS, true, "lli" },
3320 { "__insn_shru", TILEGX_INSN_SHRU, true, "lli" },
3321 { "__insn_shrui", TILEGX_INSN_SHRU, true, "lli" },
3322 { "__insn_shrux", TILEGX_INSN_SHRUX, true, "iii" },
3323 { "__insn_shruxi", TILEGX_INSN_SHRUX, true, "iii" },
3324 { "__insn_shufflebytes", TILEGX_INSN_SHUFFLEBYTES, true, "llll" },
3325 { "__insn_shufflebytes1", TILEGX_INSN_SHUFFLEBYTES1, true, "lll" },
3326 { "__insn_st", TILEGX_INSN_ST, false, "vpl" },
3327 { "__insn_st1", TILEGX_INSN_ST1, false, "vpl" },
3328 { "__insn_st2", TILEGX_INSN_ST2, false, "vpl" },
3329 { "__insn_st4", TILEGX_INSN_ST4, false, "vpl" },
3330 { "__insn_stnt", TILEGX_INSN_STNT, false, "vpl" },
3331 { "__insn_stnt1", TILEGX_INSN_STNT1, false, "vpl" },
3332 { "__insn_stnt2", TILEGX_INSN_STNT2, false, "vpl" },
3333 { "__insn_stnt4", TILEGX_INSN_STNT4, false, "vpl" },
3334 { "__insn_sub", TILEGX_INSN_SUB, true, "lll" },
3335 { "__insn_subx", TILEGX_INSN_SUBX, true, "iii" },
3336 { "__insn_subxsc", TILEGX_INSN_SUBXSC, true, "iii" },
3337 { "__insn_tblidxb0", TILEGX_INSN_TBLIDXB0, true, "lll" },
3338 { "__insn_tblidxb1", TILEGX_INSN_TBLIDXB1, true, "lll" },
3339 { "__insn_tblidxb2", TILEGX_INSN_TBLIDXB2, true, "lll" },
3340 { "__insn_tblidxb3", TILEGX_INSN_TBLIDXB3, true, "lll" },
3341 { "__insn_v1add", TILEGX_INSN_V1ADD, true, "lll" },
3342 { "__insn_v1addi", TILEGX_INSN_V1ADDI, true, "lll" },
3343 { "__insn_v1adduc", TILEGX_INSN_V1ADDUC, true, "lll" },
3344 { "__insn_v1adiffu", TILEGX_INSN_V1ADIFFU, true, "lll" },
3345 { "__insn_v1avgu", TILEGX_INSN_V1AVGU, true, "lll" },
3346 { "__insn_v1cmpeq", TILEGX_INSN_V1CMPEQ, true, "lll" },
3347 { "__insn_v1cmpeqi", TILEGX_INSN_V1CMPEQI, true, "lll" },
3348 { "__insn_v1cmples", TILEGX_INSN_V1CMPLES, true, "lll" },
3349 { "__insn_v1cmpleu", TILEGX_INSN_V1CMPLEU, true, "lll" },
3350 { "__insn_v1cmplts", TILEGX_INSN_V1CMPLTS, true, "lll" },
3351 { "__insn_v1cmpltsi", TILEGX_INSN_V1CMPLTSI, true, "lll" },
3352 { "__insn_v1cmpltu", TILEGX_INSN_V1CMPLTU, true, "lll" },
3353 { "__insn_v1cmpltui", TILEGX_INSN_V1CMPLTUI, true, "lll" },
3354 { "__insn_v1cmpne", TILEGX_INSN_V1CMPNE, true, "lll" },
3355 { "__insn_v1ddotpu", TILEGX_INSN_V1DDOTPU, true, "lll" },
3356 { "__insn_v1ddotpua", TILEGX_INSN_V1DDOTPUA, true, "llll" },
3357 { "__insn_v1ddotpus", TILEGX_INSN_V1DDOTPUS, true, "lll" },
3358 { "__insn_v1ddotpusa", TILEGX_INSN_V1DDOTPUSA, true, "llll" },
3359 { "__insn_v1dotp", TILEGX_INSN_V1DOTP, true, "lll" },
3360 { "__insn_v1dotpa", TILEGX_INSN_V1DOTPA, true, "llll" },
3361 { "__insn_v1dotpu", TILEGX_INSN_V1DOTPU, true, "lll" },
3362 { "__insn_v1dotpua", TILEGX_INSN_V1DOTPUA, true, "llll" },
3363 { "__insn_v1dotpus", TILEGX_INSN_V1DOTPUS, true, "lll" },
3364 { "__insn_v1dotpusa", TILEGX_INSN_V1DOTPUSA, true, "llll" },
3365 { "__insn_v1int_h", TILEGX_INSN_V1INT_H, true, "lll" },
3366 { "__insn_v1int_l", TILEGX_INSN_V1INT_L, true, "lll" },
3367 { "__insn_v1maxu", TILEGX_INSN_V1MAXU, true, "lll" },
3368 { "__insn_v1maxui", TILEGX_INSN_V1MAXUI, true, "lll" },
3369 { "__insn_v1minu", TILEGX_INSN_V1MINU, true, "lll" },
3370 { "__insn_v1minui", TILEGX_INSN_V1MINUI, true, "lll" },
3371 { "__insn_v1mnz", TILEGX_INSN_V1MNZ, true, "lll" },
3372 { "__insn_v1multu", TILEGX_INSN_V1MULTU, true, "lll" },
3373 { "__insn_v1mulu", TILEGX_INSN_V1MULU, true, "lll" },
3374 { "__insn_v1mulus", TILEGX_INSN_V1MULUS, true, "lll" },
3375 { "__insn_v1mz", TILEGX_INSN_V1MZ, true, "lll" },
3376 { "__insn_v1sadau", TILEGX_INSN_V1SADAU, true, "llll" },
3377 { "__insn_v1sadu", TILEGX_INSN_V1SADU, true, "lll" },
3378 { "__insn_v1shl", TILEGX_INSN_V1SHL, true, "lll" },
3379 { "__insn_v1shli", TILEGX_INSN_V1SHLI, true, "lll" },
3380 { "__insn_v1shrs", TILEGX_INSN_V1SHRS, true, "lll" },
3381 { "__insn_v1shrsi", TILEGX_INSN_V1SHRSI, true, "lll" },
3382 { "__insn_v1shru", TILEGX_INSN_V1SHRU, true, "lll" },
3383 { "__insn_v1shrui", TILEGX_INSN_V1SHRUI, true, "lll" },
3384 { "__insn_v1sub", TILEGX_INSN_V1SUB, true, "lll" },
3385 { "__insn_v1subuc", TILEGX_INSN_V1SUBUC, true, "lll" },
3386 { "__insn_v2add", TILEGX_INSN_V2ADD, true, "lll" },
3387 { "__insn_v2addi", TILEGX_INSN_V2ADDI, true, "lll" },
3388 { "__insn_v2addsc", TILEGX_INSN_V2ADDSC, true, "lll" },
3389 { "__insn_v2adiffs", TILEGX_INSN_V2ADIFFS, true, "lll" },
3390 { "__insn_v2avgs", TILEGX_INSN_V2AVGS, true, "lll" },
3391 { "__insn_v2cmpeq", TILEGX_INSN_V2CMPEQ, true, "lll" },
3392 { "__insn_v2cmpeqi", TILEGX_INSN_V2CMPEQI, true, "lll" },
3393 { "__insn_v2cmples", TILEGX_INSN_V2CMPLES, true, "lll" },
3394 { "__insn_v2cmpleu", TILEGX_INSN_V2CMPLEU, true, "lll" },
3395 { "__insn_v2cmplts", TILEGX_INSN_V2CMPLTS, true, "lll" },
3396 { "__insn_v2cmpltsi", TILEGX_INSN_V2CMPLTSI, true, "lll" },
3397 { "__insn_v2cmpltu", TILEGX_INSN_V2CMPLTU, true, "lll" },
3398 { "__insn_v2cmpltui", TILEGX_INSN_V2CMPLTUI, true, "lll" },
3399 { "__insn_v2cmpne", TILEGX_INSN_V2CMPNE, true, "lll" },
3400 { "__insn_v2dotp", TILEGX_INSN_V2DOTP, true, "lll" },
3401 { "__insn_v2dotpa", TILEGX_INSN_V2DOTPA, true, "llll" },
3402 { "__insn_v2int_h", TILEGX_INSN_V2INT_H, true, "lll" },
3403 { "__insn_v2int_l", TILEGX_INSN_V2INT_L, true, "lll" },
3404 { "__insn_v2maxs", TILEGX_INSN_V2MAXS, true, "lll" },
3405 { "__insn_v2maxsi", TILEGX_INSN_V2MAXSI, true, "lll" },
3406 { "__insn_v2mins", TILEGX_INSN_V2MINS, true, "lll" },
3407 { "__insn_v2minsi", TILEGX_INSN_V2MINSI, true, "lll" },
3408 { "__insn_v2mnz", TILEGX_INSN_V2MNZ, true, "lll" },
3409 { "__insn_v2mulfsc", TILEGX_INSN_V2MULFSC, true, "lll" },
3410 { "__insn_v2muls", TILEGX_INSN_V2MULS, true, "lll" },
3411 { "__insn_v2mults", TILEGX_INSN_V2MULTS, true, "lll" },
3412 { "__insn_v2mz", TILEGX_INSN_V2MZ, true, "lll" },
3413 { "__insn_v2packh", TILEGX_INSN_V2PACKH, true, "lll" },
3414 { "__insn_v2packl", TILEGX_INSN_V2PACKL, true, "lll" },
3415 { "__insn_v2packuc", TILEGX_INSN_V2PACKUC, true, "lll" },
3416 { "__insn_v2sadas", TILEGX_INSN_V2SADAS, true, "llll" },
3417 { "__insn_v2sadau", TILEGX_INSN_V2SADAU, true, "llll" },
3418 { "__insn_v2sads", TILEGX_INSN_V2SADS, true, "lll" },
3419 { "__insn_v2sadu", TILEGX_INSN_V2SADU, true, "lll" },
3420 { "__insn_v2shl", TILEGX_INSN_V2SHL, true, "lll" },
3421 { "__insn_v2shli", TILEGX_INSN_V2SHLI, true, "lll" },
3422 { "__insn_v2shlsc", TILEGX_INSN_V2SHLSC, true, "lll" },
3423 { "__insn_v2shrs", TILEGX_INSN_V2SHRS, true, "lll" },
3424 { "__insn_v2shrsi", TILEGX_INSN_V2SHRSI, true, "lll" },
3425 { "__insn_v2shru", TILEGX_INSN_V2SHRU, true, "lll" },
3426 { "__insn_v2shrui", TILEGX_INSN_V2SHRUI, true, "lll" },
3427 { "__insn_v2sub", TILEGX_INSN_V2SUB, true, "lll" },
3428 { "__insn_v2subsc", TILEGX_INSN_V2SUBSC, true, "lll" },
3429 { "__insn_v4add", TILEGX_INSN_V4ADD, true, "lll" },
3430 { "__insn_v4addsc", TILEGX_INSN_V4ADDSC, true, "lll" },
3431 { "__insn_v4int_h", TILEGX_INSN_V4INT_H, true, "lll" },
3432 { "__insn_v4int_l", TILEGX_INSN_V4INT_L, true, "lll" },
3433 { "__insn_v4packsc", TILEGX_INSN_V4PACKSC, true, "lll" },
3434 { "__insn_v4shl", TILEGX_INSN_V4SHL, true, "lll" },
3435 { "__insn_v4shlsc", TILEGX_INSN_V4SHLSC, true, "lll" },
3436 { "__insn_v4shrs", TILEGX_INSN_V4SHRS, true, "lll" },
3437 { "__insn_v4shru", TILEGX_INSN_V4SHRU, true, "lll" },
3438 { "__insn_v4sub", TILEGX_INSN_V4SUB, true, "lll" },
3439 { "__insn_v4subsc", TILEGX_INSN_V4SUBSC, true, "lll" },
3440 { "__insn_wh64", TILEGX_INSN_WH64, false, "vp" },
3441 { "__insn_xor", TILEGX_INSN_XOR, true, "lll" },
3442 { "__insn_xori", TILEGX_INSN_XOR, true, "lll" },
3443 { "__tile_network_barrier", TILEGX_NETWORK_BARRIER, false, "v" },
3444 { "__tile_idn0_receive", TILEGX_IDN0_RECEIVE, false, "l" },
3445 { "__tile_idn1_receive", TILEGX_IDN1_RECEIVE, false, "l" },
3446 { "__tile_idn_send", TILEGX_IDN_SEND, false, "vl" },
3447 { "__tile_udn0_receive", TILEGX_UDN0_RECEIVE, false, "l" },
3448 { "__tile_udn1_receive", TILEGX_UDN1_RECEIVE, false, "l" },
3449 { "__tile_udn2_receive", TILEGX_UDN2_RECEIVE, false, "l" },
3450 { "__tile_udn3_receive", TILEGX_UDN3_RECEIVE, false, "l" },
3451 { "__tile_udn_send", TILEGX_UDN_SEND, false, "vl" },
3455 /* Convert a character in a builtin type string to a tree type. */
3456 static tree
3457 char_to_type (char c)
3459 static tree volatile_ptr_type_node = NULL;
3460 static tree volatile_const_ptr_type_node = NULL;
3462 if (volatile_ptr_type_node == NULL)
3464 volatile_ptr_type_node =
3465 build_pointer_type (build_qualified_type (void_type_node,
3466 TYPE_QUAL_VOLATILE));
3467 volatile_const_ptr_type_node =
3468 build_pointer_type (build_qualified_type (void_type_node,
3469 TYPE_QUAL_CONST
3470 | TYPE_QUAL_VOLATILE));
3473 switch (c)
3475 case 'v':
3476 return void_type_node;
3477 case 'i':
3478 return unsigned_type_node;
3479 case 'l':
3480 return long_long_unsigned_type_node;
3481 case 'p':
3482 return volatile_ptr_type_node;
3483 case 'k':
3484 return volatile_const_ptr_type_node;
3485 default:
3486 gcc_unreachable ();
3491 /* Implement TARGET_INIT_BUILTINS. */
3492 static void
3493 tilegx_init_builtins (void)
3495 size_t i;
3497 for (i = 0; i < ARRAY_SIZE (tilegx_builtins); i++)
3499 const struct tilegx_builtin_def *p = &tilegx_builtins[i];
3500 tree ftype, ret_type, arg_type_list = void_list_node;
3501 tree decl;
3502 int j;
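/* Walk the signature string back to front: tree_cons prepends, so
   the arguments end up in source order, terminated by the
   void_list_node the list is seeded with.  */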
3504 for (j = strlen (p->type) - 1; j > 0; j--)
3506 arg_type_list =
3507 tree_cons (NULL_TREE, char_to_type (p->type[j]), arg_type_list);
3510 ret_type = char_to_type (p->type[0]);
3512 ftype = build_function_type (ret_type, arg_type_list);
3514 decl = add_builtin_function (p->name, ftype, p->code, BUILT_IN_MD,
3515 NULL, NULL);
3517 if (p->is_const)
3518 TREE_READONLY (decl) = 1;
3519 TREE_NOTHROW (decl) = 1;
3521 if (tilegx_builtin_info[p->code].fndecl == NULL)
3522 tilegx_builtin_info[p->code].fndecl = decl;
3527 /* Implement TARGET_EXPAND_BUILTIN. */
3528 static rtx
3529 tilegx_expand_builtin (tree exp,
3530 rtx target,
3531 rtx subtarget ATTRIBUTE_UNUSED,
3532 enum machine_mode mode ATTRIBUTE_UNUSED,
3533 int ignore ATTRIBUTE_UNUSED)
3535 #define MAX_BUILTIN_ARGS 4
3537 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3538 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3539 tree arg;
3540 call_expr_arg_iterator iter;
3541 enum insn_code icode;
3542 rtx op[MAX_BUILTIN_ARGS + 1], pat;
3543 int opnum;
3544 bool nonvoid;
3545 insn_gen_fn fn;
3547 if (fcode >= TILEGX_BUILTIN_max)
3548 internal_error ("bad builtin fcode");
3549 icode = tilegx_builtin_info[fcode].icode;
3550 if (icode == 0)
3551 internal_error ("bad builtin icode");
3553 nonvoid = TREE_TYPE (TREE_TYPE (fndecl)) != void_type_node;
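/* When the builtin returns a value, insn operand 0 is the
   destination, so the call's arguments start at operand index 1;
   for void builtins they start at 0.  */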
3555 opnum = nonvoid;
3556 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3558 const struct insn_operand_data *insn_op;
3560 if (arg == error_mark_node)
3561 return NULL_RTX;
3562 if (opnum > MAX_BUILTIN_ARGS)
3563 return NULL_RTX;
3565 insn_op = &insn_data[icode].operand[opnum];
3567 op[opnum] = expand_expr (arg, NULL_RTX, insn_op->mode, EXPAND_NORMAL);
3569 if (!(*insn_op->predicate) (op[opnum], insn_op->mode))
3571 enum machine_mode opmode = insn_op->mode;
3573 /* pointer_operand and pmode_register_operand operands do
3574 not specify a mode, so use the operand's mode instead
3575 (which should always be right by the time we get here,
3576 except for constants, which are VOIDmode). */
3577 if (opmode == VOIDmode)
3579 enum machine_mode m = GET_MODE (op[opnum]);
3580 gcc_assert (m == Pmode || m == VOIDmode);
3581 opmode = Pmode;
3584 op[opnum] = copy_to_mode_reg (opmode, op[opnum]);
3587 if (!(*insn_op->predicate) (op[opnum], insn_op->mode))
3589 /* We still failed to meet the predicate even after moving
3590 into a register. Assume we needed an immediate. */
3591 error_at (EXPR_LOCATION (exp),
3592 "operand must be an immediate of the right size");
3593 return const0_rtx;
3596 opnum++;
3599 if (nonvoid)
3601 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3602 if (!target
3603 || GET_MODE (target) != tmode
3604 || !(*insn_data[icode].operand[0].predicate) (target, tmode))
3606 if (tmode == VOIDmode)
3608 /* Get the mode from the return type. */
3609 tmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl)));
3611 target = gen_reg_rtx (tmode);
3613 op[0] = target;
3616 fn = GEN_FCN (icode);
3617 switch (opnum)
3619 case 0:
3620 pat = fn (NULL_RTX);
3621 break;
3622 case 1:
3623 pat = fn (op[0]);
3624 break;
3625 case 2:
3626 pat = fn (op[0], op[1]);
3627 break;
3628 case 3:
3629 pat = fn (op[0], op[1], op[2]);
3630 break;
3631 case 4:
3632 pat = fn (op[0], op[1], op[2], op[3]);
3633 break;
3634 case 5:
3635 pat = fn (op[0], op[1], op[2], op[3], op[4]);
3636 break;
3637 default:
3638 gcc_unreachable ();
3640 if (!pat)
3641 return NULL_RTX;
3643 /* If we are generating a prefetch, tell the scheduler not to move
3644 it around. */
3645 if (GET_CODE (pat) == PREFETCH)
3646 PREFETCH_SCHEDULE_BARRIER_P (pat) = true;
3648 emit_insn (pat);
3650 if (nonvoid)
3651 return target;
3652 else
3653 return const0_rtx;
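/* For orientation, a hedged sketch of what reaches this hook (the
   builtin name is illustrative, not checked against the table): a
   source-level call such as

     __insn_example (x, y);

   arrives as a CALL_EXPR; each argument is expanded and pushed
   through the insn's operand predicates above, a target register is
   allocated when the builtin returns a value, and the matching insn
   pattern is emitted, with prefetch patterns additionally marked as
   scheduling barriers.  */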
3657 /* Implement TARGET_BUILTIN_DECL. */
3658 static tree
3659 tilegx_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
3661 if (code >= TILEGX_BUILTIN_max)
3662 return error_mark_node;
3664 return tilegx_builtin_info[code].fndecl;
3669 /* Stack frames */
3671 /* Return whether REGNO needs to be saved in the stack frame. */
3672 static bool
3673 need_to_save_reg (unsigned int regno)
3675 if (!fixed_regs[regno] && !call_used_regs[regno]
3676 && df_regs_ever_live_p (regno))
3677 return true;
3679 if (flag_pic
3680 && (regno == PIC_OFFSET_TABLE_REGNUM
3681 || regno == TILEGX_PIC_TEXT_LABEL_REGNUM)
3682 && (crtl->uses_pic_offset_table || crtl->saves_all_registers))
3683 return true;
3685 if (crtl->calls_eh_return)
3687 unsigned i;
3688 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; i++)
3690 if (regno == EH_RETURN_DATA_REGNO (i))
3691 return true;
3695 return false;
3699 /* Return the size of the register save area. This function is only
3700 correct starting with local register allocation. */
3701 static int
3702 tilegx_saved_regs_size (void)
3704 int reg_save_size = 0;
3705 int regno;
3706 int offset_to_frame;
3707 int align_mask;
3709 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
3710 if (need_to_save_reg (regno))
3711 reg_save_size += UNITS_PER_WORD;
3713 /* Pad out the register save area if necessary to make
3714 frame_pointer_rtx be as aligned as the stack pointer. */
3715 offset_to_frame = crtl->args.pretend_args_size + reg_save_size;
3716 align_mask = (STACK_BOUNDARY / BITS_PER_UNIT) - 1;
3717 reg_save_size += (-offset_to_frame) & align_mask;
3719 return reg_save_size;
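/* Worked example with assumed numbers (8-byte words, 16-byte
   STACK_BOUNDARY): no pretend args and three saved registers give
   reg_save_size = 3 * 8 = 24 bytes; align_mask is 15 and
   (-24) & 15 == 8, so the save area is padded to 32 bytes, keeping
   frame_pointer_rtx as aligned as the stack pointer.  */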
3723 /* Round up frame size SIZE. */
3724 static int
3725 round_frame_size (int size)
3727 return ((size + STACK_BOUNDARY / BITS_PER_UNIT - 1)
3728 & -STACK_BOUNDARY / BITS_PER_UNIT);
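/* E.g., still assuming a 16-byte STACK_BOUNDARY: a 40-byte frame
   rounds to (40 + 15) & -16 == 48 bytes, while an already aligned
   64-byte frame is returned unchanged.  */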
3732 /* Emit a store in the stack frame to save REGNO at address ADDR, and
3733 emit the corresponding REG_CFA_OFFSET note described by CFA and
3734 CFA_OFFSET. Return the emitted insn. */
3735 static rtx
3736 frame_emit_store (int regno, int regno_note, rtx addr, rtx cfa,
3737 int cfa_offset)
3739 rtx reg = gen_rtx_REG (DImode, regno);
3740 rtx mem = gen_frame_mem (DImode, addr);
3741 rtx mov = gen_movdi (mem, reg);
3743 /* Describe what just happened in a way that dwarf understands. We
3744 use temporary registers to hold the address to make scheduling
3745 easier, and use the REG_CFA_OFFSET to describe the address as an
3746 offset from the CFA. */
3747 rtx reg_note = gen_rtx_REG (DImode, regno_note);
3748 rtx cfa_relative_addr = gen_rtx_PLUS (Pmode, cfa, GEN_INT (cfa_offset));
3749 rtx cfa_relative_mem = gen_frame_mem (DImode, cfa_relative_addr);
3750 rtx real = gen_rtx_SET (VOIDmode, cfa_relative_mem, reg_note);
3751 add_reg_note (mov, REG_CFA_OFFSET, real);
3753 return emit_insn (mov);
3757 /* Emit a load in the stack frame to load REGNO from address ADDR.
3758 Add a REG_CFA_RESTORE note to CFA_RESTORES if CFA_RESTORES is
3759 non-null. Return the emitted insn. */
3760 static rtx
3761 frame_emit_load (int regno, rtx addr, rtx *cfa_restores)
3763 rtx reg = gen_rtx_REG (DImode, regno);
3764 rtx mem = gen_frame_mem (DImode, addr);
3765 if (cfa_restores)
3766 *cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg, *cfa_restores);
3767 return emit_insn (gen_movdi (reg, mem));
3771 /* Helper function to set RTX_FRAME_RELATED_P on instructions,
3772 including sequences. */
3773 static rtx
3774 set_frame_related_p (void)
3776 rtx seq = get_insns ();
3777 rtx insn;
3779 end_sequence ();
3781 if (!seq)
3782 return NULL_RTX;
3784 if (INSN_P (seq))
3786 insn = seq;
3787 while (insn != NULL_RTX)
3789 RTX_FRAME_RELATED_P (insn) = 1;
3790 insn = NEXT_INSN (insn);
3792 seq = emit_insn (seq);
3794 else
3796 seq = emit_insn (seq);
3797 RTX_FRAME_RELATED_P (seq) = 1;
3799 return seq;
3803 #define FRP(exp) (start_sequence (), exp, set_frame_related_p ())
3805 /* This emits code for 'sp += offset'.
3807 The ABI only allows us to modify 'sp' with a single 'addi' or
3808 'addli', so that the backtracer can understand it. Larger amounts
3809 cannot use those instructions, so they are added by placing the
3810 offset into a large register and using 'add'.
3812 This happens after reload, so we need to expand it ourselves. */
3813 static rtx
3814 emit_sp_adjust (int offset, int *next_scratch_regno, bool frame_related,
3815 rtx reg_notes)
3817 rtx to_add;
3818 rtx imm_rtx = GEN_INT (offset);
3820 rtx insn;
3821 if (satisfies_constraint_J (imm_rtx))
3823 /* We can add this using a single immediate add. */
3824 to_add = imm_rtx;
3826 else
3828 rtx tmp = gen_rtx_REG (Pmode, (*next_scratch_regno)--);
3829 tilegx_expand_set_const64 (tmp, imm_rtx);
3830 to_add = tmp;
3833 /* Actually adjust the stack pointer. */
3834 if (TARGET_32BIT)
3835 insn = gen_sp_adjust_32bit (stack_pointer_rtx, stack_pointer_rtx, to_add);
3836 else
3837 insn = gen_sp_adjust (stack_pointer_rtx, stack_pointer_rtx, to_add);
3839 insn = emit_insn (insn);
3840 REG_NOTES (insn) = reg_notes;
3842 /* Describe what just happened in a way that dwarf understands. */
3843 if (frame_related)
3845 rtx real = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
3846 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3847 imm_rtx));
3848 RTX_FRAME_RELATED_P (insn) = 1;
3849 add_reg_note (insn, REG_CFA_ADJUST_CFA, real);
3852 return insn;
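/* A hedged sketch of the two paths (taking constraint 'J' to be a
   16-bit signed immediate):

     emit_sp_adjust (-64, &scratch, true, NULL_RTX);
       -> a single "addi/addli sp, sp, -64"

     emit_sp_adjust (-100000, &scratch, true, NULL_RTX);
       -> the offset is built in a scratch register first, then a
          single "add sp, sp, <scratch>"

   Either way sp itself changes in exactly one instruction, which is
   what the backtracer requires.  */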
3856 /* Return whether the current function is leaf. This takes into
3857 account whether the function calls tls_get_addr. */
3858 static bool
3859 tilegx_current_function_is_leaf (void)
3861 return crtl->is_leaf && !cfun->machine->calls_tls_get_addr;
3865 /* Return the frame size. */
3866 static int
3867 compute_total_frame_size (void)
3869 int total_size = (get_frame_size () + tilegx_saved_regs_size ()
3870 + crtl->outgoing_args_size
3871 + crtl->args.pretend_args_size);
3873 if (!tilegx_current_function_is_leaf () || cfun->calls_alloca)
3875 /* Make room for save area in callee. */
3876 total_size += STACK_POINTER_OFFSET;
3879 return round_frame_size (total_size);
3883 /* Return nonzero if this function is known to have a null epilogue.
3884 This allows the optimizer to omit jumps to jumps if no stack was
3885 created. */
3886 bool
3887 tilegx_can_use_return_insn_p (void)
3889 return (reload_completed
3890 && cfun->static_chain_decl == 0
3891 && compute_total_frame_size () == 0
3892 && tilegx_current_function_is_leaf ()
3893 && !crtl->profile && !df_regs_ever_live_p (TILEGX_LINK_REGNUM));
3897 /* Returns an rtx for a stack slot at 'FP + offset_from_fp'. If there
3898 is a frame pointer, it computes the value relative to
3899 that. Otherwise it uses the stack pointer. */
3900 static rtx
3901 compute_frame_addr (int offset_from_fp, int *next_scratch_regno)
3903 rtx base_reg_rtx, tmp_reg_rtx, offset_rtx;
3904 int offset_from_base;
3906 if (frame_pointer_needed)
3908 base_reg_rtx = hard_frame_pointer_rtx;
3909 offset_from_base = offset_from_fp;
3911 else
3913 int offset_from_sp = compute_total_frame_size () + offset_from_fp;
3914 offset_from_base = offset_from_sp;
3915 base_reg_rtx = stack_pointer_rtx;
3918 if (offset_from_base == 0)
3919 return base_reg_rtx;
3921 /* Compute the new value of the stack pointer. */
3922 tmp_reg_rtx = gen_rtx_REG (Pmode, (*next_scratch_regno)--);
3923 offset_rtx = GEN_INT (offset_from_base);
3925 if (!add_operand (offset_rtx, Pmode))
3927 expand_set_cint64 (tmp_reg_rtx, offset_rtx);
3928 offset_rtx = tmp_reg_rtx;
3931 emit_insn (gen_rtx_SET (VOIDmode, tmp_reg_rtx,
3932 gen_rtx_PLUS (Pmode, base_reg_rtx, offset_rtx)));
3934 return tmp_reg_rtx;
3938 /* The stack frame looks like this:
3939 +-------------+
3940 | ... |
3941 | incoming |
3942 | stack args |
3943 AP -> +-------------+
3944 | caller's HFP|
3945 +-------------+
3946 | lr save |
3947 HFP -> +-------------+
3948 | var args |
3949 | reg save | crtl->args.pretend_args_size bytes
3950 +-------------+
3951 | ... |
3952 | saved regs | tilegx_saved_regs_size() bytes
3953 FP -> +-------------+
3954 | ... |
3955 | vars | get_frame_size() bytes
3956 +-------------+
3957 | ... |
3958 | outgoing |
3959 | stack args | crtl->outgoing_args_size bytes
3960 +-------------+
3961 | HFP | ptr_size bytes (only here if nonleaf / alloca)
3962 +-------------+
3963 | callee lr | ptr_size bytes (only here if nonleaf / alloca)
3964 | save |
3965 SP -> +-------------+
3967 HFP == incoming SP.
3969 For functions with a frame larger than 32767 bytes, or which use
3970 alloca (), r52 is used as a frame pointer. Otherwise there is no
3971 frame pointer.
3973 FP is saved at SP+ptr_size before calling a subroutine so the callee
3974 can chain. */
3975 void
3976 tilegx_expand_prologue (void)
3978 #define ROUND_ROBIN_SIZE 4
3979 /* We round-robin through four scratch registers to hold temporary
3980 addresses for saving registers, to make instruction scheduling
3981 easier. */
3982 rtx reg_save_addr[ROUND_ROBIN_SIZE] = {
3983 NULL_RTX, NULL_RTX, NULL_RTX, NULL_RTX
3985 rtx insn, cfa;
3986 unsigned int which_scratch;
3987 int offset, start_offset, regno;
3989 /* A register that holds a copy of the incoming fp. */
3990 int fp_copy_regno = -1;
3992 /* A register that holds a copy of the incoming sp. */
3993 int sp_copy_regno = -1;
3995 /* Next scratch register number to hand out (postdecrementing). */
3996 int next_scratch_regno = 29;
3998 int total_size = compute_total_frame_size ();
4000 if (flag_stack_usage_info)
4001 current_function_static_stack_size = total_size;
4003 /* Save lr first in its special location because code after this
4004 might use the link register as a scratch register. */
4005 if (df_regs_ever_live_p (TILEGX_LINK_REGNUM) || crtl->calls_eh_return)
4006 FRP (frame_emit_store (TILEGX_LINK_REGNUM, TILEGX_LINK_REGNUM,
4007 stack_pointer_rtx, stack_pointer_rtx, 0));
4009 if (total_size == 0)
4011 /* Load the PIC register if needed. */
4012 if (flag_pic && crtl->uses_pic_offset_table)
4013 load_pic_register (false);
4015 return;
4018 cfa = stack_pointer_rtx;
4020 if (frame_pointer_needed)
4022 fp_copy_regno = next_scratch_regno--;
4024 /* Copy the old frame pointer aside so we can save it later. */
4025 insn =
4026 FRP (emit_move_insn (gen_rtx_REG (word_mode, fp_copy_regno),
4027 gen_lowpart (word_mode, hard_frame_pointer_rtx)));
4028 add_reg_note (insn, REG_CFA_REGISTER, NULL_RTX);
4030 /* Set up the frame pointer. */
4031 insn = FRP (emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx));
4032 add_reg_note (insn, REG_CFA_DEF_CFA, hard_frame_pointer_rtx);
4033 cfa = hard_frame_pointer_rtx;
4034 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
4036 /* fp holds a copy of the incoming sp, in case we need to store
4037 it. */
4038 sp_copy_regno = HARD_FRAME_POINTER_REGNUM;
4040 else if (!tilegx_current_function_is_leaf ())
4042 /* Copy the old stack pointer aside so we can save it later. */
4043 sp_copy_regno = next_scratch_regno--;
4044 emit_move_insn (gen_rtx_REG (Pmode, sp_copy_regno),
4045 stack_pointer_rtx);
4048 if (tilegx_current_function_is_leaf ())
4050 /* No need to store chain pointer to caller's frame. */
4051 emit_sp_adjust (-total_size, &next_scratch_regno,
4052 !frame_pointer_needed, NULL_RTX);
4054 else
4056 /* Save the frame pointer (incoming sp value) to support
4057 backtracing. First we need to create an rtx with the store
4058 address. */
4059 rtx chain_addr = gen_rtx_REG (Pmode, next_scratch_regno--);
4060 rtx size_rtx = GEN_INT (-(total_size - UNITS_PER_WORD));
4062 if (add_operand (size_rtx, Pmode))
4064 /* Expose more parallelism by computing this value from the
4065 original stack pointer, not the one after we have pushed
4066 the frame. */
4067 rtx p = gen_rtx_PLUS (Pmode, stack_pointer_rtx, size_rtx);
4068 emit_insn (gen_rtx_SET (VOIDmode, chain_addr, p));
4069 emit_sp_adjust (-total_size, &next_scratch_regno,
4070 !frame_pointer_needed, NULL_RTX);
4072 else
4074 /* The stack frame is large, so just store the incoming sp
4075 value at *(new_sp + UNITS_PER_WORD). */
4076 rtx p;
4077 emit_sp_adjust (-total_size, &next_scratch_regno,
4078 !frame_pointer_needed, NULL_RTX);
4079 p = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4080 GEN_INT (UNITS_PER_WORD));
4081 emit_insn (gen_rtx_SET (VOIDmode, chain_addr, p));
4084 /* Save our frame pointer for backtrace chaining. */
4085 emit_insn (gen_movdi (gen_frame_mem (DImode, chain_addr),
4086 gen_rtx_REG (DImode, sp_copy_regno)));
4089 /* Compute where to start storing registers we need to save. */
4090 start_offset = -crtl->args.pretend_args_size - UNITS_PER_WORD;
4091 offset = start_offset;
4093 /* Store all registers that need saving. */
4094 which_scratch = 0;
4095 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
4096 if (need_to_save_reg (regno))
4098 rtx r = reg_save_addr[which_scratch];
4099 int from_regno;
4100 int cfa_offset = frame_pointer_needed ? offset : total_size + offset;
4102 if (r == NULL_RTX)
4104 int prev_scratch_regno = next_scratch_regno;
4105 r = compute_frame_addr (offset, &next_scratch_regno);
4106 if (prev_scratch_regno != next_scratch_regno)
4107 reg_save_addr[which_scratch] = r;
4109 else
4111 /* Advance to the next stack slot to store this
4112 register. */
4113 int stride = ROUND_ROBIN_SIZE * -UNITS_PER_WORD;
4114 rtx p = gen_rtx_PLUS (Pmode, r, GEN_INT (stride));
4115 emit_insn (gen_rtx_SET (VOIDmode, r, p));
4118 /* Save this register to the stack (but use the old fp value
4119 we copied aside if appropriate). */
4120 from_regno =
4121 (fp_copy_regno >= 0 && regno == HARD_FRAME_POINTER_REGNUM)
4122 ? fp_copy_regno : regno;
4123 FRP (frame_emit_store (from_regno, regno, r, cfa, cfa_offset));
4125 offset -= UNITS_PER_WORD;
4126 which_scratch = (which_scratch + 1) % ROUND_ROBIN_SIZE;
4129 /* If profiling, force that to happen after the frame is set up. */
4130 if (crtl->profile)
4131 emit_insn (gen_blockage ());
4133 /* Load the PIC register if needed. */
4134 if (flag_pic && crtl->uses_pic_offset_table)
4135 load_pic_register (false);
4139 /* Implement the epilogue and sibcall_epilogue patterns. SIBCALL_P is
4140 true for a sibcall_epilogue pattern, and false for an epilogue
4141 pattern. */
4142 void
4143 tilegx_expand_epilogue (bool sibcall_p)
4145 /* We round-robin through four scratch registers to hold temporary
4146 addresses for saving registers, to make instruction scheduling
4147 easier. */
4148 rtx reg_save_addr[ROUND_ROBIN_SIZE] = {
4149 NULL_RTX, NULL_RTX, NULL_RTX, NULL_RTX
4151 rtx last_insn, insn;
4152 unsigned int which_scratch;
4153 int offset, start_offset, regno;
4154 rtx cfa_restores = NULL_RTX;
4156 /* A register that holds a copy of the incoming fp. */
4157 int fp_copy_regno = -1;
4159 /* Next scratch register number to hand out (postdecrementing). */
4160 int next_scratch_regno = 29;
4162 int total_size = compute_total_frame_size ();
4164 last_insn = get_last_insn ();
4166 /* Load lr first since we are going to need it first. */
4167 insn = NULL;
4168 if (df_regs_ever_live_p (TILEGX_LINK_REGNUM))
4170 insn = frame_emit_load (TILEGX_LINK_REGNUM,
4171 compute_frame_addr (0, &next_scratch_regno),
4172 &cfa_restores);
4175 if (total_size == 0)
4177 if (insn)
4179 RTX_FRAME_RELATED_P (insn) = 1;
4180 REG_NOTES (insn) = cfa_restores;
4182 goto done;
4185 /* Compute where to start restoring registers. */
4186 start_offset = -crtl->args.pretend_args_size - UNITS_PER_WORD;
4187 offset = start_offset;
4189 if (frame_pointer_needed)
4190 fp_copy_regno = next_scratch_regno--;
4192 /* Restore all callee-saved registers. */
4193 which_scratch = 0;
4194 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
4195 if (need_to_save_reg (regno))
4197 rtx r = reg_save_addr[which_scratch];
4198 if (r == NULL_RTX)
4200 r = compute_frame_addr (offset, &next_scratch_regno);
4201 reg_save_addr[which_scratch] = r;
4203 else
4205 /* Advance to the next stack slot to store this register. */
4206 int stride = ROUND_ROBIN_SIZE * -UNITS_PER_WORD;
4207 rtx p = gen_rtx_PLUS (Pmode, r, GEN_INT (stride));
4208 emit_insn (gen_rtx_SET (VOIDmode, r, p));
4211 if (fp_copy_regno >= 0 && regno == HARD_FRAME_POINTER_REGNUM)
4212 frame_emit_load (fp_copy_regno, r, NULL);
4213 else
4214 frame_emit_load (regno, r, &cfa_restores);
4216 offset -= UNITS_PER_WORD;
4217 which_scratch = (which_scratch + 1) % ROUND_ROBIN_SIZE;
4220 if (!tilegx_current_function_is_leaf ())
4221 cfa_restores =
4222 alloc_reg_note (REG_CFA_RESTORE, stack_pointer_rtx, cfa_restores);
4224 emit_insn (gen_blockage ());
4226 if (frame_pointer_needed)
4228 /* Restore the old stack pointer by copying from the frame
4229 pointer. */
4230 if (TARGET_32BIT)
4232 insn = emit_insn (gen_sp_restore_32bit (stack_pointer_rtx,
4233 hard_frame_pointer_rtx));
4235 else
4237 insn = emit_insn (gen_sp_restore (stack_pointer_rtx,
4238 hard_frame_pointer_rtx));
4240 RTX_FRAME_RELATED_P (insn) = 1;
4241 REG_NOTES (insn) = cfa_restores;
4242 add_reg_note (insn, REG_CFA_DEF_CFA, stack_pointer_rtx);
4244 else
4246 insn = emit_sp_adjust (total_size, &next_scratch_regno, true,
4247 cfa_restores);
4250 if (crtl->calls_eh_return)
4252 if (TARGET_32BIT)
4253 emit_insn (gen_sp_adjust_32bit (stack_pointer_rtx, stack_pointer_rtx,
4254 EH_RETURN_STACKADJ_RTX));
4255 else
4256 emit_insn (gen_sp_adjust (stack_pointer_rtx, stack_pointer_rtx,
4257 EH_RETURN_STACKADJ_RTX));
4260 /* Restore the old frame pointer. */
4261 if (frame_pointer_needed)
4263 insn = emit_move_insn (gen_lowpart (DImode, hard_frame_pointer_rtx),
4264 gen_rtx_REG (DImode, fp_copy_regno));
4265 add_reg_note (insn, REG_CFA_RESTORE, hard_frame_pointer_rtx);
4268 /* Mark the pic registers as live outside of the function. */
4269 if (flag_pic)
4271 emit_use (cfun->machine->text_label_rtx);
4272 emit_use (cfun->machine->got_rtx);
4275 done:
4276 if (!sibcall_p)
4278 emit_jump_insn (gen__return ());
4280 else
4282 emit_use (gen_rtx_REG (Pmode, TILEGX_LINK_REGNUM));
4285 /* Mark all insns we just emitted as frame-related. */
4286 for (; last_insn != NULL_RTX; last_insn = next_insn (last_insn))
4287 RTX_FRAME_RELATED_P (last_insn) = 1;
4290 #undef ROUND_ROBIN_SIZE
4293 /* Implement INITIAL_ELIMINATION_OFFSET. */
4294 int
4295 tilegx_initial_elimination_offset (int from, int to)
4297 int total_size = compute_total_frame_size ();
4299 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
4301 return (total_size - crtl->args.pretend_args_size
4302 - tilegx_saved_regs_size ());
4304 else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
4306 return -(crtl->args.pretend_args_size + tilegx_saved_regs_size ());
4308 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
4310 return STACK_POINTER_OFFSET + total_size;
4312 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
4314 return STACK_POINTER_OFFSET;
4316 else
4317 gcc_unreachable ();
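/* Worked example with assumed numbers: total frame 96 bytes,
   pretend_args_size 0, saved-register area 32 bytes.  Then
   FP -> SP eliminates to 96 - 0 - 32 = 64, FP -> HFP to -32,
   AP -> SP to STACK_POINTER_OFFSET + 96, and AP -> HFP to
   STACK_POINTER_OFFSET, matching the frame diagram above
   tilegx_expand_prologue.  */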
4321 /* Return an RTX indicating where the return address to the calling
4322 function can be found. */
4323 rtx
4324 tilegx_return_addr (int count, rtx frame ATTRIBUTE_UNUSED)
4326 if (count != 0)
4327 return const0_rtx;
4329 return get_hard_reg_initial_val (Pmode, TILEGX_LINK_REGNUM);
4333 /* Implement EH_RETURN_HANDLER_RTX. The MEM needs to be volatile to
4334 prevent it from being deleted. */
4335 rtx
4336 tilegx_eh_return_handler_rtx (void)
4338 rtx tmp = gen_frame_mem (Pmode, hard_frame_pointer_rtx);
4339 MEM_VOLATILE_P (tmp) = true;
4340 return tmp;
4345 /* Registers */
4347 /* Implement TARGET_CONDITIONAL_REGISTER_USAGE. */
4348 static void
4349 tilegx_conditional_register_usage (void)
4351 global_regs[TILEGX_NETORDER_REGNUM] = 1;
4352 /* TILEGX_PIC_TEXT_LABEL_REGNUM is conditionally used. It is a
4353 member of fixed_regs, and therefore must be a member of
4354 call_used_regs, but it is not a member of call_really_used_regs[]
4355 because it is not clobbered by a call. */
4356 if (TILEGX_PIC_TEXT_LABEL_REGNUM != INVALID_REGNUM)
4358 fixed_regs[TILEGX_PIC_TEXT_LABEL_REGNUM] = 1;
4359 call_used_regs[TILEGX_PIC_TEXT_LABEL_REGNUM] = 1;
4361 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
4363 fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
4364 call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
4369 /* Implement TARGET_FRAME_POINTER_REQUIRED. */
4370 static bool
4371 tilegx_frame_pointer_required (void)
4373 return crtl->calls_eh_return || cfun->calls_alloca;
4378 /* Scheduling and reorg */
4380 /* Return the length of INSN. LENGTH is the initial length computed
4381 by attributes in the machine-description file. This is where we
4382 account for bundles. */
4383 int
4384 tilegx_adjust_insn_length (rtx insn, int length)
4386 enum machine_mode mode = GET_MODE (insn);
4388 /* A non-terminating instruction in a bundle has length 0. */
4389 if (mode == SImode)
4390 return 0;
4392 /* By default, there is no length adjustment. */
4393 return length;
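/* Concretely, under the convention set up by tilegx_gen_bundles: a
   three-instruction bundle is marked SImode, SImode, QImode, so the
   first two insns report length 0 here and only the bundle-ending
   QImode insn contributes the bundle's length to branch
   shortening.  */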
4397 /* Implement TARGET_SCHED_ISSUE_RATE. */
4398 static int
4399 tilegx_issue_rate (void)
4401 return 3;
4405 /* Return the rtx for the jump target. */
4406 static rtx
4407 get_jump_target (rtx branch)
4409 if (CALL_P (branch))
4411 rtx call;
4412 call = PATTERN (branch);
4414 if (GET_CODE (call) == PARALLEL)
4415 call = XVECEXP (call, 0, 0);
4417 if (GET_CODE (call) == SET)
4418 call = SET_SRC (call);
4420 if (GET_CODE (call) == CALL)
4421 return XEXP (XEXP (call, 0), 0);
4423 return 0;
4427 /* Implement TARGET_SCHED_ADJUST_COST. */
4428 static int
4429 tilegx_sched_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
4431 /* If we have a true dependence, INSN is a call, and DEP_INSN
4432 defines a register that is needed by the call (argument or stack
4433 pointer), set its latency to 0 so that it can be bundled with
4434 the call. Explicitly check for and exclude the case when
4435 DEP_INSN defines the target of the jump. */
4436 if (CALL_P (insn) && REG_NOTE_KIND (link) == REG_DEP_TRUE)
4438 rtx target = get_jump_target (insn);
4439 if (!REG_P (target) || !set_of (target, dep_insn))
4440 return 0;
4443 return cost;
4447 /* Skip over irrelevant NOTEs and such and look for the next insn we
4448 would consider bundling. */
4449 static rtx
4450 next_insn_to_bundle (rtx r, rtx end)
4452 for (; r != end; r = NEXT_INSN (r))
4454 if (NONDEBUG_INSN_P (r)
4455 && GET_CODE (PATTERN (r)) != USE
4456 && GET_CODE (PATTERN (r)) != CLOBBER)
4457 return r;
4460 return NULL_RTX;
4464 /* Go through all insns, and use the information generated during
4465 scheduling to generate SEQUENCEs to represent bundles of
4466 instructions issued simultaneously. */
4467 static void
4468 tilegx_gen_bundles (void)
4470 basic_block bb;
4471 FOR_EACH_BB_FN (bb, cfun)
4473 rtx insn, next, prev;
4474 rtx end = NEXT_INSN (BB_END (bb));
4476 prev = NULL_RTX;
4477 for (insn = next_insn_to_bundle (BB_HEAD (bb), end); insn;
4478 prev = insn, insn = next)
4480 next = next_insn_to_bundle (NEXT_INSN (insn), end);
4482 /* Never wrap {} around inline asm. */
4483 if (GET_CODE (PATTERN (insn)) != ASM_INPUT)
4485 if (next == NULL_RTX || GET_MODE (next) == TImode
4486 /* NOTE: The scheduler incorrectly believes a call
4487 insn can execute in the same cycle as the insn
4488 after the call. This is of course impossible.
4489 Really we need to fix the scheduler somehow, so
4490 the code after the call gets scheduled
4491 optimally. */
4492 || CALL_P (insn))
4494 /* Mark current insn as the end of a bundle. */
4495 PUT_MODE (insn, QImode);
4497 else
4499 /* Mark it as part of a bundle. */
4500 PUT_MODE (insn, SImode);
4504 /* Delete barrier insns, because they can mess up the
4505 emitting of bundle braces. If it is end-of-bundle, then
4506 the previous insn must be marked end-of-bundle. */
4507 if (get_attr_type (insn) == TYPE_NOTHING)
4508 if (GET_MODE (insn) == QImode && prev != NULL
4509 && GET_MODE (prev) == SImode)
4511 PUT_MODE (prev, QImode);
4513 delete_insn (insn);
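/* Illustrative schedule (assumed, not from a real trace): if the
   scheduler issued insns A, B, C in one cycle and D in the next,
   then D begins a TImode group, so the loop above marks A and B
   SImode (inside a bundle), C QImode (it is followed by a TImode
   insn, ending the bundle), and D QImode as a singleton bundle.  */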
4520 /* Replace OLD_INSN with NEW_INSNS. */
4521 static void
4522 replace_insns (rtx old_insn, rtx new_insns)
4524 if (new_insns)
4525 emit_insn_before (new_insns, old_insn);
4527 delete_insn (old_insn);
4531 /* Returns true if INSN is the first instruction of a pc-relative
4532 address computation. */
4533 static bool
4534 match_pcrel_step1 (rtx insn)
4536 rtx pattern = PATTERN (insn);
4537 rtx src;
4539 if (GET_CODE (pattern) != SET)
4540 return false;
4542 src = SET_SRC (pattern);
4544 return (GET_CODE (src) == CONST
4545 && GET_CODE (XEXP (src, 0)) == UNSPEC
4546 && XINT (XEXP (src, 0), 1) == UNSPEC_HW1_LAST_PCREL);
4550 /* Do the first replacement step in tilegx_fixup_pcrel_references. */
4551 static void
4552 replace_mov_pcrel_step1 (rtx insn)
4554 rtx pattern = PATTERN (insn);
4555 rtx unspec;
4556 rtx opnds[2];
4557 rtx new_insns;
4559 gcc_assert (GET_CODE (pattern) == SET);
4560 opnds[0] = SET_DEST (pattern);
4562 gcc_assert (GET_CODE (SET_SRC (pattern)) == CONST);
4564 unspec = XEXP (SET_SRC (pattern), 0);
4565 gcc_assert (GET_CODE (unspec) == UNSPEC);
4566 gcc_assert (XINT (unspec, 1) == UNSPEC_HW1_LAST_PCREL);
4567 opnds[1] = XVECEXP (unspec, 0, 0);
4569 /* We only need to replace SYMBOL_REFs, not LABEL_REFs. */
4570 if (GET_CODE (opnds[1]) != SYMBOL_REF)
4571 return;
4573 start_sequence ();
4575 if (flag_pic != 1)
4577 if (TARGET_32BIT)
4578 emit_insn (gen_mov_got32_step1_32bit (opnds[0], opnds[1]));
4579 else
4580 emit_insn (gen_mov_got32_step1 (opnds[0], opnds[1]));
4583 new_insns = get_insns ();
4584 end_sequence ();
4586 replace_insns (insn, new_insns);
4590 /* Returns true if INSN is the second instruction of a pc-relative
4591 address computation. */
4592 static bool
4593 match_pcrel_step2 (rtx insn)
4595 rtx unspec;
4596 rtx addr;
4598 if (TARGET_32BIT)
4600 if (recog_memoized (insn) != CODE_FOR_insn_addr_shl16insli_32bit)
4601 return false;
4603 else
4605 if (recog_memoized (insn) != CODE_FOR_insn_addr_shl16insli)
4606 return false;
4609 unspec = SET_SRC (PATTERN (insn));
4610 addr = XVECEXP (unspec, 0, 1);
4612 return (GET_CODE (addr) == CONST
4613 && GET_CODE (XEXP (addr, 0)) == UNSPEC
4614 && XINT (XEXP (addr, 0), 1) == UNSPEC_HW0_PCREL);
4618 /* Do the second replacement step in tilegx_fixup_pcrel_references. */
4619 static void
4620 replace_mov_pcrel_step2 (rtx insn)
4622 rtx pattern = PATTERN (insn);
4623 rtx unspec;
4624 rtx addr;
4625 rtx opnds[3];
4626 rtx new_insns;
4627 rtx got_rtx = tilegx_got_rtx ();
4629 gcc_assert (GET_CODE (pattern) == SET);
4630 opnds[0] = SET_DEST (pattern);
4632 unspec = SET_SRC (pattern);
4633 gcc_assert (GET_CODE (unspec) == UNSPEC);
4634 gcc_assert (XINT (unspec, 1) == UNSPEC_INSN_ADDR_SHL16INSLI);
4636 opnds[1] = XVECEXP (unspec, 0, 0);
4638 addr = XVECEXP (unspec, 0, 1);
4639 gcc_assert (GET_CODE (addr) == CONST);
4641 unspec = XEXP (addr, 0);
4642 gcc_assert (GET_CODE (unspec) == UNSPEC);
4643 gcc_assert (XINT (unspec, 1) == UNSPEC_HW0_PCREL);
4644 opnds[2] = XVECEXP (unspec, 0, 0);
4646 /* We only need to replace SYMBOL_REFs, not LABEL_REFs. */
4647 if (GET_CODE (opnds[2]) != SYMBOL_REF)
4648 return;
4650 start_sequence ();
4652 if (flag_pic == 1)
4654 if (TARGET_32BIT)
4655 emit_insn (gen_add_got16_32bit (opnds[0], got_rtx, opnds[2]));
4656 else
4657 emit_insn (gen_add_got16 (opnds[0], got_rtx, opnds[2]));
4659 else
4661 if (TARGET_32BIT)
4662 emit_insn (gen_mov_got32_step2_32bit
4663 (opnds[0], opnds[1], opnds[2]));
4664 else
4665 emit_insn (gen_mov_got32_step2 (opnds[0], opnds[1], opnds[2]));
4668 new_insns = get_insns ();
4669 end_sequence ();
4671 replace_insns (insn, new_insns);
4675 /* Do the third replacement step in tilegx_fixup_pcrel_references. */
4676 static void
4677 replace_mov_pcrel_step3 (rtx insn)
4679 rtx pattern = PATTERN (insn);
4680 rtx unspec;
4681 rtx opnds[4];
4682 rtx new_insns;
4683 rtx got_rtx = tilegx_got_rtx ();
4684 rtx text_label_rtx = tilegx_text_label_rtx ();
4686 gcc_assert (GET_CODE (pattern) == SET);
4687 opnds[0] = SET_DEST (pattern);
4689 unspec = SET_SRC (pattern);
4690 gcc_assert (GET_CODE (unspec) == UNSPEC);
4691 gcc_assert (XINT (unspec, 1) == UNSPEC_MOV_PCREL_STEP3);
4693 opnds[1] = got_rtx;
4695 if (XVECEXP (unspec, 0, 0) == text_label_rtx)
4696 opnds[2] = XVECEXP (unspec, 0, 1);
4697 else
4699 gcc_assert (XVECEXP (unspec, 0, 1) == text_label_rtx);
4700 opnds[2] = XVECEXP (unspec, 0, 0);
4703 opnds[3] = XVECEXP (unspec, 0, 2);
4705 /* We only need to replace SYMBOL_REFs, not LABEL_REFs. */
4706 if (GET_CODE (opnds[3]) != SYMBOL_REF)
4707 return;
4709 start_sequence ();
4711 if (flag_pic == 1)
4713 emit_move_insn (opnds[0], gen_const_mem (Pmode, opnds[2]));
4715 else
4717 emit_move_insn (opnds[0], gen_rtx_PLUS (Pmode, opnds[1], opnds[2]));
4718 emit_move_insn (opnds[0], gen_const_mem (Pmode, opnds[0]));
4721 new_insns = get_insns ();
4722 end_sequence ();
4724 replace_insns (insn, new_insns);
4728 /* We generate PC relative SYMBOL_REFs as an optimization, to avoid
4729 going through the GOT when the symbol is local to the compilation
4730 unit. But such a symbol requires that the common text_label that
4731 we generate at the beginning of the function be in the same section
4732 as the reference to the SYMBOL_REF. This may not be true if we
4733 generate hot/cold sections. This function looks for such cases and
4734 replaces such references with the longer sequence going through the
4735 GOT.
4737 We expect the following instruction sequence:
4738 moveli tmp1, hw1_last(x-.L_PICLNK) [1]
4739 shl16insli tmp2, tmp1, hw0(x-.L_PICLNK) [2]
4740 add<x> tmp3, txt_label_reg, tmp2 [3]
4742 If we're compiling -fpic, we replace with the following sequence
4743 (the numbers in brackets match the instructions they're replacing
4744 above).
4746 add<x>li tmp2, got_reg, hw0_last_got(x) [2]
4747 ld<4> tmp3, tmp2 [3]
4749 If we're compiling -fPIC, we replace the first instruction with:
4751 moveli tmp1, hw1_last_got(x) [1]
4752 shl16insli tmp2, tmp1, hw0_got(x) [2]
4753 add<x> tmp3, got_reg, tmp2 [3]
4754 ld<4> tmp3, tmp3 [3]
4756 Note that we're careful to disturb the instruction sequence as
4757 little as possible, since it's very late in the compilation
4758 process. */
4759 static void
4760 tilegx_fixup_pcrel_references (void)
4762 rtx insn, next_insn;
4763 bool same_section_as_entry = true;
4765 for (insn = get_insns (); insn; insn = next_insn)
4767 next_insn = NEXT_INSN (insn);
4769 if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
4771 same_section_as_entry = !same_section_as_entry;
4772 continue;
4775 if (same_section_as_entry)
4776 continue;
4778 if (!(INSN_P (insn)
4779 && GET_CODE (PATTERN (insn)) != USE
4780 && GET_CODE (PATTERN (insn)) != CLOBBER))
4781 continue;
4783 if (TARGET_32BIT)
4785 if (match_pcrel_step1 (insn))
4786 replace_mov_pcrel_step1 (insn);
4787 else if (match_pcrel_step2 (insn))
4788 replace_mov_pcrel_step2 (insn);
4789 else if (recog_memoized (insn) == CODE_FOR_mov_pcrel_step3_32bit)
4790 replace_mov_pcrel_step3 (insn);
4792 else
4794 if (match_pcrel_step1 (insn))
4795 replace_mov_pcrel_step1 (insn);
4796 else if (match_pcrel_step2 (insn))
4797 replace_mov_pcrel_step2 (insn);
4798 else if (recog_memoized (insn) == CODE_FOR_mov_pcrel_step3)
4799 replace_mov_pcrel_step3 (insn);
4805 /* Ensure that no var tracking notes are emitted in the middle of a
4806 three-instruction bundle. */
4807 static void
4808 reorder_var_tracking_notes (void)
4810 basic_block bb;
4811 FOR_EACH_BB_FN (bb, cfun)
4813 rtx insn, next;
4814 rtx queue = NULL_RTX;
4815 bool in_bundle = false;
4817 for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = next)
4819 next = NEXT_INSN (insn);
4821 if (INSN_P (insn))
4823 /* Emit queued up notes at the last instruction of a
4824 bundle. */
4825 if (GET_MODE (insn) == QImode)
4827 while (queue)
4829 rtx next_queue = PREV_INSN (queue);
4830 PREV_INSN (NEXT_INSN (insn)) = queue;
4831 NEXT_INSN (queue) = NEXT_INSN (insn);
4832 NEXT_INSN (insn) = queue;
4833 PREV_INSN (queue) = insn;
4834 queue = next_queue;
4836 in_bundle = false;
4838 else if (GET_MODE (insn) == SImode)
4839 in_bundle = true;
4841 else if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION)
4843 if (in_bundle)
4845 rtx prev = PREV_INSN (insn);
4846 PREV_INSN (next) = prev;
4847 NEXT_INSN (prev) = next;
4849 PREV_INSN (insn) = queue;
4850 queue = insn;
4858 /* Perform machine dependent operations on the rtl chain INSNS. */
4859 static void
4860 tilegx_reorg (void)
4862 /* We are freeing block_for_insn in the toplev to keep compatibility
4863 with old MDEP_REORGS that are not CFG based. Recompute it
4864 now. */
4865 compute_bb_for_insn ();
4867 if (flag_reorder_blocks_and_partition)
4869 tilegx_fixup_pcrel_references ();
4872 if (flag_schedule_insns_after_reload)
4874 split_all_insns ();
4876 timevar_push (TV_SCHED2);
4877 schedule_insns ();
4878 timevar_pop (TV_SCHED2);
4880 /* Examine the schedule to group into bundles. */
4881 tilegx_gen_bundles ();
4884 df_analyze ();
4886 if (flag_var_tracking)
4888 timevar_push (TV_VAR_TRACKING);
4889 variable_tracking_main ();
4890 reorder_var_tracking_notes ();
4891 timevar_pop (TV_VAR_TRACKING);
4894 df_finish_pass (false);
4899 /* Assembly */
4901 /* Select a format to encode pointers in exception handling data.
4902 CODE is 0 for data, 1 for code labels, 2 for function pointers.
4903 GLOBAL is true if the symbol may be affected by dynamic
4904 relocations. */
4905 int
4906 tilegx_asm_preferred_eh_data_format (int code ATTRIBUTE_UNUSED, int global)
4908 int type = TARGET_32BIT ? DW_EH_PE_sdata4 : DW_EH_PE_sdata8;
4909 return (global ? DW_EH_PE_indirect : 0) | DW_EH_PE_pcrel | type;
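/* Worked example using the standard DWARF EH encoding values
   (DW_EH_PE_indirect 0x80, DW_EH_PE_pcrel 0x10, DW_EH_PE_sdata8
   0x0c): for a global symbol on 64-bit TILE-Gx this returns
   0x80 | 0x10 | 0x0c == 0x9c, an indirect pc-relative signed
   8-byte reference; a local symbol drops the indirection and
   yields 0x1c.  */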
4913 /* Implement TARGET_ASM_OUTPUT_MI_THUNK. */
4914 static void
4915 tilegx_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
4916 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
4917 tree function)
4919 rtx this_rtx, insn, funexp, addend;
4921 /* Pretend to be a post-reload pass while generating rtl. */
4922 reload_completed = 1;
4924 /* Mark the end of the (empty) prologue. */
4925 emit_note (NOTE_INSN_PROLOGUE_END);
4927 /* Find the "this" pointer. If the function returns a structure,
4928 the structure return pointer is in $1. */
4929 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
4930 this_rtx = gen_rtx_REG (Pmode, 1);
4931 else
4932 this_rtx = gen_rtx_REG (Pmode, 0);
4934 /* Add DELTA to THIS_RTX. */
4935 if (!(delta >= -32768 && delta <= 32767))
4937 addend = gen_rtx_REG (Pmode, 29);
4938 emit_move_insn (addend, GEN_INT (delta));
4940 else
4941 addend = GEN_INT (delta);
4943 if (TARGET_32BIT)
4944 emit_insn (gen_addsi3 (this_rtx, this_rtx, addend));
4945 else
4946 emit_insn (gen_adddi3 (this_rtx, this_rtx, addend));
4948 /* If needed, add *(*THIS_RTX + VCALL_OFFSET) to THIS_RTX. */
4949 if (vcall_offset)
4951 rtx tmp;
4953 tmp = gen_rtx_REG (Pmode, 29);
4954 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this_rtx));
4956 if (!(vcall_offset >= -32768 && vcall_offset <= 32767))
4958 addend = gen_rtx_REG (Pmode, 28);
4959 emit_move_insn (addend, GEN_INT (vcall_offset));
4961 else
4962 addend = GEN_INT (vcall_offset);
4964 if (TARGET_32BIT)
4965 emit_insn (gen_addsi3 (tmp, tmp, addend));
4966 else
4967 emit_insn (gen_adddi3 (tmp, tmp, addend));
4969 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
4971 if (TARGET_32BIT)
4972 emit_insn (gen_addsi3 (this_rtx, this_rtx, tmp));
4973 else
4974 emit_insn (gen_adddi3 (this_rtx, this_rtx, tmp));
4977 /* Generate a tail call to the target function. */
4978 if (!TREE_USED (function))
4980 assemble_external (function);
4981 TREE_USED (function) = 1;
4983 funexp = XEXP (DECL_RTL (function), 0);
4984 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
4985 insn = emit_call_insn (gen_sibcall (funexp, const0_rtx));
4986 SIBLING_CALL_P (insn) = 1;
4988 /* Run just enough of rest_of_compilation to get the insns emitted.
4989 There's not really enough bulk here to make other passes such as
4990 instruction scheduling worthwhile. Note that use_thunk calls
4991 assemble_start_function and assemble_end_function.
4993 We don't currently bundle, but the instruction sequence is all
4994 serial except for the tail call, so we're only wasting one cycle.
4995 */
4996 insn = get_insns ();
4997 shorten_branches (insn);
4998 final_start_function (insn, file, 1);
4999 final (insn, file, 1);
5000 final_end_function ();
5002 /* Stop pretending to be a post-reload pass. */
5003 reload_completed = 0;
5007 /* Implement TARGET_ASM_TRAMPOLINE_TEMPLATE. */
5008 static void
5009 tilegx_asm_trampoline_template (FILE *file)
5011 int ptr_mode_size = GET_MODE_SIZE (ptr_mode);
5012 if (TARGET_32BIT)
5014 fprintf (file, "\tlnk r10\n");
5015 fprintf (file, "\taddxi r10, r10, 32\n");
5016 fprintf (file, "\tld4s_add r11, r10, %d\n", ptr_mode_size);
5017 fprintf (file, "\tld4s r10, r10\n");
5018 fprintf (file, "\tjr r11\n");
5019 fprintf (file, "\t.word 0 # <function address>\n");
5020 fprintf (file, "\t.word 0 # <static chain value>\n");
5022 else
5024 fprintf (file, "\tlnk r10\n");
5025 fprintf (file, "\taddi r10, r10, 32\n");
5026 fprintf (file, "\tld_add r11, r10, %d\n", ptr_mode_size);
5027 fprintf (file, "\tld r10, r10\n");
5028 fprintf (file, "\tjr r11\n");
5029 fprintf (file, "\t.quad 0 # <function address>\n");
5030 fprintf (file, "\t.quad 0 # <static chain value>\n");
5035 /* Implement TARGET_TRAMPOLINE_INIT. */
5036 static void
5037 tilegx_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
5039 rtx fnaddr, chaddr;
5040 rtx mem;
5041 rtx begin_addr, end_addr;
5042 int ptr_mode_size = GET_MODE_SIZE (ptr_mode);
5044 fnaddr = copy_to_reg (XEXP (DECL_RTL (fndecl), 0));
5045 chaddr = copy_to_reg (static_chain);
5047 emit_block_move (m_tramp, assemble_trampoline_template (),
5048 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
5050 mem = adjust_address (m_tramp, ptr_mode,
5051 TRAMPOLINE_SIZE - 2 * ptr_mode_size);
5052 emit_move_insn (mem, fnaddr);
5053 mem = adjust_address (m_tramp, ptr_mode,
5054 TRAMPOLINE_SIZE - ptr_mode_size);
5055 emit_move_insn (mem, chaddr);
5057 /* Get pointers to the beginning and end of the code block. */
5058 begin_addr = force_reg (Pmode, XEXP (m_tramp, 0));
5059 end_addr = force_reg (Pmode, plus_constant (Pmode, XEXP (m_tramp, 0),
5060 TRAMPOLINE_SIZE));
5062 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__clear_cache"),
5063 LCT_NORMAL, VOIDmode, 2, begin_addr, Pmode,
5064 end_addr, Pmode);
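/* Resulting trampoline layout, assuming the 64-bit template above
   (ptr_mode_size == 8):

     [0 .. TRAMPOLINE_SIZE-17]   code copied from the template
     [TRAMPOLINE_SIZE-16 .. -9]  function address, patched in above
     [TRAMPOLINE_SIZE-8  .. -1]  static chain value, patched in above

   At run time the lnk/addi/ld_add sequence loads those two words,
   leaves the static chain in r10, and jumps through r11; the
   __clear_cache call keeps the freshly written code coherent with
   the instruction cache.  */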
5068 /* Implement TARGET_PRINT_OPERAND. */
5069 static void
5070 tilegx_print_operand (FILE *file, rtx x, int code)
5072 switch (code)
5074 case 'c':
5075 /* Print the compare operator opcode for conditional moves. */
5076 switch (GET_CODE (x))
5078 case EQ:
5079 fputs ("z", file);
5080 break;
5081 case NE:
5082 fputs ("nz", file);
5083 break;
5084 default:
5085 output_operand_lossage ("invalid %%c operand");
5087 return;
5089 case 'C':
5090 /* Print the compare operator opcode for conditional moves. */
5091 switch (GET_CODE (x))
5093 case EQ:
5094 fputs ("nz", file);
5095 break;
5096 case NE:
5097 fputs ("z", file);
5098 break;
5099 default:
5100 output_operand_lossage ("invalid %%C operand");
5102 return;
5104 case 'd':
5106 /* Print the compare operator opcode for conditional moves. */
5107 switch (GET_CODE (x))
5109 case EQ:
5110 fputs ("eq", file);
5111 break;
5112 case NE:
5113 fputs ("ne", file);
5114 break;
5115 default:
5116 output_operand_lossage ("invalid %%d operand");
5118 return;
5121 case 'D':
5123 /* Print the compare operator opcode for conditional moves. */
5124 switch (GET_CODE (x))
5126 case EQ:
5127 fputs ("ne", file);
5128 break;
5129 case NE:
5130 fputs ("eq", file);
5131 break;
5132 default:
5133 output_operand_lossage ("invalid %%D operand");
5135 return;
5138 case 'H':
5140 if (GET_CODE (x) == CONST
5141 && GET_CODE (XEXP (x, 0)) == UNSPEC)
5143 rtx addr = XVECEXP (XEXP (x, 0), 0, 0);
5144 int unspec = XINT (XEXP (x, 0), 1);
5145 const char *opstr = NULL;
5146 switch (unspec)
5148 case UNSPEC_HW0:
5149 case UNSPEC_HW0_PCREL:
5150 opstr = "hw0";
5151 break;
5152 case UNSPEC_HW1:
5153 case UNSPEC_HW1_PCREL:
5154 opstr = "hw1";
5155 break;
5156 case UNSPEC_HW2:
5157 opstr = "hw2";
5158 break;
5159 case UNSPEC_HW3:
5160 opstr = "hw3";
5161 break;
5162 case UNSPEC_HW0_LAST:
5163 opstr = "hw0_last";
5164 break;
5165 case UNSPEC_HW1_LAST:
5166 case UNSPEC_HW1_LAST_PCREL:
5167 opstr = "hw1_last";
5168 break;
5169 case UNSPEC_HW2_LAST:
5170 case UNSPEC_HW2_LAST_PCREL:
5171 opstr = "hw2_last";
5172 break;
5173 case UNSPEC_HW0_GOT:
5174 opstr = "hw0_got";
5175 break;
5176 case UNSPEC_HW0_LAST_GOT:
5177 opstr = "hw0_last_got";
5178 break;
5179 case UNSPEC_HW1_LAST_GOT:
5180 opstr = "hw1_last_got";
5181 break;
5182 case UNSPEC_HW0_TLS_GD:
5183 opstr = "hw0_tls_gd";
5184 break;
5185 case UNSPEC_HW1_LAST_TLS_GD:
5186 opstr = "hw1_last_tls_gd";
5187 break;
5188 case UNSPEC_HW0_TLS_IE:
5189 opstr = "hw0_tls_ie";
5190 break;
5191 case UNSPEC_HW1_LAST_TLS_IE:
5192 opstr = "hw1_last_tls_ie";
5193 break;
5194 case UNSPEC_HW0_TLS_LE:
5195 opstr = "hw0_tls_le";
5196 break;
5197 case UNSPEC_HW1_LAST_TLS_LE:
5198 opstr = "hw1_last_tls_le";
5199 break;
5200 case UNSPEC_HW0_PLT_PCREL:
5201 opstr = "hw0_plt";
5202 break;
5203 case UNSPEC_HW1_PLT_PCREL:
5204 opstr = "hw1_plt";
5205 break;
5206 case UNSPEC_HW1_LAST_PLT_PCREL:
5207 opstr = "hw1_last_plt";
5208 break;
5209 case UNSPEC_HW2_LAST_PLT_PCREL:
5210 opstr = "hw2_last_plt";
5211 break;
5212 default:
5213 output_operand_lossage ("invalid %%H specifier");
5216 fputs (opstr, file);
5217 fputc ('(', file);
5218 output_addr_const (file, addr);
5220 if (unspec == UNSPEC_HW0_PCREL
5221 || unspec == UNSPEC_HW1_PCREL
5222 || unspec == UNSPEC_HW1_LAST_PCREL
5223 || unspec == UNSPEC_HW2_LAST_PCREL
5224 || unspec == UNSPEC_HW0_PLT_PCREL
5225 || unspec == UNSPEC_HW1_PLT_PCREL
5226 || unspec == UNSPEC_HW1_LAST_PLT_PCREL
5227 || unspec == UNSPEC_HW2_LAST_PLT_PCREL)
5229 rtx addr2 = XVECEXP (XEXP (x, 0), 0, 1);
5230 fputs (" - " , file);
5231 output_addr_const (file, addr2);
5234 fputc (')', file);
5235 return;
5237 else if (symbolic_operand (x, VOIDmode))
5239 output_addr_const (file, x);
5240 return;
5243 /* FALLTHRU */
5245 case 'h':
5247 /* Print the low 16 bits of a constant. */
5248 HOST_WIDE_INT i;
5249 if (CONST_INT_P (x))
5250 i = INTVAL (x);
5251 else if (GET_CODE (x) == CONST_DOUBLE)
5252 i = CONST_DOUBLE_LOW (x);
5253 else
5255 output_operand_lossage ("invalid %%h operand");
5256 return;
5258 i = trunc_int_for_mode (i, HImode);
5259 fprintf (file, HOST_WIDE_INT_PRINT_DEC, i);
5260 return;
5263 case 'I':
5264 /* Print the register fragment of an auto-inc memory operand. */
5265 if (!MEM_P (x))
5267 output_operand_lossage ("invalid %%I operand");
5268 return;
5271 output_memory_reference_mode = GET_MODE (x);
5272 output_memory_autoinc_first = true;
5273 output_address (XEXP (x, 0));
5274 output_memory_reference_mode = VOIDmode;
5275 return;
5277 case 'i':
5278 /* Print the offset fragment of an auto-inc memory operand. */
5279 if (!MEM_P (x))
5281 output_operand_lossage ("invalid %%i operand");
5282 return;
5285 output_memory_reference_mode = GET_MODE (x);
5286 output_memory_autoinc_first = false;
5287 output_address (XEXP (x, 0));
5288 output_memory_reference_mode = VOIDmode;
5289 return;
5291 case 'j':
5293 /* Print the low 8 bits of a constant. */
5294 HOST_WIDE_INT i;
5295 if (CONST_INT_P (x))
5296 i = INTVAL (x);
5297 else if (GET_CODE (x) == CONST_DOUBLE)
5298 i = CONST_DOUBLE_LOW (x);
5299 else if (GET_CODE (x) == CONST_VECTOR
5300 && CONST_INT_P (CONST_VECTOR_ELT (x, 0)))
5301 i = INTVAL (CONST_VECTOR_ELT (x, 0));
5302 else
5304 output_operand_lossage ("invalid %%j operand");
5305 return;
5307 i = trunc_int_for_mode (i, QImode);
5308 fprintf (file, HOST_WIDE_INT_PRINT_DEC, i);
5309 return;
5312 case 'P':
5314 /* Print a constant plus one. */
5315 if (!CONST_INT_P (x))
5317 output_operand_lossage ("invalid %%P operand");
5318 return;
5320 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + 1);
5321 return;
5324 case 'm':
5325 case 'M':
5327 /* Print a bfextu-style bit range. */
5328 int first_bit, last_bit;
5329 HOST_WIDE_INT flip = (code == 'm') ? ~0 : 0;
5331 if (!CONST_INT_P (x)
5332 || !tilegx_bitfield_operand_p (INTVAL (x) ^ flip,
5333 &first_bit, &last_bit))
5335 output_operand_lossage ("invalid %%%c operand", code);
5336 return;
5339 fprintf (file, "%d, %d", first_bit, last_bit);
5340 return;
5343 case 'N':
5345 const char *reg = NULL;
5347 /* Print a network register. */
5348 if (!CONST_INT_P (x))
5350 output_operand_lossage ("invalid %%N operand");
5351 return;
5354 switch (INTVAL (x))
5356 case TILEGX_NETREG_IDN0: reg = "idn0"; break;
5357 case TILEGX_NETREG_IDN1: reg = "idn1"; break;
5358 case TILEGX_NETREG_UDN0: reg = "udn0"; break;
5359 case TILEGX_NETREG_UDN1: reg = "udn1"; break;
5360 case TILEGX_NETREG_UDN2: reg = "udn2"; break;
5361 case TILEGX_NETREG_UDN3: reg = "udn3"; break;
5362 default:
5363 gcc_unreachable ();
5366 fputs (reg, file);
5367 return;
5370 case 'p':
5371 if (GET_CODE (x) == SYMBOL_REF)
5373 if (flag_pic && !SYMBOL_REF_LOCAL_P (x))
5374 fprintf (file, "plt(");
5375 output_addr_const (file, x);
5376 if (flag_pic && !SYMBOL_REF_LOCAL_P (x))
5377 fprintf (file, ")");
5379 else
5380 output_addr_const (file, x);
5381 return;
5383 case 'r':
5384 /* In this case we need a register. Use 'zero' if the operand
5385 is const0_rtx. */
5386 if (x == const0_rtx
5387 || (GET_MODE (x) != VOIDmode && x == CONST0_RTX (GET_MODE (x))))
5389 fputs ("zero", file);
5390 return;
5392 else if (!REG_P (x))
5394 output_operand_lossage ("invalid operand for 'r' specifier");
5395 return;
5397 /* FALLTHRU */
5399 case 0:
5400 if (REG_P (x))
5402 fprintf (file, "%s", reg_names[REGNO (x)]);
5403 return;
5405 else if (MEM_P (x))
5407 output_memory_reference_mode = VOIDmode;
5408 output_address (XEXP (x, 0));
5409 return;
5411 else
5413 output_addr_const (file, x);
5414 return;
5418 debug_rtx (x);
5419 output_operand_lossage ("unable to print out operand yet; code == %d (%c)",
5420 code, code);
5424 /* Implement TARGET_PRINT_OPERAND_ADDRESS. */
5425 static void
5426 tilegx_print_operand_address (FILE *file, rtx addr)
5428 if (GET_CODE (addr) == POST_DEC
5429 || GET_CODE (addr) == POST_INC)
5431 int offset = GET_MODE_SIZE (output_memory_reference_mode);
5433 gcc_assert (output_memory_reference_mode != VOIDmode);
5435 if (output_memory_autoinc_first)
5436 fprintf (file, "%s", reg_names[REGNO (XEXP (addr, 0))]);
5437 else
5438 fprintf (file, "%d",
5439 GET_CODE (addr) == POST_DEC ? -offset : offset);
5441 else if (GET_CODE (addr) == POST_MODIFY)
5443 gcc_assert (output_memory_reference_mode != VOIDmode);
5445 gcc_assert (GET_CODE (XEXP (addr, 1)) == PLUS);
5447 if (output_memory_autoinc_first)
5448 fprintf (file, "%s", reg_names[REGNO (XEXP (addr, 0))]);
5449 else
5450 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5451 INTVAL (XEXP (XEXP (addr, 1), 1)));
5453 else
5454 tilegx_print_operand (file, addr, 'r');
5458 /* Machine mode of current insn, for determining curly brace
5459 placement. */
5460 static enum machine_mode insn_mode;
5463 /* Implement FINAL_PRESCAN_INSN. This is used to emit bundles. */
5464 void
5465 tilegx_final_prescan_insn (rtx insn)
5467 /* Record this for tilegx_asm_output_opcode to examine. */
5468 insn_mode = GET_MODE (insn);
5472 /* While emitting asm, are we currently inside '{' for a bundle? */
5473 static bool tilegx_in_bundle = false;
5475 /* Implement ASM_OUTPUT_OPCODE. Prepend/append curly braces as
5476 appropriate given the bundling information recorded by
5477 tilegx_gen_bundles. */
5478 const char *
5479 tilegx_asm_output_opcode (FILE *stream, const char *code)
5481 bool pseudo = !strcmp (code, "pseudo");
5483 if (!tilegx_in_bundle && insn_mode == SImode)
5485 /* Start a new bundle. */
5486 fprintf (stream, "{\n\t");
5487 tilegx_in_bundle = true;
5490 if (tilegx_in_bundle && insn_mode == QImode)
5492 /* Close an existing bundle. */
5493 static char buf[100];
5495 gcc_assert (strlen (code) + 3 + 1 < sizeof (buf));
5497 strcpy (buf, pseudo ? "" : code);
5498 strcat (buf, "\n\t}");
5499 tilegx_in_bundle = false;
5501 return buf;
5503 else
5505 return pseudo ? "" : code;
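/* Sample effect (assembly illustrative): for insn modes SImode,
   SImode, QImode this routine emits

     {
       opcode1
       opcode2
       opcode3
     }

   opening the brace before the first SImode insn and appending
   "\n\t}" to the QImode insn that ends the bundle; singleton
   QImode bundles are emitted without braces.  */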
5510 /* Output assembler code to FILE to increment profiler label # LABELNO
5511 for profiling a function entry. */
5512 void
5513 tilegx_function_profiler (FILE *file, int labelno ATTRIBUTE_UNUSED)
5515 if (tilegx_in_bundle)
5517 fprintf (file, "\t}\n");
5520 if (flag_pic)
5522 fprintf (file,
5523 "\t{\n"
5524 "\tmove\tr10, lr\n"
5525 "\tjal\tplt(%s)\n"
5526 "\t}\n", MCOUNT_NAME);
5528 else
5530 fprintf (file,
5531 "\t{\n"
5532 "\tmove\tr10, lr\n"
5533 "\tjal\t%s\n"
5534 "\t}\n", MCOUNT_NAME);
5537 tilegx_in_bundle = false;
5541 /* Implement TARGET_ASM_FILE_END. */
5542 static void
5543 tilegx_file_end (void)
5545 if (NEED_INDICATE_EXEC_STACK)
5546 file_end_indicate_exec_stack ();
5551 #undef TARGET_HAVE_TLS
5552 #define TARGET_HAVE_TLS HAVE_AS_TLS
5554 #undef TARGET_OPTION_OVERRIDE
5555 #define TARGET_OPTION_OVERRIDE tilegx_option_override
5557 #undef TARGET_SCALAR_MODE_SUPPORTED_P
5558 #define TARGET_SCALAR_MODE_SUPPORTED_P tilegx_scalar_mode_supported_p
5560 #undef TARGET_VECTOR_MODE_SUPPORTED_P
5561 #define TARGET_VECTOR_MODE_SUPPORTED_P tilegx_vector_mode_supported_p
5563 #undef TARGET_CANNOT_FORCE_CONST_MEM
5564 #define TARGET_CANNOT_FORCE_CONST_MEM tilegx_cannot_force_const_mem
5566 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
5567 #define TARGET_FUNCTION_OK_FOR_SIBCALL tilegx_function_ok_for_sibcall
5569 #undef TARGET_PASS_BY_REFERENCE
5570 #define TARGET_PASS_BY_REFERENCE tilegx_pass_by_reference
5572 #undef TARGET_RETURN_IN_MSB
5573 #define TARGET_RETURN_IN_MSB tilegx_return_in_msb
5575 #undef TARGET_RETURN_IN_MEMORY
5576 #define TARGET_RETURN_IN_MEMORY tilegx_return_in_memory
5578 #undef TARGET_MODE_REP_EXTENDED
5579 #define TARGET_MODE_REP_EXTENDED tilegx_mode_rep_extended
5581 #undef TARGET_FUNCTION_ARG_BOUNDARY
5582 #define TARGET_FUNCTION_ARG_BOUNDARY tilegx_function_arg_boundary
5584 #undef TARGET_FUNCTION_ARG
5585 #define TARGET_FUNCTION_ARG tilegx_function_arg
5587 #undef TARGET_FUNCTION_ARG_ADVANCE
5588 #define TARGET_FUNCTION_ARG_ADVANCE tilegx_function_arg_advance
5590 #undef TARGET_FUNCTION_VALUE
5591 #define TARGET_FUNCTION_VALUE tilegx_function_value
5593 #undef TARGET_LIBCALL_VALUE
5594 #define TARGET_LIBCALL_VALUE tilegx_libcall_value
5596 #undef TARGET_FUNCTION_VALUE_REGNO_P
5597 #define TARGET_FUNCTION_VALUE_REGNO_P tilegx_function_value_regno_p
5599 #undef TARGET_PROMOTE_FUNCTION_MODE
5600 #define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
5602 #undef TARGET_PROMOTE_PROTOTYPES
5603 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_false
5605 #undef TARGET_BUILD_BUILTIN_VA_LIST
5606 #define TARGET_BUILD_BUILTIN_VA_LIST tilegx_build_builtin_va_list
5608 #undef TARGET_EXPAND_BUILTIN_VA_START
5609 #define TARGET_EXPAND_BUILTIN_VA_START tilegx_va_start
5611 #undef TARGET_SETUP_INCOMING_VARARGS
5612 #define TARGET_SETUP_INCOMING_VARARGS tilegx_setup_incoming_varargs
5614 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
5615 #define TARGET_GIMPLIFY_VA_ARG_EXPR tilegx_gimplify_va_arg_expr
5617 #undef TARGET_RTX_COSTS
5618 #define TARGET_RTX_COSTS tilegx_rtx_costs
5620 #undef TARGET_EXPAND_TO_RTL_HOOK
5621 #define TARGET_EXPAND_TO_RTL_HOOK tilegx_expand_to_rtl_hook
5623 #undef TARGET_SHIFT_TRUNCATION_MASK
5624 #define TARGET_SHIFT_TRUNCATION_MASK tilegx_shift_truncation_mask
5626 #undef TARGET_INIT_LIBFUNCS
5627 #define TARGET_INIT_LIBFUNCS tilegx_init_libfuncs
5629 /* Limit to what we can reach in one addli. */
5630 #undef TARGET_MIN_ANCHOR_OFFSET
5631 #define TARGET_MIN_ANCHOR_OFFSET -32768
5632 #undef TARGET_MAX_ANCHOR_OFFSET
5633 #define TARGET_MAX_ANCHOR_OFFSET 32767
5635 #undef TARGET_LEGITIMATE_CONSTANT_P
5636 #define TARGET_LEGITIMATE_CONSTANT_P tilegx_legitimate_constant_p
5638 #undef TARGET_LEGITIMATE_ADDRESS_P
5639 #define TARGET_LEGITIMATE_ADDRESS_P tilegx_legitimate_address_p
5641 #undef TARGET_LEGITIMIZE_ADDRESS
5642 #define TARGET_LEGITIMIZE_ADDRESS tilegx_legitimize_address
5644 #undef TARGET_DELEGITIMIZE_ADDRESS
5645 #define TARGET_DELEGITIMIZE_ADDRESS tilegx_delegitimize_address
5647 #undef TARGET_INIT_BUILTINS
5648 #define TARGET_INIT_BUILTINS tilegx_init_builtins
5650 #undef TARGET_BUILTIN_DECL
5651 #define TARGET_BUILTIN_DECL tilegx_builtin_decl
5653 #undef TARGET_EXPAND_BUILTIN
5654 #define TARGET_EXPAND_BUILTIN tilegx_expand_builtin
5656 #undef TARGET_CONDITIONAL_REGISTER_USAGE
5657 #define TARGET_CONDITIONAL_REGISTER_USAGE tilegx_conditional_register_usage
5659 #undef TARGET_FRAME_POINTER_REQUIRED
5660 #define TARGET_FRAME_POINTER_REQUIRED tilegx_frame_pointer_required
5662 #undef TARGET_DELAY_SCHED2
5663 #define TARGET_DELAY_SCHED2 true
5665 #undef TARGET_DELAY_VARTRACK
5666 #define TARGET_DELAY_VARTRACK true
5668 #undef TARGET_SCHED_ISSUE_RATE
5669 #define TARGET_SCHED_ISSUE_RATE tilegx_issue_rate
5671 #undef TARGET_SCHED_ADJUST_COST
5672 #define TARGET_SCHED_ADJUST_COST tilegx_sched_adjust_cost
5674 #undef TARGET_MACHINE_DEPENDENT_REORG
5675 #define TARGET_MACHINE_DEPENDENT_REORG tilegx_reorg
5677 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
5678 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK \
5679 hook_bool_const_tree_hwi_hwi_const_tree_true
5681 #undef TARGET_ASM_OUTPUT_MI_THUNK
5682 #define TARGET_ASM_OUTPUT_MI_THUNK tilegx_output_mi_thunk
5684 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
5685 #define TARGET_ASM_TRAMPOLINE_TEMPLATE tilegx_asm_trampoline_template
5687 #undef TARGET_TRAMPOLINE_INIT
5688 #define TARGET_TRAMPOLINE_INIT tilegx_trampoline_init
5690 #undef TARGET_PRINT_OPERAND
5691 #define TARGET_PRINT_OPERAND tilegx_print_operand
5693 #undef TARGET_PRINT_OPERAND_ADDRESS
5694 #define TARGET_PRINT_OPERAND_ADDRESS tilegx_print_operand_address
5696 #undef TARGET_ASM_FILE_END
5697 #define TARGET_ASM_FILE_END tilegx_file_end
5699 #undef TARGET_ASM_ALIGNED_DI_OP
5700 #define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
5702 #undef TARGET_CAN_USE_DOLOOP_P
5703 #define TARGET_CAN_USE_DOLOOP_P can_use_doloop_if_innermost
5705 struct gcc_target targetm = TARGET_INITIALIZER;
5707 #include "gt-tilegx.h"