/* Subroutines used for code generation on the Tilera TILE-Gx.
   Copyright (C) 2011-2018 Free Software Foundation, Inc.
   Contributed by Walter Lee (walt@tilera.com)

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#define IN_TARGET_CODE 1

#include "coretypes.h"
#include "stringpool.h"
#include "diagnostic.h"
#include "insn-attr.h"
#include "langhooks.h"
#include "tm-constrs.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "tilegx-builtins.h"
#include "tilegx-multiply.h"

/* This file should be included last.  */
#include "target-def.h"
/* SYMBOL_REF for GOT.  */
static GTY(()) rtx g_got_symbol = NULL;

/* Report whether we're printing out the first address fragment of a
   POST_INC or POST_DEC memory reference, from TARGET_PRINT_OPERAND to
   TARGET_PRINT_OPERAND_ADDRESS.  */
static bool output_memory_autoinc_first;
/* Implement TARGET_OPTION_OVERRIDE.  */
tilegx_option_override (void)

  if (global_options_set.x_tilegx_cmodel)
    switch (tilegx_cmodel)
	tilegx_cmodel = CM_SMALL_PIC;
	tilegx_cmodel = CM_LARGE_PIC;

  tilegx_cmodel = flag_pic ? CM_SMALL_PIC : CM_SMALL;

  /* When modulo scheduling is enabled, we still rely on regular
     scheduler for bundling.  */
  if (flag_modulo_sched)
    flag_resched_modulo_sched = 1;
/* Implement TARGET_SCALAR_MODE_SUPPORTED_P.  */
tilegx_scalar_mode_supported_p (scalar_mode mode)

/* Implement TARGET_VECTOR_MODE_SUPPORTED_P.  */
tilegx_vector_mode_supported_p (machine_mode mode)

  return mode == V8QImode || mode == V4HImode || mode == V2SImode;
/* Implement TARGET_CANNOT_FORCE_CONST_MEM.  */
tilegx_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED,
			       rtx x ATTRIBUTE_UNUSED)

/* Implement TARGET_FUNCTION_OK_FOR_SIBCALL.  */
tilegx_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)

  return (tilegx_cmodel != CM_LARGE && tilegx_cmodel != CM_LARGE_PIC
/* Implement TARGET_PASS_BY_REFERENCE.  Variable sized types are
   passed by reference.  */
tilegx_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
			  machine_mode mode ATTRIBUTE_UNUSED,
			  const_tree type, bool named ATTRIBUTE_UNUSED)

  return (type && TYPE_SIZE (type)
	  && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST);
/* Implement TARGET_RETURN_IN_MSB.  We return a value in the most
   significant part of a register if:
   - the target is big-endian; and
   - the value has an aggregate type (e.g., structure or union).  */
tilegx_return_in_msb (const_tree valtype)

  return (TARGET_BIG_ENDIAN && AGGREGATE_TYPE_P (valtype));
/* Implement TARGET_RETURN_IN_MEMORY.  */
tilegx_return_in_memory (const_tree type, const_tree fndecl ATTRIBUTE_UNUSED)

  return !IN_RANGE (int_size_in_bytes (type),
		    0, TILEGX_NUM_RETURN_REGS * UNITS_PER_WORD);
/* Implement TARGET_MODE_REP_EXTENDED.  */
tilegx_mode_rep_extended (scalar_int_mode mode, scalar_int_mode mode_rep)

  /* SImode register values are sign-extended to DImode.  */
  if (mode == SImode && mode_rep == DImode)
/* Implement TARGET_FUNCTION_ARG_BOUNDARY.  */
tilegx_function_arg_boundary (machine_mode mode, const_tree type)

  unsigned int alignment;

  alignment = type ? TYPE_ALIGN (type) : GET_MODE_ALIGNMENT (mode);
  if (alignment < PARM_BOUNDARY)
    alignment = PARM_BOUNDARY;
  if (alignment > STACK_BOUNDARY)
    alignment = STACK_BOUNDARY;
/* Implement TARGET_FUNCTION_ARG.  */
tilegx_function_arg (cumulative_args_t cum_v,
		     const_tree type, bool named ATTRIBUTE_UNUSED)

  CUMULATIVE_ARGS cum = *get_cumulative_args (cum_v);
  int byte_size = ((mode == BLKmode)
		   ? int_size_in_bytes (type) : GET_MODE_SIZE (mode));
  bool doubleword_aligned_p;

  if (cum >= TILEGX_NUM_ARG_REGS)

  /* See whether the argument has doubleword alignment.  */
  doubleword_aligned_p =
    tilegx_function_arg_boundary (mode, type) > BITS_PER_WORD;

  if (doubleword_aligned_p)

  /* The ABI does not allow parameters to be passed partially in reg
     and partially in stack.  */
  if ((cum + (byte_size + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
      > TILEGX_NUM_ARG_REGS)

  return gen_rtx_REG (mode, cum);
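
/* Worked example (illustrative only; the register count is an
   assumption about the TILE-Gx ABI, not stated in this fragment):
   assuming TILEGX_NUM_ARG_REGS is 10, a three-word structure arriving
   when CUM is already 8 would need r8..r10.  Because an argument may
   not be split between registers and stack, the check above rejects
   the register assignment and the whole structure is passed on the
   stack instead.  */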
/* Implement TARGET_FUNCTION_ARG_ADVANCE.  */
tilegx_function_arg_advance (cumulative_args_t cum_v,
			     const_tree type, bool named ATTRIBUTE_UNUSED)

  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int byte_size = ((mode == BLKmode)
		   ? int_size_in_bytes (type) : GET_MODE_SIZE (mode));
  int word_size = (byte_size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
  bool doubleword_aligned_p;

  /* See whether the argument has doubleword alignment.  */
  doubleword_aligned_p =
    tilegx_function_arg_boundary (mode, type) > BITS_PER_WORD;

  if (doubleword_aligned_p)

  /* If the current argument does not fit in the pretend_args space ...  */
  if (*cum < TILEGX_NUM_ARG_REGS
      && *cum + word_size > TILEGX_NUM_ARG_REGS)
    *cum = TILEGX_NUM_ARG_REGS;
/* Implement TARGET_FUNCTION_VALUE.  */
tilegx_function_value (const_tree valtype, const_tree fn_decl_or_type,
		       bool outgoing ATTRIBUTE_UNUSED)

  mode = TYPE_MODE (valtype);
  unsigned_p = TYPE_UNSIGNED (valtype);

  mode = promote_function_mode (valtype, mode, &unsigned_p,

  return gen_rtx_REG (mode, 0);
/* Implement TARGET_LIBCALL_VALUE.  */
tilegx_libcall_value (machine_mode mode,
		      const_rtx fun ATTRIBUTE_UNUSED)

  return gen_rtx_REG (mode, 0);

/* Implement FUNCTION_VALUE_REGNO_P.  */
tilegx_function_value_regno_p (const unsigned int regno)

  return regno < TILEGX_NUM_RETURN_REGS;
/* Implement TARGET_BUILD_BUILTIN_VA_LIST.  */
tilegx_build_builtin_va_list (void)

  tree f_args, f_skip, record, type_decl;

  record = lang_hooks.types.make_type (RECORD_TYPE);

  type_decl = build_decl (BUILTINS_LOCATION, TYPE_DECL,
			  get_identifier ("__va_list_tag"), record);

  f_args = build_decl (BUILTINS_LOCATION, FIELD_DECL,
		       get_identifier ("__args"), ptr_type_node);
  f_skip = build_decl (BUILTINS_LOCATION, FIELD_DECL,
		       get_identifier ("__skip"), ptr_type_node);

  DECL_FIELD_CONTEXT (f_args) = record;
  DECL_FIELD_CONTEXT (f_skip) = record;

  TREE_CHAIN (record) = type_decl;
  TYPE_NAME (record) = type_decl;
  TYPE_FIELDS (record) = f_args;
  TREE_CHAIN (f_args) = f_skip;

  /* We know this is being padded and we want it too.  It is an
     internal type so hide the warnings from the user.  */

  layout_type (record);

  /* The correct type is an array type of one element.  */
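
/* Illustrative sketch (inferred from the fields created above, not a
   declaration taken from this file): the record behaves roughly like

     struct __va_list_tag
     {
       void *__args;   // next slot in the argument save area
       void *__skip;   // boundary between register-save and stack args
     };

   wrapped in a one-element array type, matching the comment just
   above about the correct type being an array of one element.  */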
/* Implement TARGET_EXPAND_BUILTIN_VA_START.  */
tilegx_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)

  f_args = TYPE_FIELDS (TREE_TYPE (valist));
  f_skip = TREE_CHAIN (f_args);

    build3 (COMPONENT_REF, TREE_TYPE (f_args), valist, f_args, NULL_TREE);
    build3 (COMPONENT_REF, TREE_TYPE (f_skip), valist, f_skip, NULL_TREE);

  /* Find the __args area.  */
  t = make_tree (TREE_TYPE (args), virtual_incoming_args_rtx);
  t = fold_build_pointer_plus_hwi (t,
				   (crtl->args.info - TILEGX_NUM_ARG_REGS));

  if (crtl->args.pretend_args_size > 0)
    t = fold_build_pointer_plus_hwi (t, -STACK_POINTER_OFFSET);

  t = build2 (MODIFY_EXPR, TREE_TYPE (args), args, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the __skip area.  */
  t = make_tree (TREE_TYPE (skip), virtual_incoming_args_rtx);
  t = fold_build_pointer_plus_hwi (t, -STACK_POINTER_OFFSET);
  t = build2 (MODIFY_EXPR, TREE_TYPE (skip), skip, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* Implement TARGET_SETUP_INCOMING_VARARGS.  */
tilegx_setup_incoming_varargs (cumulative_args_t cum,
			       tree type, int *pretend_args, int no_rtl)

  CUMULATIVE_ARGS local_cum = *get_cumulative_args (cum);

  /* The caller has advanced CUM up to, but not beyond, the last named
     argument.  Advance a local copy of CUM past the last "real" named
     argument, to find out how many registers are left over.  */
  targetm.calls.function_arg_advance (pack_cumulative_args (&local_cum),

  first_reg = local_cum;

  if (local_cum < TILEGX_NUM_ARG_REGS)

      *pretend_args = UNITS_PER_WORD * (TILEGX_NUM_ARG_REGS - first_reg);

	  alias_set_type set = get_varargs_alias_set ();

	    gen_rtx_MEM (BLKmode, plus_constant (Pmode,
						 virtual_incoming_args_rtx,
						 -STACK_POINTER_OFFSET -
						 (TILEGX_NUM_ARG_REGS -

	  MEM_NOTRAP_P (tmp) = 1;
	  set_mem_alias_set (tmp, set);
	  move_block_from_reg (first_reg, tmp,
			       TILEGX_NUM_ARG_REGS - first_reg);
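
/* Illustrative picture of what the code above sets up (a sketch based
   on the surviving fragments; the exact offsets are assumptions): the
   unnamed argument registers first_reg .. TILEGX_NUM_ARG_REGS-1 are
   stored into a block just below the incoming-arguments area, so that
   va_arg can later walk register and stack arguments as one
   contiguous array:

     stack-passed arguments     <- virtual_incoming_args_rtx
     saved argument registers   <- ... - STACK_POINTER_OFFSET - save size

   *pretend_args reports the size of that register-save block.  */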
/* Implement TARGET_GIMPLIFY_VA_ARG_EXPR.  Gimplify va_arg by updating
   the va_list structure VALIST as required to retrieve an argument of
   type TYPE, and returning that argument.

   ret = va_arg(VALIST, TYPE);

   generates code equivalent to:

   paddedsize = (sizeof(TYPE) + 7) & -8;
   if (  (VALIST.__args + paddedsize > VALIST.__skip)
       & (VALIST.__args <= VALIST.__skip))
     addr = VALIST.__skip + STACK_POINTER_OFFSET;
   else
     addr = VALIST.__args;
   VALIST.__args = addr + paddedsize;
   if (BYTES_BIG_ENDIAN)
     ret = *(TYPE *)(addr + paddedsize - sizeof(TYPE));
tilegx_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
			     gimple_seq *post_p ATTRIBUTE_UNUSED)

  HOST_WIDE_INT size, rsize;
  bool pass_by_reference_p;

  f_args = TYPE_FIELDS (va_list_type_node);
  f_skip = TREE_CHAIN (f_args);

    build3 (COMPONENT_REF, TREE_TYPE (f_args), valist, f_args, NULL_TREE);
    build3 (COMPONENT_REF, TREE_TYPE (f_skip), valist, f_skip, NULL_TREE);

  addr = create_tmp_var (ptr_type_node, "va_arg");

  /* If an object is dynamically sized, a pointer to it is passed
     instead of the object itself.  */
  pass_by_reference_p = pass_by_reference (NULL, TYPE_MODE (type), type,

  if (pass_by_reference_p)
    type = build_pointer_type (type);

  size = int_size_in_bytes (type);
  rsize = ((size + UNITS_PER_WORD - 1) / UNITS_PER_WORD) * UNITS_PER_WORD;

  /* If the alignment of the type is greater than the default for a
     parameter, align to the STACK_BOUNDARY.  */
  if (TYPE_ALIGN (type) > PARM_BOUNDARY)

      /* Assert the only case we generate code for: when
	 stack boundary = 2 * parm boundary.  */
      gcc_assert (STACK_BOUNDARY == PARM_BOUNDARY * 2);

      tmp = build2 (BIT_AND_EXPR, sizetype,
		    fold_convert (sizetype, unshare_expr (args)),
		    size_int (PARM_BOUNDARY / 8));
      tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
		    unshare_expr (args), tmp);

      gimplify_assign (unshare_expr (args), tmp, pre_p);

  /* Build conditional expression to calculate addr.  The expression
     will be gimplified later.  */
  tmp = fold_build_pointer_plus_hwi (unshare_expr (args), rsize);
  tmp = build2 (TRUTH_AND_EXPR, boolean_type_node,
		build2 (GT_EXPR, boolean_type_node, tmp, unshare_expr (skip)),
		build2 (LE_EXPR, boolean_type_node, unshare_expr (args),
			unshare_expr (skip)));

  tmp = build3 (COND_EXPR, ptr_type_node, tmp,
		build2 (POINTER_PLUS_EXPR, ptr_type_node, unshare_expr (skip),
			size_int (STACK_POINTER_OFFSET)),
		unshare_expr (args));

  /* Adjust the address of va_arg if it is in big endian mode.  */
  if (BYTES_BIG_ENDIAN && rsize > size)
    tmp = fold_build_pointer_plus_hwi (tmp, rsize - size);
  gimplify_assign (addr, tmp, pre_p);

  /* Update VALIST.__args.  */
  if (BYTES_BIG_ENDIAN && rsize > size)
    tmp = fold_build_pointer_plus_hwi (addr, size);
    tmp = fold_build_pointer_plus_hwi (addr, rsize);
  gimplify_assign (unshare_expr (args), tmp, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  if (pass_by_reference_p)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
/* Implement TARGET_RTX_COSTS.  */
tilegx_rtx_costs (rtx x, machine_mode mode, int outer_code, int opno,
		  int *total, bool speed)

  int code = GET_CODE (x);

      /* If this is an 8-bit constant, return zero since it can be
	 used nearly anywhere with no cost.  If it is a valid operand
	 for an ADD or AND, likewise return 0 if we know it will be
	 used in that context.  Otherwise, return 2 since it might be
	 used there later.  All other constants take at least two ...  */
      if (satisfies_constraint_I (x))
      else if (outer_code == PLUS && add_operand (x, VOIDmode))
	  /* Slightly penalize large constants even though we can add
	     them in one instruction, because it forces the use of
	     2-wide bundling mode.  */
      else if (move_operand (x, SImode))
	  /* We can materialize in one move.  */
	  *total = COSTS_N_INSNS (1);
	  /* We can materialize in two moves.  */
	  *total = COSTS_N_INSNS (2);

      *total = COSTS_N_INSNS (2);

      *total = COSTS_N_INSNS (4);

      /* If outer-code was a sign or zero extension, a cost of
	 COSTS_N_INSNS (1) was already added in, so account for ...  */
      if (outer_code == ZERO_EXTEND || outer_code == SIGN_EXTEND)
	*total = COSTS_N_INSNS (1);
	*total = COSTS_N_INSNS (2);

      /* Convey that shl[123]add are efficient.  */
      if (GET_CODE (XEXP (x, 0)) == MULT
	  && cint_248_operand (XEXP (XEXP (x, 0), 1), VOIDmode))
	  *total = (rtx_cost (XEXP (XEXP (x, 0), 0), mode,
			      (enum rtx_code) outer_code, opno, speed)
		    + rtx_cost (XEXP (x, 1), mode,
				(enum rtx_code) outer_code, opno, speed)
		    + COSTS_N_INSNS (1));

      *total = COSTS_N_INSNS (2);

      /* These are handled by software and are very expensive.  */
      *total = COSTS_N_INSNS (100);

    case UNSPEC_VOLATILE:
	int num = XINT (x, 1);

	if (num <= TILEGX_LAST_LATENCY_1_INSN)
	  *total = COSTS_N_INSNS (1);
	else if (num <= TILEGX_LAST_LATENCY_2_INSN)
	  *total = COSTS_N_INSNS (2);
	else if (num > TILEGX_LAST_LATENCY_INSN)
	    if (num == UNSPEC_NON_TEMPORAL)
		/* These are basically loads.  */
		if (outer_code == ZERO_EXTEND || outer_code == SIGN_EXTEND)
		  *total = COSTS_N_INSNS (1);
		  *total = COSTS_N_INSNS (2);
		if (outer_code == PLUS)
		  *total = COSTS_N_INSNS (1);

	  case UNSPEC_BLOCKAGE:
	  case UNSPEC_NETWORK_BARRIER:
	  case UNSPEC_LNK_AND_LABEL:
	  case UNSPEC_MOV_PCREL_STEP3:
	  case UNSPEC_NETWORK_RECEIVE:
	  case UNSPEC_NETWORK_SEND:
	  case UNSPEC_SPR_MOVE:
	  case UNSPEC_TLS_GD_ADD:
	    *total = COSTS_N_INSNS (1);

	  case UNSPEC_TLS_IE_LOAD:
	    *total = COSTS_N_INSNS (2);

	    *total = COSTS_N_INSNS (3);

	    *total = COSTS_N_INSNS (4);

	  case UNSPEC_INSN_CMPEXCH:
	  case UNSPEC_LATENCY_L2:
	    *total = COSTS_N_INSNS (11);

	  case UNSPEC_TLS_GD_CALL:
	    *total = COSTS_N_INSNS (30);

	  case UNSPEC_LATENCY_MISS:
	    *total = COSTS_N_INSNS (80);

	    *total = COSTS_N_INSNS (1);
/* Create a temporary variable to hold a partial result, to enable ...  */
create_temp_reg_if_possible (machine_mode mode, rtx default_reg)

  return can_create_pseudo_p () ? gen_reg_rtx (mode) : default_reg;

/* Functions to save and restore machine-specific function data.  */
static struct machine_function *
tilegx_init_machine_status (void)

  return ggc_cleared_alloc<machine_function> ();
/* Do anything needed before RTL is emitted for each function.  */
tilegx_init_expanders (void)

  /* Arrange to initialize and mark the machine per-function ...  */
  init_machine_status = tilegx_init_machine_status;

  if (cfun && cfun->machine && flag_pic)

      static int label_num = 0;

      char text_label_name[32];

      struct machine_function *machine = cfun->machine;

      ASM_GENERATE_INTERNAL_LABEL (text_label_name, "L_PICLNK", label_num++);

      machine->text_label_symbol =
	gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (text_label_name));

      machine->text_label_rtx =
	gen_rtx_REG (Pmode, TILEGX_PIC_TEXT_LABEL_REGNUM);

      machine->got_rtx = gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);

      machine->calls_tls_get_addr = false;
/* Implement TARGET_EXPAND_TO_RTL_HOOK.  */
tilegx_expand_to_rtl_hook (void)

  /* Exclude earlier sets of crtl->uses_pic_offset_table, because we
     only care about uses actually emitted.  */
  crtl->uses_pic_offset_table = 0;

/* Implement TARGET_SHIFT_TRUNCATION_MASK.  DImode shifts use the mode
   matching insns and therefore guarantee that the shift count is
   modulo 64.  SImode shifts sometimes use the 64 bit version so do
   not provide such a guarantee.  */
static unsigned HOST_WIDE_INT
tilegx_shift_truncation_mask (machine_mode mode)

  return mode == DImode ? 63 : 0;
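
/* Consequence of the mask above (an illustrative reading, not text
   from the original source): for DImode the middle-end may treat a
   variable shift count modulo 64, e.g. simplify (x << (n & 63)) to
   (x << n), because the DImode shift patterns only look at the low
   six bits of the count.  For SImode the mask is 0, so no such
   assumption is made there.  */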
/* Implement TARGET_INIT_LIBFUNCS.  */
tilegx_init_libfuncs (void)

  /* We need to explicitly generate these libfunc's to support
     conversion of divide by constant to multiply (the divide stubs in
     tilegx.md exist also for this reason).  Normally we'd expect gcc
     to lazily generate them when they are needed, but for some reason
     it's set up to only generate them if the mode is the word ...  */
  set_optab_libfunc (sdiv_optab, SImode, "__divsi3");
  set_optab_libfunc (udiv_optab, SImode, "__udivsi3");
  set_optab_libfunc (smod_optab, SImode, "__modsi3");
  set_optab_libfunc (umod_optab, SImode, "__umodsi3");
/* Return true if X contains a thread-local symbol.  */
tilegx_tls_referenced_p (rtx x)

  if (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == PLUS)
    x = XEXP (XEXP (x, 0), 0);

  if (GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (x))

  /* That's all we handle in tilegx_legitimize_tls_address for ...  */
/* Return true if X requires a scratch register.  It is given that
   flag_pic is on and that X satisfies CONSTANT_P.  */
tilegx_pic_address_needs_scratch (rtx x)

  if (GET_CODE (x) == CONST
      && GET_CODE (XEXP (x, 0)) == PLUS
      && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
	  || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
      && (CONST_INT_P (XEXP (XEXP (x, 0), 1))))
/* Implement TARGET_LEGITIMATE_CONSTANT_P.  This is all constants for
   which we are willing to load the value into a register via a move
   pattern.  TLS cannot be treated as a constant because it can
   include a function call.  */
tilegx_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)

  switch (GET_CODE (x))
      return !tilegx_tls_referenced_p (x);

/* Return true if the constant value X is a legitimate general operand
   when generating PIC code.  It is given that flag_pic is on and that
   X satisfies CONSTANT_P.  */
tilegx_legitimate_pic_operand_p (rtx x)

  if (tilegx_pic_address_needs_scratch (x))

  if (tilegx_tls_referenced_p (x))
/* Return true if the rtx X can be used as an address operand.  */
tilegx_legitimate_address_p (machine_mode ARG_UNUSED (mode), rtx x,

  if (GET_CODE (x) == SUBREG)

  switch (GET_CODE (x))
      if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)

      if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)

      if (GET_CODE (XEXP (x, 1)) != PLUS)

      if (!rtx_equal_p (XEXP (x, 0), XEXP (XEXP (x, 1), 0)))

      if (!satisfies_constraint_I (XEXP (XEXP (x, 1), 1)))

  /* Check if x is a valid reg.  */
  return REGNO_OK_FOR_BASE_P (REGNO (x));
/* Return the rtx containing SYMBOL_REF to the text label.  */
tilegx_text_label_symbol (void)

  return cfun->machine->text_label_symbol;

/* Return the register storing the value of the text label.  */
tilegx_text_label_rtx (void)

  return cfun->machine->text_label_rtx;

/* Return the register storing the value of the global offset ...  */
tilegx_got_rtx (void)

  return cfun->machine->got_rtx;

/* Return the SYMBOL_REF for _GLOBAL_OFFSET_TABLE_.  */
tilegx_got_symbol (void)

  if (g_got_symbol == NULL)
    g_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
/* Return a reference to the got to be used by tls references.  */
tilegx_tls_got (void)

      crtl->uses_pic_offset_table = 1;
      return tilegx_got_rtx ();

  temp = gen_reg_rtx (Pmode);
  emit_move_insn (temp, tilegx_got_symbol ());
/* ADDR contains a thread-local SYMBOL_REF.  Generate code to compute
   this (thread-local) address.  */
tilegx_legitimize_tls_address (rtx addr)

  gcc_assert (can_create_pseudo_p ());

  if (GET_CODE (addr) == SYMBOL_REF)
    switch (SYMBOL_REF_TLS_MODEL (addr))
      case TLS_MODEL_GLOBAL_DYNAMIC:
      case TLS_MODEL_LOCAL_DYNAMIC:
	  rtx r0, temp, temp2, temp3, got;

	  ret = gen_reg_rtx (Pmode);
	  r0 = gen_rtx_REG (Pmode, 0);
	  temp = gen_reg_rtx (Pmode);
	  temp2 = gen_reg_rtx (Pmode);
	  temp3 = gen_reg_rtx (Pmode);

	  got = tilegx_tls_got ();
	      emit_insn (gen_mov_tls_gd_step1_32bit (temp, addr));
	      emit_insn (gen_mov_tls_gd_step2_32bit (temp2, temp, addr));
	      emit_insn (gen_tls_add_32bit (temp2, got, temp2, addr));
	      emit_insn (gen_mov_tls_gd_step1 (temp, addr));
	      emit_insn (gen_mov_tls_gd_step2 (temp2, temp, addr));
	      emit_insn (gen_tls_add (temp2, got, temp2, addr));

	  emit_move_insn (r0, temp2);
	    emit_insn (gen_tls_gd_call_32bit (addr));
	    emit_insn (gen_tls_gd_call (addr));

	  emit_move_insn (temp3, r0);
	    last = emit_insn (gen_tls_gd_add_32bit (ret, temp3, addr));
	    last = emit_insn (gen_tls_gd_add (ret, temp3, addr));

	  set_unique_reg_note (last, REG_EQUAL, copy_rtx (addr));

      case TLS_MODEL_INITIAL_EXEC:
	  rtx temp, temp2, temp3, got;

	  ret = gen_reg_rtx (Pmode);
	  temp = gen_reg_rtx (Pmode);
	  temp2 = gen_reg_rtx (Pmode);
	  temp3 = gen_reg_rtx (Pmode);

	  got = tilegx_tls_got ();
	      emit_insn (gen_mov_tls_ie_step1_32bit (temp, addr));
	      emit_insn (gen_mov_tls_ie_step2_32bit (temp2, temp, addr));
	      emit_insn (gen_tls_add_32bit (temp2, got, temp2, addr));
	      emit_insn (gen_tls_ie_load_32bit (temp3, temp2, addr));
	      emit_insn (gen_mov_tls_ie_step1 (temp, addr));
	      emit_insn (gen_mov_tls_ie_step2 (temp2, temp, addr));
	      emit_insn (gen_tls_add (temp2, got, temp2, addr));
	      emit_insn (gen_tls_ie_load (temp3, temp2, addr));

		  gen_rtx_PLUS (Pmode,
				THREAD_POINTER_REGNUM),
	  set_unique_reg_note (last, REG_EQUAL, copy_rtx (addr));

      case TLS_MODEL_LOCAL_EXEC:
	  ret = gen_reg_rtx (Pmode);
	  temp = gen_reg_rtx (Pmode);
	  temp2 = gen_reg_rtx (Pmode);

	      emit_insn (gen_mov_tls_le_step1_32bit (temp, addr));
	      emit_insn (gen_mov_tls_le_step2_32bit (temp2, temp, addr));
	      emit_insn (gen_mov_tls_le_step1 (temp, addr));
	      emit_insn (gen_mov_tls_le_step2 (temp2, temp, addr));

	  emit_move_insn (ret,
			  gen_rtx_PLUS (Pmode,
					THREAD_POINTER_REGNUM),
	  set_unique_reg_note (last, REG_EQUAL, copy_rtx (addr));

  else if (GET_CODE (addr) == CONST)

      gcc_assert (GET_CODE (XEXP (addr, 0)) == PLUS);

      base = tilegx_legitimize_tls_address (XEXP (XEXP (addr, 0), 0));
      offset = XEXP (XEXP (addr, 0), 1);

      base = force_operand (base, NULL_RTX);
      ret = force_reg (Pmode, gen_rtx_PLUS (Pmode, base, offset));
/* Returns a register that points to ADDR, a symbolic address, by
   computing its address relative to tilegx_text_label_symbol.  */
tilegx_compute_pcrel_address (rtx result, rtx addr)

  rtx text_label_symbol = tilegx_text_label_symbol ();
  rtx text_label_rtx = tilegx_text_label_rtx ();
  rtx temp, temp2, temp3;

  temp = create_temp_reg_if_possible (Pmode, result);
  temp2 = create_temp_reg_if_possible (Pmode, result);

      emit_insn (gen_mov_pcrel_step1_32bit (temp, addr, text_label_symbol));
      emit_insn (gen_mov_pcrel_step2_32bit (temp2, temp, addr,
					    text_label_symbol));
      emit_insn (gen_mov_pcrel_step3_32bit (result, temp2,
					    addr, text_label_symbol));
  else if (tilegx_cmodel == CM_LARGE_PIC)

      temp3 = create_temp_reg_if_possible (Pmode, result);
      emit_insn (gen_mov_large_pcrel_step1 (temp, addr, text_label_symbol));
      emit_insn (gen_mov_large_pcrel_step2 (temp2, temp, addr,
					    text_label_symbol));
      emit_insn (gen_mov_large_pcrel_step3 (temp3, temp2, addr,
					    text_label_symbol));
      emit_insn (gen_mov_large_pcrel_step4 (result, temp3,
					    addr, text_label_symbol));

      emit_insn (gen_mov_pcrel_step1 (temp, addr, text_label_symbol));
      emit_insn (gen_mov_pcrel_step2 (temp2, temp, addr, text_label_symbol));
      emit_insn (gen_mov_pcrel_step3 (result, temp2,
				      addr, text_label_symbol));
/* Returns a register that points to the plt entry of ADDR, a symbolic
   address, by computing its address relative to
   tilegx_text_label_symbol.  */
tilegx_compute_pcrel_plt_address (rtx result, rtx addr)

  rtx text_label_symbol = tilegx_text_label_symbol ();
  rtx text_label_rtx = tilegx_text_label_rtx ();
  rtx temp, temp2, temp3;

  temp = create_temp_reg_if_possible (Pmode, result);
  temp2 = create_temp_reg_if_possible (Pmode, result);

      emit_insn (gen_mov_plt_pcrel_step1_32bit (temp, addr,
						text_label_symbol));
      emit_insn (gen_mov_plt_pcrel_step2_32bit (temp2, temp, addr,
						text_label_symbol));
      emit_move_insn (result, gen_rtx_PLUS (Pmode, temp2, text_label_rtx));

      temp3 = create_temp_reg_if_possible (Pmode, result);

      emit_insn (gen_mov_plt_pcrel_step1 (temp, addr, text_label_symbol));
      emit_insn (gen_mov_plt_pcrel_step2 (temp2, temp, addr,
					  text_label_symbol));
      emit_insn (gen_mov_plt_pcrel_step3 (temp3, temp2, addr,
					  text_label_symbol));
      emit_move_insn (result, gen_rtx_PLUS (Pmode, temp3, text_label_rtx));
/* Legitimize PIC addresses.  If the address is already
   position-independent, we return ORIG.  Newly generated
   position-independent addresses go into a reg.  This is REG if
   nonzero, otherwise we allocate register(s) as necessary.  */
tilegx_legitimize_pic_address (rtx orig,
			       machine_mode mode ATTRIBUTE_UNUSED,

  if (GET_CODE (orig) == SYMBOL_REF)

      rtx address, pic_ref;

	  gcc_assert (can_create_pseudo_p ());
	  reg = gen_reg_rtx (Pmode);

      if (SYMBOL_REF_LOCAL_P (orig))

	  /* If not during reload, allocate another temp reg here for
	     loading in the address, so that these instructions can be
	     optimized properly.  */
	  rtx temp_reg = create_temp_reg_if_possible (Pmode, reg);
	  tilegx_compute_pcrel_address (temp_reg, orig);

	  /* Note: this is conservative.  We use the text_label but we
	     don't use the pic_offset_table.  However, in some cases
	     we may need the pic_offset_table (see
	     tilegx_fixup_pcrel_references).  */
	  crtl->uses_pic_offset_table = 1;

	  emit_move_insn (reg, address);

	  /* If not during reload, allocate another temp reg here for
	     loading in the address, so that these instructions can be
	     optimized properly.  */
	  rtx temp_reg = create_temp_reg_if_possible (Pmode, reg);

	  gcc_assert (flag_pic);
	      emit_insn (gen_add_got16_32bit (temp_reg,
	      emit_insn (gen_add_got16 (temp_reg,
					tilegx_got_rtx (), orig));

	      rtx temp_reg2 = create_temp_reg_if_possible (Pmode, reg);
	      rtx temp_reg3 = create_temp_reg_if_possible (Pmode, reg);

		  emit_insn (gen_mov_got32_step1_32bit (temp_reg3, orig));
		  emit_insn (gen_mov_got32_step2_32bit
			     (temp_reg2, temp_reg3, orig));
		  emit_insn (gen_mov_got32_step1 (temp_reg3, orig));
		  emit_insn (gen_mov_got32_step2 (temp_reg2, temp_reg3,

	      emit_move_insn (temp_reg,
			      gen_rtx_PLUS (Pmode,
					    tilegx_got_rtx (), temp_reg2));

	  pic_ref = gen_const_mem (Pmode, address);
	  crtl->uses_pic_offset_table = 1;
	  emit_move_insn (reg, pic_ref);
	  /* The following put a REG_EQUAL note on this insn, so that
	     it can be optimized by loop.  But it causes the label to
	     be optimized away.  */
	  /* set_unique_reg_note (insn, REG_EQUAL, orig); */

  else if (GET_CODE (orig) == CONST)

      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 0) == tilegx_got_rtx ())

	  gcc_assert (can_create_pseudo_p ());
	  reg = gen_reg_rtx (Pmode);

      gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
      base = tilegx_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
      offset = tilegx_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
					      base == reg ? 0 : reg);

      if (CONST_INT_P (offset))
	  if (can_create_pseudo_p ())
	    offset = force_reg (Pmode, offset);
	    /* If we reach here, then something is seriously wrong.  */

      if (can_create_pseudo_p ())
	return force_reg (Pmode, gen_rtx_PLUS (Pmode, base, offset));

  else if (GET_CODE (orig) == LABEL_REF)

      gcc_assert (can_create_pseudo_p ());
      reg = gen_reg_rtx (Pmode);

      /* If not during reload, allocate another temp reg here for
	 loading in the address, so that these instructions can be
	 optimized properly.  */
      temp_reg = create_temp_reg_if_possible (Pmode, reg);
      tilegx_compute_pcrel_address (temp_reg, orig);

      /* Note: this is conservative.  We use the text_label but we
	 don't use the pic_offset_table.  */
      crtl->uses_pic_offset_table = 1;

      emit_move_insn (reg, address);
/* Implement TARGET_LEGITIMIZE_ADDRESS.  */
tilegx_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,

  if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
      && symbolic_operand (x, Pmode) && tilegx_tls_referenced_p (x))
    return tilegx_legitimize_tls_address (x);

  return tilegx_legitimize_pic_address (x, mode, 0);
/* Implement TARGET_DELEGITIMIZE_ADDRESS.  */
tilegx_delegitimize_address (rtx x)

  x = delegitimize_mem_from_attrs (x);

  if (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == UNSPEC)
      switch (XINT (XEXP (x, 0), 1))
	case UNSPEC_HW0_LAST:
	case UNSPEC_HW1_LAST:
	case UNSPEC_HW2_LAST:
	case UNSPEC_HW0_PCREL:
	case UNSPEC_HW1_PCREL:
	case UNSPEC_HW1_LAST_PCREL:
	case UNSPEC_HW2_LAST_PCREL:
	case UNSPEC_HW0_PLT_PCREL:
	case UNSPEC_HW1_PLT_PCREL:
	case UNSPEC_HW1_LAST_PLT_PCREL:
	case UNSPEC_HW2_LAST_PLT_PCREL:
	case UNSPEC_HW0_GOT:
	case UNSPEC_HW0_LAST_GOT:
	case UNSPEC_HW1_LAST_GOT:
	case UNSPEC_HW0_TLS_GD:
	case UNSPEC_HW1_LAST_TLS_GD:
	case UNSPEC_HW0_TLS_IE:
	case UNSPEC_HW1_LAST_TLS_IE:
	case UNSPEC_HW0_TLS_LE:
	case UNSPEC_HW1_LAST_TLS_LE:
	  x = XVECEXP (XEXP (x, 0), 0, 0);
/* Emit code to load the PIC register.  */
load_pic_register (bool delay_pic_helper ATTRIBUTE_UNUSED)

  int orig_flag_pic = flag_pic;

  rtx got_symbol = tilegx_got_symbol ();
  rtx text_label_symbol = tilegx_text_label_symbol ();
  rtx text_label_rtx = tilegx_text_label_rtx ();

    emit_insn (gen_insn_lnk_and_label_32bit (text_label_rtx,
					     text_label_symbol));
    emit_insn (gen_insn_lnk_and_label (text_label_rtx, text_label_symbol));

  tilegx_compute_pcrel_address (tilegx_got_rtx (), got_symbol);

  flag_pic = orig_flag_pic;

  /* Need to emit this whether or not we obey regdecls, since
     setjmp/longjmp can cause life info to screw up.  ??? In the case
     where we don't obey regdecls, this is not sufficient since we may
     not fall out the bottom.  */
  emit_use (tilegx_got_rtx ());
/* Return the simd variant of the constant NUM of mode MODE, by
   replicating it to fill an integer of mode DImode.  NUM is first
   truncated to fit in MODE.  */
tilegx_simd_int (rtx num, machine_mode mode)

  HOST_WIDE_INT n = 0;

  gcc_assert (CONST_INT_P (num));

      n = 0x0101010101010101LL * (n & 0x000000FF);
      n = 0x0001000100010001LL * (n & 0x0000FFFF);
      n = 0x0000000100000001LL * (n & 0xFFFFFFFF);
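
/* Worked example (illustrative): replicating the QImode constant 0xA5
   across a 64-bit word uses the multiply above:

     0x0101010101010101 * 0xA5 == 0xA5A5A5A5A5A5A5A5

   and likewise 0x0001000100010001 * 0xBEEF replicates a 16-bit value
   into all four halfwords.  */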
/* Returns true iff VAL can be moved into a register in one
   instruction.  And if it can, it emits the code to move the constant ...

   If THREE_WIDE_ONLY is true, this insists on an instruction that
   works in a bundle containing three instructions.  */
expand_set_cint64_one_inst (rtx dest_reg,
			    HOST_WIDE_INT val, bool three_wide_only)

  if (val == trunc_int_for_mode (val, QImode))
      emit_move_insn (dest_reg, GEN_INT (val));
  else if (!three_wide_only)
      /* Test for the following constraints: J, K, N, P.  We avoid
	 generating an rtx and using existing predicates because we
	 can be testing and rejecting a lot of constants, and GEN_INT ...  */
      if ((val >= -32768 && val <= 65535)
	  || ((val == (val & 0xFF) * 0x0101010101010101LL))
	  || (val == ((trunc_int_for_mode (val, QImode) & 0xFFFF)
		      * 0x0001000100010001LL)))
	  emit_move_insn (dest_reg, GEN_INT (val));
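
/* Informal reading of the ranges tested above (an editorial gloss,
   not a definition of the constraint letters): single-instruction
   immediates are signed 8-bit values, 16-bit signed or unsigned
   values such as -32768..65535, and byte- or halfword-replicated
   patterns such as 0x2323232323232323.  */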
/* Implement DImode rotatert.  */
static HOST_WIDE_INT
rotate_right (HOST_WIDE_INT n, int count)

  unsigned HOST_WIDE_INT x = n & 0xFFFFFFFFFFFFFFFFULL;

  return ((x >> count) | (x << (64 - count))) & 0xFFFFFFFFFFFFFFFFULL;
/* Return true iff n contains exactly one contiguous sequence of 1
   bits, possibly wrapping around from high bits to low bits.  */
tilegx_bitfield_operand_p (HOST_WIDE_INT n, int *first_bit, int *last_bit)

  for (i = 0; i < 64; i++)

      unsigned HOST_WIDE_INT x = rotate_right (n, i);

      /* See if x is a power of two minus one, i.e. only consecutive 1
	 bits starting from bit 0.  */
      if ((x & (x + 1)) == 0)
	  if (first_bit != NULL)
	  if (last_bit != NULL)
	    *last_bit = (i + exact_log2 (x ^ (x >> 1))) & 63;
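
/* Worked example (illustrative): n = 0xF000000000000003 is a single
   run of ones that wraps from bit 60 around to bit 1.  Rotating right
   by i == 60 gives x == 0x3F, and x & (x + 1) == 0 recognizes the
   "power of two minus one" shape, so the loop reports the run's first
   and last bit positions.  */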
/* Create code to move the CONST_INT value in src_val to dest_reg.  */
expand_set_cint64 (rtx dest_reg, rtx src_val)

  int leading_zeroes, trailing_zeroes;
  int three_wide_only;
  int shift, ins_shift, zero_cluster_shift;

  gcc_assert (CONST_INT_P (src_val));
  val = trunc_int_for_mode (INTVAL (src_val), GET_MODE (dest_reg));

  /* See if we can generate the constant in one instruction.  */
  if (expand_set_cint64_one_inst (dest_reg, val, false))

  /* Force the destination to DImode so we can use DImode instructions
     to create it.  This both allows instructions like rotl, and
     certain efficient 3-wide instructions.  */
  subreg = simplify_gen_subreg (DImode, dest_reg, GET_MODE (dest_reg), 0);
  gcc_assert (subreg != NULL);

  temp = create_temp_reg_if_possible (DImode, dest_reg);

  leading_zeroes = 63 - floor_log2 (val & 0xFFFFFFFFFFFFFFFFULL);
  trailing_zeroes = exact_log2 (val & -val);

  /* First try all three-wide instructions that generate a constant
     (i.e. movei) followed by various shifts and rotates.  If none of
     those work, try various two-wide ways of generating a constant
     followed by various shifts and rotates.  */
  for (three_wide_only = 1; three_wide_only >= 0; three_wide_only--)

      if (expand_set_cint64_one_inst (temp, val >> trailing_zeroes,

	  /* 0xFFFFFFFFFFFFA500 becomes:
	     movei temp, 0xFFFFFFFFFFFFFFA5
	     shli dest, temp, 8  */
	  emit_move_insn (dest_reg,
			  gen_rtx_ASHIFT (DImode, temp,
					  GEN_INT (trailing_zeroes)));

      if (expand_set_cint64_one_inst (temp, val << leading_zeroes,

	  /* 0x7FFFFFFFFFFFFFFF becomes:
	     shrui dest, temp, 1  */
	  emit_move_insn (dest_reg,
			  gen_rtx_LSHIFTRT (DImode, temp,
					    GEN_INT (leading_zeroes)));

      /* Try rotating a one-instruction immediate.  */
      for (count = 1; count < 64; count++)

	  HOST_WIDE_INT r = rotate_right (val, count);
	  if (expand_set_cint64_one_inst (temp, r, three_wide_only))

	      /* 0xFFFFFFFFFFA5FFFF becomes:
		 movei temp, 0xFFFFFFFFFFFFFFA5
		 rotli dest, temp, 16  */
	      emit_move_insn (dest_reg,
			      gen_rtx_ROTATE (DImode, temp, GEN_INT (count)));

  /* There are two cases here to produce a large constant.
     In the most general case, we do this:

     shl16insli x, x, hw2(NUM)
     shl16insli x, x, hw1(NUM)
     shl16insli x, x, hw0(NUM)

     However, we can sometimes do better.  shl16insli is a poor way to
     insert 16 zero bits, because simply shifting left by 16 has more
     bundling freedom.  So if we see any contiguous aligned sequence
     of 16 or more zero bits (below the highest set bit), it is always
     more efficient to materialize the bits above the zero bits, then
     left shift to put in the zeroes, then insert whatever bits
     remain.  For example, we might end up with:

     movei x, NUM >> (37 + 16)
     shl16insli x, x, hw0(NUM)  */

  zero_cluster_shift = -1;

  for (shift = 0; shift < 48 - leading_zeroes; shift += 16)

      HOST_WIDE_INT x = val >> shift;

      /* Find the least significant group of 16 aligned zero bits.  */
      if ((x & 0xFFFF) == 0x0000)

	  /* Grab any following zero bits as well.  */
	  zero_cluster_shift = exact_log2 (x & -x);
	  shift += zero_cluster_shift;

  if (zero_cluster_shift >= 0)

      unsigned HOST_WIDE_INT leftover;

      /* Recursively create the constant above the lowest 16 zero ...  */
      expand_set_cint64 (temp, GEN_INT (val >> shift));

      /* See if we can easily insert the remaining bits, or if we need
	 to fall through to the more general case.  */
      leftover = val - ((val >> shift) << shift);

	  /* A simple left shift is enough.  */
	  emit_move_insn (dest_reg,
			  gen_rtx_ASHIFT (DImode, temp, GEN_INT (shift)));
      else if (leftover <= 32767)

	  /* Left shift into position then add in the leftover.  */
	  rtx temp2 = create_temp_reg_if_possible (DImode, temp);
	  emit_move_insn (temp2,
			  gen_rtx_ASHIFT (DImode, temp, GEN_INT (shift)));
	  emit_move_insn (dest_reg,
			  gen_rtx_PLUS (DImode, temp2, GEN_INT (leftover)));

	  /* Shift in the batch of >= 16 zeroes we detected earlier.
	     After this, shift will be aligned mod 16 so the final
	     loop can use shl16insli.  */
	  rtx temp2 = create_temp_reg_if_possible (DImode, temp);
	  rtx shift_count_rtx = GEN_INT (zero_cluster_shift);

	  emit_move_insn (temp2,
			  gen_rtx_ASHIFT (DImode, temp, shift_count_rtx));

	  shift -= zero_cluster_shift;

      /* Set as many high 16-bit blocks as we can with a single
	 instruction.  We'll insert the remaining 16-bit blocks ...  */
      for (shift = 16;; shift += 16)

	  gcc_assert (shift < 64);
	  if (expand_set_cint64_one_inst (temp, val >> shift, false))

  /* At this point, temp == val >> shift, shift % 16 == 0, and we
     still need to insert any bits of 'val' below 'shift'.  Those bits
     are guaranteed to not have 16 contiguous zeroes.  */

  gcc_assert ((shift & 15) == 0);

  for (ins_shift = shift - 16; ins_shift >= 0; ins_shift -= 16)

      HOST_WIDE_INT bits = (val >> ins_shift) & 0xFFFF;
      gcc_assert (bits != 0);

      /* On the last iteration we need to store into dest_reg.  */
	result = create_temp_reg_if_possible (DImode, dest_reg);

      emit_insn (gen_insn_shl16insli (result, temp, GEN_INT (bits)));
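
/* Worked example of the zero-cluster path described above
   (illustrative value, not taken from the source): a constant such as
   0x5678000000001234 has an aligned run of more than 16 zero bits in
   the middle, so instead of three dependent shl16insli steps the code
   materializes the bits above the zero run, emits one plain left
   shift to reintroduce the zeroes, and then needs only a single
   shl16insli for the low 0x1234 group.  */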
/* Load OP1, a 64-bit constant, into OP0, a register.  We know it
   can't be done in one insn when we get here, the move expander ...  */
tilegx_expand_set_const64 (rtx op0, rtx op1)

  if (CONST_INT_P (op1))

      /* TODO: I don't know if we want to split large constants
	 now, or wait until later (with a define_split).

	 Does splitting early help CSE?  Does it harm other
	 optimizations that might fold loads?  */
      expand_set_cint64 (op0, op1);

      rtx temp = create_temp_reg_if_possible (Pmode, op0);

	  /* Generate the 2-insn sequence to materialize a symbolic ...  */
	  emit_insn (gen_mov_address_32bit_step1 (temp, op1));
	  emit_insn (gen_mov_address_32bit_step2 (op0, temp, op1));

	  /* Generate the 3-insn sequence to materialize a symbolic
	     address.  Note that this assumes that virtual addresses
	     fit in 48 signed bits, which is currently true.  */
	  rtx temp2 = create_temp_reg_if_possible (Pmode, op0);
	  emit_insn (gen_mov_address_step1 (temp, op1));
	  emit_insn (gen_mov_address_step2 (temp2, temp, op1));
	  emit_insn (gen_mov_address_step3 (op0, temp2, op1));
/* Expand a move instruction.  Return true if all work is done.  */
tilegx_expand_mov (machine_mode mode, rtx *operands)

  /* Handle sets of MEM first.  */
  if (MEM_P (operands[0]))

      if (can_create_pseudo_p ())
	operands[0] = validize_mem (operands[0]);

      if (reg_or_0_operand (operands[1], mode))

      if (!reload_in_progress)
	operands[1] = force_reg (mode, operands[1]);

  /* Fixup TLS cases.  */
  if (CONSTANT_P (operands[1]) && tilegx_tls_referenced_p (operands[1]))

      operands[1] = tilegx_legitimize_tls_address (operands[1]);

  /* Fixup PIC cases.  */
  if (flag_pic && CONSTANT_P (operands[1]))

      if (tilegx_pic_address_needs_scratch (operands[1]))
	operands[1] = tilegx_legitimize_pic_address (operands[1], mode, 0);

      if (symbolic_operand (operands[1], mode))

	  operands[1] = tilegx_legitimize_pic_address (operands[1],
						       (reload_in_progress ?

  /* Accept non-constants and valid constants unmodified.  */
  if (!CONSTANT_P (operands[1]) || move_operand (operands[1], mode))

  /* Split large integers.  */
  tilegx_expand_set_const64 (operands[0], operands[1]);
/* Expand unaligned loads.  */
tilegx_expand_unaligned_load (rtx dest_reg, rtx mem, HOST_WIDE_INT bitsize,
			      HOST_WIDE_INT bit_offset, bool sign)

  rtx addr_lo, addr_hi;
  rtx mem_lo, mem_hi, hi;
  rtx mema, wide_result;
  int last_byte_offset;
  HOST_WIDE_INT byte_offset = bit_offset / BITS_PER_UNIT;

  mode = GET_MODE (dest_reg);

  if (bitsize == 2 * BITS_PER_UNIT && (bit_offset % BITS_PER_UNIT) == 0)

      rtx mem_left, mem_right;
      rtx left = gen_reg_rtx (mode);

      /* When just loading a two byte value, we can load the two bytes
	 individually and combine them efficiently.  */

      mem_lo = adjust_address (mem, QImode, byte_offset);
      mem_hi = adjust_address (mem, QImode, byte_offset + 1);

      if (BYTES_BIG_ENDIAN)

	  /* Do a signed load of the second byte and use bfins to set
	     the high bits of the result.  */
	  emit_insn (gen_zero_extendqidi2 (gen_lowpart (DImode, dest_reg),
	  emit_insn (gen_extendqidi2 (gen_lowpart (DImode, left), mem_left));
	  emit_insn (gen_insv (gen_lowpart (DImode, dest_reg),
			       GEN_INT (64 - 8), GEN_INT (8),
			       gen_lowpart (DImode, left)));

	  /* Do two unsigned loads and use v1int_l to interleave ...  */
	  rtx right = gen_reg_rtx (mode);
	  emit_insn (gen_zero_extendqidi2 (gen_lowpart (DImode, right),
	  emit_insn (gen_zero_extendqidi2 (gen_lowpart (DImode, left),
	  emit_insn (gen_insn_v1int_l (gen_lowpart (DImode, dest_reg),
				       gen_lowpart (DImode, left),
				       gen_lowpart (DImode, right)));

  mema = XEXP (mem, 0);

  /* AND addresses cannot be in any alias set, since they may
     implicitly alias surrounding code.  Ideally we'd have some alias
     set that covered all types except those with alignment 8 or ...  */
  addr_lo = force_reg (Pmode, plus_constant (Pmode, mema, byte_offset));
  mem_lo = change_address (mem, mode,
			   gen_rtx_AND (GET_MODE (mema), addr_lo,
  set_mem_alias_set (mem_lo, 0);

  /* Load the high word at an address that will not fault if the low
     address is aligned and at the very end of a page.  */
  last_byte_offset = (bit_offset + bitsize - 1) / BITS_PER_UNIT;
  addr_hi = force_reg (Pmode, plus_constant (Pmode, mema, last_byte_offset));
  mem_hi = change_address (mem, mode,
			   gen_rtx_AND (GET_MODE (mema), addr_hi,
  set_mem_alias_set (mem_hi, 0);

      addr_lo = make_safe_from (addr_lo, dest_reg);
      wide_result = dest_reg;

      wide_result = gen_reg_rtx (mode);

  /* Load hi first in case dest_reg is used in mema.  */
  hi = gen_reg_rtx (mode);
  emit_move_insn (hi, mem_hi);
  emit_move_insn (wide_result, mem_lo);

  emit_insn (gen_insn_dblalign (gen_lowpart (DImode, wide_result),
				gen_lowpart (DImode, wide_result),
				gen_lowpart (DImode, hi), addr_lo));

    extract_bit_field (gen_lowpart (DImode, wide_result),
		       bitsize, bit_offset % BITS_PER_UNIT,
		       !sign, gen_lowpart (DImode, dest_reg),
		       DImode, DImode, false, NULL);

  if (extracted != dest_reg)
    emit_move_insn (dest_reg, gen_lowpart (DImode, extracted));
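
/* Summary of the general path above (an illustrative sketch, not a
   literal restatement of the emitted RTL):

     lo = *(aligned word containing the first byte);
     hi = *(aligned word containing the last byte);
     result = dblalign (lo, hi, original address);

   The AND-masked addresses keep both loads word aligned, so the
   second load cannot fault past the end of a page when the original
   unaligned access would not have.  */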
/* Expand unaligned stores.  */
tilegx_expand_unaligned_store (rtx mem, rtx src, HOST_WIDE_INT bitsize,
			       HOST_WIDE_INT bit_offset)

  HOST_WIDE_INT byte_offset = bit_offset / BITS_PER_UNIT;
  HOST_WIDE_INT bytesize = bitsize / BITS_PER_UNIT;
  HOST_WIDE_INT shift_init, shift_increment, shift_amt;

  shift_init = BYTES_BIG_ENDIAN ? (bitsize - BITS_PER_UNIT) : 0;
  shift_increment = BYTES_BIG_ENDIAN ? -BITS_PER_UNIT : BITS_PER_UNIT;

  for (i = 0, shift_amt = shift_init;
       i++, shift_amt += shift_increment)

      mem_addr = adjust_address (mem, QImode, byte_offset + i);

	  store_val = expand_simple_binop (DImode, LSHIFTRT,
					   gen_lowpart (DImode, src),
					   GEN_INT (shift_amt), NULL, 1,
	  store_val = gen_lowpart (QImode, store_val);

	  store_val = gen_lowpart (QImode, src);

      emit_move_insn (mem_addr, store_val);
/* Implement the movmisalign patterns.  One of the operands is a
   memory that is not naturally aligned.  Emit instructions to load ...  */
tilegx_expand_movmisalign (machine_mode mode, rtx *operands)

  if (MEM_P (operands[1]))

      if (register_operand (operands[0], mode))
	tmp = gen_reg_rtx (mode);

      tilegx_expand_unaligned_load (tmp, operands[1], GET_MODE_BITSIZE (mode),

      if (tmp != operands[0])
	emit_move_insn (operands[0], tmp);

  else if (MEM_P (operands[0]))

      if (!reg_or_0_operand (operands[1], mode))
	operands[1] = force_reg (mode, operands[1]);

      tilegx_expand_unaligned_store (operands[0], operands[1],
				     GET_MODE_BITSIZE (mode), 0);
/* Implement the allocate_stack pattern (alloca).  */
tilegx_allocate_stack (rtx op0, rtx op1)

  /* Technically the correct way to initialize chain_loc is with
   * gen_frame_mem() instead of gen_rtx_MEM(), but gen_frame_mem()
   * sets the alias_set to that of a frame reference.  Some of our
   * tests rely on some unsafe assumption about when the chaining
   * update is done; we need to be conservative about reordering the
   * chaining instructions.
   */
  rtx fp_addr = gen_reg_rtx (Pmode);
  rtx fp_value = gen_reg_rtx (Pmode);

  emit_move_insn (fp_addr, gen_rtx_PLUS (Pmode, stack_pointer_rtx,
					 GEN_INT (UNITS_PER_WORD)));

  fp_loc = gen_frame_mem (Pmode, fp_addr);

  emit_move_insn (fp_value, fp_loc);

  op1 = force_reg (Pmode, op1);

  emit_move_insn (stack_pointer_rtx,
		  gen_rtx_MINUS (Pmode, stack_pointer_rtx, op1));

  emit_move_insn (fp_addr, gen_rtx_PLUS (Pmode, stack_pointer_rtx,
					 GEN_INT (UNITS_PER_WORD)));

  fp_loc = gen_frame_mem (Pmode, fp_addr);

  emit_move_insn (fp_loc, fp_value);

  emit_move_insn (op0, virtual_stack_dynamic_rtx);
/* Returns the insn_code in ENTRY.  */
static enum insn_code
tilegx_multiply_get_opcode (const struct tilegx_multiply_insn_seq_entry

  return tilegx_multiply_insn_seq_decode_opcode[entry->compressed_opcode];

/* Returns the length of the 'op' array.  */
tilegx_multiply_get_num_ops (const struct tilegx_multiply_insn_seq *seq)

  /* The array either uses all of its allocated slots or is terminated
     by a bogus opcode.  Either way, the array size is the index of the
     last valid opcode plus one.  */
  for (i = tilegx_multiply_insn_seq_MAX_OPERATIONS - 1; i >= 0; i--)
    if (tilegx_multiply_get_opcode (&seq->op[i]) != CODE_FOR_nothing)

  /* An empty array is not allowed.  */
/* We precompute a number of expression trees for multiplying by
   constants.  This generates code for such an expression tree by
   walking through the nodes in the tree (which are conveniently
   pre-linearized) and emitting an instruction for each one.  */
tilegx_expand_constant_multiply_given_sequence (rtx result, rtx src,
						tilegx_multiply_insn_seq *seq)

  /* Keep track of the subexpressions computed so far, so later
     instructions can refer to them.  We seed the array with zero and
     the value being multiplied.  */
  int num_subexprs = 2;
  rtx subexprs[tilegx_multiply_insn_seq_MAX_OPERATIONS + 2];
  subexprs[0] = const0_rtx;

  /* Determine how many instructions we are going to generate.  */
  num_ops = tilegx_multiply_get_num_ops (seq);
  gcc_assert (num_ops > 0
	      && num_ops <= tilegx_multiply_insn_seq_MAX_OPERATIONS);

  for (i = 0; i < num_ops; i++)

      const struct tilegx_multiply_insn_seq_entry *entry = &seq->op[i];

      /* Figure out where to store the output of this instruction.  */
      const bool is_last_op = (i + 1 == num_ops);
      rtx out = is_last_op ? result : gen_reg_rtx (DImode);

      enum insn_code opcode = tilegx_multiply_get_opcode (entry);
      if (opcode == CODE_FOR_ashldi3)

	  /* Handle shift by immediate.  This is a special case because
	     the meaning of the second operand is a constant shift
	     count rather than an operand index.  */

	  /* Make sure the shift count is in range.  Zero should not ...  */
	  const int shift_count = entry->rhs;
	  gcc_assert (shift_count > 0 && shift_count < 64);

	  /* Emit the actual instruction.  */
	  emit_insn (GEN_FCN (opcode)
		     (out, subexprs[entry->lhs],
		      gen_rtx_CONST_INT (DImode, shift_count)));

	  /* Handle a normal two-operand instruction, such as add or ...  */

	  /* Make sure we are referring to a previously computed ...  */
	  gcc_assert (entry->rhs < num_subexprs);

	  /* Emit the actual instruction.  */
	  emit_insn (GEN_FCN (opcode)
		     (out, subexprs[entry->lhs], subexprs[entry->rhs]));

      /* Record this subexpression for use by later expressions.  */
      subexprs[num_subexprs++] = out;
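
/* Illustrative example of such a pre-linearized sequence (made up for
   exposition; the real entries live in the generated table declared
   via tilegx-multiply.h): multiplying by 10 could be expressed as

     t1  = shl2add (src, src);   // t1 = 4*src + src = src * 5
     out = t1 << 1;              // out = src * 10

   where each step refers only to subexpressions computed earlier,
   exactly as the emission loop above assumes.  */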
/* bsearch helper function.  */
tilegx_compare_multipliers (const void *key, const void *t)

    (*(const long long *) key
     - ((const struct tilegx_multiply_insn_seq *) t)->multiplier);
  return (delta < 0) ? -1 : (delta > 0);

/* Returns the tilegx_multiply_insn_seq for multiplier, or NULL if none ...  */
static const struct tilegx_multiply_insn_seq *
tilegx_find_multiply_insn_seq_for_constant (long long multiplier)

  return ((const struct tilegx_multiply_insn_seq *)
	  bsearch (&multiplier, tilegx_multiply_insn_seq_table,
		   tilegx_multiply_insn_seq_table_size,
		   sizeof tilegx_multiply_insn_seq_table[0],
		   tilegx_compare_multipliers));
/* Try to expand a constant multiply in DImode by looking it up in a
   precompiled table.  OP0 is the result operand, OP1 is the source
   operand, and MULTIPLIER is the value of the constant.  Return true ...  */
tilegx_expand_const_muldi (rtx op0, rtx op1, long long multiplier)

  /* See if we have precomputed an efficient way to multiply by this ...  */
  const struct tilegx_multiply_insn_seq *seq =
    tilegx_find_multiply_insn_seq_for_constant (multiplier);

      tilegx_expand_constant_multiply_given_sequence (op0, op1, seq);

/* Expand the muldi pattern.  */
tilegx_expand_muldi (rtx op0, rtx op1, rtx op2)

  if (CONST_INT_P (op2))

      HOST_WIDE_INT n = trunc_int_for_mode (INTVAL (op2), DImode);
      return tilegx_expand_const_muldi (op0, op1, n);
/* Expand a high multiply pattern in DImode.  RESULT, OP1, OP2 are the
   operands, and SIGN is true if it's a signed multiply, and false if
   it's an unsigned multiply.  */
tilegx_expand_high_multiply (rtx result, rtx op1, rtx op2, bool sign)

  rtx tmp0 = gen_reg_rtx (DImode);
  rtx tmp1 = gen_reg_rtx (DImode);
  rtx tmp2 = gen_reg_rtx (DImode);
  rtx tmp3 = gen_reg_rtx (DImode);
  rtx tmp4 = gen_reg_rtx (DImode);
  rtx tmp5 = gen_reg_rtx (DImode);
  rtx tmp6 = gen_reg_rtx (DImode);
  rtx tmp7 = gen_reg_rtx (DImode);
  rtx tmp8 = gen_reg_rtx (DImode);
  rtx tmp9 = gen_reg_rtx (DImode);
  rtx tmp10 = gen_reg_rtx (DImode);
  rtx tmp11 = gen_reg_rtx (DImode);
  rtx tmp12 = gen_reg_rtx (DImode);
  rtx tmp13 = gen_reg_rtx (DImode);
  rtx result_lo = gen_reg_rtx (DImode);

      emit_insn (gen_insn_mul_hs_lu (tmp0, op1, op2));
      emit_insn (gen_insn_mul_hs_lu (tmp1, op2, op1));
      emit_insn (gen_insn_mul_lu_lu (tmp2, op1, op2));
      emit_insn (gen_insn_mul_hs_hs (tmp3, op1, op2));

      emit_insn (gen_insn_mul_hu_lu (tmp0, op1, op2));
      emit_insn (gen_insn_mul_hu_lu (tmp1, op2, op1));
      emit_insn (gen_insn_mul_lu_lu (tmp2, op1, op2));
      emit_insn (gen_insn_mul_hu_hu (tmp3, op1, op2));

  emit_move_insn (tmp4, (gen_rtx_ASHIFT (DImode, tmp0, GEN_INT (32))));
  emit_move_insn (tmp5, (gen_rtx_ASHIFT (DImode, tmp1, GEN_INT (32))));

  emit_move_insn (tmp6, (gen_rtx_PLUS (DImode, tmp4, tmp5)));
  emit_move_insn (result_lo, (gen_rtx_PLUS (DImode, tmp2, tmp6)));

  emit_move_insn (tmp7, gen_rtx_LTU (DImode, tmp6, tmp4));
  emit_move_insn (tmp8, gen_rtx_LTU (DImode, result_lo, tmp2));

      emit_move_insn (tmp9, (gen_rtx_ASHIFTRT (DImode, tmp0, GEN_INT (32))));
      emit_move_insn (tmp10, (gen_rtx_ASHIFTRT (DImode, tmp1, GEN_INT (32))));

      emit_move_insn (tmp9, (gen_rtx_LSHIFTRT (DImode, tmp0, GEN_INT (32))));
      emit_move_insn (tmp10, (gen_rtx_LSHIFTRT (DImode, tmp1, GEN_INT (32))));

  emit_move_insn (tmp11, (gen_rtx_PLUS (DImode, tmp3, tmp7)));
  emit_move_insn (tmp12, (gen_rtx_PLUS (DImode, tmp8, tmp9)));
  emit_move_insn (tmp13, (gen_rtx_PLUS (DImode, tmp11, tmp12)));
  emit_move_insn (result, (gen_rtx_PLUS (DImode, tmp13, tmp10)));

/* Implement smuldi3_highpart.  */
tilegx_expand_smuldi3_highpart (rtx op0, rtx op1, rtx op2)

  tilegx_expand_high_multiply (op0, op1, op2, true);

/* Implement umuldi3_highpart.  */
tilegx_expand_umuldi3_highpart (rtx op0, rtx op1, rtx op2)

  tilegx_expand_high_multiply (op0, op1, op2, false);
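
/* The decomposition implemented above, written out (illustrative):
   with a = (ah << 32) + al and b = (bh << 32) + bl, the 128-bit
   product is

     a*b = (ah*bh << 64) + ((ah*bl + al*bh) << 32) + al*bl

   so the high 64 bits are ah*bh, plus the high halves of the two
   cross products, plus the carries produced when the shifted cross
   products and al*bl are summed; tmp7 and tmp8 capture exactly those
   two carry bits via the LTU comparisons.  */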
/* Compare and branches  */

/* Produce the rtx yielding a bool for a floating point ...  */
tilegx_emit_fp_setcc (rtx res, enum rtx_code code, machine_mode mode,

  /* TODO: Certain compares against constants can be done using entirely
     integer operations.  But you have to get the special cases right
     e.g. NaN, +0 == -0, etc.  */

  rtx a = force_reg (DImode, gen_lowpart (DImode, op0));
  rtx b = force_reg (DImode, gen_lowpart (DImode, op1));

  flags = gen_reg_rtx (DImode);

      emit_insn (gen_insn_fsingle_add1 (flags, a, b));

      gcc_assert (mode == DFmode);
      emit_insn (gen_insn_fdouble_add_flags (flags, a, b));

    case EQ: flag_index = 30; break;
    case NE: flag_index = 31; break;
    case LE: flag_index = 27; break;
    case LT: flag_index = 26; break;
    case GE: flag_index = 29; break;
    case GT: flag_index = 28; break;
    default: gcc_unreachable ();

  gcc_assert (GET_MODE (res) == DImode);
  emit_move_insn (res, gen_rtx_ZERO_EXTRACT (DImode, flags, GEN_INT (1),
					     GEN_INT (flag_index)));
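
/* How the extraction above works (an illustrative gloss): the
   fsingle_add1 / fdouble_add_flags instructions leave a set of
   comparison predicates packed into the flags word, one bit per
   relation; the switch picks the bit index for the requested rtx code
   (e.g. bit 30 for EQ) and the ZERO_EXTRACT pulls that single bit out
   as the 0/1 setcc result.  */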
/* Certain simplifications can be done to make invalid setcc
   operations valid.  Return the final comparison, or NULL if we can't ...  */
tilegx_emit_setcc_internal (rtx res, enum rtx_code code, rtx op0, rtx op1,
			    machine_mode cmp_mode)

  if (cmp_mode == SFmode || cmp_mode == DFmode)
    return tilegx_emit_fp_setcc (res, code, cmp_mode, op0, op1);

  /* The general case: fold the comparison code to the types of
     compares that we have, choosing the branch as necessary.  */

      /* We have these compares.  */

      /* We do not have these compares, so we reverse the ...  */

      /* We should not have called this with any other code.  */

      code = swap_condition (code);
      tmp = op0, op0 = op1, op1 = tmp;

  if (!reg_or_0_operand (op0, cmp_mode))
    op0 = force_reg (cmp_mode, op0);

  if (!CONST_INT_P (op1) && !register_operand (op1, cmp_mode))
    op1 = force_reg (cmp_mode, op1);

  /* Return the setcc comparison.  */
  emit_insn (gen_rtx_SET (res, gen_rtx_fmt_ee (code, DImode, op0, op1)));

/* Implement cstore patterns.  */
tilegx_emit_setcc (rtx operands[], machine_mode cmp_mode)

    tilegx_emit_setcc_internal (operands[0], GET_CODE (operands[1]),
				operands[2], operands[3], cmp_mode);

/* Return whether CODE is a signed comparison.  */
static bool
signed_compare_p (enum rtx_code code)
{
  return (code == EQ || code == NE || code == LT || code == LE
	  || code == GT || code == GE);
}

/* Generate the comparison for a DImode conditional branch.  */
static rtx
tilegx_emit_cc_test (enum rtx_code code, rtx op0, rtx op1,
		     machine_mode cmp_mode, bool eq_ne_only)
{
  enum rtx_code branch_code;
  rtx temp;

  if (cmp_mode == SFmode || cmp_mode == DFmode)
    {
      /* Compute a boolean saying whether the comparison is true.  */
      temp = gen_reg_rtx (DImode);
      tilegx_emit_setcc_internal (temp, code, op0, op1, cmp_mode);

      /* Test that flag.  */
      return gen_rtx_fmt_ee (NE, VOIDmode, temp, const0_rtx);
    }

  /* Check for a compare against zero using a comparison we can do
     directly.  */
  if (op1 == const0_rtx
      && (code == EQ || code == NE
	  || (!eq_ne_only && signed_compare_p (code))))
    {
      op0 = force_reg (cmp_mode, op0);
      return gen_rtx_fmt_ee (code, VOIDmode, op0, const0_rtx);
    }

  /* The general case: fold the comparison code to the types of
     compares that we have, choosing the branch as necessary.  */
  switch (code)
    {
    case EQ:
    case LE:
    case LT:
    case LEU:
    case LTU:
      /* We have these compares.  */
      branch_code = NE;
      break;

    case NE:
    case GE:
    case GT:
    case GEU:
    case GTU:
      /* These must be reversed (except NE, but let's
	 canonicalize).  */
      code = reverse_condition (code);
      branch_code = EQ;
      break;

    default:
      gcc_unreachable ();
    }

  if (CONST_INT_P (op1) && (!satisfies_constraint_I (op1) || code == LEU))
    {
      HOST_WIDE_INT n = INTVAL (op1);

      switch (code)
	{
	case EQ:
	  /* Subtract off the value we want to compare against and see
	     if we get zero.  This is cheaper than creating a constant
	     in a register.  Except that subtracting -128 is more
	     expensive than seqi to -128, so we leave that alone.  */
	  /* ??? Don't do this when comparing against symbols,
	     otherwise we'll reduce (&x == 0x1234) to (&x-0x1234 ==
	     0), which will be declared false out of hand (at least
	     for non-weak symbols).  */
	  if (n != -128
	      && add_operand (GEN_INT (-n), DImode)
	      && !(symbolic_operand (op0, VOIDmode)
		   || (REG_P (op0) && REG_POINTER (op0))))
	    {
	      /* TODO: Use a SIMD add immediate to hit zero for tiled
		 constants in a single instruction.  */
	      if (GET_MODE (op0) != DImode)
		{
		  /* Convert to DImode so we can use addli.  Note that
		     this will not actually generate any code because
		     sign extension from SI -> DI is a no-op.  I don't
		     know if it's safe just to make a paradoxical
		     subreg here though.  */
		  rtx temp2 = gen_reg_rtx (DImode);
		  emit_insn (gen_extendsidi2 (temp2, op0));
		  op0 = temp2;
		}
	      else
		{
		  op0 = force_reg (DImode, op0);
		}

	      temp = gen_reg_rtx (DImode);
	      emit_move_insn (temp, gen_rtx_PLUS (DImode, op0, GEN_INT (-n)));
	      return gen_rtx_fmt_ee (reverse_condition (branch_code),
				     VOIDmode, temp, const0_rtx);
	    }
	  break;

	case LEU:
	  if (n == -1)
	    break;
	  /* FALLTHRU */

	case LTU:
	  /* Change ((unsigned)x < 0x1000) into !((int)x >> 12), etc.
	     We use arithmetic shift right because it's a 3-wide op,
	     while logical shift right is not.  */
	  {
	    int first = exact_log2 (code == LTU ? n : n + 1);
	    if (first != -1)
	      {
		op0 = force_reg (cmp_mode, op0);
		temp = gen_reg_rtx (cmp_mode);
		emit_move_insn (temp,
				gen_rtx_ASHIFTRT (cmp_mode, op0,
						  GEN_INT (first)));
		return gen_rtx_fmt_ee (reverse_condition (branch_code),
				       VOIDmode, temp, const0_rtx);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Compute a flag saying whether we should branch.  */
  temp = gen_reg_rtx (DImode);
  tilegx_emit_setcc_internal (temp, code, op0, op1, cmp_mode);

  /* Return the branch comparison.  */
  return gen_rtx_fmt_ee (branch_code, VOIDmode, temp, const0_rtx);
}
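
/* A worked instance of the shift trick above: for (unsigned)x < 0x1000,
   n = 0x1000 and first = exact_log2 (0x1000) = 12, so the branch tests
   whether the arithmetic shift x >> 12 is zero; it is zero exactly when
   0 <= x < 0x1000, and nonzero (including negative, for values with the
   sign bit set) otherwise.  */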

/* Generate the comparison for a conditional branch.  */
void
tilegx_emit_conditional_branch (rtx operands[], machine_mode cmp_mode)
{
  rtx cmp_rtx =
    tilegx_emit_cc_test (GET_CODE (operands[0]), operands[1], operands[2],
			 cmp_mode, false);
  rtx branch_rtx = gen_rtx_SET (pc_rtx,
				gen_rtx_IF_THEN_ELSE (VOIDmode, cmp_rtx,
						      gen_rtx_LABEL_REF
						      (VOIDmode, operands[3]),
						      pc_rtx));
  emit_jump_insn (branch_rtx);
}

/* Implement the mov<mode>cc pattern.  */
rtx
tilegx_emit_conditional_move (rtx cmp)
{
  return
    tilegx_emit_cc_test (GET_CODE (cmp), XEXP (cmp, 0), XEXP (cmp, 1),
			 GET_MODE (XEXP (cmp, 0)), true);
}

/* Return true if INSN is annotated with a REG_BR_PROB note that
   indicates it's a branch that's predicted taken.  */
static bool
cbranch_predicted_p (rtx_insn *insn)
{
  rtx x = find_reg_note (insn, REG_BR_PROB, 0);

  if (x)
    {
      return profile_probability::from_reg_br_prob_note (XINT (x, 0))
	     >= profile_probability::even ();
    }

  return false;
}

/* Output assembly code for a specific branch instruction, appending
   the branch prediction flag to the opcode if appropriate.  */
static const char *
tilegx_output_simple_cbranch_with_opcode (rtx_insn *insn, const char *opcode,
					  int regop, bool reverse_predicted)
{
  static char buf[64];
  sprintf (buf, "%s%s\t%%r%d, %%l0", opcode,
	   (cbranch_predicted_p (insn) ^ reverse_predicted) ? "t" : "",
	   regop);
  return buf;
}

/* Output assembly code for a specific branch instruction, appending
   the branch prediction flag to the opcode if appropriate.  */
static const char *
tilegx_output_cbranch_with_opcode (rtx_insn *insn, rtx *operands,
				   const char *opcode,
				   const char *rev_opcode, int regop)
{
  const char *branch_if_false;
  rtx taken, not_taken;
  bool is_simple_branch;

  gcc_assert (LABEL_P (operands[0]));

  is_simple_branch = true;
  if (INSN_ADDRESSES_SET_P ())
    {
      int from_addr = INSN_ADDRESSES (INSN_UID (insn));
      int to_addr = INSN_ADDRESSES (INSN_UID (operands[0]));
      int delta = to_addr - from_addr;
      is_simple_branch = IN_RANGE (delta, -524288, 524280);
    }

  if (is_simple_branch)
    {
      /* Just a simple conditional branch.  */
      return
	tilegx_output_simple_cbranch_with_opcode (insn, opcode, regop, false);
    }

  /* Generate a reversed branch around a direct jump.  This fallback
     does not use branch-likely instructions.  */
  not_taken = gen_label_rtx ();
  taken = operands[0];

  /* Generate the reversed branch to NOT_TAKEN.  */
  operands[0] = not_taken;
  branch_if_false =
    tilegx_output_simple_cbranch_with_opcode (insn, rev_opcode, regop, true);
  output_asm_insn (branch_if_false, operands);

  output_asm_insn ("j\t%l0", &taken);

  /* Output NOT_TAKEN.  */
  targetm.asm_out.internal_label (asm_out_file, "L",
				  CODE_LABEL_NUMBER (not_taken));
  return "";
}
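
/* The byte range tested above, -524288 .. 524280, is what a signed
   17-bit branch offset counted in 8-byte instruction bundles can
   reach (-65536*8 .. 65535*8); a target any farther away is handled
   by the reversed-branch-around-a-'j' rewrite that follows the range
   check.  */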

/* Output assembly code for a conditional branch instruction.  */
const char *
tilegx_output_cbranch (rtx_insn *insn, rtx *operands, bool reversed)
{
  enum rtx_code code = GET_CODE (operands[1]);
  const char *opcode;
  const char *rev_opcode;

  if (reversed)
    code = reverse_condition (code);

  switch (code)
    {
    case NE:
      opcode = "bnez";
      rev_opcode = "beqz";
      break;
    case EQ:
      opcode = "beqz";
      rev_opcode = "bnez";
      break;
    case GE:
      opcode = "bgez";
      rev_opcode = "bltz";
      break;
    case GT:
      opcode = "bgtz";
      rev_opcode = "blez";
      break;
    case LE:
      opcode = "blez";
      rev_opcode = "bgtz";
      break;
    case LT:
      opcode = "bltz";
      rev_opcode = "bgez";
      break;
    default:
      gcc_unreachable ();
    }

  return tilegx_output_cbranch_with_opcode (insn, operands, opcode,
					    rev_opcode, 2);
}

/* Implement the tablejump pattern.  */
void
tilegx_expand_tablejump (rtx op0, rtx op1)
{
  if (flag_pic)
    {
      rtx temp = gen_reg_rtx (Pmode);
      rtx temp2 = gen_reg_rtx (Pmode);

      tilegx_compute_pcrel_address (temp, gen_rtx_LABEL_REF (Pmode, op1));
      emit_move_insn (temp2,
		      gen_rtx_PLUS (Pmode,
				    convert_to_mode (Pmode, op0, false),
				    temp));
      op0 = temp2;
    }

  emit_jump_insn (gen_tablejump_aux (op0, op1));
}

/* Emit barrier before an atomic, as needed for the memory MODEL.  */
void
tilegx_pre_atomic_barrier (enum memmodel model)
{
  if (need_atomic_barrier_p (model, true))
    emit_insn (gen_memory_barrier ());
}

/* Emit barrier after an atomic, as needed for the memory MODEL.  */
void
tilegx_post_atomic_barrier (enum memmodel model)
{
  if (need_atomic_barrier_p (model, false))
    emit_insn (gen_memory_barrier ());
}
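
/* Illustrative use of the two helpers around an atomic expansion (a
   sketch; the actual expanders live in the machine description):

     tilegx_pre_atomic_barrier (model);
     ... emit the atomic operation ...
     tilegx_post_atomic_barrier (model);

   need_atomic_barrier_p encodes the policy: MEMMODEL_SEQ_CST and
   MEMMODEL_ACQ_REL need both barriers, MEMMODEL_RELEASE only the
   pre-barrier, MEMMODEL_ACQUIRE only the post-barrier, and
   MEMMODEL_RELAXED neither.  */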

/* Expand a builtin vector binary op, by calling gen function GEN with
   operands in the proper modes.  DEST is converted to DEST_MODE, and
   src0 and src1 (if DO_SRC1 is true) are converted to SRC_MODE.  */
void
tilegx_expand_builtin_vector_binop (rtx (*gen) (rtx, rtx, rtx),
				    machine_mode dest_mode,
				    rtx dest,
				    machine_mode src_mode,
				    rtx src0, rtx src1, bool do_src1)
{
  dest = gen_lowpart (dest_mode, dest);

  if (src0 == const0_rtx)
    src0 = CONST0_RTX (src_mode);
  else
    src0 = gen_lowpart (src_mode, src0);

  if (do_src1)
    {
      if (src1 == const0_rtx)
	src1 = CONST0_RTX (src_mode);
      else
	src1 = gen_lowpart (src_mode, src1);
    }

  emit_insn ((*gen) (dest, src0, src1));
}
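
/* A typical call shape (hypothetical values, for illustration): a
   byte-wise vector add whose operands arrive as DImode registers
   could be expanded with a V8QImode pattern such as addv8qi3 as

     tilegx_expand_builtin_vector_binop (gen_addv8qi3, V8QImode, target,
					 V8QImode, op0, op1, true);

   with the gen_lowpart conversions above bridging the DImode and
   V8QImode views of the same 64-bit register.  */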

struct tile_builtin_info
{
  enum insn_code icode;
  tree fndecl;
};

static struct tile_builtin_info tilegx_builtin_info[TILEGX_BUILTIN_max] = {
  { CODE_FOR_adddi3, NULL }, /* add */
  { CODE_FOR_addsi3, NULL }, /* addx */
  { CODE_FOR_ssaddsi3, NULL }, /* addxsc */
  { CODE_FOR_anddi3, NULL }, /* and */
  { CODE_FOR_insn_bfexts, NULL }, /* bfexts */
  { CODE_FOR_insn_bfextu, NULL }, /* bfextu */
  { CODE_FOR_insn_bfins, NULL }, /* bfins */
  { CODE_FOR_clzdi2, NULL }, /* clz */
  { CODE_FOR_insn_cmoveqz, NULL }, /* cmoveqz */
  { CODE_FOR_insn_cmovnez, NULL }, /* cmovnez */
  { CODE_FOR_insn_cmpeq_didi, NULL }, /* cmpeq */
  { CODE_FOR_insn_cmpexch, NULL }, /* cmpexch */
  { CODE_FOR_insn_cmpexch4, NULL }, /* cmpexch4 */
  { CODE_FOR_insn_cmples_didi, NULL }, /* cmples */
  { CODE_FOR_insn_cmpleu_didi, NULL }, /* cmpleu */
  { CODE_FOR_insn_cmplts_didi, NULL }, /* cmplts */
  { CODE_FOR_insn_cmpltu_didi, NULL }, /* cmpltu */
  { CODE_FOR_insn_cmpne_didi, NULL }, /* cmpne */
  { CODE_FOR_insn_cmul, NULL }, /* cmul */
  { CODE_FOR_insn_cmula, NULL }, /* cmula */
  { CODE_FOR_insn_cmulaf, NULL }, /* cmulaf */
  { CODE_FOR_insn_cmulf, NULL }, /* cmulf */
  { CODE_FOR_insn_cmulfr, NULL }, /* cmulfr */
  { CODE_FOR_insn_cmulh, NULL }, /* cmulh */
  { CODE_FOR_insn_cmulhr, NULL }, /* cmulhr */
  { CODE_FOR_insn_crc32_32, NULL }, /* crc32_32 */
  { CODE_FOR_insn_crc32_8, NULL }, /* crc32_8 */
  { CODE_FOR_ctzdi2, NULL }, /* ctz */
  { CODE_FOR_insn_dblalign, NULL }, /* dblalign */
  { CODE_FOR_insn_dblalign2, NULL }, /* dblalign2 */
  { CODE_FOR_insn_dblalign4, NULL }, /* dblalign4 */
  { CODE_FOR_insn_dblalign6, NULL }, /* dblalign6 */
  { CODE_FOR_insn_drain, NULL }, /* drain */
  { CODE_FOR_insn_dtlbpr, NULL }, /* dtlbpr */
  { CODE_FOR_insn_exch, NULL }, /* exch */
  { CODE_FOR_insn_exch4, NULL }, /* exch4 */
  { CODE_FOR_insn_fdouble_add_flags, NULL }, /* fdouble_add_flags */
  { CODE_FOR_insn_fdouble_addsub, NULL }, /* fdouble_addsub */
  { CODE_FOR_insn_fdouble_mul_flags, NULL }, /* fdouble_mul_flags */
  { CODE_FOR_insn_fdouble_pack1, NULL }, /* fdouble_pack1 */
  { CODE_FOR_insn_fdouble_pack2, NULL }, /* fdouble_pack2 */
  { CODE_FOR_insn_fdouble_sub_flags, NULL }, /* fdouble_sub_flags */
  { CODE_FOR_insn_fdouble_unpack_max, NULL }, /* fdouble_unpack_max */
  { CODE_FOR_insn_fdouble_unpack_min, NULL }, /* fdouble_unpack_min */
  { CODE_FOR_insn_fetchadd, NULL }, /* fetchadd */
  { CODE_FOR_insn_fetchadd4, NULL }, /* fetchadd4 */
  { CODE_FOR_insn_fetchaddgez, NULL }, /* fetchaddgez */
  { CODE_FOR_insn_fetchaddgez4, NULL }, /* fetchaddgez4 */
  { CODE_FOR_insn_fetchand, NULL }, /* fetchand */
  { CODE_FOR_insn_fetchand4, NULL }, /* fetchand4 */
  { CODE_FOR_insn_fetchor, NULL }, /* fetchor */
  { CODE_FOR_insn_fetchor4, NULL }, /* fetchor4 */
  { CODE_FOR_insn_finv, NULL }, /* finv */
  { CODE_FOR_insn_flush, NULL }, /* flush */
  { CODE_FOR_insn_flushwb, NULL }, /* flushwb */
  { CODE_FOR_insn_fnop, NULL }, /* fnop */
  { CODE_FOR_insn_fsingle_add1, NULL }, /* fsingle_add1 */
  { CODE_FOR_insn_fsingle_addsub2, NULL }, /* fsingle_addsub2 */
  { CODE_FOR_insn_fsingle_mul1, NULL }, /* fsingle_mul1 */
  { CODE_FOR_insn_fsingle_mul2, NULL }, /* fsingle_mul2 */
  { CODE_FOR_insn_fsingle_pack1, NULL }, /* fsingle_pack1 */
  { CODE_FOR_insn_fsingle_pack2, NULL }, /* fsingle_pack2 */
  { CODE_FOR_insn_fsingle_sub1, NULL }, /* fsingle_sub1 */
  { CODE_FOR_insn_icoh, NULL }, /* icoh */
  { CODE_FOR_insn_ill, NULL }, /* ill */
  { CODE_FOR_insn_info, NULL }, /* info */
  { CODE_FOR_insn_infol, NULL }, /* infol */
  { CODE_FOR_insn_inv, NULL }, /* inv */
  { CODE_FOR_insn_ld, NULL }, /* ld */
  { CODE_FOR_insn_ld1s, NULL }, /* ld1s */
  { CODE_FOR_insn_ld1u, NULL }, /* ld1u */
  { CODE_FOR_insn_ld2s, NULL }, /* ld2s */
  { CODE_FOR_insn_ld2u, NULL }, /* ld2u */
  { CODE_FOR_insn_ld4s, NULL }, /* ld4s */
  { CODE_FOR_insn_ld4u, NULL }, /* ld4u */
  { CODE_FOR_insn_ldna, NULL }, /* ldna */
  { CODE_FOR_insn_ldnt, NULL }, /* ldnt */
  { CODE_FOR_insn_ldnt1s, NULL }, /* ldnt1s */
  { CODE_FOR_insn_ldnt1u, NULL }, /* ldnt1u */
  { CODE_FOR_insn_ldnt2s, NULL }, /* ldnt2s */
  { CODE_FOR_insn_ldnt2u, NULL }, /* ldnt2u */
  { CODE_FOR_insn_ldnt4s, NULL }, /* ldnt4s */
  { CODE_FOR_insn_ldnt4u, NULL }, /* ldnt4u */
  { CODE_FOR_insn_ld_L2, NULL }, /* ld_L2 */
  { CODE_FOR_insn_ld1s_L2, NULL }, /* ld1s_L2 */
  { CODE_FOR_insn_ld1u_L2, NULL }, /* ld1u_L2 */
  { CODE_FOR_insn_ld2s_L2, NULL }, /* ld2s_L2 */
  { CODE_FOR_insn_ld2u_L2, NULL }, /* ld2u_L2 */
  { CODE_FOR_insn_ld4s_L2, NULL }, /* ld4s_L2 */
  { CODE_FOR_insn_ld4u_L2, NULL }, /* ld4u_L2 */
  { CODE_FOR_insn_ldna_L2, NULL }, /* ldna_L2 */
  { CODE_FOR_insn_ldnt_L2, NULL }, /* ldnt_L2 */
  { CODE_FOR_insn_ldnt1s_L2, NULL }, /* ldnt1s_L2 */
  { CODE_FOR_insn_ldnt1u_L2, NULL }, /* ldnt1u_L2 */
  { CODE_FOR_insn_ldnt2s_L2, NULL }, /* ldnt2s_L2 */
  { CODE_FOR_insn_ldnt2u_L2, NULL }, /* ldnt2u_L2 */
  { CODE_FOR_insn_ldnt4s_L2, NULL }, /* ldnt4s_L2 */
  { CODE_FOR_insn_ldnt4u_L2, NULL }, /* ldnt4u_L2 */
  { CODE_FOR_insn_ld_miss, NULL }, /* ld_miss */
  { CODE_FOR_insn_ld1s_miss, NULL }, /* ld1s_miss */
  { CODE_FOR_insn_ld1u_miss, NULL }, /* ld1u_miss */
  { CODE_FOR_insn_ld2s_miss, NULL }, /* ld2s_miss */
  { CODE_FOR_insn_ld2u_miss, NULL }, /* ld2u_miss */
  { CODE_FOR_insn_ld4s_miss, NULL }, /* ld4s_miss */
  { CODE_FOR_insn_ld4u_miss, NULL }, /* ld4u_miss */
  { CODE_FOR_insn_ldna_miss, NULL }, /* ldna_miss */
  { CODE_FOR_insn_ldnt_miss, NULL }, /* ldnt_miss */
  { CODE_FOR_insn_ldnt1s_miss, NULL }, /* ldnt1s_miss */
  { CODE_FOR_insn_ldnt1u_miss, NULL }, /* ldnt1u_miss */
  { CODE_FOR_insn_ldnt2s_miss, NULL }, /* ldnt2s_miss */
  { CODE_FOR_insn_ldnt2u_miss, NULL }, /* ldnt2u_miss */
  { CODE_FOR_insn_ldnt4s_miss, NULL }, /* ldnt4s_miss */
  { CODE_FOR_insn_ldnt4u_miss, NULL }, /* ldnt4u_miss */
  { CODE_FOR_insn_lnk, NULL }, /* lnk */
  { CODE_FOR_memory_barrier, NULL }, /* mf */
  { CODE_FOR_insn_mfspr, NULL }, /* mfspr */
  { CODE_FOR_insn_mm, NULL }, /* mm */
  { CODE_FOR_insn_mnz, NULL }, /* mnz */
  { CODE_FOR_movdi, NULL }, /* move */
  { CODE_FOR_insn_mtspr, NULL }, /* mtspr */
  { CODE_FOR_insn_mul_hs_hs, NULL }, /* mul_hs_hs */
  { CODE_FOR_insn_mul_hs_hu, NULL }, /* mul_hs_hu */
  { CODE_FOR_insn_mul_hs_ls, NULL }, /* mul_hs_ls */
  { CODE_FOR_insn_mul_hs_lu, NULL }, /* mul_hs_lu */
  { CODE_FOR_insn_mul_hu_hu, NULL }, /* mul_hu_hu */
  { CODE_FOR_insn_mul_hu_ls, NULL }, /* mul_hu_ls */
  { CODE_FOR_insn_mul_hu_lu, NULL }, /* mul_hu_lu */
  { CODE_FOR_insn_mul_ls_ls, NULL }, /* mul_ls_ls */
  { CODE_FOR_insn_mul_ls_lu, NULL }, /* mul_ls_lu */
  { CODE_FOR_insn_mul_lu_lu, NULL }, /* mul_lu_lu */
  { CODE_FOR_insn_mula_hs_hs, NULL }, /* mula_hs_hs */
  { CODE_FOR_insn_mula_hs_hu, NULL }, /* mula_hs_hu */
  { CODE_FOR_insn_mula_hs_ls, NULL }, /* mula_hs_ls */
  { CODE_FOR_insn_mula_hs_lu, NULL }, /* mula_hs_lu */
  { CODE_FOR_insn_mula_hu_hu, NULL }, /* mula_hu_hu */
  { CODE_FOR_insn_mula_hu_ls, NULL }, /* mula_hu_ls */
  { CODE_FOR_insn_mula_hu_lu, NULL }, /* mula_hu_lu */
  { CODE_FOR_insn_mula_ls_ls, NULL }, /* mula_ls_ls */
  { CODE_FOR_insn_mula_ls_lu, NULL }, /* mula_ls_lu */
  { CODE_FOR_insn_mula_lu_lu, NULL }, /* mula_lu_lu */
  { CODE_FOR_insn_mulax, NULL }, /* mulax */
  { CODE_FOR_mulsi3, NULL }, /* mulx */
  { CODE_FOR_insn_mz, NULL }, /* mz */
  { CODE_FOR_insn_nap, NULL }, /* nap */
  { CODE_FOR_nop, NULL }, /* nop */
  { CODE_FOR_insn_nor_di, NULL }, /* nor */
  { CODE_FOR_iordi3, NULL }, /* or */
  { CODE_FOR_popcountdi2, NULL }, /* pcnt */
  { CODE_FOR_insn_prefetch_l1, NULL }, /* prefetch_l1 */
  { CODE_FOR_insn_prefetch_l1_fault, NULL }, /* prefetch_l1_fault */
  { CODE_FOR_insn_prefetch_l2, NULL }, /* prefetch_l2 */
  { CODE_FOR_insn_prefetch_l2_fault, NULL }, /* prefetch_l2_fault */
  { CODE_FOR_insn_prefetch_l3, NULL }, /* prefetch_l3 */
  { CODE_FOR_insn_prefetch_l3_fault, NULL }, /* prefetch_l3_fault */
  { CODE_FOR_insn_revbits, NULL }, /* revbits */
  { CODE_FOR_bswapdi2, NULL }, /* revbytes */
  { CODE_FOR_rotldi3, NULL }, /* rotl */
  { CODE_FOR_ashldi3, NULL }, /* shl */
  { CODE_FOR_insn_shl16insli, NULL }, /* shl16insli */
  { CODE_FOR_insn_shl1add, NULL }, /* shl1add */
  { CODE_FOR_insn_shl1addx, NULL }, /* shl1addx */
  { CODE_FOR_insn_shl2add, NULL }, /* shl2add */
  { CODE_FOR_insn_shl2addx, NULL }, /* shl2addx */
  { CODE_FOR_insn_shl3add, NULL }, /* shl3add */
  { CODE_FOR_insn_shl3addx, NULL }, /* shl3addx */
  { CODE_FOR_ashlsi3, NULL }, /* shlx */
  { CODE_FOR_ashrdi3, NULL }, /* shrs */
  { CODE_FOR_lshrdi3, NULL }, /* shru */
  { CODE_FOR_lshrsi3, NULL }, /* shrux */
  { CODE_FOR_insn_shufflebytes, NULL }, /* shufflebytes */
  { CODE_FOR_insn_shufflebytes1, NULL }, /* shufflebytes1 */
  { CODE_FOR_insn_st, NULL }, /* st */
  { CODE_FOR_insn_st1, NULL }, /* st1 */
  { CODE_FOR_insn_st2, NULL }, /* st2 */
  { CODE_FOR_insn_st4, NULL }, /* st4 */
  { CODE_FOR_insn_stnt, NULL }, /* stnt */
  { CODE_FOR_insn_stnt1, NULL }, /* stnt1 */
  { CODE_FOR_insn_stnt2, NULL }, /* stnt2 */
  { CODE_FOR_insn_stnt4, NULL }, /* stnt4 */
  { CODE_FOR_subdi3, NULL }, /* sub */
  { CODE_FOR_subsi3, NULL }, /* subx */
  { CODE_FOR_sssubsi3, NULL }, /* subxsc */
  { CODE_FOR_insn_tblidxb0, NULL }, /* tblidxb0 */
  { CODE_FOR_insn_tblidxb1, NULL }, /* tblidxb1 */
  { CODE_FOR_insn_tblidxb2, NULL }, /* tblidxb2 */
  { CODE_FOR_insn_tblidxb3, NULL }, /* tblidxb3 */
  { CODE_FOR_insn_v1add, NULL }, /* v1add */
  { CODE_FOR_insn_v1addi, NULL }, /* v1addi */
  { CODE_FOR_insn_v1adduc, NULL }, /* v1adduc */
  { CODE_FOR_insn_v1adiffu, NULL }, /* v1adiffu */
  { CODE_FOR_insn_v1avgu, NULL }, /* v1avgu */
  { CODE_FOR_insn_v1cmpeq, NULL }, /* v1cmpeq */
  { CODE_FOR_insn_v1cmpeqi, NULL }, /* v1cmpeqi */
  { CODE_FOR_insn_v1cmples, NULL }, /* v1cmples */
  { CODE_FOR_insn_v1cmpleu, NULL }, /* v1cmpleu */
  { CODE_FOR_insn_v1cmplts, NULL }, /* v1cmplts */
  { CODE_FOR_insn_v1cmpltsi, NULL }, /* v1cmpltsi */
  { CODE_FOR_insn_v1cmpltu, NULL }, /* v1cmpltu */
  { CODE_FOR_insn_v1cmpltui, NULL }, /* v1cmpltui */
  { CODE_FOR_insn_v1cmpne, NULL }, /* v1cmpne */
  { CODE_FOR_insn_v1ddotpu, NULL }, /* v1ddotpu */
  { CODE_FOR_insn_v1ddotpua, NULL }, /* v1ddotpua */
  { CODE_FOR_insn_v1ddotpus, NULL }, /* v1ddotpus */
  { CODE_FOR_insn_v1ddotpusa, NULL }, /* v1ddotpusa */
  { CODE_FOR_insn_v1dotp, NULL }, /* v1dotp */
  { CODE_FOR_insn_v1dotpa, NULL }, /* v1dotpa */
  { CODE_FOR_insn_v1dotpu, NULL }, /* v1dotpu */
  { CODE_FOR_insn_v1dotpua, NULL }, /* v1dotpua */
  { CODE_FOR_insn_v1dotpus, NULL }, /* v1dotpus */
  { CODE_FOR_insn_v1dotpusa, NULL }, /* v1dotpusa */
  { CODE_FOR_insn_v1int_h, NULL }, /* v1int_h */
  { CODE_FOR_insn_v1int_l, NULL }, /* v1int_l */
  { CODE_FOR_insn_v1maxu, NULL }, /* v1maxu */
  { CODE_FOR_insn_v1maxui, NULL }, /* v1maxui */
  { CODE_FOR_insn_v1minu, NULL }, /* v1minu */
  { CODE_FOR_insn_v1minui, NULL }, /* v1minui */
  { CODE_FOR_insn_v1mnz, NULL }, /* v1mnz */
  { CODE_FOR_insn_v1multu, NULL }, /* v1multu */
  { CODE_FOR_insn_v1mulu, NULL }, /* v1mulu */
  { CODE_FOR_insn_v1mulus, NULL }, /* v1mulus */
  { CODE_FOR_insn_v1mz, NULL }, /* v1mz */
  { CODE_FOR_insn_v1sadau, NULL }, /* v1sadau */
  { CODE_FOR_insn_v1sadu, NULL }, /* v1sadu */
  { CODE_FOR_insn_v1shl, NULL }, /* v1shl */
  { CODE_FOR_insn_v1shl, NULL }, /* v1shli */
  { CODE_FOR_insn_v1shrs, NULL }, /* v1shrs */
  { CODE_FOR_insn_v1shrs, NULL }, /* v1shrsi */
  { CODE_FOR_insn_v1shru, NULL }, /* v1shru */
  { CODE_FOR_insn_v1shru, NULL }, /* v1shrui */
  { CODE_FOR_insn_v1sub, NULL }, /* v1sub */
  { CODE_FOR_insn_v1subuc, NULL }, /* v1subuc */
  { CODE_FOR_insn_v2add, NULL }, /* v2add */
  { CODE_FOR_insn_v2addi, NULL }, /* v2addi */
  { CODE_FOR_insn_v2addsc, NULL }, /* v2addsc */
  { CODE_FOR_insn_v2adiffs, NULL }, /* v2adiffs */
  { CODE_FOR_insn_v2avgs, NULL }, /* v2avgs */
  { CODE_FOR_insn_v2cmpeq, NULL }, /* v2cmpeq */
  { CODE_FOR_insn_v2cmpeqi, NULL }, /* v2cmpeqi */
  { CODE_FOR_insn_v2cmples, NULL }, /* v2cmples */
  { CODE_FOR_insn_v2cmpleu, NULL }, /* v2cmpleu */
  { CODE_FOR_insn_v2cmplts, NULL }, /* v2cmplts */
  { CODE_FOR_insn_v2cmpltsi, NULL }, /* v2cmpltsi */
  { CODE_FOR_insn_v2cmpltu, NULL }, /* v2cmpltu */
  { CODE_FOR_insn_v2cmpltui, NULL }, /* v2cmpltui */
  { CODE_FOR_insn_v2cmpne, NULL }, /* v2cmpne */
  { CODE_FOR_insn_v2dotp, NULL }, /* v2dotp */
  { CODE_FOR_insn_v2dotpa, NULL }, /* v2dotpa */
  { CODE_FOR_insn_v2int_h, NULL }, /* v2int_h */
  { CODE_FOR_insn_v2int_l, NULL }, /* v2int_l */
  { CODE_FOR_insn_v2maxs, NULL }, /* v2maxs */
  { CODE_FOR_insn_v2maxsi, NULL }, /* v2maxsi */
  { CODE_FOR_insn_v2mins, NULL }, /* v2mins */
  { CODE_FOR_insn_v2minsi, NULL }, /* v2minsi */
  { CODE_FOR_insn_v2mnz, NULL }, /* v2mnz */
  { CODE_FOR_insn_v2mulfsc, NULL }, /* v2mulfsc */
  { CODE_FOR_insn_v2muls, NULL }, /* v2muls */
  { CODE_FOR_insn_v2mults, NULL }, /* v2mults */
  { CODE_FOR_insn_v2mz, NULL }, /* v2mz */
  { CODE_FOR_insn_v2packh, NULL }, /* v2packh */
  { CODE_FOR_insn_v2packl, NULL }, /* v2packl */
  { CODE_FOR_insn_v2packuc, NULL }, /* v2packuc */
  { CODE_FOR_insn_v2sadas, NULL }, /* v2sadas */
  { CODE_FOR_insn_v2sadau, NULL }, /* v2sadau */
  { CODE_FOR_insn_v2sads, NULL }, /* v2sads */
  { CODE_FOR_insn_v2sadu, NULL }, /* v2sadu */
  { CODE_FOR_insn_v2shl, NULL }, /* v2shl */
  { CODE_FOR_insn_v2shl, NULL }, /* v2shli */
  { CODE_FOR_insn_v2shlsc, NULL }, /* v2shlsc */
  { CODE_FOR_insn_v2shrs, NULL }, /* v2shrs */
  { CODE_FOR_insn_v2shrs, NULL }, /* v2shrsi */
  { CODE_FOR_insn_v2shru, NULL }, /* v2shru */
  { CODE_FOR_insn_v2shru, NULL }, /* v2shrui */
  { CODE_FOR_insn_v2sub, NULL }, /* v2sub */
  { CODE_FOR_insn_v2subsc, NULL }, /* v2subsc */
  { CODE_FOR_insn_v4add, NULL }, /* v4add */
  { CODE_FOR_insn_v4addsc, NULL }, /* v4addsc */
  { CODE_FOR_insn_v4int_h, NULL }, /* v4int_h */
  { CODE_FOR_insn_v4int_l, NULL }, /* v4int_l */
  { CODE_FOR_insn_v4packsc, NULL }, /* v4packsc */
  { CODE_FOR_insn_v4shl, NULL }, /* v4shl */
  { CODE_FOR_insn_v4shlsc, NULL }, /* v4shlsc */
  { CODE_FOR_insn_v4shrs, NULL }, /* v4shrs */
  { CODE_FOR_insn_v4shru, NULL }, /* v4shru */
  { CODE_FOR_insn_v4sub, NULL }, /* v4sub */
  { CODE_FOR_insn_v4subsc, NULL }, /* v4subsc */
  { CODE_FOR_insn_wh64, NULL }, /* wh64 */
  { CODE_FOR_xordi3, NULL }, /* xor */
  { CODE_FOR_tilegx_network_barrier, NULL }, /* network_barrier */
  { CODE_FOR_tilegx_idn0_receive, NULL }, /* idn0_receive */
  { CODE_FOR_tilegx_idn1_receive, NULL }, /* idn1_receive */
  { CODE_FOR_tilegx_idn_send, NULL }, /* idn_send */
  { CODE_FOR_tilegx_udn0_receive, NULL }, /* udn0_receive */
  { CODE_FOR_tilegx_udn1_receive, NULL }, /* udn1_receive */
  { CODE_FOR_tilegx_udn2_receive, NULL }, /* udn2_receive */
  { CODE_FOR_tilegx_udn3_receive, NULL }, /* udn3_receive */
  { CODE_FOR_tilegx_udn_send, NULL }, /* udn_send */
};

struct tilegx_builtin_def
{
  const char *name;
  enum tilegx_builtin code;
  bool is_const;
  /* The first character is the return type.  Subsequent characters
     are the argument types.  See char_to_type.  */
  const char *type;
};


static const struct tilegx_builtin_def tilegx_builtins[] = {
  { "__insn_add", TILEGX_INSN_ADD, true, "lll" },
  { "__insn_addi", TILEGX_INSN_ADD, true, "lll" },
  { "__insn_addli", TILEGX_INSN_ADD, true, "lll" },
  { "__insn_addx", TILEGX_INSN_ADDX, true, "iii" },
  { "__insn_addxi", TILEGX_INSN_ADDX, true, "iii" },
  { "__insn_addxli", TILEGX_INSN_ADDX, true, "iii" },
  { "__insn_addxsc", TILEGX_INSN_ADDXSC, true, "iii" },
  { "__insn_and", TILEGX_INSN_AND, true, "lll" },
  { "__insn_andi", TILEGX_INSN_AND, true, "lll" },
  { "__insn_bfexts", TILEGX_INSN_BFEXTS, true, "llll" },
  { "__insn_bfextu", TILEGX_INSN_BFEXTU, true, "llll" },
  { "__insn_bfins", TILEGX_INSN_BFINS, true, "lllll" },
  { "__insn_clz", TILEGX_INSN_CLZ, true, "ll" },
  { "__insn_cmoveqz", TILEGX_INSN_CMOVEQZ, true, "llll" },
  { "__insn_cmovnez", TILEGX_INSN_CMOVNEZ, true, "llll" },
  { "__insn_cmpeq", TILEGX_INSN_CMPEQ, true, "lll" },
  { "__insn_cmpeqi", TILEGX_INSN_CMPEQ, true, "lll" },
  { "__insn_cmpexch", TILEGX_INSN_CMPEXCH, false, "lpl" },
  { "__insn_cmpexch4", TILEGX_INSN_CMPEXCH4, false, "ipi" },
  { "__insn_cmples", TILEGX_INSN_CMPLES, true, "lll" },
  { "__insn_cmpleu", TILEGX_INSN_CMPLEU, true, "lll" },
  { "__insn_cmplts", TILEGX_INSN_CMPLTS, true, "lll" },
  { "__insn_cmpltsi", TILEGX_INSN_CMPLTS, true, "lll" },
  { "__insn_cmpltu", TILEGX_INSN_CMPLTU, true, "lll" },
  { "__insn_cmpltui", TILEGX_INSN_CMPLTU, true, "lll" },
  { "__insn_cmpne", TILEGX_INSN_CMPNE, true, "lll" },
  { "__insn_cmul", TILEGX_INSN_CMUL, true, "lll" },
  { "__insn_cmula", TILEGX_INSN_CMULA, true, "llll" },
  { "__insn_cmulaf", TILEGX_INSN_CMULAF, true, "llll" },
  { "__insn_cmulf", TILEGX_INSN_CMULF, true, "lll" },
  { "__insn_cmulfr", TILEGX_INSN_CMULFR, true, "lll" },
  { "__insn_cmulh", TILEGX_INSN_CMULH, true, "lll" },
  { "__insn_cmulhr", TILEGX_INSN_CMULHR, true, "lll" },
  { "__insn_crc32_32", TILEGX_INSN_CRC32_32, true, "lll" },
  { "__insn_crc32_8", TILEGX_INSN_CRC32_8, true, "lll" },
  { "__insn_ctz", TILEGX_INSN_CTZ, true, "ll" },
  { "__insn_dblalign", TILEGX_INSN_DBLALIGN, true, "lllk" },
  { "__insn_dblalign2", TILEGX_INSN_DBLALIGN2, true, "lll" },
  { "__insn_dblalign4", TILEGX_INSN_DBLALIGN4, true, "lll" },
  { "__insn_dblalign6", TILEGX_INSN_DBLALIGN6, true, "lll" },
  { "__insn_drain", TILEGX_INSN_DRAIN, false, "v" },
  { "__insn_dtlbpr", TILEGX_INSN_DTLBPR, false, "vl" },
  { "__insn_exch", TILEGX_INSN_EXCH, false, "lpl" },
  { "__insn_exch4", TILEGX_INSN_EXCH4, false, "ipi" },
  { "__insn_fdouble_add_flags", TILEGX_INSN_FDOUBLE_ADD_FLAGS, true, "lll" },
  { "__insn_fdouble_addsub", TILEGX_INSN_FDOUBLE_ADDSUB, true, "llll" },
  { "__insn_fdouble_mul_flags", TILEGX_INSN_FDOUBLE_MUL_FLAGS, true, "lll" },
  { "__insn_fdouble_pack1", TILEGX_INSN_FDOUBLE_PACK1, true, "lll" },
  { "__insn_fdouble_pack2", TILEGX_INSN_FDOUBLE_PACK2, true, "llll" },
  { "__insn_fdouble_sub_flags", TILEGX_INSN_FDOUBLE_SUB_FLAGS, true, "lll" },
  { "__insn_fdouble_unpack_max", TILEGX_INSN_FDOUBLE_UNPACK_MAX, true, "lll" },
  { "__insn_fdouble_unpack_min", TILEGX_INSN_FDOUBLE_UNPACK_MIN, true, "lll" },
  { "__insn_fetchadd", TILEGX_INSN_FETCHADD, false, "lpl" },
  { "__insn_fetchadd4", TILEGX_INSN_FETCHADD4, false, "ipi" },
  { "__insn_fetchaddgez", TILEGX_INSN_FETCHADDGEZ, false, "lpl" },
  { "__insn_fetchaddgez4", TILEGX_INSN_FETCHADDGEZ4, false, "ipi" },
  { "__insn_fetchand", TILEGX_INSN_FETCHAND, false, "lpl" },
  { "__insn_fetchand4", TILEGX_INSN_FETCHAND4, false, "ipi" },
  { "__insn_fetchor", TILEGX_INSN_FETCHOR, false, "lpl" },
  { "__insn_fetchor4", TILEGX_INSN_FETCHOR4, false, "ipi" },
  { "__insn_finv", TILEGX_INSN_FINV, false, "vk" },
  { "__insn_flush", TILEGX_INSN_FLUSH, false, "vk" },
  { "__insn_flushwb", TILEGX_INSN_FLUSHWB, false, "v" },
  { "__insn_fnop", TILEGX_INSN_FNOP, false, "v" },
  { "__insn_fsingle_add1", TILEGX_INSN_FSINGLE_ADD1, true, "lll" },
  { "__insn_fsingle_addsub2", TILEGX_INSN_FSINGLE_ADDSUB2, true, "llll" },
  { "__insn_fsingle_mul1", TILEGX_INSN_FSINGLE_MUL1, true, "lll" },
  { "__insn_fsingle_mul2", TILEGX_INSN_FSINGLE_MUL2, true, "lll" },
  { "__insn_fsingle_pack1", TILEGX_INSN_FSINGLE_PACK1, true, "ll" },
  { "__insn_fsingle_pack2", TILEGX_INSN_FSINGLE_PACK2, true, "lll" },
  { "__insn_fsingle_sub1", TILEGX_INSN_FSINGLE_SUB1, true, "lll" },
  { "__insn_icoh", TILEGX_INSN_ICOH, false, "vk" },
  { "__insn_ill", TILEGX_INSN_ILL, false, "v" },
  { "__insn_info", TILEGX_INSN_INFO, false, "vl" },
  { "__insn_infol", TILEGX_INSN_INFOL, false, "vl" },
  { "__insn_inv", TILEGX_INSN_INV, false, "vp" },
  { "__insn_ld", TILEGX_INSN_LD, false, "lk" },
  { "__insn_ld1s", TILEGX_INSN_LD1S, false, "lk" },
  { "__insn_ld1u", TILEGX_INSN_LD1U, false, "lk" },
  { "__insn_ld2s", TILEGX_INSN_LD2S, false, "lk" },
  { "__insn_ld2u", TILEGX_INSN_LD2U, false, "lk" },
  { "__insn_ld4s", TILEGX_INSN_LD4S, false, "lk" },
  { "__insn_ld4u", TILEGX_INSN_LD4U, false, "lk" },
  { "__insn_ldna", TILEGX_INSN_LDNA, false, "lk" },
  { "__insn_ldnt", TILEGX_INSN_LDNT, false, "lk" },
  { "__insn_ldnt1s", TILEGX_INSN_LDNT1S, false, "lk" },
  { "__insn_ldnt1u", TILEGX_INSN_LDNT1U, false, "lk" },
  { "__insn_ldnt2s", TILEGX_INSN_LDNT2S, false, "lk" },
  { "__insn_ldnt2u", TILEGX_INSN_LDNT2U, false, "lk" },
  { "__insn_ldnt4s", TILEGX_INSN_LDNT4S, false, "lk" },
  { "__insn_ldnt4u", TILEGX_INSN_LDNT4U, false, "lk" },
  { "__insn_ld_L2", TILEGX_INSN_LD_L2, false, "lk" },
  { "__insn_ld1s_L2", TILEGX_INSN_LD1S_L2, false, "lk" },
  { "__insn_ld1u_L2", TILEGX_INSN_LD1U_L2, false, "lk" },
  { "__insn_ld2s_L2", TILEGX_INSN_LD2S_L2, false, "lk" },
  { "__insn_ld2u_L2", TILEGX_INSN_LD2U_L2, false, "lk" },
  { "__insn_ld4s_L2", TILEGX_INSN_LD4S_L2, false, "lk" },
  { "__insn_ld4u_L2", TILEGX_INSN_LD4U_L2, false, "lk" },
  { "__insn_ldna_L2", TILEGX_INSN_LDNA_L2, false, "lk" },
  { "__insn_ldnt_L2", TILEGX_INSN_LDNT_L2, false, "lk" },
  { "__insn_ldnt1s_L2", TILEGX_INSN_LDNT1S_L2, false, "lk" },
  { "__insn_ldnt1u_L2", TILEGX_INSN_LDNT1U_L2, false, "lk" },
  { "__insn_ldnt2s_L2", TILEGX_INSN_LDNT2S_L2, false, "lk" },
  { "__insn_ldnt2u_L2", TILEGX_INSN_LDNT2U_L2, false, "lk" },
  { "__insn_ldnt4s_L2", TILEGX_INSN_LDNT4S_L2, false, "lk" },
  { "__insn_ldnt4u_L2", TILEGX_INSN_LDNT4U_L2, false, "lk" },
  { "__insn_ld_miss", TILEGX_INSN_LD_MISS, false, "lk" },
  { "__insn_ld1s_miss", TILEGX_INSN_LD1S_MISS, false, "lk" },
  { "__insn_ld1u_miss", TILEGX_INSN_LD1U_MISS, false, "lk" },
  { "__insn_ld2s_miss", TILEGX_INSN_LD2S_MISS, false, "lk" },
  { "__insn_ld2u_miss", TILEGX_INSN_LD2U_MISS, false, "lk" },
  { "__insn_ld4s_miss", TILEGX_INSN_LD4S_MISS, false, "lk" },
  { "__insn_ld4u_miss", TILEGX_INSN_LD4U_MISS, false, "lk" },
  { "__insn_ldna_miss", TILEGX_INSN_LDNA_MISS, false, "lk" },
  { "__insn_ldnt_miss", TILEGX_INSN_LDNT_MISS, false, "lk" },
  { "__insn_ldnt1s_miss", TILEGX_INSN_LDNT1S_MISS, false, "lk" },
  { "__insn_ldnt1u_miss", TILEGX_INSN_LDNT1U_MISS, false, "lk" },
  { "__insn_ldnt2s_miss", TILEGX_INSN_LDNT2S_MISS, false, "lk" },
  { "__insn_ldnt2u_miss", TILEGX_INSN_LDNT2U_MISS, false, "lk" },
  { "__insn_ldnt4s_miss", TILEGX_INSN_LDNT4S_MISS, false, "lk" },
  { "__insn_ldnt4u_miss", TILEGX_INSN_LDNT4U_MISS, false, "lk" },
  { "__insn_lnk", TILEGX_INSN_LNK, true, "l" },
  { "__insn_mf", TILEGX_INSN_MF, false, "v" },
  { "__insn_mfspr", TILEGX_INSN_MFSPR, false, "ll" },
  { "__insn_mm", TILEGX_INSN_MM, true, "lllll" },
  { "__insn_mnz", TILEGX_INSN_MNZ, true, "lll" },
  { "__insn_move", TILEGX_INSN_MOVE, true, "ll" },
  { "__insn_movei", TILEGX_INSN_MOVE, true, "ll" },
  { "__insn_moveli", TILEGX_INSN_MOVE, true, "ll" },
  { "__insn_mtspr", TILEGX_INSN_MTSPR, false, "vll" },
  { "__insn_mul_hs_hs", TILEGX_INSN_MUL_HS_HS, true, "lll" },
  { "__insn_mul_hs_hu", TILEGX_INSN_MUL_HS_HU, true, "lll" },
  { "__insn_mul_hs_ls", TILEGX_INSN_MUL_HS_LS, true, "lll" },
  { "__insn_mul_hs_lu", TILEGX_INSN_MUL_HS_LU, true, "lll" },
  { "__insn_mul_hu_hu", TILEGX_INSN_MUL_HU_HU, true, "lll" },
  { "__insn_mul_hu_ls", TILEGX_INSN_MUL_HU_LS, true, "lll" },
  { "__insn_mul_hu_lu", TILEGX_INSN_MUL_HU_LU, true, "lll" },
  { "__insn_mul_ls_ls", TILEGX_INSN_MUL_LS_LS, true, "lll" },
  { "__insn_mul_ls_lu", TILEGX_INSN_MUL_LS_LU, true, "lll" },
  { "__insn_mul_lu_lu", TILEGX_INSN_MUL_LU_LU, true, "lll" },
  { "__insn_mula_hs_hs", TILEGX_INSN_MULA_HS_HS, true, "llll" },
  { "__insn_mula_hs_hu", TILEGX_INSN_MULA_HS_HU, true, "llll" },
  { "__insn_mula_hs_ls", TILEGX_INSN_MULA_HS_LS, true, "llll" },
  { "__insn_mula_hs_lu", TILEGX_INSN_MULA_HS_LU, true, "llll" },
  { "__insn_mula_hu_hu", TILEGX_INSN_MULA_HU_HU, true, "llll" },
  { "__insn_mula_hu_ls", TILEGX_INSN_MULA_HU_LS, true, "llll" },
  { "__insn_mula_hu_lu", TILEGX_INSN_MULA_HU_LU, true, "llll" },
  { "__insn_mula_ls_ls", TILEGX_INSN_MULA_LS_LS, true, "llll" },
  { "__insn_mula_ls_lu", TILEGX_INSN_MULA_LS_LU, true, "llll" },
  { "__insn_mula_lu_lu", TILEGX_INSN_MULA_LU_LU, true, "llll" },
  { "__insn_mulax", TILEGX_INSN_MULAX, true, "iiii" },
  { "__insn_mulx", TILEGX_INSN_MULX, true, "iii" },
  { "__insn_mz", TILEGX_INSN_MZ, true, "lll" },
  { "__insn_nap", TILEGX_INSN_NAP, false, "v" },
  { "__insn_nop", TILEGX_INSN_NOP, true, "v" },
  { "__insn_nor", TILEGX_INSN_NOR, true, "lll" },
  { "__insn_or", TILEGX_INSN_OR, true, "lll" },
  { "__insn_ori", TILEGX_INSN_OR, true, "lll" },
  { "__insn_pcnt", TILEGX_INSN_PCNT, true, "ll" },
  { "__insn_prefetch", TILEGX_INSN_PREFETCH_L1, false, "vk" },
  { "__insn_prefetch_l1", TILEGX_INSN_PREFETCH_L1, false, "vk" },
  { "__insn_prefetch_l1_fault", TILEGX_INSN_PREFETCH_L1_FAULT, false, "vk" },
  { "__insn_prefetch_l2", TILEGX_INSN_PREFETCH_L2, false, "vk" },
  { "__insn_prefetch_l2_fault", TILEGX_INSN_PREFETCH_L2_FAULT, false, "vk" },
  { "__insn_prefetch_l3", TILEGX_INSN_PREFETCH_L3, false, "vk" },
  { "__insn_prefetch_l3_fault", TILEGX_INSN_PREFETCH_L3_FAULT, false, "vk" },
  { "__insn_revbits", TILEGX_INSN_REVBITS, true, "ll" },
  { "__insn_revbytes", TILEGX_INSN_REVBYTES, true, "ll" },
  { "__insn_rotl", TILEGX_INSN_ROTL, true, "lli" },
  { "__insn_rotli", TILEGX_INSN_ROTL, true, "lli" },
  { "__insn_shl", TILEGX_INSN_SHL, true, "lli" },
  { "__insn_shl16insli", TILEGX_INSN_SHL16INSLI, true, "lll" },
  { "__insn_shl1add", TILEGX_INSN_SHL1ADD, true, "lll" },
  { "__insn_shl1addx", TILEGX_INSN_SHL1ADDX, true, "iii" },
  { "__insn_shl2add", TILEGX_INSN_SHL2ADD, true, "lll" },
  { "__insn_shl2addx", TILEGX_INSN_SHL2ADDX, true, "iii" },
  { "__insn_shl3add", TILEGX_INSN_SHL3ADD, true, "lll" },
  { "__insn_shl3addx", TILEGX_INSN_SHL3ADDX, true, "iii" },
  { "__insn_shli", TILEGX_INSN_SHL, true, "lli" },
  { "__insn_shlx", TILEGX_INSN_SHLX, true, "iii" },
  { "__insn_shlxi", TILEGX_INSN_SHLX, true, "iii" },
  { "__insn_shrs", TILEGX_INSN_SHRS, true, "lli" },
  { "__insn_shrsi", TILEGX_INSN_SHRS, true, "lli" },
  { "__insn_shru", TILEGX_INSN_SHRU, true, "lli" },
  { "__insn_shrui", TILEGX_INSN_SHRU, true, "lli" },
  { "__insn_shrux", TILEGX_INSN_SHRUX, true, "iii" },
  { "__insn_shruxi", TILEGX_INSN_SHRUX, true, "iii" },
  { "__insn_shufflebytes", TILEGX_INSN_SHUFFLEBYTES, true, "llll" },
  { "__insn_shufflebytes1", TILEGX_INSN_SHUFFLEBYTES1, true, "lll" },
  { "__insn_st", TILEGX_INSN_ST, false, "vpl" },
  { "__insn_st1", TILEGX_INSN_ST1, false, "vpl" },
  { "__insn_st2", TILEGX_INSN_ST2, false, "vpl" },
  { "__insn_st4", TILEGX_INSN_ST4, false, "vpl" },
  { "__insn_stnt", TILEGX_INSN_STNT, false, "vpl" },
  { "__insn_stnt1", TILEGX_INSN_STNT1, false, "vpl" },
  { "__insn_stnt2", TILEGX_INSN_STNT2, false, "vpl" },
  { "__insn_stnt4", TILEGX_INSN_STNT4, false, "vpl" },
  { "__insn_sub", TILEGX_INSN_SUB, true, "lll" },
  { "__insn_subx", TILEGX_INSN_SUBX, true, "iii" },
  { "__insn_subxsc", TILEGX_INSN_SUBXSC, true, "iii" },
  { "__insn_tblidxb0", TILEGX_INSN_TBLIDXB0, true, "lll" },
  { "__insn_tblidxb1", TILEGX_INSN_TBLIDXB1, true, "lll" },
  { "__insn_tblidxb2", TILEGX_INSN_TBLIDXB2, true, "lll" },
  { "__insn_tblidxb3", TILEGX_INSN_TBLIDXB3, true, "lll" },
  { "__insn_v1add", TILEGX_INSN_V1ADD, true, "lll" },
  { "__insn_v1addi", TILEGX_INSN_V1ADDI, true, "lll" },
  { "__insn_v1adduc", TILEGX_INSN_V1ADDUC, true, "lll" },
  { "__insn_v1adiffu", TILEGX_INSN_V1ADIFFU, true, "lll" },
  { "__insn_v1avgu", TILEGX_INSN_V1AVGU, true, "lll" },
  { "__insn_v1cmpeq", TILEGX_INSN_V1CMPEQ, true, "lll" },
  { "__insn_v1cmpeqi", TILEGX_INSN_V1CMPEQI, true, "lll" },
  { "__insn_v1cmples", TILEGX_INSN_V1CMPLES, true, "lll" },
  { "__insn_v1cmpleu", TILEGX_INSN_V1CMPLEU, true, "lll" },
  { "__insn_v1cmplts", TILEGX_INSN_V1CMPLTS, true, "lll" },
  { "__insn_v1cmpltsi", TILEGX_INSN_V1CMPLTSI, true, "lll" },
  { "__insn_v1cmpltu", TILEGX_INSN_V1CMPLTU, true, "lll" },
  { "__insn_v1cmpltui", TILEGX_INSN_V1CMPLTUI, true, "lll" },
  { "__insn_v1cmpne", TILEGX_INSN_V1CMPNE, true, "lll" },
  { "__insn_v1ddotpu", TILEGX_INSN_V1DDOTPU, true, "lll" },
  { "__insn_v1ddotpua", TILEGX_INSN_V1DDOTPUA, true, "llll" },
  { "__insn_v1ddotpus", TILEGX_INSN_V1DDOTPUS, true, "lll" },
  { "__insn_v1ddotpusa", TILEGX_INSN_V1DDOTPUSA, true, "llll" },
  { "__insn_v1dotp", TILEGX_INSN_V1DOTP, true, "lll" },
  { "__insn_v1dotpa", TILEGX_INSN_V1DOTPA, true, "llll" },
  { "__insn_v1dotpu", TILEGX_INSN_V1DOTPU, true, "lll" },
  { "__insn_v1dotpua", TILEGX_INSN_V1DOTPUA, true, "llll" },
  { "__insn_v1dotpus", TILEGX_INSN_V1DOTPUS, true, "lll" },
  { "__insn_v1dotpusa", TILEGX_INSN_V1DOTPUSA, true, "llll" },
  { "__insn_v1int_h", TILEGX_INSN_V1INT_H, true, "lll" },
  { "__insn_v1int_l", TILEGX_INSN_V1INT_L, true, "lll" },
  { "__insn_v1maxu", TILEGX_INSN_V1MAXU, true, "lll" },
  { "__insn_v1maxui", TILEGX_INSN_V1MAXUI, true, "lll" },
  { "__insn_v1minu", TILEGX_INSN_V1MINU, true, "lll" },
  { "__insn_v1minui", TILEGX_INSN_V1MINUI, true, "lll" },
  { "__insn_v1mnz", TILEGX_INSN_V1MNZ, true, "lll" },
  { "__insn_v1multu", TILEGX_INSN_V1MULTU, true, "lll" },
  { "__insn_v1mulu", TILEGX_INSN_V1MULU, true, "lll" },
  { "__insn_v1mulus", TILEGX_INSN_V1MULUS, true, "lll" },
  { "__insn_v1mz", TILEGX_INSN_V1MZ, true, "lll" },
  { "__insn_v1sadau", TILEGX_INSN_V1SADAU, true, "llll" },
  { "__insn_v1sadu", TILEGX_INSN_V1SADU, true, "lll" },
  { "__insn_v1shl", TILEGX_INSN_V1SHL, true, "lll" },
  { "__insn_v1shli", TILEGX_INSN_V1SHLI, true, "lll" },
  { "__insn_v1shrs", TILEGX_INSN_V1SHRS, true, "lll" },
  { "__insn_v1shrsi", TILEGX_INSN_V1SHRSI, true, "lll" },
  { "__insn_v1shru", TILEGX_INSN_V1SHRU, true, "lll" },
  { "__insn_v1shrui", TILEGX_INSN_V1SHRUI, true, "lll" },
  { "__insn_v1sub", TILEGX_INSN_V1SUB, true, "lll" },
  { "__insn_v1subuc", TILEGX_INSN_V1SUBUC, true, "lll" },
  { "__insn_v2add", TILEGX_INSN_V2ADD, true, "lll" },
  { "__insn_v2addi", TILEGX_INSN_V2ADDI, true, "lll" },
  { "__insn_v2addsc", TILEGX_INSN_V2ADDSC, true, "lll" },
  { "__insn_v2adiffs", TILEGX_INSN_V2ADIFFS, true, "lll" },
  { "__insn_v2avgs", TILEGX_INSN_V2AVGS, true, "lll" },
  { "__insn_v2cmpeq", TILEGX_INSN_V2CMPEQ, true, "lll" },
  { "__insn_v2cmpeqi", TILEGX_INSN_V2CMPEQI, true, "lll" },
  { "__insn_v2cmples", TILEGX_INSN_V2CMPLES, true, "lll" },
  { "__insn_v2cmpleu", TILEGX_INSN_V2CMPLEU, true, "lll" },
  { "__insn_v2cmplts", TILEGX_INSN_V2CMPLTS, true, "lll" },
  { "__insn_v2cmpltsi", TILEGX_INSN_V2CMPLTSI, true, "lll" },
  { "__insn_v2cmpltu", TILEGX_INSN_V2CMPLTU, true, "lll" },
  { "__insn_v2cmpltui", TILEGX_INSN_V2CMPLTUI, true, "lll" },
  { "__insn_v2cmpne", TILEGX_INSN_V2CMPNE, true, "lll" },
  { "__insn_v2dotp", TILEGX_INSN_V2DOTP, true, "lll" },
  { "__insn_v2dotpa", TILEGX_INSN_V2DOTPA, true, "llll" },
  { "__insn_v2int_h", TILEGX_INSN_V2INT_H, true, "lll" },
  { "__insn_v2int_l", TILEGX_INSN_V2INT_L, true, "lll" },
  { "__insn_v2maxs", TILEGX_INSN_V2MAXS, true, "lll" },
  { "__insn_v2maxsi", TILEGX_INSN_V2MAXSI, true, "lll" },
  { "__insn_v2mins", TILEGX_INSN_V2MINS, true, "lll" },
  { "__insn_v2minsi", TILEGX_INSN_V2MINSI, true, "lll" },
  { "__insn_v2mnz", TILEGX_INSN_V2MNZ, true, "lll" },
  { "__insn_v2mulfsc", TILEGX_INSN_V2MULFSC, true, "lll" },
  { "__insn_v2muls", TILEGX_INSN_V2MULS, true, "lll" },
  { "__insn_v2mults", TILEGX_INSN_V2MULTS, true, "lll" },
  { "__insn_v2mz", TILEGX_INSN_V2MZ, true, "lll" },
  { "__insn_v2packh", TILEGX_INSN_V2PACKH, true, "lll" },
  { "__insn_v2packl", TILEGX_INSN_V2PACKL, true, "lll" },
  { "__insn_v2packuc", TILEGX_INSN_V2PACKUC, true, "lll" },
  { "__insn_v2sadas", TILEGX_INSN_V2SADAS, true, "llll" },
  { "__insn_v2sadau", TILEGX_INSN_V2SADAU, true, "llll" },
  { "__insn_v2sads", TILEGX_INSN_V2SADS, true, "lll" },
  { "__insn_v2sadu", TILEGX_INSN_V2SADU, true, "lll" },
  { "__insn_v2shl", TILEGX_INSN_V2SHL, true, "lll" },
  { "__insn_v2shli", TILEGX_INSN_V2SHLI, true, "lll" },
  { "__insn_v2shlsc", TILEGX_INSN_V2SHLSC, true, "lll" },
  { "__insn_v2shrs", TILEGX_INSN_V2SHRS, true, "lll" },
  { "__insn_v2shrsi", TILEGX_INSN_V2SHRSI, true, "lll" },
  { "__insn_v2shru", TILEGX_INSN_V2SHRU, true, "lll" },
  { "__insn_v2shrui", TILEGX_INSN_V2SHRUI, true, "lll" },
  { "__insn_v2sub", TILEGX_INSN_V2SUB, true, "lll" },
  { "__insn_v2subsc", TILEGX_INSN_V2SUBSC, true, "lll" },
  { "__insn_v4add", TILEGX_INSN_V4ADD, true, "lll" },
  { "__insn_v4addsc", TILEGX_INSN_V4ADDSC, true, "lll" },
  { "__insn_v4int_h", TILEGX_INSN_V4INT_H, true, "lll" },
  { "__insn_v4int_l", TILEGX_INSN_V4INT_L, true, "lll" },
  { "__insn_v4packsc", TILEGX_INSN_V4PACKSC, true, "lll" },
  { "__insn_v4shl", TILEGX_INSN_V4SHL, true, "lll" },
  { "__insn_v4shlsc", TILEGX_INSN_V4SHLSC, true, "lll" },
  { "__insn_v4shrs", TILEGX_INSN_V4SHRS, true, "lll" },
  { "__insn_v4shru", TILEGX_INSN_V4SHRU, true, "lll" },
  { "__insn_v4sub", TILEGX_INSN_V4SUB, true, "lll" },
  { "__insn_v4subsc", TILEGX_INSN_V4SUBSC, true, "lll" },
  { "__insn_wh64", TILEGX_INSN_WH64, false, "vp" },
  { "__insn_xor", TILEGX_INSN_XOR, true, "lll" },
  { "__insn_xori", TILEGX_INSN_XOR, true, "lll" },
  { "__tile_network_barrier", TILEGX_NETWORK_BARRIER, false, "v" },
  { "__tile_idn0_receive", TILEGX_IDN0_RECEIVE, false, "l" },
  { "__tile_idn1_receive", TILEGX_IDN1_RECEIVE, false, "l" },
  { "__tile_idn_send", TILEGX_IDN_SEND, false, "vl" },
  { "__tile_udn0_receive", TILEGX_UDN0_RECEIVE, false, "l" },
  { "__tile_udn1_receive", TILEGX_UDN1_RECEIVE, false, "l" },
  { "__tile_udn2_receive", TILEGX_UDN2_RECEIVE, false, "l" },
  { "__tile_udn3_receive", TILEGX_UDN3_RECEIVE, false, "l" },
  { "__tile_udn_send", TILEGX_UDN_SEND, false, "vl" },
};
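
/* From user code these surface as ordinary intrinsics.  For example,
   given the "lll" signature row for __insn_v1add above, a call such as

     unsigned long long sum = __insn_v1add (a, b);

   takes two 64-bit values and performs an eight-lane byte-wise add;
   the signature letters map to C types via char_to_type below.  */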

/* Convert a character in a builtin type string to a tree type.  */
static tree
char_to_type (char c)
{
  static tree volatile_ptr_type_node = NULL;
  static tree volatile_const_ptr_type_node = NULL;

  if (volatile_ptr_type_node == NULL)
    {
      volatile_ptr_type_node =
	build_pointer_type (build_qualified_type (void_type_node,
						  TYPE_QUAL_VOLATILE));
      volatile_const_ptr_type_node =
	build_pointer_type (build_qualified_type (void_type_node,
						  TYPE_QUAL_CONST
						  | TYPE_QUAL_VOLATILE));
    }

  switch (c)
    {
    case 'v':
      return void_type_node;
    case 'i':
      return unsigned_type_node;
    case 'l':
      return long_long_unsigned_type_node;
    case 'p':
      return volatile_ptr_type_node;
    case 'k':
      return volatile_const_ptr_type_node;
    default:
      gcc_unreachable ();
    }
}
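
/* Decoding example: the "lpl" signature used by __insn_exch above reads
   as 'long long unsigned fn (volatile void *, long long unsigned)' --
   the first character is the return type and the rest are the
   argument types.  */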

/* Implement TARGET_INIT_BUILTINS.  */
static void
tilegx_init_builtins (void)
{
  size_t i;

  for (i = 0; i < ARRAY_SIZE (tilegx_builtins); i++)
    {
      const struct tilegx_builtin_def *p = &tilegx_builtins[i];
      tree ftype, ret_type, arg_type_list = void_list_node;
      tree decl;
      int j;

      for (j = strlen (p->type) - 1; j > 0; j--)
	{
	  arg_type_list =
	    tree_cons (NULL_TREE, char_to_type (p->type[j]), arg_type_list);
	}

      ret_type = char_to_type (p->type[0]);

      ftype = build_function_type (ret_type, arg_type_list);

      decl = add_builtin_function (p->name, ftype, p->code, BUILT_IN_MD,
				   NULL, NULL);

      if (p->is_const)
	TREE_READONLY (decl) = 1;
      TREE_NOTHROW (decl) = 1;

      if (tilegx_builtin_info[p->code].fndecl == NULL)
	tilegx_builtin_info[p->code].fndecl = decl;
    }
}

/* Implement TARGET_EXPAND_BUILTIN.  */
static rtx
tilegx_expand_builtin (tree exp,
		       rtx target,
		       rtx subtarget ATTRIBUTE_UNUSED,
		       machine_mode mode ATTRIBUTE_UNUSED,
		       int ignore ATTRIBUTE_UNUSED)
{
#define MAX_BUILTIN_ARGS 4

  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arg;
  call_expr_arg_iterator iter;
  enum insn_code icode;
  rtx op[MAX_BUILTIN_ARGS + 1], pat;
  int opnum;
  bool nonvoid;
  insn_gen_fn fn;

  if (fcode >= TILEGX_BUILTIN_max)
    internal_error ("bad builtin fcode");
  icode = tilegx_builtin_info[fcode].icode;
  if (icode == 0)
    internal_error ("bad builtin icode");

  nonvoid = TREE_TYPE (TREE_TYPE (fndecl)) != void_type_node;

  opnum = nonvoid;
  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
    {
      const struct insn_operand_data *insn_op;

      if (arg == error_mark_node)
	return NULL_RTX;
      if (opnum > MAX_BUILTIN_ARGS)
	return NULL_RTX;

      insn_op = &insn_data[icode].operand[opnum];

      op[opnum] = expand_expr (arg, NULL_RTX, insn_op->mode, EXPAND_NORMAL);

      if (!(*insn_op->predicate) (op[opnum], insn_op->mode))
	{
	  machine_mode opmode = insn_op->mode;

	  /* pointer_operand and pmode_register_operand operands do
	     not specify a mode, so use the operand's mode instead
	     (which should always be right by the time we get here,
	     except for constants, which are VOIDmode).  */
	  if (opmode == VOIDmode)
	    {
	      machine_mode m = GET_MODE (op[opnum]);
	      gcc_assert (m == Pmode || m == VOIDmode);
	      opmode = DImode;
	    }

	  op[opnum] = copy_to_mode_reg (opmode, op[opnum]);
	}

      if (!(*insn_op->predicate) (op[opnum], insn_op->mode))
	{
	  /* We still failed to meet the predicate even after moving
	     into a register.  Assume we needed an immediate.  */
	  error_at (EXPR_LOCATION (exp),
		    "operand must be an immediate of the right size");
	  return const0_rtx;
	}

      opnum++;
    }

  if (nonvoid)
    {
      machine_mode tmode = insn_data[icode].operand[0].mode;
      if (!target
	  || GET_MODE (target) != tmode
	  || !(*insn_data[icode].operand[0].predicate) (target, tmode))
	{
	  if (tmode == VOIDmode)
	    {
	      /* get the mode from the return type.  */
	      tmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl)));
	    }
	  target = gen_reg_rtx (tmode);
	}
      op[0] = target;
    }

  fn = GEN_FCN (icode);
  switch (opnum)
    {
    case 0:
      pat = fn (NULL_RTX);
      break;
    case 1:
      pat = fn (op[0]);
      break;
    case 2:
      pat = fn (op[0], op[1]);
      break;
    case 3:
      pat = fn (op[0], op[1], op[2]);
      break;
    case 4:
      pat = fn (op[0], op[1], op[2], op[3]);
      break;
    case 5:
      pat = fn (op[0], op[1], op[2], op[3], op[4]);
      break;
    default:
      gcc_unreachable ();
    }
  if (!pat)
    return NULL_RTX;

  /* If we are generating a prefetch, tell the scheduler not to move
     it around.  */
  if (GET_CODE (pat) == PREFETCH)
    PREFETCH_SCHEDULE_BARRIER_P (pat) = true;

  emit_insn (pat);

  if (nonvoid)
    return target;
  else
    return const0_rtx;
}

/* Implement TARGET_BUILTIN_DECL.  */
static tree
tilegx_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
{
  if (code >= TILEGX_BUILTIN_max)
    return error_mark_node;

  return tilegx_builtin_info[code].fndecl;
}

/* Return whether REGNO needs to be saved in the stack frame.  */
static bool
need_to_save_reg (unsigned int regno)
{
  if (!fixed_regs[regno] && !call_used_regs[regno]
      && df_regs_ever_live_p (regno))
    return true;

  if (flag_pic
      && (regno == PIC_OFFSET_TABLE_REGNUM
	  || regno == TILEGX_PIC_TEXT_LABEL_REGNUM)
      && (crtl->uses_pic_offset_table || crtl->saves_all_registers))
    return true;

  if (crtl->calls_eh_return)
    {
      unsigned i;
      for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; i++)
	{
	  if (regno == EH_RETURN_DATA_REGNO (i))
	    return true;
	}
    }

  return false;
}

/* Return the size of the register save area.  This function is only
   correct starting with local register allocation.  */
static int
tilegx_saved_regs_size (void)
{
  int reg_save_size = 0;
  int regno;
  int offset_to_frame;
  int align_mask;

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (need_to_save_reg (regno))
      reg_save_size += UNITS_PER_WORD;

  /* Pad out the register save area if necessary to make
     frame_pointer_rtx be as aligned as the stack pointer.  */
  offset_to_frame = crtl->args.pretend_args_size + reg_save_size;
  align_mask = (STACK_BOUNDARY / BITS_PER_UNIT) - 1;
  reg_save_size += (-offset_to_frame) & align_mask;

  return reg_save_size;
}

/* Round up frame size SIZE.  */
static int
round_frame_size (int size)
{
  return ((size + STACK_BOUNDARY / BITS_PER_UNIT - 1)
	  & -STACK_BOUNDARY / BITS_PER_UNIT);
}
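
/* For illustration, assuming a 16-byte alignment unit:
   round_frame_size (24) == ((24 + 15) & -16) == 32.  The expression
   relies on the alignment unit being a power of two, so that -unit
   is the mask clearing the low bits.  */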

/* Emit a store in the stack frame to save REGNO at address ADDR, and
   emit the corresponding REG_CFA_OFFSET note described by CFA and
   CFA_OFFSET.  Return the emitted insn.  */
static rtx_insn *
frame_emit_store (int regno, int regno_note, rtx addr, rtx cfa,
		  int cfa_offset)
{
  rtx reg = gen_rtx_REG (DImode, regno);
  rtx mem = gen_frame_mem (DImode, addr);
  rtx mov = gen_movdi (mem, reg);

  /* Describe what just happened in a way that dwarf understands.  We
     use temporary registers to hold the address to make scheduling
     easier, and use the REG_CFA_OFFSET to describe the address as an
     offset from the CFA.  */
  rtx reg_note = gen_rtx_REG (DImode, regno_note);
  rtx cfa_relative_addr = gen_rtx_PLUS (Pmode, cfa, GEN_INT (cfa_offset));
  rtx cfa_relative_mem = gen_frame_mem (DImode, cfa_relative_addr);
  rtx real = gen_rtx_SET (cfa_relative_mem, reg_note);
  add_reg_note (mov, REG_CFA_OFFSET, real);

  return emit_insn (mov);
}

/* Emit a load in the stack frame to load REGNO from address ADDR.
   Add a REG_CFA_RESTORE note to CFA_RESTORES if CFA_RESTORES is
   non-null.  Return the emitted insn.  */
static rtx_insn *
frame_emit_load (int regno, rtx addr, rtx *cfa_restores)
{
  rtx reg = gen_rtx_REG (DImode, regno);
  rtx mem = gen_frame_mem (DImode, addr);
  if (cfa_restores)
    *cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg, *cfa_restores);
  return emit_insn (gen_movdi (reg, mem));
}

/* Helper function to set RTX_FRAME_RELATED_P on instructions,
   including sequences.  */
static rtx
set_frame_related_p (void)
{
  rtx_insn *seq = get_insns ();
  rtx_insn *insn;

  end_sequence ();

  if (!seq)
    return NULL_RTX;

  if (INSN_P (seq))
    {
      insn = seq;
      while (insn != NULL_RTX)
	{
	  RTX_FRAME_RELATED_P (insn) = 1;
	  insn = NEXT_INSN (insn);
	}
      seq = emit_insn (seq);

      /* We didn't actually emit a sequence, so return our insns.  */
      return seq;
    }
  else
    {
      seq = emit_insn (seq);
      RTX_FRAME_RELATED_P (seq) = 1;
      return seq;
    }
}

#define FRP(exp) (start_sequence (), exp, set_frame_related_p ())

/* This emits code for 'sp += offset'.

   The ABI only allows us to modify 'sp' in a single 'addi' or
   'addli', so the backtracer understands it.  Larger amounts cannot
   use those instructions, so are added by placing the offset into a
   large register and using 'add'.

   This happens after reload, so we need to expand it ourselves.  */
static rtx_insn *
emit_sp_adjust (int offset, int *next_scratch_regno, bool frame_related,
		rtx reg_notes)
{
  rtx to_add;
  rtx imm_rtx = GEN_INT (offset);
  rtx pat;
  rtx_insn *insn;

  if (satisfies_constraint_J (imm_rtx))
    {
      /* We can add this using a single immediate add.  */
      to_add = imm_rtx;
    }
  else
    {
      rtx tmp = gen_rtx_REG (Pmode, (*next_scratch_regno)--);
      tilegx_expand_set_const64 (tmp, imm_rtx);
      to_add = tmp;
    }

  /* Actually adjust the stack pointer.  */
  if (TARGET_32BIT)
    pat = gen_sp_adjust_32bit (stack_pointer_rtx, stack_pointer_rtx, to_add);
  else
    pat = gen_sp_adjust (stack_pointer_rtx, stack_pointer_rtx, to_add);

  insn = emit_insn (pat);
  REG_NOTES (insn) = reg_notes;

  /* Describe what just happened in a way that dwarf understands.  */
  if (frame_related)
    {
      rtx real = gen_rtx_SET (stack_pointer_rtx,
			      gen_rtx_PLUS (Pmode, stack_pointer_rtx,
					    imm_rtx));
      RTX_FRAME_RELATED_P (insn) = 1;
      add_reg_note (insn, REG_CFA_ADJUST_CFA, real);
    }

  return insn;
}

/* Return whether the current function is leaf.  This takes into
   account whether the function calls tls_get_addr.  */
static bool
tilegx_current_function_is_leaf (void)
{
  return crtl->is_leaf && !cfun->machine->calls_tls_get_addr;
}

/* Return the frame size.  */
static int
compute_total_frame_size (void)
{
  int total_size = (get_frame_size () + tilegx_saved_regs_size ()
		    + crtl->outgoing_args_size
		    + crtl->args.pretend_args_size);

  if (!tilegx_current_function_is_leaf () || cfun->calls_alloca)
    {
      /* Make room for save area in callee.  */
      total_size += STACK_POINTER_OFFSET;
    }

  return round_frame_size (total_size);
}

/* Return nonzero if this function is known to have a null epilogue.
   This allows the optimizer to omit jumps to jumps if no stack was
   created.  */
bool
tilegx_can_use_return_insn_p (void)
{
  return (reload_completed
	  && !cfun->static_chain_decl
	  && !compute_total_frame_size ()
	  && tilegx_current_function_is_leaf ()
	  && !crtl->profile && !df_regs_ever_live_p (TILEGX_LINK_REGNUM));
}

/* Returns an rtx for a stack slot at 'FP + offset_from_fp'.  If there
   is a frame pointer, it computes the value relative to
   that.  Otherwise it uses the stack pointer.  */
static rtx
compute_frame_addr (int offset_from_fp, int *next_scratch_regno)
{
  rtx base_reg_rtx, tmp_reg_rtx, offset_rtx;
  int offset_from_base;

  if (frame_pointer_needed)
    {
      base_reg_rtx = hard_frame_pointer_rtx;
      offset_from_base = offset_from_fp;
    }
  else
    {
      int offset_from_sp = compute_total_frame_size () + offset_from_fp;
      offset_from_base = offset_from_sp;
      base_reg_rtx = stack_pointer_rtx;
    }

  if (offset_from_base == 0)
    return base_reg_rtx;

  /* Compute the new value of the stack pointer.  */
  tmp_reg_rtx = gen_rtx_REG (Pmode, (*next_scratch_regno)--);
  offset_rtx = GEN_INT (offset_from_base);

  if (!add_operand (offset_rtx, Pmode))
    {
      expand_set_cint64 (tmp_reg_rtx, offset_rtx);
      offset_rtx = tmp_reg_rtx;
    }

  emit_insn (gen_rtx_SET (tmp_reg_rtx,
			  gen_rtx_PLUS (Pmode, base_reg_rtx, offset_rtx)));

  return tmp_reg_rtx;
}

/* The stack frame looks like this:
	 +-------------+
	 |    ...      |
	 |  incoming   |
	 | stack args  |
   AP -> +-------------+
	 | caller's HFP|
	 +-------------+
	 | lr save     |
  HFP -> +-------------+
	 |  var args   |
	 |  reg save   | crtl->args.pretend_args_size bytes
	 +-------------+
	 |    ...      |
	 | saved regs  | tilegx_saved_regs_size() bytes
   FP -> +-------------+
	 |    ...      |
	 |   vars      | get_frame_size() bytes
	 +-------------+
	 |    ...      |
	 |  outgoing   |
	 | stack args  | crtl->outgoing_args_size bytes
	 +-------------+
	 | HFP         | ptr_size bytes (only here if nonleaf / alloca)
	 +-------------+
	 | callee lr   | ptr_size bytes (only here if nonleaf / alloca)
	 |  save       |
   SP -> +-------------+

   HFP == incoming SP.

   For functions with a frame larger than 32767 bytes, or which use
   alloca (), r52 is used as a frame pointer.  Otherwise there is no
   frame pointer.

   FP is saved at SP+ptr_size before calling a subroutine so the
   callee can chain.  */
void
tilegx_expand_prologue (void)
{
#define ROUND_ROBIN_SIZE 4
  /* We round-robin through four scratch registers to hold temporary
     addresses for saving registers, to make instruction scheduling
     easier.  */
  rtx reg_save_addr[ROUND_ROBIN_SIZE] = {
    NULL_RTX, NULL_RTX, NULL_RTX, NULL_RTX
  };
  rtx insn, cfa;
  unsigned int which_scratch;
  int offset, start_offset, regno;

  /* A register that holds a copy of the incoming fp.  */
  int fp_copy_regno = -1;

  /* A register that holds a copy of the incoming sp.  */
  int sp_copy_regno = -1;

  /* Next scratch register number to hand out (postdecrementing).  */
  int next_scratch_regno = 29;

  int total_size = compute_total_frame_size ();

  if (flag_stack_usage_info)
    current_function_static_stack_size = total_size;

  /* Save lr first in its special location because code after this
     might use the link register as a scratch register.  */
  if (df_regs_ever_live_p (TILEGX_LINK_REGNUM) || crtl->calls_eh_return)
    {
      FRP (frame_emit_store (TILEGX_LINK_REGNUM, TILEGX_LINK_REGNUM,
                             stack_pointer_rtx, stack_pointer_rtx, 0));
      emit_insn (gen_blockage ());
    }

  if (total_size == 0)
    {
      /* Load the PIC register if needed.  */
      if (flag_pic && crtl->uses_pic_offset_table)
        load_pic_register (false);

      return;
    }

  cfa = stack_pointer_rtx;

  if (frame_pointer_needed)
    {
      fp_copy_regno = next_scratch_regno--;

      /* Copy the old frame pointer aside so we can save it later.  */
      insn =
        FRP (emit_move_insn (gen_rtx_REG (word_mode, fp_copy_regno),
                             gen_lowpart (word_mode, hard_frame_pointer_rtx)));
      add_reg_note (insn, REG_CFA_REGISTER, NULL_RTX);

      /* Set up the frame pointer.  */
      insn = FRP (emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx));
      add_reg_note (insn, REG_CFA_DEF_CFA, hard_frame_pointer_rtx);
      cfa = hard_frame_pointer_rtx;
      REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;

      /* fp holds a copy of the incoming sp, in case we need to store
         it.  */
      sp_copy_regno = HARD_FRAME_POINTER_REGNUM;
    }
  else if (!tilegx_current_function_is_leaf ())
    {
      /* Copy the old stack pointer aside so we can save it later.  */
      sp_copy_regno = next_scratch_regno--;
      emit_move_insn (gen_rtx_REG (Pmode, sp_copy_regno),
                      stack_pointer_rtx);
    }

  if (tilegx_current_function_is_leaf ())
    {
      /* No need to store chain pointer to caller's frame.  */
      emit_sp_adjust (-total_size, &next_scratch_regno,
                      !frame_pointer_needed, NULL_RTX);
    }
  else
    {
      /* Save the frame pointer (incoming sp value) to support
         backtracing.  First we need to create an rtx with the store
         address.  */
      rtx chain_addr = gen_rtx_REG (Pmode, next_scratch_regno--);
      rtx size_rtx = GEN_INT (-(total_size - UNITS_PER_WORD));

      if (add_operand (size_rtx, Pmode))
        {
          /* Expose more parallelism by computing this value from the
             original stack pointer, not the one after we have pushed
             the frame.  */
          rtx p = gen_rtx_PLUS (Pmode, stack_pointer_rtx, size_rtx);
          emit_insn (gen_rtx_SET (chain_addr, p));
          emit_sp_adjust (-total_size, &next_scratch_regno,
                          !frame_pointer_needed, NULL_RTX);
        }
      else
        {
          /* The stack frame is large, so just store the incoming sp
             value at *(new_sp + UNITS_PER_WORD).  */
          rtx p;
          emit_sp_adjust (-total_size, &next_scratch_regno,
                          !frame_pointer_needed, NULL_RTX);
          p = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
                            GEN_INT (UNITS_PER_WORD));
          emit_insn (gen_rtx_SET (chain_addr, p));
        }

      /* Save our frame pointer for backtrace chaining.  */
      emit_insn (gen_movdi (gen_frame_mem (DImode, chain_addr),
                            gen_rtx_REG (DImode, sp_copy_regno)));
    }

  /* Compute where to start storing registers we need to save.  */
  start_offset = -crtl->args.pretend_args_size - UNITS_PER_WORD;
  offset = start_offset;

  /* Store all registers that need saving.  */
  which_scratch = 0;
  for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
    if (need_to_save_reg (regno))
      {
        rtx r = reg_save_addr[which_scratch];
        int cfa_offset =
          frame_pointer_needed ? offset : total_size + offset;

        if (r == NULL_RTX)
          {
            int prev_scratch_regno = next_scratch_regno;
            r = compute_frame_addr (offset, &next_scratch_regno);
            if (prev_scratch_regno != next_scratch_regno)
              reg_save_addr[which_scratch] = r;
          }
        else
          {
            /* Advance to the next stack slot to store this
               register.  */
            int stride = ROUND_ROBIN_SIZE * -UNITS_PER_WORD;
            rtx p = gen_rtx_PLUS (Pmode, r, GEN_INT (stride));
            emit_insn (gen_rtx_SET (r, p));
          }

        /* Save this register to the stack (but use the old fp value
           we copied aside if appropriate).  */
        int from_regno =
          (fp_copy_regno >= 0 && regno == HARD_FRAME_POINTER_REGNUM)
          ? fp_copy_regno : regno;
        FRP (frame_emit_store (from_regno, regno, r, cfa, cfa_offset));

        offset -= UNITS_PER_WORD;
        which_scratch = (which_scratch + 1) % ROUND_ROBIN_SIZE;
      }

  /* If profiling, force that to happen after the frame is set up.  */
  if (crtl->profile)
    emit_insn (gen_blockage ());

  /* Load the PIC register if needed.  */
  if (flag_pic && crtl->uses_pic_offset_table)
    load_pic_register (false);
}

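/* For illustration (not part of the original source; register numbers
   and offsets are made up): the round-robin scheme above gives each of
   up to four save addresses its own scratch register, so consecutive
   stores do not serialize on a single address register:

     addi r29, sp, 72    ;; reg_save_addr[0]
     addi r28, sp, 64    ;; reg_save_addr[1]
     st   r29, r50       ;; first save
     st   r28, r51       ;; second save, independent of the first
     ...
     addi r29, r29, -32  ;; stride = ROUND_ROBIN_SIZE * -UNITS_PER_WORD
     st   r29, r48       ;; fifth save reuses reg_save_addr[0]
*/
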
/* Implement the epilogue and sibcall_epilogue patterns.  SIBCALL_P is
   true for a sibcall_epilogue pattern, and false for an epilogue
   pattern.  */
void
tilegx_expand_epilogue (bool sibcall_p)
{
  /* We round-robin through four scratch registers to hold temporary
     addresses for saving registers, to make instruction scheduling
     easier.  */
  rtx reg_save_addr[ROUND_ROBIN_SIZE] = {
    NULL_RTX, NULL_RTX, NULL_RTX, NULL_RTX
  };
  rtx_insn *last_insn, *insn;
  unsigned int which_scratch;
  int offset, start_offset, regno;
  rtx cfa_restores = NULL_RTX;

  /* A register that holds a copy of the incoming fp.  */
  int fp_copy_regno = -1;

  /* Next scratch register number to hand out (postdecrementing).  */
  int next_scratch_regno = 29;

  int total_size = compute_total_frame_size ();

  last_insn = get_last_insn ();

  /* Load lr first since we are going to need it first.  */
  insn = NULL;
  if (df_regs_ever_live_p (TILEGX_LINK_REGNUM))
    {
      insn = frame_emit_load (TILEGX_LINK_REGNUM,
                              compute_frame_addr (0, &next_scratch_regno),
                              &cfa_restores);
    }

  if (total_size == 0)
    {
      if (insn)
        {
          RTX_FRAME_RELATED_P (insn) = 1;
          REG_NOTES (insn) = cfa_restores;
        }
      goto done;
    }

  /* Compute where to start restoring registers.  */
  start_offset = -crtl->args.pretend_args_size - UNITS_PER_WORD;
  offset = start_offset;

  if (frame_pointer_needed)
    fp_copy_regno = next_scratch_regno--;

  /* Restore all callee-saved registers.  */
  which_scratch = 0;
  for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
    if (need_to_save_reg (regno))
      {
        rtx r = reg_save_addr[which_scratch];
        if (r == NULL_RTX)
          {
            r = compute_frame_addr (offset, &next_scratch_regno);
            reg_save_addr[which_scratch] = r;
          }
        else
          {
            /* Advance to the next stack slot to store this register.  */
            int stride = ROUND_ROBIN_SIZE * -UNITS_PER_WORD;
            rtx p = gen_rtx_PLUS (Pmode, r, GEN_INT (stride));
            emit_insn (gen_rtx_SET (r, p));
          }

        if (fp_copy_regno >= 0 && regno == HARD_FRAME_POINTER_REGNUM)
          frame_emit_load (fp_copy_regno, r, NULL);
        else
          frame_emit_load (regno, r, &cfa_restores);

        offset -= UNITS_PER_WORD;
        which_scratch = (which_scratch + 1) % ROUND_ROBIN_SIZE;
      }

  if (!tilegx_current_function_is_leaf ())
    cfa_restores =
      alloc_reg_note (REG_CFA_RESTORE, stack_pointer_rtx, cfa_restores);

  emit_insn (gen_blockage ());

  if (frame_pointer_needed)
    {
      /* Restore the old stack pointer by copying from the frame
         pointer.  */
      if (TARGET_32BIT)
        {
          insn = emit_insn (gen_sp_restore_32bit (stack_pointer_rtx,
                                                  hard_frame_pointer_rtx));
        }
      else
        {
          insn = emit_insn (gen_sp_restore (stack_pointer_rtx,
                                            hard_frame_pointer_rtx));
        }
      RTX_FRAME_RELATED_P (insn) = 1;
      REG_NOTES (insn) = cfa_restores;
      add_reg_note (insn, REG_CFA_DEF_CFA, stack_pointer_rtx);
    }
  else
    {
      insn = emit_sp_adjust (total_size, &next_scratch_regno, true,
                             cfa_restores);
    }

  if (crtl->calls_eh_return)
    {
      if (TARGET_32BIT)
        emit_insn (gen_sp_adjust_32bit (stack_pointer_rtx, stack_pointer_rtx,
                                        EH_RETURN_STACKADJ_RTX));
      else
        emit_insn (gen_sp_adjust (stack_pointer_rtx, stack_pointer_rtx,
                                  EH_RETURN_STACKADJ_RTX));
    }

  /* Restore the old frame pointer.  */
  if (frame_pointer_needed)
    {
      insn = emit_move_insn (gen_lowpart (DImode, hard_frame_pointer_rtx),
                             gen_rtx_REG (DImode, fp_copy_regno));
      add_reg_note (insn, REG_CFA_RESTORE, hard_frame_pointer_rtx);
    }

  /* Mark the pic registers as live outside of the function.  */
  if (flag_pic)
    {
      emit_use (cfun->machine->text_label_rtx);
      emit_use (cfun->machine->got_rtx);
    }

done:
  if (!sibcall_p)
    {
      emit_jump_insn (gen__return ());
    }
  else
    {
      emit_use (gen_rtx_REG (Pmode, TILEGX_LINK_REGNUM));
    }

  /* Mark all insns we just emitted as frame-related.  */
  for (; last_insn != NULL_RTX; last_insn = next_insn (last_insn))
    RTX_FRAME_RELATED_P (last_insn) = 1;
}

#undef ROUND_ROBIN_SIZE

/* Implement INITIAL_ELIMINATION_OFFSET.  */
int
tilegx_initial_elimination_offset (int from, int to)
{
  int total_size = compute_total_frame_size ();

  if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    {
      return (total_size - crtl->args.pretend_args_size
              - tilegx_saved_regs_size ());
    }
  else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
    {
      return -(crtl->args.pretend_args_size + tilegx_saved_regs_size ());
    }
  else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    {
      return STACK_POINTER_OFFSET + total_size;
    }
  else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
    {
      return STACK_POINTER_OFFSET;
    }
  else
    gcc_unreachable ();
}

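/* For illustration (not part of the original source), tying the cases
   above back to the frame diagram for an assumed frame with
   total_size 80, pretend_args_size 0 and 16 bytes of saved registers:

     FP -> SP  : 80 - 0 - 16 = 64
     FP -> HFP : -(0 + 16)   = -16
     AP -> SP  : STACK_POINTER_OFFSET + 80
     AP -> HFP : STACK_POINTER_OFFSET

   i.e. FP sits 64 bytes above SP and 16 bytes below the incoming
   HFP.  */
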
/* Return an RTX indicating where the return address to the calling
   function can be found.  */
rtx
tilegx_return_addr (int count, rtx frame ATTRIBUTE_UNUSED)
{
  if (count != 0)
    return const0_rtx;

  return get_hard_reg_initial_val (Pmode, TILEGX_LINK_REGNUM);
}

/* Implement EH_RETURN_HANDLER_RTX.  The MEM needs to be volatile to
   prevent it from being deleted.  */
rtx
tilegx_eh_return_handler_rtx (void)
{
  rtx tmp = gen_frame_mem (Pmode, hard_frame_pointer_rtx);
  MEM_VOLATILE_P (tmp) = true;
  return tmp;
}

/* Implement TARGET_CONDITIONAL_REGISTER_USAGE.  */
static void
tilegx_conditional_register_usage (void)
{
  global_regs[TILEGX_NETORDER_REGNUM] = 1;
  /* TILEGX_PIC_TEXT_LABEL_REGNUM is conditionally used.  It is a
     member of fixed_regs, and therefore must be a member of
     call_used_regs, but it is not a member of call_really_used_regs[]
     because it is not clobbered by a call.  */
  if (TILEGX_PIC_TEXT_LABEL_REGNUM != INVALID_REGNUM)
    {
      fixed_regs[TILEGX_PIC_TEXT_LABEL_REGNUM] = 1;
      call_used_regs[TILEGX_PIC_TEXT_LABEL_REGNUM] = 1;
    }
  if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    {
      fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
      call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
    }
}

/* Implement TARGET_FRAME_POINTER_REQUIRED.  */
static bool
tilegx_frame_pointer_required (void)
{
  return crtl->calls_eh_return || cfun->calls_alloca;
}

/* Scheduling and reorg.  */

/* Return the length of INSN.  LENGTH is the initial length computed
   by attributes in the machine-description file.  This is where we
   account for bundles.  */
int
tilegx_adjust_insn_length (rtx_insn *insn, int length)
{
  machine_mode mode = GET_MODE (insn);

  /* A non-terminating instruction in a bundle has length 0.  */
  if (mode == SImode)
    return 0;

  /* By default, there is no length adjustment.  */
  return length;
}

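/* For illustration (not part of the original source): a
   three-instruction bundle has its first two insns marked SImode
   (length 0 each) and its last marked QImode, so the bundle as a
   whole is costed as a single instruction word (8 bytes, assuming the
   default length attribute).  */
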
/* Implement TARGET_SCHED_ISSUE_RATE.  */
static int
tilegx_issue_rate (void)
{
  return 3;
}

/* Return the rtx for the jump target.  */
static rtx
get_jump_target (rtx branch)
{
  if (CALL_P (branch))
    {
      rtx call;
      call = PATTERN (branch);

      if (GET_CODE (call) == PARALLEL)
        call = XVECEXP (call, 0, 0);

      if (GET_CODE (call) == SET)
        call = SET_SRC (call);

      if (GET_CODE (call) == CALL)
        return XEXP (XEXP (call, 0), 0);
    }
  return 0;
}

/* Implement TARGET_SCHED_ADJUST_COST.  */
static int
tilegx_sched_adjust_cost (rtx_insn *insn, int dep_type, rtx_insn *dep_insn,
                          int cost, unsigned int)
{
  /* If we have a true dependence, INSN is a call, and DEP_INSN
     defines a register that is needed by the call (argument or stack
     pointer), set its latency to 0 so that it can be bundled with
     the call.  Explicitly check for and exclude the case when
     DEP_INSN defines the target of the jump.  */
  if (CALL_P (insn) && dep_type == REG_DEP_TRUE)
    {
      rtx target = get_jump_target (insn);
      if (!REG_P (target) || !set_of (target, dep_insn))
        return 0;
    }

  return cost;
}

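/* For illustration (not part of the original source): if DEP_INSN is
   "moveli r0, 42" setting up an argument and INSN is the dependent
   "jal foo", the zero latency lets the scheduler place both in one
   cycle, since the argument only needs to be valid once the callee
   runs.  A DEP_INSN that computes the call-target register itself
   keeps its normal latency.  */
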
/* Skip over irrelevant NOTEs and such and look for the next insn we
   would consider bundling.  */
static rtx_insn *
next_insn_to_bundle (rtx_insn *r, rtx_insn *end)
{
  for (; r != end; r = NEXT_INSN (r))
    {
      if (NONDEBUG_INSN_P (r)
          && GET_CODE (PATTERN (r)) != USE
          && GET_CODE (PATTERN (r)) != CLOBBER)
        return r;
    }

  return NULL;
}

/* Go through all insns, and use the information generated during
   scheduling to generate SEQUENCEs to represent bundles of
   instructions issued simultaneously.  */
static void
tilegx_gen_bundles (void)
{
  basic_block bb;
  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *insn, *next, *prev;
      rtx_insn *end = NEXT_INSN (BB_END (bb));

      prev = NULL;
      for (insn = next_insn_to_bundle (BB_HEAD (bb), end); insn; insn = next)
        {
          next = next_insn_to_bundle (NEXT_INSN (insn), end);

          /* Never wrap {} around inline asm.  */
          if (GET_CODE (PATTERN (insn)) != ASM_INPUT)
            {
              if (next == NULL_RTX || GET_MODE (next) == TImode
                  /* NOTE: The scheduler incorrectly believes a call
                     insn can execute in the same cycle as the insn
                     after the call.  This is of course impossible.
                     Really we need to fix the scheduler somehow, so
                     the code after the call gets scheduled
                     optimally.  */
                  || CALL_P (insn))
                {
                  /* Mark current insn as the end of a bundle.  */
                  PUT_MODE (insn, QImode);
                }
              else
                {
                  /* Mark it as part of a bundle.  */
                  PUT_MODE (insn, SImode);
                }
            }

          /* Delete barrier insns, because they can mess up the
             emitting of bundle braces.  If it is end-of-bundle, then
             the previous insn must be marked end-of-bundle.  */
          if (get_attr_type (insn) == TYPE_NOTHING) {
            if (GET_MODE (insn) == QImode && prev != NULL
                && GET_MODE (prev) == SImode)
              {
                PUT_MODE (prev, QImode);
              }
            delete_insn (insn);

            // Note: prev remains the same for next iteration.
          } else {
            prev = insn;
          }
        }
    }
}

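/* For illustration (not part of the original source): the mode
   encoding used above and read back by tilegx_asm_output_opcode is
   SImode for an insn inside a bundle and QImode for the last insn of
   a bundle; a TImode mark left by the scheduler on the *next* insn
   indicates a cycle break, i.e. the current insn ends its bundle.  A
   two-insn bundle therefore comes out as SImode followed by QImode
   and prints as "{ insn1 ; insn2 }".  */
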
/* Replace OLD_INSN with NEW_INSNS.  */
static void
replace_insns (rtx_insn *old_insn, rtx_insn *new_insns)
{
  emit_insn_before (new_insns, old_insn);
  delete_insn (old_insn);
}

/* Returns true if INSN is the first instruction of a pc-relative
   address computation.  */
static bool
match_pcrel_step1 (rtx insn)
{
  rtx pattern = PATTERN (insn);
  rtx src;

  if (GET_CODE (pattern) != SET)
    return false;

  src = SET_SRC (pattern);

  return (GET_CODE (src) == CONST
          && GET_CODE (XEXP (src, 0)) == UNSPEC
          && XINT (XEXP (src, 0), 1) == UNSPEC_HW1_LAST_PCREL);
}

/* Do the first replacement step in tilegx_fixup_pcrel_references.  */
static void
replace_mov_pcrel_step1 (rtx_insn *insn)
{
  rtx pattern = PATTERN (insn);
  rtx unspec;
  rtx opnds[2];
  rtx_insn *new_insns;

  gcc_assert (GET_CODE (pattern) == SET);
  opnds[0] = SET_DEST (pattern);

  gcc_assert (GET_CODE (SET_SRC (pattern)) == CONST);

  unspec = XEXP (SET_SRC (pattern), 0);
  gcc_assert (GET_CODE (unspec) == UNSPEC);
  gcc_assert (XINT (unspec, 1) == UNSPEC_HW1_LAST_PCREL);
  opnds[1] = XVECEXP (unspec, 0, 0);

  /* We only need to replace SYMBOL_REFs, not LABEL_REFs.  */
  if (GET_CODE (opnds[1]) != SYMBOL_REF)
    return;

  start_sequence ();

  if (flag_pic != 1)
    {
      if (TARGET_32BIT)
        emit_insn (gen_mov_got32_step1_32bit (opnds[0], opnds[1]));
      else
        emit_insn (gen_mov_got32_step1 (opnds[0], opnds[1]));
    }

  new_insns = get_insns ();
  end_sequence ();

  replace_insns (insn, new_insns);
}

/* Returns true if INSN is the second instruction of a pc-relative
   address computation.  */
static bool
match_pcrel_step2 (rtx_insn *insn)
{
  rtx unspec;
  rtx addr;

  if (TARGET_32BIT)
    {
      if (recog_memoized (insn) != CODE_FOR_insn_addr_shl16insli_32bit)
        return false;
    }
  else
    {
      if (recog_memoized (insn) != CODE_FOR_insn_addr_shl16insli)
        return false;
    }

  unspec = SET_SRC (PATTERN (insn));
  addr = XVECEXP (unspec, 0, 1);

  return (GET_CODE (addr) == CONST
          && GET_CODE (XEXP (addr, 0)) == UNSPEC
          && XINT (XEXP (addr, 0), 1) == UNSPEC_HW0_PCREL);
}

/* Do the second replacement step in tilegx_fixup_pcrel_references.  */
static void
replace_mov_pcrel_step2 (rtx_insn *insn)
{
  rtx pattern = PATTERN (insn);
  rtx unspec;
  rtx addr;
  rtx opnds[3];
  rtx_insn *new_insns;
  rtx got_rtx = tilegx_got_rtx ();

  gcc_assert (GET_CODE (pattern) == SET);
  opnds[0] = SET_DEST (pattern);

  unspec = SET_SRC (pattern);
  gcc_assert (GET_CODE (unspec) == UNSPEC);
  gcc_assert (XINT (unspec, 1) == UNSPEC_INSN_ADDR_SHL16INSLI);

  opnds[1] = XVECEXP (unspec, 0, 0);

  addr = XVECEXP (unspec, 0, 1);
  gcc_assert (GET_CODE (addr) == CONST);

  unspec = XEXP (addr, 0);
  gcc_assert (GET_CODE (unspec) == UNSPEC);
  gcc_assert (XINT (unspec, 1) == UNSPEC_HW0_PCREL);
  opnds[2] = XVECEXP (unspec, 0, 0);

  /* We only need to replace SYMBOL_REFs, not LABEL_REFs.  */
  if (GET_CODE (opnds[2]) != SYMBOL_REF)
    return;

  start_sequence ();

  if (flag_pic == 1)
    {
      if (TARGET_32BIT)
        emit_insn (gen_add_got16_32bit (opnds[0], got_rtx, opnds[2]));
      else
        emit_insn (gen_add_got16 (opnds[0], got_rtx, opnds[2]));
    }
  else
    {
      if (TARGET_32BIT)
        emit_insn (gen_mov_got32_step2_32bit
                   (opnds[0], opnds[1], opnds[2]));
      else
        emit_insn (gen_mov_got32_step2 (opnds[0], opnds[1], opnds[2]));
    }

  new_insns = get_insns ();
  end_sequence ();

  replace_insns (insn, new_insns);
}

/* Do the third replacement step in tilegx_fixup_pcrel_references.  */
static void
replace_mov_pcrel_step3 (rtx_insn *insn)
{
  rtx pattern = PATTERN (insn);
  rtx unspec;
  rtx opnds[4];
  rtx_insn *new_insns;
  rtx got_rtx = tilegx_got_rtx ();
  rtx text_label_rtx = tilegx_text_label_rtx ();

  gcc_assert (GET_CODE (pattern) == SET);
  opnds[0] = SET_DEST (pattern);

  unspec = SET_SRC (pattern);
  gcc_assert (GET_CODE (unspec) == UNSPEC);
  gcc_assert (XINT (unspec, 1) == UNSPEC_MOV_PCREL_STEP3);

  opnds[1] = got_rtx;

  if (XVECEXP (unspec, 0, 0) == text_label_rtx)
    opnds[2] = XVECEXP (unspec, 0, 1);
  else
    {
      gcc_assert (XVECEXP (unspec, 0, 1) == text_label_rtx);
      opnds[2] = XVECEXP (unspec, 0, 0);
    }

  opnds[3] = XVECEXP (unspec, 0, 2);

  /* We only need to replace SYMBOL_REFs, not LABEL_REFs.  */
  if (GET_CODE (opnds[3]) != SYMBOL_REF)
    return;

  start_sequence ();

  if (flag_pic == 1)
    {
      emit_move_insn (opnds[0], gen_const_mem (Pmode, opnds[2]));
    }
  else
    {
      emit_move_insn (opnds[0], gen_rtx_PLUS (Pmode, opnds[1], opnds[2]));
      emit_move_insn (opnds[0], gen_const_mem (Pmode, opnds[0]));
    }

  new_insns = get_insns ();
  end_sequence ();

  replace_insns (insn, new_insns);
}

/* We generate PC relative SYMBOL_REFs as an optimization, to avoid
   going through the GOT when the symbol is local to the compilation
   unit.  But such a symbol requires that the common text_label that
   we generate at the beginning of the function be in the same section
   as the reference to the SYMBOL_REF.  This may not be true if we
   generate hot/cold sections.  This function looks for such cases and
   replaces such references with the longer sequence going through the
   GOT.

   We expect the following instruction sequence:
   moveli      tmp1, hw1_last(x-.L_PICLNK)          [1]
   shl16insli  tmp2, tmp1, hw0(x-.L_PICLNK)         [2]
   add<x>      tmp3, txt_label_reg, tmp2            [3]

   If we're compiling -fpic, we replace with the following sequence
   (the numbers in brackets match the instructions they're replacing
   above):

   add<x>li    tmp2, got_reg, hw0_last_got(x)       [2]
   ld<4>       tmp3, tmp2                           [3]

   If we're compiling -fPIC, we replace the whole sequence with:

   moveli      tmp1, hw1_last_got(x)                [1]
   shl16insli  tmp2, tmp1, hw0_got(x)               [2]
   add<x>      tmp3, got_reg, tmp2                  [3]
   ld<4>       tmp3, tmp3                           [3]

   Note that we're careful to disturb the instruction sequence as
   little as possible, since it's very late in the compilation
   process.  */
static void
tilegx_fixup_pcrel_references (void)
{
  rtx_insn *insn, *next_insn;
  bool same_section_as_entry = true;

  for (insn = get_insns (); insn; insn = next_insn)
    {
      next_insn = NEXT_INSN (insn);

      if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
        {
          same_section_as_entry = !same_section_as_entry;
          continue;
        }

      if (same_section_as_entry)
        continue;

      if (!(INSN_P (insn)
            && GET_CODE (PATTERN (insn)) != USE
            && GET_CODE (PATTERN (insn)) != CLOBBER))
        continue;

      if (TARGET_32BIT)
        {
          if (match_pcrel_step1 (insn))
            replace_mov_pcrel_step1 (insn);
          else if (match_pcrel_step2 (insn))
            replace_mov_pcrel_step2 (insn);
          else if (recog_memoized (insn) == CODE_FOR_mov_pcrel_step3_32bit)
            replace_mov_pcrel_step3 (insn);
        }
      else
        {
          if (match_pcrel_step1 (insn))
            replace_mov_pcrel_step1 (insn);
          else if (match_pcrel_step2 (insn))
            replace_mov_pcrel_step2 (insn);
          else if (recog_memoized (insn) == CODE_FOR_mov_pcrel_step3)
            replace_mov_pcrel_step3 (insn);
        }
    }
}

/* Ensure that no var tracking notes are emitted in the middle of a
   three-instruction bundle.  */
static void
reorder_var_tracking_notes (void)
{
  basic_block bb;
  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *insn, *next;
      rtx_insn *queue = NULL;
      bool in_bundle = false;

      for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = next)
        {
          next = NEXT_INSN (insn);

          if (INSN_P (insn))
            {
              /* Emit queued up notes at the last instruction of a
                 bundle.  */
              if (GET_MODE (insn) == QImode)
                {
                  while (queue)
                    {
                      rtx_insn *next_queue = PREV_INSN (queue);
                      SET_PREV_INSN (NEXT_INSN (insn)) = queue;
                      SET_NEXT_INSN (queue) = NEXT_INSN (insn);
                      SET_NEXT_INSN (insn) = queue;
                      SET_PREV_INSN (queue) = insn;
                      queue = next_queue;
                    }
                  in_bundle = false;
                }
              else if (GET_MODE (insn) == SImode)
                in_bundle = true;
            }
          else if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION)
            {
              if (in_bundle)
                {
                  rtx_insn *prev = PREV_INSN (insn);
                  SET_PREV_INSN (next) = prev;
                  SET_NEXT_INSN (prev) = next;

                  SET_PREV_INSN (insn) = queue;
                  queue = insn;
                }
            }
        }
    }
}

/* Perform machine dependent operations on the rtl chain INSNS.  */
static void
tilegx_reorg (void)
{
  /* We are freeing block_for_insn in the toplev to keep compatibility
     with old MDEP_REORGS that are not CFG based.  Recompute it
     now.  */
  compute_bb_for_insn ();

  if (flag_reorder_blocks_and_partition)
    {
      tilegx_fixup_pcrel_references ();
    }

  if (flag_schedule_insns_after_reload)
    {
      split_all_insns ();

      timevar_push (TV_SCHED2);
      schedule_insns ();
      timevar_pop (TV_SCHED2);

      /* Examine the schedule to group into bundles.  */
      tilegx_gen_bundles ();
    }

  df_analyze ();

  if (flag_var_tracking)
    {
      timevar_push (TV_VAR_TRACKING);
      variable_tracking_main ();
      reorder_var_tracking_notes ();
      timevar_pop (TV_VAR_TRACKING);
    }

  df_finish_pass (false);
}

/* Select a format to encode pointers in exception handling data.
   CODE is 0 for data, 1 for code labels, 2 for function pointers.
   GLOBAL is true if the symbol may be affected by dynamic
   relocations.  */
int
tilegx_asm_preferred_eh_data_format (int code ATTRIBUTE_UNUSED, int global)
{
  int type = TARGET_32BIT ? DW_EH_PE_sdata4 : DW_EH_PE_sdata8;
  return (global ? DW_EH_PE_indirect : 0) | DW_EH_PE_pcrel | type;
}

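/* For illustration (not part of the original source): with the
   standard DWARF encodings DW_EH_PE_pcrel = 0x10, DW_EH_PE_indirect =
   0x80, DW_EH_PE_sdata4 = 0x0b and DW_EH_PE_sdata8 = 0x0c, a global
   symbol on 64-bit TILE-Gx is encoded as 0x80 | 0x10 | 0x0c = 0x9c
   (indirect, pc-relative, signed 8-byte), and a local one as
   0x1c.  */
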
/* Implement TARGET_ASM_OUTPUT_MI_THUNK.  */
static void
tilegx_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
                        HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
                        tree function)
{
  rtx this_rtx, funexp, addend;
  rtx_insn *insn;

  /* Pretend to be a post-reload pass while generating rtl.  */
  reload_completed = 1;

  /* Mark the end of the (empty) prologue.  */
  emit_note (NOTE_INSN_PROLOGUE_END);

  /* Find the "this" pointer.  If the function returns a structure,
     the structure return pointer is in $1.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
    this_rtx = gen_rtx_REG (Pmode, 1);
  else
    this_rtx = gen_rtx_REG (Pmode, 0);

  /* Add DELTA to THIS_RTX.  */
  if (!(delta >= -32768 && delta <= 32767))
    {
      addend = gen_rtx_REG (Pmode, 29);
      emit_move_insn (addend, GEN_INT (delta));
    }
  else
    addend = GEN_INT (delta);

  if (TARGET_32BIT)
    emit_insn (gen_addsi3 (this_rtx, this_rtx, addend));
  else
    emit_insn (gen_adddi3 (this_rtx, this_rtx, addend));

  /* If needed, add *(*THIS_RTX + VCALL_OFFSET) to THIS_RTX.  */
  if (vcall_offset)
    {
      rtx tmp;

      tmp = gen_rtx_REG (Pmode, 29);
      emit_move_insn (tmp, gen_rtx_MEM (Pmode, this_rtx));

      if (!(vcall_offset >= -32768 && vcall_offset <= 32767))
        {
          addend = gen_rtx_REG (Pmode, 28);
          emit_move_insn (addend, GEN_INT (vcall_offset));
        }
      else
        addend = GEN_INT (vcall_offset);

      if (TARGET_32BIT)
        emit_insn (gen_addsi3 (tmp, tmp, addend));
      else
        emit_insn (gen_adddi3 (tmp, tmp, addend));

      emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));

      if (TARGET_32BIT)
        emit_insn (gen_addsi3 (this_rtx, this_rtx, tmp));
      else
        emit_insn (gen_adddi3 (this_rtx, this_rtx, tmp));
    }

  /* Generate a tail call to the target function.  */
  if (!TREE_USED (function))
    {
      assemble_external (function);
      TREE_USED (function) = 1;
    }
  funexp = XEXP (DECL_RTL (function), 0);
  funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
  insn = emit_call_insn (gen_sibcall (funexp, const0_rtx));
  SIBLING_CALL_P (insn) = 1;

  /* Run just enough of rest_of_compilation to get the insns emitted.
     There's not really enough bulk here to make other passes such as
     instruction scheduling worthwhile.  Note that use_thunk calls
     assemble_start_function and assemble_end_function.

     We don't currently bundle, but the instruction sequence is all
     serial except for the tail call, so we're only wasting one cycle.
   */
  insn = get_insns ();
  shorten_branches (insn);
  final_start_function (insn, file, 1);
  final (insn, file, 1);
  final_end_function ();

  /* Stop pretending to be a post-reload pass.  */
  reload_completed = 0;
}

/* Implement TARGET_ASM_TRAMPOLINE_TEMPLATE.  */
static void
tilegx_asm_trampoline_template (FILE *file)
{
  int ptr_mode_size = GET_MODE_SIZE (ptr_mode);
  if (TARGET_32BIT)
    {
      fprintf (file, "\tlnk r10\n");
      fprintf (file, "\taddxi r10, r10, 32\n");
      fprintf (file, "\tld4s_add r11, r10, %d\n", ptr_mode_size);
      fprintf (file, "\tld4s r10, r10\n");
      fprintf (file, "\tjr r11\n");
      fprintf (file, "\t.word 0 # <function address>\n");
      fprintf (file, "\t.word 0 # <static chain value>\n");
    }
  else
    {
      fprintf (file, "\tlnk r10\n");
      fprintf (file, "\taddi r10, r10, 32\n");
      fprintf (file, "\tld_add r11, r10, %d\n", ptr_mode_size);
      fprintf (file, "\tld r10, r10\n");
      fprintf (file, "\tjr r11\n");
      fprintf (file, "\t.quad 0 # <function address>\n");
      fprintf (file, "\t.quad 0 # <static chain value>\n");
    }
}

/* Implement TARGET_TRAMPOLINE_INIT.  */
static void
tilegx_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
{
  rtx fnaddr, chaddr;
  rtx mem;
  rtx begin_addr, end_addr;
  int ptr_mode_size = GET_MODE_SIZE (ptr_mode);

  fnaddr = copy_to_reg (XEXP (DECL_RTL (fndecl), 0));
  chaddr = copy_to_reg (static_chain);

  emit_block_move (m_tramp, assemble_trampoline_template (),
                   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);

  mem = adjust_address (m_tramp, ptr_mode,
                        TRAMPOLINE_SIZE - 2 * ptr_mode_size);
  emit_move_insn (mem, fnaddr);
  mem = adjust_address (m_tramp, ptr_mode,
                        TRAMPOLINE_SIZE - ptr_mode_size);
  emit_move_insn (mem, chaddr);

  /* Get pointers to the beginning and end of the code block.  */
  begin_addr = force_reg (Pmode, XEXP (m_tramp, 0));
  end_addr = force_reg (Pmode, plus_constant (Pmode, XEXP (m_tramp, 0),
                                              TRAMPOLINE_SIZE));

  emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__clear_cache"),
                     LCT_NORMAL, VOIDmode, begin_addr, Pmode,
                     end_addr, Pmode);
}

/* Implement TARGET_PRINT_OPERAND.  */
static void
tilegx_print_operand (FILE *file, rtx x, int code)
{
  switch (code)
    {
    case 'c':
      /* Print the compare operator opcode for conditional moves.  */
      switch (GET_CODE (x))
        {
        case EQ:
          fputs ("z", file);
          break;
        case NE:
          fputs ("nz", file);
          break;
        default:
          output_operand_lossage ("invalid %%c operand");
        }
      return;

    case 'C':
      /* Print the compare operator opcode for conditional moves.  */
      switch (GET_CODE (x))
        {
        case EQ:
          fputs ("nz", file);
          break;
        case NE:
          fputs ("z", file);
          break;
        default:
          output_operand_lossage ("invalid %%C operand");
        }
      return;

    case 'd':
      {
        /* Print the compare operator opcode for conditional moves.  */
        switch (GET_CODE (x))
          {
          case EQ:
            fputs ("eq", file);
            break;
          case NE:
            fputs ("ne", file);
            break;
          default:
            output_operand_lossage ("invalid %%d operand");
          }
        return;
      }

    case 'D':
      {
        /* Print the compare operator opcode for conditional moves.  */
        switch (GET_CODE (x))
          {
          case EQ:
            fputs ("ne", file);
            break;
          case NE:
            fputs ("eq", file);
            break;
          default:
            output_operand_lossage ("invalid %%D operand");
          }
        return;
      }

    case 'H':
      {
        if (GET_CODE (x) == CONST
            && GET_CODE (XEXP (x, 0)) == UNSPEC)
          {
            rtx addr = XVECEXP (XEXP (x, 0), 0, 0);
            int unspec = XINT (XEXP (x, 0), 1);
            const char *opstr = NULL;
            switch (unspec)
              {
              case UNSPEC_HW0:
              case UNSPEC_HW0_PCREL:
                opstr = "hw0";
                break;
              case UNSPEC_HW1:
              case UNSPEC_HW1_PCREL:
                opstr = "hw1";
                break;
              case UNSPEC_HW2:
                opstr = "hw2";
                break;
              case UNSPEC_HW3:
                opstr = "hw3";
                break;
              case UNSPEC_HW0_LAST:
                opstr = "hw0_last";
                break;
              case UNSPEC_HW1_LAST:
              case UNSPEC_HW1_LAST_PCREL:
                opstr = "hw1_last";
                break;
              case UNSPEC_HW2_LAST:
              case UNSPEC_HW2_LAST_PCREL:
                opstr = "hw2_last";
                break;
              case UNSPEC_HW0_GOT:
                opstr = "hw0_got";
                break;
              case UNSPEC_HW0_LAST_GOT:
                opstr = "hw0_last_got";
                break;
              case UNSPEC_HW1_LAST_GOT:
                opstr = "hw1_last_got";
                break;
              case UNSPEC_HW0_TLS_GD:
                opstr = "hw0_tls_gd";
                break;
              case UNSPEC_HW1_LAST_TLS_GD:
                opstr = "hw1_last_tls_gd";
                break;
              case UNSPEC_HW0_TLS_IE:
                opstr = "hw0_tls_ie";
                break;
              case UNSPEC_HW1_LAST_TLS_IE:
                opstr = "hw1_last_tls_ie";
                break;
              case UNSPEC_HW0_TLS_LE:
                opstr = "hw0_tls_le";
                break;
              case UNSPEC_HW1_LAST_TLS_LE:
                opstr = "hw1_last_tls_le";
                break;
              case UNSPEC_HW0_PLT_PCREL:
                opstr = "hw0_plt";
                break;
              case UNSPEC_HW1_PLT_PCREL:
                opstr = "hw1_plt";
                break;
              case UNSPEC_HW1_LAST_PLT_PCREL:
                opstr = "hw1_last_plt";
                break;
              case UNSPEC_HW2_LAST_PLT_PCREL:
                opstr = "hw2_last_plt";
                break;
              default:
                output_operand_lossage ("invalid %%H specifier");
              }

            fputs (opstr, file);
            fputc ('(', file);
            output_addr_const (file, addr);

            if (unspec == UNSPEC_HW0_PCREL
                || unspec == UNSPEC_HW1_PCREL
                || unspec == UNSPEC_HW1_LAST_PCREL
                || unspec == UNSPEC_HW2_LAST_PCREL
                || unspec == UNSPEC_HW0_PLT_PCREL
                || unspec == UNSPEC_HW1_PLT_PCREL
                || unspec == UNSPEC_HW1_LAST_PLT_PCREL
                || unspec == UNSPEC_HW2_LAST_PLT_PCREL)
              {
                rtx addr2 = XVECEXP (XEXP (x, 0), 0, 1);
                fputs (" - ", file);
                output_addr_const (file, addr2);
              }

            fputc (')', file);
            return;
          }
        else if (symbolic_operand (x, VOIDmode))
          {
            output_addr_const (file, x);
            return;
          }
      }
      /* FALLTHRU */

    case 'h':
      {
        /* Print the low 16 bits of a constant.  */
        HOST_WIDE_INT i;
        if (CONST_INT_P (x))
          i = INTVAL (x);
        else if (GET_CODE (x) == CONST_DOUBLE)
          i = CONST_DOUBLE_LOW (x);
        else
          {
            output_operand_lossage ("invalid %%h operand");
            return;
          }
        i = trunc_int_for_mode (i, HImode);
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, i);
        return;
      }

    case 'I':
      /* Print an auto-inc memory operand.  */
      if (!MEM_P (x))
        {
          output_operand_lossage ("invalid %%I operand");
          return;
        }

      output_memory_autoinc_first = true;
      output_address (GET_MODE (x), XEXP (x, 0));
      return;

    case 'i':
      /* Print an auto-inc memory operand.  */
      if (!MEM_P (x))
        {
          output_operand_lossage ("invalid %%i operand");
          return;
        }

      output_memory_autoinc_first = false;
      output_address (GET_MODE (x), XEXP (x, 0));
      return;

    case 'j':
      {
        /* Print the low 8 bits of a constant.  */
        HOST_WIDE_INT i;
        if (CONST_INT_P (x))
          i = INTVAL (x);
        else if (GET_CODE (x) == CONST_DOUBLE)
          i = CONST_DOUBLE_LOW (x);
        else if (GET_CODE (x) == CONST_VECTOR
                 && CONST_INT_P (CONST_VECTOR_ELT (x, 0)))
          i = INTVAL (CONST_VECTOR_ELT (x, 0));
        else
          {
            output_operand_lossage ("invalid %%j operand");
            return;
          }
        i = trunc_int_for_mode (i, QImode);
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, i);
        return;
      }

    case 'P':
      {
        /* Print a constant plus one.  */
        if (!CONST_INT_P (x))
          {
            output_operand_lossage ("invalid %%P operand");
            return;
          }
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + 1);
        return;
      }

    case 'm':
    case 'M':
      {
        /* Print a bfextu-style bit range.  */
        int first_bit, last_bit;
        HOST_WIDE_INT flip = (code == 'm') ? ~0 : 0;

        if (!CONST_INT_P (x)
            || !tilegx_bitfield_operand_p (INTVAL (x) ^ flip,
                                           &first_bit, &last_bit))
          {
            output_operand_lossage ("invalid %%%c operand", code);
            return;
          }

        fprintf (file, "%d, %d", first_bit, last_bit);
        return;
      }

    case 'N':
      {
        const char *reg = NULL;

        /* Print a network register.  */
        if (!CONST_INT_P (x))
          {
            output_operand_lossage ("invalid %%N operand");
            return;
          }

        switch (INTVAL (x))
          {
          case TILEGX_NETREG_IDN0: reg = "idn0"; break;
          case TILEGX_NETREG_IDN1: reg = "idn1"; break;
          case TILEGX_NETREG_UDN0: reg = "udn0"; break;
          case TILEGX_NETREG_UDN1: reg = "udn1"; break;
          case TILEGX_NETREG_UDN2: reg = "udn2"; break;
          case TILEGX_NETREG_UDN3: reg = "udn3"; break;
          default:
            gcc_unreachable ();
          }

        fprintf (file, reg);
        return;
      }

    case 'p':
      if (GET_CODE (x) == SYMBOL_REF)
        {
          if (flag_pic && !SYMBOL_REF_LOCAL_P (x))
            fprintf (file, "plt(");
          output_addr_const (file, x);
          if (flag_pic && !SYMBOL_REF_LOCAL_P (x))
            fprintf (file, ")");
        }
      else
        output_addr_const (file, x);
      return;

    case 'r':
      /* In this case we need a register.  Use 'zero' if the operand
         is const0_rtx.  */
      if (x == const0_rtx
          || (GET_MODE (x) != VOIDmode && x == CONST0_RTX (GET_MODE (x))))
        {
          fputs ("zero", file);
          return;
        }
      else if (!REG_P (x))
        {
          output_operand_lossage ("invalid operand for 'r' specifier");
          return;
        }
      /* FALLTHRU */

    case 0:
      if (REG_P (x))
        {
          fprintf (file, "%s", reg_names[REGNO (x)]);
          return;
        }
      else if (MEM_P (x))
        {
          output_address (VOIDmode, XEXP (x, 0));
          return;
        }
      else
        {
          output_addr_const (file, x);
          return;
        }
    }

  debug_rtx (x);
  output_operand_lossage ("unable to print out operand yet; code == %d (%c)",
                          code, code);
}

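/* For illustration (not part of the original source; operand values
   are made up): %P on (const_int 7) prints "8"; %p on a non-local
   SYMBOL_REF under -fpic prints "plt(sym)"; %r on (const_int 0)
   prints "zero" while %r on a register prints its name; and %H on
   (const (unspec [sym lab] UNSPEC_HW1_LAST_PCREL)) prints
   "hw1_last(sym - lab)".  */
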
/* Implement TARGET_PRINT_OPERAND_ADDRESS.  */
static void
tilegx_print_operand_address (FILE *file, machine_mode mode, rtx addr)
{
  if (GET_CODE (addr) == POST_DEC
      || GET_CODE (addr) == POST_INC)
    {
      int offset = GET_MODE_SIZE (mode);

      gcc_assert (mode != VOIDmode);

      if (output_memory_autoinc_first)
        fprintf (file, "%s", reg_names[REGNO (XEXP (addr, 0))]);
      else
        fprintf (file, "%d",
                 GET_CODE (addr) == POST_DEC ? -offset : offset);
    }
  else if (GET_CODE (addr) == POST_MODIFY)
    {
      gcc_assert (mode != VOIDmode);

      gcc_assert (GET_CODE (XEXP (addr, 1)) == PLUS);

      if (output_memory_autoinc_first)
        fprintf (file, "%s", reg_names[REGNO (XEXP (addr, 0))]);
      else
        fprintf (file, HOST_WIDE_INT_PRINT_DEC,
                 INTVAL (XEXP (XEXP (addr, 1), 1)));
    }
  else
    tilegx_print_operand (file, addr, 'r');
}

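/* For illustration (not part of the original source): %I and %i
   cooperate on a single auto-inc MEM.  For a DImode POST_INC of r11,
   %I prints the base register "r11" and %i prints the step "8" (or
   "-8" for POST_DEC), so a hypothetical md template such as
   "ld_add\t%0, %I1, %i1" would expand to "ld_add r0, r11, 8".  */
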
/* Machine mode of current insn, for determining curly brace
   placement.  */
static machine_mode insn_mode;


/* Implement FINAL_PRESCAN_INSN.  This is used to emit bundles.  */
void
tilegx_final_prescan_insn (rtx_insn *insn)
{
  /* Record this for tilegx_asm_output_opcode to examine.  */
  insn_mode = GET_MODE (insn);
}

/* While emitting asm, are we currently inside '{' for a bundle?  */
static bool tilegx_in_bundle = false;

/* Implement ASM_OUTPUT_OPCODE.  Prepend/append curly braces as
   appropriate given the bundling information recorded by
   tilegx_gen_bundles.  */
const char *
tilegx_asm_output_opcode (FILE *stream, const char *code)
{
  bool pseudo = !strcmp (code, "pseudo");

  if (!tilegx_in_bundle && insn_mode == SImode)
    {
      /* Start a new bundle.  */
      fprintf (stream, "{\n\t");
      tilegx_in_bundle = true;
    }

  if (tilegx_in_bundle && insn_mode == QImode)
    {
      /* Close an existing bundle.  */
      static char buf[100];

      gcc_assert (strlen (code) + 3 + 1 < sizeof (buf));

      strcpy (buf, pseudo ? "" : code);
      strcat (buf, "\n\t}");
      tilegx_in_bundle = false;

      return buf;
    }
  else
    {
      return pseudo ? "" : code;
    }
}

/* Output assembler code to FILE to increment profiler label # LABELNO
   for profiling a function entry.  */
void
tilegx_function_profiler (FILE *file, int labelno ATTRIBUTE_UNUSED)
{
  if (tilegx_in_bundle)
    {
      fprintf (file, "\t}\n");
    }

  if (cfun->static_chain_decl)
    {
      fprintf (file,
               "\t{\n"
               "\taddi\tsp, sp, -16\n"
               "\tst\tsp, r10\n"
               "\t}\n");
    }

  if (flag_pic)
    {
      fprintf (file,
               "\t{\n"
               "\tmove\tr10, lr\n"
               "\tjal\tplt(%s)\n"
               "\t}\n", MCOUNT_NAME);
    }
  else
    {
      fprintf (file,
               "\t{\n"
               "\tmove\tr10, lr\n"
               "\tjal\t%s\n"
               "\t}\n", MCOUNT_NAME);
    }

  if (cfun->static_chain_decl)
    {
      fprintf (file,
               "\taddi\tsp, sp, 16\n"
               "\tld\tr10, sp\n");
    }

  tilegx_in_bundle = false;
}

/* Implement TARGET_ASM_FILE_END.  */
static void
tilegx_file_end (void)
{
  if (NEED_INDICATE_EXEC_STACK)
    file_end_indicate_exec_stack ();
}


/* Implement TARGET_TRULY_NOOP_TRUNCATION.  We represent all SI values
   as sign-extended DI values in registers.  */
static bool
tilegx_truly_noop_truncation (poly_uint64 outprec, poly_uint64 inprec)
{
  return inprec <= 32 || outprec > 32;
}

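/* For illustration (not part of the original source): under the rule
   above, truncating SImode to HImode (inprec 32 <= 32) is a no-op, as
   is any truncation whose result is still wider than 32 bits, but
   DImode -> SImode is *not* a no-op, because the result must be
   re-sign-extended to maintain the all-SI-values-are-sign-extended
   invariant.  */
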
#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS HAVE_AS_TLS

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE tilegx_option_override

#ifdef TARGET_THREAD_SSP_OFFSET
#undef TARGET_STACK_PROTECT_GUARD
#define TARGET_STACK_PROTECT_GUARD hook_tree_void_null
#endif

#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P tilegx_scalar_mode_supported_p

#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P tilegx_vector_mode_supported_p

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM tilegx_cannot_force_const_mem

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL tilegx_function_ok_for_sibcall

#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE tilegx_pass_by_reference

#undef TARGET_RETURN_IN_MSB
#define TARGET_RETURN_IN_MSB tilegx_return_in_msb

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY tilegx_return_in_memory

#undef TARGET_MODE_REP_EXTENDED
#define TARGET_MODE_REP_EXTENDED tilegx_mode_rep_extended

#undef TARGET_FUNCTION_ARG_BOUNDARY
#define TARGET_FUNCTION_ARG_BOUNDARY tilegx_function_arg_boundary

#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG tilegx_function_arg

#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE tilegx_function_arg_advance

#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE tilegx_function_value

#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE tilegx_libcall_value

#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P tilegx_function_value_regno_p

#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote

#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_false

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST tilegx_build_builtin_va_list

#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START tilegx_va_start

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS tilegx_setup_incoming_varargs

#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR tilegx_gimplify_va_arg_expr

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS tilegx_rtx_costs

#undef TARGET_EXPAND_TO_RTL_HOOK
#define TARGET_EXPAND_TO_RTL_HOOK tilegx_expand_to_rtl_hook

#undef TARGET_SHIFT_TRUNCATION_MASK
#define TARGET_SHIFT_TRUNCATION_MASK tilegx_shift_truncation_mask

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS tilegx_init_libfuncs

/* Limit to what we can reach in one addli.  */
#undef TARGET_MIN_ANCHOR_OFFSET
#define TARGET_MIN_ANCHOR_OFFSET -32768
#undef TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET 32767

#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P tilegx_legitimate_constant_p

#undef TARGET_LRA_P
#define TARGET_LRA_P hook_bool_void_false

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P tilegx_legitimate_address_p

#undef TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS tilegx_legitimize_address

#undef TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS tilegx_delegitimize_address

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS tilegx_init_builtins

#undef TARGET_BUILTIN_DECL
#define TARGET_BUILTIN_DECL tilegx_builtin_decl

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN tilegx_expand_builtin

#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE tilegx_conditional_register_usage

#undef TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED tilegx_frame_pointer_required

#undef TARGET_DELAY_SCHED2
#define TARGET_DELAY_SCHED2 true

#undef TARGET_DELAY_VARTRACK
#define TARGET_DELAY_VARTRACK true

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE tilegx_issue_rate

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST tilegx_sched_adjust_cost

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG tilegx_reorg

#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK \
  hook_bool_const_tree_hwi_hwi_const_tree_true

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK tilegx_output_mi_thunk

#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE tilegx_asm_trampoline_template

#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT tilegx_trampoline_init

#undef TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND tilegx_print_operand

#undef TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS tilegx_print_operand_address

#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END tilegx_file_end

#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"

#undef TARGET_CAN_USE_DOLOOP_P
#define TARGET_CAN_USE_DOLOOP_P can_use_doloop_if_innermost

#undef TARGET_TRULY_NOOP_TRUNCATION
#define TARGET_TRULY_NOOP_TRUNCATION tilegx_truly_noop_truncation

#undef TARGET_CONSTANT_ALIGNMENT
#define TARGET_CONSTANT_ALIGNMENT constant_alignment_word_strings

struct gcc_target targetm = TARGET_INITIALIZER;

#include "gt-tilegx.h"