/* Subroutines used for code generation on the Tilera TILEPro.
   Copyright (C) 2011-2013 Free Software Foundation, Inc.
   Contributed by Walter Lee (walt@tilera.com)

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "langhooks.h"
#include "sched-int.h"
#include "sel-sched.h"
#include "tm-constrs.h"
#include "target-def.h"
#include "tilepro-builtins.h"
#include "tilepro-multiply.h"
#include "diagnostic.h"

/* SYMBOL_REF for GOT */
static GTY(()) rtx g_got_symbol = NULL;

/* In case of a POST_INC or POST_DEC memory reference, we must report
   the mode of the memory reference from TARGET_PRINT_OPERAND to
   TARGET_PRINT_OPERAND_ADDRESS.  */
static enum machine_mode output_memory_reference_mode;

/* Report whether we're printing out the first address fragment of a
   POST_INC or POST_DEC memory reference, from TARGET_PRINT_OPERAND to
   TARGET_PRINT_OPERAND_ADDRESS.  */
static bool output_memory_autoinc_first;

/* Implement TARGET_OPTION_OVERRIDE.  */
static void
tilepro_option_override (void)
{
  /* When modulo scheduling is enabled, we still rely on regular
     scheduler for bundling.  */
  if (flag_modulo_sched)
    flag_resched_modulo_sched = 1;
}

/* Implement TARGET_SCALAR_MODE_SUPPORTED_P.  */
static bool
tilepro_scalar_mode_supported_p (enum machine_mode mode)
{
  switch (mode)
    {
    case QImode:
    case HImode:
    case SImode:
    case DImode:
      return true;

    case SFmode:
    case DFmode:
      return true;

    default:
      return false;
    }
}

100 /* Implement TARGET_VECTOR_MODE_SUPPORTED_P. */
102 tile_vector_mode_supported_p (enum machine_mode mode
)
104 return mode
== V4QImode
|| mode
== V2HImode
;
/* Implement TARGET_CANNOT_FORCE_CONST_MEM.  */
static bool
tilepro_cannot_force_const_mem (enum machine_mode mode ATTRIBUTE_UNUSED,
				rtx x ATTRIBUTE_UNUSED)
{
  return true;
}

/* Implement TARGET_FUNCTION_OK_FOR_SIBCALL.  */
static bool
tilepro_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
{
  return decl != NULL;
}

/* Implement TARGET_PASS_BY_REFERENCE.  Variable sized types are
   passed by reference.  */
static bool
tilepro_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
			   enum machine_mode mode ATTRIBUTE_UNUSED,
			   const_tree type, bool named ATTRIBUTE_UNUSED)
{
  return (type && TYPE_SIZE (type)
	  && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST);
}

/* Implement TARGET_RETURN_IN_MEMORY.  */
static bool
tilepro_return_in_memory (const_tree type, const_tree fndecl ATTRIBUTE_UNUSED)
{
  return !IN_RANGE (int_size_in_bytes (type),
		    0, TILEPRO_NUM_RETURN_REGS * UNITS_PER_WORD);
}

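/* Worked illustration: with 4-byte words, a struct of up to
   TILEPRO_NUM_RETURN_REGS * 4 bytes comes back in registers;
   anything larger, or of variable size (for which int_size_in_bytes
   returns -1 and so falls outside IN_RANGE), goes in memory.  */
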
/* Implement TARGET_FUNCTION_ARG_BOUNDARY.  */
static unsigned int
tilepro_function_arg_boundary (enum machine_mode mode, const_tree type)
{
  unsigned int alignment;

  alignment = type ? TYPE_ALIGN (type) : GET_MODE_ALIGNMENT (mode);
  if (alignment < PARM_BOUNDARY)
    alignment = PARM_BOUNDARY;
  if (alignment > STACK_BOUNDARY)
    alignment = STACK_BOUNDARY;
  return alignment;
}

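/* Illustration: on TILEPro PARM_BOUNDARY is one word and
   STACK_BOUNDARY is two words, so a type such as long long, whose
   alignment exceeds PARM_BOUNDARY, reports a doubleword boundary
   here; tilepro_function_arg then uses that to round the argument's
   register number up to an even register.  */
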
/* Implement TARGET_FUNCTION_ARG.  */
static rtx
tilepro_function_arg (cumulative_args_t cum_v,
		      enum machine_mode mode,
		      const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS cum = *get_cumulative_args (cum_v);
  int byte_size = ((mode == BLKmode)
		   ? int_size_in_bytes (type) : GET_MODE_SIZE (mode));
  bool doubleword_aligned_p;

  if (cum >= TILEPRO_NUM_ARG_REGS)
    return NULL_RTX;

  /* See whether the argument has doubleword alignment.  */
  doubleword_aligned_p =
    tilepro_function_arg_boundary (mode, type) > BITS_PER_WORD;

  if (doubleword_aligned_p)
    cum += cum & 1;

  /* The ABI does not allow parameters to be passed partially in reg
     and partially in stack.  */
  if ((cum + (byte_size + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
      > TILEPRO_NUM_ARG_REGS)
    return NULL_RTX;

  return gen_rtx_REG (mode, cum);
}

/* Implement TARGET_FUNCTION_ARG_ADVANCE.  */
static void
tilepro_function_arg_advance (cumulative_args_t cum_v,
			      enum machine_mode mode,
			      const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);

  int byte_size = ((mode == BLKmode)
		   ? int_size_in_bytes (type) : GET_MODE_SIZE (mode));
  int word_size = (byte_size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
  bool doubleword_aligned_p;

  /* See whether the argument has doubleword alignment.  */
  doubleword_aligned_p =
    tilepro_function_arg_boundary (mode, type) > BITS_PER_WORD;

  if (doubleword_aligned_p)
    *cum += *cum & 1;

  /* If the current argument does not fit in the pretend_args space,
     skip over it.  */
  if (*cum < TILEPRO_NUM_ARG_REGS
      && *cum + word_size > TILEPRO_NUM_ARG_REGS)
    *cum = TILEPRO_NUM_ARG_REGS;

  *cum += word_size;
}

/* Implement TARGET_FUNCTION_VALUE.  */
static rtx
tilepro_function_value (const_tree valtype, const_tree fn_decl_or_type,
			bool outgoing ATTRIBUTE_UNUSED)
{
  enum machine_mode mode;
  int unsigned_p;

  mode = TYPE_MODE (valtype);
  unsigned_p = TYPE_UNSIGNED (valtype);

  mode = promote_function_mode (valtype, mode, &unsigned_p,
				fn_decl_or_type, 1);

  return gen_rtx_REG (mode, 0);
}

/* Implement TARGET_LIBCALL_VALUE.  */
static rtx
tilepro_libcall_value (enum machine_mode mode,
		       const_rtx fun ATTRIBUTE_UNUSED)
{
  return gen_rtx_REG (mode, 0);
}

/* Implement FUNCTION_VALUE_REGNO_P.  */
static bool
tilepro_function_value_regno_p (const unsigned int regno)
{
  return regno < TILEPRO_NUM_RETURN_REGS;
}

/* Implement TARGET_BUILD_BUILTIN_VA_LIST.  */
static tree
tilepro_build_builtin_va_list (void)
{
  tree f_args, f_skip, record, type_decl;
  bool owp;

  record = lang_hooks.types.make_type (RECORD_TYPE);

  type_decl = build_decl (BUILTINS_LOCATION, TYPE_DECL,
			  get_identifier ("__va_list_tag"), record);

  f_args = build_decl (BUILTINS_LOCATION, FIELD_DECL,
		       get_identifier ("__args"), ptr_type_node);
  f_skip = build_decl (BUILTINS_LOCATION, FIELD_DECL,
		       get_identifier ("__skip"), ptr_type_node);

  DECL_FIELD_CONTEXT (f_args) = record;

  DECL_FIELD_CONTEXT (f_skip) = record;

  TREE_CHAIN (record) = type_decl;
  TYPE_NAME (record) = type_decl;
  TYPE_FIELDS (record) = f_args;
  TREE_CHAIN (f_args) = f_skip;

  /* We know this is being padded and we want it too.  It is an
     internal type so hide the warnings from the user.  */
  owp = warn_padded;
  warn_padded = false;

  layout_type (record);

  warn_padded = owp;

  /* The correct type is an array type of one element.  */
  return record;
}

/* Implement TARGET_EXPAND_BUILTIN_VA_START.  */
static void
tilepro_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
{
  tree f_args, f_skip;
  tree args, skip, t;

  f_args = TYPE_FIELDS (TREE_TYPE (valist));
  f_skip = TREE_CHAIN (f_args);

  args =
    build3 (COMPONENT_REF, TREE_TYPE (f_args), valist, f_args, NULL_TREE);
  skip =
    build3 (COMPONENT_REF, TREE_TYPE (f_skip), valist, f_skip, NULL_TREE);

  /* Find the __args area.  */
  t = make_tree (TREE_TYPE (args), virtual_incoming_args_rtx);
  t = fold_build_pointer_plus_hwi (t,
				   UNITS_PER_WORD *
				   (crtl->args.info - TILEPRO_NUM_ARG_REGS));

  if (crtl->args.pretend_args_size > 0)
    t = fold_build_pointer_plus_hwi (t, -STACK_POINTER_OFFSET);

  t = build2 (MODIFY_EXPR, TREE_TYPE (args), args, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the __skip area.  */
  t = make_tree (TREE_TYPE (skip), virtual_incoming_args_rtx);
  t = fold_build_pointer_plus_hwi (t, -STACK_POINTER_OFFSET);

  t = build2 (MODIFY_EXPR, TREE_TYPE (skip), skip, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}

/* Implement TARGET_SETUP_INCOMING_VARARGS.  */
static void
tilepro_setup_incoming_varargs (cumulative_args_t cum,
				enum machine_mode mode,
				tree type, int *pretend_args, int no_rtl)
{
  CUMULATIVE_ARGS local_cum = *get_cumulative_args (cum);
  int first_reg;

  /* The caller has advanced CUM up to, but not beyond, the last named
     argument.  Advance a local copy of CUM past the last "real" named
     argument, to find out how many registers are left over.  */
  targetm.calls.function_arg_advance (pack_cumulative_args (&local_cum),
				      mode, type, true);
  first_reg = local_cum;

  if (local_cum < TILEPRO_NUM_ARG_REGS)
    {
      *pretend_args = UNITS_PER_WORD * (TILEPRO_NUM_ARG_REGS - first_reg);

      if (!no_rtl)
	{
	  alias_set_type set = get_varargs_alias_set ();
	  rtx tmp =
	    gen_rtx_MEM (BLKmode, plus_constant (Pmode,
						 virtual_incoming_args_rtx,
						 -STACK_POINTER_OFFSET -
						 UNITS_PER_WORD *
						 (TILEPRO_NUM_ARG_REGS -
						  first_reg)));
	  MEM_NOTRAP_P (tmp) = 1;
	  set_mem_alias_set (tmp, set);
	  move_block_from_reg (first_reg, tmp,
			       TILEPRO_NUM_ARG_REGS - first_reg);
	}
    }
  else
    *pretend_args = 0;
}

/* Implement TARGET_GIMPLIFY_VA_ARG_EXPR.  Gimplify va_arg by updating
   the va_list structure VALIST as required to retrieve an argument of
   type TYPE, and returning that argument.

   ret = va_arg(VALIST, TYPE);

   generates code equivalent to:

	paddedsize = (sizeof(TYPE) + 3) & -4;
	if ((VALIST.__args + paddedsize > VALIST.__skip)
	    & (VALIST.__args <= VALIST.__skip))
	  addr = VALIST.__skip + STACK_POINTER_OFFSET;
	else
	  addr = VALIST.__args;
	VALIST.__args = addr + paddedsize;
	ret = *(TYPE *)addr;  */
static tree
tilepro_gimplify_va_arg_expr (tree valist, tree type, gimple_seq * pre_p,
			      gimple_seq * post_p ATTRIBUTE_UNUSED)
{
  tree f_args, f_skip;
  tree args, skip;
  HOST_WIDE_INT size, rsize;
  tree addr, tmp;
  bool pass_by_reference_p;

  f_args = TYPE_FIELDS (va_list_type_node);
  f_skip = TREE_CHAIN (f_args);

  args =
    build3 (COMPONENT_REF, TREE_TYPE (f_args), valist, f_args, NULL_TREE);
  skip =
    build3 (COMPONENT_REF, TREE_TYPE (f_skip), valist, f_skip, NULL_TREE);

  addr = create_tmp_var (ptr_type_node, "va_arg");

  /* if an object is dynamically sized, a pointer to it is passed
     instead of the object itself.  */
  pass_by_reference_p = pass_by_reference (NULL, TYPE_MODE (type), type,
					   false);

  if (pass_by_reference_p)
    type = build_pointer_type (type);

  size = int_size_in_bytes (type);
  rsize = ((size + UNITS_PER_WORD - 1) / UNITS_PER_WORD) * UNITS_PER_WORD;

  /* If the alignment of the type is greater than the default for a
     parameter, align to STACK_BOUNDARY.  */
  if (TYPE_ALIGN (type) > PARM_BOUNDARY)
    {
      /* Assert the only case we generate code for: when
         stack boundary = 2 * parm boundary.  */
      gcc_assert (STACK_BOUNDARY == PARM_BOUNDARY * 2);

      tmp = build2 (BIT_AND_EXPR, sizetype,
		    fold_convert (sizetype, unshare_expr (args)),
		    size_int (PARM_BOUNDARY / 8));
      tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
		    unshare_expr (args), tmp);

      gimplify_assign (unshare_expr (args), tmp, pre_p);
    }

  /* Build conditional expression to calculate addr.  The expression
     will be gimplified later.  */
  tmp = fold_build_pointer_plus_hwi (unshare_expr (args), rsize);
  tmp = build2 (TRUTH_AND_EXPR, boolean_type_node,
		build2 (GT_EXPR, boolean_type_node, tmp, unshare_expr (skip)),
		build2 (LE_EXPR, boolean_type_node, unshare_expr (args),
			unshare_expr (skip)));

  tmp = build3 (COND_EXPR, ptr_type_node, tmp,
		build2 (POINTER_PLUS_EXPR, ptr_type_node, unshare_expr (skip),
			size_int (STACK_POINTER_OFFSET)),
		unshare_expr (args));

  gimplify_assign (addr, tmp, pre_p);

  /* Update VALIST.__args.  */
  tmp = fold_build_pointer_plus_hwi (addr, rsize);
  gimplify_assign (unshare_expr (args), tmp, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  if (pass_by_reference_p)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
}

/* Implement TARGET_RTX_COSTS.  */
static bool
tilepro_rtx_costs (rtx x, int code, int outer_code, int opno, int *total,
		   bool speed)
{
  switch (code)
    {
    case CONST_INT:
      /* If this is an 8-bit constant, return zero since it can be
         used nearly anywhere with no cost.  If it is a valid operand
         for an ADD or AND, likewise return 0 if we know it will be
         used in that context.  Otherwise, return 2 since it might be
         used there later.  All other constants take at least two
         insns.  */
      if (satisfies_constraint_I (x))
	{
	  *total = 0;
	  return true;
	}
      else if (outer_code == PLUS && add_operand (x, VOIDmode))
	{
	  /* Slightly penalize large constants even though we can add
	     them in one instruction, because it forces the use of
	     2-wide bundling mode.  */
	  *total = 1;
	  return true;
	}
      else if (move_operand (x, SImode))
	{
	  /* We can materialize in one move.  */
	  *total = COSTS_N_INSNS (1);
	  return true;
	}
      else
	{
	  /* We can materialize in two moves.  */
	  *total = COSTS_N_INSNS (2);
	  return true;
	}

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      *total = COSTS_N_INSNS (2);
      return true;

    case CONST_DOUBLE:
      *total = COSTS_N_INSNS (4);
      return true;

    case MEM:
      /* If outer-code was a sign or zero extension, a cost of
         COSTS_N_INSNS (1) was already added in, so account for
         that.  */
      if (outer_code == ZERO_EXTEND || outer_code == SIGN_EXTEND)
	*total = COSTS_N_INSNS (1);
      else
	*total = COSTS_N_INSNS (2);
      return true;

    case PLUS:
      /* Convey that s[123]a are efficient.  */
      if (GET_CODE (XEXP (x, 0)) == MULT
	  && cint_248_operand (XEXP (XEXP (x, 0), 1), VOIDmode))
	{
	  *total = (rtx_cost (XEXP (XEXP (x, 0), 0),
			      (enum rtx_code) outer_code, opno, speed)
		    + rtx_cost (XEXP (x, 1),
				(enum rtx_code) outer_code, opno, speed)
		    + COSTS_N_INSNS (1));
	  return true;
	}
      return false;

    case MULT:
      *total = COSTS_N_INSNS (2);
      return false;

    case SIGN_EXTEND:
    case ZERO_EXTEND:
      if (outer_code == MULT)
	*total = 0;
      else
	*total = COSTS_N_INSNS (1);
      return false;

    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      /* These are handled by software and are very expensive.  */
      *total = COSTS_N_INSNS (100);
      return true;
    case UNSPEC:
    case UNSPEC_VOLATILE:
      {
	int num = XINT (x, 1);

	if (num <= TILEPRO_LAST_LATENCY_1_INSN)
	  *total = COSTS_N_INSNS (1);
	else if (num <= TILEPRO_LAST_LATENCY_2_INSN)
	  *total = COSTS_N_INSNS (2);
	else if (num > TILEPRO_LAST_LATENCY_INSN)
	  {
	    if (outer_code == PLUS)
	      *total = 0;
	    else
	      *total = COSTS_N_INSNS (1);
	  }
	else
	  {
	    switch (num)
	      {
	      case UNSPEC_BLOCKAGE:
	      case UNSPEC_NETWORK_BARRIER:
		*total = 0;
		break;

	      case UNSPEC_LNK_AND_LABEL:
	      case UNSPEC_NETWORK_RECEIVE:
	      case UNSPEC_NETWORK_SEND:
	      case UNSPEC_TLS_GD_ADD:
		*total = COSTS_N_INSNS (1);
		break;

	      case UNSPEC_TLS_IE_LOAD:
		*total = COSTS_N_INSNS (2);
		break;

	      case UNSPEC_SP_SET:
		*total = COSTS_N_INSNS (3);
		break;

	      case UNSPEC_SP_TEST:
		*total = COSTS_N_INSNS (4);
		break;

	      case UNSPEC_LATENCY_L2:
		*total = COSTS_N_INSNS (8);
		break;

	      case UNSPEC_TLS_GD_CALL:
		*total = COSTS_N_INSNS (30);
		break;

	      case UNSPEC_LATENCY_MISS:
		*total = COSTS_N_INSNS (80);
		break;

	      default:
		*total = COSTS_N_INSNS (1);
	      }
	  }
	return true;
      }

    default:
      return false;
    }
}

/* Returns an SImode integer rtx with value VAL.  */
static rtx
gen_int_si (HOST_WIDE_INT val)
{
  return gen_int_mode (val, SImode);
}

/* Create a temporary variable to hold a partial result, to enable
   CSE.  */
static rtx
create_temp_reg_if_possible (enum machine_mode mode, rtx default_reg)
{
  return can_create_pseudo_p () ? gen_reg_rtx (mode) : default_reg;
}

/* Functions to save and restore machine-specific function data.  */
static struct machine_function *
tilepro_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}

/* Do anything needed before RTL is emitted for each function.  */
void
tilepro_init_expanders (void)
{
  /* Arrange to initialize and mark the machine per-function
     status.  */
  init_machine_status = tilepro_init_machine_status;

  if (cfun && cfun->machine && flag_pic)
    {
      static int label_num = 0;

      char text_label_name[32];

      struct machine_function *machine = cfun->machine;

      ASM_GENERATE_INTERNAL_LABEL (text_label_name, "L_PICLNK", label_num++);

      machine->text_label_symbol =
	gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (text_label_name));

      machine->text_label_rtx =
	gen_rtx_REG (Pmode, TILEPRO_PIC_TEXT_LABEL_REGNUM);

      machine->got_rtx = gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);

      machine->calls_tls_get_addr = false;
    }
}

/* Return true if X contains a thread-local symbol.  */
static bool
tilepro_tls_referenced_p (rtx x)
{
  if (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == PLUS)
    x = XEXP (XEXP (x, 0), 0);

  if (GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (x))
    return true;

  /* That's all we handle in tilepro_legitimize_tls_address for
     now.  */
  return false;
}

/* Return true if X requires a scratch register.  It is given that
   flag_pic is on and that X satisfies CONSTANT_P.  */
static int
tilepro_pic_address_needs_scratch (rtx x)
{
  if (GET_CODE (x) == CONST
      && GET_CODE (XEXP (x, 0)) == PLUS
      && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
	  || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
      && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
    return true;

  return false;
}

/* Implement TARGET_LEGITIMATE_CONSTANT_P.  This is all constants for
   which we are willing to load the value into a register via a move
   pattern.  TLS cannot be treated as a constant because it can
   include a function call.  */
static bool
tilepro_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
  switch (GET_CODE (x))
    {
    case CONST:
    case SYMBOL_REF:
      return !tilepro_tls_referenced_p (x);

    default:
      return true;
    }
}

/* Return true if the constant value X is a legitimate general operand
   when generating PIC code.  It is given that flag_pic is on and that
   X satisfies CONSTANT_P.  */
bool
tilepro_legitimate_pic_operand_p (rtx x)
{
  if (tilepro_pic_address_needs_scratch (x))
    return false;

  if (tilepro_tls_referenced_p (x))
    return false;

  return true;
}

/* Return true if the rtx X can be used as an address operand.  */
static bool
tilepro_legitimate_address_p (enum machine_mode ARG_UNUSED (mode), rtx x,
			      bool strict)
{
  if (GET_CODE (x) == SUBREG)
    x = SUBREG_REG (x);

  switch (GET_CODE (x))
    {
    case POST_INC:
    case POST_DEC:
      if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
	return false;

      x = XEXP (x, 0);
      break;

    case POST_MODIFY:
      if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
	return false;

      if (GET_CODE (XEXP (x, 1)) != PLUS)
	return false;

      if (!rtx_equal_p (XEXP (x, 0), XEXP (XEXP (x, 1), 0)))
	return false;

      if (!satisfies_constraint_I (XEXP (XEXP (x, 1), 1)))
	return false;

      x = XEXP (x, 0);
      break;

    case REG:
      break;

    default:
      return false;
    }

  /* Check if x is a valid reg.  */
  if (!REG_P (x))
    return false;

  if (strict)
    return REGNO_OK_FOR_BASE_P (REGNO (x));
  else
    return true;
}

/* Return the rtx containing SYMBOL_REF to the text label.  */
static rtx
tilepro_text_label_symbol (void)
{
  return cfun->machine->text_label_symbol;
}

/* Return the register storing the value of the text label.  */
static rtx
tilepro_text_label_rtx (void)
{
  return cfun->machine->text_label_rtx;
}

/* Return the register storing the value of the global offset
   table.  */
static rtx
tilepro_got_rtx (void)
{
  return cfun->machine->got_rtx;
}

/* Return the SYMBOL_REF for _GLOBAL_OFFSET_TABLE_.  */
static rtx
tilepro_got_symbol (void)
{
  if (g_got_symbol == NULL)
    g_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");

  return g_got_symbol;
}

/* Return a reference to the got to be used by tls references.  */
static rtx
tilepro_tls_got (void)
{
  rtx temp;
  if (flag_pic)
    {
      crtl->uses_pic_offset_table = 1;
      return tilepro_got_rtx ();
    }

  temp = gen_reg_rtx (Pmode);
  emit_move_insn (temp, tilepro_got_symbol ());

  return temp;
}

/* ADDR contains a thread-local SYMBOL_REF.  Generate code to compute
   this (thread-local) address.  */
static rtx
tilepro_legitimize_tls_address (rtx addr)
{
  rtx ret;

  gcc_assert (can_create_pseudo_p ());

  if (GET_CODE (addr) == SYMBOL_REF)
    switch (SYMBOL_REF_TLS_MODEL (addr))
      {
      case TLS_MODEL_GLOBAL_DYNAMIC:
      case TLS_MODEL_LOCAL_DYNAMIC:
	{
	  rtx r0, temp1, temp2, temp3, got, last;

	  ret = gen_reg_rtx (Pmode);
	  r0 = gen_rtx_REG (Pmode, 0);
	  temp1 = gen_reg_rtx (Pmode);
	  temp2 = gen_reg_rtx (Pmode);
	  temp3 = gen_reg_rtx (Pmode);

	  got = tilepro_tls_got ();
	  emit_insn (gen_tls_gd_addhi (temp1, got, addr));
	  emit_insn (gen_tls_gd_addlo (temp2, temp1, addr));
	  emit_move_insn (r0, temp2);
	  emit_insn (gen_tls_gd_call (addr));
	  emit_move_insn (temp3, r0);
	  last = emit_insn (gen_tls_gd_add (ret, temp3, addr));
	  set_unique_reg_note (last, REG_EQUAL, copy_rtx (addr));
	  break;
	}
      case TLS_MODEL_INITIAL_EXEC:
	{
	  rtx temp1, temp2, temp3, got, last;

	  ret = gen_reg_rtx (Pmode);
	  temp1 = gen_reg_rtx (Pmode);
	  temp2 = gen_reg_rtx (Pmode);
	  temp3 = gen_reg_rtx (Pmode);

	  got = tilepro_tls_got ();
	  emit_insn (gen_tls_ie_addhi (temp1, got, addr));
	  emit_insn (gen_tls_ie_addlo (temp2, temp1, addr));
	  emit_insn (gen_tls_ie_load (temp3, temp2, addr));

	  last =
	    emit_move_insn (ret,
			    gen_rtx_PLUS (Pmode,
					  gen_rtx_REG (Pmode,
						       THREAD_POINTER_REGNUM),
					  temp3));
	  set_unique_reg_note (last, REG_EQUAL, copy_rtx (addr));
	  break;
	}
      case TLS_MODEL_LOCAL_EXEC:
	{
	  rtx temp1, last;

	  ret = gen_reg_rtx (Pmode);
	  temp1 = gen_reg_rtx (Pmode);

	  emit_insn (gen_tls_le_addhi (temp1,
				       gen_rtx_REG (Pmode,
						    THREAD_POINTER_REGNUM),
				       addr));
	  last = emit_insn (gen_tls_le_addlo (ret, temp1, addr));
	  set_unique_reg_note (last, REG_EQUAL, copy_rtx (addr));
	  break;
	}
      default:
	gcc_unreachable ();
      }
  else if (GET_CODE (addr) == CONST)
    {
      rtx base, offset;

      gcc_assert (GET_CODE (XEXP (addr, 0)) == PLUS);

      base = tilepro_legitimize_tls_address (XEXP (XEXP (addr, 0), 0));
      offset = XEXP (XEXP (addr, 0), 1);

      base = force_operand (base, NULL_RTX);
      ret = force_reg (Pmode, gen_rtx_PLUS (Pmode, base, offset));
    }
  else
    gcc_unreachable ();

  return ret;
}

/* Legitimize PIC addresses.  If the address is already
   position-independent, we return ORIG.  Newly generated
   position-independent addresses go into a reg.  This is REG if
   nonzero, otherwise we allocate register(s) as necessary.  */
static rtx
tilepro_legitimize_pic_address (rtx orig,
				enum machine_mode mode ATTRIBUTE_UNUSED,
				rtx reg)
{
  if (GET_CODE (orig) == SYMBOL_REF)
    {
      rtx address, pic_ref;

      if (reg == 0)
	{
	  gcc_assert (can_create_pseudo_p ());
	  reg = gen_reg_rtx (Pmode);
	}

      if (SYMBOL_REF_LOCAL_P (orig))
	{
	  /* If not during reload, allocate another temp reg here for
	     loading in the address, so that these instructions can be
	     optimized properly.  */
	  rtx temp_reg = create_temp_reg_if_possible (Pmode, reg);
	  rtx text_label_symbol = tilepro_text_label_symbol ();
	  rtx text_label_rtx = tilepro_text_label_rtx ();

	  emit_insn (gen_addli_pcrel (temp_reg, text_label_rtx, orig,
				      text_label_symbol));
	  emit_insn (gen_auli_pcrel (temp_reg, temp_reg, orig,
				     text_label_symbol));

	  /* Note: this is conservative.  We use the text_label but we
	     don't use the pic_offset_table.  However, in some cases
	     we may need the pic_offset_table (see
	     tilepro_fixup_pcrel_references).  */
	  crtl->uses_pic_offset_table = 1;

	  address = temp_reg;

	  emit_move_insn (reg, address);

	  return reg;
	}
      else
	{
	  /* If not during reload, allocate another temp reg here for
	     loading in the address, so that these instructions can be
	     optimized properly.  */
	  rtx temp_reg = create_temp_reg_if_possible (Pmode, reg);

	  gcc_assert (flag_pic);
	  if (flag_pic == 1)
	    {
	      emit_insn (gen_add_got16 (temp_reg,
					tilepro_got_rtx (), orig));
	    }
	  else
	    {
	      rtx temp_reg2 = create_temp_reg_if_possible (Pmode, reg);
	      emit_insn (gen_addhi_got32 (temp_reg2,
					  tilepro_got_rtx (), orig));
	      emit_insn (gen_addlo_got32 (temp_reg, temp_reg2, orig));
	    }

	  address = temp_reg;

	  pic_ref = gen_const_mem (Pmode, address);
	  crtl->uses_pic_offset_table = 1;
	  emit_move_insn (reg, pic_ref);
	  /* The following put a REG_EQUAL note on this insn, so that
	     it can be optimized by loop.  But it causes the label to
	     be optimized away.  */
	  /* set_unique_reg_note (insn, REG_EQUAL, orig); */

	  return reg;
	}
    }
  else if (GET_CODE (orig) == CONST)
    {
      rtx base, offset;

      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 0) == tilepro_got_rtx ())
	return orig;

      if (reg == 0)
	{
	  gcc_assert (can_create_pseudo_p ());
	  reg = gen_reg_rtx (Pmode);
	}

      gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);
      base = tilepro_legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode,
					     reg);
      offset =
	tilepro_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
					base == reg ? 0 : reg);

      if (CONST_INT_P (offset))
	{
	  if (can_create_pseudo_p ())
	    offset = force_reg (Pmode, offset);
	  else
	    /* If we reach here, then something is seriously
	       wrong.  */
	    gcc_unreachable ();
	}

      if (can_create_pseudo_p ())
	return force_reg (Pmode, gen_rtx_PLUS (Pmode, base, offset));
      else
	gcc_unreachable ();
    }
  else if (GET_CODE (orig) == LABEL_REF)
    {
      rtx address, temp_reg;
      rtx text_label_symbol;
      rtx text_label_rtx;

      if (reg == 0)
	{
	  gcc_assert (can_create_pseudo_p ());
	  reg = gen_reg_rtx (Pmode);
	}

      /* If not during reload, allocate another temp reg here for
         loading in the address, so that these instructions can be
         optimized properly.  */
      temp_reg = create_temp_reg_if_possible (Pmode, reg);
      text_label_symbol = tilepro_text_label_symbol ();
      text_label_rtx = tilepro_text_label_rtx ();

      emit_insn (gen_addli_pcrel (temp_reg, text_label_rtx, orig,
				  text_label_symbol));
      emit_insn (gen_auli_pcrel (temp_reg, temp_reg, orig,
				 text_label_symbol));

      /* Note: this is conservative.  We use the text_label but we
         don't use the pic_offset_table.  */
      crtl->uses_pic_offset_table = 1;

      address = temp_reg;

      emit_move_insn (reg, address);

      return reg;
    }

  return orig;
}

/* Implement TARGET_LEGITIMIZE_ADDRESS.  */
static rtx
tilepro_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
			    enum machine_mode mode)
{
  if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
      && symbolic_operand (x, Pmode) && tilepro_tls_referenced_p (x))
    {
      return tilepro_legitimize_tls_address (x);
    }
  else if (flag_pic)
    {
      return tilepro_legitimize_pic_address (x, mode, 0);
    }

  return x;
}

/* Implement TARGET_DELEGITIMIZE_ADDRESS.  */
static rtx
tilepro_delegitimize_address (rtx x)
{
  x = delegitimize_mem_from_attrs (x);

  if (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == UNSPEC)
    {
      switch (XINT (XEXP (x, 0), 1))
	{
	case UNSPEC_PCREL_SYM:
	case UNSPEC_GOT16_SYM:
	case UNSPEC_GOT32_SYM:
	  x = XVECEXP (XEXP (x, 0), 0, 0);
	  break;
	}
    }

  return x;
}

/* Emit code to load the PIC register.  */
static void
load_pic_register (bool delay_pic_helper ATTRIBUTE_UNUSED)
{
  int orig_flag_pic = flag_pic;

  rtx got_symbol = tilepro_got_symbol ();
  rtx text_label_symbol = tilepro_text_label_symbol ();
  rtx text_label_rtx = tilepro_text_label_rtx ();
  flag_pic = 0;

  emit_insn (gen_insn_lnk_and_label (text_label_rtx, text_label_symbol));

  emit_insn (gen_addli_pcrel (tilepro_got_rtx (),
			      text_label_rtx, got_symbol, text_label_symbol));

  emit_insn (gen_auli_pcrel (tilepro_got_rtx (),
			     tilepro_got_rtx (),
			     got_symbol, text_label_symbol));

  flag_pic = orig_flag_pic;

  /* Need to emit this whether or not we obey regdecls, since
     setjmp/longjmp can cause life info to screw up.  ??? In the case
     where we don't obey regdecls, this is not sufficient since we may
     not fall out the bottom.  */
  emit_use (tilepro_got_rtx ());
}

/* Return the simd variant of the constant NUM of mode MODE, by
   replicating it to fill an integer of mode SImode.  NUM is first
   truncated to fit in MODE.  */
rtx
tilepro_simd_int (rtx num, enum machine_mode mode)
{
  HOST_WIDE_INT n = 0;

  gcc_assert (CONST_INT_P (num));

  n = INTVAL (num);

  switch (mode)
    {
    case QImode:
      n = 0x01010101 * (n & 0x000000FF);
      break;
    case HImode:
      n = 0x00010001 * (n & 0x0000FFFF);
      break;
    case SImode:
      break;
    default:
      gcc_unreachable ();
    }

  return gen_int_si (n);
}

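/* Worked example: for QImode, 0x01010101 * 0xA5 == 0xA5A5A5A5; for
   HImode, 0x00010001 * 0x1234 == 0x12341234.  Each multiply simply
   broadcasts the truncated constant into every lane of the SImode
   result.  */
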
/* Split one or more DImode RTL references into pairs of SImode
   references.  The RTL can be REG, offsettable MEM, integer constant,
   or CONST_DOUBLE.  "operands" is a pointer to an array of DImode RTL
   to split and "num" is its length.  lo_half and hi_half are output
   arrays that parallel "operands".  */
void
split_di (rtx operands[], int num, rtx lo_half[], rtx hi_half[])
{
  while (num--)
    {
      rtx op = operands[num];

      /* simplify_subreg refuses to split volatile memory addresses,
         but we still have to handle it.  */
      if (MEM_P (op))
	{
	  lo_half[num] = adjust_address (op, SImode, 0);
	  hi_half[num] = adjust_address (op, SImode, 4);
	}
      else
	{
	  lo_half[num] = simplify_gen_subreg (SImode, op,
					      GET_MODE (op) == VOIDmode
					      ? DImode : GET_MODE (op), 0);
	  hi_half[num] = simplify_gen_subreg (SImode, op,
					      GET_MODE (op) == VOIDmode
					      ? DImode : GET_MODE (op), 4);
	}
    }
}

/* Returns true iff val can be moved into a register in one
   instruction.  And if it can, it emits the code to move the
   constant.

   If three_wide_only is true, this insists on an instruction that
   works in a bundle containing three instructions.  */
static bool
expand_set_cint32_one_inst (rtx dest_reg,
			    HOST_WIDE_INT val, bool three_wide_only)
{
  val = trunc_int_for_mode (val, SImode);

  if (val == trunc_int_for_mode (val, QImode))
    {
      emit_move_insn (dest_reg, GEN_INT (val));
      return true;
    }
  else if (!three_wide_only)
    {
      rtx imm_op = GEN_INT (val);

      if (satisfies_constraint_J (imm_op)
	  || satisfies_constraint_K (imm_op)
	  || satisfies_constraint_N (imm_op)
	  || satisfies_constraint_P (imm_op))
	{
	  emit_move_insn (dest_reg, imm_op);
	  return true;
	}
    }

  return false;
}

/* Implement SImode rotatert.  */
static HOST_WIDE_INT
rotate_right (HOST_WIDE_INT n, int count)
{
  unsigned HOST_WIDE_INT x = n & 0xFFFFFFFF;
  if (count == 0)
    return x;
  return ((x >> count) | (x << (32 - count))) & 0xFFFFFFFF;
}

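/* E.g. rotate_right (0x12, 8) == 0x12000000: the low byte wraps
   around to the top of the 32-bit value.  */
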
/* Return true iff n contains exactly one contiguous sequence of 1
   bits, possibly wrapping around from high bits to low bits.  */
bool
tilepro_bitfield_operand_p (HOST_WIDE_INT n, int *first_bit, int *last_bit)
{
  int i;

  if (n == 0)
    return false;

  for (i = 0; i < 32; i++)
    {
      unsigned HOST_WIDE_INT x = rotate_right (n, i);

      /* See if x is a power of two minus one, i.e. only consecutive 1
         bits starting from bit 0.  */
      if ((x & (x + 1)) == 0)
	{
	  if (first_bit != NULL)
	    *first_bit = i;
	  if (last_bit != NULL)
	    *last_bit = (i + exact_log2 (x ^ (x >> 1))) & 31;

	  return true;
	}
    }

  return false;
}

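/* Worked example: n == 0x000ff000 rotated right by 12 gives 0xff, and
   (0xff & 0x100) == 0, so this returns true with *first_bit == 12 and
   *last_bit == 19.  A wrapped mask such as 0xf000000f also qualifies,
   since its 1 bits are contiguous modulo 32.  */
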
/* Create code to move the CONST_INT value in src_val to dest_reg.  */
static void
expand_set_cint32 (rtx dest_reg, rtx src_val)
{
  HOST_WIDE_INT val;
  int leading_zeroes, trailing_zeroes;
  HOST_WIDE_INT lower, upper;
  int three_wide_only;
  rtx temp;

  gcc_assert (CONST_INT_P (src_val));
  val = trunc_int_for_mode (INTVAL (src_val), SImode);

  /* See if we can generate the constant in one instruction.  */
  if (expand_set_cint32_one_inst (dest_reg, val, false))
    return;

  /* Create a temporary variable to hold a partial result, to enable
     CSE.  */
  temp = create_temp_reg_if_possible (SImode, dest_reg);

  leading_zeroes = 31 - floor_log2 (val & 0xFFFFFFFF);
  trailing_zeroes = exact_log2 (val & -val);

  lower = trunc_int_for_mode (val, HImode);
  upper = trunc_int_for_mode ((val - lower) >> 16, HImode);

  /* First try all three-wide instructions that generate a constant
     (i.e. movei) followed by various shifts and rotates. If none of
     those work, try various two-wide ways of generating a constant
     followed by various shifts and rotates.  */
  for (three_wide_only = 1; three_wide_only >= 0; three_wide_only--)
    {
      int count;

      if (expand_set_cint32_one_inst (temp, val >> trailing_zeroes,
				      three_wide_only))
	{
	  /* 0xFFFFA500 becomes:
	     movei temp, 0xFFFFFFA5
	     shli dest, temp, 8  */
	  emit_move_insn (dest_reg,
			  gen_rtx_ASHIFT (SImode, temp,
					  GEN_INT (trailing_zeroes)));
	  return;
	}

      if (expand_set_cint32_one_inst (temp, val << leading_zeroes,
				      three_wide_only))
	{
	  /* 0x7FFFFFFF becomes:
	     movei temp, -2
	     shri dest, temp, 1  */
	  emit_move_insn (dest_reg,
			  gen_rtx_LSHIFTRT (SImode, temp,
					    GEN_INT (leading_zeroes)));
	  return;
	}

      /* Try rotating a one-instruction immediate, since rotate is
         3-wide.  */
      for (count = 1; count < 32; count++)
	{
	  HOST_WIDE_INT r = rotate_right (val, count);
	  if (expand_set_cint32_one_inst (temp, r, three_wide_only))
	    {
	      /* 0xFFA5FFFF becomes:
	         movei temp, 0xFFFFFFA5
	         rli dest, temp, 16  */
	      emit_move_insn (dest_reg,
			      gen_rtx_ROTATE (SImode, temp, GEN_INT (count)));
	      return;
	    }
	}
    }

  if (lower == trunc_int_for_mode (lower, QImode))
    {
      /* We failed to use two 3-wide instructions, but the low 16
         bits are a small number so just use a 2-wide + 3-wide
         auli + addi pair rather than anything more exotic.

         0x12340056 becomes:
         auli temp, zero, 0x1234
         addi dest, temp, 0x56  */
      emit_move_insn (temp, GEN_INT (upper << 16));
      emit_move_insn (dest_reg, gen_rtx_PLUS (SImode, temp,
					      GEN_INT (lower)));
      return;
    }

  /* Fallback case: use a auli + addli/addi pair.  */
  emit_move_insn (temp, GEN_INT (upper << 16));
  emit_move_insn (dest_reg, (gen_rtx_PLUS (SImode, temp, GEN_INT (lower))));
}

/* Load OP1, a 32-bit constant, into OP0, a register.  We know it
   can't be done in one insn when we get here, the move expander
   guarantees this.  */
void
tilepro_expand_set_const32 (rtx op0, rtx op1)
{
  enum machine_mode mode = GET_MODE (op0);
  rtx temp;

  if (CONST_INT_P (op1))
    {
      /* TODO: I don't know if we want to split large constants now,
         or wait until later (with a define_split).

         Does splitting early help CSE?  Does it harm other
         optimizations that might fold loads?  */
      expand_set_cint32 (op0, op1);
    }
  else
    {
      temp = create_temp_reg_if_possible (mode, op0);

      /* A symbol, emit in the traditional way.  */
      emit_move_insn (temp, gen_rtx_HIGH (mode, op1));
      emit_move_insn (op0, gen_rtx_LO_SUM (mode, temp, op1));
    }
}

/* Expand a move instruction.  Return true if all work is done.  */
bool
tilepro_expand_mov (enum machine_mode mode, rtx *operands)
{
  /* Handle sets of MEM first.  */
  if (MEM_P (operands[0]))
    {
      if (can_create_pseudo_p ())
	operands[0] = validize_mem (operands[0]);

      if (reg_or_0_operand (operands[1], mode))
	return false;

      if (!reload_in_progress)
	operands[1] = force_reg (mode, operands[1]);
    }

  /* Fixup TLS cases.  */
  if (CONSTANT_P (operands[1]) && tilepro_tls_referenced_p (operands[1]))
    {
      operands[1] = tilepro_legitimize_tls_address (operands[1]);
      return false;
    }

  /* Fixup PIC cases.  */
  if (flag_pic && CONSTANT_P (operands[1]))
    {
      if (tilepro_pic_address_needs_scratch (operands[1]))
	operands[1] = tilepro_legitimize_pic_address (operands[1], mode, 0);

      if (symbolic_operand (operands[1], mode))
	{
	  operands[1] = tilepro_legitimize_pic_address (operands[1],
							mode,
							(reload_in_progress ?
							 operands[0] :
							 NULL_RTX));
	  return false;
	}
    }

  /* Fixup for UNSPEC addresses.  */
  if (flag_pic
      && GET_CODE (operands[1]) == HIGH
      && GET_CODE (XEXP (operands[1], 0)) == CONST
      && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == UNSPEC)
    {
      rtx unspec = XEXP (XEXP (operands[1], 0), 0);
      int unspec_num = XINT (unspec, 1);
      if (unspec_num == UNSPEC_PCREL_SYM)
	{
	  emit_insn (gen_auli_pcrel (operands[0], const0_rtx,
				     XVECEXP (unspec, 0, 0),
				     XVECEXP (unspec, 0, 1)));
	  return true;
	}
      else if (flag_pic == 2 && unspec_num == UNSPEC_GOT32_SYM)
	{
	  emit_insn (gen_addhi_got32 (operands[0], const0_rtx,
				      XVECEXP (unspec, 0, 0)));
	  return true;
	}
      else if (HAVE_AS_TLS && unspec_num == UNSPEC_TLS_GD)
	{
	  emit_insn (gen_tls_gd_addhi (operands[0], const0_rtx,
				       XVECEXP (unspec, 0, 0)));
	  return true;
	}
      else if (HAVE_AS_TLS && unspec_num == UNSPEC_TLS_IE)
	{
	  emit_insn (gen_tls_ie_addhi (operands[0], const0_rtx,
				       XVECEXP (unspec, 0, 0)));
	  return true;
	}
      else if (HAVE_AS_TLS && unspec_num == UNSPEC_TLS_LE)
	{
	  emit_insn (gen_tls_le_addhi (operands[0], const0_rtx,
				       XVECEXP (unspec, 0, 0)));
	  return true;
	}
    }

  /* Accept non-constants and valid constants unmodified.  */
  if (!CONSTANT_P (operands[1])
      || GET_CODE (operands[1]) == HIGH || move_operand (operands[1], mode))
    return false;

  /* Split large integers.  */
  if (GET_MODE_SIZE (mode) <= 4)
    {
      tilepro_expand_set_const32 (operands[0], operands[1]);
      return true;
    }

  return false;
}

/* Expand the "insv" pattern.  */
void
tilepro_expand_insv (rtx operands[4])
{
  rtx first_rtx = operands[2];
  HOST_WIDE_INT first = INTVAL (first_rtx);
  HOST_WIDE_INT width = INTVAL (operands[1]);
  rtx v = operands[3];

  /* Shift the inserted bits into position.  */
  if (first != 0)
    {
      if (CONST_INT_P (v))
	{
	  /* Shift the constant into mm position.  */
	  v = gen_int_si (INTVAL (v) << first);
	}
      else
	{
	  /* Shift over the value to be inserted.  */
	  rtx tmp = gen_reg_rtx (SImode);
	  emit_insn (gen_ashlsi3 (tmp, v, first_rtx));
	  v = tmp;
	}
    }

  /* Insert the shifted bits using an 'mm' insn.  */
  emit_insn (gen_insn_mm (operands[0], v, operands[0], first_rtx,
			  GEN_INT (first + width - 1)));
}

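/* Illustration of the mm-based insertion (assumed instruction
   semantics): inserting the 4-bit value 0x5 at bit 8 of 0xffffffff
   first shifts it into place (0x500), then replaces bits 8..11 of the
   destination, giving 0xfffff5ff.  */
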
/* Expand unaligned loads.  */
void
tilepro_expand_unaligned_load (rtx dest_reg, rtx mem, HOST_WIDE_INT bitsize,
			       HOST_WIDE_INT bit_offset, bool sign)
{
  enum machine_mode mode;
  rtx addr_lo, addr_hi;
  rtx mem_lo, mem_hi, hi;
  rtx mema, wide_result;
  int last_byte_offset;
  HOST_WIDE_INT byte_offset = bit_offset / BITS_PER_UNIT;

  mode = GET_MODE (dest_reg);

  hi = gen_reg_rtx (mode);

  if (bitsize == 2 * BITS_PER_UNIT && (bit_offset % BITS_PER_UNIT) == 0)
    {
      rtx lo;

      /* When just loading a two byte value, we can load the two bytes
         individually and combine them efficiently.  */

      mem_lo = adjust_address (mem, QImode, byte_offset);
      mem_hi = adjust_address (mem, QImode, byte_offset + 1);

      lo = gen_reg_rtx (mode);
      emit_insn (gen_zero_extendqisi2 (lo, mem_lo));

      if (sign)
	{
	  rtx tmp = gen_reg_rtx (mode);

	  /* Do a signed load of the second byte then shift and OR it
	     in.  */
	  emit_insn (gen_extendqisi2 (gen_lowpart (SImode, hi), mem_hi));
	  emit_insn (gen_ashlsi3 (gen_lowpart (SImode, tmp),
				  gen_lowpart (SImode, hi), GEN_INT (8)));
	  emit_insn (gen_iorsi3 (gen_lowpart (SImode, dest_reg),
				 gen_lowpart (SImode, lo),
				 gen_lowpart (SImode, tmp)));
	}
      else
	{
	  /* Do two unsigned loads and use intlb to interleave
	     them.  */
	  emit_insn (gen_zero_extendqisi2 (gen_lowpart (SImode, hi), mem_hi));
	  emit_insn (gen_insn_intlb (gen_lowpart (SImode, dest_reg),
				     gen_lowpart (SImode, hi),
				     gen_lowpart (SImode, lo)));
	}

      return;
    }

  mema = XEXP (mem, 0);

  /* AND addresses cannot be in any alias set, since they may
     implicitly alias surrounding code.  Ideally we'd have some alias
     set that covered all types except those with alignment 8 or
     higher.  */
  addr_lo = force_reg (Pmode, plus_constant (Pmode, mema, byte_offset));
  mem_lo = change_address (mem, mode,
			   gen_rtx_AND (Pmode, addr_lo, GEN_INT (-4)));
  set_mem_alias_set (mem_lo, 0);

  /* Load the high word at an address that will not fault if the low
     address is aligned and at the very end of a page.  */
  last_byte_offset = (bit_offset + bitsize - 1) / BITS_PER_UNIT;
  addr_hi = force_reg (Pmode, plus_constant (Pmode, mema, last_byte_offset));
  mem_hi = change_address (mem, mode,
			   gen_rtx_AND (Pmode, addr_hi, GEN_INT (-4)));
  set_mem_alias_set (mem_hi, 0);

  if (bitsize == 32)
    {
      addr_lo = make_safe_from (addr_lo, dest_reg);
      wide_result = dest_reg;
    }
  else
    {
      wide_result = gen_reg_rtx (mode);
    }

  /* Load hi first in case dest_reg is used in mema.  */
  emit_move_insn (hi, mem_hi);
  emit_move_insn (wide_result, mem_lo);

  emit_insn (gen_insn_dword_align (gen_lowpart (SImode, wide_result),
				   gen_lowpart (SImode, wide_result),
				   gen_lowpart (SImode, hi), addr_lo));

  if (bitsize != 32)
    {
      rtx extracted =
	extract_bit_field (gen_lowpart (SImode, wide_result),
			   bitsize, bit_offset % BITS_PER_UNIT,
			   !sign, false, gen_lowpart (SImode, dest_reg),
			   SImode, SImode);

      if (extracted != dest_reg)
	emit_move_insn (dest_reg, gen_lowpart (SImode, extracted));
    }
}

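/* Sketch of the general path above: for a word load at an address A
   with A % 4 == 2, mem_lo reads the aligned word at A & -4 and mem_hi
   the aligned word covering the last byte (at (A + 3) & -4);
   insn_dword_align is assumed to funnel the right bytes of the two
   words into dest using the low bits of addr_lo.  */
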
/* Expand unaligned stores.  */
static void
tilepro_expand_unaligned_store (rtx mem, rtx src, HOST_WIDE_INT bitsize,
				HOST_WIDE_INT bit_offset)
{
  HOST_WIDE_INT byte_offset = bit_offset / BITS_PER_UNIT;
  HOST_WIDE_INT bytesize = bitsize / BITS_PER_UNIT;
  HOST_WIDE_INT shift_amt;
  HOST_WIDE_INT i;
  rtx mem_addr;
  rtx store_val;

  for (i = 0, shift_amt = 0; i < bytesize; i++, shift_amt += BITS_PER_UNIT)
    {
      mem_addr = adjust_address (mem, QImode, byte_offset + i);

      if (shift_amt)
	{
	  store_val = expand_simple_binop (SImode, LSHIFTRT,
					   gen_lowpart (SImode, src),
					   GEN_INT (shift_amt), NULL, 1,
					   OPTAB_LIB_WIDEN);
	  store_val = gen_lowpart (QImode, store_val);
	}
      else
	{
	  store_val = gen_lowpart (QImode, src);
	}

      emit_move_insn (mem_addr, store_val);
    }
}

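/* E.g. a 4-byte unaligned store becomes four QImode stores of
   src >> 0, src >> 8, src >> 16 and src >> 24 to four consecutive
   byte addresses.  */
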
/* Implement the movmisalign patterns.  One of the operands is a
   memory that is not naturally aligned.  Emit instructions to load
   it.  */
void
tilepro_expand_movmisalign (enum machine_mode mode, rtx *operands)
{
  if (MEM_P (operands[1]))
    {
      rtx tmp;

      if (register_operand (operands[0], mode))
	tmp = operands[0];
      else
	tmp = gen_reg_rtx (mode);

      tilepro_expand_unaligned_load (tmp, operands[1],
				     GET_MODE_BITSIZE (mode), 0, true);

      if (tmp != operands[0])
	emit_move_insn (operands[0], tmp);
    }
  else if (MEM_P (operands[0]))
    {
      if (!reg_or_0_operand (operands[1], mode))
	operands[1] = force_reg (mode, operands[1]);

      tilepro_expand_unaligned_store (operands[0], operands[1],
				      GET_MODE_BITSIZE (mode), 0);
    }
  else
    gcc_unreachable ();
}

1758 tilepro_expand_addsi (rtx op0
, rtx op1
, rtx op2
)
1764 /* Skip anything that only takes one instruction. */
1765 if (add_operand (op2
, SImode
))
1768 /* We can only optimize ints here (it should be impossible to get
1769 here with any other type, but it is harmless to check. */
1770 if (!CONST_INT_P (op2
))
1773 temp
= create_temp_reg_if_possible (SImode
, op0
);
1775 high
= (n
+ (n
& 0x8000)) & ~0xffff;
1777 emit_move_insn (temp
, gen_rtx_PLUS (SImode
, op1
, gen_int_si (high
)));
1778 emit_move_insn (op0
, gen_rtx_PLUS (SImode
, temp
, gen_int_si (n
- high
)));
/* Implement the allocate_stack pattern (alloca).  */
void
tilepro_allocate_stack (rtx op0, rtx op1)
{
  /* Technically the correct way to initialize chain_loc is with
   * gen_frame_mem() instead of gen_rtx_MEM(), but gen_frame_mem()
   * sets the alias_set to that of a frame reference.  Some of our
   * tests rely on some unsafe assumption about when the chaining
   * update is done, we need to be conservative about reordering the
   * chaining instructions.
   */
  rtx fp_addr = gen_reg_rtx (Pmode);
  rtx fp_value = gen_reg_rtx (Pmode);
  rtx fp_loc;

  emit_move_insn (fp_addr, gen_rtx_PLUS (Pmode, stack_pointer_rtx,
					 GEN_INT (UNITS_PER_WORD)));

  fp_loc = gen_frame_mem (Pmode, fp_addr);

  emit_move_insn (fp_value, fp_loc);

  op1 = force_reg (Pmode, op1);

  emit_move_insn (stack_pointer_rtx,
		  gen_rtx_MINUS (Pmode, stack_pointer_rtx, op1));

  emit_move_insn (fp_addr, gen_rtx_PLUS (Pmode, stack_pointer_rtx,
					 GEN_INT (UNITS_PER_WORD)));

  fp_loc = gen_frame_mem (Pmode, fp_addr);

  emit_move_insn (fp_loc, fp_value);

  emit_move_insn (op0, virtual_stack_dynamic_rtx);
}

/* Returns the insn_code in ENTRY.  */
static enum insn_code
tilepro_multiply_get_opcode (const struct tilepro_multiply_insn_seq_entry
			     *entry)
{
  return tilepro_multiply_insn_seq_decode_opcode[entry->compressed_opcode];
}

/* Returns the length of the 'op' array.  */
static int
tilepro_multiply_get_num_ops (const struct tilepro_multiply_insn_seq *seq)
{
  /* The array either uses all of its allocated slots or is terminated
     by a bogus opcode.  Either way, the array size is the index of the
     last valid opcode plus one.  */
  int i;
  for (i = tilepro_multiply_insn_seq_MAX_OPERATIONS - 1; i >= 0; i--)
    if (tilepro_multiply_get_opcode (&seq->op[i]) != CODE_FOR_nothing)
      return i + 1;

  /* An empty array is not allowed.  */
  gcc_unreachable ();
}

/* We precompute a number of expression trees for multiplying by
   constants.  This generates code for such an expression tree by
   walking through the nodes in the tree (which are conveniently
   pre-linearized) and emitting an instruction for each one.  */
static void
tilepro_expand_constant_multiply_given_sequence (rtx result, rtx src,
						 const struct
						 tilepro_multiply_insn_seq
						 *seq)
{
  int i;
  int num_ops;

  /* Keep track of the subexpressions computed so far, so later
     instructions can refer to them.  We seed the array with zero and
     the value being multiplied.  */
  int num_subexprs = 2;
  rtx subexprs[tilepro_multiply_insn_seq_MAX_OPERATIONS + 2];
  subexprs[0] = const0_rtx;
  subexprs[1] = src;

  /* Determine how many instructions we are going to generate.  */
  num_ops = tilepro_multiply_get_num_ops (seq);
  gcc_assert (num_ops > 0
	      && num_ops <= tilepro_multiply_insn_seq_MAX_OPERATIONS);

  for (i = 0; i < num_ops; i++)
    {
      const struct tilepro_multiply_insn_seq_entry *entry = &seq->op[i];

      /* Figure out where to store the output of this instruction.  */
      const bool is_last_op = (i + 1 == num_ops);
      rtx out = is_last_op ? result : gen_reg_rtx (SImode);

      enum insn_code opcode = tilepro_multiply_get_opcode (entry);
      if (opcode == CODE_FOR_ashlsi3)
	{
	  /* Handle shift by immediate.  This is a special case because
	     the meaning of the second operand is a constant shift
	     count rather than an operand index.  */

	  /* Make sure the shift count is in range.  Zero should not
	     happen.  */
	  const int shift_count = entry->rhs;
	  gcc_assert (shift_count > 0 && shift_count < 32);

	  /* Emit the actual instruction.  */
	  emit_insn (GEN_FCN (opcode)
		     (out, subexprs[entry->lhs],
		      gen_rtx_CONST_INT (SImode, shift_count)));
	}
      else
	{
	  /* Handle a normal two-operand instruction, such as add or
	     s1a.  */

	  /* Make sure we are referring to a previously computed
	     subexpression.  */
	  gcc_assert (entry->rhs < num_subexprs);

	  /* Emit the actual instruction.  */
	  emit_insn (GEN_FCN (opcode)
		     (out, subexprs[entry->lhs], subexprs[entry->rhs]));
	}

      /* Record this subexpression for use by later expressions.  */
      subexprs[num_subexprs++] = out;
    }
}

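/* Illustrative sequence (not taken from the generated table): a
   multiply by 5 could be encoded as one shift-and-add step,
   out = (x << 2) + x, in the style of the s2a instruction, and a
   multiply by 10 as that step followed by an ashlsi3 shift by 1.  The
   precomputed table referenced above stores such linearized trees.  */
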
/* bsearch helper function.  */
static int
tilepro_compare_multipliers (const void *key, const void *t)
{
  return *(const int *) key -
    ((const struct tilepro_multiply_insn_seq *) t)->multiplier;
}

/* Returns the tilepro_multiply_insn_seq for multiplier, or NULL if
   none exists.  */
static const struct tilepro_multiply_insn_seq *
tilepro_find_multiply_insn_seq_for_constant (int multiplier)
{
  return ((const struct tilepro_multiply_insn_seq *)
	  bsearch (&multiplier, tilepro_multiply_insn_seq_table,
		   tilepro_multiply_insn_seq_table_size,
		   sizeof tilepro_multiply_insn_seq_table[0],
		   tilepro_compare_multipliers));
}

/* Try to expand a constant multiply in SImode by looking it up in a
   precompiled table.  OP0 is the result operand, OP1 is the source
   operand, and MULTIPLIER is the value of the constant.  Return true
   if it succeeds.  */
static bool
tilepro_expand_const_mulsi (rtx op0, rtx op1, int multiplier)
{
  /* See if we have precomputed an efficient way to multiply by this
     constant.  */
  const struct tilepro_multiply_insn_seq *seq =
    tilepro_find_multiply_insn_seq_for_constant (multiplier);
  if (seq != NULL)
    {
      tilepro_expand_constant_multiply_given_sequence (op0, op1, seq);
      return true;
    }
  else
    return false;
}

/* Expand the mulsi pattern.  */
bool
tilepro_expand_mulsi (rtx op0, rtx op1, rtx op2)
{
  if (CONST_INT_P (op2))
    {
      HOST_WIDE_INT n = trunc_int_for_mode (INTVAL (op2), SImode);
      return tilepro_expand_const_mulsi (op0, op1, n);
    }
  return false;
}

/* Expand a high multiply pattern in SImode.  RESULT, OP1, OP2 are the
   operands, and SIGN is true if it's a signed multiply, and false if
   it's an unsigned multiply.  */
static void
tilepro_expand_high_multiply (rtx result, rtx op1, rtx op2, bool sign)
{
  rtx tmp0 = gen_reg_rtx (SImode);
  rtx tmp1 = gen_reg_rtx (SImode);
  rtx tmp2 = gen_reg_rtx (SImode);
  rtx tmp3 = gen_reg_rtx (SImode);
  rtx tmp4 = gen_reg_rtx (SImode);
  rtx tmp5 = gen_reg_rtx (SImode);
  rtx tmp6 = gen_reg_rtx (SImode);
  rtx tmp7 = gen_reg_rtx (SImode);
  rtx tmp8 = gen_reg_rtx (SImode);
  rtx tmp9 = gen_reg_rtx (SImode);
  rtx tmp10 = gen_reg_rtx (SImode);
  rtx tmp11 = gen_reg_rtx (SImode);
  rtx tmp12 = gen_reg_rtx (SImode);
  rtx tmp13 = gen_reg_rtx (SImode);
  rtx result_lo = gen_reg_rtx (SImode);

  if (sign)
    {
      emit_insn (gen_insn_mulhl_su (tmp0, op1, op2));
      emit_insn (gen_insn_mulhl_su (tmp1, op2, op1));
      emit_insn (gen_insn_mulll_uu (tmp2, op1, op2));
      emit_insn (gen_insn_mulhh_ss (tmp3, op1, op2));
    }
  else
    {
      emit_insn (gen_insn_mulhl_uu (tmp0, op1, op2));
      emit_insn (gen_insn_mulhl_uu (tmp1, op2, op1));
      emit_insn (gen_insn_mulll_uu (tmp2, op1, op2));
      emit_insn (gen_insn_mulhh_uu (tmp3, op1, op2));
    }

  emit_move_insn (tmp4, (gen_rtx_ASHIFT (SImode, tmp0, GEN_INT (16))));

  emit_move_insn (tmp5, (gen_rtx_ASHIFT (SImode, tmp1, GEN_INT (16))));

  emit_move_insn (tmp6, (gen_rtx_PLUS (SImode, tmp4, tmp5)));
  emit_move_insn (result_lo, (gen_rtx_PLUS (SImode, tmp2, tmp6)));

  emit_move_insn (tmp7, gen_rtx_LTU (SImode, tmp6, tmp4));
  emit_move_insn (tmp8, gen_rtx_LTU (SImode, result_lo, tmp2));

  if (sign)
    {
      emit_move_insn (tmp9, (gen_rtx_ASHIFTRT (SImode, tmp0, GEN_INT (16))));
      emit_move_insn (tmp10, (gen_rtx_ASHIFTRT (SImode, tmp1, GEN_INT (16))));
    }
  else
    {
      emit_move_insn (tmp9, (gen_rtx_LSHIFTRT (SImode, tmp0, GEN_INT (16))));
      emit_move_insn (tmp10, (gen_rtx_LSHIFTRT (SImode, tmp1, GEN_INT (16))));
    }

  emit_move_insn (tmp11, (gen_rtx_PLUS (SImode, tmp3, tmp7)));
  emit_move_insn (tmp12, (gen_rtx_PLUS (SImode, tmp8, tmp9)));
  emit_move_insn (tmp13, (gen_rtx_PLUS (SImode, tmp11, tmp12)));
  emit_move_insn (result, (gen_rtx_PLUS (SImode, tmp13, tmp10)));
}

/* Implement smulsi3_highpart.  */
void
tilepro_expand_smulsi3_highpart (rtx op0, rtx op1, rtx op2)
{
  tilepro_expand_high_multiply (op0, op1, op2, true);
}

/* Implement umulsi3_highpart.  */
void
tilepro_expand_umulsi3_highpart (rtx op0, rtx op1, rtx op2)
{
  tilepro_expand_high_multiply (op0, op1, op2, false);
}

/* Compare and branches  */

/* Helper function to handle DImode for tilepro_emit_setcc_internal.  */
static bool
tilepro_emit_setcc_internal_di (rtx res, enum rtx_code code, rtx op0, rtx op1)
{
  rtx operands[2], lo_half[2], hi_half[2];
  rtx tmp, tmp0, tmp1, tmp2;
  bool swap = false;

  /* Reduce the number of cases we need to handle by reversing the
     operands.  */
  switch (code)
    {
    case EQ:
    case NE:
    case LE:
    case LT:
    case LEU:
    case LTU:
      /* We handle these compares directly.  */
      break;

    case GE:
    case GT:
    case GEU:
    case GTU:
      /* Reverse the operands.  */
      swap = true;
      break;

    default:
      /* We should not have called this with any other code.  */
      gcc_unreachable ();
    }

  if (swap)
    {
      code = swap_condition (code);
      tmp = op0, op0 = op1, op1 = tmp;
    }

  operands[0] = op0;
  operands[1] = op1;

  split_di (operands, 2, lo_half, hi_half);

  if (!reg_or_0_operand (lo_half[0], SImode))
    lo_half[0] = force_reg (SImode, lo_half[0]);

  if (!reg_or_0_operand (hi_half[0], SImode))
    hi_half[0] = force_reg (SImode, hi_half[0]);

  if (!CONST_INT_P (lo_half[1]) && !register_operand (lo_half[1], SImode))
    lo_half[1] = force_reg (SImode, lo_half[1]);

  if (!CONST_INT_P (hi_half[1]) && !register_operand (hi_half[1], SImode))
    hi_half[1] = force_reg (SImode, hi_half[1]);

  tmp0 = gen_reg_rtx (SImode);
  tmp1 = gen_reg_rtx (SImode);
  tmp2 = gen_reg_rtx (SImode);

  switch (code)
    {
    case EQ:
      emit_insn (gen_insn_seq (tmp0, lo_half[0], lo_half[1]));
      emit_insn (gen_insn_seq (tmp1, hi_half[0], hi_half[1]));
      emit_insn (gen_andsi3 (res, tmp0, tmp1));
      return true;

    case NE:
      emit_insn (gen_insn_sne (tmp0, lo_half[0], lo_half[1]));
      emit_insn (gen_insn_sne (tmp1, hi_half[0], hi_half[1]));
      emit_insn (gen_iorsi3 (res, tmp0, tmp1));
      return true;

    case LE:
      emit_insn (gen_insn_slte (tmp0, hi_half[0], hi_half[1]));
      emit_insn (gen_insn_seq (tmp1, hi_half[0], hi_half[1]));
      emit_insn (gen_insn_slte_u (tmp2, lo_half[0], lo_half[1]));
      emit_insn (gen_insn_mvnz (res, tmp0, tmp1, tmp2));
      return true;

    case LT:
      if (operands[1] == const0_rtx)
	{
	  emit_insn (gen_lshrsi3 (res, hi_half[0], GEN_INT (31)));
	  return true;
	}
      else
	{
	  emit_insn (gen_insn_slt (tmp0, hi_half[0], hi_half[1]));
	  emit_insn (gen_insn_seq (tmp1, hi_half[0], hi_half[1]));
	  emit_insn (gen_insn_slt_u (tmp2, lo_half[0], lo_half[1]));
	  emit_insn (gen_insn_mvnz (res, tmp0, tmp1, tmp2));
	}
      return true;

    case LEU:
      emit_insn (gen_insn_slte_u (tmp0, hi_half[0], hi_half[1]));
      emit_insn (gen_insn_seq (tmp1, hi_half[0], hi_half[1]));
      emit_insn (gen_insn_slte_u (tmp2, lo_half[0], lo_half[1]));
      emit_insn (gen_insn_mvnz (res, tmp0, tmp1, tmp2));
      return true;

    case LTU:
      emit_insn (gen_insn_slt_u (tmp0, hi_half[0], hi_half[1]));
      emit_insn (gen_insn_seq (tmp1, hi_half[0], hi_half[1]));
      emit_insn (gen_insn_slt_u (tmp2, lo_half[0], lo_half[1]));
      emit_insn (gen_insn_mvnz (res, tmp0, tmp1, tmp2));
      return true;

    default:
      gcc_unreachable ();
    }
}

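/* Illustration of the scheme above for signed a < b on DImode:
   tmp0 = (hi(a) < hi(b)), tmp1 = (hi(a) == hi(b)), and
   tmp2 = (lo(a) <u lo(b)); mvnz is assumed to act as
   res = tmp1 ? tmp2 : tmp0, so the unsigned low-half compare decides
   only when the high halves tie.  */
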
/* Certain simplifications can be done to make invalid setcc
   operations valid.  Return the final comparison, or NULL if we can't
   do anything.  */
static bool
tilepro_emit_setcc_internal (rtx res, enum rtx_code code, rtx op0, rtx op1,
			     enum machine_mode cmp_mode)
{
  rtx tmp;
  bool swap = false;

  if (cmp_mode == DImode)
    {
      return tilepro_emit_setcc_internal_di (res, code, op0, op1);
    }

  /* The general case: fold the comparison code to the types of
     compares that we have, choosing the branch as necessary.  */
  switch (code)
    {
    case EQ:
    case NE:
    case LE:
    case LT:
    case LEU:
    case LTU:
      /* We have these compares.  */
      break;

    case GE:
    case GT:
    case GEU:
    case GTU:
      /* We do not have these compares, so we reverse the
         operands.  */
      swap = true;
      break;

    default:
      /* We should not have called this with any other code.  */
      gcc_unreachable ();
    }

  if (swap)
    {
      code = swap_condition (code);
      tmp = op0, op0 = op1, op1 = tmp;
    }

  if (!reg_or_0_operand (op0, SImode))
    op0 = force_reg (SImode, op0);

  if (!CONST_INT_P (op1) && !register_operand (op1, SImode))
    op1 = force_reg (SImode, op1);

  /* Return the setcc comparison.  */
  emit_insn (gen_rtx_SET (VOIDmode, res,
			  gen_rtx_fmt_ee (code, SImode, op0, op1)));

  return true;
}

/* Implement cstore patterns.  */
bool
tilepro_emit_setcc (rtx operands[], enum machine_mode cmp_mode)
{
  return
    tilepro_emit_setcc_internal (operands[0], GET_CODE (operands[1]),
				 operands[2], operands[3], cmp_mode);
}

/* Return whether CODE is a signed comparison.  */
static bool
signed_compare_p (enum rtx_code code)
{
  return (code == EQ || code == NE || code == LT || code == LE
	  || code == GT || code == GE);
}

/* Generate the comparison for an SImode conditional branch.  */
static rtx
tilepro_emit_cc_test (enum rtx_code code, rtx op0, rtx op1,
		      enum machine_mode cmp_mode, bool eq_ne_only)
{
  enum rtx_code branch_code;
  rtx temp;

  /* Check for a compare against zero using a comparison we can do
     directly.  */
  if (cmp_mode != DImode
      && op1 == const0_rtx
      && (code == EQ || code == NE
	  || (!eq_ne_only && signed_compare_p (code))))
    {
      op0 = force_reg (SImode, op0);
      return gen_rtx_fmt_ee (code, VOIDmode, op0, const0_rtx);
    }

  /* The general case: fold the comparison code to the types of
     compares that we have, choosing the branch as necessary.  */
  switch (code)
    {
    case EQ:
    case LE:
    case LT:
    case LEU:
    case LTU:
      /* We have these compares.  */
      branch_code = NE;
      break;

    case NE:
    case GE:
    case GT:
    case GEU:
    case GTU:
      /* These must be reversed (except NE, but let's
         canonicalize).  */
      code = reverse_condition (code);
      branch_code = EQ;
      break;

    default:
      gcc_unreachable ();
    }

  if (cmp_mode != DImode
      && CONST_INT_P (op1) && (!satisfies_constraint_I (op1) || code == LEU))
    {
      HOST_WIDE_INT n = trunc_int_for_mode (INTVAL (op1), SImode);

      switch (code)
	{
	case EQ:
	  /* Subtract off the value we want to compare against and see
	     if we get zero.  This is cheaper than creating a constant
	     in a register.  Except that subtracting -128 is more
	     expensive than seqi to -128, so we leave that alone.  */
	  /* ??? Don't do this when comparing against symbols,
	     otherwise we'll reduce (&x == 0x1234) to (&x-0x1234 ==
	     0), which will be declared false out of hand (at least
	     for now).  */
	  if (!(symbolic_operand (op0, VOIDmode)
		|| (REG_P (op0) && REG_POINTER (op0))))
	    {
	      /* To compare against MIN_INT, we add MIN_INT and check
	         for 0.  */
	      HOST_WIDE_INT add;
	      if (n != -2147483647 - 1)
		add = -n;
	      else
		add = n;

	      op0 = force_reg (SImode, op0);
	      temp = gen_reg_rtx (SImode);
	      emit_insn (gen_addsi3 (temp, op0, gen_int_si (add)));
	      return gen_rtx_fmt_ee (reverse_condition (branch_code),
				     VOIDmode, temp, const0_rtx);
	    }
	  break;

	case LEU:
	case LTU:
	  /* Change ((unsigned)x < 0x1000) into !((unsigned)x >> 12),
	     etc.  */
	  {
	    int first = exact_log2 (code == LTU ? n : n + 1);
	    if (first != -1)
	      {
		op0 = force_reg (SImode, op0);
		temp = gen_reg_rtx (SImode);
		emit_move_insn (temp,
				gen_rtx_LSHIFTRT (SImode, op0,
						  gen_int_si (first)));
		return gen_rtx_fmt_ee (reverse_condition (branch_code),
				       VOIDmode, temp, const0_rtx);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Compute a flag saying whether we should branch.  */
  temp = gen_reg_rtx (SImode);
  tilepro_emit_setcc_internal (temp, code, op0, op1, cmp_mode);

  /* Return the branch comparison.  */
  return gen_rtx_fmt_ee (branch_code, VOIDmode, temp, const0_rtx);
}

/* Generate the comparison for a conditional branch.  */
void
tilepro_emit_conditional_branch (rtx operands[], enum machine_mode cmp_mode)
{
  rtx cmp_rtx =
    tilepro_emit_cc_test (GET_CODE (operands[0]), operands[1], operands[2],
			  cmp_mode, false);
  rtx branch_rtx = gen_rtx_SET (VOIDmode, pc_rtx,
				gen_rtx_IF_THEN_ELSE (VOIDmode, cmp_rtx,
						      gen_rtx_LABEL_REF
						      (VOIDmode,
						       operands[3]),
						      pc_rtx));
  emit_jump_insn (branch_rtx);
}

/* Implement the movsicc pattern.  */
rtx
tilepro_emit_conditional_move (rtx cmp)
{
  return
    tilepro_emit_cc_test (GET_CODE (cmp), XEXP (cmp, 0), XEXP (cmp, 1),
			  GET_MODE (XEXP (cmp, 0)), true);
}

/* Return true if INSN is annotated with a REG_BR_PROB note that
   indicates it's a branch that's predicted taken.  */
static bool
cbranch_predicted_p (rtx insn)
{
  rtx x = find_reg_note (insn, REG_BR_PROB, 0);

  if (x)
    {
      int pred_val = INTVAL (XEXP (x, 0));

      return pred_val >= REG_BR_PROB_BASE / 2;
    }

  return false;
}

/* Output assembly code for a specific branch instruction, appending
   the branch prediction flag to the opcode if appropriate.  */
static const char *
tilepro_output_simple_cbranch_with_opcode (rtx insn, const char *opcode,
					   int regop, bool netreg_p,
					   bool reverse_predicted)
{
  static char buf[64];
  sprintf (buf, "%s%s\t%%%c%d, %%l0", opcode,
	   (cbranch_predicted_p (insn) ^ reverse_predicted) ? "t" : "",
	   netreg_p ? 'N' : 'r', regop);
  return buf;
}

/* Output assembly code for a specific branch instruction, appending
   the branch prediction flag to the opcode if appropriate.  */
static const char *
tilepro_output_cbranch_with_opcode (rtx insn, rtx *operands,
				    const char *opcode,
				    const char *rev_opcode,
				    int regop, bool netreg_p)
{
  const char *branch_if_false;
  rtx taken, not_taken;
  bool is_simple_branch;

  gcc_assert (LABEL_P (operands[0]));

  is_simple_branch = true;
  if (INSN_ADDRESSES_SET_P ())
    {
      int from_addr = INSN_ADDRESSES (INSN_UID (insn));
      int to_addr = INSN_ADDRESSES (INSN_UID (operands[0]));
      int delta = to_addr - from_addr;
      is_simple_branch = IN_RANGE (delta, -524288, 524280);
    }

  if (is_simple_branch)
    {
      /* Just a simple conditional branch.  */
      return
	tilepro_output_simple_cbranch_with_opcode (insn, opcode, regop,
						   netreg_p, false);
    }

  /* Generate a reversed branch around a direct jump.  This fallback
     does not use branch-likely instructions.  */
  not_taken = gen_label_rtx ();
  taken = operands[0];

  /* Generate the reversed branch to NOT_TAKEN.  */
  operands[0] = not_taken;
  branch_if_false =
    tilepro_output_simple_cbranch_with_opcode (insn, rev_opcode, regop,
					       netreg_p, true);
  output_asm_insn (branch_if_false, operands);

  output_asm_insn ("j\t%l0", &taken);

  /* Output NOT_TAKEN.  */
  targetm.asm_out.internal_label (asm_out_file, "L",
				  CODE_LABEL_NUMBER (not_taken));
  return "";
}
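/* For an out-of-range target, the fallback above rewrites a branch
   like "bz r3, .Lfar" into, schematically (illustrative assembly
   only):

       bnz   r3, .Lnot_taken
       j     .Lfar
   .Lnot_taken:
*/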
/* Output assembly code for a conditional branch instruction.  */
const char *
tilepro_output_cbranch (rtx insn, rtx *operands, bool reversed)
{
  enum rtx_code code = GET_CODE (operands[1]);
  const char *opcode;
  const char *rev_opcode;

  if (reversed)
    code = reverse_condition (code);

  switch (code)
    {
    case NE:
      opcode = "bnz";
      rev_opcode = "bz";
      break;
    case EQ:
      opcode = "bz";
      rev_opcode = "bnz";
      break;
    case GE:
      opcode = "bgez";
      rev_opcode = "blz";
      break;
    case GT:
      opcode = "bgz";
      rev_opcode = "blez";
      break;
    case LE:
      opcode = "blez";
      rev_opcode = "bgz";
      break;
    case LT:
      opcode = "blz";
      rev_opcode = "bgez";
      break;
    default:
      gcc_unreachable ();
    }

  return
    tilepro_output_cbranch_with_opcode (insn, operands, opcode, rev_opcode,
					2, false);
}
/* Implement the tablejump pattern.  */
void
tilepro_expand_tablejump (rtx op0, rtx op1)
{
  if (flag_pic)
    {
      rtx table = gen_rtx_LABEL_REF (Pmode, op1);
      rtx temp = gen_reg_rtx (Pmode);
      rtx text_label_symbol = tilepro_text_label_symbol ();
      rtx text_label_rtx = tilepro_text_label_rtx ();

      emit_insn (gen_addli_pcrel (temp, text_label_rtx,
				  table, text_label_symbol));
      emit_insn (gen_auli_pcrel (temp, temp, table, text_label_symbol));
      emit_move_insn (temp,
		      gen_rtx_PLUS (Pmode,
				    convert_to_mode (Pmode, op0, false),
				    temp));
      op0 = temp;
    }

  emit_jump_insn (gen_tablejump_aux (op0, op1));
}
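/* Under -fpic, the expansion above first forms the table address
   pc-relatively, schematically (illustrative assembly only):

       addli tmp, txt_label_reg, lo16(table - txt_label)
       auli  tmp, tmp, ha16(table - txt_label)
       add   tmp, tmp, index

   before dispatching through the tablejump_aux pattern.  */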
/* Expand a builtin vector binary op, by calling gen function GEN with
   operands in the proper modes.  DEST is converted to DEST_MODE, and
   src0 and src1 (if DO_SRC1 is true) are converted to SRC_MODE.  */
void
tilepro_expand_builtin_vector_binop (rtx (*gen) (rtx, rtx, rtx),
				     enum machine_mode dest_mode,
				     rtx dest,
				     enum machine_mode src_mode,
				     rtx src0, rtx src1, bool do_src1)
{
  dest = gen_lowpart (dest_mode, dest);

  if (src0 == const0_rtx)
    src0 = CONST0_RTX (src_mode);
  else
    src0 = gen_lowpart (src_mode, src0);

  if (do_src1)
    {
      if (src1 == const0_rtx)
	src1 = CONST0_RTX (src_mode);
      else
	src1 = gen_lowpart (src_mode, src1);
    }

  emit_insn ((*gen) (dest, src0, src1));
}
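/* Illustrative (hypothetical) use, assuming an addv4qi3 insn pattern
   exists: tilepro_expand_builtin_vector_binop (gen_addv4qi3, V4QImode,
   dest, V4QImode, src0, src1, true) would emit a single V4QI add with
   all operands reinterpreted in vector mode via gen_lowpart.  */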
struct tile_builtin_info
{
  enum insn_code icode;
  tree fndecl;
};

static struct tile_builtin_info tilepro_builtin_info[TILEPRO_BUILTIN_max] = {
  { CODE_FOR_addsi3, NULL }, /* add */
  { CODE_FOR_insn_addb, NULL }, /* addb */
  { CODE_FOR_insn_addbs_u, NULL }, /* addbs_u */
  { CODE_FOR_insn_addh, NULL }, /* addh */
  { CODE_FOR_insn_addhs, NULL }, /* addhs */
  { CODE_FOR_insn_addib, NULL }, /* addib */
  { CODE_FOR_insn_addih, NULL }, /* addih */
  { CODE_FOR_insn_addlis, NULL }, /* addlis */
  { CODE_FOR_ssaddsi3, NULL }, /* adds */
  { CODE_FOR_insn_adiffb_u, NULL }, /* adiffb_u */
  { CODE_FOR_insn_adiffh, NULL }, /* adiffh */
  { CODE_FOR_andsi3, NULL }, /* and */
  { CODE_FOR_insn_auli, NULL }, /* auli */
  { CODE_FOR_insn_avgb_u, NULL }, /* avgb_u */
  { CODE_FOR_insn_avgh, NULL }, /* avgh */
  { CODE_FOR_insn_bitx, NULL }, /* bitx */
  { CODE_FOR_bswapsi2, NULL }, /* bytex */
  { CODE_FOR_clzsi2, NULL }, /* clz */
  { CODE_FOR_insn_crc32_32, NULL }, /* crc32_32 */
  { CODE_FOR_insn_crc32_8, NULL }, /* crc32_8 */
  { CODE_FOR_ctzsi2, NULL }, /* ctz */
  { CODE_FOR_insn_drain, NULL }, /* drain */
  { CODE_FOR_insn_dtlbpr, NULL }, /* dtlbpr */
  { CODE_FOR_insn_dword_align, NULL }, /* dword_align */
  { CODE_FOR_insn_finv, NULL }, /* finv */
  { CODE_FOR_insn_flush, NULL }, /* flush */
  { CODE_FOR_insn_fnop, NULL }, /* fnop */
  { CODE_FOR_insn_icoh, NULL }, /* icoh */
  { CODE_FOR_insn_ill, NULL }, /* ill */
  { CODE_FOR_insn_info, NULL }, /* info */
  { CODE_FOR_insn_infol, NULL }, /* infol */
  { CODE_FOR_insn_inthb, NULL }, /* inthb */
  { CODE_FOR_insn_inthh, NULL }, /* inthh */
  { CODE_FOR_insn_intlb, NULL }, /* intlb */
  { CODE_FOR_insn_intlh, NULL }, /* intlh */
  { CODE_FOR_insn_inv, NULL }, /* inv */
  { CODE_FOR_insn_lb, NULL }, /* lb */
  { CODE_FOR_insn_lb_u, NULL }, /* lb_u */
  { CODE_FOR_insn_lh, NULL }, /* lh */
  { CODE_FOR_insn_lh_u, NULL }, /* lh_u */
  { CODE_FOR_insn_lnk, NULL }, /* lnk */
  { CODE_FOR_insn_lw, NULL }, /* lw */
  { CODE_FOR_insn_lw_na, NULL }, /* lw_na */
  { CODE_FOR_insn_lb_L2, NULL }, /* lb_L2 */
  { CODE_FOR_insn_lb_u_L2, NULL }, /* lb_u_L2 */
  { CODE_FOR_insn_lh_L2, NULL }, /* lh_L2 */
  { CODE_FOR_insn_lh_u_L2, NULL }, /* lh_u_L2 */
  { CODE_FOR_insn_lw_L2, NULL }, /* lw_L2 */
  { CODE_FOR_insn_lw_na_L2, NULL }, /* lw_na_L2 */
  { CODE_FOR_insn_lb_miss, NULL }, /* lb_miss */
  { CODE_FOR_insn_lb_u_miss, NULL }, /* lb_u_miss */
  { CODE_FOR_insn_lh_miss, NULL }, /* lh_miss */
  { CODE_FOR_insn_lh_u_miss, NULL }, /* lh_u_miss */
  { CODE_FOR_insn_lw_miss, NULL }, /* lw_miss */
  { CODE_FOR_insn_lw_na_miss, NULL }, /* lw_na_miss */
  { CODE_FOR_insn_maxb_u, NULL }, /* maxb_u */
  { CODE_FOR_insn_maxh, NULL }, /* maxh */
  { CODE_FOR_insn_maxib_u, NULL }, /* maxib_u */
  { CODE_FOR_insn_maxih, NULL }, /* maxih */
  { CODE_FOR_memory_barrier, NULL }, /* mf */
  { CODE_FOR_insn_mfspr, NULL }, /* mfspr */
  { CODE_FOR_insn_minb_u, NULL }, /* minb_u */
  { CODE_FOR_insn_minh, NULL }, /* minh */
  { CODE_FOR_insn_minib_u, NULL }, /* minib_u */
  { CODE_FOR_insn_minih, NULL }, /* minih */
  { CODE_FOR_insn_mm, NULL }, /* mm */
  { CODE_FOR_insn_mnz, NULL }, /* mnz */
  { CODE_FOR_insn_mnzb, NULL }, /* mnzb */
  { CODE_FOR_insn_mnzh, NULL }, /* mnzh */
  { CODE_FOR_movsi, NULL }, /* move */
  { CODE_FOR_insn_movelis, NULL }, /* movelis */
  { CODE_FOR_insn_mtspr, NULL }, /* mtspr */
  { CODE_FOR_insn_mulhh_ss, NULL }, /* mulhh_ss */
  { CODE_FOR_insn_mulhh_su, NULL }, /* mulhh_su */
  { CODE_FOR_insn_mulhh_uu, NULL }, /* mulhh_uu */
  { CODE_FOR_insn_mulhha_ss, NULL }, /* mulhha_ss */
  { CODE_FOR_insn_mulhha_su, NULL }, /* mulhha_su */
  { CODE_FOR_insn_mulhha_uu, NULL }, /* mulhha_uu */
  { CODE_FOR_insn_mulhhsa_uu, NULL }, /* mulhhsa_uu */
  { CODE_FOR_insn_mulhl_ss, NULL }, /* mulhl_ss */
  { CODE_FOR_insn_mulhl_su, NULL }, /* mulhl_su */
  { CODE_FOR_insn_mulhl_us, NULL }, /* mulhl_us */
  { CODE_FOR_insn_mulhl_uu, NULL }, /* mulhl_uu */
  { CODE_FOR_insn_mulhla_ss, NULL }, /* mulhla_ss */
  { CODE_FOR_insn_mulhla_su, NULL }, /* mulhla_su */
  { CODE_FOR_insn_mulhla_us, NULL }, /* mulhla_us */
  { CODE_FOR_insn_mulhla_uu, NULL }, /* mulhla_uu */
  { CODE_FOR_insn_mulhlsa_uu, NULL }, /* mulhlsa_uu */
  { CODE_FOR_insn_mulll_ss, NULL }, /* mulll_ss */
  { CODE_FOR_insn_mulll_su, NULL }, /* mulll_su */
  { CODE_FOR_insn_mulll_uu, NULL }, /* mulll_uu */
  { CODE_FOR_insn_mullla_ss, NULL }, /* mullla_ss */
  { CODE_FOR_insn_mullla_su, NULL }, /* mullla_su */
  { CODE_FOR_insn_mullla_uu, NULL }, /* mullla_uu */
  { CODE_FOR_insn_mulllsa_uu, NULL }, /* mulllsa_uu */
  { CODE_FOR_insn_mvnz, NULL }, /* mvnz */
  { CODE_FOR_insn_mvz, NULL }, /* mvz */
  { CODE_FOR_insn_mz, NULL }, /* mz */
  { CODE_FOR_insn_mzb, NULL }, /* mzb */
  { CODE_FOR_insn_mzh, NULL }, /* mzh */
  { CODE_FOR_insn_nap, NULL }, /* nap */
  { CODE_FOR_nop, NULL }, /* nop */
  { CODE_FOR_insn_nor, NULL }, /* nor */
  { CODE_FOR_iorsi3, NULL }, /* or */
  { CODE_FOR_insn_packbs_u, NULL }, /* packbs_u */
  { CODE_FOR_insn_packhb, NULL }, /* packhb */
  { CODE_FOR_insn_packhs, NULL }, /* packhs */
  { CODE_FOR_insn_packlb, NULL }, /* packlb */
  { CODE_FOR_popcountsi2, NULL }, /* pcnt */
  { CODE_FOR_insn_prefetch, NULL }, /* prefetch */
  { CODE_FOR_insn_prefetch_L1, NULL }, /* prefetch_L1 */
  { CODE_FOR_rotlsi3, NULL }, /* rl */
  { CODE_FOR_insn_s1a, NULL }, /* s1a */
  { CODE_FOR_insn_s2a, NULL }, /* s2a */
  { CODE_FOR_insn_s3a, NULL }, /* s3a */
  { CODE_FOR_insn_sadab_u, NULL }, /* sadab_u */
  { CODE_FOR_insn_sadah, NULL }, /* sadah */
  { CODE_FOR_insn_sadah_u, NULL }, /* sadah_u */
  { CODE_FOR_insn_sadb_u, NULL }, /* sadb_u */
  { CODE_FOR_insn_sadh, NULL }, /* sadh */
  { CODE_FOR_insn_sadh_u, NULL }, /* sadh_u */
  { CODE_FOR_insn_sb, NULL }, /* sb */
  { CODE_FOR_insn_seq, NULL }, /* seq */
  { CODE_FOR_insn_seqb, NULL }, /* seqb */
  { CODE_FOR_insn_seqh, NULL }, /* seqh */
  { CODE_FOR_insn_seqib, NULL }, /* seqib */
  { CODE_FOR_insn_seqih, NULL }, /* seqih */
  { CODE_FOR_insn_sh, NULL }, /* sh */
  { CODE_FOR_ashlsi3, NULL }, /* shl */
  { CODE_FOR_insn_shlb, NULL }, /* shlb */
  { CODE_FOR_insn_shlh, NULL }, /* shlh */
  { CODE_FOR_insn_shlb, NULL }, /* shlib */
  { CODE_FOR_insn_shlh, NULL }, /* shlih */
  { CODE_FOR_lshrsi3, NULL }, /* shr */
  { CODE_FOR_insn_shrb, NULL }, /* shrb */
  { CODE_FOR_insn_shrh, NULL }, /* shrh */
  { CODE_FOR_insn_shrb, NULL }, /* shrib */
  { CODE_FOR_insn_shrh, NULL }, /* shrih */
  { CODE_FOR_insn_slt, NULL }, /* slt */
  { CODE_FOR_insn_slt_u, NULL }, /* slt_u */
  { CODE_FOR_insn_sltb, NULL }, /* sltb */
  { CODE_FOR_insn_sltb_u, NULL }, /* sltb_u */
  { CODE_FOR_insn_slte, NULL }, /* slte */
  { CODE_FOR_insn_slte_u, NULL }, /* slte_u */
  { CODE_FOR_insn_slteb, NULL }, /* slteb */
  { CODE_FOR_insn_slteb_u, NULL }, /* slteb_u */
  { CODE_FOR_insn_slteh, NULL }, /* slteh */
  { CODE_FOR_insn_slteh_u, NULL }, /* slteh_u */
  { CODE_FOR_insn_slth, NULL }, /* slth */
  { CODE_FOR_insn_slth_u, NULL }, /* slth_u */
  { CODE_FOR_insn_sltib, NULL }, /* sltib */
  { CODE_FOR_insn_sltib_u, NULL }, /* sltib_u */
  { CODE_FOR_insn_sltih, NULL }, /* sltih */
  { CODE_FOR_insn_sltih_u, NULL }, /* sltih_u */
  { CODE_FOR_insn_sne, NULL }, /* sne */
  { CODE_FOR_insn_sneb, NULL }, /* sneb */
  { CODE_FOR_insn_sneh, NULL }, /* sneh */
  { CODE_FOR_ashrsi3, NULL }, /* sra */
  { CODE_FOR_insn_srab, NULL }, /* srab */
  { CODE_FOR_insn_srah, NULL }, /* srah */
  { CODE_FOR_insn_srab, NULL }, /* sraib */
  { CODE_FOR_insn_srah, NULL }, /* sraih */
  { CODE_FOR_subsi3, NULL }, /* sub */
  { CODE_FOR_insn_subb, NULL }, /* subb */
  { CODE_FOR_insn_subbs_u, NULL }, /* subbs_u */
  { CODE_FOR_insn_subh, NULL }, /* subh */
  { CODE_FOR_insn_subhs, NULL }, /* subhs */
  { CODE_FOR_sssubsi3, NULL }, /* subs */
  { CODE_FOR_insn_sw, NULL }, /* sw */
  { CODE_FOR_insn_tblidxb0, NULL }, /* tblidxb0 */
  { CODE_FOR_insn_tblidxb1, NULL }, /* tblidxb1 */
  { CODE_FOR_insn_tblidxb2, NULL }, /* tblidxb2 */
  { CODE_FOR_insn_tblidxb3, NULL }, /* tblidxb3 */
  { CODE_FOR_insn_tns, NULL }, /* tns */
  { CODE_FOR_insn_wh64, NULL }, /* wh64 */
  { CODE_FOR_xorsi3, NULL }, /* xor */
  { CODE_FOR_tilepro_network_barrier, NULL }, /* network_barrier */
  { CODE_FOR_tilepro_idn0_receive, NULL }, /* idn0_receive */
  { CODE_FOR_tilepro_idn1_receive, NULL }, /* idn1_receive */
  { CODE_FOR_tilepro_idn_send, NULL }, /* idn_send */
  { CODE_FOR_tilepro_sn_receive, NULL }, /* sn_receive */
  { CODE_FOR_tilepro_sn_send, NULL }, /* sn_send */
  { CODE_FOR_tilepro_udn0_receive, NULL }, /* udn0_receive */
  { CODE_FOR_tilepro_udn1_receive, NULL }, /* udn1_receive */
  { CODE_FOR_tilepro_udn2_receive, NULL }, /* udn2_receive */
  { CODE_FOR_tilepro_udn3_receive, NULL }, /* udn3_receive */
  { CODE_FOR_tilepro_udn_send, NULL }, /* udn_send */
};
struct tilepro_builtin_def
{
  const char *name;
  enum tilepro_builtin code;
  bool is_const;
  /* The first character is the return type.  Subsequent characters
     are the argument types.  See char_to_type.  */
  const char *type;
};

static const struct tilepro_builtin_def tilepro_builtins[] = {
2803 { "__insn_add", TILEPRO_INSN_ADD
, true, "lll" },
2804 { "__insn_addb", TILEPRO_INSN_ADDB
, true, "lll" },
2805 { "__insn_addbs_u", TILEPRO_INSN_ADDBS_U
, false, "lll" },
2806 { "__insn_addh", TILEPRO_INSN_ADDH
, true, "lll" },
2807 { "__insn_addhs", TILEPRO_INSN_ADDHS
, false, "lll" },
2808 { "__insn_addi", TILEPRO_INSN_ADD
, true, "lll" },
2809 { "__insn_addib", TILEPRO_INSN_ADDIB
, true, "lll" },
2810 { "__insn_addih", TILEPRO_INSN_ADDIH
, true, "lll" },
2811 { "__insn_addli", TILEPRO_INSN_ADD
, true, "lll" },
2812 { "__insn_addlis", TILEPRO_INSN_ADDLIS
, false, "lll" },
2813 { "__insn_adds", TILEPRO_INSN_ADDS
, false, "lll" },
2814 { "__insn_adiffb_u", TILEPRO_INSN_ADIFFB_U
, true, "lll" },
2815 { "__insn_adiffh", TILEPRO_INSN_ADIFFH
, true, "lll" },
2816 { "__insn_and", TILEPRO_INSN_AND
, true, "lll" },
2817 { "__insn_andi", TILEPRO_INSN_AND
, true, "lll" },
2818 { "__insn_auli", TILEPRO_INSN_AULI
, true, "lll" },
2819 { "__insn_avgb_u", TILEPRO_INSN_AVGB_U
, true, "lll" },
2820 { "__insn_avgh", TILEPRO_INSN_AVGH
, true, "lll" },
2821 { "__insn_bitx", TILEPRO_INSN_BITX
, true, "ll" },
2822 { "__insn_bytex", TILEPRO_INSN_BYTEX
, true, "ll" },
2823 { "__insn_clz", TILEPRO_INSN_CLZ
, true, "ll" },
2824 { "__insn_crc32_32", TILEPRO_INSN_CRC32_32
, true, "lll" },
2825 { "__insn_crc32_8", TILEPRO_INSN_CRC32_8
, true, "lll" },
2826 { "__insn_ctz", TILEPRO_INSN_CTZ
, true, "ll" },
2827 { "__insn_drain", TILEPRO_INSN_DRAIN
, false, "v" },
2828 { "__insn_dtlbpr", TILEPRO_INSN_DTLBPR
, false, "vl" },
2829 { "__insn_dword_align", TILEPRO_INSN_DWORD_ALIGN
, true, "lllk" },
2830 { "__insn_finv", TILEPRO_INSN_FINV
, false, "vk" },
2831 { "__insn_flush", TILEPRO_INSN_FLUSH
, false, "vk" },
2832 { "__insn_fnop", TILEPRO_INSN_FNOP
, false, "v" },
2833 { "__insn_icoh", TILEPRO_INSN_ICOH
, false, "vk" },
2834 { "__insn_ill", TILEPRO_INSN_ILL
, false, "v" },
2835 { "__insn_info", TILEPRO_INSN_INFO
, false, "vl" },
2836 { "__insn_infol", TILEPRO_INSN_INFOL
, false, "vl" },
2837 { "__insn_inthb", TILEPRO_INSN_INTHB
, true, "lll" },
2838 { "__insn_inthh", TILEPRO_INSN_INTHH
, true, "lll" },
2839 { "__insn_intlb", TILEPRO_INSN_INTLB
, true, "lll" },
2840 { "__insn_intlh", TILEPRO_INSN_INTLH
, true, "lll" },
2841 { "__insn_inv", TILEPRO_INSN_INV
, false, "vp" },
2842 { "__insn_lb", TILEPRO_INSN_LB
, false, "lk" },
2843 { "__insn_lb_u", TILEPRO_INSN_LB_U
, false, "lk" },
2844 { "__insn_lh", TILEPRO_INSN_LH
, false, "lk" },
2845 { "__insn_lh_u", TILEPRO_INSN_LH_U
, false, "lk" },
2846 { "__insn_lnk", TILEPRO_INSN_LNK
, true, "l" },
2847 { "__insn_lw", TILEPRO_INSN_LW
, false, "lk" },
2848 { "__insn_lw_na", TILEPRO_INSN_LW_NA
, false, "lk" },
2849 { "__insn_lb_L2", TILEPRO_INSN_LB_L2
, false, "lk" },
2850 { "__insn_lb_u_L2", TILEPRO_INSN_LB_U_L2
, false, "lk" },
2851 { "__insn_lh_L2", TILEPRO_INSN_LH_L2
, false, "lk" },
2852 { "__insn_lh_u_L2", TILEPRO_INSN_LH_U_L2
, false, "lk" },
2853 { "__insn_lw_L2", TILEPRO_INSN_LW_L2
, false, "lk" },
2854 { "__insn_lw_na_L2", TILEPRO_INSN_LW_NA_L2
, false, "lk" },
2855 { "__insn_lb_miss", TILEPRO_INSN_LB_MISS
, false, "lk" },
2856 { "__insn_lb_u_miss", TILEPRO_INSN_LB_U_MISS
, false, "lk" },
2857 { "__insn_lh_miss", TILEPRO_INSN_LH_MISS
, false, "lk" },
2858 { "__insn_lh_u_miss", TILEPRO_INSN_LH_U_MISS
, false, "lk" },
2859 { "__insn_lw_miss", TILEPRO_INSN_LW_MISS
, false, "lk" },
2860 { "__insn_lw_na_miss", TILEPRO_INSN_LW_NA_MISS
, false, "lk" },
2861 { "__insn_maxb_u", TILEPRO_INSN_MAXB_U
, true, "lll" },
2862 { "__insn_maxh", TILEPRO_INSN_MAXH
, true, "lll" },
2863 { "__insn_maxib_u", TILEPRO_INSN_MAXIB_U
, true, "lll" },
2864 { "__insn_maxih", TILEPRO_INSN_MAXIH
, true, "lll" },
2865 { "__insn_mf", TILEPRO_INSN_MF
, false, "v" },
2866 { "__insn_mfspr", TILEPRO_INSN_MFSPR
, false, "ll" },
2867 { "__insn_minb_u", TILEPRO_INSN_MINB_U
, true, "lll" },
2868 { "__insn_minh", TILEPRO_INSN_MINH
, true, "lll" },
2869 { "__insn_minib_u", TILEPRO_INSN_MINIB_U
, true, "lll" },
2870 { "__insn_minih", TILEPRO_INSN_MINIH
, true, "lll" },
2871 { "__insn_mm", TILEPRO_INSN_MM
, true, "lllll" },
2872 { "__insn_mnz", TILEPRO_INSN_MNZ
, true, "lll" },
2873 { "__insn_mnzb", TILEPRO_INSN_MNZB
, true, "lll" },
2874 { "__insn_mnzh", TILEPRO_INSN_MNZH
, true, "lll" },
2875 { "__insn_move", TILEPRO_INSN_MOVE
, true, "ll" },
2876 { "__insn_movei", TILEPRO_INSN_MOVE
, true, "ll" },
2877 { "__insn_moveli", TILEPRO_INSN_MOVE
, true, "ll" },
2878 { "__insn_movelis", TILEPRO_INSN_MOVELIS
, false, "ll" },
2879 { "__insn_mtspr", TILEPRO_INSN_MTSPR
, false, "vll" },
2880 { "__insn_mulhh_ss", TILEPRO_INSN_MULHH_SS
, true, "lll" },
2881 { "__insn_mulhh_su", TILEPRO_INSN_MULHH_SU
, true, "lll" },
2882 { "__insn_mulhh_uu", TILEPRO_INSN_MULHH_UU
, true, "lll" },
2883 { "__insn_mulhha_ss", TILEPRO_INSN_MULHHA_SS
, true, "llll" },
2884 { "__insn_mulhha_su", TILEPRO_INSN_MULHHA_SU
, true, "llll" },
2885 { "__insn_mulhha_uu", TILEPRO_INSN_MULHHA_UU
, true, "llll" },
2886 { "__insn_mulhhsa_uu", TILEPRO_INSN_MULHHSA_UU
, true, "llll" },
2887 { "__insn_mulhl_ss", TILEPRO_INSN_MULHL_SS
, true, "lll" },
2888 { "__insn_mulhl_su", TILEPRO_INSN_MULHL_SU
, true, "lll" },
2889 { "__insn_mulhl_us", TILEPRO_INSN_MULHL_US
, true, "lll" },
2890 { "__insn_mulhl_uu", TILEPRO_INSN_MULHL_UU
, true, "lll" },
2891 { "__insn_mulhla_ss", TILEPRO_INSN_MULHLA_SS
, true, "llll" },
2892 { "__insn_mulhla_su", TILEPRO_INSN_MULHLA_SU
, true, "llll" },
2893 { "__insn_mulhla_us", TILEPRO_INSN_MULHLA_US
, true, "llll" },
2894 { "__insn_mulhla_uu", TILEPRO_INSN_MULHLA_UU
, true, "llll" },
2895 { "__insn_mulhlsa_uu", TILEPRO_INSN_MULHLSA_UU
, true, "llll" },
2896 { "__insn_mulll_ss", TILEPRO_INSN_MULLL_SS
, true, "lll" },
2897 { "__insn_mulll_su", TILEPRO_INSN_MULLL_SU
, true, "lll" },
2898 { "__insn_mulll_uu", TILEPRO_INSN_MULLL_UU
, true, "lll" },
2899 { "__insn_mullla_ss", TILEPRO_INSN_MULLLA_SS
, true, "llll" },
2900 { "__insn_mullla_su", TILEPRO_INSN_MULLLA_SU
, true, "llll" },
2901 { "__insn_mullla_uu", TILEPRO_INSN_MULLLA_UU
, true, "llll" },
2902 { "__insn_mulllsa_uu", TILEPRO_INSN_MULLLSA_UU
, true, "llll" },
2903 { "__insn_mvnz", TILEPRO_INSN_MVNZ
, true, "llll" },
2904 { "__insn_mvz", TILEPRO_INSN_MVZ
, true, "llll" },
2905 { "__insn_mz", TILEPRO_INSN_MZ
, true, "lll" },
2906 { "__insn_mzb", TILEPRO_INSN_MZB
, true, "lll" },
2907 { "__insn_mzh", TILEPRO_INSN_MZH
, true, "lll" },
2908 { "__insn_nap", TILEPRO_INSN_NAP
, false, "v" },
2909 { "__insn_nop", TILEPRO_INSN_NOP
, true, "v" },
2910 { "__insn_nor", TILEPRO_INSN_NOR
, true, "lll" },
2911 { "__insn_or", TILEPRO_INSN_OR
, true, "lll" },
2912 { "__insn_ori", TILEPRO_INSN_OR
, true, "lll" },
2913 { "__insn_packbs_u", TILEPRO_INSN_PACKBS_U
, false, "lll" },
2914 { "__insn_packhb", TILEPRO_INSN_PACKHB
, true, "lll" },
2915 { "__insn_packhs", TILEPRO_INSN_PACKHS
, false, "lll" },
2916 { "__insn_packlb", TILEPRO_INSN_PACKLB
, true, "lll" },
2917 { "__insn_pcnt", TILEPRO_INSN_PCNT
, true, "ll" },
2918 { "__insn_prefetch", TILEPRO_INSN_PREFETCH
, false, "vk" },
2919 { "__insn_prefetch_L1", TILEPRO_INSN_PREFETCH_L1
, false, "vk" },
2920 { "__insn_rl", TILEPRO_INSN_RL
, true, "lll" },
2921 { "__insn_rli", TILEPRO_INSN_RL
, true, "lll" },
2922 { "__insn_s1a", TILEPRO_INSN_S1A
, true, "lll" },
2923 { "__insn_s2a", TILEPRO_INSN_S2A
, true, "lll" },
2924 { "__insn_s3a", TILEPRO_INSN_S3A
, true, "lll" },
2925 { "__insn_sadab_u", TILEPRO_INSN_SADAB_U
, true, "llll" },
2926 { "__insn_sadah", TILEPRO_INSN_SADAH
, true, "llll" },
2927 { "__insn_sadah_u", TILEPRO_INSN_SADAH_U
, true, "llll" },
2928 { "__insn_sadb_u", TILEPRO_INSN_SADB_U
, true, "lll" },
2929 { "__insn_sadh", TILEPRO_INSN_SADH
, true, "lll" },
2930 { "__insn_sadh_u", TILEPRO_INSN_SADH_U
, true, "lll" },
2931 { "__insn_sb", TILEPRO_INSN_SB
, false, "vpl" },
2932 { "__insn_seq", TILEPRO_INSN_SEQ
, true, "lll" },
2933 { "__insn_seqb", TILEPRO_INSN_SEQB
, true, "lll" },
2934 { "__insn_seqh", TILEPRO_INSN_SEQH
, true, "lll" },
2935 { "__insn_seqi", TILEPRO_INSN_SEQ
, true, "lll" },
2936 { "__insn_seqib", TILEPRO_INSN_SEQIB
, true, "lll" },
2937 { "__insn_seqih", TILEPRO_INSN_SEQIH
, true, "lll" },
2938 { "__insn_sh", TILEPRO_INSN_SH
, false, "vpl" },
2939 { "__insn_shl", TILEPRO_INSN_SHL
, true, "lll" },
2940 { "__insn_shlb", TILEPRO_INSN_SHLB
, true, "lll" },
2941 { "__insn_shlh", TILEPRO_INSN_SHLH
, true, "lll" },
2942 { "__insn_shli", TILEPRO_INSN_SHL
, true, "lll" },
2943 { "__insn_shlib", TILEPRO_INSN_SHLIB
, true, "lll" },
2944 { "__insn_shlih", TILEPRO_INSN_SHLIH
, true, "lll" },
2945 { "__insn_shr", TILEPRO_INSN_SHR
, true, "lll" },
2946 { "__insn_shrb", TILEPRO_INSN_SHRB
, true, "lll" },
2947 { "__insn_shrh", TILEPRO_INSN_SHRH
, true, "lll" },
2948 { "__insn_shri", TILEPRO_INSN_SHR
, true, "lll" },
2949 { "__insn_shrib", TILEPRO_INSN_SHRIB
, true, "lll" },
2950 { "__insn_shrih", TILEPRO_INSN_SHRIH
, true, "lll" },
2951 { "__insn_slt", TILEPRO_INSN_SLT
, true, "lll" },
2952 { "__insn_slt_u", TILEPRO_INSN_SLT_U
, true, "lll" },
2953 { "__insn_sltb", TILEPRO_INSN_SLTB
, true, "lll" },
2954 { "__insn_sltb_u", TILEPRO_INSN_SLTB_U
, true, "lll" },
2955 { "__insn_slte", TILEPRO_INSN_SLTE
, true, "lll" },
2956 { "__insn_slte_u", TILEPRO_INSN_SLTE_U
, true, "lll" },
2957 { "__insn_slteb", TILEPRO_INSN_SLTEB
, true, "lll" },
2958 { "__insn_slteb_u", TILEPRO_INSN_SLTEB_U
, true, "lll" },
2959 { "__insn_slteh", TILEPRO_INSN_SLTEH
, true, "lll" },
2960 { "__insn_slteh_u", TILEPRO_INSN_SLTEH_U
, true, "lll" },
2961 { "__insn_slth", TILEPRO_INSN_SLTH
, true, "lll" },
2962 { "__insn_slth_u", TILEPRO_INSN_SLTH_U
, true, "lll" },
2963 { "__insn_slti", TILEPRO_INSN_SLT
, true, "lll" },
2964 { "__insn_slti_u", TILEPRO_INSN_SLT_U
, true, "lll" },
2965 { "__insn_sltib", TILEPRO_INSN_SLTIB
, true, "lll" },
2966 { "__insn_sltib_u", TILEPRO_INSN_SLTIB_U
, true, "lll" },
2967 { "__insn_sltih", TILEPRO_INSN_SLTIH
, true, "lll" },
2968 { "__insn_sltih_u", TILEPRO_INSN_SLTIH_U
, true, "lll" },
2969 { "__insn_sne", TILEPRO_INSN_SNE
, true, "lll" },
2970 { "__insn_sneb", TILEPRO_INSN_SNEB
, true, "lll" },
2971 { "__insn_sneh", TILEPRO_INSN_SNEH
, true, "lll" },
2972 { "__insn_sra", TILEPRO_INSN_SRA
, true, "lll" },
2973 { "__insn_srab", TILEPRO_INSN_SRAB
, true, "lll" },
2974 { "__insn_srah", TILEPRO_INSN_SRAH
, true, "lll" },
2975 { "__insn_srai", TILEPRO_INSN_SRA
, true, "lll" },
2976 { "__insn_sraib", TILEPRO_INSN_SRAIB
, true, "lll" },
2977 { "__insn_sraih", TILEPRO_INSN_SRAIH
, true, "lll" },
2978 { "__insn_sub", TILEPRO_INSN_SUB
, true, "lll" },
2979 { "__insn_subb", TILEPRO_INSN_SUBB
, true, "lll" },
2980 { "__insn_subbs_u", TILEPRO_INSN_SUBBS_U
, false, "lll" },
2981 { "__insn_subh", TILEPRO_INSN_SUBH
, true, "lll" },
2982 { "__insn_subhs", TILEPRO_INSN_SUBHS
, false, "lll" },
2983 { "__insn_subs", TILEPRO_INSN_SUBS
, false, "lll" },
2984 { "__insn_sw", TILEPRO_INSN_SW
, false, "vpl" },
2985 { "__insn_tblidxb0", TILEPRO_INSN_TBLIDXB0
, true, "lll" },
2986 { "__insn_tblidxb1", TILEPRO_INSN_TBLIDXB1
, true, "lll" },
2987 { "__insn_tblidxb2", TILEPRO_INSN_TBLIDXB2
, true, "lll" },
2988 { "__insn_tblidxb3", TILEPRO_INSN_TBLIDXB3
, true, "lll" },
2989 { "__insn_tns", TILEPRO_INSN_TNS
, false, "lp" },
2990 { "__insn_wh64", TILEPRO_INSN_WH64
, false, "vp" },
2991 { "__insn_xor", TILEPRO_INSN_XOR
, true, "lll" },
2992 { "__insn_xori", TILEPRO_INSN_XOR
, true, "lll" },
2993 { "__tile_network_barrier", TILEPRO_NETWORK_BARRIER
, false, "v" },
2994 { "__tile_idn0_receive", TILEPRO_IDN0_RECEIVE
, false, "l" },
2995 { "__tile_idn1_receive", TILEPRO_IDN1_RECEIVE
, false, "l" },
2996 { "__tile_idn_send", TILEPRO_IDN_SEND
, false, "vl" },
2997 { "__tile_sn_receive", TILEPRO_SN_RECEIVE
, false, "l" },
2998 { "__tile_sn_send", TILEPRO_SN_SEND
, false, "vl" },
2999 { "__tile_udn0_receive", TILEPRO_UDN0_RECEIVE
, false, "l" },
3000 { "__tile_udn1_receive", TILEPRO_UDN1_RECEIVE
, false, "l" },
3001 { "__tile_udn2_receive", TILEPRO_UDN2_RECEIVE
, false, "l" },
3002 { "__tile_udn3_receive", TILEPRO_UDN3_RECEIVE
, false, "l" },
3003 { "__tile_udn_send", TILEPRO_UDN_SEND
, false, "vl" },
/* Convert a character in a builtin type string to a tree type.  */
static tree
char_to_type (char c)
{
  static tree volatile_ptr_type_node = NULL;
  static tree volatile_const_ptr_type_node = NULL;

  if (volatile_ptr_type_node == NULL)
    {
      volatile_ptr_type_node =
	build_pointer_type (build_qualified_type (void_type_node,
						  TYPE_QUAL_VOLATILE));
      volatile_const_ptr_type_node =
	build_pointer_type (build_qualified_type (void_type_node,
						  TYPE_QUAL_CONST
						  | TYPE_QUAL_VOLATILE));
    }

  switch (c)
    {
    case 'v':
      return void_type_node;
    case 'l':
      return long_unsigned_type_node;
    case 'p':
      return volatile_ptr_type_node;
    case 'k':
      return volatile_const_ptr_type_node;
    default:
      gcc_unreachable ();
    }
}
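/* Decoding example: the type string "lll" in the table above
   therefore describes a builtin taking two 'l' (long unsigned)
   operands and returning 'l', so __insn_add is registered as if it
   were declared
   "unsigned long __insn_add (unsigned long, unsigned long)".  */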
/* Implement TARGET_INIT_BUILTINS.  */
static void
tilepro_init_builtins (void)
{
  size_t i;

  for (i = 0; i < ARRAY_SIZE (tilepro_builtins); i++)
    {
      const struct tilepro_builtin_def *p = &tilepro_builtins[i];
      tree ftype, ret_type, arg_type_list = void_list_node;
      tree decl;
      int j;

      for (j = strlen (p->type) - 1; j > 0; j--)
	{
	  arg_type_list =
	    tree_cons (NULL_TREE, char_to_type (p->type[j]), arg_type_list);
	}

      ret_type = char_to_type (p->type[0]);

      ftype = build_function_type (ret_type, arg_type_list);

      decl = add_builtin_function (p->name, ftype, p->code, BUILT_IN_MD,
				   NULL, NULL);

      if (p->is_const)
	TREE_READONLY (decl) = 1;
      TREE_NOTHROW (decl) = 1;

      if (tilepro_builtin_info[p->code].fndecl == NULL)
	tilepro_builtin_info[p->code].fndecl = decl;
    }
}
/* Implement TARGET_EXPAND_BUILTIN.  */
static rtx
tilepro_expand_builtin (tree exp,
			rtx target,
			rtx subtarget ATTRIBUTE_UNUSED,
			enum machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
#define MAX_BUILTIN_ARGS 4

  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  tree arg;
  call_expr_arg_iterator iter;
  enum insn_code icode;
  rtx op[MAX_BUILTIN_ARGS + 1], pat;
  int opnum;
  bool nonvoid;
  insn_gen_fn fn;

  if (fcode >= TILEPRO_BUILTIN_max)
    internal_error ("bad builtin fcode");
  icode = tilepro_builtin_info[fcode].icode;
  if (icode == 0)
    internal_error ("bad builtin icode");

  nonvoid = TREE_TYPE (TREE_TYPE (fndecl)) != void_type_node;

  opnum = nonvoid;
  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
    {
      const struct insn_operand_data *insn_op;

      if (arg == error_mark_node)
	return NULL_RTX;
      if (opnum > MAX_BUILTIN_ARGS)
	return NULL_RTX;

      insn_op = &insn_data[icode].operand[opnum];

      op[opnum] = expand_expr (arg, NULL_RTX, insn_op->mode, EXPAND_NORMAL);

      if (!(*insn_op->predicate) (op[opnum], insn_op->mode))
	op[opnum] = copy_to_mode_reg (insn_op->mode, op[opnum]);

      if (!(*insn_op->predicate) (op[opnum], insn_op->mode))
	{
	  /* We still failed to meet the predicate even after moving
	     into a register.  Assume we needed an immediate.  */
	  error_at (EXPR_LOCATION (exp),
		    "operand must be an immediate of the right size");
	  return const0_rtx;
	}

      opnum++;
    }

  if (nonvoid)
    {
      enum machine_mode tmode = insn_data[icode].operand[0].mode;
      if (!target
	  || GET_MODE (target) != tmode
	  || !(*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);
      op[0] = target;
    }

  fn = GEN_FCN (icode);
  switch (opnum)
    {
    case 0:
      pat = fn (NULL_RTX);
      break;
    case 1:
      pat = fn (op[0]);
      break;
    case 2:
      pat = fn (op[0], op[1]);
      break;
    case 3:
      pat = fn (op[0], op[1], op[2]);
      break;
    case 4:
      pat = fn (op[0], op[1], op[2], op[3]);
      break;
    case 5:
      pat = fn (op[0], op[1], op[2], op[3], op[4]);
      break;
    default:
      gcc_unreachable ();
    }
  if (!pat)
    return NULL_RTX;
  emit_insn (pat);

  if (nonvoid)
    return target;
  else
    return const0_rtx;
}
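/* As a usage sketch (illustrative only): a nonvoid builtin such as
   __insn_clz expands through its icode's gen function, so
   "x = __insn_clz (y)" becomes a single clzsi2 insn whose result
   register is returned as TARGET above; the error path fires when an
   operand the insn pattern requires to be a compile-time immediate
   is given a runtime value.  */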
/* Implement TARGET_BUILTIN_DECL.  */
static tree
tilepro_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
{
  if (code >= TILEPRO_BUILTIN_max)
    return error_mark_node;

  return tilepro_builtin_info[code].fndecl;
}
/* Return whether REGNO needs to be saved in the stack frame.  */
static bool
need_to_save_reg (unsigned int regno)
{
  if (!fixed_regs[regno] && !call_used_regs[regno]
      && df_regs_ever_live_p (regno))
    return true;

  if (flag_pic
      && (regno == PIC_OFFSET_TABLE_REGNUM
	  || regno == TILEPRO_PIC_TEXT_LABEL_REGNUM)
      && (crtl->uses_pic_offset_table || crtl->saves_all_registers))
    return true;

  if (crtl->calls_eh_return)
    {
      unsigned i;
      for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; i++)
	{
	  if (regno == EH_RETURN_DATA_REGNO (i))
	    return true;
	}
    }

  return false;
}
/* Return the size of the register save area.  This function is only
   correct starting with local register allocation.  */
static int
tilepro_saved_regs_size (void)
{
  int reg_save_size = 0;
  int regno;
  int offset_to_frame;
  int align_mask;

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (need_to_save_reg (regno))
      reg_save_size += UNITS_PER_WORD;

  /* Pad out the register save area if necessary to make
     frame_pointer_rtx be as aligned as the stack pointer.  */
  offset_to_frame = crtl->args.pretend_args_size + reg_save_size;
  align_mask = (STACK_BOUNDARY / BITS_PER_UNIT) - 1;
  reg_save_size += (-offset_to_frame) & align_mask;

  return reg_save_size;
}
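/* Worked example (assuming STACK_BOUNDARY is 64 bits here): with no
   pretend args and three 4-byte registers saved, reg_save_size
   starts at 12; (-12) & 7 == 4, so 4 bytes of padding round the save
   area up to 16, keeping frame_pointer_rtx as aligned as the stack
   pointer.  */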
/* Round up frame size SIZE.  */
static int
round_frame_size (int size)
{
  return ((size + STACK_BOUNDARY / BITS_PER_UNIT - 1)
	  & -STACK_BOUNDARY / BITS_PER_UNIT);
}
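/* E.g., under the same 8-byte boundary assumption,
   round_frame_size (20) == (20 + 7) & -8 == 24.  */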
/* Emit a store in the stack frame to save REGNO at address ADDR, and
   emit the corresponding REG_CFA_OFFSET note described by CFA and
   CFA_OFFSET.  Return the emitted insn.  */
static rtx
frame_emit_store (int regno, int regno_note, rtx addr, rtx cfa,
		  int cfa_offset)
{
  rtx reg = gen_rtx_REG (Pmode, regno);
  rtx mem = gen_frame_mem (Pmode, addr);
  rtx mov = gen_movsi (mem, reg);

  /* Describe what just happened in a way that dwarf understands.  We
     use temporary registers to hold the address to make scheduling
     easier, and use the REG_CFA_OFFSET to describe the address as an
     offset from the CFA.  */
  rtx reg_note = gen_rtx_REG (Pmode, regno_note);
  rtx cfa_relative_addr = gen_rtx_PLUS (Pmode, cfa, gen_int_si (cfa_offset));
  rtx cfa_relative_mem = gen_frame_mem (Pmode, cfa_relative_addr);
  rtx real = gen_rtx_SET (VOIDmode, cfa_relative_mem, reg_note);
  add_reg_note (mov, REG_CFA_OFFSET, real);

  return emit_insn (mov);
}
/* Emit a load in the stack frame to load REGNO from address ADDR.
   Add a REG_CFA_RESTORE note to CFA_RESTORES if CFA_RESTORES is
   non-null.  Return the emitted insn.  */
static rtx
frame_emit_load (int regno, rtx addr, rtx *cfa_restores)
{
  rtx reg = gen_rtx_REG (Pmode, regno);
  rtx mem = gen_frame_mem (Pmode, addr);
  if (cfa_restores)
    *cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg, *cfa_restores);
  return emit_insn (gen_movsi (reg, mem));
}
/* Helper function to set RTX_FRAME_RELATED_P on instructions,
   including sequences.  */
static bool
set_frame_related_p (void)
{
  rtx seq = get_insns ();
  rtx insn;

  end_sequence ();

  if (!seq)
    return false;

  if (INSN_P (seq))
    {
      insn = seq;
      while (insn != NULL_RTX)
	{
	  RTX_FRAME_RELATED_P (insn) = 1;
	  insn = NEXT_INSN (insn);
	}
      seq = emit_insn (seq);
    }
  else
    {
      seq = emit_insn (seq);
      RTX_FRAME_RELATED_P (seq) = 1;
    }
  return true;
}
#define FRP(exp) (start_sequence (), exp, set_frame_related_p ())
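/* For example, FRP (emit_move_insn (dst, src)) evaluates as
   (start_sequence (), emit_move_insn (dst, src),
   set_frame_related_p ()): the move is generated into a temporary
   sequence that set_frame_related_p () re-emits with
   RTX_FRAME_RELATED_P set on each insn.  */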
/* This emits code for 'sp += offset'.

   The ABI only allows us to modify 'sp' in a single 'addi' or
   'addli', so the backtracer understands it.  Larger amounts cannot
   use those instructions, so are added by placing the offset into a
   large register and using 'add'.

   This happens after reload, so we need to expand it ourselves.  */
static rtx
emit_sp_adjust (int offset, int *next_scratch_regno, bool frame_related,
		rtx reg_notes)
{
  rtx to_add;
  rtx imm_rtx = gen_int_si (offset);
  rtx insn;

  if (satisfies_constraint_J (imm_rtx))
    {
      /* We can add this using a single addi or addli.  */
      to_add = imm_rtx;
    }
  else
    {
      rtx tmp = gen_rtx_REG (Pmode, (*next_scratch_regno)--);
      tilepro_expand_set_const32 (tmp, imm_rtx);
      to_add = tmp;
    }

  /* Actually adjust the stack pointer.  */
  insn = emit_insn (gen_sp_adjust (stack_pointer_rtx, stack_pointer_rtx,
				   to_add));
  REG_NOTES (insn) = reg_notes;

  /* Describe what just happened in a way that dwarf understands.  */
  if (frame_related)
    {
      rtx real = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
			      gen_rtx_PLUS (Pmode, stack_pointer_rtx,
					    imm_rtx));
      RTX_FRAME_RELATED_P (insn) = 1;
      add_reg_note (insn, REG_CFA_ADJUST_CFA, real);
    }

  return insn;
}
/* Return whether the current function is leaf.  This takes into
   account whether the function calls tls_get_addr.  */
static bool
tilepro_current_function_is_leaf (void)
{
  return crtl->is_leaf && !cfun->machine->calls_tls_get_addr;
}
/* Return the frame size.  */
static int
compute_total_frame_size (void)
{
  int total_size = (get_frame_size () + tilepro_saved_regs_size ()
		    + crtl->outgoing_args_size
		    + crtl->args.pretend_args_size);

  if (!tilepro_current_function_is_leaf () || cfun->calls_alloca)
    {
      /* Make room for save area in callee.  */
      total_size += STACK_POINTER_OFFSET;
    }

  return round_frame_size (total_size);
}
/* Return nonzero if this function is known to have a null epilogue.
   This allows the optimizer to omit jumps to jumps if no stack was
   created.  */
int
tilepro_can_use_return_insn_p (void)
{
  return (reload_completed
	  && cfun->static_chain_decl == 0
	  && compute_total_frame_size () == 0
	  && tilepro_current_function_is_leaf ()
	  && !crtl->profile && !df_regs_ever_live_p (TILEPRO_LINK_REGNUM));
}
/* Returns an rtx for a stack slot at 'FP + offset_from_fp'.  If there
   is a frame pointer, it computes the value relative to
   that.  Otherwise it uses the stack pointer.  */
static rtx
compute_frame_addr (int offset_from_fp, int *next_scratch_regno)
{
  rtx base_reg_rtx, tmp_reg_rtx, offset_rtx;
  int offset_from_base;

  if (frame_pointer_needed)
    {
      base_reg_rtx = hard_frame_pointer_rtx;
      offset_from_base = offset_from_fp;
    }
  else
    {
      int offset_from_sp = compute_total_frame_size () + offset_from_fp;
      base_reg_rtx = stack_pointer_rtx;
      offset_from_base = offset_from_sp;
    }

  if (offset_from_base == 0)
    return base_reg_rtx;

  /* Compute the new value of the stack pointer.  */
  tmp_reg_rtx = gen_rtx_REG (Pmode, (*next_scratch_regno)--);
  offset_rtx = gen_int_si (offset_from_base);

  if (!tilepro_expand_addsi (tmp_reg_rtx, base_reg_rtx, offset_rtx))
    {
      emit_insn (gen_rtx_SET (VOIDmode, tmp_reg_rtx,
			      gen_rtx_PLUS (Pmode, base_reg_rtx,
					    offset_rtx)));
    }

  return tmp_reg_rtx;
}
/* The stack frame looks like this:
	 +-------------+
	 |    ...      |
	 |  incoming   |
	 | stack args  |
   AP -> +-------------+
	 | caller's HFP|
	 +-------------+
	 | lr save     |
  HFP -> +-------------+
	 |  var args   |
	 |  reg save   | crtl->args.pretend_args_size bytes
	 +-------------+
	 |    ...      |
	 | saved regs  | tilepro_saved_regs_size() bytes
   FP -> +-------------+
	 |    ...      |
	 |   vars      | get_frame_size() bytes
	 +-------------+
	 |    ...      |
	 |  outgoing   |
	 | stack args  | crtl->outgoing_args_size bytes
	 +-------------+
	 | HFP         | 4 bytes (only here if nonleaf / alloca)
	 +-------------+
	 | callee lr   | 4 bytes (only here if nonleaf / alloca)
	 |  save       |
   SP -> +-------------+

   HFP == incoming SP.

   For functions with a frame larger than 32767 bytes, or which use
   alloca (), r52 is used as a frame pointer.  Otherwise there is no
   frame pointer.

   FP is saved at SP+4 before calling a subroutine so the
   callee can chain.  */
void
tilepro_expand_prologue (void)
{
#define ROUND_ROBIN_SIZE 4
  /* We round-robin through four scratch registers to hold temporary
     addresses for saving registers, to make instruction scheduling
     easier.  */
  rtx reg_save_addr[ROUND_ROBIN_SIZE] = {
    NULL_RTX, NULL_RTX, NULL_RTX, NULL_RTX
  };
  rtx insn, cfa;
  unsigned int which_scratch;
  int offset, start_offset, regno;

  /* A register that holds a copy of the incoming fp.  */
  int fp_copy_regno = -1;

  /* A register that holds a copy of the incoming sp.  */
  int sp_copy_regno = -1;

  /* Next scratch register number to hand out (postdecrementing).  */
  int next_scratch_regno = 29;

  int total_size = compute_total_frame_size ();

  if (flag_stack_usage_info)
    current_function_static_stack_size = total_size;

  /* Save lr first in its special location because code after this
     might use the link register as a scratch register.  */
  if (df_regs_ever_live_p (TILEPRO_LINK_REGNUM) || crtl->calls_eh_return)
    FRP (frame_emit_store (TILEPRO_LINK_REGNUM, TILEPRO_LINK_REGNUM,
			   stack_pointer_rtx, stack_pointer_rtx, 0));

  if (total_size == 0)
    {
      /* Load the PIC register if needed.  */
      if (flag_pic && crtl->uses_pic_offset_table)
	load_pic_register (false);

      return;
    }

  cfa = stack_pointer_rtx;

  if (frame_pointer_needed)
    {
      fp_copy_regno = next_scratch_regno--;

      /* Copy the old frame pointer aside so we can save it later.  */
      insn = FRP (emit_move_insn (gen_rtx_REG (word_mode, fp_copy_regno),
				  hard_frame_pointer_rtx));
      add_reg_note (insn, REG_CFA_REGISTER, NULL_RTX);

      /* Set up the frame pointer.  */
      insn = FRP (emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx));
      add_reg_note (insn, REG_CFA_DEF_CFA, hard_frame_pointer_rtx);
      cfa = hard_frame_pointer_rtx;
      REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;

      /* fp holds a copy of the incoming sp, in case we need to store
	 it.  */
      sp_copy_regno = HARD_FRAME_POINTER_REGNUM;
    }
  else if (!tilepro_current_function_is_leaf ())
    {
      /* Copy the old stack pointer aside so we can save it later.  */
      sp_copy_regno = next_scratch_regno--;
      emit_move_insn (gen_rtx_REG (Pmode, sp_copy_regno),
		      stack_pointer_rtx);
    }

  if (tilepro_current_function_is_leaf ())
    {
      /* No need to store chain pointer to caller's frame.  */
      emit_sp_adjust (-total_size, &next_scratch_regno,
		      !frame_pointer_needed, NULL_RTX);
    }
  else
    {
      /* Save the frame pointer (incoming sp value) to support
	 backtracing.  First we need to create an rtx with the store
	 address.  */
      rtx chain_addr = gen_rtx_REG (Pmode, next_scratch_regno--);
      rtx size_rtx = gen_int_si (-(total_size - UNITS_PER_WORD));

      if (add_operand (size_rtx, Pmode))
	{
	  /* Expose more parallelism by computing this value from the
	     original stack pointer, not the one after we have pushed
	     the frame.  */
	  rtx p = gen_rtx_PLUS (Pmode, stack_pointer_rtx, size_rtx);
	  emit_insn (gen_rtx_SET (VOIDmode, chain_addr, p));
	  emit_sp_adjust (-total_size, &next_scratch_regno,
			  !frame_pointer_needed, NULL_RTX);
	}
      else
	{
	  /* The stack frame is large, so just store the incoming sp
	     value at *(new_sp + UNITS_PER_WORD).  */
	  rtx p;
	  emit_sp_adjust (-total_size, &next_scratch_regno,
			  !frame_pointer_needed, NULL_RTX);
	  p = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
			    GEN_INT (UNITS_PER_WORD));
	  emit_insn (gen_rtx_SET (VOIDmode, chain_addr, p));
	}

      /* Save our frame pointer for backtrace chaining.  */
      emit_insn (gen_movsi (gen_frame_mem (SImode, chain_addr),
			    gen_rtx_REG (SImode, sp_copy_regno)));
    }

  /* Compute where to start storing registers we need to save.  */
  start_offset = -crtl->args.pretend_args_size - UNITS_PER_WORD;
  offset = start_offset;

  /* Store all registers that need saving.  */
  which_scratch = 0;
  for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
    if (need_to_save_reg (regno))
      {
	rtx r = reg_save_addr[which_scratch];
	int from_regno;
	int cfa_offset = frame_pointer_needed ? offset : total_size + offset;

	if (r == NULL_RTX)
	  {
	    rtx p = compute_frame_addr (offset, &next_scratch_regno);
	    r = gen_rtx_REG (word_mode, next_scratch_regno--);
	    reg_save_addr[which_scratch] = r;

	    emit_insn (gen_rtx_SET (VOIDmode, r, p));
	  }
	else
	  {
	    /* Advance to the next stack slot to store this register.  */
	    int stride = ROUND_ROBIN_SIZE * -UNITS_PER_WORD;
	    rtx p = gen_rtx_PLUS (Pmode, r, GEN_INT (stride));
	    emit_insn (gen_rtx_SET (VOIDmode, r, p));
	  }

	/* Save this register to the stack (but use the old fp value
	   we copied aside if appropriate).  */
	from_regno = (fp_copy_regno >= 0
		      && regno ==
		      HARD_FRAME_POINTER_REGNUM) ? fp_copy_regno : regno;
	FRP (frame_emit_store (from_regno, regno, r, cfa, cfa_offset));

	offset -= UNITS_PER_WORD;
	which_scratch = (which_scratch + 1) % ROUND_ROBIN_SIZE;
      }

  /* If profiling, force that to happen after the frame is set up.  */
  if (crtl->profile)
    emit_insn (gen_blockage ());

  /* Load the PIC register if needed.  */
  if (flag_pic && crtl->uses_pic_offset_table)
    load_pic_register (false);
}
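/* As a rough sketch, a small nonleaf frame might be pushed as
   (illustrative assembly only; actual registers, offsets and
   ordering depend on the logic above):

       sw     sp, lr                  # save lr at the old sp
       move   r29, sp                 # keep incoming sp for chaining
       addi   r28, sp, -(frame - 4)   # address of the chain slot
       addi   sp, sp, -frame          # push the frame
       sw     r28, r29                # chain incoming sp for backtrace
       ...stores of callee-saved registers...
*/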
/* Implement the epilogue and sibcall_epilogue patterns.  SIBCALL_P is
   true for a sibcall_epilogue pattern, and false for an epilogue
   pattern.  */
void
tilepro_expand_epilogue (bool sibcall_p)
{
  /* We round-robin through four scratch registers to hold temporary
     addresses for saving registers, to make instruction scheduling
     easier.  */
  rtx reg_save_addr[ROUND_ROBIN_SIZE] = {
    NULL_RTX, NULL_RTX, NULL_RTX, NULL_RTX
  };
  rtx last_insn, insn;
  unsigned int which_scratch;
  int offset, start_offset, regno;
  rtx cfa_restores = NULL_RTX;

  /* A register that holds a copy of the incoming fp.  */
  int fp_copy_regno = -1;

  /* Next scratch register number to hand out (postdecrementing).  */
  int next_scratch_regno = 29;

  int total_size = compute_total_frame_size ();

  last_insn = get_last_insn ();

  /* Load lr first since we are going to need it first.  */
  insn = NULL;
  if (df_regs_ever_live_p (TILEPRO_LINK_REGNUM))
    {
      insn = frame_emit_load (TILEPRO_LINK_REGNUM,
			      compute_frame_addr (0, &next_scratch_regno),
			      &cfa_restores);
    }

  if (total_size == 0)
    {
      if (insn)
	{
	  RTX_FRAME_RELATED_P (insn) = 1;
	  REG_NOTES (insn) = cfa_restores;
	}
      goto done;
    }

  /* Compute where to start restoring registers.  */
  start_offset = -crtl->args.pretend_args_size - UNITS_PER_WORD;
  offset = start_offset;

  if (frame_pointer_needed)
    fp_copy_regno = next_scratch_regno--;

  /* Restore all callee-saved registers.  */
  which_scratch = 0;
  for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
    if (need_to_save_reg (regno))
      {
	rtx r = reg_save_addr[which_scratch];
	if (r == NULL_RTX)
	  {
	    r = compute_frame_addr (offset, &next_scratch_regno);
	    reg_save_addr[which_scratch] = r;
	  }
	else
	  {
	    /* Advance to the next stack slot to store this
	       register.  */
	    int stride = ROUND_ROBIN_SIZE * -UNITS_PER_WORD;
	    rtx p = gen_rtx_PLUS (Pmode, r, GEN_INT (stride));
	    emit_insn (gen_rtx_SET (VOIDmode, r, p));
	  }

	if (fp_copy_regno >= 0 && regno == HARD_FRAME_POINTER_REGNUM)
	  frame_emit_load (fp_copy_regno, r, NULL);
	else
	  frame_emit_load (regno, r, &cfa_restores);

	offset -= UNITS_PER_WORD;
	which_scratch = (which_scratch + 1) % ROUND_ROBIN_SIZE;
      }

  if (!tilepro_current_function_is_leaf ())
    cfa_restores =
      alloc_reg_note (REG_CFA_RESTORE, stack_pointer_rtx, cfa_restores);

  emit_insn (gen_blockage ());

  if (frame_pointer_needed)
    {
      /* Restore the old stack pointer by copying from the frame
	 pointer.  */
      insn = emit_insn (gen_sp_restore (stack_pointer_rtx,
					hard_frame_pointer_rtx));
      RTX_FRAME_RELATED_P (insn) = 1;
      REG_NOTES (insn) = cfa_restores;
      add_reg_note (insn, REG_CFA_DEF_CFA, stack_pointer_rtx);
    }
  else
    {
      insn = emit_sp_adjust (total_size, &next_scratch_regno, true,
			     cfa_restores);
    }

  if (crtl->calls_eh_return)
    emit_insn (gen_sp_adjust (stack_pointer_rtx, stack_pointer_rtx,
			      EH_RETURN_STACKADJ_RTX));

  /* Restore the old frame pointer.  */
  if (frame_pointer_needed)
    {
      insn = emit_move_insn (hard_frame_pointer_rtx,
			     gen_rtx_REG (Pmode, fp_copy_regno));
      add_reg_note (insn, REG_CFA_RESTORE, hard_frame_pointer_rtx);
    }

  /* Mark the pic registers as live outside of the function.  */
  if (flag_pic)
    {
      emit_use (cfun->machine->text_label_rtx);
      emit_use (cfun->machine->got_rtx);
    }

done:
  if (!sibcall_p)
    {
      /* Emit the actual 'return' instruction.  */
      emit_jump_insn (gen__return ());
    }
  else
    {
      emit_use (gen_rtx_REG (Pmode, TILEPRO_LINK_REGNUM));
    }

  /* Mark all insns we just emitted as frame-related.  */
  for (; last_insn != NULL_RTX; last_insn = next_insn (last_insn))
    RTX_FRAME_RELATED_P (last_insn) = 1;
}

#undef ROUND_ROBIN_SIZE
/* Implement INITIAL_ELIMINATION_OFFSET.  */
int
tilepro_initial_elimination_offset (int from, int to)
{
  int total_size = compute_total_frame_size ();

  if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    {
      return (total_size - crtl->args.pretend_args_size
	      - tilepro_saved_regs_size ());
    }
  else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
    {
      return -(crtl->args.pretend_args_size + tilepro_saved_regs_size ());
    }
  else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    {
      return STACK_POINTER_OFFSET + total_size;
    }
  else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
    {
      return STACK_POINTER_OFFSET;
    }
  else
    gcc_unreachable ();
}
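/* Worked example: for a 64-byte total frame with no pretend args and
   an 8-byte register save area, FP-to-SP elimination yields
   64 - 0 - 8 = 56, and AP-to-SP yields STACK_POINTER_OFFSET + 64.  */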
/* Return an RTX indicating where the return address to the
   calling function can be found.  */
rtx
tilepro_return_addr (int count, rtx frame ATTRIBUTE_UNUSED)
{
  if (count != 0)
    return const0_rtx;

  return get_hard_reg_initial_val (Pmode, TILEPRO_LINK_REGNUM);
}
/* Implement EH_RETURN_HANDLER_RTX.  */
rtx
tilepro_eh_return_handler_rtx (void)
{
  /* The MEM needs to be volatile to prevent it from being
     deleted.  */
  rtx tmp = gen_frame_mem (Pmode, hard_frame_pointer_rtx);
  MEM_VOLATILE_P (tmp) = true;
  return tmp;
}
/* Implement TARGET_CONDITIONAL_REGISTER_USAGE.  */
static void
tilepro_conditional_register_usage (void)
{
  global_regs[TILEPRO_NETORDER_REGNUM] = 1;
  /* TILEPRO_PIC_TEXT_LABEL_REGNUM is conditionally used.  It is a
     member of fixed_regs, and therefore must be member of
     call_used_regs, but it is not a member of call_really_used_regs[]
     because it is not clobbered by a call.  */
  if (TILEPRO_PIC_TEXT_LABEL_REGNUM != INVALID_REGNUM)
    {
      fixed_regs[TILEPRO_PIC_TEXT_LABEL_REGNUM] = 1;
      call_used_regs[TILEPRO_PIC_TEXT_LABEL_REGNUM] = 1;
    }
  if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    {
      fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
      call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
    }
}
/* Implement TARGET_FRAME_POINTER_REQUIRED.  */
static bool
tilepro_frame_pointer_required (void)
{
  return crtl->calls_eh_return || cfun->calls_alloca;
}
/* Scheduling and reorg  */

/* Return the length of INSN.  LENGTH is the initial length computed
   by attributes in the machine-description file.  This is where we
   account for bundles.  */
int
tilepro_adjust_insn_length (rtx insn, int length)
{
  enum machine_mode mode = GET_MODE (insn);

  /* A non-terminating instruction in a bundle has length 0.  */
  if (mode == SImode)
    return 0;

  /* By default, there is no length adjustment.  */
  return length;
}


/* Implement TARGET_SCHED_ISSUE_RATE.  */
static int
tilepro_issue_rate (void)
{
  return 3;
}
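/* Consequently, in a three-insn bundle only the terminating insn
   (QImode; see tilepro_gen_bundles below) reports its real length;
   the inner insns (SImode) report 0, so each bundle's length is
   counted exactly once.  */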
/* Return the rtx for the jump target.  */
static rtx
get_jump_target (rtx branch)
{
  if (CALL_P (branch))
    {
      rtx call;
      call = PATTERN (branch);

      if (GET_CODE (call) == PARALLEL)
	call = XVECEXP (call, 0, 0);

      if (GET_CODE (call) == SET)
	call = SET_SRC (call);

      if (GET_CODE (call) == CALL)
	return XEXP (XEXP (call, 0), 0);
    }
  return 0;
}
/* Implement TARGET_SCHED_ADJUST_COST.  */
static int
tilepro_sched_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
{
  /* If we have a true dependence, INSN is a call, and DEP_INSN
     defines a register that is needed by the call (argument or stack
     pointer), set its latency to 0 so that it can be bundled with
     the call.  Explicitly check for and exclude the case when
     DEP_INSN defines the target of the jump.  */
  if (CALL_P (insn) && REG_NOTE_KIND (link) == REG_DEP_TRUE)
    {
      rtx target = get_jump_target (insn);
      if (!REG_P (target) || !set_of (target, dep_insn))
	return 0;
    }

  return cost;
}
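/* For example (illustrative scenario), a moveli that sets an
   outgoing argument register of a following call carries a true
   dependence on the call, yet the zero cost returned above lets the
   scheduler place the moveli in the same bundle as the call, which
   is the intent described in the comment.  */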
/* Skip over irrelevant NOTEs and such and look for the next insn we
   would consider bundling.  */
static rtx
next_insn_to_bundle (rtx r, rtx end)
{
  for (; r != end; r = NEXT_INSN (r))
    {
      if (NONDEBUG_INSN_P (r)
	  && GET_CODE (PATTERN (r)) != USE
	  && GET_CODE (PATTERN (r)) != CLOBBER)
	return r;
    }

  return NULL_RTX;
}
/* Go through all insns, and use the information generated during
   scheduling to generate SEQUENCEs to represent bundles of
   instructions issued simultaneously.  */
static void
tilepro_gen_bundles (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
  {
    rtx insn, next;
    rtx end = NEXT_INSN (BB_END (bb));

    for (insn = next_insn_to_bundle (BB_HEAD (bb), end); insn; insn = next)
      {
	next = next_insn_to_bundle (NEXT_INSN (insn), end);

	/* Never wrap {} around inline asm.  */
	if (GET_CODE (PATTERN (insn)) != ASM_INPUT)
	  {
	    if (next == NULL_RTX || GET_MODE (next) == TImode
		/* NOTE: The scheduler incorrectly believes a call
		   insn can execute in the same cycle as the insn
		   after the call.  This is of course impossible.
		   Really we need to fix the scheduler somehow, so
		   the code after the call gets scheduled
		   optimally.  */
		|| CALL_P (insn))
	      {
		/* Mark current insn as the end of a bundle.  */
		PUT_MODE (insn, QImode);
	      }
	    else
	      {
		/* Mark it as part of a bundle.  */
		PUT_MODE (insn, SImode);
	      }
	  }
      }
  }
}
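/* E.g. after scheduling, a cycle issuing { shli ; mulhl_uu ; lw }
   (illustrative bundle) leaves the first two insns in SImode and the
   final one in QImode; tilepro_adjust_insn_length and
   reorder_var_tracking_notes both key off those modes.  */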
/* Helper function for tilepro_fixup_pcrel_references.  */
static void
replace_pc_relative_symbol_ref (rtx insn, rtx opnds[4], bool first_insn_p)
{
  rtx new_insns;

  start_sequence ();

  if (flag_pic == 1)
    {
      if (!first_insn_p)
	{
	  emit_insn (gen_add_got16 (opnds[0], tilepro_got_rtx (),
				    opnds[2]));
	  emit_insn (gen_insn_lw (opnds[0], opnds[0]));
	}
    }
  else
    {
      if (first_insn_p)
	{
	  emit_insn (gen_addhi_got32 (opnds[0], tilepro_got_rtx (),
				      opnds[2]));
	}
      else
	{
	  emit_insn (gen_addlo_got32 (opnds[0], opnds[1], opnds[2]));
	  emit_insn (gen_insn_lw (opnds[0], opnds[0]));
	}
    }

  new_insns = get_insns ();
  end_sequence ();

  if (new_insns)
    emit_insn_before (new_insns, insn);

  delete_insn (insn);
}
/* Returns whether INSN is a pc-relative addli insn.  */
static bool
match_addli_pcrel (rtx insn)
{
  rtx pattern = PATTERN (insn);
  rtx unspec;

  if (GET_CODE (pattern) != SET)
    return false;

  if (GET_CODE (SET_SRC (pattern)) != LO_SUM)
    return false;

  if (GET_CODE (XEXP (SET_SRC (pattern), 1)) != CONST)
    return false;

  unspec = XEXP (XEXP (SET_SRC (pattern), 1), 0);

  return (GET_CODE (unspec) == UNSPEC
	  && XINT (unspec, 1) == UNSPEC_PCREL_SYM);
}
/* Helper function for tilepro_fixup_pcrel_references.  */
static void
replace_addli_pcrel (rtx insn)
{
  rtx pattern = PATTERN (insn);
  rtx set_src;
  rtx unspec;
  rtx opnds[4];
  bool first_insn_p;

  gcc_assert (GET_CODE (pattern) == SET);
  opnds[0] = SET_DEST (pattern);

  set_src = SET_SRC (pattern);
  gcc_assert (GET_CODE (set_src) == LO_SUM);
  gcc_assert (GET_CODE (XEXP (set_src, 1)) == CONST);
  opnds[1] = XEXP (set_src, 0);

  unspec = XEXP (XEXP (set_src, 1), 0);
  gcc_assert (GET_CODE (unspec) == UNSPEC);
  gcc_assert (XINT (unspec, 1) == UNSPEC_PCREL_SYM);
  opnds[2] = XVECEXP (unspec, 0, 0);
  opnds[3] = XVECEXP (unspec, 0, 1);

  /* We only need to replace SYMBOL_REFs, not LABEL_REFs.  */
  if (GET_CODE (opnds[2]) != SYMBOL_REF)
    return;

  first_insn_p = (opnds[1] == tilepro_text_label_rtx ());

  replace_pc_relative_symbol_ref (insn, opnds, first_insn_p);
}
/* Returns whether INSN is a pc-relative auli insn.  */
static bool
match_auli_pcrel (rtx insn)
{
  rtx pattern = PATTERN (insn);
  rtx high;
  rtx unspec;

  if (GET_CODE (pattern) != SET)
    return false;

  if (GET_CODE (SET_SRC (pattern)) != PLUS)
    return false;

  high = XEXP (SET_SRC (pattern), 1);

  if (GET_CODE (high) != HIGH
      || GET_CODE (XEXP (high, 0)) != CONST)
    return false;

  unspec = XEXP (XEXP (high, 0), 0);

  return (GET_CODE (unspec) == UNSPEC
	  && XINT (unspec, 1) == UNSPEC_PCREL_SYM);
}
/* Helper function for tilepro_fixup_pcrel_references.  */
static void
replace_auli_pcrel (rtx insn)
{
  rtx pattern = PATTERN (insn);
  rtx set_src;
  rtx high;
  rtx unspec;
  rtx opnds[4];
  bool first_insn_p;

  gcc_assert (GET_CODE (pattern) == SET);
  opnds[0] = SET_DEST (pattern);

  set_src = SET_SRC (pattern);
  gcc_assert (GET_CODE (set_src) == PLUS);
  opnds[1] = XEXP (set_src, 0);

  high = XEXP (set_src, 1);
  gcc_assert (GET_CODE (high) == HIGH);
  gcc_assert (GET_CODE (XEXP (high, 0)) == CONST);

  unspec = XEXP (XEXP (high, 0), 0);
  gcc_assert (GET_CODE (unspec) == UNSPEC);
  gcc_assert (XINT (unspec, 1) == UNSPEC_PCREL_SYM);
  opnds[2] = XVECEXP (unspec, 0, 0);
  opnds[3] = XVECEXP (unspec, 0, 1);

  /* We only need to replace SYMBOL_REFs, not LABEL_REFs.  */
  if (GET_CODE (opnds[2]) != SYMBOL_REF)
    return;

  first_insn_p = (opnds[1] == tilepro_text_label_rtx ());

  replace_pc_relative_symbol_ref (insn, opnds, first_insn_p);
}
/* We generate PC relative SYMBOL_REFs as an optimization, to avoid
   going through the GOT when the symbol is local to the compilation
   unit.  But such a symbol requires that the common text_label that
   we generate at the beginning of the function be in the same section
   as the reference to the SYMBOL_REF.  This may not be true if we
   generate hot/cold sections.  This function looks for such cases and
   replaces such references with the longer sequence going through the
   GOT.

   We expect one of the following two instruction sequences:
   addli tmp1, txt_label_reg, lo16(sym - txt_label)
   auli  tmp2, tmp1, ha16(sym - txt_label)

   auli  tmp1, txt_label_reg, ha16(sym - txt_label)
   addli tmp2, tmp1, lo16(sym - txt_label)

   If we're compiling -fpic, we replace the first instruction with
   nothing, and the second instruction with:

   addli tmp2, got_rtx, got(sym)
   lw    tmp2, tmp2

   If we're compiling -fPIC, we replace the first instruction with:

   auli  tmp1, got_rtx, got_ha16(sym)

   and the second instruction with:

   addli tmp2, tmp1, got_lo16(sym)
   lw    tmp2, tmp2

   Note that we're careful to disturb the instruction sequence as
   little as possible, since it's very late in the compilation
   process.  */
static void
tilepro_fixup_pcrel_references (void)
{
  rtx insn, next_insn;
  bool same_section_as_entry = true;

  for (insn = get_insns (); insn; insn = next_insn)
    {
      next_insn = NEXT_INSN (insn);

      if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
	{
	  same_section_as_entry = !same_section_as_entry;
	  continue;
	}

      if (same_section_as_entry)
	continue;

      if (!(INSN_P (insn)
	    && GET_CODE (PATTERN (insn)) != USE
	    && GET_CODE (PATTERN (insn)) != CLOBBER))
	continue;

      if (match_addli_pcrel (insn))
	replace_addli_pcrel (insn);
      else if (match_auli_pcrel (insn))
	replace_auli_pcrel (insn);
    }
}
/* Ensure that no var tracking notes are emitted in the middle of a
   three-instruction bundle.  */
static void
reorder_var_tracking_notes (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
  {
    rtx insn, next;
    rtx queue = NULL_RTX;
    bool in_bundle = false;

    for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = next)
      {
	next = NEXT_INSN (insn);

	if (INSN_P (insn))
	  {
	    /* Emit queued up notes at the last instruction of a
	       bundle.  */
	    if (GET_MODE (insn) == QImode)
	      {
		while (queue)
		  {
		    rtx next_queue = PREV_INSN (queue);
		    PREV_INSN (NEXT_INSN (insn)) = queue;
		    NEXT_INSN (queue) = NEXT_INSN (insn);
		    NEXT_INSN (insn) = queue;
		    PREV_INSN (queue) = insn;
		    queue = next_queue;
		  }
		in_bundle = false;
	      }
	    else if (GET_MODE (insn) == SImode)
	      in_bundle = true;
	  }
	else if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION)
	  {
	    if (in_bundle)
	      {
		rtx prev = PREV_INSN (insn);
		PREV_INSN (next) = prev;
		NEXT_INSN (prev) = next;

		PREV_INSN (insn) = queue;
		queue = insn;
	      }
	  }
      }
  }
}
/* Perform machine dependent operations on the rtl chain INSNS.  */
static void
tilepro_reorg (void)
{
  /* We are freeing block_for_insn in the toplev to keep compatibility
     with old MDEP_REORGS that are not CFG based.  Recompute it
     now.  */
  compute_bb_for_insn ();

  if (flag_reorder_blocks_and_partition)
    {
      tilepro_fixup_pcrel_references ();
    }

  if (flag_schedule_insns_after_reload)
    {
      split_all_insns ();

      timevar_push (TV_SCHED2);
      schedule_insns ();
      timevar_pop (TV_SCHED2);

      /* Examine the schedule to group into bundles.  */
      tilepro_gen_bundles ();
    }

  df_analyze ();
  if (flag_var_tracking)
    {
      timevar_push (TV_VAR_TRACKING);
      variable_tracking_main ();
      reorder_var_tracking_notes ();
      timevar_pop (TV_VAR_TRACKING);
    }

  df_finish_pass (false);
}
/* Select a format to encode pointers in exception handling data.
   CODE is 0 for data, 1 for code labels, 2 for function pointers.
   GLOBAL is true if the symbol may be affected by dynamic
   relocations.  */
int
tilepro_asm_preferred_eh_data_format (int code ATTRIBUTE_UNUSED, int global)
{
  return (global ? DW_EH_PE_indirect : 0) | DW_EH_PE_pcrel | DW_EH_PE_sdata4;
}

/* Implement TARGET_ASM_OUTPUT_MI_THUNK.  */
static void
tilepro_asm_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
                             HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
                             tree function)
{
  rtx this_rtx, insn, funexp;

  /* Pretend to be a post-reload pass while generating rtl.  */
  reload_completed = 1;

  /* Mark the end of the (empty) prologue.  */
  emit_note (NOTE_INSN_PROLOGUE_END);

  /* Find the "this" pointer.  If the function returns a structure,
     the structure return pointer is in $1.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
    this_rtx = gen_rtx_REG (Pmode, 1);
  else
    this_rtx = gen_rtx_REG (Pmode, 0);

  /* Add DELTA to THIS_RTX.  */
  emit_insn (gen_addsi3 (this_rtx, this_rtx, GEN_INT (delta)));

  /* If needed, add *(*THIS_RTX + VCALL_OFFSET) to THIS_RTX.  */
  if (vcall_offset)
    {
      rtx tmp;

      tmp = gen_rtx_REG (Pmode, 29);
      emit_move_insn (tmp, gen_rtx_MEM (Pmode, this_rtx));

      emit_insn (gen_addsi3 (tmp, tmp, GEN_INT (vcall_offset)));

      emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));

      emit_insn (gen_addsi3 (this_rtx, this_rtx, tmp));
    }

  /* Generate a tail call to the target function.  */
  if (!TREE_USED (function))
    {
      assemble_external (function);
      TREE_USED (function) = 1;
    }
  funexp = XEXP (DECL_RTL (function), 0);
  funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
  insn = emit_call_insn (gen_sibcall (funexp, const0_rtx));
  SIBLING_CALL_P (insn) = 1;

  /* Run just enough of rest_of_compilation to get the insns emitted.
     There's not really enough bulk here to make other passes such as
     instruction scheduling worth while.  Note that use_thunk calls
     assemble_start_function and assemble_end_function.

     We don't currently bundle, but the instruction sequence is all
     serial except for the tail call, so we're only wasting one cycle.  */
  insn = get_insns ();
  shorten_branches (insn);
  final_start_function (insn, file, 1);
  final (insn, file, 1);
  final_end_function ();

  /* Stop pretending to be a post-reload pass.  */
  reload_completed = 0;
}
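
/* Background (author's note): such thunks typically arise from C++
   multiple inheritance, e.g.

       struct A { virtual void f (); };
       struct B { virtual void g (); };
       struct C : A, B { void g (); };

   Calling C::g through a B* requires a thunk that adjusts the
   incoming this pointer by DELTA (and possibly a vtable-indexed
   VCALL_OFFSET) before tail-calling the real method, which is
   exactly the sequence generated above.  */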

/* Implement TARGET_ASM_TRAMPOLINE_TEMPLATE.  */
static void
tilepro_asm_trampoline_template (FILE *file)
{
  fprintf (file, "\tlnk r10\n");
  fprintf (file, "\taddi r10, r10, 32\n");
  fprintf (file, "\tlwadd r11, r10, %d\n", GET_MODE_SIZE (ptr_mode));
  fprintf (file, "\tlw r10, r10\n");
  fprintf (file, "\tjr r11\n");
  fprintf (file, "\t.word 0 # <function address>\n");
  fprintf (file, "\t.word 0 # <static chain value>\n");
}

/* Implement TARGET_TRAMPOLINE_INIT.  */
static void
tilepro_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
{
  rtx fnaddr, chaddr;
  rtx mem;
  rtx begin_addr, end_addr;
  int ptr_mode_size = GET_MODE_SIZE (ptr_mode);

  fnaddr = copy_to_reg (XEXP (DECL_RTL (fndecl), 0));
  chaddr = copy_to_reg (static_chain);

  emit_block_move (m_tramp, assemble_trampoline_template (),
                   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);

  mem = adjust_address (m_tramp, ptr_mode,
                        TRAMPOLINE_SIZE - 2 * ptr_mode_size);
  emit_move_insn (mem, fnaddr);
  mem = adjust_address (m_tramp, ptr_mode,
                        TRAMPOLINE_SIZE - ptr_mode_size);
  emit_move_insn (mem, chaddr);

  /* Get pointers to the beginning and end of the code block.  */
  begin_addr = force_reg (Pmode, XEXP (m_tramp, 0));
  end_addr = force_reg (Pmode, plus_constant (Pmode, XEXP (m_tramp, 0),
                                              TRAMPOLINE_SIZE));

  emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__clear_cache"),
                     LCT_NORMAL, VOIDmode, 2, begin_addr, Pmode,
                     end_addr, Pmode);
}
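
/* Usage sketch (author's illustration): trampolines are materialized
   when code takes the address of a GNU C nested function:

       int outer (int k)
       {
         int inner (int x) { return x + k; }
         int (*fp) (int) = inner;
         return fp (1);
       }

   Taking INNER's address forces the sequence above: the template is
   copied into stack memory, the two trailing words are patched with
   INNER's code address and the static chain, and __clear_cache makes
   the freshly written code visible to instruction fetch.  */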

/* Implement TARGET_PRINT_OPERAND.  */
static void
tilepro_print_operand (FILE *file, rtx x, int code)
{
  switch (code)
    {
    case 'c':
      /* Print the compare operator opcode for conditional moves.  */
      switch (GET_CODE (x))
        {
        case EQ:
          fputs ("z", file);
          break;
        case NE:
          fputs ("nz", file);
          break;
        default:
          output_operand_lossage ("invalid %%c operand");
        }
      return;

    case 'C':
      /* Print the compare operator opcode for conditional moves,
         inverted sense.  */
      switch (GET_CODE (x))
        {
        case EQ:
          fputs ("nz", file);
          break;
        case NE:
          fputs ("z", file);
          break;
        default:
          output_operand_lossage ("invalid %%C operand");
        }
      return;
    case 'h':
      {
        /* Print the high 16 bits of a 32-bit constant.  */
        HOST_WIDE_INT i;

        if (CONST_INT_P (x))
          i = INTVAL (x);
        else if (GET_CODE (x) == CONST_DOUBLE)
          i = CONST_DOUBLE_LOW (x);
        else
          {
            output_operand_lossage ("invalid %%h operand");
            return;
          }

        i = trunc_int_for_mode (i >> 16, HImode);
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, i);
        return;
      }
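
      /* Worked example (author's note): for the constant 0x1234abcd
         the shift yields 0x1234, so %h prints 4660.  This is a plain
         arithmetic shift, with none of the low-half sign adjustment
         that an ha16() relocation performs.  */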
    case 'H':
      {
        rtx addr = NULL;
        const char *opstr = NULL;
        bool pcrel = false;
        if (GET_CODE (x) == CONST
            && GET_CODE (XEXP (x, 0)) == UNSPEC)
          {
            addr = XVECEXP (XEXP (x, 0), 0, 0);
            switch (XINT (XEXP (x, 0), 1))
              {
              case UNSPEC_GOT32_SYM:
                opstr = "got_ha16";
                break;
              case UNSPEC_PCREL_SYM:
                opstr = "ha16";
                pcrel = true;
                break;
              case UNSPEC_TLS_GD:
                opstr = "tls_gd_ha16";
                break;
              case UNSPEC_TLS_IE:
                opstr = "tls_ie_ha16";
                break;
              case UNSPEC_TLS_LE:
                opstr = "tls_le_ha16";
                break;
              default:
                output_operand_lossage ("invalid %%H operand");
              }
          }
        else
          {
            addr = x;
            opstr = "ha16";
          }

        fputs (opstr, file);
        fputc ('(', file);
        output_addr_const (file, addr);

        if (pcrel)
          {
            rtx addr2 = XVECEXP (XEXP (x, 0), 0, 1);
            fputs (" - " , file);
            output_addr_const (file, addr2);
          }

        fputc (')', file);
        return;
      }
    case 'I':
      /* Print an auto-inc memory operand.  */
      if (!MEM_P (x))
        {
          output_operand_lossage ("invalid %%I operand");
          return;
        }

      output_memory_reference_mode = GET_MODE (x);
      output_memory_autoinc_first = true;
      output_address (XEXP (x, 0));
      output_memory_reference_mode = VOIDmode;
      return;

    case 'i':
      /* Print an auto-inc memory operand.  */
      if (!MEM_P (x))
        {
          output_operand_lossage ("invalid %%i operand");
          return;
        }

      output_memory_reference_mode = GET_MODE (x);
      output_memory_autoinc_first = false;
      output_address (XEXP (x, 0));
      output_memory_reference_mode = VOIDmode;
      return;
    case 'j':
      {
        /* Print the low 8 bits of a constant.  */
        HOST_WIDE_INT i;

        if (CONST_INT_P (x))
          i = INTVAL (x);
        else if (GET_CODE (x) == CONST_DOUBLE)
          i = CONST_DOUBLE_LOW (x);
        else if (GET_CODE (x) == CONST_VECTOR
                 && CONST_INT_P (CONST_VECTOR_ELT (x, 0)))
          i = INTVAL (CONST_VECTOR_ELT (x, 0));
        else
          {
            output_operand_lossage ("invalid %%j operand");
            return;
          }

        i = trunc_int_for_mode (i, QImode);
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, i);
        return;
      }
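
      /* Worked example (author's note): for the constant 0x1ff,
         truncation to QImode keeps the low byte 0xff and
         sign-extends it, so %j prints -1.  */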
    case 'L':
      {
        rtx addr = NULL;
        const char *opstr = NULL;
        bool pcrel = false;
        if (GET_CODE (x) == CONST
            && GET_CODE (XEXP (x, 0)) == UNSPEC)
          {
            addr = XVECEXP (XEXP (x, 0), 0, 0);
            switch (XINT (XEXP (x, 0), 1))
              {
              case UNSPEC_GOT16_SYM:
                opstr = "got";
                break;
              case UNSPEC_GOT32_SYM:
                opstr = "got_lo16";
                break;
              case UNSPEC_PCREL_SYM:
                opstr = "lo16";
                pcrel = true;
                break;
              case UNSPEC_TLS_GD:
                opstr = "tls_gd_lo16";
                break;
              case UNSPEC_TLS_IE:
                opstr = "tls_ie_lo16";
                break;
              case UNSPEC_TLS_LE:
                opstr = "tls_le_lo16";
                break;
              default:
                output_operand_lossage ("invalid %%L operand");
              }
          }
        else
          {
            addr = x;
            opstr = "lo16";
          }

        fputs (opstr, file);
        fputc ('(', file);
        output_addr_const (file, addr);

        if (pcrel)
          {
            rtx addr2 = XVECEXP (XEXP (x, 0), 0, 1);
            fputs (" - " , file);
            output_addr_const (file, addr2);
          }

        fputc (')', file);
        return;
      }
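
      /* Together %H and %L implement the usual two-instruction
         address formation; a sketch of the expected output for a
         symbol SYM (author's illustration):

             auli r0, zero, ha16(sym)
             addli r0, r0, lo16(sym)  */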
    case 'p':
      if (GET_CODE (x) == SYMBOL_REF)
        {
          if (flag_pic && !SYMBOL_REF_LOCAL_P (x))
            fprintf (file, "plt(");
          output_addr_const (file, x);
          if (flag_pic && !SYMBOL_REF_LOCAL_P (x))
            fprintf (file, ")");
        }
      else
        output_addr_const (file, x);
      return;
    case 'P':
      {
        /* Print a 32-bit constant plus one.  */
        HOST_WIDE_INT i;

        if (!CONST_INT_P (x))
          {
            output_operand_lossage ("invalid %%P operand");
            return;
          }

        i = trunc_int_for_mode (INTVAL (x) + 1, SImode);
        fprintf (file, HOST_WIDE_INT_PRINT_DEC, i);
        return;
      }
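
      /* Worked example (author's note): for x == 15, %P prints 16;
         for x == 0x7fffffff the SImode truncation wraps and
         -2147483648 is printed.  */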
    case 'M':
      {
        /* Print an mm-style bit range.  */
        int first_bit, last_bit;

        if (!CONST_INT_P (x)
            || !tilepro_bitfield_operand_p (INTVAL (x), &first_bit,
                                            &last_bit))
          {
            output_operand_lossage ("invalid %%M operand");
            return;
          }

        fprintf (file, "%d, %d", first_bit, last_bit);
        return;
      }
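
      /* Worked example (author's note, assuming
         tilepro_bitfield_operand_p reports the positions of the two
         ends of a contiguous mask): for the mask 0x000000f0 the set
         bits span positions 4 through 7, and the two numbers printed
         select that range for an mm instruction.  */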
    case 'N':
      {
        const char *reg = NULL;

        /* Print a network register.  */
        if (!CONST_INT_P (x))
          {
            output_operand_lossage ("invalid %%N operand");
            return;
          }

        switch (INTVAL (x))
          {
          case TILEPRO_NETREG_IDN0: reg = "idn0"; break;
          case TILEPRO_NETREG_IDN1: reg = "idn1"; break;
          case TILEPRO_NETREG_SN:   reg = "sn";   break;
          case TILEPRO_NETREG_UDN0: reg = "udn0"; break;
          case TILEPRO_NETREG_UDN1: reg = "udn1"; break;
          case TILEPRO_NETREG_UDN2: reg = "udn2"; break;
          case TILEPRO_NETREG_UDN3: reg = "udn3"; break;
          default: gcc_unreachable ();
          }

        fprintf (file, reg);
        return;
      }
    case 't':
      {
        /* Log base 2 of a power of two.  */
        HOST_WIDE_INT i;
        HOST_WIDE_INT n;

        if (!CONST_INT_P (x))
          {
            output_operand_lossage ("invalid %%t operand");
            return;
          }

        n = trunc_int_for_mode (INTVAL (x), SImode);
        i = exact_log2 (n);
        if (i < 0)
          {
            output_operand_lossage ("invalid %%t operand '"
                                    HOST_WIDE_INT_PRINT_DEC "'", n);
            return;
          }

        fprintf (file, HOST_WIDE_INT_PRINT_DEC, i);
        return;
      }
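
      /* Worked example (author's note): for x == 8, exact_log2
         returns 3 and %t prints 3; a non-power-of-two such as 12
         fails the check and triggers the lossage message above.  */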
    case 'r':
      /* In this case we need a register.  Use 'zero' if the
         operand is const0_rtx.  */
      if (x == const0_rtx
          || (GET_MODE (x) != VOIDmode && x == CONST0_RTX (GET_MODE (x))))
        {
          fputs ("zero", file);
          return;
        }
      else if (!REG_P (x))
        {
          output_operand_lossage ("invalid %%r operand");
          return;
        }
      /* FALLTHRU */

    case 0:
      if (REG_P (x))
        {
          fprintf (file, "%s", reg_names[REGNO (x)]);
          return;
        }
      else if (MEM_P (x))
        {
          output_memory_reference_mode = VOIDmode;
          output_address (XEXP (x, 0));
          return;
        }
      else
        {
          output_addr_const (file, x);
          return;
        }
    }

  debug_rtx (x);
  output_operand_lossage ("unable to print out operand yet; code == %d (%c)",
                          code, code);
}

/* Implement TARGET_PRINT_OPERAND_ADDRESS.  */
static void
tilepro_print_operand_address (FILE *file, rtx addr)
{
  if (GET_CODE (addr) == POST_DEC
      || GET_CODE (addr) == POST_INC)
    {
      int offset = GET_MODE_SIZE (output_memory_reference_mode);

      gcc_assert (output_memory_reference_mode != VOIDmode);

      if (output_memory_autoinc_first)
        fprintf (file, "%s", reg_names[REGNO (XEXP (addr, 0))]);
      else
        fprintf (file, "%d",
                 GET_CODE (addr) == POST_DEC ? -offset : offset);
    }
  else if (GET_CODE (addr) == POST_MODIFY)
    {
      gcc_assert (output_memory_reference_mode != VOIDmode);

      gcc_assert (GET_CODE (XEXP (addr, 1)) == PLUS);

      if (output_memory_autoinc_first)
        fprintf (file, "%s", reg_names[REGNO (XEXP (addr, 0))]);
      else
        fprintf (file, HOST_WIDE_INT_PRINT_DEC,
                 INTVAL (XEXP (XEXP (addr, 1), 1)));
    }
  else
    tilepro_print_operand (file, addr, 'r');
}
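
/* Example (author's illustration): for a POST_INC of an SImode value
   through r5, an insn template using both fragments prints "r5" for
   the %I part and "4" for the %i part, e.g. "swadd r5, r6, 4";
   POST_DEC would print "-4" instead.  */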

/* Machine mode of current insn, for determining curly brace
   placement.  */
static enum machine_mode insn_mode;

/* Implement FINAL_PRESCAN_INSN.  This is used to emit bundles.  */
void
tilepro_final_prescan_insn (rtx insn)
{
  /* Record this for tilepro_asm_output_opcode to examine.  */
  insn_mode = GET_MODE (insn);
}

/* While emitting asm, are we currently inside '{' for a bundle?  */
static bool tilepro_in_bundle = false;

/* Implement ASM_OUTPUT_OPCODE.  Prepend/append curly braces as
   appropriate given the bundling information recorded by
   tilepro_gen_bundles.  */
const char *
tilepro_asm_output_opcode (FILE *stream, const char *code)
{
  bool pseudo = !strcmp (code, "pseudo");

  if (!tilepro_in_bundle && insn_mode == SImode)
    {
      /* Start a new bundle.  */
      fprintf (stream, "{\n\t");
      tilepro_in_bundle = true;
    }

  if (tilepro_in_bundle && insn_mode == QImode)
    {
      /* Close an existing bundle.  */
      static char buf[100];

      gcc_assert (strlen (code) + 3 + 1 < sizeof (buf));

      strcpy (buf, pseudo ? "" : code);
      strcat (buf, "\n\t}");
      tilepro_in_bundle = false;

      return buf;
    }
  else
    return pseudo ? "" : code;
}

/* Output assembler code to FILE to increment profiler label # LABELNO
   for profiling a function entry.  */
void
tilepro_function_profiler (FILE *file, int labelno ATTRIBUTE_UNUSED)
{
  if (tilepro_in_bundle)
    {
      fprintf (file, "\t}\n");
    }

  if (flag_pic)
    {
      fprintf (file,
               "\t{\n"
               "\tmove\tr10, lr\n"
               "\tjal\tplt(%s)\n"
               "\t}\n", MCOUNT_NAME);
    }
  else
    {
      fprintf (file,
               "\t{\n"
               "\tmove\tr10, lr\n"
               "\tjal\t%s\n"
               "\t}\n", MCOUNT_NAME);
    }

  tilepro_in_bundle = false;
}

/* Implement TARGET_ASM_FILE_END.  */
static void
tilepro_file_end (void)
{
  if (NEED_INDICATE_EXEC_STACK)
    file_end_indicate_exec_stack ();
}

#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS HAVE_AS_TLS

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE tilepro_option_override

#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P tilepro_scalar_mode_supported_p

#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P tile_vector_mode_supported_p

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM tilepro_cannot_force_const_mem

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL tilepro_function_ok_for_sibcall

#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE tilepro_pass_by_reference

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY tilepro_return_in_memory

#undef TARGET_FUNCTION_ARG_BOUNDARY
#define TARGET_FUNCTION_ARG_BOUNDARY tilepro_function_arg_boundary

#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG tilepro_function_arg

#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE tilepro_function_arg_advance

#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE tilepro_function_value

#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE tilepro_libcall_value

#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P tilepro_function_value_regno_p

#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE \
  default_promote_function_mode_always_promote

#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_false

#undef TARGET_BUILD_BUILTIN_VA_LIST
#define TARGET_BUILD_BUILTIN_VA_LIST tilepro_build_builtin_va_list

#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START tilepro_va_start

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS tilepro_setup_incoming_varargs

#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR tilepro_gimplify_va_arg_expr

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS tilepro_rtx_costs

/* Limit to what we can reach in one addli.  */
#undef TARGET_MIN_ANCHOR_OFFSET
#define TARGET_MIN_ANCHOR_OFFSET -32768
#undef TARGET_MAX_ANCHOR_OFFSET
#define TARGET_MAX_ANCHOR_OFFSET 32767

#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P tilepro_legitimate_constant_p

#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P tilepro_legitimate_address_p

#undef TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS tilepro_legitimize_address

#undef TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS tilepro_delegitimize_address

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS tilepro_init_builtins

#undef TARGET_BUILTIN_DECL
#define TARGET_BUILTIN_DECL tilepro_builtin_decl

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN tilepro_expand_builtin

#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE tilepro_conditional_register_usage

#undef TARGET_FRAME_POINTER_REQUIRED
#define TARGET_FRAME_POINTER_REQUIRED tilepro_frame_pointer_required

#undef TARGET_DELAY_SCHED2
#define TARGET_DELAY_SCHED2 true

#undef TARGET_DELAY_VARTRACK
#define TARGET_DELAY_VARTRACK true

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE tilepro_issue_rate

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST tilepro_sched_adjust_cost

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG tilepro_reorg

#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK \
  hook_bool_const_tree_hwi_hwi_const_tree_true

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK tilepro_asm_output_mi_thunk

#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE tilepro_asm_trampoline_template

#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT tilepro_trampoline_init

#undef TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND tilepro_print_operand

#undef TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS tilepro_print_operand_address

#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END tilepro_file_end

struct gcc_target targetm = TARGET_INITIALIZER;

#include "gt-tilepro.h"