/* Default target hook functions.
   Copyright (C) 2003-2023 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* The migration of target macros to target hooks works as follows:

   1. Create a target hook that uses the existing target macros to
      implement the same functionality.

   2. Convert all the MI files to use the hook instead of the macro.

   3. Repeat for a majority of the remaining target macros.  This will
      take some time.

   4. Tell target maintainers to start migrating.

   5. Eventually convert the backends to override the hook instead of
      defining the macros.  This will take some time too.

   6. TBD when, poison the macros.  Unmigrated targets will break at
      this point.

   Note that we expect steps 1-3 to be done by the people that
   understand what the MI does with each macro, and step 5 to be done
   by the target maintainers for their respective targets.

   Note that steps 1 and 2 don't have to be done together, but no
   target can override the new hook until step 2 is complete for it.

   Once the macros are poisoned, we will revert to the old migration
   rules - migrate the macro, callers, and targets all at once.  This
   comment can thus be removed at that point.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "function.h"
#include "rtl.h"
#include "tree.h"
#include "tree-ssa-alias.h"
#include "gimple-expr.h"
#include "memmodel.h"
#include "backend.h"
#include "emit-rtl.h"
#include "df.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "profile-count.h"
#include "optabs.h"
#include "regs.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "varasm.h"
#include "flags.h"
#include "explow.h"
#include "expmed.h"
#include "calls.h"
#include "expr.h"
#include "output.h"
#include "common/common-target.h"
#include "reload.h"
#include "intl.h"
#include "opts.h"
#include "gimplify.h"
#include "predict.h"
#include "real.h"
#include "langhooks.h"
#include "sbitmap.h"
#include "function-abi.h"
#include "attribs.h"
#include "asan.h"
#include "gimple.h"
#include "cfgloop.h"
#include "tree-vectorizer.h"
#include "options.h"

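/* The default implementation of TARGET_LEGITIMATE_ADDRESS_P.  Defer to
   the legacy GO_IF_LEGITIMATE_ADDRESS macro; targets that define
   neither must override this hook.  */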
bool
default_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED,
			      rtx addr ATTRIBUTE_UNUSED,
			      bool strict ATTRIBUTE_UNUSED)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  /* Defer to the old implementation using a goto.  */
  if (strict)
    return strict_memory_address_p (mode, addr);
  else
    return memory_address_p (mode, addr);
#else
  gcc_unreachable ();
#endif
}

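/* The default implementation of TARGET_ASM_EXTERNAL_LIBCALL: emit the
   assembler directive, if any, that declares an external libcall
   symbol FUN.  */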
void
default_external_libcall (rtx fun ATTRIBUTE_UNUSED)
{
#ifdef ASM_OUTPUT_EXTERNAL_LIBCALL
  ASM_OUTPUT_EXTERNAL_LIBCALL (asm_out_file, fun);
#endif
}

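/* The default implementation of TARGET_UNSPEC_MAY_TRAP_P: an UNSPEC
   may trap if it does floating-point arithmetic while -ftrapping-math
   is in effect, or if any of its operands may trap.  */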
int
default_unspec_may_trap_p (const_rtx x, unsigned flags)
{
  int i;

  /* Any floating arithmetic may trap.  */
  if ((SCALAR_FLOAT_MODE_P (GET_MODE (x)) && flag_trapping_math))
    return 1;

  for (i = 0; i < XVECLEN (x, 0); ++i)
    {
      if (may_trap_p_1 (XVECEXP (x, 0, i), flags))
	return 1;
    }

  return 0;
}

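/* The default implementation of TARGET_PROMOTE_FUNCTION_MODE: promote
   only return values (FOR_RETURN == 2), and only when a front-end type
   is available, using the rules of promote_mode.  */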
machine_mode
default_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
			       machine_mode mode,
			       int *punsignedp ATTRIBUTE_UNUSED,
			       const_tree funtype ATTRIBUTE_UNUSED,
			       int for_return ATTRIBUTE_UNUSED)
{
  if (type != NULL_TREE && for_return == 2)
    return promote_mode (type, mode, punsignedp);
  return mode;
}

machine_mode
default_promote_function_mode_always_promote (const_tree type,
					      machine_mode mode,
					      int *punsignedp,
					      const_tree funtype ATTRIBUTE_UNUSED,
					      int for_return ATTRIBUTE_UNUSED)
{
  return promote_mode (type, mode, punsignedp);
}

machine_mode
default_cc_modes_compatible (machine_mode m1, machine_mode m2)
{
  if (m1 == m2)
    return m1;
  return VOIDmode;
}

bool
default_return_in_memory (const_tree type,
			  const_tree fntype ATTRIBUTE_UNUSED)
{
  return (TYPE_MODE (type) == BLKmode);
}

rtx
default_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
			    machine_mode mode ATTRIBUTE_UNUSED)
{
  return x;
}

bool
default_legitimize_address_displacement (rtx *, rtx *, poly_int64,
					 machine_mode)
{
  return false;
}

bool
default_const_not_ok_for_debug_p (rtx x)
{
  if (GET_CODE (x) == UNSPEC)
    return true;
  return false;
}

rtx
default_expand_builtin_saveregs (void)
{
  error ("%<__builtin_saveregs%> not supported by this target");
  return const0_rtx;
}

void
default_setup_incoming_varargs (cumulative_args_t,
				const function_arg_info &, int *, int)
{
}

/* The default implementation of TARGET_BUILTIN_SETJMP_FRAME_VALUE.  */

rtx
default_builtin_setjmp_frame_value (void)
{
  return virtual_stack_vars_rtx;
}

/* Generic hook that takes a CUMULATIVE_ARGS pointer and returns false.  */

bool
hook_bool_CUMULATIVE_ARGS_false (cumulative_args_t ca ATTRIBUTE_UNUSED)
{
  return false;
}

bool
default_pretend_outgoing_varargs_named (cumulative_args_t ca ATTRIBUTE_UNUSED)
{
  return (targetm.calls.setup_incoming_varargs
	  != default_setup_incoming_varargs);
}

scalar_int_mode
default_eh_return_filter_mode (void)
{
  return targetm.unwind_word_mode ();
}

scalar_int_mode
default_libgcc_cmp_return_mode (void)
{
  return word_mode;
}

scalar_int_mode
default_libgcc_shift_count_mode (void)
{
  return word_mode;
}

scalar_int_mode
default_unwind_word_mode (void)
{
  return word_mode;
}

/* The default implementation of TARGET_SHIFT_TRUNCATION_MASK.  */

unsigned HOST_WIDE_INT
default_shift_truncation_mask (machine_mode mode)
{
  return SHIFT_COUNT_TRUNCATED ? GET_MODE_UNIT_BITSIZE (mode) - 1 : 0;
}

/* The default implementation of TARGET_MIN_DIVISIONS_FOR_RECIP_MUL.  */

unsigned int
default_min_divisions_for_recip_mul (machine_mode mode ATTRIBUTE_UNUSED)
{
  return have_insn_for (DIV, mode) ? 3 : 2;
}

/* The default implementation of TARGET_MODE_REP_EXTENDED.  */

int
default_mode_rep_extended (scalar_int_mode, scalar_int_mode)
{
  return UNKNOWN;
}

/* Generic hook that takes a CUMULATIVE_ARGS pointer and returns true.  */

bool
hook_bool_CUMULATIVE_ARGS_true (cumulative_args_t a ATTRIBUTE_UNUSED)
{
  return true;
}

/* Return machine mode for non-standard suffix
   or VOIDmode if non-standard suffixes are unsupported.  */
machine_mode
default_mode_for_suffix (char suffix ATTRIBUTE_UNUSED)
{
  return VOIDmode;
}

/* The generic C++ ABI specifies this is a 64-bit value.  */
tree
default_cxx_guard_type (void)
{
  return long_long_integer_type_node;
}

/* Returns the size of the cookie to use when allocating an array
   whose elements have the indicated TYPE.  Assumes that it is already
   known that a cookie is needed.  */

tree
default_cxx_get_cookie_size (tree type)
{
  tree cookie_size;

  /* We need to allocate an additional max (sizeof (size_t), alignof
     (true_type)) bytes.  */
  tree sizetype_size;
  tree type_align;

  sizetype_size = size_in_bytes (sizetype);
  type_align = size_int (TYPE_ALIGN_UNIT (type));
  if (tree_int_cst_lt (type_align, sizetype_size))
    cookie_size = sizetype_size;
  else
    cookie_size = type_align;

  return cookie_size;
}

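/* As an illustration of the computation above (the exact values are
   target-dependent): with an 8-byte sizetype, a TYPE aligned to 16
   bytes gets a 16-byte cookie, while a TYPE aligned to 4 bytes gets
   an 8-byte cookie.  */
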
/* Return true if a parameter must be passed by reference.  This version
   of the TARGET_PASS_BY_REFERENCE hook uses just MUST_PASS_IN_STACK.  */

bool
hook_pass_by_reference_must_pass_in_stack (cumulative_args_t,
					   const function_arg_info &arg)
{
  return targetm.calls.must_pass_in_stack (arg);
}

/* Return true if a parameter follows callee copies conventions.  This
   version of the hook is true for all named arguments.  */

bool
hook_callee_copies_named (cumulative_args_t, const function_arg_info &arg)
{
  return arg.named;
}

/* Emit to STREAM the assembler syntax for insn operand X.  */

void
default_print_operand (FILE *stream ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED,
		       int code ATTRIBUTE_UNUSED)
{
#ifdef PRINT_OPERAND
  PRINT_OPERAND (stream, x, code);
#else
  gcc_unreachable ();
#endif
}

/* Emit to STREAM the assembler syntax for an insn operand whose memory
   address is X.  */

void
default_print_operand_address (FILE *stream ATTRIBUTE_UNUSED,
			       machine_mode /*mode*/,
			       rtx x ATTRIBUTE_UNUSED)
{
#ifdef PRINT_OPERAND_ADDRESS
  PRINT_OPERAND_ADDRESS (stream, x);
#else
  gcc_unreachable ();
#endif
}

/* Return true if CODE is a valid punctuation character for the
   `print_operand' hook.  */

bool
default_print_operand_punct_valid_p (unsigned char code ATTRIBUTE_UNUSED)
{
#ifdef PRINT_OPERAND_PUNCT_VALID_P
  return PRINT_OPERAND_PUNCT_VALID_P (code);
#else
  return false;
#endif
}

/* The default implementation of TARGET_MANGLE_ASSEMBLER_NAME.  */
tree
default_mangle_assembler_name (const char *name ATTRIBUTE_UNUSED)
{
  const char *skipped = name + (*name == '*' ? 1 : 0);
  const char *stripped = targetm.strip_name_encoding (skipped);
  if (*name != '*' && user_label_prefix[0])
    stripped = ACONCAT ((user_label_prefix, stripped, NULL));
  return get_identifier (stripped);
}

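/* For example, assuming user_label_prefix is "_" and strip_name_encoding
   leaves "foo" unchanged, the hook above mangles "foo" to "_foo", while
   "*foo" skips prefixing and mangles to "foo".  */
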
/* The default implementation of TARGET_TRANSLATE_MODE_ATTRIBUTE.  */

machine_mode
default_translate_mode_attribute (machine_mode mode)
{
  return mode;
}

/* True if MODE is valid for the target.  By "valid", we mean able to
   be manipulated in non-trivial ways.  In particular, this means all
   the arithmetic is supported.

   By default we guess this means that any C type is supported.  If
   we can't map the mode back to a type that would be available in C,
   then reject it.  Special case, here, is the double-word arithmetic
   supported by optabs.cc.  */

bool
default_scalar_mode_supported_p (scalar_mode mode)
{
  int precision = GET_MODE_PRECISION (mode);

  switch (GET_MODE_CLASS (mode))
    {
    case MODE_PARTIAL_INT:
    case MODE_INT:
      if (precision == CHAR_TYPE_SIZE)
	return true;
      if (precision == SHORT_TYPE_SIZE)
	return true;
      if (precision == INT_TYPE_SIZE)
	return true;
      if (precision == LONG_TYPE_SIZE)
	return true;
      if (precision == LONG_LONG_TYPE_SIZE)
	return true;
      if (precision == 2 * BITS_PER_WORD)
	return true;
      return false;

    case MODE_FLOAT:
      if (precision == FLOAT_TYPE_SIZE)
	return true;
      if (precision == DOUBLE_TYPE_SIZE)
	return true;
      if (precision == LONG_DOUBLE_TYPE_SIZE)
	return true;
      return false;

    case MODE_DECIMAL_FLOAT:
    case MODE_FRACT:
    case MODE_UFRACT:
    case MODE_ACCUM:
    case MODE_UACCUM:
      return false;

    default:
      gcc_unreachable ();
    }
}

/* Return true if libgcc supports floating-point mode MODE (known to
   be supported as a scalar mode).  */

bool
default_libgcc_floating_mode_supported_p (scalar_float_mode mode)
{
  switch (mode)
    {
#ifdef HAVE_SFmode
    case E_SFmode:
#endif
#ifdef HAVE_DFmode
    case E_DFmode:
#endif
#ifdef HAVE_XFmode
    case E_XFmode:
#endif
#ifdef HAVE_TFmode
    case E_TFmode:
#endif
      return true;

    default:
      return false;
    }
}

/* Return the machine mode to use for the type _FloatN, if EXTENDED is
   false, or _FloatNx, if EXTENDED is true, or VOIDmode if not
   supported.  */
opt_scalar_float_mode
default_floatn_mode (int n, bool extended)
{
  if (extended)
    {
      opt_scalar_float_mode cand1, cand2;
      scalar_float_mode mode;
      switch (n)
	{
	case 32:
#ifdef HAVE_DFmode
	  cand1 = DFmode;
#endif
	  break;

	case 64:
#ifdef HAVE_XFmode
	  cand1 = XFmode;
#endif
#ifdef HAVE_TFmode
	  cand2 = TFmode;
#endif
	  break;

	case 128:
	  break;

	default:
	  /* Those are the only valid _FloatNx types.  */
	  gcc_unreachable ();
	}
      if (cand1.exists (&mode)
	  && REAL_MODE_FORMAT (mode)->ieee_bits > n
	  && targetm.scalar_mode_supported_p (mode)
	  && targetm.libgcc_floating_mode_supported_p (mode))
	return cand1;
      if (cand2.exists (&mode)
	  && REAL_MODE_FORMAT (mode)->ieee_bits > n
	  && targetm.scalar_mode_supported_p (mode)
	  && targetm.libgcc_floating_mode_supported_p (mode))
	return cand2;
    }
  else
    {
      opt_scalar_float_mode cand;
      scalar_float_mode mode;
      switch (n)
	{
	case 16:
	  /* Always enable _Float16 if we have basic support for the mode.
	     Targets can control the range and precision of operations on
	     the _Float16 type using TARGET_C_EXCESS_PRECISION.  */
#ifdef HAVE_HFmode
	  cand = HFmode;
#endif
	  break;

	case 32:
#ifdef HAVE_SFmode
	  cand = SFmode;
#endif
	  break;

	case 64:
#ifdef HAVE_DFmode
	  cand = DFmode;
#endif
	  break;

	case 128:
#ifdef HAVE_TFmode
	  cand = TFmode;
#endif
	  break;

	default:
	  break;
	}
      if (cand.exists (&mode)
	  && REAL_MODE_FORMAT (mode)->ieee_bits == n
	  && targetm.scalar_mode_supported_p (mode)
	  && targetm.libgcc_floating_mode_supported_p (mode))
	return cand;
    }
  return opt_scalar_float_mode ();
}

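/* For example, on a target providing DFmode in the common IEEE double
   format, default_floatn_mode (64, false) yields DFmode, since that
   format has exactly 64 IEEE bits (assuming the mode also passes the
   scalar and libgcc support checks above).  */
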
/* Define this to return true if the _FloatN and _FloatNx built-in functions
   should implicitly enable the built-in function without the __builtin_
   prefix in addition to the normal built-in function with the __builtin_
   prefix.  The default is to only enable built-in functions without the
   __builtin_ prefix for the GNU C language.  The argument FUNC is the
   enum built_in_function id of the function to be enabled.  */

bool
default_floatn_builtin_p (int func ATTRIBUTE_UNUSED)
{
  static bool first_time_p = true;
  static bool c_or_objective_c;

  if (first_time_p)
    {
      first_time_p = false;
      c_or_objective_c = lang_GNU_C () || lang_GNU_OBJC ();
    }

  return c_or_objective_c;
}

/* Make some target macros usable by target-independent code.  */
bool
targhook_words_big_endian (void)
{
  return !!WORDS_BIG_ENDIAN;
}

bool
targhook_float_words_big_endian (void)
{
  return !!FLOAT_WORDS_BIG_ENDIAN;
}

/* True if the target supports floating-point exceptions and rounding
   modes.  */

bool
default_float_exceptions_rounding_supported_p (void)
{
#ifdef HAVE_adddf3
  return HAVE_adddf3;
#else
  return false;
#endif
}

/* True if the target supports decimal floating point.  */

bool
default_decimal_float_supported_p (void)
{
  return ENABLE_DECIMAL_FLOAT;
}

/* True if the target supports fixed-point arithmetic.  */

bool
default_fixed_point_supported_p (void)
{
  return ENABLE_FIXED_POINT;
}

/* True if the target supports GNU indirect functions.  */

bool
default_has_ifunc_p (void)
{
  return HAVE_GNU_INDIRECT_FUNCTION;
}

/* Return true if we predict the loop LOOP will be transformed to a
   low-overhead loop, otherwise return false.

   By default, false is returned, as this hook's applicability should be
   verified for each target.  Target maintainers should re-define the hook
   if the target can take advantage of it.  */

bool
default_predict_doloop_p (class loop *loop ATTRIBUTE_UNUSED)
{
  return false;
}

/* By default, just use the input MODE itself.  */

machine_mode
default_preferred_doloop_mode (machine_mode mode)
{
  return mode;
}

/* Return NULL if INSN is valid within a low-overhead loop, otherwise return
   an error message.

   This function checks whether a given INSN is valid within a low-overhead
   loop.  If INSN is invalid it returns the reason for that, otherwise it
   returns NULL.  A called function may clobber any special registers required
   for low-overhead looping.  Additionally, some targets (e.g., PPC) use the
   count register for branch on table instructions.  We reject the doloop
   pattern in these cases.  */

const char *
default_invalid_within_doloop (const rtx_insn *insn)
{
  if (CALL_P (insn))
    return "Function call in loop.";

  if (tablejump_p (insn, NULL, NULL) || computed_jump_p (insn))
    return "Computed branch in the loop.";

  return NULL;
}

/* Mapping of builtin functions to vectorized variants.  */

tree
default_builtin_vectorized_function (unsigned int, tree, tree)
{
  return NULL_TREE;
}

/* Mapping of target builtin functions to vectorized variants.  */

tree
default_builtin_md_vectorized_function (tree, tree, tree)
{
  return NULL_TREE;
}

/* Default vectorizer cost model values.  */

int
default_builtin_vectorization_cost (enum vect_cost_for_stmt type_of_cost,
				    tree vectype,
				    int misalign ATTRIBUTE_UNUSED)
{
  switch (type_of_cost)
    {
    case scalar_stmt:
    case scalar_load:
    case scalar_store:
    case vector_stmt:
    case vector_load:
    case vector_store:
    case vec_to_scalar:
    case scalar_to_vec:
    case cond_branch_not_taken:
    case vec_perm:
    case vec_promote_demote:
      return 1;

    case unaligned_load:
    case unaligned_store:
      return 2;

    case cond_branch_taken:
      return 3;

    case vec_construct:
      return estimated_poly_value (TYPE_VECTOR_SUBPARTS (vectype)) - 1;

    default:
      gcc_unreachable ();
    }
}

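/* So, under the model above, an unaligned vector load or store counts
   as two scalar statements, a taken conditional branch as three, and
   constructing, say, a four-element vector costs 3 (one less than the
   number of elements).  */
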
/* Reciprocal.  */

tree
default_builtin_reciprocal (tree)
{
  return NULL_TREE;
}

void
default_emit_support_tinfos (emit_support_tinfos_callback)
{
}

bool
hook_bool_CUMULATIVE_ARGS_arg_info_false (cumulative_args_t,
					  const function_arg_info &)
{
  return false;
}

bool
hook_bool_CUMULATIVE_ARGS_arg_info_true (cumulative_args_t,
					 const function_arg_info &)
{
  return true;
}

int
hook_int_CUMULATIVE_ARGS_arg_info_0 (cumulative_args_t,
				     const function_arg_info &)
{
  return 0;
}

void
hook_void_CUMULATIVE_ARGS_tree (cumulative_args_t ca ATTRIBUTE_UNUSED,
				tree ATTRIBUTE_UNUSED)
{
}

/* Default implementation of TARGET_PUSH_ARGUMENT.  */

bool
default_push_argument (unsigned int)
{
#ifdef PUSH_ROUNDING
  return !ACCUMULATE_OUTGOING_ARGS;
#else
  return false;
#endif
}

void
default_function_arg_advance (cumulative_args_t, const function_arg_info &)
{
  gcc_unreachable ();
}

/* Default implementation of TARGET_FUNCTION_ARG_OFFSET.  */

HOST_WIDE_INT
default_function_arg_offset (machine_mode, const_tree)
{
  return 0;
}

/* Default implementation of TARGET_FUNCTION_ARG_PADDING: usually pad
   upward, but pad short args downward on big-endian machines.  */

pad_direction
default_function_arg_padding (machine_mode mode, const_tree type)
{
  if (!BYTES_BIG_ENDIAN)
    return PAD_UPWARD;

  unsigned HOST_WIDE_INT size;
  if (mode == BLKmode)
    {
      if (!type || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
	return PAD_UPWARD;
      size = int_size_in_bytes (type);
    }
  else
    /* Targets with variable-sized modes must override this hook
       and handle variable-sized modes explicitly.  */
    size = GET_MODE_SIZE (mode).to_constant ();

  if (size < (PARM_BOUNDARY / BITS_PER_UNIT))
    return PAD_DOWNWARD;

  return PAD_UPWARD;
}

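/* For example, with the hook above on a big-endian target with a 64-bit
   PARM_BOUNDARY, a 4-byte argument is padded downward while an 8-byte
   argument is padded upward.  */
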
rtx
default_function_arg (cumulative_args_t, const function_arg_info &)
{
  gcc_unreachable ();
}

rtx
default_function_incoming_arg (cumulative_args_t, const function_arg_info &)
{
  gcc_unreachable ();
}

unsigned int
default_function_arg_boundary (machine_mode mode ATTRIBUTE_UNUSED,
			       const_tree type ATTRIBUTE_UNUSED)
{
  return PARM_BOUNDARY;
}

unsigned int
default_function_arg_round_boundary (machine_mode mode ATTRIBUTE_UNUSED,
				     const_tree type ATTRIBUTE_UNUSED)
{
  return PARM_BOUNDARY;
}

void
hook_void_bitmap (bitmap regs ATTRIBUTE_UNUSED)
{
}

const char *
hook_invalid_arg_for_unprototyped_fn (
	const_tree typelist ATTRIBUTE_UNUSED,
	const_tree funcdecl ATTRIBUTE_UNUSED,
	const_tree val ATTRIBUTE_UNUSED)
{
  return NULL;
}

/* Initialize the stack protection decls.  */

/* Stack protection related decls living in libgcc.  */
static GTY(()) tree stack_chk_guard_decl;

tree
default_stack_protect_guard (void)
{
  tree t = stack_chk_guard_decl;

  if (t == NULL)
    {
      rtx x;

      t = build_decl (UNKNOWN_LOCATION,
		      VAR_DECL, get_identifier ("__stack_chk_guard"),
		      ptr_type_node);
      TREE_STATIC (t) = 1;
      TREE_PUBLIC (t) = 1;
      DECL_EXTERNAL (t) = 1;
      TREE_USED (t) = 1;
      TREE_THIS_VOLATILE (t) = 1;
      DECL_ARTIFICIAL (t) = 1;
      DECL_IGNORED_P (t) = 1;

      /* Do not share RTL as the declaration is visible outside of
	 current function.  */
      x = DECL_RTL (t);
      RTX_FLAG (x, used) = 1;

      stack_chk_guard_decl = t;
    }

  return t;
}

static GTY(()) tree stack_chk_fail_decl;

tree
default_external_stack_protect_fail (void)
{
  tree t = stack_chk_fail_decl;

  if (t == NULL_TREE)
    {
      t = build_function_type_list (void_type_node, NULL_TREE);
      t = build_decl (UNKNOWN_LOCATION,
		      FUNCTION_DECL, get_identifier ("__stack_chk_fail"), t);
      TREE_STATIC (t) = 1;
      TREE_PUBLIC (t) = 1;
      DECL_EXTERNAL (t) = 1;
      TREE_USED (t) = 1;
      TREE_THIS_VOLATILE (t) = 1;
      TREE_NOTHROW (t) = 1;
      DECL_ARTIFICIAL (t) = 1;
      DECL_IGNORED_P (t) = 1;
      DECL_VISIBILITY (t) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (t) = 1;

      stack_chk_fail_decl = t;
    }

  return build_call_expr (t, 0);
}

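/* Variant of the above used for -fpic code: when the assembler supports
   hidden visibility, call a hidden __stack_chk_fail_local instead, so
   the failure path avoids a PLT-based call.  */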
tree
default_hidden_stack_protect_fail (void)
{
#ifndef HAVE_GAS_HIDDEN
  return default_external_stack_protect_fail ();
#else
  tree t = stack_chk_fail_decl;

  if (!flag_pic)
    return default_external_stack_protect_fail ();

  if (t == NULL_TREE)
    {
      t = build_function_type_list (void_type_node, NULL_TREE);
      t = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
		      get_identifier ("__stack_chk_fail_local"), t);
      TREE_STATIC (t) = 1;
      TREE_PUBLIC (t) = 1;
      DECL_EXTERNAL (t) = 1;
      TREE_USED (t) = 1;
      TREE_THIS_VOLATILE (t) = 1;
      TREE_NOTHROW (t) = 1;
      DECL_ARTIFICIAL (t) = 1;
      DECL_IGNORED_P (t) = 1;
      DECL_VISIBILITY_SPECIFIED (t) = 1;
      DECL_VISIBILITY (t) = VISIBILITY_HIDDEN;

      stack_chk_fail_decl = t;
    }

  return build_call_expr (t, 0);
#endif
}

bool
hook_bool_const_rtx_commutative_p (const_rtx x,
				   int outer_code ATTRIBUTE_UNUSED)
{
  return COMMUTATIVE_P (x);
}

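/* The default implementation of TARGET_FUNCTION_VALUE: defer to the
   legacy FUNCTION_VALUE macro, whose interface never received the
   function type.  */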
rtx
default_function_value (const_tree ret_type ATTRIBUTE_UNUSED,
			const_tree fn_decl_or_type,
			bool outgoing ATTRIBUTE_UNUSED)
{
  /* The old interface doesn't handle receiving the function type.  */
  if (fn_decl_or_type
      && !DECL_P (fn_decl_or_type))
    fn_decl_or_type = NULL;

#ifdef FUNCTION_VALUE
  return FUNCTION_VALUE (ret_type, fn_decl_or_type);
#else
  gcc_unreachable ();
#endif
}

rtx
default_libcall_value (machine_mode mode ATTRIBUTE_UNUSED,
		       const_rtx fun ATTRIBUTE_UNUSED)
{
#ifdef LIBCALL_VALUE
  return LIBCALL_VALUE (MACRO_MODE (mode));
#else
  gcc_unreachable ();
#endif
}

/* The default hook for TARGET_FUNCTION_VALUE_REGNO_P.  */

bool
default_function_value_regno_p (const unsigned int regno ATTRIBUTE_UNUSED)
{
#ifdef FUNCTION_VALUE_REGNO_P
  return FUNCTION_VALUE_REGNO_P (regno);
#else
  gcc_unreachable ();
#endif
}

/* Choose the mode and rtx to use to zero REGNO, storing them in PMODE and
   PREGNO_RTX and returning TRUE if successful, otherwise returning FALSE.  If
   the natural mode for REGNO doesn't work, attempt to group it with subsequent
   adjacent registers set in TOZERO.  */

static inline bool
zcur_select_mode_rtx (unsigned int regno, machine_mode *pmode,
		      rtx *pregno_rtx, HARD_REG_SET tozero)
{
  rtx regno_rtx = regno_reg_rtx[regno];
  machine_mode mode = GET_MODE (regno_rtx);

  /* If the natural mode doesn't work, try some wider mode.  */
  if (!targetm.hard_regno_mode_ok (regno, mode))
    {
      bool found = false;
      for (int nregs = 2;
	   !found && nregs <= hard_regno_max_nregs
	     && regno + nregs <= FIRST_PSEUDO_REGISTER
	     && TEST_HARD_REG_BIT (tozero,
				   regno + nregs - 1);
	   nregs++)
	{
	  mode = choose_hard_reg_mode (regno, nregs, 0);
	  if (mode == E_VOIDmode)
	    continue;
	  gcc_checking_assert (targetm.hard_regno_mode_ok (regno, mode));
	  regno_rtx = gen_rtx_REG (mode, regno);
	  found = true;
	}
      if (!found)
	return false;
    }

  *pmode = mode;
  *pregno_rtx = regno_rtx;
  return true;
}

/* The default hook for TARGET_ZERO_CALL_USED_REGS.  */

HARD_REG_SET
default_zero_call_used_regs (HARD_REG_SET need_zeroed_hardregs)
{
  gcc_assert (!hard_reg_set_empty_p (need_zeroed_hardregs));

  HARD_REG_SET failed;
  CLEAR_HARD_REG_SET (failed);
  bool progress = false;

  /* First, try to zero each register in need_zeroed_hardregs by
     loading a zero into it, taking note of any failures in
     FAILED.  */
  for (unsigned int regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (TEST_HARD_REG_BIT (need_zeroed_hardregs, regno))
      {
	rtx_insn *last_insn = get_last_insn ();
	rtx regno_rtx;
	machine_mode mode;

	if (!zcur_select_mode_rtx (regno, &mode, &regno_rtx,
				   need_zeroed_hardregs))
	  {
	    SET_HARD_REG_BIT (failed, regno);
	    continue;
	  }

	rtx zero = CONST0_RTX (mode);
	rtx_insn *insn = emit_move_insn (regno_rtx, zero);
	if (!valid_insn_p (insn))
	  {
	    SET_HARD_REG_BIT (failed, regno);
	    delete_insns_since (last_insn);
	  }
	else
	  {
	    progress = true;
	    regno += hard_regno_nregs (regno, mode) - 1;
	  }
      }

  /* Now retry with copies from zeroed registers, as long as we've
     made some PROGRESS, and registers remain to be zeroed in
     FAILED.  */
  while (progress && !hard_reg_set_empty_p (failed))
    {
      HARD_REG_SET retrying = failed;

      CLEAR_HARD_REG_SET (failed);
      progress = false;

      for (unsigned int regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (TEST_HARD_REG_BIT (retrying, regno))
	  {
	    rtx regno_rtx;
	    machine_mode mode;

	    /* This might select registers we've already zeroed.  If grouping
	       with them is what it takes to get regno zeroed, so be it.  */
	    if (!zcur_select_mode_rtx (regno, &mode, &regno_rtx,
				       need_zeroed_hardregs))
	      {
		SET_HARD_REG_BIT (failed, regno);
		continue;
	      }

	    bool success = false;
	    /* Look for a source.  */
	    for (unsigned int src = 0; src < FIRST_PSEUDO_REGISTER; src++)
	      {
		/* If SRC hasn't been zeroed (yet?), skip it.  */
		if (! TEST_HARD_REG_BIT (need_zeroed_hardregs, src))
		  continue;
		if (TEST_HARD_REG_BIT (retrying, src))
		  continue;

		/* Check that SRC can hold MODE, and that any other
		   registers needed to hold MODE in SRC have also been
		   zeroed.  */
		if (!targetm.hard_regno_mode_ok (src, mode))
		  continue;
		unsigned n = targetm.hard_regno_nregs (src, mode);
		bool ok = true;
		for (unsigned i = 1; ok && i < n; i++)
		  ok = (TEST_HARD_REG_BIT (need_zeroed_hardregs, src + i)
			&& !TEST_HARD_REG_BIT (retrying, src + i));
		if (!ok)
		  continue;

		/* SRC is usable, try to copy from it.  */
		rtx_insn *last_insn = get_last_insn ();
		rtx src_rtx = gen_rtx_REG (mode, src);
		rtx_insn *insn = emit_move_insn (regno_rtx, src_rtx);
		if (!valid_insn_p (insn))
		  /* It didn't work, remove any inserts.  We'll look
		     for another SRC.  */
		  delete_insns_since (last_insn);
		else
		  {
		    /* We're done for REGNO.  */
		    success = true;
		    break;
		  }
	      }

	    /* If nothing worked for REGNO this round, mark it to be
	       retried if we get another round.  */
	    if (!success)
	      SET_HARD_REG_BIT (failed, regno);
	    else
	      {
		/* Take note so as to enable another round if needed.  */
		progress = true;
		regno += hard_regno_nregs (regno, mode) - 1;
	      }
	  }
    }

  /* If any register remained, report it.  */
  if (!progress)
    {
      static bool issued_error;
      if (!issued_error)
	{
	  const char *name = NULL;
	  for (unsigned int i = 0; zero_call_used_regs_opts[i].name != NULL;
	       ++i)
	    if (flag_zero_call_used_regs == zero_call_used_regs_opts[i].flag)
	      {
		name = zero_call_used_regs_opts[i].name;
		break;
	      }

	  if (!name)
	    name = "";

	  issued_error = true;
	  sorry ("argument %qs is not supported for %qs on this target",
		 name, "-fzero-call-used-regs");
	}
    }

  return need_zeroed_hardregs;
}

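/* The default implementation of TARGET_INTERNAL_ARG_POINTER.  */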
rtx
default_internal_arg_pointer (void)
{
  /* If the reg that the virtual arg pointer will be translated into is
     not a fixed reg or is the stack pointer, make a copy of the virtual
     arg pointer, and address parms via the copy.  The frame pointer is
     considered fixed even though it is not marked as such.  */
  if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
       || ! (fixed_regs[ARG_POINTER_REGNUM]
	     || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
    return copy_to_reg (virtual_incoming_args_rtx);
  else
    return virtual_incoming_args_rtx;
}

rtx
default_static_chain (const_tree ARG_UNUSED (fndecl_or_type), bool incoming_p)
{
  if (incoming_p)
    {
#ifdef STATIC_CHAIN_INCOMING_REGNUM
      return gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
#endif
    }

#ifdef STATIC_CHAIN_REGNUM
  return gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
#endif

  {
    static bool issued_error;
    if (!issued_error)
      {
	issued_error = true;
	sorry ("nested functions not supported on this target");
      }

    /* It really doesn't matter what we return here, so long as it
       doesn't cause the rest of the compiler to crash.  */
    return gen_rtx_MEM (Pmode, stack_pointer_rtx);
  }
}

void
default_trampoline_init (rtx ARG_UNUSED (m_tramp), tree ARG_UNUSED (t_func),
			 rtx ARG_UNUSED (r_chain))
{
  sorry ("nested function trampolines not supported on this target");
}

poly_int64
default_return_pops_args (tree, tree, poly_int64)
{
  return 0;
}

reg_class_t
default_ira_change_pseudo_allocno_class (int regno ATTRIBUTE_UNUSED,
					 reg_class_t cl,
					 reg_class_t best_cl ATTRIBUTE_UNUSED)
{
  return cl;
}

extern bool
default_lra_p (void)
{
  return true;
}

int
default_register_priority (int hard_regno ATTRIBUTE_UNUSED)
{
  return 0;
}

extern bool
default_register_usage_leveling_p (void)
{
  return false;
}

extern bool
default_different_addr_displacement_p (void)
{
  return false;
}

reg_class_t
default_secondary_reload (bool in_p ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED,
			  reg_class_t reload_class_i ATTRIBUTE_UNUSED,
			  machine_mode reload_mode ATTRIBUTE_UNUSED,
			  secondary_reload_info *sri)
{
  enum reg_class rclass = NO_REGS;
  enum reg_class reload_class = (enum reg_class) reload_class_i;

  if (sri->prev_sri && sri->prev_sri->t_icode != CODE_FOR_nothing)
    {
      sri->icode = sri->prev_sri->t_icode;
      return NO_REGS;
    }
#ifdef SECONDARY_INPUT_RELOAD_CLASS
  if (in_p)
    rclass = SECONDARY_INPUT_RELOAD_CLASS (reload_class,
					   MACRO_MODE (reload_mode), x);
#endif
#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
  if (! in_p)
    rclass = SECONDARY_OUTPUT_RELOAD_CLASS (reload_class,
					    MACRO_MODE (reload_mode), x);
#endif
  if (rclass != NO_REGS)
    {
      enum insn_code icode
	= direct_optab_handler (in_p ? reload_in_optab : reload_out_optab,
				reload_mode);

      if (icode != CODE_FOR_nothing
	  && !insn_operand_matches (icode, in_p, x))
	icode = CODE_FOR_nothing;
      else if (icode != CODE_FOR_nothing)
	{
	  const char *insn_constraint, *scratch_constraint;
	  enum reg_class insn_class, scratch_class;

	  gcc_assert (insn_data[(int) icode].n_operands == 3);
	  insn_constraint = insn_data[(int) icode].operand[!in_p].constraint;
	  if (!*insn_constraint)
	    insn_class = ALL_REGS;
	  else
	    {
	      if (in_p)
		{
		  gcc_assert (*insn_constraint == '=');
		  insn_constraint++;
		}
	      insn_class = (reg_class_for_constraint
			    (lookup_constraint (insn_constraint)));
	      gcc_assert (insn_class != NO_REGS);
	    }

	  scratch_constraint = insn_data[(int) icode].operand[2].constraint;
	  /* The scratch register's constraint must start with "=&",
	     except for an input reload, where only "=" is necessary,
	     and where it might be beneficial to re-use registers from
	     the input.  */
	  gcc_assert (scratch_constraint[0] == '='
		      && (in_p || scratch_constraint[1] == '&'));
	  scratch_constraint++;
	  if (*scratch_constraint == '&')
	    scratch_constraint++;
	  scratch_class = (reg_class_for_constraint
			   (lookup_constraint (scratch_constraint)));

	  if (reg_class_subset_p (reload_class, insn_class))
	    {
	      gcc_assert (scratch_class == rclass);
	      rclass = NO_REGS;
	    }
	  else
	    rclass = insn_class;

	}
      if (rclass == NO_REGS)
	sri->icode = icode;
      else
	sri->t_icode = icode;
    }
  return rclass;
}

/* The default implementation of TARGET_SECONDARY_MEMORY_NEEDED_MODE.  */

machine_mode
default_secondary_memory_needed_mode (machine_mode mode)
{
  if (!targetm.lra_p ()
      && known_lt (GET_MODE_BITSIZE (mode), BITS_PER_WORD)
      && INTEGRAL_MODE_P (mode))
    return mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0).require ();
  return mode;
}

/* By default, if flag_pic is true, then neither local nor global relocs
   should be placed in readonly memory.  */

int
default_reloc_rw_mask (void)
{
  return flag_pic ? 3 : 0;
}

/* By default, address diff vectors are generated
   for jump tables when flag_pic is true.  */

bool
default_generate_pic_addr_diff_vec (void)
{
  return flag_pic;
}

/* Record an element in the table of global constructors.  SYMBOL is
   a SYMBOL_REF of the function to be called; PRIORITY is a number
   between 0 and MAX_INIT_PRIORITY.  */

void
default_asm_out_constructor (rtx symbol ATTRIBUTE_UNUSED,
			     int priority ATTRIBUTE_UNUSED)
{
  sorry ("global constructors not supported on this target");
}

/* Likewise for global destructors.  */

void
default_asm_out_destructor (rtx symbol ATTRIBUTE_UNUSED,
			    int priority ATTRIBUTE_UNUSED)
{
  sorry ("global destructors not supported on this target");
}

/* By default, do no modification.  */
tree default_mangle_decl_assembler_name (tree decl ATTRIBUTE_UNUSED,
					 tree id)
{
  return id;
}

/* The default implementation of TARGET_STATIC_RTX_ALIGNMENT.  */

HOST_WIDE_INT
default_static_rtx_alignment (machine_mode mode)
{
  return GET_MODE_ALIGNMENT (mode);
}

/* The default implementation of TARGET_CONSTANT_ALIGNMENT.  */

HOST_WIDE_INT
default_constant_alignment (const_tree, HOST_WIDE_INT align)
{
  return align;
}

/* An implementation of TARGET_CONSTANT_ALIGNMENT that aligns strings
   to at least BITS_PER_WORD but otherwise makes no changes.  */

HOST_WIDE_INT
constant_alignment_word_strings (const_tree exp, HOST_WIDE_INT align)
{
  if (TREE_CODE (exp) == STRING_CST)
    return MAX (align, BITS_PER_WORD);
  return align;
}

/* Default to natural alignment for vector types, bounded by
   MAX_OFILE_ALIGNMENT.  */

HOST_WIDE_INT
default_vector_alignment (const_tree type)
{
  unsigned HOST_WIDE_INT align = MAX_OFILE_ALIGNMENT;
  tree size = TYPE_SIZE (type);
  if (tree_fits_uhwi_p (size))
    align = tree_to_uhwi (size);
  if (align >= MAX_OFILE_ALIGNMENT)
    return MAX_OFILE_ALIGNMENT;
  return MAX (align, GET_MODE_ALIGNMENT (TYPE_MODE (type)));
}

/* The default implementation of
   TARGET_VECTORIZE_PREFERRED_VECTOR_ALIGNMENT.  */

poly_uint64
default_preferred_vector_alignment (const_tree type)
{
  return TYPE_ALIGN (type);
}

/* The default implementation of
   TARGET_VECTORIZE_PREFERRED_DIV_AS_SHIFTS_OVER_MULT.  */

bool
default_preferred_div_as_shifts_over_mult (const_tree type)
{
  return !can_mult_highpart_p (TYPE_MODE (type), TYPE_UNSIGNED (type));
}

/* By default assume vectors of element TYPE require a multiple of the natural
   alignment of TYPE.  TYPE is naturally aligned if IS_PACKED is false.  */
bool
default_builtin_vector_alignment_reachable (const_tree /*type*/, bool is_packed)
{
  return ! is_packed;
}

/* By default, assume that a target supports misaligned memory accesses
   of any factor if it supports the movmisalign pattern.
   IS_PACKED is true if the memory access is defined in a packed struct.  */
bool
default_builtin_support_vector_misalignment (machine_mode mode,
					     const_tree type
					     ATTRIBUTE_UNUSED,
					     int misalignment
					     ATTRIBUTE_UNUSED,
					     bool is_packed
					     ATTRIBUTE_UNUSED)
{
  if (optab_handler (movmisalign_optab, mode) != CODE_FOR_nothing)
    return true;
  return false;
}

/* By default, only attempt to parallelize bitwise operations, and
   possibly adds/subtracts using bit-twiddling.  */

machine_mode
default_preferred_simd_mode (scalar_mode)
{
  return word_mode;
}

/* By default do not split reductions further.  */

machine_mode
default_split_reduction (machine_mode mode)
{
  return mode;
}

/* By default only the preferred vector mode is tried.  */

unsigned int
default_autovectorize_vector_modes (vector_modes *, bool)
{
  return 0;
}

/* The default implementation of TARGET_VECTORIZE_RELATED_MODE.  */

opt_machine_mode
default_vectorize_related_mode (machine_mode vector_mode,
				scalar_mode element_mode,
				poly_uint64 nunits)
{
  machine_mode result_mode;
  if ((maybe_ne (nunits, 0U)
       || multiple_p (GET_MODE_SIZE (vector_mode),
		      GET_MODE_SIZE (element_mode), &nunits))
      && mode_for_vector (element_mode, nunits).exists (&result_mode)
      && VECTOR_MODE_P (result_mode)
      && targetm.vector_mode_supported_p (result_mode))
    return result_mode;

  return opt_machine_mode ();
}

/* By default a vector of integers is used as a mask.  */

opt_machine_mode
default_get_mask_mode (machine_mode mode)
{
  return related_int_vector_mode (mode);
}

/* By default consider masked stores to be expensive.  */

bool
default_empty_mask_is_expensive (unsigned ifn)
{
  return ifn == IFN_MASK_STORE;
}

/* By default, the cost model tracks three separate costs (prologue,
   loop body, and epilogue) for a vectorized loop or block.  Allocate
   and return a default vector_costs instance for that accounting.  */

vector_costs *
default_vectorize_create_costs (vec_info *vinfo, bool costing_for_scalar)
{
  return new vector_costs (vinfo, costing_for_scalar);
}

/* Determine whether or not a pointer mode is valid.  Assume defaults
   of ptr_mode or Pmode - can be overridden.  */
bool
default_valid_pointer_mode (scalar_int_mode mode)
{
  return (mode == ptr_mode || mode == Pmode);
}

/* Determine whether the memory reference specified by REF may alias
   the C library's errno location.  */
bool
default_ref_may_alias_errno (ao_ref *ref)
{
  tree base = ao_ref_base (ref);
  /* The default implementation assumes the errno location is
     a declaration of type int or is always accessed via a
     pointer to int.  We assume that accesses to errno are
     not deliberately obfuscated (even in conforming ways).  */
  if (TYPE_UNSIGNED (TREE_TYPE (base))
      || TYPE_MODE (TREE_TYPE (base)) != TYPE_MODE (integer_type_node))
    return false;
  /* The default implementation assumes an errno location declaration
     is never defined in the current compilation unit and may not be
     aliased by a local variable.  */
  if (DECL_P (base)
      && DECL_EXTERNAL (base)
      && !TREE_STATIC (base))
    return true;
  else if (TREE_CODE (base) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      return !pi || pi->pt.anything || pi->pt.nonlocal;
    }
  return false;
}

/* Return the mode for a pointer to a given ADDRSPACE,
   defaulting to ptr_mode for all address spaces.  */

scalar_int_mode
default_addr_space_pointer_mode (addr_space_t addrspace ATTRIBUTE_UNUSED)
{
  return ptr_mode;
}

/* Return the mode for an address in a given ADDRSPACE,
   defaulting to Pmode for all address spaces.  */

scalar_int_mode
default_addr_space_address_mode (addr_space_t addrspace ATTRIBUTE_UNUSED)
{
  return Pmode;
}

/* Named address space version of valid_pointer_mode.
   To match the above, the same modes apply to all address spaces.  */

bool
default_addr_space_valid_pointer_mode (scalar_int_mode mode,
				       addr_space_t as ATTRIBUTE_UNUSED)
{
  return targetm.valid_pointer_mode (mode);
}

/* Some places still assume that all pointer or address modes are the
   standard Pmode and ptr_mode.  These optimizations become invalid if
   the target actually supports multiple different modes.  For now,
   we disable such optimizations on such targets, using this function.  */

bool
target_default_pointer_address_modes_p (void)
{
  if (targetm.addr_space.address_mode != default_addr_space_address_mode)
    return false;
  if (targetm.addr_space.pointer_mode != default_addr_space_pointer_mode)
    return false;

  return true;
}

/* Named address space version of legitimate_address_p.
   By default, all address spaces have the same form.  */

bool
default_addr_space_legitimate_address_p (machine_mode mode, rtx mem,
					 bool strict,
					 addr_space_t as ATTRIBUTE_UNUSED)
{
  return targetm.legitimate_address_p (mode, mem, strict);
}

/* Named address space version of LEGITIMIZE_ADDRESS.
   By default, all address spaces have the same form.  */

rtx
default_addr_space_legitimize_address (rtx x, rtx oldx, machine_mode mode,
				       addr_space_t as ATTRIBUTE_UNUSED)
{
  return targetm.legitimize_address (x, oldx, mode);
}

/* The default hook for determining if one named address space is a subset of
   another and to return which address space to use as the common address
   space.  */

bool
default_addr_space_subset_p (addr_space_t subset, addr_space_t superset)
{
  return (subset == superset);
}

/* The default hook for determining if 0 within a named address
   space is a valid address.  */

bool
default_addr_space_zero_address_valid (addr_space_t as ATTRIBUTE_UNUSED)
{
  return false;
}

/* The default hook for debugging the address space is to return the
   address space number to indicate DW_AT_address_class.  */

int
default_addr_space_debug (addr_space_t as)
{
  return as;
}

/* The default hook implementation for TARGET_ADDR_SPACE_DIAGNOSE_USAGE.
   Don't complain about any address space.  */

void
default_addr_space_diagnose_usage (addr_space_t, location_t)
{
}

/* The default hook for TARGET_ADDR_SPACE_CONVERT.  This hook should never be
   called for targets with only a generic address space.  */

rtx
default_addr_space_convert (rtx op ATTRIBUTE_UNUSED,
			    tree from_type ATTRIBUTE_UNUSED,
			    tree to_type ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* The default implementation of TARGET_HARD_REGNO_NREGS.  */

unsigned int
default_hard_regno_nregs (unsigned int, machine_mode mode)
{
  /* Targets with variable-sized modes must provide their own definition
     of this hook.  */
  return CEIL (GET_MODE_SIZE (mode).to_constant (), UNITS_PER_WORD);
}

bool
default_hard_regno_scratch_ok (unsigned int regno ATTRIBUTE_UNUSED)
{
  return true;
}

/* The default implementation of TARGET_MODE_DEPENDENT_ADDRESS_P.  */

bool
default_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED,
				  addr_space_t addrspace ATTRIBUTE_UNUSED)
{
  return false;
}

/* The default implementation of TARGET_NEW_ADDRESS_PROFITABLE_P.  */

bool
default_new_address_profitable_p (rtx memref ATTRIBUTE_UNUSED,
				  rtx_insn *insn ATTRIBUTE_UNUSED,
				  rtx new_addr ATTRIBUTE_UNUSED)
{
  return true;
}

bool
default_target_option_valid_attribute_p (tree ARG_UNUSED (fndecl),
					 tree ARG_UNUSED (name),
					 tree ARG_UNUSED (args),
					 int ARG_UNUSED (flags))
{
  warning (OPT_Wattributes,
	   "target attribute is not supported on this machine");

  return false;
}

bool
default_target_option_pragma_parse (tree ARG_UNUSED (args),
				    tree ARG_UNUSED (pop_target))
{
  /* If args is NULL the caller is handle_pragma_pop_options ().  In that case,
     emit no warning because "#pragma GCC pop_target" is valid on targets that
     do not have the "target" pragma.  */
  if (args)
    warning (OPT_Wpragmas,
	     "%<#pragma GCC target%> is not supported for this machine");

  return false;
}

bool
default_target_can_inline_p (tree caller, tree callee)
{
  tree callee_opts = DECL_FUNCTION_SPECIFIC_TARGET (callee);
  tree caller_opts = DECL_FUNCTION_SPECIFIC_TARGET (caller);
  if (! callee_opts)
    callee_opts = target_option_default_node;
  if (! caller_opts)
    caller_opts = target_option_default_node;

  /* If both caller and callee have attributes, assume that if the
     pointer is different, the two functions have different target
     options since build_target_option_node uses a hash table for the
     options.  */
  return callee_opts == caller_opts;
}

/* By default, return false: no target information needs to be collected
   for inlining.  Target maintainers should re-define the hook if the
   target wants to take advantage of it.  */

bool
default_need_ipa_fn_target_info (const_tree, unsigned int &)
{
  return false;
}

bool
default_update_ipa_fn_target_info (unsigned int &, const gimple *)
{
  return false;
}

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */

unsigned int
default_case_values_threshold (void)
{
  return (targetm.have_casesi () ? 4 : 5);
}

bool
default_have_conditional_execution (void)
{
  return HAVE_conditional_execution;
}

/* By default we assume that C99 functions are present at runtime,
   but sincos is not.  */
bool
default_libc_has_function (enum function_class fn_class,
			   tree type ATTRIBUTE_UNUSED)
{
  if (fn_class == function_c94
      || fn_class == function_c99_misc
      || fn_class == function_c99_math_complex)
    return true;

  return false;
}

/* By default, assume that libc does not have a fast implementation.  */

bool
default_libc_has_fast_function (int fcode ATTRIBUTE_UNUSED)
{
  return false;
}

bool
gnu_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED,
		       tree type ATTRIBUTE_UNUSED)
{
  return true;
}

bool
no_c99_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED,
			  tree type ATTRIBUTE_UNUSED)
{
  return false;
}

/* Assume some C99 functions are present at runtime, including sincos.  */
bool
bsd_libc_has_function (enum function_class fn_class,
		       tree type ATTRIBUTE_UNUSED)
{
  if (fn_class == function_c94
      || fn_class == function_c99_misc
      || fn_class == function_sincos)
    return true;

  return false;
}

tree
default_builtin_tm_load_store (tree ARG_UNUSED (type))
{
  return NULL_TREE;
}

/* Compute cost of moving registers to/from memory.  */

int
default_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
			  reg_class_t rclass ATTRIBUTE_UNUSED,
			  bool in ATTRIBUTE_UNUSED)
{
#ifndef MEMORY_MOVE_COST
  return (4 + memory_move_secondary_cost (mode, (enum reg_class) rclass, in));
#else
  return MEMORY_MOVE_COST (MACRO_MODE (mode), (enum reg_class) rclass, in);
#endif
}

/* Compute cost of moving data from a register of class FROM to one of
   TO, using MODE.  */

int
default_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
			    reg_class_t from ATTRIBUTE_UNUSED,
			    reg_class_t to ATTRIBUTE_UNUSED)
{
#ifndef REGISTER_MOVE_COST
  return 2;
#else
  return REGISTER_MOVE_COST (MACRO_MODE (mode),
			     (enum reg_class) from, (enum reg_class) to);
#endif
}

/* The default implementation of TARGET_SLOW_UNALIGNED_ACCESS.  */

bool
default_slow_unaligned_access (machine_mode, unsigned int)
{
  return STRICT_ALIGNMENT;
}

/* The default implementation of TARGET_ESTIMATED_POLY_VALUE.  */

HOST_WIDE_INT
default_estimated_poly_value (poly_int64 x, poly_value_estimate_kind)
{
  return x.coeffs[0];
}

/* For hooks which use the MOVE_RATIO macro, this gives the legacy default
   behavior.  SPEED_P is true if we are compiling for speed.  */

unsigned int
get_move_ratio (bool speed_p ATTRIBUTE_UNUSED)
{
  unsigned int move_ratio;
#ifdef MOVE_RATIO
  move_ratio = (unsigned int) MOVE_RATIO (speed_p);
#else
#if defined (HAVE_cpymemqi) || defined (HAVE_cpymemhi) || defined (HAVE_cpymemsi) || defined (HAVE_cpymemdi) || defined (HAVE_cpymemti)
  move_ratio = 2;
#else /* No cpymem patterns, pick a default.  */
  move_ratio = ((speed_p) ? 15 : 3);
#endif
#endif
  return move_ratio;
}

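/* For example, with MOVE_RATIO undefined, the function above returns 2
   when cpymem patterns exist (preferring the pattern), and otherwise 15
   when optimizing for speed or 3 for size; a by-pieces expansion is
   used only when it needs fewer instructions than this ratio (see
   below).  */
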
/* Return TRUE if the move_by_pieces/set_by_pieces infrastructure should be
   used; return FALSE if the cpymem/setmem optab should be expanded, or
   a call to memcpy emitted.  */

bool
default_use_by_pieces_infrastructure_p (unsigned HOST_WIDE_INT size,
					unsigned int alignment,
					enum by_pieces_operation op,
					bool speed_p)
{
  unsigned int max_size = 0;
  unsigned int ratio = 0;

  switch (op)
    {
    case CLEAR_BY_PIECES:
      max_size = STORE_MAX_PIECES;
      ratio = CLEAR_RATIO (speed_p);
      break;
    case MOVE_BY_PIECES:
      max_size = MOVE_MAX_PIECES;
      ratio = get_move_ratio (speed_p);
      break;
    case SET_BY_PIECES:
      max_size = STORE_MAX_PIECES;
      ratio = SET_RATIO (speed_p);
      break;
    case STORE_BY_PIECES:
      max_size = STORE_MAX_PIECES;
      ratio = get_move_ratio (speed_p);
      break;
    case COMPARE_BY_PIECES:
      max_size = COMPARE_MAX_PIECES;
      /* Pick a likely default, just as in get_move_ratio.  */
      ratio = speed_p ? 15 : 3;
      break;
    }

  return by_pieces_ninsns (size, alignment, max_size + 1, op) < ratio;
}

/* This hook controls code generation for expanding a memcmp operation by
   pieces.  Return 1 for the normal pattern of compare/jump after each pair
   of loads, or a higher number to reduce the number of branches.  */

int
default_compare_by_pieces_branch_ratio (machine_mode)
{
  return 1;
}

/* Write PATCH_AREA_SIZE NOPs into the asm outfile FILE around a function
   entry.  If RECORD_P is true and the target supports named sections,
   the location of the NOPs will be recorded in a special object section
   called "__patchable_function_entries".  This routine may be called
   twice per function to put NOPs before and after the function
   entry.  */

void
default_print_patchable_function_entry (FILE *file,
					unsigned HOST_WIDE_INT patch_area_size,
					bool record_p)
{
  const char *nop_templ = 0;
  int code_num;
  rtx_insn *my_nop = make_insn_raw (gen_nop ());

  /* We use the template alone, relying on the (currently sane) assumption
     that the NOP template does not have variable operands.  */
  code_num = recog_memoized (my_nop);
  nop_templ = get_insn_template (code_num, my_nop);

  if (record_p && targetm_common.have_named_sections)
    {
      char buf[256];
      section *previous_section = in_section;
      const char *asm_op = integer_asm_op (POINTER_SIZE_UNITS, false);

      gcc_assert (asm_op != NULL);
      /* If SECTION_LINK_ORDER is supported, this internal label will
	 be filled as the symbol for linked_to section.  */
      ASM_GENERATE_INTERNAL_LABEL (buf, "LPFE", current_function_funcdef_no);

      unsigned int flags = SECTION_WRITE | SECTION_RELRO;
      if (HAVE_GAS_SECTION_LINK_ORDER)
	flags |= SECTION_LINK_ORDER;

      section *sect = get_section ("__patchable_function_entries",
				   flags, current_function_decl);
      if (HAVE_COMDAT_GROUP && DECL_COMDAT_GROUP (current_function_decl))
	switch_to_comdat_section (sect, current_function_decl);
      else
	switch_to_section (sect);
      assemble_align (POINTER_SIZE);
      fputs (asm_op, file);
      assemble_name_raw (file, buf);
      fputc ('\n', file);

      switch_to_section (previous_section);
      ASM_OUTPUT_LABEL (file, buf);
    }

  unsigned i;
  for (i = 0; i < patch_area_size; ++i)
    output_asm_insn (nop_templ, NULL);
}

bool
default_profile_before_prologue (void)
{
#ifdef PROFILE_BEFORE_PROLOGUE
  return true;
#else
  return false;
#endif
}

/* The default implementation of TARGET_PREFERRED_RELOAD_CLASS.  */

reg_class_t
default_preferred_reload_class (rtx x ATTRIBUTE_UNUSED,
				reg_class_t rclass)
{
#ifdef PREFERRED_RELOAD_CLASS
  return (reg_class_t) PREFERRED_RELOAD_CLASS (x, (enum reg_class) rclass);
#else
  return rclass;
#endif
}

/* The default implementation of TARGET_OUTPUT_PREFERRED_RELOAD_CLASS.  */

reg_class_t
default_preferred_output_reload_class (rtx x ATTRIBUTE_UNUSED,
				       reg_class_t rclass)
{
  return rclass;
}

/* The default implementation of TARGET_PREFERRED_RENAME_CLASS.  */
reg_class_t
default_preferred_rename_class (reg_class_t rclass ATTRIBUTE_UNUSED)
{
  return NO_REGS;
}

/* The default implementation of TARGET_CLASS_LIKELY_SPILLED_P.  */

bool
default_class_likely_spilled_p (reg_class_t rclass)
{
  return (reg_class_size[(int) rclass] == 1);
}

/* The default implementation of TARGET_CLASS_MAX_NREGS.  */

unsigned char
default_class_max_nregs (reg_class_t rclass ATTRIBUTE_UNUSED,
			 machine_mode mode ATTRIBUTE_UNUSED)
{
#ifdef CLASS_MAX_NREGS
  return (unsigned char) CLASS_MAX_NREGS ((enum reg_class) rclass,
					  MACRO_MODE (mode));
#else
  /* Targets with variable-sized modes must provide their own definition
     of this hook.  */
  unsigned int size = GET_MODE_SIZE (mode).to_constant ();
  return (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
#endif
}

/* Determine the debugging unwind mechanism for the target.  */

enum unwind_info_type
default_debug_unwind_info (void)
{
  /* If the target wants to force the use of dwarf2 unwind info, let it.  */
  /* ??? Change all users to the hook, then poison this.  */
#ifdef DWARF2_FRAME_INFO
  if (DWARF2_FRAME_INFO)
    return UI_DWARF2;
#endif

  /* Otherwise, only turn it on if dwarf2 debugging is enabled.  */
#ifdef DWARF2_DEBUGGING_INFO
  if (dwarf_debuginfo_p ())
    return UI_DWARF2;
#endif

  return UI_NONE;
}

/* Targets that set NUM_POLY_INT_COEFFS to something greater than 1
   must define this hook.  */

unsigned int
default_dwarf_poly_indeterminate_value (unsigned int, unsigned int *, int *)
{
  gcc_unreachable ();
}

/* Determine the correct mode for a Dwarf frame register that represents
   register REGNO.  */

machine_mode
default_dwarf_frame_reg_mode (int regno)
{
  machine_mode save_mode = reg_raw_mode[regno];

  if (targetm.hard_regno_call_part_clobbered (eh_edge_abi.id (),
					      regno, save_mode))
    save_mode = choose_hard_reg_mode (regno, 1, &eh_edge_abi);
  return save_mode;
}

/* To be used by targets where reg_raw_mode doesn't return the right
   mode for registers used in apply_builtin_return and apply_builtin_arg.  */

fixed_size_mode
default_get_reg_raw_mode (int regno)
{
  /* Targets must override this hook if the underlying register is
     variable-sized.  */
  return as_a <fixed_size_mode> (reg_raw_mode[regno]);
}

/* Return true if a leaf function should stay leaf even with profiling
   enabled.  */

bool
default_keep_leaf_when_profiled ()
{
  return false;
}

/* Return true if the state of option OPTION should be stored in PCH files
   and checked by default_pch_valid_p.  Store the option's current state
   in STATE if so.  */

static inline bool
option_affects_pch_p (int option, struct cl_option_state *state)
{
  if ((cl_options[option].flags & CL_TARGET) == 0)
    return false;
  if ((cl_options[option].flags & CL_PCH_IGNORE) != 0)
    return false;
  if (option_flag_var (option, &global_options) == &target_flags)
    if (targetm.check_pch_target_flags)
      return false;
  return get_option_state (&global_options, option, state);
}


/* Default version of get_pch_validity.
   By default, every flag difference is fatal; that will be mostly right for
   most targets, but completely right for very few.  */

void *
default_get_pch_validity (size_t *sz)
{
  struct cl_option_state state;
  size_t i;
  char *result, *r;

  *sz = 2;
  if (targetm.check_pch_target_flags)
    *sz += sizeof (target_flags);
  for (i = 0; i < cl_options_count; i++)
    if (option_affects_pch_p (i, &state))
      *sz += state.size;

  result = r = XNEWVEC (char, *sz);
  r[0] = flag_pic;
  r[1] = flag_pie;
  r += 2;
  if (targetm.check_pch_target_flags)
    {
      memcpy (r, &target_flags, sizeof (target_flags));
      r += sizeof (target_flags);
    }

  for (i = 0; i < cl_options_count; i++)
    if (option_affects_pch_p (i, &state))
      {
	memcpy (r, state.data, state.size);
	r += state.size;
      }

  return result;
}
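
/* For reference, the validity blob built above is laid out as:

     byte 0                     flag_pic
     byte 1                     flag_pie
     sizeof (target_flags)      target_flags (present only when the target
                                defines check_pch_target_flags)
     ...                        one cl_option_state worth of data for each
                                option for which option_affects_pch_p holds

   default_pch_valid_p below walks the same layout in the same order, so
   the two functions must be kept in sync.  */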

/* Return a message which says that a PCH file was created with a different
   setting of OPTION.  */

static const char *
pch_option_mismatch (const char *option)
{
  return xasprintf (_("created and used with differing settings of '%s'"),
		    option);
}

/* Default version of pch_valid_p.  */

const char *
default_pch_valid_p (const void *data_p, size_t len ATTRIBUTE_UNUSED)
{
  struct cl_option_state state;
  const char *data = (const char *)data_p;
  size_t i;

  /* -fpic and -fpie also usually make a PCH invalid.  */
  if (data[0] != flag_pic)
    return _("created and used with different settings of %<-fpic%>");
  if (data[1] != flag_pie)
    return _("created and used with different settings of %<-fpie%>");
  data += 2;

  /* Check target_flags.  */
  if (targetm.check_pch_target_flags)
    {
      int tf;
      const char *r;

      memcpy (&tf, data, sizeof (target_flags));
      data += sizeof (target_flags);
      r = targetm.check_pch_target_flags (tf);
      if (r != NULL)
	return r;
    }

  for (i = 0; i < cl_options_count; i++)
    if (option_affects_pch_p (i, &state))
      {
	if (memcmp (data, state.data, state.size) != 0)
	  return pch_option_mismatch (cl_options[i].opt_text);
	data += state.size;
      }

  return NULL;
}
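
/* The check_pch_target_flags hook used above receives the target_flags
   the PCH was built with and returns NULL on success or an error
   message.  A minimal sketch for a hypothetical target (not a hook any
   real backend necessarily defines this way) might be:

     static const char *
     example_check_pch_target_flags (int pch_flags)
     {
       if (pch_flags != target_flags)
	 return _("PCH file was built with different target flags");
       return NULL;
     }  */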

/* Default version of cstore_mode.  */

scalar_int_mode
default_cstore_mode (enum insn_code icode)
{
  return as_a <scalar_int_mode> (insn_data[(int) icode].operand[0].mode);
}

/* Default version of member_type_forces_blk.  */

bool
default_member_type_forces_blk (const_tree, machine_mode)
{
  return false;
}

/* Default version of canonicalize_comparison.  */

void
default_canonicalize_comparison (int *, rtx *, rtx *, bool)
{
}

/* Default implementation of TARGET_ATOMIC_ASSIGN_EXPAND_FENV.  */

void
default_atomic_assign_expand_fenv (tree *, tree *, tree *)
{
}

#ifndef PAD_VARARGS_DOWN
#define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
#endif

/* Build an indirect-ref expression over the given TREE, which represents a
   piece of a va_arg() expansion.  */
tree
build_va_arg_indirect_ref (tree addr)
{
  addr = build_simple_mem_ref_loc (EXPR_LOCATION (addr), addr);
  return addr;
}

/* The "standard" implementation of va_arg: read the value from the
   current (padded) address and increment by the (padded) size.  */

tree
std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
			  gimple_seq *post_p)
{
  tree addr, t, type_size, rounded_size, valist_tmp;
  unsigned HOST_WIDE_INT align, boundary;
  bool indirect;

  /* All of the alignment and movement below is for args-grow-up machines.
     As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
     implement their own specialized gimplify_va_arg_expr routines.  */
  if (ARGS_GROW_DOWNWARD)
    gcc_unreachable ();

  indirect = pass_va_arg_by_reference (type);
  if (indirect)
    type = build_pointer_type (type);

  if (targetm.calls.split_complex_arg
      && TREE_CODE (type) == COMPLEX_TYPE
      && targetm.calls.split_complex_arg (type))
    {
      tree real_part, imag_part;

      real_part = std_gimplify_va_arg_expr (valist,
					    TREE_TYPE (type), pre_p, NULL);
      real_part = get_initialized_tmp_var (real_part, pre_p);

      imag_part = std_gimplify_va_arg_expr (unshare_expr (valist),
					    TREE_TYPE (type), pre_p, NULL);
      imag_part = get_initialized_tmp_var (imag_part, pre_p);

      return build2 (COMPLEX_EXPR, type, real_part, imag_part);
    }

  align = PARM_BOUNDARY / BITS_PER_UNIT;
  boundary = targetm.calls.function_arg_boundary (TYPE_MODE (type), type);

  /* When the caller aligns a parameter on the stack, an alignment beyond
     MAX_SUPPORTED_STACK_ALIGNMENT is reduced to
     MAX_SUPPORTED_STACK_ALIGNMENT.  Apply the same cap here so the callee
     matches the caller's layout.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  boundary /= BITS_PER_UNIT;

  /* Hoist the valist value into a temporary for the moment.  */
  valist_tmp = get_initialized_tmp_var (valist, pre_p);

  /* va_list pointer is aligned to PARM_BOUNDARY.  If argument actually
     requires greater alignment, we must perform dynamic alignment.  */
  if (boundary > align
      && !TYPE_EMPTY_P (type)
      && !integer_zerop (TYPE_SIZE (type)))
    {
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build_pointer_plus_hwi (valist_tmp, boundary - 1));
      gimplify_and_add (t, pre_p);

      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build2 (BIT_AND_EXPR, TREE_TYPE (valist),
			       valist_tmp,
			       build_int_cst (TREE_TYPE (valist), -boundary)));
      gimplify_and_add (t, pre_p);
    }
  else
    boundary = align;

  /* If the actual alignment is less than the alignment of the type,
     adjust the type accordingly so that we don't assume strict alignment
     when dereferencing the pointer.  */
  boundary *= BITS_PER_UNIT;
  if (boundary < TYPE_ALIGN (type))
    {
      type = build_variant_type_copy (type);
      SET_TYPE_ALIGN (type, boundary);
    }

  /* Compute the rounded size of the type.  */
  type_size = arg_size_in_bytes (type);
  rounded_size = round_up (type_size, align);

  /* Reduce rounded_size so it's sharable with the postqueue.  */
  gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);

  /* Get AP.  */
  addr = valist_tmp;
  if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
    {
      /* Small args are padded downward.  */
      t = fold_build2_loc (input_location, GT_EXPR, sizetype,
			   rounded_size, size_int (align));
      t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
		       size_binop (MINUS_EXPR, rounded_size, type_size));
      addr = fold_build_pointer_plus (addr, t);
    }

  /* Compute new value for AP.  */
  t = fold_build_pointer_plus (valist_tmp, rounded_size);
  t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
  gimplify_and_add (t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  if (indirect)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
}
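
/* Worked example, assuming a hypothetical args-grow-up target with
   PARM_BOUNDARY == 64 and downward padding (PAD_VARARGS_DOWN): for a
   4-byte int argument, align == 8, type_size == 4 and rounded_size == 8,
   so the COND_EXPR above evaluates to 8 - 4 == 4 and the value is read
   at ap + 4 (the int sits in the high half of its padded slot), after
   which AP is advanced by the full 8-byte slot.  */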

/* An implementation of TARGET_CAN_USE_DOLOOP_P for targets that do
   not support nested low-overhead loops.  */

bool
can_use_doloop_if_innermost (const widest_int &, const widest_int &,
			     unsigned int loop_depth, bool)
{
  return loop_depth == 1;
}

/* Default implementation of TARGET_OPTAB_SUPPORTED_P.  */

bool
default_optab_supported_p (int, machine_mode, machine_mode, optimization_type)
{
  return true;
}

/* Default implementation of TARGET_MAX_NOCE_IFCVT_SEQ_COST.  */

unsigned int
default_max_noce_ifcvt_seq_cost (edge e)
{
  bool predictable_p = predictable_edge_p (e);

  if (predictable_p)
    {
      if (OPTION_SET_P (param_max_rtl_if_conversion_predictable_cost))
	return param_max_rtl_if_conversion_predictable_cost;
    }
  else
    {
      if (OPTION_SET_P (param_max_rtl_if_conversion_unpredictable_cost))
	return param_max_rtl_if_conversion_unpredictable_cost;
    }

  return BRANCH_COST (true, predictable_p) * COSTS_N_INSNS (3);
}
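
/* For example, with the default definition of BRANCH_COST (1) and
   COSTS_N_INSNS (3) == 12, the fallback cap above works out to
   1 * 12 == 12 units of RTX cost, i.e. roughly the cost of three simple
   instructions (assuming neither --param was given explicitly).  */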

/* Default implementation of TARGET_MIN_ARITHMETIC_PRECISION.  */

unsigned int
default_min_arithmetic_precision (void)
{
  return WORD_REGISTER_OPERATIONS ? BITS_PER_WORD : BITS_PER_UNIT;
}

/* Default implementation of TARGET_C_EXCESS_PRECISION.  */

enum flt_eval_method
default_excess_precision (enum excess_precision_type ATTRIBUTE_UNUSED)
{
  return FLT_EVAL_METHOD_PROMOTE_TO_FLOAT;
}

/* Default implementation for
   TARGET_STACK_CLASH_PROTECTION_ALLOCA_PROBE_RANGE.  */
HOST_WIDE_INT
default_stack_clash_protection_alloca_probe_range (void)
{
  return 0;
}

/* The default implementation of TARGET_SELECT_EARLY_REMAT_MODES.  */

void
default_select_early_remat_modes (sbitmap)
{
}

/* The default implementation of TARGET_PREFERRED_ELSE_VALUE.  */

tree
default_preferred_else_value (unsigned, tree type, unsigned, tree *)
{
  return build_zero_cst (type);
}

/* Default implementation of TARGET_HAVE_SPECULATION_SAFE_VALUE.  */
bool
default_have_speculation_safe_value (bool active ATTRIBUTE_UNUSED)
{
#ifdef HAVE_speculation_barrier
  return active ? HAVE_speculation_barrier : true;
#else
  return false;
#endif
}

/* Alternative implementation of TARGET_HAVE_SPECULATION_SAFE_VALUE
   that can be used on targets that never have speculative execution.  */
bool
speculation_safe_value_not_needed (bool active)
{
  return !active;
}
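
/* A target on which code never executes speculatively can simply point
   the hook at the helper above in its backend, e.g.

     #define TARGET_HAVE_SPECULATION_SAFE_VALUE \
       speculation_safe_value_not_needed

   so that __builtin_speculation_safe_value expands with no barrier.  */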

/* Default implementation of the speculation-safe-load builtin.  This
   implementation simply copies val to result and generates a
   speculation_barrier insn, if such a pattern is defined.  */
rtx
default_speculation_safe_value (machine_mode mode ATTRIBUTE_UNUSED,
				rtx result, rtx val,
				rtx failval ATTRIBUTE_UNUSED)
{
  emit_move_insn (result, val);

#ifdef HAVE_speculation_barrier
  /* Assume the target knows what it is doing: if it defines a
     speculation barrier, but it is not enabled, then assume that one
     isn't needed.  */
  if (HAVE_speculation_barrier)
    emit_insn (gen_speculation_barrier ());
#endif

  return result;
}

/* How many bits to shift in order to access the tag bits.
   The default is to store the tag in the top 8 bits of a 64 bit pointer,
   hence shifting 56 bits will leave just the tag.  */
#define HWASAN_SHIFT (GET_MODE_PRECISION (Pmode) - 8)
#define HWASAN_SHIFT_RTX GEN_INT (HWASAN_SHIFT)
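
/* With those defaults a 64-bit tagged pointer is laid out as

     bits 63..56   8-bit tag
     bits 55..0    untagged address

   so a tag is isolated by shifting right HWASAN_SHIFT (56) bits, and the
   address is recovered by masking with (1 << HWASAN_SHIFT) - 1, as the
   helpers below do.  */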

bool
default_memtag_can_tag_addresses ()
{
  return false;
}

uint8_t
default_memtag_tag_size ()
{
  return 8;
}

uint8_t
default_memtag_granule_size ()
{
  return 16;
}

/* The default implementation of TARGET_MEMTAG_INSERT_RANDOM_TAG.  */
rtx
default_memtag_insert_random_tag (rtx untagged, rtx target)
{
  gcc_assert (param_hwasan_instrument_stack);
  if (param_hwasan_random_frame_tag)
    {
      rtx fn = init_one_libfunc ("__hwasan_generate_tag");
      rtx new_tag = emit_library_call_value (fn, NULL_RTX, LCT_NORMAL, QImode);
      return targetm.memtag.set_tag (untagged, new_tag, target);
    }
  else
    {
      /* NOTE: The kernel API does not have __hwasan_generate_tag exposed.
	 In the future we may add the option to emit random tags with inline
	 instrumentation instead of function calls.  This would be the same
	 between the kernel and userland.  */
      return untagged;
    }
}

/* The default implementation of TARGET_MEMTAG_ADD_TAG.  */
rtx
default_memtag_add_tag (rtx base, poly_int64 offset, uint8_t tag_offset)
{
  /* Need to look into what the most efficient code sequence is.
     This is a code sequence that would be emitted *many* times, so we
     want it as small as possible.

     There are two places where tag overflow is a question:
       - Tagging the shadow stack (both tagging and untagging).
       - Tagging addressable pointers.

     We need to ensure both behaviors are the same (i.e. that the tag that
     ends up in a pointer after "overflowing" the tag bits with a tag addition
     is the same that ends up in the shadow space).

     The aim is that the behavior of tag addition should follow modulo
     wrapping in both instances.

     The libhwasan code doesn't have any path that increments a pointer's
     tag, which means it has no opinion on what happens when a tag increment
     overflows (and hence we can choose our own behavior).  */

  offset += ((uint64_t)tag_offset << HWASAN_SHIFT);
  return plus_constant (Pmode, base, offset);
}
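
/* For example, adding tag_offset == 3 to a pointer whose current tag is
   0xfe carries out of the top of the register: 0xfe + 3 == 0x101, the
   ninth bit is discarded, and the resulting tag is 0x01, which is
   exactly the modulo wrapping described above.  */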

/* The default implementation of TARGET_MEMTAG_SET_TAG.  */
rtx
default_memtag_set_tag (rtx untagged, rtx tag, rtx target)
{
  gcc_assert (GET_MODE (untagged) == Pmode && GET_MODE (tag) == QImode);
  tag = expand_simple_binop (Pmode, ASHIFT, tag, HWASAN_SHIFT_RTX, NULL_RTX,
			     /* unsignedp = */1, OPTAB_WIDEN);
  rtx ret = expand_simple_binop (Pmode, IOR, untagged, tag, target,
				 /* unsignedp = */1, OPTAB_DIRECT);
  gcc_assert (ret);
  return ret;
}
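
/* I.e. for an untagged pointer P and tag T this computes
   P | (T << HWASAN_SHIFT).  Since the combination uses IOR rather than a
   masked insert, it relies on UNTAGGED really having zero tag bits;
   stale tag bits would be OR'd in rather than replaced.  */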

/* The default implementation of TARGET_MEMTAG_EXTRACT_TAG.  */
rtx
default_memtag_extract_tag (rtx tagged_pointer, rtx target)
{
  rtx tag = expand_simple_binop (Pmode, LSHIFTRT, tagged_pointer,
				 HWASAN_SHIFT_RTX, target,
				 /* unsignedp = */0,
				 OPTAB_DIRECT);
  rtx ret = gen_lowpart (QImode, tag);
  gcc_assert (ret);
  return ret;
}

/* The default implementation of TARGET_MEMTAG_UNTAGGED_POINTER.  */
rtx
default_memtag_untagged_pointer (rtx tagged_pointer, rtx target)
{
  rtx tag_mask = gen_int_mode ((HOST_WIDE_INT_1U << HWASAN_SHIFT) - 1, Pmode);
  rtx untagged_base = expand_simple_binop (Pmode, AND, tagged_pointer,
					   tag_mask, target, true,
					   OPTAB_DIRECT);
  gcc_assert (untagged_base);
  return untagged_base;
}
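
/* For example, with HWASAN_SHIFT == 56 the mask is 0x00ffffffffffffff,
   so ANDing the tagged pointer 0xab00000000001234 with it yields the
   untagged address 0x0000000000001234.  */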

/* The default implementation of TARGET_GCOV_TYPE_SIZE.  */
HOST_WIDE_INT
default_gcov_type_size (void)
{
  return TYPE_PRECISION (long_long_integer_type_node) > 32 ? 64 : 32;
}

#include "gt-targhooks.h"