/* Default target hook functions.
   Copyright (C) 2003-2022 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* The migration of target macros to target hooks works as follows:

   1. Create a target hook that uses the existing target macros to
      implement the same functionality.

   2. Convert all the MI files to use the hook instead of the macro.

   3. Repeat for a majority of the remaining target macros.  This will
      take some time.

   4. Tell target maintainers to start migrating.

   5. Eventually convert the backends to override the hook instead of
      defining the macros.  This will take some time too.

   6. TBD when, poison the macros.  Unmigrated targets will break at
      this point.

   Note that we expect steps 1-3 to be done by the people that
   understand what the MI does with each macro, and step 5 to be done
   by the target maintainers for their respective targets.

   Note that steps 1 and 2 don't have to be done together, but no
   target can override the new hook until step 2 is complete for it.

   Once the macros are poisoned, we will revert to the old migration
   rules - migrate the macro, callers, and targets all at once.  This
   comment can thus be removed at that point.  */
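/* As an illustration of step 1 (a sketch, not code from this file): a
   default hook wrapping a legacy macro typically looks like

     int
     default_frob_threshold (void)
     {
     #ifdef FROB_THRESHOLD
       return FROB_THRESHOLD;
     #else
       return 2;
     #endif
     }

   where FROB_THRESHOLD and default_frob_threshold are hypothetical
   names; many of the default hooks below follow exactly this shape.  */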
#include "coretypes.h"
#include "tree-ssa-alias.h"
#include "gimple-expr.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "profile-count.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "common/common-target.h"
#include "langhooks.h"
#include "function-abi.h"
#include "tree-vectorizer.h"
bool
default_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED,
			      rtx addr ATTRIBUTE_UNUSED,
			      bool strict ATTRIBUTE_UNUSED)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  /* Defer to the old implementation using a goto.  */
  if (strict)
    return strict_memory_address_p (mode, addr);
  else
    return memory_address_p (mode, addr);
#else
  gcc_unreachable ();
#endif
}
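/* A backend overrides this via TARGET_LEGITIMATE_ADDRESS_P.  A minimal
   sketch for a hypothetical port (names illustrative, not taken from
   any real backend):

     static bool
     foo_legitimate_address_p (machine_mode, rtx x, bool strict)
     {
       if (REG_P (x))
	 return strict ? HARD_REGISTER_P (x) : true;
       return CONST_INT_P (x);
     }
     #undef TARGET_LEGITIMATE_ADDRESS_P
     #define TARGET_LEGITIMATE_ADDRESS_P foo_legitimate_address_p  */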
void
default_external_libcall (rtx fun ATTRIBUTE_UNUSED)
{
#ifdef ASM_OUTPUT_EXTERNAL_LIBCALL
  ASM_OUTPUT_EXTERNAL_LIBCALL (asm_out_file, fun);
#endif
}
int
default_unspec_may_trap_p (const_rtx x, unsigned flags)
{
  int i;

  /* Any floating arithmetic may trap.  */
  if (SCALAR_FLOAT_MODE_P (GET_MODE (x)) && flag_trapping_math)
    return 1;

  for (i = 0; i < XVECLEN (x, 0); ++i)
    {
      if (may_trap_p_1 (XVECEXP (x, 0, i), flags))
	return 1;
    }

  return 0;
}
machine_mode
default_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
			       machine_mode mode,
			       int *punsignedp ATTRIBUTE_UNUSED,
			       const_tree funtype ATTRIBUTE_UNUSED,
			       int for_return ATTRIBUTE_UNUSED)
{
  if (type != NULL_TREE && for_return == 2)
    return promote_mode (type, mode, punsignedp);
  return mode;
}
machine_mode
default_promote_function_mode_always_promote (const_tree type,
					      machine_mode mode,
					      int *punsignedp,
					      const_tree funtype ATTRIBUTE_UNUSED,
					      int for_return ATTRIBUTE_UNUSED)
{
  return promote_mode (type, mode, punsignedp);
}
machine_mode
default_cc_modes_compatible (machine_mode m1, machine_mode m2)
{
  if (m1 == m2)
    return m1;
  return VOIDmode;
}
bool
default_return_in_memory (const_tree type,
			  const_tree fntype ATTRIBUTE_UNUSED)
{
  return (TYPE_MODE (type) == BLKmode);
}
rtx
default_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
			    machine_mode mode ATTRIBUTE_UNUSED)
{
  return x;
}

bool
default_legitimize_address_displacement (rtx *, rtx *, poly_int64,
					 machine_mode)
{
  return false;
}
bool
default_const_not_ok_for_debug_p (rtx x)
{
  if (GET_CODE (x) == UNSPEC)
    return true;
  return false;
}

rtx
default_expand_builtin_saveregs (void)
{
  error ("%<__builtin_saveregs%> not supported by this target");
  return const0_rtx;
}

void
default_setup_incoming_varargs (cumulative_args_t,
				const function_arg_info &, int *, int)
{
}
/* The default implementation of TARGET_BUILTIN_SETJMP_FRAME_VALUE.  */

rtx
default_builtin_setjmp_frame_value (void)
{
  return virtual_stack_vars_rtx;
}

/* Generic hook that takes a CUMULATIVE_ARGS pointer and returns false.  */

bool
hook_bool_CUMULATIVE_ARGS_false (cumulative_args_t ca ATTRIBUTE_UNUSED)
{
  return false;
}

bool
default_pretend_outgoing_varargs_named (cumulative_args_t ca ATTRIBUTE_UNUSED)
{
  return (targetm.calls.setup_incoming_varargs
	  != default_setup_incoming_varargs);
}
scalar_int_mode
default_eh_return_filter_mode (void)
{
  return targetm.unwind_word_mode ();
}

scalar_int_mode
default_libgcc_cmp_return_mode (void)
{
  return word_mode;
}

scalar_int_mode
default_libgcc_shift_count_mode (void)
{
  return word_mode;
}

scalar_int_mode
default_unwind_word_mode (void)
{
  return word_mode;
}
/* The default implementation of TARGET_SHIFT_TRUNCATION_MASK.  */

unsigned HOST_WIDE_INT
default_shift_truncation_mask (machine_mode mode)
{
  return SHIFT_COUNT_TRUNCATED ? GET_MODE_UNIT_BITSIZE (mode) - 1 : 0;
}
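/* Worked example (illustrative arithmetic): on a target that defines
   SHIFT_COUNT_TRUNCATED to 1, a 32-bit SImode shift gets the mask 31
   (0x1f), so a variable count of 37 behaves like 37 & 31 == 5.  With
   SHIFT_COUNT_TRUNCATED 0 the hook returns 0 and the middle end may
   not assume any truncation.  */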
/* The default implementation of TARGET_MIN_DIVISIONS_FOR_RECIP_MUL.  */

unsigned int
default_min_divisions_for_recip_mul (machine_mode mode ATTRIBUTE_UNUSED)
{
  return have_insn_for (DIV, mode) ? 3 : 2;
}

/* The default implementation of TARGET_MODE_REP_EXTENDED.  */

int
default_mode_rep_extended (scalar_int_mode, scalar_int_mode)
{
  return UNKNOWN;
}

/* Generic hook that takes a CUMULATIVE_ARGS pointer and returns true.  */

bool
hook_bool_CUMULATIVE_ARGS_true (cumulative_args_t a ATTRIBUTE_UNUSED)
{
  return true;
}

/* Return machine mode for non-standard suffix
   or VOIDmode if non-standard suffixes are unsupported.  */

machine_mode
default_mode_for_suffix (char suffix ATTRIBUTE_UNUSED)
{
  return VOIDmode;
}

/* The generic C++ ABI specifies this is a 64-bit value.  */

tree
default_cxx_guard_type (void)
{
  return long_long_integer_type_node;
}
/* Returns the size of the cookie to use when allocating an array
   whose elements have the indicated TYPE.  Assumes that it is already
   known that a cookie is needed.  */

tree
default_cxx_get_cookie_size (tree type)
{
  tree cookie_size;

  /* We need to allocate an additional max (sizeof (size_t), alignof
     (true_type)) bytes.  */
  tree sizetype_size;
  tree type_align;

  sizetype_size = size_in_bytes (sizetype);
  type_align = size_int (TYPE_ALIGN_UNIT (type));
  if (tree_int_cst_lt (type_align, sizetype_size))
    cookie_size = sizetype_size;
  else
    cookie_size = type_align;

  return cookie_size;
}
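/* Worked example (illustrative): on a typical LP64 target,
   sizeof (size_t) is 8.  For new double[n], alignof (double) is 8, so
   the cookie is max (8, 8) = 8 bytes; for an element type aligned to
   16 bytes the cookie grows to 16 so that the first array element
   after the cookie stays properly aligned.  */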
/* Return true if a parameter must be passed by reference.  This version
   of the TARGET_PASS_BY_REFERENCE hook uses just MUST_PASS_IN_STACK.  */

bool
hook_pass_by_reference_must_pass_in_stack (cumulative_args_t,
					   const function_arg_info &arg)
{
  return targetm.calls.must_pass_in_stack (arg);
}

/* Return true if a parameter follows callee copies conventions.  This
   version of the hook is true for all named arguments.  */

bool
hook_callee_copies_named (cumulative_args_t, const function_arg_info &arg)
{
  return arg.named;
}

/* Emit to STREAM the assembler syntax for insn operand X.  */

void
default_print_operand (FILE *stream ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED,
		       int code ATTRIBUTE_UNUSED)
{
#ifdef PRINT_OPERAND
  PRINT_OPERAND (stream, x, code);
#else
  gcc_unreachable ();
#endif
}
/* Emit to STREAM the assembler syntax for an insn operand whose memory
   address is X.  */

void
default_print_operand_address (FILE *stream ATTRIBUTE_UNUSED,
			       machine_mode /*mode*/,
			       rtx x ATTRIBUTE_UNUSED)
{
#ifdef PRINT_OPERAND_ADDRESS
  PRINT_OPERAND_ADDRESS (stream, x);
#else
  gcc_unreachable ();
#endif
}

/* Return true if CODE is a valid punctuation character for the
   `print_operand' hook.  */

bool
default_print_operand_punct_valid_p (unsigned char code ATTRIBUTE_UNUSED)
{
#ifdef PRINT_OPERAND_PUNCT_VALID_P
  return PRINT_OPERAND_PUNCT_VALID_P (code);
#else
  return false;
#endif
}
/* The default implementation of TARGET_MANGLE_ASSEMBLER_NAME.  */

tree
default_mangle_assembler_name (const char *name ATTRIBUTE_UNUSED)
{
  const char *skipped = name + (*name == '*' ? 1 : 0);
  const char *stripped = targetm.strip_name_encoding (skipped);
  if (*name != '*' && user_label_prefix[0])
    stripped = ACONCAT ((user_label_prefix, stripped, NULL));
  return get_identifier (stripped);
}
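/* For example (illustrative): with user_label_prefix "_", the source
   name "foo" mangles to the assembler name "_foo", while "*foo" (a
   leading '*' means the name is already an assembler name) has the
   '*' stripped and no prefix added, yielding "foo".  */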
/* The default implementation of TARGET_TRANSLATE_MODE_ATTRIBUTE.  */

machine_mode
default_translate_mode_attribute (machine_mode mode)
{
  return mode;
}

/* True if MODE is valid for the target.  By "valid", we mean able to
   be manipulated in non-trivial ways.  In particular, this means all
   the arithmetic is supported.

   By default we guess this means that any C type is supported.  If
   we can't map the mode back to a type that would be available in C,
   then reject it.  The special case here is the double-word
   arithmetic supported by optabs.cc.  */
bool
default_scalar_mode_supported_p (scalar_mode mode)
{
  int precision = GET_MODE_PRECISION (mode);

  switch (GET_MODE_CLASS (mode))
    {
    case MODE_PARTIAL_INT:
    case MODE_INT:
      if (precision == CHAR_TYPE_SIZE)
	return true;
      if (precision == SHORT_TYPE_SIZE)
	return true;
      if (precision == INT_TYPE_SIZE)
	return true;
      if (precision == LONG_TYPE_SIZE)
	return true;
      if (precision == LONG_LONG_TYPE_SIZE)
	return true;
      if (precision == 2 * BITS_PER_WORD)
	return true;
      return false;

    case MODE_FLOAT:
      if (precision == FLOAT_TYPE_SIZE)
	return true;
      if (precision == DOUBLE_TYPE_SIZE)
	return true;
      if (precision == LONG_DOUBLE_TYPE_SIZE)
	return true;
      return false;

    case MODE_DECIMAL_FLOAT:
/* Return true if libgcc supports floating-point mode MODE (known to
   be supported as a scalar mode).  */

bool
default_libgcc_floating_mode_supported_p (scalar_float_mode mode)
/* Return the machine mode to use for the type _FloatN, if EXTENDED is
   false, or _FloatNx, if EXTENDED is true, or VOIDmode if not
   supported.  */

opt_scalar_float_mode
default_floatn_mode (int n, bool extended)
{
  if (extended)
    {
      opt_scalar_float_mode cand1, cand2;
      scalar_float_mode mode;

      /* Those are the only valid _FloatNx types.  */

      if (cand1.exists (&mode)
	  && REAL_MODE_FORMAT (mode)->ieee_bits > n
	  && targetm.scalar_mode_supported_p (mode)
	  && targetm.libgcc_floating_mode_supported_p (mode))
	return cand1;
      if (cand2.exists (&mode)
	  && REAL_MODE_FORMAT (mode)->ieee_bits > n
	  && targetm.scalar_mode_supported_p (mode)
	  && targetm.libgcc_floating_mode_supported_p (mode))
	return cand2;
    }
  else
    {
      opt_scalar_float_mode cand;
      scalar_float_mode mode;

      /* Always enable _Float16 if we have basic support for the mode.
	 Targets can control the range and precision of operations on
	 the _Float16 type using TARGET_C_EXCESS_PRECISION.  */
      if (cand.exists (&mode)
	  && REAL_MODE_FORMAT (mode)->ieee_bits == n
	  && targetm.scalar_mode_supported_p (mode)
	  && targetm.libgcc_floating_mode_supported_p (mode))
	return cand;
    }

  return opt_scalar_float_mode ();
}
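/* For instance (illustrative, target-dependent): on a target whose
   SFmode and DFmode follow the IEEE binary32/binary64 formats,
   default_floatn_mode (32, false) yields SFmode for _Float32, while
   default_floatn_mode (32, true) yields DFmode for _Float32x, since
   _Float32x needs a format with ieee_bits greater than 32.  */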
/* Define this to return true if the _FloatN and _FloatNx built-in functions
   should implicitly enable the built-in function without the __builtin_
   prefix in addition to the normal built-in function with the __builtin_
   prefix.  The default is to only enable built-in functions without the
   __builtin_ prefix for the GNU C language.  The argument FUNC is the
   enum built_in_function id of the function to be enabled.  */

bool
default_floatn_builtin_p (int func ATTRIBUTE_UNUSED)
{
  static bool first_time_p = true;
  static bool c_or_objective_c;

  if (first_time_p)
    {
      first_time_p = false;
      c_or_objective_c = lang_GNU_C () || lang_GNU_OBJC ();
    }

  return c_or_objective_c;
}
/* Make some target macros usable by target-independent code.  */

bool
targhook_words_big_endian (void)
{
  return !!WORDS_BIG_ENDIAN;
}

bool
targhook_float_words_big_endian (void)
{
  return !!FLOAT_WORDS_BIG_ENDIAN;
}

/* True if the target supports floating-point exceptions and rounding
   modes.  */

bool
default_float_exceptions_rounding_supported_p (void)
{
#ifdef HAVE_adddf3
  return HAVE_adddf3;
#else
  return false;
#endif
}

/* True if the target supports decimal floating point.  */

bool
default_decimal_float_supported_p (void)
{
  return ENABLE_DECIMAL_FLOAT;
}

/* True if the target supports fixed-point arithmetic.  */

bool
default_fixed_point_supported_p (void)
{
  return ENABLE_FIXED_POINT;
}

/* True if the target supports GNU indirect functions.  */

bool
default_has_ifunc_p (void)
{
  return HAVE_GNU_INDIRECT_FUNCTION;
}
/* Return true if we predict the loop LOOP will be transformed to a
   low-overhead loop, otherwise return false.

   By default, false is returned, as this hook's applicability should be
   verified for each target.  Target maintainers should re-define the hook
   if the target can take advantage of it.  */

bool
default_predict_doloop_p (class loop *loop ATTRIBUTE_UNUSED)
{
  return false;
}

/* By default, just use the input MODE itself.  */

machine_mode
default_preferred_doloop_mode (machine_mode mode)
{
  return mode;
}

/* Return NULL if INSN is valid within a low-overhead loop, otherwise
   return the reason it is invalid.

   This function checks whether a given INSN is valid within a low-overhead
   loop.  If INSN is invalid it returns the reason for that, otherwise it
   returns NULL.  A called function may clobber any special registers required
   for low-overhead looping.  Additionally, some targets (e.g., PPC) use the
   count register for branch on table instructions.  We reject the doloop
   pattern in these cases.  */

const char *
default_invalid_within_doloop (const rtx_insn *insn)
{
  if (CALL_P (insn))
    return "Function call in loop.";

  if (tablejump_p (insn, NULL, NULL) || computed_jump_p (insn))
    return "Computed branch in the loop.";

  return NULL;
}
/* Mapping of builtin functions to vectorized variants.  */

tree
default_builtin_vectorized_function (unsigned int, tree, tree)
{
  return NULL_TREE;
}

/* Mapping of target builtin functions to vectorized variants.  */

tree
default_builtin_md_vectorized_function (tree, tree, tree)
{
  return NULL_TREE;
}

/* Default vectorizer cost model values.  */

int
default_builtin_vectorization_cost (enum vect_cost_for_stmt type_of_cost,
				    tree vectype,
				    int misalign ATTRIBUTE_UNUSED)
{
  switch (type_of_cost)
    {
    case cond_branch_not_taken:
    case vec_promote_demote:
      return 1;

    case unaligned_store:
      return 2;

    case cond_branch_taken:
      return 3;

    case vec_construct:
      return estimated_poly_value (TYPE_VECTOR_SUBPARTS (vectype)) - 1;
    }
}
tree
default_builtin_reciprocal (tree)
{
  return NULL_TREE;
}

bool
hook_bool_CUMULATIVE_ARGS_arg_info_false (cumulative_args_t,
					  const function_arg_info &)
{
  return false;
}

bool
hook_bool_CUMULATIVE_ARGS_arg_info_true (cumulative_args_t,
					 const function_arg_info &)
{
  return true;
}

int
hook_int_CUMULATIVE_ARGS_arg_info_0 (cumulative_args_t,
				     const function_arg_info &)
{
  return 0;
}

void
hook_void_CUMULATIVE_ARGS_tree (cumulative_args_t ca ATTRIBUTE_UNUSED,
				tree ATTRIBUTE_UNUSED)
{
}

/* Default implementation of TARGET_PUSH_ARGUMENT.  */

bool
default_push_argument (unsigned int)
{
#ifdef PUSH_ROUNDING
  return !ACCUMULATE_OUTGOING_ARGS;
#else
  return false;
#endif
}
void
default_function_arg_advance (cumulative_args_t, const function_arg_info &)
{
  gcc_unreachable ();
}

/* Default implementation of TARGET_FUNCTION_ARG_OFFSET.  */

HOST_WIDE_INT
default_function_arg_offset (machine_mode, const_tree)
{
  return 0;
}

/* Default implementation of TARGET_FUNCTION_ARG_PADDING: usually pad
   upward, but pad short args downward on big-endian machines.  */

pad_direction
default_function_arg_padding (machine_mode mode, const_tree type)
{
  if (!BYTES_BIG_ENDIAN)
    return PAD_UPWARD;

  unsigned HOST_WIDE_INT size;
  if (mode == BLKmode)
    {
      if (!type || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
	return PAD_UPWARD;
      size = int_size_in_bytes (type);
    }
  else
    /* Targets with variable-sized modes must override this hook
       and handle variable-sized modes explicitly.  */
    size = GET_MODE_SIZE (mode).to_constant ();

  if (size < (PARM_BOUNDARY / BITS_PER_UNIT))
    return PAD_DOWNWARD;

  return PAD_UPWARD;
}
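/* Worked example (illustrative): on a big-endian target with
   PARM_BOUNDARY of 32, a 1-byte char argument has size 1 < 32/8 = 4,
   so the hook returns PAD_DOWNWARD; a 4-byte int fills its slot and
   gets the usual PAD_UPWARD.  */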
rtx
default_function_arg (cumulative_args_t, const function_arg_info &)
{
  gcc_unreachable ();
}

rtx
default_function_incoming_arg (cumulative_args_t, const function_arg_info &)
{
  gcc_unreachable ();
}

unsigned int
default_function_arg_boundary (machine_mode mode ATTRIBUTE_UNUSED,
			       const_tree type ATTRIBUTE_UNUSED)
{
  return PARM_BOUNDARY;
}

unsigned int
default_function_arg_round_boundary (machine_mode mode ATTRIBUTE_UNUSED,
				     const_tree type ATTRIBUTE_UNUSED)
{
  return PARM_BOUNDARY;
}

void
hook_void_bitmap (bitmap regs ATTRIBUTE_UNUSED)
{
}

const char *
hook_invalid_arg_for_unprototyped_fn (
	const_tree typelist ATTRIBUTE_UNUSED,
	const_tree funcdecl ATTRIBUTE_UNUSED,
	const_tree val ATTRIBUTE_UNUSED)
{
  return NULL;
}
/* Initialize the stack protection decls.  */

/* Stack protection related decls living in libgcc.  */
static GTY(()) tree stack_chk_guard_decl;

tree
default_stack_protect_guard (void)
{
  tree t = stack_chk_guard_decl;

  if (t == NULL)
    {
      rtx x;

      t = build_decl (UNKNOWN_LOCATION,
		      VAR_DECL, get_identifier ("__stack_chk_guard"),
		      ptr_type_node);
      TREE_STATIC (t) = 1;
      TREE_PUBLIC (t) = 1;
      DECL_EXTERNAL (t) = 1;
      TREE_USED (t) = 1;
      TREE_THIS_VOLATILE (t) = 1;
      DECL_ARTIFICIAL (t) = 1;
      DECL_IGNORED_P (t) = 1;

      /* Do not share RTL as the declaration is visible outside of
	 current function.  */
      x = DECL_RTL (t);
      RTX_FLAG (x, used) = 1;

      stack_chk_guard_decl = t;
    }

  return t;
}
static GTY(()) tree stack_chk_fail_decl;

tree
default_external_stack_protect_fail (void)
{
  tree t = stack_chk_fail_decl;

  if (t == NULL_TREE)
    {
      t = build_function_type_list (void_type_node, NULL_TREE);
      t = build_decl (UNKNOWN_LOCATION,
		      FUNCTION_DECL, get_identifier ("__stack_chk_fail"), t);
      TREE_STATIC (t) = 1;
      TREE_PUBLIC (t) = 1;
      DECL_EXTERNAL (t) = 1;
      TREE_USED (t) = 1;
      TREE_THIS_VOLATILE (t) = 1;
      TREE_NOTHROW (t) = 1;
      DECL_ARTIFICIAL (t) = 1;
      DECL_IGNORED_P (t) = 1;
      DECL_VISIBILITY (t) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (t) = 1;

      stack_chk_fail_decl = t;
    }

  return build_call_expr (t, 0);
}
tree
default_hidden_stack_protect_fail (void)
{
#ifndef HAVE_GAS_HIDDEN
  return default_external_stack_protect_fail ();
#else
  tree t = stack_chk_fail_decl;

  if (!flag_pic)
    return default_external_stack_protect_fail ();

  if (t == NULL_TREE)
    {
      t = build_function_type_list (void_type_node, NULL_TREE);
      t = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
		      get_identifier ("__stack_chk_fail_local"), t);
      TREE_STATIC (t) = 1;
      TREE_PUBLIC (t) = 1;
      DECL_EXTERNAL (t) = 1;
      TREE_USED (t) = 1;
      TREE_THIS_VOLATILE (t) = 1;
      TREE_NOTHROW (t) = 1;
      DECL_ARTIFICIAL (t) = 1;
      DECL_IGNORED_P (t) = 1;
      DECL_VISIBILITY_SPECIFIED (t) = 1;
      DECL_VISIBILITY (t) = VISIBILITY_HIDDEN;

      stack_chk_fail_decl = t;
    }

  return build_call_expr (t, 0);
#endif
}
bool
hook_bool_const_rtx_commutative_p (const_rtx x,
				   int outer_code ATTRIBUTE_UNUSED)
{
  return COMMUTATIVE_P (x);
}

rtx
default_function_value (const_tree ret_type ATTRIBUTE_UNUSED,
			const_tree fn_decl_or_type,
			bool outgoing ATTRIBUTE_UNUSED)
{
  /* The old interface doesn't handle receiving the function type.  */
  if (fn_decl_or_type
      && !DECL_P (fn_decl_or_type))
    fn_decl_or_type = NULL;

#ifdef FUNCTION_VALUE
  return FUNCTION_VALUE (ret_type, fn_decl_or_type);
#else
  gcc_unreachable ();
#endif
}
rtx
default_libcall_value (machine_mode mode ATTRIBUTE_UNUSED,
		       const_rtx fun ATTRIBUTE_UNUSED)
{
#ifdef LIBCALL_VALUE
  return LIBCALL_VALUE (MACRO_MODE (mode));
#else
  gcc_unreachable ();
#endif
}

/* The default hook for TARGET_FUNCTION_VALUE_REGNO_P.  */

bool
default_function_value_regno_p (const unsigned int regno ATTRIBUTE_UNUSED)
{
#ifdef FUNCTION_VALUE_REGNO_P
  return FUNCTION_VALUE_REGNO_P (regno);
#else
  gcc_unreachable ();
#endif
}
/* Choose the mode and rtx to use to zero REGNO, storing them in PMODE and
   PREGNO_RTX and returning TRUE if successful, otherwise returning FALSE.  If
   the natural mode for REGNO doesn't work, attempt to group it with subsequent
   adjacent registers set in TOZERO.  */

static bool
zcur_select_mode_rtx (unsigned int regno, machine_mode *pmode,
		      rtx *pregno_rtx, HARD_REG_SET tozero)
{
  rtx regno_rtx = regno_reg_rtx[regno];
  machine_mode mode = GET_MODE (regno_rtx);

  /* If the natural mode doesn't work, try some wider mode.  */
  if (!targetm.hard_regno_mode_ok (regno, mode))
    {
      bool found = false;
      for (int nregs = 2;
	   !found && nregs <= hard_regno_max_nregs
	     && regno + nregs <= FIRST_PSEUDO_REGISTER
	     && TEST_HARD_REG_BIT (tozero, regno + nregs - 1);
	   nregs++)
	{
	  mode = choose_hard_reg_mode (regno, nregs, 0);
	  if (mode == E_VOIDmode)
	    continue;
	  gcc_checking_assert (targetm.hard_regno_mode_ok (regno, mode));
	  regno_rtx = gen_rtx_REG (mode, regno);
	  found = true;
	}
      if (!found)
	return false;
    }

  *pmode = mode;
  *pregno_rtx = regno_rtx;
  return true;
}
/* The default hook for TARGET_ZERO_CALL_USED_REGS.  */

HARD_REG_SET
default_zero_call_used_regs (HARD_REG_SET need_zeroed_hardregs)
{
  gcc_assert (!hard_reg_set_empty_p (need_zeroed_hardregs));

  HARD_REG_SET failed;
  CLEAR_HARD_REG_SET (failed);
  bool progress = false;

  /* First, try to zero each register in need_zeroed_hardregs by
     loading a zero into it, taking note of any failures in
     FAILED.  */
  for (unsigned int regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (TEST_HARD_REG_BIT (need_zeroed_hardregs, regno))
      {
	rtx_insn *last_insn = get_last_insn ();
	machine_mode mode;
	rtx regno_rtx;

	if (!zcur_select_mode_rtx (regno, &mode, &regno_rtx,
				   need_zeroed_hardregs))
	  {
	    SET_HARD_REG_BIT (failed, regno);
	    continue;
	  }

	rtx zero = CONST0_RTX (mode);
	rtx_insn *insn = emit_move_insn (regno_rtx, zero);
	if (!valid_insn_p (insn))
	  {
	    SET_HARD_REG_BIT (failed, regno);
	    delete_insns_since (last_insn);
	  }
	else
	  {
	    progress = true;
	    regno += hard_regno_nregs (regno, mode) - 1;
	  }
      }
  /* Now retry with copies from zeroed registers, as long as we've
     made some PROGRESS, and registers remain to be zeroed in
     FAILED.  */
  while (progress && !hard_reg_set_empty_p (failed))
    {
      HARD_REG_SET retrying = failed;

      CLEAR_HARD_REG_SET (failed);
      progress = false;

      for (unsigned int regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (TEST_HARD_REG_BIT (retrying, regno))
	  {
	    machine_mode mode;
	    rtx regno_rtx;

	    /* This might select registers we've already zeroed.  If grouping
	       with them is what it takes to get regno zeroed, so be it.  */
	    if (!zcur_select_mode_rtx (regno, &mode, &regno_rtx,
				       need_zeroed_hardregs))
	      {
		SET_HARD_REG_BIT (failed, regno);
		continue;
	      }

	    bool success = false;
	    /* Look for a source.  */
	    for (unsigned int src = 0; src < FIRST_PSEUDO_REGISTER; src++)
	      {
		/* If SRC hasn't been zeroed (yet?), skip it.  */
		if (! TEST_HARD_REG_BIT (need_zeroed_hardregs, src))
		  continue;
		if (TEST_HARD_REG_BIT (retrying, src))
		  continue;

		/* Check that SRC can hold MODE, and that any other
		   registers needed to hold MODE in SRC have also been
		   zeroed.  */
		if (!targetm.hard_regno_mode_ok (src, mode))
		  continue;
		unsigned n = targetm.hard_regno_nregs (src, mode);
		bool ok = true;
		for (unsigned i = 1; ok && i < n; i++)
		  ok = (TEST_HARD_REG_BIT (need_zeroed_hardregs, src + i)
			&& !TEST_HARD_REG_BIT (retrying, src + i));
		if (!ok)
		  continue;

		/* SRC is usable, try to copy from it.  */
		rtx_insn *last_insn = get_last_insn ();
		rtx src_rtx = gen_rtx_REG (mode, src);
		rtx_insn *insn = emit_move_insn (regno_rtx, src_rtx);
		if (!valid_insn_p (insn))
		  /* It didn't work, remove any inserts.  We'll look
		     for another SRC.  */
		  delete_insns_since (last_insn);
		else
		  {
		    /* We're done for REGNO.  */
		    success = true;
		    break;
		  }
	      }

	    /* If nothing worked for REGNO this round, mark it to be
	       retried if we get another round.  */
	    if (!success)
	      SET_HARD_REG_BIT (failed, regno);
	    else
	      {
		/* Take note so as to enable another round if needed.  */
		progress = true;
		regno += hard_regno_nregs (regno, mode) - 1;
	      }
	  }
    }

  /* If any register remained, report it.  */
  if (!progress)
    {
      static bool issued_error;
      if (!issued_error)
	{
	  const char *name = NULL;
	  for (unsigned int i = 0; zero_call_used_regs_opts[i].name != NULL;
	       i++)
	    if (flag_zero_call_used_regs == zero_call_used_regs_opts[i].flag)
	      {
		name = zero_call_used_regs_opts[i].name;
		break;
	      }

	  issued_error = true;
	  sorry ("argument %qs is not supported for %qs on this target",
		 name, "-fzero-call-used-regs");
	}
    }

  return need_zeroed_hardregs;
}
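/* A target can refine rather than replace this strategy.  A sketch of
   a hypothetical override (names illustrative) that lets the generic
   code do the work and then clears one extra piece of target state:

     static HARD_REG_SET
     foo_zero_call_used_regs (HARD_REG_SET need_zeroed_hardregs)
     {
       HARD_REG_SET zeroed
	 = default_zero_call_used_regs (need_zeroed_hardregs);
       emit_insn (gen_foo_clear_flags ());  // hypothetical pattern
       return zeroed;
     }  */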
rtx
default_internal_arg_pointer (void)
{
  /* If the reg that the virtual arg pointer will be translated into is
     not a fixed reg or is the stack pointer, make a copy of the virtual
     arg pointer, and address parms via the copy.  The frame pointer is
     considered fixed even though it is not marked as such.  */
  if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
       || ! (fixed_regs[ARG_POINTER_REGNUM]
	     || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
    return copy_to_reg (virtual_incoming_args_rtx);
  else
    return virtual_incoming_args_rtx;
}
rtx
default_static_chain (const_tree ARG_UNUSED (fndecl_or_type), bool incoming_p)
{
  if (incoming_p)
    {
#ifdef STATIC_CHAIN_INCOMING_REGNUM
      return gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
#endif
    }

#ifdef STATIC_CHAIN_REGNUM
  return gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
#endif

  {
    static bool issued_error;
    if (!issued_error)
      {
	issued_error = true;
	sorry ("nested functions not supported on this target");
      }

    /* It really doesn't matter what we return here, so long as it
       doesn't cause the rest of the compiler to crash.  */
    return gen_rtx_MEM (Pmode, stack_pointer_rtx);
  }
}

void
default_trampoline_init (rtx ARG_UNUSED (m_tramp), tree ARG_UNUSED (t_func),
			 rtx ARG_UNUSED (r_chain))
{
  sorry ("nested function trampolines not supported on this target");
}
poly_int64
default_return_pops_args (tree, tree, poly_int64)
{
  return 0;
}

reg_class_t
default_ira_change_pseudo_allocno_class (int regno ATTRIBUTE_UNUSED,
					 reg_class_t cl,
					 reg_class_t best_cl ATTRIBUTE_UNUSED)
{
  return cl;
}

bool
default_lra_p (void)
{
  return true;
}

int
default_register_priority (int hard_regno ATTRIBUTE_UNUSED)
{
  return 0;
}

bool
default_register_usage_leveling_p (void)
{
  return false;
}

bool
default_different_addr_displacement_p (void)
{
  return false;
}
reg_class_t
default_secondary_reload (bool in_p ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED,
			  reg_class_t reload_class_i ATTRIBUTE_UNUSED,
			  machine_mode reload_mode ATTRIBUTE_UNUSED,
			  secondary_reload_info *sri)
{
  enum reg_class rclass = NO_REGS;
  enum reg_class reload_class = (enum reg_class) reload_class_i;

  if (sri->prev_sri && sri->prev_sri->t_icode != CODE_FOR_nothing)
    {
      sri->icode = sri->prev_sri->t_icode;
      return NO_REGS;
    }
#ifdef SECONDARY_INPUT_RELOAD_CLASS
  if (in_p)
    rclass = SECONDARY_INPUT_RELOAD_CLASS (reload_class,
					   MACRO_MODE (reload_mode), x);
#endif
#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
  if (!in_p)
    rclass = SECONDARY_OUTPUT_RELOAD_CLASS (reload_class,
					    MACRO_MODE (reload_mode), x);
#endif
  if (rclass != NO_REGS)
    {
      enum insn_code icode
	= direct_optab_handler (in_p ? reload_in_optab : reload_out_optab,
				reload_mode);

      if (icode != CODE_FOR_nothing
	  && !insn_operand_matches (icode, in_p, x))
	icode = CODE_FOR_nothing;
      else if (icode != CODE_FOR_nothing)
	{
	  const char *insn_constraint, *scratch_constraint;
	  enum reg_class insn_class, scratch_class;

	  gcc_assert (insn_data[(int) icode].n_operands == 3);
	  insn_constraint = insn_data[(int) icode].operand[!in_p].constraint;
	  if (!*insn_constraint)
	    insn_class = ALL_REGS;
	  else
	    {
	      if (in_p)
		{
		  gcc_assert (*insn_constraint == '=');
		  insn_constraint++;
		}
	      insn_class = (reg_class_for_constraint
			    (lookup_constraint (insn_constraint)));
	      gcc_assert (insn_class != NO_REGS);
	    }

	  scratch_constraint = insn_data[(int) icode].operand[2].constraint;
	  /* The scratch register's constraint must start with "=&",
	     except for an input reload, where only "=" is necessary,
	     and where it might be beneficial to re-use registers from
	     the input.  */
	  gcc_assert (scratch_constraint[0] == '='
		      && (in_p || scratch_constraint[1] == '&'));
	  scratch_constraint++;
	  if (*scratch_constraint == '&')
	    scratch_constraint++;
	  scratch_class = (reg_class_for_constraint
			   (lookup_constraint (scratch_constraint)));

	  if (reg_class_subset_p (reload_class, insn_class))
	    {
	      gcc_assert (scratch_class == rclass);
	      rclass = NO_REGS;
	    }
	  else
	    rclass = insn_class;
	}
      if (rclass == NO_REGS)
	sri->icode = icode;
      else
	sri->t_icode = icode;
    }
  return rclass;
}
/* The default implementation of TARGET_SECONDARY_MEMORY_NEEDED_MODE.  */

machine_mode
default_secondary_memory_needed_mode (machine_mode mode)
{
  if (!targetm.lra_p ()
      && known_lt (GET_MODE_BITSIZE (mode), BITS_PER_WORD)
      && INTEGRAL_MODE_P (mode))
    return mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0).require ();
  return mode;
}
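/* For example (illustrative): on a 32-bit target using the old reload
   pass, spilling a QImode value through memory uses an SImode stack
   slot, so the full word is written and reread; under LRA, or for
   SImode and wider, the original mode is kept.  */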
/* By default, if flag_pic is true, then neither local nor global relocs
   should be placed in readonly memory.  */

int
default_reloc_rw_mask (void)
{
  return flag_pic ? 3 : 0;
}

/* By default, address diff vectors are generated
   for jump tables when flag_pic is true.  */

bool
default_generate_pic_addr_diff_vec (void)
{
  return flag_pic;
}

/* Record an element in the table of global constructors.  SYMBOL is
   a SYMBOL_REF of the function to be called; PRIORITY is a number
   between 0 and MAX_INIT_PRIORITY.  */

void
default_asm_out_constructor (rtx symbol ATTRIBUTE_UNUSED,
			     int priority ATTRIBUTE_UNUSED)
{
  sorry ("global constructors not supported on this target");
}

/* Likewise for global destructors.  */

void
default_asm_out_destructor (rtx symbol ATTRIBUTE_UNUSED,
			    int priority ATTRIBUTE_UNUSED)
{
  sorry ("global destructors not supported on this target");
}
/* By default, do no modification.  */
tree default_mangle_decl_assembler_name (tree decl ATTRIBUTE_UNUSED,
					 tree id)
{
  return id;
}

/* The default implementation of TARGET_STATIC_RTX_ALIGNMENT.  */

HOST_WIDE_INT
default_static_rtx_alignment (machine_mode mode)
{
  return GET_MODE_ALIGNMENT (mode);
}

/* The default implementation of TARGET_CONSTANT_ALIGNMENT.  */

HOST_WIDE_INT
default_constant_alignment (const_tree, HOST_WIDE_INT align)
{
  return align;
}

/* An implementation of TARGET_CONSTANT_ALIGNMENT that aligns strings
   to at least BITS_PER_WORD but otherwise makes no changes.  */

HOST_WIDE_INT
constant_alignment_word_strings (const_tree exp, HOST_WIDE_INT align)
{
  if (TREE_CODE (exp) == STRING_CST)
    return MAX (align, BITS_PER_WORD);
  return align;
}

/* Default to natural alignment for vector types, bounded by
   MAX_OFILE_ALIGNMENT.  */

HOST_WIDE_INT
default_vector_alignment (const_tree type)
{
  unsigned HOST_WIDE_INT align = MAX_OFILE_ALIGNMENT;
  tree size = TYPE_SIZE (type);
  if (tree_fits_uhwi_p (size))
    align = tree_to_uhwi (size);
  if (align >= MAX_OFILE_ALIGNMENT)
    return MAX_OFILE_ALIGNMENT;
  return MAX (align, GET_MODE_ALIGNMENT (TYPE_MODE (type)));
}
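/* For example (illustrative): a 16-byte V4SF vector has a TYPE_SIZE
   of 128 bits, so its default alignment is 128 bits; a hypothetical
   vector wider than MAX_OFILE_ALIGNMENT would instead be capped at
   what the object file format can express.  */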
/* The default implementation of
   TARGET_VECTORIZE_PREFERRED_VECTOR_ALIGNMENT.  */

poly_uint64
default_preferred_vector_alignment (const_tree type)
{
  return TYPE_ALIGN (type);
}
/* By default assume vectors of element TYPE require a multiple of the natural
   alignment of TYPE.  TYPE is naturally aligned if IS_PACKED is false.  */
bool
default_builtin_vector_alignment_reachable (const_tree /*type*/, bool is_packed)
{
  return ! is_packed;
}

/* By default, assume that a target supports misaligned memory accesses of
   any factor if it supports the movmisalign pattern.
   is_packed is true if the memory access is defined in a packed struct.  */
bool
default_builtin_support_vector_misalignment (machine_mode mode,
					     const_tree type ATTRIBUTE_UNUSED,
					     int misalignment ATTRIBUTE_UNUSED,
					     bool is_packed ATTRIBUTE_UNUSED)
{
  if (optab_handler (movmisalign_optab, mode) != CODE_FOR_nothing)
    return true;
  return false;
}

/* By default, only attempt to parallelize bitwise operations, and
   possibly adds/subtracts using bit-twiddling.  */

machine_mode
default_preferred_simd_mode (scalar_mode)
{
  return word_mode;
}
/* By default do not split reductions further.  */

machine_mode
default_split_reduction (machine_mode mode)
{
  return mode;
}

/* By default only the preferred vector mode is tried.  */

unsigned int
default_autovectorize_vector_modes (vector_modes *, bool)
{
  return 0;
}

/* The default implementation of TARGET_VECTORIZE_RELATED_MODE.  */

opt_machine_mode
default_vectorize_related_mode (machine_mode vector_mode,
				scalar_mode element_mode,
				poly_uint64 nunits)
{
  machine_mode result_mode;
  if ((maybe_ne (nunits, 0U)
       || multiple_p (GET_MODE_SIZE (vector_mode),
		      GET_MODE_SIZE (element_mode), &nunits))
      && mode_for_vector (element_mode, nunits).exists (&result_mode)
      && VECTOR_MODE_P (result_mode)
      && targetm.vector_mode_supported_p (result_mode))
    return result_mode;

  return opt_machine_mode ();
}
/* By default a vector of integers is used as a mask.  */

opt_machine_mode
default_get_mask_mode (machine_mode mode)
{
  return related_int_vector_mode (mode);
}

/* By default consider masked stores to be expensive.  */

bool
default_empty_mask_is_expensive (unsigned ifn)
{
  return ifn == IFN_MASK_STORE;
}

/* By default, the cost model accumulates three separate costs (prologue,
   loop body, and epilogue) for a vectorized loop or block.  So allocate an
   array of three unsigned ints, set it to zero, and return its address.  */

vector_costs *
default_vectorize_create_costs (vec_info *vinfo, bool costing_for_scalar)
{
  return new vector_costs (vinfo, costing_for_scalar);
}
/* Determine whether or not a pointer mode is valid.  Assume defaults
   of ptr_mode or Pmode - can be overridden.  */
bool
default_valid_pointer_mode (scalar_int_mode mode)
{
  return (mode == ptr_mode || mode == Pmode);
}

/* Determine whether the memory reference specified by REF may alias
   the C library's errno location.  */
bool
default_ref_may_alias_errno (ao_ref *ref)
{
  tree base = ao_ref_base (ref);
  /* The default implementation assumes the errno location is
     a declaration of type int or is always accessed via a
     pointer to int.  We assume that accesses to errno are
     not deliberately obfuscated (even in conforming ways).  */
  if (TYPE_UNSIGNED (TREE_TYPE (base))
      || TYPE_MODE (TREE_TYPE (base)) != TYPE_MODE (integer_type_node))
    return false;
  /* The default implementation assumes an errno location declaration
     is never defined in the current compilation unit and may not be
     aliased by a local variable.  */
  if (VAR_P (base)
      && DECL_EXTERNAL (base)
      && !TREE_STATIC (base))
    return true;
  else if (TREE_CODE (base) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      return !pi || pi->pt.anything || pi->pt.nonlocal;
    }
  return false;
}
/* Return the mode for a pointer to a given ADDRSPACE,
   defaulting to ptr_mode for all address spaces.  */

scalar_int_mode
default_addr_space_pointer_mode (addr_space_t addrspace ATTRIBUTE_UNUSED)
{
  return ptr_mode;
}

/* Return the mode for an address in a given ADDRSPACE,
   defaulting to Pmode for all address spaces.  */

scalar_int_mode
default_addr_space_address_mode (addr_space_t addrspace ATTRIBUTE_UNUSED)
{
  return Pmode;
}

/* Named address space version of valid_pointer_mode.
   To match the above, the same modes apply to all address spaces.  */

bool
default_addr_space_valid_pointer_mode (scalar_int_mode mode,
				       addr_space_t as ATTRIBUTE_UNUSED)
{
  return targetm.valid_pointer_mode (mode);
}

/* Some places still assume that all pointer or address modes are the
   standard Pmode and ptr_mode.  These optimizations become invalid if
   the target actually supports multiple different modes.  For now,
   we disable such optimizations on such targets, using this function.  */

bool
target_default_pointer_address_modes_p (void)
{
  if (targetm.addr_space.address_mode != default_addr_space_address_mode)
    return false;
  if (targetm.addr_space.pointer_mode != default_addr_space_pointer_mode)
    return false;

  return true;
}
/* Named address space version of legitimate_address_p.
   By default, all address spaces have the same form.  */

bool
default_addr_space_legitimate_address_p (machine_mode mode, rtx mem,
					 bool strict,
					 addr_space_t as ATTRIBUTE_UNUSED)
{
  return targetm.legitimate_address_p (mode, mem, strict);
}

/* Named address space version of LEGITIMIZE_ADDRESS.
   By default, all address spaces have the same form.  */

rtx
default_addr_space_legitimize_address (rtx x, rtx oldx, machine_mode mode,
				       addr_space_t as ATTRIBUTE_UNUSED)
{
  return targetm.legitimize_address (x, oldx, mode);
}

/* The default hook for determining if one named address space is a subset of
   another and to return which address space to use as the common address
   space.  */

bool
default_addr_space_subset_p (addr_space_t subset, addr_space_t superset)
{
  return (subset == superset);
}

/* The default hook for determining if 0 within a named address
   space is a valid address.  */

bool
default_addr_space_zero_address_valid (addr_space_t as ATTRIBUTE_UNUSED)
{
  return false;
}

/* The default hook for debugging the address space is to return the
   address space number to indicate DW_AT_address_class.  */
int
default_addr_space_debug (addr_space_t as)
{
  return as;
}

/* The default hook implementation for TARGET_ADDR_SPACE_DIAGNOSE_USAGE.
   Don't complain about any address space.  */

void
default_addr_space_diagnose_usage (addr_space_t, location_t)
{
}

/* The default hook for TARGET_ADDR_SPACE_CONVERT.  This hook should never be
   called for targets with only a generic address space.  */

rtx
default_addr_space_convert (rtx op ATTRIBUTE_UNUSED,
			    tree from_type ATTRIBUTE_UNUSED,
			    tree to_type ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}
/* The default implementation of TARGET_HARD_REGNO_NREGS.  */

unsigned int
default_hard_regno_nregs (unsigned int, machine_mode mode)
{
  /* Targets with variable-sized modes must provide their own definition
     of this hook.  */
  return CEIL (GET_MODE_SIZE (mode).to_constant (), UNITS_PER_WORD);
}

bool
default_hard_regno_scratch_ok (unsigned int regno ATTRIBUTE_UNUSED)
{
  return true;
}

/* The default implementation of TARGET_MODE_DEPENDENT_ADDRESS_P.  */

bool
default_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED,
				  addr_space_t addrspace ATTRIBUTE_UNUSED)
{
  return false;
}
extern bool default_new_address_profitable_p (rtx, rtx);

/* The default implementation of TARGET_NEW_ADDRESS_PROFITABLE_P.  */

bool
default_new_address_profitable_p (rtx memref ATTRIBUTE_UNUSED,
				  rtx_insn *insn ATTRIBUTE_UNUSED,
				  rtx new_addr ATTRIBUTE_UNUSED)
{
  return true;
}
bool
default_target_option_valid_attribute_p (tree ARG_UNUSED (fndecl),
					 tree ARG_UNUSED (name),
					 tree ARG_UNUSED (args),
					 int ARG_UNUSED (flags))
{
  warning (OPT_Wattributes,
	   "target attribute is not supported on this machine");

  return false;
}

bool
default_target_option_pragma_parse (tree ARG_UNUSED (args),
				    tree ARG_UNUSED (pop_target))
{
  /* If args is NULL the caller is handle_pragma_pop_options ().  In that case,
     emit no warning because "#pragma GCC pop_target" is valid on targets that
     do not have the "target" pragma.  */
  if (args)
    warning (OPT_Wpragmas,
	     "%<#pragma GCC target%> is not supported for this machine");

  return false;
}
bool
default_target_can_inline_p (tree caller, tree callee)
{
  tree callee_opts = DECL_FUNCTION_SPECIFIC_TARGET (callee);
  tree caller_opts = DECL_FUNCTION_SPECIFIC_TARGET (caller);
  if (! callee_opts)
    callee_opts = target_option_default_node;
  if (! caller_opts)
    caller_opts = target_option_default_node;

  /* If both caller and callee have attributes, assume that if the
     pointer is different, the two functions have different target
     options since build_target_option_node uses a hash table for the
     options.  */
  return callee_opts == caller_opts;
}

/* By default, return false to not need to collect any target information
   for inlining.  Target maintainers should re-define the hook if the
   target wants to take advantage of it.  */

bool
default_need_ipa_fn_target_info (const_tree, unsigned int &)
{
  return false;
}

bool
default_update_ipa_fn_target_info (unsigned int &, const gimple *)
{
  return false;
}
/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */

unsigned int
default_case_values_threshold (void)
{
  return (targetm.have_casesi () ? 4 : 5);
}

bool
default_have_conditional_execution (void)
{
  return HAVE_conditional_execution;
}
/* Default that no division by constant operations are special.  */
bool
default_can_special_div_by_const (enum tree_code, tree, wide_int, rtx *, rtx,
				  rtx)
{
  return false;
}

/* By default we assume that c99 functions are present in the runtime,
   but sincos is not.  */
bool
default_libc_has_function (enum function_class fn_class,
			   tree type ATTRIBUTE_UNUSED)
{
  if (fn_class == function_c94
      || fn_class == function_c99_misc
      || fn_class == function_c99_math_complex)
    return true;

  return false;
}

/* By default assume that libc does not have a fast implementation.  */

bool
default_libc_has_fast_function (int fcode ATTRIBUTE_UNUSED)
{
  return false;
}

bool
gnu_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED,
		       tree type ATTRIBUTE_UNUSED)
{
  return true;
}

bool
no_c99_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED,
			  tree type ATTRIBUTE_UNUSED)
{
  return false;
}

/* Assume some c99 functions are present in the runtime, including sincos.  */
bool
bsd_libc_has_function (enum function_class fn_class,
		       tree type ATTRIBUTE_UNUSED)
{
  if (fn_class == function_c94
      || fn_class == function_c99_misc
      || fn_class == function_sincos)
    return true;

  return false;
}
tree
default_builtin_tm_load_store (tree ARG_UNUSED (type))
{
  return NULL_TREE;
}

/* Compute cost of moving registers to/from memory.  */

int
default_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
			  reg_class_t rclass ATTRIBUTE_UNUSED,
			  bool in ATTRIBUTE_UNUSED)
{
#ifndef MEMORY_MOVE_COST
  return (4 + memory_move_secondary_cost (mode, (enum reg_class) rclass, in));
#else
  return MEMORY_MOVE_COST (MACRO_MODE (mode), (enum reg_class) rclass, in);
#endif
}

/* Compute cost of moving data from a register of class FROM to one of
   TO, using MODE.  */

int
default_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
			    reg_class_t from ATTRIBUTE_UNUSED,
			    reg_class_t to ATTRIBUTE_UNUSED)
{
#ifndef REGISTER_MOVE_COST
  return 2;
#else
  return REGISTER_MOVE_COST (MACRO_MODE (mode),
			     (enum reg_class) from, (enum reg_class) to);
#endif
}
/* The default implementation of TARGET_SLOW_UNALIGNED_ACCESS.  */

bool
default_slow_unaligned_access (machine_mode, unsigned int)
{
  return STRICT_ALIGNMENT;
}

/* The default implementation of TARGET_ESTIMATED_POLY_VALUE.  */

HOST_WIDE_INT
default_estimated_poly_value (poly_int64 x, poly_value_estimate_kind)
{
  return x.coeffs[0];
}

/* For hooks which use the MOVE_RATIO macro, this gives the legacy default
   behavior.  SPEED_P is true if we are compiling for speed.  */

static unsigned int
get_move_ratio (bool speed_p ATTRIBUTE_UNUSED)
{
  unsigned int move_ratio;
#ifdef MOVE_RATIO
  move_ratio = (unsigned int) MOVE_RATIO (speed_p);
#else
#if defined (HAVE_cpymemqi) || defined (HAVE_cpymemhi) || defined (HAVE_cpymemsi) || defined (HAVE_cpymemdi) || defined (HAVE_cpymemti)
  move_ratio = 2;
#else /* No cpymem patterns, pick a default.  */
  move_ratio = ((speed_p) ? 15 : 3);
#endif
#endif
  return move_ratio;
}
/* Return TRUE if the move_by_pieces/set_by_pieces infrastructure should be
   used; return FALSE if the cpymem/setmem optab should be expanded, or
   a call to memcpy emitted.  */

bool
default_use_by_pieces_infrastructure_p (unsigned HOST_WIDE_INT size,
					unsigned int alignment,
					enum by_pieces_operation op,
					bool speed_p)
{
  unsigned int max_size = 0;
  unsigned int ratio = 0;

  switch (op)
    {
    case CLEAR_BY_PIECES:
      max_size = STORE_MAX_PIECES;
      ratio = CLEAR_RATIO (speed_p);
      break;
    case MOVE_BY_PIECES:
      max_size = MOVE_MAX_PIECES;
      ratio = get_move_ratio (speed_p);
      break;
    case SET_BY_PIECES:
      max_size = STORE_MAX_PIECES;
      ratio = SET_RATIO (speed_p);
      break;
    case STORE_BY_PIECES:
      max_size = STORE_MAX_PIECES;
      ratio = get_move_ratio (speed_p);
      break;
    case COMPARE_BY_PIECES:
      max_size = COMPARE_MAX_PIECES;
      /* Pick a likely default, just as in get_move_ratio.  */
      ratio = speed_p ? 15 : 3;
      break;
    }

  return by_pieces_ninsns (size, alignment, max_size + 1, op) < ratio;
}
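/* Worked example (illustrative numbers): copying 16 bytes with word
   alignment on a 64-bit target needs two word moves; with a
   speed-optimized ratio of 15, 2 < 15, so the copy is expanded inline
   by pieces instead of calling memcpy.  */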
/* This hook controls code generation for expanding a memcmp operation by
   pieces.  Return 1 for the normal pattern of compare/jump after each pair
   of loads, or a higher number to reduce the number of branches.  */

int
default_compare_by_pieces_branch_ratio (machine_mode)
{
  return 1;
}

/* Helper for default_print_patchable_function_entry and other
   print_patchable_function_entry hook implementations.  */

void
default_print_patchable_function_entry_1 (FILE *file,
					  unsigned HOST_WIDE_INT
					  patch_area_size,
					  bool record_p,
					  unsigned int flags)
{
  const char *nop_templ = 0;
  int code_num;
  rtx_insn *my_nop = make_insn_raw (gen_nop ());

  /* We use the template alone, relying on the (currently sane) assumption
     that the NOP template does not have variable operands.  */
  code_num = recog_memoized (my_nop);
  nop_templ = get_insn_template (code_num, my_nop);

  if (record_p && targetm_common.have_named_sections)
    {
      char buf[256];
      static int patch_area_number;
      section *previous_section = in_section;
      const char *asm_op = integer_asm_op (POINTER_SIZE_UNITS, false);

      gcc_assert (asm_op != NULL);
      patch_area_number++;
      ASM_GENERATE_INTERNAL_LABEL (buf, "LPFE", patch_area_number);

      section *sect = get_section ("__patchable_function_entries",
				   flags, current_function_decl);
      if (HAVE_COMDAT_GROUP && DECL_COMDAT_GROUP (current_function_decl))
	switch_to_comdat_section (sect, current_function_decl);
      else
	switch_to_section (sect);
      assemble_align (POINTER_SIZE);
      fputs (asm_op, file);
      assemble_name_raw (file, buf);
      fputc ('\n', file);

      switch_to_section (previous_section);
      ASM_OUTPUT_LABEL (file, buf);
    }

  unsigned i;
  for (i = 0; i < patch_area_size; ++i)
    output_asm_insn (nop_templ, NULL);
}
/* Write PATCH_AREA_SIZE NOPs into the asm outfile FILE around a function
   entry.  If RECORD_P is true and the target supports named sections,
   the location of the NOPs will be recorded in a special object section
   called "__patchable_function_entries".  This routine may be called
   twice per function to put NOPs before and after the function
   entry.  */

void
default_print_patchable_function_entry (FILE *file,
					unsigned HOST_WIDE_INT patch_area_size,
					bool record_p)
{
  unsigned int flags = SECTION_WRITE | SECTION_RELRO;
  if (HAVE_GAS_SECTION_LINK_ORDER)
    flags |= SECTION_LINK_ORDER;
  default_print_patchable_function_entry_1 (file, patch_area_size, record_p,
					    flags);
}
bool
default_profile_before_prologue (void)
{
#ifdef PROFILE_BEFORE_PROLOGUE
  return true;
#else
  return false;
#endif
}

/* The default implementation of TARGET_PREFERRED_RELOAD_CLASS.  */

reg_class_t
default_preferred_reload_class (rtx x ATTRIBUTE_UNUSED,
				reg_class_t rclass)
{
#ifdef PREFERRED_RELOAD_CLASS
  return (reg_class_t) PREFERRED_RELOAD_CLASS (x, (enum reg_class) rclass);
#else
  return rclass;
#endif
}

/* The default implementation of TARGET_PREFERRED_OUTPUT_RELOAD_CLASS.  */

reg_class_t
default_preferred_output_reload_class (rtx x ATTRIBUTE_UNUSED,
				       reg_class_t rclass)
{
  return rclass;
}

/* The default implementation of TARGET_PREFERRED_RENAME_CLASS.  */
reg_class_t
default_preferred_rename_class (reg_class_t rclass ATTRIBUTE_UNUSED)
{
  return NO_REGS;
}

/* The default implementation of TARGET_CLASS_LIKELY_SPILLED_P.  */

bool
default_class_likely_spilled_p (reg_class_t rclass)
{
  return (reg_class_size[(int) rclass] == 1);
}

/* The default implementation of TARGET_CLASS_MAX_NREGS.  */

unsigned char
default_class_max_nregs (reg_class_t rclass ATTRIBUTE_UNUSED,
			 machine_mode mode ATTRIBUTE_UNUSED)
{
#ifdef CLASS_MAX_NREGS
  return (unsigned char) CLASS_MAX_NREGS ((enum reg_class) rclass,
					  MACRO_MODE (mode));
#else
  /* Targets with variable-sized modes must provide their own definition
     of this hook.  */
  unsigned int size = GET_MODE_SIZE (mode).to_constant ();
  return (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
#endif
}
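/* For example (illustrative): with 4-byte UNITS_PER_WORD and no
   CLASS_MAX_NREGS definition, an 8-byte DImode value needs
   (8 + 4 - 1) / 4 = 2 registers of any class.  */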
/* Determine the debugging unwind mechanism for the target.  */

enum unwind_info_type
default_debug_unwind_info (void)
{
  /* If the target wants to force the use of dwarf2 unwind info, let it.  */
  /* ??? Change all users to the hook, then poison this.  */
#ifdef DWARF2_FRAME_INFO
  if (DWARF2_FRAME_INFO)
    return UI_DWARF2;
#endif

  /* Otherwise, only turn it on if dwarf2 debugging is enabled.  */
#ifdef DWARF2_DEBUGGING_INFO
  if (dwarf_debuginfo_p ())
    return UI_DWARF2;
#endif

  return UI_NONE;
}

/* Targets that set NUM_POLY_INT_COEFFS to something greater than 1
   must define this hook.  */

unsigned int
default_dwarf_poly_indeterminate_value (unsigned int, unsigned int *, int *)
{
  gcc_unreachable ();
}

/* Determine the correct mode for a Dwarf frame register that represents
   register REGNO.  */

machine_mode
default_dwarf_frame_reg_mode (int regno)
{
  machine_mode save_mode = reg_raw_mode[regno];

  if (targetm.hard_regno_call_part_clobbered (eh_edge_abi.id (),
					      regno, save_mode))
    save_mode = choose_hard_reg_mode (regno, 1, &eh_edge_abi);
  return save_mode;
}

/* To be used by targets where reg_raw_mode doesn't return the right
   mode for registers used in apply_builtin_return and apply_builtin_arg.  */

fixed_size_mode
default_get_reg_raw_mode (int regno)
{
  /* Targets must override this hook if the underlying register is
     variable-sized.  */
  return as_a <fixed_size_mode> (reg_raw_mode[regno]);
}

/* Return true if a leaf function should stay leaf even with profiling
   enabled.  */

bool
default_keep_leaf_when_profiled ()
{
  return false;
}
/* Return true if the state of option OPTION should be stored in PCH files
   and checked by default_pch_valid_p.  Store the option's current state
   in STATE if so.  */

static inline bool
option_affects_pch_p (int option, struct cl_option_state *state)
{
  if ((cl_options[option].flags & CL_TARGET) == 0)
    return false;
  if ((cl_options[option].flags & CL_PCH_IGNORE) != 0)
    return false;
  if (option_flag_var (option, &global_options) == &target_flags)
    if (targetm.check_pch_target_flags)
      return false;
  return get_option_state (&global_options, option, state);
}

/* Default version of get_pch_validity.
   By default, every flag difference is fatal; that will be mostly right for
   most targets, but completely right for very few.  */

void *
default_get_pch_validity (size_t *sz)
{
  struct cl_option_state state;
  size_t i;
  char *result, *r;

  *sz = 2;
  if (targetm.check_pch_target_flags)
    *sz += sizeof (target_flags);
  for (i = 0; i < cl_options_count; i++)
    if (option_affects_pch_p (i, &state))
      *sz += state.size;

  result = r = XNEWVEC (char, *sz);
  r[0] = flag_pic;
  r[1] = flag_pie;
  r += 2;
  if (targetm.check_pch_target_flags)
    {
      memcpy (r, &target_flags, sizeof (target_flags));
      r += sizeof (target_flags);
    }

  for (i = 0; i < cl_options_count; i++)
    if (option_affects_pch_p (i, &state))
      {
	memcpy (r, state.data, state.size);
	r += state.size;
      }

  return result;
}

/* Return a message which says that a PCH file was created with a different
   setting of OPTION.  */

static const char *
pch_option_mismatch (const char *option)
{
  return xasprintf (_("created and used with differing settings of '%s'"),
		    option);
}
/* Default version of pch_valid_p.  */

const char *
default_pch_valid_p (const void *data_p, size_t len ATTRIBUTE_UNUSED)
{
  struct cl_option_state state;
  const char *data = (const char *)data_p;
  size_t i;

  /* -fpic and -fpie also usually make a PCH invalid.  */
  if (data[0] != flag_pic)
    return _("created and used with different settings of %<-fpic%>");
  if (data[1] != flag_pie)
    return _("created and used with different settings of %<-fpie%>");
  data += 2;

  /* Check target_flags.  */
  if (targetm.check_pch_target_flags)
    {
      int tf;
      const char *r;

      memcpy (&tf, data, sizeof (target_flags));
      data += sizeof (target_flags);
      r = targetm.check_pch_target_flags (tf);
      if (r != NULL)
	return r;
    }

  for (i = 0; i < cl_options_count; i++)
    if (option_affects_pch_p (i, &state))
      {
	if (memcmp (data, state.data, state.size) != 0)
	  return pch_option_mismatch (cl_options[i].opt_text);
	data += state.size;
      }

  return NULL;
}

/* Default version of cstore_mode.  */

scalar_int_mode
default_cstore_mode (enum insn_code icode)
{
  return as_a <scalar_int_mode> (insn_data[(int) icode].operand[0].mode);
}
/* Default version of member_type_forces_blk.  */

bool
default_member_type_forces_blk (const_tree, machine_mode)
{
  return false;
}

/* Default version of canonicalize_comparison.  */

void
default_canonicalize_comparison (int *, rtx *, rtx *, bool)
{
}

/* Default implementation of TARGET_ATOMIC_ASSIGN_EXPAND_FENV.  */

void
default_atomic_assign_expand_fenv (tree *, tree *, tree *)
{
}

#ifndef PAD_VARARGS_DOWN
#define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
#endif

/* Build an indirect-ref expression over the given TREE, which represents a
   piece of a va_arg() expansion.  */
tree
build_va_arg_indirect_ref (tree addr)
{
  addr = build_simple_mem_ref_loc (EXPR_LOCATION (addr), addr);
  return addr;
}

/* The "standard" implementation of va_arg: read the value from the
   current (padded) address and increment by the (padded) size.  */
tree
std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
			  gimple_seq *post_p)
{
  tree addr, t, type_size, rounded_size, valist_tmp;
  unsigned HOST_WIDE_INT align, boundary;
  bool indirect;

  /* All of the alignment and movement below is for args-grow-up machines.
     As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
     implement their own specialized gimplify_va_arg_expr routines.  */
  if (ARGS_GROW_DOWNWARD)
    gcc_unreachable ();

  indirect = pass_va_arg_by_reference (type);
  if (indirect)
    type = build_pointer_type (type);

  if (targetm.calls.split_complex_arg
      && TREE_CODE (type) == COMPLEX_TYPE
      && targetm.calls.split_complex_arg (type))
    {
      tree real_part, imag_part;

      real_part = std_gimplify_va_arg_expr (valist,
					    TREE_TYPE (type), pre_p, NULL);
      real_part = get_initialized_tmp_var (real_part, pre_p);

      imag_part = std_gimplify_va_arg_expr (unshare_expr (valist),
					    TREE_TYPE (type), pre_p, NULL);
      imag_part = get_initialized_tmp_var (imag_part, pre_p);

      return build2 (COMPLEX_EXPR, type, real_part, imag_part);
    }

  align = PARM_BOUNDARY / BITS_PER_UNIT;
  boundary = targetm.calls.function_arg_boundary (TYPE_MODE (type), type);

  /* When we align parameter on stack for caller, if the parameter
     alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
     aligned at MAX_SUPPORTED_STACK_ALIGNMENT.  We will match callee
     here with caller.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  boundary /= BITS_PER_UNIT;

  /* Hoist the valist value into a temporary for the moment.  */
  valist_tmp = get_initialized_tmp_var (valist, pre_p);

  /* va_list pointer is aligned to PARM_BOUNDARY.  If argument actually
     requires greater alignment, we must perform dynamic alignment.  */
  if (boundary > align
      && !TYPE_EMPTY_P (type)
      && !integer_zerop (TYPE_SIZE (type)))
    {
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build_pointer_plus_hwi (valist_tmp, boundary - 1));
      gimplify_and_add (t, pre_p);

      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build2 (BIT_AND_EXPR, TREE_TYPE (valist),
			       valist_tmp,
			       build_int_cst (TREE_TYPE (valist), -boundary)));
      gimplify_and_add (t, pre_p);
    }
  else
    boundary = align;
  /* If the actual alignment is less than the alignment of the type,
     adjust the type accordingly so that we don't assume strict alignment
     when dereferencing the pointer.  */
  boundary *= BITS_PER_UNIT;
  if (boundary < TYPE_ALIGN (type))
    {
      type = build_variant_type_copy (type);
      SET_TYPE_ALIGN (type, boundary);
    }

  /* Compute the rounded size of the type.  */
  type_size = arg_size_in_bytes (type);
  rounded_size = round_up (type_size, align);

  /* Reduce rounded_size so it's sharable with the postqueue.  */
  gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);

  /* Get AP.  */
  addr = valist_tmp;
  if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
    {
      /* Small args are padded downward.  */
      t = fold_build2_loc (input_location, GT_EXPR, sizetype,
			   rounded_size, size_int (align));
      t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
		       size_binop (MINUS_EXPR, rounded_size, type_size));
      addr = fold_build_pointer_plus (addr, t);
    }

  /* Compute new value for AP.  */
  t = fold_build_pointer_plus (valist_tmp, rounded_size);
  t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
  gimplify_and_add (t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  if (indirect)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
}
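/* Worked example (illustrative): reading an int (size 4) on a
   big-endian target where PAD_VARARGS_DOWN holds and align is 8:
   rounded_size is round_up (4, 8) = 8, so the value is read at
   ap + (8 - 4) = ap + 4 (the high half of the slot) and ap then
   advances by the full 8 bytes.  */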
/* An implementation of TARGET_CAN_USE_DOLOOP_P for targets that do
   not support nested low-overhead loops.  */

bool
can_use_doloop_if_innermost (const widest_int &, const widest_int &,
			     unsigned int loop_depth, bool)
{
  return loop_depth == 1;
}

/* Default implementation of TARGET_OPTAB_SUPPORTED_P.  */

bool
default_optab_supported_p (int, machine_mode, machine_mode, optimization_type)
{
  return true;
}

/* Default implementation of TARGET_MAX_NOCE_IFCVT_SEQ_COST.  */

unsigned int
default_max_noce_ifcvt_seq_cost (edge e)
{
  bool predictable_p = predictable_edge_p (e);

  if (predictable_p)
    {
      if (OPTION_SET_P (param_max_rtl_if_conversion_predictable_cost))
	return param_max_rtl_if_conversion_predictable_cost;
    }
  else
    {
      if (OPTION_SET_P (param_max_rtl_if_conversion_unpredictable_cost))
	return param_max_rtl_if_conversion_unpredictable_cost;
    }

  return BRANCH_COST (true, predictable_p) * COSTS_N_INSNS (3);
}

/* Default implementation of TARGET_MIN_ARITHMETIC_PRECISION.  */

unsigned int
default_min_arithmetic_precision (void)
{
  return WORD_REGISTER_OPERATIONS ? BITS_PER_WORD : BITS_PER_UNIT;
}

/* Default implementation of TARGET_C_EXCESS_PRECISION.  */

enum flt_eval_method
default_excess_precision (enum excess_precision_type ATTRIBUTE_UNUSED)
{
  return FLT_EVAL_METHOD_PROMOTE_TO_FLOAT;
}
/* Default implementation for
   TARGET_STACK_CLASH_PROTECTION_ALLOCA_PROBE_RANGE.  */
HOST_WIDE_INT
default_stack_clash_protection_alloca_probe_range (void)
{
  return 0;
}

/* The default implementation of TARGET_EARLY_REMAT_MODES.  */

void
default_select_early_remat_modes (sbitmap)
{
}

/* The default implementation of TARGET_PREFERRED_ELSE_VALUE.  */

tree
default_preferred_else_value (unsigned, tree type, unsigned, tree *)
{
  return build_zero_cst (type);
}
/* Default implementation of TARGET_HAVE_SPECULATION_SAFE_VALUE.  */
bool
default_have_speculation_safe_value (bool active ATTRIBUTE_UNUSED)
{
#ifdef HAVE_speculation_barrier
  return active ? HAVE_speculation_barrier : true;
#else
  return false;
#endif
}

/* Alternative implementation of TARGET_HAVE_SPECULATION_SAFE_VALUE
   that can be used on targets that never have speculative execution.  */
bool
speculation_safe_value_not_needed (bool active)
{
  return !active;
}

/* Default implementation of the speculation-safe-load builtin.  This
   implementation simply copies val to result and generates a
   speculation_barrier insn, if such a pattern is defined.  */
rtx
default_speculation_safe_value (machine_mode mode ATTRIBUTE_UNUSED,
				rtx result, rtx val,
				rtx failval ATTRIBUTE_UNUSED)
{
  emit_move_insn (result, val);

#ifdef HAVE_speculation_barrier
  /* Assume the target knows what it is doing: if it defines a
     speculation barrier, but it is not enabled, then assume that one
     isn't needed.  */
  if (HAVE_speculation_barrier)
    emit_insn (gen_speculation_barrier ());
#endif

  return result;
}
/* How many bits to shift in order to access the tag bits.
   The default is to store the tag in the top 8 bits of a 64-bit pointer,
   hence shifting 56 bits will leave just the tag.  */
#define HWASAN_SHIFT (GET_MODE_PRECISION (Pmode) - 8)
#define HWASAN_SHIFT_RTX GEN_INT (HWASAN_SHIFT)
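/* Layout sketch (illustrative, for a 64-bit Pmode):

     bit 63                                       bit 0
     +----------+---------------------------------+
     | tag      |          address bits           |
     | (8 bits) |          (56 bits)              |
     +----------+---------------------------------+

   so HWASAN_SHIFT is 64 - 8 = 56, and (ptr >> 56) & 0xff recovers
   the tag.  */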
bool
default_memtag_can_tag_addresses ()
{
  return false;
}

uint8_t
default_memtag_tag_size ()
{
  return 8;
}

uint8_t
default_memtag_granule_size ()
{
  return 16;
}

/* The default implementation of TARGET_MEMTAG_INSERT_RANDOM_TAG.  */
rtx
default_memtag_insert_random_tag (rtx untagged, rtx target)
{
  gcc_assert (param_hwasan_instrument_stack);
  if (param_hwasan_random_frame_tag)
    {
      rtx fn = init_one_libfunc ("__hwasan_generate_tag");
      rtx new_tag = emit_library_call_value (fn, NULL_RTX, LCT_NORMAL, QImode);
      return targetm.memtag.set_tag (untagged, new_tag, target);
    }
  else
    {
      /* NOTE: The kernel API does not have __hwasan_generate_tag exposed.
	 In the future we may add the option to emit random tags with inline
	 instrumentation instead of function calls.  This would be the same
	 between the kernel and userland.  */
      return untagged;
    }
}
/* The default implementation of TARGET_MEMTAG_ADD_TAG.  */
rtx
default_memtag_add_tag (rtx base, poly_int64 offset, uint8_t tag_offset)
{
  /* Need to look into what the most efficient code sequence is.
     This is a code sequence that would be emitted *many* times, so we
     want it as small as possible.

     There are two places where tag overflow is a question:
       - Tagging the shadow stack.
	 (both tagging and untagging).
       - Tagging addressable pointers.

     We need to ensure both behaviors are the same (i.e. that the tag that
     ends up in a pointer after "overflowing" the tag bits with a tag addition
     is the same that ends up in the shadow space).

     The aim is that the behavior of tag addition should follow modulo
     wrapping in both instances.

     The libhwasan code doesn't have any path that increments a pointer's tag,
     which means it has no opinion on what happens when a tag increment
     overflows (and hence we can choose our own behavior).  */

  offset += ((uint64_t)tag_offset << HWASAN_SHIFT);
  return plus_constant (Pmode, base, offset);
}
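/* Worked example (illustrative): with HWASAN_SHIFT of 56, adding
   tag_offset 3 folds the tag bump into the address arithmetic as
   offset += 3 << 56, i.e. a single plus_constant that adjusts both
   the tag byte and the byte offset at once.  */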
/* The default implementation of TARGET_MEMTAG_SET_TAG.  */
rtx
default_memtag_set_tag (rtx untagged, rtx tag, rtx target)
{
  gcc_assert (GET_MODE (untagged) == Pmode && GET_MODE (tag) == QImode);
  tag = expand_simple_binop (Pmode, ASHIFT, tag, HWASAN_SHIFT_RTX, NULL_RTX,
			     /* unsignedp = */1, OPTAB_WIDEN);
  rtx ret = expand_simple_binop (Pmode, IOR, untagged, tag, target,
				 /* unsignedp = */1, OPTAB_DIRECT);
  gcc_assert (ret);
  return ret;
}

/* The default implementation of TARGET_MEMTAG_EXTRACT_TAG.  */
rtx
default_memtag_extract_tag (rtx tagged_pointer, rtx target)
{
  rtx tag = expand_simple_binop (Pmode, LSHIFTRT, tagged_pointer,
				 HWASAN_SHIFT_RTX, target,
				 /* unsignedp = */0, OPTAB_DIRECT);
  rtx ret = gen_lowpart (QImode, tag);
  gcc_assert (ret);
  return ret;
}

/* The default implementation of TARGET_MEMTAG_UNTAGGED_POINTER.  */
rtx
default_memtag_untagged_pointer (rtx tagged_pointer, rtx target)
{
  rtx tag_mask = gen_int_mode ((HOST_WIDE_INT_1U << HWASAN_SHIFT) - 1, Pmode);
  rtx untagged_base = expand_simple_binop (Pmode, AND, tagged_pointer,
					   tag_mask, target, true,
					   OPTAB_DIRECT);
  gcc_assert (untagged_base);
  return untagged_base;
}

/* The default implementation of TARGET_GCOV_TYPE_SIZE.  */
HOST_WIDE_INT
default_gcov_type_size (void)
{
  return TYPE_PRECISION (long_long_integer_type_node) > 32 ? 64 : 32;
}

#include "gt-targhooks.h"