/* Default target hook functions.
- Copyright (C) 2003, 2004, 2005, 2007, 2008, 2009, 2010
- Free Software Foundation, Inc.
+ Copyright (C) 2003-2015 Free Software Foundation, Inc.
This file is part of GCC.
#include "system.h"
#include "coretypes.h"
#include "tm.h"
-#include "machmode.h"
#include "rtl.h"
+#include "alias.h"
#include "tree.h"
+#include "fold-const.h"
+#include "stor-layout.h"
+#include "varasm.h"
+#include "function.h"
+#include "flags.h"
+#include "insn-config.h"
+#include "expmed.h"
+#include "dojump.h"
+#include "explow.h"
+#include "calls.h"
+#include "emit-rtl.h"
+#include "stmt.h"
#include "expr.h"
#include "output.h"
#include "diagnostic-core.h"
-#include "function.h"
#include "target.h"
#include "tm_p.h"
-#include "target-def.h"
-#include "ggc.h"
-#include "hard-reg-set.h"
#include "regs.h"
#include "reload.h"
+#include "insn-codes.h"
#include "optabs.h"
#include "recog.h"
#include "intl.h"
#include "opts.h"
+#include "tree-ssa-alias.h"
+#include "gimple-expr.h"
+#include "gimplify.h"
+#include "stringpool.h"
+#include "tree-ssanames.h"
bool
-default_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
+default_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED,
rtx addr ATTRIBUTE_UNUSED,
bool strict ATTRIBUTE_UNUSED)
{
default_external_libcall (rtx fun ATTRIBUTE_UNUSED)
{
#ifdef ASM_OUTPUT_EXTERNAL_LIBCALL
- ASM_OUTPUT_EXTERNAL_LIBCALL(asm_out_file, fun);
+ ASM_OUTPUT_EXTERNAL_LIBCALL (asm_out_file, fun);
#endif
}
{
int i;
- if (GET_CODE (x) == UNSPEC_VOLATILE
- /* Any floating arithmetic may trap. */
- || (SCALAR_FLOAT_MODE_P (GET_MODE (x))
- && flag_trapping_math))
+ /* Any floating arithmetic may trap. */
+  if (SCALAR_FLOAT_MODE_P (GET_MODE (x)) && flag_trapping_math)
return 1;
for (i = 0; i < XVECLEN (x, 0); ++i)
return 0;
}
-enum machine_mode
+machine_mode
default_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
- enum machine_mode mode,
+ machine_mode mode,
int *punsignedp ATTRIBUTE_UNUSED,
const_tree funtype ATTRIBUTE_UNUSED,
int for_return ATTRIBUTE_UNUSED)
{
- if (for_return == 2)
+ if (type != NULL_TREE && for_return == 2)
return promote_mode (type, mode, punsignedp);
return mode;
}
-enum machine_mode
+machine_mode
default_promote_function_mode_always_promote (const_tree type,
- enum machine_mode mode,
+ machine_mode mode,
int *punsignedp,
const_tree funtype ATTRIBUTE_UNUSED,
int for_return ATTRIBUTE_UNUSED)
return promote_mode (type, mode, punsignedp);
}
-
-enum machine_mode
-default_cc_modes_compatible (enum machine_mode m1, enum machine_mode m2)
+machine_mode
+default_cc_modes_compatible (machine_mode m1, machine_mode m2)
{
if (m1 == m2)
return m1;
rtx
default_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED)
+ machine_mode mode ATTRIBUTE_UNUSED)
{
return x;
}
+bool
+default_legitimize_address_displacement (rtx *disp ATTRIBUTE_UNUSED,
+ rtx *offset ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED)
+{
+ return false;
+}
+
rtx
default_expand_builtin_saveregs (void)
{
}
void
-default_setup_incoming_varargs (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+default_setup_incoming_varargs (cumulative_args_t ca ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
tree type ATTRIBUTE_UNUSED,
int *pretend_arg_size ATTRIBUTE_UNUSED,
int second_time ATTRIBUTE_UNUSED)
/* Generic hook that takes a CUMULATIVE_ARGS pointer and returns false. */
bool
-hook_bool_CUMULATIVE_ARGS_false (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED)
+hook_bool_CUMULATIVE_ARGS_false (cumulative_args_t ca ATTRIBUTE_UNUSED)
{
return false;
}
bool
-default_pretend_outgoing_varargs_named (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED)
+default_pretend_outgoing_varargs_named (cumulative_args_t ca ATTRIBUTE_UNUSED)
{
return (targetm.calls.setup_incoming_varargs
!= default_setup_incoming_varargs);
}
-enum machine_mode
+machine_mode
default_eh_return_filter_mode (void)
{
return targetm.unwind_word_mode ();
}
-enum machine_mode
+machine_mode
default_libgcc_cmp_return_mode (void)
{
return word_mode;
}
-enum machine_mode
+machine_mode
default_libgcc_shift_count_mode (void)
{
return word_mode;
}
-enum machine_mode
+machine_mode
default_unwind_word_mode (void)
{
return word_mode;
/* The default implementation of TARGET_SHIFT_TRUNCATION_MASK. */
unsigned HOST_WIDE_INT
-default_shift_truncation_mask (enum machine_mode mode)
+default_shift_truncation_mask (machine_mode mode)
{
return SHIFT_COUNT_TRUNCATED ? GET_MODE_BITSIZE (mode) - 1 : 0;
}
/* The default implementation of TARGET_MIN_DIVISIONS_FOR_RECIP_MUL. */
unsigned int
-default_min_divisions_for_recip_mul (enum machine_mode mode ATTRIBUTE_UNUSED)
+default_min_divisions_for_recip_mul (machine_mode mode ATTRIBUTE_UNUSED)
{
return have_insn_for (DIV, mode) ? 3 : 2;
}
/* The default implementation of TARGET_MODE_REP_EXTENDED. */
int
-default_mode_rep_extended (enum machine_mode mode ATTRIBUTE_UNUSED,
- enum machine_mode mode_rep ATTRIBUTE_UNUSED)
+default_mode_rep_extended (machine_mode mode ATTRIBUTE_UNUSED,
+ machine_mode mode_rep ATTRIBUTE_UNUSED)
{
return UNKNOWN;
}
/* Generic hook that takes a CUMULATIVE_ARGS pointer and returns true. */
bool
-hook_bool_CUMULATIVE_ARGS_true (CUMULATIVE_ARGS * a ATTRIBUTE_UNUSED)
+hook_bool_CUMULATIVE_ARGS_true (cumulative_args_t a ATTRIBUTE_UNUSED)
{
return true;
}
/* Return machine mode for non-standard suffix
or VOIDmode if non-standard suffixes are unsupported. */
-enum machine_mode
+machine_mode
default_mode_for_suffix (char suffix ATTRIBUTE_UNUSED)
{
return VOIDmode;
return long_long_integer_type_node;
}
-
/* Returns the size of the cookie to use when allocating an array
whose elements have the indicated TYPE. Assumes that it is already
known that a cookie is needed. */
sizetype_size = size_in_bytes (sizetype);
type_align = size_int (TYPE_ALIGN_UNIT (type));
- if (INT_CST_LT_UNSIGNED (type_align, sizetype_size))
+ if (tree_int_cst_lt (type_align, sizetype_size))
cookie_size = sizetype_size;
else
cookie_size = type_align;
of the TARGET_PASS_BY_REFERENCE hook uses just MUST_PASS_IN_STACK. */
bool
-hook_pass_by_reference_must_pass_in_stack (CUMULATIVE_ARGS *c ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED, const_tree type ATTRIBUTE_UNUSED,
+hook_pass_by_reference_must_pass_in_stack (cumulative_args_t c ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED, const_tree type ATTRIBUTE_UNUSED,
bool named_arg ATTRIBUTE_UNUSED)
{
return targetm.calls.must_pass_in_stack (mode, type);
version of the hook is true for all named arguments. */
bool
-hook_callee_copies_named (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+hook_callee_copies_named (cumulative_args_t ca ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
const_tree type ATTRIBUTE_UNUSED, bool named)
{
return named;
return get_identifier (stripped);
}
-/* The default implementation of TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA. */
-
-bool
-default_asm_output_addr_const_extra (FILE *file ATTRIBUTE_UNUSED,
- rtx x ATTRIBUTE_UNUSED)
-{
-#ifdef OUTPUT_ADDR_CONST_EXTRA
- OUTPUT_ADDR_CONST_EXTRA (file, x, fail);
- return true;
-
-fail:
-#endif
- return false;
-}
-
/* True if MODE is valid for the target. By "valid", we mean able to
be manipulated in non-trivial ways. In particular, this means all
the arithmetic is supported.
supported by optabs.c. */
bool
-default_scalar_mode_supported_p (enum machine_mode mode)
+default_scalar_mode_supported_p (machine_mode mode)
{
int precision = GET_MODE_PRECISION (mode);
}
}
+/* Return true if libgcc supports floating-point mode MODE (known to
+ be supported as a scalar mode). */
+
+bool
+default_libgcc_floating_mode_supported_p (machine_mode mode)
+{
+ switch (mode)
+ {
+#ifdef HAVE_SFmode
+ case SFmode:
+#endif
+#ifdef HAVE_DFmode
+ case DFmode:
+#endif
+#ifdef HAVE_XFmode
+ case XFmode:
+#endif
+#ifdef HAVE_TFmode
+ case TFmode:
+#endif
+ return true;
+
+ default:
+ return false;
+ }
+}
+
/* Make some target macros useable by target-independent code. */
bool
targhook_words_big_endian (void)
return !!FLOAT_WORDS_BIG_ENDIAN;
}
+/* True if the target supports floating-point exceptions and rounding
+ modes. */
+
+bool
+default_float_exceptions_rounding_supported_p (void)
+{
+#ifdef HAVE_adddf3
+ return HAVE_adddf3;
+#else
+ return false;
+#endif
+}
+
/* True if the target supports decimal floating point. */
bool
return ENABLE_FIXED_POINT;
}
+/* True if the target supports GNU indirect functions. */
+
+bool
+default_has_ifunc_p (void)
+{
+ return HAVE_GNU_INDIRECT_FUNCTION;
+}
+
/* NULL if INSN insn is valid within a low-overhead loop, otherwise returns
an error message.
these cases. */
const char *
-default_invalid_within_doloop (const_rtx insn)
+default_invalid_within_doloop (const rtx_insn *insn)
{
if (CALL_P (insn))
return "Function call in loop.";
- if (JUMP_TABLE_DATA_P (insn))
+ if (tablejump_p (insn, NULL, NULL) || computed_jump_p (insn))
return "Computed branch in the loop.";
return NULL;
int
default_builtin_vectorization_cost (enum vect_cost_for_stmt type_of_cost,
- tree vectype ATTRIBUTE_UNUSED,
+ tree vectype,
int misalign ATTRIBUTE_UNUSED)
{
+ unsigned elements;
+
switch (type_of_cost)
{
case scalar_stmt:
case scalar_to_vec:
case cond_branch_not_taken:
case vec_perm:
+ case vec_promote_demote:
return 1;
case unaligned_load:
case cond_branch_taken:
return 3;
+ case vec_construct:
+ elements = TYPE_VECTOR_SUBPARTS (vectype);
+ return elements / 2 + 1;
+
default:
gcc_unreachable ();
}
bool
hook_bool_CUMULATIVE_ARGS_mode_tree_bool_false (
- CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ cumulative_args_t ca ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
const_tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
{
return false;
bool
hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true (
- CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ cumulative_args_t ca ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
const_tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
{
return true;
int
hook_int_CUMULATIVE_ARGS_mode_tree_bool_0 (
- CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+ cumulative_args_t ca ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
{
return 0;
}
void
-default_function_arg_advance (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+default_function_arg_advance (cumulative_args_t ca ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
const_tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
{
-#ifdef FUNCTION_ARG_ADVANCE
- CUMULATIVE_ARGS args = *ca;
- FUNCTION_ARG_ADVANCE (args, mode, CONST_CAST_TREE (type), named);
- *ca = args;
-#else
gcc_unreachable ();
-#endif
}
rtx
-default_function_arg (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+default_function_arg (cumulative_args_t ca ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
const_tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
{
-#ifdef FUNCTION_ARG
- return FUNCTION_ARG (*ca, mode, CONST_CAST_TREE (type), named);
-#else
gcc_unreachable ();
-#endif
}
rtx
-default_function_incoming_arg (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
- enum machine_mode mode ATTRIBUTE_UNUSED,
+default_function_incoming_arg (cumulative_args_t ca ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED,
const_tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
{
-#ifdef FUNCTION_INCOMING_ARG
- return FUNCTION_INCOMING_ARG (*ca, mode, CONST_CAST_TREE (type), named);
-#else
gcc_unreachable ();
-#endif
}
unsigned int
-default_function_arg_boundary (enum machine_mode mode ATTRIBUTE_UNUSED,
+default_function_arg_boundary (machine_mode mode ATTRIBUTE_UNUSED,
const_tree type ATTRIBUTE_UNUSED)
{
return PARM_BOUNDARY;
}
+unsigned int
+default_function_arg_round_boundary (machine_mode mode ATTRIBUTE_UNUSED,
+ const_tree type ATTRIBUTE_UNUSED)
+{
+ return PARM_BOUNDARY;
+}
+
void
hook_void_bitmap (bitmap regs ATTRIBUTE_UNUSED)
{
}
rtx
-default_libcall_value (enum machine_mode mode ATTRIBUTE_UNUSED,
+default_libcall_value (machine_mode mode ATTRIBUTE_UNUSED,
const_rtx fun ATTRIBUTE_UNUSED)
{
#ifdef LIBCALL_VALUE
}
rtx
-default_static_chain (const_tree fndecl, bool incoming_p)
+default_static_chain (const_tree ARG_UNUSED (fndecl_or_type), bool incoming_p)
{
- if (!DECL_STATIC_CHAIN (fndecl))
- return NULL;
-
if (incoming_p)
{
#ifdef STATIC_CHAIN_INCOMING_REGNUM
return NO_REGS;
}
-#ifdef IRA_COVER_CLASSES
-const reg_class_t *
-default_ira_cover_classes (void)
+reg_class_t
+default_ira_change_pseudo_allocno_class (int regno ATTRIBUTE_UNUSED,
+ reg_class_t cl)
{
- static reg_class_t classes[] = IRA_COVER_CLASSES;
- return classes;
+ return cl;
+}
+
+bool
+default_lra_p (void)
+{
+ return false;
+}
+
+int
+default_register_priority (int hard_regno ATTRIBUTE_UNUSED)
+{
+ return 0;
+}
+
+bool
+default_register_usage_leveling_p (void)
+{
+ return false;
+}
+
+bool
+default_different_addr_displacement_p (void)
+{
+ return false;
}
-#endif
reg_class_t
default_secondary_reload (bool in_p ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED,
reg_class_t reload_class_i ATTRIBUTE_UNUSED,
- enum machine_mode reload_mode ATTRIBUTE_UNUSED,
+ machine_mode reload_mode ATTRIBUTE_UNUSED,
secondary_reload_info *sri)
{
enum reg_class rclass = NO_REGS;
reload_mode);
if (icode != CODE_FOR_nothing
- && insn_data[(int) icode].operand[in_p].predicate
- && ! insn_data[(int) icode].operand[in_p].predicate (x, reload_mode))
+ && !insn_operand_matches (icode, in_p, x))
icode = CODE_FOR_nothing;
else if (icode != CODE_FOR_nothing)
{
const char *insn_constraint, *scratch_constraint;
- char insn_letter, scratch_letter;
enum reg_class insn_class, scratch_class;
gcc_assert (insn_data[(int) icode].n_operands == 3);
gcc_assert (*insn_constraint == '=');
insn_constraint++;
}
- insn_letter = *insn_constraint;
- insn_class
- = (insn_letter == 'r' ? GENERAL_REGS
- : REG_CLASS_FROM_CONSTRAINT ((unsigned char) insn_letter,
- insn_constraint));
+ insn_class = (reg_class_for_constraint
+ (lookup_constraint (insn_constraint)));
gcc_assert (insn_class != NO_REGS);
}
scratch_constraint++;
if (*scratch_constraint == '&')
scratch_constraint++;
- scratch_letter = *scratch_constraint;
- scratch_class
- = (scratch_letter == 'r' ? GENERAL_REGS
- : REG_CLASS_FROM_CONSTRAINT ((unsigned char) scratch_letter,
- scratch_constraint));
+ scratch_class = (reg_class_for_constraint
+ (lookup_constraint (scratch_constraint)));
if (reg_class_subset_p (reload_class, insn_class))
{
return rclass;
}
-bool
-default_handle_c_option (size_t code ATTRIBUTE_UNUSED,
- const char *arg ATTRIBUTE_UNUSED,
- int value ATTRIBUTE_UNUSED)
-{
- return false;
-}
-
/* By default, if flag_pic is true, then neither local nor global relocs
should be placed in readonly memory. */
return id;
}
+/* Default to natural alignment for vector types. */
+HOST_WIDE_INT
+default_vector_alignment (const_tree type)
+{
+ return tree_to_shwi (TYPE_SIZE (type));
+}
+
bool
default_builtin_vector_alignment_reachable (const_tree type, bool is_packed)
{
memory access if it supports movmisalign patten.
is_packed is true if the memory access is defined in a packed struct. */
bool
-default_builtin_support_vector_misalignment (enum machine_mode mode,
+default_builtin_support_vector_misalignment (machine_mode mode,
const_tree type
ATTRIBUTE_UNUSED,
int misalignment
/* By default, only attempt to parallelize bitwise operations, and
possibly adds/subtracts using bit-twiddling. */
-enum machine_mode
-default_preferred_simd_mode (enum machine_mode mode ATTRIBUTE_UNUSED)
+machine_mode
+default_preferred_simd_mode (machine_mode mode ATTRIBUTE_UNUSED)
{
return word_mode;
}
return 0;
}
+/* By default, the cost model accumulates three separate costs (prologue,
+ loop body, and epilogue) for a vectorized loop or block. So allocate an
+ array of three unsigned ints, set it to zero, and return its address. */
+
+void *
+default_init_cost (struct loop *loop_info ATTRIBUTE_UNUSED)
+{
+ unsigned *cost = XNEWVEC (unsigned, 3);
+ cost[vect_prologue] = cost[vect_body] = cost[vect_epilogue] = 0;
+ return cost;
+}
+
+/* By default, the cost model looks up the cost of the given statement
+ kind and mode, multiplies it by the occurrence count, accumulates
+ it into the cost specified by WHERE, and returns the cost added. */
+
+unsigned
+default_add_stmt_cost (void *data, int count, enum vect_cost_for_stmt kind,
+ struct _stmt_vec_info *stmt_info, int misalign,
+ enum vect_cost_model_location where)
+{
+ unsigned *cost = (unsigned *) data;
+ unsigned retval = 0;
+
+ tree vectype = stmt_info ? stmt_vectype (stmt_info) : NULL_TREE;
+ int stmt_cost = targetm.vectorize.builtin_vectorization_cost (kind, vectype,
+ misalign);
+ /* Statements in an inner loop relative to the loop being
+ vectorized are weighted more heavily. The value here is
+ arbitrary and could potentially be improved with analysis. */
+ if (where == vect_body && stmt_info && stmt_in_inner_loop_p (stmt_info))
+ count *= 50; /* FIXME. */
+
+ retval = (unsigned) (count * stmt_cost);
+ cost[where] += retval;
+
+ return retval;
+}
+
+/* By default, the cost model just returns the accumulated costs. */
+
+void
+default_finish_cost (void *data, unsigned *prologue_cost,
+ unsigned *body_cost, unsigned *epilogue_cost)
+{
+ unsigned *cost = (unsigned *) data;
+ *prologue_cost = cost[vect_prologue];
+ *body_cost = cost[vect_body];
+ *epilogue_cost = cost[vect_epilogue];
+}
+
+/* Free the cost data. */
+
+void
+default_destroy_cost_data (void *data)
+{
+ free (data);
+}
+
/* Determine whether or not a pointer mode is valid. Assume defaults
of ptr_mode or Pmode - can be overridden. */
bool
-default_valid_pointer_mode (enum machine_mode mode)
+default_valid_pointer_mode (machine_mode mode)
{
return (mode == ptr_mode || mode == Pmode);
}
+/* Determine whether the memory reference specified by REF may alias
+ the C libraries errno location. */
+bool
+default_ref_may_alias_errno (ao_ref *ref)
+{
+ tree base = ao_ref_base (ref);
+ /* The default implementation assumes the errno location is
+ a declaration of type int or is always accessed via a
+ pointer to int. We assume that accesses to errno are
+ not deliberately obfuscated (even in conforming ways). */
+ if (TYPE_UNSIGNED (TREE_TYPE (base))
+ || TYPE_MODE (TREE_TYPE (base)) != TYPE_MODE (integer_type_node))
+ return false;
+ /* The default implementation assumes an errno location
+ declaration is never defined in the current compilation unit. */
+ if (DECL_P (base)
+ && !TREE_STATIC (base))
+ return true;
+ else if (TREE_CODE (base) == MEM_REF
+ && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
+ {
+ struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
+ return !pi || pi->pt.anything || pi->pt.nonlocal;
+ }
+ return false;
+}
+
/* Return the mode for a pointer to a given ADDRSPACE, defaulting to ptr_mode
for the generic address space only. */
-enum machine_mode
+machine_mode
default_addr_space_pointer_mode (addr_space_t addrspace ATTRIBUTE_UNUSED)
{
gcc_assert (ADDR_SPACE_GENERIC_P (addrspace));
/* Return the mode for an address in a given ADDRSPACE, defaulting to Pmode
for the generic address space only. */
-enum machine_mode
+machine_mode
default_addr_space_address_mode (addr_space_t addrspace ATTRIBUTE_UNUSED)
{
gcc_assert (ADDR_SPACE_GENERIC_P (addrspace));
/* Named address space version of valid_pointer_mode. */
bool
-default_addr_space_valid_pointer_mode (enum machine_mode mode, addr_space_t as)
+default_addr_space_valid_pointer_mode (machine_mode mode, addr_space_t as)
{
if (!ADDR_SPACE_GENERIC_P (as))
return (mode == targetm.addr_space.pointer_mode (as)
/* Named address space version of legitimate_address_p. */
bool
-default_addr_space_legitimate_address_p (enum machine_mode mode, rtx mem,
+default_addr_space_legitimate_address_p (machine_mode mode, rtx mem,
bool strict, addr_space_t as)
{
if (!ADDR_SPACE_GENERIC_P (as))
rtx
default_addr_space_legitimize_address (rtx x, rtx oldx,
- enum machine_mode mode, addr_space_t as)
+ machine_mode mode, addr_space_t as)
{
if (!ADDR_SPACE_GENERIC_P (as))
return x;
/* The default implementation of TARGET_MODE_DEPENDENT_ADDRESS_P. */
bool
-default_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED)
+default_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED,
+ addr_space_t addrspace ATTRIBUTE_UNUSED)
{
-#ifdef GO_IF_MODE_DEPENDENT_ADDRESS
-
- GO_IF_MODE_DEPENDENT_ADDRESS (CONST_CAST_RTX (addr), win);
- return false;
- /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
- win: ATTRIBUTE_UNUSED_LABEL
- return true;
-
-#else
-
return false;
-
-#endif
}
bool
else if (!caller_opts)
ret = false;
- /* If both caller and callee have attributes, assume that if the pointer is
- different, the the two functions have different target options since
- build_target_option_node uses a hash table for the options. */
+ /* If both caller and callee have attributes, assume that if the
+ pointer is different, the two functions have different target
+ options since build_target_option_node uses a hash table for the
+ options. */
else
ret = (callee_opts == caller_opts);
return ret;
}
-#ifndef HAVE_casesi
-# define HAVE_casesi 0
-#endif
-
/* If the machine does not have a case insn that compares the bounds,
this means extra overhead for dispatch tables, which raises the
threshold for using them. */
-unsigned int default_case_values_threshold (void)
+unsigned int
+default_case_values_threshold (void)
{
- return (HAVE_casesi ? 4 : 5);
+ return (targetm.have_casesi () ? 4 : 5);
}
bool
#endif
}
+/* By default we assume that C99 functions are present at run time,
+   but sincos is not.  */
+bool
+default_libc_has_function (enum function_class fn_class)
+{
+ if (fn_class == function_c94
+ || fn_class == function_c99_misc
+ || fn_class == function_c99_math_complex)
+ return true;
+
+ return false;
+}
+
+bool
+gnu_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
+{
+ return true;
+}
+
+bool
+no_c99_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
+{
+ return false;
+}
+
+tree
+default_builtin_tm_load_store (tree ARG_UNUSED (type))
+{
+ return NULL_TREE;
+}
+
/* Compute cost of moving registers to/from memory. */
int
-default_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
+default_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
reg_class_t rclass ATTRIBUTE_UNUSED,
bool in ATTRIBUTE_UNUSED)
{
TO, using MODE. */
int
-default_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
+default_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
reg_class_t from ATTRIBUTE_UNUSED,
reg_class_t to ATTRIBUTE_UNUSED)
{
#endif
}
+/* For hooks which use the MOVE_RATIO macro, this gives the legacy default
+ behaviour. SPEED_P is true if we are compiling for speed. */
+
+unsigned int
+get_move_ratio (bool speed_p ATTRIBUTE_UNUSED)
+{
+ unsigned int move_ratio;
+#ifdef MOVE_RATIO
+ move_ratio = (unsigned int) MOVE_RATIO (speed_p);
+#else
+#if defined (HAVE_movmemqi) || defined (HAVE_movmemhi) || defined (HAVE_movmemsi) || defined (HAVE_movmemdi) || defined (HAVE_movmemti)
+ move_ratio = 2;
+#else /* No movmem patterns, pick a default. */
+ move_ratio = ((speed_p) ? 15 : 3);
+#endif
+#endif
+ return move_ratio;
+}
+
+/* Return TRUE if the move_by_pieces/set_by_pieces infrastructure should be
+ used; return FALSE if the movmem/setmem optab should be expanded, or
+ a call to memcpy emitted. */
+
+bool
+default_use_by_pieces_infrastructure_p (unsigned HOST_WIDE_INT size,
+ unsigned int alignment,
+ enum by_pieces_operation op,
+ bool speed_p)
+{
+ unsigned int max_size = 0;
+ unsigned int ratio = 0;
+
+ switch (op)
+ {
+ case CLEAR_BY_PIECES:
+ max_size = STORE_MAX_PIECES;
+ ratio = CLEAR_RATIO (speed_p);
+ break;
+ case MOVE_BY_PIECES:
+ max_size = MOVE_MAX_PIECES;
+ ratio = get_move_ratio (speed_p);
+ break;
+ case SET_BY_PIECES:
+ max_size = STORE_MAX_PIECES;
+ ratio = SET_RATIO (speed_p);
+ break;
+ case STORE_BY_PIECES:
+ max_size = STORE_MAX_PIECES;
+ ratio = get_move_ratio (speed_p);
+ break;
+ }
+
+ return move_by_pieces_ninsns (size, alignment, max_size + 1) < ratio;
+}
+
bool
default_profile_before_prologue (void)
{
default_preferred_output_reload_class (rtx x ATTRIBUTE_UNUSED,
reg_class_t rclass)
{
-#ifdef PREFERRED_OUTPUT_RELOAD_CLASS
- return PREFERRED_OUTPUT_RELOAD_CLASS (x, (enum reg_class) rclass);
-#else
return rclass;
-#endif
}
/* The default implementation of TARGET_PREFERRED_RENAME_CLASS. */
return (reg_class_size[(int) rclass] == 1);
}
+/* The default implementation of TARGET_CLASS_MAX_NREGS. */
+
+unsigned char
+default_class_max_nregs (reg_class_t rclass ATTRIBUTE_UNUSED,
+ machine_mode mode ATTRIBUTE_UNUSED)
+{
+#ifdef CLASS_MAX_NREGS
+ return (unsigned char) CLASS_MAX_NREGS ((enum reg_class) rclass, mode);
+#else
+ return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
+#endif
+}
+
/* Determine the debugging unwind mechanism for the target. */
enum unwind_info_type
return UI_NONE;
}
-/* Determine the exception handling mechanism for the target. */
+/* Determine the correct mode for a Dwarf frame register that represents
+ register REGNO. */
-enum unwind_info_type
-default_except_unwind_info (struct gcc_options *opts ATTRIBUTE_UNUSED)
+machine_mode
+default_dwarf_frame_reg_mode (int regno)
{
- /* Obey the configure switch to turn on sjlj exceptions. */
-#ifdef CONFIG_SJLJ_EXCEPTIONS
- if (CONFIG_SJLJ_EXCEPTIONS)
- return UI_SJLJ;
-#endif
-
- /* ??? Change all users to the hook, then poison this. */
-#ifdef DWARF2_UNWIND_INFO
- if (DWARF2_UNWIND_INFO)
- return UI_DWARF2;
-#endif
+ machine_mode save_mode = reg_raw_mode[regno];
- return UI_SJLJ;
+ if (HARD_REGNO_CALL_PART_CLOBBERED (regno, save_mode))
+ save_mode = choose_hard_reg_mode (regno, 1, true);
+ return save_mode;
}
-/* To be used by targets that force dwarf2 unwind enabled. */
+/* To be used by targets where reg_raw_mode doesn't return the right
+ mode for registers used in apply_builtin_return and apply_builtin_arg. */
-enum unwind_info_type
-dwarf2_except_unwind_info (struct gcc_options *opts ATTRIBUTE_UNUSED)
+machine_mode
+default_get_reg_raw_mode (int regno)
{
- /* Obey the configure switch to turn on sjlj exceptions. */
-#ifdef CONFIG_SJLJ_EXCEPTIONS
- if (CONFIG_SJLJ_EXCEPTIONS)
- return UI_SJLJ;
-#endif
-
- return UI_DWARF2;
+ return reg_raw_mode[regno];
}
-/* To be used by targets that force sjlj unwind enabled. */
-
-enum unwind_info_type
-sjlj_except_unwind_info (struct gcc_options *opts ATTRIBUTE_UNUSED)
-{
- return UI_SJLJ;
-}
+/* Return true if a leaf function should stay leaf even with profiling
+ enabled. */
-/* To be used by targets where reg_raw_mode doesn't return the right
- mode for registers used in apply_builtin_return and apply_builtin_arg. */
-
-enum machine_mode
-default_get_reg_raw_mode(int regno)
+bool
+default_keep_leaf_when_profiled ()
{
- return reg_raw_mode[regno];
+ return false;
}
/* Return true if the state of option OPTION should be stored in PCH files
{
if ((cl_options[option].flags & CL_TARGET) == 0)
return false;
+ if ((cl_options[option].flags & CL_PCH_IGNORE) != 0)
+ return false;
if (option_flag_var (option, &global_options) == &target_flags)
if (targetm.check_pch_target_flags)
return false;
static const char *
pch_option_mismatch (const char *option)
{
- char *r;
-
- asprintf (&r, _("created and used with differing settings of '%s'"), option);
- if (r == NULL)
- return _("out of memory");
- return r;
+ return xasprintf (_("created and used with differing settings of '%s'"),
+ option);
}
/* Default version of pch_valid_p. */
return NULL;
}
-const struct default_options empty_optimization_table[] =
- {
- { OPT_LEVELS_NONE, 0, NULL, 0 }
- };
+/* Default version of cstore_mode. */
+
+machine_mode
+default_cstore_mode (enum insn_code icode)
+{
+ return insn_data[(int) icode].operand[0].mode;
+}
+
+/* Default version of member_type_forces_blk. */
+
+bool
+default_member_type_forces_blk (const_tree, machine_mode)
+{
+ return false;
+}
+
+rtx
+default_load_bounds_for_arg (rtx addr ATTRIBUTE_UNUSED,
+ rtx ptr ATTRIBUTE_UNUSED,
+ rtx bnd ATTRIBUTE_UNUSED)
+{
+ gcc_unreachable ();
+}
+
+void
+default_store_bounds_for_arg (rtx val ATTRIBUTE_UNUSED,
+ rtx addr ATTRIBUTE_UNUSED,
+ rtx bounds ATTRIBUTE_UNUSED,
+ rtx to ATTRIBUTE_UNUSED)
+{
+ gcc_unreachable ();
+}
+
+rtx
+default_load_returned_bounds (rtx slot ATTRIBUTE_UNUSED)
+{
+ gcc_unreachable ();
+}
+
+void
+default_store_returned_bounds (rtx slot ATTRIBUTE_UNUSED,
+ rtx bounds ATTRIBUTE_UNUSED)
+{
+ gcc_unreachable ();
+}
+
+/* Default version of canonicalize_comparison. */
+
+void
+default_canonicalize_comparison (int *, rtx *, rtx *, bool)
+{
+}
+
+/* Default implementation of TARGET_ATOMIC_ASSIGN_EXPAND_FENV. */
+
+void
+default_atomic_assign_expand_fenv (tree *, tree *, tree *)
+{
+}
+
+#ifndef PAD_VARARGS_DOWN
+#define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
+#endif
+
+/* Build an indirect-ref expression over the given TREE, which represents a
+ piece of a va_arg() expansion. */
+tree
+build_va_arg_indirect_ref (tree addr)
+{
+ addr = build_simple_mem_ref_loc (EXPR_LOCATION (addr), addr);
+ return addr;
+}
+
+/* The "standard" implementation of va_arg: read the value from the
+ current (padded) address and increment by the (padded) size. */
+
+tree
+std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
+ gimple_seq *post_p)
+{
+ tree addr, t, type_size, rounded_size, valist_tmp;
+ unsigned HOST_WIDE_INT align, boundary;
+ bool indirect;
+
+ /* All of the alignment and movement below is for args-grow-up machines.
+ As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
+ implement their own specialized gimplify_va_arg_expr routines. */
+ if (ARGS_GROW_DOWNWARD)
+ gcc_unreachable ();
+
+ indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
+ if (indirect)
+ type = build_pointer_type (type);
+
+ align = PARM_BOUNDARY / BITS_PER_UNIT;
+ boundary = targetm.calls.function_arg_boundary (TYPE_MODE (type), type);
+
+ /* When we align parameter on stack for caller, if the parameter
+ alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
+ aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
+ here with caller. */
+ if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
+ boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
+
+ boundary /= BITS_PER_UNIT;
+
+ /* Hoist the valist value into a temporary for the moment. */
+ valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
+
+ /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
+ requires greater alignment, we must perform dynamic alignment. */
+ if (boundary > align
+ && !integer_zerop (TYPE_SIZE (type)))
+ {
+ t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
+ fold_build_pointer_plus_hwi (valist_tmp, boundary - 1));
+ gimplify_and_add (t, pre_p);
+
+ t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
+ fold_build2 (BIT_AND_EXPR, TREE_TYPE (valist),
+ valist_tmp,
+ build_int_cst (TREE_TYPE (valist), -boundary)));
+ gimplify_and_add (t, pre_p);
+ }
+ else
+ boundary = align;
+
+ /* If the actual alignment is less than the alignment of the type,
+ adjust the type accordingly so that we don't assume strict alignment
+ when dereferencing the pointer. */
+ boundary *= BITS_PER_UNIT;
+ if (boundary < TYPE_ALIGN (type))
+ {
+ type = build_variant_type_copy (type);
+ TYPE_ALIGN (type) = boundary;
+ }
+
+ /* Compute the rounded size of the type. */
+ type_size = size_in_bytes (type);
+ rounded_size = round_up (type_size, align);
+
+ /* Reduce rounded_size so it's sharable with the postqueue. */
+ gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
+
+ /* Get AP. */
+ addr = valist_tmp;
+ if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
+ {
+ /* Small args are padded downward. */
+ t = fold_build2_loc (input_location, GT_EXPR, sizetype,
+ rounded_size, size_int (align));
+ t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
+ size_binop (MINUS_EXPR, rounded_size, type_size));
+ addr = fold_build_pointer_plus (addr, t);
+ }
+
+ /* Compute new value for AP. */
+ t = fold_build_pointer_plus (valist_tmp, rounded_size);
+ t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
+ gimplify_and_add (t, pre_p);
+
+ addr = fold_convert (build_pointer_type (type), addr);
+
+ if (indirect)
+ addr = build_va_arg_indirect_ref (addr);
+
+ return build_va_arg_indirect_ref (addr);
+}
+
+tree
+default_chkp_bound_type (void)
+{
+ tree res = make_node (POINTER_BOUNDS_TYPE);
+ TYPE_PRECISION (res) = TYPE_PRECISION (size_type_node) * 2;
+ TYPE_NAME (res) = get_identifier ("__bounds_type");
+ SET_TYPE_MODE (res, targetm.chkp_bound_mode ());
+ layout_type (res);
+ return res;
+}
+
+machine_mode
+default_chkp_bound_mode (void)
+{
+ return VOIDmode;
+}
+
+tree
+default_builtin_chkp_function (unsigned int fcode ATTRIBUTE_UNUSED)
+{
+ return NULL_TREE;
+}
+
+rtx
+default_chkp_function_value_bounds (const_tree ret_type ATTRIBUTE_UNUSED,
+ const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
+ bool outgoing ATTRIBUTE_UNUSED)
+{
+ gcc_unreachable ();
+}
+
+tree
+default_chkp_make_bounds_constant (HOST_WIDE_INT lb ATTRIBUTE_UNUSED,
+ HOST_WIDE_INT ub ATTRIBUTE_UNUSED)
+{
+ return NULL_TREE;
+}
+
+int
+default_chkp_initialize_bounds (tree var ATTRIBUTE_UNUSED,
+ tree lb ATTRIBUTE_UNUSED,
+ tree ub ATTRIBUTE_UNUSED,
+ tree *stmts ATTRIBUTE_UNUSED)
+{
+ return 0;
+}
+
+void
+default_setup_incoming_vararg_bounds (cumulative_args_t ca ATTRIBUTE_UNUSED,
+                                      machine_mode mode ATTRIBUTE_UNUSED,
+ tree type ATTRIBUTE_UNUSED,
+ int *pretend_arg_size ATTRIBUTE_UNUSED,
+ int second_time ATTRIBUTE_UNUSED)
+{
+}
+
+/* An implementation of TARGET_CAN_USE_DOLOOP_P for targets that do
+ not support nested low-overhead loops. */
+
+bool
+can_use_doloop_if_innermost (const widest_int &, const widest_int &,
+ unsigned int loop_depth, bool)
+{
+ return loop_depth == 1;
+}
#include "gt-targhooks.h"