This means that bit-packed arrays are given "ceil" alignment for
their size by default, which may seem counter-intuitive but makes
it possible to overlay them on modular types easily. */
- TYPE_ALIGN (gnu_type)
- = align > 0 ? align : TYPE_ALIGN (gnu_field_type);
+ SET_TYPE_ALIGN (gnu_type,
+ align > 0 ? align : TYPE_ALIGN (gnu_field_type));
/* Propagate the reverse storage order flag to the record type so
that the required byte swapping is performed when retrieving the
TYPE_SIZE (gnu_type) = TYPE_SIZE (gnu_field_type);
TYPE_SIZE_UNIT (gnu_type) = TYPE_SIZE_UNIT (gnu_field_type);
SET_TYPE_ADA_SIZE (gnu_type, TYPE_RM_SIZE (gnu_field_type));
- TYPE_ALIGN (gnu_type) = align;
+ SET_TYPE_ALIGN (gnu_type, align);
relate_alias_sets (gnu_type, gnu_field_type, ALIAS_SET_COPY);
/* Don't declare the field as addressable since we won't be taking
if (No (Packed_Array_Impl_Type (gnat_entity))
&& Known_Alignment (gnat_entity))
{
- TYPE_ALIGN (tem)
- = validate_alignment (Alignment (gnat_entity), gnat_entity,
- TYPE_ALIGN (tem));
+ SET_TYPE_ALIGN (tem,
+ validate_alignment (Alignment (gnat_entity),
+ gnat_entity,
+ TYPE_ALIGN (tem)));
if (Present (Alignment_Clause (gnat_entity)))
TYPE_USER_ALIGN (tem) = 1;
}
TYPE_POINTER_TO (gnu_type) = gnu_fat_type;
TYPE_REFERENCE_TO (gnu_type) = gnu_fat_type;
SET_TYPE_MODE (gnu_type, BLKmode);
- TYPE_ALIGN (gnu_type) = TYPE_ALIGN (tem);
+ SET_TYPE_ALIGN (gnu_type, TYPE_ALIGN (tem));
/* If the maximum size doesn't overflow, use it. */
if (gnu_max_size
/* Always set the alignment on the record type here so that it can
get the proper layout. */
if (has_align)
- TYPE_ALIGN (gnu_type)
- = validate_alignment (Alignment (gnat_entity), gnat_entity, 0);
+ SET_TYPE_ALIGN (gnu_type,
+ validate_alignment (Alignment (gnat_entity),
+ gnat_entity, 0));
else
{
- TYPE_ALIGN (gnu_type) = 0;
+ SET_TYPE_ALIGN (gnu_type, 0);
/* If a type needs strict alignment, the minimum size will be the
type size instead of the RM size (see validate_size). Cap the
be created with a component clause below, then we need
to apply the same adjustment as in gnat_to_gnu_field. */
if (has_rep && TYPE_ALIGN (gnu_type) < TYPE_ALIGN (gnu_parent))
- TYPE_ALIGN (gnu_type) = TYPE_ALIGN (gnu_parent);
+ SET_TYPE_ALIGN (gnu_type, TYPE_ALIGN (gnu_parent));
/* Finally we fix up both kinds of twisted COMPONENT_REF we have
initially built. The discriminants must reference the fields
/* Set a default alignment to speed up accesses. But we
shouldn't increase the size of the structure too much,
lest it doesn't fit in return registers anymore. */
- TYPE_ALIGN (gnu_return_type)
- = get_mode_alignment (ptr_mode);
+ SET_TYPE_ALIGN (gnu_return_type,
+ get_mode_alignment (ptr_mode));
}
gnu_field
if (mode != BLKmode)
{
SET_TYPE_MODE (gnu_return_type, mode);
- TYPE_ALIGN (gnu_return_type)
- = GET_MODE_ALIGNMENT (mode);
+ SET_TYPE_ALIGN (gnu_return_type,
+ GET_MODE_ALIGNMENT (mode));
TYPE_SIZE (gnu_return_type)
= bitsize_int (GET_MODE_BITSIZE (mode));
TYPE_SIZE_UNIT (gnu_return_type)
const unsigned int type_align = TYPE_ALIGN (gnu_field_type);
if (TYPE_ALIGN (gnu_record_type) < type_align)
- TYPE_ALIGN (gnu_record_type) = type_align;
+ SET_TYPE_ALIGN (gnu_record_type, type_align);
/* If the position is not a multiple of the alignment of the type,
then error out and reset the position. */
= make_node (unchecked_union ? UNION_TYPE : QUAL_UNION_TYPE);
TYPE_NAME (gnu_union_type) = gnu_union_name;
- TYPE_ALIGN (gnu_union_type) = 0;
+ SET_TYPE_ALIGN (gnu_union_type, 0);
TYPE_PACKED (gnu_union_type) = TYPE_PACKED (gnu_record_type);
TYPE_REVERSE_STORAGE_ORDER (gnu_union_type)
= TYPE_REVERSE_STORAGE_ORDER (gnu_record_type);
/* Set the alignment of the inner type in case we need to make
inner objects into bitfields, but then clear it out so the
record actually gets only the alignment required. */
- TYPE_ALIGN (gnu_variant_type) = TYPE_ALIGN (gnu_record_type);
+ SET_TYPE_ALIGN (gnu_variant_type, TYPE_ALIGN (gnu_record_type));
TYPE_PACKED (gnu_variant_type) = TYPE_PACKED (gnu_record_type);
TYPE_REVERSE_STORAGE_ORDER (gnu_variant_type)
= TYPE_REVERSE_STORAGE_ORDER (gnu_record_type);
SET_DECL_OFFSET_ALIGN (gnu_field, BIGGEST_ALIGNMENT);
DECL_FIELD_BIT_OFFSET (gnu_field) = bitsize_zero_node;
if (field_is_aliased (gnu_field))
- TYPE_ALIGN (gnu_record_type)
- = MAX (TYPE_ALIGN (gnu_record_type),
- TYPE_ALIGN (TREE_TYPE (gnu_field)));
+ SET_TYPE_ALIGN (gnu_record_type,
+ MAX (TYPE_ALIGN (gnu_record_type),
+ TYPE_ALIGN (TREE_TYPE (gnu_field))));
MOVE_FROM_FIELD_LIST_TO (gnu_zero_list);
continue;
}
gnu_field_list = chainon (gnu_field_list, gnu_variant_part);
if (cancel_alignment)
- TYPE_ALIGN (gnu_record_type) = 0;
+ SET_TYPE_ALIGN (gnu_record_type, 0);
TYPE_ARTIFICIAL (gnu_record_type) = artificial;
SET_TYPE_ADA_SIZE (new_union_type,
size_binop (MINUS_EXPR, TYPE_ADA_SIZE (record_type),
first_bit));
- TYPE_ALIGN (new_union_type) = TYPE_ALIGN (old_union_type);
+ SET_TYPE_ALIGN (new_union_type, TYPE_ALIGN (old_union_type));
relate_alias_sets (new_union_type, old_union_type, ALIAS_SET_COPY);
}
else
TYPE_SIZE (new_type) = TYPE_SIZE (old_type);
TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (old_type);
SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (old_type));
- TYPE_ALIGN (new_type) = TYPE_ALIGN (old_type);
+ SET_TYPE_ALIGN (new_type, TYPE_ALIGN (old_type));
relate_alias_sets (new_type, old_type, ALIAS_SET_COPY);
if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (new_type)))
return t;
nt = build_nonshared_array_type (component, domain);
- TYPE_ALIGN (nt) = TYPE_ALIGN (t);
+ SET_TYPE_ALIGN (nt, TYPE_ALIGN (t));
TYPE_USER_ALIGN (nt) = TYPE_USER_ALIGN (t);
SET_TYPE_MODE (nt, TYPE_MODE (t));
TYPE_SIZE (nt) = SUBSTITUTE_IN_EXPR (TYPE_SIZE (t), f, r);
pos, 1, -1);
TYPE_FIELDS (record_type) = field;
- TYPE_ALIGN (record_type) = base_align;
+ SET_TYPE_ALIGN (record_type, base_align);
TYPE_USER_ALIGN (record_type) = 1;
TYPE_SIZE (record_type)
if (in_record && size <= MAX_FIXED_MODE_SIZE)
{
align = ceil_pow2 (size);
- TYPE_ALIGN (new_type) = align;
+ SET_TYPE_ALIGN (new_type, align);
new_size = (size + align - 1) & -align;
}
else
return type;
align = new_size & -new_size;
- TYPE_ALIGN (new_type) = MIN (TYPE_ALIGN (type), align);
+ SET_TYPE_ALIGN (new_type, MIN (TYPE_ALIGN (type), align));
}
TYPE_USER_ALIGN (new_type) = 1;
else if (Present (gnat_entity))
TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD");
- TYPE_ALIGN (record) = align ? align : orig_align;
+ SET_TYPE_ALIGN (record, align ? align : orig_align);
TYPE_SIZE (record) = size ? size : orig_size;
TYPE_SIZE_UNIT (record)
= convert (sizetype,
{
/* Make sure we can put it into a register. */
if (STRICT_ALIGNMENT)
- TYPE_ALIGN (record_type) = MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE);
+ SET_TYPE_ALIGN (record_type, MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE));
/* Show what it really is. */
TYPE_FAT_POINTER_P (record_type) = 1;
that just means some initializations; otherwise, layout the record. */
if (rep_level > 0)
{
- TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
+ SET_TYPE_ALIGN (record_type, MAX (BITS_PER_UNIT,
+ TYPE_ALIGN (record_type)));
if (!had_size_unit)
TYPE_SIZE_UNIT (record_type) = size_zero_node;
maximum alignment, if any. */
if (TYPE_ALIGN (record_type) >= align)
{
- DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
+ SET_DECL_ALIGN (field, MAX (DECL_ALIGN (field), align));
DECL_BIT_FIELD (field) = 0;
}
else if (!had_align
&& (!TYPE_MAX_ALIGN (record_type)
|| TYPE_MAX_ALIGN (record_type) >= align))
{
- TYPE_ALIGN (record_type) = align;
- DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
+ SET_TYPE_ALIGN (record_type, align);
+ SET_DECL_ALIGN (field, MAX (DECL_ALIGN (field), align));
DECL_BIT_FIELD (field) = 0;
}
}
/* A type must be as aligned as its most aligned field that is not
a bit-field. But this is already enforced by layout_type. */
if (rep_level > 0 && !DECL_BIT_FIELD (field))
- TYPE_ALIGN (record_type)
- = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
+ SET_TYPE_ALIGN (record_type,
+ MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field)));
switch (code)
{
= concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
? "XVU" : "XVE");
TYPE_NAME (new_record_type) = new_name;
- TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
+ SET_TYPE_ALIGN (new_record_type, BIGGEST_ALIGNMENT);
TYPE_STUB_DECL (new_record_type)
= create_type_stub_decl (new_name, new_record_type);
DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
if (align != 0 && TYPE_ALIGN (field_type) > align)
{
field_type = copy_node (field_type);
- TYPE_ALIGN (field_type) = align;
+ SET_TYPE_ALIGN (field_type, align);
}
var = true;
}
|| (!pos
&& AGGREGATE_TYPE_P (type)
&& aggregate_type_contains_array_p (type))))
- DECL_ALIGN (field_decl) = BITS_PER_UNIT;
+ SET_DECL_ALIGN (field_decl, BITS_PER_UNIT);
/* If a size is specified, use it. Otherwise, if the record type is packed
compute a size to use, which may differ from the object's natural size.
{
if (TYPE_ALIGN (record_type) != 0
&& TYPE_ALIGN (record_type) < TYPE_ALIGN (type))
- DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
+ SET_DECL_ALIGN (field_decl, TYPE_ALIGN (record_type));
else
- DECL_ALIGN (field_decl) = TYPE_ALIGN (type);
+ SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
}
}
: packed && TYPE_MODE (type) != BLKmode ? BITS_PER_UNIT : 0);
if (bit_align > DECL_ALIGN (field_decl))
- DECL_ALIGN (field_decl) = bit_align;
+ SET_DECL_ALIGN (field_decl, bit_align);
else if (!bit_align && TYPE_ALIGN (type) > DECL_ALIGN (field_decl))
{
- DECL_ALIGN (field_decl) = TYPE_ALIGN (type);
+ SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (type);
}
}
else
*type = build_variant_type_copy (*type);
- TYPE_ALIGN (*type) = (1U << i) * BITS_PER_UNIT;
+ SET_TYPE_ALIGN (*type, (1U << i) * BITS_PER_UNIT);
TYPE_USER_ALIGN (*type) = 1;
}
else if (! VAR_OR_FUNCTION_DECL_P (decl)
}
else
{
- DECL_ALIGN (decl) = (1U << i) * BITS_PER_UNIT;
+ SET_DECL_ALIGN (decl, (1U << i) * BITS_PER_UNIT);
DECL_USER_ALIGN (decl) = 1;
}
if (TYPE_USER_ALIGN (tem))
{
if (TYPE_ALIGN (tem) > TYPE_ALIGN (newtype))
- TYPE_ALIGN (newtype) = TYPE_ALIGN (tem);
+ SET_TYPE_ALIGN (newtype, TYPE_ALIGN (tem));
TYPE_USER_ALIGN (newtype) = true;
}
DECL_MODE (newdecl) = DECL_MODE (olddecl);
if (DECL_ALIGN (olddecl) > DECL_ALIGN (newdecl))
{
- DECL_ALIGN (newdecl) = DECL_ALIGN (olddecl);
+ SET_DECL_ALIGN (newdecl, DECL_ALIGN (olddecl));
DECL_USER_ALIGN (newdecl) |= DECL_USER_ALIGN (olddecl);
}
}
/* Apply _Alignas specifiers. */
if (alignas_align)
{
- DECL_ALIGN (decl) = alignas_align * BITS_PER_UNIT;
+ SET_DECL_ALIGN (decl, alignas_align * BITS_PER_UNIT);
DECL_USER_ALIGN (decl) = 1;
}
/* Give the type a default layout like unsigned int
to avoid crashing if it does not get defined. */
SET_TYPE_MODE (ref, TYPE_MODE (unsigned_type_node));
- TYPE_ALIGN (ref) = TYPE_ALIGN (unsigned_type_node);
+ SET_TYPE_ALIGN (ref, TYPE_ALIGN (unsigned_type_node));
TYPE_USER_ALIGN (ref) = 0;
TYPE_UNSIGNED (ref) = 1;
TYPE_PRECISION (ref) = TYPE_PRECISION (unsigned_type_node);
TYPE_MIN_VALUE (enumtype) = TYPE_MIN_VALUE (tem);
TYPE_MAX_VALUE (enumtype) = TYPE_MAX_VALUE (tem);
TYPE_UNSIGNED (enumtype) = TYPE_UNSIGNED (tem);
- TYPE_ALIGN (enumtype) = TYPE_ALIGN (tem);
+ SET_TYPE_ALIGN (enumtype, TYPE_ALIGN (tem));
TYPE_SIZE (enumtype) = 0;
TYPE_PRECISION (enumtype) = TYPE_PRECISION (tem);
TYPE_SIZE_UNIT (tem) = TYPE_SIZE_UNIT (enumtype);
SET_TYPE_MODE (tem, TYPE_MODE (enumtype));
TYPE_PRECISION (tem) = TYPE_PRECISION (enumtype);
- TYPE_ALIGN (tem) = TYPE_ALIGN (enumtype);
+ SET_TYPE_ALIGN (tem, TYPE_ALIGN (enumtype));
TYPE_USER_ALIGN (tem) = TYPE_USER_ALIGN (enumtype);
TYPE_UNSIGNED (tem) = TYPE_UNSIGNED (enumtype);
TYPE_LANG_SPECIFIC (tem) = TYPE_LANG_SPECIFIC (enumtype);
else
{
align = LOCAL_DECL_ALIGNMENT (decl);
- DECL_ALIGN (decl) = align;
+ SET_DECL_ALIGN (decl, align);
}
return align / BITS_PER_UNIT;
}
alignment here, but (at least) the i386 port does exactly this
via the MINIMUM_ALIGNMENT hook. */
- DECL_ALIGN (decl) = align;
+ SET_DECL_ALIGN (decl, align);
DECL_USER_ALIGN (decl) = 0;
}
if (!callee_tree)
callee_tree = target_option_default_node;
- DECL_ALIGN (fndecl) =
- FUNCTION_BOUNDARY_P (TREE_TARGET_OPTION (callee_tree)->x_target_flags);
+ struct cl_target_option *opts = TREE_TARGET_OPTION (callee_tree);
+ SET_DECL_ALIGN (fndecl, FUNCTION_BOUNDARY_P (opts->x_target_flags));
}
/* Inner function to process the attribute((target(...))), take an argument and
TYPE_IBIT (*node) = GET_MODE_IBIT (mode);
TYPE_FBIT (*node) = GET_MODE_FBIT (mode);
TYPE_PRECISION (*node) = GET_MODE_BITSIZE (mode);
- TYPE_ALIGN (*node) = 8;
+ SET_TYPE_ALIGN (*node, 8);
SET_TYPE_MODE (*node, mode);
layout_type (*node);
if (boundary < TYPE_ALIGN (type))
{
type = build_variant_type_copy (type);
- TYPE_ALIGN (type) = boundary;
+ SET_TYPE_ALIGN (type, boundary);
}
/* Compute the rounded size of the type. */
if (boundary < TYPE_ALIGN (type))
{
type = build_variant_type_copy (type);
- TYPE_ALIGN (type) = boundary;
+ SET_TYPE_ALIGN (type, boundary);
}
/* Compute the rounded size of the type. */
FIELD_DECL, get_identifier ("__skip"), ptr_type_node);
DECL_FIELD_CONTEXT (f_args) = record;
- DECL_ALIGN (f_args) = 128;
+ SET_DECL_ALIGN (f_args, 128);
DECL_USER_ALIGN (f_args) = 1;
DECL_FIELD_CONTEXT (f_skip) = record;
- DECL_ALIGN (f_skip) = 128;
+ SET_DECL_ALIGN (f_skip, 128);
DECL_USER_ALIGN (f_skip) = 1;
TYPE_STUB_DECL (record) = type_decl;
TREE_STATIC (var) = 1;
TREE_ADDRESSABLE (var) = 1;
DECL_NONALIASED (var) = 1;
- DECL_ALIGN (var) = TYPE_ALIGN (type);
+ SET_DECL_ALIGN (var, TYPE_ALIGN (type));
return var;
}
TREE_STATIC (decl) = 1;
TREE_READONLY (decl) = 1;
DECL_VIRTUAL_P (decl) = 1;
- DECL_ALIGN (decl) = TARGET_VTABLE_ENTRY_ALIGN;
+ SET_DECL_ALIGN (decl, TARGET_VTABLE_ENTRY_ALIGN);
DECL_USER_ALIGN (decl) = true;
DECL_VTABLE_OR_VTT_P (decl) = 1;
set_linkage_according_to_type (class_type, decl);
valign = MAX (valign, TYPE_ALIGN (variants));
else
TYPE_USER_ALIGN (variants) = user_align;
- TYPE_ALIGN (variants) = valign;
+ SET_TYPE_ALIGN (variants, valign);
if (may_alias)
fixup_may_alias (variants);
}
{
DECL_SIZE (decl) = CLASSTYPE_SIZE (basetype);
DECL_SIZE_UNIT (decl) = CLASSTYPE_SIZE_UNIT (basetype);
- DECL_ALIGN (decl) = CLASSTYPE_ALIGN (basetype);
+ SET_DECL_ALIGN (decl, CLASSTYPE_ALIGN (basetype));
DECL_USER_ALIGN (decl) = CLASSTYPE_USER_ALIGN (basetype);
DECL_MODE (decl) = TYPE_MODE (basetype);
DECL_FIELD_IS_BASE (decl) = 1;
}
DECL_SIZE (field) = TYPE_SIZE (integer_type);
- DECL_ALIGN (field) = TYPE_ALIGN (integer_type);
+ SET_DECL_ALIGN (field, TYPE_ALIGN (integer_type));
DECL_USER_ALIGN (field) = TYPE_USER_ALIGN (integer_type);
layout_nonempty_base_or_field (rli, field, NULL_TREE,
empty_base_offsets);
size_binop (MULT_EXPR,
fold_convert (bitsizetype, eoc),
bitsize_int (BITS_PER_UNIT)));
- TYPE_ALIGN (base_t) = rli->record_align;
+ SET_TYPE_ALIGN (base_t, rli->record_align);
TYPE_USER_ALIGN (base_t) = TYPE_USER_ALIGN (t);
/* Copy the fields from T. */
if (TYPE_USER_ALIGN (tem))
{
if (TYPE_ALIGN (tem) > TYPE_ALIGN (newtype))
- TYPE_ALIGN (newtype) = TYPE_ALIGN (tem);
+ SET_TYPE_ALIGN (newtype, TYPE_ALIGN (tem));
TYPE_USER_ALIGN (newtype) = true;
}
/* Likewise for DECL_ALIGN, DECL_USER_ALIGN and DECL_PACKED. */
if (DECL_ALIGN (olddecl) > DECL_ALIGN (newdecl))
{
- DECL_ALIGN (newdecl) = DECL_ALIGN (olddecl);
+ SET_DECL_ALIGN (newdecl, DECL_ALIGN (olddecl));
DECL_USER_ALIGN (newdecl) |= DECL_USER_ALIGN (olddecl);
}
DECL_USER_ALIGN (olddecl) = DECL_USER_ALIGN (newdecl);
DECL_IGNORED_P (decl) = 1;
TYPE_DECL_SUPPRESS_DEBUG (decl) = 1;
TYPE_SIZE (type) = TYPE_SIZE (void_type_node);
- TYPE_ALIGN (type) = 1;
+ SET_TYPE_ALIGN (type, 1);
TYPE_USER_ALIGN (type) = 0;
SET_TYPE_MODE (type, TYPE_MODE (void_type_node));
}
TYPE_UNSIGNED (nullptr_type_node) = 1;
TYPE_PRECISION (nullptr_type_node) = GET_MODE_BITSIZE (ptr_mode);
if (abi_version_at_least (9))
- TYPE_ALIGN (nullptr_type_node) = GET_MODE_ALIGNMENT (ptr_mode);
+ SET_TYPE_ALIGN (nullptr_type_node, GET_MODE_ALIGNMENT (ptr_mode));
SET_TYPE_MODE (nullptr_type_node, ptr_mode);
record_builtin_type (RID_MAX, "decltype(nullptr)", nullptr_type_node);
nullptr_node = build_int_cst (nullptr_type_node, 0);
parms = parm;
/* Allocate space to hold the vptr bit if needed. */
- DECL_ALIGN (decl) = MINIMUM_METHOD_BOUNDARY;
+ SET_DECL_ALIGN (decl, MINIMUM_METHOD_BOUNDARY);
}
DECL_ARGUMENTS (decl) = parms;
for (t = parms; t; t = DECL_CHAIN (t))
valign = MAX (valign, TYPE_ALIGN (t));
else
TYPE_USER_ALIGN (t) = TYPE_USER_ALIGN (src);
- TYPE_ALIGN (t) = valign;
+ SET_TYPE_ALIGN (t, valign);
TYPE_UNSIGNED (t) = TYPE_UNSIGNED (src);
}
}
tree convfn = build_lang_decl (FUNCTION_DECL, name, fntype);
tree fn = convfn;
DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
- DECL_ALIGN (fn) = MINIMUM_METHOD_BOUNDARY;
+ SET_DECL_ALIGN (fn, MINIMUM_METHOD_BOUNDARY);
SET_OVERLOADED_OPERATOR_CODE (fn, TYPE_EXPR);
grokclassfn (type, fn, NO_SPECIAL);
set_linkage_according_to_type (type, fn);
SET_OVERLOADED_OPERATOR_CODE (fn, NOP_EXPR);
}
- DECL_ALIGN (fn) = MINIMUM_METHOD_BOUNDARY;
+ SET_DECL_ALIGN (fn, MINIMUM_METHOD_BOUNDARY);
/* Create the explicit arguments. */
if (rhs_parm_type)
DECL_SOURCE_LOCATION (typedecl);
TYPE_PACKED (type) = TYPE_PACKED (pattern);
- TYPE_ALIGN (type) = TYPE_ALIGN (pattern);
+ SET_TYPE_ALIGN (type, TYPE_ALIGN (pattern));
TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (pattern);
TYPE_FOR_JAVA (type) = TYPE_FOR_JAVA (pattern); /* For libjava's JArray<T> */
if (ANON_AGGR_TYPE_P (pattern))
if (TYPE_USER_ALIGN (t))
{
- TYPE_ALIGN (r) = TYPE_ALIGN (t);
+ SET_TYPE_ALIGN (r, TYPE_ALIGN (t));
TYPE_USER_ALIGN (r) = 1;
}
/* Avoid targets optionally bumping up the alignment to improve
vector instruction accesses, tinfo are never accessed this way. */
#ifdef DATA_ABI_ALIGNMENT
- DECL_ALIGN (decl) = DATA_ABI_ALIGNMENT (decl, TYPE_ALIGN (TREE_TYPE (decl)));
+ SET_DECL_ALIGN (decl, DATA_ABI_ALIGNMENT (decl, TYPE_ALIGN (TREE_TYPE (decl))));
DECL_USER_ALIGN (decl) = true;
#endif
return true;
{
t = build_variant_type_copy (t);
TYPE_NAME (t) = TYPE_NAME (type);
- TYPE_ALIGN (t) = TYPE_ALIGN (type);
+ SET_TYPE_ALIGN (t, TYPE_ALIGN (type));
TYPE_USER_ALIGN (t) = TYPE_USER_ALIGN (type);
}
}
#ifdef DONT_USE_BUILTIN_SETJMP
/* We don't know what the alignment requirements of the
runtime's jmp_buf has. Overestimate. */
- DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
+ SET_DECL_ALIGN (f_jbuf, BIGGEST_ALIGNMENT);
DECL_USER_ALIGN (f_jbuf) = 1;
#endif
DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;
{
inner = copy_node (inner);
TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
- TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
+ SET_TYPE_ALIGN (TREE_TYPE (inner), TYPE_ALIGN (TREE_TYPE (exp)));
TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
}
result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
TREE_STATIC (decl) = 1;
DECL_IGNORED_P (decl) = 1;
if (!com->is_bind_c)
- DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
+ SET_DECL_ALIGN (decl, BIGGEST_ALIGNMENT);
else
{
/* Do not set the alignment for bind(c) common blocks to
tree field = NULL_TREE;
field = TYPE_FIELDS (TREE_TYPE (decl));
if (DECL_CHAIN (field) == NULL_TREE)
- DECL_ALIGN (decl) = TYPE_ALIGN (TREE_TYPE (field));
+ SET_DECL_ALIGN (decl, TYPE_ALIGN (TREE_TYPE (field)));
}
DECL_USER_ALIGN (decl) = 0;
GFC_DECL_COMMON_OR_EQUIV (decl) = 1;
alignment that is at least as large as the needed alignment for those
types. See the st_parameter_dt structure in libgfortran/io/io.h for
what really goes into this space. */
- TYPE_ALIGN (types[IOPARM_type_pad]) = MAX (TYPE_ALIGN (pchar_type_node),
- TYPE_ALIGN (gfc_get_int_type (gfc_intio_kind)));
+ SET_TYPE_ALIGN (types[IOPARM_type_pad], MAX (TYPE_ALIGN (pchar_type_node),
+ TYPE_ALIGN (gfc_get_int_type (gfc_intio_kind))));
for (ptype = IOPARM_ptype_common; ptype < IOPARM_ptype_num; ptype++)
gfc_build_st_parameter ((enum ioparam_type) ptype, types);
tree decl = gfc_add_field_to_struct_1 (context, name, type, chain);
DECL_INITIAL (decl) = 0;
- DECL_ALIGN (decl) = 0;
+ SET_DECL_ALIGN (decl, 0);
DECL_USER_ALIGN (decl) = 0;
return decl;
size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
if (stack_parm == 0)
{
- DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
+ SET_DECL_ALIGN (parm, MAX (DECL_ALIGN (parm), BITS_PER_WORD));
stack_parm = assign_stack_local (BLKmode, size_stored,
DECL_ALIGN (parm));
if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
DECL_ATTRIBUTES (copy) = DECL_ATTRIBUTES (var);
if (DECL_USER_ALIGN (var))
{
- DECL_ALIGN (copy) = DECL_ALIGN (var);
+ SET_DECL_ALIGN (copy, DECL_ALIGN (var));
DECL_USER_ALIGN (copy) = 1;
}
}
if (alignment != 0)
{
- DECL_ALIGN(decl) = alignment * BITS_PER_UNIT;
+ SET_DECL_ALIGN(decl, alignment * BITS_PER_UNIT);
DECL_USER_ALIGN(decl) = 1;
}
TYPE_METHODS (this_class) = fndecl;
if (!(access_flags & ACC_STATIC))
- DECL_ALIGN (fndecl) = MINIMUM_METHOD_BOUNDARY;
+ SET_DECL_ALIGN (fndecl, MINIMUM_METHOD_BOUNDARY);
/* Notice that this is a finalizer and update the class type
accordingly. This is used to optimize instance allocation. */
FINISH_RECORD_CONSTRUCTOR (cons, v2, class_type_node);
DECL_INITIAL (decl) = cons;
-
+
/* Hash synchronization requires at least 64-bit alignment. */
if (flag_hash_synchronization && POINTER_SIZE < 64)
- DECL_ALIGN (decl) = 64;
-
+ SET_DECL_ALIGN (decl, 64);
+
if (flag_indirect_classes)
{
TREE_READONLY (decl) = 1;
cdecl = build_decl (UNKNOWN_LOCATION,
VAR_DECL, get_identifier ("_Jv_JCR_SECTION_data"),
class_array_type);
- DECL_ALIGN (cdecl) = POINTER_SIZE;
+ SET_DECL_ALIGN (cdecl, POINTER_SIZE);
DECL_USER_ALIGN (cdecl) = 1;
DECL_INITIAL (cdecl) = build_constructor (class_array_type, init);
TREE_CONSTANT (DECL_INITIAL (cdecl)) = 1;
FIELD_DECL, get_identifier ("data"), atype);
DECL_CONTEXT (arfld) = t;
DECL_CHAIN (fld) = arfld;
- DECL_ALIGN (arfld) = TYPE_ALIGN (element_type);
+ SET_DECL_ALIGN (arfld, TYPE_ALIGN (element_type));
/* We could layout_class, but that loads java.lang.Object prematurely.
* This is called by the parser, and it is a bad idea to do load_class
= size_binop (FLOOR_DIV_EXPR, convert (sizetype, DECL_SIZE (base)),
size_int (BITS_PER_UNIT));
DECL_ARTIFICIAL (base) = 1;
- DECL_ALIGN (base) = 1;
+ SET_DECL_ALIGN (base, 1);
DECL_FIELD_CONTEXT (base) = s;
#ifdef OBJCPLUS
DECL_FIELD_IS_BASE (base) = 1;
DECL_ABSTRACT_ORIGIN (field) = var;
if (type == TREE_TYPE (var))
{
- DECL_ALIGN (field) = DECL_ALIGN (var);
+ SET_DECL_ALIGN (field, DECL_ALIGN (var));
DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
}
else
- DECL_ALIGN (field) = TYPE_ALIGN (type);
+ SET_DECL_ALIGN (field, TYPE_ALIGN (type));
if ((mask & 3) == 3)
{
sfield = build_decl (DECL_SOURCE_LOCATION (var),
FIELD_DECL, DECL_NAME (var), type);
DECL_ABSTRACT_ORIGIN (sfield) = var;
- DECL_ALIGN (sfield) = DECL_ALIGN (field);
+ SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
insert_field_into_struct (ctx->srecord_type, sfield);
tree field
= build_decl (OMP_CLAUSE_LOCATION (c),
FIELD_DECL, NULL_TREE, ptr_type_node);
- DECL_ALIGN (field) = TYPE_ALIGN (ptr_type_node);
+ SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
insert_field_into_struct (ctx->record_type, field);
splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
(splay_tree_value) field);
TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
TREE_THIS_VOLATILE (field) = 0;
DECL_USER_ALIGN (field) = 0;
- DECL_ALIGN (field) = TYPE_ALIGN (TREE_TYPE (field));
+ SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
- TYPE_ALIGN (ctx->record_type) = DECL_ALIGN (field);
+ SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
if (ctx->srecord_type)
{
tree sfield = lookup_sfield (decl, ctx);
TREE_TYPE (sfield) = TREE_TYPE (field);
TREE_THIS_VOLATILE (sfield) = 0;
DECL_USER_ALIGN (sfield) = 0;
- DECL_ALIGN (sfield) = DECL_ALIGN (field);
+ SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
- TYPE_ALIGN (ctx->srecord_type) = DECL_ALIGN (sfield);
+ SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
}
}
}
num_vars * 2);
tree funcs_decl_type = build_array_type_nelts (pointer_sized_int_node,
num_funcs);
- TYPE_ALIGN (vars_decl_type) = TYPE_ALIGN (pointer_sized_int_node);
- TYPE_ALIGN (funcs_decl_type) = TYPE_ALIGN (pointer_sized_int_node);
+ SET_TYPE_ALIGN (vars_decl_type, TYPE_ALIGN (pointer_sized_int_node));
+ SET_TYPE_ALIGN (funcs_decl_type, TYPE_ALIGN (pointer_sized_int_node));
tree ctor_v = build_constructor (vars_decl_type, v_v);
tree ctor_f = build_constructor (funcs_decl_type, v_f);
TREE_CONSTANT (ctor_v) = TREE_CONSTANT (ctor_f) = 1;
otherwise a joint table in a binary will contain padding between
tables from multiple object files. */
DECL_USER_ALIGN (funcs_decl) = DECL_USER_ALIGN (vars_decl) = 1;
- DECL_ALIGN (funcs_decl) = TYPE_ALIGN (funcs_decl_type);
- DECL_ALIGN (vars_decl) = TYPE_ALIGN (vars_decl_type);
+ SET_DECL_ALIGN (funcs_decl, TYPE_ALIGN (funcs_decl_type));
+ SET_DECL_ALIGN (vars_decl, TYPE_ALIGN (vars_decl_type));
DECL_INITIAL (funcs_decl) = ctor_f;
DECL_INITIAL (vars_decl) = ctor_v;
set_decl_section_name (funcs_decl, OFFLOAD_FUNC_TABLE_SECTION_NAME);
{
if (TYPE_ALIGN (type) > DECL_ALIGN (decl))
{
- DECL_ALIGN (decl) = TYPE_ALIGN (type);
+ SET_DECL_ALIGN (decl, TYPE_ALIGN (type));
if (TREE_CODE (decl) == FIELD_DECL)
DECL_USER_ALIGN (decl) = TYPE_USER_ALIGN (type);
}
#ifdef EMPTY_FIELD_BOUNDARY
if (EMPTY_FIELD_BOUNDARY > DECL_ALIGN (decl))
{
- DECL_ALIGN (decl) = EMPTY_FIELD_BOUNDARY;
+ SET_DECL_ALIGN (decl, EMPTY_FIELD_BOUNDARY);
DECL_USER_ALIGN (decl) = 0;
}
#endif
&& !(xalign > BITS_PER_UNIT && DECL_PACKED (decl))
&& (known_align == 0 || known_align >= xalign))
{
- DECL_ALIGN (decl) = MAX (xalign, DECL_ALIGN (decl));
+ SET_DECL_ALIGN (decl, MAX (xalign, DECL_ALIGN (decl)));
DECL_MODE (decl) = xmode;
DECL_BIT_FIELD (decl) = 0;
}
DECL_USER_ALIGN, so we need to check old_user_align instead. */
if (packed_p
&& !old_user_align)
- DECL_ALIGN (decl) = MIN (DECL_ALIGN (decl), BITS_PER_UNIT);
+ SET_DECL_ALIGN (decl, MIN (DECL_ALIGN (decl), BITS_PER_UNIT));
if (! packed_p && ! DECL_USER_ALIGN (decl))
{
to a lower boundary than alignment of variables unless
it was overridden by attribute aligned. */
#ifdef BIGGEST_FIELD_ALIGNMENT
- DECL_ALIGN (decl)
- = MIN (DECL_ALIGN (decl), (unsigned) BIGGEST_FIELD_ALIGNMENT);
+ SET_DECL_ALIGN (decl, MIN (DECL_ALIGN (decl),
+ (unsigned) BIGGEST_FIELD_ALIGNMENT));
#endif
#ifdef ADJUST_FIELD_ALIGN
- DECL_ALIGN (decl) = ADJUST_FIELD_ALIGN (decl, DECL_ALIGN (decl));
+ SET_DECL_ALIGN (decl, ADJUST_FIELD_ALIGN (decl, DECL_ALIGN (decl)));
#endif
}
mfa = maximum_field_alignment;
/* Should this be controlled by DECL_USER_ALIGN, too? */
if (mfa != 0)
- DECL_ALIGN (decl) = MIN (DECL_ALIGN (decl), mfa);
+ SET_DECL_ALIGN (decl, MIN (DECL_ALIGN (decl), mfa));
}
/* Evaluate nonconstant size only once, either now or as soon as safe. */
DECL_SIZE (decl) = DECL_SIZE_UNIT (decl) = 0;
DECL_MODE (decl) = VOIDmode;
if (!DECL_USER_ALIGN (decl))
- DECL_ALIGN (decl) = 0;
+ SET_DECL_ALIGN (decl, 0);
SET_DECL_RTL (decl, 0);
layout_decl (decl, 0);
/* Determine the desired alignment. */
#ifdef ROUND_TYPE_ALIGN
- TYPE_ALIGN (rli->t) = ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t),
- rli->record_align);
+ SET_TYPE_ALIGN (rli->t, ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t),
+ rli->record_align));
#else
- TYPE_ALIGN (rli->t) = MAX (TYPE_ALIGN (rli->t), rli->record_align);
+ SET_TYPE_ALIGN (rli->t, MAX (TYPE_ALIGN (rli->t), rli->record_align));
#endif
/* Compute the size so far. Be sure to allow for extra bits in the
alignment of one of the fields. */
if (mode_align >= TYPE_ALIGN (type))
{
- TYPE_ALIGN (type) = mode_align;
+ SET_TYPE_ALIGN (type, mode_align);
TYPE_USER_ALIGN (type) = 0;
}
}
/* Do machine-dependent extra alignment. */
#ifdef ROUND_TYPE_ALIGN
- TYPE_ALIGN (type)
- = ROUND_TYPE_ALIGN (type, TYPE_ALIGN (type), BITS_PER_UNIT);
+ SET_TYPE_ALIGN (type,
+ ROUND_TYPE_ALIGN (type, TYPE_ALIGN (type), BITS_PER_UNIT));
#endif
/* If we failed to find a simple way to calculate the unit size
valign = MAX (valign, TYPE_ALIGN (variant));
else
TYPE_USER_ALIGN (variant) = user_align;
- TYPE_ALIGN (variant) = valign;
+ SET_TYPE_ALIGN (variant, valign);
TYPE_PRECISION (variant) = precision;
SET_TYPE_MODE (variant, mode);
}
if (align_type)
{
- TYPE_ALIGN (type) = TYPE_ALIGN (align_type);
+ SET_TYPE_ALIGN (type, TYPE_ALIGN (align_type));
TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (align_type);
}
Instead, query a target hook, defaulting to natural alignment.
This prevents ABI changes depending on whether or not native
vector modes are supported. */
- TYPE_ALIGN (type) = targetm.vector_alignment (type);
+ SET_TYPE_ALIGN (type, targetm.vector_alignment (type));
/* However, if the underlying mode requires a bigger alignment than
what the target hook provides, we cannot use the mode. For now,
case VOID_TYPE:
/* This is an incomplete type and so doesn't have a size. */
- TYPE_ALIGN (type) = 1;
+ SET_TYPE_ALIGN (type, 1);
TYPE_USER_ALIGN (type) = 0;
SET_TYPE_MODE (type, VOIDmode);
break;
#else
align = MAX (align, BITS_PER_UNIT);
#endif
- TYPE_ALIGN (type) = align;
+ SET_TYPE_ALIGN (type, align);
SET_TYPE_MODE (type, BLKmode);
if (TYPE_SIZE (type) != 0
&& ! targetm.member_type_forces_blk (type, VOIDmode)
/* Now layout both types manually. */
SET_TYPE_MODE (sizetype, smallest_mode_for_size (precision, MODE_INT));
- TYPE_ALIGN (sizetype) = GET_MODE_ALIGNMENT (TYPE_MODE (sizetype));
+ SET_TYPE_ALIGN (sizetype, GET_MODE_ALIGNMENT (TYPE_MODE (sizetype)));
TYPE_SIZE (sizetype) = bitsize_int (precision);
TYPE_SIZE_UNIT (sizetype) = size_int (GET_MODE_SIZE (TYPE_MODE (sizetype)));
set_min_and_max_values_for_integral_type (sizetype, precision, UNSIGNED);
SET_TYPE_MODE (bitsizetype, smallest_mode_for_size (bprecision, MODE_INT));
- TYPE_ALIGN (bitsizetype) = GET_MODE_ALIGNMENT (TYPE_MODE (bitsizetype));
+ SET_TYPE_ALIGN (bitsizetype, GET_MODE_ALIGNMENT (TYPE_MODE (bitsizetype)));
TYPE_SIZE (bitsizetype) = bitsize_int (bprecision);
TYPE_SIZE_UNIT (bitsizetype)
= size_int (GET_MODE_SIZE (TYPE_MODE (bitsizetype)));
if (DECL_ALIGN (n->decl) < align
&& n->can_increase_alignment_p ())
{
- DECL_ALIGN (n->decl) = align;
+ SET_DECL_ALIGN (n->decl, align);
DECL_USER_ALIGN (n->decl) = 1;
}
return false;
if (boundary < TYPE_ALIGN (type))
{
type = build_variant_type_copy (type);
- TYPE_ALIGN (type) = boundary;
+ SET_TYPE_ALIGN (type, boundary);
}
/* Compute the rounded size of the type. */
unsigned lang_flag_5 : 1;
unsigned lang_flag_6 : 1;
- unsigned int align;
+ /* TYPE_ALIGN in log2; this has to be large enough to hold values
+ of the maximum of BIGGEST_ALIGNMENT and MAX_OFILE_ALIGNMENT,
+ the latter being usually the larger. For ELF it is 8<<28,
+ so we need to store the value 32 (not 31, as we need the zero
+ as well), hence six bits. */
+ unsigned align : 6;
+ unsigned spare : 26;
alias_set_type alias_set;
tree pointer_to;
tree reference_to;
unsigned decl_nonshareable_flag : 1;
/* DECL_OFFSET_ALIGN, used only for FIELD_DECLs. */
- unsigned int off_align : 8;
-
- /* 24 bits unused. */
+ unsigned int off_align : 6;
/* DECL_ALIGN. It should have the same size as TYPE_ALIGN. */
- unsigned int align;
+ unsigned int align : 6;
+
+ /* 20 bits unused. */
/* UID for points-to sets, stable over copying from inlining. */
unsigned int pt_uid;
/* Set correct alignment for frame struct type. */
if (TYPE_ALIGN (type) < DECL_ALIGN (field))
- TYPE_ALIGN (type) = DECL_ALIGN (field);
+ SET_TYPE_ALIGN (type, DECL_ALIGN (field));
}
/* Build or return the RECORD_TYPE that describes the frame state that is
if (use_pointer_in_frame (decl))
{
TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
- DECL_ALIGN (field) = TYPE_ALIGN (TREE_TYPE (field));
+ SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
DECL_NONADDRESSABLE_P (field) = 1;
}
else
{
TREE_TYPE (field) = TREE_TYPE (decl);
DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
- DECL_ALIGN (field) = DECL_ALIGN (decl);
+ SET_DECL_ALIGN (field, DECL_ALIGN (decl));
DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
field = make_node (FIELD_DECL);
DECL_NAME (field) = get_identifier ("__chain");
TREE_TYPE (field) = type;
- DECL_ALIGN (field) = TYPE_ALIGN (type);
+ SET_DECL_ALIGN (field, TYPE_ALIGN (type));
DECL_NONADDRESSABLE_P (field) = 1;
insert_field_into_struct (get_frame_type (info), field);
t = build_array_type (char_type_node, t);
t = build_decl (DECL_SOURCE_LOCATION (info->context),
FIELD_DECL, get_identifier ("__data"), t);
- DECL_ALIGN (t) = align;
+ SET_DECL_ALIGN (t, align);
DECL_USER_ALIGN (t) = 1;
trampoline_type = make_node (RECORD_TYPE);
field = make_node (FIELD_DECL);
DECL_NAME (field) = get_identifier ("__nl_goto_buf");
TREE_TYPE (field) = type;
- DECL_ALIGN (field) = TYPE_ALIGN (type);
+ SET_DECL_ALIGN (field, TYPE_ALIGN (type));
TREE_ADDRESSABLE (field) = 1;
insert_field_into_struct (get_frame_type (info), field);
n_elem = size * 8 / BITS_PER_UNIT;
array_type = build_array_type_nelts (elem_type, n_elem);
var = create_tmp_var (array_type);
- DECL_ALIGN (var) = TREE_INT_CST_LOW (gimple_call_arg (stmt, 1));
+ SET_DECL_ALIGN (var, TREE_INT_CST_LOW (gimple_call_arg (stmt, 1)));
{
struct ptr_info_def *pi = SSA_NAME_PTR_INFO (lhs);
if (pi != NULL && !pi->pt.anything)
DECL_PRESERVE_P (expr) = (unsigned) bp_unpack_value (bp, 1);
DECL_EXTERNAL (expr) = (unsigned) bp_unpack_value (bp, 1);
DECL_GIMPLE_REG_P (expr) = (unsigned) bp_unpack_value (bp, 1);
- DECL_ALIGN (expr) = (unsigned) bp_unpack_var_len_unsigned (bp);
+ SET_DECL_ALIGN (expr, (unsigned) bp_unpack_var_len_unsigned (bp));
#ifdef ACCEL_COMPILER
if (DECL_ALIGN (expr) > targetm.absolute_biggest_alignment)
- DECL_ALIGN (expr) = targetm.absolute_biggest_alignment;
+ SET_DECL_ALIGN (expr, targetm.absolute_biggest_alignment);
#endif
if (TREE_CODE (expr) == LABEL_DECL)
{
else if (TREE_CODE (expr) == ARRAY_TYPE)
TYPE_NONALIASED_COMPONENT (expr) = (unsigned) bp_unpack_value (bp, 1);
TYPE_PRECISION (expr) = bp_unpack_var_len_unsigned (bp);
- TYPE_ALIGN (expr) = bp_unpack_var_len_unsigned (bp);
+ SET_TYPE_ALIGN (expr, bp_unpack_var_len_unsigned (bp));
#ifdef ACCEL_COMPILER
if (TYPE_ALIGN (expr) > targetm.absolute_biggest_alignment)
- TYPE_ALIGN (expr) = targetm.absolute_biggest_alignment;
+ SET_TYPE_ALIGN (expr, targetm.absolute_biggest_alignment);
#endif
}
symtab_node::get (base_decl)->increase_alignment (TYPE_ALIGN (vectype));
else
{
- DECL_ALIGN (base_decl) = TYPE_ALIGN (vectype);
+ SET_DECL_ALIGN (base_decl, TYPE_ALIGN (vectype));
DECL_USER_ALIGN (base_decl) = 1;
}
DR_VECT_AUX (dr)->base_misaligned = false;
{
if (code == FUNCTION_DECL)
{
- DECL_ALIGN (t) = FUNCTION_BOUNDARY;
+ SET_DECL_ALIGN (t, FUNCTION_BOUNDARY);
DECL_MODE (t) = FUNCTION_MODE;
}
else
- DECL_ALIGN (t) = 1;
+ SET_DECL_ALIGN (t, 1);
}
DECL_SOURCE_LOCATION (t) = input_location;
if (TREE_CODE (t) == DEBUG_EXPR_DECL)
case tcc_type:
TYPE_UID (t) = next_type_uid++;
- TYPE_ALIGN (t) = BITS_PER_UNIT;
+ SET_TYPE_ALIGN (t, BITS_PER_UNIT);
TYPE_USER_ALIGN (t) = 0;
TYPE_MAIN_VARIANT (t) = t;
TYPE_CANONICAL (t) = t;
/* Ensure the alignment of this type is compatible with
the required alignment of the atomic type. */
if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
- TYPE_ALIGN (t) = TYPE_ALIGN (atomic_type);
+ SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
}
}
return t;
t = build_variant_type_copy (type);
- TYPE_ALIGN (t) = align;
+ SET_TYPE_ALIGN (t, align);
return t;
}
SET_TYPE_MODE (itype, TYPE_MODE (type));
TYPE_SIZE (itype) = TYPE_SIZE (type);
TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
- TYPE_ALIGN (itype) = TYPE_ALIGN (type);
+ SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
if (!shared)
set_type_quals (t, TYPE_QUAL_ATOMIC);
if (align)
- TYPE_ALIGN (t) = align;
+ SET_TYPE_ALIGN (t, align);
return t;
}
/* We are not going to have real types in C with less than byte alignment,
so we might as well not have any types that claim to have it. */
- TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
+ SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
TYPE_USER_ALIGN (void_type_node) = 0;
void_node = make_node (VOID_CST);
of this type is aligned at least to the alignment of the type, even if it
doesn't appear that it is. We see this, for example, in object-oriented
languages where a tag field may show this is an object of a more-aligned
- variant of the more generic type.
-
- In an SSA_NAME node, nonzero if the SSA_NAME node is on the SSA_NAME
- freelist. */
+ variant of the more generic type. */
#define TYPE_ALIGN_OK(NODE) (TYPE_CHECK (NODE)->base.nothrow_flag)
/* Used in classes in C++. */
#define TYPE_ATTRIBUTES(NODE) (TYPE_CHECK (NODE)->type_common.attributes)
/* The alignment necessary for objects of this type.
- The value is an int, measured in bits. */
-#define TYPE_ALIGN(NODE) (TYPE_CHECK (NODE)->type_common.align)
+ The value is an int, measured in bits and must be a power of two.
+ An "alignment" of zero is also supported. */
+#define TYPE_ALIGN(NODE) \
+ (TYPE_CHECK (NODE)->type_common.align \
+ ? ((unsigned)1) << ((NODE)->type_common.align - 1) : 0)
+
+/* Specify that TYPE_ALIGN(NODE) is X. */
+#define SET_TYPE_ALIGN(NODE, X) \
+ (TYPE_CHECK (NODE)->type_common.align = ffs_hwi (X))
/* 1 if the alignment for this type was requested by "aligned" attribute,
0 if it is the default for this type. */
#define DECL_SIZE(NODE) (DECL_COMMON_CHECK (NODE)->decl_common.size)
/* Likewise for the size in bytes. */
#define DECL_SIZE_UNIT(NODE) (DECL_COMMON_CHECK (NODE)->decl_common.size_unit)
-/* Holds the alignment required for the datum, in bits. */
-#define DECL_ALIGN(NODE) (DECL_COMMON_CHECK (NODE)->decl_common.align)
+/* Returns the alignment required for the datum, in bits. It must
+ be a power of two, but an "alignment" of zero is supported
+ (e.g. as "uninitialized" sentinel). */
+#define DECL_ALIGN(NODE) \
+ (DECL_COMMON_CHECK (NODE)->decl_common.align \
+ ? ((unsigned)1) << ((NODE)->decl_common.align - 1) : 0)
+/* Specify that DECL_ALIGN(NODE) is X. */
+#define SET_DECL_ALIGN(NODE, X) \
+ (DECL_COMMON_CHECK (NODE)->decl_common.align = ffs_hwi (X))
+
/* The alignment of NODE, in bytes. */
#define DECL_ALIGN_UNIT(NODE) (DECL_ALIGN (NODE) / BITS_PER_UNIT)
/* Set if the alignment of this DECL has been set by the user, for
#define DECL_OFFSET_ALIGN(NODE) \
(((unsigned HOST_WIDE_INT)1) << FIELD_DECL_CHECK (NODE)->decl_common.off_align)
-/* Specify that DECL_ALIGN(NODE) is a multiple of X. */
+/* Specify that DECL_OFFSET_ALIGN(NODE) is X. */
#define SET_DECL_OFFSET_ALIGN(NODE, X) \
(FIELD_DECL_CHECK (NODE)->decl_common.off_align = ffs_hwi (X) - 1)
/* Reset the alignment in case we have made it tighter, so we can benefit
from it in get_pointer_alignment. */
- DECL_ALIGN (decl) = align;
+ SET_DECL_ALIGN (decl, align);
}
/* Return DECL_ALIGN (decl), possibly increased for optimization purposes
&& asan_protect_global (decl))
{
asan_protected = true;
- DECL_ALIGN (decl) = MAX (DECL_ALIGN (decl),
- ASAN_RED_ZONE_SIZE * BITS_PER_UNIT);
+ SET_DECL_ALIGN (decl, MAX (DECL_ALIGN (decl),
+ ASAN_RED_ZONE_SIZE * BITS_PER_UNIT));
}
set_mem_align (decl_rtl, DECL_ALIGN (decl));
architectures so use DATA_ALIGNMENT as well, except for strings. */
if (TREE_CODE (exp) == STRING_CST)
{
- DECL_ALIGN (decl) = CONSTANT_ALIGNMENT (exp, DECL_ALIGN (decl));
+ SET_DECL_ALIGN (decl, CONSTANT_ALIGNMENT (exp, DECL_ALIGN (decl)));
}
else
align_variable (decl, 0);
&& asan_protect_global (exp))
{
asan_protected = true;
- DECL_ALIGN (decl) = MAX (DECL_ALIGN (decl),
- ASAN_RED_ZONE_SIZE * BITS_PER_UNIT);
+ SET_DECL_ALIGN (decl, MAX (DECL_ALIGN (decl),
+ ASAN_RED_ZONE_SIZE * BITS_PER_UNIT));
}
/* If the constant is part of an object block, make sure that the
{
// FIXME there's no way to get this from DWARF,
// or even, it seems, a particularly good way to deduce it.
- TYPE_ALIGN (record_or_union_type)
- = TYPE_PRECISION (pointer_sized_int_node);
+ SET_TYPE_ALIGN (record_or_union_type,
+ TYPE_PRECISION (pointer_sized_int_node));
TYPE_SIZE (record_or_union_type) = bitsize_int (size_in_bytes
* BITS_PER_UNIT);