static tree float_type_for_precision (int, machine_mode);
static tree convert_to_fat_pointer (tree, tree);
static unsigned int scale_by_factor_of (tree, unsigned int);
-static bool potential_alignment_gap (tree, tree, tree);
/* Linked list used as a queue to defer the initialization of the DECL_CONTEXT
of ..._DECL nodes and of the TYPE_CONTEXT of ..._TYPE nodes. */
finish_record_type (tree record_type, tree field_list, int rep_level,
bool debug_info_p)
{
- enum tree_code code = TREE_CODE (record_type);
+ const enum tree_code orig_code = TREE_CODE (record_type);
+ const bool had_size = TYPE_SIZE (record_type) != NULL_TREE;
+ const bool had_size_unit = TYPE_SIZE_UNIT (record_type) != NULL_TREE;
+ const bool had_align = TYPE_ALIGN (record_type) > 0;
+ /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
+ out just like a UNION_TYPE, since the size will be fixed. */
+ const enum tree_code code
+ = (orig_code == QUAL_UNION_TYPE && rep_level > 0 && had_size
+ ? UNION_TYPE : orig_code);
tree name = TYPE_IDENTIFIER (record_type);
tree ada_size = bitsize_zero_node;
tree size = bitsize_zero_node;
- bool had_size = TYPE_SIZE (record_type) != 0;
- bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
- bool had_align = TYPE_ALIGN (record_type) != 0;
tree field;
TYPE_FIELDS (record_type) = field_list;
that just means some initializations; otherwise, layout the record. */
if (rep_level > 0)
{
- SET_TYPE_ALIGN (record_type, MAX (BITS_PER_UNIT,
- TYPE_ALIGN (record_type)));
-
- if (!had_size_unit)
- TYPE_SIZE_UNIT (record_type) = size_zero_node;
+ if (TYPE_ALIGN (record_type) < BITS_PER_UNIT)
+ SET_TYPE_ALIGN (record_type, BITS_PER_UNIT);
if (!had_size)
TYPE_SIZE (record_type) = bitsize_zero_node;
- /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
- out just like a UNION_TYPE, since the size will be fixed. */
- else if (code == QUAL_UNION_TYPE)
- code = UNION_TYPE;
+ if (!had_size_unit)
+ TYPE_SIZE_UNIT (record_type) = size_zero_node;
}
else
{
/* Ensure there isn't a size already set. There can be in an error
case where there is a rep clause but all fields have errors and
no longer have a position. */
- TYPE_SIZE (record_type) = 0;
+ TYPE_SIZE (record_type) = NULL_TREE;
/* Ensure we use the traditional GCC layout for bitfields when we need
to pack the record type or have a representation clause. The other
if (DECL_BIT_FIELD (field)
&& operand_equal_p (this_size, TYPE_SIZE (type), 0))
{
- unsigned int align = TYPE_ALIGN (type);
+ const unsigned int align = TYPE_ALIGN (type);
/* In the general case, type alignment is required. */
if (value_factor_p (pos, align))
? UNION_TYPE : TREE_CODE (record_type));
tree orig_name = TYPE_IDENTIFIER (record_type), new_name;
tree last_pos = bitsize_zero_node;
- tree old_field, prev_old_field = NULL_TREE;
new_name
= concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
/* Now scan all the fields, replacing each field with a new field
corresponding to the new encoding. */
- for (old_field = TYPE_FIELDS (record_type); old_field;
+ for (tree old_field = TYPE_FIELDS (record_type);
+ old_field;
old_field = DECL_CHAIN (old_field))
{
tree field_type = TREE_TYPE (old_field);
else
pos = compute_related_constant (curpos, last_pos);
- if (!pos
- && TREE_CODE (curpos) == MULT_EXPR
- && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1)))
+ if (pos)
+ ;
+ else if (TREE_CODE (curpos) == MULT_EXPR
+ && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1)))
{
tree offset = TREE_OPERAND (curpos, 0);
align = tree_to_uhwi (TREE_OPERAND (curpos, 1));
last_pos = round_up (last_pos, align);
pos = compute_related_constant (curpos, last_pos);
}
- else if (!pos
- && TREE_CODE (curpos) == PLUS_EXPR
+ else if (TREE_CODE (curpos) == PLUS_EXPR
&& tree_fits_uhwi_p (TREE_OPERAND (curpos, 1))
&& TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
&& tree_fits_uhwi_p
last_pos = round_up (last_pos, align);
pos = compute_related_constant (curpos, last_pos);
}
- else if (potential_alignment_gap (prev_old_field, old_field, pos))
+ else
{
- align = TYPE_ALIGN (field_type);
+ align = DECL_ALIGN (old_field);
last_pos = round_up (last_pos, align);
pos = compute_related_constant (curpos, last_pos);
}
in this case, if we don't preventively counter that. */
if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
{
- field_type = build_pointer_type (field_type);
- if (align != 0 && TYPE_ALIGN (field_type) > align)
+ field_type = copy_type (build_pointer_type (field_type));
+ SET_TYPE_ALIGN (field_type, BITS_PER_UNIT);
+ var = true;
+
+ /* ??? Kludge to work around a bug in Workbench's debugger. */
+ if (align == 0)
{
- field_type = copy_type (field_type);
- SET_TYPE_ALIGN (field_type, align);
+ align = DECL_ALIGN (old_field);
+ last_pos = round_up (last_pos, align);
+ pos = compute_related_constant (curpos, last_pos);
}
- var = true;
}
/* Make a new field name, if necessary. */
new_field
= create_field_decl (field_name, field_type, new_record_type,
DECL_SIZE (old_field), pos, 0, 0);
+ /* The specified position is not the actual position of the field
+ but the gap with the previous field, so the computation of the
+ bit-field status may be incorrect. We adjust it manually to
+ avoid generating useless attributes for the field in DWARF. */
+ if (DECL_SIZE (old_field) == TYPE_SIZE (field_type)
+ && value_factor_p (pos, BITS_PER_UNIT))
+ {
+ DECL_BIT_FIELD (new_field) = 0;
+ DECL_BIT_FIELD_TYPE (new_field) = NULL_TREE;
+ }
DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
TYPE_FIELDS (new_record_type) = new_field;
== QUAL_UNION_TYPE)
? bitsize_zero_node
: DECL_SIZE (old_field));
- prev_old_field = old_field;
}
TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));
return new_size;
}
+/* Convert the size expression EXPR to TYPE and fold the result.  The
+   conversion is distributed over the operands of PLUS_EXPR and MULT_EXPR
+   nodes, so that each leaf is converted individually and the operations
+   are then refolded by size_binop in TYPE.  */
+
+static tree
+fold_convert_size (tree type, tree expr)
+{
+  /* We assume that size expressions do not wrap around, which makes it
+     valid to push the conversion below additions and multiplications.  */
+  if (TREE_CODE (expr) == MULT_EXPR || TREE_CODE (expr) == PLUS_EXPR)
+    return size_binop (TREE_CODE (expr),
+                       fold_convert_size (type, TREE_OPERAND (expr, 0)),
+                       fold_convert_size (type, TREE_OPERAND (expr, 1)));
+
+  return fold_convert (type, expr);
+}
+
/* Return the bit position of FIELD, in bits from the start of the record,
and fold it as much as possible. This is a tree of type bitsizetype. */
static tree
fold_bit_position (const_tree field)
{
-  tree offset = DECL_FIELD_OFFSET (field);
-  if (TREE_CODE (offset) == MULT_EXPR || TREE_CODE (offset) == PLUS_EXPR)
-    offset = size_binop (TREE_CODE (offset),
-                         fold_convert (bitsizetype, TREE_OPERAND (offset, 0)),
-                         fold_convert (bitsizetype, TREE_OPERAND (offset, 1)));
-  else
-    offset = fold_convert (bitsizetype, offset);
+  /* Distribute the conversion to bitsizetype over the whole offset
+     expression (not just its top-level operands as before) so that the
+     result is folded as much as possible.  */
+  tree offset = fold_convert_size (bitsizetype, DECL_FIELD_OFFSET (field));
  return size_binop (PLUS_EXPR, DECL_FIELD_BIT_OFFSET (field),
                     size_binop (MULT_EXPR, offset, bitsize_unit_node));
}
&& !have_global_bss_p ())
DECL_COMMON (var_decl) = 1;
- /* Do not emit debug info for a CONST_DECL if optimization isn't enabled,
- since we will create an associated variable. Likewise for an external
- constant whose initializer is not absolute, because this would mean a
- global relocation in a read-only section which runs afoul of the PE-COFF
- run-time relocation mechanism. */
+ /* Do not emit debug info if not requested, or for an external constant whose
+ initializer is not absolute because this would require a global relocation
+ in a read-only section which runs afoul of the PE-COFF run-time relocation
+ mechanism. */
if (!debug_info_p
- || (TREE_CODE (var_decl) == CONST_DECL && !optimize)
|| (extern_flag
&& constant_p
&& init
return var_decl;
}
\f
-/* Return true if TYPE, an aggregate type, contains (or is) an array. */
+/* Return true if TYPE, an aggregate type, contains (or is) an array.
+ If SELF_REFERENTIAL is true, then an additional requirement on the
+ array is that it be self-referential. */
-static bool
-aggregate_type_contains_array_p (tree type)
+bool
+aggregate_type_contains_array_p (tree type, bool self_referential)
{
switch (TREE_CODE (type))
{
tree field;
for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
if (AGGREGATE_TYPE_P (TREE_TYPE (field))
- && aggregate_type_contains_array_p (TREE_TYPE (field)))
+ && aggregate_type_contains_array_p (TREE_TYPE (field),
+ self_referential))
return true;
return false;
}
case ARRAY_TYPE:
- return true;
+ return self_referential ? type_contains_placeholder_p (type) : true;
default:
gcc_unreachable ();
DECL_CONTEXT (field_decl) = record_type;
TREE_READONLY (field_decl) = TYPE_READONLY (type);
- /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
- byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
- Likewise for an aggregate without specified position that contains an
- array, because in this case slices of variable length of this array
- must be handled by GCC and variable-sized objects need to be aligned
- to at least a byte boundary. */
- if (packed && (TYPE_MODE (type) == BLKmode
- || (!pos
- && AGGREGATE_TYPE_P (type)
- && aggregate_type_contains_array_p (type))))
- SET_DECL_ALIGN (field_decl, BITS_PER_UNIT);
-
/* If a size is specified, use it. Otherwise, if the record type is packed
compute a size to use, which may differ from the object's natural size.
We always set a size in this case to trigger the checks for bitfield
DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
+ /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
+ byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
+ Likewise for an aggregate without specified position that contains an
+ array, because in this case slices of variable length of this array
+ must be handled by GCC and variable-sized objects need to be aligned
+ to at least a byte boundary. */
+ if (packed && (TYPE_MODE (type) == BLKmode
+ || (!pos
+ && AGGREGATE_TYPE_P (type)
+ && aggregate_type_contains_array_p (type, false))))
+ SET_DECL_ALIGN (field_decl, BITS_PER_UNIT);
+
/* Bump the alignment if need be, either for bitfield/packing purposes or
- to satisfy the type requirements if no such consideration applies. When
+ to satisfy the type requirements if no such considerations apply. When
we get the alignment from the type, indicate if this is from an explicit
user request, which prevents stor-layout from lowering it later on. */
- {
- unsigned int bit_align
- = (DECL_BIT_FIELD (field_decl) ? 1
- : packed && TYPE_MODE (type) != BLKmode ? BITS_PER_UNIT : 0);
+ else
+ {
+ const unsigned int field_align
+ = DECL_BIT_FIELD (field_decl)
+ ? 1
+ : packed
+ ? BITS_PER_UNIT
+ : 0;
- if (bit_align > DECL_ALIGN (field_decl))
- SET_DECL_ALIGN (field_decl, bit_align);
- else if (!bit_align && TYPE_ALIGN (type) > DECL_ALIGN (field_decl))
- {
- SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
- DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (type);
- }
- }
+ if (field_align > DECL_ALIGN (field_decl))
+ SET_DECL_ALIGN (field_decl, field_align);
+ else if (!field_align && TYPE_ALIGN (type) > DECL_ALIGN (field_decl))
+ {
+ SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
+ DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (type);
+ }
+ }
if (pos)
{
return factor * value;
}
-/* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
- unless we can prove these 2 fields are laid out in such a way that no gap
- exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
- is the distance in bits between the end of PREV_FIELD and the starting
- position of CURR_FIELD. It is ignored if null. */
-
-static bool
-potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
-{
- /* If this is the first field of the record, there cannot be any gap */
- if (!prev_field)
- return false;
-
- /* If the previous field is a union type, then return false: The only
- time when such a field is not the last field of the record is when
- there are other components at fixed positions after it (meaning there
- was a rep clause for every field), in which case we don't want the
- alignment constraint to override them. */
- if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
- return false;
-
- /* If the distance between the end of prev_field and the beginning of
- curr_field is constant, then there is a gap if the value of this
- constant is not null. */
- if (offset && tree_fits_uhwi_p (offset))
- return !integer_zerop (offset);
-
- /* If the size and position of the previous field are constant,
- then check the sum of this size and position. There will be a gap
- iff it is not multiple of the current field alignment. */
- if (tree_fits_uhwi_p (DECL_SIZE (prev_field))
- && tree_fits_uhwi_p (bit_position (prev_field)))
- return ((tree_to_uhwi (bit_position (prev_field))
- + tree_to_uhwi (DECL_SIZE (prev_field)))
- % DECL_ALIGN (curr_field) != 0);
-
- /* If both the position and size of the previous field are multiples
- of the current field alignment, there cannot be any gap. */
- if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
- && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
- return false;
-
- /* Fallback, return that there may be a potential gap */
- return true;
-}
-
/* Return a LABEL_DECL with NAME. GNAT_NODE is used for the position of
the decl. */
&& TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
bound_list = TYPE_ACTUAL_BOUNDS (array_type);
- /* First make the list for a CONSTRUCTOR for the template. Go down the
- field list of the template instead of the type chain because this
- array might be an Ada array of arrays and we can't tell where the
- nested arrays stop being the underlying object. */
-
- for (field = TYPE_FIELDS (template_type); field;
- (bound_list
- ? (bound_list = TREE_CHAIN (bound_list))
- : (array_type = TREE_TYPE (array_type))),
+ /* First make the list for a CONSTRUCTOR for the template. Go down
+ the field list of the template instead of the type chain because
+ this array might be an Ada array of arrays and we can't tell where
+ the nested arrays stop being the underlying object. */
+ for (field = TYPE_FIELDS (template_type);
+ field;
field = DECL_CHAIN (DECL_CHAIN (field)))
{
tree bounds, min, max;
/* If we have a bound list, get the bounds from there. Likewise
for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
- DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
- This will give us a maximum range. */
+ DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the
+ template, but this will only give us a maximum range. */
if (bound_list)
- bounds = TREE_VALUE (bound_list);
+ {
+ bounds = TREE_VALUE (bound_list);
+ bound_list = TREE_CHAIN (bound_list);
+ }
else if (TREE_CODE (array_type) == ARRAY_TYPE)
- bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
+ {
+ bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
+ array_type = TREE_TYPE (array_type);
+ }
else if (expr && TREE_CODE (expr) == PARM_DECL
&& DECL_BY_COMPONENT_PTR_P (expr))
bounds = TREE_TYPE (field);
/* If the type is unsigned, overflow is allowed so we cannot be sure that
EXPR doesn't overflow. Keep it simple if optimization is disabled. */
- if (TYPE_UNSIGNED (type) || !optimize)
+ if (TYPE_UNSIGNED (type) || !optimize || optimize_debug)
return convert (sizetype, expr);
switch (code)
if (etype == type)
return expr;
- /* If both types are integral just do a normal conversion.
- Likewise for a conversion to an unconstrained array. */
+ /* If both types are integral or regular pointer, then just do a normal
+ conversion. Likewise for a conversion to an unconstrained array. */
if (((INTEGRAL_TYPE_P (type)
|| (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
|| (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
we need to pad to have the same size on both sides.
??? We cannot do it unconditionally because unchecked conversions are
- used liberally by the front-end to implement polymorphism, e.g. in:
+ used liberally by the front-end to implement interface thunks:
+ type ada__tags__addr_ptr is access system.address;
S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
return p___size__4 (p__object!(S191s.all));
- so we skip all expressions that are references. */
- else if (!REFERENCE_CLASS_P (expr)
+ so we need to skip dereferences. */
+ else if (!INDIRECT_REF_P (expr)
&& !AGGREGATE_TYPE_P (etype)
+ && ecode != UNCONSTRAINED_ARRAY_TYPE
&& TREE_CONSTANT (TYPE_SIZE (type))
&& (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
{
}
}
+ /* Likewise if we are converting from a scalar type to a type with self-
+ referential size. We use the max size to do the padding in this case. */
+ else if (!INDIRECT_REF_P (expr)
+ && !AGGREGATE_TYPE_P (etype)
+ && ecode != UNCONSTRAINED_ARRAY_TYPE
+ && CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type)))
+ {
+ tree new_size = max_size (TYPE_SIZE (type), true);
+ c = tree_int_cst_compare (TYPE_SIZE (etype), new_size);
+ if (c < 0)
+ {
+ expr = convert (maybe_pad_type (etype, new_size, 0, Empty,
+ false, false, false, true),
+ expr);
+ expr = unchecked_convert (type, expr, notrunc_p);
+ }
+ else
+ {
+ tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
+ false, false, false, true);
+ expr = unchecked_convert (rec_type, expr, notrunc_p);
+ expr = build_component_ref (expr, TYPE_FIELDS (rec_type), false);
+ }
+ }
+
/* We have a special case when we are converting between two unconstrained
array types. In that case, take the address, convert the fat pointer
types, and dereference. */
&& DECL_FUNCTION_IS_DEF (iter))
debug_hooks->early_global_decl (iter);
+ /* Output global constants. */
+ FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
+ if (TREE_CODE (iter) == CONST_DECL && !DECL_IGNORED_P (iter))
+ debug_hooks->early_global_decl (iter);
+
/* Then output the global variables. We need to do that after the debug
information for global types is emitted so that they are finalized. Skip
external global variables, unless we need to emit debug info for them:
static int flag_isoc94 = 0;
static int flag_isoc99 = 0;
static int flag_isoc11 = 0;
+static int flag_isoc2x = 0;
/* Install what the common builtins.def offers plus our local additions.