return NULL_TREE;
}
-/* Do a 1-level search for NAME as a member of TYPE. The caller must
- figure out whether it can access this field. (Since it is only one
- level, this is reasonable.) */
+/* Linear search of (partially ordered) fields of KLASS for NAME. */
static tree
-lookup_field_1 (tree type, tree name, bool want_type)
+fields_linear_search (tree klass, tree name, bool want_type)
{
- tree field;
-
- gcc_assert (identifier_p (name) && RECORD_OR_UNION_TYPE_P (type));
-
- if (CLASSTYPE_SORTED_FIELDS (type))
+ for (tree fields = TYPE_FIELDS (klass); fields; fields = DECL_CHAIN (fields))
{
- tree *fields = &CLASSTYPE_SORTED_FIELDS (type)->elts[0];
- int lo = 0, hi = CLASSTYPE_SORTED_FIELDS (type)->len;
- int i;
+ tree decl = fields;
- while (lo < hi)
+ if (!want_type
+ && TREE_CODE (decl) == FIELD_DECL
+ && ANON_AGGR_TYPE_P (TREE_TYPE (decl)))
{
- i = (lo + hi) / 2;
-
- if (DECL_NAME (fields[i]) > name)
- hi = i;
- else if (DECL_NAME (fields[i]) < name)
- lo = i + 1;
+ tree anon = TREE_TYPE (decl);
+ gcc_assert (COMPLETE_TYPE_P (anon));
+ tree temp;
+
+ if (vec<tree, va_gc> *method_vec = CLASSTYPE_METHOD_VEC (anon))
+ temp = method_vec_linear_search (method_vec, name);
else
- {
- field = NULL_TREE;
-
- /* We might have a nested class and a field with the
- same name; we sorted them appropriately via
- field_decl_cmp, so just look for the first or last
- field with this name. */
- if (want_type)
- {
- do
- field = fields[i--];
- while (i >= lo && DECL_NAME (fields[i]) == name);
- if (!DECL_DECLARES_TYPE_P (field))
- field = NULL_TREE;
- }
- else
- {
- do
- field = fields[i++];
- while (i < hi && DECL_NAME (fields[i]) == name);
- }
+ temp = fields_linear_search (anon, name, want_type);
- if (field)
- {
- field = strip_using_decl (field);
- if (is_overloaded_fn (field))
- field = NULL_TREE;
- }
-
- return field;
+ if (temp)
+ {
+ /* Anon members can only contain fields. */
+ gcc_assert (!STAT_HACK_P (temp) && !DECL_DECLARES_TYPE_P (temp));
+ return temp;
}
}
- return NULL_TREE;
- }
-
- field = TYPE_FIELDS (type);
-
- for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
- {
- tree decl = field;
- if (DECL_DECLARES_FUNCTION_P (decl))
- /* Functions are kep separately, at the moment. */
+ if (DECL_NAME (decl) != name)
continue;
-
- gcc_assert (DECL_P (field));
- if (DECL_NAME (field) == NULL_TREE
- && ANON_AGGR_TYPE_P (TREE_TYPE (field)))
- {
- tree temp = lookup_field_1 (TREE_TYPE (field), name, want_type);
- if (temp)
- return temp;
- }
-
- if (TREE_CODE (decl) == USING_DECL
- && DECL_NAME (decl) == name)
+
+ if (TREE_CODE (decl) == USING_DECL)
{
decl = strip_using_decl (decl);
if (is_overloaded_fn (decl))
continue;
}
- if (DECL_NAME (decl) == name
- && (!want_type || DECL_DECLARES_TYPE_P (decl)))
+ if (DECL_DECLARES_FUNCTION_P (decl))
+ /* Functions are found separately. */
+ continue;
+
+ if (!want_type || DECL_DECLARES_TYPE_P (decl))
return decl;
}
- /* We used to special-case vptr_identifier. Make sure it's not
- special any more. */
- gcc_assert (name != vptr_identifier || !TYPE_VFIELD (type));
-
return NULL_TREE;
}
bool conv_op = IDENTIFIER_CONV_OP_P (name);
tree lookup = conv_op ? conv_op_identifier : name;
tree val = NULL_TREE;
+ vec<tree, va_gc> *method_vec = CLASSTYPE_METHOD_VEC (klass);
- if (type_or_fns > 0)
- /* User wants type. Don't look in method_vec. */;
- else if (vec<tree, va_gc> *method_vec = CLASSTYPE_METHOD_VEC (klass))
+ if (COMPLETE_TYPE_P (klass) && method_vec)
{
- if (COMPLETE_TYPE_P (klass))
- val = method_vec_binary_search (method_vec, lookup);
- else
- val = method_vec_linear_search (method_vec, lookup);
+ val = method_vec_binary_search (method_vec, lookup);
+ if (!val)
+ ;
+ else if (type_or_fns > 0)
+ {
+ if (STAT_HACK_P (val))
+ val = STAT_TYPE (val);
+ else if (!DECL_DECLARES_TYPE_P (val))
+ val = NULL_TREE;
+ }
+ else if (STAT_HACK_P (val))
+ val = STAT_DECL (val);
+
+ if (val && TREE_CODE (val) == OVERLOAD
+ && TREE_CODE (OVL_FUNCTION (val)) == USING_DECL)
+ {
+ /* An overload with a dependent USING_DECL. Does the caller
+ want the USING_DECL or the functions? */
+ if (type_or_fns < 0)
+ val = OVL_CHAIN (val);
+ else
+ val = OVL_FUNCTION (val);
+ }
}
+ else
+ {
+ if (method_vec && type_or_fns <= 0)
+ val = method_vec_linear_search (method_vec, lookup);
- if (type_or_fns < 0)
- /* User wants functions. Don't look for a field. */;
- else if (!val || (TREE_CODE (val) == OVERLOAD && OVL_USING_P (val)))
- /* Dependent using declarations are a 'field', make sure we
- return that even if we saw an overload already. */
- if (tree field_val = lookup_field_1 (klass, lookup, type_or_fns > 0))
- if (!val || TREE_CODE (field_val) == USING_DECL)
- val = field_val;
+ if (type_or_fns < 0)
+	/* Don't bother looking for a field.  We don't want it.  */;
+ else if (!val || (TREE_CODE (val) == OVERLOAD && OVL_USING_P (val)))
+ /* Dependent using declarations are a 'field', make sure we
+ return that even if we saw an overload already. */
+ if (tree field_val = fields_linear_search (klass, lookup,
+ type_or_fns > 0))
+ if (!val || TREE_CODE (field_val) == USING_DECL)
+ val = field_val;
+ }
/* Extract the conversion operators asked for, unless the general
conversion operator was requested. */
{
vec_alloc (method_vec, 8);
CLASSTYPE_METHOD_VEC (klass) = method_vec;
+ if (complete_p)
+ {
+ /* If the class is complete but had no method_vec, we need
+ to add the TYPE_FIELDS into it. We're also most likely
+ to be adding ctors & dtors, so ask for 6 spare slots (the
+ abstract cdtors and their clones). */
+ set_class_bindings (klass, 6);
+ method_vec = CLASSTYPE_METHOD_VEC (klass);
+ }
}
if (IDENTIFIER_CONV_OP_P (name))
if (fn_name == name)
{
+ /* If we found an existing slot, it must be a function set.
+ Even with insertion after completion, because those only
+ happen with artificial fns that have unspellable names.
+ This means we do not have to deal with the stat hack
+ either. */
+ gcc_checking_assert (OVL_P (*slot));
if (name == conv_op_identifier)
{
gcc_checking_assert (OVL_FUNCTION (*slot) == conv_op_marker);
}
/* Comparison function to compare two TYPE_METHOD_VEC entries by
- name. */
+ name. Because we can have duplicates during insertion of
+ TYPE_FIELDS, we do extra checking so deduping doesn't have to deal
+ with so many cases. */
static int
method_name_cmp (const void *a_p, const void *b_p)
tree name_a = DECL_NAME (TREE_CODE (a) == OVERLOAD ? OVL_FUNCTION (a) : a);
tree name_b = DECL_NAME (TREE_CODE (b) == OVERLOAD ? OVL_FUNCTION (b) : b);
- gcc_checking_assert (name_a && name_b && name_a != name_b);
- return name_a < name_b ? -1 : +1;
+ gcc_checking_assert (name_a && name_b);
+ if (name_a != name_b)
+ return name_a < name_b ? -1 : +1;
+
+ if (name_a == conv_op_identifier)
+ {
+ /* Strip the conv-op markers. */
+ gcc_checking_assert (OVL_FUNCTION (a) == conv_op_marker
+ && OVL_FUNCTION (b) == conv_op_marker);
+ a = OVL_CHAIN (a);
+ b = OVL_CHAIN (b);
+ }
+
+ if (TREE_CODE (a) == OVERLOAD)
+ a = OVL_FUNCTION (a);
+ if (TREE_CODE (b) == OVERLOAD)
+ b = OVL_FUNCTION (b);
+
+ /* We're in STAT_HACK or USING_DECL territory (or possibly error-land). */
+ if (TREE_CODE (a) == TREE_CODE (b))
+ /* We can get two TYPE_DECLs or two USING_DECLs. Place in source
+ order. */
+ return DECL_SOURCE_LOCATION (a) < DECL_SOURCE_LOCATION (b) ? -1 : +1;
+
+ /* If one of them is a TYPE_DECL, it loses. */
+ if (TREE_CODE (a) == TYPE_DECL)
+ return +1;
+ else if (TREE_CODE (b) == TYPE_DECL)
+ return -1;
+
+ /* If one of them is a USING_DECL, it loses. */
+ if (TREE_CODE (a) == USING_DECL)
+ return +1;
+ else if (TREE_CODE (b) == USING_DECL)
+ return -1;
+
+ /* There are no other cases, as duplicate detection should have
+     kicked in earlier.  However, some erroneous cases get through.
+ Order by source location. We should really prevent this
+ happening. */
+ gcc_assert (errorcount);
+ return DECL_SOURCE_LOCATION (a) < DECL_SOURCE_LOCATION (b) ? -1 : +1;
}
static struct {
}
}
-/* Allocate and return an instance of struct sorted_fields_type with
- N fields. */
-
-static struct sorted_fields_type *
-sorted_fields_type_new (int n)
-{
- struct sorted_fields_type *sft;
- sft = (sorted_fields_type *) ggc_internal_alloc (sizeof (sorted_fields_type)
- + n * sizeof (tree));
- sft->len = n;
-
- return sft;
-}
-
/* Recursively count the number of fields in KLASS, including anonymous
union members. */
return n_fields;
}
-/* Append all the nonfunction members fields of KLASS to FIELD_VEC
- starting at IDX. Recurse for anonymous members. The array must
- have space. Returns the next available index. */
+/* Append all the non-function member fields of KLASS to METHOD_VEC.
+ Recurse for anonymous members. METHOD_VEC must have space. */
-static unsigned
-field_vec_append_class_fields (struct sorted_fields_type *field_vec,
- tree klass, unsigned idx)
+static void
+method_vec_append_class_fields (vec<tree, va_gc> *method_vec, tree klass)
{
for (tree fields = TYPE_FIELDS (klass); fields; fields = DECL_CHAIN (fields))
if (DECL_DECLARES_FUNCTION_P (fields))
/* Functions are handled separately. */;
else if (TREE_CODE (fields) == FIELD_DECL
&& ANON_AGGR_TYPE_P (TREE_TYPE (fields)))
- idx = field_vec_append_class_fields (field_vec, TREE_TYPE (fields), idx);
+ method_vec_append_class_fields (method_vec, TREE_TYPE (fields));
else if (DECL_NAME (fields))
- field_vec->elts[idx++] = fields;
-
- return idx;
+ {
+ tree field = fields;
+ /* Mark a conv-op USING_DECL with the conv-op-marker. */
+ if (TREE_CODE (field) == USING_DECL
+ && IDENTIFIER_CONV_OP_P (DECL_NAME (field)))
+ field = ovl_make (conv_op_marker, field);
+ method_vec->quick_push (field);
+ }
}
-/* Append all of the enum values of ENUMTYPE to FIELD_VEC starting at IDX.
- FIELD_VEC must have space. */
+/* Append all of the enum values of ENUMTYPE to METHOD_VEC.
+ METHOD_VEC must have space. */
-static unsigned
-field_vec_append_enum_values (struct sorted_fields_type *field_vec,
- tree enumtype, unsigned idx)
+static void
+method_vec_append_enum_values (vec<tree, va_gc> *method_vec, tree enumtype)
{
for (tree values = TYPE_VALUES (enumtype);
values; values = TREE_CHAIN (values))
- field_vec->elts[idx++] = TREE_VALUE (values);
+ method_vec->quick_push (TREE_VALUE (values));
+}
+
+/* METHOD_VEC has just had new DECLs added to it and been re-sorted.
+   DeDup adjacent DECLs of the same name.  We already dealt with
+   conflict resolution when adding the fields or methods themselves.
+   There are three cases (which could all be combined):
+   1) a TYPE_DECL and a non-TYPE_DECL.  Deploy STAT_HACK as appropriate.
+   2) a USING_DECL and an overload.  If the USING_DECL is dependent,
+   it wins.  Otherwise the OVERLOAD does.
+   3) two USING_DECLs; the one sorted later (by source location, see
+   method_name_cmp) wins.
+
+   method_name_cmp will have ordered duplicates as
+   <fns><using><type>  */
+
+static void
+method_vec_dedup (vec<tree, va_gc> *method_vec)
+{
+ unsigned len = method_vec->length ();
+ unsigned store = 0;
+
+ tree current = (*method_vec)[0], name = OVL_NAME (current);
+ tree next = NULL_TREE, next_name = NULL_TREE;
+ for (unsigned jx, ix = 0; ix < len;
+ ix = jx, current = next, name = next_name)
+ {
+ tree to_type = NULL_TREE;
+ tree to_using = NULL_TREE;
+ tree marker = NULL_TREE;
+ if (IDENTIFIER_CONV_OP_P (name))
+ {
+ marker = current;
+ current = OVL_CHAIN (current);
+ name = DECL_NAME (OVL_FUNCTION (marker));
+ gcc_checking_assert (name == conv_op_identifier);
+ }
+
+ if (TREE_CODE (current) == USING_DECL)
+ {
+ current = strip_using_decl (current);
+ if (is_overloaded_fn (current))
+ current = NULL_TREE;
+ else if (TREE_CODE (current) == USING_DECL)
+ {
+ to_using = current;
+ current = NULL_TREE;
+ }
+ }
+
+ if (current && DECL_DECLARES_TYPE_P (current))
+ {
+ to_type = current;
+ current = NULL_TREE;
+ }
+
+ for (jx = ix + 1; jx < len; jx++)
+ {
+ next = (*method_vec)[jx];
+ next_name = OVL_NAME (next);
+ if (next_name != name)
+ break;
+
+ if (marker)
+ {
+ gcc_checking_assert (OVL_FUNCTION (marker)
+ == OVL_FUNCTION (next));
+ next = OVL_CHAIN (next);
+ }
- return idx;
+ if (TREE_CODE (next) == USING_DECL)
+ {
+ next = strip_using_decl (next);
+ if (is_overloaded_fn (next))
+ next = NULL_TREE;
+ else if (TREE_CODE (next) == USING_DECL)
+ {
+ to_using = next;
+ next = NULL_TREE;
+ }
+ }
+
+ if (next && DECL_DECLARES_TYPE_P (next))
+ to_type = next;
+ }
+
+ if (to_using)
+ {
+ if (!current)
+ current = to_using;
+ else
+ current = ovl_make (to_using, current);
+ }
+
+ if (to_type)
+ {
+ if (!current)
+ current = to_type;
+ else
+ current = stat_hack (current, to_type);
+ }
+
+ gcc_assert (current);
+ if (marker)
+ {
+ OVL_CHAIN (marker) = current;
+ current = marker;
+ }
+ (*method_vec)[store++] = current;
+ }
+
+ while (store++ < len)
+ method_vec->pop ();
}
-/* Insert FIELDS into KLASS for the sorted case if the FIELDS count is
- big enough. Sort the METHOD_VEC too. */
+/* Add the non-function members to CLASSTYPE_METHOD_VEC.  If there is
+   no existing METHOD_VEC and fewer than 8 fields, do nothing.  We
+   know there must be at least 1 field -- the self-reference
+   TYPE_DECL -- except for anon aggregates, which instead contain at
+   least one field of their own.  */
void
-set_class_bindings (tree klass)
+set_class_bindings (tree klass, unsigned extra)
{
- if (vec<tree, va_gc> *method_vec = CLASSTYPE_METHOD_VEC (klass))
- qsort (method_vec->address (), method_vec->length (),
- sizeof (tree), method_name_cmp);
+ unsigned n_fields = count_class_fields (klass);
+ vec<tree, va_gc> *method_vec = CLASSTYPE_METHOD_VEC (klass);
- int n_fields = count_class_fields (klass);
- if (n_fields >= 8)
+ if (method_vec || n_fields >= 8)
{
- struct sorted_fields_type *field_vec = sorted_fields_type_new (n_fields);
- unsigned idx = field_vec_append_class_fields (field_vec, klass, 0);
- gcc_assert (idx == unsigned (field_vec->len));
- qsort (field_vec->elts, n_fields, sizeof (tree), field_decl_cmp);
- CLASSTYPE_SORTED_FIELDS (klass) = field_vec;
+ /* Append the new fields. */
+ vec_safe_reserve_exact (method_vec, extra + n_fields);
+ method_vec_append_class_fields (method_vec, klass);
+ }
+
+ if (method_vec)
+ {
+ CLASSTYPE_METHOD_VEC (klass) = method_vec;
+ qsort (method_vec->address (), method_vec->length (),
+ sizeof (tree), method_name_cmp);
+ method_vec_dedup (method_vec);
}
}
void
insert_late_enum_def_bindings (tree klass, tree enumtype)
{
- unsigned n_fields;
- struct sorted_fields_type *sorted_fields = CLASSTYPE_SORTED_FIELDS (klass);
+ int n_fields;
+ vec<tree, va_gc> *method_vec = CLASSTYPE_METHOD_VEC (klass);
- /* The enum values will already be on the TYPE_FIELDS, so don't
+ /* The enum bindings will already be on the TYPE_FIELDS, so don't
count them twice. */
- if (sorted_fields)
- n_fields = list_length (TYPE_VALUES (enumtype)) + sorted_fields->len;
- else
+ if (!method_vec)
n_fields = count_class_fields (klass);
+ else
+ n_fields = list_length (TYPE_VALUES (enumtype));
- if (n_fields >= 8)
+ if (method_vec || n_fields >= 8)
{
- struct sorted_fields_type *field_vec = sorted_fields_type_new (n_fields);
- unsigned idx;
-
- if (sorted_fields)
- {
- for (idx = 0; idx < unsigned (sorted_fields->len); ++idx)
- field_vec->elts[idx] = sorted_fields->elts[idx];
-
- idx = field_vec_append_enum_values (field_vec, enumtype, idx);
- }
+ vec_safe_reserve_exact (method_vec, n_fields);
+ if (CLASSTYPE_METHOD_VEC (klass))
+ method_vec_append_enum_values (method_vec, enumtype);
else
- idx = field_vec_append_class_fields (field_vec, klass, 0);
- gcc_assert (idx == unsigned (field_vec->len));
- qsort (field_vec->elts, n_fields, sizeof (tree), field_decl_cmp);
- CLASSTYPE_SORTED_FIELDS (klass) = field_vec;
+ method_vec_append_class_fields (method_vec, klass);
+ CLASSTYPE_METHOD_VEC (klass) = method_vec;
+ qsort (method_vec->address (), method_vec->length (),
+ sizeof (tree), method_name_cmp);
+ method_vec_dedup (method_vec);
}
}