class cast_region;
class field_region;
class string_region;
+ class bit_range_region;
class region_model_manager;
struct model_merger;
class store_manager;
return reg;
}
+/* Return the region for accessing the range of bits BITS within PARENT,
+   interpreted as TYPE, creating it if necessary.  Instances are
+   consolidated: repeated requests with the same (parent, type, bits)
+   key yield the same region.  */
+
+const region *
+region_model_manager::get_bit_range (const region *parent, tree type,
+				     const bit_range &bits)
+{
+  gcc_assert (parent);
+
+  /* Reuse an existing region for this key, if any.  */
+  bit_range_region::key_t key (parent, type, bits);
+  bit_range_region *existing = m_bit_range_regions.get (key);
+  if (existing)
+    return existing;
+
+  /* None yet: create one and record it for future lookups.  */
+  bit_range_region *created
+    = new bit_range_region (alloc_region_id (), parent, type, bits);
+  m_bit_range_regions.put (key, created);
+  return created;
+}
+
/* If we see a tree code we don't know how to handle, rather than
ICE or generate bogus results, create a dummy region, and notify
CTXT so that it can mark the new state as being not properly
log_uniq_map (logger, show_objs, "frame_region", m_frame_regions);
log_uniq_map (logger, show_objs, "symbolic_region", m_symbolic_regions);
log_uniq_map (logger, show_objs, "string_region", m_string_map);
+ log_uniq_map (logger, show_objs, "bit_range_region", m_bit_range_regions);
logger->log (" # managed dynamic regions: %i",
m_managed_dynamic_regions.length ());
m_store_mgr.log_stats (logger, show_objs);
}
break;
+    /* Bit-field reference: <inner, number-of-bits, position-of-first-bit>.
+       Operands 1 and 2 are asserted to be INTEGER_CSTs here; presumably
+       earlier validation guarantees this for the exprs that reach this
+       point — TODO confirm against the caller.  */
+    case BIT_FIELD_REF:
+      {
+	tree inner_expr = TREE_OPERAND (expr, 0);
+	const region *inner_reg = get_lvalue (inner_expr, ctxt);
+	tree num_bits = TREE_OPERAND (expr, 1);
+	tree first_bit_offset = TREE_OPERAND (expr, 2);
+	gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
+	gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
+	/* Describe the access as a bit_range_region within INNER_REG.  */
+	bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
+			TREE_INT_CST_LOW (num_bits));
+	return m_mgr->get_bit_range (inner_reg, TREE_TYPE (expr), bits);
+      }
+      break;
+
case MEM_REF:
{
tree ptr = TREE_OPERAND (expr, 0);
function *fun);
const region *get_symbolic_region (const svalue *sval);
const string_region *get_region_for_string (tree string_cst);
+ const region *get_bit_range (const region *parent, tree type,
+ const bit_range &bits);
const region *
get_region_for_unexpected_tree_code (region_model_context *ctxt,
typedef hash_map<tree, string_region *> string_map_t;
string_map_t m_string_map;
+ consolidation_map<bit_range_region> m_bit_range_regions;
+
store_manager m_store_mgr;
bounded_ranges_manager *m_range_mgr;
case RK_ELEMENT:
case RK_OFFSET:
case RK_SIZED:
+ case RK_BIT_RANGE:
iter = iter->get_parent_region ();
continue;
case RK_CAST:
case RK_OFFSET:
case RK_SIZED:
case RK_CAST:
+ case RK_BIT_RANGE:
return false;
default:
}
continue;
+	case RK_BIT_RANGE:
+	  {
+	    /* Use the checked, greppable as_a<> (backed by the
+	       is_a_helper specialization for bit_range_region) rather
+	       than a C-style cast.  */
+	    const bit_range_region *bit_range_reg
+	      = as_a <const bit_range_region *> (iter_region);
+	    iter_region = iter_region->get_parent_region ();
+
+	    /* Fold the range's start bit into the running offset; if it
+	       isn't concrete, give up and report a symbolic offset
+	       within the parent.  */
+	    bit_offset_t rel_bit_offset;
+	    if (!bit_range_reg->get_relative_concrete_offset (&rel_bit_offset))
+	      return region_offset::make_symbolic (iter_region);
+	    accum_bit_offset += rel_bit_offset;
+	  }
+	  continue;
+
default:
return region_offset::make_concrete (iter_region, accum_bit_offset);
}
}
}
+/* class bit_range_region : public region.  */
+
+/* Implementation of region::dump_to_pp vfunc for bit_range_region.
+   Both forms print "NAME(PARENT, BITS)"; only the name differs, so
+   share one code path.  Use pp_string throughout (the original closed
+   the verbose form with pp_printf on a literal ")", which needlessly
+   runs format processing and was inconsistent with the simple form).  */
+
+void
+bit_range_region::dump_to_pp (pretty_printer *pp, bool simple) const
+{
+  pp_string (pp, simple ? "BIT_RANGE_REG(" : "bit_range_region(");
+  get_parent_region ()->dump_to_pp (pp, simple);
+  pp_string (pp, ", ");
+  m_bits.dump_to_pp (pp);
+  pp_string (pp, ")");
+}
+
+/* Implementation of region::get_byte_size vfunc for bit_range_region.
+   Fail if the range isn't a whole number of bytes; otherwise write the
+   byte count to *OUT and succeed.  */
+
+bool
+bit_range_region::get_byte_size (byte_size_t *out) const
+{
+  if (m_bits.m_size_in_bits % BITS_PER_UNIT != 0)
+    return false;
+  *out = m_bits.m_size_in_bits / BITS_PER_UNIT;
+  return true;
+}
+
+/* Implementation of region::get_bit_size vfunc for bit_range_region.
+   The size in bits is always concretely known (it's part of m_bits),
+   so this always succeeds.  */
+
+bool
+bit_range_region::get_bit_size (bit_size_t *out) const
+{
+  *out = m_bits.m_size_in_bits;
+  return true;
+}
+
+/* Implementation of region::get_byte_size_sval vfunc for
+   bit_range_region.  Return the byte count as an integer constant
+   svalue when the range is a whole number of bytes; otherwise the size
+   can't be expressed in bytes, so return an unknown svalue.  */
+
+const svalue *
+bit_range_region::get_byte_size_sval (region_model_manager *mgr) const
+{
+  if (m_bits.m_size_in_bits % BITS_PER_UNIT == 0)
+    {
+      HOST_WIDE_INT num_bytes
+	= m_bits.m_size_in_bits.to_shwi () / BITS_PER_UNIT;
+      return mgr->get_or_create_int_cst (size_type_node, num_bytes);
+    }
+  return mgr->get_or_create_unknown_svalue (size_type_node);
+}
+
+/* Implementation of region::get_relative_concrete_offset vfunc for
+   bit_range_region: the bit offset of the start of the range, relative
+   to the parent region.  This is always concrete, so this always
+   succeeds.  */
+
+bool
+bit_range_region::get_relative_concrete_offset (bit_offset_t *out) const
+{
+  *out = m_bits.get_start_bit_offset ();
+  return true;
+}
+
/* class unknown_region : public region. */
/* Implementation of region::dump_to_pp vfunc for unknown_region. */
RK_HEAP_ALLOCATED,
RK_ALLOCA,
RK_STRING,
+ RK_BIT_RANGE,
RK_UNKNOWN
};
heap_allocated_region (RK_HEAP_ALLOCATED)
alloca_region (RK_ALLOCA)
string_region (RK_STRING)
+ bit_range_region (RK_BIT_RANGE)
unknown_region (RK_UNKNOWN). */
/* Abstract base class for representing ways of accessing chunks of memory.
dyn_cast_cast_region () const { return NULL; }
virtual const string_region *
dyn_cast_string_region () const { return NULL; }
+ virtual const bit_range_region *
+ dyn_cast_bit_range_region () const { return NULL; }
virtual void accept (visitor *v) const;
namespace ana {
+/* A region for a specific range of bits within another region,
+   e.g. for handling BIT_FIELD_REF.  */
+
+class bit_range_region : public region
+{
+public:
+  /* A support class for uniquifying instances of bit_range_region.  */
+  struct key_t
+  {
+    key_t (const region *parent, tree type, const bit_range &bits)
+    : m_parent (parent), m_type (type), m_bits (bits)
+    {
+      gcc_assert (parent);
+    }
+
+    hashval_t hash () const
+    {
+      inchash::hash hstate;
+      hstate.add_ptr (m_parent);
+      hstate.add_ptr (m_type);
+      /* NOTE(review): this hashes the raw bytes of m_bits; assumes
+	 bit_range has no padding bytes that could differ between
+	 operator==-equal values — confirm against bit_range's
+	 definition.  */
+      hstate.add (&m_bits, sizeof (m_bits));
+      return hstate.end ();
+    }
+
+    bool operator== (const key_t &other) const
+    {
+      return (m_parent == other.m_parent
+	      && m_type == other.m_type
+	      && m_bits == other.m_bits);
+    }
+
+    /* m_parent doubles as the slot marker: NULL for "empty",
+       (const region *)1 for "deleted".  Neither collides with a live
+       key, since the ctor asserts m_parent is non-NULL.  */
+    void mark_deleted () { m_parent = reinterpret_cast<const region *> (1); }
+    void mark_empty () { m_parent = NULL; }
+    bool is_deleted () const
+    {
+      return m_parent == reinterpret_cast<const region *> (1);
+    }
+    bool is_empty () const { return m_parent == NULL; }
+
+    const region *m_parent;
+    tree m_type;
+    bit_range m_bits;
+  };
+
+  bit_range_region (unsigned id, const region *parent, tree type,
+		    const bit_range &bits)
+  : region (complexity (parent), id, parent, type),
+    m_bits (bits)
+  {}
+
+  const bit_range_region *
+  dyn_cast_bit_range_region () const FINAL OVERRIDE { return this; }
+
+  enum region_kind get_kind () const FINAL OVERRIDE { return RK_BIT_RANGE; }
+
+  void dump_to_pp (pretty_printer *pp, bool simple) const FINAL OVERRIDE;
+
+  /* The range of bits, relative to the start of the parent region
+     (see get_relative_concrete_offset).  */
+  const bit_range &get_bits () const { return m_bits; }
+
+  bool get_byte_size (byte_size_t *out) const FINAL OVERRIDE;
+  bool get_bit_size (bit_size_t *out) const FINAL OVERRIDE;
+  const svalue *get_byte_size_sval (region_model_manager *mgr) const FINAL OVERRIDE;
+  bool get_relative_concrete_offset (bit_offset_t *out) const FINAL OVERRIDE;
+
+private:
+  bit_range m_bits;
+};
+
+} // namespace ana
+
+/* Enable the is_a/as_a/dyn_cast machinery for bit_range_region: a
+   region is a bit_range_region iff its kind is RK_BIT_RANGE.  */
+
+template <>
+template <>
+inline bool
+is_a_helper <const bit_range_region *>::test (const region *reg)
+{
+  return reg->get_kind () == RK_BIT_RANGE;
+}
+
+/* Hash-table traits for bit_range_region::key_t, delegating to the
+   key's own hash/equality/slot-marking member functions.  An all-zero
+   key_t is a valid "empty" slot (m_parent == NULL), hence
+   empty_zero_p.  */
+
+template <> struct default_hash_traits<bit_range_region::key_t>
+: public member_function_hash_traits<bit_range_region::key_t>
+{
+  static const bool empty_zero_p = true;
+};
+
+namespace ana {
+
/* An unknown region, for handling unimplemented tree codes. */
class unknown_region : public region
--- /dev/null
+/* { dg-require-effective-target vect_int } */
+/* { dg-additional-options "-Wno-psabi" } */
+
+typedef __INT32_TYPE__ int32_t;
+typedef int32_t vnx4si __attribute__((vector_size (32)));
+
+extern void check_for_uninit (vnx4si v);
+
+/* All 8 lanes of V (32 bytes of int32_t) are written: 6 constants plus
+   A and B, so V is fully initialized when passed to check_for_uninit
+   (an undefined extern — presumably the analyzer test checks its
+   argument for uninitialized bits).  */
+
+void test_1a (vnx4si *out, int a, int b)
+{
+  vnx4si v = (vnx4si) { 1, 2, 3, 4, 5, 6, a, b };
+  check_for_uninit (v);
+}
+
+/* As test_1a, but passing the fully-initialized compound literal
+   directly rather than via a named local.  */
+
+void test_1b (vnx4si *out, int a, int b)
+{
+  check_for_uninit ((vnx4si) { 1, 2, 3, 4, 5, 6, a, b });
+}
+
+/* Write a fully-initialized vector through OUT.  The noipa attribute
+   prevents inlining/IPA, so test_2's analysis must reason about the
+   call rather than the inlined stores.  */
+
+static __attribute__((noipa)) void
+called_by_test_2 (vnx4si *out, int a, int b)
+{
+  *out = (vnx4si) { 1, 2, 3, 4, 5, 6, a, b };
+}
+
+/* V starts uninitialized but is filled in via &v by called_by_test_2
+   before being passed to check_for_uninit.  */
+
+void test_2 (vnx4si *out, int a, int b)
+{
+  vnx4si v;
+  called_by_test_2 (&v, a, b);
+  check_for_uninit (v);
+}