uptr size;
uptr stack[1]; // [size]

// Log2 of the depot hash-table size. Lower kTabSizeLog bits of the hash
// are equal for all items in one bucket, so the node reuses those bits
// of its id/hash word to store the per-stack use counter instead.
static const u32 kTabSizeLog = 20;
static const u32 kUseCountBits = kTabSizeLog;
static const u32 kMaxUseCount = 1 << kUseCountBits;
static const u32 kUseCountMask = (1 << kUseCountBits) - 1;
static const u32 kHashMask = ~kUseCountMask;
// Returns a pointer to the first frame of the stored trace; per the node
// layout above, the trace occupies node_->size entries.
uptr *StackDepotHandle::stack() { return &node_->stack[0]; }
// FIXME(dvyukov): this single reserved bit is used in TSan.
-typedef StackDepotBase<StackDepotNode, 1> StackDepot;
+typedef StackDepotBase<StackDepotNode, 1, StackDepotNode::kTabSizeLog>
+ StackDepot;
static StackDepot theDepot;
StackDepotStats *StackDepotGetStats() {
namespace __sanitizer {
-template <class Node, int kReservedBits>
+template <class Node, int kReservedBits, int kTabSizeLog>
class StackDepotBase {
public:
typedef typename Node::args_type args_type;
static Node *lock(atomic_uintptr_t *p);
static void unlock(atomic_uintptr_t *p, Node *s);
- static const int kTabSize = 1024 * 1024; // Hash table size.
+ static const int kTabSize = 1 << kTabSizeLog; // Hash table size.
static const int kPartBits = 8;
static const int kPartShift = sizeof(u32) * 8 - kPartBits - kReservedBits;
static const int kPartCount =
friend class StackDepotReverseMap;
};
-template <class Node, int kReservedBits>
-Node *StackDepotBase<Node, kReservedBits>::find(Node *s, args_type args,
- u32 hash) {
+template <class Node, int kReservedBits, int kTabSizeLog>
+Node *StackDepotBase<Node, kReservedBits, kTabSizeLog>::find(Node *s,
+ args_type args,
+ u32 hash) {
// Searches linked list s for the stack, returns its id.
for (; s; s = s->link) {
if (s->eq(hash, args)) {
return 0;
}
-template <class Node, int kReservedBits>
-Node *StackDepotBase<Node, kReservedBits>::lock(atomic_uintptr_t *p) {
+template <class Node, int kReservedBits, int kTabSizeLog>
+Node *StackDepotBase<Node, kReservedBits, kTabSizeLog>::lock(
+ atomic_uintptr_t *p) {
// Uses the pointer lsb as mutex.
for (int i = 0;; i++) {
uptr cmp = atomic_load(p, memory_order_relaxed);
}
}
-template <class Node, int kReservedBits>
-void StackDepotBase<Node, kReservedBits>::unlock(atomic_uintptr_t *p, Node *s) {
+template <class Node, int kReservedBits, int kTabSizeLog>
+void StackDepotBase<Node, kReservedBits, kTabSizeLog>::unlock(
+ atomic_uintptr_t *p, Node *s) {
DCHECK_EQ((uptr)s & 1, 0);
atomic_store(p, (uptr)s, memory_order_release);
}
-template <class Node, int kReservedBits>
-typename StackDepotBase<Node, kReservedBits>::handle_type
-StackDepotBase<Node, kReservedBits>::Put(args_type args, bool *inserted) {
+template <class Node, int kReservedBits, int kTabSizeLog>
+typename StackDepotBase<Node, kReservedBits, kTabSizeLog>::handle_type
+StackDepotBase<Node, kReservedBits, kTabSizeLog>::Put(args_type args,
+ bool *inserted) {
if (inserted) *inserted = false;
if (!args.is_valid()) return handle_type();
uptr h = args.hash();
return s->get_handle();
}
-template <class Node, int kReservedBits>
-typename StackDepotBase<Node, kReservedBits>::args_type
-StackDepotBase<Node, kReservedBits>::Get(u32 id) {
+template <class Node, int kReservedBits, int kTabSizeLog>
+typename StackDepotBase<Node, kReservedBits, kTabSizeLog>::args_type
+StackDepotBase<Node, kReservedBits, kTabSizeLog>::Get(u32 id) {
if (id == 0) {
return args_type();
}
return args_type();
}
}  // namespace __sanitizer
#endif // SANITIZER_STACKDEPOTBASE_H