From 2547ac65eb3447ffce4560af4eb8d2b5ec13d052 Mon Sep 17 00:00:00 2001
From: Dmitry Vyukov <dvyukov@google.com>
Date: Thu, 20 Dec 2012 17:29:34 +0000
Subject: [PATCH] tsan: java interface implementation skeleton

llvm-svn: 170707
---
 compiler-rt/lib/tsan/rtl/tsan_interface_java.cc | 231 +++++++++++++++++++++++-
 compiler-rt/lib/tsan/rtl/tsan_mutex.h           |   1 +
 compiler-rt/lib/tsan/rtl/tsan_rtl.h             |   4 +
 compiler-rt/lib/tsan/rtl/tsan_stat.h            |   1 +
 compiler-rt/lib/tsan/rtl/tsan_sync.cc           |  36 ++--
 compiler-rt/lib/tsan/rtl/tsan_sync.h            |   2 +
 6 files changed, 261 insertions(+), 14 deletions(-)

diff --git a/compiler-rt/lib/tsan/rtl/tsan_interface_java.cc b/compiler-rt/lib/tsan/rtl/tsan_interface_java.cc
index 72b31b7..3ac6bbf 100644
--- a/compiler-rt/lib/tsan/rtl/tsan_interface_java.cc
+++ b/compiler-rt/lib/tsan/rtl/tsan_interface_java.cc
@@ -13,34 +13,261 @@
 #include "tsan_interface_java.h"
 #include "tsan_rtl.h"
+#include "tsan_mutex.h"
+#include "sanitizer_common/sanitizer_internal_defs.h"
+#include "sanitizer_common/sanitizer_common.h"
+#include "sanitizer_common/sanitizer_placement_new.h"
 
 using namespace __tsan;  // NOLINT
 
+namespace __tsan {
+
+const uptr kHeapShadow = 0x300000000000ull;
+const uptr kHeapAlignment = 8;
+
+struct BlockDesc {
+  bool begin;
+  Mutex mtx;
+  SyncVar *head;
+
+  BlockDesc()
+      : mtx(MutexTypeJava, StatMtxJava)
+      , head() {
+    CHECK_EQ(begin, false);
+    begin = true;
+  }
+
+  explicit BlockDesc(BlockDesc *b)
+      : mtx(MutexTypeJava, StatMtxJava)
+      , head(b->head) {
+    CHECK_EQ(begin, false);
+    begin = true;
+    b->head = 0;
+  }
+
+  ~BlockDesc() {
+    CHECK_EQ(begin, true);
+    begin = false;
+    ThreadState *thr = cur_thread();
+    SyncVar *s = head;
+    while (s) {
+      SyncVar *s1 = s->next;
+      StatInc(thr, StatSyncDestroyed);
+      s->mtx.Lock();
+      s->mtx.Unlock();
+      thr->mset.Remove(s->GetId());
+      DestroyAndFree(s);
+      s = s1;
+    }
+  }
+};
+
+struct JavaContext {
+  Mutex mtx;
+  const uptr heap_begin;
+  const uptr heap_size;
+  BlockDesc *heap_shadow;
+
+  JavaContext(jptr heap_begin, jptr heap_size)
+      : mtx(MutexTypeJava, StatMtxJava)
+      , heap_begin(heap_begin)
+      , heap_size(heap_size) {
+    uptr size = heap_size / kHeapAlignment * sizeof(BlockDesc);
+    heap_shadow = (BlockDesc*)MmapFixedNoReserve(kHeapShadow, size);
+    if ((uptr)heap_shadow != kHeapShadow) {
+      Printf("ThreadSanitizer: failed to mmap Java heap shadow\n");
+      Die();
+    }
+  }
+};
+
+class ScopedJavaFunc {
+ public:
+  ScopedJavaFunc(ThreadState *thr, uptr pc)
+      : thr_(thr) {
+    Initialize(thr_);
+    FuncEntry(thr, pc);
+    CHECK_EQ(thr_->in_rtl, 0);
+    thr_->in_rtl++;
+  }
+
+  ~ScopedJavaFunc() {
+    thr_->in_rtl--;
+    CHECK_EQ(thr_->in_rtl, 0);
+    FuncExit(thr_);
+    // FIXME(dvyukov): process pending signals.
+  }
+
+ private:
+  ThreadState *thr_;
+};
+
+static u64 jctx_buf[sizeof(JavaContext) / sizeof(u64) + 1];
+static JavaContext *jctx;
+
+static BlockDesc *getblock(uptr addr) {
+  uptr i = (addr - jctx->heap_begin) / kHeapAlignment;
+  return &jctx->heap_shadow[i];
+}
+
+static BlockDesc *getblockbegin(uptr addr) {
+  for (BlockDesc *b = getblock(addr);; b--) {
+    CHECK_GE(b, jctx->heap_shadow);
+    if (b->begin)
+      return b;
+  }
+  return 0;
+}
+
+SyncVar* GetJavaSync(ThreadState *thr, uptr pc, uptr addr,
+                     bool write_lock, bool create) {
+  if (jctx == 0 || addr < jctx->heap_begin
+      || addr >= jctx->heap_begin + jctx->heap_size)
+    return 0;
+  BlockDesc *b = getblockbegin(addr);
+  Lock l(&b->mtx);
+  SyncVar *s = b->head;
+  for (; s; s = s->next) {
+    if (s->addr == addr)
+      break;
+  }
+  if (s == 0 && create) {
+    s = CTX()->synctab.Create(thr, pc, addr);
+    s->next = b->head;
+    b->head = s;
+  }
+  if (s) {
+    if (write_lock)
+      s->mtx.Lock();
+    else
+      s->mtx.ReadLock();
+  }
+  return s;
+}
+
+SyncVar* GetAndRemoveJavaSync(ThreadState *thr, uptr pc, uptr addr) {
+  // We do not destroy Java mutexes other than in __tsan_java_free().
+  return 0;
+}
+
+}  // namespace __tsan
+
+#define SCOPED_JAVA_FUNC(func) \
+  ThreadState *thr = cur_thread(); \
+  const uptr caller_pc = GET_CALLER_PC(); \
+  const uptr pc = (uptr)&func; \
+  (void)pc; \
+  ScopedJavaFunc scoped(thr, caller_pc); \
+/**/
+
 void __tsan_java_init(jptr heap_begin, jptr heap_size) {
+  SCOPED_JAVA_FUNC(__tsan_java_init);
+  DPrintf("#%d: java_init(%p, %p)\n", thr->tid, heap_begin, heap_size);
+  CHECK_EQ(jctx, 0);
+  CHECK_GT(heap_begin, 0);
+  CHECK_GT(heap_size, 0);
+  CHECK_LT(heap_begin, heap_begin + heap_size);
+  jctx = new(jctx_buf) JavaContext(heap_begin, heap_size);
 }
 
 int __tsan_java_fini() {
-  return 0;
+  SCOPED_JAVA_FUNC(__tsan_java_fini);
+  DPrintf("#%d: java_fini()\n", thr->tid);
+  CHECK_NE(jctx, 0);
+  // FIXME(dvyukov): this does not call atexit() callbacks.
+  int status = Finalize(thr);
+  DPrintf("#%d: java_fini() = %d\n", thr->tid, status);
+  return status;
 }
 
 void __tsan_java_alloc(jptr ptr, jptr size) {
+  SCOPED_JAVA_FUNC(__tsan_java_alloc);
+  DPrintf("#%d: java_alloc(%p, %p)\n", thr->tid, ptr, size);
+  CHECK_NE(jctx, 0);
+  CHECK_NE(size, 0);
+  CHECK_GE(ptr, jctx->heap_begin);
+  CHECK_LE(ptr + size, jctx->heap_begin + jctx->heap_size);
+
+  BlockDesc *b = getblock(ptr);
+  new(b) BlockDesc();
 }
 
 void __tsan_java_free(jptr ptr, jptr size) {
+  SCOPED_JAVA_FUNC(__tsan_java_free);
+  DPrintf("#%d: java_free(%p, %p)\n", thr->tid, ptr, size);
+  CHECK_NE(jctx, 0);
+  CHECK_NE(size, 0);
+  CHECK_GE(ptr, jctx->heap_begin);
+  CHECK_LE(ptr + size, jctx->heap_begin + jctx->heap_size);
+
+  BlockDesc *beg = getblock(ptr);
+  BlockDesc *end = getblock(ptr + size);
+  for (BlockDesc *b = beg; b != end; b++) {
+    if (b->begin)
+      b->~BlockDesc();
+  }
 }
 
 void __tsan_java_move(jptr src, jptr dst, jptr size) {
+  SCOPED_JAVA_FUNC(__tsan_java_move);
+  DPrintf("#%d: java_move(%p, %p, %p)\n", thr->tid, src, dst, size);
+  CHECK_NE(jctx, 0);
+  CHECK_NE(size, 0);
+  CHECK_GE(src, jctx->heap_begin);
+  CHECK_LE(src + size, jctx->heap_begin + jctx->heap_size);
+  CHECK_GE(dst, jctx->heap_begin);
+  CHECK_LE(dst + size, jctx->heap_begin + jctx->heap_size);
+  CHECK(dst >= src + size || src >= dst + size);
+
+  // Assuming it's not running concurrently with threads that do
+  // memory accesses and mutex operations (stop-the-world phase).
+  BlockDesc *srcbeg = getblock(src);
+  BlockDesc *dstbeg = getblock(dst);
+  BlockDesc *srcend = getblock(src + size);
+  for (BlockDesc *s = srcbeg, *d = dstbeg; s != srcend; s++, d++) {
+    if (s->begin) {
+      new(d) BlockDesc(s);
+      s->~BlockDesc();
+    }
+  }
 }
 
 void __tsan_java_mutex_lock(jptr addr) {
+  SCOPED_JAVA_FUNC(__tsan_java_mutex_lock);
+  DPrintf("#%d: java_mutex_lock(%p)\n", thr->tid, addr);
+  CHECK_NE(jctx, 0);
+  CHECK_GE(addr, jctx->heap_begin);
+  CHECK_LT(addr, jctx->heap_begin + jctx->heap_size);
+
+  MutexLock(thr, pc, addr);
 }
 
 void __tsan_java_mutex_unlock(jptr addr) {
+  SCOPED_JAVA_FUNC(__tsan_java_mutex_unlock);
+  DPrintf("#%d: java_mutex_unlock(%p)\n", thr->tid, addr);
+  CHECK_NE(jctx, 0);
+  CHECK_GE(addr, jctx->heap_begin);
+  CHECK_LT(addr, jctx->heap_begin + jctx->heap_size);
+
+  MutexUnlock(thr, pc, addr);
 }
 
 void __tsan_java_mutex_read_lock(jptr addr) {
+  SCOPED_JAVA_FUNC(__tsan_java_mutex_read_lock);
+  DPrintf("#%d: java_mutex_read_lock(%p)\n", thr->tid, addr);
+  CHECK_NE(jctx, 0);
+  CHECK_GE(addr, jctx->heap_begin);
+  CHECK_LT(addr, jctx->heap_begin + jctx->heap_size);
+
+  MutexReadLock(thr, pc, addr);
 }
 
 void __tsan_java_mutex_read_unlock(jptr addr) {
-}
+  SCOPED_JAVA_FUNC(__tsan_java_mutex_read_unlock);
+  DPrintf("#%d: java_mutex_read_unlock(%p)\n", thr->tid, addr);
+  CHECK_NE(jctx, 0);
+  CHECK_GE(addr, jctx->heap_begin);
+  CHECK_LT(addr, jctx->heap_begin + jctx->heap_size);
+
+  MutexReadUnlock(thr, pc, addr);
+}
diff --git a/compiler-rt/lib/tsan/rtl/tsan_mutex.h b/compiler-rt/lib/tsan/rtl/tsan_mutex.h
index 68b33a7..3a03cfb 100644
--- a/compiler-rt/lib/tsan/rtl/tsan_mutex.h
+++ b/compiler-rt/lib/tsan/rtl/tsan_mutex.h
@@ -30,6 +30,7 @@ enum MutexType {
   MutexTypeAnnotations,
   MutexTypeAtExit,
   MutexTypeMBlock,
+  MutexTypeJava,
 
   // This must be the last.
   MutexTypeCount
diff --git a/compiler-rt/lib/tsan/rtl/tsan_rtl.h b/compiler-rt/lib/tsan/rtl/tsan_rtl.h
index 0713a3f..2a720cb 100644
--- a/compiler-rt/lib/tsan/rtl/tsan_rtl.h
+++ b/compiler-rt/lib/tsan/rtl/tsan_rtl.h
@@ -515,6 +515,10 @@ void PrintCurrentStack(ThreadState *thr, uptr pc);
 void Initialize(ThreadState *thr);
 int Finalize(ThreadState *thr);
 
+SyncVar* GetJavaSync(ThreadState *thr, uptr pc, uptr addr,
+                     bool write_lock, bool create);
+SyncVar* GetAndRemoveJavaSync(ThreadState *thr, uptr pc, uptr addr);
+
 void MemoryAccess(ThreadState *thr, uptr pc, uptr addr,
     int kAccessSizeLog, bool kAccessIsWrite);
 void MemoryAccessImpl(ThreadState *thr, uptr addr,
diff --git a/compiler-rt/lib/tsan/rtl/tsan_stat.h b/compiler-rt/lib/tsan/rtl/tsan_stat.h
index 9c3c95a..e6bb628 100644
--- a/compiler-rt/lib/tsan/rtl/tsan_stat.h
+++ b/compiler-rt/lib/tsan/rtl/tsan_stat.h
@@ -281,6 +281,7 @@ enum StatType {
   StatMtxAnnotations,
   StatMtxAtExit,
   StatMtxMBlock,
+  StatMtxJava,
 
   // This must be the last.
   StatCnt
diff --git a/compiler-rt/lib/tsan/rtl/tsan_sync.cc b/compiler-rt/lib/tsan/rtl/tsan_sync.cc
index 8951325..b25346e 100644
--- a/compiler-rt/lib/tsan/rtl/tsan_sync.cc
+++ b/compiler-rt/lib/tsan/rtl/tsan_sync.cc
@@ -57,9 +57,26 @@ SyncVar* SyncTab::GetIfExistsAndLock(uptr addr, bool write_lock) {
   return GetAndLock(0, 0, addr, write_lock, false);
 }
 
+SyncVar* SyncTab::Create(ThreadState *thr, uptr pc, uptr addr) {
+  StatInc(thr, StatSyncCreated);
+  void *mem = internal_alloc(MBlockSync, sizeof(SyncVar));
+  const u64 uid = atomic_fetch_add(&uid_gen_, 1, memory_order_relaxed);
+  SyncVar *res = new(mem) SyncVar(addr, uid);
+#ifndef TSAN_GO
+  res->creation_stack.ObtainCurrent(thr, pc);
+#endif
+  return res;
+}
+
 SyncVar* SyncTab::GetAndLock(ThreadState *thr, uptr pc,
                              uptr addr, bool write_lock, bool create) {
 #ifndef TSAN_GO
+  {  // NOLINT
+    SyncVar *res = GetJavaSync(thr, pc, addr, write_lock, create);
+    if (res)
+      return res;
+  }
+
   // Here we ask only PrimaryAllocator, because
   // SecondaryAllocator::PointerIsMine() is slow and we have fallback on
   // the hashmap anyway.
@@ -74,11 +91,7 @@ SyncVar* SyncTab::GetAndLock(ThreadState *thr, uptr pc,
   if (res == 0) {
     if (!create)
       return 0;
-    StatInc(thr, StatSyncCreated);
-    void *mem = internal_alloc(MBlockSync, sizeof(SyncVar));
-    const u64 uid = atomic_fetch_add(&uid_gen_, 1, memory_order_relaxed);
-    res = new(mem) SyncVar(addr, uid);
-    res->creation_stack.ObtainCurrent(thr, pc);
+    res = Create(thr, pc, addr);
     res->next = b->head;
     b->head = res;
   }
@@ -113,13 +126,7 @@ SyncVar* SyncTab::GetAndLock(ThreadState *thr, uptr pc,
       break;
   }
   if (res == 0) {
-    StatInc(thr, StatSyncCreated);
-    void *mem = internal_alloc(MBlockSync, sizeof(SyncVar));
-    const u64 uid = atomic_fetch_add(&uid_gen_, 1, memory_order_relaxed);
-    res = new(mem) SyncVar(addr, uid);
-#ifndef TSAN_GO
-    res->creation_stack.ObtainCurrent(thr, pc);
-#endif
+    res = Create(thr, pc, addr);
     res->next = p->val;
     p->val = res;
   }
@@ -133,6 +140,11 @@ SyncVar* SyncTab::GetAndLock(ThreadState *thr, uptr pc,
 
 SyncVar* SyncTab::GetAndRemove(ThreadState *thr, uptr pc, uptr addr) {
 #ifndef TSAN_GO
+  {  // NOLINT
+    SyncVar *res = GetAndRemoveJavaSync(thr, pc, addr);
+    if (res)
+      return res;
+  }
   if (PrimaryAllocator::PointerIsMine((void*)addr)) {
     MBlock *b = user_mblock(thr, (void*)addr);
     SyncVar *res = 0;
diff --git a/compiler-rt/lib/tsan/rtl/tsan_sync.h b/compiler-rt/lib/tsan/rtl/tsan_sync.h
index 34ea55b..f00038d 100644
--- a/compiler-rt/lib/tsan/rtl/tsan_sync.h
+++ b/compiler-rt/lib/tsan/rtl/tsan_sync.h
@@ -96,6 +96,8 @@ class SyncTab {
   // If the SyncVar does not exist, returns 0.
   SyncVar* GetAndRemove(ThreadState *thr, uptr pc, uptr addr);
 
+  SyncVar* Create(ThreadState *thr, uptr pc, uptr addr);
+
   uptr GetMemoryConsumption(uptr *nsync);
 
  private:
-- 
2.7.4
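
Usage sketch (not part of the commit): the entry points above assume a managed
runtime that reports its heap bounds once at startup, then reports every object
allocation, move, and free, and routes monitor operations by object address.
Below is a minimal sketch of such a caller under those assumptions; the heap
size, object addresses, and object sizes are made-up placeholders, and a plain
mmap region stands in for a real Java heap.

#include <sys/mman.h>
#include "tsan_interface_java.h"

int main() {
  const jptr kHeapSize = 1 << 20;
  // Reserve the "Java heap"; tsan only needs its bounds up front.
  void *heap = mmap(0, kHeapSize, PROT_READ | PROT_WRITE,
                    MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
  const jptr heap_begin = (jptr)heap;
  __tsan_java_init(heap_begin, kHeapSize);

  // Each object placement is reported before the object is used.
  const jptr obj = heap_begin;  // hypothetical 64-byte object
  __tsan_java_alloc(obj, 64);

  // Monitor enter/exit map onto the mutex entry points; the SyncVar is
  // created lazily by GetJavaSync() on first use.
  __tsan_java_mutex_lock(obj);
  __tsan_java_mutex_unlock(obj);

  // A moving collector relocates the object during stop-the-world, which
  // transfers the block descriptor together with its sync objects.
  const jptr obj2 = heap_begin + 4096;
  __tsan_java_move(obj, obj2, 64);

  // Freeing runs the BlockDesc destructor, reclaiming attached SyncVars.
  __tsan_java_free(obj2, 64);
  return __tsan_java_fini();
}

Note the ordering constraint this sketch relies on: __tsan_java_move() and
__tsan_java_free() are only safe while application threads are stopped, per
the stop-the-world comment in __tsan_java_move() above.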