./tools/gyp/gyp ./lwnode/code/escargotshim/test/cctest.gyp --depth=. -f ninja \
--generator-output=$ROOT_PATH -Dbuild_asan=1 -Dbuild_mode=debug \
- -Descargot_lib_type=static_lib -Denable_experimental=true
+ -Descargot_lib_type=static_lib -Denable_experimental=true -Dtarget_arch=x64
ninja -v -C $ROOT_PATH/out/Debug cctest
ENDIF()
IF (ESCARGOT_THREADING)
- SET (ESCARGOT_DEFINITIONS ${ESCARGOT_DEFINITIONS} -DENABLE_THREADING)
+ SET (ESCARGOT_DEFINITIONS ${ESCARGOT_DEFINITIONS} -DENABLE_THREADING -DGC_THREAD_ISOLATE)
ENDIF()
#######################################################
SET (GCUTIL_CFLAGS ${GCUTIL_CFLAGS} -DSMALL_CONFIG -DMAX_HEAP_SECTS=512)
ENDIF()
IF (ESCARGOT_THREADING)
- SET (GCUTIL_ENABLE_THREADING 1)
- SET (GCUTIL_ENABLE_THREAD_LOCAL_ALLOC 1)
+ SET (GCUTIL_ENABLE_THREADING ON)
ENDIF()
SET (GCUTIL_MODE ${ESCARGOT_MODE})
#error "I don't know what architecture this is!"
#endif
+#if defined(ENABLE_THREADING)
+#if defined(COMPILER_MSVC)
+#define MAY_THREAD_LOCAL __declspec(thread)
+#else
+#define MAY_THREAD_LOCAL __thread
+#endif
+#else
+#define MAY_THREAD_LOCAL
+#endif
+
+
#define MAKE_STACK_ALLOCATED() \
static void* operator new(size_t) = delete; \
static void* operator new[](size_t) = delete; \
return AtomicString::fromPayload(reinterpret_cast<void*>(v));
}
-bool Globals::g_globalsInited = false;
+bool thread_local Globals::g_globalsInited = false;
void Globals::initialize()
{
// initialize global value or context
m_platform->onFreeArrayBufferObjectDataBuffer(buffer, sizeInByte);
}
+ virtual void* onReallocArrayBufferObjectDataBuffer(void* oldBuffer, size_t oldSizeInByte, size_t newSizeInByte) override
+ {
+ return m_platform->onReallocArrayBufferObjectDataBuffer(oldBuffer, oldSizeInByte, newSizeInByte);
+ }
+
virtual void markJSJobEnqueued(Context* relatedContext) override
{
m_platform->markJSJobEnqueued(toRef(relatedContext));
(void*)cb);
}
+void VMInstanceRef::registerPromiseHook(PromiseHook promiseHook)
+{
+ toImpl(this)->registerPromiseHook([](ExecutionState& state, VMInstance::PromiseHookType type, PromiseObject* promise, const Value& parent, void* hook) -> void {
+ ASSERT(!!hook);
+ ((PromiseHook)hook)(toRef(&state), (PromiseHookType)type, toRef(promise), toRef(parent));
+ },
+ (void*)promiseHook);
+}
+
+void VMInstanceRef::unregisterPromiseHook()
+{
+ toImpl(this)->unregisterPromiseHook();
+}
+
void VMInstanceRef::enterIdleMode()
{
toImpl(this)->enterIdleMode();
void ContextRef::setVirtualIdentifierCallback(VirtualIdentifierCallback cb)
{
- Context* ctx = toImpl(this);
- ctx->m_virtualIdentifierCallbackPublic = (void*)cb;
- ctx->setVirtualIdentifierCallback([](ExecutionState& state, Value name) -> Value {
+ toImpl(this)->setVirtualIdentifierCallback([](ExecutionState& state, Value name) -> Value {
if (state.context()->m_virtualIdentifierCallbackPublic && !name.isSymbol()) {
return toImpl(((VirtualIdentifierCallback)state.context()->m_virtualIdentifierCallbackPublic)(toRef(&state), toRef(name)));
}
return Value(Value::EmptyValue);
- });
+ },
+ (void*)cb);
}
ContextRef::VirtualIdentifierCallback ContextRef::virtualIdentifierCallback()
void ContextRef::setSecurityPolicyCheckCallback(SecurityPolicyCheckCallback cb)
{
- Context* ctx = toImpl(this);
- ctx->m_securityPolicyCheckCallbackPublic = (void*)cb;
- ctx->setSecurityPolicyCheckCallback([](ExecutionState& state, bool isEval) -> Value {
+ toImpl(this)->setSecurityPolicyCheckCallback([](ExecutionState& state, bool isEval) -> Value {
if (state.context()->m_securityPolicyCheckCallbackPublic) {
return toImpl(((SecurityPolicyCheckCallback)state.context()->m_securityPolicyCheckCallbackPublic)(toRef(&state), isEval));
}
return Value(Value::EmptyValue);
- });
+ },
+ (void*)cb);
}
OptionalRef<FunctionObjectRef> ExecutionStateRef::resolveCallee()
return (RegExpObjectRef::RegExpObjectOption)toImpl(this)->option();
}
-BackingStoreRef* BackingStoreRef::create(size_t byteLength)
+BackingStoreRef* BackingStoreRef::create(VMInstanceRef* instance, size_t byteLength)
{
- return toRef(new BackingStore(byteLength));
+ return toRef(new BackingStore(toImpl(instance), byteLength));
}
BackingStoreRef* BackingStoreRef::create(void* data, size_t byteLength, BackingStoreRef::BackingStoreRefDeleterCallback callback, void* callbackData)
return toImpl(this)->isShared();
}
+void BackingStoreRef::reallocate(VMInstanceRef* instance, size_t newByteLength)
+{
+ toImpl(this)->reallocate(toImpl(instance), newByteLength);
+}
+
ArrayBufferObjectRef* ArrayBufferObjectRef::create(ExecutionStateRef* state)
{
return toRef(new ArrayBufferObject(*toImpl(state)));
#include <cstdlib>
#include <cstddef>
+#include <cstring>
#include <string>
#include <vector>
#include <functional>
#undef DECLARE_REF_CLASS
class ESCARGOT_EXPORT Globals {
- static bool g_globalsInited;
+ static thread_local bool g_globalsInited;
public:
+ // Escargot has thread-isolated Globals.
+ // Users need to call initialize, finalize function for each thread
static void initialize();
static void finalize();
};
typedef void (*OnVMInstanceDelete)(VMInstanceRef* instance);
void setOnVMInstanceDelete(OnVMInstanceDelete cb);
+ enum PromiseHookType {
+ Init,
+ Resolve,
+ Before,
+ After
+ };
+
+ typedef void (*PromiseHook)(ExecutionStateRef* state, PromiseHookType type, PromiseObjectRef* promise, ValueRef* parent);
+
+ // Register PromiseHook (PromiseHook is used by third-party apps)
+ void registerPromiseHook(PromiseHook promiseHook);
+ void unregisterPromiseHook();
+
// this function enforce do gc,
// remove every compiled bytecodes,
// remove regexp cache,
friend class ArrayBufferObject;
public:
- static BackingStoreRef* create(size_t byteLength);
+ static BackingStoreRef* create(VMInstanceRef* instance, size_t byteLength);
typedef void (*BackingStoreRefDeleterCallback)(void* data, size_t length,
void* deleterData);
static BackingStoreRef* create(void* data, size_t byteLength, BackingStoreRefDeleterCallback callback, void* callbackData);
size_t byteLength();
// Indicates whether the backing store is Shared Data Block (for SharedArrayBuffer)
bool isShared();
+ void reallocate(VMInstanceRef* instance, size_t newByteLength);
};
class ESCARGOT_EXPORT ArrayBufferObjectRef : public ObjectRef {
return free(buffer);
}
+ virtual void* onReallocArrayBufferObjectDataBuffer(void* oldBuffer, size_t oldSizeInByte, size_t newSizeInByte)
+ {
+ void* ptr = realloc(oldBuffer, newSizeInByte);
+ if (oldSizeInByte < newSizeInByte) {
+ uint8_t* s = static_cast<uint8_t*>(ptr);
+ memset(s + oldSizeInByte, 0, newSizeInByte - oldSizeInByte);
+ }
+ return ptr;
+ }
// If you want to add a Job event, you should call VMInstanceRef::executePendingJob after event. see Shell.cpp
virtual void markJSJobEnqueued(ContextRef* relatedContext) = 0;
namespace Escargot {
-static int s_gcKinds[HeapObjectKind::NumberOfKind];
+static MAY_THREAD_LOCAL int s_gcKinds[HeapObjectKind::NumberOfKind];
template <GC_get_next_pointer_proc proc>
GC_ms_entry* markAndPushCustomIterable(GC_word* addr,
void Heap::initialize()
{
+ GC_init();
RELEASE_ASSERT(GC_get_all_interior_pointers() == 0);
GC_set_force_unmap_on_gcollect(1);
void* ByteCodeBlock::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(ByteCodeBlock)] = { 0 };
GC_set_bit(obj_bitmap, GC_WORD_OFFSET(ByteCodeBlock, m_stringLiteralData));
void* GetObjectInlineCache::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(GetObjectInlineCache)] = { 0 };
GC_set_bit(obj_bitmap, GC_WORD_OFFSET(GetObjectInlineCache, m_cache));
void* SetObjectInlineCache::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(SetObjectInlineCache)] = { 0 };
GC_set_bit(obj_bitmap, GC_WORD_OFFSET(SetObjectInlineCache, m_cachedHiddenClassChainData));
void* NativeCodeBlock::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(NativeCodeBlock)] = { 0 };
GC_set_bit(obj_bitmap, GC_WORD_OFFSET(NativeCodeBlock, m_context));
void* InterpretedCodeBlock::operator new(size_t size)
{
#ifdef NDEBUG
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(InterpretedCodeBlock)] = { 0 };
GC_set_bit(obj_bitmap, GC_WORD_OFFSET(InterpretedCodeBlock, m_context));
void* InterpretedCodeBlockWithRareData::operator new(size_t size)
{
#ifdef NDEBUG
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(InterpretedCodeBlockWithRareData)] = { 0 };
GC_set_bit(obj_bitmap, GC_WORD_OFFSET(InterpretedCodeBlockWithRareData, m_context));
void* InterpretedCodeBlock::BlockInfo::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(InterpretedCodeBlock::BlockInfo)] = { 0 };
GC_set_bit(obj_bitmap, GC_WORD_OFFSET(InterpretedCodeBlock::BlockInfo, m_identifiers));
void* Script::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(Script)] = { 0 };
GC_set_bit(obj_bitmap, GC_WORD_OFFSET(Script, m_srcName));
void* Script::ModuleData::ModulePromiseObject::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word desc[GC_BITMAP_SIZE(ModulePromiseObject)] = { 0 };
PromiseObject::fillGCDescriptor(desc);
void* ArgumentsObject::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(ArgumentsObject)] = { 0 };
Object::fillGCDescriptor(obj_bitmap);
ArrayBufferObject::ArrayBufferObject(ExecutionState& state, Object* proto)
: Object(state, proto, ESCARGOT_OBJECT_BUILTIN_PROPERTY_NUMBER)
- , m_data(nullptr)
- , m_byteLength(0)
, m_mayPointsSharedBackingStore(false)
{
}
GC_invoke_finalizers();
}
- auto platform = state.context()->vmInstance()->platform();
- void* buffer = platform->onMallocArrayBufferObjectDataBuffer(byteLength);
- m_backingStore = new BackingStore(buffer, byteLength, [](void* data, size_t length, void* deleterData) {
- Platform* platform = (Platform*)deleterData;
- platform->onFreeArrayBufferObjectDataBuffer(data, length);
- },
- platform);
- m_data = (uint8_t*)m_backingStore->data();
- m_byteLength = byteLength;
+ m_backingStore = new BackingStore(state.context()->vmInstance(), byteLength);
}
void ArrayBufferObject::attachBuffer(BackingStore* backingStore)
m_mayPointsSharedBackingStore = true;
m_backingStore = backingStore;
- m_data = (uint8_t*)backingStore->data();
- m_byteLength = backingStore->byteLength();
}
void ArrayBufferObject::detachArrayBuffer()
ASSERT(!isSharedArrayBufferObject());
#endif
- if (m_data && !m_mayPointsSharedBackingStore) {
+ if (m_backingStore && !m_mayPointsSharedBackingStore) {
// if backingstore is definitely not shared, we deallocate the backingstore immediately.
m_backingStore.value()->deallocate();
}
- m_data = nullptr;
- m_byteLength = 0;
m_backingStore.reset();
m_mayPointsSharedBackingStore = false;
}
void* ArrayBufferObject::operator new(size_t size)
{
#ifdef NDEBUG
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(ArrayBufferObject)] = { 0 };
Object::fillGCDescriptor(obj_bitmap);
Value ArrayBufferObject::getValueFromBuffer(ExecutionState& state, size_t byteindex, TypedArrayType type, bool isLittleEndian)
{
// If isLittleEndian is not present, set isLittleEndian to either true or false.
- ASSERT(m_byteLength);
+ ASSERT(byteLength());
size_t elemSize = TypedArrayHelper::elementSize(type);
- ASSERT(byteindex + elemSize <= m_byteLength);
- uint8_t* rawStart = m_data + byteindex;
+ ASSERT(byteindex + elemSize <= byteLength());
+ uint8_t* rawStart = data() + byteindex;
if (LIKELY(isLittleEndian)) {
return TypedArrayHelper::rawBytesToNumber(state, type, rawStart);
} else {
void ArrayBufferObject::setValueInBuffer(ExecutionState& state, size_t byteindex, TypedArrayType type, const Value& val, bool isLittleEndian)
{
// If isLittleEndian is not present, set isLittleEndian to either true or false.
- ASSERT(m_byteLength);
+ ASSERT(byteLength());
size_t elemSize = TypedArrayHelper::elementSize(type);
- ASSERT(byteindex + elemSize <= m_byteLength);
- uint8_t* rawStart = m_data + byteindex;
+ ASSERT(byteindex + elemSize <= byteLength());
+ uint8_t* rawStart = data() + byteindex;
uint8_t* rawBytes = ALLOCA(8, uint8_t, state);
TypedArrayHelper::numberToRawBytes(state, type, val, rawBytes);
if (LIKELY(isLittleEndian)) {
#define __EscargotArrayBufferObject__
#include "runtime/Context.h"
+#include "runtime/BackingStore.h"
namespace Escargot {
-class BackingStore;
-
enum class TypedArrayType : unsigned {
Int8 = 0,
Int16,
return true;
}
- ALWAYS_INLINE const uint8_t* data() { return m_data; }
- ALWAYS_INLINE size_t byteLength() { return m_byteLength; }
+ ALWAYS_INLINE uint8_t* data()
+ {
+ if (LIKELY(m_backingStore)) {
+ return reinterpret_cast<uint8_t*>(m_backingStore->data());
+ }
+ return nullptr;
+ }
+ ALWAYS_INLINE size_t byteLength()
+ {
+ if (LIKELY(m_backingStore)) {
+ return m_backingStore->byteLength();
+ }
+ return 0;
+ }
// $24.1.1.6
Value getValueFromBuffer(ExecutionState& state, size_t byteindex, TypedArrayType type, bool isLittleEndian = true);
// $24.1.1.8
}
}
- void fillData(const uint8_t* data, size_t length)
+ void fillData(const uint8_t* newData, size_t length)
{
ASSERT(!isDetachedBuffer());
- memcpy(m_data, data, length);
+ memcpy(data(), newData, length);
}
void* operator new(size_t size);
void* operator new[](size_t size) = delete;
protected:
- uint8_t* m_data; // Points backing stores data address
- size_t m_byteLength; // Indicates backing stores byte length
bool m_mayPointsSharedBackingStore;
Optional<BackingStore*> m_backingStore;
};
void* operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(ArrayBufferView)] = { 0 };
Object::fillGCDescriptor(obj_bitmap);
void* ArrayIteratorObject::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(ArrayIteratorObject)] = { 0 };
Object::fillGCDescriptor(obj_bitmap);
{
ASSERT(size == sizeof(AsyncGeneratorObject));
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(AsyncGeneratorObject)] = { 0 };
fillGCDescriptor(obj_bitmap);
#include "Escargot.h"
#include "BackingStore.h"
+#include "runtime/VMInstance.h"
+#include "runtime/Platform.h"
namespace Escargot {
void* BackingStore::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
// only mark m_deleterData
GC_word obj_bitmap[GC_BITMAP_SIZE(BackingStore)] = { 0 };
return GC_MALLOC_EXPLICITLY_TYPED(size, descr);
}
-BackingStore::BackingStore(void* data, size_t byteLength, BackingStoreDeleterCallback callback, void* callbackData, bool isShared)
+static void defaultPlatformBackingStoreDeleter(void* data, size_t length, void* deleterData)
+{
+ if (deleterData) {
+ VMInstance* instance = (VMInstance*)deleterData;
+ instance->platform()->onFreeArrayBufferObjectDataBuffer(data, length);
+ } else {
+ ASSERT(data == nullptr);
+ }
+}
+
+BackingStore::BackingStore(void* data, size_t byteLength, BackingStoreDeleterCallback callback, void* callbackData,
+ bool isShared, bool isAllocatedByPlatformAllocator)
: m_isShared(isShared)
+ , m_isAllocatedByPlatformAllocator(isAllocatedByPlatformAllocator)
, m_data(data)
, m_byteLength(byteLength)
, m_deleter(callback)
nullptr, nullptr, nullptr);
}
-BackingStore::BackingStore(size_t byteLength)
- : BackingStore(calloc(byteLength, 1), byteLength,
- [](void* data, size_t length, void* deleterData) {
- free(data);
- },
- nullptr)
+BackingStore::BackingStore(VMInstance* instance, size_t byteLength)
+ : BackingStore(instance->platform()->onMallocArrayBufferObjectDataBuffer(byteLength), byteLength,
+ defaultPlatformBackingStoreDeleter, instance, false, true)
+{
+}
+
+void BackingStore::reallocate(VMInstance* instance, size_t newByteLength)
{
+ // Shared Data Block should not be reallocated
+ ASSERT(!m_isShared);
+
+ if (m_byteLength == newByteLength) {
+ return;
+ }
+
+ if (m_isAllocatedByPlatformAllocator) {
+ m_data = instance->platform()->onReallocArrayBufferObjectDataBuffer(m_data, m_byteLength, newByteLength);
+ m_byteLength = newByteLength;
+ } else {
+ m_deleter(m_data, m_byteLength, m_deleterData);
+ m_data = instance->platform()->onMallocArrayBufferObjectDataBuffer(newByteLength);
+ m_deleter = defaultPlatformBackingStoreDeleter;
+ m_deleterData = instance;
+ m_byteLength = newByteLength;
+ m_isAllocatedByPlatformAllocator = true;
+ }
}
void BackingStore::deallocate()
ASSERT(!m_isShared);
m_deleter(m_data, m_byteLength, m_deleterData);
GC_REGISTER_FINALIZER_NO_ORDER(this, nullptr, nullptr, nullptr, nullptr);
+ m_data = nullptr;
+ m_byteLength = 0;
+ m_deleter = defaultPlatformBackingStoreDeleter;
+ m_deleterData = nullptr;
+ m_isAllocatedByPlatformAllocator = true;
}
} // namespace Escargot
namespace Escargot {
+class VMInstance;
+
using BackingStoreDeleterCallback = void (*)(void* data, size_t length,
void* deleterData);
friend class ArrayBufferObject;
public:
- BackingStore(size_t byteLength);
- BackingStore(void* data, size_t byteLength, BackingStoreDeleterCallback callback, void* callbackData, bool isShared = false);
+ BackingStore(VMInstance* instance, size_t byteLength);
+ BackingStore(void* data, size_t byteLength, BackingStoreDeleterCallback callback,
+ void* callbackData, bool isShared = false, bool isAllocatedByPlatformAllocator = false);
void* data() const
{
return m_isShared;
}
+ void reallocate(VMInstance* instance, size_t newByteLength);
void deallocate();
void* operator new(size_t size);
private:
// Indicates whether the backing store was created as Shared Data Block
bool m_isShared;
+ bool m_isAllocatedByPlatformAllocator;
void* m_data;
size_t m_byteLength;
BackingStoreDeleterCallback m_deleter;
void* BigIntObject::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(BigIntObject)] = { 0 };
Object::fillGCDescriptor(obj_bitmap);
void* BooleanObject::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(BooleanObject)] = { 0 };
Object::fillGCDescriptor(obj_bitmap);
void* CompressibleString::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(CompressibleString)] = { 0 };
GC_set_bit(obj_bitmap, GC_WORD_OFFSET(CompressibleString, m_vmInstance));
void* GlobalVariableAccessCacheItem::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(GlobalVariableAccessCacheItem)] = { 0 };
GC_set_bit(obj_bitmap, GC_WORD_OFFSET(GlobalVariableAccessCacheItem, m_cachedStructure));
friend class ByteCodeInterpreter;
friend struct OpcodeTable;
friend class ContextRef;
+ friend class VirtualIdDisabler;
#if defined(ENABLE_CODE_CACHE)
friend class CodeCacheWriter;
friend class CodeCacheReader;
// this is not compatible with ECMAScript
// but this callback is needed for browser-implementation
// if there is a Identifier with that value, callback should return non-empty value
- void setVirtualIdentifierCallback(VirtualIdentifierCallback cb)
+ void setVirtualIdentifierCallback(VirtualIdentifierCallback cb, void* callbackPublic)
{
m_virtualIdentifierCallback = cb;
+ m_virtualIdentifierCallbackPublic = callbackPublic;
}
VirtualIdentifierCallback virtualIdentifierCallback()
return m_virtualIdentifierCallback;
}
- void setSecurityPolicyCheckCallback(SecurityPolicyCheckCallback cb)
+ void setSecurityPolicyCheckCallback(SecurityPolicyCheckCallback cb, void* callbackPublic)
{
m_securityPolicyCheckCallback = cb;
+ m_securityPolicyCheckCallbackPublic = callbackPublic;
}
SecurityPolicyCheckCallback securityPolicyCheckCallback()
public:
explicit VirtualIdDisabler(Context* c)
: m_fn(c->virtualIdentifierCallback())
+ , m_fnPublic(c->m_virtualIdentifierCallbackPublic)
, m_context(c)
{
- c->setVirtualIdentifierCallback(nullptr);
+ c->setVirtualIdentifierCallback(nullptr, nullptr);
}
+
~VirtualIdDisabler()
{
- m_context->setVirtualIdentifierCallback(m_fn);
+ m_context->setVirtualIdentifierCallback(m_fn, m_fnPublic);
}
VirtualIdentifierCallback m_fn;
+ void* m_fnPublic;
Context* m_context;
};
} // namespace Escargot
void* DateObject::operator new(size_t size)
{
ASSERT(size == sizeof(DateObject));
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(DateObject)] = { 0 };
Object::fillGCDescriptor(obj_bitmap);
void* EnumerateObjectWithDestruction::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(EnumerateObjectWithDestruction)] = { 0 };
GC_set_bit(obj_bitmap, GC_WORD_OFFSET(EnumerateObjectWithDestruction, m_keys));
void* EnumerateObjectWithIteration::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(EnumerateObjectWithIteration)] = { 0 };
GC_set_bit(obj_bitmap, GC_WORD_OFFSET(EnumerateObjectWithIteration, m_keys));
void* operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word objBitmap[GC_BITMAP_SIZE(DeclarativeEnvironmentRecordIndexed)] = { 0 };
GC_set_bit(objBitmap, GC_WORD_OFFSET(DeclarativeEnvironmentRecordIndexed, m_blockInfo));
void* operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word objBitmap[GC_BITMAP_SIZE(DeclarativeEnvironmentRecordNotIndexed)] = { 0 };
GC_set_bit(objBitmap, GC_WORD_OFFSET(DeclarativeEnvironmentRecordNotIndexed, m_heapStorage));
void* operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word objBitmap[GC_BITMAP_SIZE(ModuleEnvironmentRecord)] = { 0 };
GC_set_bit(objBitmap, GC_WORD_OFFSET(ModuleEnvironmentRecord, m_moduleBindings));
{
ASSERT(size == sizeof(ExecutionPauser));
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word desc[GC_BITMAP_SIZE(ExecutionPauser)] = { 0 };
GC_set_bit(desc, GC_WORD_OFFSET(ExecutionPauser, m_executionState));
void* FinalizationRegistryObject::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(FinalizationRegistryObject)] = { 0 };
Object::fillGCDescriptor(obj_bitmap);
void* FinalizationRegistryObject::FinalizationRegistryObjectItem::operator new(size_t size)
{
#ifdef NDEBUG
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(FinalizationRegistryObject::FinalizationRegistryObjectItem)] = { 0 };
GC_set_bit(obj_bitmap, GC_WORD_OFFSET(FinalizationRegistryObject::FinalizationRegistryObjectItem, source));
void* FunctionTemplate::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word objBitmap[GC_BITMAP_SIZE(FunctionTemplate)] = { 0 };
Template::fillGCDescriptor(objBitmap);
void* operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(CallTemplateFunctionData)] = { 0 };
GC_set_bit(obj_bitmap, GC_WORD_OFFSET(CallTemplateFunctionData, m_functionTemplate));
{
ASSERT(size == sizeof(GeneratorObject));
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(GeneratorObject)] = { 0 };
fillGCDescriptor(obj_bitmap);
PromiseReaction::Capability capability = promise->createResolvingFunctions(state);
+ // PromiseHook for Promise initialization case
+ if (UNLIKELY(state.context()->vmInstance()->isPromiseHookRegistered())) {
+ // Note: to pass the parent promise, we reserve the second argument (argv[1]) for it;
+ // otherwise, the second argument is ignored
+ state.context()->vmInstance()->triggerPromiseHook(state, VMInstance::PromiseHookType::Init, promise, (argc > 1) ? argv[1] : Value());
+ }
+
SandBox sb(state.context());
auto res = sb.run([&]() -> Value {
Value arguments[] = { capability.m_resolveFunction, capability.m_rejectFunction };
Value arguments[] = { res.error };
Object::call(state, capability.m_rejectFunction, Value(), 1, arguments);
}
+
return promise;
}
if (!thisValue.isObject() || !thisValue.asObject()->isPromiseObject())
ErrorObject::throwBuiltinError(state, ErrorObject::TypeError, strings->Promise.string(), false, strings->then.string(), "%s: not a Promise object");
Value C = thisValue.asObject()->speciesConstructor(state, state.context()->globalObject()->promise());
- PromiseReaction::Capability promiseCapability = PromiseObject::newPromiseCapability(state, C.asObject());
+ PromiseReaction::Capability promiseCapability = PromiseObject::newPromiseCapability(state, C.asObject(), thisValue.asObject()->asPromiseObject());
return thisValue.asObject()->asPromiseObject()->then(state, argv[0], argv[1], promiseCapability).value();
}
}
}
-std::vector<std::string> Intl::numberingSystemsForLocale(String* locale)
+static std::vector<std::string> initAvailableNumberingSystems()
{
- static std::vector<std::string> availableNumberingSystems;
-
+ std::vector<std::string> availableNumberingSystems;
UErrorCode status = U_ZERO_ERROR;
- if (availableNumberingSystems.size() == 0) {
- UEnumeration* numberingSystemNames = unumsys_openAvailableNames(&status);
- ASSERT(U_SUCCESS(status));
+ UEnumeration* numberingSystemNames = unumsys_openAvailableNames(&status);
+ ASSERT(U_SUCCESS(status));
- int32_t resultLength;
- // Numbering system names are always ASCII, so use char[].
- while (const char* result = uenum_next(numberingSystemNames, &resultLength, &status)) {
- ASSERT(U_SUCCESS(status));
- auto numsys = unumsys_openByName(result, &status);
- ASSERT(U_SUCCESS(status));
- if (!unumsys_isAlgorithmic(numsys)) {
- availableNumberingSystems.push_back(std::string(result, resultLength));
- }
- unumsys_close(numsys);
+ int32_t resultLength;
+ // Numbering system names are always ASCII, so use char[].
+ while (const char* result = uenum_next(numberingSystemNames, &resultLength, &status)) {
+ ASSERT(U_SUCCESS(status));
+ auto numsys = unumsys_openByName(result, &status);
+ ASSERT(U_SUCCESS(status));
+ if (!unumsys_isAlgorithmic(numsys)) {
+ availableNumberingSystems.push_back(std::string(result, resultLength));
}
- uenum_close(numberingSystemNames);
+ unumsys_close(numsys);
}
+ uenum_close(numberingSystemNames);
- status = U_ZERO_ERROR;
+ return availableNumberingSystems;
+}
+
+std::vector<std::string> Intl::numberingSystemsForLocale(String* locale)
+{
+ static std::vector<std::string> availableNumberingSystems = initAvailableNumberingSystems();
+
+ UErrorCode status = U_ZERO_ERROR;
UNumberingSystem* defaultSystem = unumsys_open(locale->toUTF8StringData().data(), &status);
ASSERT(U_SUCCESS(status));
std::string defaultSystemName(unumsys_getName(defaultSystem));
void* IntlPluralRulesObject::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word desc[GC_BITMAP_SIZE(IntlPluralRulesObject)] = { 0 };
Object::fillGCDescriptor(desc);
#include "Escargot.h"
#include "Job.h"
#include "Context.h"
+#include "VMInstance.h"
#include "SandBox.h"
#include "runtime/FinalizationRegistryObject.h"
SandBox::SandBoxResult PromiseReactionJob::run()
{
+ Context* context = relatedContext();
+ ExecutionState state(context);
+
+ if (UNLIKELY(context->vmInstance()->isPromiseHookRegistered())) {
+ Object* promiseTarget = m_reaction.m_capability.m_promise;
+ PromiseObject* promise = (promiseTarget && promiseTarget->isPromiseObject()) ? promiseTarget->asPromiseObject() : nullptr;
+ context->vmInstance()->triggerPromiseHook(state, VMInstance::PromiseHookType::Before, promise, Value());
+ }
+
// https://www.ecma-international.org/ecma-262/10.0/#sec-promisereactionjob
- SandBox sandbox(relatedContext());
- ExecutionState state(relatedContext());
- return sandbox.run([&]() -> Value {
+ SandBox sandbox(context);
+ SandBox::SandBoxResult result = sandbox.run([&]() -> Value {
/* 25.4.2.1.4 Handler is "Identity" case */
if (m_reaction.m_handler == (Object*)1) {
Value value[] = { m_argument };
}
return res.result;
});
+
+ if (UNLIKELY(context->vmInstance()->isPromiseHookRegistered())) {
+ Object* promiseTarget = m_reaction.m_capability.m_promise;
+ PromiseObject* promise = (promiseTarget && promiseTarget->isPromiseObject()) ? promiseTarget->asPromiseObject() : nullptr;
+ context->vmInstance()->triggerPromiseHook(state, VMInstance::PromiseHookType::After, promise, Value());
+ }
+
+ return result;
}
SandBox::SandBoxResult PromiseResolveThenableJob::run()
void* MapObject::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(MapObject)] = { 0 };
Object::fillGCDescriptor(obj_bitmap);
void* MapIteratorObject::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(MapIteratorObject)] = { 0 };
Object::fillGCDescriptor(obj_bitmap);
void* NumberObject::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(NumberObject)] = { 0 };
Object::fillGCDescriptor(obj_bitmap);
void* ObjectRareData::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(ObjectRareData)] = { 0 };
GC_set_bit(obj_bitmap, GC_WORD_OFFSET(ObjectRareData, m_prototype));
void* ObjectStructureItemVector::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(ObjectStructureItemVector)] = { 0 };
GC_set_bit(obj_bitmap, GC_WORD_OFFSET(ObjectStructureItemVector, m_buffer));
void* ObjectStructureWithoutTransition::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(ObjectStructureWithoutTransition)] = { 0 };
GC_set_bit(obj_bitmap, GC_WORD_OFFSET(ObjectStructureWithoutTransition, m_properties));
void* ObjectStructureWithTransition::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(ObjectStructureWithTransition)] = { 0 };
GC_set_bit(obj_bitmap, GC_WORD_OFFSET(ObjectStructureWithTransition, m_properties));
void* ObjectStructureWithMap::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(ObjectStructureWithMap)] = { 0 };
GC_set_bit(obj_bitmap, GC_WORD_OFFSET(ObjectStructureWithMap, m_properties));
void* ObjectTemplate::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word objBitmap[GC_BITMAP_SIZE(ObjectTemplate)] = { 0 };
Template::fillGCDescriptor(objBitmap);
// ArrayBuffer
virtual void* onMallocArrayBufferObjectDataBuffer(size_t sizeInByte) = 0;
virtual void onFreeArrayBufferObjectDataBuffer(void* buffer, size_t sizeInByte) = 0;
+ virtual void* onReallocArrayBufferObjectDataBuffer(void* oldBuffer, size_t oldSizeInByte, size_t newSizeInByte) = 0;
// Promise
virtual void markJSJobEnqueued(Context* relatedContext) = 0;
void* PromiseObject::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word desc[GC_BITMAP_SIZE(PromiseObject)] = { 0 };
fillGCDescriptor(desc);
return PromiseReaction::Capability(this, resolve, reject);
}
-PromiseReaction::Capability PromiseObject::newPromiseCapability(ExecutionState& state, Object* constructor)
+PromiseReaction::Capability PromiseObject::newPromiseCapability(ExecutionState& state, Object* constructor, const Value& parentPromise)
{
// https://www.ecma-international.org/ecma-262/10.0/#sec-newpromisecapability
+ Context* context = state.context();
// fast path
- if (constructor == state.context()->globalObject()->promise()) {
- PromiseObject* promise = new PromiseObject(state, state.context()->globalObject()->promisePrototype());
+ if (constructor == context->globalObject()->promise()) {
+ PromiseObject* promise = new PromiseObject(state, context->globalObject()->promisePrototype());
+ // PromiseHook for Promise initialization case
+ if (UNLIKELY(context->vmInstance()->isPromiseHookRegistered())) {
+ context->vmInstance()->triggerPromiseHook(state, VMInstance::PromiseHookType::Init, promise, parentPromise);
+ }
return promise->createResolvingFunctions(state);
}
- const StaticStrings* strings = &state.context()->staticStrings();
+ const StaticStrings* strings = &context->staticStrings();
// If IsConstructor(C) is false, throw a TypeError exception.
if (!constructor->isConstructor()) {
executor->setInternalSlot(BuiltinFunctionSlot::Capability, capability);
// Let promise be ? Construct(C, « executor »).
- Value arguments[] = { executor };
- Object* promise = Object::construct(state, constructor, 1, arguments).toObject(state);
+ Object* promise = nullptr;
+ if (UNLIKELY(context->vmInstance()->isPromiseHookRegistered())) {
+ // NOTE: the parent promise is used only by PromiseHook; it is delivered here as an extra trailing argument
+ Value arguments[] = { executor, parentPromise };
+ promise = Object::construct(state, constructor, 2, arguments).toObject(state);
+ } else {
+ Value arguments[] = { executor };
+ promise = Object::construct(state, constructor, 1, arguments).toObject(state);
+ }
Value resolveFunction = capability->get(state, strings->resolve).value(state, capability);
Value rejectFunction = capability->get(state, strings->reject).value(state, capability);
// Let C be ? SpeciesConstructor(promise, %Promise%).
Value C = speciesConstructor(state, state.context()->globalObject()->promise());
// Let resultCapability be ? NewPromiseCapability(C).
- return PromiseObject::newPromiseCapability(state, C.toObject(state));
+ return PromiseObject::newPromiseCapability(state, C.toObject(state), this);
}
Object* PromiseObject::then(ExecutionState& state, Value handler)
Value promiseValue = callee->internalSlot(PromiseObject::BuiltinFunctionSlot::Promise);
PromiseObject* promise = promiseValue.asObject()->asPromiseObject();
+ if (UNLIKELY(state.context()->vmInstance()->isPromiseHookRegistered())) {
+ state.context()->vmInstance()->triggerPromiseHook(state, VMInstance::PromiseHookType::Resolve, promise, Value());
+ }
+
// Let alreadyResolved be F.[[AlreadyResolved]].
Value alreadyResolvedValue = callee->internalSlot(PromiseObject::BuiltinFunctionSlot::AlreadyResolved);
Object* alreadyResolved = alreadyResolvedValue.asObject();
Value promiseValue = callee->internalSlot(PromiseObject::BuiltinFunctionSlot::Promise);
PromiseObject* promise = promiseValue.asObject()->asPromiseObject();
+ if (UNLIKELY(state.context()->vmInstance()->isPromiseHookRegistered())) {
+ state.context()->vmInstance()->triggerPromiseHook(state, VMInstance::PromiseHookType::Resolve, promise, Value());
+ }
+
// Let alreadyResolved be F.[[AlreadyResolved]].
Value alreadyResolvedValue = callee->internalSlot(PromiseObject::BuiltinFunctionSlot::AlreadyResolved);
Object* alreadyResolved = alreadyResolvedValue.asObject();
void* operator new(size_t size);
void* operator new[](size_t size) = delete;
- static PromiseReaction::Capability newPromiseCapability(ExecutionState& state, Object* constructor);
+ static PromiseReaction::Capability newPromiseCapability(ExecutionState& state, Object* constructor, const Value& parentPromise = Value());
static Value getCapabilitiesExecutorFunction(ExecutionState& state, Value thisValue, size_t argc, Value* argv, Optional<Object*> newTarget);
// http://www.ecma-international.org/ecma-262/10.0/#sec-promise-resolve
// The abstract operation PromiseResolve, given a constructor and a value, returns a new promise resolved with that value.
void* ProxyObject::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(ProxyObject)] = { 0 };
Object::fillGCDescriptor(obj_bitmap);
void* RegExpObject::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(RegExpObject)] = { 0 };
Object::fillGCDescriptor(obj_bitmap);
void* RegExpStringIteratorObject::operator new(size_t size)
{
ASSERT(size == sizeof(RegExpStringIteratorObject));
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(RegExpStringIteratorObject)] = { 0 };
fillGCDescriptor(obj_bitmap);
void* ReloadableString::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(ReloadableString)] = { 0 };
GC_set_bit(obj_bitmap, GC_WORD_OFFSET(ReloadableString, m_vmInstance));
void* RopeString::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(RopeString)] = { 0 };
GC_set_bit(obj_bitmap, GC_WORD_OFFSET(RopeString, m_left));
void* SetObject::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(SetObject)] = { 0 };
Object::fillGCDescriptor(obj_bitmap);
void* SetIteratorObject::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(SetIteratorObject)] = { 0 };
Object::fillGCDescriptor(obj_bitmap);
platform->onFreeArrayBufferObjectDataBuffer(data, length);
},
platform, true);
-
- m_data = (uint8_t*)m_backingStore->data();
- m_byteLength = byteLength;
}
SharedArrayBufferObject* SharedArrayBufferObject::allocateSharedArrayBuffer(ExecutionState& state, Object* constructor, uint64_t byteLength)
void* SharedArrayBufferObject::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(SharedArrayBufferObject)] = { 0 };
Object::fillGCDescriptor(obj_bitmap);
namespace Escargot {
-String* String::emptyString;
+MAY_THREAD_LOCAL String* String::emptyString;
std::vector<std::string> split(const std::string& s, char seperator)
{
void* ASCIIString::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(ASCIIString)] = { 0 };
GC_set_bit(obj_bitmap, GC_WORD_OFFSET(ASCIIString, m_bufferData.buffer));
void* Latin1String::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(Latin1String)] = { 0 };
GC_set_bit(obj_bitmap, GC_WORD_OFFSET(Latin1String, m_bufferData.buffer));
void* UTF16String::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(UTF16String)] = { 0 };
GC_set_bit(obj_bitmap, GC_WORD_OFFSET(UTF16String, m_bufferData.buffer));
virtual UTF16StringData toUTF16StringData() const = 0;
virtual UTF8StringData toUTF8StringData() const = 0;
virtual UTF8StringDataNonGCStd toNonGCUTF8StringData(int options = StringWriteOption::NoOptions) const = 0;
- static String* emptyString;
+ static MAY_THREAD_LOCAL String* emptyString;
uint64_t tryToUseAsIndex() const;
uint32_t tryToUseAsIndex32() const;
void* StringObject::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(StringObject)] = { 0 };
Object::fillGCDescriptor(obj_bitmap);
void* StringIteratorObject::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(StringIteratorObject)] = { 0 };
Object::fillGCDescriptor(obj_bitmap);
void* StringView::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(StringView)] = { 0 };
GC_set_bit(obj_bitmap, GC_WORD_OFFSET(StringView, m_bufferData.bufferAsString));
void* SymbolObject::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(SymbolObject)] = { 0 };
Object::fillGCDescriptor(obj_bitmap);
/////////////////////////////////////////////////
// VMInstance Global Data
/////////////////////////////////////////////////
-std::mt19937 VMInstance::g_randEngine((unsigned int)time(NULL));
-bf_context_t VMInstance::g_bfContext;
+MAY_THREAD_LOCAL std::mt19937* VMInstance::g_randEngine;
+MAY_THREAD_LOCAL bf_context_t VMInstance::g_bfContext;
#if defined(ENABLE_WASM)
#ifndef ESCARGOT_WASM_GC_CHECK_INTERVAL
#define ESCARGOT_WASM_GC_CHECK_INTERVAL 5000
#endif
-WASMContext VMInstance::g_wasmContext;
+MAY_THREAD_LOCAL WASMContext VMInstance::g_wasmContext;
#endif
-ASTAllocator* VMInstance::g_astAllocator;
-WTF::BumpPointerAllocator* VMInstance::g_bumpPointerAllocator;
+MAY_THREAD_LOCAL ASTAllocator* VMInstance::g_astAllocator;
+MAY_THREAD_LOCAL WTF::BumpPointerAllocator* VMInstance::g_bumpPointerAllocator;
void VMInstance::initialize()
{
- // g_randEngine already initialized
+ // g_randEngine: heap-allocated here; released in finalize()
+ g_randEngine = new std::mt19937((unsigned int)time(NULL));
// g_bfContext
bf_context_init(&g_bfContext, [](void* opaque, void* ptr, size_t size) -> void* {
// because some registered gc-finalizers could use these global values
- // g_randEngine does not need finalization
+ // g_randEngine is heap-allocated in initialize(), so release it here
+ delete g_randEngine;
+ g_randEngine = nullptr;
// g_bfContext
bf_context_end(&g_bfContext);
// g_bumpPointerAllocator
delete g_bumpPointerAllocator;
g_bumpPointerAllocator = nullptr;
-
- // reset PointerValue tag values
- PointerValue::g_arrayObjectTag = 0;
- PointerValue::g_arrayPrototypeObjectTag = 0;
- PointerValue::g_objectRareDataTag = 0;
- PointerValue::g_doubleInEncodedValueTag = 0;
}
/////////////////////////////////////////////////
void* VMInstance::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word desc[GC_BITMAP_SIZE(VMInstance)] = { 0 };
GC_set_bit(desc, GC_WORD_OFFSET(VMInstance, m_staticStrings.dtoaCache));
#endif
, m_onVMInstanceDestroy(nullptr)
, m_onVMInstanceDestroyData(nullptr)
+ , m_promiseHook(nullptr)
+ , m_promiseHookPublic(nullptr)
, m_cachedUTC(nullptr)
, m_platform(platform)
{
/////////////////////////////////
// Global Data
// global values which should be initialized once and shared during the runtime
- static std::mt19937 g_randEngine;
- static bf_context_t g_bfContext;
+ static MAY_THREAD_LOCAL std::mt19937* g_randEngine;
+ static MAY_THREAD_LOCAL bf_context_t g_bfContext;
#if defined(ENABLE_WASM)
- static WASMContext g_wasmContext;
+ static MAY_THREAD_LOCAL WASMContext g_wasmContext;
#endif
- static ASTAllocator* g_astAllocator;
- static WTF::BumpPointerAllocator* g_bumpPointerAllocator;
+ static MAY_THREAD_LOCAL ASTAllocator* g_astAllocator;
+ static MAY_THREAD_LOCAL WTF::BumpPointerAllocator* g_bumpPointerAllocator;
/////////////////////////////////
public:
static void finalize();
static std::mt19937& randEngine()
{
- return g_randEngine;
+ ASSERT(!!g_randEngine);
+ return *g_randEngine;
}
static bf_context_t* bfContext()
{
}
/////////////////////////////////
+ // Lifecycle points at which a registered PromiseHook is invoked
+ enum PromiseHookType {
+ Init, // a new promise object was created (see newPromiseCapability)
+ Resolve, // a promise's resolve/reject function was called
+ Before, // just before a PromiseReactionJob runs
+ After // just after a PromiseReactionJob finishes
+ };
+
+ // Hook callback signature; `hook` is the user data supplied at registration time
+ typedef void (*PromiseHook)(ExecutionState& state, PromiseHookType type, PromiseObject* promise, const Value& parent, void* hook);
+
VMInstance(Platform* platform, const char* locale = nullptr, const char* timezone = nullptr, const char* baseCacheDir = nullptr);
~VMInstance();
m_onVMInstanceDestroyData = data;
}
+ // PromiseHook is triggered for each Promise lifecycle event.
+ // A third-party app registers a PromiseHook only when it needs one.
+ bool isPromiseHookRegistered()
+ {
+ return !!m_promiseHook;
+ }
+
+ // Register `promiseHook` together with its user data (`promiseHookPublic`),
+ // which is handed back as the last argument of every hook invocation.
+ void registerPromiseHook(PromiseHook promiseHook, void* promiseHookPublic)
+ {
+ m_promiseHook = promiseHook;
+ m_promiseHookPublic = promiseHookPublic;
+ }
+
+ void unregisterPromiseHook()
+ {
+ m_promiseHook = nullptr;
+ m_promiseHookPublic = nullptr;
+ }
+
+ // Invoke the registered hook for `promise` at lifecycle point `type`.
+ // NOTE(review): the call is skipped when m_promiseHookPublic is null, so a
+ // hook registered with null user data never fires -- confirm this is intended.
+ void triggerPromiseHook(ExecutionState& state, PromiseHookType type, PromiseObject* promise, const Value& parent)
+ {
+ ASSERT(!!m_promiseHook);
+ if (m_promiseHookPublic) {
+ m_promiseHook(state, type, promise, parent, m_promiseHookPublic);
+ }
+ }
+
#if defined(ENABLE_ICU) && defined(ENABLE_INTL)
const Vector<String*, GCUtil::gc_malloc_allocator<String*>>& intlCollatorAvailableLocales();
const Vector<String*, GCUtil::gc_malloc_allocator<String*>>& intlDateTimeFormatAvailableLocales();
void (*m_onVMInstanceDestroy)(VMInstance* instance, void* data);
void* m_onVMInstanceDestroyData;
+ // PromiseHook is triggered for each Promise lifecycle event.
+ // A third-party app registers a PromiseHook only when it needs one.
+ PromiseHook m_promiseHook;
+ void* m_promiseHookPublic;
+
ToStringRecursionPreventer m_toStringRecursionPreventer;
void* m_stackStartAddress;
void* WeakMapObject::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word desc[GC_BITMAP_SIZE(WeakMapObject)] = { 0 };
Object::fillGCDescriptor(desc);
void* WeakMapObject::WeakMapObjectDataItem::operator new(size_t size)
{
#ifdef NDEBUG
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word desc[GC_BITMAP_SIZE(WeakMapObject::WeakMapObjectDataItem)] = { 0 };
GC_set_bit(desc, GC_WORD_OFFSET(WeakMapObject::WeakMapObjectDataItem, data));
void* WeakRefObject::operator new(size_t size)
{
#ifdef NDEBUG
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(WeakRefObject)] = { 0 };
Object::fillGCDescriptor(obj_bitmap);
void* WeakSetObject::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word desc[GC_BITMAP_SIZE(WeakSetObject)] = { 0 };
Object::fillGCDescriptor(desc);
void* operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word desc[GC_BITMAP_SIZE(TightVector)] = { 0 };
GC_set_bit(desc, GC_WORD_OFFSET(TightVector, m_buffer));
{
FILETIME ft;
unsigned __int64 tmpres = 0;
- static int tzflag = 0;
+ static MAY_THREAD_LOCAL int tzflag = 0;
if (NULL != tv) {
GetSystemTimeAsFileTime(&ft);
void* operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word desc[GC_BITMAP_SIZE(Vector)] = { 0 };
GC_set_bit(desc, GC_WORD_OFFSET(Vector, m_buffer));
void* operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word desc[GC_BITMAP_SIZE(VectorWithNoSize)] = { 0 };
GC_set_bit(desc, GC_WORD_OFFSET(VectorWithNoSize, m_buffer));
void* ExportedFunctionObject::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(ExportedFunctionObject)] = { 0 };
FunctionObject::fillGCDescriptor(obj_bitmap);
void* WASMHostFunctionEnvironment::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(WASMHostFunctionEnvironment)] = { 0 };
GC_set_bit(obj_bitmap, GC_WORD_OFFSET(WASMHostFunctionEnvironment, func));
void* WASMModuleObject::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(WASMModuleObject)] = { 0 };
Object::fillGCDescriptor(obj_bitmap);
void* WASMInstanceObject::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(WASMInstanceObject)] = { 0 };
Object::fillGCDescriptor(obj_bitmap);
void* WASMMemoryObject::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(WASMMemoryObject)] = { 0 };
Object::fillGCDescriptor(obj_bitmap);
void* WASMTableObject::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(WASMTableObject)] = { 0 };
Object::fillGCDescriptor(obj_bitmap);
void* WASMGlobalObject::operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(WASMGlobalObject)] = { 0 };
Object::fillGCDescriptor(obj_bitmap);
* USA
*/
+#include "GCUtilInternal.h"
#include "Allocator.h"
#include <vector>
GC_API void GC_CALL GC_set_on_collection_event(GC_on_collection_event_proc);
-std::vector<std::pair<GC_on_event_proc, void*>> g_gcEventCallbacks;
+// Accessor for the GC event-callback list. A function-local static sidesteps
+// global-initialization-order issues and, being MAY_THREAD_LOCAL, gives each
+// thread its own list when GC_THREAD_ISOLATE is enabled.
+std::vector<std::pair<GC_on_event_proc, void*>>& gcEventCallbacks()
+{
+ static MAY_THREAD_LOCAL std::vector<std::pair<GC_on_event_proc, void*>> eventCallbacks;
+ return eventCallbacks;
+}
GC_API void GC_CALL GC_add_event_callback(GC_on_event_proc fn, void* data)
{
- g_gcEventCallbacks.push_back(std::make_pair(fn, data));
+ gcEventCallbacks().push_back(std::make_pair(fn, data));
GC_set_on_collection_event([](GC_EventType evtType) {
- for (size_t i = 0; i < g_gcEventCallbacks.size(); i ++) {
- g_gcEventCallbacks[i].first(evtType, g_gcEventCallbacks[i].second);
+ for (size_t i = 0; i < gcEventCallbacks().size(); i ++) {
+ gcEventCallbacks()[i].first(evtType, gcEventCallbacks()[i].second);
}
});
}
GC_API void GC_CALL GC_remove_event_callback(GC_on_event_proc fn, void* data)
{
- auto iter = std::find(g_gcEventCallbacks.begin(), g_gcEventCallbacks.end(), std::make_pair(fn, data));
- if (iter != g_gcEventCallbacks.end()) {
- g_gcEventCallbacks.erase(iter);
+ auto iter = std::find(gcEventCallbacks().begin(), gcEventCallbacks().end(), std::make_pair(fn, data));
+ if (iter != gcEventCallbacks().end()) {
+ gcEventCallbacks().erase(iter);
}
}
// This structure holds information about the size of the
// allcoated memory area and a finalization user callback
// with its user defined data.
-static std::map<void*, AllocInfo> addressTable;
+// Accessor for the allocation-info table (address -> AllocInfo). The
+// function-local static is MAY_THREAD_LOCAL so each thread tracks its own
+// allocations under GC_THREAD_ISOLATE.
+// NOTE(review): the old variable had internal linkage (static); the new
+// accessor is extern -- confirm whether it should be declared static as well.
+std::map<void*, AllocInfo>& addressTable()
+{
+ static MAY_THREAD_LOCAL std::map<void*, AllocInfo> table;
+ return table;
+}
-static HeapInfo heapInfo = { 0, 0, 0, 0, 0, 0, 0 };
+static MAY_THREAD_LOCAL HeapInfo heapInfo = { 0, 0, 0, 0, 0, 0, 0 };
// The addressTable allocation should be in a separated function. This is
// important, because the noise (helper structiore allcoations) can be
// filtered out by the Freya tool of Valgrind.
void createAddressEntry(void* address, size_t size)
{
- auto it = addressTable.find(address);
+ auto it = addressTable().find(address);
// The address should not exist.
- assert(it == addressTable.end());
+ assert(it == addressTable().end());
- addressTable[address] = { nullptr, nullptr, size };
+ addressTable()[address] = { nullptr, nullptr, size };
}
void unregisterGCAddress(void* address, void* data)
{
- auto it = addressTable.find(address);
+ auto it = addressTable().find(address);
// The address should exist.
- assert(it != addressTable.end());
+ assert(it != addressTable().end());
AllocInfo allocInfo = it->second;
// Execute the user defined callback.
heapInfo.allocated -= allocInfo.size;
heapInfo.free_count++;
- addressTable.erase(it);
+ addressTable().erase(it);
#ifdef ESCARGOT_VALGRIND
VALGRIND_FREELIKE_BLOCK(address, 0);
void registerGCAddress(void* address, size_t siz)
{
- auto it = addressTable.find(address);
+ auto it = addressTable().find(address);
// The address should not exist.
- assert(it == addressTable.end());
+ assert(it == addressTable().end());
createAddressEntry(address, siz);
void* cd, GC_finalization_proc *ofn,
void** ocd)
{
- auto it = addressTable.find(obj);
+ auto it = addressTable().find(obj);
// The address should exist.
- assert(it != addressTable.end());
+ assert(it != addressTable().end());
(it->second).user_cb = fn;
(it->second).user_data = cd;
FILE(GLOB GCUTIL_BDWGC_SRC bdwgc/*.c)
FILE(GLOB GCUTIL_SRC *.cpp)
-#SET (GCUTIL_MODE "release" CACHE STRING "GCUTIL_MODE")
-SET (GCUTIL_ENABLE_THREADING "0" CACHE STRING "GCUTIL_ENABLE_THREADING")
-SET (GCUTIL_ENABLE_THREAD_LOCAL_ALLOC "0" CACHE STRING "GCUTIL_ENABLE_THREAD_LOCAL_ALLOC")
-
SET (GCUTIL_CFLAGS_INTERNAL -g3 -fdata-sections -ffunction-sections -DESCARGOT -Wno-unused-variable -fno-strict-aliasing -DGC_DLL=1 -fvisibility=hidden)
SET (GCUTIL_CFLAGS_INTERNAL ${GCUTIL_CFLAGS_INTERNAL} -DALL_INTERIOR_POINTERS=0 -DENABLE_DISCLAIM=1 -DGC_ATOMIC_UNCOLLECTABLE=1 -DGC_DONT_REGISTER_MAIN_STATIC_DATA=1 -DGC_ENABLE_SUSPEND_THREAD=1)
SET (GCUTIL_CFLAGS_INTERNAL ${GCUTIL_CFLAGS_INTERNAL} -DGC_NO_THREADS_DISCOVERY=1 -DGC_VERSION_MAJOR=8 -DGC_VERSION_MICRO=4 -DGC_VERSION_MINOR=0)
SET (GCUTIL_CFLAGS_INTERNAL ${GCUTIL_CFLAGS_INTERNAL} -DIGNORE_DYNAMIC_LOADING=1 -DJAVA_FINALIZATION=1 -DMUNMAP_THRESHOLD=1 -DNO_EXECUTE_PERMISSION=1 -DSTDC_HEADERS=1 -DUSE_MMAP=1 -DUSE_MUNMAP=1)
SET (GCUTIL_CFLAGS_INTERNAL ${GCUTIL_CFLAGS_INTERNAL} -DHAVE_PTHREAD_GETATTR_NP=1 -DUSE_GET_STACKBASE_FOR_MAIN=1)
-IF (${GCUTIL_ENABLE_THREADING} STREQUAL "1")
- SET (GCUTIL_CFLAGS_INTERNAL ${GCUTIL_CFLAGS_INTERNAL} -DGC_BUILTIN_ATOMIC=1 -DGC_THREADS=1 -DPARALLEL_MARK=1 -D_REENTRANT=1 -DGC_ENABLE_INCREMENTAL=1)
-
- IF (${GCUTIL_ENABLE_THREAD_LOCAL_ALLOC} STREQUAL "1")
- SET (GCUTIL_CFLAGS_INTERNAL ${GCUTIL_CFLAGS_INTERNAL} -DTHREAD_LOCAL_ALLOC=1)
- ENDIF()
+IF (GCUTIL_ENABLE_THREADING)
+ SET (GCUTIL_CFLAGS_INTERNAL ${GCUTIL_CFLAGS_INTERNAL} -D_REENTRANT=1 -DGC_THREAD_ISOLATE=1)
ENDIF()
-IF (${GCUTIL_MODE} STREQUAL "debug")
+IF (GCUTIL_MODE STREQUAL "debug")
SET (GCUTIL_CFLAGS_INTERNAL ${GCUTIL_CFLAGS_INTERNAL} -DKEEP_BACK_PTRS=1 -DSAVE_CALL_COUNT=8 -DDBG_HDRS_ALL=1 -DGC_DEBUG -O0)
-ELSEIF (${GCUTIL_MODE} STREQUAL "release")
+ELSE()
SET (GCUTIL_CFLAGS_INTERNAL ${GCUTIL_CFLAGS_INTERNAL} -DNO_DEBUGGING=1 -O2)
ENDIF()
--- /dev/null
+/*
+ * Copyright (c) 2021-present Samsung Electronics Co., Ltd
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
+ * USA
+*/
+
+#ifndef __GCUtilInternal__
+#define __GCUtilInternal__
+
+/* MAY_THREAD_LOCAL marks globals that must become per-thread state when
+ * GC_THREAD_ISOLATE is defined: it then expands to the compiler-specific
+ * thread-local-storage qualifier, and to nothing (plain globals) otherwise. */
+#if defined(GC_THREAD_ISOLATE)
+
+#if defined(_MSC_VER)
+#define MAY_THREAD_LOCAL __declspec(thread)
+#else
+#define MAY_THREAD_LOCAL __thread
+#endif
+
+#else /* GC_THREAD_ISOLATE */
+
+#define MAY_THREAD_LOCAL
+#endif /* GC_THREAD_ISOLATE */
+
+#endif /* __GCUtilInternal__ */
#include <stdio.h>
#ifdef GC_USE_ENTIRE_HEAP
- int GC_use_entire_heap = TRUE;
+ MAY_THREAD_LOCAL int GC_use_entire_heap = TRUE;
#else
- int GC_use_entire_heap = FALSE;
+ MAY_THREAD_LOCAL int GC_use_entire_heap = FALSE;
#endif
/*
#ifndef GC_GCJ_SUPPORT
STATIC
#endif
- struct hblk * GC_hblkfreelist[N_HBLK_FLS+1] = { 0 };
+ MAY_THREAD_LOCAL struct hblk * GC_hblkfreelist[N_HBLK_FLS+1] = { 0 };
/* List of completely empty heap blocks */
/* Linked through hb_next field of */
/* header structure associated with */
#ifndef GC_GCJ_SUPPORT
STATIC
#endif
- word GC_free_bytes[N_HBLK_FLS+1] = { 0 };
+ MAY_THREAD_LOCAL word GC_free_bytes[N_HBLK_FLS+1] = { 0 };
/* Number of free bytes on each list. Remains visible to GCJ. */
/* Return the largest n such that the number of free bytes on lists */
# define MUNMAP_THRESHOLD 6
# endif
-GC_INNER int GC_unmap_threshold = MUNMAP_THRESHOLD;
+GC_INNER MAY_THREAD_LOCAL int GC_unmap_threshold = MUNMAP_THRESHOLD;
/* Unmap blocks that haven't been recently touched. This is the only way */
/* way blocks are ever unmapped. */
return result;
}
-STATIC long GC_large_alloc_warn_suppressed = 0;
+STATIC MAY_THREAD_LOCAL long GC_large_alloc_warn_suppressed = 0;
/* Number of warnings suppressed so far. */
/* The same, but with search restricted to nth free list. Flags is */
&& size_needed == (signed_word)HBLKSIZE
&& IS_MAPPED(hhdr)) {
if (!GC_find_leak) {
- static unsigned count = 0;
+ static MAY_THREAD_LOCAL unsigned count = 0;
/* The block is completely blacklisted. We need */
/* to drop some such blocks, since otherwise we spend */
* allocator.
*/
-word GC_non_gc_bytes = 0; /* Number of bytes not intended to be collected */
+MAY_THREAD_LOCAL word GC_non_gc_bytes = 0; /* Number of bytes not intended to be collected */
-word GC_gc_no = 0;
+MAY_THREAD_LOCAL word GC_gc_no = 0;
#ifndef NO_CLOCK
- static unsigned long full_gc_total_time = 0; /* in msecs, may wrap */
- static GC_bool measure_performance = FALSE;
+ static MAY_THREAD_LOCAL unsigned long full_gc_total_time = 0; /* in msecs, may wrap */
+ static MAY_THREAD_LOCAL GC_bool measure_performance = FALSE;
/* Do performance measurements if set to true (e.g., */
/* accumulation of the total time of full collections). */
#endif /* !NO_CLOCK */
#ifndef GC_DISABLE_INCREMENTAL
- GC_INNER GC_bool GC_incremental = FALSE; /* By default, stop the world. */
+ GC_INNER MAY_THREAD_LOCAL GC_bool GC_incremental = FALSE; /* By default, stop the world. */
#endif
GC_API int GC_CALL GC_is_incremental_mode(void)
#endif
#if defined(GC_FULL_FREQ) && !defined(CPPCHECK)
- int GC_full_freq = GC_FULL_FREQ;
+ int MAY_THREAD_LOCAL GC_full_freq = GC_FULL_FREQ;
#else
- int GC_full_freq = 19; /* Every 20th collection is a full */
+ int MAY_THREAD_LOCAL GC_full_freq = 19;
+ /* Every 20th collection is a full */
/* collection, whether we need it */
/* or not. */
#endif
-STATIC GC_bool GC_need_full_gc = FALSE;
+STATIC MAY_THREAD_LOCAL GC_bool GC_need_full_gc = FALSE;
/* Need full GC do to heap growth. */
#ifdef THREAD_LOCAL_ALLOC
GC_INNER GC_bool GC_world_stopped = FALSE;
#endif
-STATIC word GC_used_heap_size_after_full = 0;
+STATIC MAY_THREAD_LOCAL word GC_used_heap_size_after_full = 0;
/* GC_copyright symbol is externally visible. */
EXTERN_C_BEGIN
/* some more variables */
#ifdef GC_DONT_EXPAND
- int GC_dont_expand = TRUE;
+ MAY_THREAD_LOCAL int GC_dont_expand = TRUE;
#else
- int GC_dont_expand = FALSE;
+ MAY_THREAD_LOCAL int GC_dont_expand = FALSE;
#endif
#if defined(GC_FREE_SPACE_DIVISOR) && !defined(CPPCHECK)
- word GC_free_space_divisor = GC_FREE_SPACE_DIVISOR; /* must be > 0 */
+ MAY_THREAD_LOCAL word GC_free_space_divisor = GC_FREE_SPACE_DIVISOR; /* must be > 0 */
#else
- word GC_free_space_divisor = 3;
+ MAY_THREAD_LOCAL word GC_free_space_divisor = 3;
#endif
GC_INNER int GC_CALLBACK GC_never_stop_func(void)
}
#if defined(GC_TIME_LIMIT) && !defined(CPPCHECK)
- unsigned long GC_time_limit = GC_TIME_LIMIT;
+ MAY_THREAD_LOCAL unsigned long GC_time_limit = GC_TIME_LIMIT;
/* We try to keep pause times from exceeding */
/* this by much. In milliseconds. */
#else
- unsigned long GC_time_limit = 50;
+ MAY_THREAD_LOCAL unsigned long GC_time_limit = 50;
#endif
#ifndef NO_CLOCK
- STATIC CLOCK_TYPE GC_start_time = CLOCK_TYPE_INITIALIZER;
+ STATIC MAY_THREAD_LOCAL CLOCK_TYPE GC_start_time = CLOCK_TYPE_INITIALIZER;
/* Time at which we stopped world. */
/* used only in GC_timeout_stop_func. */
#endif
-STATIC int GC_n_attempts = 0; /* Number of attempts at finishing */
+STATIC MAY_THREAD_LOCAL int GC_n_attempts = 0;
+ /* Number of attempts at finishing */
/* collection within GC_time_limit. */
-STATIC GC_stop_func GC_default_stop_func = GC_never_stop_func;
+STATIC MAY_THREAD_LOCAL GC_stop_func GC_default_stop_func = GC_never_stop_func;
/* accessed holding the lock. */
GC_API void GC_CALL GC_set_stop_func(GC_stop_func stop_func)
STATIC int GC_CALLBACK GC_timeout_stop_func (void)
{
CLOCK_TYPE current_time;
- static unsigned count = 0;
+ static MAY_THREAD_LOCAL unsigned count = 0;
unsigned long time_diff;
if ((*GC_default_stop_func)())
GC_INNER word GC_total_stacksize = 0; /* updated on every push_all_stacks */
#endif
-static size_t min_bytes_allocd_minimum = 1;
+static MAY_THREAD_LOCAL size_t min_bytes_allocd_minimum = 1;
/* The lowest value returned by min_bytes_allocd(). */
GC_API void GC_CALL GC_set_min_bytes_allocd(size_t value)
? result : min_bytes_allocd_minimum;
}
-STATIC word GC_non_gc_bytes_at_gc = 0;
+STATIC MAY_THREAD_LOCAL word GC_non_gc_bytes_at_gc = 0;
/* Number of explicitly managed bytes of storage */
/* at last collection. */
/* Heap size at which we need a collection to avoid expanding past */
/* limits used by blacklisting. */
-STATIC word GC_collect_at_heapsize = GC_WORD_MAX;
+STATIC MAY_THREAD_LOCAL word GC_collect_at_heapsize = GC_WORD_MAX;
/* Have we allocated enough to amortize a collection? */
GC_INNER GC_bool GC_should_collect(void)
{
- static word last_min_bytes_allocd;
- static word last_gc_no;
+ static MAY_THREAD_LOCAL word last_min_bytes_allocd;
+ static MAY_THREAD_LOCAL word last_gc_no;
if (last_gc_no != GC_gc_no) {
last_gc_no = GC_gc_no;
last_min_bytes_allocd = min_bytes_allocd();
|| GC_heapsize >= GC_collect_at_heapsize);
}
-/* STATIC */ GC_start_callback_proc GC_start_call_back = 0;
+/* STATIC */ MAY_THREAD_LOCAL GC_start_callback_proc GC_start_call_back = 0;
/* Called at start of full collections. */
/* Not called if 0. Called with the allocation */
/* lock held. Not used by GC itself. */
}
}
-STATIC GC_bool GC_is_full_gc = FALSE;
+STATIC MAY_THREAD_LOCAL GC_bool GC_is_full_gc = FALSE;
STATIC GC_bool GC_stopped_mark(GC_stop_func stop_func);
STATIC void GC_finish_collection(void);
GC_ASSERT(I_HOLD_LOCK());
ASSERT_CANCEL_DISABLED();
if (GC_should_collect()) {
- static int n_partial_gcs = 0;
+ static MAY_THREAD_LOCAL int n_partial_gcs = 0;
if (!GC_incremental) {
/* TODO: If possible, GC_default_stop_func should be used here */
}
}
-STATIC GC_on_collection_event_proc GC_on_collection_event = 0;
+STATIC MAY_THREAD_LOCAL GC_on_collection_event_proc GC_on_collection_event = 0;
GC_API void GC_CALL GC_set_on_collection_event(GC_on_collection_event_proc fn)
{
/* how long it takes. Doesn't count the initial root scan */
/* for a full GC. */
-STATIC int GC_deficit = 0;/* The number of extra calls to GC_mark_some */
+STATIC MAY_THREAD_LOCAL int GC_deficit = 0;/* The number of extra calls to GC_mark_some */
/* that we have made. */
-STATIC int GC_rate = GC_RATE;
+STATIC MAY_THREAD_LOCAL int GC_rate = GC_RATE;
GC_API void GC_CALL GC_set_rate(int value)
{
return GC_rate;
}
-static int max_prior_attempts = MAX_PRIOR_ATTEMPTS;
+static MAY_THREAD_LOCAL int max_prior_attempts = MAX_PRIOR_ATTEMPTS;
GC_API void GC_CALL GC_set_max_prior_attempts(int value)
{
RESTORE_CANCEL(cancel_state);
}
-GC_INNER void (*GC_check_heap)(void) = 0;
-GC_INNER void (*GC_print_all_smashed)(void) = 0;
+GC_INNER MAY_THREAD_LOCAL void (*GC_check_heap)(void) = 0;
+GC_INNER MAY_THREAD_LOCAL void (*GC_print_all_smashed)(void) = 0;
GC_API int GC_CALL GC_collect_a_little(void)
{
/* Variables for world-stop average delay time statistic computation. */
/* "divisor" is incremented every world-stop and halved when reached */
/* its maximum (or upon "total_time" overflow). */
- static unsigned world_stopped_total_time = 0;
- static unsigned world_stopped_total_divisor = 0;
+ static MAY_THREAD_LOCAL unsigned world_stopped_total_time = 0;
+ static MAY_THREAD_LOCAL unsigned world_stopped_total_divisor = 0;
# ifndef MAX_TOTAL_TIME_DIVISOR
/* We shall not use big values here (so "outdated" delay time */
/* values would have less impact on "average" delay time value than */
void GC_check_tls(void);
#endif
-GC_on_heap_resize_proc GC_on_heap_resize = 0;
+MAY_THREAD_LOCAL GC_on_heap_resize_proc GC_on_heap_resize = 0;
/* Used for logging only. */
GC_INLINE int GC_compute_heap_usage_percent(void)
if (GC_have_errors) GC_print_all_errors();
}
-STATIC word GC_heapsize_at_forced_unmap = 0;
+STATIC MAY_THREAD_LOCAL word GC_heapsize_at_forced_unmap = 0;
GC_API void GC_CALL GC_gcollect_and_unmap(void)
{
(void)GC_try_to_collect_general(GC_never_stop_func, TRUE);
}
-GC_INNER word GC_n_heap_sects = 0;
+GC_INNER MAY_THREAD_LOCAL word GC_n_heap_sects = 0;
/* Number of sections currently in heap. */
#ifdef USE_PROC_FOR_LIBRARIES
- GC_INNER word GC_n_memory = 0;
+ GC_INNER MAY_THREAD_LOCAL word GC_n_memory = 0;
/* Number of GET_MEM allocated memory sections. */
#endif
}
#endif
-void * GC_least_plausible_heap_addr = (void *)GC_WORD_MAX;
-void * GC_greatest_plausible_heap_addr = 0;
+MAY_THREAD_LOCAL void * GC_least_plausible_heap_addr = (void *)GC_WORD_MAX;
+MAY_THREAD_LOCAL void * GC_greatest_plausible_heap_addr = 0;
GC_INLINE word GC_max(word x, word y)
{
return(x < y? x : y);
}
-STATIC word GC_max_heapsize = 0;
+STATIC MAY_THREAD_LOCAL word GC_max_heapsize = 0;
GC_API void GC_CALL GC_set_max_heap_size(GC_word n)
{
GC_max_heapsize = n;
}
-GC_word GC_max_retries = 0;
+MAY_THREAD_LOCAL GC_word GC_max_retries = 0;
/* This explicitly increases the size of the heap. It is used */
/* internally, but may also be invoked from GC_expand_hp by the user. */
return(result);
}
-word GC_fo_entries = 0; /* used also in extra/MacOS.c */
+MAY_THREAD_LOCAL word GC_fo_entries = 0; /* used also in extra/MacOS.c */
-GC_INNER unsigned GC_fail_count = 0;
+GC_INNER MAY_THREAD_LOCAL unsigned GC_fail_count = 0;
/* How many consecutive GC/expansion failures? */
/* Reset by GC_allochblk. */
-static word last_fo_entries = 0;
-static word last_bytes_finalized = 0;
+static MAY_THREAD_LOCAL word last_fo_entries = 0;
+static MAY_THREAD_LOCAL word last_bytes_finalized = 0;
/* Collect or expand heap in an attempt make the indicated number of */
/* free blocks available. Should be called until the blocks are */
#ifdef MAKE_BACK_GRAPH
+#if defined(GC_THREAD_ISOLATE)
+# error "You cannot use this feature with thread-isolation"
+#endif
+
#define MAX_IN 10 /* Maximum in-degree we handle directly */
/* #include <unistd.h> */
/* Pointers to individual tables. We replace one table by another by */
/* switching these pointers. */
-STATIC word * GC_old_normal_bl = NULL;
+STATIC MAY_THREAD_LOCAL word * GC_old_normal_bl = NULL;
/* Nonstack false references seen at last full */
/* collection. */
-STATIC word * GC_incomplete_normal_bl = NULL;
+STATIC MAY_THREAD_LOCAL word * GC_incomplete_normal_bl = NULL;
/* Nonstack false references seen since last */
/* full collection. */
-STATIC word * GC_old_stack_bl = NULL;
-STATIC word * GC_incomplete_stack_bl = NULL;
+STATIC MAY_THREAD_LOCAL word * GC_old_stack_bl = NULL;
+STATIC MAY_THREAD_LOCAL word * GC_incomplete_stack_bl = NULL;
-STATIC word GC_total_stack_black_listed = 0;
+STATIC MAY_THREAD_LOCAL word GC_total_stack_black_listed = 0;
/* Number of bytes on stack blacklist. */
-GC_INNER word GC_black_list_spacing = MINHINCR * HBLKSIZE;
+GC_INNER MAY_THREAD_LOCAL word GC_black_list_spacing = MINHINCR * HBLKSIZE;
/* Initial rough guess. */
STATIC void GC_clear_bl(word *);
IS_UNCOLLECTABLE(kind) ? "uncollectable" : "composite");
}
-GC_INNER void (*GC_print_heap_obj)(ptr_t p) = GC_default_print_heap_obj_proc;
+GC_INNER MAY_THREAD_LOCAL void (*GC_print_heap_obj)(ptr_t p) = GC_default_print_heap_obj_proc;
#ifdef PRINT_BLACK_LIST
STATIC void GC_print_blacklisted_ptr(word p, ptr_t source,
/* to hide it from collector. */
} page_entry;
-page_entry GC_sums[NSUMS];
+MAY_THREAD_LOCAL page_entry GC_sums[NSUMS];
-STATIC word GC_faulted[NSUMS] = { 0 };
+STATIC MAY_THREAD_LOCAL word GC_faulted[NSUMS] = { 0 };
/* Record of pages on which we saw a write fault. */
-STATIC size_t GC_n_faulted = 0;
+STATIC MAY_THREAD_LOCAL size_t GC_n_faulted = 0;
#if defined(MPROTECT_VDB) && !defined(DARWIN)
void GC_record_fault(struct hblk * h)
return(result | 0x80000000 /* doesn't look like pointer */);
}
-int GC_n_dirty_errors = 0;
-int GC_n_faulted_dirty_errors = 0;
-unsigned long GC_n_clean = 0;
-unsigned long GC_n_dirty = 0;
+MAY_THREAD_LOCAL int GC_n_dirty_errors = 0;
+MAY_THREAD_LOCAL int GC_n_faulted_dirty_errors = 0;
+MAY_THREAD_LOCAL unsigned long GC_n_clean = 0;
+MAY_THREAD_LOCAL unsigned long GC_n_dirty = 0;
STATIC void GC_update_check_page(struct hblk *h, int index)
{
}
#endif /* !SHORT_DBG_HDRS */
-STATIC GC_describe_type_fn GC_describe_type_fns[MAXOBJKINDS] = {0};
+STATIC MAY_THREAD_LOCAL GC_describe_type_fn GC_describe_type_fns[MAXOBJKINDS] = {0};
GC_API void GC_CALL GC_register_describe_type_fn(int kind,
GC_describe_type_fn fn)
GC_register_displacement_inner((word)sizeof(oh));
}
-size_t GC_debug_header_size = sizeof(oh);
+size_t MAY_THREAD_LOCAL GC_debug_header_size = sizeof(oh);
GC_API void GC_CALL GC_debug_register_displacement(size_t offset)
{
#ifndef MAX_SMASHED
# define MAX_SMASHED 20
#endif
-STATIC ptr_t GC_smashed[MAX_SMASHED] = {0};
-STATIC unsigned GC_n_smashed = 0;
+STATIC MAY_THREAD_LOCAL ptr_t GC_smashed[MAX_SMASHED] = {0};
+STATIC MAY_THREAD_LOCAL unsigned GC_n_smashed = 0;
STATIC void GC_add_smashed(ptr_t smashed)
{
/* determine whether a DSO really needs to be scanned by the GC. */
/* 0 means no filter installed. May be unused on some platforms. */
/* FIXME: Add filter support for more platforms. */
-STATIC GC_has_static_roots_func GC_has_static_roots = 0;
+STATIC MAY_THREAD_LOCAL GC_has_static_roots_func GC_has_static_roots = 0;
#if (defined(DYNAMIC_LOADING) || defined(MSWIN32) || defined(MSWINCE) \
|| defined(CYGWIN32)) && !defined(PCR)
ptr_t end2;
} load_segs[MAX_LOAD_SEGS];
- static int n_load_segs;
- static GC_bool load_segs_overflow;
+ static MAY_THREAD_LOCAL int n_load_segs;
+ static MAY_THREAD_LOCAL GC_bool load_segs_overflow;
# endif /* PT_GNU_RELRO */
STATIC int GC_register_dynlib_callback(struct dl_phdr_info * info,
STATIC struct link_map *
GC_FirstDLOpenedLinkMap(void)
{
- static struct link_map *cachedResult = 0;
+ static MAY_THREAD_LOCAL struct link_map *cachedResult = 0;
if (0 == COVERT_DATAFLOW(_DYNAMIC)) {
/* _DYNAMIC symbol not resolved. */
/* worry about. This may also work under other SVR4 variants. */
GC_INNER void GC_register_dynamic_libraries(void)
{
- static int fd = -1;
+ static MAY_THREAD_LOCAL int fd = -1;
char buf[30];
- static prmap_t * addr_map = 0;
- static int current_sz = 0; /* Number of records currently in addr_map */
+ static MAY_THREAD_LOCAL prmap_t * addr_map = 0;
+ static MAY_THREAD_LOCAL int current_sz = 0; /* Number of records currently in addr_map */
int needed_sz = 0; /* Required size of addr_map */
int i;
long flags;
word entries;
};
-STATIC struct dl_hashtbl_s GC_dl_hashtbl = {
+STATIC MAY_THREAD_LOCAL struct dl_hashtbl_s GC_dl_hashtbl = {
/* head */ NULL, /* log_size */ -1, /* entries */ 0 };
#ifndef GC_LONG_REFS_NOT_NEEDED
- STATIC struct dl_hashtbl_s GC_ll_hashtbl = { NULL, -1, 0 };
+ STATIC MAY_THREAD_LOCAL struct dl_hashtbl_s GC_ll_hashtbl = { NULL, -1, 0 };
#endif
struct finalizable_object {
finalization_mark_proc fo_mark_proc; /* Mark-through procedure */
};
-static signed_word log_fo_table_size = -1;
+static MAY_THREAD_LOCAL signed_word log_fo_table_size = -1;
-STATIC struct fnlz_roots_s {
+STATIC MAY_THREAD_LOCAL struct fnlz_roots_s {
struct finalizable_object **fo_head;
/* List of objects that should be finalized now: */
struct finalizable_object *finalize_now;
GC_hidden_pointer weak_ref;
} GCToggleRef;
- STATIC GC_toggleref_func GC_toggleref_callback = 0;
- STATIC GCToggleRef *GC_toggleref_arr = NULL;
- STATIC int GC_toggleref_array_size = 0;
- STATIC int GC_toggleref_array_capacity = 0;
+ STATIC MAY_THREAD_LOCAL GC_toggleref_func GC_toggleref_callback = 0;
+ STATIC MAY_THREAD_LOCAL GCToggleRef *GC_toggleref_arr = NULL;
+ STATIC MAY_THREAD_LOCAL int GC_toggleref_array_size = 0;
+ STATIC MAY_THREAD_LOCAL int GC_toggleref_array_capacity = 0;
GC_INNER void GC_process_togglerefs(void)
{
#endif /* !GC_TOGGLE_REFS_NOT_NEEDED */
/* Finalizer callback support. */
-STATIC GC_await_finalize_proc GC_object_finalized_proc = 0;
+STATIC MAY_THREAD_LOCAL GC_await_finalize_proc GC_object_finalized_proc = 0;
GC_API void GC_CALL GC_set_await_finalize_proc(GC_await_finalize_proc fn)
{
ocd, GC_null_finalize_mark_proc);
}
-static GC_bool need_unreachable_finalization = FALSE;
+static MAY_THREAD_LOCAL GC_bool need_unreachable_finalization = FALSE;
/* Avoid the work if this isn't used. */
GC_API void GC_CALL GC_register_finalizer_unreachable(void * obj,
#endif /* !NO_DEBUGGING */
#ifndef SMALL_CONFIG
- STATIC word GC_old_dl_entries = 0; /* for stats printing */
+ STATIC MAY_THREAD_LOCAL word GC_old_dl_entries = 0; /* for stats printing */
# ifndef GC_LONG_REFS_NOT_NEEDED
- STATIC word GC_old_ll_entries = 0;
+ STATIC MAY_THREAD_LOCAL word GC_old_ll_entries = 0;
# endif
#endif /* !SMALL_CONFIG */
#ifndef THREADS
/* Global variables to minimize the level of recursion when a client */
/* finalizer allocates memory. */
- STATIC int GC_finalizer_nested = 0;
+ STATIC MAY_THREAD_LOCAL int GC_finalizer_nested = 0;
/* Only the lowest byte is used, the rest is */
/* padding for proper global data alignment */
/* required for some compilers (like Watcom). */
- STATIC unsigned GC_finalizer_skipped = 0;
+ STATIC MAY_THREAD_LOCAL unsigned GC_finalizer_skipped = 0;
/* Checks and updates the level of finalizers recursion. */
/* Returns NULL if GC_invoke_finalizers() should not be called by the */
return count;
}
-static word last_finalizer_notification = 0;
+static MAY_THREAD_LOCAL word last_finalizer_notification = 0;
GC_INNER void GC_notify_or_invoke_finalizers(void)
{
GC_finalizer_notifier_proc notifier_fn = 0;
# if defined(KEEP_BACK_PTRS) || defined(MAKE_BACK_GRAPH)
- static word last_back_trace_gc_no = 1; /* Skip first one. */
+ static MAY_THREAD_LOCAL word last_back_trace_gc_no = 1; /* Skip first one. */
# endif
DCL_LOCK_STATE;
#include "gc_inline.h" /* for GC_malloc_kind */
#include "private/dbg_mlc.h" /* for oh type */
-STATIC int GC_finalized_kind = 0;
+STATIC MAY_THREAD_LOCAL int GC_finalized_kind = 0;
#if defined(KEEP_BACK_PTRS) || defined(MAKE_BACK_GRAPH)
/* The first bit is already used for a debug purpose. */
#else
STATIC
#endif
-GC_bool GC_gcj_malloc_initialized = FALSE;
+MAY_THREAD_LOCAL GC_bool GC_gcj_malloc_initialized = FALSE;
-int GC_gcj_kind = 0; /* Object kind for objects with descriptors */
+int MAY_THREAD_LOCAL GC_gcj_kind = 0; /* Object kind for objects with descriptors */
/* in "vtable". */
-int GC_gcj_debug_kind = 0;
+int MAY_THREAD_LOCAL GC_gcj_debug_kind = 0;
/* The kind of objects that is always marked */
/* with a mark proc call. */
-GC_INNER ptr_t * GC_gcjobjfreelist = NULL;
+GC_INNER MAY_THREAD_LOCAL ptr_t * GC_gcjobjfreelist = NULL;
STATIC struct GC_ms_entry * GC_gcj_fake_mark_proc(word * addr GC_ATTR_UNUSED,
struct GC_ms_entry *mark_stack_ptr,
* level tree.
*/
-STATIC bottom_index * GC_all_bottom_indices = 0;
+STATIC MAY_THREAD_LOCAL bottom_index * GC_all_bottom_indices = 0;
/* Pointer to the first (lowest address) */
/* bottom_index. Assumes the lock is held. */
-STATIC bottom_index * GC_all_bottom_indices_end = 0;
+STATIC MAY_THREAD_LOCAL bottom_index * GC_all_bottom_indices_end = 0;
/* Pointer to the last (highest address) */
/* bottom_index. Assumes the lock is held. */
/* Routines to dynamically allocate collector data structures that will */
/* never be freed. */
-static ptr_t scratch_free_ptr = 0;
+static MAY_THREAD_LOCAL ptr_t scratch_free_ptr = 0;
/* GC_scratch_last_end_ptr is end point of last obtained scratch area. */
/* GC_scratch_end_ptr is end point of current scratch area. */
}
}
-static hdr * hdr_free_list = 0;
+static MAY_THREAD_LOCAL hdr * hdr_free_list = 0;
/* Return an uninitialized header */
static hdr * alloc_hdr(void)
/* Public read-only variables */
/* The supplied getter functions are preferred for new code. */
-GC_API GC_ATTR_DEPRECATED GC_word GC_gc_no;
+GC_API GC_ATTR_DEPRECATED GC_MAY_THREAD_LOCAL GC_word GC_gc_no;
/* Counter incremented per collection. */
/* Includes empty GCs at startup. */
GC_API GC_word GC_CALL GC_get_gc_no(void);
/* avoid data races on multiprocessors. */
#ifdef GC_THREADS
- GC_API GC_ATTR_DEPRECATED int GC_parallel;
+ GC_API GC_ATTR_DEPRECATED GC_MAY_THREAD_LOCAL int GC_parallel;
/* GC is parallelized for performance on */
/* multiprocessors. Set to a non-zero value */
/* only implicitly if collector is built with */
/* The supplied setter and getter functions are preferred for new code. */
typedef void * (GC_CALLBACK * GC_oom_func)(size_t /* bytes_requested */);
-GC_API GC_ATTR_DEPRECATED GC_oom_func GC_oom_fn;
+GC_API GC_ATTR_DEPRECATED GC_MAY_THREAD_LOCAL GC_oom_func GC_oom_fn;
/* When there is insufficient memory to satisfy */
/* an allocation request, we return */
/* (*GC_oom_fn)(size). By default this just */
GC_API GC_oom_func GC_CALL GC_get_oom_fn(void);
typedef void (GC_CALLBACK * GC_on_heap_resize_proc)(GC_word /* new_size */);
-GC_API GC_ATTR_DEPRECATED GC_on_heap_resize_proc GC_on_heap_resize;
+GC_API GC_ATTR_DEPRECATED GC_MAY_THREAD_LOCAL GC_on_heap_resize_proc GC_on_heap_resize;
/* Invoked when the heap grows or shrinks. */
/* Called with the world stopped (and the */
/* allocation lock held). May be 0. */
/* acquire the GC lock (to avoid data races). */
#endif
-GC_API GC_ATTR_DEPRECATED int GC_find_leak;
+GC_API GC_ATTR_DEPRECATED GC_MAY_THREAD_LOCAL int GC_find_leak;
/* Set to true to turn on the leak-finding mode */
/* (do not actually garbage collect, but simply */
/* report inaccessible memory that was not */
GC_API void GC_CALL GC_set_find_leak(int);
GC_API int GC_CALL GC_get_find_leak(void);
-GC_API GC_ATTR_DEPRECATED int GC_all_interior_pointers;
+GC_API GC_ATTR_DEPRECATED GC_MAY_THREAD_LOCAL int GC_all_interior_pointers;
/* Arrange for pointers to object interiors to */
/* be recognized as valid. Typically should */
/* not be changed after GC initialization (in */
GC_API void GC_CALL GC_set_all_interior_pointers(int);
GC_API int GC_CALL GC_get_all_interior_pointers(void);
-GC_API GC_ATTR_DEPRECATED int GC_finalize_on_demand;
+GC_API GC_ATTR_DEPRECATED GC_MAY_THREAD_LOCAL int GC_finalize_on_demand;
/* If nonzero, finalizers will only be run in */
/* response to an explicit GC_invoke_finalizers */
/* call. The default is determined by whether */
GC_API void GC_CALL GC_set_finalize_on_demand(int);
GC_API int GC_CALL GC_get_finalize_on_demand(void);
-GC_API GC_ATTR_DEPRECATED int GC_java_finalization;
+GC_API GC_ATTR_DEPRECATED GC_MAY_THREAD_LOCAL int GC_java_finalization;
/* Mark objects reachable from finalizable */
/* objects in a separate post-pass. This makes */
/* it a bit safer to use non-topologically- */
GC_API int GC_CALL GC_get_java_finalization(void);
typedef void (GC_CALLBACK * GC_finalizer_notifier_proc)(void);
-GC_API GC_ATTR_DEPRECATED GC_finalizer_notifier_proc GC_finalizer_notifier;
+GC_API GC_ATTR_DEPRECATED GC_MAY_THREAD_LOCAL GC_finalizer_notifier_proc GC_finalizer_notifier;
/* Invoked by the collector when there are */
/* objects to be finalized. Invoked at most */
/* once per GC cycle. Never invoked unless */
# ifndef GC_DONT_GC
GC_ATTR_DEPRECATED
# endif
+ GC_MAY_THREAD_LOCAL
int GC_dont_gc; /* != 0 ==> Don't collect. In versions 6.2a1+, */
/* this overrides explicit GC_gcollect() calls. */
/* Used as a counter, so that nested enabling */
/* GC is disabled, GC_is_disabled() is */
/* preferred for new code. */
-GC_API GC_ATTR_DEPRECATED int GC_dont_expand;
+GC_API GC_ATTR_DEPRECATED GC_MAY_THREAD_LOCAL int GC_dont_expand;
/* Do not expand the heap unless explicitly */
/* requested or forced to. The setter and */
/* getter are unsynchronized. */
GC_API void GC_CALL GC_set_dont_expand(int);
GC_API int GC_CALL GC_get_dont_expand(void);
-GC_API GC_ATTR_DEPRECATED int GC_use_entire_heap;
+GC_API GC_ATTR_DEPRECATED GC_MAY_THREAD_LOCAL int GC_use_entire_heap;
/* Causes the non-incremental collector to use the */
/* entire heap before collecting. This was the only */
/* option for GC versions < 5.0. This sometimes */
/* frequencies, and hence fewer instructions executed */
/* in the collector. */
-GC_API GC_ATTR_DEPRECATED int GC_full_freq;
+GC_API GC_ATTR_DEPRECATED GC_MAY_THREAD_LOCAL int GC_full_freq;
/* Number of partial collections between */
/* full collections. Matters only if */
/* GC_is_incremental_mode(). */
GC_API void GC_CALL GC_set_full_freq(int);
GC_API int GC_CALL GC_get_full_freq(void);
-GC_API GC_ATTR_DEPRECATED GC_word GC_non_gc_bytes;
+GC_API GC_ATTR_DEPRECATED GC_MAY_THREAD_LOCAL GC_word GC_non_gc_bytes;
/* Bytes not considered candidates for */
/* collection. Used only to control scheduling */
/* of collections. Updated by */
GC_API void GC_CALL GC_set_non_gc_bytes(GC_word);
GC_API GC_word GC_CALL GC_get_non_gc_bytes(void);
-GC_API GC_ATTR_DEPRECATED int GC_no_dls;
+GC_API GC_ATTR_DEPRECATED GC_MAY_THREAD_LOCAL int GC_no_dls;
/* Don't register dynamic library data segments. */
/* Wizards only. Should be used only if the */
/* application explicitly registers all roots. */
GC_API void GC_CALL GC_set_no_dls(int);
GC_API int GC_CALL GC_get_no_dls(void);
-GC_API GC_ATTR_DEPRECATED GC_word GC_free_space_divisor;
+GC_API GC_ATTR_DEPRECATED GC_MAY_THREAD_LOCAL GC_word GC_free_space_divisor;
/* We try to make sure that we allocate at */
/* least N/GC_free_space_divisor bytes between */
/* collections, where N is twice the number */
GC_API void GC_CALL GC_set_free_space_divisor(GC_word);
GC_API GC_word GC_CALL GC_get_free_space_divisor(void);
-GC_API GC_ATTR_DEPRECATED GC_word GC_max_retries;
+GC_API GC_ATTR_DEPRECATED GC_MAY_THREAD_LOCAL GC_word GC_max_retries;
/* The maximum number of GCs attempted before */
/* reporting out of memory after heap */
/* expansion fails. Initially 0. */
GC_API GC_word GC_CALL GC_get_max_retries(void);
-GC_API GC_ATTR_DEPRECATED char *GC_stackbottom;
+GC_API GC_ATTR_DEPRECATED GC_MAY_THREAD_LOCAL char *GC_stackbottom;
/* Cool end of user stack. */
/* May be set in the client prior to */
/* calling any GC_ routines. This */
/* GC_call_with_gc_active() and */
/* GC_register_my_thread() instead. */
-GC_API GC_ATTR_DEPRECATED int GC_dont_precollect;
+GC_API GC_ATTR_DEPRECATED GC_MAY_THREAD_LOCAL int GC_dont_precollect;
/* Do not collect as part of GC */
/* initialization. Should be set only */
/* if the client wants a chance to */
GC_API void GC_CALL GC_set_dont_precollect(int);
GC_API int GC_CALL GC_get_dont_precollect(void);
-GC_API GC_ATTR_DEPRECATED unsigned long GC_time_limit;
+GC_API GC_ATTR_DEPRECATED GC_MAY_THREAD_LOCAL unsigned long GC_time_limit;
/* If incremental collection is enabled, */
/* We try to terminate collections */
/* after this many milliseconds. Not a */
const void * /* q */);
/* Functions called to report pointer checking errors */
-GC_API void (GC_CALLBACK * GC_same_obj_print_proc)(void * /* p */,
+GC_API GC_MAY_THREAD_LOCAL void (GC_CALLBACK * GC_same_obj_print_proc)(void * /* p */,
void * /* q */);
-GC_API void (GC_CALLBACK * GC_is_valid_displacement_print_proc)(void *);
-GC_API void (GC_CALLBACK * GC_is_visible_print_proc)(void *);
+GC_API GC_MAY_THREAD_LOCAL void (GC_CALLBACK * GC_is_valid_displacement_print_proc)(void *);
+GC_API GC_MAY_THREAD_LOCAL void (GC_CALLBACK * GC_is_visible_print_proc)(void *);
#ifdef GC_PTHREADS
/* For pthread support, we generally need to intercept a number of */
extern "C" {
#endif
-GC_API void ** const GC_objfreelist_ptr;
-GC_API void ** const GC_aobjfreelist_ptr;
-GC_API void ** const GC_uobjfreelist_ptr;
+GC_API MAY_THREAD_LOCAL void ** GC_objfreelist_ptr;
+GC_API MAY_THREAD_LOCAL void ** GC_aobjfreelist_ptr;
+GC_API MAY_THREAD_LOCAL void ** GC_uobjfreelist_ptr;
#ifdef GC_ATOMIC_UNCOLLECTABLE
- GC_API void ** const GC_auobjfreelist_ptr;
+ GC_API MAY_THREAD_LOCAL void ** GC_auobjfreelist_ptr;
#endif
GC_API void GC_CALL GC_incr_bytes_allocd(size_t bytes);
#endif /* GC_PTHREADS */
+#if defined(GC_THREAD_ISOLATE)
+# if defined(GC_THREADS)
+# error "GC_THREAD_ISOLATE cannot be used with GC_THREADS"
+# endif
+# if defined(_MSC_VER)
+# define GC_MAY_THREAD_LOCAL __declspec(thread)
+# else
+# define GC_MAY_THREAD_LOCAL __thread
+# endif
+#else /* GC_THREAD_ISOLATE */
+# define GC_MAY_THREAD_LOCAL
+#endif /* GC_THREAD_ISOLATE */
+
#endif
/* held. */
#define GC_INDIR_PER_OBJ_BIAS 0x10
-GC_API void * GC_least_plausible_heap_addr;
-GC_API void * GC_greatest_plausible_heap_addr;
+GC_API GC_MAY_THREAD_LOCAL void * GC_least_plausible_heap_addr;
+GC_API GC_MAY_THREAD_LOCAL void * GC_greatest_plausible_heap_addr;
/* Bounds on the heap. Guaranteed valid */
/* Likely to include future heap expansion. */
/* Hence usually includes not-yet-mapped */
(GC_word)(obj) <= (GC_word)GC_greatest_plausible_heap_addr ? \
GC_mark_and_push(obj, msp, lim, src) : (msp))
-GC_API size_t GC_debug_header_size;
+GC_API GC_MAY_THREAD_LOCAL size_t GC_debug_header_size;
/* The size of the header added to objects allocated through */
/* the GC_debug routines. */
/* Defined as a variable so that client mark procedures don't */
#define MAX_ENV \
(((word)1 << (WORDSZ - GC_DS_TAG_BITS - GC_LOG_MAX_MARK_PROCS)) - 1)
-GC_EXTERN unsigned GC_n_mark_procs;
+GC_EXTERN MAY_THREAD_LOCAL unsigned GC_n_mark_procs;
/* Number of mark stack entries to discard on overflow. */
#define GC_MARK_STACK_DISCARDS (INITIAL_MARK_STACK_SIZE/8)
-GC_EXTERN size_t GC_mark_stack_size;
+GC_EXTERN MAY_THREAD_LOCAL size_t GC_mark_stack_size;
#ifdef PARALLEL_MARK
/*
} \
} while (0)
-GC_EXTERN GC_bool GC_mark_stack_too_small;
+GC_EXTERN MAY_THREAD_LOCAL GC_bool GC_mark_stack_too_small;
/* We need a larger mark stack. May be */
/* set by client supplied mark routines.*/
#define MS_INVALID 5 /* "I" may not hold. */
-GC_EXTERN mark_state_t GC_mark_state;
+GC_EXTERN MAY_THREAD_LOCAL mark_state_t GC_mark_state;
EXTERN_C_END
# if defined(SMALL_CONFIG) || defined(PCR)
# define GC_on_abort(msg) (void)0 /* be silent on abort */
# else
- GC_API_PRIV GC_abort_func GC_on_abort;
+ GC_API_PRIV MAY_THREAD_LOCAL GC_abort_func GC_on_abort;
# endif
# if defined(CPPCHECK)
# define ABORT(msg) { GC_on_abort(msg); abort(); }
#define WARN(msg, arg) \
(*GC_current_warn_proc)((/* no const */ char *)("GC Warning: " msg), \
(word)(arg))
-GC_EXTERN GC_warn_proc GC_current_warn_proc;
+GC_EXTERN MAY_THREAD_LOCAL GC_warn_proc GC_current_warn_proc;
/* Print format type macro for decimal signed_word value passed WARN(). */
/* This could be redefined for Win64 or LLP64, but typically should */
bottom_index * _top_index[TOP_SZ];
};
-GC_API_PRIV GC_FAR struct _GC_arrays GC_arrays;
+GC_API_PRIV GC_FAR MAY_THREAD_LOCAL struct _GC_arrays GC_arrays;
#define GC_all_nils GC_arrays._all_nils
#define GC_atomic_in_use GC_arrays._atomic_in_use
# define MAXOBJKINDS 16
#endif
#endif
-GC_EXTERN struct obj_kind {
+GC_EXTERN MAY_THREAD_LOCAL struct obj_kind {
void **ok_freelist; /* Array of free list headers for this kind of */
/* object. Point either to GC_arrays or to */
/* storage allocated with GC_scratch_alloc. */
/* introduce maintenance problems. */
#ifdef SEPARATE_GLOBALS
- extern word GC_bytes_allocd;
+ extern MAY_THREAD_LOCAL word GC_bytes_allocd;
/* Number of bytes allocated during this collection cycle. */
- extern ptr_t GC_objfreelist[MAXOBJGRANULES+1];
+ extern MAY_THREAD_LOCAL ptr_t GC_objfreelist[MAXOBJGRANULES+1];
/* free list for NORMAL objects */
# define beginGC_objfreelist ((ptr_t)(&GC_objfreelist))
# define endGC_objfreelist (beginGC_objfreelist + sizeof(GC_objfreelist))
- extern ptr_t GC_aobjfreelist[MAXOBJGRANULES+1];
+ extern MAY_THREAD_LOCAL ptr_t GC_aobjfreelist[MAXOBJGRANULES+1];
/* free list for atomic (PTRFREE) objects */
# define beginGC_aobjfreelist ((ptr_t)(&GC_aobjfreelist))
# define endGC_aobjfreelist (beginGC_aobjfreelist + sizeof(GC_aobjfreelist))
# define GC_N_KINDS_INITIAL_VALUE 3
#endif
-GC_EXTERN unsigned GC_n_kinds;
+GC_EXTERN MAY_THREAD_LOCAL unsigned GC_n_kinds;
-GC_EXTERN word GC_n_heap_sects; /* Number of separately added heap */
+GC_EXTERN MAY_THREAD_LOCAL word GC_n_heap_sects; /* Number of separately added heap */
/* sections. */
#ifdef USE_PROC_FOR_LIBRARIES
- GC_EXTERN word GC_n_memory; /* Number of GET_MEM allocated memory */
+ GC_EXTERN MAY_THREAD_LOCAL word GC_n_memory; /* Number of GET_MEM allocated memory */
/* sections. */
#endif
-GC_EXTERN size_t GC_page_size;
+GC_EXTERN MAY_THREAD_LOCAL size_t GC_page_size;
/* Round up allocation size to a multiple of a page size. */
/* GC_setpagesize() is assumed to be already invoked. */
GC_INNER GC_bool GC_is_heap_base(void *p);
#endif
-GC_EXTERN word GC_black_list_spacing;
+GC_EXTERN MAY_THREAD_LOCAL word GC_black_list_spacing;
/* Average number of bytes between blacklisted */
/* blocks. Approximate. */
/* Counts only blocks that are */
/* problematic in the interior of an object. */
#ifdef GC_GCJ_SUPPORT
- extern struct hblk * GC_hblkfreelist[];
- extern word GC_free_bytes[]; /* Both remain visible to GNU GCJ. */
+ extern MAY_THREAD_LOCAL struct hblk * GC_hblkfreelist[];
+ extern MAY_THREAD_LOCAL word GC_free_bytes[]; /* Both remain visible to GNU GCJ. */
#endif
-GC_EXTERN word GC_root_size; /* Total size of registered root sections. */
+GC_EXTERN MAY_THREAD_LOCAL word GC_root_size; /* Total size of registered root sections. */
-GC_EXTERN GC_bool GC_debugging_started;
+GC_EXTERN MAY_THREAD_LOCAL GC_bool GC_debugging_started;
/* GC_debug_malloc has been called. */
/* This is used by GC_do_blocking[_inner](). */
/* frames belonging to the user functions invoked by GC_do_blocking. */
GC_INNER void GC_push_all_stack_sections(ptr_t lo, ptr_t hi,
struct GC_traced_stack_sect_s *traced_stack_sect);
- GC_EXTERN word GC_total_stacksize; /* updated on every push_all_stacks */
+ GC_EXTERN MAY_THREAD_LOCAL word GC_total_stacksize; /* updated on every push_all_stacks */
#else
- GC_EXTERN ptr_t GC_blocked_sp;
- GC_EXTERN struct GC_traced_stack_sect_s *GC_traced_stack_sect;
+ GC_EXTERN MAY_THREAD_LOCAL ptr_t GC_blocked_sp;
+ GC_EXTERN MAY_THREAD_LOCAL struct GC_traced_stack_sect_s *GC_traced_stack_sect;
/* Points to the "frame" data held in stack by */
/* the innermost GC_call_with_gc_active(). */
/* NULL if no such "frame" active. */
GC_INNER void GC_push_roots(GC_bool all, ptr_t cold_gc_frame);
/* Push all or dirty roots. */
-GC_API_PRIV GC_push_other_roots_proc GC_push_other_roots;
+GC_API_PRIV MAY_THREAD_LOCAL GC_push_other_roots_proc GC_push_other_roots;
/* Push system or application specific roots */
/* onto the mark stack. In some environments */
/* (e.g. threads environments) this is */
#ifdef THREADS
void GC_push_thread_structures(void);
#endif
-GC_EXTERN void (*GC_push_typed_structures)(void);
+GC_EXTERN MAY_THREAD_LOCAL void (*GC_push_typed_structures)(void);
/* A pointer such that we can avoid linking in */
/* the typed allocation support if unused. */
/* When set, it is OK to run GC from unknown thread. */
#endif
-GC_EXTERN GC_bool GC_is_initialized; /* GC_init() has been run. */
+GC_EXTERN MAY_THREAD_LOCAL GC_bool GC_is_initialized; /* GC_init() has been run. */
GC_INNER void GC_collect_a_little_inner(int n);
/* Do n units worth of garbage */
/* Print smashed and leaked objects, if any. */
/* Clear the lists of such objects. */
-GC_EXTERN void (*GC_check_heap)(void);
+GC_EXTERN MAY_THREAD_LOCAL void (*GC_check_heap)(void);
/* Check that all objects in the heap with */
/* debugging info are intact. */
/* Add any that are not to GC_smashed list. */
-GC_EXTERN void (*GC_print_all_smashed)(void);
+GC_EXTERN MAY_THREAD_LOCAL void (*GC_print_all_smashed)(void);
/* Print GC_smashed if it's not empty. */
/* Clear GC_smashed list. */
-GC_EXTERN void (*GC_print_heap_obj)(ptr_t p);
+GC_EXTERN MAY_THREAD_LOCAL void (*GC_print_heap_obj)(ptr_t p);
/* If possible print (using GC_err_printf) */
/* a more detailed description (terminated with */
/* "\n") of the object referred to by p. */
#endif
#ifndef SHORT_DBG_HDRS
- GC_EXTERN GC_bool GC_findleak_delay_free;
+ GC_EXTERN MAY_THREAD_LOCAL GC_bool GC_findleak_delay_free;
/* Do not immediately deallocate object on */
/* free() in the leak-finding mode, just mark */
/* it as freed (and deallocate it after GC). */
GC_INNER GC_bool GC_check_leaked(ptr_t base); /* from dbg_mlc.c */
#endif
-GC_EXTERN GC_bool GC_have_errors; /* We saw a smashed or leaked object. */
+GC_EXTERN MAY_THREAD_LOCAL GC_bool GC_have_errors; /* We saw a smashed or leaked object. */
/* Call error printing routine */
/* occasionally. It is OK to read it */
/* without acquiring the lock. */
#define VERBOSE 2
#if !defined(NO_CLOCK) || !defined(SMALL_CONFIG)
/* GC_print_stats should be visible to extra/MacOS.c. */
- extern int GC_print_stats; /* Nonzero generates basic GC log. */
+ extern MAY_THREAD_LOCAL int GC_print_stats; /* Nonzero generates basic GC log. */
/* VERBOSE generates add'l messages. */
#else /* SMALL_CONFIG */
# define GC_print_stats 0
#endif
#ifdef KEEP_BACK_PTRS
- GC_EXTERN long GC_backtraces;
+ GC_EXTERN MAY_THREAD_LOCAL long GC_backtraces;
GC_INNER void GC_generate_random_backtrace_no_gc(void);
#endif
GC_API_PRIV long GC_random(void);
#endif
-GC_EXTERN GC_bool GC_print_back_height;
+GC_EXTERN MAY_THREAD_LOCAL GC_bool GC_print_back_height;
#ifdef MAKE_BACK_GRAPH
void GC_print_back_graph_stats(void);
# define REACHABLE_AFTER_DIRTY(p) (void)(p)
#else /* !GC_DISABLE_INCREMENTAL */
- GC_EXTERN GC_bool GC_incremental;
+ GC_EXTERN MAY_THREAD_LOCAL GC_bool GC_incremental;
/* Using incremental/generational collection. */
/* Assumes dirty bits are being maintained. */
/* it is OK to be called again if the client invokes */
/* GC_enable_incremental once more). */
- GC_EXTERN GC_bool GC_manual_vdb;
+ GC_EXTERN MAY_THREAD_LOCAL GC_bool GC_manual_vdb;
/* The incremental collection is in the manual VDB */
/* mode. Assumes GC_incremental is true. Should not */
/* be modified once GC_incremental is set to true. */
void GC_print_heap_sects(void);
void GC_print_static_roots(void);
-extern word GC_fo_entries; /* should be visible in extra/MacOS.c */
+extern MAY_THREAD_LOCAL word GC_fo_entries; /* should be visible in extra/MacOS.c */
#ifdef KEEP_BACK_PTRS
GC_INNER void GC_store_back_pointer(ptr_t source, ptr_t dest);
/* to nearest value). */
#define TO_KiB_UL(v) ((unsigned long)(((v) + ((1 << 9) - 1)) >> 10))
-GC_EXTERN unsigned GC_fail_count;
+GC_EXTERN MAY_THREAD_LOCAL unsigned GC_fail_count;
/* How many consecutive GC/expansion failures? */
/* Reset by GC_allochblk(); defined in alloc.c. */
-GC_EXTERN long GC_large_alloc_warn_interval; /* defined in misc.c */
+GC_EXTERN MAY_THREAD_LOCAL long GC_large_alloc_warn_interval; /* defined in misc.c */
-GC_EXTERN signed_word GC_bytes_found;
+GC_EXTERN MAY_THREAD_LOCAL signed_word GC_bytes_found;
/* Number of reclaimed bytes after garbage collection; */
/* protected by GC lock; defined in reclaim.c. */
#ifndef GC_GET_HEAP_USAGE_NOT_NEEDED
- GC_EXTERN word GC_reclaimed_bytes_before_gc;
+ GC_EXTERN MAY_THREAD_LOCAL word GC_reclaimed_bytes_before_gc;
/* Number of bytes reclaimed before this */
/* collection cycle; used for statistics only. */
#endif
#ifdef USE_MUNMAP
- GC_EXTERN int GC_unmap_threshold; /* defined in allchblk.c */
- GC_EXTERN GC_bool GC_force_unmap_on_gcollect; /* defined in misc.c */
+ GC_EXTERN MAY_THREAD_LOCAL int GC_unmap_threshold; /* defined in allchblk.c */
+ GC_EXTERN MAY_THREAD_LOCAL GC_bool GC_force_unmap_on_gcollect; /* defined in misc.c */
#endif
#ifdef MSWIN32
- GC_EXTERN GC_bool GC_no_win32_dlls; /* defined in os_dep.c */
- GC_EXTERN GC_bool GC_wnt; /* Is Windows NT derivative; */
+ GC_EXTERN MAY_THREAD_LOCAL GC_bool GC_no_win32_dlls; /* defined in os_dep.c */
+ GC_EXTERN MAY_THREAD_LOCAL GC_bool GC_wnt; /* Is Windows NT derivative; */
/* defined and set in os_dep.c. */
#endif
#ifdef GC_GCJ_SUPPORT
# ifdef GC_ASSERTIONS
- GC_EXTERN GC_bool GC_gcj_malloc_initialized; /* defined in gcj_mlc.c */
+ GC_EXTERN MAY_THREAD_LOCAL GC_bool GC_gcj_malloc_initialized; /* defined in gcj_mlc.c */
# endif
- GC_EXTERN ptr_t * GC_gcjobjfreelist;
+ GC_EXTERN MAY_THREAD_LOCAL ptr_t * GC_gcjobjfreelist;
#endif
#ifdef MPROTECT_VDB
} while (0)
#ifndef NO_DEBUGGING
- GC_EXTERN GC_bool GC_dump_regularly;
+ GC_EXTERN MAY_THREAD_LOCAL GC_bool GC_dump_regularly;
/* Generate regular debugging dumps. */
# define COND_DUMP if (EXPECT(GC_dump_regularly, FALSE)) { \
GC_dump_named(NULL); \
#if defined(NEED_FIND_LIMIT) \
|| (defined(USE_PROC_FOR_LIBRARIES) && defined(THREADS))
- GC_EXTERN JMP_BUF GC_jmp_buf;
+ GC_EXTERN MAY_THREAD_LOCAL JMP_BUF GC_jmp_buf;
/* Set up a handler for address faults which will longjmp to */
/* GC_jmp_buf. */
# define EXTERN_C_END /* empty */
#endif
+#if defined(GC_THREAD_ISOLATE)
+# if defined(GC_THREADS)
+# error "GC_THREAD_ISOLATE cannot be used with GC_THREADS"
+# endif
+# if defined(_MSC_VER)
+# define MAY_THREAD_LOCAL __declspec(thread)
+# else
+# define MAY_THREAD_LOCAL __thread
+# endif
+#else /* GC_THREAD_ISOLATE */
+# define MAY_THREAD_LOCAL
+#endif /* GC_THREAD_ISOLATE */
+
EXTERN_C_BEGIN
/* Convenient internal macro to test version of Clang. */
#endif /* LINUX */
#if defined(SEARCH_FOR_DATA_START)
- extern ptr_t GC_data_start;
+ extern MAY_THREAD_LOCAL ptr_t GC_data_start;
# define DATASTART GC_data_start
#endif
/* Older versions of Darwin seem to lack getcontext(). */
/* ARM and MIPS Linux often doesn't support a real */
/* getcontext(). */
- static signed char getcontext_works = 0; /* (-1) - broken, 1 - works */
+ static MAY_THREAD_LOCAL signed char getcontext_works = 0; /* (-1) - broken, 1 - works */
ucontext_t ctxt;
# ifdef GETCONTEXT_FPU_EXCMASK_BUG
/* Workaround a bug (clearing the FPU exception mask) in */
/* Parameter to force GC at every malloc of size greater or equal to */
/* the given value. This might be handy during debugging. */
# if defined(CPPCHECK)
- size_t GC_dbg_collect_at_malloc_min_lb = 16*1024; /* e.g. */
+ MAY_THREAD_LOCAL size_t GC_dbg_collect_at_malloc_min_lb = 16*1024; /* e.g. */
# else
- size_t GC_dbg_collect_at_malloc_min_lb = (GC_COLLECT_AT_MALLOC);
+ MAY_THREAD_LOCAL size_t GC_dbg_collect_at_malloc_min_lb = (GC_COLLECT_AT_MALLOC);
# endif
#endif
/* free lists from inlined allocators without including gc_priv.h */
/* or introducing dependencies on internal data structure layouts. */
#include "gc_alloc_ptrs.h"
-void ** const GC_objfreelist_ptr = GC_objfreelist;
-void ** const GC_aobjfreelist_ptr = GC_aobjfreelist;
-void ** const GC_uobjfreelist_ptr = GC_uobjfreelist;
+MAY_THREAD_LOCAL void ** GC_objfreelist_ptr = 0;
+MAY_THREAD_LOCAL void ** GC_aobjfreelist_ptr = 0;
+MAY_THREAD_LOCAL void ** GC_uobjfreelist_ptr = 0;
# ifdef GC_ATOMIC_UNCOLLECTABLE
- void ** const GC_auobjfreelist_ptr = GC_auobjfreelist;
+ MAY_THREAD_LOCAL void ** GC_auobjfreelist_ptr = 0;
# endif
GC_API int GC_CALL GC_get_kind_and_size(const void * p, size_t * psize)
# endif
}
-volatile word GC_noop_sink;
+volatile MAY_THREAD_LOCAL word GC_noop_sink;
/* Single argument version, robust against whole program analysis. */
GC_ATTR_NO_SANITIZE_THREAD
/* mark_proc GC_mark_procs[MAX_MARK_PROCS] = {0} -- declared in gc_priv.h */
-GC_INNER unsigned GC_n_mark_procs = GC_RESERVED_MARK_PROCS;
+GC_INNER MAY_THREAD_LOCAL unsigned GC_n_mark_procs = GC_RESERVED_MARK_PROCS;
/* Initialize GC_obj_kinds properly and standard free lists properly. */
-/* This must be done statically since they may be accessed before */
-/* GC_init is called. */
/* It's done here, since we need to deal with mark descriptors. */
-GC_INNER struct obj_kind GC_obj_kinds[MAXOBJKINDS] = {
-/* PTRFREE */ { &GC_aobjfreelist[0], 0 /* filled in dynamically */,
+GC_INNER MAY_THREAD_LOCAL struct obj_kind GC_obj_kinds[MAXOBJKINDS] = {
+/* PTRFREE */ { 0, 0 /* filled in dynamically */,
/* 0 | */ GC_DS_LENGTH, FALSE, FALSE
#ifdef ESCARGOT
/*, */ OK_EAGER_SWEEP_INITZ
#endif
/*, */ OK_DISCLAIM_INITZ },
-/* NORMAL */ { &GC_objfreelist[0], 0,
+/* NORMAL */ { 0, 0,
/* 0 | */ GC_DS_LENGTH,
/* adjusted in GC_init for EXTRA_BYTES */
TRUE /* add length to descr */, TRUE
#endif
/*, */ OK_DISCLAIM_INITZ },
/* UNCOLLECTABLE */
- { &GC_uobjfreelist[0], 0,
+ { 0, 0,
/* 0 | */ GC_DS_LENGTH, TRUE /* add length to descr */, TRUE
#ifdef ESCARGOT
/*, */ OK_EAGER_SWEEP_INITZ
#endif
/*, */ OK_DISCLAIM_INITZ },
# ifdef GC_ATOMIC_UNCOLLECTABLE
- { &GC_auobjfreelist[0], 0,
+ { 0, 0,
/* 0 | */ GC_DS_LENGTH, FALSE /* add length to descr */, FALSE
#ifdef ESCARGOT
/*, */ OK_EAGER_SWEEP_INITZ
# endif
};
-GC_INNER unsigned GC_n_kinds = GC_N_KINDS_INITIAL_VALUE;
+GC_INNER MAY_THREAD_LOCAL unsigned GC_n_kinds = GC_N_KINDS_INITIAL_VALUE;
# ifndef INITIAL_MARK_STACK_SIZE
# define INITIAL_MARK_STACK_SIZE (1*HBLKSIZE)
# endif
#if !defined(GC_DISABLE_INCREMENTAL)
- STATIC word GC_n_rescuing_pages = 0;
+ STATIC MAY_THREAD_LOCAL word GC_n_rescuing_pages = 0;
/* Number of dirty pages we marked from */
/* excludes ptrfree pages, etc. */
/* Used for logging only. */
#endif
-GC_INNER size_t GC_mark_stack_size = 0;
+GC_INNER MAY_THREAD_LOCAL size_t GC_mark_stack_size = 0;
#ifdef PARALLEL_MARK
STATIC volatile AO_t GC_first_nonempty = 0;
/* thread. */
#endif
-GC_INNER mark_state_t GC_mark_state = MS_NONE;
+GC_INNER MAY_THREAD_LOCAL mark_state_t GC_mark_state = MS_NONE;
-GC_INNER GC_bool GC_mark_stack_too_small = FALSE;
+GC_INNER MAY_THREAD_LOCAL GC_bool GC_mark_stack_too_small = FALSE;
-static struct hblk * scan_ptr;
+static MAY_THREAD_LOCAL struct hblk * scan_ptr;
-STATIC GC_bool GC_objects_are_marked = FALSE;
+STATIC MAY_THREAD_LOCAL GC_bool GC_objects_are_marked = FALSE;
/* Are there collectible marked objects in the heap? */
/* Is a collection in progress? Note that this can return true in the */
typedef struct {
EXCEPTION_REGISTRATION ex_reg;
void *alt_path;
- } ext_ex_regn;
+ } ext_ex_regn;
static EXCEPTION_DISPOSITION mark_ex_handler(
struct _EXCEPTION_RECORD *ex_rec,
word bytes_allocd;
word arg1;
word arg2;
-} GC_trace_buf[TRACE_ENTRIES] = { { NULL, 0, 0, 0, 0 } };
+} MAY_THREAD_LOCAL GC_trace_buf[TRACE_ENTRIES] = { { NULL, 0, 0, 0, 0 } };
-int GC_trace_buf_ptr = 0;
+int MAY_THREAD_LOCAL GC_trace_buf_ptr = 0;
void GC_add_trace_entry(char *kind, word arg1, word arg2)
{
struct roots GC_static_roots[MAX_ROOT_SETS];
*/
-int GC_no_dls = 0; /* Register dynamic library data segments. */
+MAY_THREAD_LOCAL int GC_no_dls = 0; /* Register dynamic library data segments. */
-static int n_root_sets = 0;
+static MAY_THREAD_LOCAL int n_root_sets = 0;
/* GC_static_roots[0..n_root_sets) contains the valid root sets. */
#if !defined(NO_DEBUGGING) || defined(GC_ASSERTIONS)
/* Is the address p in one of the registered static root sections? */
GC_INNER GC_bool GC_is_static_root(void *p)
{
- static int last_root_set = MAX_ROOT_SETS;
+ static MAY_THREAD_LOCAL int last_root_set = MAX_ROOT_SETS;
int i;
if (last_root_set < n_root_sets
}
#endif /* !MSWIN32 && !MSWINCE && !CYGWIN32 */
-GC_INNER word GC_root_size = 0;
+GC_INNER MAY_THREAD_LOCAL word GC_root_size = 0;
GC_API void GC_CALL GC_add_roots(void *b, void *e)
{
n_root_sets++;
}
-static GC_bool roots_were_cleared = FALSE;
+static MAY_THREAD_LOCAL GC_bool roots_were_cleared = FALSE;
GC_API void GC_CALL GC_clear_roots(void)
{
/* Is the address p in one of the temporary static root sections? */
GC_API int GC_CALL GC_is_tmp_root(void *p)
{
- static int last_root_set = MAX_ROOT_SETS;
+ static MAY_THREAD_LOCAL int last_root_set = MAX_ROOT_SETS;
int i;
if (last_root_set < n_root_sets
-- address order.
*/
-STATIC size_t GC_excl_table_entries = 0;/* Number of entries in use. */
+STATIC MAY_THREAD_LOCAL size_t GC_excl_table_entries = 0;/* Number of entries in use. */
/* Return the first exclusion range that includes an address >= start_addr */
/* Assumes the exclusion table contains at least one entry (namely the */
# endif /* !THREADS */
}
-GC_INNER void (*GC_push_typed_structures)(void) = 0;
+GC_INNER MAY_THREAD_LOCAL void (*GC_push_typed_structures)(void) = 0;
/* Push GC internal roots. These are normally */
/* included in the static data segment, and */
GC_with_callee_saves_pushed(GC_push_current_stack, cold_gc_frame);
}
-STATIC GC_mark_stack_func GC_mark_stack_func_proc = 0;
+STATIC MAY_THREAD_LOCAL GC_mark_stack_func GC_mark_stack_func_proc = 0;
GC_API void GC_CALL GC_register_mark_stack_func(GC_mark_stack_func func)
{
GC_mark_stack_func_proc = func;
__thread unsigned char GC_cancel_disable_count = 0;
#endif
-GC_FAR struct _GC_arrays GC_arrays /* = { 0 } */;
+GC_FAR MAY_THREAD_LOCAL struct _GC_arrays GC_arrays /* = { 0 } */;
-GC_INNER GC_bool GC_debugging_started = FALSE;
+GC_INNER MAY_THREAD_LOCAL GC_bool GC_debugging_started = FALSE;
/* defined here so we don't have to load dbg_mlc.o */
-ptr_t GC_stackbottom = 0;
+ptr_t MAY_THREAD_LOCAL GC_stackbottom = 0;
#ifdef IA64
- ptr_t GC_register_stackbottom = 0;
+ ptr_t MAY_THREAD_LOCAL GC_register_stackbottom = 0;
#endif
-int GC_dont_gc = FALSE;
+int MAY_THREAD_LOCAL GC_dont_gc = FALSE;
-int GC_dont_precollect = FALSE;
+int MAY_THREAD_LOCAL GC_dont_precollect = FALSE;
-GC_bool GC_quiet = 0; /* used also in pcr_interface.c */
+GC_bool MAY_THREAD_LOCAL GC_quiet = 0; /* used also in pcr_interface.c */
#if !defined(NO_CLOCK) || !defined(SMALL_CONFIG)
- int GC_print_stats = 0;
+ int MAY_THREAD_LOCAL GC_print_stats = 0;
#endif
#ifdef GC_PRINT_BACK_HEIGHT
- GC_INNER GC_bool GC_print_back_height = TRUE;
+ GC_INNER MAY_THREAD_LOCAL GC_bool GC_print_back_height = TRUE;
#else
- GC_INNER GC_bool GC_print_back_height = FALSE;
+ GC_INNER MAY_THREAD_LOCAL GC_bool GC_print_back_height = FALSE;
#endif
#ifndef NO_DEBUGGING
# ifdef GC_DUMP_REGULARLY
- GC_INNER GC_bool GC_dump_regularly = TRUE;
+ GC_INNER MAY_THREAD_LOCAL GC_bool GC_dump_regularly = TRUE;
/* Generate regular debugging dumps. */
# else
- GC_INNER GC_bool GC_dump_regularly = FALSE;
+ GC_INNER MAY_THREAD_LOCAL GC_bool GC_dump_regularly = FALSE;
# endif
# ifndef NO_CLOCK
- STATIC CLOCK_TYPE GC_init_time;
+ STATIC MAY_THREAD_LOCAL CLOCK_TYPE GC_init_time;
/* The time that the GC was initialized at. */
# endif
#endif /* !NO_DEBUGGING */
#ifdef KEEP_BACK_PTRS
- GC_INNER long GC_backtraces = 0;
+ GC_INNER MAY_THREAD_LOCAL long GC_backtraces = 0;
/* Number of random backtraces to generate for each GC. */
#endif
#ifdef FIND_LEAK
- int GC_find_leak = 1;
+ int MAY_THREAD_LOCAL GC_find_leak = 1;
#else
- int GC_find_leak = 0;
+ int MAY_THREAD_LOCAL GC_find_leak = 0;
#endif
#ifndef SHORT_DBG_HDRS
# ifdef GC_FINDLEAK_DELAY_FREE
- GC_INNER GC_bool GC_findleak_delay_free = TRUE;
+ GC_INNER MAY_THREAD_LOCAL GC_bool GC_findleak_delay_free = TRUE;
# else
- GC_INNER GC_bool GC_findleak_delay_free = FALSE;
+ GC_INNER MAY_THREAD_LOCAL GC_bool GC_findleak_delay_free = FALSE;
# endif
#endif /* !SHORT_DBG_HDRS */
#ifdef ESCARGOT
- int GC_all_interior_pointers = 0;
+ int MAY_THREAD_LOCAL GC_all_interior_pointers = 0;
#else
#ifdef ALL_INTERIOR_POINTERS
- int GC_all_interior_pointers = 1;
+ int MAY_THREAD_LOCAL GC_all_interior_pointers = 1;
#else
- int GC_all_interior_pointers = 0;
+ int MAY_THREAD_LOCAL GC_all_interior_pointers = 0;
#endif
#endif
#ifdef FINALIZE_ON_DEMAND
- int GC_finalize_on_demand = 1;
+ int MAY_THREAD_LOCAL GC_finalize_on_demand = 1;
#else
- int GC_finalize_on_demand = 0;
+ int MAY_THREAD_LOCAL GC_finalize_on_demand = 0;
#endif
#ifdef JAVA_FINALIZATION
- int GC_java_finalization = 1;
+ int MAY_THREAD_LOCAL GC_java_finalization = 1;
#else
- int GC_java_finalization = 0;
+ int MAY_THREAD_LOCAL GC_java_finalization = 0;
#endif
/* All accesses to it should be synchronized to avoid data races. */
-GC_finalizer_notifier_proc GC_finalizer_notifier =
+MAY_THREAD_LOCAL GC_finalizer_notifier_proc GC_finalizer_notifier =
(GC_finalizer_notifier_proc)0;
#ifdef GC_FORCE_UNMAP_ON_GCOLLECT
/* Has no effect unless USE_MUNMAP. */
/* Has no effect on implicitly-initiated garbage collections. */
- GC_INNER GC_bool GC_force_unmap_on_gcollect = TRUE;
+ GC_INNER MAY_THREAD_LOCAL GC_bool GC_force_unmap_on_gcollect = TRUE;
#else
- GC_INNER GC_bool GC_force_unmap_on_gcollect = FALSE;
+ GC_INNER MAY_THREAD_LOCAL GC_bool GC_force_unmap_on_gcollect = FALSE;
#endif
#ifndef GC_LARGE_ALLOC_WARN_INTERVAL
# define GC_LARGE_ALLOC_WARN_INTERVAL 5
#endif
-GC_INNER long GC_large_alloc_warn_interval = GC_LARGE_ALLOC_WARN_INTERVAL;
+GC_INNER MAY_THREAD_LOCAL long GC_large_alloc_warn_interval = GC_LARGE_ALLOC_WARN_INTERVAL;
/* Interval between unsuppressed warnings. */
STATIC void * GC_CALLBACK GC_default_oom_fn(
}
/* All accesses to it should be synchronized to avoid data races. */
-GC_oom_func GC_oom_fn = GC_default_oom_fn;
+MAY_THREAD_LOCAL GC_oom_func GC_oom_fn = GC_default_oom_fn;
#ifdef CAN_HANDLE_FORK
# ifdef HANDLE_FORK
- GC_INNER int GC_handle_fork = 1;
+ GC_INNER MAY_THREAD_LOCAL int GC_handle_fork = 1;
/* The value is examined by GC_thr_init. */
# else
- GC_INNER int GC_handle_fork = FALSE;
+ GC_INNER MAY_THREAD_LOCAL int GC_handle_fork = FALSE;
# endif
#elif !defined(HAVE_NO_FORK)
# ifdef THREADS
# define BIG_CLEAR_SIZE 2048 /* Clear this much now and then. */
# else
- STATIC word GC_stack_last_cleared = 0; /* GC_no when we last did this */
- STATIC ptr_t GC_min_sp = NULL;
+ STATIC MAY_THREAD_LOCAL word GC_stack_last_cleared = 0; /* GC_no when we last did this */
+ STATIC MAY_THREAD_LOCAL ptr_t GC_min_sp = NULL;
/* Coolest stack pointer value from which */
/* we've already cleared the stack. */
- STATIC ptr_t GC_high_water = NULL;
+ STATIC MAY_THREAD_LOCAL ptr_t GC_high_water = NULL;
/* "hottest" stack pointer value we have seen */
/* recently. Degrades over time. */
- STATIC word GC_bytes_allocd_at_reset = 0;
+ STATIC MAY_THREAD_LOCAL word GC_bytes_allocd_at_reset = 0;
# define DEGRADE_RATE 50
# endif
UNLOCK();
}
- GC_INNER word GC_reclaimed_bytes_before_gc = 0;
+ GC_INNER MAY_THREAD_LOCAL word GC_reclaimed_bytes_before_gc = 0;
/* Fill in GC statistics provided the destination is of enough size. */
static void fill_prof_stats(struct GC_prof_stats_s *pstats)
#ifdef GC_READ_ENV_FILE
/* This works for Win32/WinCE for now. Really useful only for WinCE. */
- STATIC char *GC_envfile_content = NULL;
+ STATIC MAY_THREAD_LOCAL char *GC_envfile_content = NULL;
/* The content of the GC "env" file with CR and */
/* LF replaced to '\0'. NULL if the file is */
/* missing or empty. Otherwise, always ends */
/* with '\0'. */
- STATIC unsigned GC_envfile_length = 0;
+ STATIC MAY_THREAD_LOCAL unsigned GC_envfile_length = 0;
/* Length of GC_envfile_content (if non-NULL). */
# ifndef GC_ENVFILE_MAXLEN
}
#endif /* GC_READ_ENV_FILE */
-GC_INNER GC_bool GC_is_initialized = FALSE;
+GC_INNER MAY_THREAD_LOCAL GC_bool GC_is_initialized = FALSE;
GC_API int GC_CALL GC_is_init_called(void)
{
/* GC_find_leak cannot be used for this purpose as otherwise */
/* TSan finds a data race (between GC_default_on_abort and, e.g., */
/* GC_finish_collection). */
- static GC_bool skip_gc_atexit = FALSE;
+ static MAY_THREAD_LOCAL GC_bool skip_gc_atexit = FALSE;
# else
# define skip_gc_atexit FALSE
# endif
}
}
- static GC_bool installed_looping_handler = FALSE;
+ static MAY_THREAD_LOCAL GC_bool installed_looping_handler = FALSE;
static void maybe_install_looping_handler(void)
{
#if !defined(OS2) && !defined(MACOS) && !defined(GC_ANDROID_LOG) \
&& !defined(NN_PLATFORM_CTR) && !defined(NINTENDO_SWITCH) \
&& !defined(MSWIN32) && !defined(MSWINCE)
- STATIC int GC_stdout = GC_DEFAULT_STDOUT_FD;
- STATIC int GC_stderr = GC_DEFAULT_STDERR_FD;
- STATIC int GC_log = GC_DEFAULT_STDERR_FD;
+ STATIC MAY_THREAD_LOCAL int GC_stdout = GC_DEFAULT_STDOUT_FD;
+ STATIC MAY_THREAD_LOCAL int GC_stderr = GC_DEFAULT_STDERR_FD;
+ STATIC MAY_THREAD_LOCAL int GC_log = GC_DEFAULT_STDERR_FD;
GC_API void GC_CALL GC_set_log_fd(int fd)
{
#ifndef SMALL_CONFIG
# ifdef MANUAL_VDB
- static GC_bool manual_vdb_allowed = TRUE;
+ static MAY_THREAD_LOCAL GC_bool manual_vdb_allowed = TRUE;
# else
- static GC_bool manual_vdb_allowed = FALSE;
+ static MAY_THREAD_LOCAL GC_bool manual_vdb_allowed = FALSE;
# endif
GC_API void GC_CALL GC_set_manual_vdb_allowed(int value)
#define GC_LOG_STD_NAME "gc.log"
+#include "gc_alloc_ptrs.h"
+
GC_API void GC_CALL GC_init(void)
{
/* LOCK(); -- no longer does anything this early. */
# endif
if (EXPECT(GC_is_initialized, TRUE)) return;
+
+ GC_obj_kinds[0].ok_freelist = &GC_aobjfreelist[0];
+ GC_obj_kinds[1].ok_freelist = &GC_objfreelist[0];
+ GC_obj_kinds[2].ok_freelist = &GC_uobjfreelist[0];
+# ifdef GC_ATOMIC_UNCOLLECTABLE
+ GC_obj_kinds[3].ok_freelist = &GC_auobjfreelist[0];
+# endif
+
+ GC_objfreelist_ptr = GC_objfreelist;
+ GC_aobjfreelist_ptr = GC_aobjfreelist;
+ GC_uobjfreelist_ptr = GC_uobjfreelist;
+# ifdef GC_ATOMIC_UNCOLLECTABLE
+ GC_auobjfreelist_ptr = GC_auobjfreelist;
+# endif
+
# ifdef REDIRECT_MALLOC
{
static GC_bool init_started = FALSE;
# define WRITE(f, buf, len) GC_write(buf, len)
#elif defined(OS2) || defined(MACOS)
- STATIC FILE * GC_stdout = NULL;
- STATIC FILE * GC_stderr = NULL;
- STATIC FILE * GC_log = NULL;
+ STATIC MAY_THREAD_LOCAL FILE * GC_stdout = NULL;
+ STATIC MAY_THREAD_LOCAL FILE * GC_stderr = NULL;
+ STATIC MAY_THREAD_LOCAL FILE * GC_log = NULL;
/* Initialize GC_log (and the friends) passed to GC_write(). */
STATIC void GC_set_files(void)
GC_warn_printf(msg, arg);
}
-GC_INNER GC_warn_proc GC_current_warn_proc = GC_default_warn_proc;
+GC_INNER MAY_THREAD_LOCAL GC_warn_proc GC_current_warn_proc = GC_default_warn_proc;
/* This is recommended for production code (release). */
GC_API void GC_CALLBACK GC_ignore_warn_proc(char *msg, GC_word arg)
# endif
}
- GC_abort_func GC_on_abort = GC_default_on_abort;
+ MAY_THREAD_LOCAL GC_abort_func GC_on_abort = GC_default_on_abort;
GC_API void GC_CALL GC_set_abort_func(GC_abort_func fn)
{
#ifndef THREADS
-GC_INNER ptr_t GC_blocked_sp = NULL;
+GC_INNER MAY_THREAD_LOCAL ptr_t GC_blocked_sp = NULL;
/* NULL value means we are not inside GC_do_blocking() call. */
# ifdef IA64
- STATIC ptr_t GC_blocked_register_sp = NULL;
+ STATIC MAY_THREAD_LOCAL ptr_t GC_blocked_register_sp = NULL;
# endif
-GC_INNER struct GC_traced_stack_sect_s *GC_traced_stack_sect = NULL;
+GC_INNER MAY_THREAD_LOCAL struct GC_traced_stack_sect_s *GC_traced_stack_sect = NULL;
/* This is nearly the same as in win32_threads.c */
GC_API void * GC_CALL GC_call_with_gc_active(GC_fn_type fn,
#endif
#if !defined(NO_EXECUTE_PERMISSION)
- STATIC GC_bool GC_pages_executable = TRUE;
+ STATIC MAY_THREAD_LOCAL GC_bool GC_pages_executable = TRUE;
#else
- STATIC GC_bool GC_pages_executable = FALSE;
+ STATIC MAY_THREAD_LOCAL GC_bool GC_pages_executable = FALSE;
#endif
#define IGNORE_PAGES_EXECUTABLE 1
/* Undefined on GC_pages_executable real use. */
GC_INNER char * GC_get_maps(void)
{
ssize_t result;
- static char *maps_buf = NULL;
- static size_t maps_buf_sz = 1;
+ static MAY_THREAD_LOCAL char *maps_buf = NULL;
+ static MAY_THREAD_LOCAL size_t maps_buf_sz = 1;
size_t maps_size;
# ifdef THREADS
size_t old_maps_size = 0;
EXTERN_C_END
# endif /* LINUX */
- ptr_t GC_data_start = NULL;
+ MAY_THREAD_LOCAL ptr_t GC_data_start = NULL;
GC_INNER void GC_init_linux_data_start(void)
{
#ifdef ECOS
+#if defined(GC_THREAD_ISOLATE)
+# error "You cannot use this feature with thread-isolation"
+#endif
+
# ifndef ECOS_GC_MEMORY_SIZE
# define ECOS_GC_MEMORY_SIZE (448 * 1024)
# endif /* ECOS_GC_MEMORY_SIZE */
#endif /* ECOS */
#if defined(NETBSD) && defined(__ELF__)
+
+#if defined(GC_THREAD_ISOLATE)
+# error "You cannot use this feature with thread-isolation"
+#endif
+
ptr_t GC_data_start = NULL;
EXTERN_C_BEGIN
#endif
#ifdef OPENBSD
+
+#if defined(GC_THREAD_ISOLATE)
+# error "You cannot use this feature with thread-isolation"
+#endif
+
static struct sigaction old_segv_act;
STATIC JMP_BUF GC_jmp_buf_openbsd;
# ifdef OS2
+#if defined(GC_THREAD_ISOLATE)
+# error "You cannot use this feature with thread-isolation"
+#endif
+
# include <stddef.h>
# if !defined(__IBMC__) && !defined(__WATCOMC__) /* e.g. EMX */
# endif /* OS/2 */
/* Find the page size */
-GC_INNER size_t GC_page_size = 0;
+GC_INNER MAY_THREAD_LOCAL size_t GC_page_size = 0;
#if defined(MSWIN32) || defined(MSWINCE) || defined(CYGWIN32)
# ifndef VER_PLATFORM_WIN32_CE
# endif
# if defined(MSWINCE) && defined(THREADS)
- GC_INNER GC_bool GC_dont_query_stack_min = FALSE;
+ GC_INNER MAY_THREAD_LOCAL GC_bool GC_dont_query_stack_min = FALSE;
# endif
- GC_INNER SYSTEM_INFO GC_sysinfo;
+ GC_INNER MAY_THREAD_LOCAL SYSTEM_INFO GC_sysinfo;
GC_INNER void GC_setpagesize(void)
{
static struct sigaction old_bus_act;
# endif
# else
- static GC_fault_handler_t old_segv_handler;
+ static MAY_THREAD_LOCAL GC_fault_handler_t old_segv_handler;
# ifdef HAVE_SIGBUS
- static GC_fault_handler_t old_bus_handler;
+ static MAY_THREAD_LOCAL GC_fault_handler_t old_bus_handler;
# endif
# endif
/* Some tools to implement HEURISTIC2 */
# define MIN_PAGE_SIZE 256 /* Smallest conceivable page size, bytes */
- GC_INNER JMP_BUF GC_jmp_buf;
+ GC_INNER MAY_THREAD_LOCAL JMP_BUF GC_jmp_buf;
STATIC void GC_fault_handler(int sig GC_ATTR_UNUSED)
{
/* Requires allocation lock. */
STATIC ptr_t GC_find_limit_with_bound(ptr_t p, GC_bool up, ptr_t bound)
{
- static volatile ptr_t result;
+ static MAY_THREAD_LOCAL volatile ptr_t result;
/* Safer if static, since otherwise it may not be */
/* preserved across the longjmp. Can safely be */
/* static since it's only called with the */
typedef UINT (WINAPI * GetWriteWatch_type)(
DWORD, PVOID, GC_ULONG_PTR /* SIZE_T */,
PVOID *, GC_ULONG_PTR *, PULONG);
- static GetWriteWatch_type GetWriteWatch_func;
- static DWORD GetWriteWatch_alloc_flag;
+ static MAY_THREAD_LOCAL GetWriteWatch_type GetWriteWatch_func;
+ static MAY_THREAD_LOCAL DWORD GetWriteWatch_alloc_flag;
# define GC_GWW_AVAILABLE() (GetWriteWatch_func != NULL)
/* and all real work is done by GC_register_dynamic_libraries. Under */
/* win32s, we cannot find the data segments associated with dll's. */
/* We register the main data segment here. */
- GC_INNER GC_bool GC_no_win32_dlls = FALSE;
+ GC_INNER MAY_THREAD_LOCAL GC_bool GC_no_win32_dlls = FALSE;
/* This used to be set for gcc, to avoid dealing with */
/* the structured exception handling issues. But we now have */
/* assembly code to do that right. */
- GC_INNER GC_bool GC_wnt = FALSE;
+ GC_INNER MAY_THREAD_LOCAL GC_bool GC_wnt = FALSE;
/* This is a Windows NT derivative, i.e. NT, Win2K, XP or later. */
GC_INNER void GC_init_win32(void)
/* the malloc heap with HeapWalk on the default heap. But that */
/* apparently works only for NT-based Windows. */
- STATIC size_t GC_max_root_size = 100000; /* Appr. largest root size. */
+ STATIC MAY_THREAD_LOCAL size_t GC_max_root_size = 100000; /* Appr. largest root size. */
# ifdef USE_WINALLOC
/* In the long run, a better data structure would also be nice ... */
- STATIC struct GC_malloc_heap_list {
+ STATIC MAY_THREAD_LOCAL struct GC_malloc_heap_list {
void * allocation_base;
struct GC_malloc_heap_list *next;
} *GC_malloc_heap_l = 0;
# endif /* !REDIRECT_MALLOC */
- STATIC word GC_n_heap_bases = 0; /* See GC_heap_bases. */
+ STATIC MAY_THREAD_LOCAL word GC_n_heap_bases = 0; /* See GC_heap_bases. */
/* Is p the start of either the malloc heap, or of one of our */
/* heap sections? */
STATIC ptr_t GC_unix_mmap_get_mem(size_t bytes)
{
void *result;
- static ptr_t last_addr = HEAP_START;
+ static MAY_THREAD_LOCAL ptr_t last_addr = HEAP_START;
# ifndef USE_MMAP_ANON
static GC_bool initialized = FALSE;
/* environment, this is also responsible for marking from */
/* thread stacks. */
#ifndef THREADS
- GC_push_other_roots_proc GC_push_other_roots = 0;
+ MAY_THREAD_LOCAL GC_push_other_roots_proc GC_push_other_roots = 0;
#else /* THREADS */
# ifdef PCR
return(PCR_ERes_okay);
}
-extern struct PCR_MM_ProcsRep * GC_old_allocator;
+extern MAY_THREAD_LOCAL struct PCR_MM_ProcsRep * GC_old_allocator;
/* defined in pcr_interface.c. */
STATIC void GC_CALLBACK GC_default_push_other_roots(void)
# endif
#ifndef DARWIN
- STATIC SIG_HNDLR_PTR GC_old_segv_handler = 0;
+ STATIC MAY_THREAD_LOCAL SIG_HNDLR_PTR GC_old_segv_handler = 0;
/* Also old MSWIN32 ACCESS_VIOLATION filter */
# if defined(FREEBSD) || defined(HPUX) || defined(HURD) || defined(LINUX)
- STATIC SIG_HNDLR_PTR GC_old_bus_handler = 0;
+ STATIC MAY_THREAD_LOCAL SIG_HNDLR_PTR GC_old_bus_handler = 0;
# ifndef LINUX
- STATIC GC_bool GC_old_bus_handler_used_si = FALSE;
+ STATIC MAY_THREAD_LOCAL GC_bool GC_old_bus_handler_used_si = FALSE;
# endif
# endif
# if !defined(MSWIN32) && !defined(MSWINCE)
- STATIC GC_bool GC_old_segv_handler_used_si = FALSE;
+ STATIC MAY_THREAD_LOCAL GC_bool GC_old_segv_handler_used_si = FALSE;
# endif /* !MSWIN32 */
#endif /* !DARWIN */
/* only some of the address space), but it avoids intercepting system */
/* calls. */
+#if defined(GC_THREAD_ISOLATE)
+# error "You cannot use this feature with thread-isolation"
+#endif
+
# include <errno.h>
# include <sys/types.h>
# include <sys/signal.h>
# include "vd/PCR_VD.h"
+#if defined(GC_THREAD_ISOLATE)
+# error "You cannot use this feature with thread-isolation"
+#endif
+
# define NPAGES (32*1024) /* 128 MB */
PCR_VD_DB GC_grungy_bits[NPAGES];
#endif /* PCR_VDB */
#ifndef GC_DISABLE_INCREMENTAL
- GC_INNER GC_bool GC_manual_vdb = FALSE;
+ GC_INNER MAY_THREAD_LOCAL GC_bool GC_manual_vdb = FALSE;
/* Manually mark the page containing p as dirty. Logically, this */
/* dirties the entire object. */
/* This value is only used on the reply port. */
# define ID_ACK 3
- STATIC GC_mprotect_state_t GC_mprotect_state = GC_MP_NORMAL;
+ STATIC MAY_THREAD_LOCAL GC_mprotect_state_t GC_mprotect_state = GC_MP_NORMAL;
/* The following should ONLY be called when the world is stopped. */
STATIC void GC_mprotect_thread_notify(mach_msg_id_t id)
/* Updates to this aren't atomic, but the SIGBUS'es seem pretty rare. */
/* Even if this doesn't get updated property, it isn't really a problem. */
- STATIC int GC_sigbus_count = 0;
+ STATIC MAY_THREAD_LOCAL int GC_sigbus_count = 0;
STATIC void GC_darwin_sigbus(int num, siginfo_t *sip, void *context)
{
GC_INNER void GC_print_callers(struct callinfo info[NFRAMES])
{
int i;
- static int reentry_count = 0;
+ static MAY_THREAD_LOCAL int reentry_count = 0;
GC_bool stop = FALSE;
DCL_LOCK_STATE;
{
FILE *pipe;
# define EXE_SZ 100
- static char exe_name[EXE_SZ];
+ static MAY_THREAD_LOCAL char exe_name[EXE_SZ];
# define CMD_SZ 200
char cmd_buf[CMD_SZ];
# define RESULT_SZ 200
- static char result_buf[RESULT_SZ];
+ static MAY_THREAD_LOCAL char result_buf[RESULT_SZ];
size_t result_len;
char *old_preload;
# define PRELOAD_SZ 200
char preload_buf[PRELOAD_SZ];
- static GC_bool found_exe_name = FALSE;
- static GC_bool will_fail = FALSE;
+ static MAY_THREAD_LOCAL GC_bool found_exe_name = FALSE;
+ static MAY_THREAD_LOCAL GC_bool will_fail = FALSE;
int ret_code;
/* Try to get it via a hairy and expensive scheme. */
/* First we get the name of the executable: */
": %p and %p are not in the same object", p, q);
}
-void (GC_CALLBACK *GC_same_obj_print_proc) (void *, void *)
+MAY_THREAD_LOCAL void (GC_CALLBACK *GC_same_obj_print_proc) (void *, void *)
= GC_default_same_obj_print_proc;
/* Check that p and q point to the same object. Call */
ABORT_ARG1("GC_is_valid_displacement test failed", ": %p not valid", p);
}
-void (GC_CALLBACK *GC_is_valid_displacement_print_proc)(void *) =
+MAY_THREAD_LOCAL void (GC_CALLBACK *GC_is_valid_displacement_print_proc)(void *) =
GC_default_is_valid_displacement_print_proc;
/* Check that if p is a pointer to a heap page, then it points to */
ABORT_ARG1("GC_is_visible test failed", ": %p not GC-visible", p);
}
-void (GC_CALLBACK *GC_is_visible_print_proc)(void * p) =
+MAY_THREAD_LOCAL void (GC_CALLBACK *GC_is_visible_print_proc)(void * p) =
GC_default_is_visible_print_proc;
#ifndef THREADS
#include <stdio.h>
-GC_INNER signed_word GC_bytes_found = 0;
+GC_INNER MAY_THREAD_LOCAL signed_word GC_bytes_found = 0;
/* Number of bytes of memory reclaimed */
/* minus the number of bytes originally */
/* on free lists which we had to drop. */
#ifndef MAX_LEAKED
# define MAX_LEAKED 40
#endif
-STATIC ptr_t GC_leaked[MAX_LEAKED] = { NULL };
-STATIC unsigned GC_n_leaked = 0;
+STATIC MAY_THREAD_LOCAL ptr_t GC_leaked[MAX_LEAKED] = { NULL };
+STATIC MAY_THREAD_LOCAL unsigned GC_n_leaked = 0;
-GC_INNER GC_bool GC_have_errors = FALSE;
+GC_INNER MAY_THREAD_LOCAL GC_bool GC_have_errors = FALSE;
#if !defined(EAGER_SWEEP) && (defined(ENABLE_DISCLAIM) || defined(ESCARGOT))
STATIC void GC_reclaim_unconditionally_marked(void);
/* Clear both lists. Called without the allocation lock held. */
GC_INNER void GC_print_all_errors(void)
{
- static GC_bool printing_errors = FALSE;
+ static MAY_THREAD_LOCAL GC_bool printing_errors = FALSE;
GC_bool have_errors;
unsigned i, n_leaked;
ptr_t leaked[MAX_LEAKED];
#define TYPD_EXTRA_BYTES (sizeof(word) - EXTRA_BYTES)
-STATIC int GC_explicit_kind = 0;
+STATIC MAY_THREAD_LOCAL int GC_explicit_kind = 0;
/* Object kind for objects with indirect */
/* (possibly extended) descriptors. */
-STATIC int GC_array_kind = 0;
+STATIC MAY_THREAD_LOCAL int GC_array_kind = 0;
/* Object kind for objects with complex */
/* descriptors and GC_array_mark_proc. */
} complex_descriptor;
#define TAG ad.ad_tag
-STATIC ext_descr * GC_ext_descriptors = NULL;
+STATIC MAY_THREAD_LOCAL ext_descr * GC_ext_descriptors = NULL;
/* Points to array of extended */
/* descriptors. */
-STATIC size_t GC_ed_size = 0; /* Current size of above arrays. */
+STATIC MAY_THREAD_LOCAL size_t GC_ed_size = 0; /* Current size of above arrays. */
#define ED_INITIAL_SIZE 100
-STATIC size_t GC_avail_descr = 0; /* Next available slot. */
+STATIC MAY_THREAD_LOCAL size_t GC_avail_descr = 0; /* Next available slot. */
-STATIC int GC_typed_mark_proc_index = 0; /* Indices of my mark */
-STATIC int GC_array_mark_proc_index = 0; /* procedures. */
+STATIC MAY_THREAD_LOCAL int GC_typed_mark_proc_index = 0; /* Indices of my mark */
+STATIC MAY_THREAD_LOCAL int GC_array_mark_proc_index = 0; /* procedures. */
#ifdef AO_HAVE_load_acquire
- STATIC volatile AO_t GC_explicit_typing_initialized = FALSE;
+ STATIC MAY_THREAD_LOCAL volatile AO_t GC_explicit_typing_initialized = FALSE;
#else
- STATIC GC_bool GC_explicit_typing_initialized = FALSE;
+ STATIC MAY_THREAD_LOCAL GC_bool GC_explicit_typing_initialized = FALSE;
#endif
STATIC void GC_push_typed_structures_proc(void)
}
/* Table of bitmap descriptors for n word long all pointer objects. */
-STATIC GC_descr GC_bm_table[WORDSZ/2];
+STATIC MAY_THREAD_LOCAL GC_descr GC_bm_table[WORDSZ/2];
/* Return a descriptor for the concatenation of 2 nwords long objects, */
/* each of which is described by descriptor. */
return((complex_descriptor *)result);
}
-STATIC ptr_t * GC_eobjfreelist = NULL;
+STATIC MAY_THREAD_LOCAL ptr_t * GC_eobjfreelist = NULL;
STATIC mse * GC_typed_mark_proc(word * addr, mse * mark_stack_ptr,
mse * mark_stack_limit, word env);
#include <string>
#include <type_traits>
+#include <thread>
+#include <mutex>
#include <inttypes.h>
#include "ICUTypes.h"
#define FOR_EACH_I18N_STICKY_OP(F) \
F(vzone_getOffset3)
-// TODO make this class thread-safe
class ICU {
private:
enum Function {
void vzone_getOffset3(VZone* zone, UDate date, UBool local, int32_t& rawOffset,
int32_t& dstOffset, UErrorCode& ec)
{
- ensureLoadSo(Soname::i18n);
-
- if (!m_functions[functionvzone_getOffset3]) {
- loadFunction(Soname::i18n, functionvzone_getOffset3);
+ {
+ std::lock_guard<std::mutex> guard(m_dataMutex);
+ ensureLoadSo(Soname::i18n);
+ if (!m_functions[functionvzone_getOffset3]) {
+ loadFunction(Soname::i18n, functionvzone_getOffset3);
+ }
}
typedef void (*FP)(VZone * zone, UDate date, UBool local, int32_t & rawOffset,
template <Soname soname, typename FunctionPrototype, typename FunctionReturnType, typename... Args>
FunctionReturnType invokeICU(Function kind, Args... args)
{
- ensureLoadSo(soname);
-
- if (!m_functions[kind]) {
- loadFunction(soname, kind);
+ {
+ std::lock_guard<std::mutex> guard(m_dataMutex);
+ ensureLoadSo(soname);
+ if (!m_functions[kind]) {
+ loadFunction(soname, kind);
+ }
}
FunctionPrototype fp = (FunctionPrototype)m_functions[kind];
template <Soname soname, typename FunctionPrototype, typename... Args>
void invokeICUWithoutReturn(Function kind, Args... args)
{
- ensureLoadSo(soname);
-
- if (!m_functions[kind]) {
- loadFunction(soname, kind);
+ {
+ ensureLoadSo(soname);
+ std::lock_guard<std::mutex> guard(m_dataMutex);
+ if (!m_functions[kind]) {
+ loadFunction(soname, kind);
+ }
}
FunctionPrototype fp = (FunctionPrototype)m_functions[kind];
void* m_soHandles[SonameMax];
void* m_functions[FunctionMax];
int m_icuVersion;
+ std::mutex m_dataMutex;
};
} // namespace RuntimeICUBinder
JS_EXPORT_PRIVATE YarrPattern(const String& pattern, RegExpFlags, ErrorCode&, void* stackLimit = nullptr);
void* operator new(size_t size)
{
- static bool typeInited = false;
- static GC_descr descr;
+ static MAY_THREAD_LOCAL bool typeInited = false;
+ static MAY_THREAD_LOCAL GC_descr descr;
if (!typeInited) {
GC_word obj_bitmap[GC_BITMAP_SIZE(YarrPattern)] = { 0 };
GC_set_bit(obj_bitmap, GC_WORD_OFFSET(YarrPattern, m_captureGroupNames));
'escargot_dir%': 'deps/escargot',
"escargot_lib_type%": 'shared_lib', # static_lib | shared_lib
'build_asan%': '<(build_asan)',
+ 'target_arch%': '<(target_arch)',
+# 'escargot_arch%': 'x64',
'conditions': [
['escargot_lib_type=="shared_lib"', {
'lib_ext': '.so'
}, {
'lib_ext': '.a'
}],
+ ['target_arch=="arm64"', {
+ 'target_arch': 'aarch64'
+ }],
+ ['target_arch=="x32"', {
+ 'target_arch': 'i686'
+ }],
],
},
'targets': [{
}
std::shared_ptr<v8::BackingStore> v8::ArrayBuffer::GetBackingStore() {
- auto lwContext = IsolateWrap::GetCurrent()->GetCurrentContext();
- auto self = CVAL(this)->value()->asArrayBufferObject();
+ auto lwIsolate = IsolateWrap::GetCurrent();
+ auto esSelf = CVAL(this)->value()->asArrayBufferObject();
BackingStoreRef* esBackingStore = nullptr;
- EvalResult r = Evaluator::execute(
- lwContext->get(),
- [](ExecutionStateRef* esState,
- ArrayBufferObjectRef* arrayBuffer,
- BackingStoreRef** backingStore) -> ValueRef* {
- auto v = arrayBuffer->backingStore();
-
- if (v.hasValue()) {
- *backingStore = v.value();
- } else {
- *backingStore = BackingStoreRef::create(0);
- }
-
- return ValueRef::createNull();
- },
- self,
- &esBackingStore);
- LWNODE_CHECK(esBackingStore);
+ if (esSelf->backingStore().hasValue()) {
+ esBackingStore = esSelf->backingStore().value();
+ } else {
+ esBackingStore = BackingStoreRef::create(lwIsolate->vmInstance(), 0);
+ }
return std::shared_ptr<v8::BackingStore>(
reinterpret_cast<v8::BackingStore*>(esBackingStore));
API_ENTER_WITH_CONTEXT(context, Nothing<bool>());
EvalResult r = ObjectRefHelper::setPrivate(VAL(*context)->context()->get(),
+ lwIsolate->privateValuesSymbol(),
VAL(this)->value()->asObject(),
VAL(*key)->value(),
VAL(*value)->value());
API_ENTER_WITH_CONTEXT(context, MaybeLocal<Value>());
EvalResult r = ObjectRefHelper::getPrivate(VAL(*context)->context()->get(),
+ lwIsolate->privateValuesSymbol(),
VAL(this)->value()->asObject(),
VAL(*key)->value());
Maybe<bool> v8::Object::DeletePrivate(Local<Context> context,
Local<Private> key) {
- LWNODE_RETURN_MAYBE(bool);
+ API_ENTER_WITH_CONTEXT(context, Nothing<bool>());
+
+ auto r =
+ ObjectRefHelper::deletePrivateProperty(VAL(*context)->context()->get(),
+ lwIsolate->privateValuesSymbol(),
+ VAL(this)->value()->asObject(),
+ VAL(*key)->value());
+ API_HANDLE_EXCEPTION(r, lwIsolate, Nothing<bool>());
+
+ return Just(r.result->asBoolean());
}
Maybe<bool> v8::Object::Has(Local<Context> context, Local<Value> key) {
API_ENTER_WITH_CONTEXT(context, Nothing<bool>());
EvalResult r = ObjectRefHelper::getPrivate(VAL(*context)->context()->get(),
+ lwIsolate->privateValuesSymbol(),
VAL(this)->value()->asObject(),
VAL(*key)->value());
Local<v8::Context> v8::Object::CreationContext() {
auto lwIsolate = IsolateWrap::GetCurrent();
+ LWNODE_DCHECK_MSG(lwIsolate->getNumberOfContexts() == 1,
+ "%zu",
+ lwIsolate->getNumberOfContexts());
return v8::Utils::NewLocal(lwIsolate->toV8(), lwIsolate->GetCurrentContext());
}
}
Local<Value> Private::Name() const {
- LWNODE_RETURN_LOCAL(Value);
+ auto lwIsolate = IsolateWrap::GetCurrent();
+ auto esDescription = CVAL(this)->value()->asSymbol()->description();
+ return Utils::NewLocal<String>(lwIsolate->toV8(), esDescription);
}
template <typename T, typename F>
}
Local<Value> Proxy::GetTarget() {
- LWNODE_RETURN_LOCAL(Value);
+ auto lwIsolate = IsolateWrap::GetCurrent();
+ auto esSelf = CVAL(this)->value()->asProxyObject();
+ auto target = esSelf->target();
+ if (target) {
+ return Utils::NewLocal<Value>(lwIsolate->toV8(), target);
+ }
+
+ return Utils::NewLocal<Value>(lwIsolate->toV8(), ValueRef::createNull());
}
Local<Value> Proxy::GetHandler() {
- LWNODE_RETURN_LOCAL(Value);
+ auto lwIsolate = IsolateWrap::GetCurrent();
+ auto esSelf = CVAL(this)->value()->asProxyObject();
+
+ auto handler = esSelf->handler();
+ if (handler) {
+ return Utils::NewLocal<Value>(lwIsolate->toV8(), handler);
+ }
+
+ return Utils::NewLocal<Value>(lwIsolate->toV8(), ValueRef::createNull());
}
bool Proxy::IsRevoked() {
- LWNODE_RETURN_FALSE;
+ auto esSelf = CVAL(this)->value()->asProxyObject();
+ return esSelf->isRevoked();
}
void Proxy::Revoke() {
- LWNODE_UNIMPLEMENT;
+ auto esSelf = CVAL(this)->value()->asProxyObject();
+ esSelf->revoke();
}
MaybeLocal<Proxy> Proxy::New(Local<Context> context,
Local<Object> local_target,
- Local<Object> local_handler){
- LWNODE_RETURN_LOCAL(Proxy)}
+ Local<Object> local_handler) {
+ API_ENTER_WITH_CONTEXT(context, MaybeLocal<Proxy>());
+ auto lwContext = VAL(*context)->context();
+
+ EvalResult r = Evaluator::execute(
+ lwContext->get(),
+ [](ExecutionStateRef* state,
+ ValueRef* target,
+ ValueRef* handler) -> ValueRef* {
+ return ProxyObjectRef::create(
+ state, target->asObject(), handler->asObject());
+ },
+ CVAL(*local_target)->value(),
+ CVAL(*local_handler)->value());
+ LWNODE_CHECK(r.isSuccessful());
+
+ return Utils::NewLocal<Proxy>(lwIsolate->toV8(), r.result);
+}
CompiledWasmModule::CompiledWasmModule(
std::shared_ptr<internal::wasm::NativeModule> native_module,
std::unique_ptr<v8::BackingStore> v8::ArrayBuffer::NewBackingStore(
Isolate* isolate, size_t byte_length) {
- API_ENTER_NO_EXCEPTION(isolate);
- auto lwContext = lwIsolate->GetCurrentContext();
-
- BackingStoreRef* esBackingStore = nullptr;
- EvalResult r = Evaluator::execute(
- lwContext->get(),
- [](ExecutionStateRef* esState,
- BackingStoreRef** backingStore,
- size_t byteLength) -> ValueRef* {
- *backingStore = BackingStoreRef::create(byteLength);
- return ValueRef::createNull();
- },
- &esBackingStore,
- byte_length);
- LWNODE_CHECK(esBackingStore);
+ auto lwIsolate = IsolateWrap::GetCurrent();
+ BackingStoreRef* esBackingStore =
+ BackingStoreRef::create(lwIsolate->vmInstance(), byte_length);
lwIsolate->addBackingStore(esBackingStore);
return std::unique_ptr<v8::BackingStore>(
}
void Isolate::ClearKeptObjects() {
+#if !defined(GC_HEAP_TRACE_ONLY)
+ MemoryUtil::gc();
+ MemoryUtil::gcInvokeFinalizers();
+#endif
LWNODE_RETURN_VOID;
}
std::stringstream ss(str.substr(pos + 1)); // +1 for skipping =
std::string token;
while (std::getline(ss, token, ',')) {
- Flags::setTraceCallId(token);
+ if (token.find('-') == 0) {
+ Flags::setNagativeTraceCallId(token.substr(1));
+ } else {
+ Flags::setTraceCallId(token);
+ }
}
}
} else if (strEquals("--internal-log", arg)) {
}
i::Address* V8::GlobalizeReference(i::Isolate* isolate, i::Address* obj) {
+ LWNODE_CALL_TRACE();
LWNODE_CHECK(isolate);
IsolateWrap::fromV8(isolate)->globalHandles()->Create(VAL(obj));
+ Engine::current()->gcHeap()->GlobalizeReference(obj, isolate);
return obj;
}
}
void V8::MoveGlobalReference(internal::Address** from, internal::Address** to) {
- LWNODE_CALL_TRACE();
+ // Nothing to do for this
}
void V8::MoveTracedGlobalReference(internal::Address** from,
void* parameter,
WeakCallbackInfo<void>::Callback weak_callback,
WeakCallbackType type) {
+ LWNODE_CALL_TRACE();
+
+ Engine::current()->gcHeap()->MakeWeak(
+ location, parameter, weak_callback, type);
+
#if defined(LWNODE_ENABLE_EXPERIMENTAL)
if (type != WeakCallbackType::kParameter) {
LWNODE_RETURN_VOID; // TODO
}
- IsolateWrap::GetCurrent()->globalHandles()->MakeWeak(
- VAL(location), parameter, weak_callback);
+ GlobalHandles::MakeWeak(VAL(location), parameter, weak_callback);
#else
LWNODE_RETURN_VOID;
#endif
}
void* V8::ClearWeak(i::Address* location) {
+ LWNODE_CALL_TRACE();
+ Engine::current()->gcHeap()->ClearWeak(location);
LWNODE_RETURN_NULLPTR;
}
}
void V8::DisposeGlobal(i::Address* location) {
- LWNODE_CHECK(IsolateWrap::GetCurrent());
- IsolateWrap::GetCurrent()->globalHandles()->Destroy(VAL(location));
+ LWNODE_CALL_TRACE();
+ GlobalHandles::Destroy(VAL(location));
+ Engine::current()->gcHeap()->DisposeGlobal(location);
}
void V8::DisposeTracedGlobal(internal::Address* location) {
MaybeLocal<Value> JSON::Parse(Local<Context> context,
Local<String> json_string) {
- LWNODE_RETURN_LOCAL(Value);
+ API_ENTER_WITH_CONTEXT(context, MaybeLocal<Value>());
+ auto lwContext = CVAL(*context)->context();
+
+ auto r = Evaluator::execute(
+ lwContext->get(),
+ [](ExecutionStateRef* state, ValueRef* jsonString) -> ValueRef* {
+ auto fn = state->context()->globalObject()->jsonParse();
+ auto parsed =
+ fn->call(state, ValueRef::createUndefined(), 1, &jsonString);
+ return parsed;
+ },
+ CVAL(*json_string)->value());
+ API_HANDLE_EXCEPTION(r, lwIsolate, MaybeLocal<Value>());
+
+ return Utils::NewLocal<Object>(lwIsolate->toV8(), r.result);
}
MaybeLocal<String> JSON::Stringify(Local<Context> context,
Local<Value> json_object,
Local<String> gap) {
- LWNODE_RETURN_LOCAL(String);
+ API_ENTER_WITH_CONTEXT(context, MaybeLocal<String>());
+ auto lwContext = CVAL(*context)->context();
+
+ StringRef* esGap = gap.IsEmpty() ? StringRef::emptyString()
+ : CVAL(*gap)->value()->asString();
+ auto r = Evaluator::execute(
+ lwContext->get(),
+ [](ExecutionStateRef* state,
+ ValueRef* jsonObject,
+ StringRef* gap) -> ValueRef* {
+ auto fn = state->context()->globalObject()->jsonStringify();
+ ValueRef* params[] = {jsonObject, ValueRef::createNull(), gap};
+ auto str = fn->call(state, ValueRef::createUndefined(), 3, params);
+ return str;
+ },
+ CVAL(*json_object)->value(),
+ esGap);
+ API_HANDLE_EXCEPTION(r, lwIsolate, MaybeLocal<String>());
+
+ return Utils::NewLocal<String>(lwIsolate->toV8(), r.result);
}
} // namespace v8
*/
#include "engine.h"
+#include <iomanip>
+#include <sstream>
#include "utils/logger.h"
#include "utils/misc.h"
#include "utils/string.h"
LWNODE_UNIMPLEMENT;
}
+// --- G C H e a p ---
+#if !defined(GC_HEAP_TRACE_ONLY)
+#define GC_WRAP_PERSISTENT_POINTER(p) (GC_heap_pointer)(p)
+#define GC_UNWRAP_POINTER(p) ((void*)p)
+#else
+#define GC_WRAP_PERSISTENT_POINTER(p) GC_HIDE_POINTER(p)
+#define GC_UNWRAP_POINTER(p) ((void*)GC_HIDE_POINTER(p))
+#endif
+
+/*
+ @note gc heap tracing lifetime
+
+ a) The types of heap tracing are strong, weak and phantom weak.
+ b) Every persitent is created as strong type at the registration.
+ Following flow is considered:
+
+ i) strong <-> weak <-> phantom weak -> (finalizer) -> nullptr
+ ii) strong -> nullptr
+ iii) phantom weak -> strong
+
+ If the weak counter is 1, the pointer becomes phantom weak, which is
+ collectable by the GC.
+*/
+
+void GCHeap::GlobalizeReference(void* address, void* data) {
+ LWNODE_CALL_TRACE("address %p, data %p", address, data);
+ auto iter = persistents_.find(GC_WRAP_PERSISTENT_POINTER(address));
+ if (iter != persistents_.end()) {
+ iter->second.strong++;
+ iter->second.weak--;
+ iter->second.weak = std::max(iter->second.weak, 0);
+ // no-progress handling weak phantoms
+ } else {
+ auto iter = weakPhantoms_.find(GC_HIDE_POINTER(address));
+ if (iter != weakPhantoms_.end()) {
+ // move phantom weak to strong
+ AddressInfo info = iter->second;
+
+ LWNODE_CHECK(info.strong == 0);
+ LWNODE_CHECK(info.weak == 1);
+ info.strong++;
+ info.weak--;
+
+ persistents_.emplace(GC_WRAP_PERSISTENT_POINTER(address), info);
+ weakPhantoms_.erase(iter);
+ // no-progress handling weak phantoms
+ } else {
+ persistents_.emplace(GC_WRAP_PERSISTENT_POINTER(address),
+ AddressInfo(1, 0, data));
+ }
+ }
+ notifyUpdate(address);
+}
+
+void GCHeap::DisposeGlobal(void* address) {
+ LWNODE_CALL_TRACE("address %p", address);
+ auto iter = persistents_.find(GC_WRAP_PERSISTENT_POINTER(address));
+ if (iter != persistents_.end()) {
+ iter->second.strong--;
+ iter->second.strong = std::max(iter->second.strong, 0);
+
+ // progress handling weak phantoms
+ if (iter->second.strong == 0) {
+ if (iter->second.weak == 0) {
+ persistents_.erase(iter);
+ } else if (iter->second.weak <= 1) {
+ weakPhantoms_.emplace(GC_HIDE_POINTER(address), iter->second);
+ persistents_.erase(iter);
+ }
+ }
+ }
+ notifyUpdate(address);
+}
+
+void GCHeap::MakeWeak(void* address) {
+ LWNODE_CALL_TRACE("address %p", address);
+ auto iter = persistents_.find(GC_WRAP_PERSISTENT_POINTER(address));
+ if (iter != persistents_.end()) {
+ iter->second.strong--;
+ iter->second.strong = std::max(iter->second.strong, 0);
+ iter->second.weak++;
+
+ // progress handling weak phantoms
+ if (iter->second.strong == 0 && iter->second.weak <= 1) {
+ weakPhantoms_.emplace(GC_HIDE_POINTER(address), iter->second);
+ persistents_.erase(iter);
+ }
+ } else {
+ auto iter = weakPhantoms_.find(GC_HIDE_POINTER(address));
+ if (iter != weakPhantoms_.end()) {
+ // move phantom weak to weak
+ AddressInfo info = iter->second;
+
+ LWNODE_CHECK(info.strong == 0);
+ LWNODE_CHECK(info.weak == 1);
+ info.weak++;
+
+ persistents_.emplace(GC_WRAP_PERSISTENT_POINTER(address), info);
+ weakPhantoms_.erase(iter);
+ } else {
+ LWNODE_CHECK(false); // assumes this doesn't happen. let's see.
+ }
+ }
+
+ notifyUpdate(address);
+}
+
+void GCHeap::ClearWeak(void* address) {
+ LWNODE_CALL_TRACE("address %p", address);
+ // 1. handle persistents_ and weakPhantoms_
+ auto iter = persistents_.find(GC_WRAP_PERSISTENT_POINTER(address));
+ if (iter != persistents_.end()) {
+ // guard: ClearWeak can be called even if no weak reference exists.
+ if (iter->second.weak > 0) {
+ iter->second.weak--;
+ }
+
+ // progress handling weak phantoms
+ if (iter->second.strong <= 0 && iter->second.weak <= 1) {
+ weakPhantoms_.emplace(GC_HIDE_POINTER(address), iter->second);
+ persistents_.erase(iter);
+ }
+ }
+
+ // 2. ensure clearing finalizer bound to this address
+ MemoryUtil::gcRegisterFinalizer(address, nullptr, nullptr);
+
+ notifyUpdate(address);
+}
+
+void GCHeap::disposePhantomWeak(void* address) {
+ LWNODE_CALL_TRACE("address %p", address);
+ auto iter = weakPhantoms_.find(GC_HIDE_POINTER(address));
+ if (iter != weakPhantoms_.end()) {
+ weakPhantoms_.erase(iter);
+ }
+ notifyUpdate(address);
+}
+
+bool GCHeap::isTraced(void* address) {
+ if (persistents_.find(GC_WRAP_PERSISTENT_POINTER(address)) !=
+ persistents_.end()) {
+ return true;
+ }
+
+ if (weakPhantoms_.find(GC_HIDE_POINTER(address)) != weakPhantoms_.end()) {
+ return true;
+ }
+ return false;
+}
+
+void* GCHeap::getPersistentData(void* address) {
+ auto iter = persistents_.find(GC_WRAP_PERSISTENT_POINTER(address));
+ if (iter != persistents_.end()) {
+ return iter->second.data;
+ }
+
+ auto iterWeak = weakPhantoms_.find(GC_HIDE_POINTER(address));
+ if (iterWeak != weakPhantoms_.end()) {
+ return iterWeak->second.data;
+ }
+ return nullptr;
+}
+
+typedef void (*formatterFunction)(
+ std::stringstream& stream,
+ const std::pair<GCHeap::GC_heap_pointer, GCHeap::AddressInfo>& iter);
+
+static void printAddress(
+ const GCUnorderedMap<GCHeap::GC_heap_pointer, GCHeap::AddressInfo>& map,
+ formatterFunction formatter,
+ const int column = 4) {
+ std::stringstream ss;
+ std::vector<std::string> vector;
+ int count = 0;
+ for (const auto& iter : map) {
+ formatter(ss, iter);
+ if (++count % column == 0) {
+ vector.push_back(ss.str());
+ ss.str("");
+ }
+ }
+ if (count % column) {
+ vector.push_back(ss.str());
+ }
+ for (const auto& it : vector) {
+ LWNODE_LOG_INFO("%s", it.c_str());
+ }
+}
+
+void GCHeap::printStatus() {
+ if (isStatusPrinted) {
+ return;
+ }
+ isStatusPrinted = true;
+
+ if (persistents_.size() == 0 && weakPhantoms_.size() == 0) {
+ return;
+ }
+
+ LWNODE_LOG_INFO(COLOR_GREEN "----- GCHEAP -----" COLOR_RESET);
+ LWNODE_LOG_INFO("[HOLD]");
+ printAddress(
+ persistents_,
+ [](std::stringstream& stream,
+ const std::pair<GCHeap::GC_heap_pointer, GCHeap::AddressInfo>& iter) {
+ stream << std::setw(15) << std::right << GC_UNWRAP_POINTER(iter.first)
+ << " ("
+ << "S" << std::setw(3) << iter.second.strong << " W"
+ << std::setw(3) << iter.second.weak << ") ";
+ });
+
+ LWNODE_LOG_INFO(COLOR_GREEN "------------------" COLOR_RESET);
+ LWNODE_LOG_INFO("[PHANTOM]");
+ printAddress(
+ weakPhantoms_,
+ [](std::stringstream& stream,
+ const std::pair<GCHeap::GC_heap_pointer, GCHeap::AddressInfo>& iter) {
+ stream << std::setw(15) << std::right << GC_REVEAL_POINTER(iter.first)
+ << " ("
+ << "S" << std::setw(3) << iter.second.strong << " W"
+ << std::setw(3) << iter.second.weak << ") ";
+ });
+
+ LWNODE_LOG_INFO(COLOR_GREEN "------------------" COLOR_RESET);
+}
+
+void GCHeap::notifyUpdate(void* address) {
+ isStatusPrinted = false;
+}
+
+void GCHeap::MakeWeak(void* location,
+ void* parameter,
+ v8::WeakCallbackInfo<void>::Callback weak_callback,
+ v8::WeakCallbackType type) {
+ LWNODE_CALL_TRACE("address %p", location);
+
+ if (type != v8::WeakCallbackType::kParameter) {
+ LWNODE_CHECK(false);
+ }
+
+#if !defined(GC_HEAP_TRACE_ONLY)
+ // 1. register the given finalizer
+ struct Params {
+ v8::Isolate* isolate;
+ void* parameter;
+ v8::WeakCallbackInfo<void>::Callback weak_callback;
+ };
+
+ Params* params = new Params();
+
+ LWNODE_CHECK(isTraced(location));
+
+ v8::Isolate* v8Isolate =
+ reinterpret_cast<v8::Isolate*>(getPersistentData(location));
+
+ LWNODE_CHECK_NOT_NULL(v8Isolate);
+
+ params->isolate = v8Isolate;
+ params->parameter = parameter;
+ params->weak_callback = weak_callback;
+
+ MemoryUtil::gcRegisterFinalizer(
+ location,
+ [](void* address, void* data) {
+ Engine::current()->gcHeap()->disposePhantomWeak(address);
+ Params* params = (Params*)data;
+ void* embedderFields[v8::kEmbedderFieldsInWeakCallback] = {};
+ v8::WeakCallbackInfo<void> info(
+ params->isolate, params->parameter, embedderFields, nullptr);
+ params->weak_callback(info);
+ delete params;
+ },
+ params);
+#endif
+
+ // 2. make this location as weak type
+ MakeWeak(location);
+}
+
// --- E n g i n e ---
static Engine* s_engine;
Globals::initialize();
Memory::setGCFrequency(GC_FREE_SPACE_DIVISOR);
+ gcHeap_.reset(new GCHeap());
+
+ if (Flags::isTraceCallEnabled("HEAP")) {
+ Memory::setGCEventListener([]() {
+ // this is invoked at GC_EVENT_RECLAIM_END phase
+ Engine::current()->gcHeap()->printStatus();
+ });
+ }
auto flags = Flags::get();
if (Flags::isTraceGCEnabled()) {
void Engine::dispose() {
LWNODE_CALL_TRACE_GC_START();
+
+ Memory::setGCEventListener(nullptr);
+
+ gcHeap_.release();
+ MemoryUtil::gc();
+ GC_invoke_finalizers();
+
Globals::finalize();
disposeExternalStrings();
LWNODE_CALL_TRACE_GC_END();
v8::ArrayBuffer::Allocator* allocator_ = nullptr;
};
+#define GC_HEAP_TRACE_ONLY
+
+class GCHeap : public gc {
+ public:
+ void GlobalizeReference(void* address, void* data);
+ void DisposeGlobal(void* address);
+ void MakeWeak(void* address);
+ void MakeWeak(void* location,
+ void* parameter,
+ v8::WeakCallbackInfo<void>::Callback weak_callback,
+ v8::WeakCallbackType type);
+ void ClearWeak(void* address);
+ void disposePhantomWeak(void* address);
+ bool isTraced(void* address);
+ void* getPersistentData(void* address);
+ void printStatus();
+
+ typedef GC_word GC_heap_pointer;
+ struct AddressInfo {
+ AddressInfo(int strong_, int weak_, void* data_ = nullptr) {
+ strong = strong_;
+ weak = weak_;
+ data = data_;
+ }
+ int strong = 0;
+ int weak = 0;
+ void* data = nullptr;
+ };
+
+ private:
+ void notifyUpdate(void* address);
+
+ GCUnorderedMap<GC_heap_pointer, AddressInfo> persistents_;
+ GCUnorderedMap<GC_heap_pointer, AddressInfo> weakPhantoms_;
+ bool isStatusPrinted = false;
+};
+
class Engine {
public:
static bool Initialize();
static void unregisterExternalString(
v8::String::ExternalStringResourceBase* v8Str);
+ GCHeap* gcHeap() { return gcHeap_.get(); }
+
private:
Engine() = default;
void initialize();
static std::unordered_set<v8::String::ExternalStringResourceBase*>
s_externalStrings;
+
+ PersistentRefHolder<GCHeap> gcHeap_;
};
} // namespace EscargotShim
namespace EscargotShim {
-#define PRIVATE_SYMBOL_KEY "__hiddenvalues__"
-SymbolRef* ObjectRefHelper::s_symbolKeyForHiddenValues = nullptr;
-
ObjectRef* ObjectRefHelper::create(ContextRef* context) {
EvalResult r =
Evaluator::execute(context, [](ExecutionStateRef* state) -> ValueRef* {
LWNODE_CHECK(r.isSuccessful());
- if (s_symbolKeyForHiddenValues == nullptr) {
- s_symbolKeyForHiddenValues =
- SymbolRef::create(StringRef::createFromASCII(PRIVATE_SYMBOL_KEY));
- }
-
return r.result->asObject();
}
key);
}
+EvalResult ObjectRefHelper::deletePrivateProperty(ContextRef* context,
+ SymbolRef* privateValueSymbol,
+ ObjectRef* object,
+ ValueRef* key) {
+ LWNODE_DCHECK_NOT_NULL(object);
+ LWNODE_DCHECK_NOT_NULL(key);
+
+ return Evaluator::execute(
+ context,
+ [](ExecutionStateRef* state,
+ SymbolRef* privateValueSymbol,
+ ObjectRef* object,
+ ValueRef* key) -> ValueRef* {
+ ValueRef* privateValuesObj = object->get(state, privateValueSymbol);
+
+ if (privateValuesObj->isUndefined()) {
+ return ValueRef::create(false);
+ }
+
+ return ValueRef::create(
+ privateValuesObj->asObject()->deleteProperty(state, key));
+ },
+ privateValueSymbol,
+ object,
+ key);
+}
+
EvalResult ObjectRefHelper::defineAccessorProperty(
ContextRef* context,
ObjectRef* object,
context,
[](ExecutionStateRef* state,
ObjectRef* object,
- ValueRef* param1) -> ValueRef* {
- return ValueRef::create(object->setPrototype(state, param1));
+ ValueRef* proto) -> ValueRef* {
+ return ValueRef::create(object->setPrototype(state, proto));
},
object,
proto);
}
EvalResult ObjectRefHelper::getPrivate(ContextRef* context,
+ SymbolRef* privateValueSymbol,
ObjectRef* object,
ValueRef* key) {
- LWNODE_CHECK_NOT_NULL(s_symbolKeyForHiddenValues);
-
return Evaluator::execute(
context,
[](ExecutionStateRef* state,
+ SymbolRef* privateValueSymbol,
ObjectRef* object,
- ValueRef* param1) -> ValueRef* {
+ ValueRef* key) -> ValueRef* {
ValueRef* hiddenValuesRef =
- object->get(state, s_symbolKeyForHiddenValues);
+ object->getOwnProperty(state, privateValueSymbol);
if (hiddenValuesRef->isUndefined()) {
return ValueRef::createUndefined();
}
- ObjectRef* hiddenValuesObject = hiddenValuesRef->asObject();
-
- return ValueRef::create(hiddenValuesObject->get(state, param1));
+ return ValueRef::create(hiddenValuesRef->asObject()->get(state, key));
},
+ privateValueSymbol,
object,
key);
}
EvalResult ObjectRefHelper::setPrivate(ContextRef* context,
+ SymbolRef* privateValueSymbol,
ObjectRef* object,
ValueRef* key,
ValueRef* value) {
- LWNODE_CHECK_NOT_NULL(s_symbolKeyForHiddenValues);
LWNODE_CHECK(key->isSymbol());
return Evaluator::execute(
context,
[](ExecutionStateRef* state,
ContextRef* context,
+ SymbolRef* privateValueSymbol,
ObjectRef* object,
ValueRef* param1,
ValueRef* param2) -> ValueRef* {
- ValueRef* hiddenValuesRef =
- object->get(state, s_symbolKeyForHiddenValues);
+ ValueRef* hiddenValuesRef = object->get(state, privateValueSymbol);
ObjectRef* hiddenValuesObject = nullptr;
defineDataProperty(
context,
object,
- s_symbolKeyForHiddenValues,
+ privateValueSymbol,
ObjectRef::DataPropertyDescriptor(
hiddenValuesObject,
static_cast<ObjectRef::PresentAttribute>(
return ValueRef::create(true);
},
context,
+ privateValueSymbol,
object,
key,
value);
ValueRef* key);
static EvalResult getPrivate(ContextRef* context,
+ SymbolRef* privateValueSymbol,
ObjectRef* object,
ValueRef* key);
static EvalResult setPrivate(ContextRef* context,
+ SymbolRef* privateValueSymbol,
ObjectRef* object,
ValueRef* key,
ValueRef* value);
ObjectRef* object,
ValueRef* key);
+ static EvalResult deletePrivateProperty(ContextRef* context,
+ SymbolRef* privateValueSymbol,
+ ObjectRef* object,
+ ValueRef* key);
+
static EvalResult defineDataProperty(
ContextRef* context,
ObjectRef* object,
static StringRef* getConstructorName(ContextRef* context, ObjectRef* object);
private:
- static SymbolRef* s_symbolKeyForHiddenValues;
};
class ObjectTemplateRefHelper {
*/
#include <EscargotPublic.h>
+#include <algorithm>
#include "api/isolate.h"
#include "base.h"
namespace EscargotShim {
-static std::unordered_map<ValueWrap*, std::unique_ptr<GlobalHandles::NodeBlock>>
- g_WeakValues;
+std::vector<GlobalHandles*> g_globalHandlesVector;
+
+class GlobalWeakHandler {
+ public:
+ void pushBlock(ValueWrap* lwValue,
+ std::unique_ptr<GlobalHandles::NodeBlock> block) {
+ auto iter = weakValues_.find(lwValue);
+ if (iter != weakValues_.end()) {
+ // TODO: support re-weakening a value that already has a pending block
+ LWNODE_CHECK_NOT_REACH_HERE();
+ }
+ weakValues_.emplace(lwValue, std::move(block));
+ }
+
+ std::unique_ptr<GlobalHandles::NodeBlock> popBlock(ValueWrap* lwValue) {
+ auto iter = weakValues_.find(lwValue);
+ if (iter == weakValues_.end()) {
+ return nullptr;
+ }
+ auto nodeBlock = std::move(iter->second);
+ weakValues_.erase(iter);
+
+ return nodeBlock;
+ }
+
+ void dispose() { weakValues_.clear(); }
+
+ private:
+ std::unordered_map<ValueWrap*, std::unique_ptr<GlobalHandles::NodeBlock>>
+ weakValues_;
+};
+
+GlobalWeakHandler g_globalWeakHandler;
GlobalHandles::Node::Node(void* parameter,
v8::WeakCallbackInfo<void>::Callback callback)
LWNODE_CALL_TRACE_GC_START();
LWNODE_CALL_TRACE_ID(GLOBALHANDLES, "Call weak callback");
- auto iter = g_WeakValues.find(VAL(self));
- if (iter == g_WeakValues.end()) {
- LWNODE_LOG_ERROR("Cannot find weakened value.");
- }
-
- auto curNode = iter->second->firstNode();
- if (!curNode) {
- LWNODE_LOG_ERROR();
+ auto block = g_globalWeakHandler.popBlock(VAL(self));
+ if (!block) {
+ LWNODE_CALL_TRACE_ID(GLOBALHANDLES, "Cannot invoke callback: %p", self);
return;
}
- void* embedderFields[v8::kEmbedderFieldsInWeakCallback] = {nullptr,
- nullptr};
- v8::WeakCallbackInfo<void> info(
- iter->second->isolate(), curNode->parameter(), embedderFields, nullptr);
- curNode->callback()(info);
-
- if (curNode->nextNode()) {
- LWNODE_UNIMPLEMENT; // TODO
+ auto curNode = block->firstNode_;
+ if (curNode) {
+ if (curNode->callback()) {
+ void* embedderFields[v8::kEmbedderFieldsInWeakCallback] = {nullptr,
+ nullptr};
+ v8::WeakCallbackInfo<void> info(
+ block->isolate(), curNode->parameter(), embedderFields, nullptr);
+ LWNODE_CHECK_NOT_NULL(block->isolate());
+ LWNODE_CALL_TRACE_ID(
+ GLOBALHANDLES, "Call v8 callback: parm(%p)", curNode->parameter());
+ curNode->callback()(info);
+ }
+
+ if (curNode->nextNode()) {
+ LWNODE_UNIMPLEMENT; // TODO
+ }
}
-
- g_WeakValues.erase(iter);
-
LWNODE_CALL_TRACE_GC_END();
});
}
+GlobalHandles::GlobalHandles(v8::Isolate* isolate) : isolate_(isolate) {
+ g_globalHandlesVector.push_back(this);
+}
+
void GlobalHandles::Dispose() {
LWNODE_CALL_TRACE_ID(GLOBALHANDLES);
persistentValues_.clear();
+ auto it = std::find(
+ g_globalHandlesVector.begin(), g_globalHandlesVector.end(), this);
- // TODO: need to free the remaining values in the weak map, not clear map.
- g_WeakValues.clear();
+ if (it != g_globalHandlesVector.end()) {
+ g_globalHandlesVector.erase(it);
+ }
+ // TODO: consider multi isolate
+ if (g_globalHandlesVector.size() == 0) {
+ g_globalWeakHandler.dispose();
+ }
}
void GlobalHandles::Create(ValueWrap* lwValue) {
}
void GlobalHandles::Destroy(ValueWrap* lwValue) {
+ for (auto globalHandles : g_globalHandlesVector) {
+ if (globalHandles->destroy(lwValue)) {
+ return;
+ }
+ }
+ LWNODE_CALL_TRACE_ID(GLOBALHANDLES, "Cannot destroy: %p", lwValue)
+}
+
+bool GlobalHandles::destroy(ValueWrap* lwValue) {
auto iter = persistentValues_.find(lwValue);
if (iter != persistentValues_.end()) {
if (iter->second == 1) {
} else {
--iter->second;
}
- } else {
- LWNODE_CALL_TRACE_ID(
- GLOBALHANDLES, "The value(%p) has already been removed.", lwValue)
+ return true;
}
+ return false;
}
-bool GlobalHandles::MakeWeak(ValueWrap* lwValue,
+void GlobalHandles::MakeWeak(ValueWrap* lwValue,
+ void* parameter,
+ v8::WeakCallbackInfo<void>::Callback callback) {
+ for (auto globalHandles : g_globalHandlesVector) {
+ if (globalHandles->makeWeak(lwValue, parameter, callback)) {
+ return;
+ }
+ }
+ LWNODE_CALL_TRACE_ID(GLOBALHANDLES, "Cannot make weak value: %p", lwValue)
+}
+
+bool GlobalHandles::makeWeak(ValueWrap* lwValue,
void* parameter,
v8::WeakCallbackInfo<void>::Callback callback) {
auto iter = persistentValues_.find(lwValue);
if (iter == persistentValues_.end()) {
- LWNODE_LOG_ERROR("Only Persistent value can be made weak.");
return false;
}
- auto block = std::make_unique<NodeBlock>(isolate_, iter->second);
+
+ auto block =
+ std::make_unique<GlobalHandles::NodeBlock>(isolate_, iter->second);
block->pushNode(lwValue, new Node(parameter, callback));
- g_WeakValues.emplace(lwValue, std::move(block));
+ g_globalWeakHandler.pushBlock(lwValue, std::move(block));
persistentValues_.erase(iter);
- LWNODE_CALL_TRACE_ID(GLOBALHANDLES, "The value(%p) was weakened.", lwValue)
+ LWNODE_CALL_TRACE_ID(GLOBALHANDLES, "MakeWeak: %p", lwValue)
return true;
}
class GlobalHandles final : public gc {
public:
- GlobalHandles(v8::Isolate* isolate) : isolate_(isolate) {}
- ~GlobalHandles() = default;
+ GlobalHandles(v8::Isolate* isolate);
void Create(ValueWrap* lwValue);
- void Destroy(ValueWrap* lwValue);
+ static void Destroy(ValueWrap* lwValue);
+ static void MakeWeak(ValueWrap* lwValue,
+ void* parameter,
+ v8::WeakCallbackInfo<void>::Callback callback);
- bool MakeWeak(ValueWrap* lwValue,
+ bool destroy(ValueWrap* lwValue);
+
+ bool makeWeak(ValueWrap* lwValue,
void* parameter,
v8::WeakCallbackInfo<void>::Callback callback);
class NodeBlock {
public:
- enum State { None, Weak, Clear };
-
NodeBlock(v8::Isolate* isolate, uint32_t count);
~NodeBlock();
#include "utils/misc.h"
#include <algorithm>
+#include <sstream>
namespace EscargotShim {
: type_(type), v8scope_(reinterpret_cast<void*>(scope)) {}
void HandleScopeWrap::add(HandleWrap* value) {
- LWNODE_CALL_TRACE("%p", value);
+ LWNODE_CALL_TRACE_ID(HDLSCOPE, "%p -> %p | %p", value, v8scope_, this);
handles_.push_back(value);
}
}
void HandleScopeWrap::clear() {
- if (Flags::isTraceCallEnabled()) {
- for (auto it = handles_.begin(); it != handles_.end(); it++) {
- LWNODE_CALL_TRACE("%p", *it);
+ LWNODE_CALL_TRACE_ID(HDLSCOPE);
+ if (Flags::isTraceCallEnabled("HDLSCOPE")) {
+ std::stringstream ss;
+ std::vector<std::string> vector;
+
+ const int column = 10;
+ int count = 0;
+ for (const auto& it : handles_) {
+ ss << it << " ";
+ if (++count % column == 0) {
+ vector.push_back(ss.str());
+ ss.str("");
+ }
+ }
+ if (count % column) {
+ vector.push_back(ss.str());
}
- }
+ LWNODE_CALL_TRACE_LOG("%p contains %d handles:", this, count);
+ for (const auto& it : vector) {
+ LWNODE_CALL_TRACE_LOG("%s", it.c_str());
+ }
+ }
handles_.clear();
}
void Isolate::RunPromiseHook(PromiseHookType type,
Escargot::PromiseObjectRef* promise,
Escargot::ValueRef* parent) {
- if (promise_hook_ == nullptr) {
+ if (!promise_hook_ || !promise) {
return;
}
THREAD_LOCAL IsolateWrap* IsolateWrap::s_previousIsolate;
IsolateWrap::IsolateWrap() {
- LWNODE_CALL_TRACE("malc: %p", this);
+ LWNODE_CALL_TRACE_ID(ISOWRAP, "malc: %p", this);
globalHandles_ = new GlobalHandles(toV8());
+ privateValuesSymbol_ = PersistentRefHolder<SymbolRef>(
+ SymbolRef::create(StringRef::createFromASCII(PRIVATE_VALUES.data(),
+ PRIVATE_VALUES.length())));
+
// NOTE: check lock_gc_release(); is needed (and where)
// lock_gc_release();
Memory::gcRegisterFinalizer(this, [](void* self) {
- LWNODE_CALL_TRACE("free: %p", self);
- reinterpret_cast<IsolateWrap*>(self)->globalHandles()->Dispose();
- LWNODE_CALL_TRACE_GC_START();
- // NOTE: Called when this IsolateWrap is deallocated by gc
- LWNODE_CALL_TRACE_GC_END();
+ reinterpret_cast<IsolateWrap*>(self)->~IsolateWrap();
});
}
+IsolateWrap::~IsolateWrap() {
+ LWNODE_CALL_TRACE_ID(ISOWRAP, "free: %p", this);
+ globalHandles_->Dispose();
+ LWNODE_CALL_TRACE_GC_START();
+ // NOTE: Called when this IsolateWrap is deallocated by gc
+ LWNODE_CALL_TRACE_GC_END();
+}
+
IsolateWrap* IsolateWrap::New() {
IsolateWrap* isolate = new IsolateWrap();
return isolate;
void IsolateWrap::popHandleScope(v8Scope_t* handleScope) {
LWNODE_CHECK(handleScopes_.back()->v8Scope() == handleScope);
- LWNODE_CALL_TRACE();
+ LWNODE_CALL_TRACE_ID(ISOWRAP);
// TODO: remove the following line and simply pop the last
handleScopes_.back()->clear();
}
void IsolateWrap::pushContext(ContextWrap* context) {
- LWNODE_CALL_TRACE("%p", context);
+ LWNODE_CALL_TRACE_ID(ISOWRAP, "%p", context);
if (contextScopes_.size() && (contextScopes_.back() != context)) {
LWNODE_DLOG_WARN(R"(multiple contexts exist:
contexts. In Node.js at this time, one main Context associated with the
Environment instance is used for most Node.js features (except writing
MessagePort objects.) So, on purpose, we don't store Object's creation
-context which is related to Object::CreateContext().
+context which is related to Object::CreationContext().
@note: we may ignore this warning if cctest not related runs.)");
}
contextScopes_.push_back(context);
}
+size_t IsolateWrap::getNumberOfContexts() {
+ GCUnorderedSet<ContextWrap*> uniqueContexts;
+ for (const auto& val : contextScopes_) {
+ uniqueContexts.insert(val);
+ }
+ return uniqueContexts.size();
+}
+
void IsolateWrap::popContext(ContextWrap* context) {
LWNODE_CHECK(contextScopes_.back() == context);
- LWNODE_CALL_TRACE("%p", context);
+ LWNODE_CALL_TRACE_ID(ISOWRAP, "%p", context);
contextScopes_.pop_back();
}
}
void IsolateWrap::addEternal(GCManagedObject* value) {
- LWNODE_CALL_TRACE("%p", value);
+ LWNODE_CALL_TRACE_ID(ISOWRAP, "%p", value);
eternals_.push_back(value);
}
SymbolRef* IsolateWrap::getPrivateSymbol(StringRef* esString) {
// @check replace this container if this function is called a lot.
- LWNODE_CALL_TRACE();
+ LWNODE_CALL_TRACE_ID(ISOWRAP);
for (size_t i = 0; i < privateSymbols_.size(); i++) {
if (privateSymbols_[i]->description()->equals(esString)) {
}
}
+void IsolateWrap::SetPromiseHook(v8::PromiseHook callback) {
+ auto lwIsolate = GetCurrent();
+
+ promise_hook_ = callback;
+
+ auto fn = [](ExecutionStateRef* state,
+ VMInstanceRef::PromiseHookType type,
+ PromiseObjectRef* promise,
+ ValueRef* parent) {
+ IsolateWrap::GetCurrent()->RunPromiseHook(
+ (v8::PromiseHookType)type, promise, parent);
+ };
+
+ lwIsolate->vmInstance()->registerPromiseHook(fn);
+}
} // namespace EscargotShim
promise_reject_callback_ = callback;
}
- void SetPromiseHook(v8::PromiseHook callback) { promise_hook_ = callback; }
-
void RunPromiseHook(PromiseHookType type,
Escargot::PromiseObjectRef* promise,
Escargot::ValueRef* parent);
class IsolateWrap final : public v8::internal::Isolate {
public:
+ virtual ~IsolateWrap();
+
+ const std::string PRIVATE_VALUES = "__private_values__";
+
static IsolateWrap* New();
void Initialize(const v8::Isolate::CreateParams& params);
void Dispose();
void popContext(ContextWrap* context);
bool InContext();
ContextWrap* GetCurrentContext();
+ size_t getNumberOfContexts();
// Eternal
void addEternal(GCManagedObject* value);
void lock_gc_release() { release_lock_.reset(this); }
void unlock_gc_release() { release_lock_.release(); }
+ SymbolRef* privateValuesSymbol() { return privateValuesSymbol_.get(); }
+
+ void SetPromiseHook(v8::PromiseHook callback);
+
private:
IsolateWrap();
GCUnorderedSet<BackingStoreRef*> backingStores_;
GCVector<HandleScopeWrap*> handleScopes_;
GCVector<ContextWrap*> contextScopes_;
+
+ PersistentRefHolder<SymbolRef> privateValuesSymbol_;
GCVector<Escargot::SymbolRef*> privateSymbols_;
// Isolate Scope
flag_t Flags::s_flags = FlagType::Empty;
std::set<std::string> Flags::s_trace_ids;
+std::set<std::string> Flags::s_negative_trace_ids;
bool Flags::isTraceCallEnabled(std::string id) {
if (!(s_flags & FlagType::TraceCall)) {
}
}
+ if (!s_negative_trace_ids.empty()) {
+ if (s_negative_trace_ids.find(id) != s_negative_trace_ids.end()) {
+ return false;
+ }
+ }
+
return true;
}
static bool isInternalLogEnabled() { return s_flags & FlagType::InternalLog; }
static void setTraceCallId(std::string id) { s_trace_ids.insert(id); }
+ static void setNagativeTraceCallId(std::string id) {
+ s_negative_trace_ids.insert(id);
+ }
private:
static std::set<std::string> s_trace_ids;
+ static std::set<std::string> s_negative_trace_ids;
static flag_t s_flags;
};
LWNODE_CALL_TRACE_GC_END();
}
+void MemoryUtil::gcInvokeFinalizers() {
+ GC_invoke_finalizers();
+}
+
void MemoryUtil::gcEndStatsTrace() {
Memory::setGCEventListener(nullptr);
}
GCAllocatedMemoryFinalizer callback) {
Escargot::Memory::gcUnregisterFinalizer(ptr->asObject(), callback);
}
+
+void MemoryUtil::gcRegisterFinalizer(
+ void* gcPtr, GCAllocatedMemoryFinalizerWithData callback, void* data) {
+ REGISTER_FINALIZER(gcPtr, callback, data);
+}
static void gcStartStatsTrace();
static void gcEndStatsTrace();
static void gcFull();
+ static void gcInvokeFinalizers();
static void gc();
typedef void (*GCAllocatedMemoryFinalizer)(void* self);
+ typedef void (*GCAllocatedMemoryFinalizerWithData)(void* self, void* data);
+ // @note this should not be used on Escargot values since they may already
+ // be bound with another finalizer and its internal data.
+ static void gcRegisterFinalizer(void* gcPtr,
+ GCAllocatedMemoryFinalizerWithData callback,
+ void* data);
static void gcRegisterFinalizer(Escargot::ValueRef* gcPtr,
GCAllocatedMemoryFinalizer callback);
static void gcRegisterFinalizer(EscargotShim::ValueWrap* gcPtr,
--- /dev/null
+<manifest>
+ <request>
+ <domain name="_"/>
+ </request>
+</manifest>
--- /dev/null
+# Copyright (c) 2021-present Samsung Electronics Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+Name: lwnode
+Summary: -
+Version: 1.0.0
+Release: 1
+Group: System/Servers
+License: Apache 2.0
+Source: %{name}-%{version}.tar.gz
+
+BuildRequires: cmake
+BuildRequires: make
+BuildRequires: python
+BuildRequires: ninja
+BuildRequires: pkgconfig(dlog)
+BuildRequires: pkgconfig(aul)
+BuildRequires: pkgconfig(capi-appfw-app-common)
+BuildRequires: pkgconfig(capi-system-info)
+BuildRequires: pkgconfig(capi-system-system-settings)
+BuildRequires: pkgconfig(icu-i18n)
+BuildRequires: pkgconfig(icu-uc)
+BuildRequires: pkgconfig(glib-2.0)
+
+%if (0%{?tizen_version_major} >= 6)
+BuildRequires: pkgconfig(openssl1.1)
+%else
+ %if (0%{?tizen_version_major} == 5) && (0%{?tizen_version_minor} == 5)
+BuildRequires: pkgconfig(openssl1.1)
+ %else
+BuildRequires: pkgconfig(openssl)
+ %endif
+%endif
+
+##############################################
+# Packages for profiles
+##############################################
+
+%package devel
+Summary: Development files for Lightweight node.js
+Group: System/Servers
+Requires: %{name} = %{version}
+%description devel
+Development files for Lightweight node.js.
+
+# Initialize the variables
+%{!?target: %define target lwnode}
+%{!?target_lib: %define target_lib liblwnode}
+%{!?node_engine: %define node_engine escargot}
+%{!?build_profile: %define build_profile none}
+
+
+%description
+Node.js on Escargot is a memory efficient node.js implementation,
+which runs on top of Escargot, a memory optimized JavaScript Engine developed
+by Samsung Research, instead of the default V8 JS engine.
+
+
+%prep
+%setup -q
+
+
+##############################################
+# Build
+##############################################
+
+%build
+gcc --version
+
+
+##############################################
+## Build rules for each profile
+##############################################
+
+%ifarch armv7l
+%define tizen_arch arm
+%endif
+%ifarch aarch64
+%define tizen_arch arm64
+%endif
+%ifarch i686
+%define tizen_arch x32
+%endif
+%ifarch x86_64
+%define tizen_arch x64
+%endif
+
+%if 0%{?asan} == 1
+CFLAGS+="-fsanitize=address -fsanitize-recover=address -U_FORTIFY_SOURCE -fno-omit-frame-pointer -fno-common"
+CXXFLAGS+="-fsanitize=address -fsanitize-recover=address -U_FORTIFY_SOURCE -fno-omit-frame-pointer -fno-common"
+LDFLAGS+="-fsanitize=address"
+%endif
+
+echo "Building:" %{target}
+
+./configure --without-npm --without-bundled-v8 \
+ --without-inspector --without-node-code-cache --without-node-snapshot \
+ --with-intl none --shared-openssl --shared-zlib --dest-os linux --dest-cpu '%{tizen_arch}' \
+ --engine escargot --ninja --shared
+
+ninja -C out/Release %{target_lib}
+ninja -C out/Release %{target}
+
+
+##############################################
+## Install
+##############################################
+
+%install
+rm -rf %{buildroot}
+mkdir -p %{buildroot}%{_bindir}
+mkdir -p %{buildroot}%{_libdir}
+
+rm -f ./out/Release/lib/*.tmp ./out/Release/lib/*.TOC
+cp ./out/Release/lib/liblwnode.so* %{buildroot}%{_libdir}
+cp ./out/Release/gen/escargot/libescargot.so %{buildroot}%{_libdir}
+
+# for devel files
+cp ./out/Release/%{target} %{buildroot}%{_bindir}
+
+%clean
+rm ./*.list
+
+%post
+/sbin/ldconfig
+
+%postun
+/sbin/ldconfig
+
+
+##############################################
+## Packaging rpms
+##############################################
+
+%files
+%manifest packaging/%{name}.manifest
+%defattr(-,root,root,-)
+%{_libdir}/libescargot.so
+%{_libdir}/liblwnode.so*
+%license LICENSE.Apache-2.0 LICENSE.BOEHM-GC LICENSE.BSD-3-Clause LICENSE.MIT LICENSE.NodeJS
+
+%files devel
+%manifest packaging/%{name}.manifest
+%{_bindir}/%{target}
+++ /dev/null
-<manifest>
- <request>
- <domain name="_"/>
- </request>
-</manifest>
+++ /dev/null
-# Copyright (c) 2021-present Samsung Electronics Co., Ltd
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-Name: nodejs-escargot
-Summary: -
-Version: 1.0.0
-Release: 1
-Group: System/Servers
-License: Apache 2.0
-Source: %{name}-%{version}.tar.gz
-
-BuildRequires: cmake
-BuildRequires: make
-BuildRequires: python
-BuildRequires: ninja
-BuildRequires: pkgconfig(dlog)
-BuildRequires: pkgconfig(aul)
-BuildRequires: pkgconfig(capi-appfw-app-common)
-BuildRequires: pkgconfig(capi-system-info),
-BuildRequires: pkgconfig(capi-system-system-settings)
-BuildRequires: pkgconfig(icu-i18n)
-BuildRequires: pkgconfig(icu-uc)
-BuildRequires: pkgconfig(glib-2.0)
-
-%if (0%{?tizen_version_major} >= 6)
-BuildRequires: pkgconfig(openssl1.1)
-%else
- %if (0%{?tizen_version_major} == 5) && (0%{?tizen_version_minor} == 5)
-BuildRequires: pkgconfig(openssl1.1)
- %else
-BuildRequires: pkgconfig(openssl)
- %endif
-%endif
-
-# Initialize the variables
-%{!?build_mode: %define build_mode release}
-%{!?build_target: %define build_target lwnode}
-%{!?node_engine: %define node_engine escargot}
-%{!?binary_name: %define binary_name lwnode}
-%{!?build_shared_lib: %define build_shared_lib false}
-%{!?build_profile: %define build_profile none}
-
-
-%description
-Node.js on Escargot is a memory efficient node.js implementation,
-which runs on top of Escargot, a memory optimized JavaScript Engine developed
-by Samsung Research, instead of the default V8 JS engine.
-
-
-%prep
-%setup -q
-
-%build
-gcc --version
-
-%ifarch armv7l
-%define tizen_arch arm
-%endif
-%ifarch aarch64
-%define tizen_arch arm64
-%endif
-%ifarch i686
-%define tizen_arch x32
-%endif
-%ifarch x86_64
-%define tizen_arch x64
-%endif
-
-%if 0%{?asan} == 1
-CFLAGS+="-fsanitize=address -fsanitize-recover=address -U_FORTIFY_SOURCE -fno-omit-frame-pointer -fno-common"
-CXXFLAGS+="-fsanitize=address -fsanitize-recover=address -U_FORTIFY_SOURCE -fno-omit-frame-pointer -fno-common"
-LDFLAGS+="-fsanitize=address"
-%endif
-
-echo "Build Configure"
-echo %{build_target}
-
-./configure --without-npm --without-bundled-v8 \
- --without-inspector --without-node-code-cache --without-node-snapshot \
- --with-intl none --shared-openssl --shared-zlib --dest-os linux --dest-cpu '%{tizen_arch}' \
- --engine escargot --ninja
-
-ninja -C out/Release lwnode
-
-%install
-rm -rf %{buildroot}
-mkdir -p %{buildroot}%{_bindir}
-mkdir -p %{buildroot}%{_libdir}
-
-cp -fr ./out/Release/%{build_target} %{buildroot}%{_bindir}
-cp -fr ./out/Release/gen/escargot/libescargot.so %{buildroot}%{_libdir}
-
-%clean
-rm ./*.list
-
-%post
-/sbin/ldconfig
-
-%postun
-/sbin/ldconfig
-
-%files
-%manifest packaging/%{name}.manifest
-%defattr(-,root,root,-)
-%{_libdir}/libescargot.so
-%{_bindir}/*
-%license LICENSE.Apache-2.0 LICENSE.BOEHM-GC LICENSE.BSD-3-Clause LICENSE.MIT LICENSE.NodeJS
)
os.close(fd_out)
os.close(fd_err)
- output = open(outname, encoding='utf8').read()
- errors = open(errname, encoding='utf8').read()
+
+ try:
+ output = open(outname, encoding='utf8').read()
+ errors = open(errname, encoding='utf8').read()
+ except UnicodeDecodeError:
+ # @note In old python, UnicodeDecodeError may occur
+ # e.g.) Non-ASCII character '\xc3'
+ output = open(outname, encoding='latin-1').read()
+ errors = open(errname, encoding='latin-1').read()
+
CheckedUnlink(outname)
CheckedUnlink(errname)
if self.vm is not None:
return self.vm
if arch == 'none':
- name = 'out/Debug/node' if mode == 'debug' else 'out/Release/node'
+ name = 'out/Debug/lwnode' if mode == 'debug' else 'out/Release/lwnode'
else:
- name = 'out/%s.%s/node' % (arch, mode)
+ name = 'out/%s.%s/lwnode' % (arch, mode)
# Currently GYP does not support output_dir for MSVS.
# http://code.google.com/p/gyp/issues/detail?id=40
result.add_option("--skip-tests",
help="Tests that should not be executed (comma-separated)",
default="")
+ result.add_option("--unsupported-tests",
+ help="Substrings of tests not supported (comma-separated)",
+ default="")
result.add_option("--warn-unused", help="Report unused rules",
default=False, action="store_true")
result.add_option("-j", help="The number of parallel tasks to run",
options.run = options.run.split(',')
# Split at commas and filter out all the empty strings.
options.skip_tests = [test for test in options.skip_tests.split(',') if test]
+ options.unsupported_tests = [test for test in options.unsupported_tests.split(',') if test]
+
if options.run == [""]:
options.run = None
elif len(options.run) != 2:
SKIP_LIST_FILENAME="skip_list.gen.txt"
-def OuputTestResult(progress, skip_count, options):
+def OuputTestResult(progress, skip_count, excluded_case_paths, options):
total_count = skip_count + progress.total
failed_count = len(progress.failed)
left_count = progress.remaining
succeed_count = progress.total - failed_count - left_count
+ excluded_count = len(excluded_case_paths)
- print()
- print("Total: %5d" % total_count)
- print(
- "Pass: %5d (%.2f%%)"
- % (
- succeed_count,
- (succeed_count / total_count) * 100.0,
- )
- )
- print("Fail: %5d" % failed_count)
- print("Skip: %5d" % skip_count)
- if left_count > 0 :
- print("Left: %5d" % left_count)
+ failed_case_paths = []
if failed_count != 0:
- print()
- print("=== %i failed command(s)" % failed_count)
-
- failed_cases_paths = []
+ failed_cmd_with_crash = []
+ failed_cmd_with_timeout = []
for failed in progress.failed:
command = failed.command
path = command[-1].split("/")
- failed_cases_path = "%s/%s/%s" % (path[-3], path[-2], path[-1])
- failed_cases_paths.append(failed_cases_path)
-
- print("%s %s" % ("C" if failed.HasCrashed() else "T", EscapeCommand(command)))
+ failed_case_path = "%s/%s/%s" % (path[-3], path[-2], path[-1])
+ failed_case_paths.append(failed_case_path)
+ if failed.HasCrashed():
+ failed_cmd_with_crash.append(EscapeCommand(command))
+ else:
+ failed_cmd_with_timeout.append(EscapeCommand(command))
- global SKIP_LIST_FILENAME
+ print()
+ print("=== %i failed command(s)" % failed_count)
+ print("Crash: %5d" % len(failed_cmd_with_crash))
+ print("Timeout: %5d" % len(failed_cmd_with_timeout))
+ for failed in failed_cmd_with_crash:
+ print("C %s" % failed)
+ print()
+ for failed in failed_cmd_with_timeout:
+ print("T %s" % failed)
+ global SKIP_LIST_FILENAME
+ if failed_count:
skip_tests = options.skip_tests[:]
- skip_tests.extend(failed_cases_paths)
+ skip_tests.extend(failed_case_paths)
+ skip_tests.extend(excluded_case_paths)
+ skip_tests = list(set(skip_tests))
skip_tests.sort()
WriteFileWithList(SKIP_LIST_FILENAME, skip_tests)
-
print()
print("=== skip_list is generated.")
print("%s" % (os.path.join(os.getcwd() , SKIP_LIST_FILENAME)))
+ # report
+ print()
+
+ if excluded_count == 0:
+ print("Total: %5d" % total_count)
+ else:
+ print("Total: %5d (%d excluded)" % (total_count, excluded_count))
+ print(
+ "Pass: %5d (%.2f%%)"
+ % (
+ succeed_count,
+ (succeed_count / total_count) * 100.0,
+ )
+ )
+ print("Fail: %5d" % failed_count)
+ print("Skip: %5d" % skip_count)
+ if left_count > 0:
+ print("Left: %5d" % left_count)
+
def Main():
parser = BuildOptions()
(options, args) = parser.parse_args()
else:
return True
+ # preprocessing options.unsupported_tests
+ excluded_case_paths = []
+ if options.unsupported_tests:
+ def remove_unsupported(case):
+ if any((s in case.file) for s in options.unsupported_tests):
+ path = case.file.split(os.path.sep)
+ excluded_case_path = "%s/%s/%s" % (path[-3], path[-2], path[-1])
+ excluded_case_paths.append(excluded_case_path)
+ return False
+ return True
+
+ all_cases = [
+ test_case for test_case in all_cases if remove_unsupported(test_case)
+ ]
+ # end of preprocessing options.unsupported_tests
+
cases_to_run = [
test_case for test_case in all_cases if should_keep(test_case)
]
duration = time.time() - start
skip_count = len(all_cases) - len(cases_to_run)
- OuputTestResult(progress, skip_count, options)
+ OuputTestResult(progress, skip_count, excluded_case_paths, options)
except KeyboardInterrupt:
print("Interrupted")