v8::ArrayBuffer::Contents v8::ArrayBuffer::Externalize() {
i::Handle<i::JSArrayBuffer> self = Utils::OpenHandle(this);
+ i::Isolate* isolate = self->GetIsolate();
Utils::ApiCheck(!self->is_external(), "v8::ArrayBuffer::Externalize",
"ArrayBuffer already externalized");
self->set_is_external(true);
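+ // The embedder now owns the backing store; stop tracking it so the heap
+ // will not free it.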
+ isolate->heap()->UnregisterArrayBuffer(self->backing_store());
+
return GetContents();
}
void V8HeapExplorer::ExtractJSArrayBufferReferences(
int entry, JSArrayBuffer* buffer) {
- SetWeakReference(buffer, entry, "weak_next", buffer->weak_next(),
- JSArrayBuffer::kWeakNextOffset);
// Set up a reference to a native memory backing_store object.
if (!buffer->backing_store())
return;
memset(roots_, 0, sizeof(roots_[0]) * kRootListLength);
set_native_contexts_list(NULL);
- set_array_buffers_list(Smi::FromInt(0));
- set_last_array_buffer_in_list(Smi::FromInt(0));
set_allocation_sites_list(Smi::FromInt(0));
set_encountered_weak_collections(Smi::FromInt(0));
set_encountered_weak_cells(Smi::FromInt(0));
void Heap::ProcessAllWeakReferences(WeakObjectRetainer* retainer) {
- ProcessArrayBuffers(retainer, false);
ProcessNativeContexts(retainer);
ProcessAllocationSites(retainer);
}
void Heap::ProcessYoungWeakReferences(WeakObjectRetainer* retainer) {
- ProcessArrayBuffers(retainer, true);
ProcessNativeContexts(retainer);
}
void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer) {
- Object* head = VisitWeakList<Context>(this, native_contexts_list(), retainer,
- false, NULL);
+ Object* head = VisitWeakList<Context>(this, native_contexts_list(), retainer);
// Update the head of the list of contexts.
set_native_contexts_list(head);
}
-void Heap::ProcessArrayBuffers(WeakObjectRetainer* retainer,
- bool stop_after_young) {
- Object* last_array_buffer = undefined_value();
- Object* array_buffer_obj =
- VisitWeakList<JSArrayBuffer>(this, array_buffers_list(), retainer,
- stop_after_young, &last_array_buffer);
- set_array_buffers_list(array_buffer_obj);
- set_last_array_buffer_in_list(last_array_buffer);
-
- // Verify invariant that young array buffers come before old array buffers
- // in array buffers list if there was no promotion failure.
- Object* undefined = undefined_value();
- Object* next = array_buffers_list();
- bool old_objects_recorded = false;
- while (next != undefined) {
- if (!old_objects_recorded) {
- old_objects_recorded = !InNewSpace(next);
- }
- CHECK((InNewSpace(next) && !old_objects_recorded) || !InNewSpace(next));
- next = JSArrayBuffer::cast(next)->weak_next();
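+// Array buffer backing stores are now tracked in two per-heap maps instead
+// of a weak list threaded through the JSArrayBuffer objects:
+// live_array_buffers_ holds every registered store, while
+// not_yet_discovered_array_buffers_ holds the stores the current GC has not
+// yet observed as live.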
+void Heap::RegisterNewArrayBuffer(void* data, size_t length) {
+ if (!data) return;
+ live_array_buffers_[data] = length;
+ reinterpret_cast<v8::Isolate*>(isolate_)
+ ->AdjustAmountOfExternalAllocatedMemory(length);
+}
+
+
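+// Drops a backing store from both maps; the heap will no longer free it.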
+void Heap::UnregisterArrayBuffer(void* data) {
+ if (!data) return;
+ DCHECK(live_array_buffers_.count(data) > 0);
+ live_array_buffers_.erase(data);
+ not_yet_discovered_array_buffers_.erase(data);
+}
+
+
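+// Called for every live, internal JSArrayBuffer during marking; whatever
+// remains in not_yet_discovered_array_buffers_ afterwards is garbage.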
+void Heap::RegisterLiveArrayBuffer(void* data) {
+ not_yet_discovered_array_buffers_.erase(data);
+}
+
+
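+// Frees all backing stores that were not discovered during marking and
+// resets the not-yet-discovered set for the next GC.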
+void Heap::FreeDeadArrayBuffers() {
+ for (auto buffer = not_yet_discovered_array_buffers_.begin();
+ buffer != not_yet_discovered_array_buffers_.end(); ++buffer) {
+ isolate_->array_buffer_allocator()->Free(buffer->first, buffer->second);
+ // Don't use the API method here since this could trigger another GC.
+ amount_of_external_allocated_memory_ -= buffer->second;
+ live_array_buffers_.erase(buffer->first);
}
+ not_yet_discovered_array_buffers_ = live_array_buffers_;
}
void Heap::TearDownArrayBuffers() {
- Object* undefined = undefined_value();
- for (Object* o = array_buffers_list(); o != undefined;) {
- JSArrayBuffer* buffer = JSArrayBuffer::cast(o);
- Runtime::FreeArrayBuffer(isolate(), buffer);
- o = buffer->weak_next();
+ for (auto buffer = live_array_buffers_.begin();
+ buffer != live_array_buffers_.end(); ++buffer) {
+ isolate_->array_buffer_allocator()->Free(buffer->first, buffer->second);
}
- set_array_buffers_list(undefined);
+ live_array_buffers_.clear();
+ not_yet_discovered_array_buffers_.clear();
}
void Heap::ProcessAllocationSites(WeakObjectRetainer* retainer) {
- Object* allocation_site_obj = VisitWeakList<AllocationSite>(
- this, allocation_sites_list(), retainer, false, NULL);
+ Object* allocation_site_obj =
+ VisitWeakList<AllocationSite>(this, allocation_sites_list(), retainer);
set_allocation_sites_list(allocation_site_obj);
}
CHECK_EQ(0u, gc_count_);
set_native_contexts_list(undefined_value());
- set_array_buffers_list(undefined_value());
- set_last_array_buffer_in_list(undefined_value());
set_allocation_sites_list(undefined_value());
return true;
}
#define V8_HEAP_HEAP_H_
#include <cmath>
+#include <map>
#include "src/allocation.h"
#include "src/assert-scope.h"
}
Object* native_contexts_list() const { return native_contexts_list_; }
- void set_array_buffers_list(Object* object) { array_buffers_list_ = object; }
- Object* array_buffers_list() const { return array_buffers_list_; }
-
- void set_last_array_buffer_in_list(Object* object) {
- last_array_buffer_in_list_ = object;
- }
- Object* last_array_buffer_in_list() const {
- return last_array_buffer_in_list_;
- }
-
void set_allocation_sites_list(Object* object) {
allocation_sites_list_ = object;
}
bool deserialization_complete() const { return deserialization_complete_; }
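+ // Tracking of array buffer backing stores and their external memory.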
+ void RegisterNewArrayBuffer(void* data, size_t length);
+ void UnregisterArrayBuffer(void* data);
+ void RegisterLiveArrayBuffer(void* data);
+ void FreeDeadArrayBuffers();
+
protected:
// Methods made available to tests.
// Weak list heads, threaded through the objects.
// List heads are initialized lazily and contain the undefined_value at start.
Object* native_contexts_list_;
- Object* array_buffers_list_;
- Object* last_array_buffer_in_list_;
Object* allocation_sites_list_;
// List of encountered weak collections (JSWeakMap and JSWeakSet) during
void MarkCompactEpilogue();
void ProcessNativeContexts(WeakObjectRetainer* retainer);
- void ProcessArrayBuffers(WeakObjectRetainer* retainer, bool stop_after_young);
void ProcessAllocationSites(WeakObjectRetainer* retainer);
// Deopts all code that contains allocation instructions which are tenured or
bool concurrent_sweeping_enabled_;
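+ // Maps from backing store pointer to byte length. live_array_buffers_
+ // contains all registered stores; not_yet_discovered_array_buffers_ is the
+ // subset not yet reported live by the current GC.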
+ std::map<void*, size_t> live_array_buffers_;
+ std::map<void*, size_t> not_yet_discovered_array_buffers_;
+
friend class AlwaysAllocateScope;
friend class Deserializer;
friend class Factory;
#ifdef DEBUG
state_ = SWEEP_SPACES;
#endif
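+ // Marking is complete, so backing stores that were not reported live can
+ // now be released.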
+ heap()->FreeDeadArrayBuffers();
+
MoveEvacuationCandidatesToEndOfPagesList();
// Noncompacting collections simply sweep the spaces to clear the mark
Map* map, HeapObject* object) {
Heap* heap = map->GetHeap();
- VisitPointers(heap, HeapObject::RawField(
- object, JSArrayBuffer::BodyDescriptor::kStartOffset),
- HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
VisitPointers(
- heap, HeapObject::RawField(object,
- JSArrayBuffer::kWeakNextOffset + kPointerSize),
+ heap,
+ HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
return JSArrayBuffer::kSizeWithInternalFields;
}
StaticVisitor::VisitPointers(
heap,
HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
- HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
- StaticVisitor::VisitPointers(
- heap, HeapObject::RawField(object,
- JSArrayBuffer::kWeakNextOffset + kPointerSize),
HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
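+ // Report the backing store as live so FreeDeadArrayBuffers keeps it.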
+ if (!JSArrayBuffer::cast(object)->is_external()) {
+ heap->RegisterLiveArrayBuffer(JSArrayBuffer::cast(object)->backing_store());
+ }
}
template <class T>
-Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer,
- bool stop_after_young, Object** list_tail) {
+Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer) {
Object* undefined = heap->undefined_value();
Object* head = undefined;
T* tail = NULL;
}
// Terminate the list if there are one or more elements.
- if (tail != NULL) {
- WeakListVisitor<T>::SetWeakNext(tail, undefined);
- if (list_tail) *list_tail = tail;
- }
+ if (tail != NULL) WeakListVisitor<T>::SetWeakNext(tail, undefined);
return head;
}
static void DoWeakList(Heap* heap, Context* context,
WeakObjectRetainer* retainer, int index) {
// Visit the weak list, removing dead intermediate elements.
- Object* list_head =
- VisitWeakList<T>(heap, context->get(index), retainer, false, NULL);
+ Object* list_head = VisitWeakList<T>(heap, context->get(index), retainer);
// Update the list head.
context->set(index, list_head, UPDATE_WRITE_BARRIER);
};
-template <>
-struct WeakListVisitor<JSArrayBuffer> {
- static void SetWeakNext(JSArrayBuffer* obj, Object* next) {
- obj->set_weak_next(next);
- }
-
- static Object* WeakNext(JSArrayBuffer* obj) { return obj->weak_next(); }
-
- static int WeakNextOffset() { return JSArrayBuffer::kWeakNextOffset; }
-
- static void VisitLiveObject(Heap* heap, JSArrayBuffer* array_buffer,
- WeakObjectRetainer* retainer) {
- }
-
- static void VisitPhantomObject(Heap* heap, JSArrayBuffer* phantom) {
- Runtime::FreeArrayBuffer(heap->isolate(), phantom);
- }
-};
-
-
template <>
struct WeakListVisitor<AllocationSite> {
static void SetWeakNext(AllocationSite* obj, Object* next) {
template Object* VisitWeakList<Context>(Heap* heap, Object* list,
- WeakObjectRetainer* retainer,
- bool stop_after_young,
- Object** list_tail);
-
-
-template Object* VisitWeakList<JSArrayBuffer>(Heap* heap, Object* list,
- WeakObjectRetainer* retainer,
- bool stop_after_young,
- Object** list_tail);
+ WeakObjectRetainer* retainer);
template Object* VisitWeakList<AllocationSite>(Heap* heap, Object* list,
- WeakObjectRetainer* retainer,
- bool stop_after_young,
- Object** list_tail);
+ WeakObjectRetainer* retainer);
}
} // namespace v8::internal
// pointers. The template parameter T is a WeakListVisitor that defines how to
// access the next-element pointers.
template <class T>
-Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer,
- bool stop_after_young, Object** list_tail);
-Object* VisitNewArrayBufferViewsWeakList(Heap* heap, Object* list,
- WeakObjectRetainer* retainer);
+Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer);
}
} // namespace v8::internal
}
-ACCESSORS(JSArrayBuffer, weak_next, Object, kWeakNextOffset)
-
-
Object* JSArrayBufferView::byte_offset() const {
if (WasNeutered()) return Smi::FromInt(0);
return Object::cast(READ_FIELD(this, kByteOffsetOffset));
inline bool was_neutered();
inline void set_was_neutered(bool value);
- // [weak_next]: linked list of array buffers.
- DECL_ACCESSORS(weak_next, Object)
-
DECLARE_CAST(JSArrayBuffer)
void Neuter();
#else
static const int kBitFieldOffset = kBitFieldSlot + kIntSize;
#endif
- static const int kWeakNextOffset = kBitFieldSlot + kPointerSize;
- static const int kSize = kWeakNextOffset + kPointerSize;
+ static const int kSize = kBitFieldSlot + kPointerSize;
static const int kSizeWithInternalFields =
kSize + v8::ArrayBuffer::kInternalFieldCount * kPointerSize;
namespace v8 {
namespace internal {
-void Runtime::FreeArrayBuffer(Isolate* isolate,
- JSArrayBuffer* phantom_array_buffer) {
- if (phantom_array_buffer->is_external()) return;
-
- size_t allocated_length =
- NumberToSize(isolate, phantom_array_buffer->byte_length());
-
- reinterpret_cast<v8::Isolate*>(isolate)
- ->AdjustAmountOfExternalAllocatedMemory(
- -static_cast<int64_t>(allocated_length));
- CHECK(isolate->array_buffer_allocator() != NULL);
- isolate->array_buffer_allocator()->Free(phantom_array_buffer->backing_store(),
- allocated_length);
-}
-
-
void Runtime::SetupArrayBuffer(Isolate* isolate,
Handle<JSArrayBuffer> array_buffer,
bool is_external, void* data,
CHECK(byte_length->IsSmi() || byte_length->IsHeapNumber());
array_buffer->set_byte_length(*byte_length);
- if (isolate->heap()->InNewSpace(*array_buffer) ||
- isolate->heap()->array_buffers_list()->IsUndefined()) {
- array_buffer->set_weak_next(isolate->heap()->array_buffers_list());
- isolate->heap()->set_array_buffers_list(*array_buffer);
- if (isolate->heap()->last_array_buffer_in_list()->IsUndefined()) {
- isolate->heap()->set_last_array_buffer_in_list(*array_buffer);
- }
- } else {
- JSArrayBuffer::cast(isolate->heap()->last_array_buffer_in_list())
- ->set_weak_next(*array_buffer);
- isolate->heap()->set_last_array_buffer_in_list(*array_buffer);
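+ // Only track backing stores that the heap owns; external (embedder-owned)
+ // stores and NULL data are not registered.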
+ if (data && !is_external) {
+ isolate->heap()->RegisterNewArrayBuffer(data, allocated_length);
}
}
}
SetupArrayBuffer(isolate, array_buffer, false, data, allocated_length);
-
- reinterpret_cast<v8::Isolate*>(isolate)
- ->AdjustAmountOfExternalAllocatedMemory(allocated_length);
-
return true;
}
size_t byte_length = NumberToSize(isolate, array_buffer->byte_length());
array_buffer->set_is_external(true);
Runtime::NeuterArrayBuffer(array_buffer);
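+ // Unregister before freeing so the GC does not try to free the store again.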
+ isolate->heap()->UnregisterArrayBuffer(backing_store);
isolate->array_buffer_allocator()->Free(backing_store, byte_length);
return isolate->heap()->undefined_value();
}
static void NeuterArrayBuffer(Handle<JSArrayBuffer> array_buffer);
- static void FreeArrayBuffer(Isolate* isolate,
- JSArrayBuffer* phantom_array_buffer);
-
static int FindIndexedNonNativeFrame(JavaScriptFrameIterator* it, int index);
enum TypedArrayId {
isolate_->heap()->set_native_contexts_list(
isolate_->heap()->undefined_value());
- isolate_->heap()->set_array_buffers_list(
- isolate_->heap()->undefined_value());
- isolate_->heap()->set_last_array_buffer_in_list(
- isolate_->heap()->undefined_value());
// The allocation site list is built during root iteration, but if no sites
// were encountered then it needs to be initialized to undefined.
'test-version.cc',
'test-weakmaps.cc',
'test-weaksets.cc',
- 'test-weaktypedarrays.cc',
'trace-extension.cc',
'../../src/startup-data-util.h',
'../../src/startup-data-util.cc'
+++ /dev/null
-// Copyright 2013 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-#include <stdlib.h>
-
-#include "src/v8.h"
-#include "test/cctest/cctest.h"
-
-#include "src/api.h"
-#include "src/heap/heap.h"
-#include "src/objects.h"
-
-using namespace v8::internal;
-
-static Isolate* GetIsolateFrom(LocalContext* context) {
- return reinterpret_cast<Isolate*>((*context)->GetIsolate());
-}
-
-
-static int CountArrayBuffersInWeakList(Heap* heap) {
- int count = 0;
- for (Object* o = heap->array_buffers_list();
- !o->IsUndefined();
- o = JSArrayBuffer::cast(o)->weak_next()) {
- count++;
- }
- return count;
-}
-
-
-static bool HasArrayBufferInWeakList(Heap* heap, JSArrayBuffer* ab) {
- for (Object* o = heap->array_buffers_list();
- !o->IsUndefined();
- o = JSArrayBuffer::cast(o)->weak_next()) {
- if (ab == o) return true;
- }
- return false;
-}
-
-
-TEST(WeakArrayBuffersFromScript) {
- v8::V8::Initialize();
- LocalContext context;
- Isolate* isolate = GetIsolateFrom(&context);
- int start = CountArrayBuffersInWeakList(isolate->heap());
-
- for (int i = 1; i <= 3; i++) {
- // Create 3 array buffers, make i-th of them garbage,
- // validate correct state of array buffer weak list.
- CHECK_EQ(start, CountArrayBuffersInWeakList(isolate->heap()));
- {
- v8::HandleScope scope(context->GetIsolate());
-
- {
- v8::HandleScope s1(context->GetIsolate());
- CompileRun("var ab1 = new ArrayBuffer(256);"
- "var ab2 = new ArrayBuffer(256);"
- "var ab3 = new ArrayBuffer(256);");
- v8::Handle<v8::ArrayBuffer> ab1 =
- v8::Handle<v8::ArrayBuffer>::Cast(CompileRun("ab1"));
- v8::Handle<v8::ArrayBuffer> ab2 =
- v8::Handle<v8::ArrayBuffer>::Cast(CompileRun("ab2"));
- v8::Handle<v8::ArrayBuffer> ab3 =
- v8::Handle<v8::ArrayBuffer>::Cast(CompileRun("ab3"));
-
- CHECK_EQ(3, CountArrayBuffersInWeakList(isolate->heap()) - start);
- CHECK(HasArrayBufferInWeakList(isolate->heap(),
- *v8::Utils::OpenHandle(*ab1)));
- CHECK(HasArrayBufferInWeakList(isolate->heap(),
- *v8::Utils::OpenHandle(*ab2)));
- CHECK(HasArrayBufferInWeakList(isolate->heap(),
- *v8::Utils::OpenHandle(*ab3)));
- }
-
- i::ScopedVector<char> source(1024);
- i::SNPrintF(source, "ab%d = null;", i);
- CompileRun(source.start());
- isolate->heap()->CollectAllGarbage();
-
- CHECK_EQ(2, CountArrayBuffersInWeakList(isolate->heap()) - start);
-
- {
- v8::HandleScope s2(context->GetIsolate());
- for (int j = 1; j <= 3; j++) {
- if (j == i) continue;
- i::SNPrintF(source, "ab%d", j);
- v8::Handle<v8::ArrayBuffer> ab =
- v8::Handle<v8::ArrayBuffer>::Cast(CompileRun(source.start()));
- CHECK(HasArrayBufferInWeakList(isolate->heap(),
- *v8::Utils::OpenHandle(*ab)));
- }
- }
-
- CompileRun("ab1 = null; ab2 = null; ab3 = null;");
- }
-
- isolate->heap()->CollectAllGarbage();
- CHECK_EQ(start, CountArrayBuffersInWeakList(isolate->heap()));
- }
-}