From 88144ee17fa563a4966eab931913b4189424ca72 Mon Sep 17 00:00:00 2001 From: "yangguo@chromium.org" Date: Wed, 30 Apr 2014 12:25:18 +0000 Subject: [PATCH] Kiss goodbye to MaybeObject. R=hpayer@chromium.org Review URL: https://codereview.chromium.org/259173003 git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@21086 ce2b1a6d-e550-0410-aec6-3dcde31c8c00 --- src/arm64/simulator-arm64.cc | 2 +- src/assembler.h | 2 +- src/execution.cc | 2 +- src/factory.cc | 14 +- src/gdb-jit.cc | 2 +- src/handles-inl.h | 5 - src/heap-inl.h | 105 +++---- src/heap-snapshot-generator.cc | 21 +- src/heap.cc | 625 +++++++++++++++++++-------------------- src/heap.h | 127 ++++---- src/incremental-marking-inl.h | 4 +- src/incremental-marking.cc | 6 +- src/isolate.h | 4 +- src/mark-compact.cc | 28 +- src/objects-debug.cc | 15 - src/objects-inl.h | 64 ---- src/objects-printer.cc | 19 +- src/objects.cc | 12 - src/objects.h | 316 +++++++------------- src/spaces-inl.h | 6 +- src/spaces.cc | 14 +- src/spaces.h | 56 +++- src/v8globals.h | 3 +- test/cctest/cctest.h | 6 +- test/cctest/test-alloc.cc | 39 +-- test/cctest/test-heap.cc | 36 +-- test/cctest/test-mark-compact.cc | 35 +-- test/cctest/test-serialize.cc | 3 +- test/cctest/test-spaces.cc | 15 +- test/cctest/test-symbols.cc | 2 +- 30 files changed, 702 insertions(+), 886 deletions(-) diff --git a/src/arm64/simulator-arm64.cc b/src/arm64/simulator-arm64.cc index d7decb6..c88b9b7 100644 --- a/src/arm64/simulator-arm64.cc +++ b/src/arm64/simulator-arm64.cc @@ -570,7 +570,7 @@ void Simulator::DoRuntimeCall(Instruction* instr) { break; case ExternalReference::BUILTIN_CALL: { - // MaybeObject* f(v8::internal::Arguments). + // Object* f(v8::internal::Arguments). 
TraceSim("Type: BUILTIN_CALL\n"); SimulatorRuntimeCall target = reinterpret_cast(external); diff --git a/src/assembler.h b/src/assembler.h index ed3d99b..df1f187 100644 --- a/src/assembler.h +++ b/src/assembler.h @@ -683,7 +683,7 @@ class ExternalReference BASE_EMBEDDED { // Used in the simulator to support different native api calls. enum Type { // Builtin call. - // MaybeObject* f(v8::internal::Arguments). + // Object* f(v8::internal::Arguments). BUILTIN_CALL, // default // Builtin that takes float arguments and returns an int. diff --git a/src/execution.cc b/src/execution.cc index 961fd3c..dd62ee2 100644 --- a/src/execution.cc +++ b/src/execution.cc @@ -95,7 +95,7 @@ MUST_USE_RESULT static MaybeHandle Invoke( } #ifdef VERIFY_HEAP - value->Verify(); + value->ObjectVerify(); #endif // Update the pending exception flag and return the value. diff --git a/src/factory.cc b/src/factory.cc index fb1d6e7..7fc2a30 100644 --- a/src/factory.cc +++ b/src/factory.cc @@ -1069,10 +1069,12 @@ Handle Factory::NewNumberFromInt(int32_t value, Handle Factory::NewNumberFromUint(uint32_t value, - PretenureFlag pretenure) { - CALL_HEAP_FUNCTION( - isolate(), - isolate()->heap()->NumberFromUint32(value, pretenure), Object); + PretenureFlag pretenure) { + int32_t int32v = static_cast(value); + if (int32v >= 0 && Smi::IsValid(int32v)) { + return handle(Smi::FromInt(int32v), isolate()); + } + return NewHeapNumber(FastUI2D(value), pretenure); } @@ -1416,9 +1418,7 @@ Handle Factory::NewCode(const CodeDesc& desc, code->CopyFrom(desc); #ifdef VERIFY_HEAP - if (FLAG_verify_heap) { - code->Verify(); - } + if (FLAG_verify_heap) code->ObjectVerify(); #endif return code; } diff --git a/src/gdb-jit.cc b/src/gdb-jit.cc index 0d17618..f2861c1 100644 --- a/src/gdb-jit.cc +++ b/src/gdb-jit.cc @@ -1818,7 +1818,7 @@ extern "C" { JITDescriptor __jit_debug_descriptor = { 1, 0, 0, 0 }; #ifdef OBJECT_PRINT - void __gdb_print_v8_object(MaybeObject* object) { + void __gdb_print_v8_object(Object* object) { 
object->Print(); PrintF(stdout, "\n"); } diff --git a/src/handles-inl.h b/src/handles-inl.h index 5e7d236..d9f8c69 100644 --- a/src/handles-inl.h +++ b/src/handles-inl.h @@ -16,21 +16,18 @@ namespace internal { template Handle::Handle(T* obj) { - ASSERT(!obj->IsFailure()); location_ = HandleScope::CreateHandle(obj->GetIsolate(), obj); } template Handle::Handle(T* obj, Isolate* isolate) { - ASSERT(!obj->IsFailure()); location_ = HandleScope::CreateHandle(isolate, obj); } template inline bool Handle::is_identical_to(const Handle o) const { - ASSERT(location_ == NULL || !(*location_)->IsFailure()); // Dereferencing deferred handles to check object equality is safe. SLOW_ASSERT( (location_ == NULL || IsDereferenceAllowed(NO_DEFERRED_CHECK)) && @@ -43,14 +40,12 @@ inline bool Handle::is_identical_to(const Handle o) const { template inline T* Handle::operator*() const { - ASSERT(location_ != NULL && !(*location_)->IsFailure()); SLOW_ASSERT(IsDereferenceAllowed(INCLUDE_DEFERRED_CHECK)); return *BitCast(location_); } template inline T** Handle::location() const { - ASSERT(location_ == NULL || !(*location_)->IsFailure()); SLOW_ASSERT(location_ == NULL || IsDereferenceAllowed(INCLUDE_DEFERRED_CHECK)); return location_; diff --git a/src/heap-inl.h b/src/heap-inl.h index a728777..ab11b32 100644 --- a/src/heap-inl.h +++ b/src/heap-inl.h @@ -76,7 +76,7 @@ bool inline Heap::IsOneByte(String* str, int chars) { } -MaybeObject* Heap::AllocateInternalizedStringFromUtf8( +AllocationResult Heap::AllocateInternalizedStringFromUtf8( Vector str, int chars, uint32_t hash_field) { if (IsOneByte(str, chars)) { return AllocateOneByteInternalizedString( @@ -87,7 +87,7 @@ MaybeObject* Heap::AllocateInternalizedStringFromUtf8( template -MaybeObject* Heap::AllocateInternalizedStringImpl( +AllocationResult Heap::AllocateInternalizedStringImpl( T t, int chars, uint32_t hash_field) { if (IsOneByte(t, chars)) { return AllocateInternalizedStringImpl(t, chars, hash_field); @@ -96,8 +96,9 @@ 
MaybeObject* Heap::AllocateInternalizedStringImpl( } -MaybeObject* Heap::AllocateOneByteInternalizedString(Vector str, - uint32_t hash_field) { +AllocationResult Heap::AllocateOneByteInternalizedString( + Vector str, + uint32_t hash_field) { if (str.length() > String::kMaxLength) { return isolate()->ThrowInvalidStringLength(); } @@ -107,13 +108,13 @@ MaybeObject* Heap::AllocateOneByteInternalizedString(Vector str, AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED); // Allocate string. - Object* result; - { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE); - if (!maybe_result->ToObject(&result)) return maybe_result; + HeapObject* result; + { AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE); + if (!allocation.To(&result)) return allocation; } // String maps are all immortal immovable objects. - reinterpret_cast(result)->set_map_no_write_barrier(map); + result->set_map_no_write_barrier(map); // Set length and hash fields of the allocated string. String* answer = String::cast(result); answer->set_length(str.length()); @@ -129,8 +130,8 @@ MaybeObject* Heap::AllocateOneByteInternalizedString(Vector str, } -MaybeObject* Heap::AllocateTwoByteInternalizedString(Vector str, - uint32_t hash_field) { +AllocationResult Heap::AllocateTwoByteInternalizedString(Vector str, + uint32_t hash_field) { if (str.length() > String::kMaxLength) { return isolate()->ThrowInvalidStringLength(); } @@ -140,12 +141,12 @@ MaybeObject* Heap::AllocateTwoByteInternalizedString(Vector str, AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED); // Allocate string. 
- Object* result; - { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE); - if (!maybe_result->ToObject(&result)) return maybe_result; + HeapObject* result; + { AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE); + if (!allocation.To(&result)) return allocation; } - reinterpret_cast(result)->set_map(map); + result->set_map(map); // Set length and hash fields of the allocated string. String* answer = String::cast(result); answer->set_length(str.length()); @@ -160,27 +161,27 @@ MaybeObject* Heap::AllocateTwoByteInternalizedString(Vector str, return answer; } -MaybeObject* Heap::CopyFixedArray(FixedArray* src) { +AllocationResult Heap::CopyFixedArray(FixedArray* src) { if (src->length() == 0) return src; return CopyFixedArrayWithMap(src, src->map()); } -MaybeObject* Heap::CopyFixedDoubleArray(FixedDoubleArray* src) { +AllocationResult Heap::CopyFixedDoubleArray(FixedDoubleArray* src) { if (src->length() == 0) return src; return CopyFixedDoubleArrayWithMap(src, src->map()); } -MaybeObject* Heap::CopyConstantPoolArray(ConstantPoolArray* src) { +AllocationResult Heap::CopyConstantPoolArray(ConstantPoolArray* src) { if (src->length() == 0) return src; return CopyConstantPoolArrayWithMap(src, src->map()); } -MaybeObject* Heap::AllocateRaw(int size_in_bytes, - AllocationSpace space, - AllocationSpace retry_space) { +AllocationResult Heap::AllocateRaw(int size_in_bytes, + AllocationSpace space, + AllocationSpace retry_space) { ASSERT(AllowHandleAllocation::IsAllowed()); ASSERT(AllowHeapAllocation::IsAllowed()); ASSERT(gc_state_ == NOT_IN_GC); @@ -189,58 +190,49 @@ MaybeObject* Heap::AllocateRaw(int size_in_bytes, if (FLAG_gc_interval >= 0 && AllowAllocationFailure::IsAllowed(isolate_) && Heap::allocation_timeout_-- <= 0) { - return Failure::RetryAfterGC(space); + return AllocationResult::Retry(space); } isolate_->counters()->objs_since_last_full()->Increment(); isolate_->counters()->objs_since_last_young()->Increment(); #endif 
HeapObject* object; - MaybeObject* result; + AllocationResult allocation; if (NEW_SPACE == space) { - result = new_space_.AllocateRaw(size_in_bytes); - if (always_allocate() && result->IsFailure() && retry_space != NEW_SPACE) { + allocation = new_space_.AllocateRaw(size_in_bytes); + if (always_allocate() && + allocation.IsRetry() && + retry_space != NEW_SPACE) { space = retry_space; } else { - if (profiler->is_tracking_allocations() && result->To(&object)) { + if (profiler->is_tracking_allocations() && allocation.To(&object)) { profiler->AllocationEvent(object->address(), size_in_bytes); } - return result; + return allocation; } } if (OLD_POINTER_SPACE == space) { - result = old_pointer_space_->AllocateRaw(size_in_bytes); + allocation = old_pointer_space_->AllocateRaw(size_in_bytes); } else if (OLD_DATA_SPACE == space) { - result = old_data_space_->AllocateRaw(size_in_bytes); + allocation = old_data_space_->AllocateRaw(size_in_bytes); } else if (CODE_SPACE == space) { - result = code_space_->AllocateRaw(size_in_bytes); + allocation = code_space_->AllocateRaw(size_in_bytes); } else if (LO_SPACE == space) { - result = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE); + allocation = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE); } else if (CELL_SPACE == space) { - result = cell_space_->AllocateRaw(size_in_bytes); + allocation = cell_space_->AllocateRaw(size_in_bytes); } else if (PROPERTY_CELL_SPACE == space) { - result = property_cell_space_->AllocateRaw(size_in_bytes); + allocation = property_cell_space_->AllocateRaw(size_in_bytes); } else { ASSERT(MAP_SPACE == space); - result = map_space_->AllocateRaw(size_in_bytes); + allocation = map_space_->AllocateRaw(size_in_bytes); } - if (result->IsFailure()) old_gen_exhausted_ = true; - if (profiler->is_tracking_allocations() && result->To(&object)) { + if (allocation.IsRetry()) old_gen_exhausted_ = true; + if (profiler->is_tracking_allocations() && allocation.To(&object)) { 
profiler->AllocationEvent(object->address(), size_in_bytes); } - return result; -} - - -MaybeObject* Heap::NumberFromUint32( - uint32_t value, PretenureFlag pretenure) { - if (static_cast(value) >= 0 && - Smi::IsValid(static_cast(value))) { - return Smi::FromInt(static_cast(value)); - } - // Bypass NumberFromDouble to avoid various redundant checks. - return AllocateHeapNumber(FastUI2D(value), pretenure); + return allocation; } @@ -409,6 +401,8 @@ bool Heap::AllowedToBeMigrated(HeapObject* obj, AllocationSpace dst) { case PROPERTY_CELL_SPACE: case LO_SPACE: return false; + default: + break; } UNREACHABLE(); return false; @@ -590,31 +584,28 @@ Isolate* Heap::isolate() { // __scope__ in a call to this macro. #define RETURN_OBJECT_UNLESS_EXCEPTION(ISOLATE, RETURN_VALUE, RETURN_EMPTY) \ - if (__maybe_object__->ToObject(&__object__)) { \ + if (!__allocation__.IsRetry()) { \ + __object__ = __allocation__.ToObjectChecked(); \ if (__object__ == (ISOLATE)->heap()->exception()) { RETURN_EMPTY; } \ RETURN_VALUE; \ } #define CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY) \ do { \ - MaybeObject* __maybe_object__ = FUNCTION_CALL; \ + AllocationResult __allocation__ = FUNCTION_CALL; \ Object* __object__ = NULL; \ RETURN_OBJECT_UNLESS_EXCEPTION(ISOLATE, RETURN_VALUE, RETURN_EMPTY) \ - ASSERT(__maybe_object__->IsRetryAfterGC()); \ - (ISOLATE)->heap()->CollectGarbage(Failure::cast(__maybe_object__)-> \ - allocation_space(), \ - "allocation failure"); \ - __maybe_object__ = FUNCTION_CALL; \ + (ISOLATE)->heap()->CollectGarbage(__allocation__.RetrySpace(), \ + "allocation failure"); \ + __allocation__ = FUNCTION_CALL; \ RETURN_OBJECT_UNLESS_EXCEPTION(ISOLATE, RETURN_VALUE, RETURN_EMPTY) \ - ASSERT(__maybe_object__->IsRetryAfterGC()); \ (ISOLATE)->counters()->gc_last_resort_from_handles()->Increment(); \ (ISOLATE)->heap()->CollectAllAvailableGarbage("last resort gc"); \ { \ AlwaysAllocateScope __scope__(ISOLATE); \ - __maybe_object__ = FUNCTION_CALL; \ + 
__allocation__ = FUNCTION_CALL; \ } \ RETURN_OBJECT_UNLESS_EXCEPTION(ISOLATE, RETURN_VALUE, RETURN_EMPTY) \ - ASSERT(__maybe_object__->IsRetryAfterGC()); \ /* TODO(1181417): Fix this. */ \ v8::internal::Heap::FatalProcessOutOfMemory("CALL_AND_RETRY_LAST", true); \ RETURN_EMPTY; \ diff --git a/src/heap-snapshot-generator.cc b/src/heap-snapshot-generator.cc index eac30eb..cafee77 100644 --- a/src/heap-snapshot-generator.cc +++ b/src/heap-snapshot-generator.cc @@ -1056,23 +1056,30 @@ class IndexedReferencesExtractor : public ObjectVisitor { static void MarkVisitedField(HeapObject* obj, int offset) { if (offset < 0) return; Address field = obj->address() + offset; - ASSERT(!Memory::Object_at(field)->IsFailure()); ASSERT(Memory::Object_at(field)->IsHeapObject()); - Object* untagged = *reinterpret_cast(field); - intptr_t tagged = reinterpret_cast(untagged) | kFailureTag; - *reinterpret_cast(field) = reinterpret_cast(tagged); + intptr_t p = reinterpret_cast(Memory::Object_at(field)); + ASSERT(!IsMarked(p)); + intptr_t p_tagged = p | kTag; + Memory::Object_at(field) = reinterpret_cast(p_tagged); } private: bool CheckVisitedAndUnmark(Object** field) { - if ((*field)->IsFailure()) { - intptr_t untagged = reinterpret_cast(*field) & ~kFailureTagMask; - *field = reinterpret_cast(untagged | kHeapObjectTag); + intptr_t p = reinterpret_cast(*field); + if (IsMarked(p)) { + intptr_t p_untagged = (p & ~kTaggingMask) | kHeapObjectTag; + *field = reinterpret_cast(p_untagged); ASSERT((*field)->IsHeapObject()); return true; } return false; } + + static const intptr_t kTaggingMask = 3; + static const intptr_t kTag = 3; + + static bool IsMarked(intptr_t p) { return (p & kTaggingMask) == kTag; } + V8HeapExplorer* generator_; HeapObject* parent_obj_; int parent_; diff --git a/src/heap.cc b/src/heap.cc index ca7cea5..188b246 100644 --- a/src/heap.cc +++ b/src/heap.cc @@ -914,14 +914,14 @@ void Heap::ReserveSpace(int *sizes, Address *locations_out) { ASSERT(NEW_SPACE == FIRST_PAGED_SPACE - 1); 
for (int space = NEW_SPACE; space <= LAST_PAGED_SPACE; space++) { if (sizes[space] != 0) { - MaybeObject* allocation; + AllocationResult allocation; if (space == NEW_SPACE) { allocation = new_space()->AllocateRaw(sizes[space]); } else { allocation = paged_space(space)->AllocateRaw(sizes[space]); } FreeListNode* node; - if (!allocation->To(&node)) { + if (!allocation.To(&node)) { if (space == NEW_SPACE) { Heap::CollectGarbage(NEW_SPACE, "failed to reserve space in the new space"); @@ -2045,20 +2045,18 @@ class ScavengingVisitor : public StaticVisitorBase { Heap* heap = map->GetHeap(); if (heap->ShouldBePromoted(object->address(), object_size)) { - MaybeObject* maybe_result; + AllocationResult allocation; if (object_contents == DATA_OBJECT) { ASSERT(heap->AllowedToBeMigrated(object, OLD_DATA_SPACE)); - maybe_result = heap->old_data_space()->AllocateRaw(allocation_size); + allocation = heap->old_data_space()->AllocateRaw(allocation_size); } else { ASSERT(heap->AllowedToBeMigrated(object, OLD_POINTER_SPACE)); - maybe_result = heap->old_pointer_space()->AllocateRaw(allocation_size); + allocation = heap->old_pointer_space()->AllocateRaw(allocation_size); } - Object* result = NULL; // Initialization to please compiler. - if (maybe_result->ToObject(&result)) { - HeapObject* target = HeapObject::cast(result); - + HeapObject* target = NULL; // Initialization to please compiler. 
+ if (allocation.To(&target)) { if (alignment != kObjectAlignment) { target = EnsureDoubleAligned(heap, target, allocation_size); } @@ -2083,10 +2081,10 @@ class ScavengingVisitor : public StaticVisitorBase { } } ASSERT(heap->AllowedToBeMigrated(object, NEW_SPACE)); - MaybeObject* allocation = heap->new_space()->AllocateRaw(allocation_size); + AllocationResult allocation = + heap->new_space()->AllocateRaw(allocation_size); heap->promotion_queue()->SetNewLimit(heap->new_space()->top()); - Object* result = allocation->ToObjectUnchecked(); - HeapObject* target = HeapObject::cast(result); + HeapObject* target = HeapObject::cast(allocation.ToObjectChecked()); if (alignment != kObjectAlignment) { target = EnsureDoubleAligned(heap, target, allocation_size); @@ -2323,11 +2321,11 @@ void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) { } -MaybeObject* Heap::AllocatePartialMap(InstanceType instance_type, - int instance_size) { +AllocationResult Heap::AllocatePartialMap(InstanceType instance_type, + int instance_size) { Object* result; - MaybeObject* maybe_result = AllocateRaw(Map::kSize, MAP_SPACE, MAP_SPACE); - if (!maybe_result->ToObject(&result)) return maybe_result; + AllocationResult allocation = AllocateRaw(Map::kSize, MAP_SPACE, MAP_SPACE); + if (!allocation.To(&result)) return allocation; // Map::cast cannot be used due to uninitialized map field. 
reinterpret_cast(result)->set_map(raw_unchecked_meta_map()); @@ -2347,15 +2345,15 @@ MaybeObject* Heap::AllocatePartialMap(InstanceType instance_type, } -MaybeObject* Heap::AllocateMap(InstanceType instance_type, - int instance_size, - ElementsKind elements_kind) { - Object* result; - MaybeObject* maybe_result = AllocateRaw(Map::kSize, MAP_SPACE, MAP_SPACE); - if (!maybe_result->To(&result)) return maybe_result; +AllocationResult Heap::AllocateMap(InstanceType instance_type, + int instance_size, + ElementsKind elements_kind) { + HeapObject* result; + AllocationResult allocation = AllocateRaw(Map::kSize, MAP_SPACE, MAP_SPACE); + if (!allocation.To(&result)) return allocation; - Map* map = reinterpret_cast(result); - map->set_map_no_write_barrier(meta_map()); + result->set_map_no_write_barrier(meta_map()); + Map* map = Map::cast(result); map->set_instance_type(instance_type); map->set_visitor_id( StaticVisitorBase::GetVisitorId(instance_type, instance_size)); @@ -2381,19 +2379,19 @@ MaybeObject* Heap::AllocateMap(InstanceType instance_type, } -MaybeObject* Heap::AllocateFillerObject(int size, - bool double_align, - AllocationSpace space) { - HeapObject* allocation; - { MaybeObject* maybe_allocation = AllocateRaw(size, space, space); - if (!maybe_allocation->To(&allocation)) return maybe_allocation; +AllocationResult Heap::AllocateFillerObject(int size, + bool double_align, + AllocationSpace space) { + HeapObject* obj; + { AllocationResult allocation = AllocateRaw(size, space, space); + if (!allocation.To(&obj)) return allocation; } #ifdef DEBUG - MemoryChunk* chunk = MemoryChunk::FromAddress(allocation->address()); + MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address()); ASSERT(chunk->owner()->identity() == space); #endif - CreateFillerObjectAt(allocation->address(), size); - return allocation; + CreateFillerObjectAt(obj->address(), size); + return obj; } @@ -2422,9 +2420,9 @@ const Heap::StructTable Heap::struct_table[] = { bool Heap::CreateInitialMaps() { - 
Object* obj; - { MaybeObject* maybe_obj = AllocatePartialMap(MAP_TYPE, Map::kSize); - if (!maybe_obj->ToObject(&obj)) return false; + HeapObject* obj; + { AllocationResult allocation = AllocatePartialMap(MAP_TYPE, Map::kSize); + if (!allocation.To(&obj)) return false; } // Map::cast cannot be used due to uninitialized map field. Map* new_meta_map = reinterpret_cast(obj); @@ -2434,7 +2432,7 @@ bool Heap::CreateInitialMaps() { { // Partial map allocation #define ALLOCATE_PARTIAL_MAP(instance_type, size, field_name) \ { Map* map; \ - if (!AllocatePartialMap((instance_type), (size))->To(&map)) return false;\ + if (!AllocatePartialMap((instance_type), (size)).To(&map)) return false; \ set_##field_name##_map(map); \ } @@ -2448,19 +2446,19 @@ bool Heap::CreateInitialMaps() { } // Allocate the empty array. - { MaybeObject* maybe_obj = AllocateEmptyFixedArray(); - if (!maybe_obj->ToObject(&obj)) return false; + { AllocationResult allocation = AllocateEmptyFixedArray(); + if (!allocation.To(&obj)) return false; } set_empty_fixed_array(FixedArray::cast(obj)); - { MaybeObject* maybe_obj = Allocate(null_map(), OLD_POINTER_SPACE); - if (!maybe_obj->ToObject(&obj)) return false; + { AllocationResult allocation = Allocate(null_map(), OLD_POINTER_SPACE); + if (!allocation.To(&obj)) return false; } set_null_value(Oddball::cast(obj)); Oddball::cast(obj)->set_kind(Oddball::kNull); - { MaybeObject* maybe_obj = Allocate(undefined_map(), OLD_POINTER_SPACE); - if (!maybe_obj->ToObject(&obj)) return false; + { AllocationResult allocation = Allocate(undefined_map(), OLD_POINTER_SPACE); + if (!allocation.To(&obj)) return false; } set_undefined_value(Oddball::cast(obj)); Oddball::cast(obj)->set_kind(Oddball::kUndefined); @@ -2470,14 +2468,14 @@ bool Heap::CreateInitialMaps() { set_exception(null_value()); // Allocate the empty descriptor array. 
- { MaybeObject* maybe_obj = AllocateEmptyFixedArray(); - if (!maybe_obj->ToObject(&obj)) return false; + { AllocationResult allocation = AllocateEmptyFixedArray(); + if (!allocation.To(&obj)) return false; } set_empty_descriptor_array(DescriptorArray::cast(obj)); // Allocate the constant pool array. - { MaybeObject* maybe_obj = AllocateEmptyConstantPoolArray(); - if (!maybe_obj->ToObject(&obj)) return false; + { AllocationResult allocation = AllocateEmptyConstantPoolArray(); + if (!allocation.To(&obj)) return false; } set_empty_constant_pool_array(ConstantPoolArray::cast(obj)); @@ -2528,7 +2526,7 @@ bool Heap::CreateInitialMaps() { { // Map allocation #define ALLOCATE_MAP(instance_type, size, field_name) \ { Map* map; \ - if (!AllocateMap((instance_type), size)->To(&map)) return false; \ + if (!AllocateMap((instance_type), size).To(&map)) return false; \ set_##field_name##_map(map); \ } @@ -2553,8 +2551,8 @@ bool Heap::CreateInitialMaps() { for (unsigned i = 0; i < ARRAY_SIZE(string_type_table); i++) { const StringTypeTable& entry = string_type_table[i]; - { MaybeObject* maybe_obj = AllocateMap(entry.type, entry.size); - if (!maybe_obj->ToObject(&obj)) return false; + { AllocationResult allocation = AllocateMap(entry.type, entry.size); + if (!allocation.To(&obj)) return false; } // Mark cons string maps as unstable, because their objects can change // maps during GC. 
@@ -2600,7 +2598,7 @@ bool Heap::CreateInitialMaps() { for (unsigned i = 0; i < ARRAY_SIZE(struct_table); i++) { const StructTable& entry = struct_table[i]; Map* map; - if (!AllocateMap(entry.type, entry.size)->To(&map)) + if (!AllocateMap(entry.type, entry.size).To(&map)) return false; roots_[entry.index] = map; } @@ -2634,13 +2632,13 @@ bool Heap::CreateInitialMaps() { { // Empty arrays { ByteArray* byte_array; - if (!AllocateByteArray(0, TENURED)->To(&byte_array)) return false; + if (!AllocateByteArray(0, TENURED).To(&byte_array)) return false; set_empty_byte_array(byte_array); } #define ALLOCATE_EMPTY_EXTERNAL_ARRAY(Type, type, TYPE, ctype, size) \ { ExternalArray* obj; \ - if (!AllocateEmptyExternalArray(kExternal##Type##Array)->To(&obj)) \ + if (!AllocateEmptyExternalArray(kExternal##Type##Array).To(&obj)) \ return false; \ set_empty_external_##type##_array(obj); \ } @@ -2650,7 +2648,7 @@ bool Heap::CreateInitialMaps() { #define ALLOCATE_EMPTY_FIXED_TYPED_ARRAY(Type, type, TYPE, ctype, size) \ { FixedTypedArrayBase* obj; \ - if (!AllocateEmptyFixedTypedArray(kExternal##Type##Array)->To(&obj)) \ + if (!AllocateEmptyFixedTypedArray(kExternal##Type##Array).To(&obj)) \ return false; \ set_empty_fixed_##type##_array(obj); \ } @@ -2663,7 +2661,8 @@ bool Heap::CreateInitialMaps() { } -MaybeObject* Heap::AllocateHeapNumber(double value, PretenureFlag pretenure) { +AllocationResult Heap::AllocateHeapNumber(double value, + PretenureFlag pretenure) { // Statically ensure that it is safe to allocate heap numbers in paged // spaces. 
int size = HeapNumber::kSize; @@ -2671,42 +2670,41 @@ MaybeObject* Heap::AllocateHeapNumber(double value, PretenureFlag pretenure) { AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); - Object* result; - { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE); - if (!maybe_result->ToObject(&result)) return maybe_result; + HeapObject* result; + { AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE); + if (!allocation.To(&result)) return allocation; } - HeapObject::cast(result)->set_map_no_write_barrier(heap_number_map()); + result->set_map_no_write_barrier(heap_number_map()); HeapNumber::cast(result)->set_value(value); return result; } -MaybeObject* Heap::AllocateCell(Object* value) { +AllocationResult Heap::AllocateCell(Object* value) { int size = Cell::kSize; STATIC_ASSERT(Cell::kSize <= Page::kMaxRegularHeapObjectSize); - Object* result; - { MaybeObject* maybe_result = AllocateRaw(size, CELL_SPACE, CELL_SPACE); - if (!maybe_result->ToObject(&result)) return maybe_result; + HeapObject* result; + { AllocationResult allocation = AllocateRaw(size, CELL_SPACE, CELL_SPACE); + if (!allocation.To(&result)) return allocation; } - HeapObject::cast(result)->set_map_no_write_barrier(cell_map()); + result->set_map_no_write_barrier(cell_map()); Cell::cast(result)->set_value(value); return result; } -MaybeObject* Heap::AllocatePropertyCell() { +AllocationResult Heap::AllocatePropertyCell() { int size = PropertyCell::kSize; STATIC_ASSERT(PropertyCell::kSize <= Page::kMaxRegularHeapObjectSize); - Object* result; - MaybeObject* maybe_result = + HeapObject* result; + AllocationResult allocation = AllocateRaw(size, PROPERTY_CELL_SPACE, PROPERTY_CELL_SPACE); - if (!maybe_result->ToObject(&result)) return maybe_result; + if (!allocation.To(&result)) return allocation; - HeapObject::cast(result)->set_map_no_write_barrier( - global_property_cell_map()); + result->set_map_no_write_barrier(global_property_cell_map()); PropertyCell* cell 
= PropertyCell::cast(result); cell->set_dependent_code(DependentCode::cast(empty_fixed_array()), SKIP_WRITE_BARRIER); @@ -3240,32 +3238,32 @@ FixedTypedArrayBase* Heap::EmptyFixedTypedArrayForMap(Map* map) { } -MaybeObject* Heap::AllocateForeign(Address address, PretenureFlag pretenure) { +AllocationResult Heap::AllocateForeign(Address address, + PretenureFlag pretenure) { // Statically ensure that it is safe to allocate foreigns in paged spaces. STATIC_ASSERT(Foreign::kSize <= Page::kMaxRegularHeapObjectSize); AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE; Foreign* result; - MaybeObject* maybe_result = Allocate(foreign_map(), space); - if (!maybe_result->To(&result)) return maybe_result; + AllocationResult allocation = Allocate(foreign_map(), space); + if (!allocation.To(&result)) return allocation; result->set_foreign_address(address); return result; } -MaybeObject* Heap::AllocateByteArray(int length, PretenureFlag pretenure) { +AllocationResult Heap::AllocateByteArray(int length, PretenureFlag pretenure) { if (length < 0 || length > ByteArray::kMaxLength) { v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true); } int size = ByteArray::SizeFor(length); AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); - Object* result; - { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE); - if (!maybe_result->ToObject(&result)) return maybe_result; + HeapObject* result; + { AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE); + if (!allocation.To(&result)) return allocation; } - reinterpret_cast(result)->set_map_no_write_barrier( - byte_array_map()); - reinterpret_cast(result)->set_length(length); + result->set_map_no_write_barrier(byte_array_map()); + ByteArray::cast(result)->set_length(length); return result; } @@ -3318,23 +3316,21 @@ void Heap::AdjustLiveBytes(Address address, int by, InvocationMode mode) { } -MaybeObject* Heap::AllocateExternalArray(int length, 
+AllocationResult Heap::AllocateExternalArray(int length, ExternalArrayType array_type, void* external_pointer, PretenureFlag pretenure) { int size = ExternalArray::kAlignedSize; AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); - Object* result; - { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE); - if (!maybe_result->ToObject(&result)) return maybe_result; + HeapObject* result; + { AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE); + if (!allocation.To(&result)) return allocation; } - reinterpret_cast(result)->set_map_no_write_barrier( + result->set_map_no_write_barrier( MapForExternalArrayType(array_type)); - reinterpret_cast(result)->set_length(length); - reinterpret_cast(result)->set_external_pointer( - external_pointer); - + ExternalArray::cast(result)->set_length(length); + ExternalArray::cast(result)->set_external_pointer(external_pointer); return result; } @@ -3359,9 +3355,9 @@ static void ForFixedTypedArray(ExternalArrayType array_type, } -MaybeObject* Heap::AllocateFixedTypedArray(int length, - ExternalArrayType array_type, - PretenureFlag pretenure) { +AllocationResult Heap::AllocateFixedTypedArray(int length, + ExternalArrayType array_type, + PretenureFlag pretenure) { int element_size; ElementsKind elements_kind; ForFixedTypedArray(array_type, &element_size, &elements_kind); @@ -3375,36 +3371,35 @@ MaybeObject* Heap::AllocateFixedTypedArray(int length, AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); HeapObject* object; - MaybeObject* maybe_object = AllocateRaw(size, space, OLD_DATA_SPACE); - if (!maybe_object->To(&object)) return maybe_object; + AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE); + if (!allocation.To(&object)) return allocation; if (array_type == kExternalFloat64Array) { object = EnsureDoubleAligned(this, object, size); } - FixedTypedArrayBase* elements = - reinterpret_cast(object); - 
elements->set_map(MapForFixedTypedArray(array_type)); + object->set_map(MapForFixedTypedArray(array_type)); + FixedTypedArrayBase* elements = FixedTypedArrayBase::cast(object); elements->set_length(length); memset(elements->DataPtr(), 0, elements->DataSize()); return elements; } -MaybeObject* Heap::AllocateCode(int object_size, +AllocationResult Heap::AllocateCode(int object_size, bool immovable) { ASSERT(IsAligned(static_cast(object_size), kCodeAlignment)); - MaybeObject* maybe_result; + AllocationResult allocation; // Large code objects and code objects which should stay at a fixed address // are allocated in large object space. HeapObject* result; bool force_lo_space = object_size > code_space()->AreaSize(); if (force_lo_space) { - maybe_result = lo_space_->AllocateRaw(object_size, EXECUTABLE); + allocation = lo_space_->AllocateRaw(object_size, EXECUTABLE); } else { - maybe_result = AllocateRaw(object_size, CODE_SPACE, CODE_SPACE); + allocation = AllocateRaw(object_size, CODE_SPACE, CODE_SPACE); } - if (!maybe_result->To(&result)) return maybe_result; + if (!allocation.To(&result)) return allocation; if (immovable && !force_lo_space && // Objects on the first page of each space are never moved. @@ -3412,8 +3407,8 @@ MaybeObject* Heap::AllocateCode(int object_size, // Discard the first code allocation, which was on a page where it could be // moved. 
CreateFillerObjectAt(result->address(), object_size); - maybe_result = lo_space_->AllocateRaw(object_size, EXECUTABLE); - if (!maybe_result->To(&result)) return maybe_result; + allocation = lo_space_->AllocateRaw(object_size, EXECUTABLE); + if (!allocation.To(&result)) return allocation; } result->set_map_no_write_barrier(code_map()); @@ -3426,15 +3421,15 @@ MaybeObject* Heap::AllocateCode(int object_size, } -MaybeObject* Heap::CopyCode(Code* code) { - MaybeObject* maybe_result; - Object* new_constant_pool; +AllocationResult Heap::CopyCode(Code* code) { + AllocationResult allocation; + HeapObject* new_constant_pool; if (FLAG_enable_ool_constant_pool && code->constant_pool() != empty_constant_pool_array()) { // Copy the constant pool, since edits to the copied code may modify // the constant pool. - maybe_result = CopyConstantPoolArray(code->constant_pool()); - if (!maybe_result->ToObject(&new_constant_pool)) return maybe_result; + allocation = CopyConstantPoolArray(code->constant_pool()); + if (!allocation.To(&new_constant_pool)) return allocation; } else { new_constant_pool = empty_constant_pool_array(); } @@ -3442,17 +3437,17 @@ MaybeObject* Heap::CopyCode(Code* code) { // Allocate an object the same size as the code object. int obj_size = code->Size(); if (obj_size > code_space()->AreaSize()) { - maybe_result = lo_space_->AllocateRaw(obj_size, EXECUTABLE); + allocation = lo_space_->AllocateRaw(obj_size, EXECUTABLE); } else { - maybe_result = AllocateRaw(obj_size, CODE_SPACE, CODE_SPACE); + allocation = AllocateRaw(obj_size, CODE_SPACE, CODE_SPACE); } - Object* result; - if (!maybe_result->ToObject(&result)) return maybe_result; + HeapObject* result; + if (!allocation.To(&result)) return allocation; // Copy code object. 
Address old_addr = code->address(); - Address new_addr = reinterpret_cast(result)->address(); + Address new_addr = result->address(); CopyBlock(new_addr, old_addr, obj_size); Code* new_code = Code::cast(result); @@ -3467,25 +3462,22 @@ MaybeObject* Heap::CopyCode(Code* code) { } -MaybeObject* Heap::CopyCode(Code* code, Vector reloc_info) { +AllocationResult Heap::CopyCode(Code* code, Vector reloc_info) { // Allocate ByteArray and ConstantPoolArray before the Code object, so that we // do not risk leaving uninitialized Code object (and breaking the heap). - Object* reloc_info_array; - { MaybeObject* maybe_reloc_info_array = + ByteArray* reloc_info_array; + { AllocationResult allocation = AllocateByteArray(reloc_info.length(), TENURED); - if (!maybe_reloc_info_array->ToObject(&reloc_info_array)) { - return maybe_reloc_info_array; - } + if (!allocation.To(&reloc_info_array)) return allocation; } - Object* new_constant_pool; + HeapObject* new_constant_pool; if (FLAG_enable_ool_constant_pool && code->constant_pool() != empty_constant_pool_array()) { // Copy the constant pool, since edits to the copied code may modify // the constant pool. 
- MaybeObject* maybe_constant_pool = + AllocationResult allocation = CopyConstantPoolArray(code->constant_pool()); - if (!maybe_constant_pool->ToObject(&new_constant_pool)) - return maybe_constant_pool; + if (!allocation.To(&new_constant_pool)) return allocation; } else { new_constant_pool = empty_constant_pool_array(); } @@ -3499,24 +3491,24 @@ MaybeObject* Heap::CopyCode(Code* code, Vector reloc_info) { size_t relocation_offset = static_cast(code->instruction_end() - old_addr); - MaybeObject* maybe_result; + AllocationResult allocation; if (new_obj_size > code_space()->AreaSize()) { - maybe_result = lo_space_->AllocateRaw(new_obj_size, EXECUTABLE); + allocation = lo_space_->AllocateRaw(new_obj_size, EXECUTABLE); } else { - maybe_result = AllocateRaw(new_obj_size, CODE_SPACE, CODE_SPACE); + allocation = AllocateRaw(new_obj_size, CODE_SPACE, CODE_SPACE); } - Object* result; - if (!maybe_result->ToObject(&result)) return maybe_result; + HeapObject* result; + if (!allocation.To(&result)) return allocation; // Copy code object. - Address new_addr = reinterpret_cast(result)->address(); + Address new_addr = result->address(); // Copy header and instructions. CopyBytes(new_addr, old_addr, relocation_offset); Code* new_code = Code::cast(result); - new_code->set_relocation_info(ByteArray::cast(reloc_info_array)); + new_code->set_relocation_info(reloc_info_array); // Update constant pool. 
new_code->set_constant_pool(new_constant_pool); @@ -3532,9 +3524,7 @@ MaybeObject* Heap::CopyCode(Code* code, Vector reloc_info) { new_code->Relocate(new_addr - old_addr); #ifdef VERIFY_HEAP - if (FLAG_verify_heap) { - code->Verify(); - } + if (FLAG_verify_heap) code->ObjectVerify(); #endif return new_code; } @@ -3551,7 +3541,7 @@ void Heap::InitializeAllocationMemento(AllocationMemento* memento, } -MaybeObject* Heap::Allocate(Map* map, AllocationSpace space, +AllocationResult Heap::Allocate(Map* map, AllocationSpace space, AllocationSite* allocation_site) { ASSERT(gc_state_ == NOT_IN_GC); ASSERT(map->instance_type() != MAP_TYPE); @@ -3563,11 +3553,11 @@ MaybeObject* Heap::Allocate(Map* map, AllocationSpace space, if (allocation_site != NULL) { size += AllocationMemento::kSize; } - Object* result; - MaybeObject* maybe_result = AllocateRaw(size, space, retry_space); - if (!maybe_result->ToObject(&result)) return maybe_result; + HeapObject* result; + AllocationResult allocation = AllocateRaw(size, space, retry_space); + if (!allocation.To(&result)) return allocation; // No need for write barrier since object is white and map is in old space. - HeapObject::cast(result)->set_map_no_write_barrier(map); + result->set_map_no_write_barrier(map); if (allocation_site != NULL) { AllocationMemento* alloc_memento = reinterpret_cast( reinterpret_cast
(result) + map->instance_size()); @@ -3577,7 +3567,7 @@ MaybeObject* Heap::Allocate(Map* map, AllocationSpace space, } -MaybeObject* Heap::AllocateArgumentsObject(Object* callee, int length) { +AllocationResult Heap::AllocateArgumentsObject(Object* callee, int length) { // To get fast allocation and map sharing for arguments objects we // allocate them based on an arguments boilerplate. @@ -3601,34 +3591,31 @@ MaybeObject* Heap::AllocateArgumentsObject(Object* callee, int length) { ASSERT(arguments_object_size == boilerplate->map()->instance_size()); // Do the allocation. - Object* result; - { MaybeObject* maybe_result = + HeapObject* result; + { AllocationResult allocation = AllocateRaw(arguments_object_size, NEW_SPACE, OLD_POINTER_SPACE); - if (!maybe_result->ToObject(&result)) return maybe_result; + if (!allocation.To(&result)) return allocation; } // Copy the content. The arguments boilerplate doesn't have any // fields that point to new space so it's safe to skip the write // barrier here. - CopyBlock(HeapObject::cast(result)->address(), - boilerplate->address(), - JSObject::kHeaderSize); + CopyBlock(result->address(), boilerplate->address(), JSObject::kHeaderSize); // Set the length property. - JSObject::cast(result)->InObjectPropertyAtPut(kArgumentsLengthIndex, - Smi::FromInt(length), - SKIP_WRITE_BARRIER); + JSObject* js_obj = JSObject::cast(result); + js_obj->InObjectPropertyAtPut( + kArgumentsLengthIndex, Smi::FromInt(length), SKIP_WRITE_BARRIER); // Set the callee property for sloppy mode arguments object only. 
if (!strict_mode_callee) { - JSObject::cast(result)->InObjectPropertyAtPut(kArgumentsCalleeIndex, - callee); + js_obj->InObjectPropertyAtPut(kArgumentsCalleeIndex, callee); } // Check the state of the object - ASSERT(JSObject::cast(result)->HasFastProperties()); - ASSERT(JSObject::cast(result)->HasFastObjectElements()); + ASSERT(js_obj->HasFastProperties()); + ASSERT(js_obj->HasFastObjectElements()); - return result; + return js_obj; } @@ -3664,7 +3651,7 @@ void Heap::InitializeJSObjectFromMap(JSObject* obj, } -MaybeObject* Heap::AllocateJSObjectFromMap( +AllocationResult Heap::AllocateJSObjectFromMap( Map* map, PretenureFlag pretenure, bool allocate_properties, @@ -3683,8 +3670,8 @@ MaybeObject* Heap::AllocateJSObjectFromMap( if (allocate_properties) { int prop_size = map->InitialPropertiesLength(); ASSERT(prop_size >= 0); - { MaybeObject* maybe_properties = AllocateFixedArray(prop_size, pretenure); - if (!maybe_properties->To(&properties)) return maybe_properties; + { AllocationResult allocation = AllocateFixedArray(prop_size, pretenure); + if (!allocation.To(&properties)) return allocation; } } else { properties = empty_fixed_array(); @@ -3693,39 +3680,37 @@ MaybeObject* Heap::AllocateJSObjectFromMap( // Allocate the JSObject. int size = map->instance_size(); AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, pretenure); - Object* obj; - MaybeObject* maybe_obj = Allocate(map, space, allocation_site); - if (!maybe_obj->To(&obj)) return maybe_obj; + JSObject* js_obj; + AllocationResult allocation = Allocate(map, space, allocation_site); + if (!allocation.To(&js_obj)) return allocation; // Initialize the JSObject. 
- InitializeJSObjectFromMap(JSObject::cast(obj), properties, map); - ASSERT(JSObject::cast(obj)->HasFastElements() || - JSObject::cast(obj)->HasExternalArrayElements() || - JSObject::cast(obj)->HasFixedTypedArrayElements()); - return obj; + InitializeJSObjectFromMap(js_obj, properties, map); + ASSERT(js_obj->HasFastElements() || + js_obj->HasExternalArrayElements() || + js_obj->HasFixedTypedArrayElements()); + return js_obj; } -MaybeObject* Heap::AllocateJSObject(JSFunction* constructor, - PretenureFlag pretenure, - AllocationSite* allocation_site) { +AllocationResult Heap::AllocateJSObject(JSFunction* constructor, + PretenureFlag pretenure, + AllocationSite* allocation_site) { ASSERT(constructor->has_initial_map()); // Allocate the object based on the constructors initial map. - MaybeObject* result = AllocateJSObjectFromMap(constructor->initial_map(), - pretenure, - true, - allocation_site); + AllocationResult allocation = AllocateJSObjectFromMap( + constructor->initial_map(), pretenure, true, allocation_site); #ifdef DEBUG // Make sure result is NOT a global object if valid. - Object* non_failure; - ASSERT(!result->ToObject(&non_failure) || !non_failure->IsGlobalObject()); + HeapObject* obj; + ASSERT(!allocation.To(&obj) || !obj->IsGlobalObject()); #endif - return result; + return allocation; } -MaybeObject* Heap::CopyJSObject(JSObject* source, AllocationSite* site) { +AllocationResult Heap::CopyJSObject(JSObject* source, AllocationSite* site) { // Never used to copy functions. If functions need to be copied we // have to be careful to clear the literals array. SLOW_ASSERT(!source->IsJSFunction()); @@ -3733,7 +3718,7 @@ MaybeObject* Heap::CopyJSObject(JSObject* source, AllocationSite* site) { // Make the clone. 
Map* map = source->map(); int object_size = map->instance_size(); - Object* clone; + HeapObject* clone; ASSERT(site == NULL || AllocationSite::CanTrack(map->instance_type())); @@ -3742,11 +3727,11 @@ MaybeObject* Heap::CopyJSObject(JSObject* source, AllocationSite* site) { // If we're forced to always allocate, we use the general allocation // functions which may leave us with an object in old space. if (always_allocate()) { - { MaybeObject* maybe_clone = + { AllocationResult allocation = AllocateRaw(object_size, NEW_SPACE, OLD_POINTER_SPACE); - if (!maybe_clone->ToObject(&clone)) return maybe_clone; + if (!allocation.To(&clone)) return allocation; } - Address clone_address = HeapObject::cast(clone)->address(); + Address clone_address = clone->address(); CopyBlock(clone_address, source->address(), object_size); @@ -3760,14 +3745,14 @@ MaybeObject* Heap::CopyJSObject(JSObject* source, AllocationSite* site) { { int adjusted_object_size = site != NULL ? object_size + AllocationMemento::kSize : object_size; - MaybeObject* maybe_clone = + AllocationResult allocation = AllocateRaw(adjusted_object_size, NEW_SPACE, NEW_SPACE); - if (!maybe_clone->ToObject(&clone)) return maybe_clone; + if (!allocation.To(&clone)) return allocation; } SLOW_ASSERT(InNewSpace(clone)); // Since we know the clone is allocated in new space, we can copy // the contents without worrying about updating the write barrier. - CopyBlock(HeapObject::cast(clone)->address(), + CopyBlock(clone->address(), source->address(), object_size); @@ -3784,35 +3769,35 @@ MaybeObject* Heap::CopyJSObject(JSObject* source, AllocationSite* site) { FixedArray* properties = FixedArray::cast(source->properties()); // Update elements if necessary. 
if (elements->length() > 0) { - Object* elem; - { MaybeObject* maybe_elem; + FixedArrayBase* elem; + { AllocationResult allocation; if (elements->map() == fixed_cow_array_map()) { - maybe_elem = FixedArray::cast(elements); + allocation = FixedArray::cast(elements); } else if (source->HasFastDoubleElements()) { - maybe_elem = CopyFixedDoubleArray(FixedDoubleArray::cast(elements)); + allocation = CopyFixedDoubleArray(FixedDoubleArray::cast(elements)); } else { - maybe_elem = CopyFixedArray(FixedArray::cast(elements)); + allocation = CopyFixedArray(FixedArray::cast(elements)); } - if (!maybe_elem->ToObject(&elem)) return maybe_elem; + if (!allocation.To(&elem)) return allocation; } - JSObject::cast(clone)->set_elements(FixedArrayBase::cast(elem), wb_mode); + JSObject::cast(clone)->set_elements(elem, wb_mode); } // Update properties if necessary. if (properties->length() > 0) { - Object* prop; - { MaybeObject* maybe_prop = CopyFixedArray(properties); - if (!maybe_prop->ToObject(&prop)) return maybe_prop; + FixedArray* prop; + { AllocationResult allocation = CopyFixedArray(properties); + if (!allocation.To(&prop)) return allocation; } - JSObject::cast(clone)->set_properties(FixedArray::cast(prop), wb_mode); + JSObject::cast(clone)->set_properties(prop, wb_mode); } // Return the new clone. return clone; } -MaybeObject* Heap::AllocateStringFromUtf8Slow(Vector string, - int non_ascii_start, - PretenureFlag pretenure) { +AllocationResult Heap::AllocateStringFromUtf8Slow(Vector string, + int non_ascii_start, + PretenureFlag pretenure) { // Continue counting the number of characters in the UTF-8 string, starting // from the first non-ascii character or word. Access @@ -3822,18 +3807,16 @@ MaybeObject* Heap::AllocateStringFromUtf8Slow(Vector string, int utf16_length = decoder->Utf16Length(); ASSERT(utf16_length > 0); // Allocate string. 
- Object* result; + HeapObject* result; { int chars = non_ascii_start + utf16_length; - MaybeObject* maybe_result = AllocateRawTwoByteString(chars, pretenure); - if (!maybe_result->ToObject(&result) || result->IsException()) { - return maybe_result; + AllocationResult allocation = AllocateRawTwoByteString(chars, pretenure); + if (!allocation.To(&result) || result->IsException()) { + return allocation; } } - // Convert and copy the characters into the new object. - SeqTwoByteString* twobyte = SeqTwoByteString::cast(result); // Copy ascii portion. - uint16_t* data = twobyte->GetChars(); + uint16_t* data = SeqTwoByteString::cast(result)->GetChars(); if (non_ascii_start != 0) { const char* ascii_data = string.start(); for (int i = 0; i < non_ascii_start; i++) { @@ -3846,23 +3829,23 @@ MaybeObject* Heap::AllocateStringFromUtf8Slow(Vector string, } -MaybeObject* Heap::AllocateStringFromTwoByte(Vector string, - PretenureFlag pretenure) { +AllocationResult Heap::AllocateStringFromTwoByte(Vector string, + PretenureFlag pretenure) { // Check if the string is an ASCII string. - Object* result; + HeapObject* result; int length = string.length(); const uc16* start = string.start(); if (String::IsOneByte(start, length)) { - MaybeObject* maybe_result = AllocateRawOneByteString(length, pretenure); - if (!maybe_result->ToObject(&result) || result->IsException()) { - return maybe_result; + AllocationResult allocation = AllocateRawOneByteString(length, pretenure); + if (!allocation.To(&result) || result->IsException()) { + return allocation; } CopyChars(SeqOneByteString::cast(result)->GetChars(), start, length); } else { // It's not a one byte string. 
- MaybeObject* maybe_result = AllocateRawTwoByteString(length, pretenure); - if (!maybe_result->ToObject(&result) || result->IsException()) { - return maybe_result; + AllocationResult allocation = AllocateRawTwoByteString(length, pretenure); + if (!allocation.To(&result) || result->IsException()) { + return allocation; } CopyChars(SeqTwoByteString::cast(result)->GetChars(), start, length); } @@ -3919,7 +3902,7 @@ static inline void WriteTwoByteData(String* s, uint16_t* chars, int len) { template -MaybeObject* Heap::AllocateInternalizedStringImpl( +AllocationResult Heap::AllocateInternalizedStringImpl( T t, int chars, uint32_t hash_field) { ASSERT(chars >= 0); // Compute map and object size. @@ -3939,12 +3922,12 @@ MaybeObject* Heap::AllocateInternalizedStringImpl( AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED); // Allocate string. - Object* result; - { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE); - if (!maybe_result->ToObject(&result)) return maybe_result; + HeapObject* result; + { AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE); + if (!allocation.To(&result)) return allocation; } - reinterpret_cast(result)->set_map_no_write_barrier(map); + result->set_map_no_write_barrier(map); // Set length and hash fields of the allocated string. String* answer = String::cast(result); answer->set_length(chars); @@ -3963,17 +3946,18 @@ MaybeObject* Heap::AllocateInternalizedStringImpl( // Need explicit instantiations. 
template -MaybeObject* Heap::AllocateInternalizedStringImpl(String*, int, uint32_t); +AllocationResult Heap::AllocateInternalizedStringImpl( + String*, int, uint32_t); template -MaybeObject* Heap::AllocateInternalizedStringImpl( +AllocationResult Heap::AllocateInternalizedStringImpl( String*, int, uint32_t); template -MaybeObject* Heap::AllocateInternalizedStringImpl( +AllocationResult Heap::AllocateInternalizedStringImpl( Vector, int, uint32_t); -MaybeObject* Heap::AllocateRawOneByteString(int length, - PretenureFlag pretenure) { +AllocationResult Heap::AllocateRawOneByteString(int length, + PretenureFlag pretenure) { if (length < 0 || length > String::kMaxLength) { return isolate()->ThrowInvalidStringLength(); } @@ -3981,13 +3965,13 @@ MaybeObject* Heap::AllocateRawOneByteString(int length, ASSERT(size <= SeqOneByteString::kMaxSize); AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); - Object* result; - { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE); - if (!maybe_result->ToObject(&result)) return maybe_result; + HeapObject* result; + { AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE); + if (!allocation.To(&result)) return allocation; } // Partially initialize the object. 
- HeapObject::cast(result)->set_map_no_write_barrier(ascii_string_map()); + result->set_map_no_write_barrier(ascii_string_map()); String::cast(result)->set_length(length); String::cast(result)->set_hash_field(String::kEmptyHashField); ASSERT_EQ(size, HeapObject::cast(result)->Size()); @@ -3996,8 +3980,8 @@ MaybeObject* Heap::AllocateRawOneByteString(int length, } -MaybeObject* Heap::AllocateRawTwoByteString(int length, - PretenureFlag pretenure) { +AllocationResult Heap::AllocateRawTwoByteString(int length, + PretenureFlag pretenure) { if (length < 0 || length > String::kMaxLength) { return isolate()->ThrowInvalidStringLength(); } @@ -4005,13 +3989,13 @@ MaybeObject* Heap::AllocateRawTwoByteString(int length, ASSERT(size <= SeqTwoByteString::kMaxSize); AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); - Object* result; - { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE); - if (!maybe_result->ToObject(&result)) return maybe_result; + HeapObject* result; + { AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE); + if (!allocation.To(&result)) return allocation; } // Partially initialize the object. - HeapObject::cast(result)->set_map_no_write_barrier(string_map()); + result->set_map_no_write_barrier(string_map()); String::cast(result)->set_length(length); String::cast(result)->set_hash_field(String::kEmptyHashField); ASSERT_EQ(size, HeapObject::cast(result)->Size()); @@ -4019,37 +4003,37 @@ MaybeObject* Heap::AllocateRawTwoByteString(int length, } -MaybeObject* Heap::AllocateEmptyFixedArray() { +AllocationResult Heap::AllocateEmptyFixedArray() { int size = FixedArray::SizeFor(0); - Object* result; - { MaybeObject* maybe_result = + HeapObject* result; + { AllocationResult allocation = AllocateRaw(size, OLD_DATA_SPACE, OLD_DATA_SPACE); - if (!maybe_result->ToObject(&result)) return maybe_result; + if (!allocation.To(&result)) return allocation; } // Initialize the object. 
- reinterpret_cast(result)->set_map_no_write_barrier( - fixed_array_map()); - reinterpret_cast(result)->set_length(0); + result->set_map_no_write_barrier(fixed_array_map()); + FixedArray::cast(result)->set_length(0); return result; } -MaybeObject* Heap::AllocateEmptyExternalArray(ExternalArrayType array_type) { +AllocationResult Heap::AllocateEmptyExternalArray( + ExternalArrayType array_type) { return AllocateExternalArray(0, array_type, NULL, TENURED); } -MaybeObject* Heap::CopyAndTenureFixedCOWArray(FixedArray* src) { +AllocationResult Heap::CopyAndTenureFixedCOWArray(FixedArray* src) { if (!InNewSpace(src)) { return src; } int len = src->length(); - Object* obj; - { MaybeObject* maybe_obj = AllocateRawFixedArray(len, TENURED); - if (!maybe_obj->ToObject(&obj)) return maybe_obj; + HeapObject* obj; + { AllocationResult allocation = AllocateRawFixedArray(len, TENURED); + if (!allocation.To(&obj)) return allocation; } - HeapObject::cast(obj)->set_map_no_write_barrier(fixed_array_map()); + obj->set_map_no_write_barrier(fixed_array_map()); FixedArray* result = FixedArray::cast(obj); result->set_length(len); @@ -4066,26 +4050,26 @@ MaybeObject* Heap::CopyAndTenureFixedCOWArray(FixedArray* src) { } -MaybeObject* Heap::AllocateEmptyFixedTypedArray(ExternalArrayType array_type) { +AllocationResult Heap::AllocateEmptyFixedTypedArray( + ExternalArrayType array_type) { return AllocateFixedTypedArray(0, array_type, TENURED); } -MaybeObject* Heap::CopyFixedArrayWithMap(FixedArray* src, Map* map) { +AllocationResult Heap::CopyFixedArrayWithMap(FixedArray* src, Map* map) { int len = src->length(); - Object* obj; - { MaybeObject* maybe_obj = AllocateRawFixedArray(len, NOT_TENURED); - if (!maybe_obj->ToObject(&obj)) return maybe_obj; + HeapObject* obj; + { AllocationResult allocation = AllocateRawFixedArray(len, NOT_TENURED); + if (!allocation.To(&obj)) return allocation; } if (InNewSpace(obj)) { - HeapObject* dst = HeapObject::cast(obj); - dst->set_map_no_write_barrier(map); - 
CopyBlock(dst->address() + kPointerSize, + obj->set_map_no_write_barrier(map); + CopyBlock(obj->address() + kPointerSize, src->address() + kPointerSize, FixedArray::SizeFor(len) - kPointerSize); return obj; } - HeapObject::cast(obj)->set_map_no_write_barrier(map); + obj->set_map_no_write_barrier(map); FixedArray* result = FixedArray::cast(obj); result->set_length(len); @@ -4097,48 +4081,47 @@ MaybeObject* Heap::CopyFixedArrayWithMap(FixedArray* src, Map* map) { } -MaybeObject* Heap::CopyFixedDoubleArrayWithMap(FixedDoubleArray* src, - Map* map) { +AllocationResult Heap::CopyFixedDoubleArrayWithMap(FixedDoubleArray* src, + Map* map) { int len = src->length(); - Object* obj; - { MaybeObject* maybe_obj = AllocateRawFixedDoubleArray(len, NOT_TENURED); - if (!maybe_obj->ToObject(&obj)) return maybe_obj; + HeapObject* obj; + { AllocationResult allocation = AllocateRawFixedDoubleArray(len, NOT_TENURED); + if (!allocation.To(&obj)) return allocation; } - HeapObject* dst = HeapObject::cast(obj); - dst->set_map_no_write_barrier(map); + obj->set_map_no_write_barrier(map); CopyBlock( - dst->address() + FixedDoubleArray::kLengthOffset, + obj->address() + FixedDoubleArray::kLengthOffset, src->address() + FixedDoubleArray::kLengthOffset, FixedDoubleArray::SizeFor(len) - FixedDoubleArray::kLengthOffset); return obj; } -MaybeObject* Heap::CopyConstantPoolArrayWithMap(ConstantPoolArray* src, - Map* map) { +AllocationResult Heap::CopyConstantPoolArrayWithMap(ConstantPoolArray* src, + Map* map) { int int64_entries = src->count_of_int64_entries(); int code_ptr_entries = src->count_of_code_ptr_entries(); int heap_ptr_entries = src->count_of_heap_ptr_entries(); int int32_entries = src->count_of_int32_entries(); - Object* obj; - { MaybeObject* maybe_obj = + HeapObject* obj; + { AllocationResult allocation = AllocateConstantPoolArray(int64_entries, code_ptr_entries, heap_ptr_entries, int32_entries); - if (!maybe_obj->ToObject(&obj)) return maybe_obj; + if (!allocation.To(&obj)) return 
allocation; } - HeapObject* dst = HeapObject::cast(obj); - dst->set_map_no_write_barrier(map); + obj->set_map_no_write_barrier(map); int size = ConstantPoolArray::SizeFor( int64_entries, code_ptr_entries, heap_ptr_entries, int32_entries); CopyBlock( - dst->address() + ConstantPoolArray::kLengthOffset, + obj->address() + ConstantPoolArray::kLengthOffset, src->address() + ConstantPoolArray::kLengthOffset, size - ConstantPoolArray::kLengthOffset); return obj; } -MaybeObject* Heap::AllocateRawFixedArray(int length, PretenureFlag pretenure) { +AllocationResult Heap::AllocateRawFixedArray(int length, + PretenureFlag pretenure) { if (length < 0 || length > FixedArray::kMaxLength) { v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true); } @@ -4149,20 +4132,20 @@ MaybeObject* Heap::AllocateRawFixedArray(int length, PretenureFlag pretenure) { } -MaybeObject* Heap::AllocateFixedArrayWithFiller(int length, - PretenureFlag pretenure, - Object* filler) { +AllocationResult Heap::AllocateFixedArrayWithFiller(int length, + PretenureFlag pretenure, + Object* filler) { ASSERT(length >= 0); ASSERT(empty_fixed_array()->IsFixedArray()); if (length == 0) return empty_fixed_array(); ASSERT(!InNewSpace(filler)); - Object* result; - { MaybeObject* maybe_result = AllocateRawFixedArray(length, pretenure); - if (!maybe_result->ToObject(&result)) return maybe_result; + HeapObject* result; + { AllocationResult allocation = AllocateRawFixedArray(length, pretenure); + if (!allocation.To(&result)) return allocation; } - HeapObject::cast(result)->set_map_no_write_barrier(fixed_array_map()); + result->set_map_no_write_barrier(fixed_array_map()); FixedArray* array = FixedArray::cast(result); array->set_length(length); MemsetPointer(array->data_start(), filler, length); @@ -4170,45 +4153,42 @@ MaybeObject* Heap::AllocateFixedArrayWithFiller(int length, } -MaybeObject* Heap::AllocateFixedArray(int length, PretenureFlag pretenure) { +AllocationResult Heap::AllocateFixedArray(int 
length, PretenureFlag pretenure) { return AllocateFixedArrayWithFiller(length, pretenure, undefined_value()); } -MaybeObject* Heap::AllocateUninitializedFixedArray(int length) { +AllocationResult Heap::AllocateUninitializedFixedArray(int length) { if (length == 0) return empty_fixed_array(); - Object* obj; - { MaybeObject* maybe_obj = AllocateRawFixedArray(length, NOT_TENURED); - if (!maybe_obj->ToObject(&obj)) return maybe_obj; + HeapObject* obj; + { AllocationResult allocation = AllocateRawFixedArray(length, NOT_TENURED); + if (!allocation.To(&obj)) return allocation; } - reinterpret_cast(obj)->set_map_no_write_barrier( - fixed_array_map()); + obj->set_map_no_write_barrier(fixed_array_map()); FixedArray::cast(obj)->set_length(length); return obj; } -MaybeObject* Heap::AllocateUninitializedFixedDoubleArray( +AllocationResult Heap::AllocateUninitializedFixedDoubleArray( int length, PretenureFlag pretenure) { if (length == 0) return empty_fixed_array(); - Object* elements_object; - MaybeObject* maybe_obj = AllocateRawFixedDoubleArray(length, pretenure); - if (!maybe_obj->ToObject(&elements_object)) return maybe_obj; - FixedDoubleArray* elements = - reinterpret_cast(elements_object); + HeapObject* elements; + AllocationResult allocation = AllocateRawFixedDoubleArray(length, pretenure); + if (!allocation.To(&elements)) return allocation; elements->set_map_no_write_barrier(fixed_double_array_map()); - elements->set_length(length); + FixedDoubleArray::cast(elements)->set_length(length); return elements; } -MaybeObject* Heap::AllocateRawFixedDoubleArray(int length, - PretenureFlag pretenure) { +AllocationResult Heap::AllocateRawFixedDoubleArray(int length, + PretenureFlag pretenure) { if (length < 0 || length > FixedDoubleArray::kMaxLength) { v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true); } @@ -4219,18 +4199,18 @@ MaybeObject* Heap::AllocateRawFixedDoubleArray(int length, AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure); 
HeapObject* object; - { MaybeObject* maybe_object = AllocateRaw(size, space, OLD_DATA_SPACE); - if (!maybe_object->To(&object)) return maybe_object; + { AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE); + if (!allocation.To(&object)) return allocation; } return EnsureDoubleAligned(this, object, size); } -MaybeObject* Heap::AllocateConstantPoolArray(int number_of_int64_entries, - int number_of_code_ptr_entries, - int number_of_heap_ptr_entries, - int number_of_int32_entries) { +AllocationResult Heap::AllocateConstantPoolArray(int number_of_int64_entries, + int number_of_code_ptr_entries, + int number_of_heap_ptr_entries, + int number_of_int32_entries) { CHECK(number_of_int64_entries >= 0 && number_of_int64_entries <= ConstantPoolArray::kMaxEntriesPerType && number_of_code_ptr_entries >= 0 && @@ -4249,14 +4229,13 @@ MaybeObject* Heap::AllocateConstantPoolArray(int number_of_int64_entries, AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, TENURED); HeapObject* object; - { MaybeObject* maybe_object = AllocateRaw(size, space, OLD_POINTER_SPACE); - if (!maybe_object->To(&object)) return maybe_object; + { AllocationResult allocation = AllocateRaw(size, space, OLD_POINTER_SPACE); + if (!allocation.To(&object)) return allocation; } object = EnsureDoubleAligned(this, object, size); - HeapObject::cast(object)->set_map_no_write_barrier(constant_pool_array_map()); + object->set_map_no_write_barrier(constant_pool_array_map()); - ConstantPoolArray* constant_pool = - reinterpret_cast(object); + ConstantPoolArray* constant_pool = ConstantPoolArray::cast(object); constant_pool->Init(number_of_int64_entries, number_of_code_ptr_entries, number_of_heap_ptr_entries, @@ -4281,29 +4260,29 @@ MaybeObject* Heap::AllocateConstantPoolArray(int number_of_int64_entries, } -MaybeObject* Heap::AllocateEmptyConstantPoolArray() { +AllocationResult Heap::AllocateEmptyConstantPoolArray() { int size = ConstantPoolArray::SizeFor(0, 0, 0, 0); - Object* result; - { 
MaybeObject* maybe_result = + HeapObject* result; + { AllocationResult allocation = AllocateRaw(size, OLD_DATA_SPACE, OLD_DATA_SPACE); - if (!maybe_result->ToObject(&result)) return maybe_result; + if (!allocation.To(&result)) return allocation; } - HeapObject::cast(result)->set_map_no_write_barrier(constant_pool_array_map()); + result->set_map_no_write_barrier(constant_pool_array_map()); ConstantPoolArray::cast(result)->Init(0, 0, 0, 0); return result; } -MaybeObject* Heap::AllocateSymbol() { +AllocationResult Heap::AllocateSymbol() { // Statically ensure that it is safe to allocate symbols in paged spaces. STATIC_ASSERT(Symbol::kSize <= Page::kMaxRegularHeapObjectSize); - Object* result; - MaybeObject* maybe = + HeapObject* result; + AllocationResult allocation = AllocateRaw(Symbol::kSize, OLD_POINTER_SPACE, OLD_POINTER_SPACE); - if (!maybe->ToObject(&result)) return maybe; + if (!allocation.To(&result)) return allocation; - HeapObject::cast(result)->set_map_no_write_barrier(symbol_map()); + result->set_map_no_write_barrier(symbol_map()); // Generate a random hash value. 
int hash; @@ -4324,7 +4303,7 @@ MaybeObject* Heap::AllocateSymbol() { } -MaybeObject* Heap::AllocateStruct(InstanceType type) { +AllocationResult Heap::AllocateStruct(InstanceType type) { Map* map; switch (type) { #define MAKE_CASE(NAME, Name, name) \ @@ -4337,11 +4316,11 @@ STRUCT_LIST(MAKE_CASE) } int size = map->instance_size(); AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, TENURED); - Object* result; - { MaybeObject* maybe_result = Allocate(map, space); - if (!maybe_result->ToObject(&result)) return maybe_result; + Struct* result; + { AllocationResult allocation = Allocate(map, space); + if (!allocation.To(&result)) return allocation; } - Struct::cast(result)->InitializeBody(size); + result->InitializeBody(size); return result; } @@ -4640,8 +4619,10 @@ bool Heap::InSpace(Address addr, AllocationSpace space) { return property_cell_space_->Contains(addr); case LO_SPACE: return lo_space_->SlowContains(addr); + default: + break; } - + UNREACHABLE(); return false; } diff --git a/src/heap.h b/src/heap.h index 77c0984..f2acb44 100644 --- a/src/heap.h +++ b/src/heap.h @@ -684,10 +684,9 @@ class Heap { // Returns a deep copy of the JavaScript object. // Properties and elements are copied too. - // Returns failure if allocation failed. // Optionally takes an AllocationSite to be appended in an AllocationMemento. - MUST_USE_RESULT MaybeObject* CopyJSObject(JSObject* source, - AllocationSite* site = NULL); + MUST_USE_RESULT AllocationResult CopyJSObject(JSObject* source, + AllocationSite* site = NULL); // Clear the Instanceof cache (used when a prototype changes). inline void ClearInstanceofCache(); @@ -716,13 +715,6 @@ class Heap { // callee is only valid in sloppy mode. static const int kArgumentsCalleeIndex = 1; - // Converts an int into either a Smi or a HeapNumber object. - // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation - // failed. - // Please note this does not perform a garbage collection. 
- MUST_USE_RESULT inline MaybeObject* NumberFromUint32( - uint32_t value, PretenureFlag pretenure = NOT_TENURED); - // Finalizes an external string by deleting the associated external // data and clearing the resource pointer. inline void FinalizeExternalString(String* string); @@ -1440,7 +1432,7 @@ class Heap { // Methods made available to tests. // Allocates a JS Map in the heap. - MUST_USE_RESULT MaybeObject* AllocateMap( + MUST_USE_RESULT AllocationResult AllocateMap( InstanceType instance_type, int instance_size, ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND); @@ -1449,7 +1441,7 @@ class Heap { // constructor. // If allocation_site is non-null, then a memento is emitted after the object // that points to the site. - MUST_USE_RESULT MaybeObject* AllocateJSObject( + MUST_USE_RESULT AllocationResult AllocateJSObject( JSFunction* constructor, PretenureFlag pretenure = NOT_TENURED, AllocationSite* allocation_site = NULL); @@ -1457,33 +1449,34 @@ class Heap { // Allocates and initializes a new JavaScript object based on a map. // Passing an allocation site means that a memento will be created that // points to the site. - MUST_USE_RESULT MaybeObject* AllocateJSObjectFromMap( + MUST_USE_RESULT AllocationResult AllocateJSObjectFromMap( Map* map, PretenureFlag pretenure = NOT_TENURED, bool alloc_props = true, AllocationSite* allocation_site = NULL); // Allocated a HeapNumber from value. - MUST_USE_RESULT MaybeObject* AllocateHeapNumber( + MUST_USE_RESULT AllocationResult AllocateHeapNumber( double value, PretenureFlag pretenure = NOT_TENURED); // Allocate a byte array of the specified length - MUST_USE_RESULT MaybeObject* AllocateByteArray( + MUST_USE_RESULT AllocationResult AllocateByteArray( int length, PretenureFlag pretenure = NOT_TENURED); // Allocates an arguments object - optionally with an elements array. 
- MUST_USE_RESULT MaybeObject* AllocateArgumentsObject( + MUST_USE_RESULT AllocationResult AllocateArgumentsObject( Object* callee, int length); // Copy the code and scope info part of the code object, but insert // the provided data as the relocation information. - MUST_USE_RESULT MaybeObject* CopyCode(Code* code, Vector reloc_info); + MUST_USE_RESULT AllocationResult CopyCode(Code* code, + Vector reloc_info); - MUST_USE_RESULT MaybeObject* CopyCode(Code* code); + MUST_USE_RESULT AllocationResult CopyCode(Code* code); // Allocates a fixed array initialized with undefined values - MUST_USE_RESULT MaybeObject* AllocateFixedArray( + MUST_USE_RESULT AllocationResult AllocateFixedArray( int length, PretenureFlag pretenure = NOT_TENURED); @@ -1739,17 +1732,21 @@ class Heap { // hardware and OS allow. This is the single choke-point for allocations // performed by the runtime and should not be bypassed (to extend this to // inlined allocations, use the Heap::DisableInlineAllocation() support). - MUST_USE_RESULT inline MaybeObject* AllocateRaw(int size_in_bytes, - AllocationSpace space, - AllocationSpace retry_space); + MUST_USE_RESULT inline AllocationResult AllocateRaw( + int size_in_bytes, + AllocationSpace space, + AllocationSpace retry_space); // Allocates a heap object based on the map. - MUST_USE_RESULT MaybeObject* Allocate(Map* map, AllocationSpace space, - AllocationSite* allocation_site = NULL); + MUST_USE_RESULT AllocationResult Allocate( + Map* map, + AllocationSpace space, + AllocationSite* allocation_site = NULL); // Allocates a partial map for bootstrapping. - MUST_USE_RESULT MaybeObject* AllocatePartialMap(InstanceType instance_type, - int instance_size); + MUST_USE_RESULT AllocationResult AllocatePartialMap( + InstanceType instance_type, + int instance_size); // Initializes a JSObject based on its map. void InitializeJSObjectFromMap(JSObject* obj, @@ -1760,29 +1757,29 @@ class Heap { // Allocate a block of memory in the given space (filled with a filler). 
// Used as a fall-back for generated code when the space is full. - MUST_USE_RESULT MaybeObject* AllocateFillerObject(int size, + MUST_USE_RESULT AllocationResult AllocateFillerObject(int size, bool double_align, AllocationSpace space); // Allocate an uninitialized fixed array. - MUST_USE_RESULT MaybeObject* AllocateRawFixedArray( + MUST_USE_RESULT AllocationResult AllocateRawFixedArray( int length, PretenureFlag pretenure); // Allocate an uninitialized fixed double array. - MUST_USE_RESULT MaybeObject* AllocateRawFixedDoubleArray( + MUST_USE_RESULT AllocationResult AllocateRawFixedDoubleArray( int length, PretenureFlag pretenure); // Allocate an initialized fixed array with the given filler value. - MUST_USE_RESULT MaybeObject* AllocateFixedArrayWithFiller( + MUST_USE_RESULT AllocationResult AllocateFixedArrayWithFiller( int length, PretenureFlag pretenure, Object* filler); // Allocate and partially initializes a String. There are two String // encodings: ASCII and two byte. These functions allocate a string of the // given length and set its map and length fields. The characters of the // string are uninitialized. - MUST_USE_RESULT MaybeObject* AllocateRawOneByteString( + MUST_USE_RESULT AllocationResult AllocateRawOneByteString( int length, PretenureFlag pretenure); - MUST_USE_RESULT MaybeObject* AllocateRawTwoByteString( + MUST_USE_RESULT AllocationResult AllocateRawTwoByteString( int length, PretenureFlag pretenure); // Allocates and fully initializes a String. There are two String @@ -1800,11 +1797,11 @@ class Heap { // encoded. If the characters are all single-byte characters, the // result will be converted to ASCII, otherwise it will be left as // two-byte. 
- MUST_USE_RESULT MaybeObject* AllocateStringFromUtf8Slow( + MUST_USE_RESULT AllocationResult AllocateStringFromUtf8Slow( Vector str, int non_ascii_start, PretenureFlag pretenure = NOT_TENURED); - MUST_USE_RESULT MaybeObject* AllocateStringFromTwoByte( + MUST_USE_RESULT AllocationResult AllocateStringFromTwoByte( Vector str, PretenureFlag pretenure = NOT_TENURED); @@ -1813,89 +1810,90 @@ class Heap { // Allocates an internalized string in old space based on the character // stream. - MUST_USE_RESULT inline MaybeObject* AllocateInternalizedStringFromUtf8( + MUST_USE_RESULT inline AllocationResult AllocateInternalizedStringFromUtf8( Vector str, int chars, uint32_t hash_field); - MUST_USE_RESULT inline MaybeObject* AllocateOneByteInternalizedString( + MUST_USE_RESULT inline AllocationResult AllocateOneByteInternalizedString( Vector str, uint32_t hash_field); - MUST_USE_RESULT inline MaybeObject* AllocateTwoByteInternalizedString( + MUST_USE_RESULT inline AllocationResult AllocateTwoByteInternalizedString( Vector str, uint32_t hash_field); template - MUST_USE_RESULT MaybeObject* AllocateInternalizedStringImpl( + MUST_USE_RESULT AllocationResult AllocateInternalizedStringImpl( T t, int chars, uint32_t hash_field); template - MUST_USE_RESULT inline MaybeObject* AllocateInternalizedStringImpl( + MUST_USE_RESULT inline AllocationResult AllocateInternalizedStringImpl( T t, int chars, uint32_t hash_field); // Allocates an uninitialized fixed array. It must be filled by the caller. - MUST_USE_RESULT MaybeObject* AllocateUninitializedFixedArray(int length); + MUST_USE_RESULT AllocationResult AllocateUninitializedFixedArray(int length); // Make a copy of src and return it. Returns // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed. - MUST_USE_RESULT inline MaybeObject* CopyFixedArray(FixedArray* src); + MUST_USE_RESULT inline AllocationResult CopyFixedArray(FixedArray* src); // Make a copy of src, set the map, and return the copy. 
Returns // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed. - MUST_USE_RESULT MaybeObject* CopyFixedArrayWithMap(FixedArray* src, Map* map); + MUST_USE_RESULT AllocationResult CopyFixedArrayWithMap(FixedArray* src, + Map* map); // Make a copy of src and return it. Returns // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed. - MUST_USE_RESULT inline MaybeObject* CopyFixedDoubleArray( + MUST_USE_RESULT inline AllocationResult CopyFixedDoubleArray( FixedDoubleArray* src); // Make a copy of src and return it. Returns // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed. - MUST_USE_RESULT inline MaybeObject* CopyConstantPoolArray( + MUST_USE_RESULT inline AllocationResult CopyConstantPoolArray( ConstantPoolArray* src); // Computes a single character string where the character has code. // A cache is used for ASCII codes. - MUST_USE_RESULT MaybeObject* LookupSingleCharacterStringFromCode( + MUST_USE_RESULT AllocationResult LookupSingleCharacterStringFromCode( uint16_t code); // Allocate a symbol in old space. - MUST_USE_RESULT MaybeObject* AllocateSymbol(); + MUST_USE_RESULT AllocationResult AllocateSymbol(); // Make a copy of src, set the map, and return the copy. - MUST_USE_RESULT MaybeObject* CopyConstantPoolArrayWithMap( + MUST_USE_RESULT AllocationResult CopyConstantPoolArrayWithMap( ConstantPoolArray* src, Map* map); - MUST_USE_RESULT MaybeObject* AllocateConstantPoolArray( + MUST_USE_RESULT AllocationResult AllocateConstantPoolArray( int number_of_int64_entries, int number_of_code_ptr_entries, int number_of_heap_ptr_entries, int number_of_int32_entries); // Allocates an external array of the specified length and type. - MUST_USE_RESULT MaybeObject* AllocateExternalArray( + MUST_USE_RESULT AllocationResult AllocateExternalArray( int length, ExternalArrayType array_type, void* external_pointer, PretenureFlag pretenure); // Allocates a fixed typed array of the specified length and type. 
- MUST_USE_RESULT MaybeObject* AllocateFixedTypedArray( + MUST_USE_RESULT AllocationResult AllocateFixedTypedArray( int length, ExternalArrayType array_type, PretenureFlag pretenure); // Make a copy of src and return it. - MUST_USE_RESULT MaybeObject* CopyAndTenureFixedCOWArray(FixedArray* src); + MUST_USE_RESULT AllocationResult CopyAndTenureFixedCOWArray(FixedArray* src); // Make a copy of src, set the map, and return the copy. - MUST_USE_RESULT MaybeObject* CopyFixedDoubleArrayWithMap( + MUST_USE_RESULT AllocationResult CopyFixedDoubleArrayWithMap( FixedDoubleArray* src, Map* map); // Allocates a fixed double array with uninitialized values. Returns - MUST_USE_RESULT MaybeObject* AllocateUninitializedFixedDoubleArray( + MUST_USE_RESULT AllocationResult AllocateUninitializedFixedDoubleArray( int length, PretenureFlag pretenure = NOT_TENURED); @@ -1907,37 +1905,38 @@ class Heap { void CreateFixedStubs(); // Allocate empty fixed array. - MUST_USE_RESULT MaybeObject* AllocateEmptyFixedArray(); + MUST_USE_RESULT AllocationResult AllocateEmptyFixedArray(); // Allocate empty external array of given type. - MUST_USE_RESULT MaybeObject* AllocateEmptyExternalArray( + MUST_USE_RESULT AllocationResult AllocateEmptyExternalArray( ExternalArrayType array_type); // Allocate empty fixed typed array of given type. - MUST_USE_RESULT MaybeObject* AllocateEmptyFixedTypedArray( + MUST_USE_RESULT AllocationResult AllocateEmptyFixedTypedArray( ExternalArrayType array_type); // Allocate empty constant pool array. - MUST_USE_RESULT MaybeObject* AllocateEmptyConstantPoolArray(); + MUST_USE_RESULT AllocationResult AllocateEmptyConstantPoolArray(); // Allocate a tenured simple cell. - MUST_USE_RESULT MaybeObject* AllocateCell(Object* value); + MUST_USE_RESULT AllocationResult AllocateCell(Object* value); // Allocate a tenured JS global property cell initialized with the hole. 
- MUST_USE_RESULT MaybeObject* AllocatePropertyCell(); + MUST_USE_RESULT AllocationResult AllocatePropertyCell(); // Allocates a new utility object in the old generation. - MUST_USE_RESULT MaybeObject* AllocateStruct(InstanceType type); + MUST_USE_RESULT AllocationResult AllocateStruct(InstanceType type); // Allocates a new foreign object. - MUST_USE_RESULT MaybeObject* AllocateForeign( + MUST_USE_RESULT AllocationResult AllocateForeign( Address address, PretenureFlag pretenure = NOT_TENURED); - MUST_USE_RESULT MaybeObject* AllocateCode(int object_size, bool immovable); + MUST_USE_RESULT AllocationResult AllocateCode(int object_size, + bool immovable); - MUST_USE_RESULT MaybeObject* InternalizeStringWithKey(HashTableKey* key); + MUST_USE_RESULT AllocationResult InternalizeStringWithKey(HashTableKey* key); - MUST_USE_RESULT MaybeObject* InternalizeString(String* str); + MUST_USE_RESULT AllocationResult InternalizeString(String* str); // Performs a minor collection in new generation. void Scavenge(); diff --git a/src/incremental-marking-inl.h b/src/incremental-marking-inl.h index 1762f2b..19d471c 100644 --- a/src/incremental-marking-inl.h +++ b/src/incremental-marking-inl.h @@ -45,7 +45,7 @@ bool IncrementalMarking::BaseRecordWrite(HeapObject* obj, void IncrementalMarking::RecordWrite(HeapObject* obj, Object** slot, Object* value) { - if (IsMarking() && value->NonFailureIsHeapObject()) { + if (IsMarking() && value->IsHeapObject()) { RecordWriteSlow(obj, slot, value); } } @@ -61,7 +61,7 @@ void IncrementalMarking::RecordWriteOfCodeEntry(JSFunction* host, void IncrementalMarking::RecordWriteIntoCode(HeapObject* obj, RelocInfo* rinfo, Object* value) { - if (IsMarking() && value->NonFailureIsHeapObject()) { + if (IsMarking() && value->IsHeapObject()) { RecordWriteIntoCodeSlow(obj, rinfo, value); } } diff --git a/src/incremental-marking.cc b/src/incremental-marking.cc index 268e2ec..2b6765c 100644 --- a/src/incremental-marking.cc +++ b/src/incremental-marking.cc @@ -240,7 
+240,7 @@ class IncrementalMarkingMarkingVisitor INLINE(static void VisitPointer(Heap* heap, Object** p)) { Object* obj = *p; - if (obj->NonFailureIsHeapObject()) { + if (obj->IsHeapObject()) { heap->mark_compact_collector()->RecordSlot(p, p, obj); MarkObject(heap, obj); } @@ -249,7 +249,7 @@ class IncrementalMarkingMarkingVisitor INLINE(static void VisitPointers(Heap* heap, Object** start, Object** end)) { for (Object** p = start; p < end; p++) { Object* obj = *p; - if (obj->NonFailureIsHeapObject()) { + if (obj->IsHeapObject()) { heap->mark_compact_collector()->RecordSlot(start, p, obj); MarkObject(heap, obj); } @@ -262,7 +262,7 @@ class IncrementalMarkingMarkingVisitor Object** end)) { for (Object** p = start; p < end; p++) { Object* obj = *p; - if (obj->NonFailureIsHeapObject()) { + if (obj->IsHeapObject()) { heap->mark_compact_collector()->RecordSlot(anchor, p, obj); MarkObject(heap, obj); } diff --git a/src/isolate.h b/src/isolate.h index bc7646b..fdd89b4 100644 --- a/src/isolate.h +++ b/src/isolate.h @@ -663,8 +663,8 @@ class Isolate { class ExceptionScope { public: explicit ExceptionScope(Isolate* isolate) : - // Scope currently can only be used for regular exceptions, not - // failures like OOM or termination exception. + // Scope currently can only be used for regular exceptions, + // not termination exception. isolate_(isolate), pending_exception_(isolate_->pending_exception(), isolate_), catcher_(isolate_->catcher()) diff --git a/src/mark-compact.cc b/src/mark-compact.cc index 1a11053..ec8e941 100644 --- a/src/mark-compact.cc +++ b/src/mark-compact.cc @@ -17,6 +17,7 @@ #include "mark-compact.h" #include "objects-visiting.h" #include "objects-visiting-inl.h" +#include "spaces-inl.h" #include "stub-cache.h" #include "sweeper-thread.h" @@ -2059,8 +2060,8 @@ int MarkCompactCollector::DiscoverAndPromoteBlackObjectsOnPage( } // Promotion failed. Just migrate object to another semispace. 
- MaybeObject* allocation = new_space->AllocateRaw(size); - if (allocation->IsFailure()) { + AllocationResult allocation = new_space->AllocateRaw(size); + if (allocation.IsRetry()) { if (!new_space->AddFreshPage()) { // Shouldn't happen. We are sweeping linearly, and to-space // has the same number of pages as from-space, so there is @@ -2068,9 +2069,9 @@ int MarkCompactCollector::DiscoverAndPromoteBlackObjectsOnPage( UNREACHABLE(); } allocation = new_space->AllocateRaw(size); - ASSERT(!allocation->IsFailure()); + ASSERT(!allocation.IsRetry()); } - Object* target = allocation->ToObjectUnchecked(); + Object* target = allocation.ToObjectChecked(); MigrateObject(HeapObject::cast(target), object, @@ -3074,10 +3075,9 @@ bool MarkCompactCollector::TryPromoteObject(HeapObject* object, ASSERT(target_space == heap()->old_pointer_space() || target_space == heap()->old_data_space()); - Object* result; - MaybeObject* maybe_result = target_space->AllocateRaw(object_size); - if (maybe_result->ToObject(&result)) { - HeapObject* target = HeapObject::cast(result); + HeapObject* target; + AllocationResult allocation = target_space->AllocateRaw(object_size); + if (allocation.To(&target)) { MigrateObject(target, object, object_size, @@ -3148,19 +3148,15 @@ void MarkCompactCollector::EvacuateLiveObjectsFromPage(Page* p) { int size = object->Size(); - MaybeObject* target = space->AllocateRaw(size); - if (target->IsFailure()) { + HeapObject* target_object; + AllocationResult allocation = space->AllocateRaw(size); + if (!allocation.To(&target_object)) { // OS refused to give us memory. 
V8::FatalProcessOutOfMemory("Evacuation"); return; } - Object* target_object = target->ToObjectUnchecked(); - - MigrateObject(HeapObject::cast(target_object), - object, - size, - space->identity()); + MigrateObject(target_object, object, size, space->identity()); ASSERT(object->map_word().IsForwardingAddress()); } diff --git a/src/objects-debug.cc b/src/objects-debug.cc index 2f51c9e..27c936e 100644 --- a/src/objects-debug.cc +++ b/src/objects-debug.cc @@ -15,16 +15,6 @@ namespace internal { #ifdef VERIFY_HEAP -void MaybeObject::Verify() { - Object* this_as_object; - if (ToObject(&this_as_object)) { - this_as_object->ObjectVerify(); - } else { - Failure::cast(this)->FailureVerify(); - } -} - - void Object::ObjectVerify() { if (IsSmi()) { Smi::cast(this)->SmiVerify(); @@ -48,11 +38,6 @@ void Smi::SmiVerify() { } -void Failure::FailureVerify() { - CHECK(IsFailure()); -} - - void HeapObject::HeapObjectVerify() { InstanceType instance_type = map()->instance_type(); diff --git a/src/objects-inl.h b/src/objects-inl.h index 165e4f4..2f3de9d 100644 --- a/src/objects-inl.h +++ b/src/objects-inl.h @@ -157,12 +157,6 @@ bool Object::IsHeapObject() { } -bool Object::NonFailureIsHeapObject() { - ASSERT(!this->IsFailure()); - return (reinterpret_cast(this) & kSmiTagMask) != 0; -} - - TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE) TYPE_CHECKER(Symbol, SYMBOL_TYPE) @@ -652,23 +646,6 @@ bool Object::IsFixedTypedArrayBase() { } -bool MaybeObject::IsFailure() { - return HAS_FAILURE_TAG(this); -} - - -bool MaybeObject::IsRetryAfterGC() { - return HAS_FAILURE_TAG(this) - && Failure::cast(this)->type() == Failure::RETRY_AFTER_GC; -} - - -Failure* Failure::cast(MaybeObject* obj) { - ASSERT(HAS_FAILURE_TAG(obj)); - return reinterpret_cast(obj); -} - - bool Object::IsJSReceiver() { STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE); return IsHeapObject() && @@ -1282,47 +1259,6 @@ Smi* Smi::FromIntptr(intptr_t value) { } -Failure::Type Failure::type() const { - return static_cast(value() & 
kFailureTypeTagMask); -} - - -AllocationSpace Failure::allocation_space() const { - ASSERT_EQ(RETRY_AFTER_GC, type()); - return static_cast((value() >> kFailureTypeTagSize) - & kSpaceTagMask); -} - - -intptr_t Failure::value() const { - return static_cast( - reinterpret_cast(this) >> kFailureTagSize); -} - - -Failure* Failure::RetryAfterGC() { - return RetryAfterGC(NEW_SPACE); -} - - -Failure* Failure::RetryAfterGC(AllocationSpace space) { - ASSERT((space & ~kSpaceTagMask) == 0); - return Construct(RETRY_AFTER_GC, space); -} - - -Failure* Failure::Construct(Type type, intptr_t value) { - uintptr_t info = - (static_cast(value) << kFailureTypeTagSize) | type; - ASSERT(((info << kFailureTagSize) >> kFailureTagSize) == info); - // Fill the unused bits with a pattern that's easy to recognize in crash - // dumps. - static const int kFailureMagicPattern = 0x0BAD0000; - return reinterpret_cast( - (info << kFailureTagSize) | kFailureTag | kFailureMagicPattern); -} - - bool Smi::IsValid(intptr_t value) { bool result = Internals::IsValidSmi(value); ASSERT_EQ(result, value >= kMinValue && value <= kMaxValue); diff --git a/src/objects-printer.cc b/src/objects-printer.cc index e6606bc..4fb5b56 100644 --- a/src/objects-printer.cc +++ b/src/objects-printer.cc @@ -14,32 +14,27 @@ namespace internal { #ifdef OBJECT_PRINT -void MaybeObject::Print() { +void Object::Print() { Print(stdout); } -void MaybeObject::Print(FILE* out) { - Object* this_as_object; - if (ToObject(&this_as_object)) { - if (this_as_object->IsSmi()) { - Smi::cast(this_as_object)->SmiPrint(out); - } else { - HeapObject::cast(this_as_object)->HeapObjectPrint(out); - } +void Object::Print(FILE* out) { + if (IsSmi()) { + Smi::cast(this)->SmiPrint(out); } else { - Failure::cast(this)->FailurePrint(out); + HeapObject::cast(this)->HeapObjectPrint(out); } Flush(out); } -void MaybeObject::PrintLn() { +void Object::PrintLn() { PrintLn(stdout); } -void MaybeObject::PrintLn(FILE* out) { +void Object::PrintLn(FILE* out) { 
Print(out); PrintF(out, "\n"); } diff --git a/src/objects.cc b/src/objects.cc index 2733593..b411ec6 100644 --- a/src/objects.cc +++ b/src/objects.cc @@ -988,8 +988,6 @@ void Object::ShortPrint(FILE* out) { void Object::ShortPrint(StringStream* accumulator) { if (IsSmi()) { Smi::cast(this)->SmiPrint(accumulator); - } else if (IsFailure()) { - Failure::cast(this)->FailurePrint(accumulator); } else { HeapObject::cast(this)->HeapObjectShortPrint(accumulator); } @@ -1006,16 +1004,6 @@ void Smi::SmiPrint(StringStream* accumulator) { } -void Failure::FailurePrint(StringStream* accumulator) { - accumulator->Add("Failure(%p)", reinterpret_cast(value())); -} - - -void Failure::FailurePrint(FILE* out) { - PrintF(out, "Failure(%p)", reinterpret_cast(value())); -} - - // Should a word be prefixed by 'a' or 'an' in order to read naturally in // English? Returns false for non-ASCII or words that don't start with // a capital letter. The a/an rule follows pronunciation in English. diff --git a/src/objects.h b/src/objects.h index 42f2764..0a3b892 100644 --- a/src/objects.h +++ b/src/objects.h @@ -29,116 +29,113 @@ // Most object types in the V8 JavaScript are described in this file. 
// // Inheritance hierarchy: -// - MaybeObject (an object or a failure) -// - Failure (immediate for marking failed operation) -// - Object -// - Smi (immediate small integer) -// - HeapObject (superclass for everything allocated in the heap) -// - JSReceiver (suitable for property access) -// - JSObject -// - JSArray -// - JSArrayBuffer -// - JSArrayBufferView -// - JSTypedArray -// - JSDataView -// - JSSet -// - JSMap -// - JSSetIterator -// - JSMapIterator -// - JSWeakCollection -// - JSWeakMap -// - JSWeakSet -// - JSRegExp -// - JSFunction -// - JSGeneratorObject -// - JSModule -// - GlobalObject -// - JSGlobalObject -// - JSBuiltinsObject -// - JSGlobalProxy -// - JSValue -// - JSDate -// - JSMessageObject -// - JSProxy -// - JSFunctionProxy -// - FixedArrayBase -// - ByteArray -// - FixedArray -// - DescriptorArray -// - HashTable -// - Dictionary -// - StringTable -// - CompilationCacheTable -// - CodeCacheHashTable -// - MapCache -// - OrderedHashTable -// - OrderedHashSet -// - OrderedHashMap -// - Context -// - JSFunctionResultCache -// - ScopeInfo -// - TransitionArray -// - FixedDoubleArray -// - ExternalArray -// - ExternalUint8ClampedArray -// - ExternalInt8Array -// - ExternalUint8Array -// - ExternalInt16Array -// - ExternalUint16Array -// - ExternalInt32Array -// - ExternalUint32Array -// - ExternalFloat32Array -// - Name -// - String -// - SeqString -// - SeqOneByteString -// - SeqTwoByteString -// - SlicedString -// - ConsString -// - ExternalString -// - ExternalAsciiString -// - ExternalTwoByteString -// - InternalizedString -// - SeqInternalizedString -// - SeqOneByteInternalizedString -// - SeqTwoByteInternalizedString -// - ConsInternalizedString -// - ExternalInternalizedString -// - ExternalAsciiInternalizedString -// - ExternalTwoByteInternalizedString -// - Symbol -// - HeapNumber -// - Cell -// - PropertyCell -// - Code -// - Map -// - Oddball -// - Foreign -// - SharedFunctionInfo -// - Struct -// - Box -// - 
DeclaredAccessorDescriptor -// - AccessorInfo -// - DeclaredAccessorInfo -// - ExecutableAccessorInfo -// - AccessorPair -// - AccessCheckInfo -// - InterceptorInfo -// - CallHandlerInfo -// - TemplateInfo -// - FunctionTemplateInfo -// - ObjectTemplateInfo -// - Script -// - SignatureInfo -// - TypeSwitchInfo -// - DebugInfo -// - BreakPointInfo -// - CodeCache +// - Object +// - Smi (immediate small integer) +// - HeapObject (superclass for everything allocated in the heap) +// - JSReceiver (suitable for property access) +// - JSObject +// - JSArray +// - JSArrayBuffer +// - JSArrayBufferView +// - JSTypedArray +// - JSDataView +// - JSSet +// - JSMap +// - JSSetIterator +// - JSMapIterator +// - JSWeakCollection +// - JSWeakMap +// - JSWeakSet +// - JSRegExp +// - JSFunction +// - JSGeneratorObject +// - JSModule +// - GlobalObject +// - JSGlobalObject +// - JSBuiltinsObject +// - JSGlobalProxy +// - JSValue +// - JSDate +// - JSMessageObject +// - JSProxy +// - JSFunctionProxy +// - FixedArrayBase +// - ByteArray +// - FixedArray +// - DescriptorArray +// - HashTable +// - Dictionary +// - StringTable +// - CompilationCacheTable +// - CodeCacheHashTable +// - MapCache +// - OrderedHashTable +// - OrderedHashSet +// - OrderedHashMap +// - Context +// - JSFunctionResultCache +// - ScopeInfo +// - TransitionArray +// - FixedDoubleArray +// - ExternalArray +// - ExternalUint8ClampedArray +// - ExternalInt8Array +// - ExternalUint8Array +// - ExternalInt16Array +// - ExternalUint16Array +// - ExternalInt32Array +// - ExternalUint32Array +// - ExternalFloat32Array +// - Name +// - String +// - SeqString +// - SeqOneByteString +// - SeqTwoByteString +// - SlicedString +// - ConsString +// - ExternalString +// - ExternalAsciiString +// - ExternalTwoByteString +// - InternalizedString +// - SeqInternalizedString +// - SeqOneByteInternalizedString +// - SeqTwoByteInternalizedString +// - ConsInternalizedString +// - ExternalInternalizedString +// - 
ExternalAsciiInternalizedString +// - ExternalTwoByteInternalizedString +// - Symbol +// - HeapNumber +// - Cell +// - PropertyCell +// - Code +// - Map +// - Oddball +// - Foreign +// - SharedFunctionInfo +// - Struct +// - Box +// - DeclaredAccessorDescriptor +// - AccessorInfo +// - DeclaredAccessorInfo +// - ExecutableAccessorInfo +// - AccessorPair +// - AccessCheckInfo +// - InterceptorInfo +// - CallHandlerInfo +// - TemplateInfo +// - FunctionTemplateInfo +// - ObjectTemplateInfo +// - Script +// - SignatureInfo +// - TypeSwitchInfo +// - DebugInfo +// - BreakPointInfo +// - CodeCache // // Formats of Object*: // Smi: [31 bit signed int] 0 // HeapObject: [32 bit direct pointer] (4 byte aligned) | 01 -// Failure: [30 bit signed int] 11 namespace v8 { namespace internal { @@ -861,7 +858,6 @@ class AllocationSiteCreationContext; class AllocationSiteUsageContext; class DictionaryElementsAccessor; class ElementsAccessor; -class Failure; class FixedArrayBase; class GlobalObject; class ObjectVisitor; @@ -887,46 +883,6 @@ template inline bool Is(Object* obj); #define DECLARE_PRINTER(Name) #endif -class MaybeObject BASE_EMBEDDED { - public: - inline bool IsFailure(); - inline bool IsRetryAfterGC(); - inline bool ToObject(Object** obj) { - if (IsFailure()) return false; - *obj = reinterpret_cast(this); - return true; - } - inline Object* ToObjectUnchecked() { - // TODO(jkummerow): Turn this back into an ASSERT when we can be certain - // that it never fires in Release mode in the wild. - CHECK(!IsFailure()); - return reinterpret_cast(this); - } - inline Object* ToObjectChecked() { - CHECK(!IsFailure()); - return reinterpret_cast(this); - } - - template - inline bool To(T** obj) { - if (IsFailure()) return false; - *obj = T::cast(reinterpret_cast(this)); - return true; - } - -#ifdef OBJECT_PRINT - // Prints this object with details. 
- void Print(); - void Print(FILE* out); - void PrintLn(); - void PrintLn(FILE* out); -#endif -#ifdef VERIFY_HEAP - // Verifies the object. - void Verify(); -#endif -}; - #define OBJECT_TYPE_LIST(V) \ V(Smi) \ @@ -1383,9 +1339,9 @@ const char* GetBailoutReason(BailoutReason reason); // object hierarchy. // Object does not use any virtual functions to avoid the // allocation of the C++ vtable. -// Since Smi and Failure are subclasses of Object no +// Since both Smi and HeapObject are subclasses of Object no // data members can be present in Object. -class Object : public MaybeObject { +class Object { public: // Type testing. bool IsObject() { return true; } @@ -1418,7 +1374,6 @@ class Object : public MaybeObject { INLINE(bool IsTrue()); INLINE(bool IsFalse()); inline bool IsArgumentsMarker(); - inline bool NonFailureIsHeapObject(); // Filler objects (fillers and free space objects). inline bool IsFiller(); @@ -1583,6 +1538,14 @@ class Object : public MaybeObject { // Layout description. static const int kHeaderSize = 0; // Object does not take up any space. +#ifdef OBJECT_PRINT + // Prints this object with details. + void Print(); + void Print(FILE* out); + void PrintLn(); + void PrintLn(FILE* out); +#endif + private: DISALLOW_IMPLICIT_CONSTRUCTORS(Object); }; @@ -1625,59 +1588,6 @@ class Smi: public Object { }; -// Failure is mainly used for reporting a situation requiring a GC. -// Failure objects are transient and cannot occur as part of the object graph. -// -// Failures are a single word, encoded as follows: -// +-------------------------+---+--+--+ -// |.........unused..........|sss|tt|11| -// +-------------------------+---+--+--+ -// 7 6 4 32 10 -// -// -// The low two bits, 0-1, are the failure tag, 11. The next two bits, -// 2-3, are a failure type tag 'tt' with possible values: -// 00 RETRY_AFTER_GC -// -// The next three bits, 4-6, are an allocation space tag 'sss'. The -// allocation space tag is 000 for all failure types except -// RETRY_AFTER_GC. 
For RETRY_AFTER_GC, the possible values are the -// allocation spaces (the encoding is found in globals.h). - -// Failure type tag info. -const int kFailureTypeTagSize = 2; -const int kFailureTypeTagMask = (1 << kFailureTypeTagSize) - 1; - -class Failure: public MaybeObject { - public: - enum Type { - RETRY_AFTER_GC = 0 - }; - - inline Type type() const; - - // Returns the space that needs to be collected for RetryAfterGC failures. - inline AllocationSpace allocation_space() const; - - static inline Failure* RetryAfterGC(AllocationSpace space); - static inline Failure* RetryAfterGC(); // NEW_SPACE - // Casting. - static inline Failure* cast(MaybeObject* object); - - // Dispatched behavior. - void FailurePrint(FILE* out = stdout); - void FailurePrint(StringStream* accumulator); - - DECLARE_VERIFIER(Failure) - - private: - inline intptr_t value() const; - static inline Failure* Construct(Type type, intptr_t value = 0); - - DISALLOW_IMPLICIT_CONSTRUCTORS(Failure); -}; - - // Heap objects typically have a map pointer in their first word. However, // during GC other data (e.g. mark bits, forwarding addresses) is sometimes // encoded in the first word. The class MapWord is an abstraction of the diff --git a/src/spaces-inl.h b/src/spaces-inl.h index c44a51f..da9c03d 100644 --- a/src/spaces-inl.h +++ b/src/spaces-inl.h @@ -251,7 +251,7 @@ HeapObject* PagedSpace::AllocateLinearly(int size_in_bytes) { // Raw allocation. 
-MaybeObject* PagedSpace::AllocateRaw(int size_in_bytes) { +AllocationResult PagedSpace::AllocateRaw(int size_in_bytes) { HeapObject* object = AllocateLinearly(size_in_bytes); if (object != NULL) { if (identity() == CODE_SPACE) { @@ -280,7 +280,7 @@ MaybeObject* PagedSpace::AllocateRaw(int size_in_bytes) { return object; } - return Failure::RetryAfterGC(identity()); + return AllocationResult::Retry(identity()); } @@ -288,7 +288,7 @@ MaybeObject* PagedSpace::AllocateRaw(int size_in_bytes) { // NewSpace -MaybeObject* NewSpace::AllocateRaw(int size_in_bytes) { +AllocationResult NewSpace::AllocateRaw(int size_in_bytes) { Address old_top = allocation_info_.top(); #ifdef DEBUG // If we are stressing compaction we waste some memory in new space diff --git a/src/spaces.cc b/src/spaces.cc index c4ebabb..6d25d75 100644 --- a/src/spaces.cc +++ b/src/spaces.cc @@ -1397,7 +1397,7 @@ bool NewSpace::AddFreshPage() { } -MaybeObject* NewSpace::SlowAllocateRaw(int size_in_bytes) { +AllocationResult NewSpace::SlowAllocateRaw(int size_in_bytes) { Address old_top = allocation_info_.top(); Address high = to_space_.page_high(); if (allocation_info_.limit() < high) { @@ -1419,7 +1419,7 @@ MaybeObject* NewSpace::SlowAllocateRaw(int size_in_bytes) { top_on_previous_step_ = to_space_.page_low(); return AllocateRaw(size_in_bytes); } else { - return Failure::RetryAfterGC(); + return AllocationResult::Retry(); } } @@ -2842,22 +2842,22 @@ void LargeObjectSpace::TearDown() { } -MaybeObject* LargeObjectSpace::AllocateRaw(int object_size, - Executability executable) { +AllocationResult LargeObjectSpace::AllocateRaw(int object_size, + Executability executable) { // Check if we want to force a GC before growing the old space further. // If so, fail the allocation. 
  if (!heap()->always_allocate() &&
      heap()->OldGenerationAllocationLimitReached()) {
-    return Failure::RetryAfterGC(identity());
+    return AllocationResult::Retry(identity());
  }
 
  if (Size() + object_size > max_capacity_) {
-    return Failure::RetryAfterGC(identity());
+    return AllocationResult::Retry(identity());
  }
 
  LargePage* page = heap()->isolate()->memory_allocator()->
      AllocateLargePage(object_size, this, executable);
-  if (page == NULL) return Failure::RetryAfterGC(identity());
+  if (page == NULL) return AllocationResult::Retry(identity());
  ASSERT(page->area_size() >= object_size);
 
  size_ += static_cast<intptr_t>(page->size());
diff --git a/src/spaces.h b/src/spaces.h
index c7f6f69..735f1fb 100644
--- a/src/spaces.h
+++ b/src/spaces.h
@@ -1497,9 +1497,8 @@ class FreeListNode: public HeapObject {
 
   inline void Zap();
 
-  static inline FreeListNode* cast(MaybeObject* maybe) {
-    ASSERT(!maybe->IsFailure());
-    return reinterpret_cast<FreeListNode*>(maybe);
+  static inline FreeListNode* cast(Object* object) {
+    return reinterpret_cast<FreeListNode*>(object);
   }
 
  private:
@@ -1670,6 +1669,47 @@ class FreeList {
 };
 
 
+class AllocationResult {
+ public:
+  // Implicit constructor from Object*.
+  AllocationResult(Object* object) : object_(object),  // NOLINT
+                                     retry_space_(INVALID_SPACE) { }
+
+  AllocationResult() : object_(NULL),
+                       retry_space_(INVALID_SPACE) { }
+
+  static inline AllocationResult Retry(AllocationSpace space = NEW_SPACE) {
+    return AllocationResult(space);
+  }
+
+  inline bool IsRetry() { return retry_space_ != INVALID_SPACE; }
+
+  template <typename T>
+  bool To(T** obj) {
+    if (IsRetry()) return false;
+    *obj = T::cast(object_);
+    return true;
+  }
+
+  Object* ToObjectChecked() {
+    CHECK(!IsRetry());
+    return object_;
+  }
+
+  AllocationSpace RetrySpace() {
+    ASSERT(IsRetry());
+    return retry_space_;
+  }
+
+ private:
+  explicit AllocationResult(AllocationSpace space) : object_(NULL),
+                                                     retry_space_(space) { }
+
+  Object* object_;
+  AllocationSpace retry_space_;
+};
+
+
 class PagedSpace : public Space {
  public:
   // Creates a space with a maximum capacity, and an id.
@@ -1790,7 +1830,7 @@ class PagedSpace : public Space {
 
   // Allocate the requested number of bytes in the space if possible, return a
   // failure object if not.
-  MUST_USE_RESULT inline MaybeObject* AllocateRaw(int size_in_bytes);
+  MUST_USE_RESULT inline AllocationResult AllocateRaw(int size_in_bytes);
 
   // Give a block of memory to the space's free list.  It might be added to
   // the free list or accounted as waste.
@@ -2516,7 +2556,7 @@ class NewSpace : public Space {
     return allocation_info_.limit_address();
   }
 
-  MUST_USE_RESULT INLINE(MaybeObject* AllocateRaw(int size_in_bytes));
+  MUST_USE_RESULT INLINE(AllocationResult AllocateRaw(int size_in_bytes));
 
   // Reset the allocation pointer to the beginning of the active semispace.
void ResetAllocationInfo(); @@ -2633,7 +2673,7 @@ class NewSpace : public Space { HistogramInfo* allocated_histogram_; HistogramInfo* promoted_histogram_; - MUST_USE_RESULT MaybeObject* SlowAllocateRaw(int size_in_bytes); + MUST_USE_RESULT AllocationResult SlowAllocateRaw(int size_in_bytes); friend class SemiSpaceIterator; @@ -2788,8 +2828,8 @@ class LargeObjectSpace : public Space { // Shared implementation of AllocateRaw, AllocateRawCode and // AllocateRawFixedArray. - MUST_USE_RESULT MaybeObject* AllocateRaw(int object_size, - Executability executable); + MUST_USE_RESULT AllocationResult AllocateRaw(int object_size, + Executability executable); // Available bytes for objects in this space. inline intptr_t Available(); diff --git a/src/v8globals.h b/src/v8globals.h index c7228c0..4c9da40 100644 --- a/src/v8globals.h +++ b/src/v8globals.h @@ -36,6 +36,7 @@ const intptr_t kCodeAlignment = 1 << kCodeAlignmentBits; const intptr_t kCodeAlignmentMask = kCodeAlignment - 1; // Tag information for Failure. +// TODO(yangguo): remove this from space owner calculation. const int kFailureTag = 3; const int kFailureTagSize = 2; const intptr_t kFailureTagMask = (1 << kFailureTagSize) - 1; @@ -124,7 +125,6 @@ class MapSpace; class MarkCompactCollector; class NewSpace; class Object; -class MaybeObject; class OldSpace; class Foreign; class Scope; @@ -162,6 +162,7 @@ enum AllocationSpace { CELL_SPACE, // Only and all cell objects. PROPERTY_CELL_SPACE, // Only and all global property cell objects. LO_SPACE, // Promoted large objects. + INVALID_SPACE, // Only used in AllocationResult to signal success. 
FIRST_SPACE = NEW_SPACE, LAST_SPACE = LO_SPACE, diff --git a/test/cctest/cctest.h b/test/cctest/cctest.h index b85f462..36e1b96 100644 --- a/test/cctest/cctest.h +++ b/test/cctest/cctest.h @@ -415,8 +415,10 @@ static inline void SimulateFullSpace(v8::internal::NewSpace* space) { int new_linear_size = static_cast( *space->allocation_limit_address() - *space->allocation_top_address()); if (new_linear_size == 0) return; - v8::internal::MaybeObject* maybe = space->AllocateRaw(new_linear_size); - v8::internal::FreeListNode* node = v8::internal::FreeListNode::cast(maybe); + v8::internal::AllocationResult allocation = + space->AllocateRaw(new_linear_size); + v8::internal::FreeListNode* node = + v8::internal::FreeListNode::cast(allocation.ToObjectChecked()); node->set_size(space->heap(), new_linear_size); } diff --git a/test/cctest/test-alloc.cc b/test/cctest/test-alloc.cc index 2ab6a92..7a213ae 100644 --- a/test/cctest/test-alloc.cc +++ b/test/cctest/test-alloc.cc @@ -35,32 +35,33 @@ using namespace v8::internal; -static MaybeObject* AllocateAfterFailures() { +static AllocationResult AllocateAfterFailures() { static int attempts = 0; - if (++attempts < 3) return Failure::RetryAfterGC(); + + if (++attempts < 3) return AllocationResult::Retry(); TestHeap* heap = CcTest::test_heap(); // New space. SimulateFullSpace(heap->new_space()); - CHECK(!heap->AllocateByteArray(100)->IsFailure()); - CHECK(!heap->AllocateFixedArray(100, NOT_TENURED)->IsFailure()); + heap->AllocateByteArray(100).ToObjectChecked(); + heap->AllocateFixedArray(100, NOT_TENURED).ToObjectChecked(); // Make sure we can allocate through optimized allocation functions // for specific kinds. 
- CHECK(!heap->AllocateFixedArray(100)->IsFailure()); - CHECK(!heap->AllocateHeapNumber(0.42)->IsFailure()); - CHECK(!heap->AllocateArgumentsObject(Smi::FromInt(87), 10)->IsFailure()); + heap->AllocateFixedArray(100).ToObjectChecked(); + heap->AllocateHeapNumber(0.42).ToObjectChecked(); + heap->AllocateArgumentsObject(Smi::FromInt(87), 10).ToObjectChecked(); Object* object = heap->AllocateJSObject( - *CcTest::i_isolate()->object_function())->ToObjectChecked(); - CHECK(!heap->CopyJSObject(JSObject::cast(object))->IsFailure()); + *CcTest::i_isolate()->object_function()).ToObjectChecked(); + heap->CopyJSObject(JSObject::cast(object)).ToObjectChecked(); // Old data space. SimulateFullSpace(heap->old_data_space()); - CHECK(!heap->AllocateByteArray(100, TENURED)->IsFailure()); + heap->AllocateByteArray(100, TENURED).ToObjectChecked(); // Old pointer space. SimulateFullSpace(heap->old_pointer_space()); - CHECK(!heap->AllocateFixedArray(10000, TENURED)->IsFailure()); + heap->AllocateFixedArray(10000, TENURED).ToObjectChecked(); // Large object space. static const int kLargeObjectSpaceFillerLength = 300000; @@ -68,22 +69,22 @@ static MaybeObject* AllocateAfterFailures() { kLargeObjectSpaceFillerLength); ASSERT(kLargeObjectSpaceFillerSize > heap->old_pointer_space()->AreaSize()); while (heap->OldGenerationSpaceAvailable() > kLargeObjectSpaceFillerSize) { - CHECK(!heap->AllocateFixedArray(kLargeObjectSpaceFillerLength, TENURED)-> - IsFailure()); + heap->AllocateFixedArray( + kLargeObjectSpaceFillerLength, TENURED).ToObjectChecked(); } - CHECK(!heap->AllocateFixedArray(kLargeObjectSpaceFillerLength, TENURED)-> - IsFailure()); + heap->AllocateFixedArray( + kLargeObjectSpaceFillerLength, TENURED).ToObjectChecked(); // Map space. 
SimulateFullSpace(heap->map_space()); int instance_size = JSObject::kHeaderSize; - CHECK(!heap->AllocateMap(JS_OBJECT_TYPE, instance_size)->IsFailure()); + heap->AllocateMap(JS_OBJECT_TYPE, instance_size).ToObjectChecked(); // Test that we can allocate in old pointer space and code space. SimulateFullSpace(heap->code_space()); - CHECK(!heap->AllocateFixedArray(100, TENURED)->IsFailure()); - CHECK(!heap->CopyCode(CcTest::i_isolate()->builtins()->builtin( - Builtins::kIllegal))->IsFailure()); + heap->AllocateFixedArray(100, TENURED).ToObjectChecked(); + heap->CopyCode(CcTest::i_isolate()->builtins()->builtin( + Builtins::kIllegal)).ToObjectChecked(); // Return success. return Smi::FromInt(42); diff --git a/test/cctest/test-heap.cc b/test/cctest/test-heap.cc index 59b6e25..e91a080 100644 --- a/test/cctest/test-heap.cc +++ b/test/cctest/test-heap.cc @@ -235,11 +235,6 @@ TEST(Tagging) { int request = 24; CHECK_EQ(request, static_cast(OBJECT_POINTER_ALIGN(request))); CHECK(Smi::FromInt(42)->IsSmi()); - CHECK(Failure::RetryAfterGC(NEW_SPACE)->IsFailure()); - CHECK_EQ(NEW_SPACE, - Failure::RetryAfterGC(NEW_SPACE)->allocation_space()); - CHECK_EQ(OLD_POINTER_SPACE, - Failure::RetryAfterGC(OLD_POINTER_SPACE)->allocation_space()); CHECK(Smi::FromInt(Smi::kMinValue)->IsSmi()); CHECK(Smi::FromInt(Smi::kMaxValue)->IsSmi()); } @@ -1017,8 +1012,7 @@ TEST(Regression39128) { Address* limit_addr = new_space->allocation_limit_address(); while ((*limit_addr - *top_addr) > allocation_amount) { CHECK(!heap->always_allocate()); - Object* array = heap->AllocateFixedArray(allocation_len)->ToObjectChecked(); - CHECK(!array->IsFailure()); + Object* array = heap->AllocateFixedArray(allocation_len).ToObjectChecked(); CHECK(new_space->Contains(array)); } @@ -1028,11 +1022,10 @@ TEST(Regression39128) { CHECK(fixed_array_len < FixedArray::kMaxLength); CHECK(!heap->always_allocate()); - Object* array = heap->AllocateFixedArray(fixed_array_len)->ToObjectChecked(); - CHECK(!array->IsFailure()); + 
Object* array = heap->AllocateFixedArray(fixed_array_len).ToObjectChecked(); CHECK(new_space->Contains(array)); - Object* object = heap->AllocateJSObjectFromMap(*my_map)->ToObjectChecked(); + Object* object = heap->AllocateJSObjectFromMap(*my_map).ToObjectChecked(); CHECK(new_space->Contains(object)); JSObject* jsobject = JSObject::cast(object); CHECK_EQ(0, FixedArray::cast(jsobject->elements())->length()); @@ -1046,7 +1039,7 @@ TEST(Regression39128) { // in old pointer space. Address old_pointer_space_top = heap->old_pointer_space()->top(); AlwaysAllocateScope aa_scope(isolate); - Object* clone_obj = heap->CopyJSObject(jsobject)->ToObjectChecked(); + Object* clone_obj = heap->CopyJSObject(jsobject).ToObjectChecked(); JSObject* clone = JSObject::cast(clone_obj); if (clone->address() != old_pointer_space_top) { // Alas, got allocated from free list, we cannot do checks. @@ -1624,7 +1617,7 @@ TEST(TestSizeOfObjects) { AlwaysAllocateScope always_allocate(CcTest::i_isolate()); int filler_size = static_cast(FixedArray::SizeFor(8192)); for (int i = 1; i <= 100; i++) { - CcTest::test_heap()->AllocateFixedArray(8192, TENURED)->ToObjectChecked(); + CcTest::test_heap()->AllocateFixedArray(8192, TENURED).ToObjectChecked(); CHECK_EQ(initial_size + i * filler_size, static_cast(CcTest::heap()->SizeOfObjects())); } @@ -3487,8 +3480,10 @@ static inline void AllocateAllButNBytes(v8::internal::NewSpace* space, *space->allocation_limit_address() - *space->allocation_top_address()); CHECK(space_remaining >= extra_bytes); int new_linear_size = space_remaining - extra_bytes; - v8::internal::MaybeObject* maybe = space->AllocateRaw(new_linear_size); - v8::internal::FreeListNode* node = v8::internal::FreeListNode::cast(maybe); + v8::internal::AllocationResult allocation = + space->AllocateRaw(new_linear_size); + v8::internal::FreeListNode* node = + v8::internal::FreeListNode::cast(allocation.ToObjectChecked()); node->set_size(space->heap(), new_linear_size); } @@ -3546,14 +3541,13 @@ 
TEST(Regress169928) { // We need filler the size of AllocationMemento object, plus an extra // fill pointer value. - MaybeObject* maybe_object = CcTest::heap()->new_space()->AllocateRaw( + HeapObject* obj = NULL; + AllocationResult allocation = CcTest::heap()->new_space()->AllocateRaw( AllocationMemento::kSize + kPointerSize); - Object* obj = NULL; - CHECK(maybe_object->ToObject(&obj)); - Address addr_obj = reinterpret_cast
( - reinterpret_cast(obj - kHeapObjectTag)); - CcTest::heap()->CreateFillerObjectAt(addr_obj, - AllocationMemento::kSize + kPointerSize); + CHECK(allocation.To(&obj)); + Address addr_obj = obj->address(); + CcTest::heap()->CreateFillerObjectAt( + addr_obj, AllocationMemento::kSize + kPointerSize); // Give the array a name, making sure not to allocate strings. v8::Handle array_obj = v8::Utils::ToLocal(array); diff --git a/test/cctest/test-mark-compact.cc b/test/cctest/test-mark-compact.cc index 1a0e94f..5f13bd2 100644 --- a/test/cctest/test-mark-compact.cc +++ b/test/cctest/test-mark-compact.cc @@ -85,7 +85,7 @@ TEST(Promotion) { int array_length = (Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) / (4 * kPointerSize); - Object* obj = heap->AllocateFixedArray(array_length)->ToObjectChecked(); + Object* obj = heap->AllocateFixedArray(array_length).ToObjectChecked(); Handle array(FixedArray::cast(obj)); // Array should be in the new space. @@ -110,7 +110,7 @@ TEST(NoPromotion) { int array_length = (Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) / (2 * kPointerSize); - Object* obj = heap->AllocateFixedArray(array_length)->ToObjectChecked(); + Object* obj = heap->AllocateFixedArray(array_length).ToObjectChecked(); Handle array(FixedArray::cast(obj)); // Array should be in the new space. 
@@ -139,22 +139,19 @@ TEST(MarkCompactCollector) { // keep allocating garbage in new space until it fails const int ARRAY_SIZE = 100; - Object* array; - MaybeObject* maybe_array; + AllocationResult allocation; do { - maybe_array = heap->AllocateFixedArray(ARRAY_SIZE); - } while (maybe_array->ToObject(&array)); + allocation = heap->AllocateFixedArray(ARRAY_SIZE); + } while (!allocation.IsRetry()); heap->CollectGarbage(NEW_SPACE, "trigger 2"); - heap->AllocateFixedArray(ARRAY_SIZE)->ToObjectChecked(); + heap->AllocateFixedArray(ARRAY_SIZE).ToObjectChecked(); // keep allocating maps until it fails - Object* map; - MaybeObject* maybe_map; do { - maybe_map = heap->AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize); - } while (maybe_map->ToObject(&map)); + allocation = heap->AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize); + } while (!allocation.IsRetry()); heap->CollectGarbage(MAP_SPACE, "trigger 3"); - heap->AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize)->ToObjectChecked(); + heap->AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize).ToObjectChecked(); { HandleScope scope(isolate); // allocate a garbage @@ -258,11 +255,11 @@ TEST(ObjectGroups) { v8::HandleScope handle_scope(CcTest::isolate()); Handle g1s1 = - global_handles->Create(heap->AllocateFixedArray(1)->ToObjectChecked()); + global_handles->Create(heap->AllocateFixedArray(1).ToObjectChecked()); Handle g1s2 = - global_handles->Create(heap->AllocateFixedArray(1)->ToObjectChecked()); + global_handles->Create(heap->AllocateFixedArray(1).ToObjectChecked()); Handle g1c1 = - global_handles->Create(heap->AllocateFixedArray(1)->ToObjectChecked()); + global_handles->Create(heap->AllocateFixedArray(1).ToObjectChecked()); std::pair*, int> g1s1_and_id(&g1s1, 1234); GlobalHandles::MakeWeak(g1s1.location(), reinterpret_cast(&g1s1_and_id), @@ -277,11 +274,11 @@ TEST(ObjectGroups) { &WeakPointerCallback); Handle g2s1 = - global_handles->Create(heap->AllocateFixedArray(1)->ToObjectChecked()); + 
global_handles->Create(heap->AllocateFixedArray(1).ToObjectChecked()); Handle g2s2 = - global_handles->Create(heap->AllocateFixedArray(1)->ToObjectChecked()); + global_handles->Create(heap->AllocateFixedArray(1).ToObjectChecked()); Handle g2c1 = - global_handles->Create(heap->AllocateFixedArray(1)->ToObjectChecked()); + global_handles->Create(heap->AllocateFixedArray(1).ToObjectChecked()); std::pair*, int> g2s1_and_id(&g2s1, 1234); GlobalHandles::MakeWeak(g2s1.location(), reinterpret_cast(&g2s1_and_id), @@ -392,7 +389,7 @@ TEST(EmptyObjectGroups) { v8::HandleScope handle_scope(CcTest::isolate()); Handle object = global_handles->Create( - CcTest::test_heap()->AllocateFixedArray(1)->ToObjectChecked()); + CcTest::test_heap()->AllocateFixedArray(1).ToObjectChecked()); TestRetainedObjectInfo info; global_handles->AddObjectGroup(NULL, 0, &info); diff --git a/test/cctest/test-serialize.cc b/test/cctest/test-serialize.cc index 16fcb58..10c35c1 100644 --- a/test/cctest/test-serialize.cc +++ b/test/cctest/test-serialize.cc @@ -297,8 +297,7 @@ static void SanityCheck() { CHECK(isolate->global_object()->IsJSObject()); CHECK(isolate->native_context()->IsContext()); CHECK(CcTest::heap()->string_table()->IsStringTable()); - CHECK(!isolate->factory()->InternalizeOneByteString( - STATIC_ASCII_VECTOR("Empty"))->IsFailure()); + isolate->factory()->InternalizeOneByteString(STATIC_ASCII_VECTOR("Empty")); } diff --git a/test/cctest/test-spaces.cc b/test/cctest/test-spaces.cc index 223912e..47e2536 100644 --- a/test/cctest/test-spaces.cc +++ b/test/cctest/test-spaces.cc @@ -328,9 +328,8 @@ TEST(NewSpace) { CHECK(new_space.HasBeenSetUp()); while (new_space.Available() >= Page::kMaxRegularHeapObjectSize) { - Object* obj = - new_space.AllocateRaw(Page::kMaxRegularHeapObjectSize)-> - ToObjectUnchecked(); + Object* obj = new_space.AllocateRaw( + Page::kMaxRegularHeapObjectSize).ToObjectChecked(); CHECK(new_space.Contains(HeapObject::cast(obj))); } @@ -359,7 +358,7 @@ TEST(OldSpace) { 
CHECK(s->SetUp()); while (s->Available() > 0) { - s->AllocateRaw(Page::kMaxRegularHeapObjectSize)->ToObjectUnchecked(); + s->AllocateRaw(Page::kMaxRegularHeapObjectSize).ToObjectChecked(); } s->TearDown(); @@ -377,7 +376,7 @@ TEST(LargeObjectSpace) { int lo_size = Page::kPageSize; - Object* obj = lo->AllocateRaw(lo_size, NOT_EXECUTABLE)->ToObjectUnchecked(); + Object* obj = lo->AllocateRaw(lo_size, NOT_EXECUTABLE).ToObjectChecked(); CHECK(obj->IsHeapObject()); HeapObject* ho = HeapObject::cast(obj); @@ -390,15 +389,15 @@ TEST(LargeObjectSpace) { while (true) { intptr_t available = lo->Available(); - { MaybeObject* maybe_obj = lo->AllocateRaw(lo_size, NOT_EXECUTABLE); - if (!maybe_obj->ToObject(&obj)) break; + { AllocationResult allocation = lo->AllocateRaw(lo_size, NOT_EXECUTABLE); + if (allocation.IsRetry()) break; } CHECK(lo->Available() < available); }; CHECK(!lo->IsEmpty()); - CHECK(lo->AllocateRaw(lo_size, NOT_EXECUTABLE)->IsFailure()); + CHECK(lo->AllocateRaw(lo_size, NOT_EXECUTABLE).IsRetry()); } diff --git a/test/cctest/test-symbols.cc b/test/cctest/test-symbols.cc index 6fceea6..f0d0ed1 100644 --- a/test/cctest/test-symbols.cc +++ b/test/cctest/test-symbols.cc @@ -33,7 +33,7 @@ TEST(Create) { symbols[i]->Print(); #endif #if VERIFY_HEAP - symbols[i]->Verify(); + symbols[i]->ObjectVerify(); #endif } -- 2.7.4