From 2c94151e6e1938bb36d68ef34cd6e6f24c34811c Mon Sep 17 00:00:00 2001 From: "ishell@chromium.org" Date: Tue, 1 Jul 2014 15:02:31 +0000 Subject: [PATCH] Reland r22082 "Replace HeapNumber as doublebox with an explicit MutableHeapNumber." R=verwaest@chromium.org Review URL: https://codereview.chromium.org/334323003 git-svn-id: https://v8.googlecode.com/svn/branches/bleeding_edge@22129 ce2b1a6d-e550-0410-aec6-3dcde31c8c00 --- include/v8.h | 6 +- src/arm/macro-assembler-arm.cc | 9 +- src/arm/macro-assembler-arm.h | 3 +- src/arm/stub-cache-arm.cc | 5 +- src/arm64/macro-assembler-arm64.cc | 11 +- src/arm64/macro-assembler-arm64.h | 3 +- src/arm64/stub-cache-arm64.cc | 3 +- src/deoptimizer.cc | 8 +- src/factory.cc | 14 +- src/factory.h | 2 +- src/heap-snapshot-generator.cc | 2 + src/heap.cc | 15 +- src/heap.h | 6 +- src/hydrogen.cc | 16 +- src/ia32/macro-assembler-ia32.cc | 10 +- src/ia32/macro-assembler-ia32.h | 3 +- src/ia32/stub-cache-ia32.cc | 2 +- src/json-parser.h | 8 +- src/json-stringifier.h | 1 + src/mark-compact.cc | 1 + src/mips/macro-assembler-mips.cc | 9 +- src/mips/macro-assembler-mips.h | 3 +- src/mips/stub-cache-mips.cc | 5 +- src/objects-debug.cc | 5 +- src/objects-inl.h | 35 +++- src/objects-printer.cc | 5 + src/objects-visiting.cc | 1 + src/objects.cc | 31 ++- src/objects.h | 15 +- src/runtime.cc | 4 +- src/x64/macro-assembler-x64.cc | 9 +- src/x64/macro-assembler-x64.h | 3 +- src/x64/stub-cache-x64.cc | 2 +- src/x87/macro-assembler-x87.cc | 10 +- src/x87/macro-assembler-x87.h | 3 +- src/x87/stub-cache-x87.cc | 2 +- test/cctest/test-heap-profiler.cc | 9 +- test/mjsunit/migrations.js | 311 +++++++++++++++++++++++++++++ test/mjsunit/mjsunit.status | 1 + 39 files changed, 513 insertions(+), 78 deletions(-) create mode 100644 test/mjsunit/migrations.js diff --git a/include/v8.h b/include/v8.h index 6607c45f6..5730fdf8e 100644 --- a/include/v8.h +++ b/include/v8.h @@ -5587,7 +5587,7 @@ class Internals { static const int kNullValueRootIndex = 7; static const 
int kTrueValueRootIndex = 8; static const int kFalseValueRootIndex = 9; - static const int kEmptyStringRootIndex = 160; + static const int kEmptyStringRootIndex = 161; // The external allocation limit should be below 256 MB on all architectures // to avoid that resource-constrained embedders run low on memory. @@ -5602,10 +5602,10 @@ class Internals { static const int kNodeIsIndependentShift = 4; static const int kNodeIsPartiallyDependentShift = 5; - static const int kJSObjectType = 0xbb; + static const int kJSObjectType = 0xbc; static const int kFirstNonstringType = 0x80; static const int kOddballType = 0x83; - static const int kForeignType = 0x87; + static const int kForeignType = 0x88; static const int kUndefinedOddballKind = 5; static const int kNullOddballKind = 3; diff --git a/src/arm/macro-assembler-arm.cc b/src/arm/macro-assembler-arm.cc index c35fb9e50..0d7a1d8bc 100644 --- a/src/arm/macro-assembler-arm.cc +++ b/src/arm/macro-assembler-arm.cc @@ -3274,14 +3274,19 @@ void MacroAssembler::AllocateHeapNumber(Register result, Register scratch2, Register heap_number_map, Label* gc_required, - TaggingMode tagging_mode) { + TaggingMode tagging_mode, + MutableMode mode) { // Allocate an object in the heap for the heap number and tag it as a heap // object. Allocate(HeapNumber::kSize, result, scratch1, scratch2, gc_required, tagging_mode == TAG_RESULT ? TAG_OBJECT : NO_ALLOCATION_FLAGS); + Heap::RootListIndex map_index = mode == MUTABLE + ? Heap::kMutableHeapNumberMapRootIndex + : Heap::kHeapNumberMapRootIndex; + AssertIsRoot(heap_number_map, map_index); + // Store heap number map in the allocated object. 
- AssertIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex); if (tagging_mode == TAG_RESULT) { str(heap_number_map, FieldMemOperand(result, HeapObject::kMapOffset)); } else { diff --git a/src/arm/macro-assembler-arm.h b/src/arm/macro-assembler-arm.h index 2d915757b..e30096ae9 100644 --- a/src/arm/macro-assembler-arm.h +++ b/src/arm/macro-assembler-arm.h @@ -778,7 +778,8 @@ class MacroAssembler: public Assembler { Register scratch2, Register heap_number_map, Label* gc_required, - TaggingMode tagging_mode = TAG_RESULT); + TaggingMode tagging_mode = TAG_RESULT, + MutableMode mode = IMMUTABLE); void AllocateHeapNumberWithValue(Register result, DwVfpRegister value, Register scratch1, diff --git a/src/arm/stub-cache-arm.cc b/src/arm/stub-cache-arm.cc index bdf04c05f..1f7c15249 100644 --- a/src/arm/stub-cache-arm.cc +++ b/src/arm/stub-cache-arm.cc @@ -426,8 +426,9 @@ void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm, } } else if (representation.IsDouble()) { Label do_store, heap_number; - __ LoadRoot(scratch3, Heap::kHeapNumberMapRootIndex); - __ AllocateHeapNumber(storage_reg, scratch1, scratch2, scratch3, slow); + __ LoadRoot(scratch3, Heap::kMutableHeapNumberMapRootIndex); + __ AllocateHeapNumber(storage_reg, scratch1, scratch2, scratch3, slow, + TAG_RESULT, MUTABLE); __ JumpIfNotSmi(value_reg, &heap_number); __ SmiUntag(scratch1, value_reg); diff --git a/src/arm64/macro-assembler-arm64.cc b/src/arm64/macro-assembler-arm64.cc index eb8f0d282..343a9e399 100644 --- a/src/arm64/macro-assembler-arm64.cc +++ b/src/arm64/macro-assembler-arm64.cc @@ -3570,7 +3570,8 @@ void MacroAssembler::AllocateHeapNumber(Register result, Register scratch1, Register scratch2, CPURegister value, - CPURegister heap_number_map) { + CPURegister heap_number_map, + MutableMode mode) { ASSERT(!value.IsValid() || value.Is64Bits()); UseScratchRegisterScope temps(this); @@ -3579,6 +3580,10 @@ void MacroAssembler::AllocateHeapNumber(Register result, Allocate(HeapNumber::kSize, 
result, scratch1, scratch2, gc_required, NO_ALLOCATION_FLAGS); + Heap::RootListIndex map_index = mode == MUTABLE + ? Heap::kMutableHeapNumberMapRootIndex + : Heap::kHeapNumberMapRootIndex; + // Prepare the heap number map. if (!heap_number_map.IsValid()) { // If we have a valid value register, use the same type of register to store @@ -3588,7 +3593,7 @@ void MacroAssembler::AllocateHeapNumber(Register result, } else { heap_number_map = scratch1; } - LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex); + LoadRoot(heap_number_map, map_index); } if (emit_debug_code()) { Register map; @@ -3598,7 +3603,7 @@ void MacroAssembler::AllocateHeapNumber(Register result, } else { map = Register(heap_number_map); } - AssertRegisterIsRoot(map, Heap::kHeapNumberMapRootIndex); + AssertRegisterIsRoot(map, map_index); } // Store the heap number map and the value in the allocated object. diff --git a/src/arm64/macro-assembler-arm64.h b/src/arm64/macro-assembler-arm64.h index 544d7bd25..95d69990d 100644 --- a/src/arm64/macro-assembler-arm64.h +++ b/src/arm64/macro-assembler-arm64.h @@ -1372,7 +1372,8 @@ class MacroAssembler : public Assembler { Register scratch1, Register scratch2, CPURegister value = NoFPReg, - CPURegister heap_number_map = NoReg); + CPURegister heap_number_map = NoReg, + MutableMode mode = IMMUTABLE); // --------------------------------------------------------------------------- // Support functions. 
diff --git a/src/arm64/stub-cache-arm64.cc b/src/arm64/stub-cache-arm64.cc index a0ce997fe..85774d982 100644 --- a/src/arm64/stub-cache-arm64.cc +++ b/src/arm64/stub-cache-arm64.cc @@ -399,7 +399,8 @@ void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm, __ Ldr(temp_double, FieldMemOperand(value_reg, HeapNumber::kValueOffset)); __ Bind(&do_store); - __ AllocateHeapNumber(storage_reg, slow, scratch1, scratch2, temp_double); + __ AllocateHeapNumber(storage_reg, slow, scratch1, scratch2, temp_double, + NoReg, MUTABLE); } // Stub never generated for non-global objects that require access checks. diff --git a/src/deoptimizer.cc b/src/deoptimizer.cc index 08fb148c0..4c98b8473 100644 --- a/src/deoptimizer.cc +++ b/src/deoptimizer.cc @@ -1813,9 +1813,11 @@ Handle Deoptimizer::MaterializeNextHeapObject() { Handle map = Map::GeneralizeAllFieldRepresentations( Handle::cast(MaterializeNextValue())); switch (map->instance_type()) { + case MUTABLE_HEAP_NUMBER_TYPE: case HEAP_NUMBER_TYPE: { // Reuse the HeapNumber value directly as it is already properly - // tagged and skip materializing the HeapNumber explicitly. + // tagged and skip materializing the HeapNumber explicitly. Turn mutable + // heap numbers immutable. 
Handle object = MaterializeNextValue(); if (object_index < prev_materialized_count_) { materialized_objects_->Add(Handle( @@ -1877,6 +1879,9 @@ Handle Deoptimizer::MaterializeNextHeapObject() { Handle Deoptimizer::MaterializeNextValue() { int value_index = materialization_value_index_++; Handle value = materialized_values_->at(value_index); + if (value->IsMutableHeapNumber()) { + HeapNumber::cast(*value)->set_map(isolate_->heap()->heap_number_map()); + } if (*value == isolate_->heap()->arguments_marker()) { value = MaterializeNextHeapObject(); } @@ -3383,6 +3388,7 @@ Handle SlotRefValueBuilder::GetNext(Isolate* isolate, int lvl) { // TODO(jarin) this should be unified with the code in // Deoptimizer::MaterializeNextHeapObject() switch (map->instance_type()) { + case MUTABLE_HEAP_NUMBER_TYPE: case HEAP_NUMBER_TYPE: { // Reuse the HeapNumber value directly as it is already properly // tagged and skip materializing the HeapNumber explicitly. diff --git a/src/factory.cc b/src/factory.cc index 8e903f165..a09c7841e 100644 --- a/src/factory.cc +++ b/src/factory.cc @@ -1008,7 +1008,7 @@ Handle Factory::NewNumber(double value, // We need to distinguish the minus zero value and this cannot be // done after conversion to int. Doing this by comparing bit // patterns is faster than using fpclassify() et al. - if (IsMinusZero(value)) return NewHeapNumber(-0.0, pretenure); + if (IsMinusZero(value)) return NewHeapNumber(-0.0, IMMUTABLE, pretenure); int int_value = FastD2I(value); if (value == int_value && Smi::IsValid(int_value)) { @@ -1016,15 +1016,15 @@ Handle Factory::NewNumber(double value, } // Materialize the value in the heap. - return NewHeapNumber(value, pretenure); + return NewHeapNumber(value, IMMUTABLE, pretenure); } Handle Factory::NewNumberFromInt(int32_t value, PretenureFlag pretenure) { if (Smi::IsValid(value)) return handle(Smi::FromInt(value), isolate()); - // Bypass NumberFromDouble to avoid various redundant checks. 
- return NewHeapNumber(FastI2D(value), pretenure); + // Bypass NewNumber to avoid various redundant checks. + return NewHeapNumber(FastI2D(value), IMMUTABLE, pretenure); } @@ -1034,15 +1034,17 @@ Handle Factory::NewNumberFromUint(uint32_t value, if (int32v >= 0 && Smi::IsValid(int32v)) { return handle(Smi::FromInt(int32v), isolate()); } - return NewHeapNumber(FastUI2D(value), pretenure); + return NewHeapNumber(FastUI2D(value), IMMUTABLE, pretenure); } Handle Factory::NewHeapNumber(double value, + MutableMode mode, PretenureFlag pretenure) { CALL_HEAP_FUNCTION( isolate(), - isolate()->heap()->AllocateHeapNumber(value, pretenure), HeapNumber); + isolate()->heap()->AllocateHeapNumber(value, mode, pretenure), + HeapNumber); } diff --git a/src/factory.h b/src/factory.h index 25051da05..3736b4bd6 100644 --- a/src/factory.h +++ b/src/factory.h @@ -352,9 +352,9 @@ class Factory V8_FINAL { return NewNumber(static_cast(value), pretenure); } Handle NewHeapNumber(double value, + MutableMode mode = IMMUTABLE, PretenureFlag pretenure = NOT_TENURED); - // These objects are used by the api to create env-independent data // structures in the heap. 
inline Handle NewNeanderObject() { diff --git a/src/heap-snapshot-generator.cc b/src/heap-snapshot-generator.cc index 80d3c1061..a713d4027 100644 --- a/src/heap-snapshot-generator.cc +++ b/src/heap-snapshot-generator.cc @@ -1641,6 +1641,8 @@ void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj, int entry) { for (int i = 0; i < real_size; i++) { switch (descs->GetType(i)) { case FIELD: { + Representation r = descs->GetDetails(i).representation(); + if (r.IsSmi() || r.IsDouble()) break; int index = descs->GetFieldIndex(i); Name* k = descs->GetKey(i); diff --git a/src/heap.cc b/src/heap.cc index 812fec127..3d9797423 100644 --- a/src/heap.cc +++ b/src/heap.cc @@ -2528,6 +2528,8 @@ bool Heap::CreateInitialMaps() { ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, scope_info) ALLOCATE_MAP(HEAP_NUMBER_TYPE, HeapNumber::kSize, heap_number) + ALLOCATE_MAP( + MUTABLE_HEAP_NUMBER_TYPE, HeapNumber::kSize, mutable_heap_number) ALLOCATE_MAP(SYMBOL_TYPE, Symbol::kSize, symbol) ALLOCATE_MAP(FOREIGN_TYPE, Foreign::kSize, foreign) @@ -2652,6 +2654,7 @@ bool Heap::CreateInitialMaps() { AllocationResult Heap::AllocateHeapNumber(double value, + MutableMode mode, PretenureFlag pretenure) { // Statically ensure that it is safe to allocate heap numbers in paged // spaces. @@ -2665,7 +2668,8 @@ AllocationResult Heap::AllocateHeapNumber(double value, if (!allocation.To(&result)) return allocation; } - result->set_map_no_write_barrier(heap_number_map()); + Map* map = mode == MUTABLE ? mutable_heap_number_map() : heap_number_map(); + HeapObject::cast(result)->set_map_no_write_barrier(map); HeapNumber::cast(result)->set_value(value); return result; } @@ -2771,12 +2775,13 @@ void Heap::CreateInitialObjects() { HandleScope scope(isolate()); Factory* factory = isolate()->factory(); - // The -0 value must be set before NumberFromDouble works. - set_minus_zero_value(*factory->NewHeapNumber(-0.0, TENURED)); + // The -0 value must be set before NewNumber works. 
+ set_minus_zero_value(*factory->NewHeapNumber(-0.0, IMMUTABLE, TENURED)); ASSERT(std::signbit(minus_zero_value()->Number()) != 0); - set_nan_value(*factory->NewHeapNumber(base::OS::nan_value(), TENURED)); - set_infinity_value(*factory->NewHeapNumber(V8_INFINITY, TENURED)); + set_nan_value( + *factory->NewHeapNumber(base::OS::nan_value(), IMMUTABLE, TENURED)); + set_infinity_value(*factory->NewHeapNumber(V8_INFINITY, IMMUTABLE, TENURED)); // The hole has not been created yet, but we want to put something // predictable in the gaps in the string table, so lets make that Smi zero. diff --git a/src/heap.h b/src/heap.h index 754b5588e..f3b02ce63 100644 --- a/src/heap.h +++ b/src/heap.h @@ -42,6 +42,7 @@ namespace internal { V(Map, shared_function_info_map, SharedFunctionInfoMap) \ V(Map, meta_map, MetaMap) \ V(Map, heap_number_map, HeapNumberMap) \ + V(Map, mutable_heap_number_map, MutableHeapNumberMap) \ V(Map, native_context_map, NativeContextMap) \ V(Map, fixed_array_map, FixedArrayMap) \ V(Map, code_map, CodeMap) \ @@ -230,6 +231,7 @@ namespace internal { V(shared_function_info_map) \ V(meta_map) \ V(heap_number_map) \ + V(mutable_heap_number_map) \ V(native_context_map) \ V(fixed_array_map) \ V(code_map) \ @@ -1460,7 +1462,9 @@ class Heap { // Allocated a HeapNumber from value. 
MUST_USE_RESULT AllocationResult AllocateHeapNumber( - double value, PretenureFlag pretenure = NOT_TENURED); + double value, + MutableMode mode = IMMUTABLE, + PretenureFlag pretenure = NOT_TENURED); // Allocate a byte array of the specified length MUST_USE_RESULT AllocationResult AllocateByteArray( diff --git a/src/hydrogen.cc b/src/hydrogen.cc index a4b72a72a..d21541b28 100644 --- a/src/hydrogen.cc +++ b/src/hydrogen.cc @@ -5781,8 +5781,9 @@ HInstruction* HOptimizedGraphBuilder::BuildStoreNamedField( HInstruction* heap_number = Add(heap_number_size, HType::HeapObject(), NOT_TENURED, - HEAP_NUMBER_TYPE); - AddStoreMapConstant(heap_number, isolate()->factory()->heap_number_map()); + MUTABLE_HEAP_NUMBER_TYPE); + AddStoreMapConstant( + heap_number, isolate()->factory()->mutable_heap_number_map()); Add(heap_number, HObjectAccess::ForHeapNumberValue(), value); instr = New(checked_object->ActualValue(), @@ -10950,11 +10951,14 @@ void HOptimizedGraphBuilder::BuildEmitInObjectProperties( // 2) we can just use the mode of the parent object for pretenuring HInstruction* double_box = Add(heap_number_constant, HType::HeapObject(), - pretenure_flag, HEAP_NUMBER_TYPE); + pretenure_flag, MUTABLE_HEAP_NUMBER_TYPE); AddStoreMapConstant(double_box, - isolate()->factory()->heap_number_map()); - Add(double_box, HObjectAccess::ForHeapNumberValue(), - Add(value)); + isolate()->factory()->mutable_heap_number_map()); + // Unwrap the mutable heap number from the boilerplate. 
+ HValue* double_value = + Add(Handle::cast(value)->value()); + Add( + double_box, HObjectAccess::ForHeapNumberValue(), double_value); value_instruction = double_box; } else if (representation.IsSmi()) { value_instruction = value->IsUninitialized() diff --git a/src/ia32/macro-assembler-ia32.cc b/src/ia32/macro-assembler-ia32.cc index b3cb50764..8276bc1fb 100644 --- a/src/ia32/macro-assembler-ia32.cc +++ b/src/ia32/macro-assembler-ia32.cc @@ -1683,14 +1683,18 @@ void MacroAssembler::UndoAllocationInNewSpace(Register object) { void MacroAssembler::AllocateHeapNumber(Register result, Register scratch1, Register scratch2, - Label* gc_required) { + Label* gc_required, + MutableMode mode) { // Allocate heap number in new space. Allocate(HeapNumber::kSize, result, scratch1, scratch2, gc_required, TAG_OBJECT); + Handle map = mode == MUTABLE + ? isolate()->factory()->mutable_heap_number_map() + : isolate()->factory()->heap_number_map(); + // Set the map. - mov(FieldOperand(result, HeapObject::kMapOffset), - Immediate(isolate()->factory()->heap_number_map())); + mov(FieldOperand(result, HeapObject::kMapOffset), Immediate(map)); } diff --git a/src/ia32/macro-assembler-ia32.h b/src/ia32/macro-assembler-ia32.h index b0b61f7a8..fdb22286c 100644 --- a/src/ia32/macro-assembler-ia32.h +++ b/src/ia32/macro-assembler-ia32.h @@ -638,7 +638,8 @@ class MacroAssembler: public Assembler { void AllocateHeapNumber(Register result, Register scratch1, Register scratch2, - Label* gc_required); + Label* gc_required, + MutableMode mode = IMMUTABLE); // Allocate a sequential string. All the header fields of the string object // are initialized. 
diff --git a/src/ia32/stub-cache-ia32.cc b/src/ia32/stub-cache-ia32.cc index 4b5d16973..487800142 100644 --- a/src/ia32/stub-cache-ia32.cc +++ b/src/ia32/stub-cache-ia32.cc @@ -523,7 +523,7 @@ void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm, } } else if (representation.IsDouble()) { Label do_store, heap_number; - __ AllocateHeapNumber(storage_reg, scratch1, scratch2, slow); + __ AllocateHeapNumber(storage_reg, scratch1, scratch2, slow, MUTABLE); __ JumpIfNotSmi(value_reg, &heap_number); __ SmiUntag(value_reg); diff --git a/src/json-parser.h b/src/json-parser.h index e4f8f8cea..78ac378c1 100644 --- a/src/json-parser.h +++ b/src/json-parser.h @@ -387,11 +387,9 @@ Handle JsonParser::ParseJsonObject() { Representation expected_representation = details.representation(); if (value->FitsRepresentation(expected_representation)) { - // If the target representation is double and the value is already - // double, use the existing box. - if (value->IsSmi() && expected_representation.IsDouble()) { - value = factory()->NewHeapNumber( - Handle::cast(value)->value()); + if (expected_representation.IsDouble()) { + value = Object::NewStorageFor(isolate(), value, + expected_representation); } else if (expected_representation.IsHeapObject() && !target->instance_descriptors()->GetFieldType( descriptor)->NowContains(value)) { diff --git a/src/json-stringifier.h b/src/json-stringifier.h index 0b5abbcdc..0a453ef67 100644 --- a/src/json-stringifier.h +++ b/src/json-stringifier.h @@ -407,6 +407,7 @@ BasicJsonStringifier::Result BasicJsonStringifier::Serialize_( switch (HeapObject::cast(*object)->map()->instance_type()) { case HEAP_NUMBER_TYPE: + case MUTABLE_HEAP_NUMBER_TYPE: if (deferred_string_key) SerializeDeferredKey(comma, key); return SerializeHeapNumber(Handle::cast(object)); case ODDBALL_TYPE: diff --git a/src/mark-compact.cc b/src/mark-compact.cc index 8846f19bb..191b94bfe 100644 --- a/src/mark-compact.cc +++ b/src/mark-compact.cc @@ -292,6 +292,7 @@ class 
VerifyNativeContextSeparationVisitor: public ObjectVisitor { case CODE_TYPE: case FIXED_DOUBLE_ARRAY_TYPE: case HEAP_NUMBER_TYPE: + case MUTABLE_HEAP_NUMBER_TYPE: case INTERCEPTOR_INFO_TYPE: case ODDBALL_TYPE: case SCRIPT_TYPE: diff --git a/src/mips/macro-assembler-mips.cc b/src/mips/macro-assembler-mips.cc index ce5cd90d9..5060e6884 100644 --- a/src/mips/macro-assembler-mips.cc +++ b/src/mips/macro-assembler-mips.cc @@ -3274,14 +3274,19 @@ void MacroAssembler::AllocateHeapNumber(Register result, Register scratch2, Register heap_number_map, Label* need_gc, - TaggingMode tagging_mode) { + TaggingMode tagging_mode, + MutableMode mode) { // Allocate an object in the heap for the heap number and tag it as a heap // object. Allocate(HeapNumber::kSize, result, scratch1, scratch2, need_gc, tagging_mode == TAG_RESULT ? TAG_OBJECT : NO_ALLOCATION_FLAGS); + Heap::RootListIndex map_index = mode == MUTABLE + ? Heap::kMutableHeapNumberMapRootIndex + : Heap::kHeapNumberMapRootIndex; + AssertIsRoot(heap_number_map, map_index); + // Store heap number map in the allocated object. 
- AssertIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex); if (tagging_mode == TAG_RESULT) { sw(heap_number_map, FieldMemOperand(result, HeapObject::kMapOffset)); } else { diff --git a/src/mips/macro-assembler-mips.h b/src/mips/macro-assembler-mips.h index c07b64bf9..8644827f8 100644 --- a/src/mips/macro-assembler-mips.h +++ b/src/mips/macro-assembler-mips.h @@ -546,7 +546,8 @@ class MacroAssembler: public Assembler { Register scratch2, Register heap_number_map, Label* gc_required, - TaggingMode tagging_mode = TAG_RESULT); + TaggingMode tagging_mode = TAG_RESULT, + MutableMode mode = IMMUTABLE); void AllocateHeapNumberWithValue(Register result, FPURegister value, Register scratch1, diff --git a/src/mips/stub-cache-mips.cc b/src/mips/stub-cache-mips.cc index 16c69d57a..e3d8fe14d 100644 --- a/src/mips/stub-cache-mips.cc +++ b/src/mips/stub-cache-mips.cc @@ -413,8 +413,9 @@ void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm, } } else if (representation.IsDouble()) { Label do_store, heap_number; - __ LoadRoot(scratch3, Heap::kHeapNumberMapRootIndex); - __ AllocateHeapNumber(storage_reg, scratch1, scratch2, scratch3, slow); + __ LoadRoot(scratch3, Heap::kMutableHeapNumberMapRootIndex); + __ AllocateHeapNumber(storage_reg, scratch1, scratch2, scratch3, slow, + TAG_RESULT, MUTABLE); __ JumpIfNotSmi(value_reg, &heap_number); __ SmiUntag(scratch1, value_reg); diff --git a/src/objects-debug.cc b/src/objects-debug.cc index 344fe711b..ee141549e 100644 --- a/src/objects-debug.cc +++ b/src/objects-debug.cc @@ -54,6 +54,7 @@ void HeapObject::HeapObjectVerify() { Map::cast(this)->MapVerify(); break; case HEAP_NUMBER_TYPE: + case MUTABLE_HEAP_NUMBER_TYPE: HeapNumber::cast(this)->HeapNumberVerify(); break; case FIXED_ARRAY_TYPE: @@ -205,7 +206,7 @@ void Symbol::SymbolVerify() { void HeapNumber::HeapNumberVerify() { - CHECK(IsHeapNumber()); + CHECK(IsHeapNumber() || IsMutableHeapNumber()); } @@ -263,7 +264,7 @@ void JSObject::JSObjectVerify() { Representation r 
= descriptors->GetDetails(i).representation(); FieldIndex index = FieldIndex::ForDescriptor(map(), i); Object* value = RawFastPropertyAt(index); - if (r.IsDouble()) ASSERT(value->IsHeapNumber()); + if (r.IsDouble()) ASSERT(value->IsMutableHeapNumber()); if (value->IsUninitialized()) continue; if (r.IsSmi()) ASSERT(value->IsSmi()); if (r.IsHeapObject()) ASSERT(value->IsHeapObject()); diff --git a/src/objects-inl.h b/src/objects-inl.h index bca5a5522..a581d9eb2 100644 --- a/src/objects-inl.h +++ b/src/objects-inl.h @@ -165,6 +165,7 @@ bool Object::IsHeapObject() const { TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE) +TYPE_CHECKER(MutableHeapNumber, MUTABLE_HEAP_NUMBER_TYPE) TYPE_CHECKER(Symbol, SYMBOL_TYPE) @@ -277,10 +278,27 @@ Handle Object::NewStorageFor(Isolate* isolate, return handle(Smi::FromInt(0), isolate); } if (!representation.IsDouble()) return object; + double value; if (object->IsUninitialized()) { - return isolate->factory()->NewHeapNumber(0); + value = 0; + } else if (object->IsMutableHeapNumber()) { + value = HeapNumber::cast(*object)->value(); + } else { + value = object->Number(); + } + return isolate->factory()->NewHeapNumber(value, MUTABLE); +} + + +Handle Object::WrapForRead(Isolate* isolate, + Handle object, + Representation representation) { + ASSERT(!object->IsUninitialized()); + if (!representation.IsDouble()) { + ASSERT(object->FitsRepresentation(representation)); + return object; } - return isolate->factory()->NewHeapNumber(object->Number()); + return isolate->factory()->NewHeapNumber(HeapNumber::cast(*object)->value()); } @@ -3079,7 +3097,6 @@ CAST_ACCESSOR(FixedTypedArrayBase) CAST_ACCESSOR(Foreign) CAST_ACCESSOR(FreeSpace) CAST_ACCESSOR(GlobalObject) -CAST_ACCESSOR(HeapNumber) CAST_ACCESSOR(HeapObject) CAST_ACCESSOR(JSArray) CAST_ACCESSOR(JSArrayBuffer) @@ -5949,6 +5966,18 @@ ACCESSORS(JSModule, scope_info, ScopeInfo, kScopeInfoOffset) ACCESSORS(JSValue, value, Object, kValueOffset) +HeapNumber* HeapNumber::cast(Object* object) { + 
SLOW_ASSERT(object->IsHeapNumber() || object->IsMutableHeapNumber()); + return reinterpret_cast(object); +} + + +const HeapNumber* HeapNumber::cast(const Object* object) { + SLOW_ASSERT(object->IsHeapNumber() || object->IsMutableHeapNumber()); + return reinterpret_cast(object); +} + + ACCESSORS(JSDate, value, Object, kValueOffset) ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset) ACCESSORS(JSDate, year, Object, kYearOffset) diff --git a/src/objects-printer.cc b/src/objects-printer.cc index e86059b71..5d9c517f4 100644 --- a/src/objects-printer.cc +++ b/src/objects-printer.cc @@ -64,6 +64,11 @@ void HeapObject::HeapObjectPrint(FILE* out) { case HEAP_NUMBER_TYPE: HeapNumber::cast(this)->HeapNumberPrint(out); break; + case MUTABLE_HEAP_NUMBER_TYPE: + PrintF(out, "HeapNumberPrint(out); + PrintF(out, ">"); + break; case FIXED_DOUBLE_ARRAY_TYPE: FixedDoubleArray::cast(this)->FixedDoubleArrayPrint(out); break; diff --git a/src/objects-visiting.cc b/src/objects-visiting.cc index f2f47b0f8..7d10fbd27 100644 --- a/src/objects-visiting.cc +++ b/src/objects-visiting.cc @@ -148,6 +148,7 @@ StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId( return kVisitJSFunction; case HEAP_NUMBER_TYPE: + case MUTABLE_HEAP_NUMBER_TYPE: #define EXTERNAL_ARRAY_CASE(Type, type, TYPE, ctype, size) \ case EXTERNAL_##TYPE##_ARRAY_TYPE: diff --git a/src/objects.cc b/src/objects.cc index 176828ec9..785bd4ce4 100644 --- a/src/objects.cc +++ b/src/objects.cc @@ -1540,6 +1540,11 @@ void HeapObject::HeapObjectShortPrint(StringStream* accumulator) { HeapNumber::cast(this)->HeapNumberPrint(accumulator); accumulator->Put('>'); break; + case MUTABLE_HEAP_NUMBER_TYPE: + accumulator->Add("HeapNumberPrint(accumulator); + accumulator->Put('>'); + break; case JS_PROXY_TYPE: accumulator->Add(""); break; @@ -1665,6 +1670,7 @@ void HeapObject::IterateBody(InstanceType type, int object_size, break; case HEAP_NUMBER_TYPE: + case MUTABLE_HEAP_NUMBER_TYPE: case FILLER_TYPE: case BYTE_ARRAY_TYPE: case 
FREE_SPACE_TYPE: @@ -1706,7 +1712,7 @@ bool HeapNumber::HeapNumberBooleanValue() { void HeapNumber::HeapNumberPrint(FILE* out) { - PrintF(out, "%.16g", Number()); + PrintF(out, "%.16g", value()); } @@ -1718,7 +1724,7 @@ void HeapNumber::HeapNumberPrint(StringStream* accumulator) { // print that using vsnprintf (which may truncate but never allocate if // there is no more space in the buffer). EmbeddedVector buffer; - SNPrintF(buffer, "%.16g", Number()); + SNPrintF(buffer, "%.16g", value()); accumulator->Add("%s", buffer.start()); } @@ -2070,8 +2076,8 @@ bool Map::InstancesNeedRewriting(Map* target, DescriptorArray* new_desc = target->instance_descriptors(); int limit = NumberOfOwnDescriptors(); for (int i = 0; i < limit; i++) { - if (new_desc->GetDetails(i).representation().IsDouble() && - !old_desc->GetDetails(i).representation().IsDouble()) { + if (new_desc->GetDetails(i).representation().IsDouble() != + old_desc->GetDetails(i).representation().IsDouble()) { return true; } } @@ -2168,7 +2174,7 @@ void JSObject::MigrateFastToFast(Handle object, Handle new_map) { PropertyDetails details = new_map->GetLastDescriptorDetails(); Handle value; if (details.representation().IsDouble()) { - value = isolate->factory()->NewHeapNumber(0); + value = isolate->factory()->NewHeapNumber(0, MUTABLE); } else { value = isolate->factory()->uninitialized_value(); } @@ -2216,6 +2222,9 @@ void JSObject::MigrateFastToFast(Handle object, Handle new_map) { value = handle(Smi::FromInt(0), isolate); } value = Object::NewStorageFor(isolate, value, details.representation()); + } else if (old_details.representation().IsDouble() && + !details.representation().IsDouble()) { + value = Object::WrapForRead(isolate, value, old_details.representation()); } ASSERT(!(details.representation().IsDouble() && value->IsSmi())); int target_index = new_descriptors->GetFieldIndex(i) - inobject; @@ -2228,7 +2237,7 @@ void JSObject::MigrateFastToFast(Handle object, Handle new_map) { if (details.type() != FIELD) 
continue; Handle value; if (details.representation().IsDouble()) { - value = isolate->factory()->NewHeapNumber(0); + value = isolate->factory()->NewHeapNumber(0, MUTABLE); } else { value = isolate->factory()->uninitialized_value(); } @@ -3975,6 +3984,7 @@ void JSObject::WriteToField(int descriptor, Object* value) { // Nothing more to be done. if (value->IsUninitialized()) return; HeapNumber* box = HeapNumber::cast(RawFastPropertyAt(index)); + ASSERT(box->IsMutableHeapNumber()); box->set_value(value->Number()); } else { FastPropertyAtPut(index, value); @@ -4644,6 +4654,11 @@ void JSObject::MigrateFastToSlow(Handle object, FieldIndex index = FieldIndex::ForDescriptor(*map, i); Handle value( object->RawFastPropertyAt(index), isolate); + if (details.representation().IsDouble()) { + ASSERT(value->IsMutableHeapNumber()); + Handle old = Handle::cast(value); + value = isolate->factory()->NewHeapNumber(old->value()); + } PropertyDetails d = PropertyDetails(details.attributes(), NORMAL, i + 1); dictionary = NameDictionary::Add(dictionary, key, value, d); @@ -5811,7 +5826,7 @@ Handle JSObject::FastPropertyAt(Handle object, FieldIndex index) { Isolate* isolate = object->GetIsolate(); Handle raw_value(object->RawFastPropertyAt(index), isolate); - return Object::NewStorageFor(isolate, raw_value, representation); + return Object::WrapForRead(isolate, raw_value, representation); } @@ -7002,7 +7017,7 @@ Object* JSObject::SlowReverseLookup(Object* value) { Object* property = RawFastPropertyAt(FieldIndex::ForDescriptor(map(), i)); if (descs->GetDetails(i).representation().IsDouble()) { - ASSERT(property->IsHeapNumber()); + ASSERT(property->IsMutableHeapNumber()); if (value->IsNumber() && property->Number() == value->Number()) { return descs->GetKey(i); } diff --git a/src/objects.h b/src/objects.h index 526760aff..600c371a3 100644 --- a/src/objects.h +++ b/src/objects.h @@ -168,6 +168,12 @@ enum ContextualMode { }; +enum MutableMode { + MUTABLE, + IMMUTABLE +}; + + static const int 
kGrowICDelta = STORE_AND_GROW_NO_TRANSITION - STANDARD_STORE; STATIC_ASSERT(STANDARD_STORE == 0); @@ -352,6 +358,7 @@ const int kStubMinorKeyBits = kBitsPerInt - kSmiTagSize - kStubMajorKeyBits; V(PROPERTY_CELL_TYPE) \ \ V(HEAP_NUMBER_TYPE) \ + V(MUTABLE_HEAP_NUMBER_TYPE) \ V(FOREIGN_TYPE) \ V(BYTE_ARRAY_TYPE) \ V(FREE_SPACE_TYPE) \ @@ -680,6 +687,7 @@ enum InstanceType { // "Data", objects that cannot contain non-map-word pointers to heap // objects. HEAP_NUMBER_TYPE, + MUTABLE_HEAP_NUMBER_TYPE, FOREIGN_TYPE, BYTE_ARRAY_TYPE, FREE_SPACE_TYPE, @@ -900,6 +908,7 @@ template inline bool Is(Object* obj); #define HEAP_OBJECT_TYPE_LIST(V) \ V(HeapNumber) \ + V(MutableHeapNumber) \ V(Name) \ V(UniqueName) \ V(String) \ @@ -1427,7 +1436,7 @@ class Object { } else if (FLAG_track_fields && representation.IsSmi()) { return IsSmi(); } else if (FLAG_track_double_fields && representation.IsDouble()) { - return IsNumber(); + return IsMutableHeapNumber() || IsNumber(); } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) { return IsHeapObject(); } @@ -1440,6 +1449,10 @@ class Object { Handle object, Representation representation); + inline static Handle WrapForRead(Isolate* isolate, + Handle object, + Representation representation); + // Returns true if the object is of the correct type to be used as a // implementation of a JSObject's elements. 
inline bool HasValidElements(); diff --git a/src/runtime.cc b/src/runtime.cc index 44f09161c..9b665a272 100644 --- a/src/runtime.cc +++ b/src/runtime.cc @@ -14542,8 +14542,8 @@ RUNTIME_FUNCTION(Runtime_LoadMutableDouble) { object->properties()->length()); } Handle raw_value(object->RawFastPropertyAt(field_index), isolate); - RUNTIME_ASSERT(raw_value->IsNumber() || raw_value->IsUninitialized()); - return *Object::NewStorageFor(isolate, raw_value, Representation::Double()); + RUNTIME_ASSERT(raw_value->IsMutableHeapNumber()); + return *Object::WrapForRead(isolate, raw_value, Representation::Double()); } diff --git a/src/x64/macro-assembler-x64.cc b/src/x64/macro-assembler-x64.cc index 67ad2a919..b82117788 100644 --- a/src/x64/macro-assembler-x64.cc +++ b/src/x64/macro-assembler-x64.cc @@ -4593,12 +4593,17 @@ void MacroAssembler::UndoAllocationInNewSpace(Register object) { void MacroAssembler::AllocateHeapNumber(Register result, Register scratch, - Label* gc_required) { + Label* gc_required, + MutableMode mode) { // Allocate heap number in new space. Allocate(HeapNumber::kSize, result, scratch, no_reg, gc_required, TAG_OBJECT); + Heap::RootListIndex map_index = mode == MUTABLE + ? Heap::kMutableHeapNumberMapRootIndex + : Heap::kHeapNumberMapRootIndex; + // Set the map. - LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex); + LoadRoot(kScratchRegister, map_index); movp(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister); } diff --git a/src/x64/macro-assembler-x64.h b/src/x64/macro-assembler-x64.h index 8a0ffa615..7a27776a5 100644 --- a/src/x64/macro-assembler-x64.h +++ b/src/x64/macro-assembler-x64.h @@ -1186,7 +1186,8 @@ class MacroAssembler: public Assembler { // space is full. void AllocateHeapNumber(Register result, Register scratch, - Label* gc_required); + Label* gc_required, + MutableMode mode = IMMUTABLE); // Allocate a sequential string. All the header fields of the string object // are initialized. 
diff --git a/src/x64/stub-cache-x64.cc b/src/x64/stub-cache-x64.cc index 9e334239e..b305392ea 100644 --- a/src/x64/stub-cache-x64.cc +++ b/src/x64/stub-cache-x64.cc @@ -489,7 +489,7 @@ void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm, } } else if (representation.IsDouble()) { Label do_store, heap_number; - __ AllocateHeapNumber(storage_reg, scratch1, slow); + __ AllocateHeapNumber(storage_reg, scratch1, slow, MUTABLE); __ JumpIfNotSmi(value_reg, &heap_number); __ SmiToInteger32(scratch1, value_reg); diff --git a/src/x87/macro-assembler-x87.cc b/src/x87/macro-assembler-x87.cc index c54966803..65682b855 100644 --- a/src/x87/macro-assembler-x87.cc +++ b/src/x87/macro-assembler-x87.cc @@ -1576,14 +1576,18 @@ void MacroAssembler::UndoAllocationInNewSpace(Register object) { void MacroAssembler::AllocateHeapNumber(Register result, Register scratch1, Register scratch2, - Label* gc_required) { + Label* gc_required, + MutableMode mode) { // Allocate heap number in new space. Allocate(HeapNumber::kSize, result, scratch1, scratch2, gc_required, TAG_OBJECT); + Handle map = mode == MUTABLE + ? isolate()->factory()->mutable_heap_number_map() + : isolate()->factory()->heap_number_map(); + // Set the map. - mov(FieldOperand(result, HeapObject::kMapOffset), - Immediate(isolate()->factory()->heap_number_map())); + mov(FieldOperand(result, HeapObject::kMapOffset), Immediate(map)); } diff --git a/src/x87/macro-assembler-x87.h b/src/x87/macro-assembler-x87.h index 84141e6bc..70770a662 100644 --- a/src/x87/macro-assembler-x87.h +++ b/src/x87/macro-assembler-x87.h @@ -617,7 +617,8 @@ class MacroAssembler: public Assembler { void AllocateHeapNumber(Register result, Register scratch1, Register scratch2, - Label* gc_required); + Label* gc_required, + MutableMode mode = IMMUTABLE); // Allocate a sequential string. All the header fields of the string object // are initialized. 
diff --git a/src/x87/stub-cache-x87.cc b/src/x87/stub-cache-x87.cc index f8ec4ba8d..faab67a8c 100644 --- a/src/x87/stub-cache-x87.cc +++ b/src/x87/stub-cache-x87.cc @@ -523,7 +523,7 @@ void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm, } } else if (representation.IsDouble()) { Label do_store, heap_number; - __ AllocateHeapNumber(storage_reg, scratch1, scratch2, slow); + __ AllocateHeapNumber(storage_reg, scratch1, scratch2, slow, MUTABLE); __ JumpIfNotSmi(value_reg, &heap_number); __ SmiUntag(value_reg); diff --git a/test/cctest/test-heap-profiler.cc b/test/cctest/test-heap-profiler.cc index bef9897fd..db6b9da2a 100644 --- a/test/cctest/test-heap-profiler.cc +++ b/test/cctest/test-heap-profiler.cc @@ -1688,7 +1688,7 @@ TEST(GetHeapValueForNode) { v8::HandleScope scope(env->GetIsolate()); v8::HeapProfiler* heap_profiler = env->GetIsolate()->GetHeapProfiler(); - CompileRun("a = { s_prop: \'value\', n_prop: 0.1 };"); + CompileRun("a = { s_prop: \'value\', n_prop: \'value2\' };"); const v8::HeapSnapshot* snapshot = heap_profiler->TakeHeapSnapshot(v8_str("value")); CHECK(ValidateSnapshot(snapshot)); @@ -1709,10 +1709,9 @@ TEST(GetHeapValueForNode) { CHECK(js_s_prop == heap_profiler->FindObjectById(s_prop->GetId())); const v8::HeapGraphNode* n_prop = GetProperty(obj, v8::HeapGraphEdge::kProperty, "n_prop"); - v8::Local js_n_prop = - js_obj->Get(v8_str("n_prop")).As(); - CHECK(js_n_prop->NumberValue() == - heap_profiler->FindObjectById(n_prop->GetId())->NumberValue()); + v8::Local js_n_prop = - js_obj->Get(v8_str("n_prop")).As(); + CHECK(js_n_prop == heap_profiler->FindObjectById(n_prop->GetId())); } diff --git a/test/mjsunit/migrations.js b/test/mjsunit/migrations.js new file mode 100644 index 000000000..6a2ea64a7 --- /dev/null +++ b/test/mjsunit/migrations.js @@ -0,0 +1,311 @@ +// Copyright 2014 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file.
+ +// Flags: --allow-natives-syntax --track-fields --expose-gc + +var global = Function('return this')(); +var verbose = 0; + +function test(ctor_desc, use_desc, migr_desc) { + var n = 5; + var objects = []; + var results = []; + + if (verbose) { + print(); + print("==========================================================="); + print("=== " + ctor_desc.name + + " | " + use_desc.name + " |--> " + migr_desc.name); + print("==========================================================="); + } + + // Clean ICs and transitions. + %NotifyContextDisposed(); + gc(); gc(); gc(); + + + // create objects + if (verbose) { + print("-----------------------------"); + print("--- construct"); + print(); + } + for (var i = 0; i < n; i++) { + objects[i] = ctor_desc.ctor.apply(ctor_desc, ctor_desc.args(i)); + } + + try { + // use them + if (verbose) { + print("-----------------------------"); + print("--- use 1"); + print(); + } + var use = use_desc.use1; + for (var i = 0; i < n; i++) { + if (i == 3) %OptimizeFunctionOnNextCall(use); + results[i] = use(objects[i], i); + } + + // trigger migrations + if (verbose) { + print("-----------------------------"); + print("--- trigger migration"); + print(); + } + var migr = migr_desc.migr; + for (var i = 0; i < n; i++) { + if (i == 3) %OptimizeFunctionOnNextCall(migr); + migr(objects[i], i); + } + + // use again + if (verbose) { + print("-----------------------------"); + print("--- use 2"); + print(); + } + var use = use_desc.use2 !== undefined ? 
use_desc.use2 : use_desc.use1; + for (var i = 0; i < n; i++) { + if (i == 3) %OptimizeFunctionOnNextCall(use); + results[i] = use(objects[i], i); + if (verbose >= 2) print(results[i]); + } + + } catch (e) { + if (verbose) print("--- incompatible use: " + e); + } + return results; +} + + +var ctors = [ + { + name: "none-to-double", + ctor: function(v) { return {a: v}; }, + args: function(i) { return [1.5 + i]; }, + }, + { + name: "double", + ctor: function(v) { var o = {}; o.a = v; return o; }, + args: function(i) { return [1.5 + i]; }, + }, + { + name: "none-to-smi", + ctor: function(v) { return {a: v}; }, + args: function(i) { return [i]; }, + }, + { + name: "smi", + ctor: function(v) { var o = {}; o.a = v; return o; }, + args: function(i) { return [i]; }, + }, + { + name: "none-to-object", + ctor: function(v) { return {a: v}; }, + args: function(i) { return ["s"]; }, + }, + { + name: "object", + ctor: function(v) { var o = {}; o.a = v; return o; }, + args: function(i) { return ["s"]; }, + }, + { + name: "{a:, b:, c:}", + ctor: function(v1, v2, v3) { return {a: v1, b: v2, c: v3}; }, + args: function(i) { return [1.5 + i, 1.6, 1.7]; }, + }, + { + name: "{a..h:}", + ctor: function(v) { var o = {}; o.h=o.g=o.f=o.e=o.d=o.c=o.b=o.a=v; return o; }, + args: function(i) { return [1.5 + i]; }, + }, + { + name: "1", + ctor: function(v) { var o = 1; o.a = v; return o; }, + args: function(i) { return [1.5 + i]; }, + }, + { + name: "f()", + ctor: function(v) { var o = function() { return v;}; o.a = v; return o; }, + args: function(i) { return [1.5 + i]; }, + }, + { + name: "f().bind", + ctor: function(v) { var o = function(a,b,c) { return a+b+c; }; o = o.bind(o, v, v+1, v+2.2); return o; }, + args: function(i) { return [1.5 + i]; }, + }, + { + name: "dictionary elements", + ctor: function(v) { var o = []; o[1] = v; o[200000] = v; return o; }, + args: function(i) { return [1.5 + i]; }, + }, + { + name: "json", + ctor: function(v) { var json = '{"a":' + v + ',"b":' + v + '}'; 
return JSON.parse(json); }, + args: function(i) { return [1.5 + i]; }, + }, + { + name: "fast accessors", + accessor: { + get: function() { return this.a_; }, + set: function(value) {this.a_ = value; }, + configurable: true, + }, + ctor: function(v) { + var o = {a_:v}; + Object.defineProperty(o, "a", this.accessor); + return o; + }, + args: function(i) { return [1.5 + i]; }, + }, + { + name: "slow accessor", + accessor1: { value: this.a_, configurable: true }, + accessor2: { + get: function() { return this.a_; }, + set: function(value) {this.a_ = value; }, + configurable: true, + }, + ctor: function(v) { + var o = {a_:v}; + Object.defineProperty(o, "a", this.accessor1); + Object.defineProperty(o, "a", this.accessor2); + return o; + }, + args: function(i) { return [1.5 + i]; }, + }, + { + name: "slow", + proto: {}, + ctor: function(v) { + var o = {__proto__: this.proto}; + o.a = v; + for (var i = 0; %HasFastProperties(o); i++) o["f"+i] = v; + return o; + }, + args: function(i) { return [1.5 + i]; }, + }, + { + name: "global", + ctor: function(v) { return global; }, + args: function(i) { return [i]; }, + }, +]; + + + +var uses = [ + { + name: "o.a+1.0", + use1: function(o, i) { return o.a + 1.0; }, + use2: function(o, i) { return o.a + 1.1; }, + }, + { + name: "o.b+1.0", + use1: function(o, i) { return o.b + 1.0; }, + use2: function(o, i) { return o.b + 1.1; }, + }, + { + name: "o[1]+1.0", + use1: function(o, i) { return o[1] + 1.0; }, + use2: function(o, i) { return o[1] + 1.1; }, + }, + { + name: "o[-1]+1.0", + use1: function(o, i) { return o[-1] + 1.0; }, + use2: function(o, i) { return o[-1] + 1.1; }, + }, + { + name: "()", + use1: function(o, i) { return o() + 1.0; }, + use2: function(o, i) { return o() + 1.1; }, + }, +]; + + + +var migrations = [ + { + name: "to smi", + migr: function(o, i) { if (i == 0) o.a = 1; }, + }, + { + name: "to double", + migr: function(o, i) { if (i == 0) o.a = 1.1; }, + }, + { + name: "to object", + migr: function(o, i) { if (i == 0) 
o.a = {}; }, + }, + { + name: "set prototype {}", + migr: function(o, i) { o.__proto__ = {}; }, + }, + { + name: "%FunctionSetPrototype", + migr: function(o, i) { %FunctionSetPrototype(o, null); }, + }, + { + name: "modify prototype", + migr: function(o, i) { if (i == 0) o.__proto__.__proto1__ = [,,,5,,,]; }, + }, + { + name: "freeze prototype", + migr: function(o, i) { if (i == 0) Object.freeze(o.__proto__); }, + }, + { + name: "delete and re-add property", + migr: function(o, i) { var v = o.a; delete o.a; o.a = v; }, + }, + { + name: "modify prototype", + migr: function(o, i) { if (i >= 0) o.__proto__ = {}; }, + }, + { + name: "set property callback", + migr: function(o, i) { + Object.defineProperty(o, "a", { + get: function() { return 1.5 + i; }, + set: function(value) {}, + configurable: true, + }); + }, + }, + { + name: "observe", + migr: function(o, i) { Object.observe(o, function(){}); }, + }, + { + name: "%EnableAccessChecks", + migr: function(o, i) { + if (typeof (o) !== 'function') %EnableAccessChecks(o); + }, + }, + { + name: "%DisableAccessChecks", + migr: function(o, i) { + if ((typeof (o) !== 'function') && (o !== global)) %DisableAccessChecks(o); + }, + }, + { + name: "seal", + migr: function(o, i) { Object.seal(o); }, + }, + { // Must be the last in the sequence, because after the global object freeze + // the other modifications do not make sense. + name: "freeze", + migr: function(o, i) { Object.freeze(o); }, + }, +]; + + + +migrations.forEach(function(migr) { + uses.forEach(function(use) { + ctors.forEach(function(ctor) { + test(ctor, use, migr); + }); + }); +}); diff --git a/test/mjsunit/mjsunit.status b/test/mjsunit/mjsunit.status index c18a9acef..2e656a9ff 100644 --- a/test/mjsunit/mjsunit.status +++ b/test/mjsunit/mjsunit.status @@ -87,6 +87,7 @@ ############################################################################## # Skip long running tests that time out in debug mode.
'generated-transition-stub': [PASS, ['mode == debug', SKIP]], + 'migrations': [PASS, ['mode == debug', SLOW]], ############################################################################## # This test sets the umask on a per-process basis and hence cannot be -- 2.34.1