From: verwaest@chromium.org Date: Wed, 8 May 2013 15:02:08 +0000 (+0000) Subject: Use mutable heapnumbers to store doubles in fields. X-Git-Tag: upstream/4.7.83~14298 X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=52008429b740625afc289f97e1738e351cd9a194;p=platform%2Fupstream%2Fv8.git Use mutable heapnumbers to store doubles in fields. R=danno@chromium.org Review URL: https://chromiumcodereview.appspot.com/14850006 git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@14597 ce2b1a6d-e550-0410-aec6-3dcde31c8c00 --- diff --git a/src/arm/full-codegen-arm.cc b/src/arm/full-codegen-arm.cc index 58c628b..0bc1f48 100644 --- a/src/arm/full-codegen-arm.cc +++ b/src/arm/full-codegen-arm.cc @@ -1593,7 +1593,8 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { : ObjectLiteral::kNoFlags; __ mov(r0, Operand(Smi::FromInt(flags))); int properties_count = constant_properties->length() / 2; - if (expr->depth() > 1) { + if ((FLAG_track_double_fields && expr->may_store_doubles()) || + expr->depth() > 1) { __ Push(r3, r2, r1, r0); __ CallRuntime(Runtime::kCreateObjectLiteral, 4); } else if (Serializer::enabled() || flags != ObjectLiteral::kFastElements || diff --git a/src/arm/ic-arm.cc b/src/arm/ic-arm.cc index 893ac4e..c644be5 100644 --- a/src/arm/ic-arm.cc +++ b/src/arm/ic-arm.cc @@ -1180,6 +1180,25 @@ void KeyedStoreIC::GenerateMiss(MacroAssembler* masm, ICMissMode miss_mode) { } +void StoreIC::GenerateSlow(MacroAssembler* masm) { + // ---------- S t a t e -------------- + // -- r0 : value + // -- r2 : key + // -- r1 : receiver + // -- lr : return address + // ----------------------------------- + + // Push receiver, key and value for runtime call. + __ Push(r1, r2, r0); + + // The slow case calls into the runtime to complete the store without causing + // an IC miss that would otherwise cause a transition to the generic stub. 
+ ExternalReference ref = + ExternalReference(IC_Utility(kKeyedStoreIC_Slow), masm->isolate()); + __ TailCallExternalReference(ref, 3, 1); +} + + void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) { // ---------- S t a t e -------------- // -- r0 : value diff --git a/src/arm/lithium-arm.cc b/src/arm/lithium-arm.cc index 483a923..3fe46ff 100644 --- a/src/arm/lithium-arm.cc +++ b/src/arm/lithium-arm.cc @@ -2318,17 +2318,22 @@ LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) { : UseRegisterAtStart(instr->object()); } - LOperand* val = - needs_write_barrier || - (FLAG_track_fields && instr->field_representation().IsSmi()) - ? UseTempRegister(instr->value()) : UseRegister(instr->value()); + LOperand* val; + if (needs_write_barrier || + (FLAG_track_fields && instr->field_representation().IsSmi())) { + val = UseTempRegister(instr->value()); + } else if (FLAG_track_double_fields && + instr->field_representation().IsDouble()) { + val = UseRegisterAtStart(instr->value()); + } else { + val = UseRegister(instr->value()); + } // We need a temporary register for write barrier of the map field. LOperand* temp = needs_write_barrier_for_map ? 
TempRegister() : NULL; LStoreNamedField* result = new(zone()) LStoreNamedField(obj, val, temp); - if ((FLAG_track_fields && instr->field_representation().IsSmi()) || - (FLAG_track_double_fields && instr->field_representation().IsDouble())) { + if (FLAG_track_fields && instr->field_representation().IsSmi()) { return AssignEnvironment(result); } return result; diff --git a/src/arm/lithium-codegen-arm.cc b/src/arm/lithium-codegen-arm.cc index 8b3fc94..ded6487 100644 --- a/src/arm/lithium-codegen-arm.cc +++ b/src/arm/lithium-codegen-arm.cc @@ -3057,31 +3057,20 @@ void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) { void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) { + int offset = instr->hydrogen()->offset(); Register object = ToRegister(instr->object()); - if (!FLAG_track_double_fields) { - ASSERT(!instr->hydrogen()->representation().IsDouble()); + if (instr->hydrogen()->representation().IsDouble()) { + DwVfpRegister result = ToDoubleRegister(instr->result()); + __ vldr(result, FieldMemOperand(object, offset)); + return; } - Register temp = instr->hydrogen()->representation().IsDouble() - ? 
scratch0() : ToRegister(instr->result()); + + Register result = ToRegister(instr->result()); if (instr->hydrogen()->is_in_object()) { - __ ldr(temp, FieldMemOperand(object, instr->hydrogen()->offset())); + __ ldr(result, FieldMemOperand(object, offset)); } else { - __ ldr(temp, FieldMemOperand(object, JSObject::kPropertiesOffset)); - __ ldr(temp, FieldMemOperand(temp, instr->hydrogen()->offset())); - } - - if (instr->hydrogen()->representation().IsDouble()) { - Label load_from_heap_number, done; - DwVfpRegister result = ToDoubleRegister(instr->result()); - SwVfpRegister flt_scratch = double_scratch0().low(); - __ JumpIfNotSmi(temp, &load_from_heap_number); - __ SmiUntag(temp); - __ vmov(flt_scratch, temp); - __ vcvt_f64_s32(result, flt_scratch); - __ b(&done); - __ bind(&load_from_heap_number); - __ vldr(result, FieldMemOperand(temp, HeapNumber::kValueOffset)); - __ bind(&done); + __ ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); + __ ldr(result, FieldMemOperand(result, offset)); } } @@ -4230,29 +4219,26 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { Representation representation = instr->representation(); Register object = ToRegister(instr->object()); - Register value = ToRegister(instr->value()); - ASSERT(!object.is(value)); Register scratch = scratch0(); int offset = instr->offset(); + Handle transition = instr->transition(); + if (FLAG_track_fields && representation.IsSmi()) { + Register value = ToRegister(instr->value()); __ SmiTag(value, value, SetCC); if (!instr->hydrogen()->value()->range()->IsInSmiRange()) { DeoptimizeIf(vs, instr->environment()); } - } else if (FLAG_track_double_fields && representation.IsDouble() && - !instr->hydrogen()->value()->type().IsSmi() && - !instr->hydrogen()->value()->type().IsHeapNumber()) { - Label do_store; - __ JumpIfSmi(value, &do_store); - Handle map(isolate()->factory()->heap_number_map()); - - __ ldr(scratch, FieldMemOperand(value, HeapObject::kMapOffset)); - DoCheckMapCommon(scratch, 
map, REQUIRE_EXACT_MAP, instr->environment()); - __ bind(&do_store); + } else if (FLAG_track_double_fields && representation.IsDouble()) { + ASSERT(transition.is_null()); + ASSERT(instr->is_in_object()); + ASSERT(!instr->hydrogen()->NeedsWriteBarrier()); + DwVfpRegister value = ToDoubleRegister(instr->value()); + __ vstr(value, FieldMemOperand(object, offset)); + return; } - Handle transition = instr->transition(); if (!transition.is_null()) { if (transition->CanBeDeprecated()) { transition_maps_.Add(transition, info()->zone()); @@ -4274,6 +4260,8 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { } // Do the store. + Register value = ToRegister(instr->value()); + ASSERT(!object.is(value)); HType type = instr->hydrogen()->value()->type(); SmiCheck check_needed = type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; @@ -5562,7 +5550,8 @@ void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) { // Pick the right runtime function or stub to call. int properties_count = instr->hydrogen()->constant_properties_length() / 2; - if (instr->hydrogen()->depth() > 1) { + if ((FLAG_track_double_fields && instr->hydrogen()->may_store_doubles()) || + instr->hydrogen()->depth() > 1) { __ Push(r3, r2, r1, r0); CallRuntime(Runtime::kCreateObjectLiteral, 4, instr); } else if (flags != ObjectLiteral::kFastElements || diff --git a/src/arm/stub-cache-arm.cc b/src/arm/stub-cache-arm.cc index 0627d70..127bf3f 100644 --- a/src/arm/stub-cache-arm.cc +++ b/src/arm/stub-cache-arm.cc @@ -315,11 +315,13 @@ void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype( } -void StubCompiler::DoGenerateFastPropertyLoad(MacroAssembler* masm, - Register dst, - Register src, - bool inobject, - int index) { +void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm, + Register dst, + Register src, + bool inobject, + int index, + Representation representation) { + ASSERT(!FLAG_track_double_fields || !representation.IsDouble()); int offset = index * kPointerSize; if (!inobject) { // 
Calculate the offset into the properties array. @@ -451,8 +453,10 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm, Register value_reg, Register scratch1, Register scratch2, + Register scratch3, Label* miss_label, - Label* miss_restore_name) { + Label* miss_restore_name, + Label* slow) { // r0 : value Label exit; @@ -474,16 +478,6 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm, // Ensure no transitions to deprecated maps are followed. __ CheckMapDeprecated(transition, scratch1, miss_label); - if (FLAG_track_fields && representation.IsSmi()) { - __ JumpIfNotSmi(value_reg, miss_label); - } else if (FLAG_track_double_fields && representation.IsDouble()) { - Label do_store; - __ JumpIfSmi(value_reg, &do_store); - __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex, - miss_label, DONT_DO_SMI_CHECK); - __ bind(&do_store); - } - // Check that we are allowed to write this. if (object->GetPrototype()->IsJSObject()) { JSObject* holder; @@ -499,7 +493,7 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm, } Register holder_reg = CheckPrototypes( object, receiver_reg, Handle(holder), name_reg, - scratch1, scratch2, name, miss_restore_name); + scratch1, scratch2, name, miss_restore_name, SKIP_RECEIVER); // If no property was found, and the holder (the last object in the // prototype chain) is in slow mode, we need to do a negative lookup on the // holder. 
@@ -518,6 +512,30 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm, } } + Register storage_reg = name_reg; + + if (FLAG_track_fields && representation.IsSmi()) { + __ JumpIfNotSmi(value_reg, miss_restore_name); + } else if (FLAG_track_double_fields && representation.IsDouble()) { + Label do_store, heap_number; + __ LoadRoot(scratch3, Heap::kHeapNumberMapRootIndex); + __ AllocateHeapNumber(storage_reg, scratch1, scratch2, scratch3, slow); + + __ JumpIfNotSmi(value_reg, &heap_number); + __ SmiUntag(scratch1, value_reg); + __ vmov(s0, scratch1); + __ vcvt_f64_s32(d0, s0); + __ jmp(&do_store); + + __ bind(&heap_number); + __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex, + miss_restore_name, DONT_DO_SMI_CHECK); + __ vldr(d0, FieldMemOperand(value_reg, HeapNumber::kValueOffset)); + + __ bind(&do_store); + __ vstr(d0, FieldMemOperand(storage_reg, HeapNumber::kValueOffset)); + } + // Stub never generated for non-global objects that require access // checks. ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded()); @@ -546,7 +564,7 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm, __ RecordWriteField(receiver_reg, HeapObject::kMapOffset, scratch1, - name_reg, + scratch2, kLRHasNotBeenSaved, kDontSaveFPRegs, OMIT_REMEMBERED_SET, @@ -564,7 +582,11 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm, if (index < 0) { // Set the property straight into the object. int offset = object->map()->instance_size() + (index * kPointerSize); - __ str(value_reg, FieldMemOperand(receiver_reg, offset)); + if (FLAG_track_double_fields && representation.IsDouble()) { + __ str(storage_reg, FieldMemOperand(receiver_reg, offset)); + } else { + __ str(value_reg, FieldMemOperand(receiver_reg, offset)); + } if (!FLAG_track_fields || !representation.IsSmi()) { // Skip updating write barrier if storing a smi. 
@@ -572,7 +594,11 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm, // Update the write barrier for the array address. // Pass the now unused name_reg as a scratch register. - __ mov(name_reg, value_reg); + if (!FLAG_track_double_fields || !representation.IsDouble()) { + __ mov(name_reg, value_reg); + } else { + ASSERT(storage_reg.is(name_reg)); + } __ RecordWriteField(receiver_reg, offset, name_reg, @@ -586,7 +612,11 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm, // Get the properties array __ ldr(scratch1, FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset)); - __ str(value_reg, FieldMemOperand(scratch1, offset)); + if (FLAG_track_double_fields && representation.IsDouble()) { + __ str(storage_reg, FieldMemOperand(scratch1, offset)); + } else { + __ str(value_reg, FieldMemOperand(scratch1, offset)); + } if (!FLAG_track_fields || !representation.IsSmi()) { // Skip updating write barrier if storing a smi. @@ -594,7 +624,11 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm, // Update the write barrier for the array address. // Ok to clobber receiver_reg and name_reg, since we return. - __ mov(name_reg, value_reg); + if (!FLAG_track_double_fields || !representation.IsDouble()) { + __ mov(name_reg, value_reg); + } else { + ASSERT(storage_reg.is(name_reg)); + } __ RecordWriteField(scratch1, offset, name_reg, @@ -652,11 +686,36 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm, if (FLAG_track_fields && representation.IsSmi()) { __ JumpIfNotSmi(value_reg, miss_label); } else if (FLAG_track_double_fields && representation.IsDouble()) { - Label do_store; - __ JumpIfSmi(value_reg, &do_store); - __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex, + // Load the double storage. 
+ if (index < 0) { + int offset = object->map()->instance_size() + (index * kPointerSize); + __ ldr(scratch1, FieldMemOperand(receiver_reg, offset)); + } else { + __ ldr(scratch1, + FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset)); + int offset = index * kPointerSize + FixedArray::kHeaderSize; + __ ldr(scratch1, FieldMemOperand(scratch1, offset)); + } + + // Store the value into the storage. + Label do_store, heap_number; + __ JumpIfNotSmi(value_reg, &heap_number); + __ SmiUntag(scratch2, value_reg); + __ vmov(s0, scratch2); + __ vcvt_f64_s32(d0, s0); + __ jmp(&do_store); + + __ bind(&heap_number); + __ CheckMap(value_reg, scratch2, Heap::kHeapNumberMapRootIndex, miss_label, DONT_DO_SMI_CHECK); + __ vldr(d0, FieldMemOperand(value_reg, HeapNumber::kValueOffset)); + __ bind(&do_store); + __ vstr(d0, FieldMemOperand(scratch1, HeapNumber::kValueOffset)); + // Return the value (register r0). + ASSERT(value_reg.is(r0)); + __ Ret(); + return; } // TODO(verwaest): Share this code as a code stub. @@ -1309,15 +1368,18 @@ void BaseLoadStubCompiler::NonexistentHandlerFrontend( void BaseLoadStubCompiler::GenerateLoadField(Register reg, Handle holder, - PropertyIndex field) { + PropertyIndex field, + Representation representation) { if (!reg.is(receiver())) __ mov(receiver(), reg); if (kind() == Code::LOAD_IC) { LoadFieldStub stub(field.is_inobject(holder), - field.translate(holder)); + field.translate(holder), + representation); GenerateTailCall(masm(), stub.GetCode(isolate())); } else { KeyedLoadFieldStub stub(field.is_inobject(holder), - field.translate(holder)); + field.translate(holder), + representation); GenerateTailCall(masm(), stub.GetCode(isolate())); } } @@ -1543,7 +1605,8 @@ Handle CallStubCompiler::CompileCallField(Handle object, // Do the right check and compute the holder register. 
Register reg = CheckPrototypes(object, r0, holder, r1, r3, r4, name, &miss); - GenerateFastPropertyLoad(masm(), r1, reg, holder, index); + GenerateFastPropertyLoad(masm(), r1, reg, index.is_inobject(holder), + index.translate(holder), Representation::Tagged()); GenerateCallFunction(masm(), object, arguments(), &miss, extra_state_); diff --git a/src/ast.h b/src/ast.h index b36d967..9ffb00d 100644 --- a/src/ast.h +++ b/src/ast.h @@ -1332,10 +1332,9 @@ class ObjectLiteral: public MaterializedLiteral { return constant_properties_; } ZoneList* properties() const { return properties_; } - bool fast_elements() const { return fast_elements_; } - - bool has_function() { return has_function_; } + bool may_store_doubles() const { return may_store_doubles_; } + bool has_function() const { return has_function_; } // Mark all computed expressions that are bound to a key that // is shadowed by a later occurrence of the same key. For the @@ -1362,17 +1361,20 @@ class ObjectLiteral: public MaterializedLiteral { bool is_simple, bool fast_elements, int depth, + bool may_store_doubles, bool has_function) : MaterializedLiteral(isolate, literal_index, is_simple, depth), constant_properties_(constant_properties), properties_(properties), fast_elements_(fast_elements), + may_store_doubles_(may_store_doubles), has_function_(has_function) {} private: Handle constant_properties_; ZoneList* properties_; bool fast_elements_; + bool may_store_doubles_; bool has_function_; }; @@ -2857,10 +2859,11 @@ class AstNodeFactory BASE_EMBEDDED { bool is_simple, bool fast_elements, int depth, + bool may_store_doubles, bool has_function) { ObjectLiteral* lit = new(zone_) ObjectLiteral( isolate_, constant_properties, properties, literal_index, - is_simple, fast_elements, depth, has_function); + is_simple, fast_elements, depth, may_store_doubles, has_function); VISIT_AND_RETURN(ObjectLiteral, lit) } diff --git a/src/bootstrapper.cc b/src/bootstrapper.cc index 4c0521c..b0d3a5e 100644 --- a/src/bootstrapper.cc 
+++ b/src/bootstrapper.cc @@ -2416,7 +2416,8 @@ void Genesis::TransferNamedProperties(Handle from, HandleScope inner(isolate()); Handle key = Handle(descs->GetKey(i)); int index = descs->GetFieldIndex(i); - Handle value = Handle(from->FastPropertyAt(index), + ASSERT(!descs->GetDetails(i).representation().IsDouble()); + Handle value = Handle(from->RawFastPropertyAt(index), isolate()); CHECK_NOT_EMPTY_HANDLE(isolate(), JSObject::SetLocalPropertyIgnoreAttributes( diff --git a/src/builtins.cc b/src/builtins.cc index 8b5ae01..661ee94 100644 --- a/src/builtins.cc +++ b/src/builtins.cc @@ -1511,6 +1511,11 @@ static void Generate_KeyedLoadIC_NonStrictArguments(MacroAssembler* masm) { KeyedLoadIC::GenerateNonStrictArguments(masm); } +static void Generate_StoreIC_Slow(MacroAssembler* masm) { + StoreIC::GenerateSlow(masm); +} + + static void Generate_StoreIC_Initialize(MacroAssembler* masm) { StoreIC::GenerateInitialize(masm); } diff --git a/src/builtins.h b/src/builtins.h index 8df48a8..6fc17c4 100644 --- a/src/builtins.h +++ b/src/builtins.h @@ -124,6 +124,8 @@ enum BuiltinExtraArguments { Code::kNoExtraICState) \ V(StoreIC_Miss, BUILTIN, UNINITIALIZED, \ Code::kNoExtraICState) \ + V(StoreIC_Slow, BUILTIN, UNINITIALIZED, \ + Code::kNoExtraICState) \ V(KeyedStoreIC_Miss, BUILTIN, UNINITIALIZED, \ Code::kNoExtraICState) \ V(KeyedStoreIC_MissForceGeneric, BUILTIN, UNINITIALIZED, \ diff --git a/src/code-stubs.h b/src/code-stubs.h index 61c121c..646aee2 100644 --- a/src/code-stubs.h +++ b/src/code-stubs.h @@ -781,8 +781,9 @@ class HandlerStub: public HICStub { class LoadFieldStub: public HandlerStub { public: - LoadFieldStub(bool inobject, int index) : HandlerStub() { - Initialize(Code::LOAD_IC, inobject, index); + LoadFieldStub(bool inobject, int index, Representation representation) + : HandlerStub() { + Initialize(Code::LOAD_IC, inobject, index, representation); } virtual Handle GenerateCode(); @@ -792,6 +793,7 @@ class LoadFieldStub: public HandlerStub { 
CodeStubInterfaceDescriptor* descriptor); Representation representation() { + if (unboxed_double()) return Representation::Double(); return Representation::Tagged(); } @@ -810,21 +812,31 @@ class LoadFieldStub: public HandlerStub { return FixedArray::kHeaderSize + offset; } + bool unboxed_double() { + return UnboxedDoubleBits::decode(bit_field_); + } + virtual Code::StubType GetStubType() { return Code::FIELD; } protected: LoadFieldStub() : HandlerStub() { } - void Initialize(Code::Kind kind, bool inobject, int index) { + void Initialize(Code::Kind kind, + bool inobject, + int index, + Representation representation) { + bool unboxed_double = FLAG_track_double_fields && representation.IsDouble(); bit_field_ = KindBits::encode(kind) | InobjectBits::encode(inobject) - | IndexBits::encode(index); + | IndexBits::encode(index) + | UnboxedDoubleBits::encode(unboxed_double); } private: STATIC_ASSERT(KindBits::kSize == 4); class InobjectBits: public BitField {}; class IndexBits: public BitField {}; + class UnboxedDoubleBits: public BitField {}; virtual CodeStub::Major MajorKey() { return LoadField; } virtual int NotMissMinorKey() { return bit_field_; } @@ -834,8 +846,9 @@ class LoadFieldStub: public HandlerStub { class KeyedLoadFieldStub: public LoadFieldStub { public: - KeyedLoadFieldStub(bool inobject, int index) : LoadFieldStub() { - Initialize(Code::KEYED_LOAD_IC, inobject, index); + KeyedLoadFieldStub(bool inobject, int index, Representation representation) + : LoadFieldStub() { + Initialize(Code::KEYED_LOAD_IC, inobject, index, representation); } virtual void InitializeInterfaceDescriptor( diff --git a/src/factory.cc b/src/factory.cc index 46bd4ae..fe71a22 100644 --- a/src/factory.cc +++ b/src/factory.cc @@ -660,6 +660,14 @@ Handle Factory::NewNumberFromUint(uint32_t value, } +Handle Factory::NewHeapNumber(double value, + PretenureFlag pretenure) { + CALL_HEAP_FUNCTION( + isolate(), + isolate()->heap()->AllocateHeapNumber(value, pretenure), HeapNumber); +} + + Handle 
Factory::NewNeanderObject() { CALL_HEAP_FUNCTION( isolate(), diff --git a/src/factory.h b/src/factory.h index ca6ad41..5e89708 100644 --- a/src/factory.h +++ b/src/factory.h @@ -267,6 +267,9 @@ class Factory { Handle NewNumberFromUint(uint32_t value, PretenureFlag pretenure = NOT_TENURED); + Handle NewHeapNumber(double value, + PretenureFlag pretenure = NOT_TENURED); + // These objects are used by the api to create env-independent data // structures in the heap. Handle NewNeanderObject(); diff --git a/src/flag-definitions.h b/src/flag-definitions.h index 1fde179..d5d58a7 100644 --- a/src/flag-definitions.h +++ b/src/flag-definitions.h @@ -231,6 +231,8 @@ DEFINE_bool(trace_gvn, false, "trace global value numbering") DEFINE_bool(trace_representation, false, "trace representation types") DEFINE_bool(trace_track_allocation_sites, false, "trace the tracking of allocation sites") +DEFINE_bool(trace_migration, false, "trace object migration") +DEFINE_bool(trace_generalization, false, "trace map generalization") DEFINE_bool(stress_pointer_maps, false, "pointer map for every instruction") DEFINE_bool(stress_environments, false, "environment for every instruction") DEFINE_int(deopt_every_n_times, diff --git a/src/heap-snapshot-generator.cc b/src/heap-snapshot-generator.cc index fa2c4fd..f488304 100644 --- a/src/heap-snapshot-generator.cc +++ b/src/heap-snapshot-generator.cc @@ -1331,7 +1331,7 @@ void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj, int entry) { js_obj->GetInObjectPropertyOffset(index)); } } else { - Object* value = js_obj->FastPropertyAt(index); + Object* value = js_obj->RawFastPropertyAt(index); if (k != heap_->hidden_string()) { SetPropertyReference(js_obj, entry, k, value); } else { diff --git a/src/hydrogen-instructions.cc b/src/hydrogen-instructions.cc index fc57aa8..8f8c59e 100644 --- a/src/hydrogen-instructions.cc +++ b/src/hydrogen-instructions.cc @@ -2521,6 +2521,8 @@ HLoadNamedFieldPolymorphic::HLoadNamedFieldPolymorphic(HValue* context, 
i < types->length() && types_.length() < kMaxLoadPolymorphism; ++i) { Handle map = types->at(i); + // Deprecated maps are updated to the current map in the type oracle. + ASSERT(!map->is_deprecated()); LookupResult lookup(map->GetIsolate()); map->LookupDescriptor(NULL, *name, &lookup); if (lookup.IsFound()) { @@ -2532,6 +2534,12 @@ HLoadNamedFieldPolymorphic::HLoadNamedFieldPolymorphic(HValue* context, } else { SetGVNFlag(kDependsOnBackingStoreFields); } + if (FLAG_track_double_fields && + lookup.representation().IsDouble()) { + // Since the value needs to be boxed, use a generic handler for + // loading doubles. + continue; + } types_.Add(types->at(i), zone); break; } diff --git a/src/hydrogen-instructions.h b/src/hydrogen-instructions.h index 60f2dd4..3073f45 100644 --- a/src/hydrogen-instructions.h +++ b/src/hydrogen-instructions.h @@ -206,6 +206,7 @@ class LChunkBuilder; V(Calls) \ V(InobjectFields) \ V(BackingStoreFields) \ + V(DoubleFields) \ V(ElementsKind) \ V(ElementsPointer) \ V(ArrayElements) \ @@ -5218,11 +5219,16 @@ class HLoadNamedField: public HTemplateInstruction<2> { set_representation(Representation::Tagged()); } SetFlag(kUseGVN); - SetGVNFlag(kDependsOnMaps); - if (is_in_object) { + if (FLAG_track_double_fields && representation().IsDouble()) { + ASSERT(is_in_object); + ASSERT(offset == HeapNumber::kValueOffset); + SetGVNFlag(kDependsOnDoubleFields); + } else if (is_in_object) { SetGVNFlag(kDependsOnInobjectFields); + SetGVNFlag(kDependsOnMaps); } else { SetGVNFlag(kDependsOnBackingStoreFields); + SetGVNFlag(kDependsOnMaps); } } @@ -5562,7 +5568,7 @@ class HLoadKeyedGeneric: public HTemplateInstruction<3> { class HStoreNamedField: public HTemplateInstruction<2> { public: HStoreNamedField(HValue* obj, - Handle name, + Handle name, HValue* val, bool in_object, Representation field_representation, @@ -5576,18 +5582,25 @@ class HStoreNamedField: public HTemplateInstruction<2> { SetOperandAt(0, obj); SetOperandAt(1, val); 
SetFlag(kTrackSideEffectDominators); - SetGVNFlag(kDependsOnNewSpacePromotion); - if (is_in_object_) { + if (FLAG_track_double_fields && field_representation.IsDouble()) { + SetGVNFlag(kChangesDoubleFields); + } else if (is_in_object_) { SetGVNFlag(kChangesInobjectFields); + SetGVNFlag(kDependsOnNewSpacePromotion); } else { SetGVNFlag(kChangesBackingStoreFields); + SetGVNFlag(kDependsOnNewSpacePromotion); } } DECLARE_CONCRETE_INSTRUCTION(StoreNamedField) virtual Representation RequiredInputRepresentation(int index) { - if (FLAG_track_fields && index == 1 && field_representation_.IsSmi()) { + if (FLAG_track_double_fields && + index == 1 && field_representation_.IsDouble()) { + return field_representation_; + } else if (FLAG_track_fields && + index == 1 && field_representation_.IsSmi()) { return Representation::Integer32(); } return Representation::Tagged(); @@ -5601,7 +5614,7 @@ class HStoreNamedField: public HTemplateInstruction<2> { HValue* object() { return OperandAt(0); } HValue* value() { return OperandAt(1); } - Handle name() const { return name_; } + Handle name() const { return name_; } bool is_in_object() const { return is_in_object_; } int offset() const { return offset_; } Handle transition() const { return transition_; } @@ -5610,7 +5623,11 @@ class HStoreNamedField: public HTemplateInstruction<2> { HValue* new_space_dominator() const { return new_space_dominator_; } bool NeedsWriteBarrier() { + ASSERT(!(FLAG_track_double_fields && field_representation_.IsDouble()) || + transition_.is_null()); return (!FLAG_track_fields || !field_representation_.IsSmi()) && + // If there is a transition, a new storage object needs to be allocated. 
+ !(FLAG_track_double_fields && field_representation_.IsDouble()) && StoringValueNeedsWriteBarrier(value()) && ReceiverObjectNeedsWriteBarrier(object(), new_space_dominator()); } @@ -5628,7 +5645,7 @@ class HStoreNamedField: public HTemplateInstruction<2> { } private: - Handle name_; + Handle name_; bool is_in_object_; Representation field_representation_; int offset_; @@ -6117,12 +6134,14 @@ class HObjectLiteral: public HMaterializedLiteral<1> { bool fast_elements, int literal_index, int depth, + bool may_store_doubles, bool has_function) : HMaterializedLiteral<1>(literal_index, depth), constant_properties_(constant_properties), constant_properties_length_(constant_properties->length()), literals_(literals), fast_elements_(fast_elements), + may_store_doubles_(may_store_doubles), has_function_(has_function) { SetOperandAt(0, context); SetGVNFlag(kChangesNewSpacePromotion); @@ -6137,6 +6156,7 @@ class HObjectLiteral: public HMaterializedLiteral<1> { } Handle literals() const { return literals_; } bool fast_elements() const { return fast_elements_; } + bool may_store_doubles() const { return may_store_doubles_; } bool has_function() const { return has_function_; } virtual Representation RequiredInputRepresentation(int index) { @@ -6151,6 +6171,7 @@ class HObjectLiteral: public HMaterializedLiteral<1> { int constant_properties_length_; Handle literals_; bool fast_elements_ : 1; + bool may_store_doubles_ : 1; bool has_function_ : 1; }; diff --git a/src/hydrogen.cc b/src/hydrogen.cc index e583e50..5c573fe 100644 --- a/src/hydrogen.cc +++ b/src/hydrogen.cc @@ -6694,10 +6694,16 @@ static bool IsFastLiteral(Handle boilerplate, if (properties->length() > 0) { return false; } else { - int nof = boilerplate->map()->inobject_properties(); - for (int i = 0; i < nof; i++) { + Handle descriptors( + boilerplate->map()->instance_descriptors()); + int limit = boilerplate->map()->NumberOfOwnDescriptors(); + for (int i = 0; i < limit; i++) { + PropertyDetails details = 
descriptors->GetDetails(i); + if (details.type() != FIELD) continue; + Representation representation = details.representation(); + int index = descriptors->GetFieldIndex(i); if ((*max_properties)-- == 0) return false; - Handle value(boilerplate->InObjectPropertyAt(i), isolate); + Handle value(boilerplate->InObjectPropertyAt(index), isolate); if (value->IsJSObject()) { Handle value_object = Handle::cast(value); if (!IsFastLiteral(value_object, @@ -6707,6 +6713,8 @@ static bool IsFastLiteral(Handle boilerplate, pointer_size)) { return false; } + } else if (representation.IsDouble()) { + *data_size += HeapNumber::kSize; } } } @@ -6756,6 +6764,7 @@ void HOptimizedGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) { expr->fast_elements(), expr->literal_index(), expr->depth(), + expr->may_store_doubles(), expr->has_function())); } @@ -7062,9 +7071,33 @@ HInstruction* HOptimizedGraphBuilder::BuildStoreNamedField( } else { offset += FixedArray::kHeaderSize; } + bool transition_to_field = lookup->IsTransitionToField(*map); + if (FLAG_track_double_fields && representation.IsDouble()) { + if (transition_to_field) { + NoObservableSideEffectsScope no_side_effects(this); + HInstruction* heap_number_size = AddInstruction(new(zone()) HConstant( + HeapNumber::kSize, Representation::Integer32())); + HInstruction* double_box = AddInstruction(new(zone()) HAllocate( + environment()->LookupContext(), heap_number_size, + HType::HeapNumber(), HAllocate::CAN_ALLOCATE_IN_NEW_SPACE)); + BuildStoreMap(double_box, isolate()->factory()->heap_number_map()); + AddInstruction(new(zone()) HStoreNamedField( + double_box, name, value, true, + Representation::Double(), HeapNumber::kValueOffset)); + value = double_box; + representation = Representation::Tagged(); + } else { + HInstruction* double_box = AddInstruction(new(zone()) HLoadNamedField( + object, is_in_object, Representation::Tagged(), offset)); + double_box->set_type(HType::HeapNumber()); + return new(zone()) HStoreNamedField( + 
double_box, name, value, true, + Representation::Double(), HeapNumber::kValueOffset); + } + } HStoreNamedField* instr = new(zone()) HStoreNamedField( object, name, value, is_in_object, representation, offset); - if (lookup->IsTransitionToField(*map)) { + if (transition_to_field) { Handle transition(lookup->GetTransitionMapFromMap(*map)); instr->set_transition(transition); // TODO(fschneider): Record the new map type of the object in the IR to @@ -7780,7 +7813,20 @@ HLoadNamedField* HGraphBuilder::DoBuildLoadNamedField( bool inobject, Representation representation, int offset) { - return new(zone()) HLoadNamedField(object, inobject, representation, offset); + bool load_double = false; + if (representation.IsDouble()) { + representation = Representation::Tagged(); + load_double = FLAG_track_double_fields; + } + HLoadNamedField* field = + new(zone()) HLoadNamedField(object, inobject, representation, offset); + if (load_double) { + AddInstruction(field); + field->set_type(HType::HeapNumber()); + return new(zone()) HLoadNamedField( + field, true, Representation::Double(), HeapNumber::kValueOffset); + } + return field; } @@ -10779,7 +10825,6 @@ void HOptimizedGraphBuilder::BuildEmitDeepCopy( elements->map() != isolate()->heap()->fixed_cow_array_map()) ? elements->Size() : 0; int elements_offset = *offset + object_size; - int inobject_properties = boilerplate_object->map()->inobject_properties(); if (create_allocation_site_info) { elements_offset += AllocationSiteInfo::kSize; *offset += AllocationSiteInfo::kSize; @@ -10793,32 +10838,49 @@ void HOptimizedGraphBuilder::BuildEmitDeepCopy( // Copy in-object properties. 
HValue* object_properties = AddInstruction(new(zone) HInnerAllocatedObject(target, object_offset)); - for (int i = 0; i < inobject_properties; i++) { + + Handle descriptors( + boilerplate_object->map()->instance_descriptors()); + int limit = boilerplate_object->map()->NumberOfOwnDescriptors(); + + for (int i = 0; i < limit; i++) { + PropertyDetails details = descriptors->GetDetails(i); + if (details.type() != FIELD) continue; + int index = descriptors->GetFieldIndex(i); + int property_offset = boilerplate_object->GetInObjectPropertyOffset(index); + Handle name(descriptors->GetKey(i)); Handle value = - Handle(boilerplate_object->InObjectPropertyAt(i), + Handle(boilerplate_object->InObjectPropertyAt(index), isolate()); if (value->IsJSObject()) { Handle value_object = Handle::cast(value); Handle original_value_object = Handle::cast( - Handle(original_boilerplate_object->InObjectPropertyAt(i), + Handle(original_boilerplate_object->InObjectPropertyAt(index), isolate())); HInstruction* value_instruction = AddInstruction(new(zone) HInnerAllocatedObject(target, *offset)); - // TODO(verwaest): choose correct storage. AddInstruction(new(zone) HStoreNamedField( - object_properties, factory->unknown_field_string(), value_instruction, - true, Representation::Tagged(), - boilerplate_object->GetInObjectPropertyOffset(i))); + object_properties, name, value_instruction, true, + Representation::Tagged(), property_offset)); BuildEmitDeepCopy(value_object, original_value_object, target, offset, DONT_TRACK_ALLOCATION_SITE); } else { - // TODO(verwaest): choose correct storage. 
+ Representation representation = details.representation(); HInstruction* value_instruction = AddInstruction(new(zone) HConstant( value, Representation::Tagged())); + if (representation.IsDouble()) { + HInstruction* double_box = + AddInstruction(new(zone) HInnerAllocatedObject(target, *offset)); + BuildStoreMap(double_box, factory->heap_number_map()); + AddInstruction(new(zone) HStoreNamedField( + double_box, name, value_instruction, true, + Representation::Double(), HeapNumber::kValueOffset)); + value_instruction = double_box; + *offset += HeapNumber::kSize; + } AddInstruction(new(zone) HStoreNamedField( - object_properties, factory->unknown_field_string(), value_instruction, - true, Representation::Tagged(), - boilerplate_object->GetInObjectPropertyOffset(i))); + object_properties, name, value_instruction, true, + Representation::Tagged(), property_offset)); } } diff --git a/src/ia32/full-codegen-ia32.cc b/src/ia32/full-codegen-ia32.cc index 3b3908d..5a78019 100644 --- a/src/ia32/full-codegen-ia32.cc +++ b/src/ia32/full-codegen-ia32.cc @@ -1529,7 +1529,8 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { ? 
ObjectLiteral::kHasFunction : ObjectLiteral::kNoFlags; int properties_count = constant_properties->length() / 2; - if (expr->depth() > 1) { + if ((FLAG_track_double_fields && expr->may_store_doubles()) || + expr->depth() > 1) { __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); __ push(FieldOperand(edi, JSFunction::kLiteralsOffset)); __ push(Immediate(Smi::FromInt(expr->literal_index()))); diff --git a/src/ia32/ic-ia32.cc b/src/ia32/ic-ia32.cc index 964db0e..e05031b 100644 --- a/src/ia32/ic-ia32.cc +++ b/src/ia32/ic-ia32.cc @@ -1530,6 +1530,26 @@ void KeyedStoreIC::GenerateMiss(MacroAssembler* masm, ICMissMode miss_mode) { } +void StoreIC::GenerateSlow(MacroAssembler* masm) { + // ----------- S t a t e ------------- + // -- eax : value + // -- ecx : key + // -- edx : receiver + // -- esp[0] : return address + // ----------------------------------- + + __ pop(ebx); + __ push(edx); + __ push(ecx); + __ push(eax); + __ push(ebx); // return address + + // Do tail-call to runtime routine. + ExternalReference ref(IC_Utility(kStoreIC_Slow), masm->isolate()); + __ TailCallExternalReference(ref, 3, 1); +} + + void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) { // ----------- S t a t e ------------- // -- eax : value diff --git a/src/ia32/lithium-codegen-ia32.cc b/src/ia32/lithium-codegen-ia32.cc index bfed6d3..99955a5 100644 --- a/src/ia32/lithium-codegen-ia32.cc +++ b/src/ia32/lithium-codegen-ia32.cc @@ -2955,42 +2955,27 @@ void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) { void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) { + int offset = instr->hydrogen()->offset(); Register object = ToRegister(instr->object()); - if (!FLAG_track_double_fields) { - ASSERT(!instr->hydrogen()->representation().IsDouble()); - } - Register temp = instr->hydrogen()->representation().IsDouble() - ? 
ToRegister(instr->temp()) : ToRegister(instr->result()); - if (instr->hydrogen()->is_in_object()) { - __ mov(temp, FieldOperand(object, instr->hydrogen()->offset())); - } else { - __ mov(temp, FieldOperand(object, JSObject::kPropertiesOffset)); - __ mov(temp, FieldOperand(temp, instr->hydrogen()->offset())); - } - - if (instr->hydrogen()->representation().IsDouble()) { - Label load_from_heap_number, done; + if (FLAG_track_double_fields && + instr->hydrogen()->representation().IsDouble()) { if (CpuFeatures::IsSupported(SSE2)) { CpuFeatureScope scope(masm(), SSE2); XMMRegister result = ToDoubleRegister(instr->result()); - __ JumpIfNotSmi(temp, &load_from_heap_number); - __ SmiUntag(temp); - __ cvtsi2sd(result, Operand(temp)); - __ jmp(&done); - __ bind(&load_from_heap_number); - __ movdbl(result, FieldOperand(temp, HeapNumber::kValueOffset)); + __ movdbl(result, FieldOperand(object, offset)); } else { - __ JumpIfNotSmi(temp, &load_from_heap_number); - __ SmiUntag(temp); - __ push(temp); - __ fild_s(Operand(esp, 0)); - __ pop(temp); - __ jmp(&done); - __ bind(&load_from_heap_number); - PushX87DoubleOperand(FieldOperand(temp, HeapNumber::kValueOffset)); + PushX87DoubleOperand(FieldOperand(object, offset)); CurrentInstructionReturnsX87Result(); } - __ bind(&done); + return; + } + + Register result = ToRegister(instr->result()); + if (instr->hydrogen()->is_in_object()) { + __ mov(result, FieldOperand(object, offset)); + } else { + __ mov(result, FieldOperand(object, JSObject::kPropertiesOffset)); + __ mov(result, FieldOperand(result, offset)); } } @@ -4240,6 +4225,8 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { int offset = instr->offset(); + Handle transition = instr->transition(); + if (FLAG_track_fields && representation.IsSmi()) { if (instr->value()->IsConstantOperand()) { LConstantOperand* operand_value = LConstantOperand::cast(instr->value()); @@ -4253,18 +4240,20 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { DeoptimizeIf(overflow, 
instr->environment()); } } - } else if (FLAG_track_double_fields && representation.IsDouble() && - !instr->hydrogen()->value()->type().IsSmi() && - !instr->hydrogen()->value()->type().IsHeapNumber()) { - Register value = ToRegister(instr->value()); - Label do_store; - __ JumpIfSmi(value, &do_store); - Handle map(isolate()->factory()->heap_number_map()); - DoCheckMapCommon(value, map, REQUIRE_EXACT_MAP, instr); - __ bind(&do_store); + } else if (FLAG_track_double_fields && representation.IsDouble()) { + ASSERT(transition.is_null()); + ASSERT(instr->is_in_object()); + ASSERT(!instr->hydrogen()->NeedsWriteBarrier()); + if (CpuFeatures::IsSupported(SSE2)) { + CpuFeatureScope scope(masm(), SSE2); + XMMRegister value = ToDoubleRegister(instr->value()); + __ movdbl(FieldOperand(object, offset), value); + } else { + __ fstp_d(FieldOperand(object, offset)); + } + return; } - Handle transition = instr->transition(); if (!transition.is_null()) { if (transition->CanBeDeprecated()) { transition_maps_.Add(transition, info()->zone()); @@ -4310,6 +4299,7 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { __ mov(FieldOperand(write_register, offset), ToRegister(operand_value)); } else { Handle handle_value = ToHandle(operand_value); + ASSERT(!instr->hydrogen()->NeedsWriteBarrier()); __ mov(FieldOperand(write_register, offset), handle_value); } } else { @@ -6157,7 +6147,8 @@ void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) { // Set up the parameters to the stub/runtime call and pick the right // runtime function or stub to call. 
int properties_count = instr->hydrogen()->constant_properties_length() / 2; - if (instr->hydrogen()->depth() > 1) { + if ((FLAG_track_double_fields && instr->hydrogen()->may_store_doubles()) || + instr->hydrogen()->depth() > 1) { __ PushHeapObject(literals); __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index()))); __ push(Immediate(constant_properties)); diff --git a/src/ia32/lithium-ia32.cc b/src/ia32/lithium-ia32.cc index e5ba3c6..6c9098e 100644 --- a/src/ia32/lithium-ia32.cc +++ b/src/ia32/lithium-ia32.cc @@ -2183,9 +2183,7 @@ LInstruction* LChunkBuilder::DoStoreContextSlot(HStoreContextSlot* instr) { LInstruction* LChunkBuilder::DoLoadNamedField(HLoadNamedField* instr) { LOperand* obj = UseRegisterAtStart(instr->object()); - LOperand* temp = instr->representation().IsDouble() ? TempRegister() : NULL; - ASSERT(temp == NULL || FLAG_track_double_fields); - return DefineAsRegister(new(zone()) LLoadNamedField(obj, temp)); + return DefineAsRegister(new(zone()) LLoadNamedField(obj)); } @@ -2438,6 +2436,13 @@ LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) { val = UseRegisterOrConstant(instr->value()); } else if (FLAG_track_fields && instr->field_representation().IsSmi()) { val = UseTempRegister(instr->value()); + } else if (FLAG_track_double_fields && + instr->field_representation().IsDouble()) { + if (CpuFeatures::IsSafeForSnapshot(SSE2)) { + val = UseRegisterAtStart(instr->value()); + } else { + val = UseX87TopOfStack(instr->value()); + } } else { val = UseRegister(instr->value()); } @@ -2445,15 +2450,14 @@ LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) { // We only need a scratch register if we have a write barrier or we // have a store into the properties array (not in-object-property). LOperand* temp = (!instr->is_in_object() || needs_write_barrier || - needs_write_barrier_for_map) ? TempRegister() : NULL; + needs_write_barrier_for_map) ? 
TempRegister() : NULL; // We need a temporary register for write barrier of the map field. LOperand* temp_map = needs_write_barrier_for_map ? TempRegister() : NULL; LStoreNamedField* result = new(zone()) LStoreNamedField(obj, val, temp, temp_map); - if ((FLAG_track_fields && instr->field_representation().IsSmi()) || - (FLAG_track_double_fields && instr->field_representation().IsDouble())) { + if (FLAG_track_fields && instr->field_representation().IsSmi()) { return AssignEnvironment(result); } return result; diff --git a/src/ia32/lithium-ia32.h b/src/ia32/lithium-ia32.h index f5da47c..8202418 100644 --- a/src/ia32/lithium-ia32.h +++ b/src/ia32/lithium-ia32.h @@ -1494,15 +1494,18 @@ class LReturn: public LTemplateInstruction<0, 3, 0> { }; -class LLoadNamedField: public LTemplateInstruction<1, 1, 1> { +class LLoadNamedField: public LTemplateInstruction<1, 1, 0> { public: - explicit LLoadNamedField(LOperand* object, LOperand* temp) { + explicit LLoadNamedField(LOperand* object) { inputs_[0] = object; - temps_[0] = temp; + } + + virtual bool ClobbersDoubleRegisters() const { + return !CpuFeatures::IsSupported(SSE2) && + !hydrogen()->representation().IsDouble(); } LOperand* object() { return inputs_[0]; } - LOperand* temp() { return temps_[0]; } DECLARE_CONCRETE_INSTRUCTION(LoadNamedField, "load-named-field") DECLARE_HYDROGEN_ACCESSOR(LoadNamedField) diff --git a/src/ia32/stub-cache-ia32.cc b/src/ia32/stub-cache-ia32.cc index 18a032f..9623b9a 100644 --- a/src/ia32/stub-cache-ia32.cc +++ b/src/ia32/stub-cache-ia32.cc @@ -369,11 +369,13 @@ void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm, } -void StubCompiler::DoGenerateFastPropertyLoad(MacroAssembler* masm, - Register dst, - Register src, - bool inobject, - int index) { +void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm, + Register dst, + Register src, + bool inobject, + int index, + Representation representation) { + ASSERT(!FLAG_track_double_fields || !representation.IsDouble()); int 
offset = index * kPointerSize; if (!inobject) { // Calculate the offset into the properties array. @@ -763,8 +765,10 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm, Register value_reg, Register scratch1, Register scratch2, + Register unused, Label* miss_label, - Label* miss_restore_name) { + Label* miss_restore_name, + Label* slow) { // Check that the map of the object hasn't changed. __ CheckMap(receiver_reg, Handle(object->map()), miss_label, DO_SMI_CHECK, REQUIRE_EXACT_MAP); @@ -783,16 +787,6 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm, // Ensure no transitions to deprecated maps are followed. __ CheckMapDeprecated(transition, scratch1, miss_label); - if (FLAG_track_fields && representation.IsSmi()) { - __ JumpIfNotSmi(value_reg, miss_label); - } else if (FLAG_track_double_fields && representation.IsDouble()) { - Label do_store; - __ JumpIfSmi(value_reg, &do_store); - __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(), - miss_label, DONT_DO_SMI_CHECK, REQUIRE_EXACT_MAP); - __ bind(&do_store); - } - // Check that we are allowed to write this. if (object->GetPrototype()->IsJSObject()) { JSObject* holder; @@ -809,7 +803,7 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm, // We need an extra register, push Register holder_reg = CheckPrototypes( object, receiver_reg, Handle(holder), name_reg, - scratch1, scratch2, name, miss_restore_name); + scratch1, scratch2, name, miss_restore_name, SKIP_RECEIVER); // If no property was found, and the holder (the last object in the // prototype chain) is in slow mode, we need to do a negative lookup on the // holder. 
@@ -828,6 +822,46 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm, } } + Register storage_reg = name_reg; + + if (FLAG_track_fields && representation.IsSmi()) { + __ JumpIfNotSmi(value_reg, miss_restore_name); + } else if (FLAG_track_double_fields && representation.IsDouble()) { + Label do_store, heap_number; + __ AllocateHeapNumber(storage_reg, scratch1, scratch2, slow); + + __ JumpIfNotSmi(value_reg, &heap_number); + __ SmiUntag(value_reg); + if (CpuFeatures::IsSupported(SSE2)) { + CpuFeatureScope use_sse2(masm, SSE2); + __ cvtsi2sd(xmm0, value_reg); + } else { + __ push(value_reg); + __ fild_s(Operand(esp, 0)); + __ pop(value_reg); + } + __ SmiTag(value_reg); + __ jmp(&do_store); + + __ bind(&heap_number); + __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(), + miss_restore_name, DONT_DO_SMI_CHECK, REQUIRE_EXACT_MAP); + if (CpuFeatures::IsSupported(SSE2)) { + CpuFeatureScope use_sse2(masm, SSE2); + __ movdbl(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset)); + } else { + __ fld_d(FieldOperand(value_reg, HeapNumber::kValueOffset)); + } + + __ bind(&do_store); + if (CpuFeatures::IsSupported(SSE2)) { + CpuFeatureScope use_sse2(masm, SSE2); + __ movdbl(FieldOperand(storage_reg, HeapNumber::kValueOffset), xmm0); + } else { + __ fstp_d(FieldOperand(storage_reg, HeapNumber::kValueOffset)); + } + } + // Stub never generated for non-global objects that require access // checks. ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded()); @@ -839,7 +873,7 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm, __ pop(scratch1); // Return address. 
__ push(receiver_reg); __ push(Immediate(transition)); - __ push(eax); + __ push(value_reg); __ push(scratch1); __ TailCallExternalReference( ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage), @@ -853,12 +887,11 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm, __ mov(scratch1, Immediate(transition)); __ mov(FieldOperand(receiver_reg, HeapObject::kMapOffset), scratch1); - // Update the write barrier for the map field and pass the now unused - // name_reg as scratch register. + // Update the write barrier for the map field. __ RecordWriteField(receiver_reg, HeapObject::kMapOffset, scratch1, - name_reg, + scratch2, kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK); @@ -875,12 +908,20 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm, if (index < 0) { // Set the property straight into the object. int offset = object->map()->instance_size() + (index * kPointerSize); - __ mov(FieldOperand(receiver_reg, offset), value_reg); + if (FLAG_track_double_fields && representation.IsDouble()) { + __ mov(FieldOperand(receiver_reg, offset), storage_reg); + } else { + __ mov(FieldOperand(receiver_reg, offset), value_reg); + } if (!FLAG_track_fields || !representation.IsSmi()) { // Update the write barrier for the array address. // Pass the value being stored in the now unused name_reg. - __ mov(name_reg, value_reg); + if (!FLAG_track_double_fields || !representation.IsDouble()) { + __ mov(name_reg, value_reg); + } else { + ASSERT(storage_reg.is(name_reg)); + } __ RecordWriteField(receiver_reg, offset, name_reg, @@ -892,12 +933,20 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm, int offset = index * kPointerSize + FixedArray::kHeaderSize; // Get the properties array (optimistically). 
__ mov(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset)); - __ mov(FieldOperand(scratch1, offset), eax); + if (FLAG_track_double_fields && representation.IsDouble()) { + __ mov(FieldOperand(scratch1, offset), storage_reg); + } else { + __ mov(FieldOperand(scratch1, offset), value_reg); + } if (!FLAG_track_fields || !representation.IsSmi()) { // Update the write barrier for the array address. // Pass the value being stored in the now unused name_reg. - __ mov(name_reg, value_reg); + if (!FLAG_track_double_fields || !representation.IsDouble()) { + __ mov(name_reg, value_reg); + } else { + ASSERT(storage_reg.is(name_reg)); + } __ RecordWriteField(scratch1, offset, name_reg, @@ -948,13 +997,53 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm, if (FLAG_track_fields && representation.IsSmi()) { __ JumpIfNotSmi(value_reg, miss_label); } else if (FLAG_track_double_fields && representation.IsDouble()) { - Label do_store; - __ JumpIfSmi(value_reg, &do_store); + // Load the double storage. + if (index < 0) { + int offset = object->map()->instance_size() + (index * kPointerSize); + __ mov(scratch1, FieldOperand(receiver_reg, offset)); + } else { + __ mov(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset)); + int offset = index * kPointerSize + FixedArray::kHeaderSize; + __ mov(scratch1, FieldOperand(scratch1, offset)); + } + + // Store the value into the storage. 
+ Label do_store, heap_number; + __ JumpIfNotSmi(value_reg, &heap_number); + __ SmiUntag(value_reg); + if (CpuFeatures::IsSupported(SSE2)) { + CpuFeatureScope use_sse2(masm, SSE2); + __ cvtsi2sd(xmm0, value_reg); + } else { + __ push(value_reg); + __ fild_s(Operand(esp, 0)); + __ pop(value_reg); + } + __ SmiTag(value_reg); + __ jmp(&do_store); + __ bind(&heap_number); __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(), miss_label, DONT_DO_SMI_CHECK, REQUIRE_EXACT_MAP); + if (CpuFeatures::IsSupported(SSE2)) { + CpuFeatureScope use_sse2(masm, SSE2); + __ movdbl(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset)); + } else { + __ fld_d(FieldOperand(value_reg, HeapNumber::kValueOffset)); + } __ bind(&do_store); + if (CpuFeatures::IsSupported(SSE2)) { + CpuFeatureScope use_sse2(masm, SSE2); + __ movdbl(FieldOperand(scratch1, HeapNumber::kValueOffset), xmm0); + } else { + __ fstp_d(FieldOperand(scratch1, HeapNumber::kValueOffset)); + } + // Return the value (register eax). + ASSERT(value_reg.is(eax)); + __ ret(0); + return; } + ASSERT(!FLAG_track_double_fields || !representation.IsDouble()); // TODO(verwaest): Share this code as a code stub. if (index < 0) { // Set the property straight into the object. @@ -976,7 +1065,7 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm, int offset = index * kPointerSize + FixedArray::kHeaderSize; // Get the properties array (optimistically). __ mov(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset)); - __ mov(FieldOperand(scratch1, offset), eax); + __ mov(FieldOperand(scratch1, offset), value_reg); if (!FLAG_track_fields || !representation.IsSmi()) { // Update the write barrier for the array address. 
@@ -1236,15 +1325,18 @@ void BaseLoadStubCompiler::NonexistentHandlerFrontend( void BaseLoadStubCompiler::GenerateLoadField(Register reg, Handle holder, - PropertyIndex field) { + PropertyIndex field, + Representation representation) { if (!reg.is(receiver())) __ mov(receiver(), reg); if (kind() == Code::LOAD_IC) { LoadFieldStub stub(field.is_inobject(holder), - field.translate(holder)); + field.translate(holder), + representation); GenerateTailCall(masm(), stub.GetCode(isolate())); } else { KeyedLoadFieldStub stub(field.is_inobject(holder), - field.translate(holder)); + field.translate(holder), + representation); GenerateTailCall(masm(), stub.GetCode(isolate())); } } @@ -1501,7 +1593,9 @@ Handle CallStubCompiler::CompileCallField(Handle object, Register reg = CheckPrototypes(object, edx, holder, ebx, eax, edi, name, &miss); - GenerateFastPropertyLoad(masm(), edi, reg, holder, index); + GenerateFastPropertyLoad( + masm(), edi, reg, index.is_inobject(holder), + index.translate(holder), Representation::Tagged()); // Check that the function really is a function. __ JumpIfSmi(edi, &miss); diff --git a/src/ic.cc b/src/ic.cc index 0bfb755..31845f2 100644 --- a/src/ic.cc +++ b/src/ic.cc @@ -1241,7 +1241,8 @@ Handle LoadIC::ComputeLoadHandler(LookupResult* lookup, switch (lookup->type()) { case FIELD: return isolate()->stub_cache()->ComputeLoadField( - name, receiver, holder, lookup->GetFieldIndex()); + name, receiver, holder, + lookup->GetFieldIndex(), lookup->representation()); case CONSTANT_FUNCTION: { Handle constant(lookup->GetConstantFunction()); return isolate()->stub_cache()->ComputeLoadConstant( @@ -1283,7 +1284,7 @@ Handle LoadIC::ComputeLoadHandler(LookupResult* lookup, PropertyIndex lengthIndex = PropertyIndex::NewHeaderIndex(JSArray::kLengthOffset / kPointerSize); return isolate()->stub_cache()->ComputeLoadField( - name, receiver, holder, lengthIndex); + name, receiver, holder, lengthIndex, Representation::Tagged()); } // TODO(dcarney): Handle correctly. 
if (callback->IsDeclaredAccessorInfo()) break; @@ -1450,7 +1451,8 @@ Handle KeyedLoadIC::ComputeLoadHandler(LookupResult* lookup, switch (lookup->type()) { case FIELD: return isolate()->stub_cache()->ComputeKeyedLoadField( - name, receiver, holder, lookup->GetFieldIndex()); + name, receiver, holder, + lookup->GetFieldIndex(), lookup->representation()); case CONSTANT_FUNCTION: { Handle constant(lookup->GetConstantFunction(), isolate()); return isolate()->stub_cache()->ComputeKeyedLoadConstant( @@ -2260,11 +2262,24 @@ RUNTIME_FUNCTION(MaybeObject*, SharedStoreIC_ExtendStorage) { int new_unused = transition->unused_property_fields(); int new_size = old_storage->length() + new_unused + 1; Object* result; - { MaybeObject* maybe_result = old_storage->CopySize(new_size); - if (!maybe_result->ToObject(&result)) return maybe_result; - } + MaybeObject* maybe_result = old_storage->CopySize(new_size); + if (!maybe_result->ToObject(&result)) return maybe_result; + FixedArray* new_storage = FixedArray::cast(result); - new_storage->set(old_storage->length(), value); + + Object* to_store = value; + + if (FLAG_track_double_fields) { + DescriptorArray* descriptors = transition->instance_descriptors(); + PropertyDetails details = descriptors->GetDetails(transition->LastAdded()); + if (details.representation().IsDouble()) { + MaybeObject* maybe_storage = + isolate->heap()->AllocateHeapNumber(value->Number()); + if (!maybe_storage->To(&to_store)) return maybe_storage; + } + } + + new_storage->set(old_storage->length(), to_store); // Set the new property value and do the map transition. 
object->set_properties(new_storage); @@ -2306,6 +2321,24 @@ RUNTIME_FUNCTION(MaybeObject*, KeyedStoreIC_MissFromStubFailure) { } +RUNTIME_FUNCTION(MaybeObject*, StoreIC_Slow) { + NoHandleAllocation na(isolate); + ASSERT(args.length() == 3); + StoreIC ic(IC::NO_EXTRA_FRAME, isolate); + Code::ExtraICState extra_ic_state = ic.target()->extra_ic_state(); + Handle object = args.at(0); + Handle key = args.at(1); + Handle value = args.at(2); + StrictModeFlag strict_mode = Code::GetStrictMode(extra_ic_state); + return Runtime::SetObjectProperty(isolate, + object, + key, + value, + NONE, + strict_mode); +} + + RUNTIME_FUNCTION(MaybeObject*, KeyedStoreIC_Slow) { NoHandleAllocation na(isolate); ASSERT(args.length() == 3); diff --git a/src/ic.h b/src/ic.h index 2136dcb..739f34c 100644 --- a/src/ic.h +++ b/src/ic.h @@ -45,6 +45,7 @@ namespace internal { ICU(KeyedCallIC_Miss) \ ICU(StoreIC_Miss) \ ICU(StoreIC_ArrayLength) \ + ICU(StoreIC_Slow) \ ICU(SharedStoreIC_ExtendStorage) \ ICU(KeyedStoreIC_Miss) \ ICU(KeyedStoreIC_MissForceGeneric) \ @@ -504,6 +505,7 @@ class StoreIC: public IC { } // Code generators for stub routines. Only called once at startup. + static void GenerateSlow(MacroAssembler* masm); static void GenerateInitialize(MacroAssembler* masm) { GenerateMiss(masm); } static void GenerateMiss(MacroAssembler* masm); static void GenerateMegamorphic(MacroAssembler* masm, diff --git a/src/json-parser.h b/src/json-parser.h index 78c1a7a..ddc3b73 100644 --- a/src/json-parser.h +++ b/src/json-parser.h @@ -381,39 +381,23 @@ Handle JsonParser::ParseJsonObject() { // First check whether there is a single expected transition. If so, try // to parse it first. bool follow_expected = false; + Handle target; if (seq_ascii) { key = JSObject::ExpectedTransitionKey(map); follow_expected = !key.is_null() && ParseJsonString(key); } // If the expected transition hits, follow it. 
if (follow_expected) { - map = JSObject::ExpectedTransitionTarget(map); + target = JSObject::ExpectedTransitionTarget(map); } else { // If the expected transition failed, parse an internalized string and // try to find a matching transition. key = ParseJsonInternalizedString(); if (key.is_null()) return ReportUnexpectedCharacter(); - Handle target = JSObject::FindTransitionToField(map, key); + target = JSObject::FindTransitionToField(map, key); // If a transition was found, follow it and continue. - if (!target.is_null()) { - map = target; - } else { - // If no transition was found, commit the intermediate state to the - // object and stop transitioning. - JSObject::TransitionToMap(json_object, map); - int length = properties.length(); - for (int i = 0; i < length; i++) { - Handle value = properties[i]; - Representation representation = - map->instance_descriptors()->GetDetails(i).representation(); - if (representation.IsDouble() && value->IsSmi()) { - // TODO(verwaest): Allocate heap number. 
- } - json_object->FastPropertyAtPut(i, *value); - } - transitioning = false; - } + transitioning = !target.is_null(); } if (c0_ != ':') return ReportUnexpectedCharacter(); @@ -421,16 +405,35 @@ Handle JsonParser::ParseJsonObject() { value = ParseJsonValue(); if (value.is_null()) return ReportUnexpectedCharacter(); - properties.Add(value, zone()); if (transitioning) { - int field = properties.length() - 1; - Representation expected_representation = - map->instance_descriptors()->GetDetails(field).representation(); - if (!value->FitsRepresentation(expected_representation)) { - map = Map::GeneralizeRepresentation( - map, field, value->OptimalRepresentation()); + int descriptor = map->NumberOfOwnDescriptors(); + PropertyDetails details = + target->instance_descriptors()->GetDetails(descriptor); + Representation expected_representation = details.representation(); + + if (value->FitsRepresentation(expected_representation)) { + // If the target representation is double and the value is already + // double, use the existing box. + if (FLAG_track_double_fields && + value->IsSmi() && + expected_representation.IsDouble()) { + value = factory()->NewHeapNumber( + Handle::cast(value)->value()); + } + properties.Add(value, zone()); + map = target; + continue; + } else { + transitioning = false; } - continue; + } + + // Commit the intermediate state to the object and stop transitioning. + JSObject::AllocateStorageForMap(json_object, map); + int length = properties.length(); + for (int i = 0; i < length; i++) { + Handle value = properties[i]; + json_object->FastPropertyAtPut(i, *value); } } else { key = ParseJsonInternalizedString(); @@ -450,14 +453,19 @@ Handle JsonParser::ParseJsonObject() { // If we transitioned until the very end, transition the map now. 
if (transitioning) { - JSObject::TransitionToMap(json_object, map); + JSObject::AllocateStorageForMap(json_object, map); int length = properties.length(); for (int i = 0; i < length; i++) { Handle value = properties[i]; - Representation representation = - map->instance_descriptors()->GetDetails(i).representation(); - if (representation.IsDouble() && value->IsSmi()) { - // TODO(verwaest): Allocate heap number. + // If the target representation is double and the value is already + // double, use the existing box. + if (FLAG_track_double_fields && value->IsSmi()) { + Representation representation = + map->instance_descriptors()->GetDetails(i).representation(); + if (representation.IsDouble()) { + value = factory()->NewHeapNumber( + Handle::cast(value)->value()); + } } json_object->FastPropertyAtPut(i, *value); } diff --git a/src/json-stringifier.h b/src/json-stringifier.h index 47a0129..b67a9f6 100644 --- a/src/json-stringifier.h +++ b/src/json-stringifier.h @@ -644,7 +644,7 @@ BasicJsonStringifier::Result BasicJsonStringifier::SerializeJSObject( Handle property; if (details.type() == FIELD && *map == object->map()) { property = Handle( - object->FastPropertyAt( + object->RawFastPropertyAt( map->instance_descriptors()->GetFieldIndex(i)), isolate_); } else { diff --git a/src/objects-inl.h b/src/objects-inl.h index a393822..06a13df 100644 --- a/src/objects-inl.h +++ b/src/objects-inl.h @@ -283,6 +283,16 @@ bool Object::HasValidElements() { return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray(); } + +MaybeObject* Object::AllocateNewStorageFor(Heap* heap, + Representation representation, + PretenureFlag tenure) { + if (!FLAG_track_double_fields) return this; + if (!representation.IsDouble()) return this; + return heap->AllocateHeapNumber(Number(), tenure); +} + + StringShape::StringShape(String* str) : type_(str->map()->instance_type()) { set_valid(); @@ -1509,7 +1519,7 @@ MaybeObject* JSObject::ResetElements() { } -MaybeObject* JSObject::TransitionToMap(Map* 
map) { +MaybeObject* JSObject::AllocateStorageForMap(Map* map) { ASSERT(this->map()->inobject_properties() == map->inobject_properties()); ElementsKind expected_kind = this->map()->elements_kind(); if (map->elements_kind() != expected_kind) { @@ -1699,10 +1709,17 @@ void JSObject::SetInternalField(int index, Smi* value) { } +MaybeObject* JSObject::FastPropertyAt(Representation representation, + int index) { + Object* raw_value = RawFastPropertyAt(index); + return raw_value->AllocateNewStorageFor(GetHeap(), representation); +} + + // Access fast-case object properties at index. The use of these routines // is needed to correctly distinguish between properties stored in-object and // properties stored in the properties array. -Object* JSObject::FastPropertyAt(int index) { +Object* JSObject::RawFastPropertyAt(int index) { // Adjust for the number of properties stored in the object. index -= map()->inobject_properties(); if (index < 0) { @@ -1715,7 +1732,7 @@ Object* JSObject::FastPropertyAt(int index) { } -Object* JSObject::FastPropertyAtPut(int index, Object* value) { +void JSObject::FastPropertyAtPut(int index, Object* value) { // Adjust for the number of properties stored in the object. 
index -= map()->inobject_properties(); if (index < 0) { @@ -1726,7 +1743,6 @@ Object* JSObject::FastPropertyAtPut(int index, Object* value) { ASSERT(index < properties()->length()); properties()->set(index, value); } - return value; } diff --git a/src/objects-printer.cc b/src/objects-printer.cc index 5aeeec6..0849a63 100644 --- a/src/objects-printer.cc +++ b/src/objects-printer.cc @@ -271,7 +271,7 @@ void JSObject::PrintProperties(FILE* out) { switch (descs->GetType(i)) { case FIELD: { int index = descs->GetFieldIndex(i); - FastPropertyAt(index)->ShortPrint(out); + RawFastPropertyAt(index)->ShortPrint(out); PrintF(out, " (field at offset %d)\n", index); break; } diff --git a/src/objects.cc b/src/objects.cc index ddf0eff..d127d1b 100644 --- a/src/objects.cc +++ b/src/objects.cc @@ -815,11 +815,14 @@ MaybeObject* Object::GetProperty(Object* receiver, value = result->holder()->GetNormalizedProperty(result); ASSERT(!value->IsTheHole() || result->IsReadOnly()); return value->IsTheHole() ? heap->undefined_value() : value; - case FIELD: - value = result->holder()->FastPropertyAt( + case FIELD: { + MaybeObject* maybe_result = result->holder()->FastPropertyAt( + result->representation(), result->GetFieldIndex().field_index()); + if (!maybe_result->To(&value)) return maybe_result; ASSERT(!value->IsTheHole() || result->IsReadOnly()); return value->IsTheHole() ? heap->undefined_value() : value; + } case CONSTANT_FUNCTION: return result->GetConstantFunction(); case CALLBACKS: @@ -1711,7 +1714,15 @@ String* JSReceiver::constructor_name() { MaybeObject* JSObject::AddFastPropertyUsingMap(Map* new_map, Name* name, Object* value, - int field_index) { + int field_index, + Representation representation) { + // This method is used to transition to a field. If we are transitioning to a + // double field, allocate new storage. 
+ Object* storage; + MaybeObject* maybe_storage = + value->AllocateNewStorageFor(GetHeap(), representation); + if (!maybe_storage->To(&storage)) return maybe_storage; + if (map()->unused_property_fields() == 0) { int new_unused = new_map->unused_property_fields(); FixedArray* values; @@ -1721,8 +1732,11 @@ MaybeObject* JSObject::AddFastPropertyUsingMap(Map* new_map, set_properties(values); } + set_map(new_map); - return FastPropertyAtPut(field_index, value); + + FastPropertyAtPut(field_index, storage); + return value; } @@ -1774,8 +1788,8 @@ MaybeObject* JSObject::AddFastProperty(Name* name, int index = map()->NextFreePropertyIndex(); // Allocate new instance descriptors with (name, index) added - FieldDescriptor new_field( - name, index, attributes, value->OptimalRepresentation()); + Representation representation = value->OptimalRepresentation(); + FieldDescriptor new_field(name, index, attributes, representation); ASSERT(index < map()->inobject_properties() || (index - map()->inobject_properties()) < properties()->length() || @@ -1783,6 +1797,7 @@ MaybeObject* JSObject::AddFastProperty(Name* name, FixedArray* values = NULL; + // TODO(verwaest): Merge with AddFastPropertyUsingMap. 
if (map()->unused_property_fields() == 0) { // Make room for the new value MaybeObject* maybe_values = @@ -1792,10 +1807,17 @@ MaybeObject* JSObject::AddFastProperty(Name* name, TransitionFlag flag = INSERT_TRANSITION; + Heap* heap = isolate->heap(); + Map* new_map; MaybeObject* maybe_new_map = map()->CopyAddDescriptor(&new_field, flag); if (!maybe_new_map->To(&new_map)) return maybe_new_map; + Object* storage; + MaybeObject* maybe_storage = + value->AllocateNewStorageFor(heap, representation); + if (!maybe_storage->To(&storage)) return maybe_storage; + if (map()->unused_property_fields() == 0) { ASSERT(values != NULL); set_properties(values); @@ -1805,7 +1827,9 @@ MaybeObject* JSObject::AddFastProperty(Name* name, } set_map(new_map); - return FastPropertyAtPut(index, value); + + FastPropertyAtPut(index, storage); + return value; } @@ -2068,9 +2092,9 @@ MaybeObject* JSObject::ConvertDescriptorToField(Name* name, return ReplaceSlowProperty(name, new_value, attributes); } + Representation representation = new_value->OptimalRepresentation(); int index = map()->NextFreePropertyIndex(); - FieldDescriptor new_field( - name, index, attributes, new_value->OptimalRepresentation()); + FieldDescriptor new_field(name, index, attributes, representation); // Make a new map for the object. Map* new_map; @@ -2088,6 +2112,12 @@ MaybeObject* JSObject::ConvertDescriptorToField(Name* name, if (!maybe_new_properties->To(&new_properties)) return maybe_new_properties; } + Heap* heap = GetHeap(); + Object* storage; + MaybeObject* maybe_storage = + new_value->AllocateNewStorageFor(heap, representation); + if (!maybe_storage->To(&storage)) return maybe_storage; + // Update pointers to commit changes. // Object points to the new map. 
new_map->set_unused_property_fields(new_unused_property_fields); @@ -2095,7 +2125,8 @@ MaybeObject* JSObject::ConvertDescriptorToField(Name* name, if (new_properties != NULL) { set_properties(new_properties); } - return FastPropertyAtPut(index, new_value); + FastPropertyAtPut(index, new_value); + return new_value; } @@ -2163,13 +2194,28 @@ static void RightTrimFixedArray(Heap* heap, FixedArray* elms, int to_trim) { } -bool Map::InstancesNeedRewriting(int target_number_of_fields, +bool Map::InstancesNeedRewriting(Map* target, + int target_number_of_fields, int target_inobject, int target_unused) { // If fields were added (or removed), rewrite the instance. int number_of_fields = NumberOfFields(); ASSERT(target_number_of_fields >= number_of_fields); if (target_number_of_fields != number_of_fields) return true; + + if (FLAG_track_double_fields) { + // If smi descriptors were replaced by double descriptors, rewrite. + DescriptorArray* old_desc = instance_descriptors(); + DescriptorArray* new_desc = target->instance_descriptors(); + int limit = NumberOfOwnDescriptors(); + for (int i = 0; i < limit; i++) { + if (new_desc->GetDetails(i).representation().IsDouble() && + old_desc->GetDetails(i).representation().IsSmi()) { + return true; + } + } + } + // If no fields were added, and no inobject properties were removed, setting // the map is sufficient. if (target_inobject == inobject_properties()) return false; @@ -2209,7 +2255,8 @@ MaybeObject* JSObject::MigrateToMap(Map* new_map) { int unused = new_map->unused_property_fields(); // Nothing to do if no functions were converted to fields. - if (!old_map->InstancesNeedRewriting(number_of_fields, inobject, unused)) { + if (!old_map->InstancesNeedRewriting( + new_map, number_of_fields, inobject, unused)) { set_map(new_map); return this; } @@ -2232,7 +2279,21 @@ MaybeObject* JSObject::MigrateToMap(Map* new_map) { old_details.type() == FIELD); Object* value = old_details.type() == CONSTANT_FUNCTION ? 
old_descriptors->GetValue(i) - : FastPropertyAt(old_descriptors->GetFieldIndex(i)); + : RawFastPropertyAt(old_descriptors->GetFieldIndex(i)); + if (FLAG_track_double_fields && + old_details.representation().IsSmi() && + details.representation().IsDouble()) { + // Objects must be allocated in the old object space, since the + // overall number of HeapNumbers needed for the conversion might + // exceed the capacity of new space, and we would fail repeatedly + // trying to migrate the instance. + MaybeObject* maybe_storage = + value->AllocateNewStorageFor(heap, details.representation(), TENURED); + if (!maybe_storage->To(&value)) return maybe_storage; + } + ASSERT(!(FLAG_track_double_fields && + details.representation().IsDouble() && + value->IsSmi())); int target_index = new_descriptors->GetFieldIndex(i) - inobject; if (target_index < 0) target_index += total_size; array->set(target_index, value); @@ -2297,6 +2358,10 @@ MaybeObject* Map::CopyGeneralizeAllRepresentations() { new_map->instance_descriptors()->InitializeRepresentations( Representation::Tagged()); + if (FLAG_trace_generalization) { + PrintF("failed generalization %p -> %p\n", + static_cast(this), static_cast(new_map)); + } return new_map; } @@ -2462,7 +2527,13 @@ MaybeObject* Map::GeneralizeRepresentation(int modify_index, verbatim, valid, descriptors, old_descriptors)) { Representation updated_representation = updated_descriptors->GetDetails(modify_index).representation(); - if (new_representation.fits_into(updated_representation)) return updated; + if (new_representation.fits_into(updated_representation)) { + if (FLAG_trace_generalization) { + PrintF("migrating to existing map %p -> %p\n", + static_cast(this), static_cast(updated)); + } + return updated; + } } DescriptorArray* new_descriptors; @@ -2487,6 +2558,13 @@ MaybeObject* Map::GeneralizeRepresentation(int modify_index, split_map->DeprecateTarget( old_descriptors->GetKey(descriptor), new_descriptors); + if (FLAG_trace_generalization) { + 
PrintF("migrating to new map %p -> %p (%i steps)\n", + static_cast(this), + static_cast(new_descriptors), + descriptors - descriptor); + } + Map* new_map = split_map; // Add missing transitions. for (; descriptor < descriptors; descriptor++) { @@ -3029,7 +3107,7 @@ void JSObject::LocalLookupRealNamedProperty(Name* name, LookupResult* result) { // occur as fields. if (result->IsField() && result->IsReadOnly() && - FastPropertyAt(result->GetFieldIndex().field_index())->IsTheHole()) { + RawFastPropertyAt(result->GetFieldIndex().field_index())->IsTheHole()) { result->DisallowCaching(); } return; @@ -3462,14 +3540,19 @@ MUST_USE_RESULT Handle JSProxy::CallTrap(const char* name, } -void JSObject::TransitionToMap(Handle object, Handle map) { +void JSObject::AllocateStorageForMap(Handle object, Handle map) { CALL_HEAP_FUNCTION_VOID( object->GetIsolate(), - object->TransitionToMap(*map)); + object->AllocateStorageForMap(*map)); } void JSObject::MigrateInstance(Handle object) { + if (FLAG_trace_migration) { + PrintF("migrating instance %p (%p)\n", + static_cast(*object), + static_cast(object->map())); + } CALL_HEAP_FUNCTION_VOID( object->GetIsolate(), object->MigrateInstance()); @@ -3478,10 +3561,10 @@ void JSObject::MigrateInstance(Handle object) { Handle Map::GeneralizeRepresentation(Handle map, int modify_index, - Representation new_representation) { + Representation representation) { CALL_HEAP_FUNCTION( map->GetIsolate(), - map->GeneralizeRepresentation(modify_index, new_representation), + map->GeneralizeRepresentation(modify_index, representation), Map); } @@ -3581,9 +3664,21 @@ MaybeObject* JSObject::SetPropertyForResult(LookupResult* lookup, lookup->holder()->GeneralizeFieldRepresentation( lookup->GetDescriptorIndex(), value->OptimalRepresentation()); if (maybe_failure->IsFailure()) return maybe_failure; + DescriptorArray* desc = lookup->holder()->map()->instance_descriptors(); + int descriptor = lookup->GetDescriptorIndex(); + representation = 
desc->GetDetails(descriptor).representation(); + } + if (FLAG_track_double_fields && representation.IsDouble()) { + HeapNumber* storage = + HeapNumber::cast(lookup->holder()->RawFastPropertyAt( + lookup->GetFieldIndex().field_index())); + storage->set_value(value->Number()); + result = *value; + break; } - result = lookup->holder()->FastPropertyAtPut( + lookup->holder()->FastPropertyAtPut( lookup->GetFieldIndex().field_index(), *value); + result = *value; break; } case CONSTANT_FUNCTION: @@ -3612,7 +3707,8 @@ MaybeObject* JSObject::SetPropertyForResult(LookupResult* lookup, if (details.type() == FIELD) { if (attributes == details.attributes()) { - if (!value->FitsRepresentation(details.representation())) { + Representation representation = details.representation(); + if (!value->FitsRepresentation(representation)) { MaybeObject* maybe_map = transition_map->GeneralizeRepresentation( descriptor, value->OptimalRepresentation()); if (!maybe_map->To(&transition_map)) return maybe_map; @@ -3622,10 +3718,13 @@ MaybeObject* JSObject::SetPropertyForResult(LookupResult* lookup, lookup->holder()->MigrateToMap(Map::cast(back)); if (maybe_failure->IsFailure()) return maybe_failure; } + DescriptorArray* desc = transition_map->instance_descriptors(); + int descriptor = transition_map->LastAdded(); + representation = desc->GetDetails(descriptor).representation(); } int field_index = descriptors->GetFieldIndex(descriptor); result = lookup->holder()->AddFastPropertyUsingMap( - transition_map, *name, *value, field_index); + transition_map, *name, *value, field_index, representation); } else { result = lookup->holder()->ConvertDescriptorToField( *name, *value, attributes); @@ -3765,9 +3864,20 @@ MaybeObject* JSObject::SetLocalPropertyIgnoreAttributes( MaybeObject* maybe_failure = self->GeneralizeFieldRepresentation( lookup.GetDescriptorIndex(), value->OptimalRepresentation()); if (maybe_failure->IsFailure()) return maybe_failure; + DescriptorArray* desc = 
self->map()->instance_descriptors(); + int descriptor = lookup.GetDescriptorIndex(); + representation = desc->GetDetails(descriptor).representation(); + } + if (FLAG_track_double_fields && representation.IsDouble()) { + HeapNumber* storage = + HeapNumber::cast(self->RawFastPropertyAt( + lookup.GetFieldIndex().field_index())); + storage->set_value(value->Number()); + result = *value; + break; } - result = self->FastPropertyAtPut( - lookup.GetFieldIndex().field_index(), *value); + self->FastPropertyAtPut(lookup.GetFieldIndex().field_index(), *value); + result = *value; break; } case CONSTANT_FUNCTION: @@ -3792,7 +3902,8 @@ MaybeObject* JSObject::SetLocalPropertyIgnoreAttributes( if (details.type() == FIELD) { if (attributes == details.attributes()) { - if (!value->FitsRepresentation(details.representation())) { + Representation representation = details.representation(); + if (!value->FitsRepresentation(representation)) { MaybeObject* maybe_map = transition_map->GeneralizeRepresentation( descriptor, value->OptimalRepresentation()); if (!maybe_map->To(&transition_map)) return maybe_map; @@ -3801,10 +3912,13 @@ MaybeObject* JSObject::SetLocalPropertyIgnoreAttributes( MaybeObject* maybe_failure = self->MigrateToMap(Map::cast(back)); if (maybe_failure->IsFailure()) return maybe_failure; } + DescriptorArray* desc = transition_map->instance_descriptors(); + int descriptor = transition_map->LastAdded(); + representation = desc->GetDetails(descriptor).representation(); } int field_index = descriptors->GetFieldIndex(descriptor); result = self->AddFastPropertyUsingMap( - transition_map, *name, *value, field_index); + transition_map, *name, *value, field_index, representation); } else { result = self->ConvertDescriptorToField(*name, *value, attributes); } @@ -4223,9 +4337,9 @@ MaybeObject* JSObject::NormalizeProperties(PropertyNormalizationMode mode, break; } case FIELD: { - PropertyDetails d = PropertyDetails( - details.attributes(), NORMAL, i + 1); - Object* value = 
FastPropertyAt(descs->GetFieldIndex(i)); + PropertyDetails d = + PropertyDetails(details.attributes(), NORMAL, i + 1); + Object* value = RawFastPropertyAt(descs->GetFieldIndex(i)); MaybeObject* maybe_dictionary = dictionary->Add(descs->GetKey(i), value, d); if (!maybe_dictionary->To(&dictionary)) return maybe_dictionary; @@ -4603,8 +4717,10 @@ MaybeObject* JSObject::GetHiddenPropertiesHashTable( if (descriptors->GetKey(sorted_index) == GetHeap()->hidden_string() && sorted_index < map()->NumberOfOwnDescriptors()) { ASSERT(descriptors->GetType(sorted_index) == FIELD); - inline_value = - this->FastPropertyAt(descriptors->GetFieldIndex(sorted_index)); + MaybeObject* maybe_value = this->FastPropertyAt( + descriptors->GetDetails(sorted_index).representation(), + descriptors->GetFieldIndex(sorted_index)); + if (!maybe_value->To(&inline_value)) return maybe_value; } else { inline_value = GetHeap()->undefined_value(); } @@ -4673,8 +4789,7 @@ MaybeObject* JSObject::SetHiddenPropertiesHashTable(Object* value) { if (descriptors->GetKey(sorted_index) == GetHeap()->hidden_string() && sorted_index < map()->NumberOfOwnDescriptors()) { ASSERT(descriptors->GetType(sorted_index) == FIELD); - this->FastPropertyAtPut(descriptors->GetFieldIndex(sorted_index), - value); + FastPropertyAtPut(descriptors->GetFieldIndex(sorted_index), value); return this; } } @@ -5150,6 +5265,11 @@ MUST_USE_RESULT MaybeObject* JSObject::DeepCopy(Isolate* isolate) { StackLimitCheck check(isolate); if (check.HasOverflowed()) return isolate->StackOverflow(); + if (map()->is_deprecated()) { + MaybeObject* maybe_failure = MigrateInstance(); + if (maybe_failure->IsFailure()) return maybe_failure; + } + Heap* heap = isolate->heap(); Object* result; { MaybeObject* maybe_result = heap->CopyJSObject(this); @@ -5159,27 +5279,24 @@ MUST_USE_RESULT MaybeObject* JSObject::DeepCopy(Isolate* isolate) { // Deep copy local properties. 
if (copy->HasFastProperties()) { - FixedArray* properties = copy->properties(); - for (int i = 0; i < properties->length(); i++) { - Object* value = properties->get(i); + DescriptorArray* descriptors = copy->map()->instance_descriptors(); + int limit = copy->map()->NumberOfOwnDescriptors(); + for (int i = 0; i < limit; i++) { + PropertyDetails details = descriptors->GetDetails(i); + if (details.type() != FIELD) continue; + int index = descriptors->GetFieldIndex(i); + Object* value = RawFastPropertyAt(index); if (value->IsJSObject()) { JSObject* js_object = JSObject::cast(value); - { MaybeObject* maybe_result = js_object->DeepCopy(isolate); - if (!maybe_result->ToObject(&result)) return maybe_result; - } - properties->set(i, result); - } - } - int nof = copy->map()->inobject_properties(); - for (int i = 0; i < nof; i++) { - Object* value = copy->InObjectPropertyAt(i); - if (value->IsJSObject()) { - JSObject* js_object = JSObject::cast(value); - { MaybeObject* maybe_result = js_object->DeepCopy(isolate); - if (!maybe_result->ToObject(&result)) return maybe_result; - } - copy->InObjectPropertyAtPut(i, result); + MaybeObject* maybe_copy = js_object->DeepCopy(isolate); + if (!maybe_copy->To(&value)) return maybe_copy; + } else { + Representation representation = details.representation(); + MaybeObject* maybe_storage = + value->AllocateNewStorageFor(heap, representation); + if (!maybe_storage->To(&value)) return maybe_storage; } + copy->FastPropertyAtPut(index, value); } } else { { MaybeObject* maybe_result = @@ -6004,7 +6121,14 @@ Object* JSObject::SlowReverseLookup(Object* value) { DescriptorArray* descs = map()->instance_descriptors(); for (int i = 0; i < number_of_own_descriptors; i++) { if (descs->GetType(i) == FIELD) { - if (FastPropertyAt(descs->GetFieldIndex(i)) == value) { + Object* property = RawFastPropertyAt(descs->GetFieldIndex(i)); + if (FLAG_track_double_fields && + descs->GetDetails(i).representation().IsDouble()) { + ASSERT(property->IsHeapNumber()); + 
if (value->IsNumber() && property->Number() == value->Number()) { + return descs->GetKey(i); + } + } else if (property == value) { return descs->GetKey(i); } } else if (descs->GetType(i) == CONSTANT_FUNCTION) { @@ -6034,6 +6158,7 @@ MaybeObject* Map::RawCopy(int instance_size) { new_bit_field3 = OwnsDescriptors::update(new_bit_field3, true); new_bit_field3 = NumberOfOwnDescriptorsBits::update(new_bit_field3, 0); new_bit_field3 = EnumLengthBits::update(new_bit_field3, kInvalidEnumCache); + new_bit_field3 = Deprecated::update(new_bit_field3, false); result->set_bit_field3(new_bit_field3); return result; } diff --git a/src/objects.h b/src/objects.h index 978046b..1b4ed5b 100644 --- a/src/objects.h +++ b/src/objects.h @@ -1080,6 +1080,10 @@ class Object : public MaybeObject { return true; } + inline MaybeObject* AllocateNewStorageFor(Heap* heap, + Representation representation, + PretenureFlag tenure = NOT_TENURED); + // Returns true if the object is of the correct type to be used as a // implementation of a JSObject's elements. inline bool HasValidElements(); @@ -1828,8 +1832,8 @@ class JSObject: public JSReceiver { // Extend the receiver with a single fast property appeared first in the // passed map. This also extends the property backing store if necessary. - static void TransitionToMap(Handle object, Handle map); - inline MUST_USE_RESULT MaybeObject* TransitionToMap(Map* map); + static void AllocateStorageForMap(Handle object, Handle map); + inline MUST_USE_RESULT MaybeObject* AllocateStorageForMap(Map* map); static void MigrateInstance(Handle instance); inline MUST_USE_RESULT MaybeObject* MigrateInstance(); @@ -2135,10 +2139,12 @@ class JSObject: public JSReceiver { // Add a property to a fast-case object using a map transition to // new_map. 
- MUST_USE_RESULT MaybeObject* AddFastPropertyUsingMap(Map* new_map, - Name* name, - Object* value, - int field_index); + MUST_USE_RESULT MaybeObject* AddFastPropertyUsingMap( + Map* new_map, + Name* name, + Object* value, + int field_index, + Representation representation); // Add a constant function property to a fast-case object. // This leaves a CONSTANT_TRANSITION in the old map, and @@ -2247,8 +2253,11 @@ class JSObject: public JSReceiver { int unused_property_fields); // Access fast-case object properties at index. - inline Object* FastPropertyAt(int index); - inline Object* FastPropertyAtPut(int index, Object* value); + MUST_USE_RESULT inline MaybeObject* FastPropertyAt( + Representation representation, + int index); + inline Object* RawFastPropertyAt(int index); + inline void FastPropertyAtPut(int index, Object* value); // Access to in object properties. inline int GetInObjectPropertyOffset(int index); @@ -5199,7 +5208,8 @@ class Map: public HeapObject { int NumberOfFields(); - bool InstancesNeedRewriting(int target_number_of_fields, + bool InstancesNeedRewriting(Map* target, + int target_number_of_fields, int target_inobject, int target_unused); static Handle GeneralizeRepresentation( diff --git a/src/parser.cc b/src/parser.cc index 33b5fab..cff51bc 100644 --- a/src/parser.cc +++ b/src/parser.cc @@ -3738,33 +3738,6 @@ Expression* Parser::ParsePrimaryExpression(bool* ok) { } -void Parser::BuildArrayLiteralBoilerplateLiterals(ZoneList* values, - Handle literals, - bool* is_simple, - int* depth) { - // Fill in the literals. - // Accumulate output values in local variables. 
- bool is_simple_acc = true; - int depth_acc = 1; - for (int i = 0; i < values->length(); i++) { - MaterializedLiteral* m_literal = values->at(i)->AsMaterializedLiteral(); - if (m_literal != NULL && m_literal->depth() >= depth_acc) { - depth_acc = m_literal->depth() + 1; - } - Handle boilerplate_value = GetBoilerplateValue(values->at(i)); - if (boilerplate_value->IsUndefined()) { - literals->set_the_hole(i); - is_simple_acc = false; - } else { - literals->set(i, *boilerplate_value); - } - } - - *is_simple = is_simple_acc; - *depth = depth_acc; -} - - Expression* Parser::ParseArrayLiteral(bool* ok) { // ArrayLiteral :: // '[' Expression? (',' Expression?)* ']' @@ -3991,7 +3964,8 @@ void Parser::BuildObjectLiteralConstantProperties( Handle constant_properties, bool* is_simple, bool* fast_elements, - int* depth) { + int* depth, + bool* may_store_doubles) { int position = 0; // Accumulate the value in local variables and store it at the end. bool is_simple_acc = true; @@ -4014,6 +3988,13 @@ void Parser::BuildObjectLiteralConstantProperties( // runtime. The enumeration order is maintained. Handle key = property->key()->handle(); Handle value = GetBoilerplateValue(property->value()); + + // Ensure objects with doubles are always treated as nested objects. + // TODO(verwaest): Remove once we can store them inline. 
+ if (FLAG_track_double_fields && value->IsNumber()) { + *may_store_doubles = true; + } + is_simple_acc = is_simple_acc && !value->IsUndefined(); // Keep track of the number of elements in the object literal and @@ -4215,17 +4196,20 @@ Expression* Parser::ParseObjectLiteral(bool* ok) { bool is_simple = true; bool fast_elements = true; int depth = 1; + bool may_store_doubles = false; BuildObjectLiteralConstantProperties(properties, constant_properties, &is_simple, &fast_elements, - &depth); + &depth, + &may_store_doubles); return factory()->NewObjectLiteral(constant_properties, properties, literal_index, is_simple, fast_elements, depth, + may_store_doubles, has_function); } diff --git a/src/parser.h b/src/parser.h index acf47bb..1defbf2 100644 --- a/src/parser.h +++ b/src/parser.h @@ -692,13 +692,8 @@ class Parser BASE_EMBEDDED { Handle constants, bool* is_simple, bool* fast_elements, - int* depth); - - // Populate the literals fixed array for a materialized array literal. - void BuildArrayLiteralBoilerplateLiterals(ZoneList* properties, - Handle constants, - bool* is_simple, - int* depth); + int* depth, + bool* may_store_doubles); // Decide if a property should be in the object boilerplate. 
bool IsBoilerplateProperty(ObjectLiteral::Property* property); diff --git a/src/property.h b/src/property.h index 0dffe96..606f111 100644 --- a/src/property.h +++ b/src/property.h @@ -259,6 +259,8 @@ class LookupResult BASE_EMBEDDED { Representation representation() { ASSERT(IsFound()); + ASSERT(!IsTransition()); + ASSERT(details_.type() != NONEXISTENT); return details_.representation(); } @@ -346,7 +348,7 @@ class LookupResult BASE_EMBEDDED { Object* GetLazyValue() { switch (type()) { case FIELD: - return holder()->FastPropertyAt(GetFieldIndex().field_index()); + return holder()->RawFastPropertyAt(GetFieldIndex().field_index()); case NORMAL: { Object* value; value = holder()->property_dictionary()->ValueAt(GetDictionaryEntry()); diff --git a/src/runtime.cc b/src/runtime.cc index 3cc9a71..4e00b29 100644 --- a/src/runtime.cc +++ b/src/runtime.cc @@ -4473,7 +4473,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_KeyedGetProperty) { KeyedLookupCache* keyed_lookup_cache = isolate->keyed_lookup_cache(); int offset = keyed_lookup_cache->Lookup(receiver_map, key); if (offset != -1) { - Object* value = receiver->FastPropertyAt(offset); + // Doubles are not cached, so raw read the value. + Object* value = receiver->RawFastPropertyAt(offset); return value->IsTheHole() ? isolate->heap()->undefined_value() : value; @@ -4484,8 +4485,13 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_KeyedGetProperty) { receiver->LocalLookup(key, &result); if (result.IsField()) { int offset = result.GetFieldIndex().field_index(); - keyed_lookup_cache->Update(receiver_map, key, offset); - return receiver->FastPropertyAt(offset); + // Do not track double fields in the keyed lookup cache. Reading + // double values requires boxing. + if (!FLAG_track_double_fields || + !result.representation().IsDouble()) { + keyed_lookup_cache->Update(receiver_map, key, offset); + } + return receiver->FastPropertyAt(result.representation(), offset); } } else { // Attempt dictionary lookup. 
@@ -4661,6 +4667,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetDataProperty) { return lookup.holder()->GetNormalizedProperty(&lookup); case FIELD: return lookup.holder()->FastPropertyAt( + lookup.representation(), lookup.GetFieldIndex().field_index()); case CONSTANT_FUNCTION: return lookup.GetConstantFunction(); @@ -10177,14 +10184,18 @@ static MaybeObject* DebugLookupResultValue(Heap* heap, return heap->undefined_value(); } return value; - case FIELD: - value = + case FIELD: { + Object* value; + MaybeObject* maybe_value = JSObject::cast(result->holder())->FastPropertyAt( + result->representation(), result->GetFieldIndex().field_index()); + if (!maybe_value->To(&value)) return maybe_value; if (value->IsTheHole()) { return heap->undefined_value(); } return value; + } case CONSTANT_FUNCTION: return result->GetConstantFunction(); case CALLBACKS: { diff --git a/src/string-stream.cc b/src/string-stream.cc index 913a23f..ebe1b5b 100644 --- a/src/string-stream.cc +++ b/src/string-stream.cc @@ -367,7 +367,7 @@ void StringStream::PrintUsingMap(JSObject* js_object) { key->ShortPrint(); } Add(": "); - Object* value = js_object->FastPropertyAt(descs->GetFieldIndex(i)); + Object* value = js_object->RawFastPropertyAt(descs->GetFieldIndex(i)); Add("%o\n", value); } } diff --git a/src/stub-cache.cc b/src/stub-cache.cc index 214527f..f43c9ac 100644 --- a/src/stub-cache.cc +++ b/src/stub-cache.cc @@ -221,10 +221,12 @@ Handle StubCache::ComputeLoadNonexistent(Handle name, Handle StubCache::ComputeLoadField(Handle name, Handle receiver, Handle holder, - PropertyIndex field) { + PropertyIndex field, + Representation representation) { if (receiver.is_identical_to(holder)) { LoadFieldStub stub(field.is_inobject(holder), - field.translate(holder)); + field.translate(holder), + representation); return stub.GetCode(isolate()); } @@ -235,7 +237,7 @@ Handle StubCache::ComputeLoadField(Handle name, LoadStubCompiler compiler(isolate_); Handle handler = - compiler.CompileLoadField(receiver, 
holder, name, field); + compiler.CompileLoadField(receiver, holder, name, field, representation); JSObject::UpdateMapCodeCache(stub_holder, name, handler); return handler; } @@ -336,10 +338,12 @@ Handle StubCache::ComputeLoadGlobal(Handle name, Handle StubCache::ComputeKeyedLoadField(Handle name, Handle receiver, Handle holder, - PropertyIndex field) { + PropertyIndex field, + Representation representation) { if (receiver.is_identical_to(holder)) { KeyedLoadFieldStub stub(field.is_inobject(holder), - field.translate(holder)); + field.translate(holder), + representation); return stub.GetCode(isolate()); } @@ -350,7 +354,7 @@ Handle StubCache::ComputeKeyedLoadField(Handle name, KeyedLoadStubCompiler compiler(isolate_); Handle handler = - compiler.CompileLoadField(receiver, holder, name, field); + compiler.CompileLoadField(receiver, holder, name, field, representation); JSObject::UpdateMapCodeCache(stub_holder, name, handler); return handler; } @@ -1494,15 +1498,17 @@ Register BaseLoadStubCompiler::HandlerFrontend(Handle object, } -Handle BaseLoadStubCompiler::CompileLoadField(Handle object, - Handle holder, - Handle name, - PropertyIndex field) { +Handle BaseLoadStubCompiler::CompileLoadField( + Handle object, + Handle holder, + Handle name, + PropertyIndex field, + Representation representation) { Label miss; Register reg = HandlerFrontendHeader(object, receiver(), holder, name, &miss); - GenerateLoadField(reg, holder, field); + GenerateLoadField(reg, holder, field, representation); __ bind(&miss); TailCallBuiltin(masm(), MissBuiltin(kind())); @@ -1512,19 +1518,6 @@ Handle BaseLoadStubCompiler::CompileLoadField(Handle object, } -// Load a fast property out of a holder object (src). In-object properties -// are loaded directly otherwise the property is loaded from the properties -// fixed array. 
-void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm, - Register dst, - Register src, - Handle holder, - PropertyIndex index) { - DoGenerateFastPropertyLoad( - masm, dst, src, index.is_inobject(holder), index.translate(holder)); -} - - Handle BaseLoadStubCompiler::CompileLoadConstant( Handle object, Handle holder, @@ -1587,14 +1580,16 @@ void BaseLoadStubCompiler::GenerateLoadPostInterceptor( if (lookup->IsField()) { PropertyIndex field = lookup->GetFieldIndex(); if (interceptor_holder.is_identical_to(holder)) { - GenerateLoadField(interceptor_reg, holder, field); + GenerateLoadField( + interceptor_reg, holder, field, lookup->representation()); } else { // We found FIELD property in prototype chain of interceptor's holder. // Retrieve a field from field's holder. Register reg = HandlerFrontend( interceptor_holder, interceptor_reg, holder, name, &success); __ bind(&success); - GenerateLoadField(reg, holder, field); + GenerateLoadField( + reg, holder, field, lookup->representation()); } } else { // We found CALLBACKS property in prototype chain of interceptor's @@ -1646,7 +1641,7 @@ Handle BaseStoreStubCompiler::CompileStoreTransition( LookupResult* lookup, Handle transition, Handle name) { - Label miss, miss_restore_name; + Label miss, miss_restore_name, slow; GenerateNameCheck(name, this->name(), &miss); @@ -1656,15 +1651,19 @@ Handle BaseStoreStubCompiler::CompileStoreTransition( transition, name, receiver(), this->name(), value(), - scratch1(), scratch2(), + scratch1(), scratch2(), scratch3(), &miss, - &miss_restore_name); + &miss_restore_name, + &slow); // Handle store cache miss. GenerateRestoreName(masm(), &miss_restore_name, name); __ bind(&miss); TailCallBuiltin(masm(), MissBuiltin(kind())); + GenerateRestoreName(masm(), &slow, name); + TailCallBuiltin(masm(), SlowBuiltin(kind())); + // Return the generated code. 
return GetICCode(kind(), Code::MAP_TRANSITION, name); } diff --git a/src/stub-cache.h b/src/stub-cache.h index 84f7f93..cbaeace 100644 --- a/src/stub-cache.h +++ b/src/stub-cache.h @@ -112,7 +112,8 @@ class StubCache { Handle ComputeLoadField(Handle name, Handle object, Handle holder, - PropertyIndex field_index); + PropertyIndex field_index, + Representation representation); Handle ComputeLoadCallback(Handle name, Handle object, @@ -147,7 +148,8 @@ class StubCache { Handle ComputeKeyedLoadField(Handle name, Handle object, Handle holder, - PropertyIndex field_index); + PropertyIndex field_index, + Representation representation); Handle ComputeKeyedLoadCallback( Handle name, @@ -506,13 +508,9 @@ class StubCompiler BASE_EMBEDDED { static void GenerateFastPropertyLoad(MacroAssembler* masm, Register dst, Register src, - Handle holder, - PropertyIndex index); - static void DoGenerateFastPropertyLoad(MacroAssembler* masm, - Register dst, - Register src, - bool inobject, - int index); + bool inobject, + int index, + Representation representation); static void GenerateLoadArrayLength(MacroAssembler* masm, Register receiver, @@ -542,8 +540,10 @@ class StubCompiler BASE_EMBEDDED { Register value_reg, Register scratch1, Register scratch2, + Register scratch3, Label* miss_label, - Label* miss_restore_name); + Label* miss_restore_name, + Label* slow); void GenerateStoreField(MacroAssembler* masm, Handle object, @@ -565,6 +565,14 @@ class StubCompiler BASE_EMBEDDED { } return Builtins::kLoadIC_Miss; } + static Builtins::Name SlowBuiltin(Code::Kind kind) { + switch (kind) { + case Code::STORE_IC: return Builtins::kStoreIC_Slow; + case Code::KEYED_STORE_IC: return Builtins::kKeyedStoreIC_Slow; + default: UNREACHABLE(); + } + return Builtins::kStoreIC_Slow; + } static void TailCallBuiltin(MacroAssembler* masm, Builtins::Name name); // Generates code that verifies that the property holder has not changed @@ -643,7 +651,8 @@ class BaseLoadStubCompiler: public StubCompiler { Handle 
CompileLoadField(Handle object, Handle holder, Handle name, - PropertyIndex index); + PropertyIndex index, + Representation representation); Handle CompileLoadCallback(Handle object, Handle holder, @@ -695,7 +704,8 @@ class BaseLoadStubCompiler: public StubCompiler { void GenerateLoadField(Register reg, Handle holder, - PropertyIndex index); + PropertyIndex field, + Representation representation); void GenerateLoadConstant(Handle value); void GenerateLoadCallback(Register reg, Handle callback); diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc index 745de97..19fa0aa 100644 --- a/src/x64/full-codegen-x64.cc +++ b/src/x64/full-codegen-x64.cc @@ -1554,7 +1554,8 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { ? ObjectLiteral::kHasFunction : ObjectLiteral::kNoFlags; int properties_count = constant_properties->length() / 2; - if (expr->depth() > 1) { + if ((FLAG_track_double_fields && expr->may_store_doubles()) || + expr->depth() > 1) { __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); __ push(FieldOperand(rdi, JSFunction::kLiteralsOffset)); __ Push(Smi::FromInt(expr->literal_index())); diff --git a/src/x64/ic-x64.cc b/src/x64/ic-x64.cc index 0a9ceaa..761e05a 100644 --- a/src/x64/ic-x64.cc +++ b/src/x64/ic-x64.cc @@ -1528,6 +1528,26 @@ void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm, } +void StoreIC::GenerateSlow(MacroAssembler* masm) { + // ----------- S t a t e ------------- + // -- rax : value + // -- rcx : key + // -- rdx : receiver + // -- rsp[0] : return address + // ----------------------------------- + + __ pop(rbx); + __ push(rdx); // receiver + __ push(rcx); // key + __ push(rax); // value + __ push(rbx); // return address + + // Do tail-call to runtime routine. 
+ ExternalReference ref(IC_Utility(kKeyedStoreIC_Slow), masm->isolate()); + __ TailCallExternalReference(ref, 3, 1); +} + + void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) { // ----------- S t a t e ------------- // -- rax : value diff --git a/src/x64/lithium-codegen-x64.cc b/src/x64/lithium-codegen-x64.cc index c441cdb..97b27d5 100644 --- a/src/x64/lithium-codegen-x64.cc +++ b/src/x64/lithium-codegen-x64.cc @@ -2676,29 +2676,21 @@ void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) { void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) { + int offset = instr->hydrogen()->offset(); Register object = ToRegister(instr->object()); - if (!FLAG_track_double_fields) { - ASSERT(!instr->hydrogen()->representation().IsDouble()); + if (FLAG_track_double_fields && + instr->hydrogen()->representation().IsDouble()) { + XMMRegister result = ToDoubleRegister(instr->result()); + __ movsd(result, FieldOperand(object, offset)); + return; } - Register temp = instr->hydrogen()->representation().IsDouble() - ? 
ToRegister(instr->temp()) : ToRegister(instr->result()); + + Register result = ToRegister(instr->result()); if (instr->hydrogen()->is_in_object()) { - __ movq(temp, FieldOperand(object, instr->hydrogen()->offset())); + __ movq(result, FieldOperand(object, offset)); } else { - __ movq(temp, FieldOperand(object, JSObject::kPropertiesOffset)); - __ movq(temp, FieldOperand(temp, instr->hydrogen()->offset())); - } - - if (instr->hydrogen()->representation().IsDouble()) { - Label load_from_heap_number, done; - XMMRegister result = ToDoubleRegister(instr->result()); - __ JumpIfNotSmi(temp, &load_from_heap_number); - __ SmiToInteger32(temp, temp); - __ cvtlsi2sd(result, temp); - __ jmp(&done); - __ bind(&load_from_heap_number); - __ movsd(result, FieldOperand(temp, HeapNumber::kValueOffset)); - __ bind(&done); + __ movq(result, FieldOperand(object, JSObject::kPropertiesOffset)); + __ movq(result, FieldOperand(result, offset)); } } @@ -3918,6 +3910,8 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { int offset = instr->offset(); + Handle transition = instr->transition(); + if (FLAG_track_fields && representation.IsSmi()) { if (instr->value()->IsConstantOperand()) { LConstantOperand* operand_value = LConstantOperand::cast(instr->value()); @@ -3928,18 +3922,15 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { Register value = ToRegister(instr->value()); __ Integer32ToSmi(value, value); } - } else if (FLAG_track_double_fields && representation.IsDouble() && - !instr->hydrogen()->value()->type().IsSmi() && - !instr->hydrogen()->value()->type().IsHeapNumber()) { - Register value = ToRegister(instr->value()); - Label do_store; - __ JumpIfSmi(value, &do_store); - Handle map(isolate()->factory()->heap_number_map()); - DoCheckMapCommon(value, map, REQUIRE_EXACT_MAP, instr); - __ bind(&do_store); + } else if (FLAG_track_double_fields && representation.IsDouble()) { + ASSERT(transition.is_null()); + ASSERT(instr->is_in_object()); + 
ASSERT(!instr->hydrogen()->NeedsWriteBarrier()); + XMMRegister value = ToDoubleRegister(instr->value()); + __ movsd(FieldOperand(object, offset), value); + return; } - Handle transition = instr->transition(); if (!transition.is_null()) { if (transition->CanBeDeprecated()) { transition_maps_.Add(transition, info()->zone()); @@ -3984,6 +3975,7 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { ToRegister(operand_value)); } else { Handle handle_value = ToHandle(operand_value); + ASSERT(!instr->hydrogen()->NeedsWriteBarrier()); __ Move(FieldOperand(write_register, offset), handle_value); } } else { @@ -5236,7 +5228,8 @@ void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) { // Set up the parameters to the stub/runtime call and pick the right // runtime function or stub to call. int properties_count = instr->hydrogen()->constant_properties_length() / 2; - if (instr->hydrogen()->depth() > 1) { + if ((FLAG_track_double_fields && instr->hydrogen()->may_store_doubles()) || + instr->hydrogen()->depth() > 1) { __ PushHeapObject(literals); __ Push(Smi::FromInt(instr->hydrogen()->literal_index())); __ Push(constant_properties); diff --git a/src/x64/lithium-x64.cc b/src/x64/lithium-x64.cc index 1a8cfaf..f49f7d6 100644 --- a/src/x64/lithium-x64.cc +++ b/src/x64/lithium-x64.cc @@ -2039,9 +2039,7 @@ LInstruction* LChunkBuilder::DoStoreContextSlot(HStoreContextSlot* instr) { LInstruction* LChunkBuilder::DoLoadNamedField(HLoadNamedField* instr) { LOperand* obj = UseRegisterAtStart(instr->object()); - LOperand* temp = instr->representation().IsDouble() ? 
TempRegister() : NULL; - ASSERT(temp == NULL || FLAG_track_double_fields); - return DefineAsRegister(new(zone()) LLoadNamedField(obj, temp)); + return DefineAsRegister(new(zone()) LLoadNamedField(obj)); } @@ -2261,6 +2259,9 @@ LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) { val = UseRegisterOrConstant(instr->value()); } else if (FLAG_track_fields && instr->field_representation().IsSmi()) { val = UseTempRegister(instr->value()); + } else if (FLAG_track_double_fields && + instr->field_representation().IsDouble()) { + val = UseRegisterAtStart(instr->value()); } else { val = UseRegister(instr->value()); } @@ -2271,8 +2272,7 @@ LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) { needs_write_barrier_for_map) ? TempRegister() : NULL; LStoreNamedField* result = new(zone()) LStoreNamedField(obj, val, temp); - if ((FLAG_track_fields && instr->field_representation().IsSmi()) || - (FLAG_track_double_fields && instr->field_representation().IsDouble())) { + if (FLAG_track_fields && instr->field_representation().IsSmi()) { return AssignEnvironment(result); } return result; diff --git a/src/x64/lithium-x64.h b/src/x64/lithium-x64.h index ad0b0d6..d1f7e76 100644 --- a/src/x64/lithium-x64.h +++ b/src/x64/lithium-x64.h @@ -1463,15 +1463,13 @@ class LReturn: public LTemplateInstruction<0, 2, 0> { }; -class LLoadNamedField: public LTemplateInstruction<1, 1, 1> { +class LLoadNamedField: public LTemplateInstruction<1, 1, 0> { public: - explicit LLoadNamedField(LOperand* object, LOperand* temp) { + explicit LLoadNamedField(LOperand* object) { inputs_[0] = object; - temps_[0] = temp; } LOperand* object() { return inputs_[0]; } - LOperand* temp() { return temps_[0]; } DECLARE_CONCRETE_INSTRUCTION(LoadNamedField, "load-named-field") DECLARE_HYDROGEN_ACCESSOR(LoadNamedField) diff --git a/src/x64/stub-cache-x64.cc b/src/x64/stub-cache-x64.cc index f2901d5..a7faf9b 100644 --- a/src/x64/stub-cache-x64.cc +++ b/src/x64/stub-cache-x64.cc @@ -343,11 
+343,13 @@ void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm, } -void StubCompiler::DoGenerateFastPropertyLoad(MacroAssembler* masm, - Register dst, - Register src, - bool inobject, - int index) { +void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm, + Register dst, + Register src, + bool inobject, + int index, + Representation representation) { + ASSERT(!FLAG_track_double_fields || !representation.IsDouble()); int offset = index * kPointerSize; if (!inobject) { // Calculate the offset into the properties array. @@ -745,8 +747,10 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm, Register value_reg, Register scratch1, Register scratch2, + Register unused, Label* miss_label, - Label* miss_restore_name) { + Label* miss_restore_name, + Label* slow) { // Check that the map of the object hasn't changed. __ CheckMap(receiver_reg, Handle(object->map()), miss_label, DO_SMI_CHECK, REQUIRE_EXACT_MAP); @@ -765,16 +769,6 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm, // Ensure no transitions to deprecated maps are followed. __ CheckMapDeprecated(transition, scratch1, miss_label); - if (FLAG_track_fields && representation.IsSmi()) { - __ JumpIfNotSmi(value_reg, miss_label); - } else if (FLAG_track_double_fields && representation.IsDouble()) { - Label do_store; - __ JumpIfSmi(value_reg, &do_store); - __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(), - miss_label, DONT_DO_SMI_CHECK, REQUIRE_EXACT_MAP); - __ bind(&do_store); - } - // Check that we are allowed to write this. 
if (object->GetPrototype()->IsJSObject()) { JSObject* holder; @@ -790,7 +784,7 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm, } Register holder_reg = CheckPrototypes( object, receiver_reg, Handle(holder), name_reg, - scratch1, scratch2, name, miss_restore_name); + scratch1, scratch2, name, miss_restore_name, SKIP_RECEIVER); // If no property was found, and the holder (the last object in the // prototype chain) is in slow mode, we need to do a negative lookup on the // holder. @@ -809,6 +803,28 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm, } } + Register storage_reg = name_reg; + + if (FLAG_track_fields && representation.IsSmi()) { + __ JumpIfNotSmi(value_reg, miss_restore_name); + } else if (FLAG_track_double_fields && representation.IsDouble()) { + Label do_store, heap_number; + __ AllocateHeapNumber(storage_reg, scratch1, slow); + + __ JumpIfNotSmi(value_reg, &heap_number); + __ SmiToInteger32(scratch1, value_reg); + __ cvtlsi2sd(xmm0, scratch1); + __ jmp(&do_store); + + __ bind(&heap_number); + __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(), + miss_restore_name, DONT_DO_SMI_CHECK, REQUIRE_EXACT_MAP); + __ movsd(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset)); + + __ bind(&do_store); + __ movsd(FieldOperand(storage_reg, HeapNumber::kValueOffset), xmm0); + } + // Stub never generated for non-global objects that require access // checks. ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded()); @@ -834,12 +850,11 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm, __ Move(scratch1, transition); __ movq(FieldOperand(receiver_reg, HeapObject::kMapOffset), scratch1); - // Update the write barrier for the map field and pass the now unused - // name_reg as scratch register. + // Update the write barrier for the map field. 
__ RecordWriteField(receiver_reg, HeapObject::kMapOffset, scratch1, - name_reg, + scratch2, kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK); @@ -856,12 +871,20 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm, if (index < 0) { // Set the property straight into the object. int offset = object->map()->instance_size() + (index * kPointerSize); - __ movq(FieldOperand(receiver_reg, offset), value_reg); + if (FLAG_track_double_fields && representation.IsDouble()) { + __ movq(FieldOperand(receiver_reg, offset), storage_reg); + } else { + __ movq(FieldOperand(receiver_reg, offset), value_reg); + } if (!FLAG_track_fields || !representation.IsSmi()) { // Update the write barrier for the array address. // Pass the value being stored in the now unused name_reg. - __ movq(name_reg, value_reg); + if (!FLAG_track_double_fields || !representation.IsDouble()) { + __ movq(name_reg, value_reg); + } else { + ASSERT(storage_reg.is(name_reg)); + } __ RecordWriteField( receiver_reg, offset, name_reg, scratch1, kDontSaveFPRegs); } @@ -870,12 +893,20 @@ void StubCompiler::GenerateStoreTransition(MacroAssembler* masm, int offset = index * kPointerSize + FixedArray::kHeaderSize; // Get the properties array (optimistically). __ movq(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset)); - __ movq(FieldOperand(scratch1, offset), value_reg); + if (FLAG_track_double_fields && representation.IsDouble()) { + __ movq(FieldOperand(scratch1, offset), storage_reg); + } else { + __ movq(FieldOperand(scratch1, offset), value_reg); + } if (!FLAG_track_fields || !representation.IsSmi()) { // Update the write barrier for the array address. // Pass the value being stored in the now unused name_reg. 
- __ movq(name_reg, value_reg); + if (!FLAG_track_double_fields || !representation.IsDouble()) { + __ movq(name_reg, value_reg); + } else { + ASSERT(storage_reg.is(name_reg)); + } __ RecordWriteField( scratch1, offset, name_reg, receiver_reg, kDontSaveFPRegs); } @@ -923,11 +954,35 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm, if (FLAG_track_fields && representation.IsSmi()) { __ JumpIfNotSmi(value_reg, miss_label); } else if (FLAG_track_double_fields && representation.IsDouble()) { - Label do_store; - __ JumpIfSmi(value_reg, &do_store); + // Load the double storage. + if (index < 0) { + int offset = object->map()->instance_size() + (index * kPointerSize); + __ movq(scratch1, FieldOperand(receiver_reg, offset)); + } else { + __ movq(scratch1, + FieldOperand(receiver_reg, JSObject::kPropertiesOffset)); + int offset = index * kPointerSize + FixedArray::kHeaderSize; + __ movq(scratch1, FieldOperand(scratch1, offset)); + } + + // Store the value into the storage. + Label do_store, heap_number; + __ JumpIfNotSmi(value_reg, &heap_number); + __ SmiToInteger32(scratch2, value_reg); + __ cvtlsi2sd(xmm0, scratch2); + __ jmp(&do_store); + + __ bind(&heap_number); __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(), miss_label, DONT_DO_SMI_CHECK, REQUIRE_EXACT_MAP); + __ movsd(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset)); + __ bind(&do_store); + __ movsd(FieldOperand(scratch1, HeapNumber::kValueOffset), xmm0); + // Return the value (register rax). + ASSERT(value_reg.is(rax)); + __ ret(0); + return; } // TODO(verwaest): Share this code as a code stub. 
@@ -1196,15 +1251,18 @@ void BaseLoadStubCompiler::NonexistentHandlerFrontend( void BaseLoadStubCompiler::GenerateLoadField(Register reg, Handle holder, - PropertyIndex field) { + PropertyIndex field, + Representation representation) { if (!reg.is(receiver())) __ movq(receiver(), reg); if (kind() == Code::LOAD_IC) { LoadFieldStub stub(field.is_inobject(holder), - field.translate(holder)); + field.translate(holder), + representation); GenerateTailCall(masm(), stub.GetCode(isolate())); } else { KeyedLoadFieldStub stub(field.is_inobject(holder), - field.translate(holder)); + field.translate(holder), + representation); GenerateTailCall(masm(), stub.GetCode(isolate())); } } @@ -1461,7 +1519,8 @@ Handle CallStubCompiler::CompileCallField(Handle object, Register reg = CheckPrototypes(object, rdx, holder, rbx, rax, rdi, name, &miss); - GenerateFastPropertyLoad(masm(), rdi, reg, holder, index); + GenerateFastPropertyLoad(masm(), rdi, reg, index.is_inobject(holder), + index.translate(holder), Representation::Tagged()); // Check that the function really is a function. __ JumpIfSmi(rdi, &miss); diff --git a/test/cctest/test-heap-profiler.cc b/test/cctest/test-heap-profiler.cc index 40ecc02..b2c9b72 100644 --- a/test/cctest/test-heap-profiler.cc +++ b/test/cctest/test-heap-profiler.cc @@ -1414,7 +1414,7 @@ TEST(GetHeapValue) { GetProperty(obj, v8::HeapGraphEdge::kProperty, "n_prop"); v8::Local js_n_prop = js_obj->Get(v8_str("n_prop")).As(); - CHECK(js_n_prop == n_prop->GetHeapValue()); + CHECK(js_n_prop->NumberValue() == n_prop->GetHeapValue()->NumberValue()); } diff --git a/test/cctest/test-heap.cc b/test/cctest/test-heap.cc index 069ba2d..0711454 100644 --- a/test/cctest/test-heap.cc +++ b/test/cctest/test-heap.cc @@ -2403,7 +2403,7 @@ TEST(Regress2211) { // Check size. 
DescriptorArray* descriptors = internal_obj->map()->instance_descriptors(); ObjectHashTable* hashtable = ObjectHashTable::cast( - internal_obj->FastPropertyAt(descriptors->GetFieldIndex(0))); + internal_obj->RawFastPropertyAt(descriptors->GetFieldIndex(0))); // HashTable header (5) and 4 initial entries (8). CHECK_LE(hashtable->SizeFor(hashtable->length()), 13 * kPointerSize); } diff --git a/test/mjsunit/track-fields.js b/test/mjsunit/track-fields.js index ced2bb3..bcf37ae 100644 --- a/test/mjsunit/track-fields.js +++ b/test/mjsunit/track-fields.js @@ -118,3 +118,146 @@ poly_load(of1, false); poly_load(of2, true); %OptimizeFunctionOnNextCall(poly_load); assertEquals("[object Object]10", poly_load(of1, true)); + +// Ensure small object literals with doubles do not share double storage. +function object_literal() { return {"a":1.5}; } +var o8 = object_literal(); +var o9 = object_literal(); +o8.a = 4.6 +assertEquals(1.5, o9.a); + +// Ensure double storage is not leaked in the case of polymorphic loads. +function load_poly(o) { + return o.a; +} + +var o10 = { "a": 1.6 }; +var o11 = { "b": 1, "a": 1.7 }; +load_poly(o10); +load_poly(o10); +load_poly(o11); +%OptimizeFunctionOnNextCall(load_poly); +var val = load_poly(o10); +o10.a = 19.5; +assertFalse(o10.a == val); + +// Ensure polymorphic loads only go monomorphic when the representations are +// compatible. + +// Check polymorphic load from double + object fields. +function load_mono(o) { + return o.a1; +} + +var object = {"x": 1}; +var o10 = { "a1": 1.6 }; +var o11 = { "a1": object, "b": 1 }; +load_mono(o10); +load_mono(o10); +load_mono(o11); +%OptimizeFunctionOnNextCall(load_mono); +assertEquals(object, load_mono(o11)); + +// Check polymorphic load from smi + object fields. 
+function load_mono2(o) { + return o.a2; +} + +var o12 = { "a2": 5 }; +var o13 = { "a2": object, "b": 1 }; +load_mono2(o12); +load_mono2(o12); +load_mono2(o13); +%OptimizeFunctionOnNextCall(load_mono2); +assertEquals(object, load_mono2(o13)); + +// Check polymorphic load from double + double fields. +function load_mono3(o) { + return o.a3; +} + +var o14 = { "a3": 1.6 }; +var o15 = { "a3": 1.8, "b": 1 }; +load_mono3(o14); +load_mono3(o14); +load_mono3(o15); +%OptimizeFunctionOnNextCall(load_mono3); +assertEquals(1.6, load_mono3(o14)); +assertEquals(1.8, load_mono3(o15)); + +// Check that JSON parsing respects existing representations. +var o16 = JSON.parse('{"a":1.5}'); +var o17 = JSON.parse('{"a":100}'); +assertTrue(%HaveSameMap(o16, o17)); +var o17_a = o17.a; +assertEquals(100, o17_a); +o17.a = 200; +assertEquals(100, o17_a); +assertEquals(200, o17.a); + +// Ensure normalizing results in ignored representations. +var o18 = {}; +o18.field1 = 100; +o18.field2 = 1; +o18.to_delete = 100; + +var o19 = {}; +o19.field1 = 100; +o19.field2 = 1.6; +o19.to_delete = 100; + +assertFalse(%HaveSameMap(o18, o19)); + +delete o18.to_delete; +delete o19.to_delete; + +assertTrue(%HaveSameMap(o18, o19)); +assertEquals(1, o18.field2); +assertEquals(1.6, o19.field2); + +// Test megamorphic keyed stub behaviour in combination with representations. 
+var some_object20 = {"a":1};
+var o20 = {};
+o20.smi = 1;
+o20.dbl = 1.5;
+o20.obj = some_object20;
+
+function keyed_load(o, k) {
+  return o[k];
+}
+
+function keyed_store(o, k, v) {
+  return o[k] = v;
+}
+
+var smi20 = keyed_load(o20, "smi");
+var dbl20 = keyed_load(o20, "dbl");
+var obj20 = keyed_load(o20, "obj");
+keyed_load(o20, "smi");
+keyed_load(o20, "dbl");
+keyed_load(o20, "obj");
+keyed_load(o20, "smi");
+keyed_load(o20, "dbl");
+keyed_load(o20, "obj");
+
+assertEquals(1, smi20);
+assertEquals(1.5, dbl20);
+assertEquals(some_object20, obj20);
+
+keyed_store(o20, "smi", 100);
+keyed_store(o20, "dbl", 100);
+keyed_store(o20, "obj", 100);
+keyed_store(o20, "smi", 100);
+keyed_store(o20, "dbl", 100);
+keyed_store(o20, "obj", 100);
+keyed_store(o20, "smi", 100);
+keyed_store(o20, "dbl", 100);
+keyed_store(o20, "obj", 100);
+
+assertEquals(1, smi20);
+assertEquals(1.5, dbl20);
+assertEquals(some_object20, obj20);
+
+assertEquals(100, o20.smi);
+assertEquals(100, o20.dbl);
+assertEquals(100, o20.obj);