From: whesse@chromium.org
Date: Fri, 24 Jul 2009 11:22:35 +0000 (+0000)
Subject: Add inline caching for keyed loads and stores. Remove extra parentheses from some...
X-Git-Tag: upstream/4.7.83~23588
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=500e10b6483f91b1fd009e6311357033ee0c8e14;p=platform%2Fupstream%2Fv8.git

Add inline caching for keyed loads and stores. Remove extra parentheses
from some files.

Review URL: http://codereview.chromium.org/159266

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@2534 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
---

diff --git a/src/ia32/ic-ia32.cc b/src/ia32/ic-ia32.cc
index d64dee1..f7d0797 100644
--- a/src/ia32/ic-ia32.cc
+++ b/src/ia32/ic-ia32.cc
@@ -237,8 +237,8 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
   Label slow, fast, check_string, index_int, index_string;
 
   // Load name and receiver.
-  __ mov(eax, (Operand(esp, kPointerSize)));
-  __ mov(ecx, (Operand(esp, 2 * kPointerSize)));
+  __ mov(eax, Operand(esp, kPointerSize));
+  __ mov(ecx, Operand(esp, 2 * kPointerSize));
 
   // Check that the object isn't a smi.
   __ test(ecx, Immediate(kSmiTagMask));
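
The `test ecx, kSmiTagMask` guard above relies on V8's smi (small integer) tagging. A minimal standalone model of the 32-bit scheme this era of the code uses, with the constants mirrored here for illustration rather than imported from the V8 headers:

    #include <assert.h>
    #include <stdint.h>

    // Mirrored constants: smis carry a zero tag in bit 0 (kSmiTagSize == 1).
    const int32_t kSmiTag = 0;
    const int kSmiTagSize = 1;
    const int32_t kSmiTagMask = (1 << kSmiTagSize) - 1;

    int32_t SmiFromInt(int32_t value) { return value << kSmiTagSize; }
    bool IsSmi(int32_t word) { return (word & kSmiTagMask) == kSmiTag; }
    int32_t SmiToInt(int32_t word) { return word >> kSmiTagSize; }

    int main() {
      int32_t tagged = SmiFromInt(42);
      assert(IsSmi(tagged));           // what `test reg, kSmiTagMask` verifies
      assert(SmiToInt(tagged) == 42);  // what `sar reg, kSmiTagSize` recovers
      return 0;
    }

Heap object pointers carry a nonzero low tag, so testing bit 0 alone is enough to separate the two cases.
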
diff --git a/src/ia32/stub-cache-ia32.cc b/src/ia32/stub-cache-ia32.cc
index e47ad1c..4f5b3e0 100644
--- a/src/ia32/stub-cache-ia32.cc
+++ b/src/ia32/stub-cache-ia32.cc
@@ -1010,7 +1010,7 @@ Object* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
   __ IncrementCounter(&Counters::named_store_global_inline, 1);
 
   // Check that the map of the global has not changed.
-  __ mov(ebx, (Operand(esp, kPointerSize)));
+  __ mov(ebx, Operand(esp, kPointerSize));
   __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
          Immediate(Handle<Map>(object->map())));
   __ j(not_equal, &miss, not_taken);
@@ -1089,7 +1089,7 @@ Object* LoadStubCompiler::CompileLoadField(JSObject* object,
   // -----------------------------------
   Label miss;
 
-  __ mov(eax, (Operand(esp, kPointerSize)));
+  __ mov(eax, Operand(esp, kPointerSize));
   GenerateLoadField(object, holder, eax, ebx, edx, index, name, &miss);
   __ bind(&miss);
   GenerateLoadMiss(masm(), Code::LOAD_IC);
@@ -1110,7 +1110,7 @@ Object* LoadStubCompiler::CompileLoadCallback(JSObject* object,
   // -----------------------------------
   Label miss;
 
-  __ mov(eax, (Operand(esp, kPointerSize)));
+  __ mov(eax, Operand(esp, kPointerSize));
   GenerateLoadCallback(object, holder, eax, ecx, ebx, edx,
                        callback, name, &miss);
   __ bind(&miss);
@@ -1132,7 +1132,7 @@ Object* LoadStubCompiler::CompileLoadConstant(JSObject* object,
   // -----------------------------------
   Label miss;
 
-  __ mov(eax, (Operand(esp, kPointerSize)));
+  __ mov(eax, Operand(esp, kPointerSize));
   GenerateLoadConstant(object, holder, eax, ebx, edx, value, name, &miss);
   __ bind(&miss);
   GenerateLoadMiss(masm(), Code::LOAD_IC);
@@ -1152,7 +1152,7 @@ Object* LoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
   // -----------------------------------
   Label miss;
 
-  __ mov(eax, (Operand(esp, kPointerSize)));
+  __ mov(eax, Operand(esp, kPointerSize));
   // TODO(368): Compile in the whole chain: all the interceptors in
   // prototypes and ultimate answer.
   GenerateLoadInterceptor(receiver,
@@ -1188,7 +1188,7 @@ Object* LoadStubCompiler::CompileLoadGlobal(JSObject* object,
   __ IncrementCounter(&Counters::named_load_global_inline, 1);
 
   // Get the receiver from the stack.
-  __ mov(eax, (Operand(esp, kPointerSize)));
+  __ mov(eax, Operand(esp, kPointerSize));
 
   // If the object is the holder then we know that it's a global
   // object which can only happen for contextual loads. In this case,
@@ -1237,8 +1237,8 @@ Object* KeyedLoadStubCompiler::CompileLoadField(String* name,
   // -----------------------------------
   Label miss;
 
-  __ mov(eax, (Operand(esp, kPointerSize)));
-  __ mov(ecx, (Operand(esp, 2 * kPointerSize)));
+  __ mov(eax, Operand(esp, kPointerSize));
+  __ mov(ecx, Operand(esp, 2 * kPointerSize));
   __ IncrementCounter(&Counters::keyed_load_field, 1);
 
   // Check that the name has not changed.
@@ -1267,8 +1267,8 @@ Object* KeyedLoadStubCompiler::CompileLoadCallback(String* name,
   // -----------------------------------
   Label miss;
 
-  __ mov(eax, (Operand(esp, kPointerSize)));
-  __ mov(ecx, (Operand(esp, 2 * kPointerSize)));
+  __ mov(eax, Operand(esp, kPointerSize));
+  __ mov(ecx, Operand(esp, 2 * kPointerSize));
   __ IncrementCounter(&Counters::keyed_load_callback, 1);
 
   // Check that the name has not changed.
@@ -1297,8 +1297,8 @@ Object* KeyedLoadStubCompiler::CompileLoadConstant(String* name,
   // -----------------------------------
   Label miss;
 
-  __ mov(eax, (Operand(esp, kPointerSize)));
-  __ mov(ecx, (Operand(esp, 2 * kPointerSize)));
+  __ mov(eax, Operand(esp, kPointerSize));
+  __ mov(ecx, Operand(esp, 2 * kPointerSize));
   __ IncrementCounter(&Counters::keyed_load_constant_function, 1);
 
   // Check that the name has not changed.
@@ -1326,8 +1326,8 @@ Object* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
   // -----------------------------------
   Label miss;
 
-  __ mov(eax, (Operand(esp, kPointerSize)));
-  __ mov(ecx, (Operand(esp, 2 * kPointerSize)));
+  __ mov(eax, Operand(esp, kPointerSize));
+  __ mov(ecx, Operand(esp, 2 * kPointerSize));
   __ IncrementCounter(&Counters::keyed_load_interceptor, 1);
 
   // Check that the name has not changed.
@@ -1362,8 +1362,8 @@ Object* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) {
   // -----------------------------------
   Label miss;
 
-  __ mov(eax, (Operand(esp, kPointerSize)));
-  __ mov(ecx, (Operand(esp, 2 * kPointerSize)));
+  __ mov(eax, Operand(esp, kPointerSize));
+  __ mov(ecx, Operand(esp, 2 * kPointerSize));
   __ IncrementCounter(&Counters::keyed_load_array_length, 1);
 
   // Check that the name has not changed.
@@ -1388,8 +1388,8 @@ Object* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) {
   // -----------------------------------
   Label miss;
 
-  __ mov(eax, (Operand(esp, kPointerSize)));
-  __ mov(ecx, (Operand(esp, 2 * kPointerSize)));
+  __ mov(eax, Operand(esp, kPointerSize));
+  __ mov(ecx, Operand(esp, 2 * kPointerSize));
   __ IncrementCounter(&Counters::keyed_load_string_length, 1);
 
   // Check that the name has not changed.
@@ -1414,8 +1414,8 @@ Object* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
   // -----------------------------------
   Label miss;
 
-  __ mov(eax, (Operand(esp, kPointerSize)));
-  __ mov(ecx, (Operand(esp, 2 * kPointerSize)));
+  __ mov(eax, Operand(esp, kPointerSize));
+  __ mov(ecx, Operand(esp, 2 * kPointerSize));
   __ IncrementCounter(&Counters::keyed_load_function_prototype, 1);
 
   // Check that the name has not changed.
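
Each of the keyed stubs above is monomorphic: it is compiled for a single property name and begins with the "check that the name has not changed" comparison before doing any real work. A sketch of that contract with deliberately simplified, hypothetical types (V8's real stubs are generated machine code, not C++):

    #include <string>
    #include <unordered_map>

    // Hypothetical stand-ins for illustration only.
    struct Receiver { std::unordered_map<std::string, int> fields; };

    struct KeyedLoadFieldStub {
      std::string compiled_for;  // the name baked in at stub-compile time

      // Any key other than the compiled-for name is a miss and falls
      // back to the generic path, mirroring the name check above.
      bool Load(const Receiver& r, const std::string& key, int* out) const {
        if (key != compiled_for) return false;   // miss
        auto it = r.fields.find(key);
        if (it == r.fields.end()) return false;  // miss
        *out = it->second;                       // hit: fast path
        return true;
      }
    };

On a miss the IC machinery falls back to the runtime, which may compile and install a new stub for the name actually observed.
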
diff --git a/src/x64/assembler-x64.cc b/src/x64/assembler-x64.cc
index e9a6f7f..f511145 100644
--- a/src/x64/assembler-x64.cc
+++ b/src/x64/assembler-x64.cc
@@ -773,6 +773,15 @@ void Assembler::decq(const Operand& dst) {
 }
 
 
+void Assembler::decl(Register dst) {
+  EnsureSpace ensure_space(this);
+  last_pc_ = pc_;
+  emit_optional_rex_32(dst);
+  emit(0xFF);
+  emit_modrm(0x1, dst);
+}
+
+
 void Assembler::decl(const Operand& dst) {
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
@@ -1521,7 +1530,7 @@ void Assembler::store_rax(ExternalReference ref) {
 
 
 void Assembler::testb(Register reg, Immediate mask) {
-  ASSERT(is_int8(mask.value_));
+  ASSERT(is_int8(mask.value_) || is_uint8(mask.value_));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   if (reg.is(rax)) {
@@ -1540,7 +1549,7 @@ void Assembler::testb(Register reg, Immediate mask) {
 
 
 void Assembler::testb(const Operand& op, Immediate mask) {
-  ASSERT(is_int8(mask.value_));
+  ASSERT(is_int8(mask.value_) || is_uint8(mask.value_));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   emit_optional_rex_32(rax, op);
diff --git a/src/x64/assembler-x64.h b/src/x64/assembler-x64.h
index 1b2a35c..4b1eb7a 100644
--- a/src/x64/assembler-x64.h
+++ b/src/x64/assembler-x64.h
@@ -627,6 +627,7 @@ class Assembler : public Malloced {
   void decq(Register dst);
   void decq(const Operand& dst);
+  void decl(Register dst);
   void decl(const Operand& dst);
 
   // Sign-extends rax into rdx:rax.
@@ -737,6 +738,10 @@ class Assembler : public Malloced {
     shift_32(dst, 0x5);
   }
 
+  void shrl(Register dst, Immediate shift_amount) {
+    shift_32(dst, shift_amount, 0x5);
+  }
+
   void store_rax(void* dst, RelocInfo::Mode mode);
   void store_rax(ExternalReference ref);
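
The relaxed `testb` assertions matter for masks like `1 << 7`: 0x80 is a perfectly good byte operand, but it is not representable as a signed 8-bit immediate, so the old `is_int8`-only assertion rejected it. A small check of the two predicates, with implementations assumed to match the assembler's internal range checks:

    #include <assert.h>
    #include <stdint.h>

    // Assumed implementations of the assembler's range checks.
    bool is_int8(int64_t v)  { return v >= -128 && v <= 127; }
    bool is_uint8(int64_t v) { return v >= 0 && v <= 255; }

    int main() {
      assert(is_uint8(0x80));                // bit-7 masks now pass
      assert(!is_int8(0x80));                // the old assertion rejected them
      assert(is_int8(-1) && !is_uint8(-1));  // negative masks still accepted
      return 0;
    }
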
diff --git a/src/x64/builtins-x64.cc b/src/x64/builtins-x64.cc
index 459921c..08f8338 100644
--- a/src/x64/builtins-x64.cc
+++ b/src/x64/builtins-x64.cc
@@ -394,9 +394,9 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
     // If given receiver is already a JavaScript object then there's no
     // reason for converting it.
     __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, rcx);
-    __ j(less, &call_to_object);
+    __ j(below, &call_to_object);
     __ CmpInstanceType(rcx, LAST_JS_OBJECT_TYPE);
-    __ j(less_equal, &push_receiver);
+    __ j(below_equal, &push_receiver);
 
     // Convert the receiver to an object.
     __ bind(&call_to_object);
@@ -562,7 +562,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
   // If the type of the result (stored in its map) is less than
   // FIRST_JS_OBJECT_TYPE, it is not an object in the ECMA sense.
   __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
-  __ j(greater_equal, &exit);
+  __ j(above_equal, &exit);
 
   // Throw away the result of the constructor invocation and use the
   // on-stack receiver as the result.
diff --git a/src/x64/codegen-x64.cc b/src/x64/codegen-x64.cc
index 9ed7e74..66e4d39 100644
--- a/src/x64/codegen-x64.cc
+++ b/src/x64/codegen-x64.cc
@@ -3379,7 +3379,7 @@ void CodeGenerator::GenerateClassOf(ZoneList<Expression*>* args) {
   // functions to make sure they have 'Function' as their class.
   __ CmpObjectType(obj.reg(), FIRST_JS_OBJECT_TYPE, obj.reg());
-  null.Branch(less);
+  null.Branch(below);
 
   // As long as JS_FUNCTION_TYPE is the last instance type and it is
   // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
diff --git a/src/x64/disasm-x64.cc b/src/x64/disasm-x64.cc
index 83fa9cd..8b746c4 100644
--- a/src/x64/disasm-x64.cc
+++ b/src/x64/disasm-x64.cc
@@ -687,7 +687,7 @@ int DisassemblerX64::ShiftInstruction(byte* data) {
   byte modrm = *(data + 1);
   int mod, regop, rm;
   get_modrm(modrm, &mod, &regop, &rm);
-  ASSERT(regop < 8);
+  regop &= 0x7;  // The REX.R bit does not affect the operation.
   int imm8 = -1;
   int num_bytes = 2;
   if (mod != 3) {
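
The `less`/`greater` to `below`/`above` switch is about signedness: instance-type codes live in a byte and are loaded zero-extended (see the `movzxbq` in ic-x64.cc below), so they must be compared as unsigned values. If a type code ever exceeds 0x7F, a signed compare of the raw byte misorders it. A sketch with an assumed out-of-range value:

    #include <assert.h>
    #include <stdint.h>

    int main() {
      // Hypothetical type codes; the upper one crosses the signed-byte boundary.
      uint8_t first_js_object_type = 0xA0;
      uint8_t string_type = 0x10;

      // Unsigned "below": the intended ordering.
      assert(string_type < first_js_object_type);

      // Signed "less" on the reinterpreted bytes: 0xA0 becomes -96 and the
      // comparison silently inverts.
      assert(!(static_cast<int8_t>(string_type) <
               static_cast<int8_t>(first_js_object_type)));
      return 0;
    }
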
diff --git a/src/x64/ic-x64.cc b/src/x64/ic-x64.cc
index db74baf..247e9e6 100644
--- a/src/x64/ic-x64.cc
+++ b/src/x64/ic-x64.cc
@@ -42,6 +42,124 @@ namespace internal {
 
 #define __ ACCESS_MASM(masm)
 
 
+// Helper function used to load a property from a dictionary backing storage.
+// This function may return false negatives, so miss_label
+// must always call a backup property load that is complete.
+// This function is safe to call if the receiver has fast properties,
+// or if name is not a symbol, and will jump to the miss_label in that case.
+static void GenerateDictionaryLoad(MacroAssembler* masm, Label* miss_label,
+                                   Register r0, Register r1, Register r2,
+                                   Register name) {
+  // Register use:
+  //
+  // r0   - used to hold the property dictionary.
+  //
+  // r1   - initially the receiver
+  //      - used for the index into the property dictionary
+  //      - holds the result on exit.
+  //
+  // r2   - used to hold the capacity of the property dictionary.
+  //
+  // name - holds the name of the property and is unchanged.
+
+  Label done;
+
+  // Check for the absence of an interceptor.
+  // Load the map into r0.
+  __ movq(r0, FieldOperand(r1, JSObject::kMapOffset));
+  // Test the has_named_interceptor bit in the map.
+  __ testl(FieldOperand(r0, Map::kInstanceAttributesOffset),
+           Immediate(1 << (Map::kHasNamedInterceptor + (3 * 8))));
+
+  // Jump to miss if the interceptor bit is set.
+  __ j(not_zero, miss_label);
+
+  // Bail out if we have a JS global proxy object.
+  __ movzxbq(r0, FieldOperand(r0, Map::kInstanceTypeOffset));
+  __ cmpb(r0, Immediate(JS_GLOBAL_PROXY_TYPE));
+  __ j(equal, miss_label);
+
+  // Possible work-around for http://crbug.com/16276.
+  __ cmpb(r0, Immediate(JS_GLOBAL_OBJECT_TYPE));
+  __ j(equal, miss_label);
+  __ cmpb(r0, Immediate(JS_BUILTINS_OBJECT_TYPE));
+  __ j(equal, miss_label);
+
+  // Check that the properties array is a dictionary.
+  __ movq(r0, FieldOperand(r1, JSObject::kPropertiesOffset));
+  __ Cmp(FieldOperand(r0, HeapObject::kMapOffset),
+         Factory::hash_table_map());
+  __ j(not_equal, miss_label);
+
+  // Compute the capacity mask.
+  const int kCapacityOffset =
+      StringDictionary::kHeaderSize +
+      StringDictionary::kCapacityIndex * kPointerSize;
+  __ movq(r2, FieldOperand(r0, kCapacityOffset));
+  __ shrl(r2, Immediate(kSmiTagSize));  // convert smi to int
+  __ decl(r2);
+
+  // Generate an unrolled loop that performs a few probes before
+  // giving up. Measurements done on Gmail indicate that 2 probes
+  // cover ~93% of loads from dictionaries.
+  static const int kProbes = 4;
+  const int kElementsStartOffset =
+      StringDictionary::kHeaderSize +
+      StringDictionary::kElementsStartIndex * kPointerSize;
+  for (int i = 0; i < kProbes; i++) {
+    // Compute the masked index: (hash + i + i * i) & mask.
+    __ movl(r1, FieldOperand(name, String::kLengthOffset));
+    __ shrl(r1, Immediate(String::kHashShift));
+    if (i > 0) {
+      __ addl(r1, Immediate(StringDictionary::GetProbeOffset(i)));
+    }
+    __ and_(r1, r2);
+
+    // Scale the index by multiplying by the entry size.
+    ASSERT(StringDictionary::kEntrySize == 3);
+    __ lea(r1, Operand(r1, r1, times_2, 0));  // r1 = r1 * 3
+
+    // Check if the key is identical to the name.
+    __ cmpq(name, Operand(r0, r1, times_pointer_size,
+                          kElementsStartOffset - kHeapObjectTag));
+    if (i != kProbes - 1) {
+      __ j(equal, &done);
+    } else {
+      __ j(not_equal, miss_label);
+    }
+  }
+
+  // Check that the value is a normal property.
+  __ bind(&done);
+  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
+  __ testl(Operand(r0, r1, times_pointer_size, kDetailsOffset - kHeapObjectTag),
+           Immediate(PropertyDetails::TypeField::mask() << kSmiTagSize));
+  __ j(not_zero, miss_label);
+
+  // Get the value at the masked, scaled index.
+  const int kValueOffset = kElementsStartOffset + kPointerSize;
+  __ movq(r1,
+          Operand(r0, r1, times_pointer_size, kValueOffset - kHeapObjectTag));
+}
+
+
+// Helper function used to check that a value is either not an object
+// or is loaded if it is an object.
+static void GenerateCheckNonObjectOrLoaded(MacroAssembler* masm, Label* miss,
+                                           Register value) {
+  Label done;
+  // Check if the value is a Smi.
+  __ testl(value, Immediate(kSmiTagMask));
+  __ j(zero, &done);
+  // Check if the object has been loaded.
+  __ movq(kScratchRegister, FieldOperand(value, JSFunction::kMapOffset));
+  __ testb(FieldOperand(kScratchRegister, Map::kBitField2Offset),
+           Immediate(1 << Map::kNeedsLoading));
+  __ j(not_zero, miss);
+  __ bind(&done);
+}
+
+
 void KeyedLoadIC::ClearInlinedVersion(Address address) {
   // TODO(X64): Implement this when LoadIC is enabled.
 }
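
GenerateDictionaryLoad unrolls the first kProbes steps of the dictionary's probe sequence; per the comment in the loop, each probe visits slot `(hash + i + i * i) & mask`, with `StringDictionary::GetProbeOffset(i)` supplying the `i + i * i` part. A toy C++ model of the same lookup over a power-of-two table, with types and layout invented for illustration:

    #include <stdint.h>
    #include <string>
    #include <vector>

    struct Entry { std::string key; int value; };

    // Returns false on a miss OR after kProbes attempts. A "false negative"
    // is acceptable because the caller's miss path does a complete lookup.
    bool DictionaryLookup(const std::vector<Entry>& table,  // power-of-two size
                          uint32_t hash, const std::string& key, int* out) {
      const uint32_t mask = table.size() - 1;  // the shrl/decl capacity mask
      static const int kProbes = 4;
      for (int i = 0; i < kProbes; i++) {
        const Entry& e = table[(hash + i + i * i) & mask];
        if (e.key == key) { *out = e.value; return true; }
      }
      return false;
    }
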
@@ -77,14 +195,111 @@ void KeyedLoadIC::Generate(MacroAssembler* masm,
 }
 
 
+#ifdef DEBUG
+// For use in assert below.
+static int TenToThe(int exponent) {
+  ASSERT(exponent <= 9);
+  ASSERT(exponent >= 1);
+  int answer = 10;
+  for (int i = 1; i < exponent; i++) answer *= 10;
+  return answer;
+}
+#endif
+
+
 void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- rsp[0]  : return address
   //  -- rsp[8]  : name
   //  -- rsp[16] : receiver
   // -----------------------------------
+  Label slow, fast, check_string, index_int, index_string;
 
-  Generate(masm, ExternalReference(Runtime::kKeyedGetProperty));
+  // Load name and receiver.
+  __ movq(rax, Operand(rsp, kPointerSize));
+  __ movq(rcx, Operand(rsp, 2 * kPointerSize));
+
+  // Check that the object isn't a smi.
+  __ testl(rcx, Immediate(kSmiTagMask));
+  __ j(zero, &slow);
+
+  // Check that the object is some kind of JS object EXCEPT JS Value type.
+  // In the case that the object is a value-wrapper object,
+  // we enter the runtime system to make sure that indexing
+  // into string objects work as intended.
+  ASSERT(JS_OBJECT_TYPE > JS_VALUE_TYPE);
+  __ CmpObjectType(rcx, JS_OBJECT_TYPE, rdx);
+  __ j(below, &slow);
+  // Check that the receiver does not require access checks.  We need
+  // to check this explicitly since this generic stub does not perform
+  // map checks.  The map is already in rdx.
+  __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
+           Immediate(1 << Map::kIsAccessCheckNeeded));
+  __ j(not_zero, &slow);
+
+  // Check that the key is a smi.
+  __ testl(rax, Immediate(kSmiTagMask));
+  __ j(not_zero, &check_string);
+  __ sarl(rax, Immediate(kSmiTagSize));
+  // Get the elements array of the object.
+  __ bind(&index_int);
+  __ movq(rcx, FieldOperand(rcx, JSObject::kElementsOffset));
+  // Check that the object is in fast mode (not dictionary).
+  __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset), Factory::hash_table_map());
+  __ j(equal, &slow);
+  // Check that the key (index) is within bounds.
+  __ cmpl(rax, FieldOperand(rcx, FixedArray::kLengthOffset));
+  __ j(below, &fast);  // Unsigned comparison rejects negative indices.
+  // Slow case: Load name and receiver from stack and jump to runtime.
+  __ bind(&slow);
+  __ IncrementCounter(&Counters::keyed_load_generic_slow, 1);
+  KeyedLoadIC::Generate(masm, ExternalReference(Runtime::kKeyedGetProperty));
+  __ bind(&check_string);
+  // The key is not a smi.
+  // Is it a string?
+  __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx);
+  __ j(above_equal, &slow);
+  // Is the string an array index, with cached numeric value?
+  __ movl(rbx, FieldOperand(rax, String::kLengthOffset));
+  __ testl(rbx, Immediate(String::kIsArrayIndexMask));
+
+  // If the string is a symbol, do a quick inline probe of the receiver's
+  // dictionary, if it exists.
+  __ j(not_zero, &index_string);  // The value in rbx is used at jump target.
+  __ testb(FieldOperand(rdx, Map::kInstanceTypeOffset),
+           Immediate(kIsSymbolMask));
+  __ j(zero, &slow);
+  // Probe the dictionary, leaving the result in rcx.
+  GenerateDictionaryLoad(masm, &slow, rbx, rcx, rdx, rax);
+  GenerateCheckNonObjectOrLoaded(masm, &slow, rcx);
+  __ movq(rax, rcx);
+  __ IncrementCounter(&Counters::keyed_load_generic_symbol, 1);
+  __ ret(0);
+  // Array index string: If short enough use cache in length/hash field (rbx).
+  // We assert that there are enough bits in an int32_t after the hash shift
+  // bits have been subtracted to allow space for the length and the cached
+  // array index.
+  ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
+         (1 << (String::kShortLengthShift - String::kHashShift)));
+  __ bind(&index_string);
+  const int kLengthFieldLimit =
+      (String::kMaxCachedArrayIndexLength + 1) << String::kShortLengthShift;
+  __ cmpl(rbx, Immediate(kLengthFieldLimit));
+  __ j(above_equal, &slow);
+  __ movl(rax, rbx);
+  __ and_(rax, Immediate((1 << String::kShortLengthShift) - 1));
+  __ shrl(rax, Immediate(String::kLongLengthShift));
+  __ jmp(&index_int);
+  // Fast case: Do the load.
+  __ bind(&fast);
+  __ movq(rax, Operand(rcx, rax, times_pointer_size,
+                       FixedArray::kHeaderSize - kHeapObjectTag));
+  __ Cmp(rax, Factory::the_hole_value());
+  // In case the loaded value is the_hole we have to consult GetProperty
+  // to ensure the prototype chain is searched.
+  __ j(equal, &slow);
+  __ IncrementCounter(&Counters::keyed_load_generic_smi, 1);
+  __ ret(0);
 }
 
 
 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
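
The `index_string` path never parses digits: for short strings known to be valid array indices, the numeric value is cached inside the combined length/hash field, so a mask and a shift recover it. An illustrative model of such a packed field; the layout constants here are invented, not String's real ones:

    #include <assert.h>
    #include <stdint.h>

    // Hypothetical layout: length in the top byte, cached index in the middle,
    // flag bits (e.g. "is array index") at the bottom.
    const int kShortLengthShift = 24;
    const int kCachedIndexShift = 2;

    uint32_t CachedArrayIndex(uint32_t field) {
      // Mirrors the and_/shrl pair above: mask off the length, shift out flags.
      return (field & ((1u << kShortLengthShift) - 1)) >> kCachedIndexShift;
    }

    int main() {
      // A field for the one-character string "7" with the index 7 cached.
      uint32_t field = (1u << kShortLengthShift) | (7u << kCachedIndexShift);
      assert(CachedArrayIndex(field) == 7);
      return 0;
    }
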
@@ -128,13 +343,6 @@ Object* KeyedLoadStubCompiler::CompileLoadConstant(String* name,
   return NULL;
 }
 
-Object* KeyedLoadStubCompiler::CompileLoadField(String* name,
-                                                JSObject* object,
-                                                JSObject* holder,
-                                                int index) {
-  UNIMPLEMENTED();
-  return NULL;
-}
 
 Object* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
   UNIMPLEMENTED();
@@ -176,10 +384,120 @@ void KeyedStoreIC::GenerateExtendStorage(MacroAssembler* masm) {
   Generate(masm, ExternalReference(IC_Utility(kKeyedStoreIC_Miss)));
 }
 
+
 void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm) {
-  Generate(masm, ExternalReference(IC_Utility(kKeyedStoreIC_Miss)));
+  // ----------- S t a t e -------------
+  //  -- rax     : value
+  //  -- rsp[0]  : return address
+  //  -- rsp[8]  : key
+  //  -- rsp[16] : receiver
+  // -----------------------------------
+  Label slow, fast, array, extra;
+
+  // Get the receiver from the stack.
+  __ movq(rdx, Operand(rsp, 2 * kPointerSize));  // 2 ~ return address, key
+  // Check that the object isn't a smi.
+  __ testl(rdx, Immediate(kSmiTagMask));
+  __ j(zero, &slow);
+  // Get the map from the receiver.
+  __ movq(rcx, FieldOperand(rdx, HeapObject::kMapOffset));
+  // Check that the receiver does not require access checks.  We need
+  // to do this because this generic stub does not perform map checks.
+  __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
+           Immediate(1 << Map::kIsAccessCheckNeeded));
+  __ j(not_zero, &slow);
+  // Get the key from the stack.
+  __ movq(rbx, Operand(rsp, 1 * kPointerSize));  // 1 ~ return address
+  // Check that the key is a smi.
+  __ testl(rbx, Immediate(kSmiTagMask));
+  __ j(not_zero, &slow);
+
+  __ CmpInstanceType(rcx, JS_ARRAY_TYPE);
+  __ j(equal, &array);
+  // Check that the object is some kind of JS object.
+  __ CmpInstanceType(rcx, FIRST_JS_OBJECT_TYPE);
+  __ j(below, &slow);
+
+  // Object case: Check key against length in the elements array.
+  // rax: value
+  // rdx: JSObject
+  // rbx: index (as a smi)
+  __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));
+  // Check that the object is in fast mode (not dictionary).
+  __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset), Factory::hash_table_map());
+  __ j(equal, &slow);
+  // Untag the key (for checking against untagged length in the fixed array).
+  __ movl(rdx, rbx);
+  __ sarl(rdx, Immediate(kSmiTagSize));
+  __ cmpl(rdx, FieldOperand(rcx, Array::kLengthOffset));
+  // rax: value
+  // rcx: FixedArray
+  // rbx: index (as a smi)
+  __ j(below, &fast);
+
+
+  // Slow case: Push extra copies of the arguments (3).
+  __ bind(&slow);
+  __ pop(rcx);
+  __ push(Operand(rsp, 1 * kPointerSize));
+  __ push(Operand(rsp, 1 * kPointerSize));
+  __ push(rax);
+  __ push(rcx);
+  // Do tail-call to runtime routine.
+  __ TailCallRuntime(ExternalReference(Runtime::kSetProperty), 3);
+
+
+  // Extra capacity case: Check if there is extra capacity to
+  // perform the store and update the length. Used for adding one
+  // element to the array by writing to array[array.length].
+  __ bind(&extra);
+  // rax: value
+  // rdx: JSArray
+  // rcx: FixedArray
+  // rbx: index (as a smi)
+  // flags: compare (rbx, rdx.length())
+  __ j(not_equal, &slow);  // do not leave holes in the array
+  __ sarl(rbx, Immediate(kSmiTagSize));  // untag
+  __ cmpl(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
+  __ j(above_equal, &slow);
+  // Restore tag and increment.
+  __ lea(rbx, Operand(rbx, rbx, times_1, 1 << kSmiTagSize));
+  __ movq(FieldOperand(rdx, JSArray::kLengthOffset), rbx);
+  __ subl(rbx, Immediate(1 << kSmiTagSize));  // decrement rbx again
+  __ jmp(&fast);
+
+
+  // Array case: Get the length and the elements array from the JS
+  // array. Check that the array is in fast mode; if it is the
+  // length is always a smi.
+  __ bind(&array);
+  // rax: value
+  // rdx: JSArray
+  // rbx: index (as a smi)
+  __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));
+  __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset), Factory::hash_table_map());
+  __ j(equal, &slow);
+
+  // Check the key against the length in the array, compute the
+  // address to store into and fall through to fast case.
+  __ cmpl(rbx, FieldOperand(rdx, JSArray::kLengthOffset));
+  __ j(above_equal, &extra);
+
+
+  // Fast case: Do the store.
+  __ bind(&fast);
+  // rax: value
+  // rcx: FixedArray
+  // rbx: index (as a smi)
+  __ movq(Operand(rcx, rbx, times_4, FixedArray::kHeaderSize - kHeapObjectTag),
+          rax);
+  // Update write barrier for the elements array address.
+  __ movq(rdx, rax);
+  __ RecordWrite(rcx, 0, rdx, rbx);
+  __ ret(0);
 }
 
+
 Object* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
                                                   int index,
                                                   Map* transition,
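
The "restore tag and increment" `lea` in the extra-capacity path folds retagging and incrementing into one instruction: for an untagged index v it computes v + v*1 + 2 = (v + 1) << 1, which is exactly the smi encoding of v + 1 given kSmiTagSize == 1. A quick check of that identity:

    #include <assert.h>
    #include <stdint.h>

    int main() {
      for (int32_t v = 0; v < 1000; v++) {
        int32_t lea = v + v * 1 + (1 << 1);  // base + index*scale + disp
        int32_t smi = (v + 1) << 1;          // smi encoding of v + 1
        assert(lea == smi);
      }
      return 0;
    }
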
diff --git a/src/x64/macro-assembler-x64.h b/src/x64/macro-assembler-x64.h
index 2ee6eea..44a76a4 100644
--- a/src/x64/macro-assembler-x64.h
+++ b/src/x64/macro-assembler-x64.h
@@ -175,11 +175,13 @@ class MacroAssembler: public Assembler {
   void Call(Handle<Code> code_object, RelocInfo::Mode rmode);
 
   // Compare object type for heap object.
+  // Always use unsigned comparisons: above and below, not less and greater.
   // Incoming register is heap_object and outgoing register is map.
   // They may be the same register, and may be kScratchRegister.
   void CmpObjectType(Register heap_object, InstanceType type, Register map);
 
   // Compare instance type for map.
+  // Always use unsigned comparisons: above and below, not less and greater.
   void CmpInstanceType(Register map, InstanceType type);
 
   // FCmp is similar to integer cmp, but requires unsigned
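
These two comments codify the convention the builtins and codegen changes rely on. As a reminder of what the helpers stand for, a sketch with a toy object model (field names, offsets, and type values are illustrative, not V8's): load the object's map, load the map's zero-extended instance-type byte, and compare it unsigned.

    #include <assert.h>
    #include <stdint.h>

    struct Map { uint8_t instance_type; };
    struct HeapObject { Map* map; };

    // Roughly what a CmpObjectType/j(above_equal) pair computes.
    bool ObjectTypeIsAtLeast(const HeapObject* obj, uint8_t type) {
      uint8_t t = obj->map->instance_type;  // movzx: zero-extended, unsigned
      return t >= type;                     // above_equal, never greater_equal
    }

    int main() {
      Map m = { 0xA0 };  // assumed large type code, past the signed-byte range
      HeapObject o = { &m };
      assert(ObjectTypeIsAtLeast(&o, 0x10));
      return 0;
    }
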
diff --git a/src/x64/stub-cache-x64.cc b/src/x64/stub-cache-x64.cc
index ce7886b..ba13996 100644
--- a/src/x64/stub-cache-x64.cc
+++ b/src/x64/stub-cache-x64.cc
@@ -327,7 +327,7 @@ Object* LoadStubCompiler::CompileLoadConstant(JSObject* object,
   // -----------------------------------
   Label miss;
 
-  __ movq(rax, (Operand(rsp, kPointerSize)));
+  __ movq(rax, Operand(rsp, kPointerSize));
   GenerateLoadConstant(object, holder, rax, rbx, rdx, value, name, &miss);
   __ bind(&miss);
   GenerateLoadMiss(masm(), Code::LOAD_IC);
@@ -348,7 +348,7 @@ Object* LoadStubCompiler::CompileLoadField(JSObject* object,
   // -----------------------------------
   Label miss;
 
-  __ movq(rax, (Operand(rsp, kPointerSize)));
+  __ movq(rax, Operand(rsp, kPointerSize));
   GenerateLoadField(object, holder, rax, rbx, rdx, index, name, &miss);
   __ bind(&miss);
   GenerateLoadMiss(masm(), Code::LOAD_IC);
@@ -381,7 +381,7 @@ Object* LoadStubCompiler::CompileLoadGlobal(JSObject* object,
   __ IncrementCounter(&Counters::named_load_global_inline, 1);
 
   // Get the receiver from the stack.
-  __ movq(rax, (Operand(rsp, kPointerSize)));
+  __ movq(rax, Operand(rsp, kPointerSize));
 
   // If the object is the holder then we know that it's a global
   // object which can only happen for contextual loads. In this case,
@@ -476,6 +476,36 @@ Object* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
 }
 
 
+Object* KeyedLoadStubCompiler::CompileLoadField(String* name,
+                                                JSObject* receiver,
+                                                JSObject* holder,
+                                                int index) {
+  // ----------- S t a t e -------------
+  //  -- rsp[0]  : return address
+  //  -- rsp[8]  : name
+  //  -- rsp[16] : receiver
+  // -----------------------------------
+  Label miss;
+
+  __ movq(rax, Operand(rsp, kPointerSize));
+  __ movq(rcx, Operand(rsp, 2 * kPointerSize));
+  __ IncrementCounter(&Counters::keyed_load_field, 1);
+
+  // Check that the name has not changed.
+  __ Cmp(rax, Handle<String>(name));
+  __ j(not_equal, &miss);
+
+  GenerateLoadField(receiver, holder, rcx, rbx, rdx, index, name, &miss);
+
+  __ bind(&miss);
+  __ DecrementCounter(&Counters::keyed_load_field, 1);
+  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
+
+  // Return the generated code.
+  return GetCode(FIELD, name);
+}
+
+
 // TODO(1241006): Avoid having lazy compile stubs specialized by the
 // number of arguments.  It is not needed anymore.
 Object* StubCompiler::CompileLazyCompile(Code::Flags flags) {
diff --git a/test/cctest/cctest.status b/test/cctest/cctest.status
index b234ca3..fa33d32 100644
--- a/test/cctest/cctest.status
+++ b/test/cctest/cctest.status
@@ -113,6 +113,7 @@ test-debug/RecursiveBreakpoints: CRASH || FAIL
 test-debug/DebuggerUnload: CRASH || FAIL
 test-debug/DebuggerHostDispatch: CRASH || FAIL
 test-debug/DebugBreakInMessageHandler: CRASH || FAIL
+test-debug/NoDebugBreakInAfterCompileMessageHandler: CRASH || FAIL
 test-api/HugeConsStringOutOfMemory: CRASH || FAIL
 test-api/OutOfMemory: CRASH || FAIL
 test-api/OutOfMemoryNested: CRASH || FAIL