From: mvstanton@chromium.org
Date: Wed, 22 Jan 2014 10:41:23 +0000 (+0000)
Subject: Revert "Add hydrogen support for ArrayPop, and remove the handwritten call stubs."
X-Git-Tag: upstream/4.7.83~11067
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=1b3280c49162b8fe7817f6d1941563d5f6b54075;p=platform%2Fupstream%2Fv8.git

Revert "Add hydrogen support for ArrayPop, and remove the handwritten call stubs."

This reverts commit r18709 due to a deopt fuzzer issue.

TBR=verwaest@chromium.org

Review URL: https://codereview.chromium.org/143983010

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@18731 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
---

diff --git a/src/arm/stub-cache-arm.cc b/src/arm/stub-cache-arm.cc
index 8625ed6..e7c5733 100644
--- a/src/arm/stub-cache-arm.cc
+++ b/src/arm/stub-cache-arm.cc
@@ -1571,6 +1571,79 @@ Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
 }
 
 
+Handle<Code> CallStubCompiler::CompileArrayPopCall(
+    Handle<Object> object,
+    Handle<JSObject> holder,
+    Handle<Cell> cell,
+    Handle<JSFunction> function,
+    Handle<String> name,
+    Code::StubType type) {
+  // If object is not an array or is observed or sealed, bail out to regular
+  // call.
+  if (!object->IsJSArray() ||
+      !cell.is_null() ||
+      Handle<JSArray>::cast(object)->map()->is_observed() ||
+      !Handle<JSArray>::cast(object)->map()->is_extensible()) {
+    return Handle<Code>::null();
+  }
+
+  Label miss, return_undefined, call_builtin;
+  Register receiver = r0;
+  Register scratch = r1;
+  Register elements = r3;
+
+  HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
+
+  // Get the elements array of the object.
+  __ ldr(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
+
+  // Check that the elements are in fast mode and writable.
+  __ CheckMap(elements,
+              scratch,
+              Heap::kFixedArrayMapRootIndex,
+              &call_builtin,
+              DONT_DO_SMI_CHECK);
+
+  // Get the array's length into r4 and calculate new length.
+  __ ldr(r4, FieldMemOperand(receiver, JSArray::kLengthOffset));
+  __ sub(r4, r4, Operand(Smi::FromInt(1)), SetCC);
+  __ b(lt, &return_undefined);
+
+  // Get the last element.
+  __ LoadRoot(r6, Heap::kTheHoleValueRootIndex);
+  // We can't address the last element in one operation. Compute the more
+  // expensive shift first, and use an offset later on.
+  __ add(elements, elements, Operand::PointerOffsetFromSmiKey(r4));
+  __ ldr(scratch, FieldMemOperand(elements, FixedArray::kHeaderSize));
+  __ cmp(scratch, r6);
+  __ b(eq, &call_builtin);
+
+  // Set the array's length.
+  __ str(r4, FieldMemOperand(receiver, JSArray::kLengthOffset));
+
+  // Fill with the hole.
+  __ str(r6, FieldMemOperand(elements, FixedArray::kHeaderSize));
+  const int argc = arguments().immediate();
+  __ Drop(argc + 1);
+  __ mov(r0, scratch);
+  __ Ret();
+
+  __ bind(&return_undefined);
+  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
+  __ Drop(argc + 1);
+  __ Ret();
+
+  __ bind(&call_builtin);
+  __ TailCallExternalReference(
+      ExternalReference(Builtins::c_ArrayPop, isolate()), argc + 1, 1);
+
+  HandlerFrontendFooter(&miss);
+
+  // Return the generated code.
+  return GetCode(type, name);
+}
+
+
 Handle<Code> CallStubCompiler::CompileFastApiCall(
     const CallOptimization& optimization,
     Handle<Object> object,
diff --git a/src/hydrogen.cc b/src/hydrogen.cc
index 8980087..0780385 100644
--- a/src/hydrogen.cc
+++ b/src/hydrogen.cc
@@ -7613,53 +7613,6 @@ bool HOptimizedGraphBuilder::TryInlineBuiltinMethodCall(
         return true;
       }
       break;
-    case kArrayPop: {
-      if (!expr->IsMonomorphic() || expr->check_type() != RECEIVER_MAP_CHECK) {
-        return false;
-      }
-      if (receiver_map->instance_type() != JS_ARRAY_TYPE) return false;
-      ElementsKind elements_kind = receiver_map->elements_kind();
-      if (!IsFastElementsKind(elements_kind)) return false;
-      AddCheckConstantFunction(expr->holder(), receiver, receiver_map);
-
-      Drop(expr->arguments()->length());
-      HValue* result;
-      HValue* checked_object;
-      HValue* reduced_length;
-      HValue* receiver = Pop();
-      { NoObservableSideEffectsScope scope(this);
-        checked_object = AddCheckMap(receiver, receiver_map);
-        HValue* elements = AddLoadElements(checked_object);
-        // Ensure that we aren't popping from a copy-on-write array.
-        if (IsFastSmiOrObjectElementsKind(elements_kind)) {
-          Add<HCheckMaps>(
-              elements, isolate()->factory()->fixed_array_map(), top_info());
-        }
-        HValue* length = Add<HLoadNamedField>(
-            checked_object, HObjectAccess::ForArrayLength(elements_kind));
-        reduced_length = AddUncasted<HSub>(length, graph()->GetConstant1());
-        HValue* bounds_check = Add<HBoundsCheck>(
-            graph()->GetConstant0(), length);
-        result = AddElementAccess(elements, reduced_length, NULL,
-                                  bounds_check, elements_kind, false);
-        Factory* factory = isolate()->factory();
-        double nan_double = FixedDoubleArray::hole_nan_as_double();
-        HValue* hole = IsFastSmiOrObjectElementsKind(elements_kind)
-            ? Add<HConstant>(factory->the_hole_value())
-            : Add<HConstant>(nan_double);
-        if (IsFastSmiOrObjectElementsKind(elements_kind)) {
-          elements_kind = FAST_HOLEY_ELEMENTS;
-        }
-        AddElementAccess(
-            elements, reduced_length, hole, bounds_check, elements_kind, true);
-      }
-      Add<HStoreNamedField>(
-          checked_object, HObjectAccess::ForArrayLength(elements_kind),
-          reduced_length);
-      Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
-      ast_context()->ReturnValue(result);
-      return true;
-    }
     default:
       // Not yet supported for inlining.
       break;
diff --git a/src/ia32/stub-cache-ia32.cc b/src/ia32/stub-cache-ia32.cc
index 0a0e0f9..584d064 100644
--- a/src/ia32/stub-cache-ia32.cc
+++ b/src/ia32/stub-cache-ia32.cc
@@ -1666,6 +1666,76 @@ Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
 }
 
 
+Handle<Code> CallStubCompiler::CompileArrayPopCall(
+    Handle<Object> object,
+    Handle<JSObject> holder,
+    Handle<Cell> cell,
+    Handle<JSFunction> function,
+    Handle<String> name,
+    Code::StubType type) {
+  // If object is not an array or is observed or sealed, bail out to regular
+  // call.
+  if (!object->IsJSArray() ||
+      !cell.is_null() ||
+      Handle<JSArray>::cast(object)->map()->is_observed() ||
+      !Handle<JSArray>::cast(object)->map()->is_extensible()) {
+    return Handle<Code>::null();
+  }
+
+  Label miss, return_undefined, call_builtin;
+
+  HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
+
+  // Get the elements array of the object.
+  __ mov(ebx, FieldOperand(edx, JSArray::kElementsOffset));
+
+  // Check that the elements are in fast mode and writable.
+  __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
+         Immediate(factory()->fixed_array_map()));
+  __ j(not_equal, &call_builtin);
+
+  // Get the array's length into ecx and calculate new length.
+  __ mov(ecx, FieldOperand(edx, JSArray::kLengthOffset));
+  __ sub(ecx, Immediate(Smi::FromInt(1)));
+  __ j(negative, &return_undefined);
+
+  // Get the last element.
+  STATIC_ASSERT(kSmiTagSize == 1);
+  STATIC_ASSERT(kSmiTag == 0);
+  __ mov(eax, FieldOperand(ebx,
+                           ecx, times_half_pointer_size,
+                           FixedArray::kHeaderSize));
+  __ cmp(eax, Immediate(factory()->the_hole_value()));
+  __ j(equal, &call_builtin);
+
+  // Set the array's length.
+  __ mov(FieldOperand(edx, JSArray::kLengthOffset), ecx);
+
+  // Fill with the hole.
+  __ mov(FieldOperand(ebx,
+                      ecx, times_half_pointer_size,
+                      FixedArray::kHeaderSize),
+         Immediate(factory()->the_hole_value()));
+  const int argc = arguments().immediate();
+  __ ret((argc + 1) * kPointerSize);
+
+  __ bind(&return_undefined);
+  __ mov(eax, Immediate(factory()->undefined_value()));
+  __ ret((argc + 1) * kPointerSize);
+
+  __ bind(&call_builtin);
+  __ TailCallExternalReference(
+      ExternalReference(Builtins::c_ArrayPop, isolate()),
+      argc + 1,
+      1);
+
+  HandlerFrontendFooter(&miss);
+
+  // Return the generated code.
+  return GetCode(type, name);
+}
+
+
 Handle<Code> CallStubCompiler::CompileFastApiCall(
     const CallOptimization& optimization,
     Handle<Object> object,
diff --git a/src/mips/stub-cache-mips.cc b/src/mips/stub-cache-mips.cc
index 23ef6ec..e5cb200 100644
--- a/src/mips/stub-cache-mips.cc
+++ b/src/mips/stub-cache-mips.cc
@@ -1556,6 +1556,78 @@ Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
 }
 
 
+Handle<Code> CallStubCompiler::CompileArrayPopCall(
+    Handle<Object> object,
+    Handle<JSObject> holder,
+    Handle<Cell> cell,
+    Handle<JSFunction> function,
+    Handle<String> name,
+    Code::StubType type) {
+  // If object is not an array or is observed or sealed, bail out to regular
+  // call.
+  if (!object->IsJSArray() ||
+      !cell.is_null() ||
+      Handle<JSArray>::cast(object)->map()->is_observed() ||
+      !Handle<JSArray>::cast(object)->map()->is_extensible()) {
+    return Handle<Code>::null();
+  }
+
+  Label miss, return_undefined, call_builtin;
+  Register receiver = a0;
+  Register scratch = a1;
+  Register elements = a3;
+  HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
+
+  // Get the elements array of the object.
+  __ lw(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
+
+  // Check that the elements are in fast mode and writable.
+  __ CheckMap(elements,
+              scratch,
+              Heap::kFixedArrayMapRootIndex,
+              &call_builtin,
+              DONT_DO_SMI_CHECK);
+
+  // Get the array's length into t0 and calculate new length.
+  __ lw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));
+  __ Subu(t0, t0, Operand(Smi::FromInt(1)));
+  __ Branch(&return_undefined, lt, t0, Operand(zero_reg));
+
+  // Get the last element.
+  __ LoadRoot(t2, Heap::kTheHoleValueRootIndex);
+  STATIC_ASSERT(kSmiTagSize == 1);
+  STATIC_ASSERT(kSmiTag == 0);
+  // We can't address the last element in one operation. Compute the more
+  // expensive shift first, and use an offset later on.
+  __ sll(t1, t0, kPointerSizeLog2 - kSmiTagSize);
+  __ Addu(elements, elements, t1);
+  __ lw(scratch, FieldMemOperand(elements, FixedArray::kHeaderSize));
+  __ Branch(&call_builtin, eq, scratch, Operand(t2));
+
+  // Set the array's length.
+  __ sw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));
+
+  // Fill with the hole.
+  __ sw(t2, FieldMemOperand(elements, FixedArray::kHeaderSize));
+  const int argc = arguments().immediate();
+  __ mov(v0, scratch);
+  __ DropAndRet(argc + 1);
+
+  __ bind(&return_undefined);
+  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
+  __ DropAndRet(argc + 1);
+
+  __ bind(&call_builtin);
+  __ TailCallExternalReference(
+      ExternalReference(Builtins::c_ArrayPop, isolate()), argc + 1, 1);
+
+  HandlerFrontendFooter(&miss);
+
+  // Return the generated code.
+  return GetCode(type, name);
+}
+
+
 Handle<Code> CallStubCompiler::CompileFastApiCall(
     const CallOptimization& optimization,
     Handle<Object> object,
diff --git a/src/stub-cache.h b/src/stub-cache.h
index 737b069..fd1b278 100644
--- a/src/stub-cache.h
+++ b/src/stub-cache.h
@@ -888,7 +888,8 @@ class KeyedStoreStubCompiler: public StoreStubCompiler {
 // Subset of FUNCTIONS_WITH_ID_LIST with custom constant/global call
 // IC stubs.
 #define CUSTOM_CALL_IC_GENERATORS(V) \
-  V(ArrayPush)
+  V(ArrayPush) \
+  V(ArrayPop)
 
 
 class CallStubCompiler: public StubCompiler {
diff --git a/src/x64/stub-cache-x64.cc b/src/x64/stub-cache-x64.cc
index 392c96b..b563ac6 100644
--- a/src/x64/stub-cache-x64.cc
+++ b/src/x64/stub-cache-x64.cc
@@ -1593,6 +1593,77 @@ Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
 }
 
 
+Handle<Code> CallStubCompiler::CompileArrayPopCall(
+    Handle<Object> object,
+    Handle<JSObject> holder,
+    Handle<Cell> cell,
+    Handle<JSFunction> function,
+    Handle<String> name,
+    Code::StubType type) {
+  // If object is not an array or is observed or sealed, bail out to regular
+  // call.
+  if (!object->IsJSArray() ||
+      !cell.is_null() ||
+      Handle<JSArray>::cast(object)->map()->is_observed() ||
+      !Handle<JSArray>::cast(object)->map()->is_extensible()) {
+    return Handle<Code>::null();
+  }
+
+  Label miss, return_undefined, call_builtin;
+
+  HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
+
+  // Get the elements array of the object.
+  __ movp(rbx, FieldOperand(rdx, JSArray::kElementsOffset));
+
+  // Check that the elements are in fast mode and writable.
+  __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
+                 Heap::kFixedArrayMapRootIndex);
+  __ j(not_equal, &call_builtin);
+
+  // Get the array's length into rcx and calculate new length.
+  __ SmiToInteger32(rcx, FieldOperand(rdx, JSArray::kLengthOffset));
+  __ subl(rcx, Immediate(1));
+  __ j(negative, &return_undefined);
+
+  // Get the last element.
+  __ LoadRoot(r9, Heap::kTheHoleValueRootIndex);
+  __ movp(rax, FieldOperand(rbx,
+                            rcx, times_pointer_size,
+                            FixedArray::kHeaderSize));
+  // Check if element is already the hole.
+  __ cmpq(rax, r9);
+  // If so, call slow-case to also check prototypes for value.
+  __ j(equal, &call_builtin);
+
+  // Set the array's length.
+  __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rcx);
+
+  // Fill with the hole and return original value.
+  __ movp(FieldOperand(rbx,
+                       rcx, times_pointer_size,
+                       FixedArray::kHeaderSize),
+          r9);
+  const int argc = arguments().immediate();
+  __ ret((argc + 1) * kPointerSize);
+
+  __ bind(&return_undefined);
+  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
+  __ ret((argc + 1) * kPointerSize);
+
+  __ bind(&call_builtin);
+  __ TailCallExternalReference(
+      ExternalReference(Builtins::c_ArrayPop, isolate()),
+      argc + 1,
+      1);
+
+  HandlerFrontendFooter(&miss);
+
+  // Return the generated code.
+  return GetCode(type, name);
+}
+
+
 Handle<Code> CallStubCompiler::CompileFastApiCall(
     const CallOptimization& optimization,
     Handle<Object> object,
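
Editorial note: the four architecture-specific stubs restored above all hand-assemble the same fast path for Array.prototype.pop. As a reading aid only, the following is a minimal standalone C++ sketch of that shared control flow. FastArray, PopOutcome, PopResult and the kHole sentinel are hypothetical illustration types, not V8 code; the real stubs operate on Smi-tagged lengths and a FixedArray backing store and tail-call Builtins::c_ArrayPop on the bail-out paths.

// Sketch of the ArrayPop stub fast path (illustrative types, not V8 API).
#include <cstddef>
#include <vector>

namespace sketch {

// Sentinel standing in for the-hole value in a fast-elements backing store.
constexpr int kHole = -1;

struct FastArray {
  std::vector<int> elements;      // stands in for the FixedArray backing store
  std::size_t length = 0;         // stands in for JSArray::kLengthOffset
  bool writable_fast_elements = true;
};

enum class PopOutcome { kValue, kUndefined, kCallBuiltin };

struct PopResult {
  PopOutcome outcome;
  int value = 0;
};

PopResult ArrayPopFastPath(FastArray& array) {
  // "Check that the elements are in fast mode and writable."
  if (!array.writable_fast_elements) return {PopOutcome::kCallBuiltin};

  // "Get the array's length and calculate new length"; popping an empty
  // array yields undefined.
  if (array.length == 0) return {PopOutcome::kUndefined};
  std::size_t new_length = array.length - 1;

  // "Get the last element"; if it is already the hole, bail out so the
  // builtin can consult the prototype chain, which the fast path cannot do.
  int last = array.elements[new_length];
  if (last == kHole) return {PopOutcome::kCallBuiltin};

  // "Set the array's length" and "fill with the hole", then return the value.
  array.length = new_length;
  array.elements[new_length] = kHole;
  return {PopOutcome::kValue, last};
}

}  // namespace sketch

The hole check mirrors the stubs' j(equal, &call_builtin) / Branch(&call_builtin, eq, ...) instructions: a holey last slot forces the generic builtin path.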