From ee4e034d7042da214bd1efc9325335d1d27226e9 Mon Sep 17 00:00:00 2001
From: "jkummerow@chromium.org"
Date: Fri, 24 Jan 2014 08:32:50 +0000
Subject: [PATCH] Revert broken ArrayPop changes

This reverts:
r18749 "Reland (and fix) "Add hydrogen support for ArrayPop, and remove the handwritten call stubs."",
r18790 "Remove ArrayPush from the custom call generators, and instead call directly to the handler in crankshaft.", and
r18798 "MIPS: Remove ArrayPush from the custom call generators, and instead call directly to the handler in crankshaft."

For causing crashes on Canary.

BUG=chromium:337686
LOG=N
R=bmeurer@chromium.org

Review URL: https://codereview.chromium.org/146003006

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@18805 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
---
 src/arm/code-stubs-arm.cc                       | 15 -----
 src/arm/stub-cache-arm.cc                       | 73 ++++++++++++++++++++++
 src/hydrogen.cc                                 | 82 -------------------------
 src/ia32/code-stubs-ia32.cc                     | 14 -----
 src/ia32/stub-cache-ia32.cc                     | 70 +++++++++++++++++++++
 src/isolate.h                                   |  1 -
 src/mips/code-stubs-mips.cc                     | 14 -----
 src/mips/stub-cache-mips.cc                     | 72 ++++++++++++++++++++++
 src/stub-cache.cc                               | 57 +++++++++++++++++
 src/stub-cache.h                                | 17 +++++
 src/x64/code-stubs-x64.cc                       | 14 -----
 src/x64/stub-cache-x64.cc                       | 71 +++++++++++++++++++++
 test/mjsunit/object-seal.js                     |  3 +-
 test/mjsunit/regress/regress-array-pop-deopt.js | 41 -------------
 14 files changed, 362 insertions(+), 182 deletions(-)
 delete mode 100644 test/mjsunit/regress/regress-array-pop-deopt.js

diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc
index 8e9a394..12742fb 100644
--- a/src/arm/code-stubs-arm.cc
+++ b/src/arm/code-stubs-arm.cc
@@ -427,21 +427,6 @@ void CallDescriptors::InitializeForIsolate(Isolate* isolate) {
     descriptor->param_representations_ = representations;
     descriptor->platform_specific_descriptor_ = &noInlineDescriptor;
   }
-  {
-    CallInterfaceDescriptor* descriptor =
-        isolate->call_descriptor(Isolate::CallHandler);
-    static Register registers[] = { cp,  // context
-                                    r0,  // receiver
-    };
-    static Representation representations[] = {
-        Representation::Tagged(),  // context
-        Representation::Tagged(),  // receiver
-    };
-    descriptor->register_param_count_ = 2;
-    descriptor->register_params_ = registers;
-    descriptor->param_representations_ = representations;
-    descriptor->platform_specific_descriptor_ = &default_descriptor;
-  }
 }
 
 
diff --git a/src/arm/stub-cache-arm.cc b/src/arm/stub-cache-arm.cc
index 987437b..da3d4c9 100644
--- a/src/arm/stub-cache-arm.cc
+++ b/src/arm/stub-cache-arm.cc
@@ -1601,6 +1601,79 @@ Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
 }
 
 
+Handle<Code> CallStubCompiler::CompileArrayPopCall(
+    Handle<Object> object,
+    Handle<JSObject> holder,
+    Handle<Cell> cell,
+    Handle<JSFunction> function,
+    Handle<String> name,
+    Code::StubType type) {
+  // If object is not an array or is observed or sealed, bail out to regular
+  // call.
+  if (!object->IsJSArray() ||
+      !cell.is_null() ||
+      Handle<JSArray>::cast(object)->map()->is_observed() ||
+      !Handle<JSArray>::cast(object)->map()->is_extensible()) {
+    return Handle<Code>::null();
+  }
+
+  Label miss, return_undefined, call_builtin;
+  Register receiver = r0;
+  Register scratch = r1;
+  Register elements = r3;
+
+  HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
+
+  // Get the elements array of the object.
+  __ ldr(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
+
+  // Check that the elements are in fast mode and writable.
+  __ CheckMap(elements,
+              scratch,
+              Heap::kFixedArrayMapRootIndex,
+              &call_builtin,
+              DONT_DO_SMI_CHECK);
+
+  // Get the array's length into r4 and calculate new length.
+  __ ldr(r4, FieldMemOperand(receiver, JSArray::kLengthOffset));
+  __ sub(r4, r4, Operand(Smi::FromInt(1)), SetCC);
+  __ b(lt, &return_undefined);
+
+  // Get the last element.
+  __ LoadRoot(r6, Heap::kTheHoleValueRootIndex);
+  // We can't address the last element in one operation. Compute the more
+  // expensive shift first, and use an offset later on.
+  __ add(elements, elements, Operand::PointerOffsetFromSmiKey(r4));
+  __ ldr(scratch, FieldMemOperand(elements, FixedArray::kHeaderSize));
+  __ cmp(scratch, r6);
+  __ b(eq, &call_builtin);
+
+  // Set the array's length.
+  __ str(r4, FieldMemOperand(receiver, JSArray::kLengthOffset));
+
+  // Fill with the hole.
+  __ str(r6, FieldMemOperand(elements, FixedArray::kHeaderSize));
+  const int argc = arguments().immediate();
+  __ Drop(argc + 1);
+  __ mov(r0, scratch);
+  __ Ret();
+
+  __ bind(&return_undefined);
+  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
+  __ Drop(argc + 1);
+  __ Ret();
+
+  __ bind(&call_builtin);
+  __ TailCallExternalReference(
+      ExternalReference(Builtins::c_ArrayPop, isolate()), argc + 1, 1);
+
+  HandlerFrontendFooter(&miss);
+
+  // Return the generated code.
+  return GetCode(type, name);
+}
+
+
 Handle<Code> CallStubCompiler::CompileFastApiCall(
     const CallOptimization& optimization,
     Handle<Object> object,
diff --git a/src/hydrogen.cc b/src/hydrogen.cc
index f4b6d03..3180f4d 100644
--- a/src/hydrogen.cc
+++ b/src/hydrogen.cc
@@ -7612,88 +7612,6 @@ bool HOptimizedGraphBuilder::TryInlineBuiltinMethodCall(
         return true;
       }
       break;
-    case kArrayPop: {
-      if (!expr->IsMonomorphic() || expr->check_type() != RECEIVER_MAP_CHECK) {
-        return false;
-      }
-      if (receiver_map->instance_type() != JS_ARRAY_TYPE) return false;
-      ElementsKind elements_kind = receiver_map->elements_kind();
-      if (!IsFastElementsKind(elements_kind)) return false;
-      AddCheckConstantFunction(expr->holder(), receiver, receiver_map);
-
-      Drop(expr->arguments()->length());
-      HValue* result;
-      HValue* checked_object;
-      HValue* reduced_length;
-      HValue* receiver = Pop();
-      { NoObservableSideEffectsScope scope(this);
-        checked_object = AddCheckMap(receiver, receiver_map);
-        HValue* elements = AddLoadElements(checked_object);
-        // Ensure that we aren't popping from a copy-on-write array.
-        if (IsFastSmiOrObjectElementsKind(elements_kind)) {
-          Add<HCheckMaps>(
-              elements, isolate()->factory()->fixed_array_map(), top_info());
-        }
-        HValue* length = Add<HLoadNamedField>(
-            checked_object, HObjectAccess::ForArrayLength(elements_kind));
-        reduced_length = AddUncasted<HSub>(length, graph()->GetConstant1());
-        HValue* bounds_check = Add<HBoundsCheck>(
-            graph()->GetConstant0(), length);
-        result = AddElementAccess(elements, reduced_length, NULL,
-                                  bounds_check, elements_kind, false);
-        Factory* factory = isolate()->factory();
-        double nan_double = FixedDoubleArray::hole_nan_as_double();
-        HValue* hole = IsFastSmiOrObjectElementsKind(elements_kind)
-            ? Add<HConstant>(factory->the_hole_value())
-            : Add<HConstant>(nan_double);
-        if (IsFastSmiOrObjectElementsKind(elements_kind)) {
-          elements_kind = FAST_HOLEY_ELEMENTS;
-        }
-        AddElementAccess(
-            elements, reduced_length, hole, bounds_check, elements_kind, true);
-      }
-      Add<HStoreNamedField>(
-          checked_object, HObjectAccess::ForArrayLength(elements_kind),
-          reduced_length);
-      ast_context()->ReturnValue(result);
-      Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
-      return true;
-    }
-    case kArrayPush: {
-      if (!expr->IsMonomorphic() || expr->check_type() != RECEIVER_MAP_CHECK) {
-        return false;
-      }
-      if (receiver_map->instance_type() != JS_ARRAY_TYPE) return false;
-      ElementsKind elements_kind = receiver_map->elements_kind();
-      if (!IsFastElementsKind(elements_kind)) return false;
-      AddCheckConstantFunction(expr->holder(), receiver, receiver_map);
-
-      HValue* op_vals[] = {
-        context(),
-        // Receiver.
-        environment()->ExpressionStackAt(expr->arguments()->length())
-      };
-
-      const int argc = expr->arguments()->length();
-      // Includes receiver.
-      PushArgumentsFromEnvironment(argc + 1);
-
-      CallInterfaceDescriptor* descriptor =
-          isolate()->call_descriptor(Isolate::CallHandler);
-
-      ArrayPushStub stub(receiver_map->elements_kind(), argc);
-      Handle<Code> code = stub.GetCode(isolate());
-      HConstant* code_value = Add<HConstant>(code);
-
-      ASSERT((sizeof(op_vals) / kPointerSize) ==
-             descriptor->environment_length());
-
-      HInstruction* call = New<HCallWithDescriptor>(
-          code_value, argc + 1, descriptor,
-          Vector<HValue*>(op_vals, descriptor->environment_length()));
-      ast_context()->ReturnInstruction(call, expr->id());
-      return true;
-    }
     default:
       // Not yet supported for inlining.
       break;
diff --git a/src/ia32/code-stubs-ia32.cc b/src/ia32/code-stubs-ia32.cc
index 48b5999..f21308f 100644
--- a/src/ia32/code-stubs-ia32.cc
+++ b/src/ia32/code-stubs-ia32.cc
@@ -421,20 +421,6 @@ void CallDescriptors::InitializeForIsolate(Isolate* isolate) {
     descriptor->register_params_ = registers;
     descriptor->param_representations_ = representations;
   }
-  {
-    CallInterfaceDescriptor* descriptor =
-        isolate->call_descriptor(Isolate::CallHandler);
-    static Register registers[] = { esi,  // context
-                                    edx,  // receiver
-    };
-    static Representation representations[] = {
-        Representation::Tagged(),  // context
-        Representation::Tagged(),  // receiver
-    };
-    descriptor->register_param_count_ = 2;
-    descriptor->register_params_ = registers;
-    descriptor->param_representations_ = representations;
-  }
 }
 
 
diff --git a/src/ia32/stub-cache-ia32.cc b/src/ia32/stub-cache-ia32.cc
index e76bfb5..92b2f93 100644
--- a/src/ia32/stub-cache-ia32.cc
+++ b/src/ia32/stub-cache-ia32.cc
@@ -1616,6 +1616,76 @@ Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
 }
 
 
+Handle<Code> CallStubCompiler::CompileArrayPopCall(
+    Handle<Object> object,
+    Handle<JSObject> holder,
+    Handle<Cell> cell,
+    Handle<JSFunction> function,
+    Handle<String> name,
+    Code::StubType type) {
+  // If object is not an array or is observed or sealed, bail out to regular
+  // call.
+  if (!object->IsJSArray() ||
+      !cell.is_null() ||
+      Handle<JSArray>::cast(object)->map()->is_observed() ||
+      !Handle<JSArray>::cast(object)->map()->is_extensible()) {
+    return Handle<Code>::null();
+  }
+
+  Label miss, return_undefined, call_builtin;
+
+  HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
+
+  // Get the elements array of the object.
+  __ mov(ebx, FieldOperand(edx, JSArray::kElementsOffset));
+
+  // Check that the elements are in fast mode and writable.
+  __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
+         Immediate(factory()->fixed_array_map()));
+  __ j(not_equal, &call_builtin);
+
+  // Get the array's length into ecx and calculate new length.
+  __ mov(ecx, FieldOperand(edx, JSArray::kLengthOffset));
+  __ sub(ecx, Immediate(Smi::FromInt(1)));
+  __ j(negative, &return_undefined);
+
+  // Get the last element.
+  STATIC_ASSERT(kSmiTagSize == 1);
+  STATIC_ASSERT(kSmiTag == 0);
+  __ mov(eax, FieldOperand(ebx,
+                           ecx, times_half_pointer_size,
+                           FixedArray::kHeaderSize));
+  __ cmp(eax, Immediate(factory()->the_hole_value()));
+  __ j(equal, &call_builtin);
+
+  // Set the array's length.
+  __ mov(FieldOperand(edx, JSArray::kLengthOffset), ecx);
+
+  // Fill with the hole.
+  __ mov(FieldOperand(ebx,
+                      ecx, times_half_pointer_size,
+                      FixedArray::kHeaderSize),
+         Immediate(factory()->the_hole_value()));
+  const int argc = arguments().immediate();
+  __ ret((argc + 1) * kPointerSize);
+
+  __ bind(&return_undefined);
+  __ mov(eax, Immediate(factory()->undefined_value()));
+  __ ret((argc + 1) * kPointerSize);
+
+  __ bind(&call_builtin);
+  __ TailCallExternalReference(
+      ExternalReference(Builtins::c_ArrayPop, isolate()),
+      argc + 1,
+      1);
+
+  HandlerFrontendFooter(&miss);
+
+  // Return the generated code.
+  return GetCode(type, name);
+}
+
+
 Handle<Code> CallStubCompiler::CompileFastApiCall(
     const CallOptimization& optimization,
     Handle<Object> object,
diff --git a/src/isolate.h b/src/isolate.h
index 5956baa..d6dff05 100644
--- a/src/isolate.h
+++ b/src/isolate.h
@@ -1076,7 +1076,6 @@ class Isolate {
   enum CallDescriptorKey {
     KeyedCall,
     NamedCall,
-    CallHandler,
     ArgumentAdaptorCall,
     NUMBER_OF_CALL_DESCRIPTORS
   };
diff --git a/src/mips/code-stubs-mips.cc b/src/mips/code-stubs-mips.cc
index 8651b69..2fe6d69 100644
--- a/src/mips/code-stubs-mips.cc
+++ b/src/mips/code-stubs-mips.cc
@@ -419,20 +419,6 @@ void CallDescriptors::InitializeForIsolate(Isolate* isolate) {
     descriptor->register_params_ = registers;
     descriptor->param_representations_ = representations;
   }
-  {
-    CallInterfaceDescriptor* descriptor =
-        isolate->call_descriptor(Isolate::CallHandler);
-    static Register registers[] = { cp,  // context
-                                    a0,  // receiver
-    };
-    static Representation representations[] = {
-        Representation::Tagged(),  // context
-        Representation::Tagged(),  // receiver
-    };
-    descriptor->register_param_count_ = 2;
-    descriptor->register_params_ = registers;
-    descriptor->param_representations_ = representations;
-  }
 }
 
 
diff --git a/src/mips/stub-cache-mips.cc b/src/mips/stub-cache-mips.cc
index ef98db5..b713be9 100644
--- a/src/mips/stub-cache-mips.cc
+++ b/src/mips/stub-cache-mips.cc
@@ -1585,6 +1585,78 @@ Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
 }
 
 
+Handle<Code> CallStubCompiler::CompileArrayPopCall(
+    Handle<Object> object,
+    Handle<JSObject> holder,
+    Handle<Cell> cell,
+    Handle<JSFunction> function,
+    Handle<String> name,
+    Code::StubType type) {
+  // If object is not an array or is observed or sealed, bail out to regular
+  // call.
+  if (!object->IsJSArray() ||
+      !cell.is_null() ||
+      Handle<JSArray>::cast(object)->map()->is_observed() ||
+      !Handle<JSArray>::cast(object)->map()->is_extensible()) {
+    return Handle<Code>::null();
+  }
+
+  Label miss, return_undefined, call_builtin;
+  Register receiver = a0;
+  Register scratch = a1;
+  Register elements = a3;
+  HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
+
+  // Get the elements array of the object.
+  __ lw(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
+
+  // Check that the elements are in fast mode and writable.
+  __ CheckMap(elements,
+              scratch,
+              Heap::kFixedArrayMapRootIndex,
+              &call_builtin,
+              DONT_DO_SMI_CHECK);
+
+  // Get the array's length into t0 and calculate new length.
+  __ lw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));
+  __ Subu(t0, t0, Operand(Smi::FromInt(1)));
+  __ Branch(&return_undefined, lt, t0, Operand(zero_reg));
+
+  // Get the last element.
+  __ LoadRoot(t2, Heap::kTheHoleValueRootIndex);
+  STATIC_ASSERT(kSmiTagSize == 1);
+  STATIC_ASSERT(kSmiTag == 0);
+  // We can't address the last element in one operation. Compute the more
+  // expensive shift first, and use an offset later on.
+  __ sll(t1, t0, kPointerSizeLog2 - kSmiTagSize);
+  __ Addu(elements, elements, t1);
+  __ lw(scratch, FieldMemOperand(elements, FixedArray::kHeaderSize));
+  __ Branch(&call_builtin, eq, scratch, Operand(t2));
+
+  // Set the array's length.
+  __ sw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));
+
+  // Fill with the hole.
+  __ sw(t2, FieldMemOperand(elements, FixedArray::kHeaderSize));
+  const int argc = arguments().immediate();
+  __ mov(v0, scratch);
+  __ DropAndRet(argc + 1);
+
+  __ bind(&return_undefined);
+  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
+  __ DropAndRet(argc + 1);
+
+  __ bind(&call_builtin);
+  __ TailCallExternalReference(
+      ExternalReference(Builtins::c_ArrayPop, isolate()), argc + 1, 1);
+
+  HandlerFrontendFooter(&miss);
+
+  // Return the generated code.
+  return GetCode(type, name);
+}
+
+
 Handle<Code> CallStubCompiler::CompileFastApiCall(
     const CallOptimization& optimization,
     Handle<Object> object,
diff --git a/src/stub-cache.cc b/src/stub-cache.cc
index f715573..89ed586 100644
--- a/src/stub-cache.cc
+++ b/src/stub-cache.cc
@@ -1282,6 +1282,41 @@ void CallStubCompiler::GenerateJumpFunction(Handle<Object> object,
 }
 
 
+Handle<Code> CallStubCompiler::CompileArrayPushCall(
+    Handle<Object> object,
+    Handle<JSObject> holder,
+    Handle<Cell> cell,
+    Handle<JSFunction> function,
+    Handle<String> name,
+    Code::StubType type) {
+  // If object is not an array or is observed or sealed, bail out to regular
+  // call.
+  if (!object->IsJSArray() ||
+      !cell.is_null() ||
+      Handle<JSArray>::cast(object)->map()->is_observed() ||
+      !Handle<JSArray>::cast(object)->map()->is_extensible()) {
+    return Handle<Code>::null();
+  }
+
+  Label miss;
+
+  HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
+
+  Handle<Map> map(Handle<JSArray>::cast(object)->map());
+  ElementsKind elements_kind = map->elements_kind();
+  const int argc = arguments().immediate();
+
+  ArrayPushStub stub(elements_kind, argc);
+  Handle<Code> code = stub.GetCode(isolate());
+  StubCompiler::GenerateTailCall(masm(), code);
+
+  HandlerFrontendFooter(&miss);
+
+  // Return the generated code.
+  return GetCode(type, name);
+}
+
+
 Handle<Code> CallStubCompiler::CompileCallConstant(
     Handle<Object> object,
     Handle<JSObject> holder,
@@ -1878,6 +1913,13 @@ CallStubCompiler::CallStubCompiler(Isolate* isolate,
 
 
 bool CallStubCompiler::HasCustomCallGenerator(Handle<JSFunction> function) {
+  if (function->shared()->HasBuiltinFunctionId()) {
+    BuiltinFunctionId id = function->shared()->builtin_function_id();
+#define CALL_GENERATOR_CASE(name) if (id == k##name) return true;
+    CUSTOM_CALL_IC_GENERATORS(CALL_GENERATOR_CASE)
+#undef CALL_GENERATOR_CASE
+  }
+
   CallOptimization optimization(function);
   return optimization.is_simple_api_call();
 }
@@ -1891,6 +1933,21 @@ Handle<Code> CallStubCompiler::CompileCustomCall(
     Handle<String> fname,
     Code::StubType type) {
   ASSERT(HasCustomCallGenerator(function));
+
+  if (function->shared()->HasBuiltinFunctionId()) {
+    BuiltinFunctionId id = function->shared()->builtin_function_id();
+#define CALL_GENERATOR_CASE(name)                               \
+    if (id == k##name) {                                        \
+      return CallStubCompiler::Compile##name##Call(object,      \
+                                                   holder,      \
+                                                   cell,        \
+                                                   function,    \
+                                                   fname,       \
+                                                   type);       \
+    }
+    CUSTOM_CALL_IC_GENERATORS(CALL_GENERATOR_CASE)
+#undef CALL_GENERATOR_CASE
+  }
   CallOptimization optimization(function);
   ASSERT(optimization.is_simple_api_call());
   return CompileFastApiCall(optimization,
diff --git a/src/stub-cache.h b/src/stub-cache.h
index 9cdd77f..a54d92d 100644
--- a/src/stub-cache.h
+++ b/src/stub-cache.h
@@ -868,6 +868,13 @@ class KeyedStoreStubCompiler: public StoreStubCompiler {
 };
 
 
+// Subset of FUNCTIONS_WITH_ID_LIST with custom constant/global call
+// IC stubs.
+#define CUSTOM_CALL_IC_GENERATORS(V)            \
+  V(ArrayPush)                                  \
+  V(ArrayPop)
+
+
 class CallStubCompiler: public StubCompiler {
  public:
   CallStubCompiler(Isolate* isolate,
@@ -934,6 +941,16 @@ class CallStubCompiler: public StubCompiler {
                                 Handle<String> name,
                                 Code::StubType type);
 
+#define DECLARE_CALL_GENERATOR(name)                                    \
+  Handle<Code> Compile##name##Call(Handle<Object> object,               \
+                                   Handle<JSObject> holder,             \
+                                   Handle<Cell> cell,                   \
+                                   Handle<JSFunction> function,         \
+                                   Handle<String> fname,                \
+                                   Code::StubType type);
+  CUSTOM_CALL_IC_GENERATORS(DECLARE_CALL_GENERATOR)
+#undef DECLARE_CALL_GENERATOR
+
   Handle<Code> CompileFastApiCall(const CallOptimization& optimization,
                                   Handle<Object> object,
                                   Handle<JSObject> holder,
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index b8ef7de..a2ddd4c 100644
--- a/src/x64/code-stubs-x64.cc
+++ b/src/x64/code-stubs-x64.cc
@@ -418,20 +418,6 @@ void CallDescriptors::InitializeForIsolate(Isolate* isolate) {
     descriptor->register_params_ = registers;
     descriptor->param_representations_ = representations;
   }
-  {
-    CallInterfaceDescriptor* descriptor =
-        isolate->call_descriptor(Isolate::CallHandler);
-    static Register registers[] = { rsi,  // context
-                                    rdx,  // receiver
-    };
-    static Representation representations[] = {
-        Representation::Tagged(),  // context
-        Representation::Tagged(),  // receiver
-    };
-    descriptor->register_param_count_ = 2;
-    descriptor->register_params_ = registers;
-    descriptor->param_representations_ = representations;
-  }
 }
 
 
diff --git a/src/x64/stub-cache-x64.cc b/src/x64/stub-cache-x64.cc
index dbfd419..53e9f88 100644
--- a/src/x64/stub-cache-x64.cc
+++ b/src/x64/stub-cache-x64.cc
@@ -1534,6 +1534,77 @@ Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
 }
 
 
+Handle<Code> CallStubCompiler::CompileArrayPopCall(
+    Handle<Object> object,
+    Handle<JSObject> holder,
+    Handle<Cell> cell,
+    Handle<JSFunction> function,
+    Handle<String> name,
+    Code::StubType type) {
+  // If object is not an array or is observed or sealed, bail out to regular
+  // call.
+  if (!object->IsJSArray() ||
+      !cell.is_null() ||
+      Handle<JSArray>::cast(object)->map()->is_observed() ||
+      !Handle<JSArray>::cast(object)->map()->is_extensible()) {
+    return Handle<Code>::null();
+  }
+
+  Label miss, return_undefined, call_builtin;
+
+  HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
+
+  // Get the elements array of the object.
+  __ movp(rbx, FieldOperand(rdx, JSArray::kElementsOffset));
+
+  // Check that the elements are in fast mode and writable.
+  __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
+                 Heap::kFixedArrayMapRootIndex);
+  __ j(not_equal, &call_builtin);
+
+  // Get the array's length into rcx and calculate new length.
+  __ SmiToInteger32(rcx, FieldOperand(rdx, JSArray::kLengthOffset));
+  __ subl(rcx, Immediate(1));
+  __ j(negative, &return_undefined);
+
+  // Get the last element.
+  __ LoadRoot(r9, Heap::kTheHoleValueRootIndex);
+  __ movp(rax, FieldOperand(rbx,
+                            rcx, times_pointer_size,
+                            FixedArray::kHeaderSize));
+  // Check if element is already the hole.
+  __ cmpq(rax, r9);
+  // If so, call slow-case to also check prototypes for value.
+  __ j(equal, &call_builtin);
+
+  // Set the array's length.
+  __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rcx);
+
+  // Fill with the hole and return original value.
+  __ movp(FieldOperand(rbx,
+                       rcx, times_pointer_size,
+                       FixedArray::kHeaderSize),
+          r9);
+  const int argc = arguments().immediate();
+  __ ret((argc + 1) * kPointerSize);
+
+  __ bind(&return_undefined);
+  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
+  __ ret((argc + 1) * kPointerSize);
+
+  __ bind(&call_builtin);
+  __ TailCallExternalReference(
+      ExternalReference(Builtins::c_ArrayPop, isolate()),
+      argc + 1,
+      1);
+
+  HandlerFrontendFooter(&miss);
+
+  // Return the generated code.
+  return GetCode(type, name);
+}
+
+
 Handle<Code> CallStubCompiler::CompileFastApiCall(
     const CallOptimization& optimization,
     Handle<Object> object,
diff --git a/test/mjsunit/object-seal.js b/test/mjsunit/object-seal.js
index 3afddb9..f31f0b7 100644
--- a/test/mjsunit/object-seal.js
+++ b/test/mjsunit/object-seal.js
@@ -251,7 +251,8 @@ assertOptimized(shift_call);
 Object.seal(obj);
 assertThrows(function() { push_call(obj); }, TypeError);
 assertThrows(function() { shift_call(obj); }, TypeError);
-assertUnoptimized(push_call);
+assertOptimized(push_call);
+// shift() doesn't have a custom call generator, so deopt will occur.
 assertUnoptimized(shift_call);
 assertDoesNotThrow(function() { push_call(objControl); });
 assertDoesNotThrow(function() { shift_call(objControl); });
diff --git a/test/mjsunit/regress/regress-array-pop-deopt.js b/test/mjsunit/regress/regress-array-pop-deopt.js
deleted file mode 100644
index 9a0d35d..0000000
--- a/test/mjsunit/regress/regress-array-pop-deopt.js
+++ /dev/null
@@ -1,41 +0,0 @@
-// Copyright 2014 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-//     * Redistributions of source code must retain the above copyright
-//       notice, this list of conditions and the following disclaimer.
-//     * Redistributions in binary form must reproduce the above
-//       copyright notice, this list of conditions and the following
-//       disclaimer in the documentation and/or other materials provided
-//       with the distribution.
-//     * Neither the name of Google Inc. nor the names of its
-//       contributors may be used to endorse or promote products derived
-//       from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// Flags: --allow-natives-syntax
-
-var o = [6,7,8,9];
-
-function f(b) {
-  var v = o.pop() + b;
-  return v;
-}
-
-assertEquals(10, f(1));
-assertEquals(9, f(1));
-assertEquals(8, f(1));
-%OptimizeFunctionOnNextCall(f);
-assertEquals("61", f("1"));
-- 
2.7.4