Operand(masm->isolate()->factory()->fixed_array_map()));
__ b(ne, fast_double);
}
+
+ // HOLECHECK: guards "A[i] = V"
+ // We have to go to the runtime if the current value is the hole because
+ // there may be a callback on the element in the prototype chain.
+ Label holecheck_passed1;
+ __ add(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+ __ ldr(scratch_value,
+ MemOperand::PointerAddressFromSmiKey(address, key, PreIndex));
+ __ cmp(scratch_value, Operand(masm->isolate()->factory()->the_hole_value()));
+ __ b(ne, &holecheck_passed1);
+ __ JumpIfDictionaryInPrototypeChain(receiver, elements_map, scratch_value,
+ slow);
+
+ __ bind(&holecheck_passed1);
+
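
For orientation, here is a minimal JavaScript sketch of the case the two HOLECHECK sequences (this one and the double-elements one below) are guarding; the snippet is illustrative only and not part of the patch. Note the element accessor is also what gives the prototype dictionary elements, which is the condition JumpIfDictionaryInPrototypeChain tests for:

    var calls = 0;
    var a = [1, 2, 3];
    delete a[0];                               // a[0] is now a hole
    var proto = [];
    proto.__defineSetter__(0, function(v) { calls++; });  // proto gets dictionary elements
    a.__proto__ = proto;
    a[0] = 42;        // must invoke the setter instead of storing into the hole
    // calls === 1; a fast-path store that ignored the hole would miss the setter
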
// Smi stores don't require further checks.
Label non_smi_value;
__ JumpIfNotSmi(value, &non_smi_value);
__ CompareRoot(elements_map, Heap::kFixedDoubleArrayMapRootIndex);
__ b(ne, slow);
}
+
+ // HOLECHECK: guards "A[i] double hole?"
+ // We have to see if the double version of the hole is present. If so,
+ // go to the runtime.
+ __ add(address, elements,
+ Operand((FixedDoubleArray::kHeaderSize + sizeof(kHoleNanLower32))
+ - kHeapObjectTag));
+ __ ldr(scratch_value,
+ MemOperand(address, key, LSL, kPointerSizeLog2, PreIndex));
+ __ cmp(scratch_value, Operand(kHoleNanUpper32));
+ __ b(ne, &fast_double_without_map_check);
+ __ JumpIfDictionaryInPrototypeChain(receiver, elements_map, scratch_value,
+ slow);
+
__ bind(&fast_double_without_map_check);
__ StoreNumberToDoubleElements(value, key, elements, r3, d0,
&transition_double_elements);
}
+void MacroAssembler::JumpIfDictionaryInPrototypeChain(
+ Register object,
+ Register scratch0,
+ Register scratch1,
+ Label* found) {
+ ASSERT(!scratch1.is(scratch0));
+ Factory* factory = isolate()->factory();
+ Register current = scratch0;
+ Label loop_again;
+
+ // scratch0 contained the elements pointer; it is reused as 'current' here.
+ mov(current, object);
+
+ // Loop based on the map going up the prototype chain.
+ bind(&loop_again);
+ ldr(current, FieldMemOperand(current, HeapObject::kMapOffset));
+ ldr(scratch1, FieldMemOperand(current, Map::kBitField2Offset));
+ Ubfx(scratch1, scratch1, Map::kElementsKindShift, Map::kElementsKindBitCount);
+ cmp(scratch1, Operand(DICTIONARY_ELEMENTS));
+ b(eq, found);
+ ldr(current, FieldMemOperand(current, Map::kPrototypeOffset));
+ cmp(current, Operand(factory->null_value()));
+ b(ne, &loop_again);
+}
+
+
#ifdef DEBUG
bool AreAliased(Register reg1,
Register reg2,
bind(&no_memento_found);
}
+ // Jumps to found label if a prototype map has dictionary elements.
+ void JumpIfDictionaryInPrototypeChain(Register object, Register scratch0,
+ Register scratch1, Label* found);
+
private:
void CallCFunctionHelper(Register function,
int num_reg_arguments,
}
+void Heap::ClearAllICsByKind(Code::Kind kind) {
+ HeapObjectIterator it(code_space());
+
+ for (Object* object = it.Next(); object != NULL; object = it.Next()) {
+ Code* code = Code::cast(object);
+ Code::Kind current_kind = code->kind();
+ if (current_kind == Code::FUNCTION ||
+ current_kind == Code::OPTIMIZED_FUNCTION) {
+ code->ClearInlineCaches(kind);
+ }
+ }
+}
+
+
void Heap::RepairFreeListsAfterBoot() {
PagedSpaces spaces(this);
for (PagedSpace* space = spaces.next();
// Clear the Instanceof cache (used when a prototype changes).
inline void ClearInstanceofCache();
+ // Iterates the whole code space to clear all ICs of the given kind.
+ void ClearAllICsByKind(Code::Kind kind);
+
// For use during bootup.
void RepairFreeListsAfterBoot();
checked_object->ClearGVNFlag(kDependsOnElementsKind);
}
+ if (is_store && map->prototype()->IsJSObject()) {
+ // Monomorphic stores need a prototype chain check because shape
+ // changes could allow callbacks on elements in the chain that
+ // aren't compatible with monomorphic keyed stores.
+ Handle<JSObject> prototype(JSObject::cast(map->prototype()));
+ Object* holder = map->prototype();
+ while (holder->GetPrototype(isolate())->IsJSObject()) {
+ holder = holder->GetPrototype(isolate());
+ }
+ ASSERT(holder->GetPrototype(isolate())->IsNull());
+
+ BuildCheckPrototypeMaps(prototype,
+ Handle<JSObject>(JSObject::cast(holder)));
+ }
+
LoadKeyedHoleMode load_mode = BuildKeyedHoleMode(map);
return BuildUncheckedMonomorphicElementAccess(
checked_object, key, val,
SmallMapList* types;
bool monomorphic = ComputeReceiverTypes(expr, obj, &types);
+ bool force_generic = false;
+ if (is_store && (monomorphic || (types != NULL && !types->is_empty()))) {
+ // Stores can't be mono/polymorphic if their prototype chain has dictionary
+ // elements. However, a receiver map that has dictionary elements itself
+ // should be left to normal mono/poly behavior (the other maps may benefit
+ // from highly optimized stores).
+ for (int i = 0; i < types->length(); i++) {
+ Handle<Map> current_map = types->at(i);
+ if (current_map->DictionaryElementsInPrototypeChainOnly()) {
+ force_generic = true;
+ monomorphic = false;
+ break;
+ }
+ }
+ }
+
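
At the JavaScript level, the force_generic decision above corresponds roughly to the following sketch (function and variable names are invented for illustration):

    function store(o, v) { o[0] = v; }
    var plain = [1, 2, 3];                   // fast elements, clean prototype chain
    var tricky = [4, 5, 6];                  // fast elements itself...
    var p = [];
    p.__defineSetter__(0, function(v) {});   // ...but p now has dictionary elements
    tricky.__proto__ = p;
    store(plain, 7);
    store(tricky, 8);  // feedback now includes a map whose prototype chain has
                       // dictionary elements, so the keyed store is kept generic
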
if (monomorphic) {
Handle<Map> map = types->first();
if (map->has_slow_elements_kind()) {
instr = BuildMonomorphicElementAccess(
obj, key, val, NULL, map, is_store, expr->GetStoreMode());
}
- } else if (types != NULL && !types->is_empty()) {
+ } else if (!force_generic && (types != NULL && !types->is_empty())) {
return HandlePolymorphicElementAccess(
obj, key, val, types, is_store,
expr->GetStoreMode(), has_side_effects);
__ cmp(edi, masm->isolate()->factory()->fixed_array_map());
__ j(not_equal, fast_double);
}
+
+ // HOLECHECK: guards "A[i] = V"
+ // We have to go to the runtime if the current value is the hole because
+ // there may be a callback on the element in the prototype chain.
+ Label holecheck_passed1;
+ __ cmp(CodeGenerator::FixedArrayElementOperand(ebx, ecx),
+ masm->isolate()->factory()->the_hole_value());
+ __ j(not_equal, &holecheck_passed1);
+ __ JumpIfDictionaryInPrototypeChain(edx, ebx, edi, slow);
+ __ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset));
+
+ __ bind(&holecheck_passed1);
+
// Smi stores don't require further checks.
Label non_smi_value;
__ JumpIfNotSmi(eax, &non_smi_value);
// If the value is a number, store it as a double in the FastDoubleElements
// array.
}
+
+ // HOLECHECK: guards "A[i] double hole?"
+ // We have to see if the double version of the hole is present. If so,
+ // go to the runtime.
+ uint32_t offset = FixedDoubleArray::kHeaderSize + sizeof(kHoleNanLower32);
+ __ cmp(FieldOperand(ebx, ecx, times_4, offset), Immediate(kHoleNanUpper32));
+ __ j(not_equal, &fast_double_without_map_check);
+ __ JumpIfDictionaryInPrototypeChain(edx, ebx, edi, slow);
+ __ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset));
+
__ bind(&fast_double_without_map_check);
__ StoreNumberToDoubleElements(eax, ebx, ecx, edi, xmm0,
&transition_double_elements, false);
}
+void MacroAssembler::JumpIfDictionaryInPrototypeChain(
+ Register object,
+ Register scratch0,
+ Register scratch1,
+ Label* found) {
+ ASSERT(!scratch1.is(scratch0));
+ Factory* factory = isolate()->factory();
+ Register current = scratch0;
+ Label loop_again;
+
+ // scratch0 contained the elements pointer; it is reused as 'current' here.
+ mov(current, object);
+
+ // Loop based on the map going up the prototype chain.
+ bind(&loop_again);
+ mov(current, FieldOperand(current, HeapObject::kMapOffset));
+ mov(scratch1, FieldOperand(current, Map::kBitField2Offset));
+ and_(scratch1, Map::kElementsKindMask);
+ shr(scratch1, Map::kElementsKindShift);
+ cmp(scratch1, Immediate(DICTIONARY_ELEMENTS));
+ j(equal, found);
+ mov(current, FieldOperand(current, Map::kPrototypeOffset));
+ cmp(current, Immediate(factory->null_value()));
+ j(not_equal, &loop_again);
+}
+
} } // namespace v8::internal
#endif // V8_TARGET_ARCH_IA32
bind(&no_memento_found);
}
+ // Jumps to found label if a prototype map has dictionary elements.
+ void JumpIfDictionaryInPrototypeChain(Register object, Register scratch0,
+ Register scratch1, Label* found);
+
private:
bool generating_stub_;
bool allow_stub_calls_;
isolate()->heap()->non_strict_arguments_elements_map()) {
stub = non_strict_arguments_stub();
} else if (key_is_smi_like &&
- (!target().is_identical_to(non_strict_arguments_stub()))) {
- KeyedAccessStoreMode store_mode =
- GetStoreMode(receiver, key, value);
- stub = StoreElementStub(receiver, store_mode);
+ !(target().is_identical_to(non_strict_arguments_stub()))) {
+ // We should go generic if the receiver isn't a dictionary, but its
+ // prototype chain does have dictionary elements. This ensures that
+ // other non-dictionary receivers in the polymorphic case benefit
+ // from fast path keyed stores.
+ if (!(receiver->map()->DictionaryElementsInPrototypeChainOnly())) {
+ KeyedAccessStoreMode store_mode =
+ GetStoreMode(receiver, key, value);
+ stub = StoreElementStub(receiver, store_mode);
+ }
}
}
}
ASSERT(args.length() == 4);
KeyedStoreIC ic(IC::EXTRA_CALL_FRAME, isolate);
Handle<Object> value = args.at<Object>(0);
+ Handle<Map> map = args.at<Map>(1);
Handle<Object> key = args.at<Object>(2);
Handle<Object> object = args.at<Object>(3);
StrictModeFlag strict_mode = ic.strict_mode();
+ if (object->IsJSObject()) {
+ JSObject::TransitionElementsKind(Handle<JSObject>::cast(object),
+ map->elements_kind());
+ }
return Runtime::SetObjectProperty(isolate,
object,
key,
}
+bool Map::DictionaryElementsInPrototypeChainOnly() {
+ Heap* heap = GetHeap();
+
+ if (IsDictionaryElementsKind(elements_kind())) {
+ return false;
+ }
+
+ for (Object* prototype = this->prototype();
+ prototype != heap->null_value();
+ prototype = prototype->GetPrototype(GetIsolate())) {
+ if (prototype->IsJSProxy()) {
+ // Be conservative, don't walk into proxies.
+ return true;
+ }
+
+ if (IsDictionaryElementsKind(
+ JSObject::cast(prototype)->map()->elements_kind())) {
+ return true;
+ }
+ }
+
+ return false;
+}
+
+
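
A JavaScript-level reading of what this predicate distinguishes (a sketch, with invented names):

    var onlyInChain = [1, 2, 3];             // receiver keeps fast elements
    var proto = [];
    proto.__defineGetter__(0, function() { return 0; });  // proto goes dictionary
    onlyInChain.__proto__ = proto;           // -> true: dictionary elements only in the chain

    var selfDictionary = [];
    selfDictionary.__defineGetter__(0, function() { return 0; });
    // -> false: the receiver's own elements kind is already DICTIONARY_ELEMENTS,
    //    so normal dictionary handling covers it
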
void JSObject::SetElementCallback(Handle<JSObject> object,
uint32_t index,
Handle<Object> structure,
PropertyDetails details = PropertyDetails(attributes, CALLBACKS, 0);
// Normalize elements to make this operation simple.
+ bool had_dictionary_elements = object->HasDictionaryElements();
Handle<SeededNumberDictionary> dictionary = NormalizeElements(object);
ASSERT(object->HasDictionaryElements() ||
object->HasDictionaryArgumentsElements());
parameter_map->set(1, *dictionary);
} else {
object->set_elements(*dictionary);
+
+ if (!had_dictionary_elements) {
+ // KeyedStoreICs (at least the non-generic ones) need a reset.
+ heap->ClearAllICsByKind(Code::KEYED_STORE_IC);
+ }
}
}
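
The IC clear added above matters when the accessor is installed on an object that is already in some receiver's prototype chain, mirroring the "sneak in a callback" cases in the tests below (an illustrative sketch, not part of the patch):

    function write(o, v) { o[0] = v; }
    var a = [1, 2, 3];
    var p = [];
    a.__proto__ = p;                         // p is in a's chain from the start
    write(a, 4); write(a, 5); write(a, 6);   // keyed-store IC warmed up on a's map
    delete a[0];
    var calls = 0;
    p.__defineSetter__(0, function(v) { calls++; });  // normalizes p, clears KEYED_STORE_ICs
    write(a, 7);                             // store now reaches the setter; calls === 1
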
void Code::ClearInlineCaches() {
+ ClearInlineCaches(NULL);
+}
+
+
+void Code::ClearInlineCaches(Code::Kind kind) {
+ ClearInlineCaches(&kind);
+}
+
+
+void Code::ClearInlineCaches(Code::Kind* kind) {
int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
RelocInfo::ModeMask(RelocInfo::CONSTRUCT_CALL) |
RelocInfo::ModeMask(RelocInfo::CODE_TARGET_WITH_ID) |
RelocInfo* info = it.rinfo();
Code* target(Code::GetCodeFromTargetAddress(info->target_address()));
if (target->is_inline_cache_stub()) {
- IC::Clear(this->GetIsolate(), info->pc());
+ if (kind == NULL || *kind == target->kind()) {
+ IC::Clear(this->GetIsolate(), info->pc());
+ }
}
}
}
}
}
+ bool dictionary_elements_in_chain =
+ object->map()->DictionaryElementsInPrototypeChainOnly();
Handle<JSObject> real_receiver = object;
if (skip_hidden_prototypes) {
ASSERT(new_map->prototype() == *value);
real_receiver->set_map(*new_map);
+ if (!dictionary_elements_in_chain &&
+ new_map->DictionaryElementsInPrototypeChainOnly()) {
+ // If the prototype chain didn't previously have element callbacks, then
+ // KeyedStoreICs need to be cleared to ensure any that involve this
+ // map go generic.
+ object->GetHeap()->ClearAllICsByKind(Code::KEYED_STORE_IC);
+ }
+
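
This covers the other way callbacks can appear behind a receiver: swapping in a whole prototype that already has them. An illustrative sketch, assuming the clearing condition added above:

    function write(o, v) { o[0] = v; }
    var a = [1, 2, 3];
    write(a, 4); write(a, 5); write(a, 6);   // keyed-store IC warmed up on a's map
    delete a[0];
    var p = [];
    p.__defineSetter__(0, function(v) {});   // p has dictionary elements
    a.__proto__ = p;                         // SetPrototype clears KEYED_STORE_ICs here
    write(a, 7);                             // store must now go through the setter
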
heap->ClearInstanceofCache();
ASSERT(size == object->Size());
return value;
}
if (from_kind == to_kind) return this;
-
- MaybeObject* maybe_failure = UpdateAllocationSite(to_kind);
- if (maybe_failure->IsFailure()) return maybe_failure;
+ // Don't update the allocation site if to_kind isn't fast.
+ if (IsFastElementsKind(to_kind)) {
+ MaybeObject* maybe_failure = UpdateAllocationSite(to_kind);
+ if (maybe_failure->IsFailure()) return maybe_failure;
+ }
Isolate* isolate = GetIsolate();
if (elements() == isolate->heap()->empty_fixed_array() ||
DECLARE_VERIFIER(Code)
void ClearInlineCaches();
+ void ClearInlineCaches(Kind kind);
+
void ClearTypeFeedbackCells(Heap* heap);
BailoutId TranslatePcOffsetToAstId(uint32_t pc_offset);
private:
friend class RelocIterator;
+ void ClearInlineCaches(Kind* kind);
+
// Code aging
byte* FindCodeAgeSequence();
static void GetCodeAgeAndParity(Code* code, Age* age,
static bool IsValidElementsTransition(ElementsKind from_kind,
ElementsKind to_kind);
+ // Returns true if the current map doesn't have DICTIONARY_ELEMENTS but a
+ // map with DICTIONARY_ELEMENTS is found somewhere in the prototype chain.
+ bool DictionaryElementsInPrototypeChainOnly();
+
inline bool HasTransitionArray();
inline bool HasElementsTransition();
inline Map* elements_transition_map();
__ CompareRoot(rdi, Heap::kFixedArrayMapRootIndex);
__ j(not_equal, fast_double);
}
+
+ // HOLECHECK: guards "A[i] = V"
+ // We have to go to the runtime if the current value is the hole because
+ // there may be a callback on the element in the prototype chain.
+ Label holecheck_passed1;
+ __ movq(kScratchRegister, FieldOperand(rbx,
+ rcx,
+ times_pointer_size,
+ FixedArray::kHeaderSize));
+ __ CompareRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
+ __ j(not_equal, &holecheck_passed1);
+ __ JumpIfDictionaryInPrototypeChain(rdx, rdi, kScratchRegister, slow);
+
+ __ bind(&holecheck_passed1);
+
// Smi stores don't require further checks.
Label non_smi_value;
__ JumpIfNotSmi(rax, &non_smi_value);
__ CompareRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex);
__ j(not_equal, slow);
}
+
+ // HOLECHECK: guards "A[i] double hole?"
+ // We have to see if the double version of the hole is present. If so,
+ // go to the runtime.
+ uint32_t offset = FixedDoubleArray::kHeaderSize + sizeof(kHoleNanLower32);
+ __ cmpl(FieldOperand(rbx, rcx, times_8, offset), Immediate(kHoleNanUpper32));
+ __ j(not_equal, &fast_double_without_map_check);
+ __ JumpIfDictionaryInPrototypeChain(rdx, rdi, kScratchRegister, slow);
+
__ bind(&fast_double_without_map_check);
__ StoreNumberToDoubleElements(rax, rbx, rcx, xmm0,
&transition_double_elements);
}
+void MacroAssembler::JumpIfDictionaryInPrototypeChain(
+ Register object,
+ Register scratch0,
+ Register scratch1,
+ Label* found) {
+ ASSERT(!(scratch0.is(kScratchRegister) && scratch1.is(kScratchRegister)));
+ ASSERT(!scratch1.is(scratch0));
+ Register current = scratch0;
+ Label loop_again;
+
+ movq(current, object);
+
+ // Loop based on the map going up the prototype chain.
+ bind(&loop_again);
+ movq(current, FieldOperand(current, HeapObject::kMapOffset));
+ movq(scratch1, FieldOperand(current, Map::kBitField2Offset));
+ and_(scratch1, Immediate(Map::kElementsKindMask));
+ shr(scratch1, Immediate(Map::kElementsKindShift));
+ cmpq(scratch1, Immediate(DICTIONARY_ELEMENTS));
+ j(equal, found);
+ movq(current, FieldOperand(current, Map::kPrototypeOffset));
+ CompareRoot(current, Heap::kNullValueRootIndex);
+ j(not_equal, &loop_again);
+}
+
+
} } // namespace v8::internal
#endif // V8_TARGET_ARCH_X64
bind(&no_memento_found);
}
+ // Jumps to found label if a prototype map has dictionary elements.
+ void JumpIfDictionaryInPrototypeChain(Register object, Register scratch0,
+ Register scratch1, Label* found);
+
private:
// Order general registers are pushed by Pushad.
// rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r14, r15.
assertKind(elements_kind.fast_double, obj);
obj = fastliteralcase([3, 6, 2], 1.5);
assertKind(elements_kind.fast_double, obj);
+
+ // Note: thanks to pessimistic transition store stubs, we'll attempt
+ // to transition to the most general elements kind seen at a particular
+ // store site. So, the elements kind will be double.
obj = fastliteralcase([2, 6, 3], 2);
- assertKind(elements_kind.fast_smi_only, obj);
+ assertKind(elements_kind.fast_double, obj);
}
// Verify that we will not pretransition the double->fast path.
--- /dev/null
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax --max-opt-count=100 --noalways-opt
+
+// We specify max-opt-count because we opt/deopt the same function many
+// times.
+
+// It's nice to run this in other browsers too.
+var standalone = false;
+if (standalone) {
+ assertTrue = function(val) {
+ if (val != true) {
+ print("FAILURE");
+ }
+ }
+
+ assertFalse = function(val) {
+ if (val != false) {
+ print("FAILURE");
+ }
+ }
+
+ assertEquals = function(expected, val) {
+ if (expected !== val) {
+ print("FAILURE");
+ }
+ }
+
+ empty_func = function(name) { }
+ assertUnoptimized = empty_func;
+ assertOptimized = empty_func;
+
+ optimize = empty_func;
+ clearFunctionTypeFeedback = empty_func;
+ deoptimizeFunction = empty_func;
+} else {
+ optimize = function(name) {
+ %OptimizeFunctionOnNextCall(name);
+ }
+ clearFunctionTypeFeedback = function(name) {
+ %ClearFunctionTypeFeedback(name);
+ }
+ deoptimizeFunction = function(name) {
+ %DeoptimizeFunction(name);
+ }
+}
+
+function base_getter_test(create_func) {
+ var calls = 0;
+
+ // Testcase: getter in prototype chain
+ foo = function(a) { var x = a[0]; return x + 3; }
+ var a = create_func();
+ var ap = [];
+ ap.__defineGetter__(0, function() { calls++; return 0; });
+
+ foo(a);
+ foo(a);
+ foo(a);
+ delete a[0];
+
+ assertEquals(0, calls);
+ a.__proto__ = ap;
+ foo(a);
+ assertEquals(1, calls);
+ optimize(foo);
+ foo(a);
+ assertEquals(2, calls);
+ assertOptimized(foo);
+
+ // Testcase: getter "deep" in prototype chain.
+ clearFunctionTypeFeedback(foo);
+ deoptimizeFunction(foo);
+ clearFunctionTypeFeedback(foo);
+ calls = 0;
+
+ a = create_func();
+ var ap2 = [];
+ a.__proto__ = ap2;
+ foo(a);
+ foo(a);
+ foo(a);
+ delete a[0];
+
+ assertEquals(0, calls);
+
+ ap2.__proto__ = ap; // "sneak" in a callback.
+ // The sneak case should be caught by unoptimized code too.
+ assertUnoptimized(foo);
+ foo(a);
+ foo(a);
+ foo(a);
+ assertEquals(3, calls);
+
+ // Testcase: getter added after optimization (feedback is monomorphic)
+ clearFunctionTypeFeedback(foo);
+ deoptimizeFunction(foo);
+ clearFunctionTypeFeedback(foo);
+ calls = 0;
+
+ a = create_func();
+ ap2 = [];
+ a.__proto__ = ap2;
+ foo(a);
+ foo(a);
+ foo(a);
+ optimize(foo);
+ foo(a);
+ assertOptimized(foo);
+ delete a[0];
+ ap2.__proto__ = ap;
+ foo(a);
+ assertOptimized(foo); // getters don't require deopt on shape change.
+ assertEquals(1, calls);
+
+ // Testcase: adding additional getters to a prototype chain that already has
+ // one shouldn't deopt anything.
+ clearFunctionTypeFeedback(foo);
+ calls = 0;
+
+ a = create_func();
+ a.__proto__ = ap2;
+ bar = function(a) { return a[3] + 600; }
+ bar(a);
+ bar(a);
+ bar(a);
+ optimize(bar);
+ bar(a);
+ assertOptimized(bar);
+ assertEquals(0, calls);
+ delete a[3];
+ ap2.__defineGetter__(3, function() { calls++; return 0; });
+ bar(a);
+ assertOptimized(bar);
+ assertEquals(1, calls);
+}
+
+// Verify that map transitions don't confuse us.
+create_func_smi = function() { return [,,,,,,5]; }
+create_func_double = function() { return [,,,,,,5.5]; }
+create_func_fast = function() { return [,,,,,,true]; }
+
+var cf = [create_func_smi,
+ create_func_double,
+ create_func_fast];
+
+for(var c = 0; c < 3; c++) {
+ base_getter_test(cf[c]);
+}
+
+// A special test for LoadKeyedHoleMode. Ensure that optimized code is
+// generated which sets ALLOW_RETURN_HOLE, then add a getter on the prototype
+// that should cause the function to deoptimize.
+
+var a = [3.5,,,3.5];
+fun = function(a) { return a[0] + 5.5; }
+fun(a);
+fun(a);
+fun(a); // should have a monomorphic KeyedLoadIC.
+optimize(fun);
+fun(a);
+assertOptimized(fun);
+
+// returning undefined shouldn't faze us.
+delete a[0];
+fun(a);
+assertOptimized(fun);
+
+// but messing up the prototype chain will.
+a.__proto__ = [];
+fun(a);
+assertUnoptimized(fun);
+
+// Construct a non-trivial prototype chain.
+var a = [3.5,,,,3.5];
+var ap = [,,3.5];
+ap.__proto__ = a.__proto__;
+a.__proto__ = ap;
+fun(a);
+optimize(fun);
+fun(a);
+assertOptimized(fun);
+
+var calls = 0;
+delete a[0];
+ap.__defineGetter__(0, function() { calls++; return 0; });
+fun(a);
+assertEquals(1, calls);
+assertUnoptimized(fun);
# All tests in the bug directory are expected to fail.
'bugs/*': [FAIL],
- # TODO(mvstanton) Re-enable when the performance is bearable again.
- 'regress/regress-2185-2': [SKIP],
-
##############################################################################
# Flaky tests.
# BUG(v8:2921).
--- /dev/null
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax --max-opt-count=100 --noalways-opt
+
+// We specify max-opt-count because we opt/deopt the same function many
+// times.
+
+// It's nice to run this in other browsers too.
+var standalone = false;
+if (standalone) {
+ assertTrue = function(val) {
+ if (val != true) {
+ print("FAILURE");
+ }
+ }
+
+ assertFalse = function(val) {
+ if (val != false) {
+ print("FAILURE");
+ }
+ }
+
+ assertEquals = function(expected, val) {
+ if (expected !== val) {
+ print("FAILURE");
+ }
+ }
+
+ empty_func = function(name) { }
+ assertUnoptimized = empty_func;
+ assertOptimized = empty_func;
+
+ optimize = empty_func;
+ clearFunctionTypeFeedback = empty_func;
+ deoptimizeFunction = empty_func;
+} else {
+ optimize = function(name) {
+ %OptimizeFunctionOnNextCall(name);
+ }
+ clearFunctionTypeFeedback = function(name) {
+ %ClearFunctionTypeFeedback(name);
+ }
+ deoptimizeFunction = function(name) {
+ %DeoptimizeFunction(name);
+ }
+}
+
+function base_setter_test(create_func, index, store_value) {
+ var calls = 0;
+
+ // Testcase: setter in prototype chain
+ foo = function(a) { a[index] = store_value; }
+ var a = create_func();
+ var ap = [];
+ ap.__defineSetter__(index, function() { calls++; });
+
+ foo(a);
+ foo(a);
+ foo(a);
+ delete a[index];
+
+ assertEquals(0, calls);
+ a.__proto__ = ap;
+ foo(a);
+ assertEquals(1, calls);
+ optimize(foo);
+ foo(a);
+ assertEquals(2, calls);
+ assertOptimized(foo);
+
+ // Testcase: setter added on prototype chain object already in place.
+ clearFunctionTypeFeedback(foo);
+ deoptimizeFunction(foo);
+ clearFunctionTypeFeedback(foo);
+ calls = 0;
+ a = create_func();
+ var apap = [];
+ a.__proto__ = apap;
+ foo(a);
+ foo(a);
+ foo(a);
+ delete a[index];
+ apap.__defineSetter__(index, function() { calls++; });
+ foo(a);
+ foo(a);
+ foo(a);
+ assertEquals(3, calls);
+
+ // Testcase: setter "deep" in prototype chain.
+ clearFunctionTypeFeedback(foo);
+ deoptimizeFunction(foo);
+ clearFunctionTypeFeedback(foo);
+ calls = 0;
+
+ a = create_func();
+ var ap2 = [];
+ a.__proto__ = ap2;
+ foo(a);
+ foo(a);
+ foo(a);
+ delete a[index];
+
+ assertEquals(0, calls);
+
+ ap2.__proto__ = ap; // "sneak" in a callback.
+ // The sneak case should be caught by unoptimized code too.
+ assertUnoptimized(foo);
+ foo(a);
+ foo(a);
+ foo(a);
+ assertEquals(3, calls);
+
+ // Testcase: setter added after optimization (feedback is monomorphic)
+ clearFunctionTypeFeedback(foo);
+ deoptimizeFunction(foo);
+ clearFunctionTypeFeedback(foo);
+ calls = 0;
+
+ a = create_func();
+ ap2 = [];
+ a.__proto__ = ap2;
+ foo(a);
+ foo(a);
+ foo(a);
+ optimize(foo);
+ foo(a);
+ assertOptimized(foo);
+ delete a[index];
+ ap2.__proto__ = ap;
+ foo(a);
+ assertUnoptimized(foo); // map shape change should deopt foo.
+ assertEquals(1, calls);
+
+ // Testcase: adding additional setters to a prototype chain that already has
+ // one shouldn't deopt anything (i.e., we aren't changing the map shape).
+ clearFunctionTypeFeedback(foo);
+ calls = 0;
+
+ a = create_func();
+ a.__proto__ = ap2;
+ bar = function(a) { a[index+1] = store_value; }
+ bar(a);
+ bar(a);
+ bar(a); // store should be generic
+ optimize(bar);
+ bar(a);
+ assertOptimized(bar);
+ assertEquals(0, calls);
+ delete a[index+1];
+ ap2.__defineSetter__(index+1, function() { calls++; });
+ bar(a);
+ assertOptimized(bar);
+ assertEquals(1, calls);
+}
+
+// Verify that map transitions don't confuse us.
+create_func_smi = function() { return [,,,,,,5]; }
+create_func_double = function() { return [0,,3.2,,,,5.5]; }
+create_func_fast = function() { return [,,,,,,true]; }
+create_func_dictionary = function() { var a = []; a.length = 100000; return a; }
+
+var cf = [create_func_smi,
+ create_func_double,
+ create_func_fast,
+ create_func_dictionary];
+
+var values = [3, 3.5, true];
+
+for(var c = 0; c < 3; c++) {
+ for(var s = 0; s < 3; s++) {
+ base_setter_test(cf[c], 0, values[s]);
+ base_setter_test(cf[c], 1, values[s]);
+ }
+}