From 4dd45e115be0122ee8b6ba1d9b3be5e86ea3b97c Mon Sep 17 00:00:00 2001
From: ishell <ishell@chromium.org>
Date: Wed, 23 Sep 2015 01:46:09 -0700
Subject: [PATCH] Share literals arrays per <native context, SharedFunctionInfo>
 pair.

This CL also renames a wrongly named test for v8:4173.

BUG=v8:4121
LOG=Y

Review URL: https://codereview.chromium.org/1353363002

Cr-Commit-Position: refs/heads/master@{#30879}
---
 src/code-stubs-hydrogen.cc             |   8 +-
 src/factory.cc                         |  13 ++-
 src/heap/mark-compact.cc               |   4 +-
 src/objects.cc                         | 129 ++++++++++++++++---------
 src/objects.h                          |  21 +++-
 test/mjsunit/array-natives-elements.js |   2 +-
 test/mjsunit/regress/regress-4121.js   |  84 +++++++---------
 test/mjsunit/regress/regress-4173.js   |  58 +++++++++++
 8 files changed, 214 insertions(+), 105 deletions(-)
 create mode 100644 test/mjsunit/regress/regress-4173.js
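For context, a minimal standalone sketch of the data structure this CL changes
(not part of the patch; all types and helper names below are invented
stand-ins, not V8 API). The optimized code map hangs off a SharedFunctionInfo
as a FixedArray: one header slot (kSharedCodeIndex) for context-independent
code, followed by kEntryLength == 4 slot entries of (native context, cached
code, literals, OSR ast id). After this CL the cached-code slot of an entry may
be undefined, so an entry can carry only a literals array that is shared by all
closures created for the same <native context, SharedFunctionInfo> pair, and
non-OSR eviction clears the code slot while keeping the entry alive:

// Illustrative model only; the slot constants mirror SharedFunctionInfo,
// everything else is made up for the sketch.
#include <cassert>
#include <cstdint>
#include <vector>

using Object = intptr_t;             // stand-in for a tagged V8 pointer
constexpr Object kUndefined = 0;     // stand-in for the undefined sentinel

constexpr int kSharedCodeIndex = 0;  // header: context-independent code
constexpr int kEntriesStart = 1;
constexpr int kContextOffset = 0;
constexpr int kCachedCodeOffset = 1;
constexpr int kLiteralsOffset = 2;
constexpr int kOsrAstIdOffset = 3;
constexpr int kEntryLength = 4;
constexpr int kNotFound = -1;

struct OptimizedCodeMap {
  std::vector<Object> slots{kUndefined};  // starts with the shared-code slot

  // Returns kNotFound, kSharedCodeIndex, or the start index of an entry.
  int SearchEntry(Object context, Object osr_ast_id) const {
    for (size_t i = kEntriesStart; i + kEntryLength <= slots.size();
         i += kEntryLength) {
      if (slots[i + kContextOffset] == context &&
          slots[i + kOsrAstIdOffset] == osr_ast_id) {
        return static_cast<int>(i);
      }
    }
    return kNotFound;
  }

  // |code| may be kUndefined: the entry then merely maps the
  // (context, osr_ast_id) pair to a literals array, as in this CL.
  void Add(Object context, Object code, Object literals, Object osr_ast_id) {
    int entry = SearchEntry(context, osr_ast_id);
    if (entry > kSharedCodeIndex) {
      // Existing literals-only entry: fill in the code slot.
      assert(slots[entry + kCachedCodeOffset] == kUndefined);
      slots[entry + kCachedCodeOffset] = code;
      slots[entry + kLiteralsOffset] = literals;
      return;
    }
    slots.insert(slots.end(), {context, code, literals, osr_ast_id});
  }

  // Non-OSR eviction clears the code slot but keeps the entry, so the
  // literals stay shared with closures created later.
  void EvictCode(Object code) {
    for (size_t i = kEntriesStart; i + kEntryLength <= slots.size();
         i += kEntryLength) {
      if (slots[i + kCachedCodeOffset] == code) {
        slots[i + kCachedCodeOffset] = kUndefined;
      }
    }
  }
};

int main() {
  OptimizedCodeMap map;
  map.Add(/*context=*/100, kUndefined, /*literals=*/200, /*osr_ast_id=*/0);
  map.Add(/*context=*/100, /*code=*/300, /*literals=*/200, /*osr_ast_id=*/0);
  assert(map.slots[kEntriesStart + kCachedCodeOffset] == 300);
  map.EvictCode(300);  // entry survives, literals remain shared
  assert(map.slots[kEntriesStart + kLiteralsOffset] == 200);
  return 0;
}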
diff --git a/src/code-stubs-hydrogen.cc b/src/code-stubs-hydrogen.cc
index 43cf265e9..59d3a8b7d 100644
--- a/src/code-stubs-hydrogen.cc
+++ b/src/code-stubs-hydrogen.cc
@@ -1731,13 +1731,15 @@ void CodeStubGraphBuilderBase::BuildCheckAndInstallOptimizedCode(
       optimized_map, map_index, SharedFunctionInfo::kContextOffset);
   HValue* osr_ast_slot = LoadFromOptimizedCodeMap(
       optimized_map, map_index, SharedFunctionInfo::kOsrAstIdOffset);
+  HValue* code_object = LoadFromOptimizedCodeMap(
+      optimized_map, map_index, SharedFunctionInfo::kCachedCodeOffset);
   builder->If<HCompareObjectEqAndBranch>(native_context, context_slot);
   builder->AndIf<HCompareObjectEqAndBranch>(osr_ast_slot, osr_ast_id_none);
+  builder->And();
+  builder->IfNot<HCompareObjectEqAndBranch>(code_object,
+                                            graph()->GetConstantUndefined());
   builder->Then();
-  HValue* code_object = LoadFromOptimizedCodeMap(optimized_map,
-      map_index, SharedFunctionInfo::kCachedCodeOffset);
-  // and the literals
   HValue* literals = LoadFromOptimizedCodeMap(optimized_map,
       map_index, SharedFunctionInfo::kLiteralsOffset);

diff --git a/src/factory.cc b/src/factory.cc
index 84cc00d28..c1322d7e7 100644
--- a/src/factory.cc
+++ b/src/factory.cc
@@ -1314,17 +1314,24 @@ Handle<JSFunction> Factory::NewFunctionFromSharedFunctionInfo(
       context->native_context(), BailoutId::None());
   if (cached.code != nullptr) {
     // Caching of optimized code enabled and optimized code found.
-    if (cached.literals != nullptr) result->set_literals(cached.literals);
     DCHECK(!cached.code->marked_for_deoptimization());
     DCHECK(result->shared()->is_compiled());
     result->ReplaceCode(cached.code);
   }

-  if (cached.literals == nullptr && !info->bound()) {
+  if (cached.literals != nullptr) {
+    result->set_literals(cached.literals);
+
+  } else if (!info->bound()) {
     int number_of_literals = info->num_literals();
-    // TODO(mstarzinger): Consider sharing the newly created literals array.
     Handle<FixedArray> literals = NewFixedArray(number_of_literals, pretenure);
     result->set_literals(*literals);
+    // Cache context-specific literals.
+    if (FLAG_cache_optimized_code) {
+      Handle<Context> native_context(context->native_context());
+      SharedFunctionInfo::AddToOptimizedCodeMap(
+          info, native_context, undefined_value(), literals, BailoutId::None());
+    }
   }
   return result;

diff --git a/src/heap/mark-compact.cc b/src/heap/mark-compact.cc
index 56d386a8e..87e1b34f5 100644
--- a/src/heap/mark-compact.cc
+++ b/src/heap/mark-compact.cc
@@ -1013,8 +1013,8 @@ void CodeFlusher::ProcessOptimizedCodeMaps() {
       STATIC_ASSERT(SharedFunctionInfo::kEntryLength == 4);
       Context* context =
           Context::cast(code_map->get(i + SharedFunctionInfo::kContextOffset));
-      Code* code =
-          Code::cast(code_map->get(i + SharedFunctionInfo::kCachedCodeOffset));
+      HeapObject* code = HeapObject::cast(
+          code_map->get(i + SharedFunctionInfo::kCachedCodeOffset));
       FixedArray* literals = FixedArray::cast(
           code_map->get(i + SharedFunctionInfo::kLiteralsOffset));
       Smi* ast_id =

diff --git a/src/objects.cc b/src/objects.cc
index 89ae2a653..6b057c1c3 100644
--- a/src/objects.cc
+++ b/src/objects.cc
@@ -10137,47 +10137,60 @@ void SharedFunctionInfo::AddSharedCodeToOptimizedCodeMap(


 void SharedFunctionInfo::AddToOptimizedCodeMap(
-    Handle<SharedFunctionInfo> shared,
-    Handle<Context> native_context,
-    Handle<Code> code,
-    Handle<FixedArray> literals,
+    Handle<SharedFunctionInfo> shared, Handle<Context> native_context,
+    Handle<HeapObject> code, Handle<FixedArray> literals,
     BailoutId osr_ast_id) {
   Isolate* isolate = shared->GetIsolate();
-  DCHECK(!shared->SearchOptimizedCodeMap(*native_context, osr_ast_id).code);
-  DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION);
+  DCHECK(*code == isolate->heap()->undefined_value() ||
+         !shared->SearchOptimizedCodeMap(*native_context, osr_ast_id).code);
+  DCHECK(*code == isolate->heap()->undefined_value() ||
+         Code::cast(*code)->kind() == Code::OPTIMIZED_FUNCTION);
   DCHECK(native_context->IsNativeContext());
   STATIC_ASSERT(kEntryLength == 4);
   Handle<FixedArray> new_code_map;
   Handle<Object> value(shared->optimized_code_map(), isolate);
-  int old_length;
+  int entry;
   if (value->IsSmi()) {
     // No optimized code map.
     DCHECK_EQ(0, Smi::cast(*value)->value());
     new_code_map = isolate->factory()->NewFixedArray(kInitialLength, TENURED);
-    old_length = kEntriesStart;
+    entry = kEntriesStart;
   } else {
-    // Copy old optimized code map and append one new entry.
     Handle<FixedArray> old_code_map = Handle<FixedArray>::cast(value);
+    entry = shared->SearchOptimizedCodeMapEntry(*native_context, osr_ast_id);
+    if (entry > kSharedCodeIndex) {
+      // Found an existing context-specific entry; it must not contain any code.
+      DCHECK_EQ(isolate->heap()->undefined_value(),
+                old_code_map->get(entry + kCachedCodeOffset));
+      // Just set the code and literals on the entry.
+      old_code_map->set(entry + kCachedCodeOffset, *code);
+      old_code_map->set(entry + kLiteralsOffset, *literals);
+      return;
+    }
+
+    // Copy old optimized code map and append one new entry.
     new_code_map = isolate->factory()->CopyFixedArrayAndGrow(
         old_code_map, kEntryLength, TENURED);
-    old_length = old_code_map->length();
+    int old_length = old_code_map->length();
     // Zap the old map to avoid any stale entries. Note that this is required
     // for correctness because entries are being treated weakly by the GC.
     MemsetPointer(old_code_map->data_start(),
                   isolate->heap()->the_hole_value(), old_length);
+    entry = old_length;
   }
-  new_code_map->set(old_length + kContextOffset, *native_context);
-  new_code_map->set(old_length + kCachedCodeOffset, *code);
-  new_code_map->set(old_length + kLiteralsOffset, *literals);
-  new_code_map->set(old_length + kOsrAstIdOffset,
-                    Smi::FromInt(osr_ast_id.ToInt()));
+  new_code_map->set(entry + kContextOffset, *native_context);
+  new_code_map->set(entry + kCachedCodeOffset, *code);
+  new_code_map->set(entry + kLiteralsOffset, *literals);
+  new_code_map->set(entry + kOsrAstIdOffset, Smi::FromInt(osr_ast_id.ToInt()));

 #ifdef DEBUG
   for (int i = kEntriesStart; i < new_code_map->length(); i += kEntryLength) {
     DCHECK(new_code_map->get(i + kContextOffset)->IsNativeContext());
-    DCHECK(new_code_map->get(i + kCachedCodeOffset)->IsCode());
-    DCHECK(Code::cast(new_code_map->get(i + kCachedCodeOffset))->kind() ==
-           Code::OPTIMIZED_FUNCTION);
+    Object* code = new_code_map->get(i + kCachedCodeOffset);
+    if (code != isolate->heap()->undefined_value()) {
+      DCHECK(code->IsCode());
+      DCHECK(Code::cast(code)->kind() == Code::OPTIMIZED_FUNCTION);
+    }
     DCHECK(new_code_map->get(i + kLiteralsOffset)->IsFixedArray());
     DCHECK(new_code_map->get(i + kOsrAstIdOffset)->IsSmi());
   }
@@ -10206,37 +10219,43 @@ void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code,
   DisallowHeapAllocation no_gc;
   if (optimized_code_map()->IsSmi()) return;

+  Heap* heap = GetHeap();
   FixedArray* code_map = FixedArray::cast(optimized_code_map());
   int dst = kEntriesStart;
   int length = code_map->length();
   for (int src = kEntriesStart; src < length; src += kEntryLength) {
     DCHECK(code_map->get(src)->IsNativeContext());
-    if (Code::cast(code_map->get(src + kCachedCodeOffset)) == optimized_code) {
-      // Evict the src entry by not copying it to the dst entry.
+    if (code_map->get(src + kCachedCodeOffset) == optimized_code) {
+      BailoutId osr(Smi::cast(code_map->get(src + kOsrAstIdOffset))->value());
       if (FLAG_trace_opt) {
         PrintF("[evicting entry from optimizing code map (%s) for ", reason);
         ShortPrint();
-        BailoutId osr(Smi::cast(code_map->get(src + kOsrAstIdOffset))->value());
         if (osr.IsNone()) {
           PrintF("]\n");
         } else {
           PrintF(" (osr ast id %d)]\n", osr.ToInt());
         }
       }
-    } else {
-      // Keep the src entry by copying it to the dst entry.
-      if (dst != src) {
-        code_map->set(dst + kContextOffset,
-                      code_map->get(src + kContextOffset));
-        code_map->set(dst + kCachedCodeOffset,
-                      code_map->get(src + kCachedCodeOffset));
-        code_map->set(dst + kLiteralsOffset,
-                      code_map->get(src + kLiteralsOffset));
-        code_map->set(dst + kOsrAstIdOffset,
-                      code_map->get(src + kOsrAstIdOffset));
+      if (!osr.IsNone()) {
+        // Evict the src entry by not copying it to the dst entry.
+        continue;
       }
-      dst += kEntryLength;
+      // In case of a non-OSR entry just clear the code in order to keep
+      // sharing the literals.
+      code_map->set_undefined(src + kCachedCodeOffset);
+    }
+
+    // Keep the src entry by copying it to the dst entry.
+    if (dst != src) {
+      code_map->set(dst + kContextOffset, code_map->get(src + kContextOffset));
+      code_map->set(dst + kCachedCodeOffset,
+                    code_map->get(src + kCachedCodeOffset));
+      code_map->set(dst + kLiteralsOffset,
+                    code_map->get(src + kLiteralsOffset));
+      code_map->set(dst + kOsrAstIdOffset,
+                    code_map->get(src + kOsrAstIdOffset));
     }
+    dst += kEntryLength;
   }
   if (code_map->get(kSharedCodeIndex) == optimized_code) {
     // Evict context-independent code as well.
@@ -10249,8 +10268,8 @@ void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code,
   }
   if (dst != length) {
     // Always trim even when array is cleared because of heap verifier.
-    GetHeap()->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(code_map,
-                                                                length - dst);
+    heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(code_map,
+                                                           length - dst);
     if (code_map->length() == kEntriesStart &&
         code_map->get(kSharedCodeIndex)->IsUndefined()) {
       ClearOptimizedCodeMap();
@@ -11272,8 +11291,8 @@ void SharedFunctionInfo::ResetForNewContext(int new_ic_age) {
 }


-CodeAndLiterals SharedFunctionInfo::SearchOptimizedCodeMap(
-    Context* native_context, BailoutId osr_ast_id) {
+int SharedFunctionInfo::SearchOptimizedCodeMapEntry(Context* native_context,
+                                                    BailoutId osr_ast_id) {
   DisallowHeapAllocation no_gc;
   DCHECK(native_context->IsNativeContext());
   Object* value = optimized_code_map();
@@ -11284,21 +11303,41 @@ CodeAndLiterals SharedFunctionInfo::SearchOptimizedCodeMap(
     for (int i = kEntriesStart; i < length; i += kEntryLength) {
       if (optimized_code_map->get(i + kContextOffset) == native_context &&
           optimized_code_map->get(i + kOsrAstIdOffset) == osr_ast_id_smi) {
-        return {Code::cast(optimized_code_map->get(i + kCachedCodeOffset)),
-                FixedArray::cast(optimized_code_map->get(i + kLiteralsOffset))};
+        return i;
       }
     }
     Object* shared_code = optimized_code_map->get(kSharedCodeIndex);
     if (shared_code->IsCode() && osr_ast_id.IsNone()) {
-      return {Code::cast(shared_code), nullptr};
+      return kSharedCodeIndex;
     }
-    if (FLAG_trace_opt) {
-      PrintF("[didn't find optimized code in optimized code map for ");
-      ShortPrint();
-      PrintF("]\n");
+  }
+  return -1;
+}
+
+
+CodeAndLiterals SharedFunctionInfo::SearchOptimizedCodeMap(
+    Context* native_context, BailoutId osr_ast_id) {
+  CodeAndLiterals result = {nullptr, nullptr};
+  int entry = SearchOptimizedCodeMapEntry(native_context, osr_ast_id);
+  if (entry != kNotFound) {
+    FixedArray* code_map = FixedArray::cast(optimized_code_map());
+    if (entry == kSharedCodeIndex) {
+      result = {Code::cast(code_map->get(kSharedCodeIndex)), nullptr};
+
+    } else {
+      DCHECK_LE(entry + kEntryLength, code_map->length());
+      Object* code = code_map->get(entry + kCachedCodeOffset);
+      result = {code->IsUndefined() ? nullptr : Code::cast(code),
+                FixedArray::cast(code_map->get(entry + kLiteralsOffset))};
     }
   }
-  return {nullptr, nullptr};
+  if (FLAG_trace_opt && !optimized_code_map()->IsSmi() &&
+      result.code == nullptr) {
+    PrintF("[didn't find optimized code in optimized code map for ");
+    ShortPrint();
+    PrintF("]\n");
+  }
+  return result;
 }

diff --git a/src/objects.h b/src/objects.h
index af0772780..a24d029da 100644
--- a/src/objects.h
+++ b/src/objects.h
@@ -6286,15 +6286,18 @@ class SharedFunctionInfo: public HeapObject {
   DECL_ACCESSORS(optimized_code_map, Object)

   // Returns entry from optimized code map for specified context and OSR entry.
-  // Note that {code == nullptr} indicates no matching entry has been found,
-  // whereas {literals == nullptr} indicates the code is context-independent.
+  // Note that {code == nullptr, literals == nullptr} indicates no matching
+  // entry has been found, whereas {code, literals == nullptr} indicates that
+  // code is context-independent.
   CodeAndLiterals SearchOptimizedCodeMap(Context* native_context,
                                          BailoutId osr_ast_id);

   // Clear optimized code map.
   void ClearOptimizedCodeMap();

-  // Removed a specific optimized code object from the optimized code map.
+  // Removes a specific optimized code object from the optimized code map.
+  // In the non-OSR case the code reference is cleared from the cache entry,
+  // but the entry itself is left in the map in order to keep sharing literals.
   void EvictFromOptimizedCodeMap(Code* optimized_code, const char* reason);

   // Trims the optimized code map after entries have been removed.
@@ -6305,9 +6308,11 @@ class SharedFunctionInfo: public HeapObject {
                                               Handle<Code> code);

   // Add a new entry to the optimized code map for context-dependent code.
+  // |code| is either a code object or an undefined value. In the latter case
+  // the entry just maps the |native_context, osr_ast_id| pair to |literals|.
   static void AddToOptimizedCodeMap(Handle<SharedFunctionInfo> shared,
                                     Handle<Context> native_context,
-                                    Handle<Code> code,
+                                    Handle<HeapObject> code,
                                     Handle<FixedArray> literals,
                                     BailoutId osr_ast_id);

@@ -6327,6 +6332,8 @@ class SharedFunctionInfo: public HeapObject {
   static const int kEntryLength = 4;
   static const int kInitialLength = kEntriesStart + kEntryLength;

+  static const int kNotFound = -1;
+
   // [scope_info]: Scope info.
   DECL_ACCESSORS(scope_info, ScopeInfo)

@@ -6895,6 +6902,12 @@ class SharedFunctionInfo: public HeapObject {
 #endif

  private:
+  // Returns the entry from the optimized code map for the given context and
+  // OSR entry. The result is either kNotFound, kSharedCodeIndex for a
+  // context-independent entry, or the start index of a context-dependent entry.
+  int SearchOptimizedCodeMapEntry(Context* native_context,
+                                  BailoutId osr_ast_id);
+
   DISALLOW_IMPLICIT_CONSTRUCTORS(SharedFunctionInfo);
 };

diff --git a/test/mjsunit/array-natives-elements.js b/test/mjsunit/array-natives-elements.js
index dabf57622..bf884fca4 100644
--- a/test/mjsunit/array-natives-elements.js
+++ b/test/mjsunit/array-natives-elements.js
@@ -30,6 +30,7 @@
 // IC and Crankshaft support for smi-only elements in dynamic array literals.
 function get(foo) { return foo; } // Used to generate dynamic values.

+var __sequence = 0;
 function array_natives_test() {

   // Ensure small array literals start in specific element kind mode.
@@ -41,7 +42,6 @@ function array_natives_test() {

   // This code exists to eliminate the learning influence of AllocationSites
   // on the following tests.
-  var __sequence = 0;
   function make_array_string(literal) {
     this.__sequence = this.__sequence + 1;
     return "/* " + this.__sequence + " */ " + literal;

diff --git a/test/mjsunit/regress/regress-4121.js b/test/mjsunit/regress/regress-4121.js
index bef0b47ee..a175ed9fd 100644
--- a/test/mjsunit/regress/regress-4121.js
+++ b/test/mjsunit/regress/regress-4121.js
@@ -4,55 +4,45 @@

 // Flags: --allow-natives-syntax

-function Migrator(o) {
-  return o.foo;
+function literals_sharing_test(warmup, optimize) {
+  function closure() {
+    // Ensure small array literals start in specific element kind mode.
+    assertTrue(%HasFastSmiElements([]));
+    assertTrue(%HasFastSmiElements([1]));
+    assertTrue(%HasFastSmiElements([1,2]));
+    assertTrue(%HasFastDoubleElements([1.1]));
+    assertTrue(%HasFastDoubleElements([1.1,2]));
+
+    var a = [1, 2, 3];
+    if (warmup) {
+      // Transition elements kind during warmup...
+      assertTrue(%HasFastSmiElements(a));
+      assertEquals(4, a.push(1.3));
+    }
+    // ... and ensure that the information about transitioning is
+    // propagated to the next closure.
+    assertTrue(%HasFastDoubleElements(a));
+  };
+  if (optimize) %OptimizeFunctionOnNextCall(closure);
+  closure();
 }
-function Loader(o) {
-  return o[0];
-}
-
-var first_smi_array = [1];
-var second_smi_array = [2];
-var first_object_array = ["first"];
-var second_object_array = ["string"];
-
-assertTrue(%HasFastSmiElements(first_smi_array));
-assertTrue(%HasFastSmiElements(second_smi_array));
-assertTrue(%HasFastObjectElements(first_object_array));
-assertTrue(%HasFastObjectElements(second_object_array));
-
-// Prepare identical transition chains for smi and object arrays.
-first_smi_array.foo = 0;
-second_smi_array.foo = 0;
-first_object_array.foo = 0;
-second_object_array.foo = 0;
-
-// Collect type feedback for not-yet-deprecated original object array map.
-for (var i = 0; i < 3; i++) Migrator(second_object_array);
-// Blaze a migration trail for smi array maps.
-// This marks the migrated smi array map as a migration target.
-first_smi_array.foo = 0.5;
-print(second_smi_array.foo);
-// Deprecate original object array map.
-// Use TryMigrate from deferred optimized code to migrate second object array.
-first_object_array.foo = 0.5;
-%OptimizeFunctionOnNextCall(Migrator);
-Migrator(second_object_array);
-
-// |second_object_array| now erroneously has a smi map.
-// Optimized code assuming smi elements will expose this.
-
-for (var i = 0; i < 3; i++) Loader(second_smi_array);
-%OptimizeFunctionOnNextCall(Loader);
-assertEquals("string", Loader(second_object_array));
+function test() {
+  var warmup = true;
+  for (var i = 0; i < 3; i++) {
+    print("iter: " + i + ", warmup: " + warmup);
+    literals_sharing_test(warmup, false);
+    warmup = false;
+  }
+  print("iter: " + i + ", opt: true");
+  literals_sharing_test(warmup, true);
+}

-// Any of the following checks will also fail:
-assertTrue(%HasFastObjectElements(second_object_array));
-assertFalse(%HasFastSmiElements(second_object_array));
-assertTrue(%HaveSameMap(first_object_array, second_object_array));
-assertFalse(%HaveSameMap(first_smi_array, second_object_array));
-%ClearFunctionTypeFeedback(Loader);
-%ClearFunctionTypeFeedback(Migrator);
+function stress_opt_test() {}
+stress_opt_test();
+if (%GetOptimizationStatus(stress_opt_test) == 2) {
+  // This test is not suitable for --always-opt mode.
+  test();
+}

diff --git a/test/mjsunit/regress/regress-4173.js b/test/mjsunit/regress/regress-4173.js
new file mode 100644
index 000000000..bef0b47ee
--- /dev/null
+++ b/test/mjsunit/regress/regress-4173.js
@@ -0,0 +1,58 @@
+// Copyright 2015 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Flags: --allow-natives-syntax
+
+function Migrator(o) {
+  return o.foo;
+}
+function Loader(o) {
+  return o[0];
+}
+
+var first_smi_array = [1];
+var second_smi_array = [2];
+var first_object_array = ["first"];
+var second_object_array = ["string"];
+
+assertTrue(%HasFastSmiElements(first_smi_array));
+assertTrue(%HasFastSmiElements(second_smi_array));
+assertTrue(%HasFastObjectElements(first_object_array));
+assertTrue(%HasFastObjectElements(second_object_array));
+
+// Prepare identical transition chains for smi and object arrays.
+first_smi_array.foo = 0;
+second_smi_array.foo = 0;
+first_object_array.foo = 0;
+second_object_array.foo = 0;
+
+// Collect type feedback for not-yet-deprecated original object array map.
+for (var i = 0; i < 3; i++) Migrator(second_object_array);
+
+// Blaze a migration trail for smi array maps.
+// This marks the migrated smi array map as a migration target.
+first_smi_array.foo = 0.5;
+print(second_smi_array.foo);
+
+// Deprecate original object array map.
+// Use TryMigrate from deferred optimized code to migrate second object array.
+first_object_array.foo = 0.5;
+%OptimizeFunctionOnNextCall(Migrator);
+Migrator(second_object_array);
+
+// |second_object_array| now erroneously has a smi map.
+// Optimized code assuming smi elements will expose this.
+
+for (var i = 0; i < 3; i++) Loader(second_smi_array);
+%OptimizeFunctionOnNextCall(Loader);
+assertEquals("string", Loader(second_object_array));
+
+// Any of the following checks will also fail:
+assertTrue(%HasFastObjectElements(second_object_array));
+assertFalse(%HasFastSmiElements(second_object_array));
+assertTrue(%HaveSameMap(first_object_array, second_object_array));
+assertFalse(%HaveSameMap(first_smi_array, second_object_array));
+
+%ClearFunctionTypeFeedback(Loader);
+%ClearFunctionTypeFeedback(Migrator);
-- 
2.34.1