// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_STUB_CACHE_H_
#define V8_STUB_CACHE_H_

#include "src/allocation.h"
#include "src/arguments.h"
#include "src/code-stubs.h"
#include "src/ic-inl.h"
#include "src/macro-assembler.h"
#include "src/objects.h"
#include "src/zone-inl.h"

namespace v8 {
namespace internal {

// The stub cache is used for megamorphic property accesses.
// It maps (map, name, type) to property access handlers. The cache does not
// need explicit invalidation when a prototype chain is modified, since the
// handlers verify the chain.
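//
// Illustrative usage sketch (editor's addition, not part of this header's
// API surface): an IC that goes megamorphic installs and later re-finds a
// handler roughly like this, where |name|, |map|, |flags| and |handler| are
// hypothetical locals:
//
//   isolate->stub_cache()->Set(name, map, *handler);
//   Code* probe = isolate->stub_cache()->Get(name, map, flags);
//   if (probe != NULL) { /* tail-call the cached handler */ }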

class CallOptimization;
class SmallMapList;
class StubCache;


class SCTableReference {
 public:
  Address address() const { return address_; }

 private:
  explicit SCTableReference(Address address) : address_(address) {}

  Address address_;

  friend class StubCache;
};


class StubCache {
 public:
  struct Entry {
    Name* key;
    Code* value;
    Map* map;
  };

  void Initialize();

  // Access cache for entry hash(name, map).
  Code* Set(Name* name, Map* map, Code* code);
  Code* Get(Name* name, Map* map, Code::Flags flags);

  // Clear the lookup table (used during mark-compact collection).
  void Clear();

  // Collect all maps that match the name and flags.
  void CollectMatchingMaps(SmallMapList* types,
                           Handle<Name> name,
                           Code::Flags flags,
                           Handle<Context> native_context,
                           Zone* zone);

  // Generate code for probing the stub cache table.
  // Arguments extra, extra2 and extra3 may be used to pass additional scratch
  // registers. Set to no_reg if not needed.
  void GenerateProbe(MacroAssembler* masm,
                     Code::Flags flags,
                     Register receiver,
                     Register name,
                     Register scratch,
                     Register extra,
                     Register extra2 = no_reg,
                     Register extra3 = no_reg);
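
  // Conceptually, a probe emitted by GenerateProbe performs the following
  // (a sketch of the generated code, not literal source):
  //
  //   int p = PrimaryOffset(name, flags, map);
  //   if (entry(primary_, p) matches (name, map, flags)) jump to handler;
  //   int s = SecondaryOffset(name, flags, p);
  //   if (entry(secondary_, s) matches) jump to handler;
  //   // otherwise fall through; the caller emits the miss handling.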

  enum Table { kPrimary, kSecondary };

  SCTableReference key_reference(StubCache::Table table) {
    return SCTableReference(
        reinterpret_cast<Address>(&first_entry(table)->key));
  }

  SCTableReference map_reference(StubCache::Table table) {
    return SCTableReference(
        reinterpret_cast<Address>(&first_entry(table)->map));
  }

  SCTableReference value_reference(StubCache::Table table) {
    return SCTableReference(
        reinterpret_cast<Address>(&first_entry(table)->value));
  }

  StubCache::Entry* first_entry(StubCache::Table table) {
    switch (table) {
      case StubCache::kPrimary: return StubCache::primary_;
      case StubCache::kSecondary: return StubCache::secondary_;
    }
    UNREACHABLE();
    return NULL;
  }

  Isolate* isolate() { return isolate_; }

  // Setting the entry size such that the index is shifted by Name::kHashShift
  // is convenient; shifting down the length field (to extract the hash code)
  // automatically discards the hash bit field.
  static const int kCacheIndexShift = Name::kHashShift;

 private:
  explicit StubCache(Isolate* isolate);

  // The stub cache has a primary and secondary level. The two levels have
  // different hashing algorithms in order to avoid simultaneous collisions
  // in both caches. Unlike a probing strategy (quadratic or otherwise), the
  // update strategy is fairly clear and simple: any existing entry in the
  // primary cache is moved to the secondary cache, and secondary cache
  // entries are overwritten.
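  //
  // A sketch of that policy (hypothetical pseudocode, not the actual Set()
  // implementation; |old_flags| stands for the flags of the evicted entry):
  //
  //   Entry* primary = entry(primary_, PrimaryOffset(name, flags, map));
  //   if (*primary is occupied) {
  //     int seed = PrimaryOffset(primary->key, old_flags, primary->map);
  //     *entry(secondary_, SecondaryOffset(primary->key, old_flags, seed)) =
  //         *primary;  // demote, overwriting the secondary slot
  //   }
  //   primary->key = name; primary->map = map; primary->value = code;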

  // Hash algorithm for the primary table. This algorithm is replicated in
  // assembler for every architecture. Returns an index into the table that
  // is scaled by 1 << kCacheIndexShift.
  static int PrimaryOffset(Name* name, Code::Flags flags, Map* map) {
    STATIC_ASSERT(kCacheIndexShift == Name::kHashShift);
    // Compute the hash of the name (use entire hash field).
    DCHECK(name->HasHashCode());
    uint32_t field = name->hash_field();
    // Using only the low bits in 64-bit mode is unlikely to increase the
    // risk of collision even if the heap is spread over an area larger than
    // 4Gb (and not at all if it isn't).
    uint32_t map_low32bits =
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(map));
    // We always set the in_loop bit to zero when generating the lookup code
    // so do it here too so the hash codes match.
    uint32_t iflags =
        (static_cast<uint32_t>(flags) & ~Code::kFlagsNotUsedInLookup);
    // Base the offset on a simple combination of name, flags, and map.
    uint32_t key = (map_low32bits + field) ^ iflags;
    return key & ((kPrimaryTableSize - 1) << kCacheIndexShift);
  }
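
  // Worked example with made-up values, assuming Name::kHashShift == 2 and
  // kPrimaryTableSize == 2048: with field == 0x1000, map_low32bits == 0x2000
  // and iflags == 0,
  //   key    == (0x2000 + 0x1000) ^ 0 == 0x3000
  //   mask   == (2048 - 1) << 2      == 0x1ffc
  //   offset == 0x3000 & 0x1ffc      == 0x1000
  // i.e. entry index 0x400 once the kCacheIndexShift scaling is undone.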

  // Hash algorithm for the secondary table. This algorithm is replicated in
  // assembler for every architecture. Returns an index into the table that
  // is scaled by 1 << kCacheIndexShift.
  static int SecondaryOffset(Name* name, Code::Flags flags, int seed) {
    // Use the seed from the primary cache in the secondary cache.
    uint32_t name_low32bits =
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name));
    // We always set the in_loop bit to zero when generating the lookup code
    // so do it here too so the hash codes match.
    uint32_t iflags =
        (static_cast<uint32_t>(flags) & ~Code::kFlagsNotUsedInLookup);
    uint32_t key = (seed - name_low32bits) + iflags;
    return key & ((kSecondaryTableSize - 1) << kCacheIndexShift);
  }
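
  // Because the primary offset is fed in as |seed|, two entries that collide
  // in the primary table land in decorrelated secondary slots. E.g. (made-up
  // values): seed == 0x1000, name_low32bits == 0x0800, iflags == 0 gives
  // key == 0x0800 and, with mask (512 - 1) << 2 == 0x07fc, a secondary
  // offset of 0x0800 & 0x07fc == 0.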

  // Compute the entry for a given offset in exactly the same way as
  // we do in generated code. We generate a hash code that already
  // ends in Name::kHashShift 0s. Then we multiply it so it is a multiple
  // of sizeof(Entry). This makes it easier to avoid making mistakes
  // in the hashed offset computations.
  static Entry* entry(Entry* table, int offset) {
    const int multiplier = sizeof(*table) >> Name::kHashShift;
    return reinterpret_cast<Entry*>(
        reinterpret_cast<Address>(table) + offset * multiplier);
  }
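
  // E.g.: on a 64-bit target sizeof(Entry) == 3 * kPointerSize == 24, so with
  // Name::kHashShift == 2 the multiplier is 24 >> 2 == 6; a (pre-scaled)
  // offset of 0x1000 becomes byte offset 0x1000 * 6 == 0x6000, which is
  // table index 0x400 times sizeof(Entry).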

  static const int kPrimaryTableBits = 11;
  static const int kPrimaryTableSize = (1 << kPrimaryTableBits);
  static const int kSecondaryTableBits = 9;
  static const int kSecondaryTableSize = (1 << kSecondaryTableBits);

  Entry primary_[kPrimaryTableSize];
  Entry secondary_[kSecondaryTableSize];
  Isolate* isolate_;

  friend class Isolate;
  friend class SCTableReference;

  DISALLOW_COPY_AND_ASSIGN(StubCache);
};


// ------------------------------------------------------------------------


// Support functions for IC stubs for callbacks.
DECLARE_RUNTIME_FUNCTION(StoreCallbackProperty);


// Support functions for IC stubs for interceptors.
DECLARE_RUNTIME_FUNCTION(LoadPropertyWithInterceptorOnly);
DECLARE_RUNTIME_FUNCTION(LoadPropertyWithInterceptor);
DECLARE_RUNTIME_FUNCTION(LoadElementWithInterceptor);
DECLARE_RUNTIME_FUNCTION(StorePropertyWithInterceptor);


enum PrototypeCheckType { CHECK_ALL_MAPS, SKIP_RECEIVER };
enum IcCheckType { ELEMENT, PROPERTY };


class PropertyAccessCompiler BASE_EMBEDDED {
 public:
  static Builtins::Name MissBuiltin(Code::Kind kind) {
    switch (kind) {
      case Code::LOAD_IC:
        return Builtins::kLoadIC_Miss;
      case Code::STORE_IC:
        return Builtins::kStoreIC_Miss;
      case Code::KEYED_LOAD_IC:
        return Builtins::kKeyedLoadIC_Miss;
      case Code::KEYED_STORE_IC:
        return Builtins::kKeyedStoreIC_Miss;
      default:
        UNREACHABLE();
    }
    return Builtins::kLoadIC_Miss;
  }

  static void TailCallBuiltin(MacroAssembler* masm, Builtins::Name name);

 protected:
  PropertyAccessCompiler(Isolate* isolate, Code::Kind kind,
                         CacheHolderFlag cache_holder)
      : registers_(GetCallingConvention(kind)),
        kind_(kind),
        cache_holder_(cache_holder),
        isolate_(isolate),
        masm_(isolate, NULL, 256) {}

  Code::Kind kind() const { return kind_; }
  CacheHolderFlag cache_holder() const { return cache_holder_; }
  MacroAssembler* masm() { return &masm_; }
  Isolate* isolate() const { return isolate_; }
  Heap* heap() const { return isolate()->heap(); }
  Factory* factory() const { return isolate()->factory(); }

  Register receiver() const { return registers_[0]; }
  Register name() const { return registers_[1]; }
  Register scratch1() const { return registers_[2]; }
  Register scratch2() const { return registers_[3]; }
  Register scratch3() const { return registers_[4]; }

  // Calling convention between indexed store IC and handler.
  Register transition_map() const { return scratch1(); }

  static Register* GetCallingConvention(Code::Kind);
  static Register* load_calling_convention();
  static Register* store_calling_convention();
  static Register* keyed_store_calling_convention();

  Register* registers_;

  static void GenerateTailCall(MacroAssembler* masm, Handle<Code> code);

  Handle<Code> GetCodeWithFlags(Code::Flags flags, const char* name);
  Handle<Code> GetCodeWithFlags(Code::Flags flags, Handle<Name> name);

 private:
  Code::Kind kind_;
  CacheHolderFlag cache_holder_;

  Isolate* isolate_;
  MacroAssembler masm_;
};


class PropertyICCompiler : public PropertyAccessCompiler {
 public:
  // Finds the Code object stored in the Heap::non_monomorphic_cache().
  static Code* FindPreMonomorphic(Isolate* isolate, Code::Kind kind,
                                  ExtraICState extra_ic_state);

  static Handle<Code> ComputeLoad(Isolate* isolate, InlineCacheState ic_state,
                                  ExtraICState extra_state);
  static Handle<Code> ComputeStore(Isolate* isolate, InlineCacheState ic_state,
                                   ExtraICState extra_state);

  static Handle<Code> ComputeMonomorphic(Code::Kind kind, Handle<Name> name,
                                         Handle<HeapType> type,
                                         Handle<Code> handler,
                                         ExtraICState extra_ic_state);
  static Handle<Code> ComputePolymorphic(Code::Kind kind, TypeHandleList* types,
                                         CodeHandleList* handlers,
                                         int number_of_valid_maps,
                                         Handle<Name> name,
                                         ExtraICState extra_ic_state);

  static Handle<Code> ComputeKeyedLoadMonomorphic(Handle<Map> receiver_map);

  static Handle<Code> ComputeKeyedStoreMonomorphic(
      Handle<Map> receiver_map, StrictMode strict_mode,
      KeyedAccessStoreMode store_mode);
  static Handle<Code> ComputeKeyedLoadPolymorphic(MapHandleList* receiver_maps);
  static Handle<Code> ComputeKeyedStorePolymorphic(
      MapHandleList* receiver_maps, KeyedAccessStoreMode store_mode,
      StrictMode strict_mode);

  static Handle<Code> ComputeCompareNil(Handle<Map> receiver_map,
                                        CompareNilICStub* stub);

 private:
  PropertyICCompiler(Isolate* isolate, Code::Kind kind,
                     ExtraICState extra_ic_state = kNoExtraICState,
                     CacheHolderFlag cache_holder = kCacheOnReceiver)
      : PropertyAccessCompiler(isolate, kind, cache_holder),
        extra_ic_state_(extra_ic_state) {}

  static Handle<Code> Find(Handle<Name> name, Handle<Map> stub_holder_map,
                           Code::Kind kind,
                           ExtraICState extra_ic_state = kNoExtraICState,
                           CacheHolderFlag cache_holder = kCacheOnReceiver);

  Handle<Code> CompileLoadInitialize(Code::Flags flags);
  Handle<Code> CompileLoadPreMonomorphic(Code::Flags flags);
  Handle<Code> CompileLoadMegamorphic(Code::Flags flags);
  Handle<Code> CompileStoreInitialize(Code::Flags flags);
  Handle<Code> CompileStorePreMonomorphic(Code::Flags flags);
  Handle<Code> CompileStoreGeneric(Code::Flags flags);
  Handle<Code> CompileStoreMegamorphic(Code::Flags flags);

  Handle<Code> CompileMonomorphic(Handle<HeapType> type, Handle<Code> handler,
                                  Handle<Name> name, IcCheckType check);
  Handle<Code> CompilePolymorphic(TypeHandleList* types,
                                  CodeHandleList* handlers, Handle<Name> name,
                                  Code::StubType type, IcCheckType check);

  Handle<Code> CompileKeyedStoreMonomorphic(Handle<Map> receiver_map,
                                            KeyedAccessStoreMode store_mode);
  Handle<Code> CompileKeyedStorePolymorphic(MapHandleList* receiver_maps,
                                            KeyedAccessStoreMode store_mode);
  Handle<Code> CompileKeyedStorePolymorphic(MapHandleList* receiver_maps,
                                            CodeHandleList* handler_stubs,
                                            MapHandleList* transitioned_maps);

  bool IncludesNumberType(TypeHandleList* types);

  Handle<Code> GetCode(Code::Kind kind, Code::StubType type, Handle<Name> name,
                       InlineCacheState state = MONOMORPHIC);

  Logger::LogEventsAndTags log_kind(Handle<Code> code) {
    if (kind() == Code::LOAD_IC) {
      return code->ic_state() == MONOMORPHIC ? Logger::LOAD_IC_TAG
                                             : Logger::LOAD_POLYMORPHIC_IC_TAG;
    } else if (kind() == Code::KEYED_LOAD_IC) {
      return code->ic_state() == MONOMORPHIC
                 ? Logger::KEYED_LOAD_IC_TAG
                 : Logger::KEYED_LOAD_POLYMORPHIC_IC_TAG;
    } else if (kind() == Code::STORE_IC) {
      return code->ic_state() == MONOMORPHIC ? Logger::STORE_IC_TAG
                                             : Logger::STORE_POLYMORPHIC_IC_TAG;
    }
    DCHECK_EQ(Code::KEYED_STORE_IC, kind());
    return code->ic_state() == MONOMORPHIC
               ? Logger::KEYED_STORE_IC_TAG
               : Logger::KEYED_STORE_POLYMORPHIC_IC_TAG;
  }

  const ExtraICState extra_ic_state_;
};


class PropertyHandlerCompiler : public PropertyAccessCompiler {
 public:
  static Handle<Code> Find(Handle<Name> name, Handle<Map> map, Code::Kind kind,
                           CacheHolderFlag cache_holder, Code::StubType type);

 protected:
  PropertyHandlerCompiler(Isolate* isolate, Code::Kind kind,
                          Handle<HeapType> type, Handle<JSObject> holder,
                          CacheHolderFlag cache_holder)
      : PropertyAccessCompiler(isolate, kind, cache_holder),
        type_(type),
        holder_(holder) {}

  virtual ~PropertyHandlerCompiler() {}

  virtual Register FrontendHeader(Register object_reg, Handle<Name> name,
                                  Label* miss) {
    UNREACHABLE();
    return receiver();
  }

  virtual void FrontendFooter(Handle<Name> name, Label* miss) { UNREACHABLE(); }

  Register Frontend(Register object_reg, Handle<Name> name);
  void NonexistentFrontendHeader(Handle<Name> name, Label* miss,
                                 Register scratch1, Register scratch2);

  // TODO(verwaest): Make non-static.
  static void GenerateFastApiCall(MacroAssembler* masm,
                                  const CallOptimization& optimization,
                                  Handle<Map> receiver_map, Register receiver,
                                  Register scratch, bool is_store, int argc,
                                  Register* values);

  // Helper function used to check that the dictionary doesn't contain
  // the property. This function may return false negatives, so miss_label
  // must always call a backup property check that is complete.
  // This function is safe to call if the receiver has fast properties.
  // Name must be unique and receiver must be a heap object.
  static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
                                               Label* miss_label,
                                               Register receiver,
                                               Handle<Name> name,
                                               Register r0, Register r1);

  // Generate code to check that a global property cell is empty. Create
  // the property cell at compilation time if no cell exists for the
  // property.
  static void GenerateCheckPropertyCell(MacroAssembler* masm,
                                        Handle<JSGlobalObject> global,
                                        Handle<Name> name,
                                        Register scratch,
                                        Label* miss);

  // Generates code that verifies that the property holder has not changed
  // (checking maps of objects in the prototype chain for fast and global
  // objects or doing negative lookup for slow objects, ensures that the
  // property cells for global objects are still empty) and checks that the
  // map of the holder has not changed. If necessary the function also
  // generates code for security checks in case of global object holders.
  // Helps to make sure that the current IC is still valid.
  //
  // The scratch and holder registers are always clobbered, but the object
  // register is only clobbered if it is the same as the holder register. The
  // function returns a register containing the holder - either object_reg or
  // holder_reg.
  Register CheckPrototypes(Register object_reg, Register holder_reg,
                           Register scratch1, Register scratch2,
                           Handle<Name> name, Label* miss,
                           PrototypeCheckType check = CHECK_ALL_MAPS);
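
  // Typical use in a handler compiler (an illustrative sketch; the register
  // and label choices here are hypothetical):
  //
  //   Label miss;
  //   Register holder = CheckPrototypes(receiver(), scratch1(), scratch2(),
  //                                     scratch3(), name, &miss);
  //   ... emit the fast-path access against |holder| ...
  //   __ bind(&miss);
  //   TailCallBuiltin(masm(), MissBuiltin(kind()));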

  Handle<Code> GetCode(Code::Kind kind, Code::StubType type, Handle<Name> name);

  void set_type_for_object(Handle<Object> object) {
    type_ = IC::CurrentTypeOf(object, isolate());
  }
  void set_holder(Handle<JSObject> holder) { holder_ = holder; }
  Handle<HeapType> type() const { return type_; }
  Handle<JSObject> holder() const { return holder_; }

 private:
  Handle<HeapType> type_;
  Handle<JSObject> holder_;
};


class NamedLoadHandlerCompiler : public PropertyHandlerCompiler {
 public:
  NamedLoadHandlerCompiler(Isolate* isolate, Handle<HeapType> type,
                           Handle<JSObject> holder,
                           CacheHolderFlag cache_holder)
      : PropertyHandlerCompiler(isolate, Code::LOAD_IC, type, holder,
                                cache_holder) {}

  virtual ~NamedLoadHandlerCompiler() {}

  Handle<Code> CompileLoadField(Handle<Name> name, FieldIndex index);

  Handle<Code> CompileLoadCallback(Handle<Name> name,
                                   Handle<ExecutableAccessorInfo> callback);

  Handle<Code> CompileLoadCallback(Handle<Name> name,
                                   const CallOptimization& call_optimization);

  Handle<Code> CompileLoadConstant(Handle<Name> name, int constant_index);

  // The LookupIterator is used to perform a lookup behind the interceptor. If
  // the iterator points to a LookupIterator::PROPERTY, its access will be
  // inlined.
  Handle<Code> CompileLoadInterceptor(LookupIterator* it);

  Handle<Code> CompileLoadViaGetter(Handle<Name> name,
                                    Handle<JSFunction> getter);

  Handle<Code> CompileLoadGlobal(Handle<PropertyCell> cell, Handle<Name> name,
                                 bool is_configurable);

  static Handle<Code> ComputeLoadNonexistent(Handle<Name> name,
                                             Handle<HeapType> type);

  static void GenerateLoadViaGetter(MacroAssembler* masm, Handle<HeapType> type,
                                    Register receiver,
                                    Handle<JSFunction> getter);

  static void GenerateLoadViaGetterForDeopt(MacroAssembler* masm) {
    GenerateLoadViaGetter(masm, Handle<HeapType>::null(), no_reg,
                          Handle<JSFunction>());
  }

  static void GenerateLoadFunctionPrototype(MacroAssembler* masm,
                                            Register receiver,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss_label);

  // These constants describe the structure of the interceptor arguments on the
  // stack. The arguments are pushed by the (platform-specific)
  // PushInterceptorArguments and read by LoadPropertyWithInterceptorOnly and
  // LoadWithInterceptor.
  static const int kInterceptorArgsNameIndex = 0;
  static const int kInterceptorArgsInfoIndex = 1;
  static const int kInterceptorArgsThisIndex = 2;
  static const int kInterceptorArgsHolderIndex = 3;
  static const int kInterceptorArgsLength = 4;
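
  // In other words, the runtime functions above see a four-slot argument
  // block laid out as follows (a sketch of the indices above, not a new
  // layout):
  //
  //   args[kInterceptorArgsNameIndex]   -> the property name
  //   args[kInterceptorArgsInfoIndex]   -> the InterceptorInfo
  //   args[kInterceptorArgsThisIndex]   -> the receiver
  //   args[kInterceptorArgsHolderIndex] -> the holder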

 protected:
  virtual Register FrontendHeader(Register object_reg, Handle<Name> name,
                                  Label* miss);

  virtual void FrontendFooter(Handle<Name> name, Label* miss);

 private:
  Handle<Code> CompileLoadNonexistent(Handle<Name> name);
  void GenerateLoadConstant(Handle<Object> value);
  void GenerateLoadCallback(Register reg,
                            Handle<ExecutableAccessorInfo> callback);
  void GenerateLoadCallback(const CallOptimization& call_optimization,
                            Handle<Map> receiver_map);
  void GenerateLoadInterceptor(Register holder_reg);
  void GenerateLoadInterceptorWithFollowup(LookupIterator* it,
                                           Register holder_reg);
  void GenerateLoadPostInterceptor(LookupIterator* it, Register reg);

  // Generates prototype loading code that uses the objects from the
  // context we were in when this function was called. If the context
  // has changed, a jump to miss is performed. This ties the generated
  // code to a particular context and so must not be used in cases
  // where the generated code is not allowed to have references to
  // objects from a context.
  static void GenerateDirectLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                        int index,
                                                        Register prototype,
                                                        Label* miss);

  Register scratch4() { return registers_[5]; }
};


class NamedStoreHandlerCompiler : public PropertyHandlerCompiler {
 public:
  explicit NamedStoreHandlerCompiler(Isolate* isolate, Handle<HeapType> type,
                                     Handle<JSObject> holder)
      : PropertyHandlerCompiler(isolate, Code::STORE_IC, type, holder,
                                kCacheOnReceiver) {}

  virtual ~NamedStoreHandlerCompiler() {}

  Handle<Code> CompileStoreTransition(Handle<Map> transition,
                                      Handle<Name> name);
  Handle<Code> CompileStoreField(LookupIterator* it);
  Handle<Code> CompileStoreCallback(Handle<JSObject> object, Handle<Name> name,
                                    Handle<ExecutableAccessorInfo> callback);
  Handle<Code> CompileStoreCallback(Handle<JSObject> object, Handle<Name> name,
                                    const CallOptimization& call_optimization);
  Handle<Code> CompileStoreViaSetter(Handle<JSObject> object, Handle<Name> name,
                                     Handle<JSFunction> setter);
  Handle<Code> CompileStoreInterceptor(Handle<Name> name);

  static void GenerateStoreViaSetter(MacroAssembler* masm,
                                     Handle<HeapType> type, Register receiver,
                                     Handle<JSFunction> setter);

  static void GenerateStoreViaSetterForDeopt(MacroAssembler* masm) {
    GenerateStoreViaSetter(masm, Handle<HeapType>::null(), no_reg,
                           Handle<JSFunction>());
  }

 protected:
  virtual Register FrontendHeader(Register object_reg, Handle<Name> name,
                                  Label* miss);

  virtual void FrontendFooter(Handle<Name> name, Label* miss);
  void GenerateRestoreName(Label* label, Handle<Name> name);

 private:
  void GenerateStoreTransition(Handle<Map> transition, Handle<Name> name,
                               Register receiver_reg, Register name_reg,
                               Register value_reg, Register scratch1,
                               Register scratch2, Register scratch3,
                               Label* miss_label, Label* slow);

  void GenerateStoreField(LookupIterator* lookup, Register value_reg,
                          Label* miss_label);

  static Builtins::Name SlowBuiltin(Code::Kind kind) {
    switch (kind) {
      case Code::STORE_IC: return Builtins::kStoreIC_Slow;
      case Code::KEYED_STORE_IC: return Builtins::kKeyedStoreIC_Slow;
      default: UNREACHABLE();
    }
    return Builtins::kStoreIC_Slow;
  }

  static Register value();
};


class ElementHandlerCompiler : public PropertyHandlerCompiler {
 public:
  explicit ElementHandlerCompiler(Isolate* isolate)
      : PropertyHandlerCompiler(isolate, Code::KEYED_LOAD_IC,
                                Handle<HeapType>::null(),
                                Handle<JSObject>::null(), kCacheOnReceiver) {}

  virtual ~ElementHandlerCompiler() {}

  void CompileElementHandlers(MapHandleList* receiver_maps,
                              CodeHandleList* handlers);

  static void GenerateLoadDictionaryElement(MacroAssembler* masm);
  static void GenerateStoreDictionaryElement(MacroAssembler* masm);
};


// Holds information about possible function call optimizations.
class CallOptimization BASE_EMBEDDED {
 public:
  explicit CallOptimization(Handle<JSFunction> function);

  bool is_constant_call() const {
    return !constant_function_.is_null();
  }

  Handle<JSFunction> constant_function() const {
    DCHECK(is_constant_call());
    return constant_function_;
  }

  bool is_simple_api_call() const {
    return is_simple_api_call_;
  }

  Handle<FunctionTemplateInfo> expected_receiver_type() const {
    DCHECK(is_simple_api_call());
    return expected_receiver_type_;
  }

  Handle<CallHandlerInfo> api_call_info() const {
    DCHECK(is_simple_api_call());
    return api_call_info_;
  }

  enum HolderLookup {
    kHolderNotFound,
    kHolderIsReceiver,
    kHolderFound
  };
  Handle<JSObject> LookupHolderOfExpectedType(
      Handle<Map> receiver_map,
      HolderLookup* holder_lookup) const;

  // Check if the api holder is between the receiver and the holder.
  bool IsCompatibleReceiver(Handle<Object> receiver,
                            Handle<JSObject> holder) const;

 private:
  void Initialize(Handle<JSFunction> function);

  // Determines whether the given function can be called using the
  // fast api call builtin.
  void AnalyzePossibleApiFunction(Handle<JSFunction> function);

  Handle<JSFunction> constant_function_;
  bool is_simple_api_call_;
  Handle<FunctionTemplateInfo> expected_receiver_type_;
  Handle<CallHandlerInfo> api_call_info_;
};

} }  // namespace v8::internal

#endif  // V8_STUB_CACHE_H_