1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28 #ifndef V8_STUB_CACHE_H_
29 #define V8_STUB_CACHE_H_
31 #include "allocation.h"
32 #include "arguments.h"
33 #include "code-stubs.h"
35 #include "macro-assembler.h"
43 // The stub cache is used for megamorphic calls and property accesses.
44 // It maps (map, name, type)->Code*
46 // The design of the table uses the inline cache stubs used for
47 // mono-morphic calls. The beauty of this is that we do not have to
48 // invalidate the cache whenever a prototype map is changed. The stub
49 // validates the map chain as in the mono-morphic case.
52 class CallOptimization;
// Thin wrapper around the raw address of a single stub cache table field
// (key, map, or value).  StubCache hands these out (see key_reference() /
// map_reference() / value_reference() below in this file) so that generated
// probe code can reference table slots directly.
// NOTE(review): access specifiers and the closing brace of this class are
// missing from this excerpt — confirm against the full header.
57 class SCTableReference {
59 Address address() const { return address_; }
62 explicit SCTableReference(Address address) : address_(address) {}
66 friend class StubCache;
// NOTE(review): the lines below are the interior of a class (the
// `class StubCache` header itself is not visible in this excerpt, but the
// friend declarations and qualified names later in the file indicate it is
// StubCache).  Several original lines — including some parameters of the
// declarations below — are missing from this excerpt.
// Presumably returns the object to be used as the stub holder for a
// (receiver, holder) pair — TODO confirm in the implementation file.
80 Handle<JSObject> StubHolder(Handle<JSObject> receiver,
81 Handle<JSObject> holder);
// Find* look up previously compiled IC / handler code by name and holder
// map; the Compute* declarations below presumably find-or-create code
// stubs — confirm against stub-cache.cc.
83 Handle<Code> FindIC(Handle<Name> name,
84 Handle<Map> stub_holder_map,
86 ExtraICState extra_state = kNoExtraICState,
87 InlineCacheHolderFlag cache_holder = OWN_MAP);
89 Handle<Code> FindHandler(Handle<Name> name,
92 InlineCacheHolderFlag cache_holder = OWN_MAP);
94 Handle<Code> ComputeMonomorphicIC(Handle<Name> name,
95 Handle<HeapType> type,
97 ExtraICState extra_ic_state);
99 Handle<Code> ComputeLoadNonexistent(Handle<Name> name, Handle<HeapType> type);
101 Handle<Code> ComputeKeyedLoadElement(Handle<Map> receiver_map);
103 Handle<Code> ComputeKeyedStoreElement(Handle<Map> receiver_map,
104 StrictModeFlag strict_mode,
105 KeyedAccessStoreMode store_mode);
// Generic load/store stubs selected by IC state rather than by map.
109 Handle<Code> ComputeLoad(InlineCacheState ic_state, ExtraICState extra_state);
110 Handle<Code> ComputeStore(InlineCacheState ic_state,
111 ExtraICState extra_state);
115 Handle<Code> ComputeCompareNil(Handle<Map> receiver_map,
116 CompareNilICStub& stub);
// Polymorphic element-access stubs covering a list of receiver maps.
120 Handle<Code> ComputeLoadElementPolymorphic(MapHandleList* receiver_maps);
121 Handle<Code> ComputeStoreElementPolymorphic(MapHandleList* receiver_maps,
122 KeyedAccessStoreMode store_mode,
123 StrictModeFlag strict_mode);
125 Handle<Code> ComputePolymorphicIC(TypeHandleList* types,
126 CodeHandleList* handlers,
127 int number_of_valid_maps,
129 ExtraICState extra_ic_state);
131 // Finds the Code object stored in the Heap::non_monomorphic_cache().
132 Code* FindPreMonomorphicIC(Code::Kind kind, ExtraICState extra_ic_state);
134 // Update cache for entry hash(name, map).
135 Code* Set(Name* name, Map* map, Code* code);
137 // Clear the lookup table (at mark-compact collection).
140 // Collect all maps that match the name and flags.
141 void CollectMatchingMaps(SmallMapList* types,
144 Handle<Context> native_context,
147 // Generate code for probing the stub cache table.
148 // Arguments extra, extra2 and extra3 may be used to pass additional scratch
149 // registers. Set to no_reg if not needed.
150 void GenerateProbe(MacroAssembler* masm,
156 Register extra2 = no_reg,
157 Register extra3 = no_reg);
// Expose the raw addresses of the key/map/value fields of the first entry
// of the selected table, so generated probe code can address the tables.
165 SCTableReference key_reference(StubCache::Table table) {
166 return SCTableReference(
167 reinterpret_cast<Address>(&first_entry(table)->key));
171 SCTableReference map_reference(StubCache::Table table) {
172 return SCTableReference(
173 reinterpret_cast<Address>(&first_entry(table)->map));
177 SCTableReference value_reference(StubCache::Table table) {
178 return SCTableReference(
179 reinterpret_cast<Address>(&first_entry(table)->value));
// Returns the first entry of the primary or secondary table.
// NOTE(review): the `switch (table)` header and default case are missing
// from this excerpt.
183 StubCache::Entry* first_entry(StubCache::Table table) {
185 case StubCache::kPrimary: return StubCache::primary_;
186 case StubCache::kSecondary: return StubCache::secondary_;
// Convenience accessors for the owning isolate and its heap/factory.
192 Isolate* isolate() { return isolate_; }
193 Heap* heap() { return isolate()->heap(); }
194 Factory* factory() { return isolate()->factory(); }
196 // These constants describe the structure of the interceptor arguments on the
197 // stack. The arguments are pushed by the (platform-specific)
198 // PushInterceptorArguments and read by LoadPropertyWithInterceptorOnly and
199 // LoadWithInterceptor.
200 static const int kInterceptorArgsNameIndex = 0;
201 static const int kInterceptorArgsInfoIndex = 1;
202 static const int kInterceptorArgsThisIndex = 2;
203 static const int kInterceptorArgsHolderIndex = 3;
204 static const int kInterceptorArgsLength = 4;
207 explicit StubCache(Isolate* isolate);
209 // The stub cache has a primary and secondary level. The two levels have
210 // different hashing algorithms in order to avoid simultaneous collisions
211 // in both caches. Unlike a probing strategy (quadratic or otherwise) the
212 // update strategy on updates is fairly clear and simple: Any existing entry
213 // in the primary cache is moved to the secondary cache, and secondary cache
214 // entries are overwritten.
216 // Hash algorithm for the primary table. This algorithm is replicated in
217 // assembler for every architecture. Returns an index into the table that
218 // is scaled by 1 << kHeapObjectTagSize.
219 static int PrimaryOffset(Name* name, Code::Flags flags, Map* map) {
220 // This works well because the heap object tag size and the hash
221 // shift are equal. Shifting down the length field to get the
222 // hash code would effectively throw away two bits of the hash
224 STATIC_ASSERT(kHeapObjectTagSize == Name::kHashShift);
225 // Compute the hash of the name (use entire hash field).
226 ASSERT(name->HasHashCode());
227 uint32_t field = name->hash_field();
228 // Using only the low bits in 64-bit mode is unlikely to increase the
229 // risk of collision even if the heap is spread over an area larger than
230 // 4Gb (and not at all if it isn't).
231 uint32_t map_low32bits =
232 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(map));
233 // We always set the in_loop bit to zero when generating the lookup code
234 // so do it here too so the hash codes match.
// NOTE(review): the original line declaring `uint32_t iflags =` is missing
// from this excerpt; the next line is its right-hand side.
236 (static_cast<uint32_t>(flags) & ~Code::kFlagsNotUsedInLookup);
237 // Base the offset on a simple combination of name, flags, and map.
238 uint32_t key = (map_low32bits + field) ^ iflags;
239 return key & ((kPrimaryTableSize - 1) << kHeapObjectTagSize);
242 // Hash algorithm for the secondary table. This algorithm is replicated in
243 // assembler for every architecture. Returns an index into the table that
244 // is scaled by 1 << kHeapObjectTagSize.
245 static int SecondaryOffset(Name* name, Code::Flags flags, int seed) {
246 // Use the seed from the primary cache in the secondary cache.
247 uint32_t name_low32bits =
248 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name));
249 // We always set the in_loop bit to zero when generating the lookup code
250 // so do it here too so the hash codes match.
// NOTE(review): the original line declaring `uint32_t iflags =` is missing
// from this excerpt; the next line is its right-hand side.
252 (static_cast<uint32_t>(flags) & ~Code::kFlagsNotUsedInLookup);
253 uint32_t key = (seed - name_low32bits) + iflags;
254 return key & ((kSecondaryTableSize - 1) << kHeapObjectTagSize);
257 // Compute the entry for a given offset in exactly the same way as
258 // we do in generated code. We generate a hash code that already
259 // ends in Name::kHashShift 0s. Then we multiply it so it is a multiple
260 // of sizeof(Entry). This makes it easier to avoid making mistakes
261 // in the hashed offset computations.
262 static Entry* entry(Entry* table, int offset) {
263 const int multiplier = sizeof(*table) >> Name::kHashShift;
264 return reinterpret_cast<Entry*>(
265 reinterpret_cast<Address>(table) + offset * multiplier);
// Table geometry: 2^11 primary entries, 2^9 secondary entries.
268 static const int kPrimaryTableBits = 11;
269 static const int kPrimaryTableSize = (1 << kPrimaryTableBits);
270 static const int kSecondaryTableBits = 9;
271 static const int kSecondaryTableSize = (1 << kSecondaryTableBits);
// Backing storage for the two cache levels.
273 Entry primary_[kPrimaryTableSize];
274 Entry secondary_[kSecondaryTableSize];
277 friend class Isolate;
278 friend class SCTableReference;
280 DISALLOW_COPY_AND_ASSIGN(StubCache);
284 // ------------------------------------------------------------------------
287 // Support functions for IC stubs for callbacks.
288 DECLARE_RUNTIME_FUNCTION(MaybeObject*, StoreCallbackProperty);
291 // Support functions for IC stubs for interceptors.
292 DECLARE_RUNTIME_FUNCTION(MaybeObject*, LoadPropertyWithInterceptorOnly);
293 DECLARE_RUNTIME_FUNCTION(MaybeObject*, LoadPropertyWithInterceptorForLoad);
294 DECLARE_RUNTIME_FUNCTION(MaybeObject*, LoadPropertyWithInterceptorForCall);
295 DECLARE_RUNTIME_FUNCTION(MaybeObject*, StoreInterceptorProperty);
296 DECLARE_RUNTIME_FUNCTION(MaybeObject*, KeyedLoadPropertyWithInterceptor);
// Controls whether CheckPrototypes validates every map in the chain or
// skips the receiver's own map (see StubCompiler::CheckPrototypes below).
299 enum PrototypeCheckType { CHECK_ALL_MAPS, SKIP_RECEIVER };
// Distinguishes element (indexed) from named-property IC checks.
300 enum IcCheckType { ELEMENT, PROPERTY };
303 // The stub compilers compile stubs for the stub cache.
// Base class: owns a MacroAssembler and emits platform-specific stub code.
// NOTE(review): several original lines (access specifiers, some parameter
// lists, closing braces) are missing from this excerpt.
304 class StubCompiler BASE_EMBEDDED {
306 explicit StubCompiler(Isolate* isolate,
307 ExtraICState extra_ic_state = kNoExtraICState)
308 : isolate_(isolate), extra_ic_state_(extra_ic_state),
309 masm_(isolate, NULL, 256), failure_(NULL) { }
// Compile the generic (non-map-specific) load/store ICs for each IC state.
311 Handle<Code> CompileLoadInitialize(Code::Flags flags);
312 Handle<Code> CompileLoadPreMonomorphic(Code::Flags flags);
313 Handle<Code> CompileLoadMegamorphic(Code::Flags flags);
315 Handle<Code> CompileStoreInitialize(Code::Flags flags);
316 Handle<Code> CompileStorePreMonomorphic(Code::Flags flags);
317 Handle<Code> CompileStoreGeneric(Code::Flags flags);
318 Handle<Code> CompileStoreMegamorphic(Code::Flags flags);
320 // Static functions for generating parts of stubs.
321 static void GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
325 // Helper function used to check that the dictionary doesn't contain
326 // the property. This function may return false negatives, so miss_label
327 // must always call a backup property check that is complete.
328 // This function is safe to call if the receiver has fast properties.
329 // Name must be unique and receiver must be a heap object.
330 static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
337 // Generates prototype loading code that uses the objects from the
338 // context we were in when this function was called. If the context
339 // has changed, a jump to miss is performed. This ties the generated
340 // code to a particular context and so must not be used in cases
341 // where the generated code is not allowed to have references to
342 // objects from a context.
343 static void GenerateDirectLoadGlobalFunctionPrototype(MacroAssembler* masm,
348 static void GenerateFastPropertyLoad(MacroAssembler* masm,
353 Representation representation);
355 static void GenerateLoadArrayLength(MacroAssembler* masm,
360 static void GenerateLoadStringLength(MacroAssembler* masm,
366 static void GenerateLoadFunctionPrototype(MacroAssembler* masm,
372 // Generate code to check that a global property cell is empty. Create
373 // the property cell at compilation time if no cell exists for the
375 static void GenerateCheckPropertyCell(MacroAssembler* masm,
376 Handle<JSGlobalObject> global,
381 static void TailCallBuiltin(MacroAssembler* masm, Builtins::Name name);
383 // Generates code that verifies that the property holder has not changed
384 // (checking maps of objects in the prototype chain for fast and global
385 // objects or doing negative lookup for slow objects, ensures that the
386 // property cells for global objects are still empty) and checks that the map
387 // of the holder has not changed. If necessary the function also generates
388 // code for security check in case of global object holders. Helps to make
389 // sure that the current IC is still valid.
391 // The scratch and holder registers are always clobbered, but the object
392 // register is only clobbered if it is the same as the holder register. The
393 // function returns a register containing the holder - either object_reg or
395 Register CheckPrototypes(Handle<HeapType> type,
397 Handle<JSObject> holder,
403 PrototypeCheckType check = CHECK_ALL_MAPS);
405 void GenerateBooleanCheck(Register object, Label* miss);
// Finalize the assembled code into a Code object with the given flags.
408 Handle<Code> GetCodeWithFlags(Code::Flags flags, const char* name);
409 Handle<Code> GetCodeWithFlags(Code::Flags flags, Handle<Name> name);
411 ExtraICState extra_state() { return extra_ic_state_; }
413 MacroAssembler* masm() { return &masm_; }
414 void set_failure(Failure* failure) { failure_ = failure; }
416 static void LookupPostInterceptor(Handle<JSObject> holder,
418 LookupResult* lookup);
420 Isolate* isolate() { return isolate_; }
421 Heap* heap() { return isolate()->heap(); }
422 Factory* factory() { return isolate()->factory(); }
424 static void GenerateTailCall(MacroAssembler* masm, Handle<Code> code);
428 const ExtraICState extra_ic_state_;
429 MacroAssembler masm_;
// Whether initial receiver checks are performed or skipped by a frontend.
434 enum FrontendCheckType { PERFORM_INITIAL_CHECKS, SKIP_INITIAL_CHECKS };
// Shared base for load/store stub compilers: holds the IC kind, cache
// holder flag, and the platform register assignment used by subclasses.
// NOTE(review): some original lines are missing from this excerpt.
437 class BaseLoadStoreStubCompiler: public StubCompiler {
439 BaseLoadStoreStubCompiler(Isolate* isolate,
441 ExtraICState extra_ic_state = kNoExtraICState,
442 InlineCacheHolderFlag cache_holder = OWN_MAP)
443 : StubCompiler(isolate, extra_ic_state),
445 cache_holder_(cache_holder) {
446 InitializeRegisters();
448 virtual ~BaseLoadStoreStubCompiler() { }
450 Handle<Code> CompileMonomorphicIC(Handle<HeapType> type,
451 Handle<Code> handler,
454 Handle<Code> CompilePolymorphicIC(TypeHandleList* types,
455 CodeHandleList* handlers,
// Maps an IC kind to its miss-handler builtin.
// NOTE(review): the `switch (kind)` header and closing braces are missing
// from this excerpt.
460 static Builtins::Name MissBuiltin(Code::Kind kind) {
462 case Code::LOAD_IC: return Builtins::kLoadIC_Miss;
463 case Code::STORE_IC: return Builtins::kStoreIC_Miss;
464 case Code::KEYED_LOAD_IC: return Builtins::kKeyedLoadIC_Miss;
465 case Code::KEYED_STORE_IC: return Builtins::kKeyedStoreIC_Miss;
466 default: UNREACHABLE();
468 return Builtins::kLoadIC_Miss;
// Frontend hooks implemented by Load/Store subclasses: emit the receiver
// checks before the handler body (header) and the miss path after (footer).
472 virtual Register HandlerFrontendHeader(Handle<HeapType> type,
474 Handle<JSObject> holder,
478 virtual void HandlerFrontendFooter(Handle<Name> name, Label* miss) = 0;
480 Register HandlerFrontend(Handle<HeapType> type,
482 Handle<JSObject> holder,
485 Handle<Code> GetCode(Code::Kind kind,
489 Handle<Code> GetICCode(Code::Kind kind,
492 InlineCacheState state = MONOMORPHIC);
493 Code::Kind kind() { return kind_; }
// Picks the logger tag for a compiled stub based on its kind and IC state
// (monomorphic vs. polymorphic); non-IC code is tagged STUB_TAG.
495 Logger::LogEventsAndTags log_kind(Handle<Code> code) {
496 if (!code->is_inline_cache_stub()) return Logger::STUB_TAG;
497 if (kind_ == Code::LOAD_IC) {
498 return code->ic_state() == MONOMORPHIC
499 ? Logger::LOAD_IC_TAG : Logger::LOAD_POLYMORPHIC_IC_TAG;
500 } else if (kind_ == Code::KEYED_LOAD_IC) {
501 return code->ic_state() == MONOMORPHIC
502 ? Logger::KEYED_LOAD_IC_TAG : Logger::KEYED_LOAD_POLYMORPHIC_IC_TAG;
503 } else if (kind_ == Code::STORE_IC) {
504 return code->ic_state() == MONOMORPHIC
505 ? Logger::STORE_IC_TAG : Logger::STORE_POLYMORPHIC_IC_TAG;
507 return code->ic_state() == MONOMORPHIC
508 ? Logger::KEYED_STORE_IC_TAG : Logger::KEYED_STORE_POLYMORPHIC_IC_TAG;
511 void JitEvent(Handle<Name> name, Handle<Code> code);
// Register roles filled in per-platform by subclasses via registers_.
513 virtual Register receiver() = 0;
514 virtual Register name() = 0;
515 virtual Register scratch1() = 0;
516 virtual Register scratch2() = 0;
517 virtual Register scratch3() = 0;
519 void InitializeRegisters();
521 bool IncludesNumberType(TypeHandleList* types);
524 InlineCacheHolderFlag cache_holder_;
525 Register* registers_;
// Compiles load IC handlers (field, callback, constant, interceptor,
// getter, nonexistent, global) for named property loads.
// NOTE(review): some original lines are missing from this excerpt.
529 class LoadStubCompiler: public BaseLoadStoreStubCompiler {
531 LoadStubCompiler(Isolate* isolate,
532 ExtraICState extra_ic_state = kNoExtraICState,
533 InlineCacheHolderFlag cache_holder = OWN_MAP,
534 Code::Kind kind = Code::LOAD_IC)
535 : BaseLoadStoreStubCompiler(isolate, kind, extra_ic_state,
537 virtual ~LoadStubCompiler() { }
// One Compile* entry point per load-handler flavor; each returns the
// compiled handler Code object.
539 Handle<Code> CompileLoadField(Handle<HeapType> type,
540 Handle<JSObject> holder,
543 Representation representation);
545 Handle<Code> CompileLoadCallback(Handle<HeapType> type,
546 Handle<JSObject> holder,
548 Handle<ExecutableAccessorInfo> callback);
550 Handle<Code> CompileLoadCallback(Handle<HeapType> type,
551 Handle<JSObject> holder,
553 const CallOptimization& call_optimization);
555 Handle<Code> CompileLoadConstant(Handle<HeapType> type,
556 Handle<JSObject> holder,
558 Handle<Object> value);
560 Handle<Code> CompileLoadInterceptor(Handle<HeapType> type,
561 Handle<JSObject> holder,
564 Handle<Code> CompileLoadViaGetter(Handle<HeapType> type,
565 Handle<JSObject> holder,
567 Handle<JSFunction> getter);
569 static void GenerateLoadViaGetter(MacroAssembler* masm,
570 Handle<HeapType> type,
572 Handle<JSFunction> getter);
574 Handle<Code> CompileLoadNonexistent(Handle<HeapType> type,
575 Handle<JSObject> last,
578 Handle<Code> CompileLoadGlobal(Handle<HeapType> type,
579 Handle<GlobalObject> holder,
580 Handle<PropertyCell> cell,
582 bool is_dont_delete);
584 static Register* registers();
// Derives the contextual mode (e.g. contextual vs. non-contextual load)
// from the extra IC state.
587 ContextualMode contextual_mode() {
588 return LoadIC::GetContextualMode(extra_state());
591 virtual Register HandlerFrontendHeader(Handle<HeapType> type,
593 Handle<JSObject> holder,
597 virtual void HandlerFrontendFooter(Handle<Name> name, Label* miss);
599 Register CallbackHandlerFrontend(Handle<HeapType> type,
601 Handle<JSObject> holder,
603 Handle<Object> callback);
604 void NonexistentHandlerFrontend(Handle<HeapType> type,
605 Handle<JSObject> last,
// Generate* helpers emit the body of the corresponding handler kind.
608 void GenerateLoadField(Register reg,
609 Handle<JSObject> holder,
611 Representation representation);
612 void GenerateLoadConstant(Handle<Object> value);
613 void GenerateLoadCallback(Register reg,
614 Handle<ExecutableAccessorInfo> callback);
615 void GenerateLoadCallback(const CallOptimization& call_optimization,
616 Handle<Map> receiver_map);
617 void GenerateLoadInterceptor(Register holder_reg,
618 Handle<Object> object,
619 Handle<JSObject> holder,
620 LookupResult* lookup,
622 void GenerateLoadPostInterceptor(Register reg,
623 Handle<JSObject> interceptor_holder,
625 LookupResult* lookup);
// Load ICs use the platform register table as: receiver, name, then
// four scratch registers.
627 virtual Register receiver() { return registers_[0]; }
628 virtual Register name() { return registers_[1]; }
629 virtual Register scratch1() { return registers_[2]; }
630 virtual Register scratch2() { return registers_[3]; }
631 virtual Register scratch3() { return registers_[4]; }
632 Register scratch4() { return registers_[5]; }
// Load stub compiler specialization for keyed (indexed) loads; reuses the
// LoadStubCompiler machinery with kind Code::KEYED_LOAD_IC.
636 class KeyedLoadStubCompiler: public LoadStubCompiler {
638 KeyedLoadStubCompiler(Isolate* isolate,
639 ExtraICState extra_ic_state = kNoExtraICState,
640 InlineCacheHolderFlag cache_holder = OWN_MAP)
641 : LoadStubCompiler(isolate, extra_ic_state, cache_holder,
642 Code::KEYED_LOAD_IC) { }
// Compiles an element-load handler for a single receiver map.
644 Handle<Code> CompileLoadElement(Handle<Map> receiver_map);
// Fills `handlers` with one element handler per receiver map.
646 void CompileElementHandlers(MapHandleList* receiver_maps,
647 CodeHandleList* handlers);
649 static void GenerateLoadDictionaryElement(MacroAssembler* masm);
652 static Register* registers();
653 friend class BaseLoadStoreStubCompiler;
// Compiles store IC handlers (field, transition, callback, interceptor,
// setter) for named property stores.
// NOTE(review): some original lines are missing from this excerpt.
657 class StoreStubCompiler: public BaseLoadStoreStubCompiler {
659 StoreStubCompiler(Isolate* isolate,
660 ExtraICState extra_ic_state,
661 Code::Kind kind = Code::STORE_IC)
662 : BaseLoadStoreStubCompiler(isolate, kind, extra_ic_state) {}
664 virtual ~StoreStubCompiler() { }
// Store with a map transition (adding a property) vs. store into an
// existing field.
666 Handle<Code> CompileStoreTransition(Handle<JSObject> object,
667 LookupResult* lookup,
668 Handle<Map> transition,
671 Handle<Code> CompileStoreField(Handle<JSObject> object,
672 LookupResult* lookup,
675 void GenerateNegativeHolderLookup(MacroAssembler* masm,
676 Handle<JSObject> holder,
681 void GenerateStoreTransition(MacroAssembler* masm,
682 Handle<JSObject> object,
683 LookupResult* lookup,
684 Handle<Map> transition,
686 Register receiver_reg,
695 void GenerateStoreField(MacroAssembler* masm,
696 Handle<JSObject> object,
697 LookupResult* lookup,
698 Register receiver_reg,
705 Handle<Code> CompileStoreCallback(Handle<JSObject> object,
706 Handle<JSObject> holder,
708 Handle<ExecutableAccessorInfo> callback);
710 Handle<Code> CompileStoreCallback(Handle<JSObject> object,
711 Handle<JSObject> holder,
713 const CallOptimization& call_optimization);
715 static void GenerateStoreViaSetter(MacroAssembler* masm,
716 Handle<HeapType> type,
717 Handle<JSFunction> setter);
719 Handle<Code> CompileStoreViaSetter(Handle<JSObject> object,
720 Handle<JSObject> holder,
722 Handle<JSFunction> setter);
724 Handle<Code> CompileStoreInterceptor(Handle<JSObject> object,
// Maps a store IC kind to its slow-path builtin.
// NOTE(review): the `switch (kind)` header and closing braces are missing
// from this excerpt.
727 static Builtins::Name SlowBuiltin(Code::Kind kind) {
729 case Code::STORE_IC: return Builtins::kStoreIC_Slow;
730 case Code::KEYED_STORE_IC: return Builtins::kKeyedStoreIC_Slow;
731 default: UNREACHABLE();
733 return Builtins::kStoreIC_Slow;
737 virtual Register HandlerFrontendHeader(Handle<HeapType> type,
739 Handle<JSObject> holder,
743 virtual void HandlerFrontendFooter(Handle<Name> name, Label* miss);
744 void GenerateRestoreName(MacroAssembler* masm,
// Store ICs use the platform register table as: receiver, name, value,
// then three scratch registers.
748 virtual Register receiver() { return registers_[0]; }
749 virtual Register name() { return registers_[1]; }
750 Register value() { return registers_[2]; }
751 virtual Register scratch1() { return registers_[3]; }
752 virtual Register scratch2() { return registers_[4]; }
753 virtual Register scratch3() { return registers_[5]; }
756 static Register* registers();
759 friend class BaseLoadStoreStubCompiler;
// Store stub compiler specialization for keyed (indexed) stores; reuses
// the StoreStubCompiler machinery with kind Code::KEYED_STORE_IC.
763 class KeyedStoreStubCompiler: public StoreStubCompiler {
765 KeyedStoreStubCompiler(Isolate* isolate,
766 ExtraICState extra_ic_state)
767 : StoreStubCompiler(isolate, extra_ic_state, Code::KEYED_STORE_IC) {}
769 Handle<Code> CompileStoreElement(Handle<Map> receiver_map);
// Polymorphic store stub over several receiver maps; transitioned_maps
// pairs with handler_stubs for stores that cause elements-kind transitions.
771 Handle<Code> CompileStorePolymorphic(MapHandleList* receiver_maps,
772 CodeHandleList* handler_stubs,
773 MapHandleList* transitioned_maps);
775 Handle<Code> CompileStoreElementPolymorphic(MapHandleList* receiver_maps);
777 static void GenerateStoreDictionaryElement(MacroAssembler* masm);
780 static Register* registers();
// Derives the keyed-access store mode from the extra IC state.
782 KeyedAccessStoreMode store_mode() {
783 return KeyedStoreIC::GetKeyedAccessStoreMode(extra_state());
// Register used to hold the transition map (fourth platform register).
786 Register transition_map() {
787 return registers()[3];
790 friend class BaseLoadStoreStubCompiler;
794 // Holds information about possible function call optimizations.
// Classifies a callee as a constant-function call and/or a "simple" API
// call (one that can go through the fast API call builtin).  Accessors
// below are guarded by ASSERTs on the corresponding is_* predicate.
// NOTE(review): access specifiers and some lines are missing from this
// excerpt.
795 class CallOptimization BASE_EMBEDDED {
797 explicit CallOptimization(LookupResult* lookup);
799 explicit CallOptimization(Handle<JSFunction> function);
// True when the callee resolved to a known constant JSFunction.
801 bool is_constant_call() const {
802 return !constant_function_.is_null();
805 Handle<JSFunction> constant_function() const {
806 ASSERT(is_constant_call());
807 return constant_function_;
810 bool is_simple_api_call() const {
811 return is_simple_api_call_;
// Receiver template / call handler info; only valid for simple API calls.
814 Handle<FunctionTemplateInfo> expected_receiver_type() const {
815 ASSERT(is_simple_api_call());
816 return expected_receiver_type_;
819 Handle<CallHandlerInfo> api_call_info() const {
820 ASSERT(is_simple_api_call());
821 return api_call_info_;
829 Handle<JSObject> LookupHolderOfExpectedType(
830 Handle<Map> receiver_map,
831 HolderLookup* holder_lookup) const;
833 // Check if the api holder is between the receiver and the holder.
834 bool IsCompatibleReceiver(Handle<Object> receiver,
835 Handle<JSObject> holder) const;
838 void Initialize(Handle<JSFunction> function);
840 // Determines whether the given function can be called using the
841 // fast api call builtin.
842 void AnalyzePossibleApiFunction(Handle<JSFunction> function);
844 Handle<JSFunction> constant_function_;
845 bool is_simple_api_call_;
846 Handle<FunctionTemplateInfo> expected_receiver_type_;
847 Handle<CallHandlerInfo> api_call_info_;
851 } } // namespace v8::internal
853 #endif // V8_STUB_CACHE_H_