Rewrite StoreIC handling using the LookupIterator. Continued from patch 494153002
[platform/upstream/v8.git] / src/stub-cache.h
// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_STUB_CACHE_H_
#define V8_STUB_CACHE_H_

#include "src/allocation.h"
#include "src/arguments.h"
#include "src/code-stubs.h"
#include "src/ic-inl.h"
#include "src/macro-assembler.h"
#include "src/objects.h"
#include "src/zone-inl.h"

namespace v8 {
namespace internal {


// The stub cache is used for megamorphic property accesses.
// It maps (map, name, type) to property access handlers. The cache does not
// need explicit invalidation when a prototype chain is modified, since the
// handlers verify the chain.


class CallOptimization;
class SmallMapList;
class StubCache;


class SCTableReference {
 public:
  Address address() const { return address_; }

 private:
  explicit SCTableReference(Address address) : address_(address) {}

  Address address_;

  friend class StubCache;
};


class StubCache {
 public:
  struct Entry {
    Name* key;
    Code* value;
    Map* map;
  };

  void Initialize();
  // Access cache for entry hash(name, map).
  Code* Set(Name* name, Map* map, Code* code);
  Code* Get(Name* name, Map* map, Code::Flags flags);
  // Clear the lookup table (@ mark compact collection).
  void Clear();
  // Collect all maps that match the name and flags.
  void CollectMatchingMaps(SmallMapList* types,
                           Handle<Name> name,
                           Code::Flags flags,
                           Handle<Context> native_context,
                           Zone* zone);
  // Generate code for probing the stub cache table.
  // Arguments extra, extra2 and extra3 may be used to pass additional scratch
  // registers. Set to no_reg if not needed.
  void GenerateProbe(MacroAssembler* masm,
                     Code::Flags flags,
                     Register receiver,
                     Register name,
                     Register scratch,
                     Register extra,
                     Register extra2 = no_reg,
                     Register extra3 = no_reg);
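
  // In C++ terms the emitted probe does roughly the following (a hedged
  // sketch only; the real sequence is architecture-specific assembly and
  // also re-validates the code flags before dispatching):
  //
  //   int primary = PrimaryOffset(name, flags, map);
  //   Entry* p = entry(primary_, primary);
  //   if (p->key == name && p->map == map) /* jump to */ p->value;
  //   Entry* s = entry(secondary_, SecondaryOffset(name, flags, primary));
  //   if (s->key == name && s->map == map) /* jump to */ s->value;
  //   // ... otherwise fall through to the caller's miss handling.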

  enum Table {
    kPrimary,
    kSecondary
  };

  SCTableReference key_reference(StubCache::Table table) {
    return SCTableReference(
        reinterpret_cast<Address>(&first_entry(table)->key));
  }

  SCTableReference map_reference(StubCache::Table table) {
    return SCTableReference(
        reinterpret_cast<Address>(&first_entry(table)->map));
  }

  SCTableReference value_reference(StubCache::Table table) {
    return SCTableReference(
        reinterpret_cast<Address>(&first_entry(table)->value));
  }

  StubCache::Entry* first_entry(StubCache::Table table) {
    switch (table) {
      case StubCache::kPrimary: return StubCache::primary_;
      case StubCache::kSecondary: return StubCache::secondary_;
    }
    UNREACHABLE();
    return NULL;
  }

  Isolate* isolate() { return isolate_; }

  // Setting the entry size such that the index is shifted by Name::kHashShift
  // is convenient; shifting down the length field (to extract the hash code)
  // automatically discards the hash bit field.
  static const int kCacheIndexShift = Name::kHashShift;

 private:
  explicit StubCache(Isolate* isolate);

  // The stub cache has a primary and secondary level.  The two levels have
  // different hashing algorithms in order to avoid simultaneous collisions
  // in both caches.  Unlike a probing strategy (quadratic or otherwise), the
  // update strategy is fairly clear and simple:  Any existing entry
  // in the primary cache is moved to the secondary cache, and secondary cache
  // entries are overwritten.

  // Hash algorithm for the primary table.  This algorithm is replicated in
  // assembler for every architecture.  Returns an index into the table that
  // is scaled by 1 << kCacheIndexShift.
  static int PrimaryOffset(Name* name, Code::Flags flags, Map* map) {
    STATIC_ASSERT(kCacheIndexShift == Name::kHashShift);
    // Compute the hash of the name (use entire hash field).
    DCHECK(name->HasHashCode());
    uint32_t field = name->hash_field();
    // Using only the low bits in 64-bit mode is unlikely to increase the
    // risk of collision even if the heap is spread over an area larger than
    // 4Gb (and not at all if it isn't).
    uint32_t map_low32bits =
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(map));
    // We always set the in_loop bit to zero when generating the lookup code
    // so do it here too so the hash codes match.
    uint32_t iflags =
        (static_cast<uint32_t>(flags) & ~Code::kFlagsNotUsedInLookup);
    // Base the offset on a simple combination of name, flags, and map.
    uint32_t key = (map_low32bits + field) ^ iflags;
    return key & ((kPrimaryTableSize - 1) << kCacheIndexShift);
  }

  // Hash algorithm for the secondary table.  This algorithm is replicated in
  // assembler for every architecture.  Returns an index into the table that
  // is scaled by 1 << kCacheIndexShift.
  static int SecondaryOffset(Name* name, Code::Flags flags, int seed) {
    // Use the seed from the primary cache in the secondary cache.
    uint32_t name_low32bits =
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name));
    // We always set the in_loop bit to zero when generating the lookup code
    // so do it here too so the hash codes match.
    uint32_t iflags =
        (static_cast<uint32_t>(flags) & ~Code::kFlagsNotUsedInLookup);
    uint32_t key = (seed - name_low32bits) + iflags;
    return key & ((kSecondaryTableSize - 1) << kCacheIndexShift);
  }
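
  // A minimal sketch of the update strategy described above (illustrative
  // only; the real StubCache::Set() also normalizes the code flags and does
  // not retire empty placeholder entries):
  //
  //   int primary_offset = PrimaryOffset(name, flags, map);
  //   Entry* primary = entry(primary_, primary_offset);
  //   if (/* primary slot holds a useful handler */) {
  //     // Retire the old primary entry to the secondary table; whatever
  //     // occupied that secondary slot is simply overwritten.
  //     int seed = PrimaryOffset(primary->key, old_flags, primary->map);
  //     *entry(secondary_, SecondaryOffset(primary->key, old_flags, seed)) =
  //         *primary;
  //   }
  //   primary->key = name;
  //   primary->map = map;
  //   primary->value = code;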

  // Compute the entry for a given offset in exactly the same way as
  // we do in generated code.  We generate a hash code that already
  // ends in Name::kHashShift 0s.  Then we multiply it so it is a multiple
  // of sizeof(Entry).  This makes it easier to avoid making mistakes
  // in the hashed offset computations.
  static Entry* entry(Entry* table, int offset) {
    const int multiplier = sizeof(*table) >> Name::kHashShift;
    return reinterpret_cast<Entry*>(
        reinterpret_cast<Address>(table) + offset * multiplier);
  }
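
  // For example, assuming Name::kHashShift == 2 and a 64-bit build where
  // sizeof(Entry) == 3 * kPointerSize == 24 (illustrative numbers, not
  // guaranteed here): the multiplier is 24 >> 2 == 6, so an offset that is a
  // multiple of 4 (1 << kCacheIndexShift) becomes a byte displacement that is
  // a multiple of 24, i.e. it always lands on an Entry boundary.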

  static const int kPrimaryTableBits = 11;
  static const int kPrimaryTableSize = (1 << kPrimaryTableBits);
  static const int kSecondaryTableBits = 9;
  static const int kSecondaryTableSize = (1 << kSecondaryTableBits);

  Entry primary_[kPrimaryTableSize];
  Entry secondary_[kSecondaryTableSize];
  Isolate* isolate_;

  friend class Isolate;
  friend class SCTableReference;

  DISALLOW_COPY_AND_ASSIGN(StubCache);
};


// ------------------------------------------------------------------------


// Support functions for IC stubs for callbacks.
DECLARE_RUNTIME_FUNCTION(StoreCallbackProperty);


// Support functions for IC stubs for interceptors.
DECLARE_RUNTIME_FUNCTION(LoadPropertyWithInterceptorOnly);
DECLARE_RUNTIME_FUNCTION(LoadPropertyWithInterceptor);
DECLARE_RUNTIME_FUNCTION(LoadElementWithInterceptor);
DECLARE_RUNTIME_FUNCTION(StorePropertyWithInterceptor);


enum PrototypeCheckType { CHECK_ALL_MAPS, SKIP_RECEIVER };
enum IcCheckType { ELEMENT, PROPERTY };


class PropertyAccessCompiler BASE_EMBEDDED {
 public:
  static Builtins::Name MissBuiltin(Code::Kind kind) {
    switch (kind) {
      case Code::LOAD_IC:
        return Builtins::kLoadIC_Miss;
      case Code::STORE_IC:
        return Builtins::kStoreIC_Miss;
      case Code::KEYED_LOAD_IC:
        return Builtins::kKeyedLoadIC_Miss;
      case Code::KEYED_STORE_IC:
        return Builtins::kKeyedStoreIC_Miss;
      default:
        UNREACHABLE();
    }
    return Builtins::kLoadIC_Miss;
  }

  static void TailCallBuiltin(MacroAssembler* masm, Builtins::Name name);

 protected:
  PropertyAccessCompiler(Isolate* isolate, Code::Kind kind,
                         CacheHolderFlag cache_holder)
      : registers_(GetCallingConvention(kind)),
        kind_(kind),
        cache_holder_(cache_holder),
        isolate_(isolate),
        masm_(isolate, NULL, 256) {}

  Code::Kind kind() const { return kind_; }
  CacheHolderFlag cache_holder() const { return cache_holder_; }
  MacroAssembler* masm() { return &masm_; }
  Isolate* isolate() const { return isolate_; }
  Heap* heap() const { return isolate()->heap(); }
  Factory* factory() const { return isolate()->factory(); }

  Register receiver() const { return registers_[0]; }
  Register name() const { return registers_[1]; }
  Register scratch1() const { return registers_[2]; }
  Register scratch2() const { return registers_[3]; }
  Register scratch3() const { return registers_[4]; }

  // Calling convention between indexed store IC and handler.
  Register transition_map() const { return scratch1(); }

  static Register* GetCallingConvention(Code::Kind);
  static Register* load_calling_convention();
  static Register* store_calling_convention();
  static Register* keyed_store_calling_convention();

  Register* registers_;

  static void GenerateTailCall(MacroAssembler* masm, Handle<Code> code);

  Handle<Code> GetCodeWithFlags(Code::Flags flags, const char* name);
  Handle<Code> GetCodeWithFlags(Code::Flags flags, Handle<Name> name);

 private:
  Code::Kind kind_;
  CacheHolderFlag cache_holder_;

  Isolate* isolate_;
  MacroAssembler masm_;
};


class PropertyICCompiler : public PropertyAccessCompiler {
 public:
  // Finds the Code object stored in the Heap::non_monomorphic_cache().
  static Code* FindPreMonomorphic(Isolate* isolate, Code::Kind kind,
                                  ExtraICState extra_ic_state);

  // Named
  static Handle<Code> ComputeLoad(Isolate* isolate, InlineCacheState ic_state,
                                  ExtraICState extra_state);
  static Handle<Code> ComputeStore(Isolate* isolate, InlineCacheState ic_state,
                                   ExtraICState extra_state);

  static Handle<Code> ComputeMonomorphic(Code::Kind kind, Handle<Name> name,
                                         Handle<HeapType> type,
                                         Handle<Code> handler,
                                         ExtraICState extra_ic_state);
  static Handle<Code> ComputePolymorphic(Code::Kind kind, TypeHandleList* types,
                                         CodeHandleList* handlers,
                                         int number_of_valid_maps,
                                         Handle<Name> name,
                                         ExtraICState extra_ic_state);

  // Keyed
  static Handle<Code> ComputeKeyedLoadMonomorphic(Handle<Map> receiver_map);

  static Handle<Code> ComputeKeyedStoreMonomorphic(
      Handle<Map> receiver_map, StrictMode strict_mode,
      KeyedAccessStoreMode store_mode);
  static Handle<Code> ComputeKeyedLoadPolymorphic(MapHandleList* receiver_maps);
  static Handle<Code> ComputeKeyedStorePolymorphic(
      MapHandleList* receiver_maps, KeyedAccessStoreMode store_mode,
      StrictMode strict_mode);

  // Compare nil
  static Handle<Code> ComputeCompareNil(Handle<Map> receiver_map,
                                        CompareNilICStub* stub);


 private:
  PropertyICCompiler(Isolate* isolate, Code::Kind kind,
                     ExtraICState extra_ic_state = kNoExtraICState,
                     CacheHolderFlag cache_holder = kCacheOnReceiver)
      : PropertyAccessCompiler(isolate, kind, cache_holder),
        extra_ic_state_(extra_ic_state) {}

  static Handle<Code> Find(Handle<Name> name, Handle<Map> stub_holder_map,
                           Code::Kind kind,
                           ExtraICState extra_ic_state = kNoExtraICState,
                           CacheHolderFlag cache_holder = kCacheOnReceiver);

  Handle<Code> CompileLoadInitialize(Code::Flags flags);
  Handle<Code> CompileLoadPreMonomorphic(Code::Flags flags);
  Handle<Code> CompileLoadMegamorphic(Code::Flags flags);
  Handle<Code> CompileStoreInitialize(Code::Flags flags);
  Handle<Code> CompileStorePreMonomorphic(Code::Flags flags);
  Handle<Code> CompileStoreGeneric(Code::Flags flags);
  Handle<Code> CompileStoreMegamorphic(Code::Flags flags);

  Handle<Code> CompileMonomorphic(Handle<HeapType> type, Handle<Code> handler,
                                  Handle<Name> name, IcCheckType check);
  Handle<Code> CompilePolymorphic(TypeHandleList* types,
                                  CodeHandleList* handlers, Handle<Name> name,
                                  Code::StubType type, IcCheckType check);

  Handle<Code> CompileKeyedStoreMonomorphic(Handle<Map> receiver_map,
                                            KeyedAccessStoreMode store_mode);
  Handle<Code> CompileKeyedStorePolymorphic(MapHandleList* receiver_maps,
                                            KeyedAccessStoreMode store_mode);
  Handle<Code> CompileKeyedStorePolymorphic(MapHandleList* receiver_maps,
                                            CodeHandleList* handler_stubs,
                                            MapHandleList* transitioned_maps);

  bool IncludesNumberType(TypeHandleList* types);

  Handle<Code> GetCode(Code::Kind kind, Code::StubType type, Handle<Name> name,
                       InlineCacheState state = MONOMORPHIC);

  Logger::LogEventsAndTags log_kind(Handle<Code> code) {
    if (kind() == Code::LOAD_IC) {
      return code->ic_state() == MONOMORPHIC ? Logger::LOAD_IC_TAG
                                             : Logger::LOAD_POLYMORPHIC_IC_TAG;
    } else if (kind() == Code::KEYED_LOAD_IC) {
      return code->ic_state() == MONOMORPHIC
                 ? Logger::KEYED_LOAD_IC_TAG
                 : Logger::KEYED_LOAD_POLYMORPHIC_IC_TAG;
    } else if (kind() == Code::STORE_IC) {
      return code->ic_state() == MONOMORPHIC ? Logger::STORE_IC_TAG
                                             : Logger::STORE_POLYMORPHIC_IC_TAG;
    } else {
      DCHECK_EQ(Code::KEYED_STORE_IC, kind());
      return code->ic_state() == MONOMORPHIC
                 ? Logger::KEYED_STORE_IC_TAG
                 : Logger::KEYED_STORE_POLYMORPHIC_IC_TAG;
    }
  }

  const ExtraICState extra_ic_state_;
};


class PropertyHandlerCompiler : public PropertyAccessCompiler {
 public:
  static Handle<Code> Find(Handle<Name> name, Handle<Map> map, Code::Kind kind,
                           CacheHolderFlag cache_holder, Code::StubType type);

 protected:
  PropertyHandlerCompiler(Isolate* isolate, Code::Kind kind,
                          Handle<HeapType> type, Handle<JSObject> holder,
                          CacheHolderFlag cache_holder)
      : PropertyAccessCompiler(isolate, kind, cache_holder),
        type_(type),
        holder_(holder) {}

  virtual ~PropertyHandlerCompiler() {}

  virtual Register FrontendHeader(Register object_reg, Handle<Name> name,
                                  Label* miss) {
    UNREACHABLE();
    return receiver();
  }

  virtual void FrontendFooter(Handle<Name> name, Label* miss) { UNREACHABLE(); }

  Register Frontend(Register object_reg, Handle<Name> name);
  void NonexistentFrontendHeader(Handle<Name> name, Label* miss,
                                 Register scratch1, Register scratch2);

  // TODO(verwaest): Make non-static.
  static void GenerateFastApiCall(MacroAssembler* masm,
                                  const CallOptimization& optimization,
                                  Handle<Map> receiver_map, Register receiver,
                                  Register scratch, bool is_store, int argc,
                                  Register* values);

  // Helper function used to check that the dictionary doesn't contain
  // the property. This function may return false negatives, so miss_label
  // must always call a backup property check that is complete.
  // This function is safe to call if the receiver has fast properties.
  // Name must be unique and receiver must be a heap object.
  static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
                                               Label* miss_label,
                                               Register receiver,
                                               Handle<Name> name,
                                               Register r0,
                                               Register r1);

  // Generate code to check that a global property cell is empty. Create
  // the property cell at compilation time if no cell exists for the
  // property.
  static void GenerateCheckPropertyCell(MacroAssembler* masm,
                                        Handle<JSGlobalObject> global,
                                        Handle<Name> name,
                                        Register scratch,
                                        Label* miss);

  // Generates code that verifies that the property holder has not changed
  // (checking maps of objects in the prototype chain for fast and global
  // objects or doing negative lookup for slow objects, ensures that the
  // property cells for global objects are still empty) and checks that the map
  // of the holder has not changed. If necessary the function also generates
  // code for a security check in case of global object holders. Helps to make
  // sure that the current IC is still valid.
  //
  // The scratch and holder registers are always clobbered, but the object
  // register is only clobbered if it is the same as the holder register. The
  // function returns a register containing the holder - either object_reg or
  // holder_reg.
  Register CheckPrototypes(Register object_reg, Register holder_reg,
                           Register scratch1, Register scratch2,
                           Handle<Name> name, Label* miss,
                           PrototypeCheckType check = CHECK_ALL_MAPS);

  Handle<Code> GetCode(Code::Kind kind, Code::StubType type, Handle<Name> name);
  void set_type_for_object(Handle<Object> object) {
    type_ = IC::CurrentTypeOf(object, isolate());
  }
  void set_holder(Handle<JSObject> holder) { holder_ = holder; }
  Handle<HeapType> type() const { return type_; }
  Handle<JSObject> holder() const { return holder_; }

 private:
  Handle<HeapType> type_;
  Handle<JSObject> holder_;
};


class NamedLoadHandlerCompiler : public PropertyHandlerCompiler {
 public:
  NamedLoadHandlerCompiler(Isolate* isolate, Handle<HeapType> type,
                           Handle<JSObject> holder,
                           CacheHolderFlag cache_holder)
      : PropertyHandlerCompiler(isolate, Code::LOAD_IC, type, holder,
                                cache_holder) {}

  virtual ~NamedLoadHandlerCompiler() {}

  Handle<Code> CompileLoadField(Handle<Name> name, FieldIndex index);

  Handle<Code> CompileLoadCallback(Handle<Name> name,
                                   Handle<ExecutableAccessorInfo> callback);

  Handle<Code> CompileLoadCallback(Handle<Name> name,
                                   const CallOptimization& call_optimization);

  Handle<Code> CompileLoadConstant(Handle<Name> name, int constant_index);

  // The LookupIterator is used to perform a lookup behind the interceptor. If
  // the iterator points to a LookupIterator::PROPERTY, its access will be
  // inlined.
  Handle<Code> CompileLoadInterceptor(LookupIterator* it);

  Handle<Code> CompileLoadViaGetter(Handle<Name> name,
                                    Handle<JSFunction> getter);

  Handle<Code> CompileLoadGlobal(Handle<PropertyCell> cell, Handle<Name> name,
                                 bool is_configurable);

  // Static interface
  static Handle<Code> ComputeLoadNonexistent(Handle<Name> name,
                                             Handle<HeapType> type);

  static void GenerateLoadViaGetter(MacroAssembler* masm, Handle<HeapType> type,
                                    Register receiver,
                                    Handle<JSFunction> getter);

  static void GenerateLoadViaGetterForDeopt(MacroAssembler* masm) {
    GenerateLoadViaGetter(masm, Handle<HeapType>::null(), no_reg,
                          Handle<JSFunction>());
  }

  static void GenerateLoadFunctionPrototype(MacroAssembler* masm,
                                            Register receiver,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss_label);

  // These constants describe the structure of the interceptor arguments on the
  // stack. The arguments are pushed by the (platform-specific)
  // PushInterceptorArguments and read by LoadPropertyWithInterceptorOnly and
  // LoadWithInterceptor.
  static const int kInterceptorArgsNameIndex = 0;
  static const int kInterceptorArgsInfoIndex = 1;
  static const int kInterceptorArgsThisIndex = 2;
  static const int kInterceptorArgsHolderIndex = 3;
  static const int kInterceptorArgsLength = 4;

 protected:
  virtual Register FrontendHeader(Register object_reg, Handle<Name> name,
                                  Label* miss);

  virtual void FrontendFooter(Handle<Name> name, Label* miss);

 private:
  Handle<Code> CompileLoadNonexistent(Handle<Name> name);
  void GenerateLoadConstant(Handle<Object> value);
  void GenerateLoadCallback(Register reg,
                            Handle<ExecutableAccessorInfo> callback);
  void GenerateLoadCallback(const CallOptimization& call_optimization,
                            Handle<Map> receiver_map);
  void GenerateLoadInterceptor(Register holder_reg);
  void GenerateLoadInterceptorWithFollowup(LookupIterator* it,
                                           Register holder_reg);
  void GenerateLoadPostInterceptor(LookupIterator* it, Register reg);

  // Generates prototype loading code that uses the objects from the
  // context we were in when this function was called. If the context
  // has changed, a jump to miss is performed. This ties the generated
  // code to a particular context and so must not be used in cases
  // where the generated code is not allowed to have references to
  // objects from a context.
  static void GenerateDirectLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                        int index,
                                                        Register prototype,
                                                        Label* miss);


  Register scratch4() { return registers_[5]; }
};


class NamedStoreHandlerCompiler : public PropertyHandlerCompiler {
 public:
  explicit NamedStoreHandlerCompiler(Isolate* isolate, Handle<HeapType> type,
                                     Handle<JSObject> holder)
      : PropertyHandlerCompiler(isolate, Code::STORE_IC, type, holder,
                                kCacheOnReceiver) {}

  virtual ~NamedStoreHandlerCompiler() {}

  Handle<Code> CompileStoreTransition(Handle<Map> transition,
                                      Handle<Name> name);
  Handle<Code> CompileStoreField(LookupIterator* it);
  Handle<Code> CompileStoreCallback(Handle<JSObject> object, Handle<Name> name,
                                    Handle<ExecutableAccessorInfo> callback);
  Handle<Code> CompileStoreCallback(Handle<JSObject> object, Handle<Name> name,
                                    const CallOptimization& call_optimization);
  Handle<Code> CompileStoreViaSetter(Handle<JSObject> object, Handle<Name> name,
                                     Handle<JSFunction> setter);
  Handle<Code> CompileStoreInterceptor(Handle<Name> name);

  static void GenerateStoreViaSetter(MacroAssembler* masm,
                                     Handle<HeapType> type, Register receiver,
                                     Handle<JSFunction> setter);

  static void GenerateStoreViaSetterForDeopt(MacroAssembler* masm) {
    GenerateStoreViaSetter(masm, Handle<HeapType>::null(), no_reg,
                           Handle<JSFunction>());
  }

 protected:
  virtual Register FrontendHeader(Register object_reg, Handle<Name> name,
                                  Label* miss);

  virtual void FrontendFooter(Handle<Name> name, Label* miss);
  void GenerateRestoreName(Label* label, Handle<Name> name);

 private:
  void GenerateStoreTransition(Handle<Map> transition, Handle<Name> name,
                               Register receiver_reg, Register name_reg,
                               Register value_reg, Register scratch1,
                               Register scratch2, Register scratch3,
                               Label* miss_label, Label* slow);

  void GenerateStoreField(LookupIterator* lookup, Register value_reg,
                          Label* miss_label);

  static Builtins::Name SlowBuiltin(Code::Kind kind) {
    switch (kind) {
      case Code::STORE_IC: return Builtins::kStoreIC_Slow;
      case Code::KEYED_STORE_IC: return Builtins::kKeyedStoreIC_Slow;
      default: UNREACHABLE();
    }
    return Builtins::kStoreIC_Slow;
  }

  static Register value();
};


class ElementHandlerCompiler : public PropertyHandlerCompiler {
 public:
  explicit ElementHandlerCompiler(Isolate* isolate)
      : PropertyHandlerCompiler(isolate, Code::KEYED_LOAD_IC,
                                Handle<HeapType>::null(),
                                Handle<JSObject>::null(), kCacheOnReceiver) {}

  virtual ~ElementHandlerCompiler() {}

  void CompileElementHandlers(MapHandleList* receiver_maps,
                              CodeHandleList* handlers);

  static void GenerateLoadDictionaryElement(MacroAssembler* masm);
  static void GenerateStoreDictionaryElement(MacroAssembler* masm);
};


// Holds information about possible function call optimizations.
class CallOptimization BASE_EMBEDDED {
 public:
  explicit CallOptimization(Handle<JSFunction> function);

  bool is_constant_call() const {
    return !constant_function_.is_null();
  }

  Handle<JSFunction> constant_function() const {
    DCHECK(is_constant_call());
    return constant_function_;
  }

  bool is_simple_api_call() const {
    return is_simple_api_call_;
  }

  Handle<FunctionTemplateInfo> expected_receiver_type() const {
    DCHECK(is_simple_api_call());
    return expected_receiver_type_;
  }

  Handle<CallHandlerInfo> api_call_info() const {
    DCHECK(is_simple_api_call());
    return api_call_info_;
  }

  enum HolderLookup {
    kHolderNotFound,
    kHolderIsReceiver,
    kHolderFound
  };
  Handle<JSObject> LookupHolderOfExpectedType(
      Handle<Map> receiver_map,
      HolderLookup* holder_lookup) const;

  // Check if the api holder is between the receiver and the holder.
  bool IsCompatibleReceiver(Handle<Object> receiver,
                            Handle<JSObject> holder) const;

 private:
  void Initialize(Handle<JSFunction> function);

  // Determines whether the given function can be called using the
  // fast api call builtin.
  void AnalyzePossibleApiFunction(Handle<JSFunction> function);

  Handle<JSFunction> constant_function_;
  bool is_simple_api_call_;
  Handle<FunctionTemplateInfo> expected_receiver_type_;
  Handle<CallHandlerInfo> api_call_info_;
};


} }  // namespace v8::internal

#endif  // V8_STUB_CACHE_H_