// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_STUB_CACHE_H_
#define V8_STUB_CACHE_H_

#include "allocation.h"
#include "arguments.h"
#include "code-stubs.h"
#include "ic-inl.h"
#include "macro-assembler.h"
#include "objects.h"
#include "zone-inl.h"

namespace v8 {
namespace internal {


// The stub cache is used for megamorphic calls and property accesses.
// It maps (map, name, type) -> Code*.

// The design of the table reuses the inline cache stubs that are used for
// monomorphic calls. The beauty of this is that we do not have to
// invalidate the cache whenever a prototype map is changed.  The stub
// validates the map chain as in the monomorphic case.

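// Conceptually, a lookup hashes the name, the receiver map and the code
// flags into the primary table and, on a mismatch there, rehashes into the
// secondary table before falling back to the IC miss path.  A minimal
// sketch of that probe order (an illustration only; the real probe is
// emitted as architecture-specific assembly by StubCache::GenerateProbe and
// also re-checks the flags of the cached Code object):
//
//   Code* Probe(Name* name, Map* map, Code::Flags flags) {
//     int primary_offset = PrimaryOffset(name, flags, map);
//     Entry* primary = entry(primary_, primary_offset);
//     if (primary->key == name && primary->map == map) return primary->value;
//     Entry* secondary =
//         entry(secondary_, SecondaryOffset(name, flags, primary_offset));
//     if (secondary->key == name && secondary->map == map) {
//       return secondary->value;
//     }
//     return NULL;  // Miss: the generated stub jumps to the miss builtin.
//   }

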
class CallOptimization;
class SmallMapList;
class StubCache;


class SCTableReference {
 public:
  Address address() const { return address_; }

 private:
  explicit SCTableReference(Address address) : address_(address) {}

  Address address_;

  friend class StubCache;
};


class StubCache {
 public:
  struct Entry {
    Name* key;
    Code* value;
    Map* map;
  };

  void Initialize();

  Handle<JSObject> StubHolder(Handle<JSObject> receiver,
                              Handle<JSObject> holder);

  Handle<Code> FindIC(Handle<Name> name,
                      Handle<Map> stub_holder_map,
                      Code::Kind kind,
                      ExtraICState extra_state = kNoExtraICState,
                      InlineCacheHolderFlag cache_holder = OWN_MAP);

  Handle<Code> FindHandler(Handle<Name> name,
                           Handle<Map> map,
                           Code::Kind kind,
                           InlineCacheHolderFlag cache_holder = OWN_MAP);

  Handle<Code> ComputeMonomorphicIC(Handle<Name> name,
                                    Handle<HeapType> type,
                                    Handle<Code> handler,
                                    ExtraICState extra_ic_state);

  Handle<Code> ComputeLoadNonexistent(Handle<Name> name, Handle<HeapType> type);

  Handle<Code> ComputeKeyedLoadElement(Handle<Map> receiver_map);

  Handle<Code> ComputeKeyedStoreElement(Handle<Map> receiver_map,
                                        StrictModeFlag strict_mode,
                                        KeyedAccessStoreMode store_mode);

  // ---

  Handle<Code> ComputeLoad(InlineCacheState ic_state, ExtraICState extra_state);
  Handle<Code> ComputeStore(InlineCacheState ic_state,
                            ExtraICState extra_state);

  // ---

  Handle<Code> ComputeCompareNil(Handle<Map> receiver_map,
                                 CompareNilICStub& stub);

  // ---

  Handle<Code> ComputeLoadElementPolymorphic(MapHandleList* receiver_maps);
  Handle<Code> ComputeStoreElementPolymorphic(MapHandleList* receiver_maps,
                                              KeyedAccessStoreMode store_mode,
                                              StrictModeFlag strict_mode);

  Handle<Code> ComputePolymorphicIC(TypeHandleList* types,
                                    CodeHandleList* handlers,
                                    int number_of_valid_maps,
                                    Handle<Name> name,
                                    ExtraICState extra_ic_state);

  // Finds the Code object stored in the Heap::non_monomorphic_cache().
  Code* FindPreMonomorphicIC(Code::Kind kind, ExtraICState extra_ic_state);

  // Update cache for entry hash(name, map).
  Code* Set(Name* name, Map* map, Code* code);

  // Clear the lookup table (at mark-compact collection time).
  void Clear();

  // Collect all maps that match the name and flags.
  void CollectMatchingMaps(SmallMapList* types,
                           Handle<Name> name,
                           Code::Flags flags,
                           Handle<Context> native_context,
                           Zone* zone);

  // Generate code for probing the stub cache table.
  // Arguments extra, extra2 and extra3 may be used to pass additional scratch
  // registers. Set to no_reg if not needed.
  void GenerateProbe(MacroAssembler* masm,
                     Code::Flags flags,
                     Register receiver,
                     Register name,
                     Register scratch,
                     Register extra,
                     Register extra2 = no_reg,
                     Register extra3 = no_reg);
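
  // As a rough illustration (not the exact generated sequence), a
  // megamorphic IC stub might probe the cache along these lines, assuming
  // the receiver and name are already in registers and |flags| describes
  // the kind of handler being looked up:
  //
  //   isolate()->stub_cache()->GenerateProbe(
  //       masm, flags, receiver, name, scratch, extra);
  //   // A failed probe falls through here, and the stub then typically
  //   // tail-calls the corresponding miss builtin.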

  enum Table {
    kPrimary,
    kSecondary
  };


  SCTableReference key_reference(StubCache::Table table) {
    return SCTableReference(
        reinterpret_cast<Address>(&first_entry(table)->key));
  }


  SCTableReference map_reference(StubCache::Table table) {
    return SCTableReference(
        reinterpret_cast<Address>(&first_entry(table)->map));
  }


  SCTableReference value_reference(StubCache::Table table) {
    return SCTableReference(
        reinterpret_cast<Address>(&first_entry(table)->value));
  }


  StubCache::Entry* first_entry(StubCache::Table table) {
    switch (table) {
      case StubCache::kPrimary: return StubCache::primary_;
      case StubCache::kSecondary: return StubCache::secondary_;
    }
    UNREACHABLE();
    return NULL;
  }

  Isolate* isolate() { return isolate_; }
  Heap* heap() { return isolate()->heap(); }
  Factory* factory() { return isolate()->factory(); }

  // These constants describe the structure of the interceptor arguments on the
  // stack. The arguments are pushed by the (platform-specific)
  // PushInterceptorArguments and read by LoadPropertyWithInterceptorOnly and
  // LoadWithInterceptor.
  static const int kInterceptorArgsNameIndex = 0;
  static const int kInterceptorArgsInfoIndex = 1;
  static const int kInterceptorArgsThisIndex = 2;
  static const int kInterceptorArgsHolderIndex = 3;
  static const int kInterceptorArgsLength = 4;

 private:
  explicit StubCache(Isolate* isolate);

  // The stub cache has a primary and a secondary level.  The two levels use
  // different hashing algorithms in order to avoid simultaneous collisions
  // in both caches.  Unlike a probing strategy (quadratic or otherwise), the
  // update strategy is fairly clear and simple: any existing entry in the
  // primary cache is moved to the secondary cache, and secondary cache
  // entries are simply overwritten.

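  // A minimal sketch of that update policy (an illustration only; see the
  // definition of Set() for the actual implementation):
  //
  //   Code* Set(Name* name, Map* map, Code* code) {
  //     Code::Flags flags = code->flags();
  //     Entry* primary = entry(primary_, PrimaryOffset(name, flags, map));
  //     if (primary->value != sentinel) {  // Slot already in use.
  //       // Demote the existing primary entry to the secondary table,
  //       // overwriting whatever lives at that secondary slot.
  //       Code::Flags old_flags = primary->value->flags();
  //       int seed = PrimaryOffset(primary->key, old_flags, primary->map);
  //       Entry* secondary =
  //           entry(secondary_, SecondaryOffset(primary->key, old_flags, seed));
  //       *secondary = *primary;
  //     }
  //     primary->key = name;
  //     primary->map = map;
  //     primary->value = code;
  //     return code;
  //   }
  //
  // Here |sentinel| stands for the code object used to mark unused entries.
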
  // Hash algorithm for the primary table.  This algorithm is replicated in
  // assembler for every architecture.  Returns an index into the table that
  // is scaled by 1 << kHeapObjectTagSize.
  static int PrimaryOffset(Name* name, Code::Flags flags, Map* map) {
    // This works well because the heap object tag size and the hash
    // shift are equal.  Shifting down the length field to get the
    // hash code would effectively throw away two bits of the hash
    // code.
    STATIC_ASSERT(kHeapObjectTagSize == Name::kHashShift);
    // Compute the hash of the name (use entire hash field).
    ASSERT(name->HasHashCode());
    uint32_t field = name->hash_field();
    // Using only the low bits in 64-bit mode is unlikely to increase the
    // risk of collision even if the heap is spread over an area larger than
    // 4Gb (and not at all if it isn't).
    uint32_t map_low32bits =
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(map));
    // We always set the in_loop bit to zero when generating the lookup code
    // so do it here too so the hash codes match.
    uint32_t iflags =
        (static_cast<uint32_t>(flags) & ~Code::kFlagsNotUsedInLookup);
    // Base the offset on a simple combination of name, flags, and map.
    uint32_t key = (map_low32bits + field) ^ iflags;
    return key & ((kPrimaryTableSize - 1) << kHeapObjectTagSize);
  }

  // Hash algorithm for the secondary table.  This algorithm is replicated in
  // assembler for every architecture.  Returns an index into the table that
  // is scaled by 1 << kHeapObjectTagSize.
  static int SecondaryOffset(Name* name, Code::Flags flags, int seed) {
    // Use the seed from the primary cache in the secondary cache.
    uint32_t name_low32bits =
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name));
    // We always set the in_loop bit to zero when generating the lookup code
    // so do it here too so the hash codes match.
    uint32_t iflags =
        (static_cast<uint32_t>(flags) & ~Code::kFlagsNotUsedInLookup);
    uint32_t key = (seed - name_low32bits) + iflags;
    return key & ((kSecondaryTableSize - 1) << kHeapObjectTagSize);
  }

  // Compute the entry for a given offset in exactly the same way as
  // we do in generated code.  We generate a hash code that already
  // ends in Name::kHashShift zeros.  Then we multiply it so it is a multiple
  // of sizeof(Entry).  This makes it easier to avoid making mistakes
  // in the hashed offset computations.
  static Entry* entry(Entry* table, int offset) {
    const int multiplier = sizeof(*table) >> Name::kHashShift;
    return reinterpret_cast<Entry*>(
        reinterpret_cast<Address>(table) + offset * multiplier);
  }
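  // For example, assuming kHeapObjectTagSize == Name::kHashShift == 2 and a
  // 32-bit target where Entry is three pointers (12 bytes): the offsets
  // produced above are multiples of 1 << 2 == 4, multiplier is 12 >> 2 == 3,
  // and offset * multiplier therefore always lands on a 12-byte Entry
  // boundary without any further shifting.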

  static const int kPrimaryTableBits = 11;
  static const int kPrimaryTableSize = (1 << kPrimaryTableBits);
  static const int kSecondaryTableBits = 9;
  static const int kSecondaryTableSize = (1 << kSecondaryTableBits);

  Entry primary_[kPrimaryTableSize];
  Entry secondary_[kSecondaryTableSize];
  Isolate* isolate_;

  friend class Isolate;
  friend class SCTableReference;

  DISALLOW_COPY_AND_ASSIGN(StubCache);
};


// ------------------------------------------------------------------------


// Support functions for IC stubs for callbacks.
DECLARE_RUNTIME_FUNCTION(MaybeObject*, StoreCallbackProperty);


// Support functions for IC stubs for interceptors.
DECLARE_RUNTIME_FUNCTION(MaybeObject*, LoadPropertyWithInterceptorOnly);
DECLARE_RUNTIME_FUNCTION(MaybeObject*, LoadPropertyWithInterceptorForLoad);
DECLARE_RUNTIME_FUNCTION(MaybeObject*, LoadPropertyWithInterceptorForCall);
DECLARE_RUNTIME_FUNCTION(MaybeObject*, StoreInterceptorProperty);
DECLARE_RUNTIME_FUNCTION(MaybeObject*, KeyedLoadPropertyWithInterceptor);


enum PrototypeCheckType { CHECK_ALL_MAPS, SKIP_RECEIVER };
enum IcCheckType { ELEMENT, PROPERTY };


// The stub compilers compile stubs for the stub cache.
class StubCompiler BASE_EMBEDDED {
 public:
  explicit StubCompiler(Isolate* isolate,
                        ExtraICState extra_ic_state = kNoExtraICState)
      : isolate_(isolate), extra_ic_state_(extra_ic_state),
        masm_(isolate, NULL, 256), failure_(NULL) { }

  Handle<Code> CompileLoadInitialize(Code::Flags flags);
  Handle<Code> CompileLoadPreMonomorphic(Code::Flags flags);
  Handle<Code> CompileLoadMegamorphic(Code::Flags flags);

  Handle<Code> CompileStoreInitialize(Code::Flags flags);
  Handle<Code> CompileStorePreMonomorphic(Code::Flags flags);
  Handle<Code> CompileStoreGeneric(Code::Flags flags);
  Handle<Code> CompileStoreMegamorphic(Code::Flags flags);

  // Static functions for generating parts of stubs.
  static void GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                  int index,
                                                  Register prototype);

  // Helper function used to check that the dictionary doesn't contain
  // the property. This function may return false negatives, so miss_label
  // must always call a backup property check that is complete.
  // This function is safe to call if the receiver has fast properties.
  // Name must be unique and receiver must be a heap object.
  static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
                                               Label* miss_label,
                                               Register receiver,
                                               Handle<Name> name,
                                               Register r0,
                                               Register r1);

  // Generates prototype loading code that uses the objects from the
  // context we were in when this function was called. If the context
  // has changed, a jump to miss is performed. This ties the generated
  // code to a particular context and so must not be used in cases
  // where the generated code is not allowed to have references to
  // objects from a context.
  static void GenerateDirectLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                        int index,
                                                        Register prototype,
                                                        Label* miss);

  static void GenerateFastPropertyLoad(MacroAssembler* masm,
                                       Register dst,
                                       Register src,
                                       bool inobject,
                                       int index,
                                       Representation representation);

  static void GenerateLoadArrayLength(MacroAssembler* masm,
                                      Register receiver,
                                      Register scratch,
                                      Label* miss_label);

  static void GenerateLoadStringLength(MacroAssembler* masm,
                                       Register receiver,
                                       Register scratch1,
                                       Register scratch2,
                                       Label* miss_label);

  static void GenerateLoadFunctionPrototype(MacroAssembler* masm,
                                            Register receiver,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss_label);

  // Generate code to check that a global property cell is empty. Create
  // the property cell at compilation time if no cell exists for the
  // property.
  static void GenerateCheckPropertyCell(MacroAssembler* masm,
                                        Handle<JSGlobalObject> global,
                                        Handle<Name> name,
                                        Register scratch,
                                        Label* miss);

  static void TailCallBuiltin(MacroAssembler* masm, Builtins::Name name);

  // Generates code that verifies that the property holder has not changed:
  // it checks the maps of the objects in the prototype chain for fast and
  // global objects (or performs a negative lookup for slow objects), makes
  // sure that the property cells of global objects are still empty, and
  // checks that the map of the holder has not changed. If necessary the
  // function also generates code for a security check in case of global
  // object holders. This helps to make sure that the current IC is still
  // valid.
  //
  // The scratch and holder registers are always clobbered, but the object
  // register is only clobbered if it is the same as the holder register. The
  // function returns a register containing the holder - either object_reg or
  // holder_reg.
  Register CheckPrototypes(Handle<HeapType> type,
                           Register object_reg,
                           Handle<JSObject> holder,
                           Register holder_reg,
                           Register scratch1,
                           Register scratch2,
                           Handle<Name> name,
                           Label* miss,
                           PrototypeCheckType check = CHECK_ALL_MAPS);

  void GenerateBooleanCheck(Register object, Label* miss);

 protected:
  Handle<Code> GetCodeWithFlags(Code::Flags flags, const char* name);
  Handle<Code> GetCodeWithFlags(Code::Flags flags, Handle<Name> name);

  ExtraICState extra_state() { return extra_ic_state_; }

  MacroAssembler* masm() { return &masm_; }
  void set_failure(Failure* failure) { failure_ = failure; }

  static void LookupPostInterceptor(Handle<JSObject> holder,
                                    Handle<Name> name,
                                    LookupResult* lookup);

  Isolate* isolate() { return isolate_; }
  Heap* heap() { return isolate()->heap(); }
  Factory* factory() { return isolate()->factory(); }

  static void GenerateTailCall(MacroAssembler* masm, Handle<Code> code);

 private:
  Isolate* isolate_;
  const ExtraICState extra_ic_state_;
  MacroAssembler masm_;
  Failure* failure_;
};


enum FrontendCheckType { PERFORM_INITIAL_CHECKS, SKIP_INITIAL_CHECKS };


class BaseLoadStoreStubCompiler: public StubCompiler {
 public:
  BaseLoadStoreStubCompiler(Isolate* isolate,
                            Code::Kind kind,
                            ExtraICState extra_ic_state = kNoExtraICState,
                            InlineCacheHolderFlag cache_holder = OWN_MAP)
      : StubCompiler(isolate, extra_ic_state),
        kind_(kind),
        cache_holder_(cache_holder) {
    InitializeRegisters();
  }
  virtual ~BaseLoadStoreStubCompiler() { }

  Handle<Code> CompileMonomorphicIC(Handle<HeapType> type,
                                    Handle<Code> handler,
                                    Handle<Name> name);

  Handle<Code> CompilePolymorphicIC(TypeHandleList* types,
                                    CodeHandleList* handlers,
                                    Handle<Name> name,
                                    Code::StubType type,
                                    IcCheckType check);

  static Builtins::Name MissBuiltin(Code::Kind kind) {
    switch (kind) {
      case Code::LOAD_IC: return Builtins::kLoadIC_Miss;
      case Code::STORE_IC: return Builtins::kStoreIC_Miss;
      case Code::KEYED_LOAD_IC: return Builtins::kKeyedLoadIC_Miss;
      case Code::KEYED_STORE_IC: return Builtins::kKeyedStoreIC_Miss;
      default: UNREACHABLE();
    }
    return Builtins::kLoadIC_Miss;
  }

 protected:
  virtual Register HandlerFrontendHeader(Handle<HeapType> type,
                                         Register object_reg,
                                         Handle<JSObject> holder,
                                         Handle<Name> name,
                                         Label* miss) = 0;

  virtual void HandlerFrontendFooter(Handle<Name> name, Label* miss) = 0;

  Register HandlerFrontend(Handle<HeapType> type,
                           Register object_reg,
                           Handle<JSObject> holder,
                           Handle<Name> name);

  Handle<Code> GetCode(Code::Kind kind,
                       Code::StubType type,
                       Handle<Name> name);

  Handle<Code> GetICCode(Code::Kind kind,
                         Code::StubType type,
                         Handle<Name> name,
                         InlineCacheState state = MONOMORPHIC);
  Code::Kind kind() { return kind_; }

  Logger::LogEventsAndTags log_kind(Handle<Code> code) {
    if (!code->is_inline_cache_stub()) return Logger::STUB_TAG;
    if (kind_ == Code::LOAD_IC) {
      return code->ic_state() == MONOMORPHIC
          ? Logger::LOAD_IC_TAG : Logger::LOAD_POLYMORPHIC_IC_TAG;
    } else if (kind_ == Code::KEYED_LOAD_IC) {
      return code->ic_state() == MONOMORPHIC
          ? Logger::KEYED_LOAD_IC_TAG : Logger::KEYED_LOAD_POLYMORPHIC_IC_TAG;
    } else if (kind_ == Code::STORE_IC) {
      return code->ic_state() == MONOMORPHIC
          ? Logger::STORE_IC_TAG : Logger::STORE_POLYMORPHIC_IC_TAG;
    } else {
      return code->ic_state() == MONOMORPHIC
          ? Logger::KEYED_STORE_IC_TAG : Logger::KEYED_STORE_POLYMORPHIC_IC_TAG;
    }
  }
  void JitEvent(Handle<Name> name, Handle<Code> code);

  virtual Register receiver() = 0;
  virtual Register name() = 0;
  virtual Register scratch1() = 0;
  virtual Register scratch2() = 0;
  virtual Register scratch3() = 0;

  void InitializeRegisters();

  bool IncludesNumberType(TypeHandleList* types);

  Code::Kind kind_;
  InlineCacheHolderFlag cache_holder_;
  Register* registers_;
};


class LoadStubCompiler: public BaseLoadStoreStubCompiler {
 public:
  LoadStubCompiler(Isolate* isolate,
                   ExtraICState extra_ic_state = kNoExtraICState,
                   InlineCacheHolderFlag cache_holder = OWN_MAP,
                   Code::Kind kind = Code::LOAD_IC)
      : BaseLoadStoreStubCompiler(isolate, kind, extra_ic_state,
                                  cache_holder) { }
  virtual ~LoadStubCompiler() { }

  Handle<Code> CompileLoadField(Handle<HeapType> type,
                                Handle<JSObject> holder,
                                Handle<Name> name,
                                PropertyIndex index,
                                Representation representation);

  Handle<Code> CompileLoadCallback(Handle<HeapType> type,
                                   Handle<JSObject> holder,
                                   Handle<Name> name,
                                   Handle<ExecutableAccessorInfo> callback);

  Handle<Code> CompileLoadCallback(Handle<HeapType> type,
                                   Handle<JSObject> holder,
                                   Handle<Name> name,
                                   const CallOptimization& call_optimization);

  Handle<Code> CompileLoadConstant(Handle<HeapType> type,
                                   Handle<JSObject> holder,
                                   Handle<Name> name,
                                   Handle<Object> value);

  Handle<Code> CompileLoadInterceptor(Handle<HeapType> type,
                                      Handle<JSObject> holder,
                                      Handle<Name> name);

  Handle<Code> CompileLoadViaGetter(Handle<HeapType> type,
                                    Handle<JSObject> holder,
                                    Handle<Name> name,
                                    Handle<JSFunction> getter);

  static void GenerateLoadViaGetter(MacroAssembler* masm,
                                    Handle<HeapType> type,
                                    Register receiver,
                                    Handle<JSFunction> getter);

  Handle<Code> CompileLoadNonexistent(Handle<HeapType> type,
                                      Handle<JSObject> last,
                                      Handle<Name> name);

  Handle<Code> CompileLoadGlobal(Handle<HeapType> type,
                                 Handle<GlobalObject> holder,
                                 Handle<PropertyCell> cell,
                                 Handle<Name> name,
                                 bool is_dont_delete);

  static Register* registers();

 protected:
  ContextualMode contextual_mode() {
    return LoadIC::GetContextualMode(extra_state());
  }

  virtual Register HandlerFrontendHeader(Handle<HeapType> type,
                                         Register object_reg,
                                         Handle<JSObject> holder,
                                         Handle<Name> name,
                                         Label* miss);

  virtual void HandlerFrontendFooter(Handle<Name> name, Label* miss);

  Register CallbackHandlerFrontend(Handle<HeapType> type,
                                   Register object_reg,
                                   Handle<JSObject> holder,
                                   Handle<Name> name,
                                   Handle<Object> callback);
  void NonexistentHandlerFrontend(Handle<HeapType> type,
                                  Handle<JSObject> last,
                                  Handle<Name> name);

  void GenerateLoadField(Register reg,
                         Handle<JSObject> holder,
                         PropertyIndex field,
                         Representation representation);
  void GenerateLoadConstant(Handle<Object> value);
  void GenerateLoadCallback(Register reg,
                            Handle<ExecutableAccessorInfo> callback);
  void GenerateLoadCallback(const CallOptimization& call_optimization,
                            Handle<Map> receiver_map);
  void GenerateLoadInterceptor(Register holder_reg,
                               Handle<Object> object,
                               Handle<JSObject> holder,
                               LookupResult* lookup,
                               Handle<Name> name);
  void GenerateLoadPostInterceptor(Register reg,
                                   Handle<JSObject> interceptor_holder,
                                   Handle<Name> name,
                                   LookupResult* lookup);

  virtual Register receiver() { return registers_[0]; }
  virtual Register name()     { return registers_[1]; }
  virtual Register scratch1() { return registers_[2]; }
  virtual Register scratch2() { return registers_[3]; }
  virtual Register scratch3() { return registers_[4]; }
  Register scratch4() { return registers_[5]; }
};
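
// A hypothetical compile-and-use flow for a monomorphic field load handler,
// pieced together from the declarations above (a sketch only; the actual
// callers live in the IC machinery and differ in detail):
//
//   LoadStubCompiler compiler(isolate);
//   Handle<Code> handler =
//       compiler.CompileLoadField(type, holder, name, index, representation);
//   // The resulting code object can then be wired into a monomorphic IC via
//   // StubCache::ComputeMonomorphicIC() and later found again through the
//   // primary/secondary probe described at the top of this file.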


class KeyedLoadStubCompiler: public LoadStubCompiler {
 public:
  KeyedLoadStubCompiler(Isolate* isolate,
                        ExtraICState extra_ic_state = kNoExtraICState,
                        InlineCacheHolderFlag cache_holder = OWN_MAP)
      : LoadStubCompiler(isolate, extra_ic_state, cache_holder,
                         Code::KEYED_LOAD_IC) { }

  Handle<Code> CompileLoadElement(Handle<Map> receiver_map);

  void CompileElementHandlers(MapHandleList* receiver_maps,
                              CodeHandleList* handlers);

  static void GenerateLoadDictionaryElement(MacroAssembler* masm);

 private:
  static Register* registers();
  friend class BaseLoadStoreStubCompiler;
};


class StoreStubCompiler: public BaseLoadStoreStubCompiler {
 public:
  StoreStubCompiler(Isolate* isolate,
                    ExtraICState extra_ic_state,
                    Code::Kind kind = Code::STORE_IC)
      : BaseLoadStoreStubCompiler(isolate, kind, extra_ic_state) {}

  virtual ~StoreStubCompiler() { }

  Handle<Code> CompileStoreTransition(Handle<JSObject> object,
                                      LookupResult* lookup,
                                      Handle<Map> transition,
                                      Handle<Name> name);

  Handle<Code> CompileStoreField(Handle<JSObject> object,
                                 LookupResult* lookup,
                                 Handle<Name> name);

  void GenerateNegativeHolderLookup(MacroAssembler* masm,
                                    Handle<JSObject> holder,
                                    Register holder_reg,
                                    Handle<Name> name,
                                    Label* miss);

  void GenerateStoreTransition(MacroAssembler* masm,
                               Handle<JSObject> object,
                               LookupResult* lookup,
                               Handle<Map> transition,
                               Handle<Name> name,
                               Register receiver_reg,
                               Register name_reg,
                               Register value_reg,
                               Register scratch1,
                               Register scratch2,
                               Register scratch3,
                               Label* miss_label,
                               Label* slow);

  void GenerateStoreField(MacroAssembler* masm,
                          Handle<JSObject> object,
                          LookupResult* lookup,
                          Register receiver_reg,
                          Register name_reg,
                          Register value_reg,
                          Register scratch1,
                          Register scratch2,
                          Label* miss_label);

  Handle<Code> CompileStoreCallback(Handle<JSObject> object,
                                    Handle<JSObject> holder,
                                    Handle<Name> name,
                                    Handle<ExecutableAccessorInfo> callback);

  Handle<Code> CompileStoreCallback(Handle<JSObject> object,
                                    Handle<JSObject> holder,
                                    Handle<Name> name,
                                    const CallOptimization& call_optimization);

  static void GenerateStoreViaSetter(MacroAssembler* masm,
                                     Handle<HeapType> type,
                                     Handle<JSFunction> setter);

  Handle<Code> CompileStoreViaSetter(Handle<JSObject> object,
                                     Handle<JSObject> holder,
                                     Handle<Name> name,
                                     Handle<JSFunction> setter);

  Handle<Code> CompileStoreInterceptor(Handle<JSObject> object,
                                       Handle<Name> name);

  static Builtins::Name SlowBuiltin(Code::Kind kind) {
    switch (kind) {
      case Code::STORE_IC: return Builtins::kStoreIC_Slow;
      case Code::KEYED_STORE_IC: return Builtins::kKeyedStoreIC_Slow;
      default: UNREACHABLE();
    }
    return Builtins::kStoreIC_Slow;
  }

 protected:
  virtual Register HandlerFrontendHeader(Handle<HeapType> type,
                                         Register object_reg,
                                         Handle<JSObject> holder,
                                         Handle<Name> name,
                                         Label* miss);

  virtual void HandlerFrontendFooter(Handle<Name> name, Label* miss);
  void GenerateRestoreName(MacroAssembler* masm,
                           Label* label,
                           Handle<Name> name);

  virtual Register receiver() { return registers_[0]; }
  virtual Register name()     { return registers_[1]; }
  Register value()    { return registers_[2]; }
  virtual Register scratch1() { return registers_[3]; }
  virtual Register scratch2() { return registers_[4]; }
  virtual Register scratch3() { return registers_[5]; }

 protected:
  static Register* registers();

 private:
  friend class BaseLoadStoreStubCompiler;
};


class KeyedStoreStubCompiler: public StoreStubCompiler {
 public:
  KeyedStoreStubCompiler(Isolate* isolate,
                         ExtraICState extra_ic_state)
      : StoreStubCompiler(isolate, extra_ic_state, Code::KEYED_STORE_IC) {}

  Handle<Code> CompileStoreElement(Handle<Map> receiver_map);

  Handle<Code> CompileStorePolymorphic(MapHandleList* receiver_maps,
                                       CodeHandleList* handler_stubs,
                                       MapHandleList* transitioned_maps);

  Handle<Code> CompileStoreElementPolymorphic(MapHandleList* receiver_maps);

  static void GenerateStoreDictionaryElement(MacroAssembler* masm);

 private:
  static Register* registers();

  KeyedAccessStoreMode store_mode() {
    return KeyedStoreIC::GetKeyedAccessStoreMode(extra_state());
  }

  Register transition_map() {
    return registers()[3];
  }

  friend class BaseLoadStoreStubCompiler;
};


// Holds information about possible function call optimizations.
class CallOptimization BASE_EMBEDDED {
 public:
  explicit CallOptimization(LookupResult* lookup);

  explicit CallOptimization(Handle<JSFunction> function);

  bool is_constant_call() const {
    return !constant_function_.is_null();
  }

  Handle<JSFunction> constant_function() const {
    ASSERT(is_constant_call());
    return constant_function_;
  }

  bool is_simple_api_call() const {
    return is_simple_api_call_;
  }

  Handle<FunctionTemplateInfo> expected_receiver_type() const {
    ASSERT(is_simple_api_call());
    return expected_receiver_type_;
  }

  Handle<CallHandlerInfo> api_call_info() const {
    ASSERT(is_simple_api_call());
    return api_call_info_;
  }

  enum HolderLookup {
    kHolderNotFound,
    kHolderIsReceiver,
    kHolderFound
  };
  Handle<JSObject> LookupHolderOfExpectedType(
      Handle<Map> receiver_map,
      HolderLookup* holder_lookup) const;

  // Check if the api holder is between the receiver and the holder.
  bool IsCompatibleReceiver(Handle<Object> receiver,
                            Handle<JSObject> holder) const;

 private:
  void Initialize(Handle<JSFunction> function);

  // Determines whether the given function can be called using the
  // fast api call builtin.
  void AnalyzePossibleApiFunction(Handle<JSFunction> function);

  Handle<JSFunction> constant_function_;
  bool is_simple_api_call_;
  Handle<FunctionTemplateInfo> expected_receiver_type_;
  Handle<CallHandlerInfo> api_call_info_;
};
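
// A hypothetical use of CallOptimization when deciding whether a call
// target can go through the fast API call path (a sketch only, based on the
// interface above):
//
//   CallOptimization optimization(function);
//   if (optimization.is_simple_api_call()) {
//     CallOptimization::HolderLookup lookup;
//     Handle<JSObject> api_holder =
//         optimization.LookupHolderOfExpectedType(receiver_map, &lookup);
//     if (lookup != CallOptimization::kHolderNotFound) {
//       // Emit the fast API call using optimization.api_call_info().
//     }
//   }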


} }  // namespace v8::internal

#endif  // V8_STUB_CACHE_H_