1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
8 #include "src/macro-assembler.h"
// Upper bound on the number of receiver maps a polymorphic keyed IC tracks
// before falling back to a more generic state — TODO(review) confirm exact
// consumer; name suggests keyed load/store polymorphism cap.
14 const int kMaxKeyedPolymorphism = 4;
17 // IC_UTIL_LIST defines all utility functions called from generated
18 // inline caching code. The argument for the macro, ICU, is the function name.
// X-macro list: expanded with CONST_NAME below to build enum constants
// (k<Name>) that AddressFromUtilityId resolves to code addresses.
// NOTE(review): this dump is missing some entries (embedded original line
// numbers are sparse), so the list below is not necessarily complete.
19 #define IC_UTIL_LIST(ICU) \
21 ICU(KeyedLoadIC_Miss) \
23 ICU(CallIC_Customization_Miss) \
26 ICU(SharedStoreIC_ExtendStorage) \
27 ICU(KeyedStoreIC_Miss) \
28 ICU(KeyedStoreIC_Slow) \
29 /* Utilities for IC stubs. */ \
30 ICU(StoreCallbackProperty) \
31 ICU(LoadPropertyWithInterceptorOnly) \
32 ICU(LoadPropertyWithInterceptor) \
33 ICU(LoadElementWithInterceptor) \
34 ICU(StorePropertyWithInterceptor) \
36 ICU(BinaryOpIC_Miss) \
37 ICU(CompareNilIC_Miss) \
41 // IC is the base class for LoadIC, StoreIC, KeyedLoadIC, and KeyedStoreIC.
// NOTE(review): this chunk is a partial dump — the embedded original line
// numbers are sparse, so the class head, access specifiers, some member
// declarations and closing braces are missing between the lines shown.
45 // The ids for utility called from the generated code.
// Expands IC_UTIL_LIST into enum constants kKeyedLoadIC_Miss, ... (k##name).
47 #define CONST_NAME(name) k##name,
48 IC_UTIL_LIST(CONST_NAME)
53 // Looks up the address of the named utility.
54 static Address AddressFromUtilityId(UtilityId id);
56 // Alias the inline cache state type to make the IC code more readable.
57 typedef InlineCacheState State;
59 // The IC code is either invoked with no extra frames on the stack
60 // or with a single extra frame for supporting calls.
66 // Construct the IC structure with the given number of extra
67 // JavaScript frames on the stack.
68 IC(FrameDepth depth, Isolate* isolate);
71 State state() const { return state_; }
72 inline Address address() const;
74 // Compute the current IC state based on the target stub, receiver and name.
75 void UpdateState(Handle<Object> receiver, Handle<Object> name);
77 bool IsNameCompatibleWithPrototypeFailure(Handle<Object> name);
// Forces the state machine into PROTOTYPE_FAILURE; the DCHECK documents the
// precondition that the name is compatible with that transition.
78 void MarkPrototypeFailure(Handle<Object> name) {
79 DCHECK(IsNameCompatibleWithPrototypeFailure(name));
80 state_ = PROTOTYPE_FAILURE;
83 // If the stub contains weak maps then this function adds the stub to
84 // the dependent code array of each weak map.
85 static void RegisterWeakMapDependency(Handle<Code> stub);
87 // This function is called when a weak map in the stub is dying,
88 // invalidates the stub by setting maps in it to undefined.
89 static void InvalidateMaps(Code* stub);
91 // Clear the inline cache to initial state.
92 static void Clear(Isolate* isolate,
94 ConstantPoolArray* constant_pool);
// Kind predicates delegate to flags on the current target Code object.
97 bool IsLoadStub() const {
98 return target()->is_load_stub() || target()->is_keyed_load_stub();
101 bool IsStoreStub() const {
102 return target()->is_store_stub() || target()->is_keyed_store_stub();
105 bool IsCallStub() const {
106 return target()->is_call_stub();
110 template <class TypeClass>
111 static JSFunction* GetRootConstructor(TypeClass* type,
112 Context* native_context);
113 static inline Handle<Map> GetHandlerCacheHolder(HeapType* type,
114 bool receiver_is_holder,
116 CacheHolderFlag* flag);
117 static inline Handle<Map> GetICCacheHolder(HeapType* type, Isolate* isolate,
118 CacheHolderFlag* flag);
// An IC counts as "cleared" when it is back in one of the two pre-feedback
// states.
120 static bool IsCleared(Code* code) {
121 InlineCacheState state = code->ic_state();
122 return state == UNINITIALIZED || state == PREMONOMORPHIC;
125 // Utility functions to convert maps to types and back. There are two special
127 // - The heap_number_map is used as a marker which includes heap numbers as
129 // - The oddball map is only used for booleans.
130 static Handle<Map> TypeToMap(HeapType* type, Isolate* isolate);
132 static typename T::TypeHandle MapToType(Handle<Map> map,
133 typename T::Region* region);
135 static Handle<HeapType> CurrentTypeOf(Handle<Object> object,
139 // Get the call-site target; used for determining the state.
140 Handle<Code> target() const { return target_; }
142 Address fp() const { return fp_; }
143 Address pc() const { return *pc_address_; }
144 Isolate* isolate() const { return isolate_; }
146 // Get the shared function info of the caller.
147 SharedFunctionInfo* GetSharedFunctionInfo() const;
148 // Get the code object of the caller.
149 Code* GetCode() const;
150 // Get the original (non-breakpointed) code object of the caller.
151 Code* GetOriginalCode() const;
153 // Set the call-site target.
154 void set_target(Code* code) {
156 code->VerifyEmbeddedObjectsDependency();
158 SetTargetAtAddress(address(), code, constant_pool());
162 bool is_target_set() { return target_set_; }
// Tracing helpers used when IC state transitions are logged.
164 char TransitionMarkFromState(IC::State state);
165 void TraceIC(const char* type, Handle<Object> name);
166 void TraceIC(const char* type, Handle<Object> name, State old_state,
169 MaybeHandle<Object> TypeError(const char* type,
170 Handle<Object> object,
172 MaybeHandle<Object> ReferenceError(const char* type, Handle<Name> name);
174 // Access the target code for the given IC address.
175 static inline Code* GetTargetAtAddress(Address address,
176 ConstantPoolArray* constant_pool);
177 static inline void SetTargetAtAddress(Address address,
179 ConstantPoolArray* constant_pool);
180 static void OnTypeFeedbackChanged(Isolate* isolate, Address address,
181 State old_state, State new_state,
182 bool target_remains_ic_stub);
183 static void PostPatching(Address address, Code* target, Code* old_target);
185 // Compute the handler either by compiling or by retrieving a cached version.
186 Handle<Code> ComputeHandler(LookupIterator* lookup, Handle<Object> object,
188 Handle<Object> value = Handle<Code>::null());
// Base implementation returns the null handle; subclasses (e.g. LoadIC)
// override to actually compile handlers.
189 virtual Handle<Code> CompileHandler(LookupIterator* lookup,
190 Handle<Object> object,
191 Handle<Name> name, Handle<Object> value,
192 CacheHolderFlag cache_holder) {
194 return Handle<Code>::null();
196 // Temporary copy of the above, but using a LookupResult.
197 // TODO(jkummerow): Migrate callers to LookupIterator and delete these.
198 Handle<Code> ComputeStoreHandler(LookupResult* lookup, Handle<Object> object,
200 Handle<Object> value = Handle<Code>::null());
201 virtual Handle<Code> CompileStoreHandler(LookupResult* lookup,
202 Handle<Object> object,
204 Handle<Object> value,
205 CacheHolderFlag cache_holder) {
207 return Handle<Code>::null();
210 void UpdateMonomorphicIC(Handle<Code> handler, Handle<Name> name);
211 bool UpdatePolymorphicIC(Handle<Name> name, Handle<Code> code);
212 void UpdateMegamorphicCache(HeapType* type, Name* name, Code* code);
214 void CopyICToMegamorphicCache(Handle<Name> name);
215 bool IsTransitionOfMonomorphicTarget(Map* source_map, Map* target_map);
216 void PatchCache(Handle<Name> name, Handle<Code> code);
217 Code::Kind kind() const { return kind_; }
// KEYED_LOAD_IC shares LOAD_IC handlers; all other kinds map to themselves.
218 Code::Kind handler_kind() const {
219 if (kind_ == Code::KEYED_LOAD_IC) return Code::LOAD_IC;
220 DCHECK(kind_ == Code::LOAD_IC || kind_ == Code::STORE_IC ||
221 kind_ == Code::KEYED_STORE_IC);
224 virtual Handle<Code> megamorphic_stub() {
226 return Handle<Code>::null();
229 bool TryRemoveInvalidPrototypeDependentStub(Handle<Object> receiver,
230 Handle<String> name);
232 ExtraICState extra_ic_state() const { return extra_ic_state_; }
233 void set_extra_ic_state(ExtraICState state) {
234 extra_ic_state_ = state;
237 Handle<HeapType> receiver_type() { return receiver_type_; }
// Copies the recorded target maps (or their types) into the caller's list.
239 void TargetMaps(MapHandleList* list) {
241 for (int i = 0; i < target_maps_.length(); i++) {
242 list->Add(target_maps_.at(i));
246 void TargetTypes(TypeHandleList* list) {
248 for (int i = 0; i < target_maps_.length(); i++) {
249 list->Add(IC::MapToType<HeapType>(target_maps_.at(i), isolate_));
253 Map* FirstTargetMap() {
255 return target_maps_.length() > 0 ? *target_maps_.at(0) : NULL;
259 void UpdateTarget() {
260 target_ = handle(raw_target(), isolate_);
264 Code* raw_target() const {
265 return GetTargetAtAddress(address(), constant_pool());
267 inline ConstantPoolArray* constant_pool() const;
268 inline ConstantPoolArray* raw_constant_pool() const;
// Lazily populates target_maps_ from the current target stub; guarded by
// target_maps_set_ so the (potentially expensive) map walk runs at most once.
270 void FindTargetMaps() {
271 if (target_maps_set_) return;
272 target_maps_set_ = true;
273 if (state_ == MONOMORPHIC) {
274 Map* map = target_->FindFirstMap();
275 if (map != NULL) target_maps_.Add(handle(map));
276 } else if (state_ != UNINITIALIZED && state_ != PREMONOMORPHIC) {
277 target_->FindAllMaps(&target_maps_);
281 // Frame pointer for the frame that uses (calls) the IC.
284 // All access to the program counter of an IC structure is indirect
285 // to make the code GC safe. This feature is crucial since
286 // GetProperty and SetProperty are called and they in turn might
287 // invoke the garbage collector.
288 Address* pc_address_;
292 // The constant pool of the code which originally called the IC (which might
293 // be for the breakpointed copy of the original code).
294 Handle<ConstantPoolArray> raw_constant_pool_;
296 // The original code target that missed.
297 Handle<Code> target_;
301 Handle<HeapType> receiver_type_;
302 MaybeHandle<Code> maybe_handler_;
304 ExtraICState extra_ic_state_;
305 MapHandleList target_maps_;
306 bool target_maps_set_;
308 DISALLOW_IMPLICIT_CONSTRUCTORS(IC);
312 // An IC_Utility encapsulates IC::UtilityId. It exists mainly because you
313 // cannot make forward declarations to an enum.
// Resolves the utility's code address once, at construction time.
316 explicit IC_Utility(IC::UtilityId id)
317 : address_(IC::AddressFromUtilityId(id)), id_(id) {}
319 Address address() const { return address_; }
321 IC::UtilityId id() const { return id_; }
// Inline cache for calls. Feedback lives in a FixedArray "vector" indexed by
// a Smi "slot" (see HandleMiss / FeedbackToState below).
328 class CallIC: public IC {
330 enum CallType { METHOD, FUNCTION };
// State packs the argument count and call type into ExtraICState via the
// ArgcBits / CallTypeBits bitfields declared below.
332 class State V8_FINAL BASE_EMBEDDED {
334 explicit State(ExtraICState extra_ic_state);
336 State(int argc, CallType call_type)
337 : argc_(argc), call_type_(call_type) {
340 ExtraICState GetExtraICState() const;
342 static void GenerateAheadOfTime(
343 Isolate*, void (*Generate)(Isolate*, const State&));
345 int arg_count() const { return argc_; }
346 CallType call_type() const { return call_type_; }
348 bool CallAsMethod() const { return call_type_ == METHOD; }
351 class ArgcBits: public BitField<int, 0, Code::kArgumentsBits> {};
352 class CallTypeBits: public BitField<CallType, Code::kArgumentsBits, 1> {};
355 const CallType call_type_;
// CallIC always runs with an extra call frame on the stack.
358 explicit CallIC(Isolate* isolate)
359 : IC(EXTRA_CALL_FRAME, isolate) {
362 void PatchMegamorphic(Handle<Object> function, Handle<FixedArray> vector,
365 void HandleMiss(Handle<Object> receiver,
366 Handle<Object> function,
367 Handle<FixedArray> vector,
370 // Returns true if a custom handler was installed.
371 bool DoCustomHandler(Handle<Object> receiver,
372 Handle<Object> function,
373 Handle<FixedArray> vector,
377 // Code generator routines.
378 static Handle<Code> initialize_stub(Isolate* isolate,
382 static void Clear(Isolate* isolate, Address address, Code* target,
383 ConstantPoolArray* constant_pool);
// Derives the IC state from the feedback vector entry at the given slot.
386 inline IC::State FeedbackToState(Handle<FixedArray> vector,
387 Handle<Smi> slot) const;
// Pretty-printer for CallIC::State (defined elsewhere).
391 OStream& operator<<(OStream& os, const CallIC::State& s);
// Inline cache for named property loads. The nested State encodes only the
// ContextualMode in bit 0 of ExtraICState.
394 class LoadIC: public IC {
396 enum ParameterIndices {
// Calling-convention registers; definitions are per-architecture.
401 static const Register ReceiverRegister();
402 static const Register NameRegister();
404 // With flag vector-ics, there is an additional argument. And for calls from
405 // crankshaft, yet another.
406 static const Register SlotRegister();
407 static const Register VectorRegister();
409 class State V8_FINAL BASE_EMBEDDED {
411 explicit State(ExtraICState extra_ic_state)
412 : state_(extra_ic_state) {}
414 explicit State(ContextualMode mode)
415 : state_(ContextualModeBits::encode(mode)) {}
417 ExtraICState GetExtraICState() const { return state_; }
419 ContextualMode contextual_mode() const {
420 return ContextualModeBits::decode(state_);
424 class ContextualModeBits: public BitField<ContextualMode, 0, 1> {};
// Relies on NOT_CONTEXTUAL == 0 so a zero ExtraICState means non-contextual.
425 STATIC_ASSERT(static_cast<int>(NOT_CONTEXTUAL) == 0);
427 const ExtraICState state_;
430 static ExtraICState ComputeExtraICState(ContextualMode contextual_mode) {
431 return State(contextual_mode).GetExtraICState();
434 static ContextualMode GetContextualMode(ExtraICState state) {
435 return State(state).contextual_mode();
438 ContextualMode contextual_mode() const {
439 return GetContextualMode(extra_ic_state());
442 explicit LoadIC(FrameDepth depth, Isolate* isolate)
443 : IC(depth, isolate) {
444 DCHECK(IsLoadStub());
447 // Returns if this IC is for contextual (no explicit receiver)
448 // access to properties.
449 bool IsUndeclaredGlobal(Handle<Object> receiver) {
450 if (receiver->IsGlobalObject()) {
451 return contextual_mode() == CONTEXTUAL;
453 DCHECK(contextual_mode() != CONTEXTUAL);
458 // Code generator routines.
459 static void GenerateInitialize(MacroAssembler* masm) { GenerateMiss(masm); }
460 static void GeneratePreMonomorphic(MacroAssembler* masm) {
463 static void GenerateMiss(MacroAssembler* masm);
464 static void GenerateMegamorphic(MacroAssembler* masm);
465 static void GenerateNormal(MacroAssembler* masm);
466 static void GenerateRuntimeGetProperty(MacroAssembler* masm);
468 static Handle<Code> initialize_stub(Isolate* isolate,
469 ExtraICState extra_state);
471 MUST_USE_RESULT MaybeHandle<Object> Load(Handle<Object> object,
// Overrides IC::set_target to additionally assert that patching never
// changes the contextual mode.
475 void set_target(Code* code) {
476 // The contextual mode must be preserved across IC patching.
477 DCHECK(GetContextualMode(code->extra_ic_state()) ==
478 GetContextualMode(target()->extra_ic_state()));
480 IC::set_target(code);
// Picks the slow builtin matching this IC's kind (plain vs. keyed load).
483 Handle<Code> slow_stub() const {
484 if (kind() == Code::LOAD_IC) {
485 return isolate()->builtins()->LoadIC_Slow();
487 DCHECK_EQ(Code::KEYED_LOAD_IC, kind());
488 return isolate()->builtins()->KeyedLoadIC_Slow();
492 virtual Handle<Code> megamorphic_stub();
494 // Update the inline cache and the global stub cache based on the
496 void UpdateCaches(LookupIterator* lookup, Handle<Object> object,
499 virtual Handle<Code> CompileHandler(LookupIterator* lookup,
500 Handle<Object> object,
502 Handle<Object> unused,
503 CacheHolderFlag cache_holder);
506 virtual Handle<Code> pre_monomorphic_stub() const;
507 static Handle<Code> pre_monomorphic_stub(Isolate* isolate,
508 ExtraICState extra_state);
510 Handle<Code> SimpleFieldLoad(FieldIndex index);
512 static void Clear(Isolate* isolate,
515 ConstantPoolArray* constant_pool);
// Inline cache for keyed (indexed / computed-name) property loads.
521 class KeyedLoadIC: public LoadIC {
523 explicit KeyedLoadIC(FrameDepth depth, Isolate* isolate)
524 : LoadIC(depth, isolate) {
525 DCHECK(target()->is_keyed_load_stub());
528 MUST_USE_RESULT MaybeHandle<Object> Load(Handle<Object> object,
531 // Code generator routines.
532 static void GenerateMiss(MacroAssembler* masm);
533 static void GenerateRuntimeGetProperty(MacroAssembler* masm);
534 static void GenerateInitialize(MacroAssembler* masm) { GenerateMiss(masm); }
535 static void GeneratePreMonomorphic(MacroAssembler* masm) {
538 static void GenerateGeneric(MacroAssembler* masm);
539 static void GenerateString(MacroAssembler* masm);
540 static void GenerateIndexedInterceptor(MacroAssembler* masm);
541 static void GenerateSloppyArguments(MacroAssembler* masm);
543 // Bit mask to be tested against bit field for the cases when
544 // generic stub should go into slow case.
545 // Access check is necessary explicitly since generic stub does not perform
547 static const int kSlowCaseBitFieldMask =
548 (1 << Map::kIsAccessCheckNeeded) | (1 << Map::kHasIndexedInterceptor);
550 static Handle<Code> generic_stub(Isolate* isolate);
551 static Handle<Code> pre_monomorphic_stub(Isolate* isolate);
554 Handle<Code> LoadElementStub(Handle<JSObject> receiver);
555 virtual Handle<Code> pre_monomorphic_stub() const {
556 return pre_monomorphic_stub(isolate());
// Convenience accessors for the specialized keyed-load builtins.
560 Handle<Code> generic_stub() const { return generic_stub(isolate()); }
561 Handle<Code> indexed_interceptor_stub() {
562 return isolate()->builtins()->KeyedLoadIC_IndexedInterceptor();
564 Handle<Code> sloppy_arguments_stub() {
565 return isolate()->builtins()->KeyedLoadIC_SloppyArguments();
567 Handle<Code> string_stub() {
568 return isolate()->builtins()->KeyedLoadIC_String();
571 static void Clear(Isolate* isolate,
574 ConstantPoolArray* constant_pool);
// Inline cache for named property stores. StrictMode is encoded in bit 1 of
// ExtraICState (see StrictModeState); subclass KeyedStoreIC packs additional
// bits above it.
580 class StoreIC: public IC {
582 class StrictModeState: public BitField<StrictMode, 1, 1> {};
583 static ExtraICState ComputeExtraICState(StrictMode flag) {
584 return StrictModeState::encode(flag);
586 static StrictMode GetStrictMode(ExtraICState state) {
587 return StrictModeState::decode(state);
590 // For convenience, a statically declared encoding of strict mode extra
592 static const ExtraICState kStrictModeState =
593 1 << StrictModeState::kShift;
595 enum ParameterIndices {
// Calling-convention registers; definitions are per-architecture.
601 static const Register ReceiverRegister();
602 static const Register NameRegister();
603 static const Register ValueRegister();
605 StoreIC(FrameDepth depth, Isolate* isolate)
606 : IC(depth, isolate) {
607 DCHECK(IsStoreStub());
610 StrictMode strict_mode() const {
611 return StrictModeState::decode(extra_ic_state());
614 // Code generators for stub routines. Only called once at startup.
615 static void GenerateSlow(MacroAssembler* masm);
616 static void GenerateInitialize(MacroAssembler* masm) { GenerateMiss(masm); }
617 static void GeneratePreMonomorphic(MacroAssembler* masm) {
620 static void GenerateMiss(MacroAssembler* masm);
621 static void GenerateMegamorphic(MacroAssembler* masm);
622 static void GenerateNormal(MacroAssembler* masm);
623 static void GenerateRuntimeSetProperty(MacroAssembler* masm,
624 StrictMode strict_mode);
626 static Handle<Code> initialize_stub(Isolate* isolate,
627 StrictMode strict_mode);
629 MUST_USE_RESULT MaybeHandle<Object> Store(
630 Handle<Object> object,
632 Handle<Object> value,
633 JSReceiver::StoreFromKeyed store_mode =
634 JSReceiver::CERTAINLY_NOT_STORE_FROM_KEYED);
637 virtual Handle<Code> megamorphic_stub();
640 virtual Handle<Code> generic_stub() const;
642 virtual Handle<Code> slow_stub() const {
643 return isolate()->builtins()->StoreIC_Slow();
646 virtual Handle<Code> pre_monomorphic_stub() const {
647 return pre_monomorphic_stub(isolate(), strict_mode());
650 static Handle<Code> pre_monomorphic_stub(Isolate* isolate,
651 StrictMode strict_mode);
653 // Update the inline cache and the global stub cache based on the
655 void UpdateCaches(LookupResult* lookup,
656 Handle<JSObject> receiver,
658 Handle<Object> value);
659 virtual Handle<Code> CompileStoreHandler(LookupResult* lookup,
660 Handle<Object> object,
662 Handle<Object> value,
663 CacheHolderFlag cache_holder);
// Overrides IC::set_target to assert strict mode is never changed by
// patching.
666 void set_target(Code* code) {
667 // Strict mode must be preserved across IC patching.
668 DCHECK(GetStrictMode(code->extra_ic_state()) ==
669 GetStrictMode(target()->extra_ic_state()));
670 IC::set_target(code);
673 static void Clear(Isolate* isolate,
676 ConstantPoolArray* constant_pool);
// Options controlling keyed-store stub generation.
// NOTE(review): the enumerators of KeyedStoreCheckMap (and most of
// KeyedStoreIncrementLength) are missing from this partial dump.
682 enum KeyedStoreCheckMap {
688 enum KeyedStoreIncrementLength {
689 kDontIncrementLength,
// Inline cache for keyed (indexed) property stores. Extends StoreIC's
// ExtraICState layout with the KeyedAccessStoreMode in bits 2..5.
694 class KeyedStoreIC: public StoreIC {
696 // ExtraICState bits (building on IC)
698 class ExtraICStateKeyedAccessStoreMode:
699 public BitField<KeyedAccessStoreMode, 2, 4> {}; // NOLINT
// Combines the StoreIC strict-mode bit with the keyed store mode bits.
701 static ExtraICState ComputeExtraICState(StrictMode flag,
702 KeyedAccessStoreMode mode) {
703 return StrictModeState::encode(flag) |
704 ExtraICStateKeyedAccessStoreMode::encode(mode);
707 static KeyedAccessStoreMode GetKeyedAccessStoreMode(
708 ExtraICState extra_state) {
709 return ExtraICStateKeyedAccessStoreMode::decode(extra_state);
712 // The map register isn't part of the normal call specification, but
713 // ElementsTransitionAndStoreStub, used in polymorphic keyed store
714 // stub implementations requires it to be initialized.
715 static const Register MapRegister();
717 KeyedStoreIC(FrameDepth depth, Isolate* isolate)
718 : StoreIC(depth, isolate) {
719 DCHECK(target()->is_keyed_store_stub());
722 MUST_USE_RESULT MaybeHandle<Object> Store(Handle<Object> object,
724 Handle<Object> value);
726 // Code generators for stub routines. Only called once at startup.
727 static void GenerateInitialize(MacroAssembler* masm) { GenerateMiss(masm); }
728 static void GeneratePreMonomorphic(MacroAssembler* masm) {
731 static void GenerateMiss(MacroAssembler* masm);
732 static void GenerateSlow(MacroAssembler* masm);
733 static void GenerateRuntimeSetProperty(MacroAssembler* masm,
734 StrictMode strict_mode);
735 static void GenerateGeneric(MacroAssembler* masm, StrictMode strict_mode);
736 static void GenerateSloppyArguments(MacroAssembler* masm);
// Strict-mode-aware builtin selectors.
739 virtual Handle<Code> pre_monomorphic_stub() const {
740 return pre_monomorphic_stub(isolate(), strict_mode());
742 static Handle<Code> pre_monomorphic_stub(Isolate* isolate,
743 StrictMode strict_mode) {
744 if (strict_mode == STRICT) {
745 return isolate->builtins()->KeyedStoreIC_PreMonomorphic_Strict();
747 return isolate->builtins()->KeyedStoreIC_PreMonomorphic();
750 virtual Handle<Code> slow_stub() const {
751 return isolate()->builtins()->KeyedStoreIC_Slow();
// NOTE(review): megamorphic_stub and generic_stub (below) return the same
// Generic builtins here — presumably intentional for keyed stores; confirm
// against the dispatch sites.
753 virtual Handle<Code> megamorphic_stub() {
754 if (strict_mode() == STRICT) {
755 return isolate()->builtins()->KeyedStoreIC_Generic_Strict();
757 return isolate()->builtins()->KeyedStoreIC_Generic();
761 Handle<Code> StoreElementStub(Handle<JSObject> receiver,
762 KeyedAccessStoreMode store_mode);
765 void set_target(Code* code) {
766 // Strict mode must be preserved across IC patching.
767 DCHECK(GetStrictMode(code->extra_ic_state()) == strict_mode());
768 IC::set_target(code);
772 virtual Handle<Code> generic_stub() const {
773 if (strict_mode() == STRICT) {
774 return isolate()->builtins()->KeyedStoreIC_Generic_Strict();
776 return isolate()->builtins()->KeyedStoreIC_Generic();
780 Handle<Code> sloppy_arguments_stub() {
781 return isolate()->builtins()->KeyedStoreIC_SloppyArguments();
784 static void Clear(Isolate* isolate,
787 ConstantPoolArray* constant_pool);
789 KeyedAccessStoreMode GetStoreMode(Handle<JSObject> receiver,
791 Handle<Object> value);
793 Handle<Map> ComputeTransitionedMap(Handle<Map> map,
794 KeyedAccessStoreMode store_mode);
800 // Mode to overwrite BinaryExpression values.
// Used by BinaryOpIC::State (see CanReuseDoubleBox) to decide whether a
// heap-number operand's box may be reused for the result.
801 enum OverwriteMode { NO_OVERWRITE, OVERWRITE_LEFT, OVERWRITE_RIGHT };
803 // Type Recording BinaryOpIC, that records the types of the inputs and outputs.
804 class BinaryOpIC: public IC {
// State records the operand/result kinds (NONE < SMI < INT32 < NUMBER <
// STRING < GENERIC) for one binary operation site and packs them, plus the
// token and overwrite mode, into ExtraICState via the BitFields below.
806 class State V8_FINAL BASE_EMBEDDED {
808 State(Isolate* isolate, ExtraICState extra_ic_state);
810 State(Isolate* isolate, Token::Value op, OverwriteMode mode)
811 : op_(op), mode_(mode), left_kind_(NONE), right_kind_(NONE),
812 result_kind_(NONE), isolate_(isolate) {
813 DCHECK_LE(FIRST_TOKEN, op);
814 DCHECK_LE(op, LAST_TOKEN);
// Maps operand kinds onto the IC state lattice: both NONE -> UNINITIALIZED;
// any GENERIC -> MEGAMORPHIC; otherwise MONOMORPHIC.
// NOTE(review): the Min(...) == GENERIC branch looks unreachable — GENERIC
// is the maximal Kind, so Max == GENERIC whenever Min == GENERIC and the
// earlier test already returned. Confirm before relying on the GENERIC state.
817 InlineCacheState GetICState() const {
818 if (Max(left_kind_, right_kind_) == NONE) {
819 return ::v8::internal::UNINITIALIZED;
821 if (Max(left_kind_, right_kind_) == GENERIC) {
822 return ::v8::internal::MEGAMORPHIC;
824 if (Min(left_kind_, right_kind_) == GENERIC) {
825 return ::v8::internal::GENERIC;
827 return ::v8::internal::MONOMORPHIC;
830 ExtraICState GetExtraICState() const;
832 static void GenerateAheadOfTime(
833 Isolate*, void (*Generate)(Isolate*, const State&));
// True when the result is a heap number (kind in (SMI, NUMBER]) and the
// to-be-overwritten operand is also a heap number, so its box can be reused.
835 bool CanReuseDoubleBox() const {
836 return (result_kind_ > SMI && result_kind_ <= NUMBER) &&
837 ((mode_ == OVERWRITE_LEFT &&
838 left_kind_ > SMI && left_kind_ <= NUMBER) ||
839 (mode_ == OVERWRITE_RIGHT &&
840 right_kind_ > SMI && right_kind_ <= NUMBER));
843 // Returns true if the IC _could_ create allocation mementos.
844 bool CouldCreateAllocationMementos() const {
845 if (left_kind_ == STRING || right_kind_ == STRING) {
846 DCHECK_EQ(Token::ADD, op_);
852 // Returns true if the IC _should_ create allocation mementos.
853 bool ShouldCreateAllocationMementos() const {
854 return FLAG_allocation_site_pretenuring &&
855 CouldCreateAllocationMementos();
858 bool HasSideEffects() const {
859 return Max(left_kind_, right_kind_) == GENERIC;
862 // Returns true if the IC should enable the inline smi code (i.e. if either
863 // parameter may be a smi).
864 bool UseInlinedSmiCode() const {
865 return KindMaybeSmi(left_kind_) || KindMaybeSmi(right_kind_);
868 static const int FIRST_TOKEN = Token::BIT_OR;
869 static const int LAST_TOKEN = Token::MOD;
871 Token::Value op() const { return op_; }
872 OverwriteMode mode() const { return mode_; }
873 Maybe<int> fixed_right_arg() const { return fixed_right_arg_; }
875 Type* GetLeftType(Zone* zone) const {
876 return KindToType(left_kind_, zone);
878 Type* GetRightType(Zone* zone) const {
879 return KindToType(right_kind_, zone);
881 Type* GetResultType(Zone* zone) const;
// Folds an observed (left, right, result) triple into the recorded kinds.
883 void Update(Handle<Object> left,
884 Handle<Object> right,
885 Handle<Object> result);
887 Isolate* isolate() const { return isolate_; }
890 friend OStream& operator<<(OStream& os, const BinaryOpIC::State& s);
892 enum Kind { NONE, SMI, INT32, NUMBER, STRING, GENERIC };
894 Kind UpdateKind(Handle<Object> object, Kind kind) const;
896 static const char* KindToString(Kind kind);
897 static Type* KindToType(Kind kind, Zone* zone);
898 static bool KindMaybeSmi(Kind kind) {
899 return (kind >= SMI && kind <= NUMBER) || kind == GENERIC;
902 // We truncate the last bit of the token.
903 STATIC_ASSERT(LAST_TOKEN - FIRST_TOKEN < (1 << 4));
904 class OpField: public BitField<int, 0, 4> {};
905 class OverwriteModeField: public BitField<OverwriteMode, 4, 2> {};
906 class ResultKindField: public BitField<Kind, 6, 3> {};
907 class LeftKindField: public BitField<Kind, 9, 3> {};
908 // When fixed right arg is set, we don't need to store the right kind.
909 // Thus the two fields can overlap.
910 class HasFixedRightArgField: public BitField<bool, 12, 1> {};
911 class FixedRightArgValueField: public BitField<int, 13, 4> {};
912 class RightKindField: public BitField<Kind, 13, 3> {};
919 Maybe<int> fixed_right_arg_;
923 explicit BinaryOpIC(Isolate* isolate) : IC(EXTRA_CALL_FRAME, isolate) { }
925 static Builtins::JavaScript TokenToJSBuiltin(Token::Value op);
927 MaybeHandle<Object> Transition(Handle<AllocationSite> allocation_site,
929 Handle<Object> right) V8_WARN_UNUSED_RESULT;
// Pretty-printer for BinaryOpIC::State (defined elsewhere).
933 OStream& operator<<(OStream& os, const BinaryOpIC::State& s);
// Inline cache for comparison operations; records input states on a lattice
// (partially documented below; some lattice lines are missing in this dump).
936 class CompareIC: public IC {
938 // The type/state lattice is defined by the following inequations:
939 // UNINITIALIZED < ...
942 // INTERNALIZED_STRING < STRING
943 // KNOWN_OBJECT < OBJECT
950 UNIQUE_NAME, // Symbol or InternalizedString
952 KNOWN_OBJECT, // JSObject with specific map (faster check)
956 static State NewInputState(State old_state, Handle<Object> value);
958 static Type* StateToType(Zone* zone,
960 Handle<Map> map = Handle<Map>());
962 static void StubInfoToType(uint32_t stub_key, Type** left_type,
963 Type** right_type, Type** overall_type,
964 Handle<Map> map, Zone* zone);
966 CompareIC(Isolate* isolate, Token::Value op)
967 : IC(EXTRA_CALL_FRAME, isolate), op_(op) { }
969 // Update the inline cache for the given operands.
970 Code* UpdateCaches(Handle<Object> x, Handle<Object> y);
973 // Factory method for getting an uninitialized compare stub.
974 static Handle<Code> GetUninitialized(Isolate* isolate, Token::Value op);
976 // Helper function for computing the condition for a compare operation.
977 static Condition ComputeCondition(Token::Value op);
979 static const char* GetStateName(State state);
982 static bool HasInlinedSmiCode(Address address);
984 State TargetState(State old_state,
987 bool has_inlined_smi_code,
// Strictness is derived from the cached token, not stored separately.
991 bool strict() const { return op_ == Token::EQ_STRICT; }
992 Condition GetCondition() const { return ComputeCondition(op_); }
994 static Code* GetRawUninitialized(Isolate* isolate, Token::Value op);
996 static void Clear(Isolate* isolate,
999 ConstantPoolArray* constant_pool);
// Inline cache for comparisons against null/undefined (x == null etc.).
1007 class CompareNilIC: public IC {
1009 explicit CompareNilIC(Isolate* isolate) : IC(EXTRA_CALL_FRAME, isolate) {}
1011 Handle<Object> CompareNil(Handle<Object> object);
1013 static Handle<Code> GetUninitialized();
1015 static void Clear(Address address,
1017 ConstantPoolArray* constant_pool);
// Slow-path fallback used when the stub cannot handle the comparison.
1019 static Handle<Object> DoCompareNilSlow(Isolate* isolate, NilValue nil,
1020 Handle<Object> object);
// Inline cache for ToBoolean conversions (truthiness tests).
1024 class ToBooleanIC: public IC {
1026 explicit ToBooleanIC(Isolate* isolate) : IC(EXTRA_CALL_FRAME, isolate) { }
1028 Handle<Object> ToBoolean(Handle<Object> object);
1032 // Helper for BinaryOpIC and CompareIC.
1033 enum InlinedSmiCheck { ENABLE_INLINED_SMI_CHECK, DISABLE_INLINED_SMI_CHECK };
// Toggles the inlined smi fast-path check at a patched call site.
1034 void PatchInlinedSmiCode(Address address, InlinedSmiCheck check);
// Runtime entry points invoked from generated IC stub code on miss or
// stub failure.
1036 DECLARE_RUNTIME_FUNCTION(KeyedLoadIC_MissFromStubFailure);
1037 DECLARE_RUNTIME_FUNCTION(KeyedStoreIC_MissFromStubFailure);
1038 DECLARE_RUNTIME_FUNCTION(UnaryOpIC_Miss);
1039 DECLARE_RUNTIME_FUNCTION(StoreIC_MissFromStubFailure);
1040 DECLARE_RUNTIME_FUNCTION(ElementsTransitionAndStoreIC_Miss);
1041 DECLARE_RUNTIME_FUNCTION(BinaryOpIC_Miss);
1042 DECLARE_RUNTIME_FUNCTION(BinaryOpIC_MissWithAllocationSite);
1043 DECLARE_RUNTIME_FUNCTION(CompareNilIC_Miss);
1044 DECLARE_RUNTIME_FUNCTION(ToBooleanIC_Miss);
1047 } } // namespace v8::internal