1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 #include "accessors.h"
32 #include "arguments.h"
34 #include "execution.h"
37 #include "stub-cache.h"
38 #include "v8conversions.h"
// Maps an inline-cache state to the single-character marker used in
// --trace-ic output (e.g. "0 -> 1" for UNINITIALIZED -> MONOMORPHIC).
// NOTE(review): the opening "switch (state)" line and the function tail
// (UNREACHABLE + fallback return) are not visible in this chunk.
char IC::TransitionMarkFromState(IC::State state) {
    case UNINITIALIZED: return '0';
    case PREMONOMORPHIC: return '.';
    case MONOMORPHIC: return '1';
    case MONOMORPHIC_PROTOTYPE_FAILURE: return '^';
    case POLYMORPHIC: return 'P';
    case MEGAMORPHIC: return 'N';
    case GENERIC: return 'G';

    // We never see the debugger states here, because the state is
    // computed from the original code - not the patched code. Let
    // these cases fall through to the unreachable code below.
    case DEBUG_STUB: break;
// Returns a suffix for the --trace-ic transition marker describing a
// special keyed-access store mode (".COW", ".GROW", ...).
// NOTE(review): the body of the IGNORE_OUT_OF_BOUNDS branch and the
// default return are not visible in this chunk.
const char* GetTransitionMarkModifier(KeyedAccessStoreMode mode) {
  if (mode == STORE_NO_TRANSITION_HANDLE_COW) return ".COW";
  if (mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS) {
  if (IsGrowStoreMode(mode)) return ".GROW";
// Prints one --trace-ic line for a state transition of this IC: the IC
// kind, the JavaScript frame it happened in, and the old/new state
// markers (plus any keyed-store modifier).
void IC::TraceIC(const char* type,
                 Handle<Object> name) {
  Code* new_target = raw_target();
  State new_state = new_target->ic_state();
  PrintF("[%s%s in ", new_target->is_keyed_stub() ? "Keyed" : "", type);
  // Walk the stack down to the physical frame this IC was entered from.
  StackFrameIterator it(isolate());
  while (it.frame()->fp() != this->fp()) it.Advance();
  StackFrame* raw_frame = it.frame();
  if (raw_frame->is_internal()) {
    // Calls routed through Function.prototype.apply sit behind an
    // internal builtin frame; report the real calling frame instead.
    Code* apply_builtin = isolate()->builtins()->builtin(
        Builtins::kFunctionApply);
    if (raw_frame->unchecked_code() == apply_builtin) {
      PrintF("apply from ");
      raw_frame = it.frame();
  JavaScriptFrame::PrintTop(isolate(), stdout, false, true);
  ExtraICState extra_state = new_target->extra_ic_state();
  const char* modifier =
      GetTransitionMarkModifier(
          KeyedStoreIC::GetKeyedAccessStoreMode(extra_state));
      TransitionMarkFromState(state()),
      TransitionMarkFromState(new_state),
// Logs a "patching generic stub" event (with frame and reason) under
// --trace-ic; compiled in only when the surrounding #ifdef enables it.
#define TRACE_GENERIC_IC(isolate, type, reason) \
  if (FLAG_trace_ic) { \
    PrintF("[%s patching generic stub in ", type); \
    JavaScriptFrame::PrintTop(isolate, stdout, false, true); \
    PrintF(" (%s)]\n", reason); \
#define TRACE_GENERIC_IC(isolate, type, reason)
// TRACE_IC evaluates TraceIC(...) for its side effect only: the comma
// expression always yields true, so the ASSERT never fires.
#define TRACE_IC(type, name) \
  ASSERT((TraceIC(type, name), true))
// Constructs an IC for the call site that just missed. Locates the
// frame pointer and the return-address slot of the calling JS frame by
// hand-unfolding the stack walk, then caches the current target code
// object and its state.
// NOTE(review): the member-initializer list is not visible in this chunk.
IC::IC(FrameDepth depth, Isolate* isolate)
  // To improve the performance of the (much used) IC code, we unfold a few
  // levels of the stack frame iteration code. This yields a ~35% speedup when
  // running DeltaBlue and a ~25% speedup of gbemu with the '--nouse-ic' flag.
  const Address entry =
      Isolate::c_entry_fp(isolate->thread_local_top());
  Address* pc_address =
      reinterpret_cast<Address*>(entry + ExitFrameConstants::kCallerPCOffset);
  Address fp = Memory::Address_at(entry + ExitFrameConstants::kCallerFPOffset);
  // If there's another JavaScript frame on the stack or a
  // StubFailureTrampoline, we need to look one frame further down the stack to
  // find the frame pointer and the return address stack slot.
  if (depth == EXTRA_CALL_FRAME) {
    const int kCallerPCOffset = StandardFrameConstants::kCallerPCOffset;
    pc_address = reinterpret_cast<Address*>(fp + kCallerPCOffset);
    fp = Memory::Address_at(fp + StandardFrameConstants::kCallerFPOffset);
  // Slow-but-safe cross-check of the unfolded walk above (debug only).
  StackFrameIterator it(isolate);
  for (int i = 0; i < depth + 1; i++) it.Advance();
  StackFrame* frame = it.frame();
  ASSERT(fp == frame->fp() && pc_address == frame->pc_address());
  pc_address_ = StackFrame::ResolveReturnAddressLocation(pc_address);
  target_ = handle(raw_target(), isolate);
  state_ = target_->ic_state();
  extra_ic_state_ = target_->needs_extended_extra_ic_state(target_->kind())
      ? target_->extended_extra_ic_state()
      : target_->extra_ic_state();
#ifdef ENABLE_DEBUGGER_SUPPORT
// Computes, for an IC site in debug-patched code, the corresponding
// address in the unpatched (original) code, which is where the IC
// machinery should actually look.
// NOTE(review): the final "return" combining the delta with |addr| is
// not visible in this chunk.
Address IC::OriginalCodeAddress() const {
  HandleScope scope(isolate());
  // Compute the JavaScript frame for the frame pointer of this IC
  // structure. We need this to be able to find the function
  // corresponding to the frame.
  StackFrameIterator it(isolate());
  while (it.frame()->fp() != this->fp()) it.Advance();
  JavaScriptFrame* frame = JavaScriptFrame::cast(it.frame());
  // Find the function on the stack and both the active code for the
  // function and the original code.
  JSFunction* function = frame->function();
  Handle<SharedFunctionInfo> shared(function->shared(), isolate());
  Code* code = shared->code();
  ASSERT(Debug::HasDebugInfo(shared));
  Code* original_code = Debug::GetDebugInfo(shared)->original_code();
  ASSERT(original_code->IsCode());
  // Get the address of the call site in the active code. This is the
  // place where the call to DebugBreakXXX is and where the IC
  // normally would be.
  Address addr = Assembler::target_address_from_return_address(pc());
  // Return the address in the original code. This is the place where
  // the call which has been overwritten by the DebugBreakXXX resides
  // and the place where the inline cache system should look.
      original_code->instruction_start() - code->instruction_start();
188 static bool HasInterceptorGetter(JSObject* object) {
189 return !object->GetNamedInterceptor()->getter()->IsUndefined();
193 static bool HasInterceptorSetter(JSObject* object) {
194 return !object->GetNamedInterceptor()->setter()->IsUndefined();
// Performs the property lookup used by LoadIC, skipping over holders
// whose named interceptor has no real getter so the result stays
// cacheable.
// NOTE(review): the "Handle<String> name" parameter line and the
// enclosing retry loop are not visible in this chunk.
static void LookupForRead(Handle<Object> object,
                          LookupResult* lookup) {
  // Skip all the objects with named interceptors, but
  // without actual getter.
    object->Lookup(*name, lookup);
    // Besides normal conditions (property not found or it's not
    // an interceptor), bail out if lookup is not cacheable: we won't
    // be able to IC it anyway and regular lookup should work fine.
    if (!lookup->IsInterceptor() || !lookup->IsCacheable()) {
    Handle<JSObject> holder(lookup->holder(), lookup->isolate());
    if (HasInterceptorGetter(*holder)) {
    holder->LocalLookupRealNamedProperty(*name, lookup);
    if (lookup->IsFound()) {
      ASSERT(!lookup->IsInterceptor());
    Handle<Object> proto(holder->GetPrototype(), lookup->isolate());
    if (proto->IsNull()) {
      ASSERT(!lookup->IsFound());
// Decides whether a monomorphic miss was caused by a prototype-chain
// change (rather than a receiver-map change), removing the stale stub
// from the map's code cache when so. Returning true lets the IC stay
// monomorphic (MONOMORPHIC_PROTOTYPE_FAILURE) instead of degrading.
bool IC::TryRemoveInvalidPrototypeDependentStub(Handle<Object> receiver,
                                                Handle<String> name) {
  if (target()->is_keyed_stub()) {
    // Determine whether the failure is due to a name failure.
    if (!name->IsName()) return false;
    Name* stub_name = target()->FindFirstName();
    if (*name != stub_name) return false;

  InlineCacheHolderFlag cache_holder =
      Code::ExtractCacheHolderFromFlags(target()->flags());

  switch (cache_holder) {
      // The stub was generated for JSObject but called for non-JSObject.
      // IC::GetCodeCacheHolder is not applicable.
      if (!receiver->IsJSObject()) return false;
      // IC::GetCodeCacheHolder is not applicable.
      if (receiver->GetPrototype(isolate())->IsNull()) return false;

      IC::GetCodeCacheHolder(isolate(), *receiver, cache_holder)->map());

  // Decide whether the inline cache failed because of changes to the
  // receiver itself or changes to one of its prototypes.
  //
  // If there are changes to the receiver itself, the map of the
  // receiver will have changed and the current target will not be in
  // the receiver map's code cache. Therefore, if the current target
  // is in the receiver map's code cache, the inline cache failed due
  // to prototype check failure.
  int index = map->IndexInCodeCache(*name, *target());
    map->RemoveFromCodeCache(*name, *target(), index);
    // Handlers are stored in addition to the ICs on the map. Remove those, too.
    TryRemoveInvalidHandlers(map, name);

  // The stub is not in the cache. We've ruled out all other kinds of failure
  // except for proptotype chain changes, a deprecated map, a map that's
  // different from the one that the stub expects, elements kind changes, or a
  // constant global property that will become mutable. Threat all those
  // situations as prototype failures (stay monomorphic if possible).

  // If the IC is shared between multiple receivers (slow dictionary mode), then
  // the map cannot be deprecated and the stub invalidated.
  if (cache_holder == OWN_MAP) {
    Map* old_map = target()->FindFirstMap();
    if (old_map == *map) return true;
    if (old_map != NULL) {
      if (old_map->is_deprecated()) return true;
      if (IsMoreGeneralElementsKindTransition(old_map->elements_kind(),
                                              map->elements_kind())) {

  // A constant global property that turned mutable also counts as a
  // prototype-style failure for globals.
  if (receiver->IsGlobalObject()) {
    LookupResult lookup(isolate());
    GlobalObject* global = GlobalObject::cast(*receiver);
    global->LocalLookupRealNamedProperty(*name, &lookup);
    if (!lookup.IsFound()) return false;
    PropertyCell* cell = global->GetPropertyCell(&lookup);
    return cell->type()->IsConstant();
// Removes this IC's handler stubs (stored next to the ICs themselves)
// from |map|'s code cache for property |name|.
// NOTE(review): RemoveFromCodeCache is presumably only reached when
// IndexInCodeCache found the handler (index >= 0) — that guard line is
// not visible in this chunk.
void IC::TryRemoveInvalidHandlers(Handle<Map> map, Handle<String> name) {
  CodeHandleList handlers;
  target()->FindHandlers(&handlers);
  for (int i = 0; i < handlers.length(); i++) {
    Handle<Code> handler = handlers.at(i);
    int index = map->IndexInCodeCache(*name, *handler);
      map->RemoveFromCodeCache(*name, *handler, index);
// Re-evaluates this IC's state on a miss: cleans up stale handlers for
// polymorphic ICs, detects prototype-chain failures (to stay
// monomorphic), and special-cases the builtins object.
void IC::UpdateState(Handle<Object> receiver, Handle<Object> name) {
  if (!name->IsString()) return;
  if (state() != MONOMORPHIC) {
    if (state() == POLYMORPHIC && receiver->IsHeapObject()) {
      TryRemoveInvalidHandlers(
          handle(Handle<HeapObject>::cast(receiver)->map()),
          Handle<String>::cast(name));
  if (receiver->IsUndefined() || receiver->IsNull()) return;

  // Remove the target from the code cache if it became invalid
  // because of changes in the prototype chain to avoid hitting it
  // again on the next miss.
  if (TryRemoveInvalidPrototypeDependentStub(
          receiver, Handle<String>::cast(name))) {
    return MarkMonomorphicPrototypeFailure();

  // The builtins object is special. It only changes when JavaScript
  // builtins are loaded lazily. It is important to keep inline
  // caches for the builtins object monomorphic. Therefore, if we get
  // an inline cache miss for the builtins object after lazily loading
  // JavaScript builtins, we return uninitialized as the state to
  // force the inline cache back to monomorphic state.
  if (receiver->IsJSBuiltinsObject()) state_ = UNINITIALIZED;
354 Failure* IC::TypeError(const char* type,
355 Handle<Object> object,
356 Handle<Object> key) {
357 HandleScope scope(isolate());
358 Handle<Object> args[2] = { key, object };
359 Handle<Object> error = isolate()->factory()->NewTypeError(
360 type, HandleVector(args, 2));
361 return isolate()->Throw(*error);
365 Failure* IC::ReferenceError(const char* type, Handle<String> name) {
366 HandleScope scope(isolate());
367 Handle<Object> error = isolate()->factory()->NewReferenceError(
368 type, HandleVector(&name, 1));
369 return isolate()->Throw(*error);
373 static int ComputeTypeInfoCountDelta(IC::State old_state, IC::State new_state) {
374 bool was_uninitialized =
375 old_state == UNINITIALIZED || old_state == PREMONOMORPHIC;
376 bool is_uninitialized =
377 new_state == UNINITIALIZED || new_state == PREMONOMORPHIC;
378 return (was_uninitialized && !is_uninitialized) ? 1 :
379 (!was_uninitialized && is_uninitialized) ? -1 : 0;
// Bookkeeping run after an IC at |address| is repatched from
// |old_target| to |target|: updates the host function's type-feedback
// counters/checksum, resets profiler ticks and notifies the runtime
// profiler.
void IC::PostPatching(Address address, Code* target, Code* old_target) {
  Isolate* isolate = target->GetHeap()->isolate();
  Code* host = isolate->
      inner_pointer_to_code_cache()->GetCacheEntry(address)->code;
  // Only unoptimized function code carries type feedback.
  if (host->kind() != Code::FUNCTION) return;

  if (FLAG_type_info_threshold > 0 &&
      old_target->is_inline_cache_stub() &&
      target->is_inline_cache_stub()) {
    int delta = ComputeTypeInfoCountDelta(old_target->ic_state(),
    // Not all Code objects have TypeFeedbackInfo.
    if (host->type_feedback_info()->IsTypeFeedbackInfo() && delta != 0) {
      TypeFeedbackInfo* info =
          TypeFeedbackInfo::cast(host->type_feedback_info());
      info->change_ic_with_type_info_count(delta);
  if (host->type_feedback_info()->IsTypeFeedbackInfo()) {
    TypeFeedbackInfo* info =
        TypeFeedbackInfo::cast(host->type_feedback_info());
    info->change_own_type_change_checksum();
  host->set_profiler_ticks(0);
  isolate->runtime_profiler()->NotifyICChanged();
  // TODO(2029): When an optimized function is patched, it would
  // be nice to propagate the corresponding type information to its
  // unoptimized version for the benefit of later inlining.
// Clears the IC at |address| back to a pristine stub, dispatching on the
// IC kind. BINARY_OP/TO_BOOLEAN ICs are deliberately left alone.
void IC::Clear(Isolate* isolate, Address address) {
  Code* target = GetTargetAtAddress(address);

  // Don't clear debug break inline cache as it will remove the break point.
  if (target->is_debug_stub()) return;

  switch (target->kind()) {
    case Code::LOAD_IC: return LoadIC::Clear(isolate, address, target);
    case Code::KEYED_LOAD_IC:
      return KeyedLoadIC::Clear(isolate, address, target);
    case Code::STORE_IC: return StoreIC::Clear(isolate, address, target);
    case Code::KEYED_STORE_IC:
      return KeyedStoreIC::Clear(isolate, address, target);
    case Code::COMPARE_IC: return CompareIC::Clear(isolate, address, target);
    case Code::COMPARE_NIL_IC: return CompareNilIC::Clear(address, target);
    case Code::BINARY_OP_IC:
    case Code::TO_BOOLEAN_IC:
      // Clearing these is tricky and does not
      // make any performance difference.
    default: UNREACHABLE();
439 void KeyedLoadIC::Clear(Isolate* isolate, Address address, Code* target) {
440 if (IsCleared(target)) return;
441 // Make sure to also clear the map used in inline fast cases. If we
442 // do not clear these maps, cached code can keep objects alive
443 // through the embedded maps.
444 SetTargetAtAddress(address, *pre_monomorphic_stub(isolate));
448 void LoadIC::Clear(Isolate* isolate, Address address, Code* target) {
449 if (IsCleared(target)) return;
450 Code* code = target->GetIsolate()->stub_cache()->FindPreMonomorphicIC(
451 Code::LOAD_IC, target->extra_ic_state());
452 SetTargetAtAddress(address, code);
456 void StoreIC::Clear(Isolate* isolate, Address address, Code* target) {
457 if (IsCleared(target)) return;
458 Code* code = target->GetIsolate()->stub_cache()->FindPreMonomorphicIC(
459 Code::STORE_IC, target->extra_ic_state());
460 SetTargetAtAddress(address, code);
464 void KeyedStoreIC::Clear(Isolate* isolate, Address address, Code* target) {
465 if (IsCleared(target)) return;
466 SetTargetAtAddress(address,
467 *pre_monomorphic_stub(
468 isolate, StoreIC::GetStrictMode(target->extra_ic_state())));
// Resets a compare IC to its uninitialized stub, but only when it could
// be retaining heap objects (KNOWN_OBJECT state); also disables the
// corresponding inlined smi check at the call site.
// NOTE(review): the declaration of |op| (presumably "Token::Value op;")
// is not visible in this chunk.
void CompareIC::Clear(Isolate* isolate, Address address, Code* target) {
  ASSERT(target->major_key() == CodeStub::CompareIC);
  CompareIC::State handler_state;
  ICCompareStub::DecodeMinorKey(target->stub_info(), NULL, NULL,
                                &handler_state, &op);
  // Only clear CompareICs that can retain objects.
  if (handler_state != KNOWN_OBJECT) return;
  SetTargetAtAddress(address, GetRawUninitialized(isolate, op));
  PatchInlinedSmiCode(address, DISABLE_INLINED_SMI_CHECK);
// If |object| is a JSObject whose map is deprecated, migrates the
// instance to the up-to-date map. Used to decide whether an IC miss
// should skip IC patching for this access.
// NOTE(review): the success return after MigrateInstance is not visible
// in this chunk.
static bool MigrateDeprecated(Handle<Object> object) {
  if (!object->IsJSObject()) return false;
  Handle<JSObject> receiver = Handle<JSObject>::cast(object);
  if (!receiver->map()->is_deprecated()) return false;
  JSObject::MigrateInstance(Handle<JSObject>::cast(object));
// Entry point for a load IC miss: handles the special fast paths
// (string/wrapper length, function prototype, array-index keys), then
// falls back to a full property lookup, updating the IC's target stub
// along the way.
MaybeObject* LoadIC::Load(Handle<Object> object,
                          Handle<String> name) {
  // If the object is undefined or null it's illegal to try to get any
  // of its properties; throw a TypeError in that case.
  if (object->IsUndefined() || object->IsNull()) {
    return TypeError("non_object_property_load", object, name);

  // Use specialized code for getting the length of strings and
  // string wrapper objects. The length property of string wrapper
  // objects is read-only and therefore always returns the length of
  // the underlying string value. See ECMA-262 15.5.5.1.
  if (object->IsStringWrapper() &&
      name->Equals(isolate()->heap()->length_string())) {
    if (state() == UNINITIALIZED) {
      stub = pre_monomorphic_stub();
    } else if (state() == PREMONOMORPHIC || state() == MONOMORPHIC) {
      StringLengthStub string_length_stub(kind());
      stub = string_length_stub.GetCode(isolate());
    } else if (state() != MEGAMORPHIC) {
      ASSERT(state() != GENERIC);
      stub = megamorphic_stub();
    if (!stub.is_null()) {
      if (FLAG_trace_ic) PrintF("[LoadIC : +#length /stringwrapper]\n");
    // Get the string if we have a string wrapper object.
    String* string = String::cast(JSValue::cast(*object)->value());
    return Smi::FromInt(string->length());

  // Use specialized code for getting prototype of functions.
  if (object->IsJSFunction() &&
      name->Equals(isolate()->heap()->prototype_string()) &&
      Handle<JSFunction>::cast(object)->should_have_prototype()) {
    if (state() == UNINITIALIZED) {
      stub = pre_monomorphic_stub();
    } else if (state() == PREMONOMORPHIC) {
      FunctionPrototypeStub function_prototype_stub(kind());
      stub = function_prototype_stub.GetCode(isolate());
    } else if (state() != MEGAMORPHIC) {
      ASSERT(state() != GENERIC);
      stub = megamorphic_stub();
    if (!stub.is_null()) {
      if (FLAG_trace_ic) PrintF("[LoadIC : +#prototype /function]\n");
    return *Accessors::FunctionGetPrototype(Handle<JSFunction>::cast(object));

  // Check if the name is trivially convertible to an index and get
  // the element or char if so.
  if (kind() == Code::KEYED_LOAD_IC && name->AsArrayIndex(&index)) {
    // Rewrite to the generic keyed load stub.
    if (FLAG_use_ic) set_target(*generic_stub());
    return Runtime::GetElementOrCharAtOrFail(isolate(), object, index);

  // Deprecated-map receivers are migrated instead of IC-patched.
  bool use_ic = MigrateDeprecated(object) ? false : FLAG_use_ic;

  // Named lookup in the object.
  LookupResult lookup(isolate());
  LookupForRead(object, name, &lookup);

  // If we did not find a property, check if we need to throw an exception.
  if (!lookup.IsFound()) {
    if (IsUndeclaredGlobal(object)) {
      return ReferenceError("not_defined", name);
    LOG(isolate(), SuspectReadEvent(*name, *object));

  // Update inline cache and stub cache.
  if (use_ic) UpdateCaches(&lookup, object, name);

  PropertyAttributes attr;
    Handle<Object> result =
        Object::GetProperty(object, object, &lookup, name, &attr);
    RETURN_IF_EMPTY_HANDLE(isolate(), result);
    // If the property is not present, check if we need to throw an
    // exception.
    if ((lookup.IsInterceptor() || lookup.IsHandler()) &&
        attr == ABSENT && IsUndeclaredGlobal(object)) {
      return ReferenceError("not_defined", name);
// Adds |new_receiver_map| to |receiver_maps| unless an identical map is
// already present.
// NOTE(review): the return statements (presumably false on duplicate,
// true after Add) are not visible in this chunk.
static bool AddOneReceiverMapIfMissing(MapHandleList* receiver_maps,
                                       Handle<Map> new_receiver_map) {
  ASSERT(!new_receiver_map.is_null());
  for (int current = 0; current < receiver_maps->length(); ++current) {
    if (!receiver_maps->at(current).is_null() &&
        receiver_maps->at(current).is_identical_to(new_receiver_map)) {
  receiver_maps->Add(new_receiver_map);
// Tries to fold |code| (a handler for |type|) into a polymorphic IC.
// Returns false when a polymorphic IC cannot be built (not a handler,
// too many types, no recorded types/handlers).
bool IC::UpdatePolymorphicIC(Handle<HeapType> type,
  if (!code->is_handler()) return false;
  TypeHandleList types;
  CodeHandleList handlers;

  int number_of_valid_types;
  int handler_to_overwrite = -1;

  target()->FindAllTypes(&types);
  int number_of_types = types.length();
  number_of_valid_types = number_of_types;

  for (int i = 0; i < number_of_types; i++) {
    Handle<HeapType> current_type = types.at(i);
    // Filter out deprecated maps to ensure their instances get migrated.
    if (current_type->IsClass() && current_type->AsClass()->is_deprecated()) {
      number_of_valid_types--;
    // If the receiver type is already in the polymorphic IC, this indicates
    // there was a prototoype chain failure. In that case, just overwrite the
    // handler.
    } else if (type->IsCurrently(current_type)) {
      ASSERT(handler_to_overwrite == -1);
      number_of_valid_types--;
      handler_to_overwrite = i;

  // Polymorphic ICs cap out at 4 valid types.
  if (number_of_valid_types >= 4) return false;
  if (number_of_types == 0) return false;
  if (!target()->FindHandlers(&handlers, types.length())) return false;

  number_of_valid_types++;
  if (handler_to_overwrite >= 0) {
    handlers.Set(handler_to_overwrite, code);

  Handle<Code> ic = isolate()->stub_cache()->ComputePolymorphicIC(
      &types, &handlers, number_of_valid_types, name, extra_ic_state());
654 Handle<HeapType> IC::CurrentTypeOf(Handle<Object> object, Isolate* isolate) {
655 return object->IsJSGlobalObject()
656 ? HeapType::Constant(Handle<JSGlobalObject>::cast(object), isolate)
657 : HeapType::OfCurrently(object, isolate);
// Converts a feedback |type| back to the concrete map the IC should
// check against (heap number, SIMD, oddball, the global's own map, or
// the class map).
Handle<Map> IC::TypeToMap(HeapType* type, Isolate* isolate) {
  if (type->Is(HeapType::Number()))
    return isolate->factory()->heap_number_map();
  if (type->Is(HeapType::Float32x4()))
    return isolate->factory()->float32x4_map();
  if (type->Is(HeapType::Int32x4()))
    return isolate->factory()->int32x4_map();
  if (type->Is(HeapType::Boolean())) return isolate->factory()->oddball_map();
  if (type->IsConstant()) {
    // Constant feedback is only recorded for global objects (see
    // CurrentTypeOf), so the constant's map is the global's map.
    return handle(Handle<JSGlobalObject>::cast(type->AsConstant())->map());
  ASSERT(type->IsClass());
  return type->AsClass();
// Inverse of TypeToMap: lifts a concrete |map| to the type system T
// (Type or HeapType), collapsing the primitive maps to their primitive
// types. NOTE(review): the "template <class T>" header and the
// "template" keywords on the explicit instantiations below are not
// visible in this chunk.
typename T::TypeHandle IC::MapToType(Handle<Map> map,
                                     typename T::Region* region) {
  if (map->instance_type() == HEAP_NUMBER_TYPE) {
    return T::Number(region);
  } else if (map->instance_type() == FLOAT32x4_TYPE) {
    return T::Float32x4(region);
  } else if (map->instance_type() == INT32x4_TYPE) {
    return T::Int32x4(region);
  } else if (map->instance_type() == ODDBALL_TYPE) {
    // The only oddballs that can be recorded in ICs are booleans.
    return T::Boolean(region);
  return T::Class(map, region);


// Explicit instantiations for the two type systems used by ICs.
Type* IC::MapToType<Type>(Handle<Map> map, Zone* zone);
Handle<HeapType> IC::MapToType<HeapType>(Handle<Map> map, Isolate* region);
// Installs a monomorphic IC for (|type|, |handler|). A non-handler
// |handler| is a full IC stub and is installed directly.
// NOTE(review): the trailing set_target(*ic) is not visible in this
// chunk.
void IC::UpdateMonomorphicIC(Handle<HeapType> type,
                             Handle<Code> handler,
                             Handle<String> name) {
  if (!handler->is_handler()) return set_target(*handler);
  Handle<Code> ic = isolate()->stub_cache()->ComputeMonomorphicIC(
      name, type, handler, extra_ic_state());
713 void IC::CopyICToMegamorphicCache(Handle<String> name) {
714 TypeHandleList types;
715 CodeHandleList handlers;
716 target()->FindAllTypes(&types);
717 if (!target()->FindHandlers(&handlers, types.length())) return;
718 for (int i = 0; i < types.length(); i++) {
719 UpdateMegamorphicCache(*types.at(i), *name, *handlers.at(i));
// Returns true when the incoming |type| is an elements-kind transition
// of the map the current monomorphic target was compiled for — i.e. the
// miss is a benign transition rather than a genuinely new receiver type.
// NOTE(review): the ternary's alternative arm (presumably ": NULL;") is
// not visible in this chunk.
bool IC::IsTransitionOfMonomorphicTarget(Handle<HeapType> type) {
  if (!type->IsClass()) return false;
  Map* receiver_map = *type->AsClass();
  Map* current_map = target()->FindFirstMap();
  ElementsKind receiver_elements_kind = receiver_map->elements_kind();
  bool more_general_transition =
      IsMoreGeneralElementsKindTransition(
        current_map->elements_kind(), receiver_elements_kind);
  Map* transitioned_map = more_general_transition
      ? current_map->LookupElementsTransitionMap(receiver_elements_kind)

  return transitioned_map == receiver_map;
// Installs |code| as the handler for (|type|, |name|) according to the
// IC's current state: monomorphic update, polymorphic growth, or
// megamorphic stub-cache insertion.
// NOTE(review): the switch header and several case labels/breaks are not
// visible in this chunk.
void IC::PatchCache(Handle<HeapType> type,
    case MONOMORPHIC_PROTOTYPE_FAILURE:
      UpdateMonomorphicIC(type, code, name);
      // For now, call stubs are allowed to rewrite to the same stub. This
      // happens e.g., when the field does not contain a function.
      ASSERT(!target().is_identical_to(code));
      Code* old_handler = target()->FindFirstHandler();
      if (old_handler == *code && IsTransitionOfMonomorphicTarget(type)) {
        UpdateMonomorphicIC(type, code, name);

      if (!target()->is_keyed_stub()) {
        if (UpdatePolymorphicIC(type, name, code)) break;
        // Preserve the existing feedback before going megamorphic.
        CopyICToMegamorphicCache(name);
      set_target(*megamorphic_stub());
      UpdateMegamorphicCache(*type, *name, *code);
779 Handle<Code> LoadIC::initialize_stub(Isolate* isolate,
780 ExtraICState extra_state) {
781 return isolate->stub_cache()->ComputeLoad(UNINITIALIZED, extra_state);
785 Handle<Code> LoadIC::pre_monomorphic_stub(Isolate* isolate,
786 ExtraICState extra_state) {
787 return isolate->stub_cache()->ComputeLoad(PREMONOMORPHIC, extra_state);
791 Handle<Code> LoadIC::megamorphic_stub() {
792 return isolate()->stub_cache()->ComputeLoad(MEGAMORPHIC, extra_ic_state());
// Returns a stub that loads a plain field at |offset| with the given
// |representation|, in the keyed or non-keyed flavor matching this IC.
// NOTE(review): the "bool inobject" parameter line is not visible in
// this chunk.
Handle<Code> LoadIC::SimpleFieldLoad(int offset,
                                     Representation representation) {
  if (kind() == Code::LOAD_IC) {
    LoadFieldStub stub(inobject, offset, representation);
    return stub.GetCode(isolate());
    KeyedLoadFieldStub stub(inobject, offset, representation);
    return stub.GetCode(isolate());
// Chooses and installs the next stub for this load IC after a lookup:
// pre-monomorphic on first execution, a nonexistent-property stub for
// failed lookups, or a compiled handler for the found property.
void LoadIC::UpdateCaches(LookupResult* lookup,
                          Handle<Object> object,
                          Handle<String> name) {
  if (state() == UNINITIALIZED) {
    // This is the first time we execute this inline cache.
    // Set the target to the pre monomorphic stub to delay
    // setting the monomorphic state.
    set_target(*pre_monomorphic_stub());
    TRACE_IC("LoadIC", name);

  Handle<HeapType> type = CurrentTypeOf(object, isolate());
  if (!lookup->IsCacheable()) {
    // Bail out if the result is not cacheable.
  } else if (!lookup->IsProperty()) {
    if (kind() == Code::LOAD_IC) {
      code = isolate()->stub_cache()->ComputeLoadNonexistent(name, type);
    code = ComputeHandler(lookup, object, name);

  PatchCache(type, name, code);
  TRACE_IC("LoadIC", name);
841 void IC::UpdateMegamorphicCache(HeapType* type, Name* name, Code* code) {
842 // Cache code holding map should be consistent with
843 // GenerateMonomorphicCacheProbe.
844 Map* map = *TypeToMap(type, isolate());
845 isolate()->stub_cache()->Set(name, map, code);
// Returns a handler stub for the lookup result: first probes the stub
// holder's map-local cache, otherwise compiles a fresh handler and
// caches it (for non-NORMAL handlers).
// NOTE(review): the final returns of |code| are not visible in this
// chunk.
Handle<Code> IC::ComputeHandler(LookupResult* lookup,
                                Handle<Object> object,
                                Handle<Object> value) {
  InlineCacheHolderFlag cache_holder = GetCodeCacheForObject(*object);
  Handle<HeapObject> stub_holder(GetCodeCacheHolder(
      isolate(), *object, cache_holder));

  Handle<Code> code = isolate()->stub_cache()->FindHandler(
      name, handle(stub_holder->map()), kind(), cache_holder);
  if (!code.is_null()) return code;

  code = CompileHandler(lookup, object, name, value, cache_holder);
  ASSERT(code->is_handler());

  if (code->type() != Code::NORMAL) {
    HeapObject::UpdateMapCodeCache(stub_holder, name, code);
// Compiles a load handler for the property described by |lookup|,
// dispatching on the property type: fields, constants, normal
// (dictionary) properties, well-known callbacks/accessors, and
// interceptors.
Handle<Code> LoadIC::CompileHandler(LookupResult* lookup,
                                    Handle<Object> object,
                                    Handle<Object> unused,
                                    InlineCacheHolderFlag cache_holder) {
  // String.length is served by a plain field load on the string.
  if (object->IsString() && name->Equals(isolate()->heap()->length_string())) {
    int length_index = String::kLengthOffset / kPointerSize;
    return SimpleFieldLoad(length_index);

  Handle<HeapType> type = CurrentTypeOf(object, isolate());
  Handle<JSObject> holder(lookup->holder());
  LoadStubCompiler compiler(isolate(), kNoExtraICState, cache_holder, kind());

  switch (lookup->type()) {
      PropertyIndex field = lookup->GetFieldIndex();
      if (object.is_identical_to(holder)) {
        return SimpleFieldLoad(field.translate(holder),
                               field.is_inobject(holder),
                               lookup->representation());
      return compiler.CompileLoadField(
          type, holder, name, field, lookup->representation());
      Handle<Object> constant(lookup->GetConstant(), isolate());
      // TODO(2803): Don't compute a stub for cons strings because they cannot
      // be embedded into code.
      if (constant->IsConsString()) break;
      return compiler.CompileLoadConstant(type, holder, name, constant);
      if (kind() != Code::LOAD_IC) break;
      if (holder->IsGlobalObject()) {
        Handle<GlobalObject> global = Handle<GlobalObject>::cast(holder);
        Handle<PropertyCell> cell(
            global->GetPropertyCell(lookup), isolate());
        Handle<Code> code = compiler.CompileLoadGlobal(
            type, global, cell, name, lookup->IsDontDelete());
        // TODO(verwaest): Move caching of these NORMAL stubs outside as well.
        Handle<HeapObject> stub_holder(GetCodeCacheHolder(
            isolate(), *object, cache_holder));
        HeapObject::UpdateMapCodeCache(stub_holder, name, code);
      // There is only one shared stub for loading normalized
      // properties. It does not traverse the prototype chain, so the
      // property must be found in the object for the stub to be
      // applicable.
      if (!object.is_identical_to(holder)) break;
      return isolate()->builtins()->LoadIC_Normal();
      // Use simple field loads for some well-known callback properties.
      if (object->IsJSObject()) {
        Handle<JSObject> receiver = Handle<JSObject>::cast(object);
        Handle<HeapType> type = IC::MapToType<HeapType>(
            handle(receiver->map()), isolate());
        if (Accessors::IsJSObjectFieldAccessor<HeapType>(
                type, name, &object_offset)) {
          return SimpleFieldLoad(object_offset / kPointerSize);

      Handle<Object> callback(lookup->GetCallbackObject(), isolate());
      if (callback->IsExecutableAccessorInfo()) {
        Handle<ExecutableAccessorInfo> info =
            Handle<ExecutableAccessorInfo>::cast(callback);
        // A NULL getter or an incompatible receiver cannot be compiled.
        if (v8::ToCData<Address>(info->getter()) == 0) break;
        if (!info->IsCompatibleReceiver(*object)) break;
        return compiler.CompileLoadCallback(type, holder, name, info);
      } else if (callback->IsAccessorPair()) {
        Handle<Object> getter(Handle<AccessorPair>::cast(callback)->getter(),
        if (!getter->IsJSFunction()) break;
        if (holder->IsGlobalObject()) break;
        if (!holder->HasFastProperties()) break;
        Handle<JSFunction> function = Handle<JSFunction>::cast(getter);
        if (!object->IsJSObject() &&
            !function->IsBuiltin() &&
            function->shared()->is_classic_mode()) {
          // Calling non-strict non-builtins with a value as the receiver
          // requires boxing.
        CallOptimization call_optimization(function);
        if (call_optimization.is_simple_api_call() &&
            call_optimization.IsCompatibleReceiver(object, holder)) {
          return compiler.CompileLoadCallback(
              type, holder, name, call_optimization);
        return compiler.CompileLoadViaGetter(type, holder, name, function);
      // TODO(dcarney): Handle correctly.
      if (callback->IsDeclaredAccessorInfo()) break;
      ASSERT(callback->IsForeign());
      // No IC support for old-style native accessors.
      ASSERT(HasInterceptorGetter(*holder));
      return compiler.CompileLoadInterceptor(type, holder, name);
// Normalizes a keyed-access key: heap numbers that are exact small
// integers become smis, NaN becomes "NaN", undefined becomes
// "undefined", so more accesses hit the fast paths.
// NOTE(review): the trailing "return key;" is not visible in this chunk.
static Handle<Object> TryConvertKey(Handle<Object> key, Isolate* isolate) {
  // This helper implements a few common fast cases for converting
  // non-smi keys of keyed loads/stores to a smi or a string.
  if (key->IsHeapNumber()) {
    double value = Handle<HeapNumber>::cast(key)->value();
    if (std::isnan(value)) {
      key = isolate->factory()->nan_string();
      int int_value = FastD2I(value);
      if (value == int_value && Smi::IsValid(int_value)) {
        key = Handle<Smi>(Smi::FromInt(int_value), isolate);
  } else if (key->IsUndefined()) {
    key = isolate->factory()->undefined_string();
// Selects the keyed-load element stub for |receiver| based on the IC's
// current state: monomorphic for new/premonomorphic sites, monomorphic on
// an ElementsKind transition, polymorphic when new maps accumulate, and
// generic once limits are hit. (Some original lines are elided in this
// excerpt — see the embedded line-number gaps.)
1003 Handle<Code> KeyedLoadIC::LoadElementStub(Handle<JSObject> receiver) {
1004   // Don't handle megamorphic property accesses for INTERCEPTORS or CALLBACKS
1005   // via megamorphic stubs, since they don't have a map in their relocation info
1006   // and so the stubs can't be harvested for the object needed for a map check.
1007   if (target()->type() != Code::NORMAL) {
1008     TRACE_GENERIC_IC(isolate(), "KeyedIC", "non-NORMAL target type");
1009     return generic_stub();
1012   Handle<Map> receiver_map(receiver->map(), isolate());
1013   MapHandleList target_receiver_maps;
1014   if (state() == UNINITIALIZED || state() == PREMONOMORPHIC) {
1015     // Optimistically assume that ICs that haven't reached the MONOMORPHIC state
1016     // yet will do so and stay there.
1017     return isolate()->stub_cache()->ComputeKeyedLoadElement(receiver_map);
// The string stub carries no maps in its code, so seed the list with the
// string map explicitly; otherwise harvest maps from the current target.
1020   if (target().is_identical_to(string_stub())) {
1021     target_receiver_maps.Add(isolate()->factory()->string_map());
1023   target()->FindAllMaps(&target_receiver_maps);
1024   if (target_receiver_maps.length() == 0) {
1025     return isolate()->stub_cache()->ComputeKeyedLoadElement(receiver_map);
1029   // The first time a receiver is seen that is a transitioned version of the
1030   // previous monomorphic receiver type, assume the new ElementsKind is the
1031   // monomorphic type. This benefits global arrays that only transition
1032   // once, and all call sites accessing them are faster if they remain
1033   // monomorphic. If this optimistic assumption is not true, the IC will
1034   // miss again and it will become polymorphic and support both the
1035   // untransitioned and transitioned maps.
1036   if (state() == MONOMORPHIC &&
1037       IsMoreGeneralElementsKindTransition(
1038           target_receiver_maps.at(0)->elements_kind(),
1039           receiver->GetElementsKind())) {
1040     return isolate()->stub_cache()->ComputeKeyedLoadElement(receiver_map);
1043   ASSERT(state() != GENERIC);
1045   // Determine the list of receiver maps that this call site has seen,
1046   // adding the map that was just encountered.
1047   if (!AddOneReceiverMapIfMissing(&target_receiver_maps, receiver_map)) {
1048     // If the miss wasn't due to an unseen map, a polymorphic stub
1049     // won't help, use the generic stub.
1050     TRACE_GENERIC_IC(isolate(), "KeyedIC", "same map added twice");
1051     return generic_stub();
1054   // If the maximum number of receiver maps has been exceeded, use the generic
1055   // version of the IC.
1056   if (target_receiver_maps.length() > kMaxKeyedPolymorphism) {
1057     TRACE_GENERIC_IC(isolate(), "KeyedIC", "max polymorph exceeded");
1058     return generic_stub();
1061   return isolate()->stub_cache()->ComputeLoadElementPolymorphic(
1062       &target_receiver_maps);
// Entry point for a keyed load miss: normalizes the key, dispatches
// internalized-string keys to the named LoadIC path, picks a specialized
// element stub where possible, and finally performs the load through the
// runtime. (Some original lines are elided in this excerpt — see the
// embedded line-number gaps.)
1066 MaybeObject* KeyedLoadIC::Load(Handle<Object> object, Handle<Object> key) {
// Deprecated-map receivers are migrated and handled entirely in the runtime.
1067   if (MigrateDeprecated(object)) {
1068     return Runtime::GetObjectPropertyOrFail(isolate(), object, key);
1071   MaybeObject* maybe_object = NULL;
1072   Handle<Code> stub = generic_stub();
1074   // Check for values that can be converted into an internalized string directly
1075   // or is representable as a smi.
1076   key = TryConvertKey(key, isolate());
1078   if (key->IsInternalizedString()) {
// Named property: delegate to the (non-keyed) LoadIC logic.
1079     maybe_object = LoadIC::Load(object, Handle<String>::cast(key));
1080     if (maybe_object->IsFailure()) return maybe_object;
1081   } else if (FLAG_use_ic && !object->IsAccessCheckNeeded()) {
1082     ASSERT(!object->IsJSGlobalProxy());
1083     if (object->IsString() && key->IsNumber()) {
1084       if (state() == UNINITIALIZED) stub = string_stub();
1085     } else if (object->IsJSObject()) {
1086       Handle<JSObject> receiver = Handle<JSObject>::cast(object);
1087       if (receiver->elements()->map() ==
1088           isolate()->heap()->non_strict_arguments_elements_map()) {
1089         stub = non_strict_arguments_stub();
1090       } else if (receiver->HasIndexedInterceptor()) {
1091         stub = indexed_interceptor_stub();
// Smi-convertible key on a plain JSObject: pick (or grow toward) an
// element-load stub, unless this site is already the arguments stub.
1092       } else if (!key->ToSmi()->IsFailure() &&
1093                  (!target().is_identical_to(non_strict_arguments_stub()))) {
1094         stub = LoadElementStub(receiver);
1099   if (!is_target_set()) {
1100     if (*stub == *generic_stub()) {
1101       TRACE_GENERIC_IC(isolate(), "KeyedLoadIC", "set generic");
1103     ASSERT(!stub.is_null());
1105     TRACE_IC("LoadIC", key);
// If the named-property path already produced a result, return it;
// otherwise do the actual keyed lookup in the runtime.
1108   if (maybe_object != NULL) return maybe_object;
1109   return Runtime::GetObjectPropertyOrFail(isolate(), object, key);
// Determines whether a store of |value| as |name| on |receiver| can be
// cached, filling |lookup| with the property (or transition) found.
// Returns false for stores that must go through the generic/runtime path.
// (Some original lines are elided in this excerpt — see the embedded
// line-number gaps.)
1113 static bool LookupForWrite(Handle<JSObject> receiver,
1114                            Handle<String> name,
1115                            Handle<Object> value,
1116                            LookupResult* lookup,
1118   Handle<JSObject> holder = receiver;
1119   receiver->Lookup(*name, lookup);
1120   if (lookup->IsFound()) {
1121     if (lookup->IsReadOnly() || !lookup->IsCacheable()) return false;
1123     if (lookup->holder() == *receiver) {
// An interceptor without an actual setter is looked through to the real
// named property underneath.
1124       if (lookup->IsInterceptor() && !HasInterceptorSetter(*receiver)) {
1125         receiver->LocalLookupRealNamedProperty(*name, lookup);
1126         return lookup->IsFound() &&
1127             !lookup->IsReadOnly() &&
1128             lookup->CanHoldValue(value) &&
1129             lookup->IsCacheable();
1131       return lookup->CanHoldValue(value);
1134     if (lookup->IsPropertyCallbacks()) return true;
1135     // JSGlobalProxy always goes via the runtime, so it's safe to cache.
1136     if (receiver->IsJSGlobalProxy()) return true;
1137     // Currently normal holders in the prototype chain are not supported. They
1138     // would require a runtime positive lookup and verification that the details
1139     // have not changed.
1140     if (lookup->IsInterceptor() || lookup->IsNormal()) return false;
1141     holder = Handle<JSObject>(lookup->holder(), lookup->isolate());
1144   // While normally LookupTransition gets passed the receiver, in this case we
1145   // pass the holder of the property that we overwrite. This keeps the holder in
1146   // the LookupResult intact so we can later use it to generate a prototype
1147   // chain check. This avoids a double lookup, but requires us to pass in the
1148   // receiver when trying to fetch extra information from the transition.
1149   receiver->map()->LookupTransition(*holder, *name, lookup);
1150   if (!lookup->IsTransition()) return false;
1151   PropertyDetails target_details = lookup->GetTransitionDetails();
1152   if (target_details.IsReadOnly()) return false;
1154   // If the value that's being stored does not fit in the field that the
1155   // instance would transition to, create a new transition that fits the value.
1156   // This has to be done before generating the IC, since that IC will embed the
1157   // transition target.
1158   // Ensure the instance and its map were migrated before trying to update the
1159   // transition target.
1160   ASSERT(!receiver->map()->is_deprecated());
1161   if (!value->FitsRepresentation(target_details.representation())) {
1162     Handle<Map> target(lookup->GetTransitionTarget());
1163     Map::GeneralizeRepresentation(
1164         target, target->LastAdded(),
1165         value->OptimalRepresentation(), FORCE_FIELD);
1166     // Lookup the transition again since the transition tree may have changed
1167     // entirely by the migration above.
1168     receiver->map()->LookupTransition(*holder, *name, lookup);
1169     if (!lookup->IsTransition()) return false;
// Representation generalization invalidated the old monomorphic target;
// record that so the IC machinery treats this as a prototype failure.
1170     ic->MarkMonomorphicPrototypeFailure();
// Entry point for a named-store miss: handles proxies/deprecated maps,
// error cases mandated by the spec, element stores via array-index names,
// observed objects, array-length stores, and finally IC state updates
// before performing the store. (Some original lines are elided in this
// excerpt — see the embedded line-number gaps.)
1176 MaybeObject* StoreIC::Store(Handle<Object> object,
1177                             Handle<String> name,
1178                             Handle<Object> value,
1179                             JSReceiver::StoreFromKeyed store_mode) {
// Proxies and deprecated-map objects always store through the runtime.
1180   if (MigrateDeprecated(object) || object->IsJSProxy()) {
1181     Handle<Object> result = JSReceiver::SetProperty(
1182         Handle<JSReceiver>::cast(object), name, value, NONE, strict_mode());
1183     RETURN_IF_EMPTY_HANDLE(isolate(), result);
1187   // If the object is undefined or null it's illegal to try to set any
1188   // properties on it; throw a TypeError in that case.
1189   if (object->IsUndefined() || object->IsNull()) {
1190     return TypeError("non_object_property_store", object, name);
1193   // The length property of string values is read-only. Throw in strict mode.
1194   if (strict_mode() == kStrictMode && object->IsString() &&
1195       name->Equals(isolate()->heap()->length_string())) {
1196     return TypeError("strict_read_only_property", object, name);
1199   // Ignore other stores where the receiver is not a JSObject.
1200   // TODO(1475): Must check prototype chains of object wrappers.
1201   if (!object->IsJSObject()) return *value;
1203   Handle<JSObject> receiver = Handle<JSObject>::cast(object);
1205   // Check if the given name is an array index.
1207   if (name->AsArrayIndex(&index)) {
// Numeric property name: this is an element store, not a named store.
1208     Handle<Object> result =
1209         JSObject::SetElement(receiver, index, value, NONE, strict_mode());
1210     RETURN_IF_EMPTY_HANDLE(isolate(), result);
1214   // Observed objects are always modified through the runtime.
1215   if (FLAG_harmony_observation && receiver->map()->is_observed()) {
1216     Handle<Object> result = JSReceiver::SetProperty(
1217         receiver, name, value, NONE, strict_mode(), store_mode);
1218     RETURN_IF_EMPTY_HANDLE(isolate(), result);
1222   // Use specialized code for setting the length of arrays with fast
1223   // properties. Slow properties might indicate redefinition of the length
1224   // property. Note that when redefined using Object.freeze, it's possible
1225   // to have fast properties but a read-only length.
1227       receiver->IsJSArray() &&
1228       name->Equals(isolate()->heap()->length_string()) &&
1229       Handle<JSArray>::cast(receiver)->AllowsSetElementsLength() &&
1230       receiver->HasFastProperties() &&
1231       !receiver->map()->is_frozen()) {
1233         StoreArrayLengthStub(kind(), strict_mode()).GetCode(isolate());
1235     TRACE_IC("StoreIC", name);
1236     Handle<Object> result = JSReceiver::SetProperty(
1237         receiver, name, value, NONE, strict_mode(), store_mode);
1238     RETURN_IF_EMPTY_HANDLE(isolate(), result);
1242   LookupResult lookup(isolate());
1243   bool can_store = LookupForWrite(receiver, name, value, &lookup, this);
1245       strict_mode() == kStrictMode &&
1246       !(lookup.IsProperty() && lookup.IsReadOnly()) &&
1247       object->IsGlobalObject()) {
1248     // Strict mode doesn't allow setting non-existent global property.
1249     return ReferenceError("not_defined", name);
// Advance the IC: premonomorphic stub for fresh sites, cached handler when
// the lookup is cacheable, generic stub for the listed uncacheable shapes.
1252   if (state() == UNINITIALIZED) {
1253     Handle<Code> stub = pre_monomorphic_stub();
1255     TRACE_IC("StoreIC", name);
1256   } else if (can_store) {
1257     UpdateCaches(&lookup, receiver, name, value);
1258   } else if (!name->IsCacheable(isolate()) ||
1259              lookup.IsNormal() ||
1260              (lookup.IsField() && lookup.CanHoldValue(value))) {
1261     Handle<Code> stub = generic_stub();
1266   // Set the property.
1267   Handle<Object> result = JSReceiver::SetProperty(
1268       receiver, name, value, NONE, strict_mode(), store_mode);
1269   RETURN_IF_EMPTY_HANDLE(isolate(), result);
// Returns the UNINITIALIZED StoreIC stub for the given strict mode,
// fetched/compiled via the stub cache. (The trailing return of |ic| is
// elided in this excerpt — see the embedded line-number gap.)
1274 Handle<Code> StoreIC::initialize_stub(Isolate* isolate,
1275                                       StrictModeFlag strict_mode) {
1276   ExtraICState extra_state = ComputeExtraICState(strict_mode);
1277   Handle<Code> ic = isolate->stub_cache()->ComputeStore(
1278       UNINITIALIZED, extra_state);
// Returns the MEGAMORPHIC StoreIC stub for this IC's extra state.
1283 Handle<Code> StoreIC::megamorphic_stub() {
1284   return isolate()->stub_cache()->ComputeStore(MEGAMORPHIC, extra_ic_state());
// Returns the GENERIC StoreIC stub for this IC's extra state.
1288 Handle<Code> StoreIC::generic_stub() const {
1289   return isolate()->stub_cache()->ComputeStore(GENERIC, extra_ic_state());
// Returns the PREMONOMORPHIC StoreIC stub for the given strict mode.
1293 Handle<Code> StoreIC::pre_monomorphic_stub(Isolate* isolate,
1294                                            StrictModeFlag strict_mode) {
1295   ExtraICState state = ComputeExtraICState(strict_mode);
1296   return isolate->stub_cache()->ComputeStore(PREMONOMORPHIC, state);
// After a successful cacheable write lookup, compiles/fetches a handler for
// the store and patches it into this IC's cache for the receiver's type.
// (Some original lines are elided in this excerpt.)
1300 void StoreIC::UpdateCaches(LookupResult* lookup,
1301                            Handle<JSObject> receiver,
1302                            Handle<String> name,
1303                            Handle<Object> value) {
1304   ASSERT(lookup->IsFound());
1306   // These are not cacheable, so we never see such LookupResults here.
1307   ASSERT(!lookup->IsHandler());
1309   Handle<Code> code = ComputeHandler(lookup, receiver, name, value);
1311   PatchCache(CurrentTypeOf(receiver, isolate()), name, code);
1312   TRACE_IC("StoreIC", name);
// Compiles a store handler stub for the property described by |lookup|,
// switching on the lookup type (field, transition, normal/global,
// callbacks, interceptor). Falls through (break) to a slow path for cases
// a compiled handler cannot support. (Some original lines are elided in
// this excerpt — see the embedded line-number gaps.)
1316 Handle<Code> StoreIC::CompileHandler(LookupResult* lookup,
1317                                      Handle<Object> object,
1318                                      Handle<String> name,
1319                                      Handle<Object> value,
1320                                      InlineCacheHolderFlag cache_holder) {
1321   if (object->IsJSGlobalProxy()) return slow_stub();
1322   ASSERT(cache_holder == OWN_MAP);
1323   // This is currently guaranteed by checks in StoreIC::Store.
1324   Handle<JSObject> receiver = Handle<JSObject>::cast(object);
1326   Handle<JSObject> holder(lookup->holder());
1327   // Handlers do not use strict mode.
1328   StoreStubCompiler compiler(isolate(), kNonStrictMode, kind());
1329   switch (lookup->type()) {
1331       return compiler.CompileStoreField(receiver, lookup, name);
1333       // Explicitly pass in the receiver map since LookupForWrite may have
1334       // stored something else than the receiver in the holder.
1335       Handle<Map> transition(lookup->GetTransitionTarget());
1336       PropertyDetails details = transition->GetLastDescriptorDetails();
// Transitions that add callbacks or non-default attributes cannot be
// handled by a compiled transition stub.
1338       if (details.type() == CALLBACKS || details.attributes() != NONE) break;
1340       return compiler.CompileStoreTransition(
1341           receiver, lookup, transition, name);
1344       if (kind() == Code::KEYED_STORE_IC) break;
1345       if (receiver->IsGlobalObject()) {
1346         // The stub generated for the global object picks the value directly
1347         // from the property cell. So the property must be directly on the
1349         Handle<GlobalObject> global = Handle<GlobalObject>::cast(receiver);
1350         Handle<PropertyCell> cell(global->GetPropertyCell(lookup), isolate());
1351         Handle<HeapType> union_type = PropertyCell::UpdatedType(cell, value);
1352         StoreGlobalStub stub(union_type->IsConstant());
1354         Handle<Code> code = stub.GetCodeCopyFromTemplate(
1355             isolate(), receiver->map(), *cell);
1356         // TODO(verwaest): Move caching of these NORMAL stubs outside as well.
1357         HeapObject::UpdateMapCodeCache(receiver, name, code);
1360       ASSERT(holder.is_identical_to(receiver));
1361       return isolate()->builtins()->StoreIC_Normal();
1363       if (kind() == Code::KEYED_STORE_IC) break;
1364       Handle<Object> callback(lookup->GetCallbackObject(), isolate());
1365       if (callback->IsExecutableAccessorInfo()) {
1366         Handle<ExecutableAccessorInfo> info =
1367             Handle<ExecutableAccessorInfo>::cast(callback);
// A null setter address, slow-properties holder, or incompatible receiver
// cannot be served by a compiled callback stub.
1368         if (v8::ToCData<Address>(info->setter()) == 0) break;
1369         if (!holder->HasFastProperties()) break;
1370         if (!info->IsCompatibleReceiver(*receiver)) break;
1371         return compiler.CompileStoreCallback(receiver, holder, name, info);
1372       } else if (callback->IsAccessorPair()) {
1373         Handle<Object> setter(
1374             Handle<AccessorPair>::cast(callback)->setter(), isolate());
1375         if (!setter->IsJSFunction()) break;
1376         if (holder->IsGlobalObject()) break;
1377         if (!holder->HasFastProperties()) break;
1378         Handle<JSFunction> function = Handle<JSFunction>::cast(setter);
1379         CallOptimization call_optimization(function);
1380         if (call_optimization.is_simple_api_call() &&
1381             call_optimization.IsCompatibleReceiver(receiver, holder)) {
1382           return compiler.CompileStoreCallback(
1383               receiver, holder, name, call_optimization);
1385         return compiler.CompileStoreViaSetter(
1386             receiver, holder, name, Handle<JSFunction>::cast(setter));
1388       // TODO(dcarney): Handle correctly.
1389       if (callback->IsDeclaredAccessorInfo()) break;
1390       ASSERT(callback->IsForeign());
1391       // No IC support for old-style native accessors.
1395       if (kind() == Code::KEYED_STORE_IC) break;
1396       ASSERT(HasInterceptorSetter(*receiver));
1397       return compiler.CompileStoreInterceptor(receiver, name);
// Selects the keyed-store element stub for |receiver| given the requested
// |store_mode|: monomorphic for fresh sites, monomorphic across
// same-family elements-kind transitions, polymorphic as maps accumulate,
// and generic when map counts or store-mode combinations can't be handled.
// (Some original lines are elided in this excerpt — see the embedded
// line-number gaps.)
1409 Handle<Code> KeyedStoreIC::StoreElementStub(Handle<JSObject> receiver,
1410                                             KeyedAccessStoreMode store_mode) {
1411   // Don't handle megamorphic property accesses for INTERCEPTORS or CALLBACKS
1412   // via megamorphic stubs, since they don't have a map in their relocation info
1413   // and so the stubs can't be harvested for the object needed for a map check.
1414   if (target()->type() != Code::NORMAL) {
1415     TRACE_GENERIC_IC(isolate(), "KeyedIC", "non-NORMAL target type");
1416     return generic_stub();
1419   Handle<Map> receiver_map(receiver->map(), isolate());
1420   if (state() == UNINITIALIZED || state() == PREMONOMORPHIC) {
1421     // Optimistically assume that ICs that haven't reached the MONOMORPHIC state
1422     // yet will do so and stay there.
// Embed the post-transition map in the monomorphic stub so the transition
// itself doesn't need to be encoded in the store mode.
1423     Handle<Map> monomorphic_map = ComputeTransitionedMap(receiver, store_mode);
1424     store_mode = GetNonTransitioningStoreMode(store_mode);
1425     return isolate()->stub_cache()->ComputeKeyedStoreElement(
1426         monomorphic_map, strict_mode(), store_mode);
1429   MapHandleList target_receiver_maps;
1430   target()->FindAllMaps(&target_receiver_maps);
1431   if (target_receiver_maps.length() == 0) {
1432     // In the case that there is a non-map-specific IC is installed (e.g. keyed
1433     // stores into properties in dictionary mode), then there will be not
1434     // receiver maps in the target.
1435     return generic_stub();
1438   // There are several special cases where an IC that is MONOMORPHIC can still
1439   // transition to a different GetNonTransitioningStoreMode IC that handles a
1440   // superset of the original IC. Handle those here if the receiver map hasn't
1441   // changed or it has transitioned to a more general kind.
1442   KeyedAccessStoreMode old_store_mode =
1443       KeyedStoreIC::GetKeyedAccessStoreMode(target()->extra_ic_state());
1444   Handle<Map> previous_receiver_map = target_receiver_maps.at(0);
1445   if (state() == MONOMORPHIC) {
1446     // If the "old" and "new" maps are in the same elements map family, stay
1447     // MONOMORPHIC and use the map for the most generic ElementsKind.
1448     Handle<Map> transitioned_receiver_map = receiver_map;
1449     if (IsTransitionStoreMode(store_mode)) {
1450       transitioned_receiver_map =
1451           ComputeTransitionedMap(receiver, store_mode);
1453     if (IsTransitionOfMonomorphicTarget(
1454             MapToType<HeapType>(transitioned_receiver_map, isolate()))) {
1455       // Element family is the same, use the "worst" case map.
1456       store_mode = GetNonTransitioningStoreMode(store_mode);
1457       return isolate()->stub_cache()->ComputeKeyedStoreElement(
1458           transitioned_receiver_map, strict_mode(), store_mode);
1459     } else if (*previous_receiver_map == receiver->map() &&
1460                old_store_mode == STANDARD_STORE &&
1461                (IsGrowStoreMode(store_mode) ||
1462                 store_mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS ||
1463                 store_mode == STORE_NO_TRANSITION_HANDLE_COW)) {
1464       // A "normal" IC that handles stores can switch to a version that can
1465       // grow at the end of the array, handle OOB accesses or copy COW arrays
1466       // and still stay MONOMORPHIC.
1467       return isolate()->stub_cache()->ComputeKeyedStoreElement(
1468           receiver_map, strict_mode(), store_mode);
1472   ASSERT(state() != GENERIC);
// Record the receiver map (and, for transition modes, the post-transition
// map) among the maps this site has seen.
1475       AddOneReceiverMapIfMissing(&target_receiver_maps, receiver_map);
1477   if (IsTransitionStoreMode(store_mode)) {
1478     Handle<Map> transitioned_receiver_map =
1479         ComputeTransitionedMap(receiver, store_mode);
1480     map_added |= AddOneReceiverMapIfMissing(&target_receiver_maps,
1481                                             transitioned_receiver_map);
1485     // If the miss wasn't due to an unseen map, a polymorphic stub
1486     // won't help, use the generic stub.
1487     TRACE_GENERIC_IC(isolate(), "KeyedIC", "same map added twice");
1488     return generic_stub();
1491   // If the maximum number of receiver maps has been exceeded, use the generic
1492   // version of the IC.
1493   if (target_receiver_maps.length() > kMaxKeyedPolymorphism) {
1494     TRACE_GENERIC_IC(isolate(), "KeyedIC", "max polymorph exceeded");
1495     return generic_stub();
1498   // Make sure all polymorphic handlers have the same store mode, otherwise the
1499   // generic stub must be used.
1500   store_mode = GetNonTransitioningStoreMode(store_mode);
1501   if (old_store_mode != STANDARD_STORE) {
1502     if (store_mode == STANDARD_STORE) {
1503       store_mode = old_store_mode;
1504     } else if (store_mode != old_store_mode) {
1505       TRACE_GENERIC_IC(isolate(), "KeyedIC", "store mode mismatch");
1506       return generic_stub();
1510   // If the store mode isn't the standard mode, make sure that all polymorphic
1511   // receivers are either external arrays, or all "normal" arrays. Otherwise,
1512   // use the generic stub.
1513   if (store_mode != STANDARD_STORE) {
1514     int external_arrays = 0;
1515     for (int i = 0; i < target_receiver_maps.length(); ++i) {
1516       if (target_receiver_maps[i]->has_external_array_elements() ||
1517           target_receiver_maps[i]->has_fixed_typed_array_elements()) {
1521     if (external_arrays != 0 &&
1522         external_arrays != target_receiver_maps.length()) {
1523       TRACE_GENERIC_IC(isolate(), "KeyedIC",
1524           "unsupported combination of external and normal arrays");
1525       return generic_stub();
1529   return isolate()->stub_cache()->ComputeStoreElementPolymorphic(
1530       &target_receiver_maps, store_mode, strict_mode());
// Maps a KeyedAccessStoreMode to the receiver map AFTER the elements-kind
// transition that mode implies: *_TO_OBJECT modes yield FAST(_HOLEY)_ELEMENTS,
// *_TO_DOUBLE modes yield FAST(_HOLEY)_DOUBLE_ELEMENTS, and non-transitioning
// modes return the receiver's current map unchanged. (Some original lines
// are elided in this excerpt.)
1534 Handle<Map> KeyedStoreIC::ComputeTransitionedMap(
1535     Handle<JSObject> receiver,
1536     KeyedAccessStoreMode store_mode) {
1537   switch (store_mode) {
1538     case STORE_TRANSITION_SMI_TO_OBJECT:
1539     case STORE_TRANSITION_DOUBLE_TO_OBJECT:
1540     case STORE_AND_GROW_TRANSITION_SMI_TO_OBJECT:
1541     case STORE_AND_GROW_TRANSITION_DOUBLE_TO_OBJECT:
1542       return JSObject::GetElementsTransitionMap(receiver, FAST_ELEMENTS);
1543     case STORE_TRANSITION_SMI_TO_DOUBLE:
1544     case STORE_AND_GROW_TRANSITION_SMI_TO_DOUBLE:
1545       return JSObject::GetElementsTransitionMap(receiver, FAST_DOUBLE_ELEMENTS);
1546     case STORE_TRANSITION_HOLEY_SMI_TO_OBJECT:
1547     case STORE_TRANSITION_HOLEY_DOUBLE_TO_OBJECT:
1548     case STORE_AND_GROW_TRANSITION_HOLEY_SMI_TO_OBJECT:
1549     case STORE_AND_GROW_TRANSITION_HOLEY_DOUBLE_TO_OBJECT:
1550       return JSObject::GetElementsTransitionMap(receiver,
1551                                                 FAST_HOLEY_ELEMENTS);
1552     case STORE_TRANSITION_HOLEY_SMI_TO_DOUBLE:
1553     case STORE_AND_GROW_TRANSITION_HOLEY_SMI_TO_DOUBLE:
1554       return JSObject::GetElementsTransitionMap(receiver,
1555                                                 FAST_HOLEY_DOUBLE_ELEMENTS);
// OOB-ignoring stores only arise for external-array receivers.
1556     case STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS:
1557       ASSERT(receiver->map()->has_external_array_elements());
1559     case STORE_NO_TRANSITION_HANDLE_COW:
1560     case STANDARD_STORE:
1561     case STORE_AND_GROW_NO_TRANSITION:
1562       return Handle<Map>(receiver->map(), isolate());
1564   return Handle<Map>::null();
// Returns true if |index| is past the receiver's length: the JSArray length
// for arrays (when that length is a Smi), otherwise the backing elements
// store's length. (Some original lines are elided in this excerpt.)
1568 bool IsOutOfBoundsAccess(Handle<JSObject> receiver,
1570   if (receiver->IsJSArray()) {
1571     return JSArray::cast(*receiver)->length()->IsSmi() &&
1572         index >= Smi::cast(JSArray::cast(*receiver)->length())->value();
1574   return index >= receiver->elements()->length();
// Classifies a keyed store into a KeyedAccessStoreMode from the receiver's
// current elements kind, whether the index is out of bounds (grow), and the
// value's type (which may force a smi->double or ->object elements-kind
// transition). (Some original lines are elided in this excerpt — see the
// embedded line-number gaps.)
1578 KeyedAccessStoreMode KeyedStoreIC::GetStoreMode(Handle<JSObject> receiver,
1580                                                 Handle<Object> value) {
1581   ASSERT(!key->ToSmi()->IsFailure());
1582   Smi* smi_key = NULL;
1583   key->ToSmi()->To(&smi_key);
1584   int index = smi_key->value();
1585   bool oob_access = IsOutOfBoundsAccess(receiver, index);
// Only JSArrays can grow via an out-of-bounds keyed store.
1586   bool allow_growth = receiver->IsJSArray() && oob_access;
1588     // Handle growing array in stub if necessary.
1589     if (receiver->HasFastSmiElements()) {
1590       if (value->IsHeapNumber()) {
1591         if (receiver->HasFastHoleyElements()) {
1592           return STORE_AND_GROW_TRANSITION_HOLEY_SMI_TO_DOUBLE;
1594           return STORE_AND_GROW_TRANSITION_SMI_TO_DOUBLE;
1597       if (value->IsHeapObject()) {
1598         if (receiver->HasFastHoleyElements()) {
1599           return STORE_AND_GROW_TRANSITION_HOLEY_SMI_TO_OBJECT;
1601           return STORE_AND_GROW_TRANSITION_SMI_TO_OBJECT;
1604     } else if (receiver->HasFastDoubleElements()) {
1605       if (!value->IsSmi() && !value->IsHeapNumber()) {
1606         if (receiver->HasFastHoleyElements()) {
1607           return STORE_AND_GROW_TRANSITION_HOLEY_DOUBLE_TO_OBJECT;
1609           return STORE_AND_GROW_TRANSITION_DOUBLE_TO_OBJECT;
1613     return STORE_AND_GROW_NO_TRANSITION;
1615     // Handle only in-bounds elements accesses.
1616     if (receiver->HasFastSmiElements()) {
1617       if (value->IsHeapNumber()) {
1618         if (receiver->HasFastHoleyElements()) {
1619           return STORE_TRANSITION_HOLEY_SMI_TO_DOUBLE;
1621           return STORE_TRANSITION_SMI_TO_DOUBLE;
1623       } else if (value->IsHeapObject()) {
1624         if (receiver->HasFastHoleyElements()) {
1625           return STORE_TRANSITION_HOLEY_SMI_TO_OBJECT;
1627           return STORE_TRANSITION_SMI_TO_OBJECT;
1630     } else if (receiver->HasFastDoubleElements()) {
1631       if (!value->IsSmi() && !value->IsHeapNumber()) {
1632         if (receiver->HasFastHoleyElements()) {
1633           return STORE_TRANSITION_HOLEY_DOUBLE_TO_OBJECT;
1635           return STORE_TRANSITION_DOUBLE_TO_OBJECT;
// NOTE(review): the flag test below reads as inverted (!FLAG_trace_...)
// relative to its apparent intent — confirm against upstream history.
1639     if (!FLAG_trace_external_array_abuse &&
1640         receiver->map()->has_external_array_elements() && oob_access) {
1641       return STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS;
// Copy-on-write backing stores need a stub that copies before writing.
1643     Heap* heap = receiver->GetHeap();
1644     if (receiver->elements()->map() == heap->fixed_cow_array_map()) {
1645       return STORE_NO_TRANSITION_HANDLE_COW;
1647       return STANDARD_STORE;
// Entry point for a keyed store miss: normalizes the key, dispatches
// internalized-string keys to the named StoreIC path, picks a specialized
// element-store stub when ICs are usable, and finally performs the store
// through the runtime. (Some original lines are elided in this excerpt —
// see the embedded line-number gaps.)
1653 MaybeObject* KeyedStoreIC::Store(Handle<Object> object,
1655                                  Handle<Object> value) {
1656   if (MigrateDeprecated(object)) {
1657     Handle<Object> result = Runtime::SetObjectProperty(isolate(), object,
1662     RETURN_IF_EMPTY_HANDLE(isolate(), result);
1666   // Check for values that can be converted into an internalized string directly
1667   // or is representable as a smi.
1668   key = TryConvertKey(key, isolate());
1670   MaybeObject* maybe_object = NULL;
1671   Handle<Code> stub = generic_stub();
1673   if (key->IsInternalizedString()) {
// Named property: delegate to the (non-keyed) StoreIC logic.
1674     maybe_object = StoreIC::Store(object,
1675                                   Handle<String>::cast(key),
1677                                   JSReceiver::MAY_BE_STORE_FROM_KEYED);
1678     if (maybe_object->IsFailure()) return maybe_object;
// ICs are disabled for access-checked and observed objects, and for
// receivers whose map sits in Array's prototype chain (so the runtime can
// trap element sets there for hole-access optimization).
1680     bool use_ic = FLAG_use_ic && !object->IsAccessCheckNeeded() &&
1681         !(FLAG_harmony_observation && object->IsJSObject() &&
1682           JSObject::cast(*object)->map()->is_observed());
1683     if (use_ic && !object->IsSmi()) {
1684       // Don't use ICs for maps of the objects in Array's prototype chain. We
1685       // expect to be able to trap element sets to objects with those maps in
1686       // the runtime to enable optimization of element hole access.
1687       Handle<HeapObject> heap_object = Handle<HeapObject>::cast(object);
1688       if (heap_object->map()->IsMapInArrayPrototypeChain()) use_ic = false;
1692       ASSERT(!object->IsJSGlobalProxy());
1694       if (object->IsJSObject()) {
1695         Handle<JSObject> receiver = Handle<JSObject>::cast(object);
1696         bool key_is_smi_like = key->IsSmi() || !key->ToSmi()->IsFailure();
1697         if (receiver->elements()->map() ==
1698             isolate()->heap()->non_strict_arguments_elements_map()) {
1699           stub = non_strict_arguments_stub();
1700         } else if (key_is_smi_like &&
1701                    !(target().is_identical_to(non_strict_arguments_stub()))) {
1702           // We should go generic if receiver isn't a dictionary, but our
1703           // prototype chain does have dictionary elements. This ensures that
1704           // other non-dictionary receivers in the polymorphic case benefit
1705           // from fast path keyed stores.
1706           if (!(receiver->map()->DictionaryElementsInPrototypeChainOnly())) {
1707             KeyedAccessStoreMode store_mode =
1708                 GetStoreMode(receiver, key, value);
1709             stub = StoreElementStub(receiver, store_mode);
1716   if (!is_target_set()) {
1717     if (*stub == *generic_stub()) {
1718       TRACE_GENERIC_IC(isolate(), "KeyedStoreIC", "set generic");
1720     ASSERT(!stub.is_null());
1722     TRACE_IC("StoreIC", key);
// If the named-property path already produced a result, return it;
// otherwise do the actual keyed store in the runtime.
1725   if (maybe_object) return maybe_object;
1726   Handle<Object> result = Runtime::SetObjectProperty(isolate(), object, key,
1730   RETURN_IF_EMPTY_HANDLE(isolate(), result);
1738 // ----------------------------------------------------------------------------
1739 // Static IC stub generators.
1742 // Used from ic-<arch>.cc.
// Runtime entry for a LoadIC miss: rebuilds the IC from the caller frame,
// updates its state for (receiver, key), then performs the load.
1744 RUNTIME_FUNCTION(MaybeObject*, LoadIC_Miss) {
1745   HandleScope scope(isolate);
1746   ASSERT(args.length() == 2);
1747   LoadIC ic(IC::NO_EXTRA_FRAME, isolate);
1748   Handle<Object> receiver = args.at<Object>(0);
1749   Handle<String> key = args.at<String>(1);
1750   ic.UpdateState(receiver, key);
1751   return ic.Load(receiver, key);
1755 // Used from ic-<arch>.cc
// Runtime entry for a KeyedLoadIC miss (no extra call frame on the stack).
1756 RUNTIME_FUNCTION(MaybeObject*, KeyedLoadIC_Miss) {
1757   HandleScope scope(isolate);
1758   ASSERT(args.length() == 2);
1759   KeyedLoadIC ic(IC::NO_EXTRA_FRAME, isolate);
1760   Handle<Object> receiver = args.at<Object>(0);
1761   Handle<Object> key = args.at<Object>(1);
1762   ic.UpdateState(receiver, key);
1763   return ic.Load(receiver, key);
// Same as KeyedLoadIC_Miss, but reached from a stub-failure trampoline, so
// the IC is constructed with an EXTRA_CALL_FRAME on the stack.
1767 RUNTIME_FUNCTION(MaybeObject*, KeyedLoadIC_MissFromStubFailure) {
1768   HandleScope scope(isolate);
1769   ASSERT(args.length() == 2);
1770   KeyedLoadIC ic(IC::EXTRA_CALL_FRAME, isolate);
1771   Handle<Object> receiver = args.at<Object>(0);
1772   Handle<Object> key = args.at<Object>(1);
1773   ic.UpdateState(receiver, key);
1774   return ic.Load(receiver, key);
1778 // Used from ic-<arch>.cc.
// Runtime entry for a StoreIC miss: args are (receiver, name, value).
1779 RUNTIME_FUNCTION(MaybeObject*, StoreIC_Miss) {
1780   HandleScope scope(isolate);
1781   ASSERT(args.length() == 3);
1782   StoreIC ic(IC::NO_EXTRA_FRAME, isolate);
1783   Handle<Object> receiver = args.at<Object>(0);
1784   Handle<String> key = args.at<String>(1);
1785   ic.UpdateState(receiver, key);
1786   return ic.Store(receiver, key, args.at<Object>(2));
// Same as StoreIC_Miss, but reached from a stub-failure trampoline
// (EXTRA_CALL_FRAME).
1790 RUNTIME_FUNCTION(MaybeObject*, StoreIC_MissFromStubFailure) {
1791   HandleScope scope(isolate);
1792   ASSERT(args.length() == 3);
1793   StoreIC ic(IC::EXTRA_CALL_FRAME, isolate);
1794   Handle<Object> receiver = args.at<Object>(0);
1795   Handle<String> key = args.at<String>(1);
1796   ic.UpdateState(receiver, key);
1797   return ic.Store(receiver, key, args.at<Object>(2));
// Runtime entry for storing to a JSArray's length from the array-length
// stub. Runs without allocating handles (SealHandleScope); args are
// (array, new_length_smi). (Some original lines are elided in this
// excerpt — see the embedded line-number gaps.)
1801 RUNTIME_FUNCTION(MaybeObject*, StoreIC_ArrayLength) {
1802   SealHandleScope shs(isolate);
1804   ASSERT(args.length() == 2);
1805   JSArray* receiver = JSArray::cast(args[0]);
1806   Object* len = args[1];
1808   // The generated code should filter out non-Smis before we get here.
1809   ASSERT(len->IsSmi());
1812   // The length property has to be a writable callback property.
1813   LookupResult debug_lookup(isolate);
1814   receiver->LocalLookup(isolate->heap()->length_string(), &debug_lookup);
1815   ASSERT(debug_lookup.IsPropertyCallbacks() && !debug_lookup.IsReadOnly());
1819   MaybeObject* maybe_result = receiver->SetElementsLength(len);
1820   if (!maybe_result->To(&result)) return maybe_result;
1826 // Extend storage is called in a store inline cache when
1827 // it is necessary to extend the properties array of a
// Runtime entry (no handle allocation): grows the object's out-of-object
// properties array to fit one more field, stores the value (boxed as a
// HeapNumber when the new field's representation is double), and installs
// the transition map. (Some original lines are elided in this excerpt.)
1829 RUNTIME_FUNCTION(MaybeObject*, SharedStoreIC_ExtendStorage) {
1830   SealHandleScope shs(isolate);
1831   ASSERT(args.length() == 3);
1833   // Convert the parameters
1834   JSObject* object = JSObject::cast(args[0]);
1835   Map* transition = Map::cast(args[1]);
1836   Object* value = args[2];
1838   // Check the object has run out out property space.
1839   ASSERT(object->HasFastProperties());
1840   ASSERT(object->map()->unused_property_fields() == 0);
1842   // Expand the properties array.
1843   FixedArray* old_storage = object->properties();
1844   int new_unused = transition->unused_property_fields();
// +1 for the new field being added by this transition.
1845   int new_size = old_storage->length() + new_unused + 1;
1847   MaybeObject* maybe_result = old_storage->CopySize(new_size);
1848   if (!maybe_result->ToObject(&result)) return maybe_result;
1850   FixedArray* new_storage = FixedArray::cast(result);
1852   Object* to_store = value;
1854   if (FLAG_track_double_fields) {
1855     DescriptorArray* descriptors = transition->instance_descriptors();
1856     PropertyDetails details = descriptors->GetDetails(transition->LastAdded());
// Double-represented fields store a freshly allocated HeapNumber box
// rather than the raw value.
1857     if (details.representation().IsDouble()) {
1858       MaybeObject* maybe_storage =
1859           isolate->heap()->AllocateHeapNumber(value->Number());
1860       if (!maybe_storage->To(&to_store)) return maybe_storage;
1864   new_storage->set(old_storage->length(), to_store);
1866   // Set the new property value and do the map transition.
1867   object->set_properties(new_storage);
1868   object->set_map(transition);
1870   // Return the stored value.
1875 // Used from ic-<arch>.cc.
// Runtime entry for a KeyedStoreIC miss: args are (receiver, key, value).
1876 RUNTIME_FUNCTION(MaybeObject*, KeyedStoreIC_Miss) {
1877   HandleScope scope(isolate);
1878   ASSERT(args.length() == 3);
1879   KeyedStoreIC ic(IC::NO_EXTRA_FRAME, isolate);
1880   Handle<Object> receiver = args.at<Object>(0);
1881   Handle<Object> key = args.at<Object>(1);
1882   ic.UpdateState(receiver, key);
1883   return ic.Store(receiver, key, args.at<Object>(2));
// Same as KeyedStoreIC_Miss, but reached from a stub-failure trampoline
// (EXTRA_CALL_FRAME).
1887 RUNTIME_FUNCTION(MaybeObject*, KeyedStoreIC_MissFromStubFailure) {
1888   HandleScope scope(isolate);
1889   ASSERT(args.length() == 3);
1890   KeyedStoreIC ic(IC::EXTRA_CALL_FRAME, isolate);
1891   Handle<Object> receiver = args.at<Object>(0);
1892   Handle<Object> key = args.at<Object>(1);
1893   ic.UpdateState(receiver, key);
1894   return ic.Store(receiver, key, args.at<Object>(2));
// Slow-path runtime entry for StoreIC: performs the store directly through
// Runtime::SetObjectProperty with the IC's strict mode, without updating IC
// state. (Some original lines are elided in this excerpt.)
1898 RUNTIME_FUNCTION(MaybeObject*, StoreIC_Slow) {
1899   HandleScope scope(isolate);
1900   ASSERT(args.length() == 3);
1901   StoreIC ic(IC::NO_EXTRA_FRAME, isolate);
1902   Handle<Object> object = args.at<Object>(0);
1903   Handle<Object> key = args.at<Object>(1);
1904   Handle<Object> value = args.at<Object>(2);
// The IC is constructed only to recover the call site's strict mode.
1905   StrictModeFlag strict_mode = ic.strict_mode();
1906   Handle<Object> result = Runtime::SetObjectProperty(isolate, object, key,
1910   RETURN_IF_EMPTY_HANDLE(isolate, result);
// Slow path for KeyedStoreIC: parallel to StoreIC_Slow, performing a generic
// runtime property store with the IC's recorded strict mode.
RUNTIME_FUNCTION(MaybeObject*, KeyedStoreIC_Slow) {
  HandleScope scope(isolate);
  ASSERT(args.length() == 3);
  KeyedStoreIC ic(IC::NO_EXTRA_FRAME, isolate);
  Handle<Object> object = args.at<Object>(0);
  Handle<Object> key = args.at<Object>(1);
  Handle<Object> value = args.at<Object>(2);
  // The strict mode is encoded in the IC's extra state.
  StrictModeFlag strict_mode = ic.strict_mode();
  Handle<Object> result = Runtime::SetObjectProperty(isolate, object, key,
  RETURN_IF_EMPTY_HANDLE(isolate, result);
// Miss handler for the combined elements-transition-and-store IC: first
// transitions the receiver's elements kind to the one demanded by |map|,
// then performs the store through the generic runtime path.
RUNTIME_FUNCTION(MaybeObject*, ElementsTransitionAndStoreIC_Miss) {
  HandleScope scope(isolate);
  ASSERT(args.length() == 4);
  KeyedStoreIC ic(IC::EXTRA_CALL_FRAME, isolate);
  // Argument order differs from the other store miss handlers: value first,
  // receiver last.
  Handle<Object> value = args.at<Object>(0);
  Handle<Map> map = args.at<Map>(1);
  Handle<Object> key = args.at<Object>(2);
  Handle<Object> object = args.at<Object>(3);
  StrictModeFlag strict_mode = ic.strict_mode();
  if (object->IsJSObject()) {
    // Only JSObjects carry elements; transition to the target kind in place.
    JSObject::TransitionElementsKind(Handle<JSObject>::cast(object),
                                     map->elements_kind());
  Handle<Object> result = Runtime::SetObjectProperty(isolate, object, key,
  RETURN_IF_EMPTY_HANDLE(isolate, result);
// Reconstructs a BinaryOpIC::State from the packed ExtraICState bitfield
// (the inverse of GetExtraICState below, except for the SSE2 bit).
BinaryOpIC::State::State(ExtraICState extra_ic_state) {
  // We don't deserialize the SSE2 Field, since this is only used to be able
  // to include SSE2 as well as non-SSE2 versions in the snapshot. For code
  // generation we always want it to reflect the current state.
  op_ = static_cast<Token::Value>(
      FIRST_TOKEN + OpField::decode(extra_ic_state));
  mode_ = OverwriteModeField::decode(extra_ic_state);
  // The fixed right argument is stored as a power-of-two exponent; shift it
  // back into an actual value when rebuilding the Maybe<int>.
  fixed_right_arg_ = Maybe<int>(
      HasFixedRightArgField::decode(extra_ic_state),
      1 << FixedRightArgValueField::decode(extra_ic_state));
  left_kind_ = LeftKindField::decode(extra_ic_state);
  if (fixed_right_arg_.has_value) {
    // With a fixed right argument, the right kind is implied by whether the
    // value still fits in a Smi (the encoding reuses those bits).
    right_kind_ = Smi::IsValid(fixed_right_arg_.value) ? SMI : INT32;
    right_kind_ = RightKindField::decode(extra_ic_state);
  result_kind_ = ResultKindField::decode(extra_ic_state);
  // The decoded op must lie within the token range the OpField can encode.
  ASSERT_LE(FIRST_TOKEN, op_);
  ASSERT_LE(op_, LAST_TOKEN);
// Packs this state into an ExtraICState bitfield (inverse of the
// deserializing constructor above, plus the freshly computed SSE2 bit).
ExtraICState BinaryOpIC::State::GetExtraICState() const {
  // SSE2 is only relevant when some operand/result can exceed Smi range.
  bool sse2 = (Max(result_kind_, Max(left_kind_, right_kind_)) > SMI &&
               CpuFeatures::IsSafeForSnapshot(SSE2));
  ExtraICState extra_ic_state =
      SSE2Field::encode(sse2) |
      OpField::encode(op_ - FIRST_TOKEN) |
      OverwriteModeField::encode(mode_) |
      LeftKindField::encode(left_kind_) |
      ResultKindField::encode(result_kind_) |
      HasFixedRightArgField::encode(fixed_right_arg_.has_value);
  if (fixed_right_arg_.has_value) {
    // Store only the exponent of the (power-of-two) fixed right argument.
    extra_ic_state = FixedRightArgValueField::update(
        extra_ic_state, WhichPowerOf2(fixed_right_arg_.value));
    extra_ic_state = RightKindField::update(extra_ic_state, right_kind_);
  return extra_ic_state;
// Pre-generates BinaryOpIC stub code for a hand-picked list of commonly
// observed (op, left kind, right kind, result kind, overwrite mode) states,
// invoking the supplied |Generate| callback once per state.
void BinaryOpIC::State::GenerateAheadOfTime(
    Isolate* isolate, void (*Generate)(Isolate*, const State&)) {
  // TODO(olivf) We should investigate why adding stubs to the snapshot is so
  // expensive at runtime. When solved we should be able to add most binops to
  // the snapshot instead of hand-picking them.
  // Generated list of commonly used stubs
  // First flavor: states without a fixed right argument.
#define GENERATE(op, left_kind, right_kind, result_kind, mode) \
    State state(op, mode); \
    state.left_kind_ = left_kind; \
    state.fixed_right_arg_.has_value = false; \
    state.right_kind_ = right_kind; \
    state.result_kind_ = result_kind; \
    Generate(isolate, state); \
  GENERATE(Token::ADD, INT32, INT32, INT32, NO_OVERWRITE);
  GENERATE(Token::ADD, INT32, INT32, INT32, OVERWRITE_LEFT);
  GENERATE(Token::ADD, INT32, INT32, NUMBER, NO_OVERWRITE);
  GENERATE(Token::ADD, INT32, INT32, NUMBER, OVERWRITE_LEFT);
  GENERATE(Token::ADD, INT32, NUMBER, NUMBER, NO_OVERWRITE);
  GENERATE(Token::ADD, INT32, NUMBER, NUMBER, OVERWRITE_LEFT);
  GENERATE(Token::ADD, INT32, NUMBER, NUMBER, OVERWRITE_RIGHT);
  GENERATE(Token::ADD, INT32, SMI, INT32, NO_OVERWRITE);
  GENERATE(Token::ADD, INT32, SMI, INT32, OVERWRITE_LEFT);
  GENERATE(Token::ADD, INT32, SMI, INT32, OVERWRITE_RIGHT);
  GENERATE(Token::ADD, NUMBER, INT32, NUMBER, NO_OVERWRITE);
  GENERATE(Token::ADD, NUMBER, INT32, NUMBER, OVERWRITE_LEFT);
  GENERATE(Token::ADD, NUMBER, INT32, NUMBER, OVERWRITE_RIGHT);
  GENERATE(Token::ADD, NUMBER, NUMBER, NUMBER, NO_OVERWRITE);
  GENERATE(Token::ADD, NUMBER, NUMBER, NUMBER, OVERWRITE_LEFT);
  GENERATE(Token::ADD, NUMBER, NUMBER, NUMBER, OVERWRITE_RIGHT);
  GENERATE(Token::ADD, NUMBER, SMI, NUMBER, NO_OVERWRITE);
  GENERATE(Token::ADD, NUMBER, SMI, NUMBER, OVERWRITE_LEFT);
  GENERATE(Token::ADD, NUMBER, SMI, NUMBER, OVERWRITE_RIGHT);
  GENERATE(Token::ADD, SMI, INT32, INT32, NO_OVERWRITE);
  GENERATE(Token::ADD, SMI, INT32, INT32, OVERWRITE_LEFT);
  GENERATE(Token::ADD, SMI, INT32, NUMBER, NO_OVERWRITE);
  GENERATE(Token::ADD, SMI, NUMBER, NUMBER, NO_OVERWRITE);
  GENERATE(Token::ADD, SMI, NUMBER, NUMBER, OVERWRITE_LEFT);
  GENERATE(Token::ADD, SMI, NUMBER, NUMBER, OVERWRITE_RIGHT);
  GENERATE(Token::ADD, SMI, SMI, INT32, OVERWRITE_LEFT);
  GENERATE(Token::ADD, SMI, SMI, SMI, OVERWRITE_RIGHT);
  GENERATE(Token::BIT_AND, INT32, INT32, INT32, NO_OVERWRITE);
  GENERATE(Token::BIT_AND, INT32, INT32, INT32, OVERWRITE_LEFT);
  GENERATE(Token::BIT_AND, INT32, INT32, INT32, OVERWRITE_RIGHT);
  GENERATE(Token::BIT_AND, INT32, INT32, SMI, NO_OVERWRITE);
  GENERATE(Token::BIT_AND, INT32, INT32, SMI, OVERWRITE_RIGHT);
  GENERATE(Token::BIT_AND, INT32, SMI, INT32, NO_OVERWRITE);
  GENERATE(Token::BIT_AND, INT32, SMI, INT32, OVERWRITE_RIGHT);
  GENERATE(Token::BIT_AND, INT32, SMI, SMI, NO_OVERWRITE);
  GENERATE(Token::BIT_AND, INT32, SMI, SMI, OVERWRITE_LEFT);
  GENERATE(Token::BIT_AND, INT32, SMI, SMI, OVERWRITE_RIGHT);
  GENERATE(Token::BIT_AND, NUMBER, INT32, INT32, OVERWRITE_RIGHT);
  GENERATE(Token::BIT_AND, NUMBER, SMI, SMI, NO_OVERWRITE);
  GENERATE(Token::BIT_AND, NUMBER, SMI, SMI, OVERWRITE_RIGHT);
  GENERATE(Token::BIT_AND, SMI, INT32, INT32, NO_OVERWRITE);
  GENERATE(Token::BIT_AND, SMI, INT32, SMI, OVERWRITE_RIGHT);
  GENERATE(Token::BIT_AND, SMI, NUMBER, SMI, OVERWRITE_RIGHT);
  GENERATE(Token::BIT_AND, SMI, SMI, SMI, NO_OVERWRITE);
  GENERATE(Token::BIT_AND, SMI, SMI, SMI, OVERWRITE_LEFT);
  GENERATE(Token::BIT_AND, SMI, SMI, SMI, OVERWRITE_RIGHT);
  GENERATE(Token::BIT_OR, INT32, INT32, INT32, OVERWRITE_LEFT);
  GENERATE(Token::BIT_OR, INT32, INT32, INT32, OVERWRITE_RIGHT);
  GENERATE(Token::BIT_OR, INT32, INT32, SMI, OVERWRITE_LEFT);
  GENERATE(Token::BIT_OR, INT32, SMI, INT32, NO_OVERWRITE);
  GENERATE(Token::BIT_OR, INT32, SMI, INT32, OVERWRITE_LEFT);
  GENERATE(Token::BIT_OR, INT32, SMI, INT32, OVERWRITE_RIGHT);
  GENERATE(Token::BIT_OR, INT32, SMI, SMI, NO_OVERWRITE);
  GENERATE(Token::BIT_OR, INT32, SMI, SMI, OVERWRITE_RIGHT);
  GENERATE(Token::BIT_OR, NUMBER, SMI, INT32, NO_OVERWRITE);
  GENERATE(Token::BIT_OR, NUMBER, SMI, INT32, OVERWRITE_LEFT);
  GENERATE(Token::BIT_OR, NUMBER, SMI, INT32, OVERWRITE_RIGHT);
  GENERATE(Token::BIT_OR, NUMBER, SMI, SMI, NO_OVERWRITE);
  GENERATE(Token::BIT_OR, NUMBER, SMI, SMI, OVERWRITE_LEFT);
  GENERATE(Token::BIT_OR, SMI, INT32, INT32, OVERWRITE_LEFT);
  GENERATE(Token::BIT_OR, SMI, INT32, INT32, OVERWRITE_RIGHT);
  GENERATE(Token::BIT_OR, SMI, INT32, SMI, OVERWRITE_RIGHT);
  GENERATE(Token::BIT_OR, SMI, SMI, SMI, OVERWRITE_LEFT);
  GENERATE(Token::BIT_OR, SMI, SMI, SMI, OVERWRITE_RIGHT);
  GENERATE(Token::BIT_XOR, INT32, INT32, INT32, NO_OVERWRITE);
  GENERATE(Token::BIT_XOR, INT32, INT32, INT32, OVERWRITE_LEFT);
  GENERATE(Token::BIT_XOR, INT32, INT32, INT32, OVERWRITE_RIGHT);
  GENERATE(Token::BIT_XOR, INT32, INT32, SMI, NO_OVERWRITE);
  GENERATE(Token::BIT_XOR, INT32, INT32, SMI, OVERWRITE_LEFT);
  GENERATE(Token::BIT_XOR, INT32, NUMBER, SMI, NO_OVERWRITE);
  GENERATE(Token::BIT_XOR, INT32, SMI, INT32, NO_OVERWRITE);
  GENERATE(Token::BIT_XOR, INT32, SMI, INT32, OVERWRITE_LEFT);
  GENERATE(Token::BIT_XOR, INT32, SMI, INT32, OVERWRITE_RIGHT);
  GENERATE(Token::BIT_XOR, NUMBER, INT32, INT32, NO_OVERWRITE);
  GENERATE(Token::BIT_XOR, NUMBER, SMI, INT32, NO_OVERWRITE);
  GENERATE(Token::BIT_XOR, NUMBER, SMI, SMI, NO_OVERWRITE);
  GENERATE(Token::BIT_XOR, SMI, INT32, INT32, NO_OVERWRITE);
  GENERATE(Token::BIT_XOR, SMI, INT32, INT32, OVERWRITE_LEFT);
  GENERATE(Token::BIT_XOR, SMI, INT32, SMI, OVERWRITE_LEFT);
  GENERATE(Token::BIT_XOR, SMI, SMI, SMI, NO_OVERWRITE);
  GENERATE(Token::BIT_XOR, SMI, SMI, SMI, OVERWRITE_LEFT);
  GENERATE(Token::BIT_XOR, SMI, SMI, SMI, OVERWRITE_RIGHT);
  GENERATE(Token::DIV, INT32, INT32, INT32, NO_OVERWRITE);
  GENERATE(Token::DIV, INT32, INT32, NUMBER, NO_OVERWRITE);
  GENERATE(Token::DIV, INT32, NUMBER, NUMBER, NO_OVERWRITE);
  GENERATE(Token::DIV, INT32, NUMBER, NUMBER, OVERWRITE_LEFT);
  GENERATE(Token::DIV, INT32, SMI, INT32, NO_OVERWRITE);
  GENERATE(Token::DIV, INT32, SMI, NUMBER, NO_OVERWRITE);
  GENERATE(Token::DIV, NUMBER, INT32, NUMBER, NO_OVERWRITE);
  GENERATE(Token::DIV, NUMBER, INT32, NUMBER, OVERWRITE_LEFT);
  GENERATE(Token::DIV, NUMBER, NUMBER, NUMBER, NO_OVERWRITE);
  GENERATE(Token::DIV, NUMBER, NUMBER, NUMBER, OVERWRITE_LEFT);
  GENERATE(Token::DIV, NUMBER, NUMBER, NUMBER, OVERWRITE_RIGHT);
  GENERATE(Token::DIV, NUMBER, SMI, NUMBER, NO_OVERWRITE);
  GENERATE(Token::DIV, NUMBER, SMI, NUMBER, OVERWRITE_LEFT);
  GENERATE(Token::DIV, SMI, INT32, INT32, NO_OVERWRITE);
  GENERATE(Token::DIV, SMI, INT32, NUMBER, NO_OVERWRITE);
  GENERATE(Token::DIV, SMI, INT32, NUMBER, OVERWRITE_LEFT);
  GENERATE(Token::DIV, SMI, NUMBER, NUMBER, NO_OVERWRITE);
  GENERATE(Token::DIV, SMI, NUMBER, NUMBER, OVERWRITE_LEFT);
  GENERATE(Token::DIV, SMI, NUMBER, NUMBER, OVERWRITE_RIGHT);
  GENERATE(Token::DIV, SMI, SMI, NUMBER, NO_OVERWRITE);
  GENERATE(Token::DIV, SMI, SMI, NUMBER, OVERWRITE_LEFT);
  GENERATE(Token::DIV, SMI, SMI, NUMBER, OVERWRITE_RIGHT);
  GENERATE(Token::DIV, SMI, SMI, SMI, NO_OVERWRITE);
  GENERATE(Token::DIV, SMI, SMI, SMI, OVERWRITE_LEFT);
  GENERATE(Token::DIV, SMI, SMI, SMI, OVERWRITE_RIGHT);
  GENERATE(Token::MOD, NUMBER, SMI, NUMBER, OVERWRITE_LEFT);
  GENERATE(Token::MOD, SMI, SMI, SMI, NO_OVERWRITE);
  GENERATE(Token::MOD, SMI, SMI, SMI, OVERWRITE_LEFT);
  GENERATE(Token::MUL, INT32, INT32, INT32, NO_OVERWRITE);
  GENERATE(Token::MUL, INT32, INT32, NUMBER, NO_OVERWRITE);
  GENERATE(Token::MUL, INT32, NUMBER, NUMBER, NO_OVERWRITE);
  GENERATE(Token::MUL, INT32, NUMBER, NUMBER, OVERWRITE_LEFT);
  GENERATE(Token::MUL, INT32, SMI, INT32, NO_OVERWRITE);
  GENERATE(Token::MUL, INT32, SMI, INT32, OVERWRITE_LEFT);
  GENERATE(Token::MUL, INT32, SMI, NUMBER, NO_OVERWRITE);
  GENERATE(Token::MUL, NUMBER, INT32, NUMBER, NO_OVERWRITE);
  GENERATE(Token::MUL, NUMBER, INT32, NUMBER, OVERWRITE_LEFT);
  GENERATE(Token::MUL, NUMBER, INT32, NUMBER, OVERWRITE_RIGHT);
  GENERATE(Token::MUL, NUMBER, NUMBER, NUMBER, NO_OVERWRITE);
  GENERATE(Token::MUL, NUMBER, NUMBER, NUMBER, OVERWRITE_LEFT);
  GENERATE(Token::MUL, NUMBER, SMI, NUMBER, NO_OVERWRITE);
  GENERATE(Token::MUL, NUMBER, SMI, NUMBER, OVERWRITE_LEFT);
  GENERATE(Token::MUL, NUMBER, SMI, NUMBER, OVERWRITE_RIGHT);
  GENERATE(Token::MUL, SMI, INT32, INT32, NO_OVERWRITE);
  GENERATE(Token::MUL, SMI, INT32, INT32, OVERWRITE_LEFT);
  GENERATE(Token::MUL, SMI, INT32, NUMBER, NO_OVERWRITE);
  GENERATE(Token::MUL, SMI, NUMBER, NUMBER, NO_OVERWRITE);
  GENERATE(Token::MUL, SMI, NUMBER, NUMBER, OVERWRITE_LEFT);
  GENERATE(Token::MUL, SMI, NUMBER, NUMBER, OVERWRITE_RIGHT);
  GENERATE(Token::MUL, SMI, SMI, INT32, NO_OVERWRITE);
  GENERATE(Token::MUL, SMI, SMI, NUMBER, NO_OVERWRITE);
  GENERATE(Token::MUL, SMI, SMI, NUMBER, OVERWRITE_LEFT);
  GENERATE(Token::MUL, SMI, SMI, SMI, NO_OVERWRITE);
  GENERATE(Token::MUL, SMI, SMI, SMI, OVERWRITE_LEFT);
  GENERATE(Token::MUL, SMI, SMI, SMI, OVERWRITE_RIGHT);
  GENERATE(Token::SAR, INT32, SMI, INT32, OVERWRITE_RIGHT);
  GENERATE(Token::SAR, INT32, SMI, SMI, NO_OVERWRITE);
  GENERATE(Token::SAR, INT32, SMI, SMI, OVERWRITE_RIGHT);
  GENERATE(Token::SAR, NUMBER, SMI, SMI, NO_OVERWRITE);
  GENERATE(Token::SAR, NUMBER, SMI, SMI, OVERWRITE_RIGHT);
  GENERATE(Token::SAR, SMI, SMI, SMI, OVERWRITE_LEFT);
  GENERATE(Token::SAR, SMI, SMI, SMI, OVERWRITE_RIGHT);
  GENERATE(Token::SHL, INT32, SMI, INT32, NO_OVERWRITE);
  GENERATE(Token::SHL, INT32, SMI, INT32, OVERWRITE_RIGHT);
  GENERATE(Token::SHL, INT32, SMI, SMI, NO_OVERWRITE);
  GENERATE(Token::SHL, INT32, SMI, SMI, OVERWRITE_RIGHT);
  GENERATE(Token::SHL, NUMBER, SMI, SMI, OVERWRITE_RIGHT);
  GENERATE(Token::SHL, SMI, SMI, INT32, NO_OVERWRITE);
  GENERATE(Token::SHL, SMI, SMI, INT32, OVERWRITE_LEFT);
  GENERATE(Token::SHL, SMI, SMI, INT32, OVERWRITE_RIGHT);
  GENERATE(Token::SHL, SMI, SMI, SMI, NO_OVERWRITE);
  GENERATE(Token::SHL, SMI, SMI, SMI, OVERWRITE_LEFT);
  GENERATE(Token::SHL, SMI, SMI, SMI, OVERWRITE_RIGHT);
  GENERATE(Token::SHR, INT32, SMI, SMI, NO_OVERWRITE);
  GENERATE(Token::SHR, INT32, SMI, SMI, OVERWRITE_LEFT);
  GENERATE(Token::SHR, INT32, SMI, SMI, OVERWRITE_RIGHT);
  GENERATE(Token::SHR, NUMBER, SMI, SMI, NO_OVERWRITE);
  GENERATE(Token::SHR, NUMBER, SMI, SMI, OVERWRITE_LEFT);
  GENERATE(Token::SHR, NUMBER, SMI, INT32, OVERWRITE_RIGHT);
  GENERATE(Token::SHR, SMI, SMI, SMI, NO_OVERWRITE);
  GENERATE(Token::SHR, SMI, SMI, SMI, OVERWRITE_LEFT);
  GENERATE(Token::SHR, SMI, SMI, SMI, OVERWRITE_RIGHT);
  GENERATE(Token::SUB, INT32, INT32, INT32, NO_OVERWRITE);
  GENERATE(Token::SUB, INT32, INT32, INT32, OVERWRITE_LEFT);
  GENERATE(Token::SUB, INT32, NUMBER, NUMBER, NO_OVERWRITE);
  GENERATE(Token::SUB, INT32, NUMBER, NUMBER, OVERWRITE_RIGHT);
  GENERATE(Token::SUB, INT32, SMI, INT32, OVERWRITE_LEFT);
  GENERATE(Token::SUB, INT32, SMI, INT32, OVERWRITE_RIGHT);
  GENERATE(Token::SUB, NUMBER, INT32, NUMBER, NO_OVERWRITE);
  GENERATE(Token::SUB, NUMBER, INT32, NUMBER, OVERWRITE_LEFT);
  GENERATE(Token::SUB, NUMBER, NUMBER, NUMBER, NO_OVERWRITE);
  GENERATE(Token::SUB, NUMBER, NUMBER, NUMBER, OVERWRITE_LEFT);
  GENERATE(Token::SUB, NUMBER, NUMBER, NUMBER, OVERWRITE_RIGHT);
  GENERATE(Token::SUB, NUMBER, SMI, NUMBER, NO_OVERWRITE);
  GENERATE(Token::SUB, NUMBER, SMI, NUMBER, OVERWRITE_LEFT);
  GENERATE(Token::SUB, NUMBER, SMI, NUMBER, OVERWRITE_RIGHT);
  GENERATE(Token::SUB, SMI, INT32, INT32, NO_OVERWRITE);
  GENERATE(Token::SUB, SMI, NUMBER, NUMBER, NO_OVERWRITE);
  GENERATE(Token::SUB, SMI, NUMBER, NUMBER, OVERWRITE_LEFT);
  GENERATE(Token::SUB, SMI, NUMBER, NUMBER, OVERWRITE_RIGHT);
  GENERATE(Token::SUB, SMI, SMI, SMI, NO_OVERWRITE);
  GENERATE(Token::SUB, SMI, SMI, SMI, OVERWRITE_LEFT);
  GENERATE(Token::SUB, SMI, SMI, SMI, OVERWRITE_RIGHT);
  // Second flavor: states with a fixed (power-of-two) right argument, used
  // to specialize Token::MOD by a known Smi divisor.
#define GENERATE(op, left_kind, fixed_right_arg_value, result_kind, mode) \
    State state(op, mode); \
    state.left_kind_ = left_kind; \
    state.fixed_right_arg_.has_value = true; \
    state.fixed_right_arg_.value = fixed_right_arg_value; \
    state.right_kind_ = SMI; \
    state.result_kind_ = result_kind; \
    Generate(isolate, state); \
  GENERATE(Token::MOD, SMI, 2, SMI, NO_OVERWRITE);
  GENERATE(Token::MOD, SMI, 4, SMI, NO_OVERWRITE);
  GENERATE(Token::MOD, SMI, 4, SMI, OVERWRITE_LEFT);
  GENERATE(Token::MOD, SMI, 8, SMI, NO_OVERWRITE);
  GENERATE(Token::MOD, SMI, 16, SMI, OVERWRITE_LEFT);
  GENERATE(Token::MOD, SMI, 32, SMI, NO_OVERWRITE);
  GENERATE(Token::MOD, SMI, 2048, SMI, NO_OVERWRITE);
// Maps the recorded result kind to a static type, with special cases for
// side-effecting operations, generic ADD, and unsigned right shift.
Type* BinaryOpIC::State::GetResultType(Zone* zone) const {
  Kind result_kind = result_kind_;
  if (HasSideEffects()) {
  } else if (result_kind == GENERIC && op_ == Token::ADD) {
    // Generic ADD may produce either a number or a string.
    return Type::Union(Type::Number(zone), Type::String(zone), zone);
  } else if (result_kind == NUMBER && op_ == Token::SHR) {
    // >>> always yields an unsigned 32-bit integer.
    return Type::Unsigned32(zone);
  ASSERT_NE(GENERIC, result_kind);
  return KindToType(result_kind, zone);
// Prints a compact human-readable description of this state, e.g.
// "(ADD_ReuseLeft:Smi*Smi->Int32)", used by --trace-ic output.
void BinaryOpIC::State::Print(StringStream* stream) const {
  stream->Add("(%s", Token::Name(op_));
  if (mode_ == OVERWRITE_LEFT) stream->Add("_ReuseLeft");
  else if (mode_ == OVERWRITE_RIGHT) stream->Add("_ReuseRight");
  if (CouldCreateAllocationMementos()) stream->Add("_CreateAllocationMementos");
  stream->Add(":%s*", KindToString(left_kind_));
  if (fixed_right_arg_.has_value) {
    // A fixed right argument prints as its literal value instead of a kind.
    stream->Add("%d", fixed_right_arg_.value);
    stream->Add("%s", KindToString(right_kind_));
  stream->Add("->%s)", KindToString(result_kind_));
// Advances this state after observing one execution of the operation with
// the given operands and result, widening the recorded kinds as needed.
void BinaryOpIC::State::Update(Handle<Object> left,
                               Handle<Object> right,
                               Handle<Object> result) {
  // Remember the encoded state before the update so we can detect a
  // no-progress transition below.
  ExtraICState old_extra_ic_state = GetExtraICState();
  left_kind_ = UpdateKind(left, left_kind_);
  right_kind_ = UpdateKind(right, right_kind_);
  // Track a fixed right argument only for MOD by a positive power of two
  // whose exponent fits in the encoding, with a small-integer left operand,
  // and only while the state is fresh (or already fixed).
  int32_t fixed_right_arg_value = 0;
  bool has_fixed_right_arg =
      op_ == Token::MOD &&
      right->ToInt32(&fixed_right_arg_value) &&
      fixed_right_arg_value > 0 &&
      IsPowerOf2(fixed_right_arg_value) &&
      FixedRightArgValueField::is_valid(WhichPowerOf2(fixed_right_arg_value)) &&
      (left_kind_ == SMI || left_kind_ == INT32) &&
      (result_kind_ == NONE || !fixed_right_arg_.has_value);
  fixed_right_arg_ = Maybe<int32_t>(has_fixed_right_arg,
                                    fixed_right_arg_value);
  result_kind_ = UpdateKind(result, result_kind_);
  if (!Token::IsTruncatingBinaryOp(op_)) {
    // For non-truncating ops the result kind must be at least as wide as the
    // widest numeric input kind.
    Kind input_kind = Max(left_kind_, right_kind_);
    if (result_kind_ < input_kind && input_kind <= NUMBER) {
      result_kind_ = input_kind;
  // We don't want to distinguish INT32 and NUMBER for string add (because
  // NumberToString can't make use of this anyway).
  if (left_kind_ == STRING && right_kind_ == INT32) {
    ASSERT_EQ(STRING, result_kind_);
    ASSERT_EQ(Token::ADD, op_);
    right_kind_ = NUMBER;
  } else if (right_kind_ == STRING && left_kind_ == INT32) {
    ASSERT_EQ(STRING, result_kind_);
    ASSERT_EQ(Token::ADD, op_);
    left_kind_ = NUMBER;
  // Reset overwrite mode unless we can actually make use of it, or may be able
  // to make use of it at some point in the future.
  if ((mode_ == OVERWRITE_LEFT && left_kind_ > NUMBER) ||
      (mode_ == OVERWRITE_RIGHT && right_kind_ > NUMBER) ||
      result_kind_ > NUMBER) {
    mode_ = NO_OVERWRITE;
  // If the update made no progress, force a widening so the IC does not miss
  // forever on the same inputs.
  if (old_extra_ic_state == GetExtraICState()) {
    // Tagged operations can lead to non-truncating HChanges
    if (left->IsUndefined() || left->IsBoolean()) {
      left_kind_ = GENERIC;
    } else if (right->IsUndefined() || right->IsBoolean()) {
      right_kind_ = GENERIC;
    // Since the X87 is too precise, we might bail out on numbers which
    // actually would truncate with 64 bit precision.
    ASSERT(!CpuFeatures::IsSupported(SSE2));
    ASSERT(result_kind_ < NUMBER);
    result_kind_ = NUMBER;
// Computes the kind for a single operand/result value and merges it with the
// previously recorded kind, widening monotonically (result is >= old kind).
BinaryOpIC::State::Kind BinaryOpIC::State::UpdateKind(Handle<Object> object,
  Kind new_kind = GENERIC;
  bool is_truncating = Token::IsTruncatingBinaryOp(op());
  if (object->IsBoolean() && is_truncating) {
    // Booleans will be automatically truncated by HChange.
  } else if (object->IsUndefined()) {
    // Undefined will be automatically truncated by HChange.
    new_kind = is_truncating ? INT32 : NUMBER;
  } else if (object->IsSmi()) {
  } else if (object->IsHeapNumber()) {
    // A heap number that holds an exact int32 is still classified as INT32.
    double value = Handle<HeapNumber>::cast(object)->value();
    new_kind = IsInt32Double(value) ? INT32 : NUMBER;
  } else if (object->IsString() && op() == Token::ADD) {
  if (new_kind == INT32 && SmiValuesAre32Bits()) {
      ((new_kind <= NUMBER && kind > NUMBER) ||
       (new_kind > NUMBER && kind <= NUMBER))) {
  return Max(kind, new_kind);
// Returns a human-readable name for the given kind (used in trace output).
const char* BinaryOpIC::State::KindToString(Kind kind) {
    case NONE: return "None";
    case SMI: return "Smi";
    case INT32: return "Int32";
    case NUMBER: return "Number";
    case STRING: return "String";
    case GENERIC: return "Generic";
// Maps an operand kind to the corresponding static type in |zone|.
Type* BinaryOpIC::State::KindToType(Kind kind, Zone* zone) {
    case NONE: return Type::None(zone);
    case SMI: return Type::Smi(zone);
    case INT32: return Type::Signed32(zone);
    case NUMBER: return Type::Number(zone);
    case STRING: return Type::String(zone);
    case GENERIC: return Type::Any(zone);
// Handles a BinaryOpIC miss: computes the real result via the corresponding
// JS builtin, advances the IC state, installs a new stub (optionally bound
// to an allocation site), and patches inlined smi code as needed.
MaybeObject* BinaryOpIC::Transition(Handle<AllocationSite> allocation_site,
                                    Handle<Object> left,
                                    Handle<Object> right) {
  State state(target()->extended_extra_ic_state());
  // Compute the actual result using the builtin for the binary operation.
  Object* builtin = isolate()->js_builtins_object()->javascript_builtin(
      TokenToJSBuiltin(state.op()));
  Handle<JSFunction> function = handle(JSFunction::cast(builtin), isolate());
  bool caught_exception;
  Handle<Object> result = Execution::Call(
      isolate(), function, left, 1, &right, &caught_exception);
  // Propagate any exception thrown by the builtin to the caller.
  if (caught_exception) return Failure::Exception();
  // Compute the new state.
  State old_state = state;
  state.Update(left, right, result);
  // Check if we have a string operation here.
  Handle<Code> target;
  if (!allocation_site.is_null() || state.ShouldCreateAllocationMementos()) {
    // Set up the allocation site on-demand.
    if (allocation_site.is_null()) {
      allocation_site = isolate()->factory()->NewAllocationSite();
    // Install the stub with an allocation site.
    BinaryOpICWithAllocationSiteStub stub(state);
    target = stub.GetCodeCopyFromTemplate(isolate(), allocation_site);
    // Sanity check the trampoline stub.
    ASSERT_EQ(*allocation_site, target->FindFirstAllocationSite());
    // Install the generic stub.
    BinaryOpICStub stub(state);
    target = stub.GetCode(isolate());
    // Sanity check the generic stub.
    ASSERT_EQ(NULL, target->FindFirstAllocationSite());
  set_target(*target);
  if (FLAG_trace_ic) {
    // Emit a "[BinaryOpIC <old> => <new> @ <addr>]"-style trace line.
    NoAllocationStringAllocator allocator(
        buffer, static_cast<unsigned>(sizeof(buffer)));
    StringStream stream(&allocator);
    stream.Add("[BinaryOpIC");
    old_state.Print(&stream);
    state.Print(&stream);
    stream.Add(" @ %p <- ", static_cast<void*>(*target));
    stream.OutputToStdOut();
    JavaScriptFrame::PrintTop(isolate(), stdout, false, true);
    if (!allocation_site.is_null()) {
      PrintF(" using allocation site %p", static_cast<void*>(*allocation_site));
  // Patch the inlined smi code as necessary.
  if (!old_state.UseInlinedSmiCode() && state.UseInlinedSmiCode()) {
    PatchInlinedSmiCode(address(), ENABLE_INLINED_SMI_CHECK);
  } else if (old_state.UseInlinedSmiCode() && !state.UseInlinedSmiCode()) {
    PatchInlinedSmiCode(address(), DISABLE_INLINED_SMI_CHECK);
// Miss handler for the plain BinaryOpIC (no allocation site involved).
RUNTIME_FUNCTION(MaybeObject*, BinaryOpIC_Miss) {
  HandleScope scope(isolate);
  ASSERT_EQ(2, args.length());
  Handle<Object> left = args.at<Object>(BinaryOpICStub::kLeft);
  Handle<Object> right = args.at<Object>(BinaryOpICStub::kRight);
  BinaryOpIC ic(isolate);
  // A null allocation site means "create one on demand if needed".
  return ic.Transition(Handle<AllocationSite>::null(), left, right);
// Miss handler for the BinaryOpIC variant that carries an allocation site
// (used so results can be tracked for allocation-memento purposes).
RUNTIME_FUNCTION(MaybeObject*, BinaryOpIC_MissWithAllocationSite) {
  HandleScope scope(isolate);
  ASSERT_EQ(3, args.length());
  Handle<AllocationSite> allocation_site = args.at<AllocationSite>(
      BinaryOpWithAllocationSiteStub::kAllocationSite);
  Handle<Object> left = args.at<Object>(
      BinaryOpWithAllocationSiteStub::kLeft);
  Handle<Object> right = args.at<Object>(
      BinaryOpWithAllocationSiteStub::kRight);
  BinaryOpIC ic(isolate);
  return ic.Transition(allocation_site, left, right);
// Returns the (pre-existing) uninitialized compare stub for |op| as a raw
// Code pointer; the stub must already be in the code cache (CHECKed).
Code* CompareIC::GetRawUninitialized(Isolate* isolate, Token::Value op) {
  ICCompareStub stub(op, UNINITIALIZED, UNINITIALIZED, UNINITIALIZED);
  CHECK(stub.FindCodeInCache(&code, isolate));
// Returns (compiling if necessary) the uninitialized compare stub for |op|.
Handle<Code> CompareIC::GetUninitialized(Isolate* isolate, Token::Value op) {
  ICCompareStub stub(op, UNINITIALIZED, UNINITIALIZED, UNINITIALIZED);
  return stub.GetCode(isolate);
// Returns a human-readable name for a CompareIC state (for trace output).
const char* CompareIC::GetStateName(State state) {
    case UNINITIALIZED: return "UNINITIALIZED";
    case SMI: return "SMI";
    case NUMBER: return "NUMBER";
    case INTERNALIZED_STRING: return "INTERNALIZED_STRING";
    case STRING: return "STRING";
    case UNIQUE_NAME: return "UNIQUE_NAME";
    case OBJECT: return "OBJECT";
    case KNOWN_OBJECT: return "KNOWN_OBJECT";
    case GENERIC: return "GENERIC";
// Maps a CompareIC state to a static type; KNOWN_OBJECT narrows to the
// class of |map| when a map is supplied.
Type* CompareIC::StateToType(
    CompareIC::State state,
    case CompareIC::UNINITIALIZED: return Type::None(zone);
    case CompareIC::SMI: return Type::Smi(zone);
    case CompareIC::NUMBER: return Type::Number(zone);
    case CompareIC::STRING: return Type::String(zone);
    case CompareIC::INTERNALIZED_STRING: return Type::InternalizedString(zone);
    case CompareIC::UNIQUE_NAME: return Type::UniqueName(zone);
    case CompareIC::OBJECT: return Type::Receiver(zone);
    case CompareIC::KNOWN_OBJECT:
      // Without a concrete map we can only say "some receiver".
      return map.is_null() ? Type::Receiver(zone) : Type::Class(map, zone);
    case CompareIC::GENERIC: return Type::Any(zone);
// Decodes a compare stub's minor key into left/right/overall static types.
void CompareIC::StubInfoToType(int stub_minor_key,
                               Type** overall_type,
  State left_state, right_state, handler_state;
  ICCompareStub::DecodeMinorKey(stub_minor_key, &left_state, &right_state,
                                &handler_state, NULL);
  *left_type = StateToType(zone, left_state);
  *right_type = StateToType(zone, right_state);
  // Only the overall type can be narrowed by a known receiver map.
  *overall_type = StateToType(zone, handler_state, map);
// Computes the new per-input state for one side of a comparison, widening
// the old state according to the value just observed.
CompareIC::State CompareIC::NewInputState(State old_state,
                                          Handle<Object> value) {
  switch (old_state) {
      if (value->IsSmi()) return SMI;
      if (value->IsHeapNumber()) return NUMBER;
      if (value->IsInternalizedString()) return INTERNALIZED_STRING;
      if (value->IsString()) return STRING;
      if (value->IsSymbol()) return UNIQUE_NAME;
      if (value->IsJSObject()) return OBJECT;
      if (value->IsSmi()) return SMI;
      if (value->IsHeapNumber()) return NUMBER;
      if (value->IsNumber()) return NUMBER;
    case INTERNALIZED_STRING:
      // Internalized strings widen to plain STRING / UNIQUE_NAME as needed.
      if (value->IsInternalizedString()) return INTERNALIZED_STRING;
      if (value->IsString()) return STRING;
      if (value->IsSymbol()) return UNIQUE_NAME;
      if (value->IsString()) return STRING;
      if (value->IsUniqueName()) return UNIQUE_NAME;
      if (value->IsJSObject()) return OBJECT;
// Computes the next overall CompareIC state from the previous states and
// the operand values that just caused a miss.
CompareIC::State CompareIC::TargetState(State old_state,
                                        bool has_inlined_smi_code,
  switch (old_state) {
      if (x->IsSmi() && y->IsSmi()) return SMI;
      if (x->IsNumber() && y->IsNumber()) return NUMBER;
      if (Token::IsOrderedRelationalCompareOp(op_)) {
        // Ordered comparisons treat undefined as NaN, so the
        // NUMBER stub will do the right thing.
        if ((x->IsNumber() && y->IsUndefined()) ||
            (y->IsNumber() && x->IsUndefined())) {
      if (x->IsInternalizedString() && y->IsInternalizedString()) {
        // We compare internalized strings as plain ones if we need to determine
        // the order in a non-equality compare.
        return Token::IsEqualityOp(op_) ? INTERNALIZED_STRING : STRING;
      if (x->IsString() && y->IsString()) return STRING;
      if (!Token::IsEqualityOp(op_)) return GENERIC;
      if (x->IsUniqueName() && y->IsUniqueName()) return UNIQUE_NAME;
      if (x->IsJSObject() && y->IsJSObject()) {
        // Same-map objects can use the specialized KNOWN_OBJECT compare.
        if (Handle<JSObject>::cast(x)->map() ==
            Handle<JSObject>::cast(y)->map()) {
          return KNOWN_OBJECT;
      return x->IsNumber() && y->IsNumber() ? NUMBER : GENERIC;
    case INTERNALIZED_STRING:
      ASSERT(Token::IsEqualityOp(op_));
      if (x->IsString() && y->IsString()) return STRING;
      if (x->IsUniqueName() && y->IsUniqueName()) return UNIQUE_NAME;
      // If the failure was due to one side changing from smi to heap number,
      // then keep the state (if other changed at the same time, we will get
      // a second miss and then go to generic).
      if (old_left == SMI && x->IsHeapNumber()) return NUMBER;
      if (old_right == SMI && y->IsHeapNumber()) return NUMBER;
      ASSERT(Token::IsEqualityOp(op_));
      if (x->IsJSObject() && y->IsJSObject()) return OBJECT;
  return GENERIC;  // Make the compiler happy.
// Handles a CompareIC miss: computes new input/overall states from the
// operands, installs the corresponding compare stub, traces the transition,
// and activates inlined smi code on the first transition.
Code* CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
  HandleScope scope(isolate());
  // Decode the previous states out of the currently installed stub.
  State previous_left, previous_right, previous_state;
  ICCompareStub::DecodeMinorKey(target()->stub_info(), &previous_left,
                                &previous_right, &previous_state, NULL);
  State new_left = NewInputState(previous_left, x);
  State new_right = NewInputState(previous_right, y);
  State state = TargetState(previous_state, previous_left, previous_right,
                            HasInlinedSmiCode(address()), x, y);
  ICCompareStub stub(op_, new_left, new_right, state);
  if (state == KNOWN_OBJECT) {
    // KNOWN_OBJECT stubs are specialized on the receiver's map.
    Handle<Map>(Handle<JSObject>::cast(x)->map(), isolate()));
  Handle<Code> new_target = stub.GetCode(isolate());
  set_target(*new_target);
  if (FLAG_trace_ic) {
    PrintF("[CompareIC in ");
    JavaScriptFrame::PrintTop(isolate(), stdout, false, true);
    PrintF(" ((%s+%s=%s)->(%s+%s=%s))#%s @ %p]\n",
           GetStateName(previous_left),
           GetStateName(previous_right),
           GetStateName(previous_state),
           GetStateName(new_left),
           GetStateName(new_right),
           GetStateName(state),
           static_cast<void*>(*stub.GetCode(isolate())));
  // Activate inlined smi code.
  if (previous_state == UNINITIALIZED) {
    PatchInlinedSmiCode(address(), ENABLE_INLINED_SMI_CHECK);
// Used from ICCompareStub::GenerateMiss in code-stubs-<arch>.cc.
// args: (left, right, op-as-smi); returns the newly installed compare stub.
RUNTIME_FUNCTION(Code*, CompareIC_Miss) {
  HandleScope scope(isolate);
  ASSERT(args.length() == 3);
  CompareIC ic(isolate, static_cast<Token::Value>(args.smi_at(2)));
  return ic.UpdateCaches(args.at<Object>(0), args.at<Object>(1));
// Resets a CompareNilIC back to its uninitialized stub (no-op if already
// cleared).
void CompareNilIC::Clear(Address address, Code* target) {
  if (IsCleared(target)) return;
  ExtraICState state = target->extended_extra_ic_state();
  // Rebuild the stub in its UNINITIALIZED form from the recorded state.
  CompareNilICStub stub(state, HydrogenCodeStub::UNINITIALIZED);
  CHECK(stub.FindCodeInCache(&code, target->GetIsolate()));
  SetTargetAtAddress(address, code);
// Generic (slow) nil-comparison: null and undefined compare true; otherwise
// the answer is whether the object is undetectable (e.g. document.all).
MaybeObject* CompareNilIC::DoCompareNilSlow(NilValue nil,
                                            Handle<Object> object) {
  if (object->IsNull() || object->IsUndefined()) {
    return Smi::FromInt(true);
  return Smi::FromInt(object->IsUndetectableObject());
// Handles a CompareNilIC miss: widens the stub's recorded type set with the
// observed object, installs the specialized stub, and returns the answer
// computed by the slow path.
MaybeObject* CompareNilIC::CompareNil(Handle<Object> object) {
  ExtraICState extra_ic_state = target()->extended_extra_ic_state();
  CompareNilICStub stub(extra_ic_state);
  // Extract the current supported types from the patched IC and calculate what
  // types must be supported as a result of the miss.
  bool already_monomorphic = stub.IsMonomorphic();
  stub.UpdateStatus(object);
  NilValue nil = stub.GetNilValue();
  // Find or create the specialized stub to support the new set of types.
  if (stub.IsMonomorphic()) {
    // Reuse the map the IC already saw if it was monomorphic before;
    // otherwise specialize on the map of the object that just missed.
    Handle<Map> monomorphic_map(already_monomorphic
                                ? target()->FindFirstMap()
                                : HeapObject::cast(*object)->map());
    code = isolate()->stub_cache()->ComputeCompareNil(monomorphic_map, stub);
    code = stub.GetCode(isolate());
  return DoCompareNilSlow(nil, object);
// Miss handler for CompareNilIC; args: (object being compared to nil).
RUNTIME_FUNCTION(MaybeObject*, CompareNilIC_Miss) {
  HandleScope scope(isolate);
  Handle<Object> object = args.at<Object>(0);
  CompareNilIC ic(isolate);
  return ic.CompareNil(object);
// Runtime entry that should never actually execute; returns undefined so a
// value is still produced if it is reached.
RUNTIME_FUNCTION(MaybeObject*, Unreachable) {
  return isolate->heap()->undefined_value();
// Maps a binary-operator token to the JS builtin that implements its
// generic semantics.
Builtins::JavaScript BinaryOpIC::TokenToJSBuiltin(Token::Value op) {
      return Builtins::ADD;
      return Builtins::SUB;
      return Builtins::MUL;
      return Builtins::DIV;
      return Builtins::MOD;
      return Builtins::BIT_OR;
    case Token::BIT_AND:
      return Builtins::BIT_AND;
    case Token::BIT_XOR:
      return Builtins::BIT_XOR;
      return Builtins::SAR;
      return Builtins::SHR;
      return Builtins::SHL;
// Handles a ToBooleanIC miss: widens the stub with the observed object,
// installs the updated stub, and returns the boolean result as a Smi.
MaybeObject* ToBooleanIC::ToBoolean(Handle<Object> object) {
  ToBooleanStub stub(target()->extended_extra_ic_state());
  // UpdateStatus both records the object's type and computes its truthiness.
  bool to_boolean_value = stub.UpdateStatus(object);
  Handle<Code> code = stub.GetCode(isolate());
  return Smi::FromInt(to_boolean_value ? 1 : 0);
// Miss handler for ToBooleanIC; args: (object to convert to boolean).
RUNTIME_FUNCTION(MaybeObject*, ToBooleanIC_Miss) {
  ASSERT(args.length() == 1);
  HandleScope scope(isolate);
  Handle<Object> object = args.at<Object>(0);
  ToBooleanIC ic(isolate);
  return ic.ToBoolean(object);
// Table of IC utility entry points, indexed by IC::UtilityId.
static const Address IC_utilities[] = {
#define ADDR(name) FUNCTION_ADDR(name),
// Returns the runtime entry point for the given IC utility id.
Address IC::AddressFromUtilityId(IC::UtilityId id) {
  return IC_utilities[id];
2829 } } // namespace v8::internal