1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 #if V8_TARGET_ARCH_X64
32 #include "arguments.h"
35 #include "stub-cache.h"
40 #define __ ACCESS_MASM(masm)
// Probes one table (primary or secondary) of the megamorphic stub cache.
// Verifies that the entry at |offset| matches |name|, the receiver's map and
// the code |flags|; on a full match it tail-jumps into the cached code
// object, otherwise control falls through to the local miss label.
// NOTE(review): several parameter lines and the |miss| label declaration /
// bind site are elided in this listing — confirm against the full source.
43 static void ProbeTable(Isolate* isolate,
46 StubCache::Table table,
49 // The offset is scaled by 4, based on
50 // kHeapObjectTagSize, which is two bits
52 // We need to scale up the pointer by 2 because the offset is scaled by less
53 // than the pointer size.
54 ASSERT(kPointerSizeLog2 == kHeapObjectTagSize + 1);
55 ScaleFactor scale_factor = times_2;
57 ASSERT_EQ(3 * kPointerSize, sizeof(StubCache::Entry));
58 // The offset register holds the entry offset times four (due to masking
59 // and shifting optimizations).
60 ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
61 ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
64 // Multiply by 3 because there are 3 fields per entry (name, code, map).
65 __ lea(offset, Operand(offset, offset, times_2, 0));
67 __ LoadAddress(kScratchRegister, key_offset);
69 // Check that the key in the entry matches the name.
70 // Multiply entry offset by 16 to get the entry address. Since the
71 // offset register already holds the entry offset times four, multiply
73 __ cmpl(name, Operand(kScratchRegister, offset, scale_factor, 0));
74 __ j(not_equal, &miss);
76 // Get the map entry from the cache.
77 // Use key_offset + kPointerSize * 2, rather than loading map_offset.
// The map field lives two pointers past the key field of the same entry, so
// the key base address can be reused instead of loading another reference.
78 __ movp(kScratchRegister,
79 Operand(kScratchRegister, offset, scale_factor, kPointerSize * 2));
80 __ cmpq(kScratchRegister, FieldOperand(receiver, HeapObject::kMapOffset));
81 __ j(not_equal, &miss);
83 // Get the code entry from the cache.
84 __ LoadAddress(kScratchRegister, value_offset);
85 __ movp(kScratchRegister,
86 Operand(kScratchRegister, offset, scale_factor, 0));
88 // Check that the flags match what we're looking for.
89 __ movl(offset, FieldOperand(kScratchRegister, Code::kFlagsOffset));
90 __ and_(offset, Immediate(~Code::kFlagsNotUsedInLookup));
91 __ cmpl(offset, Immediate(flags));
92 __ j(not_equal, &miss);
// Testing flags force a miss in one table so the other table's path gets
// exercised; the jump targets are elided in this listing.
95 if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
97 } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
102 // Jump to the first instruction in the code stub.
103 __ addq(kScratchRegister, Immediate(Code::kHeaderSize - kHeapObjectTag));
104 __ jmp(kScratchRegister);
// Emits a negative lookup: proves at runtime that |name| is absent from the
// receiver's dictionary-mode property store, jumping to |miss_label| on any
// condition that prevents the proof (interceptor, access checks, non-JSObject
// receiver, fast-mode properties). Counters measure probe frequency/outcome.
// NOTE(review): remaining parameter lines and the trailing label binding are
// elided in this listing.
110 void StubCompiler::GenerateDictionaryNegativeLookup(MacroAssembler* masm,
116 ASSERT(name->IsUniqueName());
117 ASSERT(!receiver.is(scratch0));
118 Counters* counters = masm->isolate()->counters();
119 __ IncrementCounter(counters->negative_lookups(), 1);
// Pessimistically count a miss up front; decremented below on success.
120 __ IncrementCounter(counters->negative_lookups_miss(), 1);
122 __ movp(scratch0, FieldOperand(receiver, HeapObject::kMapOffset));
124 const int kInterceptorOrAccessCheckNeededMask =
125 (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);
127 // Bail out if the receiver has a named interceptor or requires access checks.
128 __ testb(FieldOperand(scratch0, Map::kBitFieldOffset),
129 Immediate(kInterceptorOrAccessCheckNeededMask));
130 __ j(not_zero, miss_label);
132 // Check that receiver is a JSObject.
133 __ CmpInstanceType(scratch0, FIRST_SPEC_OBJECT_TYPE);
134 __ j(below, miss_label);
136 // Load properties array.
137 Register properties = scratch0;
138 __ movp(properties, FieldOperand(receiver, JSObject::kPropertiesOffset));
140 // Check that the properties array is a dictionary.
141 __ CompareRoot(FieldOperand(properties, HeapObject::kMapOffset),
142 Heap::kHashTableMapRootIndex);
143 __ j(not_equal, miss_label);
// Delegate the actual hash-table probe to the shared stub.
146 NameDictionaryLookupStub::GenerateNegativeLookup(masm,
153 __ DecrementCounter(counters->negative_lookups_miss(), 1);
// Probes the megamorphic stub cache for a (receiver map, name, flags) entry.
// Tries the primary table first, then re-hashes and tries the secondary
// table; on a total miss, falls through so the caller can enter the runtime.
// On x64 only |scratch| is needed; the extra registers exist for other
// architectures' signatures and must be no_reg / unused here.
157 void StubCache::GenerateProbe(MacroAssembler* masm,
165 Isolate* isolate = masm->isolate();
167 USE(extra); // The register extra is not used on the X64 platform.
168 USE(extra2); // The register extra2 is not used on the X64 platform.
169 USE(extra3); // The register extra3 is not used on the X64 platform.
170 // Make sure that code is valid. The multiplying code relies on the
171 // entry size being 3 * kPointerSize.
172 ASSERT(sizeof(Entry) == 3 * kPointerSize);
174 // Make sure the flags do not name a specific type.
175 ASSERT(Code::ExtractTypeFromFlags(flags) == 0);
177 // Make sure that there are no register conflicts.
178 ASSERT(!scratch.is(receiver));
179 ASSERT(!scratch.is(name));
181 // Check scratch register is valid, extra and extra2 are unused.
182 ASSERT(!scratch.is(no_reg));
183 ASSERT(extra2.is(no_reg));
184 ASSERT(extra3.is(no_reg));
186 Counters* counters = masm->isolate()->counters();
187 __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1);
189 // Check that the receiver isn't a smi.
190 __ JumpIfSmi(receiver, &miss);
192 // Get the map of the receiver and compute the hash.
193 __ movl(scratch, FieldOperand(name, Name::kHashFieldOffset));
194 // Use only the low 32 bits of the map pointer.
195 __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset))
196 __ xor_(scratch, Immediate(flags));
197 // We mask out the last two bits because they are not part of the hash and
198 // they are always 01 for maps. Also in the two 'and' instructions below.
199 __ and_(scratch, Immediate((kPrimaryTableSize - 1) << kHeapObjectTagSize));
201 // Probe the primary table.
202 ProbeTable(isolate, masm, flags, kPrimary, receiver, name, scratch);
204 // Primary miss: Compute hash for secondary probe.
// The secondary hash is derived from the primary hash by recomputing it and
// mixing in |name| and |flags| again, then masking to the secondary table.
205 __ movl(scratch, FieldOperand(name, Name::kHashFieldOffset));
206 __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
207 __ xor_(scratch, Immediate(flags));
208 __ and_(scratch, Immediate((kPrimaryTableSize - 1) << kHeapObjectTagSize));
209 __ subl(scratch, name);
210 __ addl(scratch, Immediate(flags));
211 __ and_(scratch, Immediate((kSecondaryTableSize - 1) << kHeapObjectTagSize));
213 // Probe the secondary table.
214 ProbeTable(isolate, masm, flags, kSecondary, receiver, name, scratch);
216 // Cache miss: Fall-through and let caller handle the miss by
217 // entering the runtime system.
219 __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1);
// Loads the prototype of the global function at |index| in the native
// context into |prototype|: current context -> global object -> native
// context -> function -> initial map -> prototype.
223 void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
225 Register prototype) {
226 // Load the global or builtins object from the current context.
228 Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
229 // Load the native context from the global or builtins object.
231 FieldOperand(prototype, GlobalObject::kNativeContextOffset));
232 // Load the function from the native context.
233 __ movp(prototype, Operand(prototype, Context::SlotOffset(index)));
234 // Load the initial map. The global functions all have initial maps.
236 FieldOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
237 // Load the prototype from the initial map.
238 __ movp(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
// Like GenerateLoadGlobalFunctionPrototype, but embeds the function and its
// initial map directly in the generated code. A runtime check verifies the
// native context still holds the same function; otherwise jump to |miss|.
242 void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
243 MacroAssembler* masm,
247 Isolate* isolate = masm->isolate();
248 // Get the global function with the given index.
249 Handle<JSFunction> function(
250 JSFunction::cast(isolate->native_context()->get(index)));
252 // Check we're still in the same context.
// |prototype| doubles as a scratch register until the final load.
253 Register scratch = prototype;
254 const int offset = Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX);
255 __ movp(scratch, Operand(rsi, offset));
256 __ movp(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));
257 __ Cmp(Operand(scratch, Context::SlotOffset(index)), function);
258 __ j(not_equal, miss);
260 // Load its initial map. The global functions all have initial maps.
261 __ Move(prototype, Handle<Map>(function->initial_map()));
262 // Load the prototype from the initial map.
263 __ movp(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
// Loads a JSArray's length into rax, jumping to |miss_label| if the receiver
// is a smi or not a JS array. rax is the load IC's result register.
267 void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
271 // Check that the receiver isn't a smi.
272 __ JumpIfSmi(receiver, miss_label);
274 // Check that the object is a JS array.
275 __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
276 __ j(not_equal, miss_label);
278 // Load length directly from the JS array.
279 __ movp(rax, FieldOperand(receiver, JSArray::kLengthOffset));
284 // Generate code to check if an object is a string. If the object is
285 // a string, the map's instance type is left in the scratch register.
// Smis jump to |smi|; heap objects that are not strings jump to
// |non_string_object|; strings fall through.
286 static void GenerateStringCheck(MacroAssembler* masm,
290 Label* non_string_object) {
291 // Check that the object isn't a smi.
292 __ JumpIfSmi(receiver, smi);
294 // Check that the object is a string.
295 __ movp(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
296 __ movzxbq(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
// String instance types have the kNotStringTag bit clear.
297 STATIC_ASSERT(kNotStringTag != 0);
298 __ testl(scratch, Immediate(kNotStringTag));
299 __ j(not_zero, non_string_object);
// Loads a string's length into rax. Also handles JSValue wrappers around
// strings by unwrapping the value first; anything else jumps to |miss|.
303 void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
310 // Check if the object is a string leaving the instance type in the
312 GenerateStringCheck(masm, receiver, scratch1, miss, &check_wrapper);
314 // Load length directly from the string.
315 __ movp(rax, FieldOperand(receiver, String::kLengthOffset));
318 // Check if the object is a JSValue wrapper.
319 __ bind(&check_wrapper);
320 __ cmpl(scratch1, Immediate(JS_VALUE_TYPE));
321 __ j(not_equal, miss);
323 // Check if the wrapped value is a string and load the length
324 // directly if it is.
325 __ movp(scratch2, FieldOperand(receiver, JSValue::kValueOffset));
326 GenerateStringCheck(masm, scratch2, scratch1, miss, miss);
327 __ movp(rax, FieldOperand(scratch2, String::kLengthOffset));
// Loads a function receiver's prototype into rax (the IC result register),
// jumping to |miss_label| when the prototype cannot be fetched.
332 void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
337 __ TryGetFunctionPrototype(receiver, result, miss_label);
338 if (!result.is(rax)) __ movp(rax, result);
// Loads a fast-mode property into |dst|. A non-negative |index| reads from
// the out-of-object properties array; the in-object branch (elided in this
// listing) reads directly from the object. Double representation is not
// supported here (handled by separate storage).
343 void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
348 Representation representation) {
349 ASSERT(!FLAG_track_double_fields || !representation.IsDouble());
350 int offset = index * kPointerSize;
352 // Calculate the offset into the properties array.
353 offset = offset + FixedArray::kHeaderSize;
354 __ movp(dst, FieldOperand(src, JSObject::kPropertiesOffset));
357 __ movp(dst, FieldOperand(src, offset));
// Pushes the arguments for an interceptor call in the layout the runtime
// expects (name, interceptor info, this, holder), as pinned down by the
// STATIC_ASSERTed index constants. Pushes for the other slots are elided in
// this listing.
361 static void PushInterceptorArguments(MacroAssembler* masm,
365 Handle<JSObject> holder_obj) {
366 STATIC_ASSERT(StubCache::kInterceptorArgsNameIndex == 0);
367 STATIC_ASSERT(StubCache::kInterceptorArgsInfoIndex == 1);
368 STATIC_ASSERT(StubCache::kInterceptorArgsThisIndex == 2);
369 STATIC_ASSERT(StubCache::kInterceptorArgsHolderIndex == 3);
370 STATIC_ASSERT(StubCache::kInterceptorArgsLength == 4);
372 Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
// The interceptor handle may be embedded directly only because it cannot
// move: it must not live in new space.
373 ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor));
374 __ Move(kScratchRegister, interceptor);
375 __ push(kScratchRegister);
// Pushes the interceptor arguments and calls the IC utility identified by
// |id| in the runtime to perform the interceptor property load.
381 static void CompileCallLoadPropertyWithInterceptor(
382 MacroAssembler* masm,
386 Handle<JSObject> holder_obj,
388 PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
389 __ CallExternalReference(
390 ExternalReference(IC_Utility(id), masm->isolate()),
391 StubCache::kInterceptorArgsLength);
395 // Generate call to api function.
// Writes the call arguments onto the stack, sets up the CallApiFunctionStub
// ABI registers (callee, call data, holder, function address), and tail-calls
// the stub. Only valid for simple API calls (no complex holder lookups).
396 static void GenerateFastApiCall(MacroAssembler* masm,
397 const CallOptimization& optimization,
398 Handle<Map> receiver_map,
403 ASSERT(optimization.is_simple_api_call());
405 __ PopReturnAddressTo(scratch_in);
408 // Write the arguments to stack frame.
409 for (int i = 0; i < argc; i++) {
410 Register arg = values[argc-1-i];
411 ASSERT(!receiver.is(arg));
412 ASSERT(!scratch_in.is(arg));
415 __ PushReturnAddressFrom(scratch_in);
416 // Stack now matches JSFunction abi.
418 // Abi for CallApiFunctionStub.
419 Register callee = rax;
420 Register call_data = rbx;
421 Register holder = rcx;
422 Register api_function_address = rdx;
423 Register scratch = rdi; // scratch_in is no longer valid.
425 // Put holder in place.
426 CallOptimization::HolderLookup holder_lookup;
427 Handle<JSObject> api_holder = optimization.LookupHolderOfExpectedType(
430 switch (holder_lookup) {
431 case CallOptimization::kHolderIsReceiver:
432 __ Move(holder, receiver);
434 case CallOptimization::kHolderFound:
435 __ Move(holder, api_holder);
437 case CallOptimization::kHolderNotFound:
442 Isolate* isolate = masm->isolate();
443 Handle<JSFunction> function = optimization.constant_function();
444 Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
445 Handle<Object> call_data_obj(api_call_info->data(), isolate);
447 // Put callee in place.
448 __ Move(callee, function);
450 bool call_data_undefined = false;
451 // Put call_data in place.
// New-space call data cannot be embedded (it may move); load it from the
// CallHandlerInfo at runtime instead.
452 if (isolate->heap()->InNewSpace(*call_data_obj)) {
453 __ Move(scratch, api_call_info);
454 __ movp(call_data, FieldOperand(scratch, CallHandlerInfo::kDataOffset));
455 } else if (call_data_obj->IsUndefined()) {
456 call_data_undefined = true;
457 __ LoadRoot(call_data, Heap::kUndefinedValueRootIndex);
459 __ Move(call_data, call_data_obj);
462 // Put api_function_address in place.
463 Address function_address = v8::ToCData<Address>(api_call_info->callback());
465 api_function_address, function_address, RelocInfo::EXTERNAL_REFERENCE);
468 CallApiFunctionStub stub(true, call_data_undefined, argc);
469 __ TailCallStub(&stub);
// Restores the name register to |name| on the miss path. Skipped entirely
// when the label was never jumped to (unused).
473 void StoreStubCompiler::GenerateRestoreName(MacroAssembler* masm,
476 if (!label->is_unused()) {
478 __ Move(this->name(), name);
// Guards against |name| having been added to |global| since compile time:
// ensures its property cell still contains the hole, jumping to |miss|
// otherwise. EnsurePropertyCell creates the (hole-valued) cell if absent.
483 void StubCompiler::GenerateCheckPropertyCell(MacroAssembler* masm,
484 Handle<JSGlobalObject> global,
488 Handle<PropertyCell> cell =
489 JSGlobalObject::EnsurePropertyCell(global, name);
490 ASSERT(cell->value()->IsTheHole());
491 __ Move(scratch, cell);
492 __ Cmp(FieldOperand(scratch, Cell::kValueOffset),
493 masm->isolate()->factory()->the_hole_value());
494 __ j(not_equal, miss);
// Emits the appropriate negative lookup for the holder: a property-cell
// check for global objects, a dictionary negative lookup for slow-mode
// non-proxy objects, and nothing otherwise.
498 void StoreStubCompiler::GenerateNegativeHolderLookup(
499 MacroAssembler* masm,
500 Handle<JSObject> holder,
504 if (holder->IsJSGlobalObject()) {
505 GenerateCheckPropertyCell(
506 masm, Handle<JSGlobalObject>::cast(holder), name, scratch1(), miss);
507 } else if (!holder->HasFastProperties() && !holder->IsJSGlobalProxy()) {
508 GenerateDictionaryNegativeLookup(
509 masm, miss, holder_reg, name, scratch1(), scratch2());
514 // Receiver_reg is preserved on jumps to miss_label, but may be destroyed if
515 // store is successful.
// Stores |value_reg| into |object| while transitioning it to |transition|'s
// map. Validates the value against the new field's representation (constant /
// smi / heap object / double), extends the properties backing store via the
// runtime if needed, updates the map with a write barrier, then writes the
// field (in-object or in the properties array) with the appropriate barrier.
// NOTE(review): several jumps/binds and else-branches are elided in this
// listing.
516 void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm,
517 Handle<JSObject> object,
518 LookupResult* lookup,
519 Handle<Map> transition,
521 Register receiver_reg,
522 Register storage_reg,
529 int descriptor = transition->LastAdded();
530 DescriptorArray* descriptors = transition->instance_descriptors();
531 PropertyDetails details = descriptors->GetDetails(descriptor);
532 Representation representation = details.representation();
533 ASSERT(!representation.IsNone());
535 if (details.type() == CONSTANT) {
// Constant fields store nothing; the value must simply match.
536 Handle<Object> constant(descriptors->GetValue(descriptor), masm->isolate());
537 __ Cmp(value_reg, constant);
538 __ j(not_equal, miss_label);
539 } else if (FLAG_track_fields && representation.IsSmi()) {
540 __ JumpIfNotSmi(value_reg, miss_label);
541 } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
542 __ JumpIfSmi(value_reg, miss_label);
543 } else if (FLAG_track_double_fields && representation.IsDouble()) {
// Double fields box the value into a fresh HeapNumber in storage_reg.
544 Label do_store, heap_number;
545 __ AllocateHeapNumber(storage_reg, scratch1, slow);
547 __ JumpIfNotSmi(value_reg, &heap_number);
548 __ SmiToInteger32(scratch1, value_reg);
549 __ Cvtlsi2sd(xmm0, scratch1);
552 __ bind(&heap_number);
553 __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(),
554 miss_label, DONT_DO_SMI_CHECK);
555 __ movsd(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset));
558 __ movsd(FieldOperand(storage_reg, HeapNumber::kValueOffset), xmm0);
561 // Stub never generated for non-global objects that require access
563 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
565 // Perform map transition for the receiver if necessary.
566 if (details.type() == FIELD &&
567 object->map()->unused_property_fields() == 0) {
568 // The properties must be extended before we can store the value.
569 // We jump to a runtime call that extends the properties array.
570 __ PopReturnAddressTo(scratch1);
571 __ push(receiver_reg);
574 __ PushReturnAddressFrom(scratch1);
575 __ TailCallExternalReference(
576 ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
583 // Update the map of the object.
584 __ Move(scratch1, transition);
585 __ movp(FieldOperand(receiver_reg, HeapObject::kMapOffset), scratch1);
587 // Update the write barrier for the map field.
588 __ RecordWriteField(receiver_reg,
589 HeapObject::kMapOffset,
596 if (details.type() == CONSTANT) {
// Constant transitions return the value, which the IC keeps in rax.
597 ASSERT(value_reg.is(rax));
602 int index = transition->instance_descriptors()->GetFieldIndex(
603 transition->LastAdded());
605 // Adjust for the number of properties stored in the object. Even in the
606 // face of a transition we can use the old map here because the size of the
607 // object and the number of in-object properties is not going to change.
608 index -= object->map()->inobject_properties();
610 // TODO(verwaest): Share this code as a code stub.
611 SmiCheck smi_check = representation.IsTagged()
612 ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
614 // Set the property straight into the object.
615 int offset = object->map()->instance_size() + (index * kPointerSize);
616 if (FLAG_track_double_fields && representation.IsDouble()) {
617 __ movp(FieldOperand(receiver_reg, offset), storage_reg);
619 __ movp(FieldOperand(receiver_reg, offset), value_reg);
622 if (!FLAG_track_fields || !representation.IsSmi()) {
623 // Update the write barrier for the array address.
624 if (!FLAG_track_double_fields || !representation.IsDouble()) {
625 __ movp(storage_reg, value_reg);
628 receiver_reg, offset, storage_reg, scratch1, kDontSaveFPRegs,
629 EMIT_REMEMBERED_SET, smi_check);
632 // Write to the properties array.
633 int offset = index * kPointerSize + FixedArray::kHeaderSize;
634 // Get the properties array (optimistically).
635 __ movp(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
636 if (FLAG_track_double_fields && representation.IsDouble()) {
637 __ movp(FieldOperand(scratch1, offset), storage_reg);
639 __ movp(FieldOperand(scratch1, offset), value_reg);
642 if (!FLAG_track_fields || !representation.IsSmi()) {
643 // Update the write barrier for the array address.
644 if (!FLAG_track_double_fields || !representation.IsDouble()) {
645 __ movp(storage_reg, value_reg);
648 scratch1, offset, storage_reg, receiver_reg, kDontSaveFPRegs,
649 EMIT_REMEMBERED_SET, smi_check);
653 // Return the value (register rax).
654 ASSERT(value_reg.is(rax));
659 // Both name_reg and receiver_reg are preserved on jumps to miss_label,
660 // but may be destroyed if store is successful.
// Stores |value_reg| into an existing field of |object| (no map transition).
// Validates the value against the field's tracked representation, writes
// either in-object or into the properties array, and emits the write barrier
// unless the representation guarantees a smi. Doubles are written into the
// field's existing HeapNumber storage rather than a tagged slot.
// NOTE(review): some else-branches and label binds are elided in this
// listing.
661 void StoreStubCompiler::GenerateStoreField(MacroAssembler* masm,
662 Handle<JSObject> object,
663 LookupResult* lookup,
664 Register receiver_reg,
670 // Stub never generated for non-global objects that require access
672 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
674 int index = lookup->GetFieldIndex().field_index();
676 // Adjust for the number of properties stored in the object. Even in the
677 // face of a transition we can use the old map here because the size of the
678 // object and the number of in-object properties is not going to change.
679 index -= object->map()->inobject_properties();
681 Representation representation = lookup->representation();
682 ASSERT(!representation.IsNone());
683 if (FLAG_track_fields && representation.IsSmi()) {
684 __ JumpIfNotSmi(value_reg, miss_label);
685 } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
686 __ JumpIfSmi(value_reg, miss_label);
687 } else if (FLAG_track_double_fields && representation.IsDouble()) {
688 // Load the double storage.
// A negative adjusted index means the field (and its HeapNumber box) is
// in-object; otherwise it lives in the properties array.
690 int offset = object->map()->instance_size() + (index * kPointerSize);
691 __ movp(scratch1, FieldOperand(receiver_reg, offset));
694 FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
695 int offset = index * kPointerSize + FixedArray::kHeaderSize;
696 __ movp(scratch1, FieldOperand(scratch1, offset));
699 // Store the value into the storage.
700 Label do_store, heap_number;
701 __ JumpIfNotSmi(value_reg, &heap_number);
702 __ SmiToInteger32(scratch2, value_reg);
703 __ Cvtlsi2sd(xmm0, scratch2);
706 __ bind(&heap_number);
707 __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(),
708 miss_label, DONT_DO_SMI_CHECK);
709 __ movsd(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset));
711 __ movsd(FieldOperand(scratch1, HeapNumber::kValueOffset), xmm0);
712 // Return the value (register rax).
713 ASSERT(value_reg.is(rax));
718 // TODO(verwaest): Share this code as a code stub.
719 SmiCheck smi_check = representation.IsTagged()
720 ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
722 // Set the property straight into the object.
723 int offset = object->map()->instance_size() + (index * kPointerSize);
724 __ movp(FieldOperand(receiver_reg, offset), value_reg);
726 if (!FLAG_track_fields || !representation.IsSmi()) {
727 // Update the write barrier for the array address.
728 // Pass the value being stored in the now unused name_reg.
729 __ movp(name_reg, value_reg);
731 receiver_reg, offset, name_reg, scratch1, kDontSaveFPRegs,
732 EMIT_REMEMBERED_SET, smi_check);
735 // Write to the properties array.
736 int offset = index * kPointerSize + FixedArray::kHeaderSize;
737 // Get the properties array (optimistically).
738 __ movp(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
739 __ movp(FieldOperand(scratch1, offset), value_reg);
741 if (!FLAG_track_fields || !representation.IsSmi()) {
742 // Update the write barrier for the array address.
743 // Pass the value being stored in the now unused name_reg.
744 __ movp(name_reg, value_reg);
746 scratch1, offset, name_reg, receiver_reg, kDontSaveFPRegs,
747 EMIT_REMEMBERED_SET, smi_check);
751 // Return the value (register rax).
752 ASSERT(value_reg.is(rax));
// Tail-calls into |code| with a direct code-target jump.
757 void StubCompiler::GenerateTailCall(MacroAssembler* masm, Handle<Code> code) {
758 __ jmp(code, RelocInfo::CODE_TARGET);
763 #define __ ACCESS_MASM((masm()))
// Walks the prototype chain from the receiver's map to |holder|, emitting
// per-link checks: map checks for fast/global objects, negative dictionary
// lookups for slow-mode objects, access checks for global proxies, and
// property-cell checks for global objects. Returns the register holding the
// holder at the end of the walk; any failed check jumps to |miss|.
// NOTE(review): loop header, depth bookkeeping and some else-branches are
// elided in this listing.
766 Register StubCompiler::CheckPrototypes(Handle<HeapType> type,
768 Handle<JSObject> holder,
774 PrototypeCheckType check) {
775 Handle<Map> receiver_map(IC::TypeToMap(*type, isolate()));
776 // Make sure that the type feedback oracle harvests the receiver map.
777 // TODO(svenpanne) Remove this hack when all ICs are reworked.
778 __ Move(scratch1, receiver_map);
780 // Make sure there's no overlap between holder and object registers.
781 ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
782 ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
783 && !scratch2.is(scratch1));
785 // Keep track of the current object in register reg. On the first
786 // iteration, reg is an alias for object_reg, on later iterations,
787 // it is an alias for holder_reg.
788 Register reg = object_reg;
791 Handle<JSObject> current = Handle<JSObject>::null();
792 if (type->IsConstant()) current = Handle<JSObject>::cast(type->AsConstant());
793 Handle<JSObject> prototype = Handle<JSObject>::null();
794 Handle<Map> current_map = receiver_map;
795 Handle<Map> holder_map(holder->map());
796 // Traverse the prototype chain and check the maps in the prototype chain for
797 // fast and global objects or do negative lookup for normal objects.
798 while (!current_map.is_identical_to(holder_map)) {
801 // Only global objects and objects that do not require access
802 // checks are allowed in stubs.
803 ASSERT(current_map->IsJSGlobalProxyMap() ||
804 !current_map->is_access_check_needed());
806 prototype = handle(JSObject::cast(current_map->prototype()));
807 if (current_map->is_dictionary_map() &&
808 !current_map->IsJSGlobalObjectMap() &&
809 !current_map->IsJSGlobalProxyMap()) {
// Negative dictionary lookups require a unique name; internalize on
// demand (compile-time only, no code emitted for this).
810 if (!name->IsUniqueName()) {
811 ASSERT(name->IsString());
812 name = factory()->InternalizeString(Handle<String>::cast(name));
814 ASSERT(current.is_null() ||
815 current->property_dictionary()->FindEntry(*name) ==
816 NameDictionary::kNotFound);
818 GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
821 __ movp(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
822 reg = holder_reg; // From now on the object will be in holder_reg.
823 __ movp(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
825 bool in_new_space = heap()->InNewSpace(*prototype);
827 // Save the map in scratch1 for later.
828 __ movp(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
830 if (depth != 1 || check == CHECK_ALL_MAPS) {
831 __ CheckMap(reg, current_map, miss, DONT_DO_SMI_CHECK);
834 // Check access rights to the global object. This has to happen after
835 // the map check so that we know that the object is actually a global
837 if (current_map->IsJSGlobalProxyMap()) {
838 __ CheckAccessGlobalProxy(reg, scratch2, miss);
839 } else if (current_map->IsJSGlobalObjectMap()) {
840 GenerateCheckPropertyCell(
841 masm(), Handle<JSGlobalObject>::cast(current), name,
844 reg = holder_reg; // From now on the object will be in holder_reg.
847 // The prototype is in new space; we cannot store a reference to it
848 // in the code. Load it from the map.
849 __ movp(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
851 // The prototype is in old space; load it directly.
852 __ Move(reg, prototype);
856 // Go to the next object in the prototype chain.
858 current_map = handle(current->map());
861 // Log the check depth.
862 LOG(isolate(), IntEvent("check-maps-depth", depth + 1));
864 if (depth != 0 || check == CHECK_ALL_MAPS) {
865 // Check the holder map.
866 __ CheckMap(reg, current_map, miss, DONT_DO_SMI_CHECK);
869 // Perform security check for access to the global object.
870 ASSERT(current_map->IsJSGlobalProxyMap() ||
871 !current_map->is_access_check_needed());
872 if (current_map->IsJSGlobalProxyMap()) {
873 __ CheckAccessGlobalProxy(reg, scratch1, miss);
876 // Return the register containing the holder.
// Emits the shared miss tail for load handlers: if the miss label was ever
// jumped to, dispatch to the kind-appropriate miss builtin.
881 void LoadStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
882 if (!miss->is_unused()) {
886 TailCallBuiltin(masm(), MissBuiltin(kind()));
// Emits the shared miss tail for store handlers: restores the name register
// first (stores clobber it), then dispatches to the miss builtin.
892 void StoreStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
893 if (!miss->is_unused()) {
896 GenerateRestoreName(masm(), miss, name);
897 TailCallBuiltin(masm(), MissBuiltin(kind()));
// Frontend for callback (accessor) load handlers. After the usual prototype
// checks, slow-mode non-global holders additionally need a runtime probe of
// the holder's NameDictionary to verify that the found value is still the
// expected |callback|. Returns the holder register.
903 Register LoadStubCompiler::CallbackHandlerFrontend(
904 Handle<HeapType> type,
906 Handle<JSObject> holder,
908 Handle<Object> callback) {
911 Register reg = HandlerFrontendHeader(type, object_reg, holder, name, &miss);
913 if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) {
914 ASSERT(!reg.is(scratch2()));
915 ASSERT(!reg.is(scratch3()));
916 ASSERT(!reg.is(scratch4()));
918 // Load the properties dictionary.
919 Register dictionary = scratch4();
920 __ movp(dictionary, FieldOperand(reg, JSObject::kPropertiesOffset));
922 // Probe the dictionary.
924 NameDictionaryLookupStub::GeneratePositiveLookup(masm(),
931 __ bind(&probe_done);
933 // If probing finds an entry in the dictionary, scratch3 contains the
934 // index into the dictionary. Check that the value is the callback.
935 Register index = scratch3();
936 const int kElementsStartOffset =
937 NameDictionary::kHeaderSize +
938 NameDictionary::kElementsStartIndex * kPointerSize;
// The value is the second pointer of each (key, value, details) triple.
939 const int kValueOffset = kElementsStartOffset + kPointerSize;
941 Operand(dictionary, index, times_pointer_size,
942 kValueOffset - kHeapObjectTag));
943 __ Move(scratch3(), callback, RelocInfo::EMBEDDED_OBJECT);
944 __ cmpq(scratch2(), scratch3());
945 __ j(not_equal, &miss);
948 HandlerFrontendFooter(name, &miss);
// Finishes a field load by tail-calling a (Keyed)LoadFieldStub configured
// with the field's location (in-object flag + translated index). The holder
// is moved into the receiver register first, as the stub expects.
953 void LoadStubCompiler::GenerateLoadField(Register reg,
954 Handle<JSObject> holder,
956 Representation representation) {
957 if (!reg.is(receiver())) __ movp(receiver(), reg);
958 if (kind() == Code::LOAD_IC) {
959 LoadFieldStub stub(field.is_inobject(holder),
960 field.translate(holder),
962 GenerateTailCall(masm(), stub.GetCode(isolate()));
964 KeyedLoadFieldStub stub(field.is_inobject(holder),
965 field.translate(holder),
967 GenerateTailCall(masm(), stub.GetCode(isolate()));
// Fast path for API getters eligible for direct calls: emits a zero-argument
// fast API call (receiver only) via GenerateFastApiCall.
972 void LoadStubCompiler::GenerateLoadCallback(
973 const CallOptimization& call_optimization,
974 Handle<Map> receiver_map) {
976 masm(), call_optimization, receiver_map,
977 receiver(), scratch1(), 0, NULL);
// Loads a property through an ExecutableAccessorInfo getter. Builds the
// PropertyCallbackArguments frame on the stack (layout pinned by the
// STATIC_ASSERTs below), then tail-calls CallApiGetterStub with the getter's
// C function address.
981 void LoadStubCompiler::GenerateLoadCallback(
983 Handle<ExecutableAccessorInfo> callback) {
984 // Insert additional parameters into the stack frame above return address.
985 ASSERT(!scratch4().is(reg));
986 __ PopReturnAddressTo(scratch4());
988 STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0);
989 STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1);
990 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2);
991 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3);
992 STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4);
993 STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5);
994 STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 6);
995 __ push(receiver()); // receiver
// Data in new space may move, so it must be loaded from the callback object
// at runtime rather than embedded in the code.
996 if (heap()->InNewSpace(callback->data())) {
997 ASSERT(!scratch2().is(reg));
998 __ Move(scratch2(), callback);
999 __ push(FieldOperand(scratch2(),
1000 ExecutableAccessorInfo::kDataOffset)); // data
1002 __ Push(Handle<Object>(callback->data(), isolate()));
1004 ASSERT(!kScratchRegister.is(reg));
1005 __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
1006 __ push(kScratchRegister); // return value
1007 __ push(kScratchRegister); // return value default
1008 __ PushAddress(ExternalReference::isolate_address(isolate()));
1009 __ push(reg); // holder
1010 __ push(name()); // name
1011 // Save a pointer to where we pushed the arguments pointer. This will be
1012 // passed as the const PropertyAccessorInfo& to the C++ callback.
1014 __ PushReturnAddressFrom(scratch4());
1016 // Abi for CallApiGetter
1017 Register api_function_address = r8;
1018 Address getter_address = v8::ToCData<Address>(callback->getter());
1019 __ Move(api_function_address, getter_address, RelocInfo::EXTERNAL_REFERENCE);
1021 CallApiGetterStub stub;
1022 __ TailCallStub(&stub);
// Loads a compile-time-constant property value directly into rax.
1026 void LoadStubCompiler::GenerateLoadConstant(Handle<Object> value) {
1027 // Return the constant value.
1028 __ Move(rax, value);
// Emits the load path for a property guarded by a named interceptor.
// Two strategies:
//  1. Inline follow-up (FIELD / compatible CALLBACKS lookups): call the
//     interceptor inside an internal frame; if it yields no result
//     (sentinel in rax), fall through to GenerateLoadPostInterceptor.
//  2. Otherwise: push the interceptor arguments and tail-call the
//     kLoadPropertyWithInterceptorForLoad runtime entry.
// NOTE(review): original line numbers are non-contiguous throughout this
// function (e.g. 1055-1057, 1096-1097, 1102-1104, 1106-1107, 1120+ are
// absent) — several pops/closing braces are not visible in this listing.
// TYPO(review): local `must_perfrom_prototype_check` should be spelled
// `must_perform_prototype_check`; rename (two occurrences, lines
// 1068/1070) when the full file can be edited — a partial rename here
// could break the non-visible lines.
1033 void LoadStubCompiler::GenerateLoadInterceptor(
1034 Register holder_reg,
1035 Handle<Object> object,
1036 Handle<JSObject> interceptor_holder,
1037 LookupResult* lookup,
1038 Handle<Name> name) {
1039 ASSERT(interceptor_holder->HasNamedInterceptor());
1040 ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());
1042 // So far the most popular follow ups for interceptor loads are FIELD
1043 // and CALLBACKS, so inline only them, other cases may be added
// Decide at compile time whether the post-interceptor lookup is simple
// enough (FIELD, or a CALLBACKS getter compatible with the receiver) to
// be compiled inline after the interceptor call.
1045 bool compile_followup_inline = false;
1046 if (lookup->IsFound() && lookup->IsCacheable()) {
1047 if (lookup->IsField()) {
1048 compile_followup_inline = true;
1049 } else if (lookup->type() == CALLBACKS &&
1050 lookup->GetCallbackObject()->IsExecutableAccessorInfo()) {
1051 ExecutableAccessorInfo* callback =
1052 ExecutableAccessorInfo::cast(lookup->GetCallbackObject());
1053 compile_followup_inline = callback->getter() != NULL &&
1054 callback->IsCompatibleReceiver(*object);
1058 if (compile_followup_inline) {
1059 // Compile the interceptor call, followed by inline code to load the
1060 // property from further up the prototype chain if the call fails.
1061 // Check that the maps haven't changed.
1062 ASSERT(holder_reg.is(receiver()) || holder_reg.is(scratch1()));
1064 // Preserve the receiver register explicitly whenever it is different from
1065 // the holder and it is needed should the interceptor return without any
1066 // result. The CALLBACKS case needs the receiver to be passed into C++ code,
1067 // the FIELD case might cause a miss during the prototype check.
1068 bool must_perfrom_prototype_check = *interceptor_holder != lookup->holder();
1069 bool must_preserve_receiver_reg = !receiver().is(holder_reg) &&
1070 (lookup->type() == CALLBACKS || must_perfrom_prototype_check);
1072 // Save necessary data before invoking an interceptor.
1073 // Requires a frame to make GC aware of pushed pointers.
1075 FrameScope frame_scope(masm(), StackFrame::INTERNAL);
1077 if (must_preserve_receiver_reg) {
1078 __ push(receiver());
1080 __ push(holder_reg);
1081 __ push(this->name());
1083 // Invoke an interceptor. Note: map checks from receiver to
1084 // interceptor's holder has been compiled before (see a caller
1086 CompileCallLoadPropertyWithInterceptor(
1087 masm(), receiver(), holder_reg, this->name(), interceptor_holder,
1088 IC::kLoadPropertyWithInterceptorOnly);
1090 // Check if interceptor provided a value for property. If it's
1091 // the case, return immediately.
// The interceptor call leaves its result in rax; a dedicated sentinel
// root marks "interceptor had no result".
1092 Label interceptor_failed;
1093 __ CompareRoot(rax, Heap::kNoInterceptorResultSentinelRootIndex);
1094 __ j(equal, &interceptor_failed);
1095 frame_scope.GenerateLeaveFrame();
// Interceptor produced nothing: restore saved registers (the matching
// pops for holder_reg/receiver are on lines not visible here) and fall
// through to the post-interceptor load.
1098 __ bind(&interceptor_failed);
1099 __ pop(this->name());
1101 if (must_preserve_receiver_reg) {
1105 // Leave the internal frame.
1108 GenerateLoadPostInterceptor(holder_reg, interceptor_holder, name, lookup);
1109 } else { // !compile_followup_inline
1110 // Call the runtime system to load the interceptor.
1111 // Check that the maps haven't changed.
1112 __ PopReturnAddressTo(scratch2());
1113 PushInterceptorArguments(masm(), receiver(), holder_reg,
1114 this->name(), interceptor_holder);
1115 __ PushReturnAddressFrom(scratch2());
1117 ExternalReference ref = ExternalReference(
1118 IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), isolate());
1119 __ TailCallExternalReference(ref, StubCache::kInterceptorArgsLength, 1);
// Jumps to |miss| unless |object| is exactly the true or false oddball;
// falls through (via the |success| label) when it is a boolean.
// NOTE(review): the `Label success;` declaration (original line 1125) and
// the `__ bind(&success)` / closing brace after line 1130 are not visible
// in this listing.
1124 void StubCompiler::GenerateBooleanCheck(Register object, Label* miss) {
1126 // Check that the object is a boolean.
1127 __ Cmp(object, factory()->true_value());
1128 __ j(equal, &success);
1129 __ Cmp(object, factory()->false_value());
1130 __ j(not_equal, miss);
// Compiles a store handler for a property with a native setter
// (ExecutableAccessorInfo): after the HandlerFrontend type/map checks it
// pushes receiver, holder, callback info (plus name and value on lines
// not visible here — the tail call passes 5 arguments) and tail-calls the
// kStoreCallbackProperty runtime entry.
// NOTE(review): original lines 1138 (presumably the `Handle<Name> name`
// parameter used below), 1142, 1147-1148 and 1150 are absent from this
// listing.
1135 Handle<Code> StoreStubCompiler::CompileStoreCallback(
1136 Handle<JSObject> object,
1137 Handle<JSObject> holder,
1139 Handle<ExecutableAccessorInfo> callback) {
1140 Register holder_reg = HandlerFrontend(
1141 IC::CurrentTypeOf(object, isolate()), receiver(), holder, name);
// Re-shuffle the stack: return address is parked in scratch1() while the
// runtime-call arguments are pushed.
1143 __ PopReturnAddressTo(scratch1());
1144 __ push(receiver());
1145 __ push(holder_reg);
1146 __ Push(callback); // callback info
1149 __ PushReturnAddressFrom(scratch1());
1151 // Do tail-call to the runtime system.
1152 ExternalReference store_callback_property =
1153 ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
1154 __ TailCallExternalReference(store_callback_property, 5, 1);
1156 // Return the generated code.
1157 return GetCode(kind(), Code::FAST, name);
// Fast-path overload: compiles a store handler that invokes the API
// setter directly through GenerateFastApiCall (one argument: the value),
// avoiding the generic runtime call of the other overload.
// NOTE(review): original line 1164 (presumably the `Handle<Name> name`
// parameter used below) is absent from this listing.
1161 Handle<Code> StoreStubCompiler::CompileStoreCallback(
1162 Handle<JSObject> object,
1163 Handle<JSObject> holder,
1165 const CallOptimization& call_optimization) {
1166 HandlerFrontend(IC::CurrentTypeOf(object, isolate()),
1167 receiver(), holder, name);
// value() is the single explicit argument handed to the fast API call.
1169 Register values[] = { value() };
1170 GenerateFastApiCall(
1171 masm(), call_optimization, handle(object->map()),
1172 receiver(), scratch1(), 1, values);
1174 // Return the generated code.
1175 return GetCode(kind(), Code::FAST, name);
1180 #define __ ACCESS_MASM(masm)
// Static snippet: stores a property by invoking a JavaScript setter
// function inside an internal frame (receiver in rdx, value in rax), or
// records a deopt continuation point when |setter| is null.
// NOTE(review): the save/restore of the value register, the pushes of the
// setter arguments, the global-receiver movp destination, and the final
// ret (original lines 1188-1189, 1193, 1197, 1199-1200, 1205, 1207-1209,
// 1214, 1218-1219, 1221-1222, 1225+) are not visible in this listing.
1183 void StoreStubCompiler::GenerateStoreViaSetter(
1184 MacroAssembler* masm,
1185 Handle<HeapType> type,
1186 Handle<JSFunction> setter) {
1187 // ----------- S t a t e -------------
1190 // -- rdx : receiver
1191 // -- rsp[0] : return address
1192 // -----------------------------------
// Internal frame so the GC can see the pushed receiver/value.
1194 FrameScope scope(masm, StackFrame::INTERNAL);
1195 Register receiver = rdx;
1196 Register value = rax;
1198 // Save value register, so we can restore it later.
1201 if (!setter.is_null()) {
1202 // Call the JavaScript setter with receiver and value on the stack.
1203 if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
1204 // Swap in the global receiver.
1206 FieldOperand(receiver, JSGlobalObject::kGlobalReceiverOffset));
// Setter is invoked as a regular JS call with one actual argument.
1210 ParameterCount actual(1);
1211 ParameterCount expected(setter);
1212 __ InvokeFunction(setter, expected, actual,
1213 CALL_FUNCTION, NullCallWrapper());
1215 // If we generate a global code snippet for deoptimization only, remember
1216 // the place to continue after deoptimization.
1217 masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
1220 // We have to return the passed value, not the return value of the setter.
1223 // Restore context register.
1224 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
1231 #define __ ACCESS_MASM(masm())
// Compiles a store handler for an interceptor-guarded property: pushes
// receiver and name (plus the value on the non-visible original line
// 1240 — the tail call passes 3 arguments) and tail-calls the
// kStoreInterceptorProperty runtime entry.
1234 Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
1235 Handle<JSObject> object,
1236 Handle<Name> name) {
// Park the return address while the runtime-call arguments are pushed.
1237 __ PopReturnAddressTo(scratch1());
1238 __ push(receiver());
1239 __ push(this->name());
1241 __ PushReturnAddressFrom(scratch1());
1243 // Do tail-call to the runtime system.
1244 ExternalReference store_ic_property =
1245 ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate());
1246 __ TailCallExternalReference(store_ic_property, 3, 1);
1248 // Return the generated code.
1249 return GetCode(kind(), Code::FAST, name);
// Compiles a polymorphic keyed-store dispatcher: compares the receiver's
// map against each entry of |receiver_maps| and jumps to the matching
// handler stub. Entries with a non-null transitioned map first load the
// target map into transition_map() before jumping.
// NOTE(review): the `Label miss;`/`Label next_map;` declarations, the
// `bind(&next_map)`/`bind(&miss)` sites, and the opening of the final
// `return GetICCode(` (original lines 1257, 1267-1268, 1274-1279, 1281,
// 1283) are not visible in this listing — line 1284 below is the
// continuation of that return statement.
1253 Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
1254 MapHandleList* receiver_maps,
1255 CodeHandleList* handler_stubs,
1256 MapHandleList* transitioned_maps) {
// Smis have no map; send them to the miss path immediately.
1258 __ JumpIfSmi(receiver(), &miss, Label::kNear);
1260 __ movp(scratch1(), FieldOperand(receiver(), HeapObject::kMapOffset));
1261 int receiver_count = receiver_maps->length();
1262 for (int i = 0; i < receiver_count; ++i) {
1263 // Check map and tail call if there's a match
1264 __ Cmp(scratch1(), receiver_maps->at(i));
1265 if (transitioned_maps->at(i).is_null()) {
1266 __ j(equal, handler_stubs->at(i), RelocInfo::CODE_TARGET);
// (else-branch: store requires a map transition before the handler runs.)
1269 __ j(not_equal, &next_map, Label::kNear);
1270 __ Move(transition_map(),
1271 transitioned_maps->at(i),
1272 RelocInfo::EMBEDDED_OBJECT);
1273 __ jmp(handler_stubs->at(i), RelocInfo::CODE_TARGET);
// No map matched: fall back to the generic miss builtin.
1280 TailCallBuiltin(masm(), MissBuiltin(kind()));
1282 // Return the generated code.
1284 kind(), Code::NORMAL, factory()->empty_string(), POLYMORPHIC);
// Compiles a handler for a provably nonexistent property: after the
// frontend verifies the whole prototype chain is unchanged, the result is
// simply undefined in rax.
// NOTE(review): the return instruction after line 1295 and the closing
// brace (original lines 1296-1297, 1300+) are not visible in this listing.
1288 Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<HeapType> type,
1289 Handle<JSObject> last,
1290 Handle<Name> name) {
1291 NonexistentHandlerFrontend(type, last, name);
1293 // Return undefined if maps of the full prototype chain are still the
1294 // same and no global property with this name contains a value.
1295 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
1298 // Return the generated code.
1299 return GetCode(kind(), Code::FAST, name);
// Register allocation for LoadIC stubs on x64; the order matches the
// comment (receiver, name, then four scratch registers).
// NOTE(review): the `return registers;` statement and closing brace
// (original lines 1306+) are not visible in this listing.
1303 Register* LoadStubCompiler::registers() {
1304 // receiver, name, scratch1, scratch2, scratch3, scratch4.
1305 static Register registers[] = { rax, rcx, rdx, rbx, rdi, r8 };
// Register allocation for KeyedLoadIC stubs on x64 — note receiver/name
// differ from the plain LoadIC set (rdx/rax vs rax/rcx).
// NOTE(review): the `return registers;` statement and closing brace
// (original lines 1313+) are not visible in this listing.
1310 Register* KeyedLoadStubCompiler::registers() {
1311 // receiver, name, scratch1, scratch2, scratch3, scratch4.
1312 static Register registers[] = { rdx, rax, rbx, rcx, rdi, r8 };
// Register allocation for StoreIC stubs on x64; stores carry a value
// register (rax) in addition to receiver and name.
// NOTE(review): the `return registers;` statement and closing brace
// (original lines 1320+) are not visible in this listing.
1317 Register* StoreStubCompiler::registers() {
1318 // receiver, name, value, scratch1, scratch2, scratch3.
1319 static Register registers[] = { rdx, rcx, rax, rbx, rdi, r8 };
// Register allocation for KeyedStoreIC stubs on x64 — identical to the
// StoreIC set above.
// NOTE(review): the `return registers;` statement and closing brace
// (original lines 1327+) are not visible in this listing.
1324 Register* KeyedStoreStubCompiler::registers() {
1325 // receiver, name, value, scratch1, scratch2, scratch3.
1326 static Register registers[] = { rdx, rcx, rax, rbx, rdi, r8 };
1332 #define __ ACCESS_MASM(masm)
// Static snippet: loads a property by invoking a JavaScript getter
// function inside an internal frame (receiver in rax, zero explicit
// arguments), or records a deopt continuation point when |getter| is null.
// NOTE(review): the push of the receiver, the `receiver` register binding,
// the global-receiver movp destination, and the final ret (original lines
// 1337, 1341, 1344, 1346, 1351, 1353-1354, 1359, 1363-1364, 1367+) are
// not visible in this listing.
1335 void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm,
1336 Handle<HeapType> type,
1338 Handle<JSFunction> getter) {
1339 // ----------- S t a t e -------------
1340 // -- rax : receiver
1342 // -- rsp[0] : return address
1343 // -----------------------------------
// Internal frame so the GC can see the pushed receiver.
1345 FrameScope scope(masm, StackFrame::INTERNAL);
1347 if (!getter.is_null()) {
1348 // Call the JavaScript getter with the receiver on the stack.
1349 if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
1350 // Swap in the global receiver.
1352 FieldOperand(receiver, JSGlobalObject::kGlobalReceiverOffset));
// Getter is invoked as a regular JS call with no actual arguments.
1355 ParameterCount actual(0);
1356 ParameterCount expected(getter);
1357 __ InvokeFunction(getter, expected, actual,
1358 CALL_FUNCTION, NullCallWrapper());
1360 // If we generate a global code snippet for deoptimization only, remember
1361 // the place to continue after deoptimization.
1362 masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
1365 // Restore context register.
1366 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
1373 #define __ ACCESS_MASM(masm())
// Compiles a load handler for a global property stored in a PropertyCell:
// reads the cell's value into rbx and, unless the property is DONT_DELETE,
// checks for the-hole (a deleted slot) before returning.
// NOTE(review): the `Handle<Name> name` parameter, `Label miss;`, the load
// of the cell into rbx, the miss-jump after the hole compare, the move of
// rbx into rax and ret (original lines 1380, 1382, 1385, 1387, 1389, 1391,
// 1395, 1399-1400, 1403-1405, 1407, 1410+) are not visible in this listing.
1376 Handle<Code> LoadStubCompiler::CompileLoadGlobal(
1377 Handle<HeapType> type,
1378 Handle<GlobalObject> global,
1379 Handle<PropertyCell> cell,
1381 bool is_dont_delete) {
1383 // TODO(verwaest): Directly store to rax. Currently we cannot do this, since
1384 // rax is used as receiver(), which we would otherwise clobber before a
1386 HandlerFrontendHeader(type, receiver(), global, name, &miss);
1388 // Get the value from the cell.
1390 __ movp(rbx, FieldOperand(rbx, PropertyCell::kValueOffset));
1392 // Check for deleted property if property can actually be deleted.
1393 if (!is_dont_delete) {
1394 __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
1396 } else if (FLAG_debug_code) {
// DONT_DELETE cells can never hold the hole; assert that in debug builds.
1397 __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
1398 __ Check(not_equal, kDontDeleteCellsCannotContainTheHole);
1401 Counters* counters = isolate()->counters();
1402 __ IncrementCounter(counters->named_load_global_stub(), 1);
1406 HandlerFrontendFooter(name, &miss);
1408 // Return the generated code.
1409 return GetCode(kind(), Code::NORMAL, name);
// Compiles a polymorphic IC dispatcher shared by load and store: for
// keyed ICs it first verifies the name, then compares the receiver's map
// against each non-deprecated entry in |types| and jumps to the matching
// handler. Smis are routed to the Number handler when one exists,
// otherwise to miss. The resulting IC state is MONOMORPHIC when only one
// map was handled.
// NOTE(review): the `Handle<Name> name` parameter, the `Label miss;` and
// `Label number_case;` declarations, the bind(&miss) site, and several
// closing braces (original lines 1416, 1419-1420, 1425-1427, 1430, 1445,
// 1447-1448, 1450-1451, 1453, 1458+) are not visible in this listing.
1413 Handle<Code> BaseLoadStoreStubCompiler::CompilePolymorphicIC(
1414 TypeHandleList* types,
1415 CodeHandleList* handlers,
1417 Code::StubType type,
1418 IcCheckType check) {
// Keyed ICs dispatch on both name and map; verify the name first.
1421 if (check == PROPERTY &&
1422 (kind() == Code::KEYED_LOAD_IC || kind() == Code::KEYED_STORE_IC)) {
1423 __ Cmp(this->name(), name);
1424 __ j(not_equal, &miss);
// A smi receiver has no map; send it to the Number case if |types|
// contains Number, otherwise to miss.
1428 Label* smi_target = IncludesNumberType(types) ? &number_case : &miss;
1429 __ JumpIfSmi(receiver(), smi_target);
1431 Register map_reg = scratch1();
1432 __ movp(map_reg, FieldOperand(receiver(), HeapObject::kMapOffset));
1433 int receiver_count = types->length();
1434 int number_of_handled_maps = 0;
1435 for (int current = 0; current < receiver_count; ++current) {
1436 Handle<HeapType> type = types->at(current);
1437 Handle<Map> map = IC::TypeToMap(*type, isolate());
// Deprecated maps are skipped — their handlers would never be reached.
1438 if (!map->is_deprecated()) {
1439 number_of_handled_maps++;
1440 // Check map and tail call if there's a match
1441 __ Cmp(map_reg, map);
1442 if (type->Is(HeapType::Number())) {
1443 ASSERT(!number_case.is_unused());
1444 __ bind(&number_case);
1446 __ j(equal, handlers->at(current), RelocInfo::CODE_TARGET);
1449 ASSERT(number_of_handled_maps > 0);
1452 TailCallBuiltin(masm(), MissBuiltin(kind()));
1454 // Return the generated code.
1455 InlineCacheState state =
1456 number_of_handled_maps > 1 ? POLYMORPHIC : MONOMORPHIC;
1457 return GetICCode(kind(), type, name, state);
1462 #define __ ACCESS_MASM(masm)
// Static snippet: loads an element from a receiver whose elements are a
// number (seeded) dictionary. Key arrives as a smi in rax, receiver in
// rdx; non-smi keys go to miss, dictionary-probe failures go to slow.
// Both failure paths tail-call the corresponding KeyedLoadIC builtin.
// NOTE(review): the `Label slow, miss;` declarations, the success-path
// ret, and the bind(&slow)/bind(&miss) sites (original lines 1468,
// 1472-1473, 1476, 1480, 1482-1483, 1485, 1487-1489, 1491, 1496-1497,
// 1499, 1504+) are not visible in this listing.
1465 void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
1466 MacroAssembler* masm) {
1467 // ----------- S t a t e -------------
1469 // -- rdx : receiver
1470 // -- rsp[0] : return address
1471 // -----------------------------------
1474 // This stub is meant to be tail-jumped to, the receiver must already
1475 // have been verified by the caller to not be a smi.
// The key must be a smi; untag it into rbx for the dictionary probe.
1477 __ JumpIfNotSmi(rax, &miss);
1478 __ SmiToInteger32(rbx, rax);
1479 __ movp(rcx, FieldOperand(rdx, JSObject::kElementsOffset));
1481 // Check whether the elements is a number dictionary.
1484 // rbx: key as untagged int32
// Probe the number dictionary in rcx; result lands in rax (last arg),
// r9/rdi are scratch. On failure the probe jumps to |slow|.
1486 __ LoadFromNumberDictionary(&slow, rcx, rax, rbx, r9, rdi, rax);
1490 // ----------- S t a t e -------------
1492 // -- rdx : receiver
1493 // -- rsp[0] : return address
1494 // -----------------------------------
1495 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Slow);
1498 // ----------- S t a t e -------------
1500 // -- rdx : receiver
1501 // -- rsp[0] : return address
1502 // -----------------------------------
1503 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss);
1509 } } // namespace v8::internal
1511 #endif // V8_TARGET_ARCH_X64