1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 #if V8_TARGET_ARCH_IA32
34 #include "stub-cache.h"
39 #define __ ACCESS_MASM(masm)
// Probe one table (primary or secondary) of the megamorphic stub cache for
// an entry matching {name, receiver map, flags}; on a hit, jump straight to
// the cached code object, otherwise branch to the local miss label.  The
// first arm uses the spare 'extra' register to hold the code entry; the
// second arm has no spare register and spills the scaled offset to the
// stack instead.
// NOTE(review): this listing is elided -- the remaining parameter lines,
// the label declaration, the flag-testing bodies, the pushes/pops around
// the spilled offset and the final bind are not shown here.
42 static void ProbeTable(Isolate* isolate,
45 StubCache::Table table,
48 // Number of the cache entry pointer-size scaled.
51 ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
52 ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
53 ExternalReference map_offset(isolate->stub_cache()->map_reference(table));
57 // Multiply by 3 because there are 3 fields per entry (name, code, map).
58 __ lea(offset, Operand(offset, offset, times_2, 0));
60 if (extra.is_valid()) {
61 // Get the code entry from the cache.
62 __ mov(extra, Operand::StaticArray(offset, times_1, value_offset));
64 // Check that the key in the entry matches the name.
65 __ cmp(name, Operand::StaticArray(offset, times_1, key_offset));
66 __ j(not_equal, &miss);
68 // Check the map matches.
69 __ mov(offset, Operand::StaticArray(offset, times_1, map_offset));
70 __ cmp(offset, FieldOperand(receiver, HeapObject::kMapOffset));
71 __ j(not_equal, &miss);
73 // Check that the flags match what we're looking for.
74 __ mov(offset, FieldOperand(extra, Code::kFlagsOffset));
75 __ and_(offset, ~Code::kFlagsNotUsedInLookup);
76 __ cmp(offset, flags);
77 __ j(not_equal, &miss);
80 if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
82 } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
87 // Jump to the first instruction in the code stub.
88 __ add(extra, Immediate(Code::kHeaderSize - kHeapObjectTag));
93 // Save the offset on the stack.
96 // Check that the key in the entry matches the name.
97 __ cmp(name, Operand::StaticArray(offset, times_1, key_offset));
98 __ j(not_equal, &miss);
100 // Check the map matches.
101 __ mov(offset, Operand::StaticArray(offset, times_1, map_offset));
102 __ cmp(offset, FieldOperand(receiver, HeapObject::kMapOffset));
103 __ j(not_equal, &miss);
105 // Restore offset register.
106 __ mov(offset, Operand(esp, 0));
108 // Get the code entry from the cache.
109 __ mov(offset, Operand::StaticArray(offset, times_1, value_offset));
111 // Check that the flags match what we're looking for.
112 __ mov(offset, FieldOperand(offset, Code::kFlagsOffset));
113 __ and_(offset, ~Code::kFlagsNotUsedInLookup);
114 __ cmp(offset, flags);
115 __ j(not_equal, &miss);
118 if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
120 } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
125 // Restore offset and re-load code entry from cache.
127 __ mov(offset, Operand::StaticArray(offset, times_1, value_offset));
129 // Jump to the first instruction in the code stub.
130 __ add(offset, Immediate(Code::kHeaderSize - kHeapObjectTag))_
// Prove that |name| is NOT present on |receiver|: bail out to |miss_label|
// if the receiver has a named interceptor or needs access checks, is below
// FIRST_SPEC_OBJECT_TYPE, or its properties are not a dictionary; otherwise
// do a negative dictionary lookup.  Clobbers scratch0 (reused first for the
// map, then for the properties array).
// NOTE(review): listing elided -- remaining parameters, the done label and
// its bind are not shown.
140 void StubCompiler::GenerateDictionaryNegativeLookup(MacroAssembler* masm,
146 ASSERT(name->IsUniqueName());
147 ASSERT(!receiver.is(scratch0));
148 Counters* counters = masm->isolate()->counters();
149 __ IncrementCounter(counters->negative_lookups(), 1);
150 __ IncrementCounter(counters->negative_lookups_miss(), 1);
152 __ mov(scratch0, FieldOperand(receiver, HeapObject::kMapOffset));
154 const int kInterceptorOrAccessCheckNeededMask =
155 (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);
157 // Bail out if the receiver has a named interceptor or requires access checks.
158 __ test_b(FieldOperand(scratch0, Map::kBitFieldOffset),
159 kInterceptorOrAccessCheckNeededMask);
160 __ j(not_zero, miss_label);
162 // Check that receiver is a JSObject.
163 __ CmpInstanceType(scratch0, FIRST_SPEC_OBJECT_TYPE);
164 __ j(below, miss_label);
166 // Load properties array.
167 Register properties = scratch0;
168 __ mov(properties, FieldOperand(receiver, JSObject::kPropertiesOffset));
170 // Check that the properties array is a dictionary.
171 __ cmp(FieldOperand(properties, HeapObject::kMapOffset),
172 Immediate(masm->isolate()->factory()->hash_table_map()));
173 __ j(not_equal, miss_label);
176 NameDictionaryLookupStub::GenerateNegativeLookup(masm,
183 __ DecrementCounter(counters->negative_lookups_miss(), 1);
// Probe the megamorphic stub cache: hash {name hash, receiver map, flags}
// into the primary table; on a miss, re-hash (subtracting name, re-adding
// flags) into the secondary table.  Falls through on a complete miss so
// the caller can enter the runtime.  'scratch' is consumed as the hash
// offset; 'extra' is passed through to ProbeTable.
// NOTE(review): listing elided -- remaining parameters, the miss label
// declaration/bind and the second ProbeTable call's first line are not
// shown.
187 void StubCache::GenerateProbe(MacroAssembler* masm,
197 // Assert that code is valid. The multiplying code relies on the entry size
199 ASSERT(sizeof(Entry) == 12);
201 // Assert the flags do not name a specific type.
202 ASSERT(Code::ExtractTypeFromFlags(flags) == 0);
204 // Assert that there are no register conflicts.
205 ASSERT(!scratch.is(receiver));
206 ASSERT(!scratch.is(name));
207 ASSERT(!extra.is(receiver));
208 ASSERT(!extra.is(name));
209 ASSERT(!extra.is(scratch));
211 // Assert scratch and extra registers are valid, and extra2/3 are unused.
212 ASSERT(!scratch.is(no_reg));
213 ASSERT(extra2.is(no_reg));
214 ASSERT(extra3.is(no_reg));
216 Register offset = scratch;
219 Counters* counters = masm->isolate()->counters();
220 __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1);
222 // Check that the receiver isn't a smi.
223 __ JumpIfSmi(receiver, &miss);
225 // Get the map of the receiver and compute the hash.
226 __ mov(offset, FieldOperand(name, Name::kHashFieldOffset));
227 __ add(offset, FieldOperand(receiver, HeapObject::kMapOffset));
228 __ xor_(offset, flags);
229 // We mask out the last two bits because they are not part of the hash and
230 // they are always 01 for maps. Also in the two 'and' instructions below.
231 __ and_(offset, (kPrimaryTableSize - 1) << kHeapObjectTagSize);
232 // ProbeTable expects the offset to be pointer scaled, which it is, because
233 // the heap object tag size is 2 and the pointer size log 2 is also 2.
234 ASSERT(kHeapObjectTagSize == kPointerSizeLog2);
236 // Probe the primary table.
237 ProbeTable(isolate(), masm, flags, kPrimary, name, receiver, offset, extra);
239 // Primary miss: Compute hash for secondary probe.
240 __ mov(offset, FieldOperand(name, Name::kHashFieldOffset));
241 __ add(offset, FieldOperand(receiver, HeapObject::kMapOffset));
242 __ xor_(offset, flags);
243 __ and_(offset, (kPrimaryTableSize - 1) << kHeapObjectTagSize);
244 __ sub(offset, name);
245 __ add(offset, Immediate(flags));
246 __ and_(offset, (kSecondaryTableSize - 1) << kHeapObjectTagSize);
248 // Probe the secondary table.
250 isolate(), masm, flags, kSecondary, name, receiver, offset, extra);
252 // Cache miss: Fall-through and let caller handle the miss by
253 // entering the runtime system.
255 __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1);
// Load into |prototype| the prototype object of the global function at
// |index|, going through the function's initial map.
// NOTE(review): the |index| parameter line is elided in this listing.
259 void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
261 Register prototype) {
262 __ LoadGlobalFunction(index, prototype);
263 __ LoadGlobalFunctionInitialMap(prototype, prototype);
264 // Load the prototype from the initial map.
265 __ mov(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
// Variant that bakes the compile-time initial map into the generated code:
// it first verifies that the current native context still holds the same
// function at |index| (jumping to |miss| otherwise), then loads the
// prototype from the function's known initial map as an immediate.
// NOTE(review): remaining parameter lines are elided in this listing.
269 void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
270 MacroAssembler* masm,
274 // Get the global function with the given index.
275 Handle<JSFunction> function(
276 JSFunction::cast(masm->isolate()->native_context()->get(index)));
277 // Check we're still in the same context.
278 Register scratch = prototype;
279 const int offset = Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX);
280 __ mov(scratch, Operand(esi, offset));
281 __ mov(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));
282 __ cmp(Operand(scratch, Context::SlotOffset(index)), function);
283 __ j(not_equal, miss);
285 // Load its initial map. The global functions all have initial maps.
286 __ Set(prototype, Immediate(Handle<Map>(function->initial_map())));
287 // Load the prototype from the initial map.
288 __ mov(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
// Load a JSArray's length into eax after verifying that the receiver is a
// non-smi JS array; jumps to |miss_label| otherwise.
// NOTE(review): remaining parameters and the trailing ret are elided in
// this listing.
292 void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
296 // Check that the receiver isn't a smi.
297 __ JumpIfSmi(receiver, miss_label);
299 // Check that the object is a JS array.
300 __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
301 __ j(not_equal, miss_label);
303 // Load length directly from the JS array.
304 __ mov(eax, FieldOperand(receiver, JSArray::kLengthOffset));
309 // Generate code to check if an object is a string. If the object is
310 // a string, the map's instance type is left in the scratch register.
// Smis branch to |smi|; non-string heap objects branch to
// |non_string_object|; strings fall through.
// NOTE(review): the |smi| parameter line is elided in this listing.
311 static void GenerateStringCheck(MacroAssembler* masm,
315 Label* non_string_object) {
316 // Check that the object isn't a smi.
317 __ JumpIfSmi(receiver, smi);
319 // Check that the object is a string.
320 __ mov(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
321 __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
322 STATIC_ASSERT(kNotStringTag != 0);
323 __ test(scratch, Immediate(kNotStringTag));
324 __ j(not_zero, non_string_object);
// Load a string receiver's length (stored as a smi) into eax.  Non-string
// receivers take the check_wrapper path: a JSValue wrapping a string has
// its wrapped value's length loaded instead; everything else goes to
// |miss|.
// NOTE(review): remaining parameters, label declaration and ret sequences
// are elided in this listing.
328 void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
335 // Check if the object is a string leaving the instance type in the
337 GenerateStringCheck(masm, receiver, scratch1, miss, &check_wrapper);
339 // Load length from the string and convert to a smi.
340 __ mov(eax, FieldOperand(receiver, String::kLengthOffset));
343 // Check if the object is a JSValue wrapper.
344 __ bind(&check_wrapper);
345 __ cmp(scratch1, JS_VALUE_TYPE);
346 __ j(not_equal, miss);
348 // Check if the wrapped value is a string and load the length
349 // directly if it is.
350 __ mov(scratch2, FieldOperand(receiver, JSValue::kValueOffset));
351 GenerateStringCheck(masm, scratch2, scratch1, miss, miss);
352 __ mov(eax, FieldOperand(scratch2, String::kLengthOffset));
// Load the receiver function's prototype into eax via
// MacroAssembler::TryGetFunctionPrototype; jumps to |miss_label| when the
// prototype cannot be fetched.
// NOTE(review): remaining parameters and the trailing ret are elided.
357 void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
362 __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
363 __ mov(eax, scratch1);
// Load the property at |index| of |src| into |dst|.  Double representation
// is explicitly unsupported here (see the ASSERT).
// NOTE(review): the branch structure between the in-object load and the
// out-of-object (properties array) load is elided in this listing; only
// both mov forms are visible.
368 void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
373 Representation representation) {
374 ASSERT(!FLAG_track_double_fields || !representation.IsDouble());
375 int offset = index * kPointerSize;
377 // Calculate the offset into the properties array.
378 offset = offset + FixedArray::kHeaderSize;
379 __ mov(dst, FieldOperand(src, JSObject::kPropertiesOffset));
382 __ mov(dst, FieldOperand(src, offset));
// Push the named-interceptor call arguments in the layout pinned down by
// the STATIC_ASSERTed indices (name, interceptor info, this, holder).
// NOTE(review): the actual push sequence after loading the interceptor
// info into the (reused) name register is elided in this listing.
386 static void PushInterceptorArguments(MacroAssembler* masm,
390 Handle<JSObject> holder_obj) {
391 STATIC_ASSERT(StubCache::kInterceptorArgsNameIndex == 0);
392 STATIC_ASSERT(StubCache::kInterceptorArgsInfoIndex == 1);
393 STATIC_ASSERT(StubCache::kInterceptorArgsThisIndex == 2);
394 STATIC_ASSERT(StubCache::kInterceptorArgsHolderIndex == 3);
395 STATIC_ASSERT(StubCache::kInterceptorArgsLength == 4);
397 Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
398 ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor));
399 Register scratch = name;
400 __ mov(scratch, Immediate(interceptor));
// Push the interceptor arguments and call the IC utility function |id| in
// the runtime with StubCache::kInterceptorArgsLength arguments.
// NOTE(review): remaining parameter lines are elided in this listing.
407 static void CompileCallLoadPropertyWithInterceptor(
408 MacroAssembler* masm,
412 Handle<JSObject> holder_obj,
414 PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
415 __ CallExternalReference(
416 ExternalReference(IC_Utility(id), masm->isolate()),
417 StubCache::kInterceptorArgsLength);
421 // Generate call to api function.
422 // This function uses push() to generate smaller, faster code than
423 // the version above. It is an optimization that should will be removed
424 // when api call ICs are generated in hydrogen.
// Sets up the CallApiFunctionStub ABI (callee=eax, call_data=ebx,
// holder=ecx, api_function_address=edx) and tail-calls the stub.
// NOTE(review): this listing is elided -- remaining parameters, the
// argument-push loop body, switch break/UNREACHABLE lines and closing
// braces are not shown.
425 static void GenerateFastApiCall(MacroAssembler* masm,
426 const CallOptimization& optimization,
427 Handle<Map> receiver_map,
432 // Copy return value.
436 // Write the arguments to stack frame.
437 for (int i = 0; i < argc; i++) {
438 Register arg = values[argc-1-i];
439 ASSERT(!receiver.is(arg));
440 ASSERT(!scratch_in.is(arg));
444 // Stack now matches JSFunction abi.
445 ASSERT(optimization.is_simple_api_call());
447 // Abi for CallApiFunctionStub.
448 Register callee = eax;
449 Register call_data = ebx;
450 Register holder = ecx;
451 Register api_function_address = edx;
452 Register scratch = edi; // scratch_in is no longer valid.
454 // Put holder in place.
455 CallOptimization::HolderLookup holder_lookup;
456 Handle<JSObject> api_holder = optimization.LookupHolderOfExpectedType(
459 switch (holder_lookup) {
460 case CallOptimization::kHolderIsReceiver:
461 __ Move(holder, receiver);
463 case CallOptimization::kHolderFound:
464 __ LoadHeapObject(holder, api_holder);
466 case CallOptimization::kHolderNotFound:
471 Isolate* isolate = masm->isolate();
472 Handle<JSFunction> function = optimization.constant_function();
473 Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
474 Handle<Object> call_data_obj(api_call_info->data(), isolate);
476 // Put callee in place.
477 __ LoadHeapObject(callee, function);
479 bool call_data_undefined = false;
480 // Put call_data in place.
481 if (isolate->heap()->InNewSpace(*call_data_obj)) {
482 __ mov(scratch, api_call_info);
483 __ mov(call_data, FieldOperand(scratch, CallHandlerInfo::kDataOffset));
484 } else if (call_data_obj->IsUndefined()) {
485 call_data_undefined = true;
486 __ mov(call_data, Immediate(isolate->factory()->undefined_value()));
488 __ mov(call_data, call_data_obj);
491 // Put api_function_address in place.
492 Address function_address = v8::ToCData<Address>(api_call_info->callback());
493 __ mov(api_function_address, Immediate(function_address));
496 CallApiFunctionStub stub(true, call_data_undefined, argc);
497 __ TailCallStub(&stub);
// Re-materialize |name| into the name register when the miss |label| was
// actually used -- presumably because the name register may have been
// clobbered on that path; confirm against callers.
501 void StoreStubCompiler::GenerateRestoreName(MacroAssembler* masm,
504 if (!label->is_unused()) {
506 __ mov(this->name(), Immediate(name));
511 // Generate code to check that a global property cell is empty. Create
512 // the property cell at compilation time if no cell exists for the
// Compares the cell's value against the_hole and jumps to |miss| when the
// cell is not empty.  NOTE(review): the serializer-enabled arm loads the
// cell indirectly rather than via Operand::ForCell -- presumably because a
// raw cell address cannot be embedded in snapshot code; confirm.
514 void StubCompiler::GenerateCheckPropertyCell(MacroAssembler* masm,
515 Handle<JSGlobalObject> global,
519 Handle<PropertyCell> cell =
520 JSGlobalObject::EnsurePropertyCell(global, name);
521 ASSERT(cell->value()->IsTheHole());
522 Handle<Oddball> the_hole = masm->isolate()->factory()->the_hole_value();
523 if (Serializer::enabled()) {
524 __ mov(scratch, Immediate(cell));
525 __ cmp(FieldOperand(scratch, PropertyCell::kValueOffset),
526 Immediate(the_hole));
528 __ cmp(Operand::ForCell(cell), Immediate(the_hole));
530 __ j(not_equal, miss);
// Prove |name| is absent on |holder|: global objects are checked via an
// empty property cell; dictionary-mode non-proxy holders via a dictionary
// negative lookup.  Fast-properties holders need no code here.
534 void StoreStubCompiler::GenerateNegativeHolderLookup(
535 MacroAssembler* masm,
536 Handle<JSObject> holder,
540 if (holder->IsJSGlobalObject()) {
541 GenerateCheckPropertyCell(
542 masm, Handle<JSGlobalObject>::cast(holder), name, scratch1(), miss);
543 } else if (!holder->HasFastProperties() && !holder->IsJSGlobalProxy()) {
544 GenerateDictionaryNegativeLookup(
545 masm, miss, holder_reg, name, scratch1(), scratch2());
550 // Receiver_reg is preserved on jumps to miss_label, but may be destroyed if
551 // store is successful.
// Store |value_reg| into |object| while transitioning its map to
// |transition|.  CONSTANT properties are only compared against the
// expected value; otherwise the value's representation (smi / heap object
// / double) is validated first, doubles being boxed into |storage_reg|.
// When the object is out of unused property slots, falls back to the
// kSharedStoreIC_ExtendStorage runtime call.
// NOTE(review): this listing is heavily elided -- label binds, push/ret
// sequences, RecordWriteField argument tails and closing braces are not
// shown; do not assume control flow beyond what is visible.
552 void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm,
553 Handle<JSObject> object,
554 LookupResult* lookup,
555 Handle<Map> transition,
557 Register receiver_reg,
558 Register storage_reg,
565 int descriptor = transition->LastAdded();
566 DescriptorArray* descriptors = transition->instance_descriptors();
567 PropertyDetails details = descriptors->GetDetails(descriptor);
568 Representation representation = details.representation();
569 ASSERT(!representation.IsNone());
571 if (details.type() == CONSTANT) {
572 Handle<Object> constant(descriptors->GetValue(descriptor), masm->isolate());
573 __ CmpObject(value_reg, constant);
574 __ j(not_equal, miss_label);
575 } else if (FLAG_track_fields && representation.IsSmi()) {
576 __ JumpIfNotSmi(value_reg, miss_label);
577 } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
578 __ JumpIfSmi(value_reg, miss_label);
579 } else if (FLAG_track_double_fields && representation.IsDouble()) {
580 Label do_store, heap_number;
581 __ AllocateHeapNumber(storage_reg, scratch1, scratch2, slow);
583 __ JumpIfNotSmi(value_reg, &heap_number);
584 __ SmiUntag(value_reg);
585 if (CpuFeatures::IsSupported(SSE2)) {
586 CpuFeatureScope use_sse2(masm, SSE2);
587 __ Cvtsi2sd(xmm0, value_reg);
590 __ fild_s(Operand(esp, 0));
593 __ SmiTag(value_reg);
596 __ bind(&heap_number);
597 __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(),
598 miss_label, DONT_DO_SMI_CHECK);
599 if (CpuFeatures::IsSupported(SSE2)) {
600 CpuFeatureScope use_sse2(masm, SSE2);
601 __ movsd(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset));
603 __ fld_d(FieldOperand(value_reg, HeapNumber::kValueOffset));
607 if (CpuFeatures::IsSupported(SSE2)) {
608 CpuFeatureScope use_sse2(masm, SSE2);
609 __ movsd(FieldOperand(storage_reg, HeapNumber::kValueOffset), xmm0);
611 __ fstp_d(FieldOperand(storage_reg, HeapNumber::kValueOffset));
615 // Stub never generated for non-global objects that require access
617 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
619 // Perform map transition for the receiver if necessary.
620 if (details.type() == FIELD &&
621 object->map()->unused_property_fields() == 0) {
622 // The properties must be extended before we can store the value.
623 // We jump to a runtime call that extends the properties array.
624 __ pop(scratch1); // Return address.
625 __ push(receiver_reg);
626 __ push(Immediate(transition));
629 __ TailCallExternalReference(
630 ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
637 // Update the map of the object.
638 __ mov(scratch1, Immediate(transition));
639 __ mov(FieldOperand(receiver_reg, HeapObject::kMapOffset), scratch1);
641 // Update the write barrier for the map field.
642 __ RecordWriteField(receiver_reg,
643 HeapObject::kMapOffset,
650 if (details.type() == CONSTANT) {
651 ASSERT(value_reg.is(eax));
656 int index = transition->instance_descriptors()->GetFieldIndex(
657 transition->LastAdded());
659 // Adjust for the number of properties stored in the object. Even in the
660 // face of a transition we can use the old map here because the size of the
661 // object and the number of in-object properties is not going to change.
662 index -= object->map()->inobject_properties();
664 SmiCheck smi_check = representation.IsTagged()
665 ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
666 // TODO(verwaest): Share this code as a code stub.
668 // Set the property straight into the object.
669 int offset = object->map()->instance_size() + (index * kPointerSize);
670 if (FLAG_track_double_fields && representation.IsDouble()) {
671 __ mov(FieldOperand(receiver_reg, offset), storage_reg);
673 __ mov(FieldOperand(receiver_reg, offset), value_reg);
676 if (!FLAG_track_fields || !representation.IsSmi()) {
677 // Update the write barrier for the array address.
678 if (!FLAG_track_double_fields || !representation.IsDouble()) {
679 __ mov(storage_reg, value_reg);
681 __ RecordWriteField(receiver_reg,
690 // Write to the properties array.
691 int offset = index * kPointerSize + FixedArray::kHeaderSize;
692 // Get the properties array (optimistically).
693 __ mov(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
694 if (FLAG_track_double_fields && representation.IsDouble()) {
695 __ mov(FieldOperand(scratch1, offset), storage_reg);
697 __ mov(FieldOperand(scratch1, offset), value_reg);
700 if (!FLAG_track_fields || !representation.IsSmi()) {
701 // Update the write barrier for the array address.
702 if (!FLAG_track_double_fields || !representation.IsDouble()) {
703 __ mov(storage_reg, value_reg);
705 __ RecordWriteField(scratch1,
715 // Return the value (register eax).
716 ASSERT(value_reg.is(eax));
721 // Both name_reg and receiver_reg are preserved on jumps to miss_label,
722 // but may be destroyed if store is successful.
// Store |value_reg| into an existing field located by |lookup| -- no map
// transition.  Representation checks (smi / heap object / double) come
// first; double fields are written in place into their existing HeapNumber
// storage, loaded into scratch1 from either the in-object slot or the
// properties array.
// NOTE(review): this listing is heavily elided -- label binds, ret
// sequences, RecordWriteField argument tails and closing braces are not
// shown.
723 void StoreStubCompiler::GenerateStoreField(MacroAssembler* masm,
724 Handle<JSObject> object,
725 LookupResult* lookup,
726 Register receiver_reg,
732 // Stub never generated for non-global objects that require access
734 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
736 int index = lookup->GetFieldIndex().field_index();
738 // Adjust for the number of properties stored in the object. Even in the
739 // face of a transition we can use the old map here because the size of the
740 // object and the number of in-object properties is not going to change.
741 index -= object->map()->inobject_properties();
743 Representation representation = lookup->representation();
744 ASSERT(!representation.IsNone());
745 if (FLAG_track_fields && representation.IsSmi()) {
746 __ JumpIfNotSmi(value_reg, miss_label);
747 } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
748 __ JumpIfSmi(value_reg, miss_label);
749 } else if (FLAG_track_double_fields && representation.IsDouble()) {
750 // Load the double storage.
752 int offset = object->map()->instance_size() + (index * kPointerSize);
753 __ mov(scratch1, FieldOperand(receiver_reg, offset));
755 __ mov(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
756 int offset = index * kPointerSize + FixedArray::kHeaderSize;
757 __ mov(scratch1, FieldOperand(scratch1, offset));
760 // Store the value into the storage.
761 Label do_store, heap_number;
762 __ JumpIfNotSmi(value_reg, &heap_number);
763 __ SmiUntag(value_reg);
764 if (CpuFeatures::IsSupported(SSE2)) {
765 CpuFeatureScope use_sse2(masm, SSE2);
766 __ Cvtsi2sd(xmm0, value_reg);
769 __ fild_s(Operand(esp, 0));
772 __ SmiTag(value_reg);
774 __ bind(&heap_number);
775 __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(),
776 miss_label, DONT_DO_SMI_CHECK);
777 if (CpuFeatures::IsSupported(SSE2)) {
778 CpuFeatureScope use_sse2(masm, SSE2);
779 __ movsd(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset));
781 __ fld_d(FieldOperand(value_reg, HeapNumber::kValueOffset));
784 if (CpuFeatures::IsSupported(SSE2)) {
785 CpuFeatureScope use_sse2(masm, SSE2);
786 __ movsd(FieldOperand(scratch1, HeapNumber::kValueOffset), xmm0);
788 __ fstp_d(FieldOperand(scratch1, HeapNumber::kValueOffset));
790 // Return the value (register eax).
791 ASSERT(value_reg.is(eax));
796 ASSERT(!FLAG_track_double_fields || !representation.IsDouble());
797 // TODO(verwaest): Share this code as a code stub.
798 SmiCheck smi_check = representation.IsTagged()
799 ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
801 // Set the property straight into the object.
802 int offset = object->map()->instance_size() + (index * kPointerSize);
803 __ mov(FieldOperand(receiver_reg, offset), value_reg);
805 if (!FLAG_track_fields || !representation.IsSmi()) {
806 // Update the write barrier for the array address.
807 // Pass the value being stored in the now unused name_reg.
808 __ mov(name_reg, value_reg);
809 __ RecordWriteField(receiver_reg,
818 // Write to the properties array.
819 int offset = index * kPointerSize + FixedArray::kHeaderSize;
820 // Get the properties array (optimistically).
821 __ mov(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
822 __ mov(FieldOperand(scratch1, offset), value_reg);
824 if (!FLAG_track_fields || !representation.IsSmi()) {
825 // Update the write barrier for the array address.
826 // Pass the value being stored in the now unused name_reg.
827 __ mov(name_reg, value_reg);
828 __ RecordWriteField(scratch1,
838 // Return the value (register eax).
839 ASSERT(value_reg.is(eax));
// Tail-call |code| by jumping directly to its code-target reloc entry.
844 void StubCompiler::GenerateTailCall(MacroAssembler* masm, Handle<Code> code) {
845 __ jmp(code, RelocInfo::CODE_TARGET);
850 #define __ ACCESS_MASM(masm())
// Walk the prototype chain from the receiver's type down to |holder|,
// emitting map checks for fast/global objects and negative dictionary
// lookups for dictionary-mode objects, plus global-proxy access checks and
// global-object property-cell checks along the way.  Returns the register
// that ends up holding the holder object.
// NOTE(review): this listing is elided -- remaining parameters, the depth
// counter, several binds/branches and the final return are not shown.
853 Register StubCompiler::CheckPrototypes(Handle<HeapType> type,
855 Handle<JSObject> holder,
861 PrototypeCheckType check) {
862 Handle<Map> receiver_map(IC::TypeToMap(*type, isolate()));
863 // Make sure that the type feedback oracle harvests the receiver map.
864 // TODO(svenpanne) Remove this hack when all ICs are reworked.
865 __ mov(scratch1, receiver_map);
867 // Make sure there's no overlap between holder and object registers.
868 ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
869 ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
870 && !scratch2.is(scratch1));
872 // Keep track of the current object in register reg.
873 Register reg = object_reg;
876 Handle<JSObject> current = Handle<JSObject>::null();
877 if (type->IsConstant()) current = Handle<JSObject>::cast(type->AsConstant());
878 Handle<JSObject> prototype = Handle<JSObject>::null();
879 Handle<Map> current_map = receiver_map;
880 Handle<Map> holder_map(holder->map());
881 // Traverse the prototype chain and check the maps in the prototype chain for
882 // fast and global objects or do negative lookup for normal objects.
883 while (!current_map.is_identical_to(holder_map)) {
886 // Only global objects and objects that do not require access
887 // checks are allowed in stubs.
888 ASSERT(current_map->IsJSGlobalProxyMap() ||
889 !current_map->is_access_check_needed());
891 prototype = handle(JSObject::cast(current_map->prototype()));
892 if (current_map->is_dictionary_map() &&
893 !current_map->IsJSGlobalObjectMap() &&
894 !current_map->IsJSGlobalProxyMap()) {
895 if (!name->IsUniqueName()) {
896 ASSERT(name->IsString());
897 name = factory()->InternalizeString(Handle<String>::cast(name));
899 ASSERT(current.is_null() ||
900 current->property_dictionary()->FindEntry(*name) ==
901 NameDictionary::kNotFound);
903 GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
906 __ mov(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
907 reg = holder_reg; // From now on the object will be in holder_reg.
908 __ mov(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
910 bool in_new_space = heap()->InNewSpace(*prototype);
911 if (depth != 1 || check == CHECK_ALL_MAPS) {
912 __ CheckMap(reg, current_map, miss, DONT_DO_SMI_CHECK);
915 // Check access rights to the global object. This has to happen after
916 // the map check so that we know that the object is actually a global
918 if (current_map->IsJSGlobalProxyMap()) {
919 __ CheckAccessGlobalProxy(reg, scratch1, scratch2, miss);
920 } else if (current_map->IsJSGlobalObjectMap()) {
921 GenerateCheckPropertyCell(
922 masm(), Handle<JSGlobalObject>::cast(current), name,
927 // Save the map in scratch1 for later.
928 __ mov(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
931 reg = holder_reg; // From now on the object will be in holder_reg.
934 // The prototype is in new space; we cannot store a reference to it
935 // in the code. Load it from the map.
936 __ mov(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
938 // The prototype is in old space; load it directly.
939 __ mov(reg, prototype);
943 // Go to the next object in the prototype chain.
945 current_map = handle(current->map());
948 // Log the check depth.
949 LOG(isolate(), IntEvent("check-maps-depth", depth + 1));
951 if (depth != 0 || check == CHECK_ALL_MAPS) {
952 // Check the holder map.
953 __ CheckMap(reg, current_map, miss, DONT_DO_SMI_CHECK);
956 // Perform security check for access to the global object.
957 ASSERT(current_map->IsJSGlobalProxyMap() ||
958 !current_map->is_access_check_needed());
959 if (current_map->IsJSGlobalProxyMap()) {
960 __ CheckAccessGlobalProxy(reg, scratch1, scratch2, miss);
963 // Return the register containing the holder.
// Footer for load handler front-ends: when the miss label was used,
// tail-call the miss builtin for this stub kind.
// NOTE(review): the label bind and jump-over are elided in this listing.
968 void LoadStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
969 if (!miss->is_unused()) {
973 TailCallBuiltin(masm(), MissBuiltin(kind()));
// Footer for store handler front-ends: when the miss label was used,
// restore the name register and tail-call the miss builtin.
// NOTE(review): the label bind and jump-over are elided in this listing.
979 void StoreStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
980 if (!miss->is_unused()) {
983 GenerateRestoreName(masm(), miss, name);
984 TailCallBuiltin(masm(), MissBuiltin(kind()));
// Front-end for callback loads: run the standard handler-front-end checks,
// and for dictionary-mode (non-global) holders additionally do a positive
// dictionary lookup and verify the stored value really is |callback|.
// Returns the register holding the holder.
// NOTE(review): listing elided -- GeneratePositiveLookup's argument tail,
// some push/pop balancing, the scratch3 load and the final return are not
// shown.
990 Register LoadStubCompiler::CallbackHandlerFrontend(
991 Handle<HeapType> type,
993 Handle<JSObject> holder,
995 Handle<Object> callback) {
998 Register reg = HandlerFrontendHeader(type, object_reg, holder, name, &miss);
1000 if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) {
1001 ASSERT(!reg.is(scratch2()));
1002 ASSERT(!reg.is(scratch3()));
1003 Register dictionary = scratch1();
1004 bool must_preserve_dictionary_reg = reg.is(dictionary);
1006 // Load the properties dictionary.
1007 if (must_preserve_dictionary_reg) {
1008 __ push(dictionary);
1010 __ mov(dictionary, FieldOperand(reg, JSObject::kPropertiesOffset));
1012 // Probe the dictionary.
1013 Label probe_done, pop_and_miss;
1014 NameDictionaryLookupStub::GeneratePositiveLookup(masm(),
1021 __ bind(&pop_and_miss);
1022 if (must_preserve_dictionary_reg) {
1026 __ bind(&probe_done);
1028 // If probing finds an entry in the dictionary, scratch2 contains the
1029 // index into the dictionary. Check that the value is the callback.
1030 Register index = scratch2();
1031 const int kElementsStartOffset =
1032 NameDictionary::kHeaderSize +
1033 NameDictionary::kElementsStartIndex * kPointerSize;
1034 const int kValueOffset = kElementsStartOffset + kPointerSize;
1036 Operand(dictionary, index, times_4, kValueOffset - kHeapObjectTag));
1037 if (must_preserve_dictionary_reg) {
1040 __ cmp(scratch3(), callback);
1041 __ j(not_equal, &miss);
1044 HandlerFrontendFooter(name, &miss);
// Tail-call a LoadFieldStub (or KeyedLoadFieldStub for keyed ICs) that
// performs the actual field load, first moving the holder into the
// receiver register if it is not already there.
1049 void LoadStubCompiler::GenerateLoadField(Register reg,
1050 Handle<JSObject> holder,
1051 PropertyIndex field,
1052 Representation representation) {
1053 if (!reg.is(receiver())) __ mov(receiver(), reg);
1054 if (kind() == Code::LOAD_IC) {
1055 LoadFieldStub stub(field.is_inobject(holder),
1056 field.translate(holder),
1058 GenerateTailCall(masm(), stub.GetCode(isolate()));
1060 KeyedLoadFieldStub stub(field.is_inobject(holder),
1061 field.translate(holder),
1063 GenerateTailCall(masm(), stub.GetCode(isolate()));
// Load via an API getter through the fast api-call path, with zero
// explicit arguments (argc == 0, no values array).
1068 void LoadStubCompiler::GenerateLoadCallback(
1069 const CallOptimization& call_optimization,
1070 Handle<Map> receiver_map) {
1071 GenerateFastApiCall(
1072 masm(), call_optimization, receiver_map,
1073 receiver(), scratch1(), 0, NULL);
// Build a PropertyCallbackArguments frame above the return address
// (receiver, data, return-value slots, isolate, holder, name -- layout
// pinned by the STATIC_ASSERTed indices) and tail-call CallApiGetterStub
// with the getter's address in edx.
// NOTE(review): the |reg| parameter line and the args-pointer push are
// elided in this listing.
1077 void LoadStubCompiler::GenerateLoadCallback(
1079 Handle<ExecutableAccessorInfo> callback) {
1080 // Insert additional parameters into the stack frame above return address.
1081 ASSERT(!scratch3().is(reg));
1082 __ pop(scratch3()); // Get return address to place it below.
1084 STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0);
1085 STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1);
1086 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2);
1087 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3);
1088 STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4);
1089 STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5);
1090 __ push(receiver()); // receiver
1091 // Push data from ExecutableAccessorInfo.
1092 if (isolate()->heap()->InNewSpace(callback->data())) {
1093 ASSERT(!scratch2().is(reg));
1094 __ mov(scratch2(), Immediate(callback));
1095 __ push(FieldOperand(scratch2(), ExecutableAccessorInfo::kDataOffset));
1097 __ push(Immediate(Handle<Object>(callback->data(), isolate())));
1099 __ push(Immediate(isolate()->factory()->undefined_value())); // ReturnValue
1100 // ReturnValue default value
1101 __ push(Immediate(isolate()->factory()->undefined_value()));
1102 __ push(Immediate(reinterpret_cast<int>(isolate())));
1103 __ push(reg); // holder
1105 // Save a pointer to where we pushed the arguments. This will be
1106 // passed as the const PropertyAccessorInfo& to the C++ callback.
1109 __ push(name()); // name
1111 __ push(scratch3()); // Restore return address.
1113 // Abi for CallApiGetter
1114 Register getter_address = edx;
1115 Address function_address = v8::ToCData<Address>(callback->getter());
1116 __ mov(getter_address, Immediate(function_address));
1118 CallApiGetterStub stub;
1119 __ TailCallStub(&stub);
// Loads a statically known constant |value| into eax (the ia32 return
// register). The trailing return sequence (original line 1126) is not
// visible in this listing.
1123 void LoadStubCompiler::GenerateLoadConstant(Handle<Object> value) {
1124   // Return the constant value.
1125   __ LoadObject(eax, value);
// Compiles the load path for a property guarded by a named interceptor.
// Two strategies:
//  1. Inline follow-up (FIELD or compatible CALLBACKS lookup result): call
//     the interceptor "only" variant inside an internal frame; if it yields
//     the no-result sentinel, fall through to an inlined post-interceptor
//     load further up the prototype chain.
//  2. Otherwise: tail-call the full runtime interceptor load.
// NOTE(review): this listing has gaps — several closing braces, the push of
// the receiver restore, and a couple of comment continuation lines are
// missing between the numbered lines below.
1130 void LoadStubCompiler::GenerateLoadInterceptor(
1131     Register holder_reg,
1132     Handle<Object> object,
1133     Handle<JSObject> interceptor_holder,
1134     LookupResult* lookup,
1135     Handle<Name> name) {
1136   ASSERT(interceptor_holder->HasNamedInterceptor());
1137   ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());
1139   // So far the most popular follow ups for interceptor loads are FIELD
1140   // and CALLBACKS, so inline only them, other cases may be added
1142   bool compile_followup_inline = false;
1143   if (lookup->IsFound() && lookup->IsCacheable()) {
1144     if (lookup->IsField()) {
1145       compile_followup_inline = true;
1146     } else if (lookup->type() == CALLBACKS &&
1147                lookup->GetCallbackObject()->IsExecutableAccessorInfo()) {
     // Only inline callbacks that have a native getter compatible with
     // this receiver; anything else goes through the runtime.
1148       ExecutableAccessorInfo* callback =
1149           ExecutableAccessorInfo::cast(lookup->GetCallbackObject());
1150       compile_followup_inline = callback->getter() != NULL &&
1151           callback->IsCompatibleReceiver(*object);
1155   if (compile_followup_inline) {
1156     // Compile the interceptor call, followed by inline code to load the
1157     // property from further up the prototype chain if the call fails.
1158     // Check that the maps haven't changed.
1159     ASSERT(holder_reg.is(receiver()) || holder_reg.is(scratch1()));
1161     // Preserve the receiver register explicitly whenever it is different from
1162     // the holder and it is needed should the interceptor return without any
1163     // result. The CALLBACKS case needs the receiver to be passed into C++ code,
1164     // the FIELD case might cause a miss during the prototype check.
     // NOTE(review): "perfrom" below is a long-standing upstream typo for
     // "perform" (local variable only; rename when the full file is in view).
1165     bool must_perfrom_prototype_check = *interceptor_holder != lookup->holder();
1166     bool must_preserve_receiver_reg = !receiver().is(holder_reg) &&
1167         (lookup->type() == CALLBACKS || must_perfrom_prototype_check);
1169     // Save necessary data before invoking an interceptor.
1170     // Requires a frame to make GC aware of pushed pointers.
1172     FrameScope frame_scope(masm(), StackFrame::INTERNAL);
1174     if (must_preserve_receiver_reg) {
1175       __ push(receiver());
1177     __ push(holder_reg);
1178     __ push(this->name());
1180     // Invoke an interceptor. Note: map checks from receiver to
1181     // interceptor's holder has been compiled before (see a caller
1183     CompileCallLoadPropertyWithInterceptor(
1184         masm(), receiver(), holder_reg, this->name(), interceptor_holder,
1185         IC::kLoadPropertyWithInterceptorOnly);
1187     // Check if interceptor provided a value for property. If it's
1188     // the case, return immediately.
1189     Label interceptor_failed;
1190     __ cmp(eax, factory()->no_interceptor_result_sentinel());
1191     __ j(equal, &interceptor_failed);
1192     frame_scope.GenerateLeaveFrame();
1195     // Clobber registers when generating debug-code to provoke errors.
1196     __ bind(&interceptor_failed);
1197     if (FLAG_debug_code) {
1198       __ mov(receiver(), Immediate(BitCast<int32_t>(kZapValue)));
1199       __ mov(holder_reg, Immediate(BitCast<int32_t>(kZapValue)));
1200       __ mov(this->name(), Immediate(BitCast<int32_t>(kZapValue)));
     // Restore what was pushed before the interceptor call (pops must
     // mirror the pushes above; the receiver pop is in a missing line).
1203     __ pop(this->name());
1205     if (must_preserve_receiver_reg) {
1209     // Leave the internal frame.
1212     GenerateLoadPostInterceptor(holder_reg, interceptor_holder, name, lookup);
1213   } else {  // !compile_followup_inline
1214     // Call the runtime system to load the interceptor.
1215     // Check that the maps haven't changed.
1216     __ pop(scratch2());  // save old return address
1217     PushInterceptorArguments(masm(), receiver(), holder_reg,
1218                              this->name(), interceptor_holder);
1219     __ push(scratch2());  // restore old return address
1221     ExternalReference ref =
1222         ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad),
1224     __ TailCallExternalReference(ref, StubCache::kInterceptorArgsLength, 1);
// Jumps to |miss| unless |object| is exactly the true or false oddball.
// The 'success' label declaration and its bind (original lines 1230 and
// 1236-1237) are not visible in this listing.
1229 void StubCompiler::GenerateBooleanCheck(Register object, Label* miss) {
1231   // Check that the object is a boolean.
1232   __ cmp(object, factory()->true_value());
1233   __ j(equal, &success);
1234   __ cmp(object, factory()->false_value());
1235   __ j(not_equal, miss);
// Compiles a store handler for a property backed by an ExecutableAccessorInfo
// setter: validates the receiver/holder via HandlerFrontend, then pushes the
// arguments and tail-calls the kStoreCallbackProperty runtime entry.
// The tail call passes 5 arguments; the pushes for callback, name, and value
// (original lines 1251-1253) are missing from this listing.
1240 Handle<Code> StoreStubCompiler::CompileStoreCallback(
1241     Handle<JSObject> object,
1242     Handle<JSObject> holder,
1244     Handle<ExecutableAccessorInfo> callback) {
1245   Register holder_reg = HandlerFrontend(
1246       IC::CurrentTypeOf(object, isolate()), receiver(), holder, name);
1248   __ pop(scratch1());  // remove the return address
1249   __ push(receiver());
1250   __ push(holder_reg);
1254   __ push(scratch1());  // restore return address
1256   // Do tail-call to the runtime system.
1257   ExternalReference store_callback_property =
1258       ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
1259   __ TailCallExternalReference(store_callback_property, 5, 1);
1261   // Return the generated code.
1262   return GetCode(kind(), Code::FAST, name);
// Compiles a store handler that invokes an API setter directly via
// GenerateFastApiCall, passing the value register as the single argument
// (1, values).
1266 Handle<Code> StoreStubCompiler::CompileStoreCallback(
1267     Handle<JSObject> object,
1268     Handle<JSObject> holder,
1270     const CallOptimization& call_optimization) {
1271   HandlerFrontend(IC::CurrentTypeOf(object, isolate()),
1272                   receiver(), holder, name);
1274   Register values[] = { value() };
1275   GenerateFastApiCall(
1276       masm(), call_optimization, handle(object->map()),
1277       receiver(), scratch1(), 1, values);
1279   // Return the generated code.
1280   return GetCode(kind(), Code::FAST, name);
1285 #define __ ACCESS_MASM(masm)
// Calls a JavaScript setter function inside an internal frame, then restores
// the context register. The stored value (eax) must survive the call, since
// a store expression's result is the assigned value, not the setter's return.
// NOTE(review): lines saving/restoring the value register, the global-proxy
// mov, and several closing braces are missing from this listing.
1288 void StoreStubCompiler::GenerateStoreViaSetter(
1289     MacroAssembler* masm,
1290     Handle<HeapType> type,
1291     Handle<JSFunction> setter) {
1292   // ----------- S t a t e -------------
1295   //  -- edx    : receiver
1296   //  -- esp[0] : return address
1297   // -----------------------------------
1299     FrameScope scope(masm, StackFrame::INTERNAL);
1300     Register receiver = edx;
1301     Register value = eax;
1303     // Save value register, so we can restore it later.
1306     if (!setter.is_null()) {
1307       // Call the JavaScript setter with receiver and value on the stack.
1308       if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
1309         // Swap in the global receiver.
1311             FieldOperand(receiver, JSGlobalObject::kGlobalReceiverOffset));
     // Setter is invoked with one argument (the value).
1315       ParameterCount actual(1);
1316       ParameterCount expected(setter);
1317       __ InvokeFunction(setter, expected, actual,
1318                         CALL_FUNCTION, NullCallWrapper());
1320       // If we generate a global code snippet for deoptimization only, remember
1321       // the place to continue after deoptimization.
1322       masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
1325     // We have to return the passed value, not the return value of the setter.
1328     // Restore context register.
1329     __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
1336 #define __ ACCESS_MASM(masm())
// Compiles a store handler for an object with a named setter interceptor:
// pushes receiver/name (the value push, original line 1345, is missing from
// this listing) and tail-calls the kStoreInterceptorProperty runtime entry
// with 3 arguments.
1339 Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
1340     Handle<JSObject> object,
1341     Handle<Name> name) {
1342   __ pop(scratch1());  // remove the return address
1343   __ push(receiver());
1344   __ push(this->name());
1346   __ push(scratch1());  // restore return address
1348   // Do tail-call to the runtime system.
1349   ExternalReference store_ic_property =
1350       ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate());
1351   __ TailCallExternalReference(store_ic_property, 3, 1);
1353   // Return the generated code.
1354   return GetCode(kind(), Code::FAST, name);
// Compiles a polymorphic keyed-store dispatcher: compares the receiver's map
// against each entry in |receiver_maps| and jumps to the matching handler.
// Entries with a non-null transitioned map first load the target map into
// transition_map() before jumping. Falls through to the miss builtin.
// NOTE(review): the 'miss'/'next_map' label declarations/binds and the final
// GetICCode line head are missing from this listing.
1358 Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
1359     MapHandleList* receiver_maps,
1360     CodeHandleList* handler_stubs,
1361     MapHandleList* transitioned_maps) {
1363   __ JumpIfSmi(receiver(), &miss, Label::kNear);
1364   __ mov(scratch1(), FieldOperand(receiver(), HeapObject::kMapOffset));
1365   for (int i = 0; i < receiver_maps->length(); ++i) {
1366     __ cmp(scratch1(), receiver_maps->at(i));
1367     if (transitioned_maps->at(i).is_null()) {
     // No map transition: jump straight to the handler on a match.
1368       __ j(equal, handler_stubs->at(i));
     // Transitioning store: pass the new map to the handler.
1371       __ j(not_equal, &next_map, Label::kNear);
1372       __ mov(transition_map(), Immediate(transitioned_maps->at(i)));
1373       __ jmp(handler_stubs->at(i), RelocInfo::CODE_TARGET);
1378   TailCallBuiltin(masm(), MissBuiltin(kind()));
1380   // Return the generated code.
1382       kind(), Code::NORMAL, factory()->empty_string(), POLYMORPHIC);
// Compiles a handler that returns undefined for a property known to be
// absent along the whole (validated) prototype chain. The ret instruction
// after the mov is not visible in this listing.
1386 Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<HeapType> type,
1387                                                       Handle<JSObject> last,
1388                                                       Handle<Name> name) {
1389   NonexistentHandlerFrontend(type, last, name);
1391   // Return undefined if maps of the full prototype chain are still the
1392   // same and no global property with this name contains a value.
1393   __ mov(eax, isolate()->factory()->undefined_value());
1396   // Return the generated code.
1397   return GetCode(kind(), Code::FAST, name);
// Fixed ia32 register assignment for load handlers; the trailing
// "return registers;" (original line ~1404) is outside this listing.
1401 Register* LoadStubCompiler::registers() {
1402   // receiver, name, scratch1, scratch2, scratch3, scratch4.
1403   static Register registers[] = { edx, ecx, ebx, eax, edi, no_reg };
// Keyed-load handlers use the same register assignment as plain loads.
1408 Register* KeyedLoadStubCompiler::registers() {
1409   // receiver, name, scratch1, scratch2, scratch3, scratch4.
1410   static Register registers[] = { edx, ecx, ebx, eax, edi, no_reg };
// Store handlers: eax is the value register (third slot), unlike loads
// where eax is a scratch.
1415 Register* StoreStubCompiler::registers() {
1416   // receiver, name, value, scratch1, scratch2, scratch3.
1417   static Register registers[] = { edx, ecx, eax, ebx, edi, no_reg };
// Keyed-store handlers share the store register assignment.
1422 Register* KeyedStoreStubCompiler::registers() {
1423   // receiver, name, value, scratch1, scratch2, scratch3.
1424   static Register registers[] = { edx, ecx, eax, ebx, edi, no_reg };
1430 #define __ ACCESS_MASM(masm)
// Calls a JavaScript getter inside an internal frame (receiver on the
// stack, zero arguments) and restores the context register afterwards.
// Mirrors GenerateStoreViaSetter. NOTE(review): the |receiver| parameter
// line, the receiver push, the global-proxy mov, and the closing braces
// are missing from this listing.
1433 void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm,
1434                                              Handle<HeapType> type,
1436                                              Handle<JSFunction> getter) {
1438     FrameScope scope(masm, StackFrame::INTERNAL);
1440     if (!getter.is_null()) {
1441       // Call the JavaScript getter with the receiver on the stack.
1442       if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
1443         // Swap in the global receiver.
1445             FieldOperand(receiver, JSGlobalObject::kGlobalReceiverOffset));
     // Getter is invoked with no arguments.
1448       ParameterCount actual(0);
1449       ParameterCount expected(getter);
1450       __ InvokeFunction(getter, expected, actual,
1451                         CALL_FUNCTION, NullCallWrapper());
1453       // If we generate a global code snippet for deoptimization only, remember
1454       // the place to continue after deoptimization.
1455       masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
1458     // Restore context register.
1459     __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
1466 #define __ ACCESS_MASM(masm())
// Compiles a load handler for a global property stored in a PropertyCell.
// Under the serializer the cell address cannot be embedded directly, so it
// is materialized as an immediate and the value loaded via FieldOperand;
// otherwise the value is read straight from the cell. Configurable
// (deletable) properties must miss on the hole sentinel.
// NOTE(review): the |name| parameter line, 'miss' label declaration, the
// else lines, and the miss-jump after the hole check are missing from this
// listing.
1469 Handle<Code> LoadStubCompiler::CompileLoadGlobal(
1470     Handle<HeapType> type,
1471     Handle<GlobalObject> global,
1472     Handle<PropertyCell> cell,
1474     bool is_dont_delete) {
1477   HandlerFrontendHeader(type, receiver(), global, name, &miss);
1478   // Get the value from the cell.
1479   if (Serializer::enabled()) {
1480     __ mov(eax, Immediate(cell));
1481     __ mov(eax, FieldOperand(eax, PropertyCell::kValueOffset));
1483     __ mov(eax, Operand::ForCell(cell));
1486   // Check for deleted property if property can actually be deleted.
1487   if (!is_dont_delete) {
1488     __ cmp(eax, factory()->the_hole_value());
   // DONT_DELETE cells can never hold the hole; assert that in debug code.
1490   } else if (FLAG_debug_code) {
1491     __ cmp(eax, factory()->the_hole_value());
1492     __ Check(not_equal, kDontDeleteCellsCannotContainTheHole);
1495   Counters* counters = isolate()->counters();
1496   __ IncrementCounter(counters->named_load_global_stub(), 1);
1497   // The code above already loads the result into the return register.
1500   HandlerFrontendFooter(name, &miss);
1502   // Return the generated code.
1503   return GetCode(kind(), Code::NORMAL, name);
// Compiles a polymorphic IC dispatcher: optionally verifies the property
// name (keyed ICs with a PROPERTY check), routes smis to the Number-type
// handler if one exists, then compares the receiver map against each
// non-deprecated map in |types| and jumps to the matching handler.
// Deprecated maps are skipped so their objects miss and get migrated.
// Reports MONOMORPHIC vs POLYMORPHIC state by the number of handled maps.
// NOTE(review): the |name| parameter line, the 'miss'/'number_case' label
// declarations/binds, and several closing braces are missing from this
// listing.
1507 Handle<Code> BaseLoadStoreStubCompiler::CompilePolymorphicIC(
1508     TypeHandleList* types,
1509     CodeHandleList* handlers,
1511     Code::StubType type,
1512     IcCheckType check) {
1515   if (check == PROPERTY &&
1516       (kind() == Code::KEYED_LOAD_IC || kind() == Code::KEYED_STORE_IC)) {
1517     __ cmp(this->name(), Immediate(name));
1518     __ j(not_equal, &miss);
   // Smis have no map; send them to the Number handler when one is present,
   // otherwise straight to miss.
1522   Label* smi_target = IncludesNumberType(types) ? &number_case : &miss;
1523   __ JumpIfSmi(receiver(), smi_target);
1525   Register map_reg = scratch1();
1526   __ mov(map_reg, FieldOperand(receiver(), HeapObject::kMapOffset));
1527   int receiver_count = types->length();
1528   int number_of_handled_maps = 0;
1529   for (int current = 0; current < receiver_count; ++current) {
1530     Handle<HeapType> type = types->at(current);
1531     Handle<Map> map = IC::TypeToMap(*type, isolate());
1532     if (!map->is_deprecated()) {
1533       number_of_handled_maps++;
1534       __ cmp(map_reg, map);
1535       if (type->Is(HeapType::Number())) {
1536         ASSERT(!number_case.is_unused());
1537         __ bind(&number_case);
1539       __ j(equal, handlers->at(current));
1542   ASSERT(number_of_handled_maps != 0);
1545   TailCallBuiltin(masm(), MissBuiltin(kind()));
1547   // Return the generated code.
1548   InlineCacheState state =
1549       number_of_handled_maps > 1 ? POLYMORPHIC : MONOMORPHIC;
1550   return GetICCode(kind(), type, name, state);
1555 #define __ ACCESS_MASM(masm)
// Keyed load from a dictionary-mode (slow) elements backing store:
// requires a smi key (ecx), loads the elements array, and probes the
// NumberDictionary. Falls back to the KeyedLoadIC_Slow builtin when the
// probe bails out and to KeyedLoadIC_Miss for a non-smi key.
// NOTE(review): the 'slow'/'miss' label declarations and binds, the
// receiver push/pop around the dictionary probe, and the ret are missing
// from this listing.
1558 void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
1559     MacroAssembler* masm) {
1560   // ----------- S t a t e -------------
1562   //  -- edx    : receiver
1563   //  -- esp[0] : return address
1564   // -----------------------------------
1567   // This stub is meant to be tail-jumped to, the receiver must already
1568   // have been verified by the caller to not be a smi.
   // Dictionary element loads only handle smi (numeric) keys.
1569   __ JumpIfNotSmi(ecx, &miss);
1572   __ mov(eax, FieldOperand(edx, JSObject::kElementsOffset));
1574   // Push receiver on the stack to free up a register for the dictionary
1577   __ LoadFromNumberDictionary(&slow, eax, ecx, ebx, edx, edi, eax);
1578   // Pop receiver before returning.
1585   // ----------- S t a t e -------------
1587   //  -- edx    : receiver
1588   //  -- esp[0] : return address
1589   // -----------------------------------
1590   TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Slow);
1593   // ----------- S t a t e -------------
1595   //  -- edx    : receiver
1596   //  -- esp[0] : return address
1597   // -----------------------------------
1598   TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss);
1604 } } // namespace v8::internal
1606 #endif // V8_TARGET_ARCH_IA32