1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
7 #if V8_TARGET_ARCH_IA32
11 #include "stub-cache.h"
16 #define __ ACCESS_MASM(masm)
19 static void ProbeTable(Isolate* isolate,
22 StubCache::Table table,
25 // The number of the cache entry, scaled by the pointer size.
28 ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
29 ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
30 ExternalReference map_offset(isolate->stub_cache()->map_reference(table));
34 // Multiply by 3 because there are 3 fields per entry (name, code, map).
35 __ lea(offset, Operand(offset, offset, times_2, 0));
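// The incoming 'offset' is the entry index times kPointerSize (4 bytes).
// Tripling it here yields index * 12 == index * sizeof(Entry), so adding it
// unscaled (times_1) to key_offset / value_offset / map_offset addresses the
// three fields of the selected cache entry.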
37 if (extra.is_valid()) {
38 // Get the code entry from the cache.
39 __ mov(extra, Operand::StaticArray(offset, times_1, value_offset));
41 // Check that the key in the entry matches the name.
42 __ cmp(name, Operand::StaticArray(offset, times_1, key_offset));
43 __ j(not_equal, &miss);
45 // Check that the map matches.
46 __ mov(offset, Operand::StaticArray(offset, times_1, map_offset));
47 __ cmp(offset, FieldOperand(receiver, HeapObject::kMapOffset));
48 __ j(not_equal, &miss);
50 // Check that the flags match what we're looking for.
51 __ mov(offset, FieldOperand(extra, Code::kFlagsOffset));
52 __ and_(offset, ~Code::kFlagsNotUsedInLookup);
53 __ cmp(offset, flags);
54 __ j(not_equal, &miss);
57 if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
59 } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
64 // Jump to the first instruction in the code stub.
65 __ add(extra, Immediate(Code::kHeaderSize - kHeapObjectTag));
70 // Save the offset on the stack.
73 // Check that the key in the entry matches the name.
74 __ cmp(name, Operand::StaticArray(offset, times_1, key_offset));
75 __ j(not_equal, &miss);
77 // Check that the map matches.
78 __ mov(offset, Operand::StaticArray(offset, times_1, map_offset));
79 __ cmp(offset, FieldOperand(receiver, HeapObject::kMapOffset));
80 __ j(not_equal, &miss);
82 // Restore offset register.
83 __ mov(offset, Operand(esp, 0));
85 // Get the code entry from the cache.
86 __ mov(offset, Operand::StaticArray(offset, times_1, value_offset));
88 // Check that the flags match what we're looking for.
89 __ mov(offset, FieldOperand(offset, Code::kFlagsOffset));
90 __ and_(offset, ~Code::kFlagsNotUsedInLookup);
91 __ cmp(offset, flags);
92 __ j(not_equal, &miss);
95 if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
97 } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
102 // Restore offset and re-load code entry from cache.
104 __ mov(offset, Operand::StaticArray(offset, times_1, value_offset));
106 // Jump to the first instruction in the code stub.
107 __ add(offset, Immediate(Code::kHeaderSize - kHeapObjectTag));
117 void StubCompiler::GenerateDictionaryNegativeLookup(MacroAssembler* masm,
123 ASSERT(name->IsUniqueName());
124 ASSERT(!receiver.is(scratch0));
125 Counters* counters = masm->isolate()->counters();
126 __ IncrementCounter(counters->negative_lookups(), 1);
127 __ IncrementCounter(counters->negative_lookups_miss(), 1);
129 __ mov(scratch0, FieldOperand(receiver, HeapObject::kMapOffset));
131 const int kInterceptorOrAccessCheckNeededMask =
132 (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);
134 // Bail out if the receiver has a named interceptor or requires access checks.
135 __ test_b(FieldOperand(scratch0, Map::kBitFieldOffset),
136 kInterceptorOrAccessCheckNeededMask);
137 __ j(not_zero, miss_label);
139 // Check that receiver is a JSObject.
140 __ CmpInstanceType(scratch0, FIRST_SPEC_OBJECT_TYPE);
141 __ j(below, miss_label);
143 // Load properties array.
144 Register properties = scratch0;
145 __ mov(properties, FieldOperand(receiver, JSObject::kPropertiesOffset));
147 // Check that the properties array is a dictionary.
148 __ cmp(FieldOperand(properties, HeapObject::kMapOffset),
149 Immediate(masm->isolate()->factory()->hash_table_map()));
150 __ j(not_equal, miss_label);
153 NameDictionaryLookupStub::GenerateNegativeLookup(masm,
160 __ DecrementCounter(counters->negative_lookups_miss(), 1);
164 void StubCache::GenerateProbe(MacroAssembler* masm,
174 // Assert that code is valid. The multiplying code relies on the entry size being 12.
176 ASSERT(sizeof(Entry) == 12);
178 // Assert the flags do not name a specific type.
179 ASSERT(Code::ExtractTypeFromFlags(flags) == 0);
181 // Assert that there are no register conflicts.
182 ASSERT(!scratch.is(receiver));
183 ASSERT(!scratch.is(name));
184 ASSERT(!extra.is(receiver));
185 ASSERT(!extra.is(name));
186 ASSERT(!extra.is(scratch));
188 // Assert scratch and extra registers are valid, and extra2/3 are unused.
189 ASSERT(!scratch.is(no_reg));
190 ASSERT(extra2.is(no_reg));
191 ASSERT(extra3.is(no_reg));
193 Register offset = scratch;
196 Counters* counters = masm->isolate()->counters();
197 __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1);
199 // Check that the receiver isn't a smi.
200 __ JumpIfSmi(receiver, &miss);
202 // Get the map of the receiver and compute the hash.
203 __ mov(offset, FieldOperand(name, Name::kHashFieldOffset));
204 __ add(offset, FieldOperand(receiver, HeapObject::kMapOffset));
205 __ xor_(offset, flags);
206 // We mask out the last two bits because they are not part of the hash and
207 // they are always 01 for maps. The two 'and' instructions below apply the same mask.
208 __ and_(offset, (kPrimaryTableSize - 1) << kHeapObjectTagSize);
209 // ProbeTable expects the offset to be pointer scaled, which it is, because
210 // the heap object tag size is 2 and the pointer size log 2 is also 2.
211 ASSERT(kHeapObjectTagSize == kPointerSizeLog2);
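// In effect the primary table index is computed as, roughly:
//   ((name->hash_field() + receiver's map word) ^ flags)
//       & ((kPrimaryTableSize - 1) << kHeapObjectTagSize)
// which is already pointer-size scaled because the two low (tag) bits are masked off.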
213 // Probe the primary table.
214 ProbeTable(isolate(), masm, flags, kPrimary, name, receiver, offset, extra);
216 // Primary miss: Compute hash for secondary probe.
217 __ mov(offset, FieldOperand(name, Name::kHashFieldOffset));
218 __ add(offset, FieldOperand(receiver, HeapObject::kMapOffset));
219 __ xor_(offset, flags);
220 __ and_(offset, (kPrimaryTableSize - 1) << kHeapObjectTagSize);
221 __ sub(offset, name);
222 __ add(offset, Immediate(flags));
223 __ and_(offset, (kSecondaryTableSize - 1) << kHeapObjectTagSize);
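// The secondary probe reuses the masked primary value, roughly:
//   (primary - name + flags) & ((kSecondaryTableSize - 1) << kHeapObjectTagSize)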
225 // Probe the secondary table.
226 ProbeTable(
227 isolate(), masm, flags, kSecondary, name, receiver, offset, extra);
229 // Cache miss: Fall-through and let caller handle the miss by
230 // entering the runtime system.
232 __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1);
236 void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
238 Register prototype) {
239 __ LoadGlobalFunction(index, prototype);
240 __ LoadGlobalFunctionInitialMap(prototype, prototype);
241 // Load the prototype from the initial map.
242 __ mov(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
246 void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
247 MacroAssembler* masm,
251 // Get the global function with the given index.
252 Handle<JSFunction> function(
253 JSFunction::cast(masm->isolate()->native_context()->get(index)));
254 // Check that we're still in the same context.
255 Register scratch = prototype;
256 const int offset = Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX);
257 __ mov(scratch, Operand(esi, offset));
258 __ mov(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));
259 __ cmp(Operand(scratch, Context::SlotOffset(index)), function);
260 __ j(not_equal, miss);
262 // Load its initial map. The global functions all have initial maps.
263 __ Move(prototype, Immediate(Handle<Map>(function->initial_map())));
264 // Load the prototype from the initial map.
265 __ mov(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
269 void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
273 // Check that the receiver isn't a smi.
274 __ JumpIfSmi(receiver, miss_label);
276 // Check that the object is a JS array.
277 __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
278 __ j(not_equal, miss_label);
280 // Load length directly from the JS array.
281 __ mov(eax, FieldOperand(receiver, JSArray::kLengthOffset));
286 void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
291 __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
292 __ mov(eax, scratch1);
297 void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
302 Representation representation) {
303 ASSERT(!representation.IsDouble());
304 int offset = index * kPointerSize;
306 // Calculate the offset into the properties array.
307 offset = offset + FixedArray::kHeaderSize;
308 __ mov(dst, FieldOperand(src, JSObject::kPropertiesOffset));
311 __ mov(dst, FieldOperand(src, offset));
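// In summary: an in-object field is loaded directly from the receiver at
// index * kPointerSize, while an out-of-object field is loaded from the
// properties FixedArray (fetched above via kPropertiesOffset) at
// index * kPointerSize + FixedArray::kHeaderSize.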
315 static void PushInterceptorArguments(MacroAssembler* masm,
319 Handle<JSObject> holder_obj) {
320 STATIC_ASSERT(StubCache::kInterceptorArgsNameIndex == 0);
321 STATIC_ASSERT(StubCache::kInterceptorArgsInfoIndex == 1);
322 STATIC_ASSERT(StubCache::kInterceptorArgsThisIndex == 2);
323 STATIC_ASSERT(StubCache::kInterceptorArgsHolderIndex == 3);
324 STATIC_ASSERT(StubCache::kInterceptorArgsLength == 4);
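// These constants fix the interceptor call argument layout used by the IC
// utility functions: name, interceptor info, receiver ('this') and holder,
// in that order.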
326 Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
327 ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor));
328 Register scratch = name;
329 __ mov(scratch, Immediate(interceptor));
336 static void CompileCallLoadPropertyWithInterceptor(
337 MacroAssembler* masm,
341 Handle<JSObject> holder_obj,
343 PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
344 __ CallExternalReference(
345 ExternalReference(IC_Utility(id), masm->isolate()),
346 StubCache::kInterceptorArgsLength);
350 // Generate call to api function.
351 // This function uses push() to generate smaller, faster code than
352 // the version above. It is an optimization that will be removed
353 // when api call ICs are generated in hydrogen.
354 void StubCompiler::GenerateFastApiCall(MacroAssembler* masm,
355 const CallOptimization& optimization,
356 Handle<Map> receiver_map,
362 // Copy return value.
366 // Write the arguments to stack frame.
367 for (int i = 0; i < argc; i++) {
368 Register arg = values[argc-1-i];
369 ASSERT(!receiver.is(arg));
370 ASSERT(!scratch_in.is(arg));
374 // Stack now matches the JSFunction ABI.
375 ASSERT(optimization.is_simple_api_call());
377 // ABI for CallApiFunctionStub.
378 Register callee = eax;
379 Register call_data = ebx;
380 Register holder = ecx;
381 Register api_function_address = edx;
382 Register scratch = edi; // scratch_in is no longer valid.
384 // Put holder in place.
385 CallOptimization::HolderLookup holder_lookup;
386 Handle<JSObject> api_holder = optimization.LookupHolderOfExpectedType(
389 switch (holder_lookup) {
390 case CallOptimization::kHolderIsReceiver:
391 __ Move(holder, receiver);
393 case CallOptimization::kHolderFound:
394 __ LoadHeapObject(holder, api_holder);
396 case CallOptimization::kHolderNotFound:
401 Isolate* isolate = masm->isolate();
402 Handle<JSFunction> function = optimization.constant_function();
403 Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
404 Handle<Object> call_data_obj(api_call_info->data(), isolate);
406 // Put callee in place.
407 __ LoadHeapObject(callee, function);
409 bool call_data_undefined = false;
410 // Put call_data in place.
411 if (isolate->heap()->InNewSpace(*call_data_obj)) {
412 __ mov(scratch, api_call_info);
413 __ mov(call_data, FieldOperand(scratch, CallHandlerInfo::kDataOffset));
414 } else if (call_data_obj->IsUndefined()) {
415 call_data_undefined = true;
416 __ mov(call_data, Immediate(isolate->factory()->undefined_value()));
418 __ mov(call_data, call_data_obj);
421 // Put api_function_address in place.
422 Address function_address = v8::ToCData<Address>(api_call_info->callback());
423 __ mov(api_function_address, Immediate(function_address));
426 CallApiFunctionStub stub(isolate, is_store, call_data_undefined, argc);
427 __ TailCallStub(&stub);
431 void StoreStubCompiler::GenerateRestoreName(MacroAssembler* masm,
434 if (!label->is_unused()) {
436 __ mov(this->name(), Immediate(name));
441 // Generate code to check that a global property cell is empty. Create
442 // the property cell at compilation time if no cell exists for the property.
444 void StubCompiler::GenerateCheckPropertyCell(MacroAssembler* masm,
445 Handle<JSGlobalObject> global,
449 Handle<PropertyCell> cell =
450 JSGlobalObject::EnsurePropertyCell(global, name);
451 ASSERT(cell->value()->IsTheHole());
452 Handle<Oddball> the_hole = masm->isolate()->factory()->the_hole_value();
453 if (Serializer::enabled(masm->isolate())) {
454 __ mov(scratch, Immediate(cell));
455 __ cmp(FieldOperand(scratch, PropertyCell::kValueOffset),
456 Immediate(the_hole));
458 __ cmp(Operand::ForCell(cell), Immediate(the_hole));
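// Operand::ForCell references the cell's value slot by its raw address; with
// the snapshot serializer enabled that raw reference cannot be used, so the
// cell is instead loaded as an embedded object and its value slot read via
// FieldOperand (the branch above).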
460 __ j(not_equal, miss);
464 void StoreStubCompiler::GenerateNegativeHolderLookup(
465 MacroAssembler* masm,
466 Handle<JSObject> holder,
470 if (holder->IsJSGlobalObject()) {
471 GenerateCheckPropertyCell(
472 masm, Handle<JSGlobalObject>::cast(holder), name, scratch1(), miss);
473 } else if (!holder->HasFastProperties() && !holder->IsJSGlobalProxy()) {
474 GenerateDictionaryNegativeLookup(
475 masm, miss, holder_reg, name, scratch1(), scratch2());
480 // The receiver_reg is preserved on jumps to miss_label, but may be destroyed
481 // if the store is successful.
482 void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm,
483 Handle<JSObject> object,
484 LookupResult* lookup,
485 Handle<Map> transition,
487 Register receiver_reg,
488 Register storage_reg,
495 int descriptor = transition->LastAdded();
496 DescriptorArray* descriptors = transition->instance_descriptors();
497 PropertyDetails details = descriptors->GetDetails(descriptor);
498 Representation representation = details.representation();
499 ASSERT(!representation.IsNone());
501 if (details.type() == CONSTANT) {
502 Handle<Object> constant(descriptors->GetValue(descriptor), masm->isolate());
503 __ CmpObject(value_reg, constant);
504 __ j(not_equal, miss_label);
505 } else if (representation.IsSmi()) {
506 __ JumpIfNotSmi(value_reg, miss_label);
507 } else if (representation.IsHeapObject()) {
508 __ JumpIfSmi(value_reg, miss_label);
509 HeapType* field_type = descriptors->GetFieldType(descriptor);
510 HeapType::Iterator<Map> it = field_type->Classes();
514 __ CompareMap(value_reg, it.Current());
517 __ j(not_equal, miss_label);
520 __ j(equal, &do_store, Label::kNear);
524 } else if (representation.IsDouble()) {
525 Label do_store, heap_number;
526 __ AllocateHeapNumber(storage_reg, scratch1, scratch2, slow);
528 __ JumpIfNotSmi(value_reg, &heap_number);
529 __ SmiUntag(value_reg);
530 if (CpuFeatures::IsSupported(SSE2)) {
531 CpuFeatureScope use_sse2(masm, SSE2);
532 __ Cvtsi2sd(xmm0, value_reg);
535 __ fild_s(Operand(esp, 0));
538 __ SmiTag(value_reg);
541 __ bind(&heap_number);
542 __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(),
543 miss_label, DONT_DO_SMI_CHECK);
544 if (CpuFeatures::IsSupported(SSE2)) {
545 CpuFeatureScope use_sse2(masm, SSE2);
546 __ movsd(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset));
548 __ fld_d(FieldOperand(value_reg, HeapNumber::kValueOffset));
552 if (CpuFeatures::IsSupported(SSE2)) {
553 CpuFeatureScope use_sse2(masm, SSE2);
554 __ movsd(FieldOperand(storage_reg, HeapNumber::kValueOffset), xmm0);
556 __ fstp_d(FieldOperand(storage_reg, HeapNumber::kValueOffset));
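// Without SSE2 the conversion and the store into the freshly allocated
// HeapNumber go through the x87 FPU stack (fild_s / fld_d / fstp_d) instead of xmm0.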
560 // Stub never generated for non-global objects that require access checks.
562 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
564 // Perform map transition for the receiver if necessary.
565 if (details.type() == FIELD &&
566 object->map()->unused_property_fields() == 0) {
567 // The properties must be extended before we can store the value.
568 // We jump to a runtime call that extends the properties array.
569 __ pop(scratch1); // Return address.
570 __ push(receiver_reg);
571 __ push(Immediate(transition));
574 __ TailCallExternalReference(
575 ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
582 // Update the map of the object.
583 __ mov(scratch1, Immediate(transition));
584 __ mov(FieldOperand(receiver_reg, HeapObject::kMapOffset), scratch1);
586 // Update the write barrier for the map field.
587 __ RecordWriteField(receiver_reg,
588 HeapObject::kMapOffset,
595 if (details.type() == CONSTANT) {
596 ASSERT(value_reg.is(eax));
601 int index = transition->instance_descriptors()->GetFieldIndex(
602 transition->LastAdded());
604 // Adjust for the number of properties stored in the object. Even in the
605 // face of a transition we can use the old map here because the size of the
606 // object and the number of in-object properties are not going to change.
607 index -= object->map()->inobject_properties();
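// After this adjustment a negative index denotes an in-object field; a
// non-negative index selects a slot in the out-of-object properties array
// (see the two branches below).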
609 SmiCheck smi_check = representation.IsTagged()
610 ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
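// A tagged field may hold a smi, so the write barrier must check for one at
// run time (INLINE_SMI_CHECK); for the other representations the stored value
// is known statically, so the check is omitted (smi fields skip the barrier
// entirely below).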
611 // TODO(verwaest): Share this code as a code stub.
613 // Set the property straight into the object.
614 int offset = object->map()->instance_size() + (index * kPointerSize);
615 if (representation.IsDouble()) {
616 __ mov(FieldOperand(receiver_reg, offset), storage_reg);
618 __ mov(FieldOperand(receiver_reg, offset), value_reg);
621 if (!representation.IsSmi()) {
622 // Update the write barrier for the array address.
623 if (!representation.IsDouble()) {
624 __ mov(storage_reg, value_reg);
626 __ RecordWriteField(receiver_reg,
635 // Write to the properties array.
636 int offset = index * kPointerSize + FixedArray::kHeaderSize;
637 // Get the properties array (optimistically).
638 __ mov(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
639 if (representation.IsDouble()) {
640 __ mov(FieldOperand(scratch1, offset), storage_reg);
642 __ mov(FieldOperand(scratch1, offset), value_reg);
645 if (!representation.IsSmi()) {
646 // Update the write barrier for the array address.
647 if (!representation.IsDouble()) {
648 __ mov(storage_reg, value_reg);
650 __ RecordWriteField(scratch1,
660 // Return the value (register eax).
661 ASSERT(value_reg.is(eax));
666 // Both name_reg and receiver_reg are preserved on jumps to miss_label,
667 // but may be destroyed if the store is successful.
668 void StoreStubCompiler::GenerateStoreField(MacroAssembler* masm,
669 Handle<JSObject> object,
670 LookupResult* lookup,
671 Register receiver_reg,
677 // Stub never generated for non-global objects that require access checks.
679 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
681 int index = lookup->GetFieldIndex().field_index();
683 // Adjust for the number of properties stored in the object. Even in the
684 // face of a transition we can use the old map here because the size of the
685 // object and the number of in-object properties are not going to change.
686 index -= object->map()->inobject_properties();
688 Representation representation = lookup->representation();
689 ASSERT(!representation.IsNone());
690 if (representation.IsSmi()) {
691 __ JumpIfNotSmi(value_reg, miss_label);
692 } else if (representation.IsHeapObject()) {
693 __ JumpIfSmi(value_reg, miss_label);
694 HeapType* field_type = lookup->GetFieldType();
695 HeapType::Iterator<Map> it = field_type->Classes();
699 __ CompareMap(value_reg, it.Current());
702 __ j(not_equal, miss_label);
705 __ j(equal, &do_store, Label::kNear);
709 } else if (representation.IsDouble()) {
710 // Load the double storage.
712 int offset = object->map()->instance_size() + (index * kPointerSize);
713 __ mov(scratch1, FieldOperand(receiver_reg, offset));
715 __ mov(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
716 int offset = index * kPointerSize + FixedArray::kHeaderSize;
717 __ mov(scratch1, FieldOperand(scratch1, offset));
720 // Store the value into the storage.
721 Label do_store, heap_number;
722 __ JumpIfNotSmi(value_reg, &heap_number);
723 __ SmiUntag(value_reg);
724 if (CpuFeatures::IsSupported(SSE2)) {
725 CpuFeatureScope use_sse2(masm, SSE2);
726 __ Cvtsi2sd(xmm0, value_reg);
729 __ fild_s(Operand(esp, 0));
732 __ SmiTag(value_reg);
734 __ bind(&heap_number);
735 __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(),
736 miss_label, DONT_DO_SMI_CHECK);
737 if (CpuFeatures::IsSupported(SSE2)) {
738 CpuFeatureScope use_sse2(masm, SSE2);
739 __ movsd(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset));
741 __ fld_d(FieldOperand(value_reg, HeapNumber::kValueOffset));
744 if (CpuFeatures::IsSupported(SSE2)) {
745 CpuFeatureScope use_sse2(masm, SSE2);
746 __ movsd(FieldOperand(scratch1, HeapNumber::kValueOffset), xmm0);
748 __ fstp_d(FieldOperand(scratch1, HeapNumber::kValueOffset));
750 // Return the value (register eax).
751 ASSERT(value_reg.is(eax));
756 ASSERT(!representation.IsDouble());
757 // TODO(verwaest): Share this code as a code stub.
758 SmiCheck smi_check = representation.IsTagged()
759 ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
761 // Set the property straight into the object.
762 int offset = object->map()->instance_size() + (index * kPointerSize);
763 __ mov(FieldOperand(receiver_reg, offset), value_reg);
765 if (!representation.IsSmi()) {
766 // Update the write barrier for the array address.
767 // Pass the value being stored in the now unused name_reg.
768 __ mov(name_reg, value_reg);
769 __ RecordWriteField(receiver_reg,
778 // Write to the properties array.
779 int offset = index * kPointerSize + FixedArray::kHeaderSize;
780 // Get the properties array (optimistically).
781 __ mov(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
782 __ mov(FieldOperand(scratch1, offset), value_reg);
784 if (!representation.IsSmi()) {
785 // Update the write barrier for the array address.
786 // Pass the value being stored in the now unused name_reg.
787 __ mov(name_reg, value_reg);
788 __ RecordWriteField(scratch1,
798 // Return the value (register eax).
799 ASSERT(value_reg.is(eax));
804 void StubCompiler::GenerateTailCall(MacroAssembler* masm, Handle<Code> code) {
805 __ jmp(code, RelocInfo::CODE_TARGET);
810 #define __ ACCESS_MASM(masm())
813 Register StubCompiler::CheckPrototypes(Handle<HeapType> type,
815 Handle<JSObject> holder,
821 PrototypeCheckType check) {
822 Handle<Map> receiver_map(IC::TypeToMap(*type, isolate()));
824 // Make sure there's no overlap between holder and object registers.
825 ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
826 ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
827 && !scratch2.is(scratch1));
829 // Keep track of the current object in register reg.
830 Register reg = object_reg;
833 Handle<JSObject> current = Handle<JSObject>::null();
834 if (type->IsConstant())
835 current = Handle<JSObject>::cast(type->AsConstant()->Value());
836 Handle<JSObject> prototype = Handle<JSObject>::null();
837 Handle<Map> current_map = receiver_map;
838 Handle<Map> holder_map(holder->map());
839 // Traverse the prototype chain and check the maps in the prototype chain for
840 // fast and global objects or do negative lookup for normal objects.
841 while (!current_map.is_identical_to(holder_map)) {
844 // Only global objects and objects that do not require access
845 // checks are allowed in stubs.
846 ASSERT(current_map->IsJSGlobalProxyMap() ||
847 !current_map->is_access_check_needed());
849 prototype = handle(JSObject::cast(current_map->prototype()));
850 if (current_map->is_dictionary_map() &&
851 !current_map->IsJSGlobalObjectMap() &&
852 !current_map->IsJSGlobalProxyMap()) {
853 if (!name->IsUniqueName()) {
854 ASSERT(name->IsString());
855 name = factory()->InternalizeString(Handle<String>::cast(name));
857 ASSERT(current.is_null() ||
858 current->property_dictionary()->FindEntry(name) ==
859 NameDictionary::kNotFound);
861 GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
864 __ mov(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
865 reg = holder_reg; // From now on the object will be in holder_reg.
866 __ mov(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
868 bool in_new_space = heap()->InNewSpace(*prototype);
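// The receiver's own map (the first object on the chain) has normally been
// checked by the IC dispatch already, so it is only re-checked here when
// CHECK_ALL_MAPS is requested.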
869 if (depth != 1 || check == CHECK_ALL_MAPS) {
870 __ CheckMap(reg, current_map, miss, DONT_DO_SMI_CHECK);
873 // Check access rights to the global object. This has to happen after
874 // the map check so that we know that the object is actually a global object.
876 if (current_map->IsJSGlobalProxyMap()) {
877 __ CheckAccessGlobalProxy(reg, scratch1, scratch2, miss);
878 } else if (current_map->IsJSGlobalObjectMap()) {
879 GenerateCheckPropertyCell(
880 masm(), Handle<JSGlobalObject>::cast(current), name,
885 // Save the map in scratch1 for later.
886 __ mov(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
889 reg = holder_reg; // From now on the object will be in holder_reg.
892 // The prototype is in new space; we cannot store a reference to it
893 // in the code. Load it from the map.
894 __ mov(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
896 // The prototype is in old space; load it directly.
897 __ mov(reg, prototype);
901 // Go to the next object in the prototype chain.
903 current_map = handle(current->map());
906 // Log the check depth.
907 LOG(isolate(), IntEvent("check-maps-depth", depth + 1));
909 if (depth != 0 || check == CHECK_ALL_MAPS) {
910 // Check the holder map.
911 __ CheckMap(reg, current_map, miss, DONT_DO_SMI_CHECK);
914 // Perform security check for access to the global object.
915 ASSERT(current_map->IsJSGlobalProxyMap() ||
916 !current_map->is_access_check_needed());
917 if (current_map->IsJSGlobalProxyMap()) {
918 __ CheckAccessGlobalProxy(reg, scratch1, scratch2, miss);
921 // Return the register containing the holder.
926 void LoadStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
927 if (!miss->is_unused()) {
931 TailCallBuiltin(masm(), MissBuiltin(kind()));
937 void StoreStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
938 if (!miss->is_unused()) {
941 GenerateRestoreName(masm(), miss, name);
942 TailCallBuiltin(masm(), MissBuiltin(kind()));
948 Register LoadStubCompiler::CallbackHandlerFrontend(
949 Handle<HeapType> type,
951 Handle<JSObject> holder,
953 Handle<Object> callback) {
956 Register reg = HandlerFrontendHeader(type, object_reg, holder, name, &miss);
958 if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) {
959 ASSERT(!reg.is(scratch2()));
960 ASSERT(!reg.is(scratch3()));
961 Register dictionary = scratch1();
962 bool must_preserve_dictionary_reg = reg.is(dictionary);
964 // Load the properties dictionary.
965 if (must_preserve_dictionary_reg) {
968 __ mov(dictionary, FieldOperand(reg, JSObject::kPropertiesOffset));
970 // Probe the dictionary.
971 Label probe_done, pop_and_miss;
972 NameDictionaryLookupStub::GeneratePositiveLookup(masm(),
979 __ bind(&pop_and_miss);
980 if (must_preserve_dictionary_reg) {
984 __ bind(&probe_done);
986 // If probing finds an entry in the dictionary, scratch2 contains the
987 // index into the dictionary. Check that the value is the callback.
988 Register index = scratch2();
989 const int kElementsStartOffset =
990 NameDictionary::kHeaderSize +
991 NameDictionary::kElementsStartIndex * kPointerSize;
992 const int kValueOffset = kElementsStartOffset + kPointerSize;
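// NameDictionary entries are (key, value, details) triples, so the value of
// the entry found at 'index' lives one pointer past its key; hence the
// kElementsStartOffset + kPointerSize displacement.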
993 __ mov(scratch3(),
994 Operand(dictionary, index, times_4, kValueOffset - kHeapObjectTag));
995 if (must_preserve_dictionary_reg) {
998 __ cmp(scratch3(), callback);
999 __ j(not_equal, &miss);
1002 HandlerFrontendFooter(name, &miss);
1007 void LoadStubCompiler::GenerateLoadField(Register reg,
1008 Handle<JSObject> holder,
1009 PropertyIndex field,
1010 Representation representation) {
1011 if (!reg.is(receiver())) __ mov(receiver(), reg);
1012 if (kind() == Code::LOAD_IC) {
1013 LoadFieldStub stub(isolate(),
1014 field.is_inobject(holder),
1015 field.translate(holder),
1017 GenerateTailCall(masm(), stub.GetCode());
1019 KeyedLoadFieldStub stub(isolate(),
1020 field.is_inobject(holder),
1021 field.translate(holder),
1023 GenerateTailCall(masm(), stub.GetCode());
1028 void LoadStubCompiler::GenerateLoadCallback(
1030 Handle<ExecutableAccessorInfo> callback) {
1031 // Insert additional parameters into the stack frame above return address.
1032 ASSERT(!scratch3().is(reg));
1033 __ pop(scratch3()); // Get return address to place it below.
1035 STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0);
1036 STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1);
1037 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2);
1038 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3);
1039 STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4);
1040 STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5);
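// The pushes below build the PropertyCallbackArguments block bottom-up: the
// receiver ('this') is pushed first and the holder last, so the indices
// asserted above count upward from the holder slot.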
1041 __ push(receiver()); // receiver
1042 // Push data from ExecutableAccessorInfo.
1043 if (isolate()->heap()->InNewSpace(callback->data())) {
1044 ASSERT(!scratch2().is(reg));
1045 __ mov(scratch2(), Immediate(callback));
1046 __ push(FieldOperand(scratch2(), ExecutableAccessorInfo::kDataOffset));
1048 __ push(Immediate(Handle<Object>(callback->data(), isolate())));
1050 __ push(Immediate(isolate()->factory()->undefined_value())); // ReturnValue
1051 // ReturnValue default value
1052 __ push(Immediate(isolate()->factory()->undefined_value()));
1053 __ push(Immediate(reinterpret_cast<int>(isolate())));
1054 __ push(reg); // holder
1056 // Save a pointer to where we pushed the arguments. This will be
1057 // passed as the const PropertyAccessorInfo& to the C++ callback.
1060 __ push(name()); // name
1062 __ push(scratch3()); // Restore return address.
1064 // ABI for CallApiGetter
1065 Register getter_address = edx;
1066 Address function_address = v8::ToCData<Address>(callback->getter());
1067 __ mov(getter_address, Immediate(function_address));
1069 CallApiGetterStub stub(isolate());
1070 __ TailCallStub(&stub);
1074 void LoadStubCompiler::GenerateLoadConstant(Handle<Object> value) {
1075 // Return the constant value.
1076 __ LoadObject(eax, value);
1081 void LoadStubCompiler::GenerateLoadInterceptor(
1082 Register holder_reg,
1083 Handle<Object> object,
1084 Handle<JSObject> interceptor_holder,
1085 LookupResult* lookup,
1086 Handle<Name> name) {
1087 ASSERT(interceptor_holder->HasNamedInterceptor());
1088 ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());
1090 // So far the most popular follow-ups for interceptor loads are FIELD
1091 // and CALLBACKS, so inline only them; other cases may be added as they appear.
1093 bool compile_followup_inline = false;
1094 if (lookup->IsFound() && lookup->IsCacheable()) {
1095 if (lookup->IsField()) {
1096 compile_followup_inline = true;
1097 } else if (lookup->type() == CALLBACKS &&
1098 lookup->GetCallbackObject()->IsExecutableAccessorInfo()) {
1099 ExecutableAccessorInfo* callback =
1100 ExecutableAccessorInfo::cast(lookup->GetCallbackObject());
1101 compile_followup_inline = callback->getter() != NULL &&
1102 callback->IsCompatibleReceiver(*object);
1106 if (compile_followup_inline) {
1107 // Compile the interceptor call, followed by inline code to load the
1108 // property from further up the prototype chain if the call fails.
1109 // Check that the maps haven't changed.
1110 ASSERT(holder_reg.is(receiver()) || holder_reg.is(scratch1()));
1112 // Preserve the receiver register explicitly whenever it is different from
1113 // the holder and is needed in case the interceptor returns without a
1114 // result. The CALLBACKS case needs the receiver passed into C++ code;
1115 // the FIELD case might cause a miss during the prototype check.
1116 bool must_perform_prototype_check = *interceptor_holder != lookup->holder();
1117 bool must_preserve_receiver_reg = !receiver().is(holder_reg) &&
1118 (lookup->type() == CALLBACKS || must_perform_prototype_check);
1120 // Save necessary data before invoking an interceptor.
1121 // Requires a frame to make GC aware of pushed pointers.
1123 FrameScope frame_scope(masm(), StackFrame::INTERNAL);
1125 if (must_preserve_receiver_reg) {
1126 __ push(receiver());
1128 __ push(holder_reg);
1129 __ push(this->name());
1131 // Invoke an interceptor. Note: map checks from the receiver to the
1132 // interceptor's holder have been compiled before (see a caller of this method).
1134 CompileCallLoadPropertyWithInterceptor(
1135 masm(), receiver(), holder_reg, this->name(), interceptor_holder,
1136 IC::kLoadPropertyWithInterceptorOnly);
1138 // Check if the interceptor provided a value for the property. If it did,
1139 // return immediately.
1140 Label interceptor_failed;
1141 __ cmp(eax, factory()->no_interceptor_result_sentinel());
1142 __ j(equal, &interceptor_failed);
1143 frame_scope.GenerateLeaveFrame();
1146 // Clobber registers when generating debug-code to provoke errors.
1147 __ bind(&interceptor_failed);
1148 if (FLAG_debug_code) {
1149 __ mov(receiver(), Immediate(BitCast<int32_t>(kZapValue)));
1150 __ mov(holder_reg, Immediate(BitCast<int32_t>(kZapValue)));
1151 __ mov(this->name(), Immediate(BitCast<int32_t>(kZapValue)));
1154 __ pop(this->name());
1156 if (must_preserve_receiver_reg) {
1160 // Leave the internal frame.
1163 GenerateLoadPostInterceptor(holder_reg, interceptor_holder, name, lookup);
1164 } else { // !compile_followup_inline
1165 // Call the runtime system to load the interceptor.
1166 // Check that the maps haven't changed.
1167 __ pop(scratch2()); // save old return address
1168 PushInterceptorArguments(masm(), receiver(), holder_reg,
1169 this->name(), interceptor_holder);
1170 __ push(scratch2()); // restore old return address
1172 ExternalReference ref =
1173 ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad),
1175 __ TailCallExternalReference(ref, StubCache::kInterceptorArgsLength, 1);
1180 void StubCompiler::GenerateBooleanCheck(Register object, Label* miss) {
1182 // Check that the object is a boolean.
1183 __ cmp(object, factory()->true_value());
1184 __ j(equal, &success);
1185 __ cmp(object, factory()->false_value());
1186 __ j(not_equal, miss);
1191 Handle<Code> StoreStubCompiler::CompileStoreCallback(
1192 Handle<JSObject> object,
1193 Handle<JSObject> holder,
1195 Handle<ExecutableAccessorInfo> callback) {
1196 Register holder_reg = HandlerFrontend(
1197 IC::CurrentTypeOf(object, isolate()), receiver(), holder, name);
1199 __ pop(scratch1()); // remove the return address
1200 __ push(receiver());
1201 __ push(holder_reg);
1205 __ push(scratch1()); // restore return address
1207 // Do tail-call to the runtime system.
1208 ExternalReference store_callback_property =
1209 ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
1210 __ TailCallExternalReference(store_callback_property, 5, 1);
1212 // Return the generated code.
1213 return GetCode(kind(), Code::FAST, name);
1218 #define __ ACCESS_MASM(masm)
1221 void StoreStubCompiler::GenerateStoreViaSetter(
1222 MacroAssembler* masm,
1223 Handle<HeapType> type,
1225 Handle<JSFunction> setter) {
1226 // ----------- S t a t e -------------
1227 // -- esp[0] : return address
1228 // -----------------------------------
1230 FrameScope scope(masm, StackFrame::INTERNAL);
1232 // Save value register, so we can restore it later.
1235 if (!setter.is_null()) {
1236 // Call the JavaScript setter with receiver and value on the stack.
1237 if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
1238 // Swap in the global receiver.
1239 __ mov(receiver,
1240 FieldOperand(receiver, JSGlobalObject::kGlobalReceiverOffset));
1244 ParameterCount actual(1);
1245 ParameterCount expected(setter);
1246 __ InvokeFunction(setter, expected, actual,
1247 CALL_FUNCTION, NullCallWrapper());
1249 // If we generate a global code snippet for deoptimization only, remember
1250 // the place to continue after deoptimization.
1251 masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
1254 // We have to return the passed value, not the return value of the setter.
1257 // Restore context register.
1258 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
1265 #define __ ACCESS_MASM(masm())
1268 Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
1269 Handle<JSObject> object,
1270 Handle<Name> name) {
1271 __ pop(scratch1()); // remove the return address
1272 __ push(receiver());
1273 __ push(this->name());
1275 __ push(scratch1()); // restore return address
1277 // Do tail-call to the runtime system.
1278 ExternalReference store_ic_property =
1279 ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate());
1280 __ TailCallExternalReference(store_ic_property, 3, 1);
1282 // Return the generated code.
1283 return GetCode(kind(), Code::FAST, name);
1287 void StoreStubCompiler::GenerateStoreArrayLength() {
1288 // Prepare tail call to StoreIC_ArrayLength.
1289 __ pop(scratch1()); // remove the return address
1290 __ push(receiver());
1292 __ push(scratch1()); // restore return address
1294 ExternalReference ref =
1295 ExternalReference(IC_Utility(IC::kStoreIC_ArrayLength),
1297 __ TailCallExternalReference(ref, 2, 1);
1301 Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
1302 MapHandleList* receiver_maps,
1303 CodeHandleList* handler_stubs,
1304 MapHandleList* transitioned_maps) {
1306 __ JumpIfSmi(receiver(), &miss, Label::kNear);
1307 __ mov(scratch1(), FieldOperand(receiver(), HeapObject::kMapOffset));
1308 for (int i = 0; i < receiver_maps->length(); ++i) {
1309 __ cmp(scratch1(), receiver_maps->at(i));
1310 if (transitioned_maps->at(i).is_null()) {
1311 __ j(equal, handler_stubs->at(i));
1314 __ j(not_equal, &next_map, Label::kNear);
1315 __ mov(transition_map(), Immediate(transitioned_maps->at(i)));
1316 __ jmp(handler_stubs->at(i), RelocInfo::CODE_TARGET);
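// A non-null transitioned map means the handler performs a transitioning
// store: the target map is handed to it in the transition_map() register
// before the jump.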
1321 TailCallBuiltin(masm(), MissBuiltin(kind()));
1323 // Return the generated code.
1325 kind(), Code::NORMAL, factory()->empty_string(), POLYMORPHIC);
1329 Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<HeapType> type,
1330 Handle<JSObject> last,
1331 Handle<Name> name) {
1332 NonexistentHandlerFrontend(type, last, name);
1334 // Return undefined if maps of the full prototype chain are still the
1335 // same and no global property with this name contains a value.
1336 __ mov(eax, isolate()->factory()->undefined_value());
1339 // Return the generated code.
1340 return GetCode(kind(), Code::FAST, name);
1344 Register* LoadStubCompiler::registers() {
1345 // receiver, name, scratch1, scratch2, scratch3, scratch4.
1346 static Register registers[] = { edx, ecx, ebx, eax, edi, no_reg };
1351 Register* KeyedLoadStubCompiler::registers() {
1352 // receiver, name, scratch1, scratch2, scratch3, scratch4.
1353 static Register registers[] = { edx, ecx, ebx, eax, edi, no_reg };
1358 Register StoreStubCompiler::value() {
1363 Register* StoreStubCompiler::registers() {
1364 // receiver, name, scratch1, scratch2, scratch3.
1365 static Register registers[] = { edx, ecx, ebx, edi, no_reg };
1370 Register* KeyedStoreStubCompiler::registers() {
1371 // receiver, name, scratch1, scratch2, scratch3.
1372 static Register registers[] = { edx, ecx, ebx, edi, no_reg };
1378 #define __ ACCESS_MASM(masm)
1381 void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm,
1382 Handle<HeapType> type,
1384 Handle<JSFunction> getter) {
1386 FrameScope scope(masm, StackFrame::INTERNAL);
1388 if (!getter.is_null()) {
1389 // Call the JavaScript getter with the receiver on the stack.
1390 if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
1391 // Swap in the global receiver.
1392 __ mov(receiver,
1393 FieldOperand(receiver, JSGlobalObject::kGlobalReceiverOffset));
1396 ParameterCount actual(0);
1397 ParameterCount expected(getter);
1398 __ InvokeFunction(getter, expected, actual,
1399 CALL_FUNCTION, NullCallWrapper());
1401 // If we generate a global code snippet for deoptimization only, remember
1402 // the place to continue after deoptimization.
1403 masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
1406 // Restore context register.
1407 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
1414 #define __ ACCESS_MASM(masm())
1417 Handle<Code> LoadStubCompiler::CompileLoadGlobal(
1418 Handle<HeapType> type,
1419 Handle<GlobalObject> global,
1420 Handle<PropertyCell> cell,
1422 bool is_dont_delete) {
1425 HandlerFrontendHeader(type, receiver(), global, name, &miss);
1426 // Get the value from the cell.
1427 if (Serializer::enabled(isolate())) {
1428 __ mov(eax, Immediate(cell));
1429 __ mov(eax, FieldOperand(eax, PropertyCell::kValueOffset));
1431 __ mov(eax, Operand::ForCell(cell));
1434 // Check for deleted property if property can actually be deleted.
1435 if (!is_dont_delete) {
1436 __ cmp(eax, factory()->the_hole_value());
1438 } else if (FLAG_debug_code) {
1439 __ cmp(eax, factory()->the_hole_value());
1440 __ Check(not_equal, kDontDeleteCellsCannotContainTheHole);
1443 Counters* counters = isolate()->counters();
1444 __ IncrementCounter(counters->named_load_global_stub(), 1);
1445 // The code above already loads the result into the return register.
1448 HandlerFrontendFooter(name, &miss);
1450 // Return the generated code.
1451 return GetCode(kind(), Code::NORMAL, name);
1455 Handle<Code> BaseLoadStoreStubCompiler::CompilePolymorphicIC(
1456 TypeHandleList* types,
1457 CodeHandleList* handlers,
1459 Code::StubType type,
1460 IcCheckType check) {
1463 if (check == PROPERTY &&
1464 (kind() == Code::KEYED_LOAD_IC || kind() == Code::KEYED_STORE_IC)) {
1465 __ cmp(this->name(), Immediate(name));
1466 __ j(not_equal, &miss);
1470 Label* smi_target = IncludesNumberType(types) ? &number_case : &miss;
1471 __ JumpIfSmi(receiver(), smi_target);
1473 Register map_reg = scratch1();
1474 __ mov(map_reg, FieldOperand(receiver(), HeapObject::kMapOffset));
1475 int receiver_count = types->length();
1476 int number_of_handled_maps = 0;
1477 for (int current = 0; current < receiver_count; ++current) {
1478 Handle<HeapType> type = types->at(current);
1479 Handle<Map> map = IC::TypeToMap(*type, isolate());
1480 if (!map->is_deprecated()) {
1481 number_of_handled_maps++;
1482 __ cmp(map_reg, map);
1483 if (type->Is(HeapType::Number())) {
1484 ASSERT(!number_case.is_unused());
1485 __ bind(&number_case);
1487 __ j(equal, handlers->at(current));
1490 ASSERT(number_of_handled_maps != 0);
1493 TailCallBuiltin(masm(), MissBuiltin(kind()));
1495 // Return the generated code.
1496 InlineCacheState state =
1497 number_of_handled_maps > 1 ? POLYMORPHIC : MONOMORPHIC;
1498 return GetICCode(kind(), type, name, state);
1503 #define __ ACCESS_MASM(masm)
1506 void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
1507 MacroAssembler* masm) {
1508 // ----------- S t a t e -------------
1510 // -- edx : receiver
1511 // -- esp[0] : return address
1512 // -----------------------------------
1515 // This stub is meant to be tail-jumped to; the receiver must already
1516 // have been verified by the caller not to be a smi.
1517 __ JumpIfNotSmi(ecx, &miss);
1520 __ mov(eax, FieldOperand(edx, JSObject::kElementsOffset));
1522 // Push the receiver on the stack to free up a register for the dictionary probing.
1525 __ LoadFromNumberDictionary(&slow, eax, ecx, ebx, edx, edi, eax);
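// On success LoadFromNumberDictionary leaves the element in eax; a failed
// probe jumps to the slow case.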
1526 // Pop receiver before returning.
1533 // ----------- S t a t e -------------
1535 // -- edx : receiver
1536 // -- esp[0] : return address
1537 // -----------------------------------
1538 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Slow);
1541 // ----------- S t a t e -------------
1543 // -- edx : receiver
1544 // -- esp[0] : return address
1545 // -----------------------------------
1546 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss);
1552 } } // namespace v8::internal
1554 #endif // V8_TARGET_ARCH_IA32