// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if V8_TARGET_ARCH_MIPS

#include "ic-inl.h"
#include "codegen.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)
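
// ACCESS_MASM(masm) expands to "masm->", so "__ lw(...)" in this file is
// shorthand for "masm->lw(...)"; this is the usual V8 assembler convention.
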
static void ProbeTable(Isolate* isolate,
                       MacroAssembler* masm,
                       Code::Flags flags,
                       StubCache::Table table,
                       Register receiver,
                       Register name,
                       // Number of the cache entry, not scaled.
                       Register offset,
                       Register scratch,
                       Register scratch2,
                       Register offset_scratch) {
  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
  ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
  ExternalReference map_offset(isolate->stub_cache()->map_reference(table));

  uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address());
  uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address());
  uint32_t map_off_addr = reinterpret_cast<uint32_t>(map_offset.address());

  // Check the relative positions of the address fields.
  ASSERT(value_off_addr > key_off_addr);
  ASSERT((value_off_addr - key_off_addr) % 4 == 0);
  ASSERT((value_off_addr - key_off_addr) < (256 * 4));
  ASSERT(map_off_addr > key_off_addr);
  ASSERT((map_off_addr - key_off_addr) % 4 == 0);
  ASSERT((map_off_addr - key_off_addr) < (256 * 4));

  Label miss;
  Register base_addr = scratch;
  scratch = no_reg;

  // Multiply by 3 because there are 3 fields per entry (name, code, map).
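  // (offset << 1) + offset yields offset * 3 without needing a multiply.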
  __ sll(offset_scratch, offset, 1);
  __ Addu(offset_scratch, offset_scratch, offset);

  // Calculate the base address of the entry.
  __ li(base_addr, Operand(key_offset));
  __ sll(at, offset_scratch, kPointerSizeLog2);
  __ Addu(base_addr, base_addr, at);

  // Check that the key in the entry matches the name.
  __ lw(at, MemOperand(base_addr, 0));
  __ Branch(&miss, ne, name, Operand(at));

  // Check the map matches.
  __ lw(at, MemOperand(base_addr, map_off_addr - key_off_addr));
  __ lw(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ Branch(&miss, ne, at, Operand(scratch2));

  // Get the code entry from the cache.
  Register code = scratch2;
  scratch2 = no_reg;
  __ lw(code, MemOperand(base_addr, value_off_addr - key_off_addr));

  // Check that the flags match what we're looking for.
  Register flags_reg = base_addr;
  base_addr = no_reg;
  __ lw(flags_reg, FieldMemOperand(code, Code::kFlagsOffset));
  __ And(flags_reg, flags_reg, Operand(~Code::kFlagsNotUsedInLookup));
  __ Branch(&miss, ne, flags_reg, Operand(flags));

#ifdef DEBUG
  if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
    __ jmp(&miss);
  } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
    __ jmp(&miss);
  }
#endif

  // Jump to the first instruction in the code stub.
  __ Addu(at, code, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);

  // Miss: fall through.
  __ bind(&miss);
}

void StubCompiler::GenerateDictionaryNegativeLookup(MacroAssembler* masm,
                                                    Label* miss_label,
                                                    Register receiver,
                                                    Handle<Name> name,
                                                    Register scratch0,
                                                    Register scratch1) {
  ASSERT(name->IsUniqueName());
  ASSERT(!receiver.is(scratch0));
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
  __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);

  Label done;

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access checks.
  Register map = scratch1;
  __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ lbu(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
  __ And(scratch0, scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
  __ Branch(miss_label, ne, scratch0, Operand(zero_reg));

  // Check that receiver is a JSObject.
  __ lbu(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ Branch(miss_label, lt, scratch0, Operand(FIRST_SPEC_OBJECT_TYPE));

  // Load properties array.
  Register properties = scratch0;
  __ lw(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
  // Check that the properties array is a dictionary.
  __ lw(map, FieldMemOperand(properties, HeapObject::kMapOffset));
  Register tmp = properties;
  __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
  __ Branch(miss_label, ne, map, Operand(tmp));

  // Restore the temporarily used register.
  __ lw(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));

  NameDictionaryLookupStub::GenerateNegativeLookup(masm,
                                                   miss_label,
                                                   &done,
                                                   receiver,
                                                   properties,
                                                   name,
                                                   scratch1);
  __ bind(&done);
  __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
}

void StubCache::GenerateProbe(MacroAssembler* masm,
                              Code::Flags flags,
                              Register receiver,
                              Register name,
                              Register scratch,
                              Register extra,
                              Register extra2,
                              Register extra3) {
  Isolate* isolate = masm->isolate();
  Label miss;

  // Make sure that code is valid. The multiplying code relies on the
  // entry size being 12.
  ASSERT(sizeof(Entry) == 12);
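  // An entry is three 32-bit words (name, code, map): 3 * 4 == 12 bytes.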
  // Make sure the flags do not name a specific type.
  ASSERT(Code::ExtractTypeFromFlags(flags) == 0);

  // Make sure that there are no register conflicts.
  ASSERT(!scratch.is(receiver));
  ASSERT(!scratch.is(name));
  ASSERT(!extra.is(receiver));
  ASSERT(!extra.is(name));
  ASSERT(!extra.is(scratch));
  ASSERT(!extra2.is(receiver));
  ASSERT(!extra2.is(name));
  ASSERT(!extra2.is(scratch));
  ASSERT(!extra2.is(extra));

  // Check register validity.
  ASSERT(!scratch.is(no_reg));
  ASSERT(!extra.is(no_reg));
  ASSERT(!extra2.is(no_reg));
  ASSERT(!extra3.is(no_reg));

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1,
                      extra2, extra3);
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Get the map of the receiver and compute the hash.
  __ lw(scratch, FieldMemOperand(name, Name::kHashFieldOffset));
  __ lw(at, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ Addu(scratch, scratch, at);
  uint32_t mask = kPrimaryTableSize - 1;
  // We shift out the last two bits because they are not part of the hash and
  // they are always 01 for maps.
  __ srl(scratch, scratch, kHeapObjectTagSize);
  __ Xor(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask));
  __ And(scratch, scratch, Operand(mask));
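  // Net effect: index = (((hash + map) >> kHeapObjectTagSize) ^
  //                      (flags >> kHeapObjectTagSize)) & mask;
  // this must agree with the hash the C++ side uses when it fills the table.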
  // Probe the primary table.
  ProbeTable(isolate, masm, flags, kPrimary,
             receiver, name, scratch, extra, extra2, extra3);

  // Primary miss: Compute hash for secondary probe.
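  // The secondary hash subtracts the name back out of the primary hash and
  // then mixes the flags in by addition instead of xor.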
  __ srl(at, name, kHeapObjectTagSize);
  __ Subu(scratch, scratch, at);
  uint32_t mask2 = kSecondaryTableSize - 1;
  __ Addu(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask2));
  __ And(scratch, scratch, Operand(mask2));

  // Probe the secondary table.
  ProbeTable(isolate, masm, flags, kSecondary,
             receiver, name, scratch, extra, extra2, extra3);

  // Cache miss: Fall-through and let caller handle the miss by
  // entering the runtime system.
  __ bind(&miss);
  __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1,
                      extra2, extra3);
}

void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                       int index,
                                                       Register prototype) {
  // Load the global or builtins object from the current context.
  __ lw(prototype,
        MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  // Load the native context from the global or builtins object.
  __ lw(prototype,
        FieldMemOperand(prototype, GlobalObject::kNativeContextOffset));
  // Load the function from the native context.
  __ lw(prototype, MemOperand(prototype, Context::SlotOffset(index)));
  // Load the initial map. The global functions all have initial maps.
  __ lw(prototype,
        FieldMemOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the prototype from the initial map.
  __ lw(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
}

void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm,
    int index,
    Register prototype,
    Label* miss) {
  Isolate* isolate = masm->isolate();
  // Get the global function with the given index.
  Handle<JSFunction> function(
      JSFunction::cast(isolate->native_context()->get(index)));

  // Check we're still in the same context.
  Register scratch = prototype;
  const int offset = Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX);
  __ lw(scratch, MemOperand(cp, offset));
  __ lw(scratch, FieldMemOperand(scratch, GlobalObject::kNativeContextOffset));
  __ lw(scratch, MemOperand(scratch, Context::SlotOffset(index)));
  __ li(at, function);
  __ Branch(miss, ne, at, Operand(scratch));

  // Load its initial map. The global functions all have initial maps.
  __ li(prototype, Handle<Map>(function->initial_map()));
  // Load the prototype from the initial map.
  __ lw(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
}

void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
                                            Register dst,
                                            Register src,
                                            bool inobject,
                                            int index,
                                            Representation representation) {
  ASSERT(!FLAG_track_double_fields || !representation.IsDouble());
  int offset = index * kPointerSize;
  if (!inobject) {
    // Calculate the offset into the properties array.
    offset = offset + FixedArray::kHeaderSize;
    __ lw(dst, FieldMemOperand(src, JSObject::kPropertiesOffset));
    src = dst;
  }
  __ lw(dst, FieldMemOperand(src, offset));
}

void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
                                           Register receiver,
                                           Register scratch,
                                           Label* miss_label) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss_label);

  // Check that the object is a JS array.
  __ GetObjectType(receiver, scratch, scratch);
  __ Branch(miss_label, ne, scratch, Operand(JS_ARRAY_TYPE));

  // Load length directly from the JS array.
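  // The lw below sits in the branch delay slot of Ret and therefore executes
  // before the return actually happens (MIPS delay-slot convention).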
  __ Ret(USE_DELAY_SLOT);
  __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
}

// Generate code to check if an object is a string.  If the object is a
// heap object, its map's instance type is left in the scratch1 register.
// If this is not needed, scratch1 and scratch2 may be the same register.
static void GenerateStringCheck(MacroAssembler* masm,
                                Register receiver,
                                Register scratch1,
                                Register scratch2,
                                Label* smi,
                                Label* non_string_object) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, smi, t0);

  // Check that the object is a string.
  __ lw(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ And(scratch2, scratch1, Operand(kIsNotStringMask));
  // The cast is to resolve the overload for the argument of 0x0.
  __ Branch(non_string_object,
            ne,
            scratch2,
            Operand(static_cast<int32_t>(kStringTag)));
}

// Generate code to load the length from a string object and return the length.
// If the receiver object is not a string or a wrapped string object the
// execution continues at the miss label. The register containing the
// receiver is potentially clobbered.
void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
                                            Register receiver,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss) {
  Label check_wrapper;

  // Check if the object is a string leaving the instance type in the
  // scratch1 register.
  GenerateStringCheck(masm, receiver, scratch1, scratch2, miss, &check_wrapper);

  // Load length directly from the string.
  __ Ret(USE_DELAY_SLOT);
  __ lw(v0, FieldMemOperand(receiver, String::kLengthOffset));

  // Check if the object is a JSValue wrapper.
  __ bind(&check_wrapper);
  __ Branch(miss, ne, scratch1, Operand(JS_VALUE_TYPE));

  // Unwrap the value and check if the wrapped value is a string.
  __ lw(scratch1, FieldMemOperand(receiver, JSValue::kValueOffset));
  GenerateStringCheck(masm, scratch1, scratch2, scratch2, miss, miss);
  __ Ret(USE_DELAY_SLOT);
  __ lw(v0, FieldMemOperand(scratch1, String::kLengthOffset));
}

void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
                                                 Register receiver,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, scratch1);
}

void StubCompiler::GenerateCheckPropertyCell(MacroAssembler* masm,
                                             Handle<JSGlobalObject> global,
                                             Handle<Name> name,
                                             Register scratch,
                                             Label* miss) {
  Handle<Cell> cell = JSGlobalObject::EnsurePropertyCell(global, name);
  ASSERT(cell->value()->IsTheHole());
  __ li(scratch, Operand(cell));
  __ lw(scratch, FieldMemOperand(scratch, Cell::kValueOffset));
  __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
  __ Branch(miss, ne, scratch, Operand(at));
}

void StoreStubCompiler::GenerateNegativeHolderLookup(
    MacroAssembler* masm,
    Handle<JSObject> holder,
    Register holder_reg,
    Handle<Name> name,
    Label* miss) {
  if (holder->IsJSGlobalObject()) {
    GenerateCheckPropertyCell(
        masm, Handle<JSGlobalObject>::cast(holder), name, scratch1(), miss);
  } else if (!holder->HasFastProperties() && !holder->IsJSGlobalProxy()) {
    GenerateDictionaryNegativeLookup(
        masm, miss, holder_reg, name, scratch1(), scratch2());
  }
}

// Generate StoreTransition code, value is passed in a0 register.
// After executing generated code, the receiver_reg and name_reg
// may be clobbered.
void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm,
                                                Handle<JSObject> object,
                                                LookupResult* lookup,
                                                Handle<Map> transition,
                                                Handle<Name> name,
                                                Register receiver_reg,
                                                Register storage_reg,
                                                Register value_reg,
                                                Register scratch1,
                                                Register scratch2,
                                                Register scratch3,
                                                Label* miss_label,
                                                Label* slow) {
  // a0 : value.
  int descriptor = transition->LastAdded();
  DescriptorArray* descriptors = transition->instance_descriptors();
  PropertyDetails details = descriptors->GetDetails(descriptor);
  Representation representation = details.representation();
  ASSERT(!representation.IsNone());

  if (details.type() == CONSTANT) {
    Handle<Object> constant(descriptors->GetValue(descriptor), masm->isolate());
    __ li(scratch1, constant);
    __ Branch(miss_label, ne, value_reg, Operand(scratch1));
  } else if (FLAG_track_fields && representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
  } else if (FLAG_track_double_fields && representation.IsDouble()) {
    Label do_store, heap_number;
    __ LoadRoot(scratch3, Heap::kHeapNumberMapRootIndex);
    __ AllocateHeapNumber(storage_reg, scratch1, scratch2, scratch3, slow);

    __ JumpIfNotSmi(value_reg, &heap_number);
    __ SmiUntag(scratch1, value_reg);
    __ mtc1(scratch1, f6);
    __ cvt_d_w(f4, f6);
    __ jmp(&do_store);

    __ bind(&heap_number);
    __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex,
                miss_label, DONT_DO_SMI_CHECK);
    __ ldc1(f4, FieldMemOperand(value_reg, HeapNumber::kValueOffset));

    __ bind(&do_store);
    __ sdc1(f4, FieldMemOperand(storage_reg, HeapNumber::kValueOffset));
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Perform map transition for the receiver if necessary.
  if (details.type() == FIELD &&
      object->map()->unused_property_fields() == 0) {
    // The properties must be extended before we can store the value.
    // We jump to a runtime call that extends the properties array.
    __ push(receiver_reg);
    __ li(a2, Operand(transition));
    __ Push(a2, a0);
    __ TailCallExternalReference(
        ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
                          masm->isolate()),
        3, 1);
    return;
  }

  // Update the map of the object.
  __ li(scratch1, Operand(transition));
  __ sw(scratch1, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));

  // Update the write barrier for the map field.
  __ RecordWriteField(receiver_reg,
                      HeapObject::kMapOffset,
                      scratch1,
                      scratch2,
                      kRAHasNotBeenSaved,
                      kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);

  if (details.type() == CONSTANT) {
    ASSERT(value_reg.is(a0));
    __ Ret(USE_DELAY_SLOT);
    __ mov(v0, a0);
    return;
  }

  int index = transition->instance_descriptors()->GetFieldIndex(
      transition->LastAdded());

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();
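  // From here on a negative index denotes an in-object slot and a
  // non-negative index a slot in the out-of-line properties array.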
  // TODO(verwaest): Share this code as a code stub.
  SmiCheck smi_check = representation.IsTagged()
      ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    if (FLAG_track_double_fields && representation.IsDouble()) {
      __ sw(storage_reg, FieldMemOperand(receiver_reg, offset));
    } else {
      __ sw(value_reg, FieldMemOperand(receiver_reg, offset));
    }

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Update the write barrier for the array address.
      if (!FLAG_track_double_fields || !representation.IsDouble()) {
        __ mov(storage_reg, value_reg);
      }
      __ RecordWriteField(receiver_reg,
                          offset,
                          storage_reg,
                          scratch1,
                          kRAHasNotBeenSaved,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array.
    __ lw(scratch1,
          FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
    if (FLAG_track_double_fields && representation.IsDouble()) {
      __ sw(storage_reg, FieldMemOperand(scratch1, offset));
    } else {
      __ sw(value_reg, FieldMemOperand(scratch1, offset));
    }

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Update the write barrier for the array address.
      if (!FLAG_track_double_fields || !representation.IsDouble()) {
        __ mov(storage_reg, value_reg);
      }
      __ RecordWriteField(scratch1,
                          offset,
                          storage_reg,
                          receiver_reg,
                          kRAHasNotBeenSaved,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  }

  // Return the value (register v0).
  ASSERT(value_reg.is(a0));
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, a0);
}

// Generate StoreField code, value is passed in a0 register.
// When leaving generated code after success, the receiver_reg and name_reg
// may be clobbered.  Upon branch to miss_label, the receiver and name
// registers have their original values.
void StoreStubCompiler::GenerateStoreField(MacroAssembler* masm,
                                           Handle<JSObject> object,
                                           LookupResult* lookup,
                                           Register receiver_reg,
                                           Register name_reg,
                                           Register value_reg,
                                           Register scratch1,
                                           Register scratch2,
                                           Label* miss_label) {
  // a0 : value
  Label exit;

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  int index = lookup->GetFieldIndex().field_index();

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  Representation representation = lookup->representation();
  ASSERT(!representation.IsNone());
  if (FLAG_track_fields && representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
  } else if (FLAG_track_double_fields && representation.IsDouble()) {
    // Load the double storage.
    if (index < 0) {
      int offset = object->map()->instance_size() + (index * kPointerSize);
      __ lw(scratch1, FieldMemOperand(receiver_reg, offset));
    } else {
      __ lw(scratch1,
            FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
      int offset = index * kPointerSize + FixedArray::kHeaderSize;
      __ lw(scratch1, FieldMemOperand(scratch1, offset));
    }

    // Store the value into the storage.
    Label do_store, heap_number;
    __ JumpIfNotSmi(value_reg, &heap_number);
    __ SmiUntag(scratch2, value_reg);
    __ mtc1(scratch2, f6);
    __ cvt_d_w(f4, f6);
    __ jmp(&do_store);

    __ bind(&heap_number);
    __ CheckMap(value_reg, scratch2, Heap::kHeapNumberMapRootIndex,
                miss_label, DONT_DO_SMI_CHECK);
    __ ldc1(f4, FieldMemOperand(value_reg, HeapNumber::kValueOffset));

    __ bind(&do_store);
    __ sdc1(f4, FieldMemOperand(scratch1, HeapNumber::kValueOffset));
    // Return the value (register v0).
    ASSERT(value_reg.is(a0));
    __ Ret(USE_DELAY_SLOT);
    __ mov(v0, a0);
    return;
  }

  // TODO(verwaest): Share this code as a code stub.
  SmiCheck smi_check = representation.IsTagged()
      ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    __ sw(value_reg, FieldMemOperand(receiver_reg, offset));

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Skip updating write barrier if storing a smi.
      __ JumpIfSmi(value_reg, &exit);

      // Update the write barrier for the array address.
      // Pass the now unused name_reg as a scratch register.
      __ mov(name_reg, value_reg);
      __ RecordWriteField(receiver_reg,
                          offset,
                          name_reg,
                          scratch1,
                          kRAHasNotBeenSaved,
                          kDontSaveFPRegs);
    }
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array.
    __ lw(scratch1,
          FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
    __ sw(value_reg, FieldMemOperand(scratch1, offset));

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Skip updating write barrier if storing a smi.
      __ JumpIfSmi(value_reg, &exit);

      // Update the write barrier for the array address.
      // Ok to clobber receiver_reg and name_reg, since we return.
      __ mov(name_reg, value_reg);
      __ RecordWriteField(scratch1,
                          offset,
                          name_reg,
                          receiver_reg,
                          kRAHasNotBeenSaved,
                          kDontSaveFPRegs);
    }
  }

  // Return the value (register v0).
  __ bind(&exit);
  ASSERT(value_reg.is(a0));
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, a0);
}

void StoreStubCompiler::GenerateRestoreName(MacroAssembler* masm,
                                            Label* label,
                                            Handle<Name> name) {
  if (!label->is_unused()) {
    __ bind(label);
    __ li(this->name(), Operand(name));
  }
}

static void PushInterceptorArguments(MacroAssembler* masm,
                                     Register receiver,
                                     Register holder,
                                     Register name,
                                     Handle<JSObject> holder_obj) {
  STATIC_ASSERT(StubCache::kInterceptorArgsNameIndex == 0);
  STATIC_ASSERT(StubCache::kInterceptorArgsInfoIndex == 1);
  STATIC_ASSERT(StubCache::kInterceptorArgsThisIndex == 2);
  STATIC_ASSERT(StubCache::kInterceptorArgsHolderIndex == 3);
  STATIC_ASSERT(StubCache::kInterceptorArgsLength == 4);
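  // The pushes below must produce exactly the layout these indices describe.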
  __ push(name);
  Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
  ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor));
  Register scratch = name;
  __ li(scratch, Operand(interceptor));
  __ Push(scratch, receiver, holder);
}

static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm,
    Register receiver,
    Register holder,
    Register name,
    Handle<JSObject> holder_obj,
    IC::UtilityId id) {
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
  __ CallExternalReference(
      ExternalReference(IC_Utility(id), masm->isolate()),
      StubCache::kInterceptorArgsLength);
}

// Generate call to api function.
static void GenerateFastApiCall(MacroAssembler* masm,
                                const CallOptimization& optimization,
                                Handle<Map> receiver_map,
                                Register receiver,
                                Register scratch_in,
                                int argc,
                                Register* values) {
  ASSERT(!receiver.is(scratch_in));
  // Preparing to push, adjust sp.
  __ Subu(sp, sp, Operand((argc + 1) * kPointerSize));
  __ sw(receiver, MemOperand(sp, argc * kPointerSize));  // Push receiver.
  // Write the arguments to stack frame.
  for (int i = 0; i < argc; i++) {
    Register arg = values[argc - 1 - i];
    ASSERT(!receiver.is(arg));
    ASSERT(!scratch_in.is(arg));
    __ sw(arg, MemOperand(sp, (argc - 1 - i) * kPointerSize));  // Push arg.
  }
  ASSERT(optimization.is_simple_api_call());

  // ABI for CallApiFunctionStub.
  Register callee = a0;
  Register call_data = t0;
  Register holder = a2;
  Register api_function_address = a1;

  // Put holder in place.
  CallOptimization::HolderLookup holder_lookup;
  Handle<JSObject> api_holder = optimization.LookupHolderOfExpectedType(
      receiver_map,
      &holder_lookup);
  switch (holder_lookup) {
    case CallOptimization::kHolderIsReceiver:
      __ Move(holder, receiver);
      break;
    case CallOptimization::kHolderFound:
      __ li(holder, api_holder);
      break;
    case CallOptimization::kHolderNotFound:
      UNREACHABLE();
      break;
  }

  Isolate* isolate = masm->isolate();
  Handle<JSFunction> function = optimization.constant_function();
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  Handle<Object> call_data_obj(api_call_info->data(), isolate);

  // Put callee in place.
  __ li(callee, function);

  bool call_data_undefined = false;
  // Put call_data in place.
  if (isolate->heap()->InNewSpace(*call_data_obj)) {
    __ li(call_data, api_call_info);
    __ lw(call_data, FieldMemOperand(call_data, CallHandlerInfo::kDataOffset));
  } else if (call_data_obj->IsUndefined()) {
    call_data_undefined = true;
    __ LoadRoot(call_data, Heap::kUndefinedValueRootIndex);
  } else {
    __ li(call_data, call_data_obj);
  }
  // Put api_function_address in place.
  Address function_address = v8::ToCData<Address>(api_call_info->callback());
  ApiFunction fun(function_address);
  ExternalReference::Type type = ExternalReference::DIRECT_API_CALL;
  ExternalReference ref =
      ExternalReference(&fun,
                        type,
                        isolate);
  __ li(api_function_address, Operand(ref));

  // Jump to stub.
  CallApiFunctionStub stub(true, call_data_undefined, argc);
  __ TailCallStub(&stub);
}

void StubCompiler::GenerateTailCall(MacroAssembler* masm, Handle<Code> code) {
  __ Jump(code, RelocInfo::CODE_TARGET);
}


#undef __
#define __ ACCESS_MASM(masm())

Register StubCompiler::CheckPrototypes(Handle<HeapType> type,
                                       Register object_reg,
                                       Handle<JSObject> holder,
                                       Register holder_reg,
                                       Register scratch1,
                                       Register scratch2,
                                       Handle<Name> name,
                                       Label* miss,
                                       PrototypeCheckType check) {
  Handle<Map> receiver_map(IC::TypeToMap(*type, isolate()));
  // Make sure that the type feedback oracle harvests the receiver map.
  // TODO(svenpanne) Remove this hack when all ICs are reworked.
  __ li(scratch1, Operand(receiver_map));

  // Make sure there's no overlap between holder and object registers.
  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
         && !scratch2.is(scratch1));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 0;

  Handle<JSObject> current = Handle<JSObject>::null();
  if (type->IsConstant()) current = Handle<JSObject>::cast(type->AsConstant());
  Handle<JSObject> prototype = Handle<JSObject>::null();
  Handle<Map> current_map = receiver_map;
  Handle<Map> holder_map(holder->map());
  // Traverse the prototype chain and check the maps in the prototype chain for
  // fast and global objects or do negative lookup for normal objects.
  while (!current_map.is_identical_to(holder_map)) {
    ++depth;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(current_map->IsJSGlobalProxyMap() ||
           !current_map->is_access_check_needed());

    prototype = handle(JSObject::cast(current_map->prototype()));
    if (current_map->is_dictionary_map() &&
        !current_map->IsJSGlobalObjectMap() &&
        !current_map->IsJSGlobalProxyMap()) {
      if (!name->IsUniqueName()) {
        ASSERT(name->IsString());
        name = factory()->InternalizeString(Handle<String>::cast(name));
      }
      ASSERT(current.is_null() ||
             current->property_dictionary()->FindEntry(*name) ==
             NameDictionary::kNotFound);

      GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
                                       scratch1, scratch2);

      __ lw(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
      reg = holder_reg;  // From now on the object will be in holder_reg.
      __ lw(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
    } else {
      Register map_reg = scratch1;
      if (depth != 1 || check == CHECK_ALL_MAPS) {
        // CheckMap implicitly loads the map of |reg| into |map_reg|.
        __ CheckMap(reg, map_reg, current_map, miss, DONT_DO_SMI_CHECK);
      } else {
        __ lw(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset));
      }

      // Check access rights to the global object.  This has to happen after
      // the map check so that we know that the object is actually a global
      // object.
      if (current_map->IsJSGlobalProxyMap()) {
        __ CheckAccessGlobalProxy(reg, scratch2, miss);
      } else if (current_map->IsJSGlobalObjectMap()) {
        GenerateCheckPropertyCell(
            masm(), Handle<JSGlobalObject>::cast(current), name,
            scratch2, miss);
      }

      reg = holder_reg;  // From now on the object will be in holder_reg.

      if (heap()->InNewSpace(*prototype)) {
        // The prototype is in new space; we cannot store a reference to it
        // in the code.  Load it from the map.
        __ lw(reg, FieldMemOperand(map_reg, Map::kPrototypeOffset));
      } else {
        // The prototype is in old space; load it directly.
        __ li(reg, Operand(prototype));
      }
    }

    // Go to the next object in the prototype chain.
    current = prototype;
    current_map = handle(current->map());
  }

  // Log the check depth.
  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));

  if (depth != 0 || check == CHECK_ALL_MAPS) {
    // Check the holder map.
    __ CheckMap(reg, scratch1, current_map, miss, DONT_DO_SMI_CHECK);
  }

  // Perform security check for access to the global object.
  ASSERT(current_map->IsJSGlobalProxyMap() ||
         !current_map->is_access_check_needed());
  if (current_map->IsJSGlobalProxyMap()) {
    __ CheckAccessGlobalProxy(reg, scratch1, miss);
  }

  // Return the register containing the holder.
  return reg;
}

void LoadStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ Branch(&success);
    __ bind(miss);
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    __ bind(&success);
  }
}

void StoreStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ Branch(&success);
    GenerateRestoreName(masm(), miss, name);
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    __ bind(&success);
  }
}

Register LoadStubCompiler::CallbackHandlerFrontend(
    Handle<HeapType> type,
    Register object_reg,
    Handle<JSObject> holder,
    Handle<Name> name,
    Handle<Object> callback) {
  Label miss;

  Register reg = HandlerFrontendHeader(type, object_reg, holder, name, &miss);

  if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) {
    ASSERT(!reg.is(scratch2()));
    ASSERT(!reg.is(scratch3()));
    ASSERT(!reg.is(scratch4()));

    // Load the properties dictionary.
    Register dictionary = scratch4();
    __ lw(dictionary, FieldMemOperand(reg, JSObject::kPropertiesOffset));

    // Probe the dictionary.
    Label probe_done;
    NameDictionaryLookupStub::GeneratePositiveLookup(masm(),
                                                     &miss,
                                                     &probe_done,
                                                     dictionary,
                                                     this->name(),
                                                     scratch2(),
                                                     scratch3());
    __ bind(&probe_done);

    // If probing finds an entry in the dictionary, scratch3 contains the
    // pointer into the dictionary. Check that the value is the callback.
    Register pointer = scratch3();
    const int kElementsStartOffset = NameDictionary::kHeaderSize +
        NameDictionary::kElementsStartIndex * kPointerSize;
    const int kValueOffset = kElementsStartOffset + kPointerSize;
    __ lw(scratch2(), FieldMemOperand(pointer, kValueOffset));
    __ Branch(&miss, ne, scratch2(), Operand(callback));
  }

  HandlerFrontendFooter(name, &miss);
  return reg;
}

void LoadStubCompiler::GenerateLoadField(Register reg,
                                         Handle<JSObject> holder,
                                         PropertyIndex field,
                                         Representation representation) {
  if (!reg.is(receiver())) __ mov(receiver(), reg);
  if (kind() == Code::LOAD_IC) {
    LoadFieldStub stub(field.is_inobject(holder),
                       field.translate(holder),
                       representation);
    GenerateTailCall(masm(), stub.GetCode(isolate()));
  } else {
    KeyedLoadFieldStub stub(field.is_inobject(holder),
                            field.translate(holder),
                            representation);
    GenerateTailCall(masm(), stub.GetCode(isolate()));
  }
}

void LoadStubCompiler::GenerateLoadConstant(Handle<Object> value) {
  // Return the constant value.
  __ li(v0, value);
  __ Ret();
}

void LoadStubCompiler::GenerateLoadCallback(
    const CallOptimization& call_optimization,
    Handle<Map> receiver_map) {
  GenerateFastApiCall(
      masm(), call_optimization, receiver_map,
      receiver(), scratch3(), 0, NULL);
}

void LoadStubCompiler::GenerateLoadCallback(
    Register reg,
    Handle<ExecutableAccessorInfo> callback) {
  // Build AccessorInfo::args_ list on the stack and push property name below
  // the exit frame to make GC aware of them and store pointers to them.
  STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0);
  STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3);
  STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4);
  STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5);
  STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 6);
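  // The pushes and stores below must fill exactly these slots (plus the
  // property name at the very bottom) for the callback frame to be walkable.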
  ASSERT(!scratch2().is(reg));
  ASSERT(!scratch3().is(reg));
  ASSERT(!scratch4().is(reg));
  __ push(receiver());
  if (heap()->InNewSpace(callback->data())) {
    __ li(scratch3(), callback);
    __ lw(scratch3(), FieldMemOperand(scratch3(),
                                      ExecutableAccessorInfo::kDataOffset));
  } else {
    __ li(scratch3(), Handle<Object>(callback->data(), isolate()));
  }
  __ Subu(sp, sp, 6 * kPointerSize);
  __ sw(scratch3(), MemOperand(sp, 5 * kPointerSize));
  __ LoadRoot(scratch3(), Heap::kUndefinedValueRootIndex);
  __ sw(scratch3(), MemOperand(sp, 4 * kPointerSize));
  __ sw(scratch3(), MemOperand(sp, 3 * kPointerSize));
  __ li(scratch4(),
        Operand(ExternalReference::isolate_address(isolate())));
  __ sw(scratch4(), MemOperand(sp, 2 * kPointerSize));
  __ sw(reg, MemOperand(sp, 1 * kPointerSize));
  __ sw(name(), MemOperand(sp, 0 * kPointerSize));
  __ Addu(scratch2(), sp, 1 * kPointerSize);

  __ mov(a2, scratch2());  // Saved in case scratch2 == a1.
  // ABI for CallApiGetter.
  Register getter_address_reg = a2;

  Address getter_address = v8::ToCData<Address>(callback->getter());
  ApiFunction fun(getter_address);
  ExternalReference::Type type = ExternalReference::DIRECT_GETTER_CALL;
  ExternalReference ref = ExternalReference(&fun, type, isolate());
  __ li(getter_address_reg, Operand(ref));

  CallApiGetterStub stub;
  __ TailCallStub(&stub);
}

void LoadStubCompiler::GenerateLoadInterceptor(
    Register holder_reg,
    Handle<Object> object,
    Handle<JSObject> interceptor_holder,
    LookupResult* lookup,
    Handle<Name> name) {
  ASSERT(interceptor_holder->HasNamedInterceptor());
  ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());

  // So far the most popular follow ups for interceptor loads are FIELD
  // and CALLBACKS, so inline only them, other cases may be added
  // later.
  bool compile_followup_inline = false;
  if (lookup->IsFound() && lookup->IsCacheable()) {
    if (lookup->IsField()) {
      compile_followup_inline = true;
    } else if (lookup->type() == CALLBACKS &&
        lookup->GetCallbackObject()->IsExecutableAccessorInfo()) {
      ExecutableAccessorInfo* callback =
          ExecutableAccessorInfo::cast(lookup->GetCallbackObject());
      compile_followup_inline = callback->getter() != NULL &&
          callback->IsCompatibleReceiver(*object);
    }
  }

  if (compile_followup_inline) {
    // Compile the interceptor call, followed by inline code to load the
    // property from further up the prototype chain if the call fails.
    // Check that the maps haven't changed.
    ASSERT(holder_reg.is(receiver()) || holder_reg.is(scratch1()));

    // Preserve the receiver register explicitly whenever it is different from
    // the holder and it is needed should the interceptor return without any
    // result.  The CALLBACKS case needs the receiver to be passed into C++
    // code, the FIELD case might cause a miss during the prototype check.
    bool must_perform_prototype_check = *interceptor_holder != lookup->holder();
    bool must_preserve_receiver_reg = !receiver().is(holder_reg) &&
        (lookup->type() == CALLBACKS || must_perform_prototype_check);

    // Save necessary data before invoking an interceptor.
    // Requires a frame to make GC aware of pushed pointers.
    {
      FrameScope frame_scope(masm(), StackFrame::INTERNAL);
      if (must_preserve_receiver_reg) {
        __ Push(receiver(), holder_reg, this->name());
      } else {
        __ Push(holder_reg, this->name());
      }
      // Invoke an interceptor.  Note: map checks from receiver to
      // interceptor's holder have been compiled before (see a caller
      // of this method).
      CompileCallLoadPropertyWithInterceptor(
          masm(), receiver(), holder_reg, this->name(), interceptor_holder,
          IC::kLoadPropertyWithInterceptorOnly);

      // Check if interceptor provided a value for property.  If it's
      // the case, return immediately.
      Label interceptor_failed;
      __ LoadRoot(scratch1(), Heap::kNoInterceptorResultSentinelRootIndex);
      __ Branch(&interceptor_failed, eq, v0, Operand(scratch1()));
      frame_scope.GenerateLeaveFrame();
      __ Ret();

      __ bind(&interceptor_failed);
      __ pop(this->name());
      __ pop(holder_reg);
      if (must_preserve_receiver_reg) {
        __ pop(receiver());
      }
      // Leave the internal frame.
    }

    GenerateLoadPostInterceptor(holder_reg, interceptor_holder, name, lookup);
  } else {  // !compile_followup_inline
    // Call the runtime system to load the interceptor.
    // Check that the maps haven't changed.
    PushInterceptorArguments(masm(), receiver(), holder_reg,
                             this->name(), interceptor_holder);

    ExternalReference ref = ExternalReference(
        IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), isolate());
    __ TailCallExternalReference(ref, StubCache::kInterceptorArgsLength, 1);
  }
}

void StubCompiler::GenerateBooleanCheck(Register object, Label* miss) {
  Label success;
  // Check that the object is a boolean.
  __ LoadRoot(at, Heap::kTrueValueRootIndex);
  __ Branch(&success, eq, object, Operand(at));
  __ LoadRoot(at, Heap::kFalseValueRootIndex);
  __ Branch(miss, ne, object, Operand(at));
  __ bind(&success);
}

Handle<Code> StoreStubCompiler::CompileStoreCallback(
    Handle<JSObject> object,
    Handle<JSObject> holder,
    Handle<Name> name,
    Handle<ExecutableAccessorInfo> callback) {
  Register holder_reg = HandlerFrontend(
      IC::CurrentTypeOf(object, isolate()), receiver(), holder, name);

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());

  __ Push(receiver(), holder_reg);  // Receiver.
  __ li(at, Operand(callback));  // Callback info.
  __ push(at);
  __ li(at, Operand(name));
  __ Push(at, value());

  // Do tail-call to the runtime system.
  ExternalReference store_callback_property =
      ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
  __ TailCallExternalReference(store_callback_property, 5, 1);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}

Handle<Code> StoreStubCompiler::CompileStoreCallback(
    Handle<JSObject> object,
    Handle<JSObject> holder,
    Handle<Name> name,
    const CallOptimization& call_optimization) {
  HandlerFrontend(IC::CurrentTypeOf(object, isolate()),
                  receiver(), holder, name);

  Register values[] = { value() };
  GenerateFastApiCall(
      masm(), call_optimization, handle(object->map()),
      receiver(), scratch3(), 1, values);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}


#undef __
#define __ ACCESS_MASM(masm)

void StoreStubCompiler::GenerateStoreViaSetter(
    MacroAssembler* masm,
    Handle<HeapType> type,
    Handle<JSFunction> setter) {
  // ----------- S t a t e -------------
  //  -- a0    : value
  //  -- a1    : receiver
  //  -- a2    : name
  //  -- ra    : return address
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    Register receiver = a1;
    Register value = a0;

    // Save value register, so we can restore it later.
    __ push(value);

    if (!setter.is_null()) {
      // Call the JavaScript setter with receiver and value on the stack.
      if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ lw(receiver,
              FieldMemOperand(
                  receiver, JSGlobalObject::kGlobalReceiverOffset));
      }
      __ Push(receiver, value);
      ParameterCount actual(1);
      ParameterCount expected(setter);
      __ InvokeFunction(setter, expected, actual,
                        CALL_FUNCTION, NullCallWrapper());
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
    }

    // We have to return the passed value, not the return value of the setter.
    __ pop(v0);

    // Restore context register.
    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ Ret();
}

#undef __
#define __ ACCESS_MASM(masm())

Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
    Handle<JSObject> object,
    Handle<Name> name) {
  Label miss;

  // Check that the map of the object hasn't changed.
  __ CheckMap(receiver(), scratch1(), Handle<Map>(object->map()), &miss,
              DO_SMI_CHECK);

  // Perform global security token check if needed.
  if (object->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(receiver(), scratch1(), &miss);
  }

  // Stub is never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  __ Push(receiver(), this->name(), value());

  // Do tail-call to the runtime system.
  ExternalReference store_ic_property =
      ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate());
  __ TailCallExternalReference(store_ic_property, 3, 1);

  // Handle store cache miss.
  __ bind(&miss);
  TailCallBuiltin(masm(), MissBuiltin(kind()));

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}

Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<HeapType> type,
                                                      Handle<JSObject> last,
                                                      Handle<Name> name) {
  NonexistentHandlerFrontend(type, last, name);

  // Return undefined if maps of the full prototype chain are still the same.
  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  __ Ret();

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}

Register* LoadStubCompiler::registers() {
  // receiver, name, scratch1, scratch2, scratch3, scratch4.
  static Register registers[] = { a0, a2, a3, a1, t0, t1 };
  return registers;
}


Register* KeyedLoadStubCompiler::registers() {
  // receiver, name, scratch1, scratch2, scratch3, scratch4.
  static Register registers[] = { a1, a0, a2, a3, t0, t1 };
  return registers;
}


Register* StoreStubCompiler::registers() {
  // receiver, name, value, scratch1, scratch2, scratch3.
  static Register registers[] = { a1, a2, a0, a3, t0, t1 };
  return registers;
}


Register* KeyedStoreStubCompiler::registers() {
  // receiver, name, value, scratch1, scratch2, scratch3.
  static Register registers[] = { a2, a1, a0, a3, t0, t1 };
  return registers;
}

#undef __
#define __ ACCESS_MASM(masm)

void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm,
                                             Handle<HeapType> type,
                                             Register receiver,
                                             Handle<JSFunction> getter) {
  // ----------- S t a t e -------------
  //  -- a0    : receiver
  //  -- a2    : name
  //  -- ra    : return address
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    if (!getter.is_null()) {
      // Call the JavaScript getter with the receiver on the stack.
      if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ lw(receiver,
              FieldMemOperand(
                  receiver, JSGlobalObject::kGlobalReceiverOffset));
      }
      __ push(receiver);
      ParameterCount actual(0);
      ParameterCount expected(getter);
      __ InvokeFunction(getter, expected, actual,
                        CALL_FUNCTION, NullCallWrapper());
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context register.
    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ Ret();
}

#undef __
#define __ ACCESS_MASM(masm())

Handle<Code> LoadStubCompiler::CompileLoadGlobal(
    Handle<HeapType> type,
    Handle<GlobalObject> global,
    Handle<PropertyCell> cell,
    Handle<Name> name,
    bool is_dont_delete) {
  Label miss;

  HandlerFrontendHeader(type, receiver(), global, name, &miss);

  // Get the value from the cell.
  __ li(a3, Operand(cell));
  __ lw(t0, FieldMemOperand(a3, Cell::kValueOffset));

  // Check for deleted property if property can actually be deleted.
  if (!is_dont_delete) {
    __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
    __ Branch(&miss, eq, t0, Operand(at));
  }

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->named_load_global_stub(), 1, a1, a3);
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, t0);

  HandlerFrontendFooter(name, &miss);

  // Return the generated code.
  return GetCode(kind(), Code::NORMAL, name);
}

Handle<Code> BaseLoadStoreStubCompiler::CompilePolymorphicIC(
    TypeHandleList* types,
    CodeHandleList* handlers,
    Handle<Name> name,
    Code::StubType type,
    IcCheckType check) {
  Label miss;

  if (check == PROPERTY &&
      (kind() == Code::KEYED_LOAD_IC || kind() == Code::KEYED_STORE_IC)) {
    __ Branch(&miss, ne, this->name(), Operand(name));
  }

  Label number_case;
  Register match = scratch1();
  Label* smi_target = IncludesNumberType(types) ? &number_case : &miss;
  __ JumpIfSmi(receiver(), smi_target, match);  // Reg match is 0 if Smi.

  Register map_reg = scratch2();

  int receiver_count = types->length();
  int number_of_handled_maps = 0;
  __ lw(map_reg, FieldMemOperand(receiver(), HeapObject::kMapOffset));
  for (int current = 0; current < receiver_count; ++current) {
    Handle<HeapType> type = types->at(current);
    Handle<Map> map = IC::TypeToMap(*type, isolate());
    if (!map->is_deprecated()) {
      number_of_handled_maps++;
      // Check map and tail call if there's a match.
      // Separate compare from branch, to provide path for above JumpIfSmi().
      __ Subu(match, map_reg, Operand(map));
      if (type->Is(HeapType::Number())) {
        ASSERT(!number_case.is_unused());
        __ bind(&number_case);
      }
      __ Jump(handlers->at(current), RelocInfo::CODE_TARGET,
              eq, match, Operand(zero_reg));
    }
  }
  ASSERT(number_of_handled_maps != 0);

  __ bind(&miss);
  TailCallBuiltin(masm(), MissBuiltin(kind()));

  // Return the generated code.
  InlineCacheState state =
      number_of_handled_maps > 1 ? POLYMORPHIC : MONOMORPHIC;
  return GetICCode(kind(), type, name, state);
}

Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
    MapHandleList* receiver_maps,
    CodeHandleList* handler_stubs,
    MapHandleList* transitioned_maps) {
  Label miss;
  __ JumpIfSmi(receiver(), &miss);

  int receiver_count = receiver_maps->length();
  __ lw(scratch1(), FieldMemOperand(receiver(), HeapObject::kMapOffset));
  for (int i = 0; i < receiver_count; ++i) {
    if (transitioned_maps->at(i).is_null()) {
      __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET, eq,
              scratch1(), Operand(receiver_maps->at(i)));
    } else {
      Label next_map;
      __ Branch(&next_map, ne, scratch1(), Operand(receiver_maps->at(i)));
      __ li(transition_map(), Operand(transitioned_maps->at(i)));
      __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET);
      __ bind(&next_map);
    }
  }

  __ bind(&miss);
  TailCallBuiltin(masm(), MissBuiltin(kind()));

  // Return the generated code.
  return GetICCode(
      kind(), Code::NORMAL, factory()->empty_string(), POLYMORPHIC);
}

#undef __
#define __ ACCESS_MASM(masm)

void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
    MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- ra     : return address
  //  -- a0     : key
  //  -- a1     : receiver
  // -----------------------------------
  Label slow, miss;

  Register key = a0;
  Register receiver = a1;

  __ JumpIfNotSmi(key, &miss);
  __ lw(t0, FieldMemOperand(receiver, JSObject::kElementsOffset));
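  // The key is a smi; arithmetic shift right by kSmiTagSize untags it.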
  __ sra(a2, a0, kSmiTagSize);
  __ LoadFromNumberDictionary(&slow, t0, a0, v0, a2, a3, t1);
  __ Ret();

  // Slow case, key and receiver still in a0 and a1.
  __ bind(&slow);
  __ IncrementCounter(
      masm->isolate()->counters()->keyed_load_external_array_slow(),
      1, a2, a3);
  // Entry registers are intact.
  // ---------- S t a t e --------------
  //  -- ra     : return address
  //  -- a0     : key
  //  -- a1     : receiver
  // -----------------------------------
  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Slow);

  // Miss case, call the runtime.
  __ bind(&miss);

  // ---------- S t a t e --------------
  //  -- ra     : return address
  //  -- a0     : key
  //  -- a1     : receiver
  // -----------------------------------
  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss);
}

#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_MIPS