// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if V8_TARGET_ARCH_MIPS

#include "ic-inl.h"
#include "codegen.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)


static void ProbeTable(Isolate* isolate,
                       MacroAssembler* masm,
                       Code::Flags flags,
                       StubCache::Table table,
                       Register receiver,
                       Register name,
                       // Number of the cache entry, not scaled.
                       Register offset,
                       Register scratch,
                       Register scratch2,
                       Register offset_scratch) {
  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
  ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
  ExternalReference map_offset(isolate->stub_cache()->map_reference(table));

  uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address());
  uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address());
  uint32_t map_off_addr = reinterpret_cast<uint32_t>(map_offset.address());

  // Check the relative positions of the address fields.
  ASSERT(value_off_addr > key_off_addr);
  ASSERT((value_off_addr - key_off_addr) % 4 == 0);
  ASSERT((value_off_addr - key_off_addr) < (256 * 4));
  ASSERT(map_off_addr > key_off_addr);
  ASSERT((map_off_addr - key_off_addr) % 4 == 0);
  ASSERT((map_off_addr - key_off_addr) < (256 * 4));

  Label miss;
  Register base_addr = scratch;
  scratch = no_reg;

  // Multiply by 3 because there are 3 fields per entry (name, code, map).
  __ sll(offset_scratch, offset, 1);
  __ Addu(offset_scratch, offset_scratch, offset);

  // Calculate the base address of the entry.
  __ li(base_addr, Operand(key_offset));
  __ sll(at, offset_scratch, kPointerSizeLog2);
  __ Addu(base_addr, base_addr, at);
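  // base_addr now points at the entry's key slot: the key table base plus
  // (entry index * 3) words. The map and code slots are reached via the
  // fixed offsets asserted above.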
81
82   // Check that the key in the entry matches the name.
83   __ lw(at, MemOperand(base_addr, 0));
84   __ Branch(&miss, ne, name, Operand(at));
85
86   // Check the map matches.
87   __ lw(at, MemOperand(base_addr, map_off_addr - key_off_addr));
88   __ lw(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset));
89   __ Branch(&miss, ne, at, Operand(scratch2));
90
91   // Get the code entry from the cache.
92   Register code = scratch2;
93   scratch2 = no_reg;
94   __ lw(code, MemOperand(base_addr, value_off_addr - key_off_addr));
95
96   // Check that the flags match what we're looking for.
97   Register flags_reg = base_addr;
98   base_addr = no_reg;
99   __ lw(flags_reg, FieldMemOperand(code, Code::kFlagsOffset));
100   __ And(flags_reg, flags_reg, Operand(~Code::kFlagsNotUsedInLookup));
101   __ Branch(&miss, ne, flags_reg, Operand(flags));
102
103 #ifdef DEBUG
104     if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
105       __ jmp(&miss);
106     } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
107       __ jmp(&miss);
108     }
109 #endif
110
111   // Jump to the first instruction in the code stub.
112   __ Addu(at, code, Operand(Code::kHeaderSize - kHeapObjectTag));
113   __ Jump(at);
114
115   // Miss: fall through.
116   __ bind(&miss);
117 }
118
119
120 void StubCompiler::GenerateDictionaryNegativeLookup(MacroAssembler* masm,
121                                                     Label* miss_label,
122                                                     Register receiver,
123                                                     Handle<Name> name,
124                                                     Register scratch0,
125                                                     Register scratch1) {
126   ASSERT(name->IsUniqueName());
127   ASSERT(!receiver.is(scratch0));
128   Counters* counters = masm->isolate()->counters();
129   __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
130   __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
131
132   Label done;
133
134   const int kInterceptorOrAccessCheckNeededMask =
135       (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);
136
137   // Bail out if the receiver has a named interceptor or requires access checks.
138   Register map = scratch1;
139   __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
140   __ lbu(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
141   __ And(scratch0, scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
142   __ Branch(miss_label, ne, scratch0, Operand(zero_reg));
143
144   // Check that receiver is a JSObject.
145   __ lbu(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
146   __ Branch(miss_label, lt, scratch0, Operand(FIRST_SPEC_OBJECT_TYPE));
147
148   // Load properties array.
149   Register properties = scratch0;
150   __ lw(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
151   // Check that the properties array is a dictionary.
152   __ lw(map, FieldMemOperand(properties, HeapObject::kMapOffset));
153   Register tmp = properties;
154   __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
155   __ Branch(miss_label, ne, map, Operand(tmp));
156
157   // Restore the temporarily used register.
158   __ lw(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
159
160
161   NameDictionaryLookupStub::GenerateNegativeLookup(masm,
162                                                    miss_label,
163                                                    &done,
164                                                    receiver,
165                                                    properties,
166                                                    name,
167                                                    scratch1);
168   __ bind(&done);
169   __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
170 }
171
172
173 void StubCache::GenerateProbe(MacroAssembler* masm,
174                               Code::Flags flags,
175                               Register receiver,
176                               Register name,
177                               Register scratch,
178                               Register extra,
179                               Register extra2,
180                               Register extra3) {
181   Isolate* isolate = masm->isolate();
182   Label miss;
183
184   // Make sure that code is valid. The multiplying code relies on the
185   // entry size being 12.
186   ASSERT(sizeof(Entry) == 12);
187
  // Make sure that the flags do not name a specific type.
  ASSERT(Code::ExtractTypeFromFlags(flags) == 0);

  // Make sure that there are no register conflicts.
  ASSERT(!scratch.is(receiver));
  ASSERT(!scratch.is(name));
  ASSERT(!extra.is(receiver));
  ASSERT(!extra.is(name));
  ASSERT(!extra.is(scratch));
  ASSERT(!extra2.is(receiver));
  ASSERT(!extra2.is(name));
  ASSERT(!extra2.is(scratch));
  ASSERT(!extra2.is(extra));

  // Check register validity.
  ASSERT(!scratch.is(no_reg));
  ASSERT(!extra.is(no_reg));
  ASSERT(!extra2.is(no_reg));
  ASSERT(!extra3.is(no_reg));

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1,
                      extra2, extra3);

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Get the map of the receiver and compute the hash.
  __ lw(scratch, FieldMemOperand(name, Name::kHashFieldOffset));
  __ lw(at, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ Addu(scratch, scratch, at);
  uint32_t mask = kPrimaryTableSize - 1;
  // We shift out the last two bits because they are not part of the hash and
  // they are always 01 for maps.
  __ srl(scratch, scratch, kHeapObjectTagSize);
  __ Xor(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask));
  __ And(scratch, scratch, Operand(mask));
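  // scratch now holds the primary table index:
  // (((hash_field + map) >> kHeapObjectTagSize) ^ (flags >> kHeapObjectTagSize))
  //     & (kPrimaryTableSize - 1).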

  // Probe the primary table.
  ProbeTable(isolate,
             masm,
             flags,
             kPrimary,
             receiver,
             name,
             scratch,
             extra,
             extra2,
             extra3);

  // Primary miss: Compute hash for secondary probe.
  __ srl(at, name, kHeapObjectTagSize);
  __ Subu(scratch, scratch, at);
  uint32_t mask2 = kSecondaryTableSize - 1;
  __ Addu(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask2));
  __ And(scratch, scratch, Operand(mask2));
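  // The secondary index is derived from the primary one:
  // (primary - (name >> kHeapObjectTagSize) + (flags >> kHeapObjectTagSize))
  //     & (kSecondaryTableSize - 1).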

  // Probe the secondary table.
  ProbeTable(isolate,
             masm,
             flags,
             kSecondary,
             receiver,
             name,
             scratch,
             extra,
             extra2,
             extra3);

  // Cache miss: Fall-through and let caller handle the miss by
  // entering the runtime system.
  __ bind(&miss);
  __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1,
                      extra2, extra3);
}


void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                       int index,
                                                       Register prototype) {
  // Load the global or builtins object from the current context.
  __ lw(prototype,
        MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  // Load the native context from the global or builtins object.
  __ lw(prototype,
         FieldMemOperand(prototype, GlobalObject::kNativeContextOffset));
  // Load the function from the native context.
  __ lw(prototype, MemOperand(prototype, Context::SlotOffset(index)));
  // Load the initial map.  The global functions all have initial maps.
  __ lw(prototype,
         FieldMemOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the prototype from the initial map.
  __ lw(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
}


void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm,
    int index,
    Register prototype,
    Label* miss) {
  Isolate* isolate = masm->isolate();
  // Get the global function with the given index.
  Handle<JSFunction> function(
      JSFunction::cast(isolate->native_context()->get(index)));

  // Check we're still in the same context.
  Register scratch = prototype;
  const int offset = Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX);
  __ lw(scratch, MemOperand(cp, offset));
  __ lw(scratch, FieldMemOperand(scratch, GlobalObject::kNativeContextOffset));
  __ lw(scratch, MemOperand(scratch, Context::SlotOffset(index)));
  __ li(at, function);
  __ Branch(miss, ne, at, Operand(scratch));

  // Load its initial map. The global functions all have initial maps.
  __ li(prototype, Handle<Map>(function->initial_map()));
  // Load the prototype from the initial map.
  __ lw(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
}


void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
                                            Register dst,
                                            Register src,
                                            bool inobject,
                                            int index,
                                            Representation representation) {
  ASSERT(!FLAG_track_double_fields || !representation.IsDouble());
  int offset = index * kPointerSize;
  if (!inobject) {
    // Calculate the offset into the properties array.
    offset = offset + FixedArray::kHeaderSize;
    __ lw(dst, FieldMemOperand(src, JSObject::kPropertiesOffset));
    src = dst;
  }
  __ lw(dst, FieldMemOperand(src, offset));
}


void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
                                           Register receiver,
                                           Register scratch,
                                           Label* miss_label) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss_label);

  // Check that the object is a JS array.
  __ GetObjectType(receiver, scratch, scratch);
  __ Branch(miss_label, ne, scratch, Operand(JS_ARRAY_TYPE));

  // Load length directly from the JS array.
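  // The load below sits in the Ret's branch delay slot, so it executes
  // before the return actually transfers control.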
  __ Ret(USE_DELAY_SLOT);
  __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
}


// Generate code to check if an object is a string.  If the object is a
// heap object, its map's instance type is left in the scratch1 register.
// If this is not needed, scratch1 and scratch2 may be the same register.
static void GenerateStringCheck(MacroAssembler* masm,
                                Register receiver,
                                Register scratch1,
                                Register scratch2,
                                Label* smi,
                                Label* non_string_object) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, smi, t0);

  // Check that the object is a string.
  __ lw(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ And(scratch2, scratch1, Operand(kIsNotStringMask));
  // The cast is to resolve the overload for the argument of 0x0.
  __ Branch(non_string_object,
            ne,
            scratch2,
            Operand(static_cast<int32_t>(kStringTag)));
}


// Generate code to load the length from a string object and return the length.
// If the receiver object is not a string or a wrapped string object, the
// execution continues at the miss label. The register containing the
// receiver is potentially clobbered.
void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
                                            Register receiver,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss) {
  Label check_wrapper;

  // Check if the object is a string leaving the instance type in the
  // scratch1 register.
  GenerateStringCheck(masm, receiver, scratch1, scratch2, miss, &check_wrapper);

  // Load length directly from the string.
  __ Ret(USE_DELAY_SLOT);
  __ lw(v0, FieldMemOperand(receiver, String::kLengthOffset));

  // Check if the object is a JSValue wrapper.
  __ bind(&check_wrapper);
  __ Branch(miss, ne, scratch1, Operand(JS_VALUE_TYPE));

  // Unwrap the value and check if the wrapped value is a string.
  __ lw(scratch1, FieldMemOperand(receiver, JSValue::kValueOffset));
  GenerateStringCheck(masm, scratch1, scratch2, scratch2, miss, miss);
  __ Ret(USE_DELAY_SLOT);
  __ lw(v0, FieldMemOperand(scratch1, String::kLengthOffset));
}


void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
                                                 Register receiver,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, scratch1);
}


void StubCompiler::GenerateCheckPropertyCell(MacroAssembler* masm,
                                             Handle<JSGlobalObject> global,
                                             Handle<Name> name,
                                             Register scratch,
                                             Label* miss) {
  Handle<Cell> cell = JSGlobalObject::EnsurePropertyCell(global, name);
  ASSERT(cell->value()->IsTheHole());
  __ li(scratch, Operand(cell));
  __ lw(scratch, FieldMemOperand(scratch, Cell::kValueOffset));
  __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
  __ Branch(miss, ne, scratch, Operand(at));
}


void StoreStubCompiler::GenerateNegativeHolderLookup(
    MacroAssembler* masm,
    Handle<JSObject> holder,
    Register holder_reg,
    Handle<Name> name,
    Label* miss) {
  if (holder->IsJSGlobalObject()) {
    GenerateCheckPropertyCell(
        masm, Handle<JSGlobalObject>::cast(holder), name, scratch1(), miss);
  } else if (!holder->HasFastProperties() && !holder->IsJSGlobalProxy()) {
    GenerateDictionaryNegativeLookup(
        masm, miss, holder_reg, name, scratch1(), scratch2());
  }
}

// Generate StoreTransition code; the value is passed in the a0 register.
// After executing the generated code, the receiver_reg and name_reg
// may be clobbered.
void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm,
                                                Handle<JSObject> object,
                                                LookupResult* lookup,
                                                Handle<Map> transition,
                                                Handle<Name> name,
                                                Register receiver_reg,
                                                Register storage_reg,
                                                Register value_reg,
                                                Register scratch1,
                                                Register scratch2,
                                                Register scratch3,
                                                Label* miss_label,
                                                Label* slow) {
  // a0 : value.
  Label exit;

  int descriptor = transition->LastAdded();
  DescriptorArray* descriptors = transition->instance_descriptors();
  PropertyDetails details = descriptors->GetDetails(descriptor);
  Representation representation = details.representation();
  ASSERT(!representation.IsNone());

  if (details.type() == CONSTANT) {
    Handle<Object> constant(descriptors->GetValue(descriptor), masm->isolate());
    __ li(scratch1, constant);
    __ Branch(miss_label, ne, value_reg, Operand(scratch1));
  } else if (FLAG_track_fields && representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
  } else if (FLAG_track_double_fields && representation.IsDouble()) {
    Label do_store, heap_number;
    __ LoadRoot(scratch3, Heap::kHeapNumberMapRootIndex);
    __ AllocateHeapNumber(storage_reg, scratch1, scratch2, scratch3, slow);

    __ JumpIfNotSmi(value_reg, &heap_number);
    __ SmiUntag(scratch1, value_reg);
    __ mtc1(scratch1, f6);
    __ cvt_d_w(f4, f6);
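    // f4 now holds the smi's integer value converted to a double.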
    __ jmp(&do_store);

    __ bind(&heap_number);
    __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex,
                miss_label, DONT_DO_SMI_CHECK);
    __ ldc1(f4, FieldMemOperand(value_reg, HeapNumber::kValueOffset));

    __ bind(&do_store);
    __ sdc1(f4, FieldMemOperand(storage_reg, HeapNumber::kValueOffset));
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Perform map transition for the receiver if necessary.
  if (details.type() == FIELD &&
      object->map()->unused_property_fields() == 0) {
    // The properties must be extended before we can store the value.
    // We jump to a runtime call that extends the properties array.
    __ push(receiver_reg);
    __ li(a2, Operand(transition));
    __ Push(a2, a0);
    __ TailCallExternalReference(
           ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
                             masm->isolate()),
           3, 1);
    return;
  }

  // Update the map of the object.
  __ li(scratch1, Operand(transition));
  __ sw(scratch1, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));

  // Update the write barrier for the map field.
  __ RecordWriteField(receiver_reg,
                      HeapObject::kMapOffset,
                      scratch1,
                      scratch2,
                      kRAHasNotBeenSaved,
                      kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);

  if (details.type() == CONSTANT) {
    ASSERT(value_reg.is(a0));
    __ Ret(USE_DELAY_SLOT);
    __ mov(v0, a0);
    return;
  }

  int index = transition->instance_descriptors()->GetFieldIndex(
      transition->LastAdded());

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  // TODO(verwaest): Share this code as a code stub.
  SmiCheck smi_check = representation.IsTagged()
      ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    if (FLAG_track_double_fields && representation.IsDouble()) {
      __ sw(storage_reg, FieldMemOperand(receiver_reg, offset));
    } else {
      __ sw(value_reg, FieldMemOperand(receiver_reg, offset));
    }

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Update the write barrier for the array address.
      if (!FLAG_track_double_fields || !representation.IsDouble()) {
        __ mov(storage_reg, value_reg);
      }
      __ RecordWriteField(receiver_reg,
                          offset,
                          storage_reg,
                          scratch1,
                          kRAHasNotBeenSaved,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array.
    __ lw(scratch1,
          FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
    if (FLAG_track_double_fields && representation.IsDouble()) {
      __ sw(storage_reg, FieldMemOperand(scratch1, offset));
    } else {
      __ sw(value_reg, FieldMemOperand(scratch1, offset));
    }

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Update the write barrier for the array address.
      if (!FLAG_track_double_fields || !representation.IsDouble()) {
        __ mov(storage_reg, value_reg);
      }
      __ RecordWriteField(scratch1,
                          offset,
                          storage_reg,
                          receiver_reg,
                          kRAHasNotBeenSaved,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  }

  // Return the value (register v0).
  ASSERT(value_reg.is(a0));
  __ bind(&exit);
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, a0);
}

// Generate StoreField code; the value is passed in the a0 register.
// When leaving the generated code after success, the receiver_reg and
// name_reg may be clobbered.  Upon branch to miss_label, the receiver and
// name registers have their original values.
void StoreStubCompiler::GenerateStoreField(MacroAssembler* masm,
                                           Handle<JSObject> object,
                                           LookupResult* lookup,
                                           Register receiver_reg,
                                           Register name_reg,
                                           Register value_reg,
                                           Register scratch1,
                                           Register scratch2,
                                           Label* miss_label) {
  // a0 : value
  Label exit;

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  int index = lookup->GetFieldIndex().field_index();

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  Representation representation = lookup->representation();
  ASSERT(!representation.IsNone());
  if (FLAG_track_fields && representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
  } else if (FLAG_track_double_fields && representation.IsDouble()) {
    // Load the double storage.
    if (index < 0) {
      int offset = object->map()->instance_size() + (index * kPointerSize);
      __ lw(scratch1, FieldMemOperand(receiver_reg, offset));
    } else {
      __ lw(scratch1,
            FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
      int offset = index * kPointerSize + FixedArray::kHeaderSize;
      __ lw(scratch1, FieldMemOperand(scratch1, offset));
    }

    // Store the value into the storage.
    Label do_store, heap_number;
    __ JumpIfNotSmi(value_reg, &heap_number);
    __ SmiUntag(scratch2, value_reg);
    __ mtc1(scratch2, f6);
    __ cvt_d_w(f4, f6);
    __ jmp(&do_store);

    __ bind(&heap_number);
    __ CheckMap(value_reg, scratch2, Heap::kHeapNumberMapRootIndex,
                miss_label, DONT_DO_SMI_CHECK);
    __ ldc1(f4, FieldMemOperand(value_reg, HeapNumber::kValueOffset));

    __ bind(&do_store);
    __ sdc1(f4, FieldMemOperand(scratch1, HeapNumber::kValueOffset));
    // Return the value (register v0).
    ASSERT(value_reg.is(a0));
    __ Ret(USE_DELAY_SLOT);
    __ mov(v0, a0);
    return;
  }

  // TODO(verwaest): Share this code as a code stub.
  SmiCheck smi_check = representation.IsTagged()
      ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    __ sw(value_reg, FieldMemOperand(receiver_reg, offset));

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Skip updating write barrier if storing a smi.
      __ JumpIfSmi(value_reg, &exit);

      // Update the write barrier for the array address.
      // Pass the now unused name_reg as a scratch register.
      __ mov(name_reg, value_reg);
      __ RecordWriteField(receiver_reg,
                          offset,
                          name_reg,
                          scratch1,
                          kRAHasNotBeenSaved,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array.
    __ lw(scratch1,
          FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
    __ sw(value_reg, FieldMemOperand(scratch1, offset));

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Skip updating write barrier if storing a smi.
      __ JumpIfSmi(value_reg, &exit);

      // Update the write barrier for the array address.
      // Ok to clobber receiver_reg and name_reg, since we return.
      __ mov(name_reg, value_reg);
      __ RecordWriteField(scratch1,
                          offset,
                          name_reg,
                          receiver_reg,
                          kRAHasNotBeenSaved,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  }

  // Return the value (register v0).
  ASSERT(value_reg.is(a0));
  __ bind(&exit);
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, a0);
}


void StoreStubCompiler::GenerateRestoreName(MacroAssembler* masm,
                                            Label* label,
                                            Handle<Name> name) {
  if (!label->is_unused()) {
    __ bind(label);
    __ li(this->name(), Operand(name));
  }
}


static void PushInterceptorArguments(MacroAssembler* masm,
                                     Register receiver,
                                     Register holder,
                                     Register name,
                                     Handle<JSObject> holder_obj) {
  STATIC_ASSERT(StubCache::kInterceptorArgsNameIndex == 0);
  STATIC_ASSERT(StubCache::kInterceptorArgsInfoIndex == 1);
  STATIC_ASSERT(StubCache::kInterceptorArgsThisIndex == 2);
  STATIC_ASSERT(StubCache::kInterceptorArgsHolderIndex == 3);
  STATIC_ASSERT(StubCache::kInterceptorArgsLength == 4);
  __ push(name);
  Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
  ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor));
  Register scratch = name;
  __ li(scratch, Operand(interceptor));
  __ Push(scratch, receiver, holder);
}


static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm,
    Register receiver,
    Register holder,
    Register name,
    Handle<JSObject> holder_obj,
    IC::UtilityId id) {
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
  __ CallExternalReference(
      ExternalReference(IC_Utility(id), masm->isolate()),
      StubCache::kInterceptorArgsLength);
}

// Generate a call to the API function.
static void GenerateFastApiCall(MacroAssembler* masm,
                                const CallOptimization& optimization,
                                Handle<Map> receiver_map,
                                Register receiver,
                                Register scratch_in,
                                int argc,
                                Register* values) {
  ASSERT(!receiver.is(scratch_in));
  // Adjust sp in preparation for pushing the receiver and arguments.
  __ Subu(sp, sp, Operand((argc + 1) * kPointerSize));
  __ sw(receiver, MemOperand(sp, argc * kPointerSize));  // Push receiver.
  // Write the arguments to the stack frame.
  for (int i = 0; i < argc; i++) {
    Register arg = values[argc-1-i];
    ASSERT(!receiver.is(arg));
    ASSERT(!scratch_in.is(arg));
    __ sw(arg, MemOperand(sp, (argc-1-i) * kPointerSize));  // Push arg.
  }
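  // Stack layout now: sp[0..argc-1] hold values[0..argc-1] and sp[argc]
  // holds the receiver.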
  ASSERT(optimization.is_simple_api_call());

  // ABI for CallApiFunctionStub.
  Register callee = a0;
  Register call_data = t0;
  Register holder = a2;
  Register api_function_address = a1;

  // Put holder in place.
  CallOptimization::HolderLookup holder_lookup;
  Handle<JSObject> api_holder = optimization.LookupHolderOfExpectedType(
      receiver_map,
      &holder_lookup);
  switch (holder_lookup) {
    case CallOptimization::kHolderIsReceiver:
      __ Move(holder, receiver);
      break;
    case CallOptimization::kHolderFound:
      __ li(holder, api_holder);
      break;
    case CallOptimization::kHolderNotFound:
      UNREACHABLE();
      break;
  }

  Isolate* isolate = masm->isolate();
  Handle<JSFunction> function = optimization.constant_function();
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  Handle<Object> call_data_obj(api_call_info->data(), isolate);

  // Put callee in place.
  __ li(callee, function);

  bool call_data_undefined = false;
  // Put call_data in place.
  if (isolate->heap()->InNewSpace(*call_data_obj)) {
    __ li(call_data, api_call_info);
    __ lw(call_data, FieldMemOperand(call_data, CallHandlerInfo::kDataOffset));
  } else if (call_data_obj->IsUndefined()) {
    call_data_undefined = true;
    __ LoadRoot(call_data, Heap::kUndefinedValueRootIndex);
  } else {
    __ li(call_data, call_data_obj);
  }
  // Put api_function_address in place.
  Address function_address = v8::ToCData<Address>(api_call_info->callback());
  ApiFunction fun(function_address);
  ExternalReference::Type type = ExternalReference::DIRECT_API_CALL;
  ExternalReference ref =
      ExternalReference(&fun,
                        type,
                        masm->isolate());
  __ li(api_function_address, Operand(ref));

  // Jump to stub.
  CallApiFunctionStub stub(true, call_data_undefined, argc);
  __ TailCallStub(&stub);
}


void StubCompiler::GenerateTailCall(MacroAssembler* masm, Handle<Code> code) {
  __ Jump(code, RelocInfo::CODE_TARGET);
}


#undef __
#define __ ACCESS_MASM(masm())


Register StubCompiler::CheckPrototypes(Handle<HeapType> type,
                                       Register object_reg,
                                       Handle<JSObject> holder,
                                       Register holder_reg,
                                       Register scratch1,
                                       Register scratch2,
                                       Handle<Name> name,
                                       Label* miss,
                                       PrototypeCheckType check) {
  Handle<Map> receiver_map(IC::TypeToMap(*type, isolate()));
  // Make sure that the type feedback oracle harvests the receiver map.
  // TODO(svenpanne) Remove this hack when all ICs are reworked.
  __ li(scratch1, Operand(receiver_map));

  // Make sure there's no overlap between holder and object registers.
  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
         && !scratch2.is(scratch1));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 0;

  Handle<JSObject> current = Handle<JSObject>::null();
  if (type->IsConstant()) current = Handle<JSObject>::cast(type->AsConstant());
  Handle<JSObject> prototype = Handle<JSObject>::null();
  Handle<Map> current_map = receiver_map;
  Handle<Map> holder_map(holder->map());
  // Traverse the prototype chain and check the maps in the prototype chain for
  // fast and global objects or do negative lookup for normal objects.
  while (!current_map.is_identical_to(holder_map)) {
    ++depth;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(current_map->IsJSGlobalProxyMap() ||
           !current_map->is_access_check_needed());

    prototype = handle(JSObject::cast(current_map->prototype()));
    if (current_map->is_dictionary_map() &&
        !current_map->IsJSGlobalObjectMap() &&
        !current_map->IsJSGlobalProxyMap()) {
      if (!name->IsUniqueName()) {
        ASSERT(name->IsString());
        name = factory()->InternalizeString(Handle<String>::cast(name));
      }
      ASSERT(current.is_null() ||
             current->property_dictionary()->FindEntry(*name) ==
             NameDictionary::kNotFound);

      GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
                                       scratch1, scratch2);

      __ lw(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
      reg = holder_reg;  // From now on the object will be in holder_reg.
      __ lw(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
    } else {
      Register map_reg = scratch1;
      if (depth != 1 || check == CHECK_ALL_MAPS) {
        // CheckMap implicitly loads the map of |reg| into |map_reg|.
        __ CheckMap(reg, map_reg, current_map, miss, DONT_DO_SMI_CHECK);
      } else {
        __ lw(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset));
      }

      // Check access rights to the global object.  This has to happen after
      // the map check so that we know that the object is actually a global
      // object.
      if (current_map->IsJSGlobalProxyMap()) {
        __ CheckAccessGlobalProxy(reg, scratch2, miss);
      } else if (current_map->IsJSGlobalObjectMap()) {
        GenerateCheckPropertyCell(
            masm(), Handle<JSGlobalObject>::cast(current), name,
            scratch2, miss);
      }

      reg = holder_reg;  // From now on the object will be in holder_reg.

      if (heap()->InNewSpace(*prototype)) {
        // The prototype is in new space; we cannot store a reference to it
        // in the code.  Load it from the map.
        __ lw(reg, FieldMemOperand(map_reg, Map::kPrototypeOffset));
      } else {
        // The prototype is in old space; load it directly.
        __ li(reg, Operand(prototype));
      }
    }

    // Go to the next object in the prototype chain.
    current = prototype;
    current_map = handle(current->map());
  }

  // Log the check depth.
  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));

  if (depth != 0 || check == CHECK_ALL_MAPS) {
    // Check the holder map.
    __ CheckMap(reg, scratch1, current_map, miss, DONT_DO_SMI_CHECK);
  }

  // Perform security check for access to the global object.
  ASSERT(current_map->IsJSGlobalProxyMap() ||
         !current_map->is_access_check_needed());
  if (current_map->IsJSGlobalProxyMap()) {
    __ CheckAccessGlobalProxy(reg, scratch1, miss);
  }

  // Return the register containing the holder.
  return reg;
}


void LoadStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ Branch(&success);
    __ bind(miss);
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    __ bind(&success);
  }
}


void StoreStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ Branch(&success);
    GenerateRestoreName(masm(), miss, name);
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    __ bind(&success);
  }
}


Register LoadStubCompiler::CallbackHandlerFrontend(
    Handle<HeapType> type,
    Register object_reg,
    Handle<JSObject> holder,
    Handle<Name> name,
    Handle<Object> callback) {
  Label miss;

  Register reg = HandlerFrontendHeader(type, object_reg, holder, name, &miss);

  if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) {
    ASSERT(!reg.is(scratch2()));
    ASSERT(!reg.is(scratch3()));
    ASSERT(!reg.is(scratch4()));

    // Load the properties dictionary.
    Register dictionary = scratch4();
    __ lw(dictionary, FieldMemOperand(reg, JSObject::kPropertiesOffset));

    // Probe the dictionary.
    Label probe_done;
    NameDictionaryLookupStub::GeneratePositiveLookup(masm(),
                                                     &miss,
                                                     &probe_done,
                                                     dictionary,
                                                     this->name(),
                                                     scratch2(),
                                                     scratch3());
    __ bind(&probe_done);

    // If probing finds an entry in the dictionary, scratch3 contains the
    // pointer into the dictionary. Check that the value is the callback.
    Register pointer = scratch3();
    const int kElementsStartOffset = NameDictionary::kHeaderSize +
        NameDictionary::kElementsStartIndex * kPointerSize;
    const int kValueOffset = kElementsStartOffset + kPointerSize;
    __ lw(scratch2(), FieldMemOperand(pointer, kValueOffset));
    __ Branch(&miss, ne, scratch2(), Operand(callback));
  }

  HandlerFrontendFooter(name, &miss);
  return reg;
}


void LoadStubCompiler::GenerateLoadField(Register reg,
                                         Handle<JSObject> holder,
                                         PropertyIndex field,
                                         Representation representation) {
  if (!reg.is(receiver())) __ mov(receiver(), reg);
  if (kind() == Code::LOAD_IC) {
    LoadFieldStub stub(field.is_inobject(holder),
                       field.translate(holder),
                       representation);
    GenerateTailCall(masm(), stub.GetCode(isolate()));
  } else {
    KeyedLoadFieldStub stub(field.is_inobject(holder),
                            field.translate(holder),
                            representation);
    GenerateTailCall(masm(), stub.GetCode(isolate()));
  }
}


void LoadStubCompiler::GenerateLoadConstant(Handle<Object> value) {
  // Return the constant value.
  __ li(v0, value);
  __ Ret();
}


void LoadStubCompiler::GenerateLoadCallback(
    const CallOptimization& call_optimization,
    Handle<Map> receiver_map) {
  GenerateFastApiCall(
      masm(), call_optimization, receiver_map,
      receiver(), scratch3(), 0, NULL);
}


void LoadStubCompiler::GenerateLoadCallback(
    Register reg,
    Handle<ExecutableAccessorInfo> callback) {
  // Build the AccessorInfo::args_ list on the stack and push the property
  // name below the exit frame to make the GC aware of these values.
  STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0);
  STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3);
  STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4);
  STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5);
  STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 6);
  ASSERT(!scratch2().is(reg));
  ASSERT(!scratch3().is(reg));
  ASSERT(!scratch4().is(reg));
  __ push(receiver());
  if (heap()->InNewSpace(callback->data())) {
    __ li(scratch3(), callback);
    __ lw(scratch3(), FieldMemOperand(scratch3(),
                                      ExecutableAccessorInfo::kDataOffset));
  } else {
    __ li(scratch3(), Handle<Object>(callback->data(), isolate()));
  }
  __ Subu(sp, sp, 6 * kPointerSize);
  __ sw(scratch3(), MemOperand(sp, 5 * kPointerSize));
  __ LoadRoot(scratch3(), Heap::kUndefinedValueRootIndex);
  __ sw(scratch3(), MemOperand(sp, 4 * kPointerSize));
  __ sw(scratch3(), MemOperand(sp, 3 * kPointerSize));
  __ li(scratch4(),
        Operand(ExternalReference::isolate_address(isolate())));
  __ sw(scratch4(), MemOperand(sp, 2 * kPointerSize));
  __ sw(reg, MemOperand(sp, 1 * kPointerSize));
  __ sw(name(), MemOperand(sp, 0 * kPointerSize));
  __ Addu(scratch2(), sp, 1 * kPointerSize);
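  // The stack now matches the indices asserted above: sp[1] holder,
  // sp[2] isolate, sp[3] return value default, sp[4] return value (both
  // undefined), sp[5] data, and the receiver pushed earlier is kThisIndex.
  // The name sits below the args at sp[0]; scratch2() points at the holder.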

  __ mov(a2, scratch2());  // Saved in case scratch2 == a1.
  // ABI for CallApiGetter.
  Register getter_address_reg = a2;

  Address getter_address = v8::ToCData<Address>(callback->getter());
  ApiFunction fun(getter_address);
  ExternalReference::Type type = ExternalReference::DIRECT_GETTER_CALL;
  ExternalReference ref = ExternalReference(&fun, type, isolate());
  __ li(getter_address_reg, Operand(ref));

  CallApiGetterStub stub;
  __ TailCallStub(&stub);
}


void LoadStubCompiler::GenerateLoadInterceptor(
    Register holder_reg,
    Handle<Object> object,
    Handle<JSObject> interceptor_holder,
    LookupResult* lookup,
    Handle<Name> name) {
  ASSERT(interceptor_holder->HasNamedInterceptor());
  ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());

  // So far the most popular follow-ups for interceptor loads are FIELD and
  // CALLBACKS, so inline only those cases; other cases may be added later.
  bool compile_followup_inline = false;
  if (lookup->IsFound() && lookup->IsCacheable()) {
    if (lookup->IsField()) {
      compile_followup_inline = true;
    } else if (lookup->type() == CALLBACKS &&
        lookup->GetCallbackObject()->IsExecutableAccessorInfo()) {
      ExecutableAccessorInfo* callback =
          ExecutableAccessorInfo::cast(lookup->GetCallbackObject());
      compile_followup_inline = callback->getter() != NULL &&
          callback->IsCompatibleReceiver(*object);
    }
  }

  if (compile_followup_inline) {
    // Compile the interceptor call, followed by inline code to load the
    // property from further up the prototype chain if the call fails.
    // Check that the maps haven't changed.
    ASSERT(holder_reg.is(receiver()) || holder_reg.is(scratch1()));

    // Preserve the receiver register explicitly whenever it is different from
    // the holder and it is needed should the interceptor return without any
    // result. The CALLBACKS case needs the receiver to be passed into C++
    // code; the FIELD case might cause a miss during the prototype check.
    bool must_perform_prototype_check = *interceptor_holder != lookup->holder();
    bool must_preserve_receiver_reg = !receiver().is(holder_reg) &&
        (lookup->type() == CALLBACKS || must_perform_prototype_check);

    // Save necessary data before invoking an interceptor.
    // Requires a frame to make GC aware of pushed pointers.
    {
      FrameScope frame_scope(masm(), StackFrame::INTERNAL);
      if (must_preserve_receiver_reg) {
        __ Push(receiver(), holder_reg, this->name());
      } else {
        __ Push(holder_reg, this->name());
1173       // Invoke an interceptor.  Note: map checks from receiver to
1174       // interceptor's holder has been compiled before (see a caller
1175       // of this method).
1176       CompileCallLoadPropertyWithInterceptor(
1177           masm(), receiver(), holder_reg, this->name(), interceptor_holder,
1178           IC::kLoadPropertyWithInterceptorOnly);
1179
1180       // Check if interceptor provided a value for property.  If it's
1181       // the case, return immediately.
1182       Label interceptor_failed;
1183       __ LoadRoot(scratch1(), Heap::kNoInterceptorResultSentinelRootIndex);
1184       __ Branch(&interceptor_failed, eq, v0, Operand(scratch1()));
1185       frame_scope.GenerateLeaveFrame();
1186       __ Ret();
1187
1188       __ bind(&interceptor_failed);
1189       __ pop(this->name());
1190       __ pop(holder_reg);
1191       if (must_preserve_receiver_reg) {
1192         __ pop(receiver());
1193       }
1194       // Leave the internal frame.
1195     }
1196     GenerateLoadPostInterceptor(holder_reg, interceptor_holder, name, lookup);
1197   } else {  // !compile_followup_inline
1198     // Call the runtime system to load the interceptor.
1199     // Check that the maps haven't changed.
1200     PushInterceptorArguments(masm(), receiver(), holder_reg,
1201                              this->name(), interceptor_holder);
1202
1203     ExternalReference ref = ExternalReference(
1204         IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), isolate());
1205     __ TailCallExternalReference(ref, StubCache::kInterceptorArgsLength, 1);
1206   }
1207 }
1208
1209
1210 void StubCompiler::GenerateBooleanCheck(Register object, Label* miss) {
1211   Label success;
1212   // Check that the object is a boolean.
1213   __ LoadRoot(at, Heap::kTrueValueRootIndex);
1214   __ Branch(&success, eq, object, Operand(at));
1215   __ LoadRoot(at, Heap::kFalseValueRootIndex);
1216   __ Branch(miss, ne, object, Operand(at));
1217   __ bind(&success);
1218 }
1219
1220
1221 Handle<Code> StoreStubCompiler::CompileStoreCallback(
1222     Handle<JSObject> object,
1223     Handle<JSObject> holder,
1224     Handle<Name> name,
1225     Handle<ExecutableAccessorInfo> callback) {
1226   Register holder_reg = HandlerFrontend(
1227       IC::CurrentTypeOf(object, isolate()), receiver(), holder, name);
1228
1229   // Stub never generated for non-global objects that require access
1230   // checks.
1231   ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
1232
1233   __ Push(receiver(), holder_reg);  // Receiver.
1234   __ li(at, Operand(callback));  // Callback info.
1235   __ push(at);
1236   __ li(at, Operand(name));
1237   __ Push(at, value());
1238
1239   // Do tail-call to the runtime system.
1240   ExternalReference store_callback_property =
1241       ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
1242   __ TailCallExternalReference(store_callback_property, 5, 1);
1243
1244   // Return the generated code.
1245   return GetCode(kind(), Code::FAST, name);
1246 }
1247
1248
1249 Handle<Code> StoreStubCompiler::CompileStoreCallback(
1250     Handle<JSObject> object,
1251     Handle<JSObject> holder,
1252     Handle<Name> name,
1253     const CallOptimization& call_optimization) {
1254   HandlerFrontend(IC::CurrentTypeOf(object, isolate()),
1255                   receiver(), holder, name);
1256
1257   Register values[] = { value() };
1258   GenerateFastApiCall(
1259       masm(), call_optimization, handle(object->map()),
1260       receiver(), scratch3(), 1, values);
1261
1262   // Return the generated code.
1263   return GetCode(kind(), Code::FAST, name);
1264 }
1265
1266
1267 #undef __
1268 #define __ ACCESS_MASM(masm)
1269
1270
1271 void StoreStubCompiler::GenerateStoreViaSetter(
1272     MacroAssembler* masm,
1273     Handle<HeapType> type,
1274     Handle<JSFunction> setter) {
1275   // ----------- S t a t e -------------
1276   //  -- a0    : value
1277   //  -- a1    : receiver
1278   //  -- a2    : name
1279   //  -- ra    : return address
1280   // -----------------------------------
1281   {
1282     FrameScope scope(masm, StackFrame::INTERNAL);
1283     Register receiver = a1;
1284     Register value = a0;
1285
1286     // Save value register, so we can restore it later.
1287     __ push(value);
1288
1289     if (!setter.is_null()) {
1290       // Call the JavaScript setter with receiver and value on the stack.
1291       if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
1292         // Swap in the global receiver.
1293         __ lw(receiver,
1294                FieldMemOperand(
1295                    receiver, JSGlobalObject::kGlobalReceiverOffset));
1296       }
1297       __ Push(receiver, value);
1298       ParameterCount actual(1);
1299       ParameterCount expected(setter);
1300       __ InvokeFunction(setter, expected, actual,
1301                         CALL_FUNCTION, NullCallWrapper());
1302     } else {
1303       // If we generate a global code snippet for deoptimization only, remember
1304       // the place to continue after deoptimization.
1305       masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
1306     }
1307
1308     // We have to return the passed value, not the return value of the setter.
1309     __ pop(v0);
1310
1311     // Restore context register.
1312     __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
1313   }
1314   __ Ret();
1315 }
1316
1317
1318 #undef __
1319 #define __ ACCESS_MASM(masm())
1320
1321
1322 Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
1323     Handle<JSObject> object,
1324     Handle<Name> name) {
1325   Label miss;
1326
1327   // Check that the map of the object hasn't changed.
1328   __ CheckMap(receiver(), scratch1(), Handle<Map>(object->map()), &miss,
1329               DO_SMI_CHECK);
1330
1331   // Perform global security token check if needed.
1332   if (object->IsJSGlobalProxy()) {
1333     __ CheckAccessGlobalProxy(receiver(), scratch1(), &miss);
1334   }
1335
1336   // Stub is never generated for non-global objects that require access
1337   // checks.
1338   ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
1339
1340   __ Push(receiver(), this->name(), value());
1341
1342   // Do tail-call to the runtime system.
1343   ExternalReference store_ic_property =
1344       ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate());
1345   __ TailCallExternalReference(store_ic_property, 3, 1);
1346
1347   // Handle store cache miss.
1348   __ bind(&miss);
1349   TailCallBuiltin(masm(), MissBuiltin(kind()));
1350
1351   // Return the generated code.
1352   return GetCode(kind(), Code::FAST, name);
1353 }
1354
1355
1356 Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<HeapType> type,
1357                                                       Handle<JSObject> last,
1358                                                       Handle<Name> name) {
1359   NonexistentHandlerFrontend(type, last, name);
1360
1361   // Return undefined if maps of the full prototype chain is still the same.
1362   __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
1363   __ Ret();
1364
1365   // Return the generated code.
1366   return GetCode(kind(), Code::FAST, name);
1367 }


Register* LoadStubCompiler::registers() {
  // receiver, name, scratch1, scratch2, scratch3, scratch4.
  static Register registers[] = { a0, a2, a3, a1, t0, t1 };
  return registers;
}


Register* KeyedLoadStubCompiler::registers() {
  // receiver, name, scratch1, scratch2, scratch3, scratch4.
  static Register registers[] = { a1, a0, a2, a3, t0, t1 };
  return registers;
}


Register* StoreStubCompiler::registers() {
  // receiver, name, value, scratch1, scratch2, scratch3.
  static Register registers[] = { a1, a2, a0, a3, t0, t1 };
  return registers;
}


Register* KeyedStoreStubCompiler::registers() {
  // receiver, name, value, scratch1, scratch2, scratch3.
  static Register registers[] = { a2, a1, a0, a3, t0, t1 };
  return registers;
}


#undef __
#define __ ACCESS_MASM(masm)


void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm,
                                             Handle<HeapType> type,
                                             Register receiver,
                                             Handle<JSFunction> getter) {
  // ----------- S t a t e -------------
  //  -- a0    : receiver
  //  -- a2    : name
  //  -- ra    : return address
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    if (!getter.is_null()) {
      // Call the JavaScript getter with the receiver on the stack.
      if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ lw(receiver,
              FieldMemOperand(
                  receiver, JSGlobalObject::kGlobalReceiverOffset));
      }
      __ push(receiver);
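      // A getter is invoked with no arguments; only the receiver was pushed.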
      ParameterCount actual(0);
      ParameterCount expected(getter);
      __ InvokeFunction(getter, expected, actual,
                        CALL_FUNCTION, NullCallWrapper());
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore the context register.
    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ Ret();
}


#undef __
#define __ ACCESS_MASM(masm())


Handle<Code> LoadStubCompiler::CompileLoadGlobal(
    Handle<HeapType> type,
    Handle<GlobalObject> global,
    Handle<PropertyCell> cell,
    Handle<Name> name,
    bool is_dont_delete) {
  Label miss;

  HandlerFrontendHeader(type, receiver(), global, name, &miss);

  // Get the value from the cell.
  __ li(a3, Operand(cell));
  __ lw(t0, FieldMemOperand(a3, Cell::kValueOffset));

  // Check for a deleted property if the property can actually be deleted.
  if (!is_dont_delete) {
    __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
    __ Branch(&miss, eq, t0, Operand(at));
  }

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->named_load_global_stub(), 1, a1, a3);
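  // With USE_DELAY_SLOT the mov below is emitted into the branch delay slot
  // of the return, so v0 is set before the jump takes effect.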
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, t0);

  HandlerFrontendFooter(name, &miss);

  // Return the generated code.
  return GetCode(kind(), Code::NORMAL, name);
}


Handle<Code> BaseLoadStoreStubCompiler::CompilePolymorphicIC(
    TypeHandleList* types,
    CodeHandleList* handlers,
    Handle<Name> name,
    Code::StubType type,
    IcCheckType check) {
  Label miss;

  if (check == PROPERTY &&
      (kind() == Code::KEYED_LOAD_IC || kind() == Code::KEYED_STORE_IC)) {
    __ Branch(&miss, ne, this->name(), Operand(name));
  }

  Label number_case;
  Register match = scratch1();
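  // A Smi receiver can only be handled by a Number type; all other types
  // miss for Smis.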
  Label* smi_target = IncludesNumberType(types) ? &number_case : &miss;
  __ JumpIfSmi(receiver(), smi_target, match);  // Reg match is 0 if Smi.

  Register map_reg = scratch2();

  int receiver_count = types->length();
  int number_of_handled_maps = 0;
  __ lw(map_reg, FieldMemOperand(receiver(), HeapObject::kMapOffset));
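  // Compare the receiver map against each candidate map and tail call the
  // matching handler; deprecated maps are skipped.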
  for (int current = 0; current < receiver_count; ++current) {
    Handle<HeapType> type = types->at(current);
    Handle<Map> map = IC::TypeToMap(*type, isolate());
    if (!map->is_deprecated()) {
      number_of_handled_maps++;
      // Check the map and tail call the handler on a match. The compare is
      // separated from the branch so that &number_case can be bound between
      // them: the Smi path from JumpIfSmi() above leaves 0 in |match| and so
      // also takes the jump.
      __ Subu(match, map_reg, Operand(map));
      if (type->Is(HeapType::Number())) {
        ASSERT(!number_case.is_unused());
        __ bind(&number_case);
      }
      __ Jump(handlers->at(current), RelocInfo::CODE_TARGET,
          eq, match, Operand(zero_reg));
    }
  }
  ASSERT(number_of_handled_maps != 0);

  __ bind(&miss);
  TailCallBuiltin(masm(), MissBuiltin(kind()));

  // Return the generated code.
  InlineCacheState state =
      number_of_handled_maps > 1 ? POLYMORPHIC : MONOMORPHIC;
  return GetICCode(kind(), type, name, state);
}


Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
    MapHandleList* receiver_maps,
    CodeHandleList* handler_stubs,
    MapHandleList* transitioned_maps) {
  Label miss;
  __ JumpIfSmi(receiver(), &miss);

  int receiver_count = receiver_maps->length();
  __ lw(scratch1(), FieldMemOperand(receiver(), HeapObject::kMapOffset));
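  // Dispatch on the receiver map: jump straight to the handler for
  // non-transitioning stores, or load the transition target map first.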
  for (int i = 0; i < receiver_count; ++i) {
    if (transitioned_maps->at(i).is_null()) {
      __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET, eq,
          scratch1(), Operand(receiver_maps->at(i)));
    } else {
      Label next_map;
      __ Branch(&next_map, ne, scratch1(), Operand(receiver_maps->at(i)));
      __ li(transition_map(), Operand(transitioned_maps->at(i)));
      __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET);
      __ bind(&next_map);
    }
  }

  __ bind(&miss);
  TailCallBuiltin(masm(), MissBuiltin(kind()));

  // Return the generated code.
  return GetICCode(
      kind(), Code::NORMAL, factory()->empty_string(), POLYMORPHIC);
}


#undef __
#define __ ACCESS_MASM(masm)


void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
    MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- ra     : return address
  //  -- a0     : key
  //  -- a1     : receiver
  // -----------------------------------
  Label slow, miss;

  Register key = a0;
  Register receiver = a1;

  __ JumpIfNotSmi(key, &miss);
  __ lw(t0, FieldMemOperand(receiver, JSObject::kElementsOffset));
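  // Untag the Smi key into a2; the dictionary probe below expects the
  // untagged integer key.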
  __ sra(a2, a0, kSmiTagSize);
  __ LoadFromNumberDictionary(&slow, t0, a0, v0, a2, a3, t1);
  __ Ret();

  // Slow case, key and receiver still in a0 and a1.
  __ bind(&slow);
  __ IncrementCounter(
      masm->isolate()->counters()->keyed_load_external_array_slow(),
      1, a2, a3);
  // Entry registers are intact.
  // ---------- S t a t e --------------
  //  -- ra     : return address
  //  -- a0     : key
  //  -- a1     : receiver
  // -----------------------------------
  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Slow);

  // Miss case, call the runtime.
  __ bind(&miss);

  // ---------- S t a t e --------------
  //  -- ra     : return address
  //  -- a0     : key
  //  -- a1     : receiver
  // -----------------------------------
  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss);
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_MIPS