// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "v8.h"

#if V8_TARGET_ARCH_ARM64

#include "ic-inl.h"
#include "codegen.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)


void StubCompiler::GenerateDictionaryNegativeLookup(MacroAssembler* masm,
                                                    Label* miss_label,
                                                    Register receiver,
                                                    Handle<Name> name,
                                                    Register scratch0,
                                                    Register scratch1) {
  ASSERT(!AreAliased(receiver, scratch0, scratch1));
  ASSERT(name->IsUniqueName());
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
  __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);

  Label done;

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access checks.
  Register map = scratch1;
  __ Ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ Ldrb(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
  __ Tst(scratch0, kInterceptorOrAccessCheckNeededMask);
  __ B(ne, miss_label);

  // Check that receiver is a JSObject.
  __ Ldrb(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ Cmp(scratch0, FIRST_SPEC_OBJECT_TYPE);
  __ B(lt, miss_label);

  // Load properties array.
  Register properties = scratch0;
  __ Ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
  // Check that the properties array is a dictionary.
  __ Ldr(map, FieldMemOperand(properties, HeapObject::kMapOffset));
  __ JumpIfNotRoot(map, Heap::kHashTableMapRootIndex, miss_label);

  NameDictionaryLookupStub::GenerateNegativeLookup(masm,
                                                   miss_label,
                                                   &done,
                                                   receiver,
                                                   properties,
                                                   name,
                                                   scratch1);
  __ Bind(&done);
  __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
}


// Probe the primary or secondary table.
// If the entry is found in the cache, the generated code jumps to the first
// instruction of the stub in the cache.
// If there is a miss, the code falls through.
//
// The 'receiver', 'name' and 'offset' registers are preserved on a miss.
static void ProbeTable(Isolate* isolate,
                       MacroAssembler* masm,
                       Code::Flags flags,
                       StubCache::Table table,
                       Register receiver,
                       Register name,
                       Register offset,
                       Register scratch,
                       Register scratch2,
                       Register scratch3) {
  // Some code below relies on the fact that the Entry struct contains
  // 3 pointers (name, code, map).
  STATIC_ASSERT(sizeof(StubCache::Entry) == (3 * kPointerSize));

  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
  ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
  ExternalReference map_offset(isolate->stub_cache()->map_reference(table));

  uintptr_t key_off_addr = reinterpret_cast<uintptr_t>(key_offset.address());
  uintptr_t value_off_addr =
      reinterpret_cast<uintptr_t>(value_offset.address());
  uintptr_t map_off_addr = reinterpret_cast<uintptr_t>(map_offset.address());

  Label miss;

  ASSERT(!AreAliased(name, offset, scratch, scratch2, scratch3));

  // Multiply by 3 because there are 3 fields per entry.
  __ Add(scratch3, offset, Operand(offset, LSL, 1));

  // Calculate the base address of the entry.
  __ Mov(scratch, key_offset);
  __ Add(scratch, scratch, Operand(scratch3, LSL, kPointerSizeLog2));
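  // For illustration, assuming 64-bit pointers (kPointerSizeLog2 == 3): an
  // entry index of 5 gives scratch3 = 5 + (5 << 1) = 15 pointer slots, so the
  // entry base is key_offset + (15 << 3) = key_offset + 120 bytes, i.e. the
  // address of the sixth three-pointer Entry.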

  // Check that the key in the entry matches the name.
  __ Ldr(scratch2, MemOperand(scratch));
  __ Cmp(name, scratch2);
  __ B(ne, &miss);

  // Check the map matches.
  __ Ldr(scratch2, MemOperand(scratch, map_off_addr - key_off_addr));
  __ Ldr(scratch3, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ Cmp(scratch2, scratch3);
  __ B(ne, &miss);

  // Get the code entry from the cache.
  __ Ldr(scratch, MemOperand(scratch, value_off_addr - key_off_addr));

  // Check that the flags match what we're looking for.
  __ Ldr(scratch2.W(), FieldMemOperand(scratch, Code::kFlagsOffset));
  __ Bic(scratch2.W(), scratch2.W(), Code::kFlagsNotUsedInLookup);
  __ Cmp(scratch2.W(), flags);
  __ B(ne, &miss);

#ifdef DEBUG
  if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
    __ B(&miss);
  } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
    __ B(&miss);
  }
#endif

  // Jump to the first instruction in the code stub.
  __ Add(scratch, scratch, Code::kHeaderSize - kHeapObjectTag);
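  // (Code::kHeaderSize spans the Code object's header, so adding it moves to
  // the first instruction; subtracting kHeapObjectTag removes the heap object
  // tag from the pointer.)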
  __ Br(scratch);

  // Miss: fall through.
  __ Bind(&miss);
}


void StubCache::GenerateProbe(MacroAssembler* masm,
                              Code::Flags flags,
                              Register receiver,
                              Register name,
                              Register scratch,
                              Register extra,
                              Register extra2,
                              Register extra3) {
  Isolate* isolate = masm->isolate();
  Label miss;

  // Make sure the flags do not name a specific type.
  ASSERT(Code::ExtractTypeFromFlags(flags) == 0);

  // Make sure that there are no register conflicts.
  ASSERT(!AreAliased(receiver, name, scratch, extra, extra2, extra3));

  // Make sure the extra, extra2 and extra3 registers are valid.
  ASSERT(!extra.is(no_reg));
  ASSERT(!extra2.is(no_reg));
  ASSERT(!extra3.is(no_reg));

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1,
                      extra2, extra3);

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Compute the hash for primary table.
  __ Ldr(scratch, FieldMemOperand(name, Name::kHashFieldOffset));
  __ Ldr(extra, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ Add(scratch, scratch, extra);
  __ Eor(scratch, scratch, flags);
  // We shift out the last two bits because they are not part of the hash.
  __ Ubfx(scratch, scratch, kHeapObjectTagSize,
          CountTrailingZeros(kPrimaryTableSize, 64));
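  // In effect the primary index is computed as
  //   ((hash + map) ^ flags) >> kHeapObjectTagSize,
  // with the Ubfx extraction keeping only log2(kPrimaryTableSize) bits, so
  // the result is always a valid primary table index.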

  // Probe the primary table.
  ProbeTable(isolate, masm, flags, kPrimary, receiver, name,
             scratch, extra, extra2, extra3);

  // Primary miss: Compute hash for secondary table.
  __ Sub(scratch, scratch, Operand(name, LSR, kHeapObjectTagSize));
  __ Add(scratch, scratch, flags >> kHeapObjectTagSize);
  __ And(scratch, scratch, kSecondaryTableSize - 1);
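  // Equivalently, the secondary index is
  //   (primary - (name >> kHeapObjectTagSize) + (flags >> kHeapObjectTagSize))
  //   & (kSecondaryTableSize - 1).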

  // Probe the secondary table.
  ProbeTable(isolate, masm, flags, kSecondary, receiver, name,
             scratch, extra, extra2, extra3);

  // Cache miss: Fall-through and let caller handle the miss by
  // entering the runtime system.
  __ Bind(&miss);
  __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1,
                      extra2, extra3);
}


void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                       int index,
                                                       Register prototype) {
  // Load the global or builtins object from the current context.
  __ Ldr(prototype, GlobalObjectMemOperand());
  // Load the native context from the global or builtins object.
  __ Ldr(prototype,
         FieldMemOperand(prototype, GlobalObject::kNativeContextOffset));
  // Load the function from the native context.
  __ Ldr(prototype, ContextMemOperand(prototype, index));
  // Load the initial map. The global functions all have initial maps.
  __ Ldr(prototype,
         FieldMemOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the prototype from the initial map.
  __ Ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
}


void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm,
    int index,
    Register prototype,
    Label* miss) {
  Isolate* isolate = masm->isolate();
  // Get the global function with the given index.
  Handle<JSFunction> function(
      JSFunction::cast(isolate->native_context()->get(index)));

  // Check we're still in the same context.
  Register scratch = prototype;
  __ Ldr(scratch, GlobalObjectMemOperand());
  __ Ldr(scratch, FieldMemOperand(scratch, GlobalObject::kNativeContextOffset));
  __ Ldr(scratch, ContextMemOperand(scratch, index));
  __ Cmp(scratch, Operand(function));
  __ B(ne, miss);

  // Load its initial map. The global functions all have initial maps.
  __ Mov(prototype, Operand(Handle<Map>(function->initial_map())));
  // Load the prototype from the initial map.
  __ Ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
}


void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
                                            Register dst,
                                            Register src,
                                            bool inobject,
                                            int index,
                                            Representation representation) {
  ASSERT(!representation.IsDouble());
  USE(representation);
  if (inobject) {
    int offset = index * kPointerSize;
    __ Ldr(dst, FieldMemOperand(src, offset));
  } else {
    // Calculate the offset into the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    __ Ldr(dst, FieldMemOperand(src, JSObject::kPropertiesOffset));
    __ Ldr(dst, FieldMemOperand(dst, offset));
  }
}


void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
                                           Register receiver,
                                           Register scratch,
                                           Label* miss_label) {
  ASSERT(!AreAliased(receiver, scratch));

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss_label);

  // Check that the object is a JS array.
  __ JumpIfNotObjectType(receiver, scratch, scratch, JS_ARRAY_TYPE,
                         miss_label);

  // Load length directly from the JS array.
  __ Ldr(x0, FieldMemOperand(receiver, JSArray::kLengthOffset));
  __ Ret();
}


void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
                                                 Register receiver,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  // TryGetFunctionPrototype can't put the result directly in x0 because the
  // three input registers can't alias, and this function is called from
  // LoadIC::GenerateFunctionPrototype, where the receiver is x0. So we
  // explicitly move the result into x0.
  __ Mov(x0, scratch1);
  __ Ret();
}


// Generate code to check that a global property cell is empty. Create
// the property cell at compilation time if no cell exists for the
// property.
void StubCompiler::GenerateCheckPropertyCell(MacroAssembler* masm,
                                             Handle<JSGlobalObject> global,
                                             Handle<Name> name,
                                             Register scratch,
                                             Label* miss) {
  Handle<Cell> cell = JSGlobalObject::EnsurePropertyCell(global, name);
  ASSERT(cell->value()->IsTheHole());
  __ Mov(scratch, Operand(cell));
  __ Ldr(scratch, FieldMemOperand(scratch, Cell::kValueOffset));
  __ JumpIfNotRoot(scratch, Heap::kTheHoleValueRootIndex, miss);
}


void StoreStubCompiler::GenerateNegativeHolderLookup(
    MacroAssembler* masm,
    Handle<JSObject> holder,
    Register holder_reg,
    Handle<Name> name,
    Label* miss) {
  if (holder->IsJSGlobalObject()) {
    GenerateCheckPropertyCell(
        masm, Handle<JSGlobalObject>::cast(holder), name, scratch1(), miss);
  } else if (!holder->HasFastProperties() && !holder->IsJSGlobalProxy()) {
    GenerateDictionaryNegativeLookup(
        masm, miss, holder_reg, name, scratch1(), scratch2());
  }
}


// Generate StoreTransition code; the value is passed in the x0 register.
// When leaving generated code after success, the receiver_reg and storage_reg
// may be clobbered. Upon branch to miss_label, the receiver and name registers
// have their original values.
void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm,
                                                Handle<JSObject> object,
                                                LookupResult* lookup,
                                                Handle<Map> transition,
                                                Handle<Name> name,
                                                Register receiver_reg,
                                                Register storage_reg,
                                                Register value_reg,
                                                Register scratch1,
                                                Register scratch2,
                                                Register scratch3,
                                                Label* miss_label,
                                                Label* slow) {
  Label exit;

  ASSERT(!AreAliased(receiver_reg, storage_reg, value_reg,
                     scratch1, scratch2, scratch3));

  // We don't need scratch3.
  scratch3 = NoReg;

  int descriptor = transition->LastAdded();
  DescriptorArray* descriptors = transition->instance_descriptors();
  PropertyDetails details = descriptors->GetDetails(descriptor);
  Representation representation = details.representation();
  ASSERT(!representation.IsNone());

  if (details.type() == CONSTANT) {
    Handle<Object> constant(descriptors->GetValue(descriptor), masm->isolate());
    __ LoadObject(scratch1, constant);
    __ Cmp(value_reg, scratch1);
    __ B(ne, miss_label);
  } else if (representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
    HeapType* field_type = descriptors->GetFieldType(descriptor);
    HeapType::Iterator<Map> it = field_type->Classes();
    if (!it.Done()) {
      __ Ldr(scratch1, FieldMemOperand(value_reg, HeapObject::kMapOffset));
      Label do_store;
      while (true) {
        __ CompareMap(scratch1, it.Current());
        it.Advance();
        if (it.Done()) {
          __ B(ne, miss_label);
          break;
        }
        __ B(eq, &do_store);
      }
      __ Bind(&do_store);
    }
  } else if (representation.IsDouble()) {
    UseScratchRegisterScope temps(masm);
    DoubleRegister temp_double = temps.AcquireD();
    __ SmiUntagToDouble(temp_double, value_reg, kSpeculativeUntag);
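    // The untag is speculative: value_reg may hold a heap number rather than
    // a smi, in which case temp_double is simply overwritten by the load in
    // the heap number path below.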

    Label do_store;
    __ JumpIfSmi(value_reg, &do_store);

    __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex,
                miss_label, DONT_DO_SMI_CHECK);
    __ Ldr(temp_double, FieldMemOperand(value_reg, HeapNumber::kValueOffset));

    __ Bind(&do_store);
    __ AllocateHeapNumber(storage_reg, slow, scratch1, scratch2, temp_double);
  }

  // Stub never generated for non-global objects that require access checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Perform map transition for the receiver if necessary.
  if ((details.type() == FIELD) &&
      (object->map()->unused_property_fields() == 0)) {
    // The properties must be extended before we can store the value.
    // We jump to a runtime call that extends the properties array.
    __ Mov(scratch1, Operand(transition));
    __ Push(receiver_reg, scratch1, value_reg);
    __ TailCallExternalReference(
        ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
                          masm->isolate()),
        3,
        1);
    return;
  }

  // Update the map of the object.
  __ Mov(scratch1, Operand(transition));
  __ Str(scratch1, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));

  // Update the write barrier for the map field.
  __ RecordWriteField(receiver_reg,
                      HeapObject::kMapOffset,
                      scratch1,
                      scratch2,
                      kLRHasNotBeenSaved,
                      kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);

  if (details.type() == CONSTANT) {
    ASSERT(value_reg.is(x0));
    __ Ret();
    return;
  }

  int index = transition->instance_descriptors()->GetFieldIndex(
      transition->LastAdded());

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties are not going to change.
  index -= object->map()->inobject_properties();
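  // After this adjustment, a negative index denotes an in-object slot
  // (addressed relative to the instance size) and a non-negative index
  // denotes a slot in the out-of-object properties array; both cases are
  // handled below.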

  // TODO(verwaest): Share this code as a code stub.
  SmiCheck smi_check = representation.IsTagged()
      ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
  Register prop_reg = representation.IsDouble() ? storage_reg : value_reg;
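  // For double fields the value to store is the freshly allocated HeapNumber
  // in storage_reg; for everything else it is value_reg itself.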
  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    __ Str(prop_reg, FieldMemOperand(receiver_reg, offset));

    if (!representation.IsSmi()) {
      // Update the write barrier for the array address.
      if (!representation.IsDouble()) {
        __ Mov(storage_reg, value_reg);
      }
      __ RecordWriteField(receiver_reg,
                          offset,
                          storage_reg,
                          scratch1,
                          kLRHasNotBeenSaved,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array.
    __ Ldr(scratch1,
           FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
    __ Str(prop_reg, FieldMemOperand(scratch1, offset));

    if (!representation.IsSmi()) {
      // Update the write barrier for the array address.
      if (!representation.IsDouble()) {
        __ Mov(storage_reg, value_reg);
      }
      __ RecordWriteField(scratch1,
                          offset,
                          storage_reg,
                          receiver_reg,
                          kLRHasNotBeenSaved,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  }

  __ Bind(&exit);
  // Return the value (register x0).
  ASSERT(value_reg.is(x0));
  __ Ret();
}


// Generate StoreField code; the value is passed in the x0 register.
// When leaving generated code after success, the receiver_reg and name_reg may
// be clobbered. Upon branch to miss_label, the receiver and name registers have
// their original values.
void StoreStubCompiler::GenerateStoreField(MacroAssembler* masm,
                                           Handle<JSObject> object,
                                           LookupResult* lookup,
                                           Register receiver_reg,
                                           Register name_reg,
                                           Register value_reg,
                                           Register scratch1,
                                           Register scratch2,
                                           Label* miss_label) {
  // x0 : value
  Label exit;

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  int index = lookup->GetFieldIndex().field_index();

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties are not going to change.
  index -= object->map()->inobject_properties();
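  // As in GenerateStoreTransition, a negative index selects an in-object
  // slot and a non-negative index selects a slot in the properties array.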

  Representation representation = lookup->representation();
  ASSERT(!representation.IsNone());
  if (representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
    HeapType* field_type = lookup->GetFieldType();
    HeapType::Iterator<Map> it = field_type->Classes();
    if (!it.Done()) {
      __ Ldr(scratch1, FieldMemOperand(value_reg, HeapObject::kMapOffset));
      Label do_store;
      while (true) {
        __ CompareMap(scratch1, it.Current());
        it.Advance();
        if (it.Done()) {
          __ B(ne, miss_label);
          break;
        }
        __ B(eq, &do_store);
      }
      __ Bind(&do_store);
    }
  } else if (representation.IsDouble()) {
    UseScratchRegisterScope temps(masm);
    DoubleRegister temp_double = temps.AcquireD();

    __ SmiUntagToDouble(temp_double, value_reg, kSpeculativeUntag);
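    // As in GenerateStoreTransition, the untag is speculative; if value_reg
    // turns out to hold a heap number, temp_double is overwritten below.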

    // Load the double storage.
    if (index < 0) {
      int offset = (index * kPointerSize) + object->map()->instance_size();
      __ Ldr(scratch1, FieldMemOperand(receiver_reg, offset));
    } else {
      int offset = (index * kPointerSize) + FixedArray::kHeaderSize;
      __ Ldr(scratch1,
             FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
      __ Ldr(scratch1, FieldMemOperand(scratch1, offset));
    }

    // Store the value into the storage.
    Label do_store, heap_number;

    __ JumpIfSmi(value_reg, &do_store);

    __ CheckMap(value_reg, scratch2, Heap::kHeapNumberMapRootIndex,
                miss_label, DONT_DO_SMI_CHECK);
    __ Ldr(temp_double, FieldMemOperand(value_reg, HeapNumber::kValueOffset));

    __ Bind(&do_store);
    __ Str(temp_double, FieldMemOperand(scratch1, HeapNumber::kValueOffset));

    // Return the value (register x0).
    ASSERT(value_reg.is(x0));
    __ Ret();
    return;
  }

  // TODO(verwaest): Share this code as a code stub.
  SmiCheck smi_check = representation.IsTagged()
      ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    __ Str(value_reg, FieldMemOperand(receiver_reg, offset));

    if (!representation.IsSmi()) {
      // Skip updating write barrier if storing a smi.
      __ JumpIfSmi(value_reg, &exit);

      // Update the write barrier for the array address.
      // Pass the now unused name_reg as a scratch register.
      __ Mov(name_reg, value_reg);
      __ RecordWriteField(receiver_reg,
                          offset,
                          name_reg,
                          scratch1,
                          kLRHasNotBeenSaved,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array.
    __ Ldr(scratch1,
           FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
    __ Str(value_reg, FieldMemOperand(scratch1, offset));

    if (!representation.IsSmi()) {
      // Skip updating write barrier if storing a smi.
      __ JumpIfSmi(value_reg, &exit);

      // Update the write barrier for the array address.
      // Ok to clobber receiver_reg and name_reg, since we return.
      __ Mov(name_reg, value_reg);
      __ RecordWriteField(scratch1,
                          offset,
                          name_reg,
                          receiver_reg,
                          kLRHasNotBeenSaved,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  }

  __ Bind(&exit);
  // Return the value (register x0).
  ASSERT(value_reg.is(x0));
  __ Ret();
}


void StoreStubCompiler::GenerateRestoreName(MacroAssembler* masm,
                                            Label* label,
                                            Handle<Name> name) {
  if (!label->is_unused()) {
    __ Bind(label);
    __ Mov(this->name(), Operand(name));
  }
}


static void PushInterceptorArguments(MacroAssembler* masm,
                                     Register receiver,
                                     Register holder,
                                     Register name,
                                     Handle<JSObject> holder_obj) {
  STATIC_ASSERT(StubCache::kInterceptorArgsNameIndex == 0);
  STATIC_ASSERT(StubCache::kInterceptorArgsInfoIndex == 1);
  STATIC_ASSERT(StubCache::kInterceptorArgsThisIndex == 2);
  STATIC_ASSERT(StubCache::kInterceptorArgsHolderIndex == 3);
  STATIC_ASSERT(StubCache::kInterceptorArgsLength == 4);

  __ Push(name);
  Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
  ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor));
  Register scratch = name;
  __ Mov(scratch, Operand(interceptor));
  __ Push(scratch, receiver, holder);
}


static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm,
    Register receiver,
    Register holder,
    Register name,
    Handle<JSObject> holder_obj,
    IC::UtilityId id) {
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);

  __ CallExternalReference(
      ExternalReference(IC_Utility(id), masm->isolate()),
      StubCache::kInterceptorArgsLength);
}


// Generate call to api function.
void StubCompiler::GenerateFastApiCall(MacroAssembler* masm,
                                       const CallOptimization& optimization,
                                       Handle<Map> receiver_map,
                                       Register receiver,
                                       Register scratch,
                                       bool is_store,
                                       int argc,
                                       Register* values) {
  ASSERT(!AreAliased(receiver, scratch));

  MacroAssembler::PushPopQueue queue(masm);
  queue.Queue(receiver);
  // Write the arguments to the stack frame.
  for (int i = 0; i < argc; i++) {
    Register arg = values[argc - 1 - i];
    ASSERT(!AreAliased(receiver, scratch, arg));
    queue.Queue(arg);
  }
  queue.PushQueued();

  ASSERT(optimization.is_simple_api_call());

  // Abi for CallApiFunctionStub.
  Register callee = x0;
  Register call_data = x4;
  Register holder = x2;
  Register api_function_address = x1;

  // Put holder in place.
  CallOptimization::HolderLookup holder_lookup;
  Handle<JSObject> api_holder =
      optimization.LookupHolderOfExpectedType(receiver_map, &holder_lookup);
  switch (holder_lookup) {
    case CallOptimization::kHolderIsReceiver:
      __ Mov(holder, receiver);
      break;
    case CallOptimization::kHolderFound:
      __ LoadObject(holder, api_holder);
      break;
    case CallOptimization::kHolderNotFound:
      UNREACHABLE();
      break;
  }

  Isolate* isolate = masm->isolate();
  Handle<JSFunction> function = optimization.constant_function();
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  Handle<Object> call_data_obj(api_call_info->data(), isolate);

  // Put callee in place.
  __ LoadObject(callee, function);

  bool call_data_undefined = false;
  // Put call_data in place.
  if (isolate->heap()->InNewSpace(*call_data_obj)) {
    __ LoadObject(call_data, api_call_info);
    __ Ldr(call_data, FieldMemOperand(call_data, CallHandlerInfo::kDataOffset));
  } else if (call_data_obj->IsUndefined()) {
    call_data_undefined = true;
    __ LoadRoot(call_data, Heap::kUndefinedValueRootIndex);
  } else {
    __ LoadObject(call_data, call_data_obj);
  }

  // Put api_function_address in place.
  Address function_address = v8::ToCData<Address>(api_call_info->callback());
  ApiFunction fun(function_address);
  ExternalReference ref = ExternalReference(&fun,
                                            ExternalReference::DIRECT_API_CALL,
                                            masm->isolate());
  __ Mov(api_function_address, ref);

  // Jump to stub.
  CallApiFunctionStub stub(isolate, is_store, call_data_undefined, argc);
  __ TailCallStub(&stub);
}


void StubCompiler::GenerateTailCall(MacroAssembler* masm, Handle<Code> code) {
  __ Jump(code, RelocInfo::CODE_TARGET);
}


#undef __
#define __ ACCESS_MASM(masm())


Register StubCompiler::CheckPrototypes(Handle<HeapType> type,
                                       Register object_reg,
                                       Handle<JSObject> holder,
                                       Register holder_reg,
                                       Register scratch1,
                                       Register scratch2,
                                       Handle<Name> name,
                                       Label* miss,
                                       PrototypeCheckType check) {
  Handle<Map> receiver_map(IC::TypeToMap(*type, isolate()));

  // object_reg and holder_reg registers can alias.
  ASSERT(!AreAliased(object_reg, scratch1, scratch2));
  ASSERT(!AreAliased(holder_reg, scratch1, scratch2));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 0;

  Handle<JSObject> current = Handle<JSObject>::null();
  if (type->IsConstant()) {
    current = Handle<JSObject>::cast(type->AsConstant()->Value());
  }
  Handle<JSObject> prototype = Handle<JSObject>::null();
  Handle<Map> current_map = receiver_map;
  Handle<Map> holder_map(holder->map());
  // Traverse the prototype chain and check the maps in the prototype chain for
  // fast and global objects or do negative lookup for normal objects.
  while (!current_map.is_identical_to(holder_map)) {
    ++depth;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(current_map->IsJSGlobalProxyMap() ||
           !current_map->is_access_check_needed());

    prototype = handle(JSObject::cast(current_map->prototype()));
    if (current_map->is_dictionary_map() &&
        !current_map->IsJSGlobalObjectMap() &&
        !current_map->IsJSGlobalProxyMap()) {
      if (!name->IsUniqueName()) {
        ASSERT(name->IsString());
        name = factory()->InternalizeString(Handle<String>::cast(name));
      }
      ASSERT(current.is_null() ||
             (current->property_dictionary()->FindEntry(name) ==
              NameDictionary::kNotFound));

      GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
                                       scratch1, scratch2);

      __ Ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
      reg = holder_reg;  // From now on the object will be in holder_reg.
      __ Ldr(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
    } else {
      bool need_map = (depth != 1 || check == CHECK_ALL_MAPS) ||
                      heap()->InNewSpace(*prototype);
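      // The map is only needed when it has to be checked (every iteration
      // except possibly the first) or when the prototype must be loaded from
      // it because the prototype object lives in new space.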
      Register map_reg = NoReg;
      if (need_map) {
        map_reg = scratch1;
        __ Ldr(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset));
      }

      if (depth != 1 || check == CHECK_ALL_MAPS) {
        __ CheckMap(map_reg, current_map, miss, DONT_DO_SMI_CHECK);
      }

      // Check access rights to the global object.  This has to happen after
      // the map check so that we know that the object is actually a global
      // object.
      if (current_map->IsJSGlobalProxyMap()) {
        UseScratchRegisterScope temps(masm());
        __ CheckAccessGlobalProxy(reg, scratch2, temps.AcquireX(), miss);
      } else if (current_map->IsJSGlobalObjectMap()) {
        GenerateCheckPropertyCell(
            masm(), Handle<JSGlobalObject>::cast(current), name,
            scratch2, miss);
      }

      reg = holder_reg;  // From now on the object will be in holder_reg.

      if (heap()->InNewSpace(*prototype)) {
        // The prototype is in new space; we cannot store a reference to it
        // in the code.  Load it from the map.
        __ Ldr(reg, FieldMemOperand(map_reg, Map::kPrototypeOffset));
      } else {
        // The prototype is in old space; load it directly.
        __ Mov(reg, Operand(prototype));
      }
    }

    // Go to the next object in the prototype chain.
    current = prototype;
    current_map = handle(current->map());
  }

  // Log the check depth.
  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));

  // Check the holder map.
  if (depth != 0 || check == CHECK_ALL_MAPS) {
    __ CheckMap(reg, scratch1, current_map, miss, DONT_DO_SMI_CHECK);
  }

  // Perform security check for access to the global object.
  ASSERT(current_map->IsJSGlobalProxyMap() ||
         !current_map->is_access_check_needed());
  if (current_map->IsJSGlobalProxyMap()) {
    __ CheckAccessGlobalProxy(reg, scratch1, scratch2, miss);
  }

  // Return the register containing the holder.
  return reg;
}


void LoadStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ B(&success);

    __ Bind(miss);
    TailCallBuiltin(masm(), MissBuiltin(kind()));

    __ Bind(&success);
  }
}


void StoreStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ B(&success);

    GenerateRestoreName(masm(), miss, name);
    TailCallBuiltin(masm(), MissBuiltin(kind()));

    __ Bind(&success);
  }
}


Register LoadStubCompiler::CallbackHandlerFrontend(Handle<HeapType> type,
                                                   Register object_reg,
                                                   Handle<JSObject> holder,
                                                   Handle<Name> name,
                                                   Handle<Object> callback) {
  Label miss;

  Register reg = HandlerFrontendHeader(type, object_reg, holder, name, &miss);
  // HandlerFrontendHeader can return its result into scratch1() so do not
  // use it.
  Register scratch2 = this->scratch2();
  Register scratch3 = this->scratch3();
  Register dictionary = this->scratch4();
  ASSERT(!AreAliased(reg, scratch2, scratch3, dictionary));

  if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) {
    // Load the properties dictionary.
    __ Ldr(dictionary, FieldMemOperand(reg, JSObject::kPropertiesOffset));

    // Probe the dictionary.
    Label probe_done;
    NameDictionaryLookupStub::GeneratePositiveLookup(masm(),
                                                     &miss,
                                                     &probe_done,
                                                     dictionary,
                                                     this->name(),
                                                     scratch2,
                                                     scratch3);
    __ Bind(&probe_done);

    // If probing finds an entry in the dictionary, scratch3 contains the
    // pointer into the dictionary. Check that the value is the callback.
    Register pointer = scratch3;
    const int kElementsStartOffset = NameDictionary::kHeaderSize +
        NameDictionary::kElementsStartIndex * kPointerSize;
    const int kValueOffset = kElementsStartOffset + kPointerSize;
    __ Ldr(scratch2, FieldMemOperand(pointer, kValueOffset));
    __ Cmp(scratch2, Operand(callback));
    __ B(ne, &miss);
  }

  HandlerFrontendFooter(name, &miss);
  return reg;
}


void LoadStubCompiler::GenerateLoadField(Register reg,
                                         Handle<JSObject> holder,
                                         PropertyIndex field,
                                         Representation representation) {
  __ Mov(receiver(), reg);
  if (kind() == Code::LOAD_IC) {
    LoadFieldStub stub(isolate(),
                       field.is_inobject(holder),
                       field.translate(holder),
                       representation);
    GenerateTailCall(masm(), stub.GetCode());
  } else {
    KeyedLoadFieldStub stub(isolate(),
                            field.is_inobject(holder),
                            field.translate(holder),
                            representation);
    GenerateTailCall(masm(), stub.GetCode());
  }
}


void LoadStubCompiler::GenerateLoadConstant(Handle<Object> value) {
  // Return the constant value.
  __ LoadObject(x0, value);
  __ Ret();
}


void LoadStubCompiler::GenerateLoadCallback(
    Register reg,
    Handle<ExecutableAccessorInfo> callback) {
  ASSERT(!AreAliased(scratch2(), scratch3(), scratch4(), reg));

  // Build the ExecutableAccessorInfo::args_ list on the stack and push the
  // property name below the exit frame, so that the GC is aware of them and
  // pointers to them can be stored.
  STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0);
  STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3);
  STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4);
  STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5);
  STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 6);

  __ Push(receiver());

  if (heap()->InNewSpace(callback->data())) {
    __ Mov(scratch3(), Operand(callback));
    __ Ldr(scratch3(), FieldMemOperand(scratch3(),
                                       ExecutableAccessorInfo::kDataOffset));
  } else {
    __ Mov(scratch3(), Operand(Handle<Object>(callback->data(), isolate())));
  }
  __ LoadRoot(scratch4(), Heap::kUndefinedValueRootIndex);
  __ Mov(scratch2(), Operand(ExternalReference::isolate_address(isolate())));
  __ Push(scratch3(), scratch4(), scratch4(), scratch2(), reg, name());

  Register args_addr = scratch2();
  __ Add(args_addr, __ StackPointer(), kPointerSize);

  // Stack at this point:
  //              sp[40] callback data
  //              sp[32] undefined
  //              sp[24] undefined
  //              sp[16] isolate
  // args_addr -> sp[8]  reg
  //              sp[0]  name

  // Abi for CallApiGetter.
  Register getter_address_reg = x2;

  // Set up the call.
  Address getter_address = v8::ToCData<Address>(callback->getter());
  ApiFunction fun(getter_address);
  ExternalReference::Type type = ExternalReference::DIRECT_GETTER_CALL;
  ExternalReference ref = ExternalReference(&fun, type, isolate());
  __ Mov(getter_address_reg, ref);

  CallApiGetterStub stub(isolate());
  __ TailCallStub(&stub);
}


void LoadStubCompiler::GenerateLoadInterceptor(
    Register holder_reg,
    Handle<Object> object,
    Handle<JSObject> interceptor_holder,
    LookupResult* lookup,
    Handle<Name> name) {
  ASSERT(!AreAliased(receiver(), this->name(),
                     scratch1(), scratch2(), scratch3()));
  ASSERT(interceptor_holder->HasNamedInterceptor());
  ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());

  // So far the most popular follow-ups for interceptor loads are FIELD and
  // CALLBACKS, so inline only those; other cases may be added later.
  bool compile_followup_inline = false;
  if (lookup->IsFound() && lookup->IsCacheable()) {
    if (lookup->IsField()) {
      compile_followup_inline = true;
    } else if (lookup->type() == CALLBACKS &&
               lookup->GetCallbackObject()->IsExecutableAccessorInfo()) {
      ExecutableAccessorInfo* callback =
          ExecutableAccessorInfo::cast(lookup->GetCallbackObject());
      compile_followup_inline = callback->getter() != NULL &&
          callback->IsCompatibleReceiver(*object);
    }
  }

  if (compile_followup_inline) {
    // Compile the interceptor call, followed by inline code to load the
    // property from further up the prototype chain if the call fails.
    // Check that the maps haven't changed.
    ASSERT(holder_reg.is(receiver()) || holder_reg.is(scratch1()));

    // Preserve the receiver register explicitly whenever it is different from
    // the holder and it is needed should the interceptor return without any
    // result. The CALLBACKS case needs the receiver to be passed into C++
    // code; the FIELD case might cause a miss during the prototype check.
    bool must_perform_prototype_check = *interceptor_holder != lookup->holder();
    bool must_preserve_receiver_reg = !receiver().Is(holder_reg) &&
        (lookup->type() == CALLBACKS || must_perform_prototype_check);

    // Save necessary data before invoking an interceptor.
    // Requires a frame to make GC aware of pushed pointers.
    {
      FrameScope frame_scope(masm(), StackFrame::INTERNAL);
      if (must_preserve_receiver_reg) {
        __ Push(receiver(), holder_reg, this->name());
      } else {
        __ Push(holder_reg, this->name());
      }
      // Invoke an interceptor.  Note: map checks from the receiver to the
      // interceptor's holder have been compiled before (see the caller of
      // this method).
      CompileCallLoadPropertyWithInterceptor(
          masm(), receiver(), holder_reg, this->name(), interceptor_holder,
          IC::kLoadPropertyWithInterceptorOnly);

      // Check if the interceptor provided a value for the property. If so,
      // return immediately.
      Label interceptor_failed;
      __ JumpIfRoot(x0,
                    Heap::kNoInterceptorResultSentinelRootIndex,
                    &interceptor_failed);
      frame_scope.GenerateLeaveFrame();
      __ Ret();

      __ Bind(&interceptor_failed);
      if (must_preserve_receiver_reg) {
        __ Pop(this->name(), holder_reg, receiver());
      } else {
        __ Pop(this->name(), holder_reg);
      }
      // Leave the internal frame.
    }
    GenerateLoadPostInterceptor(holder_reg, interceptor_holder, name, lookup);
  } else {  // !compile_followup_inline
    // Call the runtime system to load the interceptor.
    // Check that the maps haven't changed.
    PushInterceptorArguments(
        masm(), receiver(), holder_reg, this->name(), interceptor_holder);

    ExternalReference ref =
        ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad),
                          isolate());
    __ TailCallExternalReference(ref, StubCache::kInterceptorArgsLength, 1);
  }
}


void StubCompiler::GenerateBooleanCheck(Register object, Label* miss) {
  UseScratchRegisterScope temps(masm());
  // Check that the object is a boolean.
  Register true_root = temps.AcquireX();
  Register false_root = temps.AcquireX();
  ASSERT(!AreAliased(object, true_root, false_root));
  __ LoadTrueFalseRoots(true_root, false_root);
  __ Cmp(object, true_root);
  __ Ccmp(object, false_root, ZFlag, ne);
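  // If object was not equal to true_root, the Ccmp compares it against
  // false_root; otherwise it forces the Z flag (equal). The branch below is
  // thus taken only when object is neither of the two boolean values.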
  __ B(ne, miss);
}


Handle<Code> StoreStubCompiler::CompileStoreCallback(
    Handle<JSObject> object,
    Handle<JSObject> holder,
    Handle<Name> name,
    Handle<ExecutableAccessorInfo> callback) {
  ASM_LOCATION("StoreStubCompiler::CompileStoreCallback");
  Register holder_reg = HandlerFrontend(
      IC::CurrentTypeOf(object, isolate()), receiver(), holder, name);

  // Stub never generated for non-global objects that require access checks.
  ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());

  // receiver() and holder_reg can alias.
  ASSERT(!AreAliased(receiver(), scratch1(), scratch2(), value()));
  ASSERT(!AreAliased(holder_reg, scratch1(), scratch2(), value()));
  __ Mov(scratch1(), Operand(callback));
  __ Mov(scratch2(), Operand(name));
  __ Push(receiver(), holder_reg, scratch1(), scratch2(), value());

  // Do tail-call to the runtime system.
  ExternalReference store_callback_property =
      ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
  __ TailCallExternalReference(store_callback_property, 5, 1);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}


#undef __
#define __ ACCESS_MASM(masm)


void StoreStubCompiler::GenerateStoreViaSetter(
    MacroAssembler* masm,
    Handle<HeapType> type,
    Register receiver,
    Handle<JSFunction> setter) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Save value register, so we can restore it later.
    __ Push(value());

    if (!setter.is_null()) {
      // Call the JavaScript setter with receiver and value on the stack.
      if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ Ldr(receiver,
               FieldMemOperand(
                   receiver, JSGlobalObject::kGlobalReceiverOffset));
      }
      __ Push(receiver, value());
      ParameterCount actual(1);
      ParameterCount expected(setter);
      __ InvokeFunction(setter, expected, actual,
                        CALL_FUNCTION, NullCallWrapper());
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
    }

    // We have to return the passed value, not the return value of the setter.
    __ Pop(x0);

    // Restore context register.
    __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ Ret();
}


#undef __
#define __ ACCESS_MASM(masm())


Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
    Handle<JSObject> object,
    Handle<Name> name) {
  Label miss;

  ASM_LOCATION("StoreStubCompiler::CompileStoreInterceptor");

  __ Push(receiver(), this->name(), value());

  // Do tail-call to the runtime system.
  ExternalReference store_ic_property =
      ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate());
  __ TailCallExternalReference(store_ic_property, 3, 1);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}


Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<HeapType> type,
                                                      Handle<JSObject> last,
                                                      Handle<Name> name) {
  NonexistentHandlerFrontend(type, last, name);

  // Return undefined if maps of the full prototype chain are still the
  // same and no global property with this name contains a value.
  __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
  __ Ret();

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}


// TODO(all): The so-called scratch registers are significant in some cases. For
// example, KeyedStoreStubCompiler::registers()[3] (x3) is actually used for
// KeyedStoreCompiler::transition_map(). We should verify which registers are
// actually scratch registers, and which are important. For now, we use the same
// assignments as ARM to remain on the safe side.

Register* LoadStubCompiler::registers() {
  // receiver, name, scratch1, scratch2, scratch3, scratch4.
  static Register registers[] = { x0, x2, x3, x1, x4, x5 };
  return registers;
}


Register* KeyedLoadStubCompiler::registers() {
  // receiver, name/key, scratch1, scratch2, scratch3, scratch4.
  static Register registers[] = { x1, x0, x2, x3, x4, x5 };
  return registers;
}


Register StoreStubCompiler::value() {
  return x0;
}


Register* StoreStubCompiler::registers() {
  // receiver, value, scratch1, scratch2, scratch3.
  static Register registers[] = { x1, x2, x3, x4, x5 };
  return registers;
}


Register* KeyedStoreStubCompiler::registers() {
  // receiver, name, scratch1, scratch2, scratch3.
  static Register registers[] = { x2, x1, x3, x4, x5 };
  return registers;
}


#undef __
#define __ ACCESS_MASM(masm)

void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm,
                                             Handle<HeapType> type,
                                             Register receiver,
                                             Handle<JSFunction> getter) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    if (!getter.is_null()) {
      // Call the JavaScript getter with the receiver on the stack.
      if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ Ldr(receiver,
               FieldMemOperand(
                   receiver, JSGlobalObject::kGlobalReceiverOffset));
      }
      __ Push(receiver);
      ParameterCount actual(0);
      ParameterCount expected(getter);
      __ InvokeFunction(getter, expected, actual,
                        CALL_FUNCTION, NullCallWrapper());
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context register.
    __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ Ret();
}


#undef __
#define __ ACCESS_MASM(masm())


Handle<Code> LoadStubCompiler::CompileLoadGlobal(
    Handle<HeapType> type,
    Handle<GlobalObject> global,
    Handle<PropertyCell> cell,
    Handle<Name> name,
    bool is_dont_delete) {
  Label miss;
  HandlerFrontendHeader(type, receiver(), global, name, &miss);

  // Get the value from the cell.
  __ Mov(x3, Operand(cell));
  __ Ldr(x4, FieldMemOperand(x3, Cell::kValueOffset));

  // Check for deleted property if property can actually be deleted.
  if (!is_dont_delete) {
    __ JumpIfRoot(x4, Heap::kTheHoleValueRootIndex, &miss);
  }

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->named_load_global_stub(), 1, x1, x3);
  __ Mov(x0, x4);
  __ Ret();

  HandlerFrontendFooter(name, &miss);

  // Return the generated code.
  return GetCode(kind(), Code::NORMAL, name);
}


Handle<Code> BaseLoadStoreStubCompiler::CompilePolymorphicIC(
    TypeHandleList* types,
    CodeHandleList* handlers,
    Handle<Name> name,
    Code::StubType type,
    IcCheckType check) {
  Label miss;

  if (check == PROPERTY &&
      (kind() == Code::KEYED_LOAD_IC || kind() == Code::KEYED_STORE_IC)) {
    __ CompareAndBranch(this->name(), Operand(name), ne, &miss);
  }

  Label number_case;
  Label* smi_target = IncludesNumberType(types) ? &number_case : &miss;
  __ JumpIfSmi(receiver(), smi_target);

  Register map_reg = scratch1();
  __ Ldr(map_reg, FieldMemOperand(receiver(), HeapObject::kMapOffset));
  int receiver_count = types->length();
  int number_of_handled_maps = 0;
  for (int current = 0; current < receiver_count; ++current) {
    Handle<HeapType> type = types->at(current);
    Handle<Map> map = IC::TypeToMap(*type, isolate());
    if (!map->is_deprecated()) {
      number_of_handled_maps++;
      Label try_next;
      __ Cmp(map_reg, Operand(map));
      __ B(ne, &try_next);
      if (type->Is(HeapType::Number())) {
        ASSERT(!number_case.is_unused());
        __ Bind(&number_case);
      }
      __ Jump(handlers->at(current), RelocInfo::CODE_TARGET);
      __ Bind(&try_next);
    }
  }
  ASSERT(number_of_handled_maps != 0);

  __ Bind(&miss);
  TailCallBuiltin(masm(), MissBuiltin(kind()));

  // Return the generated code.
  InlineCacheState state =
      (number_of_handled_maps > 1) ? POLYMORPHIC : MONOMORPHIC;
  return GetICCode(kind(), type, name, state);
}


void StoreStubCompiler::GenerateStoreArrayLength() {
  // Prepare tail call to StoreIC_ArrayLength.
  __ Push(receiver(), value());

  ExternalReference ref =
      ExternalReference(IC_Utility(IC::kStoreIC_ArrayLength),
                        masm()->isolate());
  __ TailCallExternalReference(ref, 2, 1);
}


Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
    MapHandleList* receiver_maps,
    CodeHandleList* handler_stubs,
    MapHandleList* transitioned_maps) {
  Label miss;

  ASM_LOCATION("KeyedStoreStubCompiler::CompileStorePolymorphic");

  __ JumpIfSmi(receiver(), &miss);

  int receiver_count = receiver_maps->length();
  __ Ldr(scratch1(), FieldMemOperand(receiver(), HeapObject::kMapOffset));
  for (int i = 0; i < receiver_count; i++) {
    __ Cmp(scratch1(), Operand(receiver_maps->at(i)));

    Label skip;
    __ B(&skip, ne);
    if (!transitioned_maps->at(i).is_null()) {
      // This argument is used by the handler stub. For example, see
      // ElementsTransitionGenerator::GenerateMapChangeElementsTransition.
      __ Mov(transition_map(), Operand(transitioned_maps->at(i)));
    }
    __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET);
    __ Bind(&skip);
  }

  __ Bind(&miss);
  TailCallBuiltin(masm(), MissBuiltin(kind()));

  return GetICCode(
      kind(), Code::NORMAL, factory()->empty_string(), POLYMORPHIC);
}


#undef __
#define __ ACCESS_MASM(masm)

void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
    MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- x0     : key
  //  -- x1     : receiver
  // -----------------------------------
  Label slow, miss;

  Register result = x0;
  Register key = x0;
  Register receiver = x1;

  __ JumpIfNotSmi(key, &miss);
  __ Ldr(x4, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ LoadFromNumberDictionary(&slow, x4, key, result, x2, x3, x5, x6);
  __ Ret();

  __ Bind(&slow);
  __ IncrementCounter(
      masm->isolate()->counters()->keyed_load_external_array_slow(), 1, x2, x3);
  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Slow);

  // Miss case, call the runtime.
  __ Bind(&miss);
  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss);
}


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM64