// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if V8_TARGET_ARCH_X64

#include "arguments.h"
#include "ic-inl.h"
#include "codegen.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)


static void ProbeTable(Isolate* isolate,
                       MacroAssembler* masm,
                       Code::Flags flags,
                       StubCache::Table table,
                       Register receiver,
                       Register name,
                       // The offset is scaled by 4, based on
                       // kHeapObjectTagSize, which is two bits
                       Register offset) {
  // We need to scale up the pointer by 2 because the offset is scaled by less
  // than the pointer size.
  ASSERT(kPointerSizeLog2 == kHeapObjectTagSize + 1);
  ScaleFactor scale_factor = times_2;

  ASSERT_EQ(3 * kPointerSize, sizeof(StubCache::Entry));
  // The offset register holds the entry offset times four (due to masking
  // and shifting optimizations).
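  // Entry-address arithmetic, as an illustrative sketch (not part of the
  // original code): the incoming offset is index << kHeapObjectTagSize,
  // i.e. index * 4.  The lea below turns that into index * 12, and the
  // times_2 scale factor in the memory operands doubles it again, giving
  // index * 24 == index * sizeof(StubCache::Entry).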
  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
  ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
  Label miss;

  // Multiply by 3 because there are 3 fields per entry (name, code, map).
  __ lea(offset, Operand(offset, offset, times_2, 0));

  __ LoadAddress(kScratchRegister, key_offset);

  // Check that the key in the entry matches the name.
  // The offset register already holds the entry offset times twelve after
  // the lea above; the times_2 scale factor below completes the scaling to
  // the 24-byte entry size.
  __ cmpl(name, Operand(kScratchRegister, offset, scale_factor, 0));
  __ j(not_equal, &miss);

  // Get the map entry from the cache.
  // Use key_offset + kPointerSize * 2, rather than loading map_offset.
  __ movp(kScratchRegister,
          Operand(kScratchRegister, offset, scale_factor, kPointerSize * 2));
  __ cmpq(kScratchRegister, FieldOperand(receiver, HeapObject::kMapOffset));
  __ j(not_equal, &miss);

  // Get the code entry from the cache.
  __ LoadAddress(kScratchRegister, value_offset);
  __ movp(kScratchRegister,
          Operand(kScratchRegister, offset, scale_factor, 0));

  // Check that the flags match what we're looking for.
  __ movl(offset, FieldOperand(kScratchRegister, Code::kFlagsOffset));
  __ and_(offset, Immediate(~Code::kFlagsNotUsedInLookup));
  __ cmpl(offset, Immediate(flags));
  __ j(not_equal, &miss);

#ifdef DEBUG
  if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
    __ jmp(&miss);
  } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
    __ jmp(&miss);
  }
#endif

  // Jump to the first instruction in the code stub.
  __ addq(kScratchRegister, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ jmp(kScratchRegister);

  __ bind(&miss);
}


void StubCompiler::GenerateDictionaryNegativeLookup(MacroAssembler* masm,
                                                    Label* miss_label,
                                                    Register receiver,
                                                    Handle<Name> name,
                                                    Register scratch0,
                                                    Register scratch1) {
  ASSERT(name->IsUniqueName());
  ASSERT(!receiver.is(scratch0));
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1);
  __ IncrementCounter(counters->negative_lookups_miss(), 1);

  __ movp(scratch0, FieldOperand(receiver, HeapObject::kMapOffset));

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access checks.
  __ testb(FieldOperand(scratch0, Map::kBitFieldOffset),
           Immediate(kInterceptorOrAccessCheckNeededMask));
  __ j(not_zero, miss_label);

  // Check that the receiver is a JSObject.
  __ CmpInstanceType(scratch0, FIRST_SPEC_OBJECT_TYPE);
  __ j(below, miss_label);

  // Load the properties array.
  Register properties = scratch0;
  __ movp(properties, FieldOperand(receiver, JSObject::kPropertiesOffset));

  // Check that the properties array is a dictionary.
  __ CompareRoot(FieldOperand(properties, HeapObject::kMapOffset),
                 Heap::kHashTableMapRootIndex);
  __ j(not_equal, miss_label);

  Label done;
  NameDictionaryLookupStub::GenerateNegativeLookup(masm,
                                                   miss_label,
                                                   &done,
                                                   properties,
                                                   name,
                                                   scratch1);
  __ bind(&done);
  __ DecrementCounter(counters->negative_lookups_miss(), 1);
}


void StubCache::GenerateProbe(MacroAssembler* masm,
                              Code::Flags flags,
                              Register receiver,
                              Register name,
                              Register scratch,
                              Register extra,
                              Register extra2,
                              Register extra3) {
  Isolate* isolate = masm->isolate();
  Label miss;
  USE(extra);   // The register extra is not used on the X64 platform.
  USE(extra2);  // The register extra2 is not used on the X64 platform.
  USE(extra3);  // The register extra3 is not used on the X64 platform.
  // Make sure that code is valid. The multiplying code relies on the
  // entry size being 3 * kPointerSize.
  ASSERT(sizeof(Entry) == 3 * kPointerSize);

  // Make sure the flags do not name a specific type.
  ASSERT(Code::ExtractTypeFromFlags(flags) == 0);

  // Make sure that there are no register conflicts.
  ASSERT(!scratch.is(receiver));
  ASSERT(!scratch.is(name));

  // Check that the scratch register is valid and that extra2 and extra3
  // are unused.
  ASSERT(!scratch.is(no_reg));
  ASSERT(extra2.is(no_reg));
  ASSERT(extra3.is(no_reg));

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1);

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Get the map of the receiver and compute the hash.
  __ movl(scratch, FieldOperand(name, Name::kHashFieldOffset));
  // Use only the low 32 bits of the map pointer.
  __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
  __ xor_(scratch, Immediate(flags));
  // We mask out the last two bits because they are not part of the hash and
  // they are always 01 for maps.  Also in the two 'and' instructions below.
  __ and_(scratch, Immediate((kPrimaryTableSize - 1) << kHeapObjectTagSize));

  // Probe the primary table.
  ProbeTable(isolate, masm, flags, kPrimary, receiver, name, scratch);

  // Primary miss: Compute hash for secondary probe.
  __ movl(scratch, FieldOperand(name, Name::kHashFieldOffset));
  __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
  __ xor_(scratch, Immediate(flags));
  __ and_(scratch, Immediate((kPrimaryTableSize - 1) << kHeapObjectTagSize));
  __ subl(scratch, name);
  __ addl(scratch, Immediate(flags));
  __ and_(scratch, Immediate((kSecondaryTableSize - 1) << kHeapObjectTagSize));
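
  // Sketch of the two hash computations above (an explanatory note, not
  // original code): with h = name->hash_field() and m = the low 32 bits of
  // the receiver's map pointer,
  //   primary   = ((h + m) ^ flags) & ((kPrimaryTableSize - 1) << kHeapObjectTagSize)
  //   secondary = (primary - name + flags) & ((kSecondaryTableSize - 1) << kHeapObjectTagSize)
  // where 'name' in the secondary hash is the raw tagged name pointer
  // subtracted by the subl above.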

  // Probe the secondary table.
  ProbeTable(isolate, masm, flags, kSecondary, receiver, name, scratch);

  // Cache miss: Fall-through and let caller handle the miss by
  // entering the runtime system.
  __ bind(&miss);
  __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1);
}


void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                       int index,
                                                       Register prototype) {
  // Load the global or builtins object from the current context.
  __ movp(prototype,
          Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  // Load the native context from the global or builtins object.
  __ movp(prototype,
          FieldOperand(prototype, GlobalObject::kNativeContextOffset));
  // Load the function from the native context.
  __ movp(prototype, Operand(prototype, Context::SlotOffset(index)));
  // Load the initial map.  The global functions all have initial maps.
  __ movp(prototype,
          FieldOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the prototype from the initial map.
  __ movp(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
}


void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm,
    int index,
    Register prototype,
    Label* miss) {
  Isolate* isolate = masm->isolate();
  // Get the global function with the given index.
  Handle<JSFunction> function(
      JSFunction::cast(isolate->native_context()->get(index)));

  // Check we're still in the same context.
  Register scratch = prototype;
  const int offset = Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX);
  __ movp(scratch, Operand(rsi, offset));
  __ movp(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));
  __ Cmp(Operand(scratch, Context::SlotOffset(index)), function);
  __ j(not_equal, miss);

  // Load its initial map. The global functions all have initial maps.
  __ Move(prototype, Handle<Map>(function->initial_map()));
  // Load the prototype from the initial map.
  __ movp(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
}


void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
                                           Register receiver,
                                           Register scratch,
                                           Label* miss_label) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss_label);

  // Check that the object is a JS array.
  __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
  __ j(not_equal, miss_label);

  // Load length directly from the JS array.
  __ movp(rax, FieldOperand(receiver, JSArray::kLengthOffset));
  __ ret(0);
}


// Generate code to check if an object is a string.  If the object is
// a string, the map's instance type is left in the scratch register.
static void GenerateStringCheck(MacroAssembler* masm,
                                Register receiver,
                                Register scratch,
                                Label* smi,
                                Label* non_string_object) {
  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, smi);

  // Check that the object is a string.
  __ movp(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
  __ movzxbq(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  __ testl(scratch, Immediate(kNotStringTag));
  __ j(not_zero, non_string_object);
}


void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
                                            Register receiver,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss) {
  Label check_wrapper;

  // Check if the object is a string leaving the instance type in the
  // scratch register.
  GenerateStringCheck(masm, receiver, scratch1, miss, &check_wrapper);

  // Load length directly from the string.
  __ movp(rax, FieldOperand(receiver, String::kLengthOffset));
  __ ret(0);

  // Check if the object is a JSValue wrapper.
  __ bind(&check_wrapper);
  __ cmpl(scratch1, Immediate(JS_VALUE_TYPE));
  __ j(not_equal, miss);

  // Check if the wrapped value is a string and load the length
  // directly if it is.
  __ movp(scratch2, FieldOperand(receiver, JSValue::kValueOffset));
  GenerateStringCheck(masm, scratch2, scratch1, miss, miss);
  __ movp(rax, FieldOperand(scratch2, String::kLengthOffset));
  __ ret(0);
}


void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
                                                 Register receiver,
                                                 Register result,
                                                 Register scratch,
                                                 Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, result, miss_label);
  if (!result.is(rax)) __ movp(rax, result);
  __ ret(0);
}


void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
                                            Register dst,
                                            Register src,
                                            bool inobject,
                                            int index,
                                            Representation representation) {
  ASSERT(!FLAG_track_double_fields || !representation.IsDouble());
  int offset = index * kPointerSize;
  if (!inobject) {
    // Calculate the offset into the properties array.
    offset = offset + FixedArray::kHeaderSize;
    __ movp(dst, FieldOperand(src, JSObject::kPropertiesOffset));
    src = dst;
  }
  __ movp(dst, FieldOperand(src, offset));
}


static void PushInterceptorArguments(MacroAssembler* masm,
                                     Register receiver,
                                     Register holder,
                                     Register name,
                                     Handle<JSObject> holder_obj) {
  STATIC_ASSERT(StubCache::kInterceptorArgsNameIndex == 0);
  STATIC_ASSERT(StubCache::kInterceptorArgsInfoIndex == 1);
  STATIC_ASSERT(StubCache::kInterceptorArgsThisIndex == 2);
  STATIC_ASSERT(StubCache::kInterceptorArgsHolderIndex == 3);
  STATIC_ASSERT(StubCache::kInterceptorArgsLength == 4);
  __ push(name);
  Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
  ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor));
  __ Move(kScratchRegister, interceptor);
  __ push(kScratchRegister);
  __ push(receiver);
  __ push(holder);
}
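
// Explanatory note (not original code): after PushInterceptorArguments the
// stack holds, from the bottom up: name, interceptor info, receiver, holder,
// i.e. the kInterceptorArgs* index order asserted at the top of the function.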


static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm,
    Register receiver,
    Register holder,
    Register name,
    Handle<JSObject> holder_obj,
    IC::UtilityId id) {
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
  __ CallExternalReference(
      ExternalReference(IC_Utility(id), masm->isolate()),
      StubCache::kInterceptorArgsLength);
}


// Generate call to api function.
static void GenerateFastApiCall(MacroAssembler* masm,
                                const CallOptimization& optimization,
                                Handle<Map> receiver_map,
                                Register receiver,
                                Register scratch_in,
                                int argc,
                                Register* values) {
  ASSERT(optimization.is_simple_api_call());

  __ PopReturnAddressTo(scratch_in);
  // receiver
  __ push(receiver);
  // Write the arguments to stack frame.
  for (int i = 0; i < argc; i++) {
    Register arg = values[argc - 1 - i];
    ASSERT(!receiver.is(arg));
    ASSERT(!scratch_in.is(arg));
    __ push(arg);
  }
  __ PushReturnAddressFrom(scratch_in);
  // Stack now matches JSFunction abi.

  // Abi for CallApiFunctionStub.
  Register callee = rax;
  Register call_data = rbx;
  Register holder = rcx;
  Register api_function_address = rdx;
  Register scratch = rdi;  // scratch_in is no longer valid.

  // Put holder in place.
  CallOptimization::HolderLookup holder_lookup;
  Handle<JSObject> api_holder = optimization.LookupHolderOfExpectedType(
      receiver_map,
      &holder_lookup);
  switch (holder_lookup) {
    case CallOptimization::kHolderIsReceiver:
      __ Move(holder, receiver);
      break;
    case CallOptimization::kHolderFound:
      __ Move(holder, api_holder);
      break;
    case CallOptimization::kHolderNotFound:
      UNREACHABLE();
      break;
  }

  Isolate* isolate = masm->isolate();
  Handle<JSFunction> function = optimization.constant_function();
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  Handle<Object> call_data_obj(api_call_info->data(), isolate);

  // Put callee in place.
  __ Move(callee, function);

  bool call_data_undefined = false;
  // Put call_data in place.
  if (isolate->heap()->InNewSpace(*call_data_obj)) {
    __ Move(scratch, api_call_info);
    __ movp(call_data, FieldOperand(scratch, CallHandlerInfo::kDataOffset));
  } else if (call_data_obj->IsUndefined()) {
    call_data_undefined = true;
    __ LoadRoot(call_data, Heap::kUndefinedValueRootIndex);
  } else {
    __ Move(call_data, call_data_obj);
  }

  // Put api_function_address in place.
  Address function_address = v8::ToCData<Address>(api_call_info->callback());
  __ Move(
      api_function_address, function_address, RelocInfo::EXTERNAL_REFERENCE);

  // Jump to stub.
  CallApiFunctionStub stub(true, call_data_undefined, argc);
  __ TailCallStub(&stub);
}


void StoreStubCompiler::GenerateRestoreName(MacroAssembler* masm,
                                            Label* label,
                                            Handle<Name> name) {
  if (!label->is_unused()) {
    __ bind(label);
    __ Move(this->name(), name);
  }
}


void StubCompiler::GenerateCheckPropertyCell(MacroAssembler* masm,
                                             Handle<JSGlobalObject> global,
                                             Handle<Name> name,
                                             Register scratch,
                                             Label* miss) {
  Handle<PropertyCell> cell =
      JSGlobalObject::EnsurePropertyCell(global, name);
  ASSERT(cell->value()->IsTheHole());
  __ Move(scratch, cell);
  __ Cmp(FieldOperand(scratch, Cell::kValueOffset),
         masm->isolate()->factory()->the_hole_value());
  __ j(not_equal, miss);
}


void StoreStubCompiler::GenerateNegativeHolderLookup(
    MacroAssembler* masm,
    Handle<JSObject> holder,
    Register holder_reg,
    Handle<Name> name,
    Label* miss) {
  if (holder->IsJSGlobalObject()) {
    GenerateCheckPropertyCell(
        masm, Handle<JSGlobalObject>::cast(holder), name, scratch1(), miss);
  } else if (!holder->HasFastProperties() && !holder->IsJSGlobalProxy()) {
    GenerateDictionaryNegativeLookup(
        masm, miss, holder_reg, name, scratch1(), scratch2());
  }
}


// Receiver_reg is preserved on jumps to miss_label, but may be destroyed if
// store is successful.
void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm,
                                                Handle<JSObject> object,
                                                LookupResult* lookup,
                                                Handle<Map> transition,
                                                Handle<Name> name,
                                                Register receiver_reg,
                                                Register storage_reg,
                                                Register value_reg,
                                                Register scratch1,
                                                Register scratch2,
                                                Register unused,
                                                Label* miss_label,
                                                Label* slow) {
  int descriptor = transition->LastAdded();
  DescriptorArray* descriptors = transition->instance_descriptors();
  PropertyDetails details = descriptors->GetDetails(descriptor);
  Representation representation = details.representation();
  ASSERT(!representation.IsNone());

  if (details.type() == CONSTANT) {
    Handle<Object> constant(descriptors->GetValue(descriptor), masm->isolate());
    __ Cmp(value_reg, constant);
    __ j(not_equal, miss_label);
  } else if (FLAG_track_fields && representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
  } else if (FLAG_track_double_fields && representation.IsDouble()) {
    Label do_store, heap_number;
    __ AllocateHeapNumber(storage_reg, scratch1, slow);

    __ JumpIfNotSmi(value_reg, &heap_number);
    __ SmiToInteger32(scratch1, value_reg);
    __ Cvtlsi2sd(xmm0, scratch1);
    __ jmp(&do_store);

    __ bind(&heap_number);
    __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(),
                miss_label, DONT_DO_SMI_CHECK);
    __ movsd(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset));

    __ bind(&do_store);
    __ movsd(FieldOperand(storage_reg, HeapNumber::kValueOffset), xmm0);
  }
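
  // Worked example of the double path above (an explanatory note, not
  // original code): storing the Smi 42 untags it to the int32 42, converts
  // it to the double 42.0 in xmm0, and boxes the result in the freshly
  // allocated heap number held in storage_reg; a heap-number input instead
  // has its value copied through xmm0.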

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Perform map transition for the receiver if necessary.
  if (details.type() == FIELD &&
      object->map()->unused_property_fields() == 0) {
    // The properties must be extended before we can store the value.
    // We jump to a runtime call that extends the properties array.
    __ PopReturnAddressTo(scratch1);
    __ push(receiver_reg);
    __ Push(transition);
    __ push(value_reg);
    __ PushReturnAddressFrom(scratch1);
    __ TailCallExternalReference(
        ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
                          masm->isolate()),
        3,
        1);
    return;
  }

  // Update the map of the object.
  __ Move(scratch1, transition);
  __ movp(FieldOperand(receiver_reg, HeapObject::kMapOffset), scratch1);

  // Update the write barrier for the map field.
  __ RecordWriteField(receiver_reg,
                      HeapObject::kMapOffset,
                      scratch1,
                      scratch2,
                      kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);

  if (details.type() == CONSTANT) {
    ASSERT(value_reg.is(rax));
    __ ret(0);
    return;
  }

  int index = transition->instance_descriptors()->GetFieldIndex(
      transition->LastAdded());

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  // TODO(verwaest): Share this code as a code stub.
  SmiCheck smi_check = representation.IsTagged()
      ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    if (FLAG_track_double_fields && representation.IsDouble()) {
      __ movp(FieldOperand(receiver_reg, offset), storage_reg);
    } else {
      __ movp(FieldOperand(receiver_reg, offset), value_reg);
    }

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Update the write barrier for the array address.
      if (!FLAG_track_double_fields || !representation.IsDouble()) {
        __ movp(storage_reg, value_reg);
      }
      __ RecordWriteField(
          receiver_reg, offset, storage_reg, scratch1, kDontSaveFPRegs,
          EMIT_REMEMBERED_SET, smi_check);
    }
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array (optimistically).
    __ movp(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
    if (FLAG_track_double_fields && representation.IsDouble()) {
      __ movp(FieldOperand(scratch1, offset), storage_reg);
    } else {
      __ movp(FieldOperand(scratch1, offset), value_reg);
    }

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Update the write barrier for the array address.
      if (!FLAG_track_double_fields || !representation.IsDouble()) {
        __ movp(storage_reg, value_reg);
      }
      __ RecordWriteField(
          scratch1, offset, storage_reg, receiver_reg, kDontSaveFPRegs,
          EMIT_REMEMBERED_SET, smi_check);
    }
  }

  // Return the value (register rax).
  ASSERT(value_reg.is(rax));
  __ ret(0);
}


// Both name_reg and receiver_reg are preserved on jumps to miss_label,
// but may be destroyed if store is successful.
void StoreStubCompiler::GenerateStoreField(MacroAssembler* masm,
                                           Handle<JSObject> object,
                                           LookupResult* lookup,
                                           Register receiver_reg,
                                           Register name_reg,
                                           Register value_reg,
                                           Register scratch1,
                                           Register scratch2,
                                           Label* miss_label) {
  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  int index = lookup->GetFieldIndex().field_index();

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  Representation representation = lookup->representation();
  ASSERT(!representation.IsNone());
  if (FLAG_track_fields && representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
  } else if (FLAG_track_double_fields && representation.IsDouble()) {
    // Load the double storage.
    if (index < 0) {
      int offset = object->map()->instance_size() + (index * kPointerSize);
      __ movp(scratch1, FieldOperand(receiver_reg, offset));
    } else {
      __ movp(scratch1,
              FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
      int offset = index * kPointerSize + FixedArray::kHeaderSize;
      __ movp(scratch1, FieldOperand(scratch1, offset));
    }

    // Store the value into the storage.
    Label do_store, heap_number;
    __ JumpIfNotSmi(value_reg, &heap_number);
    __ SmiToInteger32(scratch2, value_reg);
    __ Cvtlsi2sd(xmm0, scratch2);
    __ jmp(&do_store);

    __ bind(&heap_number);
    __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(),
                miss_label, DONT_DO_SMI_CHECK);
    __ movsd(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset));
    __ bind(&do_store);
    __ movsd(FieldOperand(scratch1, HeapNumber::kValueOffset), xmm0);
    // Return the value (register rax).
    ASSERT(value_reg.is(rax));
    __ ret(0);
    return;
  }

  // TODO(verwaest): Share this code as a code stub.
  SmiCheck smi_check = representation.IsTagged()
      ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    __ movp(FieldOperand(receiver_reg, offset), value_reg);

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Update the write barrier for the array address.
      // Pass the value being stored in the now unused name_reg.
      __ movp(name_reg, value_reg);
      __ RecordWriteField(
          receiver_reg, offset, name_reg, scratch1, kDontSaveFPRegs,
          EMIT_REMEMBERED_SET, smi_check);
    }
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array (optimistically).
    __ movp(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
    __ movp(FieldOperand(scratch1, offset), value_reg);

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Update the write barrier for the array address.
      // Pass the value being stored in the now unused name_reg.
      __ movp(name_reg, value_reg);
      __ RecordWriteField(
          scratch1, offset, name_reg, receiver_reg, kDontSaveFPRegs,
          EMIT_REMEMBERED_SET, smi_check);
    }
  }

  // Return the value (register rax).
  ASSERT(value_reg.is(rax));
  __ ret(0);
}


void StubCompiler::GenerateTailCall(MacroAssembler* masm, Handle<Code> code) {
  __ jmp(code, RelocInfo::CODE_TARGET);
}


#undef __
#define __ ACCESS_MASM(masm())


Register StubCompiler::CheckPrototypes(Handle<HeapType> type,
                                       Register object_reg,
                                       Handle<JSObject> holder,
                                       Register holder_reg,
                                       Register scratch1,
                                       Register scratch2,
                                       Handle<Name> name,
                                       Label* miss,
                                       PrototypeCheckType check) {
  Handle<Map> receiver_map(IC::TypeToMap(*type, isolate()));
  // Make sure that the type feedback oracle harvests the receiver map.
  // TODO(svenpanne) Remove this hack when all ICs are reworked.
  __ Move(scratch1, receiver_map);

  // Make sure there's no overlap between holder and object registers.
  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
         && !scratch2.is(scratch1));

  // Keep track of the current object in register reg.  On the first
  // iteration, reg is an alias for object_reg, on later iterations,
  // it is an alias for holder_reg.
  Register reg = object_reg;
  int depth = 0;

  Handle<JSObject> current = Handle<JSObject>::null();
  if (type->IsConstant()) current = Handle<JSObject>::cast(type->AsConstant());
  Handle<JSObject> prototype = Handle<JSObject>::null();
  Handle<Map> current_map = receiver_map;
  Handle<Map> holder_map(holder->map());
  // Traverse the prototype chain and check the maps in the prototype chain for
  // fast and global objects or do negative lookup for normal objects.
  while (!current_map.is_identical_to(holder_map)) {
    ++depth;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(current_map->IsJSGlobalProxyMap() ||
           !current_map->is_access_check_needed());

    prototype = handle(JSObject::cast(current_map->prototype()));
    if (current_map->is_dictionary_map() &&
        !current_map->IsJSGlobalObjectMap() &&
        !current_map->IsJSGlobalProxyMap()) {
      if (!name->IsUniqueName()) {
        ASSERT(name->IsString());
        name = factory()->InternalizeString(Handle<String>::cast(name));
      }
      ASSERT(current.is_null() ||
             current->property_dictionary()->FindEntry(*name) ==
             NameDictionary::kNotFound);

      GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
                                       scratch1, scratch2);

      __ movp(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
      reg = holder_reg;  // From now on the object will be in holder_reg.
      __ movp(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
    } else {
      bool in_new_space = heap()->InNewSpace(*prototype);
      if (in_new_space) {
        // Save the map in scratch1 for later.
        __ movp(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
      }
      if (depth != 1 || check == CHECK_ALL_MAPS) {
        __ CheckMap(reg, current_map, miss, DONT_DO_SMI_CHECK);
      }

      // Check access rights to the global object.  This has to happen after
      // the map check so that we know that the object is actually a global
      // object.
      if (current_map->IsJSGlobalProxyMap()) {
        __ CheckAccessGlobalProxy(reg, scratch2, miss);
      } else if (current_map->IsJSGlobalObjectMap()) {
        GenerateCheckPropertyCell(
            masm(), Handle<JSGlobalObject>::cast(current), name,
            scratch2, miss);
      }
      reg = holder_reg;  // From now on the object will be in holder_reg.

      if (in_new_space) {
        // The prototype is in new space; we cannot store a reference to it
        // in the code.  Load it from the map.
        __ movp(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
      } else {
        // The prototype is in old space; load it directly.
        __ Move(reg, prototype);
      }
    }

    // Go to the next object in the prototype chain.
    current = prototype;
    current_map = handle(current->map());
  }
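
  // Explanatory note (not original code): when the loop exits, reg holds the
  // holder (or still aliases object_reg when depth == 0) and current_map is
  // the holder's map.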

  // Log the check depth.
  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));

  if (depth != 0 || check == CHECK_ALL_MAPS) {
    // Check the holder map.
    __ CheckMap(reg, current_map, miss, DONT_DO_SMI_CHECK);
  }

  // Perform security check for access to the global object.
  ASSERT(current_map->IsJSGlobalProxyMap() ||
         !current_map->is_access_check_needed());
  if (current_map->IsJSGlobalProxyMap()) {
    __ CheckAccessGlobalProxy(reg, scratch1, miss);
  }

  // Return the register containing the holder.
  return reg;
}


void LoadStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ jmp(&success);
    __ bind(miss);
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    __ bind(&success);
  }
}


void StoreStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ jmp(&success);
    GenerateRestoreName(masm(), miss, name);
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    __ bind(&success);
  }
}


Register LoadStubCompiler::CallbackHandlerFrontend(
    Handle<HeapType> type,
    Register object_reg,
    Handle<JSObject> holder,
    Handle<Name> name,
    Handle<Object> callback) {
  Label miss;

  Register reg = HandlerFrontendHeader(type, object_reg, holder, name, &miss);

  if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) {
    ASSERT(!reg.is(scratch2()));
    ASSERT(!reg.is(scratch3()));
    ASSERT(!reg.is(scratch4()));

    // Load the properties dictionary.
    Register dictionary = scratch4();
    __ movp(dictionary, FieldOperand(reg, JSObject::kPropertiesOffset));

    // Probe the dictionary.
    Label probe_done;
    NameDictionaryLookupStub::GeneratePositiveLookup(masm(),
                                                     &miss,
                                                     &probe_done,
                                                     dictionary,
                                                     this->name(),
                                                     scratch2(),
                                                     scratch3());
    __ bind(&probe_done);

    // If probing finds an entry in the dictionary, scratch3 contains the
    // index into the dictionary. Check that the value is the callback.
    Register index = scratch3();
    const int kElementsStartOffset =
        NameDictionary::kHeaderSize +
        NameDictionary::kElementsStartIndex * kPointerSize;
    const int kValueOffset = kElementsStartOffset + kPointerSize;
    __ movp(scratch2(),
            Operand(dictionary, index, times_pointer_size,
                    kValueOffset - kHeapObjectTag));
    __ Move(scratch3(), callback, RelocInfo::EMBEDDED_OBJECT);
    __ cmpq(scratch2(), scratch3());
    __ j(not_equal, &miss);
  }

  HandlerFrontendFooter(name, &miss);
  return reg;
}


void LoadStubCompiler::GenerateLoadField(Register reg,
                                         Handle<JSObject> holder,
                                         PropertyIndex field,
                                         Representation representation) {
  if (!reg.is(receiver())) __ movp(receiver(), reg);
  if (kind() == Code::LOAD_IC) {
    LoadFieldStub stub(field.is_inobject(holder),
                       field.translate(holder),
                       representation);
    GenerateTailCall(masm(), stub.GetCode(isolate()));
  } else {
    KeyedLoadFieldStub stub(field.is_inobject(holder),
                            field.translate(holder),
                            representation);
    GenerateTailCall(masm(), stub.GetCode(isolate()));
  }
}


void LoadStubCompiler::GenerateLoadCallback(
    const CallOptimization& call_optimization,
    Handle<Map> receiver_map) {
  GenerateFastApiCall(
      masm(), call_optimization, receiver_map,
      receiver(), scratch1(), 0, NULL);
}


void LoadStubCompiler::GenerateLoadCallback(
    Register reg,
    Handle<ExecutableAccessorInfo> callback) {
  // Insert additional parameters into the stack frame above return address.
  ASSERT(!scratch4().is(reg));
  __ PopReturnAddressTo(scratch4());

  STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0);
  STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3);
  STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4);
  STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5);
  STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 6);
  __ push(receiver());  // receiver
  if (heap()->InNewSpace(callback->data())) {
    ASSERT(!scratch2().is(reg));
    __ Move(scratch2(), callback);
    __ push(FieldOperand(scratch2(),
                         ExecutableAccessorInfo::kDataOffset));  // data
  } else {
    __ Push(Handle<Object>(callback->data(), isolate()));
  }
  ASSERT(!kScratchRegister.is(reg));
  __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
  __ push(kScratchRegister);  // return value
  __ push(kScratchRegister);  // return value default
  __ PushAddress(ExternalReference::isolate_address(isolate()));
  __ push(reg);  // holder
  __ push(name());  // name
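
  // Explanatory note (not original code): the stack now holds, from the top
  // down: name, holder, isolate, return-value default, return value, data,
  // receiver; this matches the PropertyCallbackArguments indices asserted
  // above, with the name pushed last as the extra argument.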
  // Save a pointer to where we pushed the arguments pointer.  This will be
  // passed as the const PropertyAccessorInfo& to the C++ callback.

  __ PushReturnAddressFrom(scratch4());

  // Abi for CallApiGetter
  Register api_function_address = r8;
  Address getter_address = v8::ToCData<Address>(callback->getter());
  __ Move(api_function_address, getter_address, RelocInfo::EXTERNAL_REFERENCE);

  CallApiGetterStub stub;
  __ TailCallStub(&stub);
}


void LoadStubCompiler::GenerateLoadConstant(Handle<Object> value) {
  // Return the constant value.
  __ Move(rax, value);
  __ ret(0);
}


void LoadStubCompiler::GenerateLoadInterceptor(
    Register holder_reg,
    Handle<Object> object,
    Handle<JSObject> interceptor_holder,
    LookupResult* lookup,
    Handle<Name> name) {
  ASSERT(interceptor_holder->HasNamedInterceptor());
  ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());

  // So far the most popular follow-ups for interceptor loads are FIELD and
  // CALLBACKS, so inline only those; other cases may be added later.
  bool compile_followup_inline = false;
  if (lookup->IsFound() && lookup->IsCacheable()) {
    if (lookup->IsField()) {
      compile_followup_inline = true;
    } else if (lookup->type() == CALLBACKS &&
               lookup->GetCallbackObject()->IsExecutableAccessorInfo()) {
      ExecutableAccessorInfo* callback =
          ExecutableAccessorInfo::cast(lookup->GetCallbackObject());
      compile_followup_inline = callback->getter() != NULL &&
          callback->IsCompatibleReceiver(*object);
    }
  }

  if (compile_followup_inline) {
    // Compile the interceptor call, followed by inline code to load the
    // property from further up the prototype chain if the call fails.
    // Check that the maps haven't changed.
    ASSERT(holder_reg.is(receiver()) || holder_reg.is(scratch1()));

    // Preserve the receiver register explicitly whenever it is different from
    // the holder and it is needed should the interceptor return without any
    // result. The CALLBACKS case needs the receiver to be passed into C++ code,
    // the FIELD case might cause a miss during the prototype check.
    bool must_perform_prototype_check = *interceptor_holder != lookup->holder();
    bool must_preserve_receiver_reg = !receiver().is(holder_reg) &&
        (lookup->type() == CALLBACKS || must_perform_prototype_check);

    // Save necessary data before invoking an interceptor.
    // Requires a frame to make GC aware of pushed pointers.
    {
      FrameScope frame_scope(masm(), StackFrame::INTERNAL);

      if (must_preserve_receiver_reg) {
        __ push(receiver());
      }
      __ push(holder_reg);
      __ push(this->name());

      // Invoke an interceptor.  Note: the map checks from the receiver to the
      // interceptor's holder have been compiled before (see a caller of this
      // method).
      CompileCallLoadPropertyWithInterceptor(
          masm(), receiver(), holder_reg, this->name(), interceptor_holder,
          IC::kLoadPropertyWithInterceptorOnly);

      // Check if the interceptor provided a value for the property.  If it
      // did, return immediately.
      Label interceptor_failed;
      __ CompareRoot(rax, Heap::kNoInterceptorResultSentinelRootIndex);
      __ j(equal, &interceptor_failed);
      frame_scope.GenerateLeaveFrame();
      __ ret(0);

      __ bind(&interceptor_failed);
      __ pop(this->name());
      __ pop(holder_reg);
      if (must_preserve_receiver_reg) {
        __ pop(receiver());
      }

      // Leave the internal frame.
    }

    GenerateLoadPostInterceptor(holder_reg, interceptor_holder, name, lookup);
  } else {  // !compile_followup_inline
    // Call the runtime system to load the interceptor.
    // Check that the maps haven't changed.
    __ PopReturnAddressTo(scratch2());
    PushInterceptorArguments(masm(), receiver(), holder_reg,
                             this->name(), interceptor_holder);
    __ PushReturnAddressFrom(scratch2());

    ExternalReference ref = ExternalReference(
        IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), isolate());
    __ TailCallExternalReference(ref, StubCache::kInterceptorArgsLength, 1);
  }
}


void StubCompiler::GenerateBooleanCheck(Register object, Label* miss) {
  Label success;
  // Check that the object is a boolean.
  __ Cmp(object, factory()->true_value());
  __ j(equal, &success);
  __ Cmp(object, factory()->false_value());
  __ j(not_equal, miss);
  __ bind(&success);
}


Handle<Code> StoreStubCompiler::CompileStoreCallback(
    Handle<JSObject> object,
    Handle<JSObject> holder,
    Handle<Name> name,
    Handle<ExecutableAccessorInfo> callback) {
  Register holder_reg = HandlerFrontend(
      IC::CurrentTypeOf(object, isolate()), receiver(), holder, name);

  __ PopReturnAddressTo(scratch1());
  __ push(receiver());
  __ push(holder_reg);
  __ Push(callback);  // callback info
  __ Push(name);
  __ push(value());
  __ PushReturnAddressFrom(scratch1());

  // Do tail-call to the runtime system.
  ExternalReference store_callback_property =
      ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
  __ TailCallExternalReference(store_callback_property, 5, 1);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}


Handle<Code> StoreStubCompiler::CompileStoreCallback(
    Handle<JSObject> object,
    Handle<JSObject> holder,
    Handle<Name> name,
    const CallOptimization& call_optimization) {
  HandlerFrontend(IC::CurrentTypeOf(object, isolate()),
                  receiver(), holder, name);

  Register values[] = { value() };
  GenerateFastApiCall(
      masm(), call_optimization, handle(object->map()),
      receiver(), scratch1(), 1, values);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}


#undef __
#define __ ACCESS_MASM(masm)


void StoreStubCompiler::GenerateStoreViaSetter(
    MacroAssembler* masm,
    Handle<HeapType> type,
    Handle<JSFunction> setter) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : name
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    Register receiver = rdx;
    Register value = rax;

    // Save value register, so we can restore it later.
    __ push(value);

    if (!setter.is_null()) {
      // Call the JavaScript setter with receiver and value on the stack.
      if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ movp(receiver,
                FieldOperand(receiver, JSGlobalObject::kGlobalReceiverOffset));
      }
      __ push(receiver);
      __ push(value);
      ParameterCount actual(1);
      ParameterCount expected(setter);
      __ InvokeFunction(setter, expected, actual,
                        CALL_FUNCTION, NullCallWrapper());
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
    }

    // We have to return the passed value, not the return value of the setter.
    __ pop(rax);

    // Restore context register.
    __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  }
  __ ret(0);
}


#undef __
#define __ ACCESS_MASM(masm())


Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
    Handle<JSObject> object,
    Handle<Name> name) {
  __ PopReturnAddressTo(scratch1());
  __ push(receiver());
  __ push(this->name());
  __ push(value());
  __ PushReturnAddressFrom(scratch1());

  // Do tail-call to the runtime system.
  ExternalReference store_ic_property =
      ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate());
  __ TailCallExternalReference(store_ic_property, 3, 1);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}


Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
    MapHandleList* receiver_maps,
    CodeHandleList* handler_stubs,
    MapHandleList* transitioned_maps) {
  Label miss;
  __ JumpIfSmi(receiver(), &miss, Label::kNear);

  __ movp(scratch1(), FieldOperand(receiver(), HeapObject::kMapOffset));
  int receiver_count = receiver_maps->length();
  for (int i = 0; i < receiver_count; ++i) {
    // Check map and tail call if there's a match.
    __ Cmp(scratch1(), receiver_maps->at(i));
    if (transitioned_maps->at(i).is_null()) {
      __ j(equal, handler_stubs->at(i), RelocInfo::CODE_TARGET);
    } else {
      Label next_map;
      __ j(not_equal, &next_map, Label::kNear);
      __ Move(transition_map(),
              transitioned_maps->at(i),
              RelocInfo::EMBEDDED_OBJECT);
      __ jmp(handler_stubs->at(i), RelocInfo::CODE_TARGET);
      __ bind(&next_map);
    }
  }

  __ bind(&miss);

  TailCallBuiltin(masm(), MissBuiltin(kind()));

  // Return the generated code.
  return GetICCode(
      kind(), Code::NORMAL, factory()->empty_string(), POLYMORPHIC);
}


Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<HeapType> type,
                                                      Handle<JSObject> last,
                                                      Handle<Name> name) {
  NonexistentHandlerFrontend(type, last, name);

  // Return undefined if maps of the full prototype chain are still the
  // same and no global property with this name contains a value.
  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
  __ ret(0);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}


Register* LoadStubCompiler::registers() {
  // receiver, name, scratch1, scratch2, scratch3, scratch4.
  static Register registers[] = { rax, rcx, rdx, rbx, rdi, r8 };
  return registers;
}


Register* KeyedLoadStubCompiler::registers() {
  // receiver, name, scratch1, scratch2, scratch3, scratch4.
  static Register registers[] = { rdx, rax, rbx, rcx, rdi, r8 };
  return registers;
}


Register* StoreStubCompiler::registers() {
  // receiver, name, value, scratch1, scratch2, scratch3.
  static Register registers[] = { rdx, rcx, rax, rbx, rdi, r8 };
  return registers;
}


Register* KeyedStoreStubCompiler::registers() {
  // receiver, name, value, scratch1, scratch2, scratch3.
  static Register registers[] = { rdx, rcx, rax, rbx, rdi, r8 };
  return registers;
}


#undef __
#define __ ACCESS_MASM(masm)


void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm,
                                             Handle<HeapType> type,
                                             Register receiver,
                                             Handle<JSFunction> getter) {
  // ----------- S t a t e -------------
  //  -- rax    : receiver
  //  -- rcx    : name
  //  -- rsp[0] : return address
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    if (!getter.is_null()) {
      // Call the JavaScript getter with the receiver on the stack.
      if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ movp(receiver,
                FieldOperand(receiver, JSGlobalObject::kGlobalReceiverOffset));
      }
      __ push(receiver);
      ParameterCount actual(0);
      ParameterCount expected(getter);
      __ InvokeFunction(getter, expected, actual,
                        CALL_FUNCTION, NullCallWrapper());
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context register.
    __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  }
  __ ret(0);
}


#undef __
#define __ ACCESS_MASM(masm())


Handle<Code> LoadStubCompiler::CompileLoadGlobal(
    Handle<HeapType> type,
    Handle<GlobalObject> global,
    Handle<PropertyCell> cell,
    Handle<Name> name,
    bool is_dont_delete) {
  Label miss;
  // TODO(verwaest): Directly store to rax. Currently we cannot do this, since
  // rax is used as receiver(), which we would otherwise clobber before a
  // potential miss.
  HandlerFrontendHeader(type, receiver(), global, name, &miss);

  // Get the value from the cell.
  __ Move(rbx, cell);
  __ movp(rbx, FieldOperand(rbx, PropertyCell::kValueOffset));

  // Check for deleted property if property can actually be deleted.
  if (!is_dont_delete) {
    __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
    __ j(equal, &miss);
  } else if (FLAG_debug_code) {
    __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
    __ Check(not_equal, kDontDeleteCellsCannotContainTheHole);
  }

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->named_load_global_stub(), 1);
  __ movp(rax, rbx);
  __ ret(0);

  HandlerFrontendFooter(name, &miss);

  // Return the generated code.
  return GetCode(kind(), Code::NORMAL, name);
}


Handle<Code> BaseLoadStoreStubCompiler::CompilePolymorphicIC(
    TypeHandleList* types,
    CodeHandleList* handlers,
    Handle<Name> name,
    Code::StubType type,
    IcCheckType check) {
  Label miss;

  if (check == PROPERTY &&
      (kind() == Code::KEYED_LOAD_IC || kind() == Code::KEYED_STORE_IC)) {
    __ Cmp(this->name(), name);
    __ j(not_equal, &miss);
  }

  Label number_case;
  Label* smi_target = IncludesNumberType(types) ? &number_case : &miss;
  __ JumpIfSmi(receiver(), smi_target);

  Register map_reg = scratch1();
  __ movp(map_reg, FieldOperand(receiver(), HeapObject::kMapOffset));
  int receiver_count = types->length();
  int number_of_handled_maps = 0;
  for (int current = 0; current < receiver_count; ++current) {
    Handle<HeapType> type = types->at(current);
    Handle<Map> map = IC::TypeToMap(*type, isolate());
    if (!map->is_deprecated()) {
      number_of_handled_maps++;
      // Check map and tail call if there's a match.
      __ Cmp(map_reg, map);
      if (type->Is(HeapType::Number())) {
        ASSERT(!number_case.is_unused());
        __ bind(&number_case);
      }
      __ j(equal, handlers->at(current), RelocInfo::CODE_TARGET);
    }
  }
  ASSERT(number_of_handled_maps > 0);

  __ bind(&miss);
  TailCallBuiltin(masm(), MissBuiltin(kind()));

  // Return the generated code.
  InlineCacheState state =
      number_of_handled_maps > 1 ? POLYMORPHIC : MONOMORPHIC;
  return GetICCode(kind(), type, name, state);
}


#undef __
#define __ ACCESS_MASM(masm)


void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
    MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label slow, miss;

  // This stub is meant to be tail-jumped to, the receiver must already
  // have been verified by the caller to not be a smi.

  __ JumpIfNotSmi(rax, &miss);
  __ SmiToInteger32(rbx, rax);
  __ movp(rcx, FieldOperand(rdx, JSObject::kElementsOffset));

  // Check whether the elements is a number dictionary.
  // rdx: receiver
  // rax: key
  // rbx: key as untagged int32
  // rcx: elements
  __ LoadFromNumberDictionary(&slow, rcx, rax, rbx, r9, rdi, rax);
  __ ret(0);

  __ bind(&slow);
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Slow);

  __ bind(&miss);
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss);
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64