Upstream version 5.34.104.0
[platform/framework/web/crosswalk.git] / src / v8 / src / ia32 / stub-cache-ia32.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 //     * Redistributions of source code must retain the above copyright
7 //       notice, this list of conditions and the following disclaimer.
8 //     * Redistributions in binary form must reproduce the above
9 //       copyright notice, this list of conditions and the following
10 //       disclaimer in the documentation and/or other materials provided
11 //       with the distribution.
12 //     * Neither the name of Google Inc. nor the names of its
13 //       contributors may be used to endorse or promote products derived
14 //       from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28 #include "v8.h"
29
30 #if V8_TARGET_ARCH_IA32
31
32 #include "ic-inl.h"
33 #include "codegen.h"
34 #include "stub-cache.h"
35
36 namespace v8 {
37 namespace internal {
38
39 #define __ ACCESS_MASM(masm)
40
41
42 static void ProbeTable(Isolate* isolate,
43                        MacroAssembler* masm,
44                        Code::Flags flags,
45                        StubCache::Table table,
46                        Register name,
47                        Register receiver,
48                        // Number of the cache entry pointer-size scaled.
49                        Register offset,
50                        Register extra) {
51   ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
52   ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
53   ExternalReference map_offset(isolate->stub_cache()->map_reference(table));
54
55   Label miss;
56
57   // Multiply by 3 because there are 3 fields per entry (name, code, map).
58   __ lea(offset, Operand(offset, offset, times_2, 0));
59
60   if (extra.is_valid()) {
61     // Get the code entry from the cache.
62     __ mov(extra, Operand::StaticArray(offset, times_1, value_offset));
63
64     // Check that the key in the entry matches the name.
65     __ cmp(name, Operand::StaticArray(offset, times_1, key_offset));
66     __ j(not_equal, &miss);
67
68     // Check the map matches.
69     __ mov(offset, Operand::StaticArray(offset, times_1, map_offset));
70     __ cmp(offset, FieldOperand(receiver, HeapObject::kMapOffset));
71     __ j(not_equal, &miss);
72
73     // Check that the flags match what we're looking for.
74     __ mov(offset, FieldOperand(extra, Code::kFlagsOffset));
75     __ and_(offset, ~Code::kFlagsNotUsedInLookup);
76     __ cmp(offset, flags);
77     __ j(not_equal, &miss);
78
79 #ifdef DEBUG
80     if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
81       __ jmp(&miss);
82     } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
83       __ jmp(&miss);
84     }
85 #endif
86
87     // Jump to the first instruction in the code stub.
88     __ add(extra, Immediate(Code::kHeaderSize - kHeapObjectTag));
89     __ jmp(extra);
90
91     __ bind(&miss);
92   } else {
93     // Save the offset on the stack.
94     __ push(offset);
95
96     // Check that the key in the entry matches the name.
97     __ cmp(name, Operand::StaticArray(offset, times_1, key_offset));
98     __ j(not_equal, &miss);
99
100     // Check the map matches.
101     __ mov(offset, Operand::StaticArray(offset, times_1, map_offset));
102     __ cmp(offset, FieldOperand(receiver, HeapObject::kMapOffset));
103     __ j(not_equal, &miss);
104
105     // Restore offset register.
106     __ mov(offset, Operand(esp, 0));
107
108     // Get the code entry from the cache.
109     __ mov(offset, Operand::StaticArray(offset, times_1, value_offset));
110
111     // Check that the flags match what we're looking for.
112     __ mov(offset, FieldOperand(offset, Code::kFlagsOffset));
113     __ and_(offset, ~Code::kFlagsNotUsedInLookup);
114     __ cmp(offset, flags);
115     __ j(not_equal, &miss);
116
117 #ifdef DEBUG
118     if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
119       __ jmp(&miss);
120     } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
121       __ jmp(&miss);
122     }
123 #endif
124
125     // Restore offset and re-load code entry from cache.
126     __ pop(offset);
127     __ mov(offset, Operand::StaticArray(offset, times_1, value_offset));
128
129     // Jump to the first instruction in the code stub.
130     __ add(offset, Immediate(Code::kHeaderSize - kHeapObjectTag));
131     __ jmp(offset);
132
133     // Pop at miss.
134     __ bind(&miss);
135     __ pop(offset);
136   }
137 }
138
139
140 void StubCompiler::GenerateDictionaryNegativeLookup(MacroAssembler* masm,
141                                                     Label* miss_label,
142                                                     Register receiver,
143                                                     Handle<Name> name,
144                                                     Register scratch0,
145                                                     Register scratch1) {
146   ASSERT(name->IsUniqueName());
147   ASSERT(!receiver.is(scratch0));
148   Counters* counters = masm->isolate()->counters();
149   __ IncrementCounter(counters->negative_lookups(), 1);
150   __ IncrementCounter(counters->negative_lookups_miss(), 1);
151
152   __ mov(scratch0, FieldOperand(receiver, HeapObject::kMapOffset));
153
154   const int kInterceptorOrAccessCheckNeededMask =
155       (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);
156
157   // Bail out if the receiver has a named interceptor or requires access checks.
158   __ test_b(FieldOperand(scratch0, Map::kBitFieldOffset),
159             kInterceptorOrAccessCheckNeededMask);
160   __ j(not_zero, miss_label);
161
162   // Check that receiver is a JSObject.
163   __ CmpInstanceType(scratch0, FIRST_SPEC_OBJECT_TYPE);
164   __ j(below, miss_label);
165
166   // Load properties array.
167   Register properties = scratch0;
168   __ mov(properties, FieldOperand(receiver, JSObject::kPropertiesOffset));
169
170   // Check that the properties array is a dictionary.
171   __ cmp(FieldOperand(properties, HeapObject::kMapOffset),
172          Immediate(masm->isolate()->factory()->hash_table_map()));
173   __ j(not_equal, miss_label);
174
175   Label done;
176   NameDictionaryLookupStub::GenerateNegativeLookup(masm,
177                                                    miss_label,
178                                                    &done,
179                                                    properties,
180                                                    name,
181                                                    scratch1);
182   __ bind(&done);
183   __ DecrementCounter(counters->negative_lookups_miss(), 1);
184 }
185
186
187 void StubCache::GenerateProbe(MacroAssembler* masm,
188                               Code::Flags flags,
189                               Register receiver,
190                               Register name,
191                               Register scratch,
192                               Register extra,
193                               Register extra2,
194                               Register extra3) {
195   Label miss;
196
197   // Assert that code is valid.  The multiplying code relies on the entry size
198   // being 12.
199   ASSERT(sizeof(Entry) == 12);
200
201   // Assert the flags do not name a specific type.
202   ASSERT(Code::ExtractTypeFromFlags(flags) == 0);
203
204   // Assert that there are no register conflicts.
205   ASSERT(!scratch.is(receiver));
206   ASSERT(!scratch.is(name));
207   ASSERT(!extra.is(receiver));
208   ASSERT(!extra.is(name));
209   ASSERT(!extra.is(scratch));
210
211   // Assert scratch and extra registers are valid, and extra2/3 are unused.
212   ASSERT(!scratch.is(no_reg));
213   ASSERT(extra2.is(no_reg));
214   ASSERT(extra3.is(no_reg));
215
216   Register offset = scratch;
217   scratch = no_reg;
218
219   Counters* counters = masm->isolate()->counters();
220   __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1);
221
222   // Check that the receiver isn't a smi.
223   __ JumpIfSmi(receiver, &miss);
224
225   // Get the map of the receiver and compute the hash.
226   __ mov(offset, FieldOperand(name, Name::kHashFieldOffset));
227   __ add(offset, FieldOperand(receiver, HeapObject::kMapOffset));
228   __ xor_(offset, flags);
229   // We mask out the last two bits because they are not part of the hash and
230   // they are always 01 for maps.  Also in the two 'and' instructions below.
231   __ and_(offset, (kPrimaryTableSize - 1) << kHeapObjectTagSize);
232   // ProbeTable expects the offset to be pointer scaled, which it is, because
233   // the heap object tag size is 2 and the pointer size log 2 is also 2.
234   ASSERT(kHeapObjectTagSize == kPointerSizeLog2);
235
236   // Probe the primary table.
237   ProbeTable(isolate(), masm, flags, kPrimary, name, receiver, offset, extra);
238
239   // Primary miss: Compute hash for secondary probe.
240   __ mov(offset, FieldOperand(name, Name::kHashFieldOffset));
241   __ add(offset, FieldOperand(receiver, HeapObject::kMapOffset));
242   __ xor_(offset, flags);
243   __ and_(offset, (kPrimaryTableSize - 1) << kHeapObjectTagSize);
244   __ sub(offset, name);
245   __ add(offset, Immediate(flags));
246   __ and_(offset, (kSecondaryTableSize - 1) << kHeapObjectTagSize);
247
248   // Probe the secondary table.
249   ProbeTable(
250       isolate(), masm, flags, kSecondary, name, receiver, offset, extra);
251
252   // Cache miss: Fall-through and let caller handle the miss by
253   // entering the runtime system.
254   __ bind(&miss);
255   __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1);
256 }
257
258
259 void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
260                                                        int index,
261                                                        Register prototype) {
262   __ LoadGlobalFunction(index, prototype);
263   __ LoadGlobalFunctionInitialMap(prototype, prototype);
264   // Load the prototype from the initial map.
265   __ mov(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
266 }
267
268
269 void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
270     MacroAssembler* masm,
271     int index,
272     Register prototype,
273     Label* miss) {
274   // Get the global function with the given index.
275   Handle<JSFunction> function(
276       JSFunction::cast(masm->isolate()->native_context()->get(index)));
277   // Check we're still in the same context.
278   Register scratch = prototype;
279   const int offset = Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX);
280   __ mov(scratch, Operand(esi, offset));
281   __ mov(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));
282   __ cmp(Operand(scratch, Context::SlotOffset(index)), function);
283   __ j(not_equal, miss);
284
285   // Load its initial map. The global functions all have initial maps.
286   __ Set(prototype, Immediate(Handle<Map>(function->initial_map())));
287   // Load the prototype from the initial map.
288   __ mov(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
289 }
290
291
292 void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
293                                            Register receiver,
294                                            Register scratch,
295                                            Label* miss_label) {
296   // Check that the receiver isn't a smi.
297   __ JumpIfSmi(receiver, miss_label);
298
299   // Check that the object is a JS array.
300   __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
301   __ j(not_equal, miss_label);
302
303   // Load length directly from the JS array.
304   __ mov(eax, FieldOperand(receiver, JSArray::kLengthOffset));
305   __ ret(0);
306 }
307
308
309 // Generate code to check if an object is a string.  If the object is
310 // a string, the map's instance type is left in the scratch register.
311 static void GenerateStringCheck(MacroAssembler* masm,
312                                 Register receiver,
313                                 Register scratch,
314                                 Label* smi,
315                                 Label* non_string_object) {
316   // Check that the object isn't a smi.
317   __ JumpIfSmi(receiver, smi);
318
319   // Check that the object is a string.
320   __ mov(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
321   __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
322   STATIC_ASSERT(kNotStringTag != 0);
323   __ test(scratch, Immediate(kNotStringTag));
324   __ j(not_zero, non_string_object);
325 }
326
327
328 void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
329                                             Register receiver,
330                                             Register scratch1,
331                                             Register scratch2,
332                                             Label* miss) {
333   Label check_wrapper;
334
335   // Check if the object is a string leaving the instance type in the
336   // scratch register.
337   GenerateStringCheck(masm, receiver, scratch1, miss, &check_wrapper);
338
339   // Load length from the string and convert to a smi.
340   __ mov(eax, FieldOperand(receiver, String::kLengthOffset));
341   __ ret(0);
342
343   // Check if the object is a JSValue wrapper.
344   __ bind(&check_wrapper);
345   __ cmp(scratch1, JS_VALUE_TYPE);
346   __ j(not_equal, miss);
347
348   // Check if the wrapped value is a string and load the length
349   // directly if it is.
350   __ mov(scratch2, FieldOperand(receiver, JSValue::kValueOffset));
351   GenerateStringCheck(masm, scratch2, scratch1, miss, miss);
352   __ mov(eax, FieldOperand(scratch2, String::kLengthOffset));
353   __ ret(0);
354 }
355
356
357 void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
358                                                  Register receiver,
359                                                  Register scratch1,
360                                                  Register scratch2,
361                                                  Label* miss_label) {
362   __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
363   __ mov(eax, scratch1);
364   __ ret(0);
365 }
366
367
368 void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
369                                             Register dst,
370                                             Register src,
371                                             bool inobject,
372                                             int index,
373                                             Representation representation) {
374   ASSERT(!FLAG_track_double_fields || !representation.IsDouble());
375   int offset = index * kPointerSize;
376   if (!inobject) {
377     // Calculate the offset into the properties array.
378     offset = offset + FixedArray::kHeaderSize;
379     __ mov(dst, FieldOperand(src, JSObject::kPropertiesOffset));
380     src = dst;
381   }
382   __ mov(dst, FieldOperand(src, offset));
383 }
384
385
386 static void PushInterceptorArguments(MacroAssembler* masm,
387                                      Register receiver,
388                                      Register holder,
389                                      Register name,
390                                      Handle<JSObject> holder_obj) {
391   STATIC_ASSERT(StubCache::kInterceptorArgsNameIndex == 0);
392   STATIC_ASSERT(StubCache::kInterceptorArgsInfoIndex == 1);
393   STATIC_ASSERT(StubCache::kInterceptorArgsThisIndex == 2);
394   STATIC_ASSERT(StubCache::kInterceptorArgsHolderIndex == 3);
395   STATIC_ASSERT(StubCache::kInterceptorArgsLength == 4);
396   __ push(name);
397   Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
398   ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor));
399   Register scratch = name;
400   __ mov(scratch, Immediate(interceptor));
401   __ push(scratch);
402   __ push(receiver);
403   __ push(holder);
404 }
405
406
407 static void CompileCallLoadPropertyWithInterceptor(
408     MacroAssembler* masm,
409     Register receiver,
410     Register holder,
411     Register name,
412     Handle<JSObject> holder_obj,
413     IC::UtilityId id) {
414   PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
415   __ CallExternalReference(
416       ExternalReference(IC_Utility(id), masm->isolate()),
417       StubCache::kInterceptorArgsLength);
418 }
419
420
421 // Generate call to api function.
422 // This function uses push() to generate smaller, faster code than
423 // the version above. It is an optimization that should will be removed
424 // when api call ICs are generated in hydrogen.
425 static void GenerateFastApiCall(MacroAssembler* masm,
426                                 const CallOptimization& optimization,
427                                 Handle<Map> receiver_map,
428                                 Register receiver,
429                                 Register scratch_in,
430                                 int argc,
431                                 Register* values) {
432   // Copy return value.
433   __ pop(scratch_in);
434   // receiver
435   __ push(receiver);
436   // Write the arguments to stack frame.
437   for (int i = 0; i < argc; i++) {
438     Register arg = values[argc-1-i];
439     ASSERT(!receiver.is(arg));
440     ASSERT(!scratch_in.is(arg));
441     __ push(arg);
442   }
443   __ push(scratch_in);
444   // Stack now matches JSFunction abi.
445   ASSERT(optimization.is_simple_api_call());
446
447   // Abi for CallApiFunctionStub.
448   Register callee = eax;
449   Register call_data = ebx;
450   Register holder = ecx;
451   Register api_function_address = edx;
452   Register scratch = edi;  // scratch_in is no longer valid.
453
454   // Put holder in place.
455   CallOptimization::HolderLookup holder_lookup;
456   Handle<JSObject> api_holder = optimization.LookupHolderOfExpectedType(
457       receiver_map,
458       &holder_lookup);
459   switch (holder_lookup) {
460     case CallOptimization::kHolderIsReceiver:
461       __ Move(holder, receiver);
462       break;
463     case CallOptimization::kHolderFound:
464       __ LoadHeapObject(holder, api_holder);
465      break;
466     case CallOptimization::kHolderNotFound:
467       UNREACHABLE();
468       break;
469   }
470
471   Isolate* isolate = masm->isolate();
472   Handle<JSFunction> function = optimization.constant_function();
473   Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
474   Handle<Object> call_data_obj(api_call_info->data(), isolate);
475
476   // Put callee in place.
477   __ LoadHeapObject(callee, function);
478
479   bool call_data_undefined = false;
480   // Put call_data in place.
481   if (isolate->heap()->InNewSpace(*call_data_obj)) {
482     __ mov(scratch, api_call_info);
483     __ mov(call_data, FieldOperand(scratch, CallHandlerInfo::kDataOffset));
484   } else if (call_data_obj->IsUndefined()) {
485     call_data_undefined = true;
486     __ mov(call_data, Immediate(isolate->factory()->undefined_value()));
487   } else {
488     __ mov(call_data, call_data_obj);
489   }
490
491   // Put api_function_address in place.
492   Address function_address = v8::ToCData<Address>(api_call_info->callback());
493   __ mov(api_function_address, Immediate(function_address));
494
495   // Jump to stub.
496   CallApiFunctionStub stub(true, call_data_undefined, argc);
497   __ TailCallStub(&stub);
498 }
499
500
501 void StoreStubCompiler::GenerateRestoreName(MacroAssembler* masm,
502                                             Label* label,
503                                             Handle<Name> name) {
504   if (!label->is_unused()) {
505     __ bind(label);
506     __ mov(this->name(), Immediate(name));
507   }
508 }
509
510
511 // Generate code to check that a global property cell is empty. Create
512 // the property cell at compilation time if no cell exists for the
513 // property.
514 void StubCompiler::GenerateCheckPropertyCell(MacroAssembler* masm,
515                                              Handle<JSGlobalObject> global,
516                                              Handle<Name> name,
517                                              Register scratch,
518                                              Label* miss) {
519   Handle<PropertyCell> cell =
520       JSGlobalObject::EnsurePropertyCell(global, name);
521   ASSERT(cell->value()->IsTheHole());
522   Handle<Oddball> the_hole = masm->isolate()->factory()->the_hole_value();
523   if (Serializer::enabled()) {
524     __ mov(scratch, Immediate(cell));
525     __ cmp(FieldOperand(scratch, PropertyCell::kValueOffset),
526            Immediate(the_hole));
527   } else {
528     __ cmp(Operand::ForCell(cell), Immediate(the_hole));
529   }
530   __ j(not_equal, miss);
531 }
532
533
534 void StoreStubCompiler::GenerateNegativeHolderLookup(
535     MacroAssembler* masm,
536     Handle<JSObject> holder,
537     Register holder_reg,
538     Handle<Name> name,
539     Label* miss) {
540   if (holder->IsJSGlobalObject()) {
541     GenerateCheckPropertyCell(
542         masm, Handle<JSGlobalObject>::cast(holder), name, scratch1(), miss);
543   } else if (!holder->HasFastProperties() && !holder->IsJSGlobalProxy()) {
544     GenerateDictionaryNegativeLookup(
545         masm, miss, holder_reg, name, scratch1(), scratch2());
546   }
547 }
548
549
// Emits a transitioning store: the receiver's map is switched to
// |transition| and the value is stored into the newly added field (for
// CONSTANT properties the value is only checked against the expected
// constant; the map change itself is the whole store).
// Receiver_reg is preserved on jumps to miss_label, but may be destroyed if
// store is successful.
void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm,
                                                Handle<JSObject> object,
                                                LookupResult* lookup,
                                                Handle<Map> transition,
                                                Handle<Name> name,
                                                Register receiver_reg,
                                                Register storage_reg,
                                                Register value_reg,
                                                Register scratch1,
                                                Register scratch2,
                                                Register unused,
                                                Label* miss_label,
                                                Label* slow) {
  // The transitioned-to property is the last one added to the new map.
  int descriptor = transition->LastAdded();
  DescriptorArray* descriptors = transition->instance_descriptors();
  PropertyDetails details = descriptors->GetDetails(descriptor);
  Representation representation = details.representation();
  ASSERT(!representation.IsNone());

  if (details.type() == CONSTANT) {
    // The stored value must equal the expected constant; otherwise miss.
    Handle<Object> constant(descriptors->GetValue(descriptor), masm->isolate());
    __ CmpObject(value_reg, constant);
    __ j(not_equal, miss_label);
  } else if (FLAG_track_fields && representation.IsSmi()) {
    // Smi representation: only smi values may be stored.
      __ JumpIfNotSmi(value_reg, miss_label);
  } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
    // HeapObject representation: smi values miss.
    __ JumpIfSmi(value_reg, miss_label);
  } else if (FLAG_track_double_fields && representation.IsDouble()) {
    // Double representation: box the value into a fresh heap number held
    // in storage_reg, going through xmm0 (SSE2) or the x87 stack.
    Label do_store, heap_number;
    __ AllocateHeapNumber(storage_reg, scratch1, scratch2, slow);

    // Smi case: untag, convert to double, retag.
    __ JumpIfNotSmi(value_reg, &heap_number);
    __ SmiUntag(value_reg);
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatureScope use_sse2(masm, SSE2);
      __ Cvtsi2sd(xmm0, value_reg);
    } else {
      __ push(value_reg);
      __ fild_s(Operand(esp, 0));
      __ pop(value_reg);
    }
    __ SmiTag(value_reg);
    __ jmp(&do_store);

    // Heap number case: verify the map, then load the double value.
    __ bind(&heap_number);
    __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(),
                miss_label, DONT_DO_SMI_CHECK);
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatureScope use_sse2(masm, SSE2);
      __ movsd(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset));
    } else {
      __ fld_d(FieldOperand(value_reg, HeapNumber::kValueOffset));
    }

    // Write the double into the freshly allocated heap number.
    __ bind(&do_store);
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatureScope use_sse2(masm, SSE2);
      __ movsd(FieldOperand(storage_reg, HeapNumber::kValueOffset), xmm0);
    } else {
      __ fstp_d(FieldOperand(storage_reg, HeapNumber::kValueOffset));
    }
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Perform map transition for the receiver if necessary.
  if (details.type() == FIELD &&
      object->map()->unused_property_fields() == 0) {
    // The properties must be extended before we can store the value.
    // We jump to a runtime call that extends the properties array.
    __ pop(scratch1);  // Return address.
    __ push(receiver_reg);
    __ push(Immediate(transition));
    __ push(value_reg);
    __ push(scratch1);
    __ TailCallExternalReference(
        ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
                          masm->isolate()),
        3,
        1);
    return;
  }

  // Update the map of the object.
  __ mov(scratch1, Immediate(transition));
  __ mov(FieldOperand(receiver_reg, HeapObject::kMapOffset), scratch1);

  // Update the write barrier for the map field.
  __ RecordWriteField(receiver_reg,
                      HeapObject::kMapOffset,
                      scratch1,
                      scratch2,
                      kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);

  if (details.type() == CONSTANT) {
    // No field to write for constants; the map change completes the store.
    ASSERT(value_reg.is(eax));
    __ ret(0);
    return;
  }

  int index = transition->instance_descriptors()->GetFieldIndex(
      transition->LastAdded());

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  SmiCheck smi_check = representation.IsTagged()
      ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
  // TODO(verwaest): Share this code as a code stub.
  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    if (FLAG_track_double_fields && representation.IsDouble()) {
      // Doubles store the boxed heap number, not the raw value register.
      __ mov(FieldOperand(receiver_reg, offset), storage_reg);
    } else {
      __ mov(FieldOperand(receiver_reg, offset), value_reg);
    }

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Update the write barrier for the array address.
      if (!FLAG_track_double_fields || !representation.IsDouble()) {
        __ mov(storage_reg, value_reg);
      }
      __ RecordWriteField(receiver_reg,
                          offset,
                          storage_reg,
                          scratch1,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array (optimistically).
    __ mov(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
    if (FLAG_track_double_fields && representation.IsDouble()) {
      // Doubles store the boxed heap number, not the raw value register.
      __ mov(FieldOperand(scratch1, offset), storage_reg);
    } else {
      __ mov(FieldOperand(scratch1, offset), value_reg);
    }

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Update the write barrier for the array address.
      if (!FLAG_track_double_fields || !representation.IsDouble()) {
        __ mov(storage_reg, value_reg);
      }
      __ RecordWriteField(scratch1,
                          offset,
                          storage_reg,
                          receiver_reg,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  }

  // Return the value (register eax).
  ASSERT(value_reg.is(eax));
  __ ret(0);
}
719
720
721 // Both name_reg and receiver_reg are preserved on jumps to miss_label,
722 // but may be destroyed if store is successful.
// Emits code that stores |value_reg| into the field of |object| described by
// |lookup|, honoring the tracked field representation (smi / heap object /
// double).  Jumps to |miss_label| if the value does not match the expected
// representation.  On success, returns the stored value in eax.
void StoreStubCompiler::GenerateStoreField(MacroAssembler* masm,
                                           Handle<JSObject> object,
                                           LookupResult* lookup,
                                           Register receiver_reg,
                                           Register name_reg,
                                           Register value_reg,
                                           Register scratch1,
                                           Register scratch2,
                                           Label* miss_label) {
  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  int index = lookup->GetFieldIndex().field_index();

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  // After this adjustment a negative index denotes an in-object property and
  // a non-negative index denotes a slot in the out-of-object properties array.
  index -= object->map()->inobject_properties();

  Representation representation = lookup->representation();
  ASSERT(!representation.IsNone());
  if (FLAG_track_fields && representation.IsSmi()) {
    // Field is tracked as smi-only; bail out on a non-smi value.
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
    // Field is tracked as heap-object-only; bail out on a smi value.
    __ JumpIfSmi(value_reg, miss_label);
  } else if (FLAG_track_double_fields && representation.IsDouble()) {
    // Load the double storage.
    // The field currently holds a HeapNumber box; load it into scratch1 so
    // the new value can be written into its payload in place.
    if (index < 0) {
      int offset = object->map()->instance_size() + (index * kPointerSize);
      __ mov(scratch1, FieldOperand(receiver_reg, offset));
    } else {
      __ mov(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
      int offset = index * kPointerSize + FixedArray::kHeaderSize;
      __ mov(scratch1, FieldOperand(scratch1, offset));
    }

    // Store the value into the storage.
    // Convert the incoming value (smi or HeapNumber) to a double, in xmm0
    // when SSE2 is available, otherwise on the x87 FPU stack.
    Label do_store, heap_number;
    __ JumpIfNotSmi(value_reg, &heap_number);
    // Smi case: untag to a machine integer, convert, then re-tag so
    // value_reg still holds the original smi when returned below.
    __ SmiUntag(value_reg);
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatureScope use_sse2(masm, SSE2);
      __ Cvtsi2sd(xmm0, value_reg);
    } else {
      // No SSE2: push the integer and load it via the x87 fild instruction.
      __ push(value_reg);
      __ fild_s(Operand(esp, 0));
      __ pop(value_reg);
    }
    __ SmiTag(value_reg);
    __ jmp(&do_store);
    __ bind(&heap_number);
    // Non-smi: must be a HeapNumber, otherwise miss.
    __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(),
                miss_label, DONT_DO_SMI_CHECK);
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatureScope use_sse2(masm, SSE2);
      __ movsd(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset));
    } else {
      __ fld_d(FieldOperand(value_reg, HeapNumber::kValueOffset));
    }
    __ bind(&do_store);
    // Write the double into the HeapNumber storage loaded earlier.
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatureScope use_sse2(masm, SSE2);
      __ movsd(FieldOperand(scratch1, HeapNumber::kValueOffset), xmm0);
    } else {
      __ fstp_d(FieldOperand(scratch1, HeapNumber::kValueOffset));
    }
    // Return the value (register eax).
    ASSERT(value_reg.is(eax));
    __ ret(0);
    return;
  }

  ASSERT(!FLAG_track_double_fields || !representation.IsDouble());
  // TODO(verwaest): Share this code as a code stub.
  SmiCheck smi_check = representation.IsTagged()
      ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    __ mov(FieldOperand(receiver_reg, offset), value_reg);

    // Smi-only fields never need a write barrier; anything else does.
    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Update the write barrier for the array address.
      // Pass the value being stored in the now unused name_reg.
      __ mov(name_reg, value_reg);
      __ RecordWriteField(receiver_reg,
                          offset,
                          name_reg,
                          scratch1,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array (optimistically).
    __ mov(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
    __ mov(FieldOperand(scratch1, offset), value_reg);

    if (!FLAG_track_fields || !representation.IsSmi()) {
      // Update the write barrier for the array address.
      // Pass the value being stored in the now unused name_reg.
      __ mov(name_reg, value_reg);
      __ RecordWriteField(scratch1,
                          offset,
                          name_reg,
                          receiver_reg,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  }

  // Return the value (register eax).
  ASSERT(value_reg.is(eax));
  __ ret(0);
}
842
843
// Emits an unconditional tail jump into the given code object.
void StubCompiler::GenerateTailCall(MacroAssembler* masm, Handle<Code> code) {
  __ jmp(code, RelocInfo::CODE_TARGET);
}
847
848
849 #undef __
850 #define __ ACCESS_MASM(masm())
851
852
// Walks the prototype chain from the object in |object_reg| (of static
// |type|) up to |holder|, emitting the map checks (for fast/global maps) or
// dictionary negative lookups (for normal dictionary maps) needed to guard a
// handler against later chain mutation.  Jumps to |miss| on any failed check
// and returns the register that holds the holder on the fall-through path.
Register StubCompiler::CheckPrototypes(Handle<HeapType> type,
                                       Register object_reg,
                                       Handle<JSObject> holder,
                                       Register holder_reg,
                                       Register scratch1,
                                       Register scratch2,
                                       Handle<Name> name,
                                       Label* miss,
                                       PrototypeCheckType check) {
  Handle<Map> receiver_map(IC::TypeToMap(*type, isolate()));
  // Make sure that the type feedback oracle harvests the receiver map.
  // TODO(svenpanne) Remove this hack when all ICs are reworked.
  __ mov(scratch1, receiver_map);

  // Make sure there's no overlap between holder and object registers.
  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
         && !scratch2.is(scratch1));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 0;

  // |current| is only known (non-null) when the type is a constant.
  Handle<JSObject> current = Handle<JSObject>::null();
  if (type->IsConstant()) current = Handle<JSObject>::cast(type->AsConstant());
  Handle<JSObject> prototype = Handle<JSObject>::null();
  Handle<Map> current_map = receiver_map;
  Handle<Map> holder_map(holder->map());
  // Traverse the prototype chain and check the maps in the prototype chain for
  // fast and global objects or do negative lookup for normal objects.
  while (!current_map.is_identical_to(holder_map)) {
    ++depth;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(current_map->IsJSGlobalProxyMap() ||
           !current_map->is_access_check_needed());

    prototype = handle(JSObject::cast(current_map->prototype()));
    if (current_map->is_dictionary_map() &&
        !current_map->IsJSGlobalObjectMap() &&
        !current_map->IsJSGlobalProxyMap()) {
      // Dictionary-mode (non-global) object: prove the property is absent
      // here instead of checking the map.
      if (!name->IsUniqueName()) {
        ASSERT(name->IsString());
        name = factory()->InternalizeString(Handle<String>::cast(name));
      }
      ASSERT(current.is_null() ||
             current->property_dictionary()->FindEntry(*name) ==
             NameDictionary::kNotFound);

      GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
                                       scratch1, scratch2);

      // Advance to the prototype by loading it out of the current map.
      __ mov(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
      reg = holder_reg;  // From now on the object will be in holder_reg.
      __ mov(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
    } else {
      bool in_new_space = heap()->InNewSpace(*prototype);
      // The first map is checked by the caller unless CHECK_ALL_MAPS.
      if (depth != 1 || check == CHECK_ALL_MAPS) {
        __ CheckMap(reg, current_map, miss, DONT_DO_SMI_CHECK);
      }

      // Check access rights to the global object.  This has to happen after
      // the map check so that we know that the object is actually a global
      // object.
      if (current_map->IsJSGlobalProxyMap()) {
        __ CheckAccessGlobalProxy(reg, scratch1, scratch2, miss);
      } else if (current_map->IsJSGlobalObjectMap()) {
        GenerateCheckPropertyCell(
            masm(), Handle<JSGlobalObject>::cast(current), name,
            scratch2, miss);
      }

      if (in_new_space) {
        // Save the map in scratch1 for later.
        __ mov(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
      }

      reg = holder_reg;  // From now on the object will be in holder_reg.

      if (in_new_space) {
        // The prototype is in new space; we cannot store a reference to it
        // in the code.  Load it from the map.
        __ mov(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
      } else {
        // The prototype is in old space; load it directly.
        __ mov(reg, prototype);
      }
    }

    // Go to the next object in the prototype chain.
    current = prototype;
    current_map = handle(current->map());
  }

  // Log the check depth.
  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));

  if (depth != 0 || check == CHECK_ALL_MAPS) {
    // Check the holder map.
    __ CheckMap(reg, current_map, miss, DONT_DO_SMI_CHECK);
  }

  // Perform security check for access to the global object.
  ASSERT(current_map->IsJSGlobalProxyMap() ||
         !current_map->is_access_check_needed());
  if (current_map->IsJSGlobalProxyMap()) {
    __ CheckAccessGlobalProxy(reg, scratch1, scratch2, miss);
  }

  // Return the register containing the holder.
  return reg;
}
966
967
// Binds the miss label (if it was ever jumped to) to a tail call into the
// miss builtin; the fast path jumps over that code.
void LoadStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ jmp(&success);
    __ bind(miss);
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    __ bind(&success);
  }
}
977
978
// Store-IC variant of the frontend footer: on the miss path, restore the
// name register before tail-calling the miss builtin.
void StoreStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ jmp(&success);
    GenerateRestoreName(masm(), miss, name);
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    __ bind(&success);
  }
}
988
989
// Frontend for callback loads: performs the usual receiver/prototype checks
// and, for dictionary-mode non-global holders, additionally verifies that the
// holder's property dictionary still maps |name| to |callback|.  Returns the
// register holding the holder.
Register LoadStubCompiler::CallbackHandlerFrontend(
    Handle<HeapType> type,
    Register object_reg,
    Handle<JSObject> holder,
    Handle<Name> name,
    Handle<Object> callback) {
  Label miss;

  Register reg = HandlerFrontendHeader(type, object_reg, holder, name, &miss);

  if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) {
    ASSERT(!reg.is(scratch2()));
    ASSERT(!reg.is(scratch3()));
    Register dictionary = scratch1();
    // If the holder register aliases scratch1 we must keep its value live
    // across the dictionary probe, so spill it to the stack.
    bool must_preserve_dictionary_reg = reg.is(dictionary);

    // Load the properties dictionary.
    if (must_preserve_dictionary_reg) {
      __ push(dictionary);
    }
    __ mov(dictionary, FieldOperand(reg, JSObject::kPropertiesOffset));

    // Probe the dictionary.
    Label probe_done, pop_and_miss;
    NameDictionaryLookupStub::GeneratePositiveLookup(masm(),
                                                     &pop_and_miss,
                                                     &probe_done,
                                                     dictionary,
                                                     this->name(),
                                                     scratch2(),
                                                     scratch3());
    // Miss path: rebalance the stack before jumping to miss.
    __ bind(&pop_and_miss);
    if (must_preserve_dictionary_reg) {
      __ pop(dictionary);
    }
    __ jmp(&miss);
    __ bind(&probe_done);

    // If probing finds an entry in the dictionary, scratch2 contains the
    // index into the dictionary. Check that the value is the callback.
    Register index = scratch2();
    const int kElementsStartOffset =
        NameDictionary::kHeaderSize +
        NameDictionary::kElementsStartIndex * kPointerSize;
    const int kValueOffset = kElementsStartOffset + kPointerSize;
    __ mov(scratch3(),
           Operand(dictionary, index, times_4, kValueOffset - kHeapObjectTag));
    if (must_preserve_dictionary_reg) {
      __ pop(dictionary);
    }
    __ cmp(scratch3(), callback);
    __ j(not_equal, &miss);
  }

  HandlerFrontendFooter(name, &miss);
  return reg;
}
1047
1048
// Tail-calls a (Keyed)LoadFieldStub that loads the field described by
// |field| with the given |representation|.  The holder is moved into the
// receiver register first — presumably the field stub reads the object from
// there (TODO(review): confirm against LoadFieldStub).
void LoadStubCompiler::GenerateLoadField(Register reg,
                                         Handle<JSObject> holder,
                                         PropertyIndex field,
                                         Representation representation) {
  if (!reg.is(receiver())) __ mov(receiver(), reg);
  if (kind() == Code::LOAD_IC) {
    LoadFieldStub stub(field.is_inobject(holder),
                       field.translate(holder),
                       representation);
    GenerateTailCall(masm(), stub.GetCode(isolate()));
  } else {
    KeyedLoadFieldStub stub(field.is_inobject(holder),
                            field.translate(holder),
                            representation);
    GenerateTailCall(masm(), stub.GetCode(isolate()));
  }
}
1066
1067
// Loads a property by calling a fast API getter (no extra arguments).
void LoadStubCompiler::GenerateLoadCallback(
    const CallOptimization& call_optimization,
    Handle<Map> receiver_map) {
  GenerateFastApiCall(
      masm(), call_optimization, receiver_map,
      receiver(), scratch1(), 0, NULL);
}
1075
1076
// Loads a property through an ExecutableAccessorInfo getter: builds the
// PropertyCallbackArguments layout on the stack (layout pinned by the
// STATIC_ASSERTs below), puts the getter's C address in edx, and tail-calls
// CallApiGetterStub.
void LoadStubCompiler::GenerateLoadCallback(
    Register reg,
    Handle<ExecutableAccessorInfo> callback) {
  // Insert additional parameters into the stack frame above return address.
  ASSERT(!scratch3().is(reg));
  __ pop(scratch3());  // Get return address to place it below.

  STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0);
  STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3);
  STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4);
  STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5);
  __ push(receiver());  // receiver
  // Push data from ExecutableAccessorInfo.
  if (isolate()->heap()->InNewSpace(callback->data())) {
    // New-space data cannot be embedded in code; load it from the callback
    // object at runtime instead.
    ASSERT(!scratch2().is(reg));
    __ mov(scratch2(), Immediate(callback));
    __ push(FieldOperand(scratch2(), ExecutableAccessorInfo::kDataOffset));
  } else {
    __ push(Immediate(Handle<Object>(callback->data(), isolate())));
  }
  __ push(Immediate(isolate()->factory()->undefined_value()));  // ReturnValue
  // ReturnValue default value
  __ push(Immediate(isolate()->factory()->undefined_value()));
  __ push(Immediate(reinterpret_cast<int>(isolate())));
  __ push(reg);  // holder

  // Save a pointer to where we pushed the arguments. This will be
  // passed as the const PropertyAccessorInfo& to the C++ callback.
  __ push(esp);

  __ push(name());  // name

  __ push(scratch3());  // Restore return address.

  // Abi for CallApiGetter
  Register getter_address = edx;
  Address function_address = v8::ToCData<Address>(callback->getter());
  __ mov(getter_address, Immediate(function_address));

  CallApiGetterStub stub;
  __ TailCallStub(&stub);
}
1121
1122
// Loads a constant property: materialize |value| in eax and return.
void LoadStubCompiler::GenerateLoadConstant(Handle<Object> value) {
  // Return the constant value.
  __ LoadObject(eax, value);
  __ ret(0);
}
1128
1129
1130 void LoadStubCompiler::GenerateLoadInterceptor(
1131     Register holder_reg,
1132     Handle<Object> object,
1133     Handle<JSObject> interceptor_holder,
1134     LookupResult* lookup,
1135     Handle<Name> name) {
1136   ASSERT(interceptor_holder->HasNamedInterceptor());
1137   ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());
1138
1139   // So far the most popular follow ups for interceptor loads are FIELD
1140   // and CALLBACKS, so inline only them, other cases may be added
1141   // later.
1142   bool compile_followup_inline = false;
1143   if (lookup->IsFound() && lookup->IsCacheable()) {
1144     if (lookup->IsField()) {
1145       compile_followup_inline = true;
1146     } else if (lookup->type() == CALLBACKS &&
1147                lookup->GetCallbackObject()->IsExecutableAccessorInfo()) {
1148       ExecutableAccessorInfo* callback =
1149           ExecutableAccessorInfo::cast(lookup->GetCallbackObject());
1150       compile_followup_inline = callback->getter() != NULL &&
1151           callback->IsCompatibleReceiver(*object);
1152     }
1153   }
1154
1155   if (compile_followup_inline) {
1156     // Compile the interceptor call, followed by inline code to load the
1157     // property from further up the prototype chain if the call fails.
1158     // Check that the maps haven't changed.
1159     ASSERT(holder_reg.is(receiver()) || holder_reg.is(scratch1()));
1160
1161     // Preserve the receiver register explicitly whenever it is different from
1162     // the holder and it is needed should the interceptor return without any
1163     // result. The CALLBACKS case needs the receiver to be passed into C++ code,
1164     // the FIELD case might cause a miss during the prototype check.
1165     bool must_perfrom_prototype_check = *interceptor_holder != lookup->holder();
1166     bool must_preserve_receiver_reg = !receiver().is(holder_reg) &&
1167         (lookup->type() == CALLBACKS || must_perfrom_prototype_check);
1168
1169     // Save necessary data before invoking an interceptor.
1170     // Requires a frame to make GC aware of pushed pointers.
1171     {
1172       FrameScope frame_scope(masm(), StackFrame::INTERNAL);
1173
1174       if (must_preserve_receiver_reg) {
1175         __ push(receiver());
1176       }
1177       __ push(holder_reg);
1178       __ push(this->name());
1179
1180       // Invoke an interceptor.  Note: map checks from receiver to
1181       // interceptor's holder has been compiled before (see a caller
1182       // of this method.)
1183       CompileCallLoadPropertyWithInterceptor(
1184           masm(), receiver(), holder_reg, this->name(), interceptor_holder,
1185           IC::kLoadPropertyWithInterceptorOnly);
1186
1187       // Check if interceptor provided a value for property.  If it's
1188       // the case, return immediately.
1189       Label interceptor_failed;
1190       __ cmp(eax, factory()->no_interceptor_result_sentinel());
1191       __ j(equal, &interceptor_failed);
1192       frame_scope.GenerateLeaveFrame();
1193       __ ret(0);
1194
1195       // Clobber registers when generating debug-code to provoke errors.
1196       __ bind(&interceptor_failed);
1197       if (FLAG_debug_code) {
1198         __ mov(receiver(), Immediate(BitCast<int32_t>(kZapValue)));
1199         __ mov(holder_reg, Immediate(BitCast<int32_t>(kZapValue)));
1200         __ mov(this->name(), Immediate(BitCast<int32_t>(kZapValue)));
1201       }
1202
1203       __ pop(this->name());
1204       __ pop(holder_reg);
1205       if (must_preserve_receiver_reg) {
1206         __ pop(receiver());
1207       }
1208
1209       // Leave the internal frame.
1210     }
1211
1212     GenerateLoadPostInterceptor(holder_reg, interceptor_holder, name, lookup);
1213   } else {  // !compile_followup_inline
1214     // Call the runtime system to load the interceptor.
1215     // Check that the maps haven't changed.
1216     __ pop(scratch2());  // save old return address
1217     PushInterceptorArguments(masm(), receiver(), holder_reg,
1218                              this->name(), interceptor_holder);
1219     __ push(scratch2());  // restore old return address
1220
1221     ExternalReference ref =
1222         ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad),
1223                           isolate());
1224     __ TailCallExternalReference(ref, StubCache::kInterceptorArgsLength, 1);
1225   }
1226 }
1227
1228
// Jumps to |miss| unless |object| is exactly the true or the false value.
void StubCompiler::GenerateBooleanCheck(Register object, Label* miss) {
  Label success;
  // Check that the object is a boolean.
  __ cmp(object, factory()->true_value());
  __ j(equal, &success);
  __ cmp(object, factory()->false_value());
  __ j(not_equal, miss);
  __ bind(&success);
}
1238
1239
// Compiles a store handler for an ExecutableAccessorInfo setter: after the
// usual frontend checks, pushes (receiver, holder, callback, name, value)
// under the return address and tail-calls the kStoreCallbackProperty runtime
// entry with those five arguments.
Handle<Code> StoreStubCompiler::CompileStoreCallback(
    Handle<JSObject> object,
    Handle<JSObject> holder,
    Handle<Name> name,
    Handle<ExecutableAccessorInfo> callback) {
  Register holder_reg = HandlerFrontend(
      IC::CurrentTypeOf(object, isolate()), receiver(), holder, name);

  __ pop(scratch1());  // remove the return address
  __ push(receiver());
  __ push(holder_reg);
  __ Push(callback);
  __ Push(name);
  __ push(value());
  __ push(scratch1());  // restore return address

  // Do tail-call to the runtime system.
  ExternalReference store_callback_property =
      ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
  __ TailCallExternalReference(store_callback_property, 5, 1);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}
1264
1265
// Compiles a store handler that invokes a fast API setter, passing the
// stored value as the single extra argument.
Handle<Code> StoreStubCompiler::CompileStoreCallback(
    Handle<JSObject> object,
    Handle<JSObject> holder,
    Handle<Name> name,
    const CallOptimization& call_optimization) {
  HandlerFrontend(IC::CurrentTypeOf(object, isolate()),
                  receiver(), holder, name);

  Register values[] = { value() };
  GenerateFastApiCall(
      masm(), call_optimization, handle(object->map()),
      receiver(), scratch1(), 1, values);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}
1282
1283
1284 #undef __
1285 #define __ ACCESS_MASM(masm)
1286
1287
// Stores a property by invoking a JavaScript setter function.  The stored
// value (eax) is preserved across the call and returned, since a store IC
// must produce the assigned value, not the setter's return value.  A null
// |setter| only records the deopt continuation point.
void StoreStubCompiler::GenerateStoreViaSetter(
    MacroAssembler* masm,
    Handle<HeapType> type,
    Handle<JSFunction> setter) {
  // ----------- S t a t e -------------
  //  -- eax    : value
  //  -- ecx    : name
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    Register receiver = edx;
    Register value = eax;

    // Save value register, so we can restore it later.
    __ push(value);

    if (!setter.is_null()) {
      // Call the JavaScript setter with receiver and value on the stack.
      if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ mov(receiver,
                FieldOperand(receiver, JSGlobalObject::kGlobalReceiverOffset));
      }
      __ push(receiver);
      __ push(value);
      ParameterCount actual(1);
      ParameterCount expected(setter);
      __ InvokeFunction(setter, expected, actual,
                        CALL_FUNCTION, NullCallWrapper());
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
    }

    // We have to return the passed value, not the return value of the setter.
    __ pop(eax);

    // Restore context register.
    __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  }
  __ ret(0);
}
1333
1334
1335 #undef __
1336 #define __ ACCESS_MASM(masm())
1337
1338
// Compiles a store handler that delegates to the kStoreInterceptorProperty
// runtime entry with (receiver, name, value) as arguments.
Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
    Handle<JSObject> object,
    Handle<Name> name) {
  __ pop(scratch1());  // remove the return address
  __ push(receiver());
  __ push(this->name());
  __ push(value());
  __ push(scratch1());  // restore return address

  // Do tail-call to the runtime system.
  ExternalReference store_ic_property =
      ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate());
  __ TailCallExternalReference(store_ic_property, 3, 1);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}
1356
1357
// Compiles a polymorphic keyed-store dispatcher: compares the receiver's map
// against each entry in |receiver_maps| and jumps to the matching handler.
// If the entry has a transition map, it is loaded into transition_map()
// before jumping.  Unmatched (or smi) receivers fall through to the miss
// builtin.
Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
    MapHandleList* receiver_maps,
    CodeHandleList* handler_stubs,
    MapHandleList* transitioned_maps) {
  Label miss;
  __ JumpIfSmi(receiver(), &miss, Label::kNear);
  __ mov(scratch1(), FieldOperand(receiver(), HeapObject::kMapOffset));
  for (int i = 0; i < receiver_maps->length(); ++i) {
    __ cmp(scratch1(), receiver_maps->at(i));
    if (transitioned_maps->at(i).is_null()) {
      __ j(equal, handler_stubs->at(i));
    } else {
      Label next_map;
      __ j(not_equal, &next_map, Label::kNear);
      __ mov(transition_map(), Immediate(transitioned_maps->at(i)));
      __ jmp(handler_stubs->at(i), RelocInfo::CODE_TARGET);
      __ bind(&next_map);
    }
  }
  __ bind(&miss);
  TailCallBuiltin(masm(), MissBuiltin(kind()));

  // Return the generated code.
  return GetICCode(
      kind(), Code::NORMAL, factory()->empty_string(), POLYMORPHIC);
}
1384
1385
// Compiles a load handler for a property known to be absent along the whole
// (checked) prototype chain: after the frontend checks pass, returns
// undefined.
Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<HeapType> type,
                                                      Handle<JSObject> last,
                                                      Handle<Name> name) {
  NonexistentHandlerFrontend(type, last, name);

  // Return undefined if maps of the full prototype chain are still the
  // same and no global property with this name contains a value.
  __ mov(eax, isolate()->factory()->undefined_value());
  __ ret(0);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}
1399
1400
1401 Register* LoadStubCompiler::registers() {
1402   // receiver, name, scratch1, scratch2, scratch3, scratch4.
1403   static Register registers[] = { edx, ecx, ebx, eax, edi, no_reg };
1404   return registers;
1405 }
1406
1407
1408 Register* KeyedLoadStubCompiler::registers() {
1409   // receiver, name, scratch1, scratch2, scratch3, scratch4.
1410   static Register registers[] = { edx, ecx, ebx, eax, edi, no_reg };
1411   return registers;
1412 }
1413
1414
1415 Register* StoreStubCompiler::registers() {
1416   // receiver, name, value, scratch1, scratch2, scratch3.
1417   static Register registers[] = { edx, ecx, eax, ebx, edi, no_reg };
1418   return registers;
1419 }
1420
1421
1422 Register* KeyedStoreStubCompiler::registers() {
1423   // receiver, name, value, scratch1, scratch2, scratch3.
1424   static Register registers[] = { edx, ecx, eax, ebx, edi, no_reg };
1425   return registers;
1426 }
1427
1428
1429 #undef __
1430 #define __ ACCESS_MASM(masm)
1431
1432
// Loads a property by invoking a JavaScript getter function with the
// receiver on the stack; the getter's result is left in the return register.
// A null |getter| only records the deopt continuation point.
void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm,
                                             Handle<HeapType> type,
                                             Register receiver,
                                             Handle<JSFunction> getter) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    if (!getter.is_null()) {
      // Call the JavaScript getter with the receiver on the stack.
      if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ mov(receiver,
                FieldOperand(receiver, JSGlobalObject::kGlobalReceiverOffset));
      }
      __ push(receiver);
      ParameterCount actual(0);
      ParameterCount expected(getter);
      __ InvokeFunction(getter, expected, actual,
                        CALL_FUNCTION, NullCallWrapper());
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context register.
    __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  }
  __ ret(0);
}
1463
1464
1465 #undef __
1466 #define __ ACCESS_MASM(masm())
1467
1468
// Compiles a load handler for a global property backed by a PropertyCell.
// Loads the cell's value into eax and, unless the property is DontDelete,
// misses when the value is the hole (i.e. the property was deleted).
Handle<Code> LoadStubCompiler::CompileLoadGlobal(
    Handle<HeapType> type,
    Handle<GlobalObject> global,
    Handle<PropertyCell> cell,
    Handle<Name> name,
    bool is_dont_delete) {
  Label miss;

  HandlerFrontendHeader(type, receiver(), global, name, &miss);
  // Get the value from the cell.
  if (Serializer::enabled()) {
    // When serializing, the cell address cannot be baked into an operand;
    // load the cell handle first, then its value.
    __ mov(eax, Immediate(cell));
    __ mov(eax, FieldOperand(eax, PropertyCell::kValueOffset));
  } else {
    __ mov(eax, Operand::ForCell(cell));
  }

  // Check for deleted property if property can actually be deleted.
  if (!is_dont_delete) {
    __ cmp(eax, factory()->the_hole_value());
    __ j(equal, &miss);
  } else if (FLAG_debug_code) {
    __ cmp(eax, factory()->the_hole_value());
    __ Check(not_equal, kDontDeleteCellsCannotContainTheHole);
  }

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->named_load_global_stub(), 1);
  // The code above already loads the result into the return register.
  __ ret(0);

  HandlerFrontendFooter(name, &miss);

  // Return the generated code.
  return GetCode(kind(), Code::NORMAL, name);
}
1505
1506
// Compiles a polymorphic IC dispatcher: optionally checks the property name
// (for keyed ICs), then compares the receiver's map against each
// non-deprecated map in |types| and jumps to the corresponding handler.
// Smi receivers go to the Number case's handler if one exists, otherwise
// miss.  Anything unmatched tail-calls the miss builtin.
Handle<Code> BaseLoadStoreStubCompiler::CompilePolymorphicIC(
    TypeHandleList* types,
    CodeHandleList* handlers,
    Handle<Name> name,
    Code::StubType type,
    IcCheckType check) {
  Label miss;

  if (check == PROPERTY &&
      (kind() == Code::KEYED_LOAD_IC || kind() == Code::KEYED_STORE_IC)) {
    // Keyed ICs dispatch on the name as well; miss on a different key.
    __ cmp(this->name(), Immediate(name));
    __ j(not_equal, &miss);
  }

  Label number_case;
  Label* smi_target = IncludesNumberType(types) ? &number_case : &miss;
  __ JumpIfSmi(receiver(), smi_target);

  Register map_reg = scratch1();
  __ mov(map_reg, FieldOperand(receiver(), HeapObject::kMapOffset));
  int receiver_count = types->length();
  int number_of_handled_maps = 0;
  for (int current = 0; current < receiver_count; ++current) {
    Handle<HeapType> type = types->at(current);
    Handle<Map> map = IC::TypeToMap(*type, isolate());
    // Deprecated maps are skipped; their objects will be migrated on miss.
    if (!map->is_deprecated()) {
      number_of_handled_maps++;
      __ cmp(map_reg, map);
      if (type->Is(HeapType::Number())) {
        ASSERT(!number_case.is_unused());
        // Smi receivers jump here and share the Number type's handler.
        __ bind(&number_case);
      }
      __ j(equal, handlers->at(current));
    }
  }
  ASSERT(number_of_handled_maps != 0);

  __ bind(&miss);
  TailCallBuiltin(masm(), MissBuiltin(kind()));

  // Return the generated code.
  InlineCacheState state =
      number_of_handled_maps > 1 ? POLYMORPHIC : MONOMORPHIC;
  return GetICCode(kind(), type, name, state);
}
1552
1553
1554 #undef __
1555 #define __ ACCESS_MASM(masm)
1556
1557
// Loads an element from a receiver whose elements are in dictionary
// (NumberDictionary) mode.  Misses on a non-smi key; falls back to the slow
// builtin when the dictionary probe fails.
void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
    MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- ecx    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label slow, miss;

  // This stub is meant to be tail-jumped to, the receiver must already
  // have been verified by the caller to not be a smi.
  __ JumpIfNotSmi(ecx, &miss);
  // Untagged key in ebx, elements (the dictionary) in eax.
  __ mov(ebx, ecx);
  __ SmiUntag(ebx);
  __ mov(eax, FieldOperand(edx, JSObject::kElementsOffset));

  // Push receiver on the stack to free up a register for the dictionary
  // probing.
  __ push(edx);
  __ LoadFromNumberDictionary(&slow, eax, ecx, ebx, edx, edi, eax);
  // Pop receiver before returning.
  __ pop(edx);
  __ ret(0);

  __ bind(&slow);
  // Restore the receiver pushed above before going to the slow builtin.
  __ pop(edx);

  // ----------- S t a t e -------------
  //  -- ecx    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Slow);

  __ bind(&miss);
  // ----------- S t a t e -------------
  //  -- ecx    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss);
}
1600
1601
1602 #undef __
1603
1604 } }  // namespace v8::internal
1605
1606 #endif  // V8_TARGET_ARCH_IA32