// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "v8.h"

#if V8_TARGET_ARCH_IA32

#include "ic-inl.h"
#include "codegen.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)


static void ProbeTable(Isolate* isolate,
                       MacroAssembler* masm,
                       Code::Flags flags,
                       StubCache::Table table,
                       Register name,
                       Register receiver,
                       // Index of the cache entry, pointer-size scaled.
                       Register offset,
                       Register extra) {
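  // Each Entry in the stub cache table holds three words: key, value (the
  // code object) and map. The three references below point at the key, value
  // and map slots of the first entry, so one scaled offset addresses all
  // three fields of a given entry.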
  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
  ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
  ExternalReference map_offset(isolate->stub_cache()->map_reference(table));

  Label miss;

  // Multiply by 3 because there are 3 fields per entry (name, code, map).
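  // lea computes offset + offset * 2, i.e. offset * 3, in a single
  // instruction.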
  __ lea(offset, Operand(offset, offset, times_2, 0));

  if (extra.is_valid()) {
    // Get the code entry from the cache.
    __ mov(extra, Operand::StaticArray(offset, times_1, value_offset));

    // Check that the key in the entry matches the name.
    __ cmp(name, Operand::StaticArray(offset, times_1, key_offset));
    __ j(not_equal, &miss);

    // Check the map matches.
    __ mov(offset, Operand::StaticArray(offset, times_1, map_offset));
    __ cmp(offset, FieldOperand(receiver, HeapObject::kMapOffset));
    __ j(not_equal, &miss);

    // Check that the flags match what we're looking for.
    __ mov(offset, FieldOperand(extra, Code::kFlagsOffset));
    __ and_(offset, ~Code::kFlagsNotUsedInLookup);
    __ cmp(offset, flags);
    __ j(not_equal, &miss);

#ifdef DEBUG
    if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
      __ jmp(&miss);
    } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
      __ jmp(&miss);
    }
#endif

    // Jump to the first instruction in the code stub.
    __ add(extra, Immediate(Code::kHeaderSize - kHeapObjectTag));
    __ jmp(extra);

    __ bind(&miss);
  } else {
    // Save the offset on the stack.
    __ push(offset);

    // Check that the key in the entry matches the name.
    __ cmp(name, Operand::StaticArray(offset, times_1, key_offset));
    __ j(not_equal, &miss);

    // Check the map matches.
    __ mov(offset, Operand::StaticArray(offset, times_1, map_offset));
    __ cmp(offset, FieldOperand(receiver, HeapObject::kMapOffset));
    __ j(not_equal, &miss);

    // Restore offset register.
    __ mov(offset, Operand(esp, 0));

    // Get the code entry from the cache.
    __ mov(offset, Operand::StaticArray(offset, times_1, value_offset));

    // Check that the flags match what we're looking for.
    __ mov(offset, FieldOperand(offset, Code::kFlagsOffset));
    __ and_(offset, ~Code::kFlagsNotUsedInLookup);
    __ cmp(offset, flags);
    __ j(not_equal, &miss);

#ifdef DEBUG
    if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
      __ jmp(&miss);
    } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
      __ jmp(&miss);
    }
#endif

    // Restore offset and re-load code entry from cache.
    __ pop(offset);
    __ mov(offset, Operand::StaticArray(offset, times_1, value_offset));

    // Jump to the first instruction in the code stub.
    __ add(offset, Immediate(Code::kHeaderSize - kHeapObjectTag));
    __ jmp(offset);

    // Pop at miss.
    __ bind(&miss);
    __ pop(offset);
  }
}


void StubCompiler::GenerateDictionaryNegativeLookup(MacroAssembler* masm,
                                                    Label* miss_label,
                                                    Register receiver,
                                                    Handle<Name> name,
                                                    Register scratch0,
                                                    Register scratch1) {
  ASSERT(name->IsUniqueName());
  ASSERT(!receiver.is(scratch0));
  Counters* counters = masm->isolate()->counters();
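  // Both counters are incremented up front; the miss counter is decremented
  // again on the success path at the end of this function, so only real
  // misses stay counted.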
  __ IncrementCounter(counters->negative_lookups(), 1);
  __ IncrementCounter(counters->negative_lookups_miss(), 1);

  __ mov(scratch0, FieldOperand(receiver, HeapObject::kMapOffset));

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access checks.
  __ test_b(FieldOperand(scratch0, Map::kBitFieldOffset),
            kInterceptorOrAccessCheckNeededMask);
  __ j(not_zero, miss_label);

  // Check that receiver is a JSObject.
  __ CmpInstanceType(scratch0, FIRST_SPEC_OBJECT_TYPE);
  __ j(below, miss_label);

  // Load properties array.
  Register properties = scratch0;
  __ mov(properties, FieldOperand(receiver, JSObject::kPropertiesOffset));

  // Check that the properties array is a dictionary.
  __ cmp(FieldOperand(properties, HeapObject::kMapOffset),
         Immediate(masm->isolate()->factory()->hash_table_map()));
  __ j(not_equal, miss_label);

  Label done;
  NameDictionaryLookupStub::GenerateNegativeLookup(masm,
                                                   miss_label,
                                                   &done,
                                                   properties,
                                                   name,
                                                   scratch1);
  __ bind(&done);
  __ DecrementCounter(counters->negative_lookups_miss(), 1);
}


void StubCache::GenerateProbe(MacroAssembler* masm,
                              Code::Flags flags,
                              Register receiver,
                              Register name,
                              Register scratch,
                              Register extra,
                              Register extra2,
                              Register extra3) {
  Label miss;

  // Assert that code is valid.  The multiplying code relies on the entry size
  // being 12.
  ASSERT(sizeof(Entry) == 12);

  // Assert the flags do not name a specific type.
  ASSERT(Code::ExtractTypeFromFlags(flags) == 0);

  // Assert that there are no register conflicts.
  ASSERT(!scratch.is(receiver));
  ASSERT(!scratch.is(name));
  ASSERT(!extra.is(receiver));
  ASSERT(!extra.is(name));
  ASSERT(!extra.is(scratch));

  // Assert scratch and extra registers are valid, and extra2/3 are unused.
  ASSERT(!scratch.is(no_reg));
  ASSERT(extra2.is(no_reg));
  ASSERT(extra3.is(no_reg));

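  // Reuse |scratch| as the probe offset register and drop the old name so it
  // cannot be used by mistake below.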
  Register offset = scratch;
  scratch = no_reg;

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1);

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Get the map of the receiver and compute the hash.
  __ mov(offset, FieldOperand(name, Name::kHashFieldOffset));
  __ add(offset, FieldOperand(receiver, HeapObject::kMapOffset));
  __ xor_(offset, flags);
  // We mask out the last two bits because they are not part of the hash and
  // they are always 01 for maps.  Also in the two 'and' instructions below.
  __ and_(offset, (kPrimaryTableSize - 1) << kHeapObjectTagSize);
  // ProbeTable expects the offset to be pointer scaled, which it is, because
  // the heap object tag size is 2 and the pointer size log 2 is also 2.
  ASSERT(kHeapObjectTagSize == kPointerSizeLog2);

  // Probe the primary table.
  ProbeTable(isolate(), masm, flags, kPrimary, name, receiver, offset, extra);

  // Primary miss: Compute hash for secondary probe.
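  // The secondary hash folds the name and flags back in, so keys that
  // collide in the primary table tend to land in different slots here.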
  __ mov(offset, FieldOperand(name, Name::kHashFieldOffset));
  __ add(offset, FieldOperand(receiver, HeapObject::kMapOffset));
  __ xor_(offset, flags);
  __ and_(offset, (kPrimaryTableSize - 1) << kHeapObjectTagSize);
  __ sub(offset, name);
  __ add(offset, Immediate(flags));
  __ and_(offset, (kSecondaryTableSize - 1) << kHeapObjectTagSize);

  // Probe the secondary table.
  ProbeTable(
      isolate(), masm, flags, kSecondary, name, receiver, offset, extra);

  // Cache miss: Fall-through and let caller handle the miss by
  // entering the runtime system.
  __ bind(&miss);
  __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1);
}


void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                       int index,
                                                       Register prototype) {
  __ LoadGlobalFunction(index, prototype);
  __ LoadGlobalFunctionInitialMap(prototype, prototype);
  // Load the prototype from the initial map.
  __ mov(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
}


void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm,
    int index,
    Register prototype,
    Label* miss) {
  // Get the global function with the given index.
  Handle<JSFunction> function(
      JSFunction::cast(masm->isolate()->native_context()->get(index)));
  // Check we're still in the same context.
  Register scratch = prototype;
  const int offset = Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX);
  __ mov(scratch, Operand(esi, offset));
  __ mov(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));
  __ cmp(Operand(scratch, Context::SlotOffset(index)), function);
  __ j(not_equal, miss);

  // Load its initial map. The global functions all have initial maps.
  __ Move(prototype, Immediate(Handle<Map>(function->initial_map())));
  // Load the prototype from the initial map.
  __ mov(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
}


void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
                                           Register receiver,
                                           Register scratch,
                                           Label* miss_label) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss_label);

  // Check that the object is a JS array.
  __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
  __ j(not_equal, miss_label);

  // Load length directly from the JS array.
  __ mov(eax, FieldOperand(receiver, JSArray::kLengthOffset));
  __ ret(0);
}


void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
                                                 Register receiver,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  __ mov(eax, scratch1);
  __ ret(0);
}


void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
                                            Register dst,
                                            Register src,
                                            bool inobject,
                                            int index,
                                            Representation representation) {
  ASSERT(!representation.IsDouble());
  int offset = index * kPointerSize;
  if (!inobject) {
    // Calculate the offset into the properties array.
    offset = offset + FixedArray::kHeaderSize;
    __ mov(dst, FieldOperand(src, JSObject::kPropertiesOffset));
    src = dst;
  }
  __ mov(dst, FieldOperand(src, offset));
}


static void PushInterceptorArguments(MacroAssembler* masm,
                                     Register receiver,
                                     Register holder,
                                     Register name,
                                     Handle<JSObject> holder_obj) {
  STATIC_ASSERT(StubCache::kInterceptorArgsNameIndex == 0);
  STATIC_ASSERT(StubCache::kInterceptorArgsInfoIndex == 1);
  STATIC_ASSERT(StubCache::kInterceptorArgsThisIndex == 2);
  STATIC_ASSERT(StubCache::kInterceptorArgsHolderIndex == 3);
  STATIC_ASSERT(StubCache::kInterceptorArgsLength == 4);
  __ push(name);
  Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
  ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor));
  Register scratch = name;
  __ mov(scratch, Immediate(interceptor));
  __ push(scratch);
  __ push(receiver);
  __ push(holder);
}


static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm,
    Register receiver,
    Register holder,
    Register name,
    Handle<JSObject> holder_obj,
    IC::UtilityId id) {
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
  __ CallExternalReference(
      ExternalReference(IC_Utility(id), masm->isolate()),
      StubCache::kInterceptorArgsLength);
}


// Generate call to api function.
// This function uses push() to generate smaller, faster code than
// the version above. It is an optimization that will be removed
// when api call ICs are generated in hydrogen.
void StubCompiler::GenerateFastApiCall(MacroAssembler* masm,
                                       const CallOptimization& optimization,
                                       Handle<Map> receiver_map,
                                       Register receiver,
                                       Register scratch_in,
                                       bool is_store,
                                       int argc,
                                       Register* values) {
  // Pop the return address; it is pushed back on top of the arguments below.
  __ pop(scratch_in);
  // receiver
  __ push(receiver);
  // Write the arguments to the stack frame.
  for (int i = 0; i < argc; i++) {
    Register arg = values[argc-1-i];
    ASSERT(!receiver.is(arg));
    ASSERT(!scratch_in.is(arg));
    __ push(arg);
  }
  __ push(scratch_in);
  // Stack now matches JSFunction abi.
  ASSERT(optimization.is_simple_api_call());

  // Abi for CallApiFunctionStub.
  Register callee = eax;
  Register call_data = ebx;
  Register holder = ecx;
  Register api_function_address = edx;
  Register scratch = edi;  // scratch_in is no longer valid.

  // Put holder in place.
  CallOptimization::HolderLookup holder_lookup;
  Handle<JSObject> api_holder = optimization.LookupHolderOfExpectedType(
      receiver_map,
      &holder_lookup);
  switch (holder_lookup) {
    case CallOptimization::kHolderIsReceiver:
      __ Move(holder, receiver);
      break;
    case CallOptimization::kHolderFound:
      __ LoadHeapObject(holder, api_holder);
      break;
    case CallOptimization::kHolderNotFound:
      UNREACHABLE();
      break;
  }

  Isolate* isolate = masm->isolate();
  Handle<JSFunction> function = optimization.constant_function();
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  Handle<Object> call_data_obj(api_call_info->data(), isolate);

  // Put callee in place.
  __ LoadHeapObject(callee, function);

  bool call_data_undefined = false;
  // Put call_data in place.
  if (isolate->heap()->InNewSpace(*call_data_obj)) {
    __ mov(scratch, api_call_info);
    __ mov(call_data, FieldOperand(scratch, CallHandlerInfo::kDataOffset));
  } else if (call_data_obj->IsUndefined()) {
    call_data_undefined = true;
    __ mov(call_data, Immediate(isolate->factory()->undefined_value()));
  } else {
    __ mov(call_data, call_data_obj);
  }

  // Put api_function_address in place.
  Address function_address = v8::ToCData<Address>(api_call_info->callback());
  __ mov(api_function_address, Immediate(function_address));

  // Jump to stub.
  CallApiFunctionStub stub(isolate, is_store, call_data_undefined, argc);
  __ TailCallStub(&stub);
}


void StoreStubCompiler::GenerateRestoreName(MacroAssembler* masm,
                                            Label* label,
                                            Handle<Name> name) {
  if (!label->is_unused()) {
    __ bind(label);
    __ mov(this->name(), Immediate(name));
  }
}


// Generate code to check that a global property cell is empty. Create
// the property cell at compilation time if no cell exists for the
// property.
void StubCompiler::GenerateCheckPropertyCell(MacroAssembler* masm,
                                             Handle<JSGlobalObject> global,
                                             Handle<Name> name,
                                             Register scratch,
                                             Label* miss) {
  Handle<PropertyCell> cell =
      JSGlobalObject::EnsurePropertyCell(global, name);
  ASSERT(cell->value()->IsTheHole());
  Handle<Oddball> the_hole = masm->isolate()->factory()->the_hole_value();
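  // With the serializer enabled the cell's raw address cannot be embedded in
  // the code, so the cell is loaded via its handle instead.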
  if (Serializer::enabled(masm->isolate())) {
    __ mov(scratch, Immediate(cell));
    __ cmp(FieldOperand(scratch, PropertyCell::kValueOffset),
           Immediate(the_hole));
  } else {
    __ cmp(Operand::ForCell(cell), Immediate(the_hole));
  }
  __ j(not_equal, miss);
}


void StoreStubCompiler::GenerateNegativeHolderLookup(
    MacroAssembler* masm,
    Handle<JSObject> holder,
    Register holder_reg,
    Handle<Name> name,
    Label* miss) {
  if (holder->IsJSGlobalObject()) {
    GenerateCheckPropertyCell(
        masm, Handle<JSGlobalObject>::cast(holder), name, scratch1(), miss);
  } else if (!holder->HasFastProperties() && !holder->IsJSGlobalProxy()) {
    GenerateDictionaryNegativeLookup(
        masm, miss, holder_reg, name, scratch1(), scratch2());
  }
}


// Receiver_reg is preserved on jumps to miss_label, but may be destroyed if
// store is successful.
void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm,
                                                Handle<JSObject> object,
                                                LookupResult* lookup,
                                                Handle<Map> transition,
                                                Handle<Name> name,
                                                Register receiver_reg,
                                                Register storage_reg,
                                                Register value_reg,
                                                Register scratch1,
                                                Register scratch2,
                                                Register unused,
                                                Label* miss_label,
                                                Label* slow) {
  int descriptor = transition->LastAdded();
  DescriptorArray* descriptors = transition->instance_descriptors();
  PropertyDetails details = descriptors->GetDetails(descriptor);
  Representation representation = details.representation();
  ASSERT(!representation.IsNone());

  if (details.type() == CONSTANT) {
    Handle<Object> constant(descriptors->GetValue(descriptor), masm->isolate());
    __ CmpObject(value_reg, constant);
    __ j(not_equal, miss_label);
  } else if (representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
    HeapType* field_type = descriptors->GetFieldType(descriptor);
    HeapType::Iterator<Map> it = field_type->Classes();
    if (!it.Done()) {
      Label do_store;
      while (true) {
        __ CompareMap(value_reg, it.Current());
        it.Advance();
        if (it.Done()) {
          __ j(not_equal, miss_label);
          break;
        }
        __ j(equal, &do_store, Label::kNear);
      }
      __ bind(&do_store);
    }
  } else if (representation.IsDouble()) {
    Label do_store, heap_number;
    __ AllocateHeapNumber(storage_reg, scratch1, scratch2, slow);

    __ JumpIfNotSmi(value_reg, &heap_number);
    __ SmiUntag(value_reg);
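    // Convert the untagged integer in value_reg to a double: use SSE2's
    // cvtsi2sd when available, otherwise go through the x87 FPU via a stack
    // slot.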
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatureScope use_sse2(masm, SSE2);
      __ Cvtsi2sd(xmm0, value_reg);
    } else {
      __ push(value_reg);
      __ fild_s(Operand(esp, 0));
      __ pop(value_reg);
    }
    __ SmiTag(value_reg);
    __ jmp(&do_store);

    __ bind(&heap_number);
    __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(),
                miss_label, DONT_DO_SMI_CHECK);
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatureScope use_sse2(masm, SSE2);
      __ movsd(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset));
    } else {
      __ fld_d(FieldOperand(value_reg, HeapNumber::kValueOffset));
    }

    __ bind(&do_store);
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatureScope use_sse2(masm, SSE2);
      __ movsd(FieldOperand(storage_reg, HeapNumber::kValueOffset), xmm0);
    } else {
      __ fstp_d(FieldOperand(storage_reg, HeapNumber::kValueOffset));
    }
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Perform map transition for the receiver if necessary.
  if (details.type() == FIELD &&
      object->map()->unused_property_fields() == 0) {
    // The properties must be extended before we can store the value.
    // We jump to a runtime call that extends the properties array.
    __ pop(scratch1);  // Return address.
    __ push(receiver_reg);
    __ push(Immediate(transition));
    __ push(value_reg);
    __ push(scratch1);
    __ TailCallExternalReference(
        ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
                          masm->isolate()),
        3,
        1);
    return;
  }

  // Update the map of the object.
  __ mov(scratch1, Immediate(transition));
  __ mov(FieldOperand(receiver_reg, HeapObject::kMapOffset), scratch1);

  // Update the write barrier for the map field.
  __ RecordWriteField(receiver_reg,
                      HeapObject::kMapOffset,
                      scratch1,
                      scratch2,
                      kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);

  if (details.type() == CONSTANT) {
    ASSERT(value_reg.is(eax));
    __ ret(0);
    return;
  }

  int index = transition->instance_descriptors()->GetFieldIndex(
      transition->LastAdded());

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  SmiCheck smi_check = representation.IsTagged()
      ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
  // TODO(verwaest): Share this code as a code stub.
  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    if (representation.IsDouble()) {
      __ mov(FieldOperand(receiver_reg, offset), storage_reg);
    } else {
      __ mov(FieldOperand(receiver_reg, offset), value_reg);
    }

    if (!representation.IsSmi()) {
      // Update the write barrier for the array address.
      if (!representation.IsDouble()) {
        __ mov(storage_reg, value_reg);
      }
      __ RecordWriteField(receiver_reg,
                          offset,
                          storage_reg,
                          scratch1,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array (optimistically).
    __ mov(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
    if (representation.IsDouble()) {
      __ mov(FieldOperand(scratch1, offset), storage_reg);
    } else {
      __ mov(FieldOperand(scratch1, offset), value_reg);
    }

    if (!representation.IsSmi()) {
      // Update the write barrier for the array address.
      if (!representation.IsDouble()) {
        __ mov(storage_reg, value_reg);
      }
      __ RecordWriteField(scratch1,
                          offset,
                          storage_reg,
                          receiver_reg,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  }

  // Return the value (register eax).
  ASSERT(value_reg.is(eax));
  __ ret(0);
}


// Both name_reg and receiver_reg are preserved on jumps to miss_label,
// but may be destroyed if store is successful.
void StoreStubCompiler::GenerateStoreField(MacroAssembler* masm,
                                           Handle<JSObject> object,
                                           LookupResult* lookup,
                                           Register receiver_reg,
                                           Register name_reg,
                                           Register value_reg,
                                           Register scratch1,
                                           Register scratch2,
                                           Label* miss_label) {
  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  int index = lookup->GetFieldIndex().field_index();

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  Representation representation = lookup->representation();
  ASSERT(!representation.IsNone());
  if (representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
    HeapType* field_type = lookup->GetFieldType();
    HeapType::Iterator<Map> it = field_type->Classes();
    if (!it.Done()) {
      Label do_store;
      while (true) {
        __ CompareMap(value_reg, it.Current());
        it.Advance();
        if (it.Done()) {
          __ j(not_equal, miss_label);
          break;
        }
        __ j(equal, &do_store, Label::kNear);
      }
      __ bind(&do_store);
    }
  } else if (representation.IsDouble()) {
    // Load the double storage.
    if (index < 0) {
      int offset = object->map()->instance_size() + (index * kPointerSize);
      __ mov(scratch1, FieldOperand(receiver_reg, offset));
    } else {
      __ mov(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
      int offset = index * kPointerSize + FixedArray::kHeaderSize;
      __ mov(scratch1, FieldOperand(scratch1, offset));
    }

    // Store the value into the storage.
    Label do_store, heap_number;
    __ JumpIfNotSmi(value_reg, &heap_number);
    __ SmiUntag(value_reg);
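    // Same smi-to-double conversion as in GenerateStoreTransition above:
    // SSE2 when available, x87 otherwise.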
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatureScope use_sse2(masm, SSE2);
      __ Cvtsi2sd(xmm0, value_reg);
    } else {
      __ push(value_reg);
      __ fild_s(Operand(esp, 0));
      __ pop(value_reg);
    }
    __ SmiTag(value_reg);
    __ jmp(&do_store);
    __ bind(&heap_number);
    __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(),
                miss_label, DONT_DO_SMI_CHECK);
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatureScope use_sse2(masm, SSE2);
      __ movsd(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset));
    } else {
      __ fld_d(FieldOperand(value_reg, HeapNumber::kValueOffset));
    }
    __ bind(&do_store);
    if (CpuFeatures::IsSupported(SSE2)) {
      CpuFeatureScope use_sse2(masm, SSE2);
      __ movsd(FieldOperand(scratch1, HeapNumber::kValueOffset), xmm0);
    } else {
      __ fstp_d(FieldOperand(scratch1, HeapNumber::kValueOffset));
    }
    // Return the value (register eax).
    ASSERT(value_reg.is(eax));
    __ ret(0);
    return;
  }

  ASSERT(!representation.IsDouble());
  // TODO(verwaest): Share this code as a code stub.
  SmiCheck smi_check = representation.IsTagged()
      ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    __ mov(FieldOperand(receiver_reg, offset), value_reg);

    if (!representation.IsSmi()) {
      // Update the write barrier for the array address.
      // Pass the value being stored in the now unused name_reg.
      __ mov(name_reg, value_reg);
      __ RecordWriteField(receiver_reg,
                          offset,
                          name_reg,
                          scratch1,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array (optimistically).
    __ mov(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
    __ mov(FieldOperand(scratch1, offset), value_reg);

    if (!representation.IsSmi()) {
      // Update the write barrier for the array address.
      // Pass the value being stored in the now unused name_reg.
      __ mov(name_reg, value_reg);
      __ RecordWriteField(scratch1,
                          offset,
                          name_reg,
                          receiver_reg,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  }

  // Return the value (register eax).
  ASSERT(value_reg.is(eax));
  __ ret(0);
}


void StubCompiler::GenerateTailCall(MacroAssembler* masm, Handle<Code> code) {
  __ jmp(code, RelocInfo::CODE_TARGET);
}


#undef __
#define __ ACCESS_MASM(masm())


Register StubCompiler::CheckPrototypes(Handle<HeapType> type,
                                       Register object_reg,
                                       Handle<JSObject> holder,
                                       Register holder_reg,
                                       Register scratch1,
                                       Register scratch2,
                                       Handle<Name> name,
                                       Label* miss,
                                       PrototypeCheckType check) {
  Handle<Map> receiver_map(IC::TypeToMap(*type, isolate()));

  // Make sure there's no overlap between holder and object registers.
  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
         && !scratch2.is(scratch1));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 0;

  Handle<JSObject> current = Handle<JSObject>::null();
  if (type->IsConstant()) {
    current = Handle<JSObject>::cast(type->AsConstant()->Value());
  }
  Handle<JSObject> prototype = Handle<JSObject>::null();
  Handle<Map> current_map = receiver_map;
  Handle<Map> holder_map(holder->map());
  // Traverse the prototype chain and check the maps in the prototype chain for
  // fast and global objects or do negative lookup for normal objects.
  while (!current_map.is_identical_to(holder_map)) {
    ++depth;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(current_map->IsJSGlobalProxyMap() ||
           !current_map->is_access_check_needed());

    prototype = handle(JSObject::cast(current_map->prototype()));
    if (current_map->is_dictionary_map() &&
        !current_map->IsJSGlobalObjectMap() &&
        !current_map->IsJSGlobalProxyMap()) {
      if (!name->IsUniqueName()) {
        ASSERT(name->IsString());
        name = factory()->InternalizeString(Handle<String>::cast(name));
      }
      ASSERT(current.is_null() ||
             current->property_dictionary()->FindEntry(name) ==
             NameDictionary::kNotFound);

      GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
                                       scratch1, scratch2);

      __ mov(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
      reg = holder_reg;  // From now on the object will be in holder_reg.
      __ mov(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
    } else {
      bool in_new_space = heap()->InNewSpace(*prototype);
      if (depth != 1 || check == CHECK_ALL_MAPS) {
        __ CheckMap(reg, current_map, miss, DONT_DO_SMI_CHECK);
      }

      // Check access rights to the global object.  This has to happen after
      // the map check so that we know that the object is actually a global
      // object.
      if (current_map->IsJSGlobalProxyMap()) {
        __ CheckAccessGlobalProxy(reg, scratch1, scratch2, miss);
      } else if (current_map->IsJSGlobalObjectMap()) {
        GenerateCheckPropertyCell(
            masm(), Handle<JSGlobalObject>::cast(current), name,
            scratch2, miss);
      }

      if (in_new_space) {
        // Save the map in scratch1 for later.
        __ mov(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
      }

      reg = holder_reg;  // From now on the object will be in holder_reg.

      if (in_new_space) {
        // The prototype is in new space; we cannot store a reference to it
        // in the code.  Load it from the map.
        __ mov(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
      } else {
        // The prototype is in old space; load it directly.
        __ mov(reg, prototype);
      }
    }

    // Go to the next object in the prototype chain.
    current = prototype;
    current_map = handle(current->map());
  }

  // Log the check depth.
  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));

  if (depth != 0 || check == CHECK_ALL_MAPS) {
    // Check the holder map.
    __ CheckMap(reg, current_map, miss, DONT_DO_SMI_CHECK);
  }

  // Perform security check for access to the global object.
  ASSERT(current_map->IsJSGlobalProxyMap() ||
         !current_map->is_access_check_needed());
  if (current_map->IsJSGlobalProxyMap()) {
    __ CheckAccessGlobalProxy(reg, scratch1, scratch2, miss);
  }

  // Return the register containing the holder.
  return reg;
}


void LoadStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ jmp(&success);
    __ bind(miss);
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    __ bind(&success);
  }
}


void StoreStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ jmp(&success);
    GenerateRestoreName(masm(), miss, name);
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    __ bind(&success);
  }
}


Register LoadStubCompiler::CallbackHandlerFrontend(
    Handle<HeapType> type,
    Register object_reg,
    Handle<JSObject> holder,
    Handle<Name> name,
    Handle<Object> callback) {
  Label miss;

  Register reg = HandlerFrontendHeader(type, object_reg, holder, name, &miss);

  if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) {
    ASSERT(!reg.is(scratch2()));
    ASSERT(!reg.is(scratch3()));
    Register dictionary = scratch1();
    bool must_preserve_dictionary_reg = reg.is(dictionary);

    // Load the properties dictionary.
    if (must_preserve_dictionary_reg) {
      __ push(dictionary);
    }
    __ mov(dictionary, FieldOperand(reg, JSObject::kPropertiesOffset));

    // Probe the dictionary.
    Label probe_done, pop_and_miss;
    NameDictionaryLookupStub::GeneratePositiveLookup(masm(),
                                                     &pop_and_miss,
                                                     &probe_done,
                                                     dictionary,
                                                     this->name(),
                                                     scratch2(),
                                                     scratch3());
    __ bind(&pop_and_miss);
    if (must_preserve_dictionary_reg) {
      __ pop(dictionary);
    }
    __ jmp(&miss);
    __ bind(&probe_done);

    // If probing finds an entry in the dictionary, scratch2 contains the
    // index into the dictionary. Check that the value is the callback.
    Register index = scratch2();
    const int kElementsStartOffset =
        NameDictionary::kHeaderSize +
        NameDictionary::kElementsStartIndex * kPointerSize;
    const int kValueOffset = kElementsStartOffset + kPointerSize;
    __ mov(scratch3(),
           Operand(dictionary, index, times_4, kValueOffset - kHeapObjectTag));
    if (must_preserve_dictionary_reg) {
      __ pop(dictionary);
    }
    __ cmp(scratch3(), callback);
    __ j(not_equal, &miss);
  }

  HandlerFrontendFooter(name, &miss);
  return reg;
}


void LoadStubCompiler::GenerateLoadField(Register reg,
                                         Handle<JSObject> holder,
                                         PropertyIndex field,
                                         Representation representation) {
  if (!reg.is(receiver())) __ mov(receiver(), reg);
  if (kind() == Code::LOAD_IC) {
    LoadFieldStub stub(isolate(),
                       field.is_inobject(holder),
                       field.translate(holder),
                       representation);
    GenerateTailCall(masm(), stub.GetCode());
  } else {
    KeyedLoadFieldStub stub(isolate(),
                            field.is_inobject(holder),
                            field.translate(holder),
                            representation);
    GenerateTailCall(masm(), stub.GetCode());
  }
}


void LoadStubCompiler::GenerateLoadCallback(
    Register reg,
    Handle<ExecutableAccessorInfo> callback) {
  // Insert additional parameters into the stack frame above return address.
  ASSERT(!scratch3().is(reg));
  __ pop(scratch3());  // Get return address to place it below.

  STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0);
  STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3);
  STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4);
  STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5);
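  // The pushes below build the PropertyCallbackArguments block on the stack;
  // their order must mirror the k*Index layout asserted above, with the
  // holder ending up closest to the stack pointer.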
  __ push(receiver());  // receiver
  // Push data from ExecutableAccessorInfo.
  if (isolate()->heap()->InNewSpace(callback->data())) {
    ASSERT(!scratch2().is(reg));
    __ mov(scratch2(), Immediate(callback));
    __ push(FieldOperand(scratch2(), ExecutableAccessorInfo::kDataOffset));
  } else {
    __ push(Immediate(Handle<Object>(callback->data(), isolate())));
  }
  __ push(Immediate(isolate()->factory()->undefined_value()));  // ReturnValue
  // ReturnValue default value
  __ push(Immediate(isolate()->factory()->undefined_value()));
  __ push(Immediate(reinterpret_cast<int>(isolate())));
  __ push(reg);  // holder

  // Save a pointer to where we pushed the arguments. This will be
  // passed as the const PropertyAccessorInfo& to the C++ callback.
  __ push(esp);

  __ push(name());  // name

  __ push(scratch3());  // Restore return address.

  // Abi for CallApiGetter
  Register getter_address = edx;
  Address function_address = v8::ToCData<Address>(callback->getter());
  __ mov(getter_address, Immediate(function_address));

  CallApiGetterStub stub(isolate());
  __ TailCallStub(&stub);
}


void LoadStubCompiler::GenerateLoadConstant(Handle<Object> value) {
  // Return the constant value.
  __ LoadObject(eax, value);
  __ ret(0);
}


void LoadStubCompiler::GenerateLoadInterceptor(
    Register holder_reg,
    Handle<Object> object,
    Handle<JSObject> interceptor_holder,
    LookupResult* lookup,
    Handle<Name> name) {
  ASSERT(interceptor_holder->HasNamedInterceptor());
  ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());

  // So far the most popular follow-ups for interceptor loads are FIELD
  // and CALLBACKS, so inline only those; other cases may be added later.
  bool compile_followup_inline = false;
  if (lookup->IsFound() && lookup->IsCacheable()) {
    if (lookup->IsField()) {
      compile_followup_inline = true;
    } else if (lookup->type() == CALLBACKS &&
               lookup->GetCallbackObject()->IsExecutableAccessorInfo()) {
      ExecutableAccessorInfo* callback =
          ExecutableAccessorInfo::cast(lookup->GetCallbackObject());
      compile_followup_inline = callback->getter() != NULL &&
          callback->IsCompatibleReceiver(*object);
    }
  }

  if (compile_followup_inline) {
    // Compile the interceptor call, followed by inline code to load the
    // property from further up the prototype chain if the call fails.
    // Check that the maps haven't changed.
    ASSERT(holder_reg.is(receiver()) || holder_reg.is(scratch1()));

    // Preserve the receiver register explicitly whenever it is different from
    // the holder and it is needed should the interceptor return without any
    // result. The CALLBACKS case needs the receiver to be passed into C++
    // code; the FIELD case might cause a miss during the prototype check.
    bool must_perform_prototype_check = *interceptor_holder != lookup->holder();
    bool must_preserve_receiver_reg = !receiver().is(holder_reg) &&
        (lookup->type() == CALLBACKS || must_perform_prototype_check);

    // Save necessary data before invoking an interceptor.
    // Requires a frame to make GC aware of pushed pointers.
    {
      FrameScope frame_scope(masm(), StackFrame::INTERNAL);

      if (must_preserve_receiver_reg) {
        __ push(receiver());
      }
      __ push(holder_reg);
      __ push(this->name());

      // Invoke an interceptor.  Note: map checks from receiver to
      // interceptor's holder have been compiled before (see the caller
      // of this method).
      CompileCallLoadPropertyWithInterceptor(
          masm(), receiver(), holder_reg, this->name(), interceptor_holder,
          IC::kLoadPropertyWithInterceptorOnly);

      // Check if interceptor provided a value for property.  If it's
      // the case, return immediately.
      Label interceptor_failed;
      __ cmp(eax, factory()->no_interceptor_result_sentinel());
      __ j(equal, &interceptor_failed);
      frame_scope.GenerateLeaveFrame();
      __ ret(0);

      // Clobber registers when generating debug-code to provoke errors.
      __ bind(&interceptor_failed);
      if (FLAG_debug_code) {
        __ mov(receiver(), Immediate(BitCast<int32_t>(kZapValue)));
        __ mov(holder_reg, Immediate(BitCast<int32_t>(kZapValue)));
        __ mov(this->name(), Immediate(BitCast<int32_t>(kZapValue)));
      }

      __ pop(this->name());
      __ pop(holder_reg);
      if (must_preserve_receiver_reg) {
        __ pop(receiver());
      }

      // Leave the internal frame.
    }

    GenerateLoadPostInterceptor(holder_reg, interceptor_holder, name, lookup);
  } else {  // !compile_followup_inline
    // Call the runtime system to load the interceptor.
    // Check that the maps haven't changed.
    __ pop(scratch2());  // save old return address
    PushInterceptorArguments(masm(), receiver(), holder_reg,
                             this->name(), interceptor_holder);
    __ push(scratch2());  // restore old return address

    ExternalReference ref =
        ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad),
                          isolate());
    __ TailCallExternalReference(ref, StubCache::kInterceptorArgsLength, 1);
  }
}


void StubCompiler::GenerateBooleanCheck(Register object, Label* miss) {
  Label success;
  // Check that the object is a boolean.
  __ cmp(object, factory()->true_value());
  __ j(equal, &success);
  __ cmp(object, factory()->false_value());
  __ j(not_equal, miss);
  __ bind(&success);
}


Handle<Code> StoreStubCompiler::CompileStoreCallback(
    Handle<JSObject> object,
    Handle<JSObject> holder,
    Handle<Name> name,
    Handle<ExecutableAccessorInfo> callback) {
  Register holder_reg = HandlerFrontend(
      IC::CurrentTypeOf(object, isolate()), receiver(), holder, name);

  __ pop(scratch1());  // remove the return address
  __ push(receiver());
  __ push(holder_reg);
  __ Push(callback);
  __ Push(name);
  __ push(value());
  __ push(scratch1());  // restore return address

  // Do tail-call to the runtime system.
  ExternalReference store_callback_property =
      ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
  __ TailCallExternalReference(store_callback_property, 5, 1);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}


#undef __
#define __ ACCESS_MASM(masm)


void StoreStubCompiler::GenerateStoreViaSetter(
    MacroAssembler* masm,
    Handle<HeapType> type,
    Register receiver,
    Handle<JSFunction> setter) {
  // ----------- S t a t e -------------
  //  -- esp[0] : return address
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Save value register, so we can restore it later.
    __ push(value());

    if (!setter.is_null()) {
      // Call the JavaScript setter with receiver and value on the stack.
      if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ mov(receiver,
               FieldOperand(receiver, JSGlobalObject::kGlobalReceiverOffset));
      }
      __ push(receiver);
      __ push(value());
      ParameterCount actual(1);
      ParameterCount expected(setter);
      __ InvokeFunction(setter, expected, actual,
                        CALL_FUNCTION, NullCallWrapper());
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
    }

    // We have to return the passed value, not the return value of the setter.
    __ pop(eax);

    // Restore context register.
    __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  }
  __ ret(0);
}


#undef __
#define __ ACCESS_MASM(masm())


Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
    Handle<JSObject> object,
    Handle<Name> name) {
  __ pop(scratch1());  // remove the return address
  __ push(receiver());
  __ push(this->name());
  __ push(value());
  __ push(scratch1());  // restore return address

  // Do tail-call to the runtime system.
  ExternalReference store_ic_property =
      ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate());
  __ TailCallExternalReference(store_ic_property, 3, 1);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}


void StoreStubCompiler::GenerateStoreArrayLength() {
  // Prepare tail call to StoreIC_ArrayLength.
  __ pop(scratch1());  // remove the return address
  __ push(receiver());
  __ push(value());
  __ push(scratch1());  // restore return address

  ExternalReference ref =
      ExternalReference(IC_Utility(IC::kStoreIC_ArrayLength),
                        masm()->isolate());
  __ TailCallExternalReference(ref, 2, 1);
}


Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
    MapHandleList* receiver_maps,
    CodeHandleList* handler_stubs,
    MapHandleList* transitioned_maps) {
  Label miss;
  __ JumpIfSmi(receiver(), &miss, Label::kNear);
  __ mov(scratch1(), FieldOperand(receiver(), HeapObject::kMapOffset));
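  // Compare the receiver map against each candidate map; a match either
  // jumps straight to the handler or installs the transition map first.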
  for (int i = 0; i < receiver_maps->length(); ++i) {
    __ cmp(scratch1(), receiver_maps->at(i));
    if (transitioned_maps->at(i).is_null()) {
      __ j(equal, handler_stubs->at(i));
    } else {
      Label next_map;
      __ j(not_equal, &next_map, Label::kNear);
      __ mov(transition_map(), Immediate(transitioned_maps->at(i)));
      __ jmp(handler_stubs->at(i), RelocInfo::CODE_TARGET);
      __ bind(&next_map);
    }
  }
  __ bind(&miss);
  TailCallBuiltin(masm(), MissBuiltin(kind()));

  // Return the generated code.
  return GetICCode(
      kind(), Code::NORMAL, factory()->empty_string(), POLYMORPHIC);
}


Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<HeapType> type,
                                                      Handle<JSObject> last,
                                                      Handle<Name> name) {
  NonexistentHandlerFrontend(type, last, name);

  // Return undefined if maps of the full prototype chain are still the
  // same and no global property with this name contains a value.
  __ mov(eax, isolate()->factory()->undefined_value());
  __ ret(0);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}


Register* LoadStubCompiler::registers() {
  // receiver, name, scratch1, scratch2, scratch3, scratch4.
  static Register registers[] = { edx, ecx, ebx, eax, edi, no_reg };
  return registers;
}


Register* KeyedLoadStubCompiler::registers() {
  // receiver, name, scratch1, scratch2, scratch3, scratch4.
  static Register registers[] = { edx, ecx, ebx, eax, edi, no_reg };
  return registers;
}


Register StoreStubCompiler::value() {
  return eax;
}


Register* StoreStubCompiler::registers() {
  // receiver, name, scratch1, scratch2, scratch3.
  static Register registers[] = { edx, ecx, ebx, edi, no_reg };
  return registers;
}


Register* KeyedStoreStubCompiler::registers() {
  // receiver, name, scratch1, scratch2, scratch3.
  static Register registers[] = { edx, ecx, ebx, edi, no_reg };
  return registers;
}


#undef __
#define __ ACCESS_MASM(masm)


void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm,
                                             Handle<HeapType> type,
                                             Register receiver,
                                             Handle<JSFunction> getter) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    if (!getter.is_null()) {
      // Call the JavaScript getter with the receiver on the stack.
      if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ mov(receiver,
               FieldOperand(receiver, JSGlobalObject::kGlobalReceiverOffset));
      }
      __ push(receiver);
      ParameterCount actual(0);
      ParameterCount expected(getter);
      __ InvokeFunction(getter, expected, actual,
                        CALL_FUNCTION, NullCallWrapper());
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context register.
    __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  }
  __ ret(0);
}


#undef __
#define __ ACCESS_MASM(masm())


Handle<Code> LoadStubCompiler::CompileLoadGlobal(
    Handle<HeapType> type,
    Handle<GlobalObject> global,
    Handle<PropertyCell> cell,
    Handle<Name> name,
    bool is_dont_delete) {
  Label miss;

  HandlerFrontendHeader(type, receiver(), global, name, &miss);
  // Get the value from the cell.
  if (Serializer::enabled(isolate())) {
    __ mov(eax, Immediate(cell));
    __ mov(eax, FieldOperand(eax, PropertyCell::kValueOffset));
  } else {
    __ mov(eax, Operand::ForCell(cell));
  }

  // Check for deleted property if property can actually be deleted.
  if (!is_dont_delete) {
    __ cmp(eax, factory()->the_hole_value());
    __ j(equal, &miss);
  } else if (FLAG_debug_code) {
    __ cmp(eax, factory()->the_hole_value());
    __ Check(not_equal, kDontDeleteCellsCannotContainTheHole);
  }

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->named_load_global_stub(), 1);
  // The code above already loads the result into the return register.
  __ ret(0);

  HandlerFrontendFooter(name, &miss);

  // Return the generated code.
  return GetCode(kind(), Code::NORMAL, name);
}


Handle<Code> BaseLoadStoreStubCompiler::CompilePolymorphicIC(
    TypeHandleList* types,
    CodeHandleList* handlers,
    Handle<Name> name,
    Code::StubType type,
    IcCheckType check) {
  Label miss;

  if (check == PROPERTY &&
      (kind() == Code::KEYED_LOAD_IC || kind() == Code::KEYED_STORE_IC)) {
    __ cmp(this->name(), Immediate(name));
    __ j(not_equal, &miss);
  }

  Label number_case;
  Label* smi_target = IncludesNumberType(types) ? &number_case : &miss;
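  // A smi receiver can only be a number: when one of |types| is Number the
  // smi check dispatches to that type's handler, otherwise it misses.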
  __ JumpIfSmi(receiver(), smi_target);

  Register map_reg = scratch1();
  __ mov(map_reg, FieldOperand(receiver(), HeapObject::kMapOffset));
  int receiver_count = types->length();
  int number_of_handled_maps = 0;
  for (int current = 0; current < receiver_count; ++current) {
    Handle<HeapType> type = types->at(current);
    Handle<Map> map = IC::TypeToMap(*type, isolate());
    if (!map->is_deprecated()) {
      number_of_handled_maps++;
      __ cmp(map_reg, map);
      if (type->Is(HeapType::Number())) {
        ASSERT(!number_case.is_unused());
        __ bind(&number_case);
      }
      __ j(equal, handlers->at(current));
    }
  }
  ASSERT(number_of_handled_maps != 0);

  __ bind(&miss);
  TailCallBuiltin(masm(), MissBuiltin(kind()));

  // Return the generated code.
  InlineCacheState state =
      number_of_handled_maps > 1 ? POLYMORPHIC : MONOMORPHIC;
  return GetICCode(kind(), type, name, state);
}


#undef __
#define __ ACCESS_MASM(masm)


void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
    MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- ecx    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label slow, miss;

  // This stub is meant to be tail-jumped to; the receiver must already
  // have been verified by the caller not to be a smi.
  __ JumpIfNotSmi(ecx, &miss);
  __ mov(ebx, ecx);
  __ SmiUntag(ebx);
  __ mov(eax, FieldOperand(edx, JSObject::kElementsOffset));

  // Push receiver on the stack to free up a register for the dictionary
  // probing.
  __ push(edx);
  __ LoadFromNumberDictionary(&slow, eax, ecx, ebx, edx, edi, eax);
  // Pop receiver before returning.
  __ pop(edx);
  __ ret(0);

  __ bind(&slow);
  __ pop(edx);

  // ----------- S t a t e -------------
  //  -- ecx    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Slow);

  __ bind(&miss);
  // ----------- S t a t e -------------
  //  -- ecx    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss);
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_IA32