1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
7 #if V8_TARGET_ARCH_IA32
9 #include "src/codegen.h"
10 #include "src/ic/ic.h"
11 #include "src/ic/stub-cache.h"
16 // ----------------------------------------------------------------------------
17 // Static IC stub generators.
20 #define __ ACCESS_MASM(masm)
// Branches to |global_object| when |type| is one of the instance types that
// must be treated as a global receiver (global object, builtins object, or
// global proxy); falls through for all other instance types.
static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm, Register type,
                                            Label* global_object) {
  // type: holds the receiver instance type on entry.
  __ cmp(type, JS_GLOBAL_OBJECT_TYPE);
  __ j(equal, global_object);
  __ cmp(type, JS_BUILTINS_OBJECT_TYPE);
  __ j(equal, global_object);
  __ cmp(type, JS_GLOBAL_PROXY_TYPE);
  __ j(equal, global_object);
// Helper function used to load a property from a dictionary backing
// storage. This function may fail to load a property even though it is
// in the dictionary, so code at miss_label must always call a backup
// property load that is complete. This function is safe to call if
// name is not internalized, and will jump to the miss_label in that
// case. The generated code assumes that the receiver has slow
// properties, is not a global object and does not have interceptors.
static void GenerateDictionaryLoad(MacroAssembler* masm, Label* miss_label,
                                   Register elements, Register name,
                                   Register r0, Register r1, Register result) {
  // Register use:
  // elements - holds the property dictionary on entry and is unchanged.
  // name - holds the name of the property on entry and is unchanged.
  // r0 - used for the index into the property dictionary
  // r1 - used to hold the capacity of the property dictionary.
  // result - holds the result on exit.
  // Probe the dictionary.
  NameDictionaryLookupStub::GeneratePositiveLookup(masm, miss_label, &done,
                                                   elements, name, r0, r1);
  // If probing finds an entry in the dictionary, r0 contains the
  // index into the dictionary. Check that the value is a normal
  // property: test the type bits in the entry's details word (a smi,
  // hence the kSmiTagSize shift of the mask below).
  const int kElementsStartOffset =
      NameDictionary::kHeaderSize +
      NameDictionary::kElementsStartIndex * kPointerSize;
  // The details word is the third word of each (key, value, details) entry.
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
  __ test(Operand(elements, r0, times_4, kDetailsOffset - kHeapObjectTag),
          Immediate(PropertyDetails::TypeField::kMask << kSmiTagSize));
  __ j(not_zero, miss_label);
  // Get the value at the masked, scaled index.
  const int kValueOffset = kElementsStartOffset + kPointerSize;
  __ mov(result, Operand(elements, r0, times_4, kValueOffset - kHeapObjectTag));
// Helper function used to store a property to a dictionary backing
// storage. This function may fail to store a property even though it
// is in the dictionary, so code at miss_label must always call a
// backup property store that is complete. This function is safe to
// call if name is not internalized, and will jump to the miss_label in
// that case. The generated code assumes that the receiver has slow
// properties, is not a global object and does not have interceptors.
static void GenerateDictionaryStore(MacroAssembler* masm, Label* miss_label,
                                    Register elements, Register name,
                                    Register value, Register r0, Register r1) {
  // Register use:
  // elements - holds the property dictionary on entry and is clobbered.
  // name - holds the name of the property on entry and is unchanged.
  // value - holds the value to store and is unchanged.
  // r0 - used for index into the property dictionary and is clobbered.
  // r1 - used to hold the capacity of the property dictionary and is clobbered.
  // Probe the dictionary.
  NameDictionaryLookupStub::GeneratePositiveLookup(masm, miss_label, &done,
                                                   elements, name, r0, r1);
  // If probing finds an entry in the dictionary, r0 contains the
  // index into the dictionary. Check that the value is a normal
  // property that is not read only.
  const int kElementsStartOffset =
      NameDictionary::kHeaderSize +
      NameDictionary::kElementsStartIndex * kPointerSize;
  // The details word is the third word of each (key, value, details) entry.
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
  // Reject entries whose type bits are set or that carry READ_ONLY.
  const int kTypeAndReadOnlyMask =
      (PropertyDetails::TypeField::kMask |
       PropertyDetails::AttributesField::encode(READ_ONLY))
  __ test(Operand(elements, r0, times_4, kDetailsOffset - kHeapObjectTag),
          Immediate(kTypeAndReadOnlyMask));
  __ j(not_zero, miss_label);
  // Store the value at the masked, scaled index.
  const int kValueOffset = kElementsStartOffset + kPointerSize;
  __ lea(r0, Operand(elements, r0, times_4, kValueOffset - kHeapObjectTag));
  __ mov(Operand(r0, 0), value);
  // Update write barrier. Make sure not to clobber the value.
  __ RecordWrite(elements, r0, r1, kDontSaveFPRegs);
// Checks the receiver for special cases (value type, slow case bits).
// Falls through for regular JS object.
static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
                                           Register receiver, Register map,
                                           int interceptor_bit, Label* slow) {
  // Register use:
  // receiver - holds the receiver and is unchanged.
  // Scratch registers:
  // map - used to hold the map of the receiver.
  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, slow);
  // Get the map of the receiver.
  __ mov(map, FieldOperand(receiver, HeapObject::kMapOffset));
  // Bail out to the slow case if the receiver needs an access check or has
  // the requested interceptor bit set in its map's bit field.
  __ test_b(FieldOperand(map, Map::kBitFieldOffset),
            (1 << Map::kIsAccessCheckNeeded) | (1 << interceptor_bit));
  __ j(not_zero, slow);
  // Check that the object is some kind of JS object EXCEPT JS Value type.
  // In the case that the object is a value-wrapper object,
  // we enter the runtime system to make sure that indexing
  // into string objects works as intended.
  DCHECK(JS_OBJECT_TYPE > JS_VALUE_TYPE);
  __ CmpInstanceType(map, JS_OBJECT_TYPE);
// Loads an indexed element from a fast case array.
// If not_fast_array is NULL, doesn't perform the elements map check.
static void GenerateFastArrayLoad(MacroAssembler* masm, Register receiver,
                                  Register key, Register scratch,
                                  Register result, Label* not_fast_array,
                                  Label* out_of_range) {
  // Register use:
  // receiver - holds the receiver and is unchanged.
  // key - holds the key and is unchanged (must be a smi).
  // Scratch registers:
  // scratch - used to hold elements of the receiver and the loaded value.
  // result - holds the result on exit if the load succeeds and
  __ mov(scratch, FieldOperand(receiver, JSObject::kElementsOffset));
  if (not_fast_array != NULL) {
    // Check that the object is in fast mode and writable.
    __ CheckMap(scratch, masm->isolate()->factory()->fixed_array_map(),
                not_fast_array, DONT_DO_SMI_CHECK);
  __ AssertFastElements(scratch);
  // Check that the key (index) is within bounds.
  __ cmp(key, FieldOperand(scratch, FixedArray::kLengthOffset));
  __ j(above_equal, out_of_range);
  // Fast case: Do the load. The key is a smi (tag size 1 per the assert
  // below), so scaling the tagged value by 2 yields index * kPointerSize.
  STATIC_ASSERT((kPointerSize == 4) && (kSmiTagSize == 1) && (kSmiTag == 0));
  __ mov(scratch, FieldOperand(scratch, key, times_2, FixedArray::kHeaderSize));
  __ cmp(scratch, Immediate(masm->isolate()->factory()->the_hole_value()));
  // In case the loaded value is the_hole we have to consult GetProperty
  // to ensure the prototype chain is searched.
  __ j(equal, out_of_range);
  if (!result.is(scratch)) {
    __ mov(result, scratch);
// Checks whether a key is an array index string or a unique name.
// Falls through if the key is a unique name.
static void GenerateKeyNameCheck(MacroAssembler* masm, Register key,
                                 Register map, Register hash,
                                 Label* index_string, Label* not_unique) {
  // Register use:
  // key - holds the key and is unchanged. Assumed to be non-smi.
  // Scratch registers:
  // map - used to hold the map of the key.
  // hash - used to hold the hash of the key.
  __ CmpObjectType(key, LAST_UNIQUE_NAME_TYPE, map);
  __ j(above, not_unique);
  STATIC_ASSERT(LAST_UNIQUE_NAME_TYPE == FIRST_NONSTRING_TYPE);
  // Instance type exactly LAST_UNIQUE_NAME_TYPE is a unique name; skip the
  // string checks below.
  __ j(equal, &unique);
  // Is the string an array index, with cached numeric value?
  __ mov(hash, FieldOperand(key, Name::kHashFieldOffset));
  __ test(hash, Immediate(Name::kContainsCachedArrayIndexMask));
  __ j(zero, index_string);
  // Is the string internalized? We already know it's a string so a single
  // bit test is enough.
  STATIC_ASSERT(kNotInternalizedTag != 0);
  __ test_b(FieldOperand(map, Map::kInstanceTypeOffset),
            kIsNotInternalizedMask);
  __ j(not_zero, not_unique);
// Computes the address of a mapped (aliased) entry of a sloppy-arguments
// object and returns an Operand for the context slot holding the value.
// Jumps to |unmapped_case| (with the parameter map left in scratch1) when
// the key is outside the mapped range or the map entry is the hole, and to
// |slow_case| for non-JSObject receivers or non-smi/negative keys.
static Operand GenerateMappedArgumentsLookup(
    MacroAssembler* masm, Register object, Register key, Register scratch1,
    Register scratch2, Label* unmapped_case, Label* slow_case) {
  Heap* heap = masm->isolate()->heap();
  Factory* factory = masm->isolate()->factory();
  // Check that the receiver is a JSObject. Because of the elements
  // map check later, we do not need to check for interceptors or
  // whether it requires access checks.
  __ JumpIfSmi(object, slow_case);
  // Check that the object is some kind of JSObject.
  __ CmpObjectType(object, FIRST_JS_RECEIVER_TYPE, scratch1);
  __ j(below, slow_case);
  // Check that the key is a positive smi.
  // 0x80000001 tests the smi tag bit and the sign bit in one instruction.
  __ test(key, Immediate(0x80000001));
  __ j(not_zero, slow_case);
  // Load the elements into scratch1 and check its map.
  Handle<Map> arguments_map(heap->sloppy_arguments_elements_map());
  __ mov(scratch1, FieldOperand(object, JSObject::kElementsOffset));
  __ CheckMap(scratch1, arguments_map, slow_case, DONT_DO_SMI_CHECK);
  // Check if element is in the range of mapped arguments. If not, jump
  // to the unmapped lookup with the parameter map in scratch1.
  __ mov(scratch2, FieldOperand(scratch1, FixedArray::kLengthOffset));
  // The first two slots of the parameter map are not mapped entries
  // (slot 0 is the context; see kContextOffset/kHeaderSize below).
  __ sub(scratch2, Immediate(Smi::FromInt(2)));
  __ cmp(key, scratch2);
  __ j(above_equal, unmapped_case);
  // Load element index and check whether it is the hole.
  const int kHeaderSize = FixedArray::kHeaderSize + 2 * kPointerSize;
      FieldOperand(scratch1, key, times_half_pointer_size, kHeaderSize));
  __ cmp(scratch2, factory->the_hole_value());
  __ j(equal, unmapped_case);
  // Load value from context and return it. We can reuse scratch1 because
  // we do not jump to the unmapped lookup (which requires the parameter
  const int kContextOffset = FixedArray::kHeaderSize;
  __ mov(scratch1, FieldOperand(scratch1, kContextOffset));
  return FieldOperand(scratch1, scratch2, times_half_pointer_size,
                      Context::kHeaderSize);
// Computes the address of an entry in the arguments backing store, which is
// referenced by the second element of the parameter map. Jumps to |slow_case|
// if the backing store is not a plain FixedArray or the key is out of bounds.
// Note: clobbers parameter_map (reused as backing_store below).
static Operand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
                                               Register parameter_map,
  // Element is in arguments backing store, which is referenced by the
  // second element of the parameter_map.
  const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize;
  Register backing_store = parameter_map;
  __ mov(backing_store, FieldOperand(parameter_map, kBackingStoreOffset));
  Handle<Map> fixed_array_map(masm->isolate()->heap()->fixed_array_map());
  __ CheckMap(backing_store, fixed_array_map, slow_case, DONT_DO_SMI_CHECK);
  __ mov(scratch, FieldOperand(backing_store, FixedArray::kLengthOffset));
  __ cmp(key, scratch);
  __ j(greater_equal, slow_case);
  return FieldOperand(backing_store, key, times_half_pointer_size,
                      FixedArray::kHeaderSize);
// Generic keyed load. Dispatches on the key: smi keys try fast elements,
// then number-dictionary elements; name keys try the keyed lookup cache,
// then the receiver's property dictionary; everything else (and any miss)
// falls through to the runtime via GenerateRuntimeGetProperty.
void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
  // The return address is on the stack.
  Label slow, check_name, index_smi, index_name, property_array_property;
  Label probe_dictionary, check_number_dictionary;
  Register receiver = ReceiverRegister();
  Register key = NameRegister();
  DCHECK(receiver.is(edx));
  // Check that the key is a smi.
  __ JumpIfNotSmi(key, &check_name);
  // Now the key is known to be a smi. This place is also jumped to from
  // where a numeric string is converted to a smi.
  GenerateKeyedLoadReceiverCheck(masm, receiver, eax,
                                 Map::kHasIndexedInterceptor, &slow);
  // Check the receiver's map to see if it has fast elements.
  __ CheckFastElements(eax, &check_number_dictionary);
  GenerateFastArrayLoad(masm, receiver, key, eax, eax, NULL, &slow);
  Isolate* isolate = masm->isolate();
  Counters* counters = isolate->counters();
  __ IncrementCounter(counters->keyed_load_generic_smi(), 1);
  __ bind(&check_number_dictionary);
  __ mov(eax, FieldOperand(receiver, JSObject::kElementsOffset));
  // Check whether the elements is a number dictionary.
  // ebx: untagged index
  __ CheckMap(eax, isolate->factory()->hash_table_map(), &slow,
  Label slow_pop_receiver;
  // Push receiver on the stack to free up a register for the dictionary
  __ LoadFromNumberDictionary(&slow_pop_receiver, eax, key, ebx, edx, edi, eax);
  // Pop receiver before returning.
  __ bind(&slow_pop_receiver);
  // Pop the receiver from the stack and jump to runtime.
  // Slow case: jump to runtime.
  __ IncrementCounter(counters->keyed_load_generic_slow(), 1);
  GenerateRuntimeGetProperty(masm);
  // Key is not a smi: check whether it is a unique name or an index string.
  __ bind(&check_name);
  GenerateKeyNameCheck(masm, key, eax, ebx, &index_name, &slow);
  GenerateKeyedLoadReceiverCheck(masm, receiver, eax, Map::kHasNamedInterceptor,
  // If the receiver is a fast-case object, check the keyed lookup
  // cache. Otherwise probe the dictionary.
  __ mov(ebx, FieldOperand(receiver, JSObject::kPropertiesOffset));
  __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
         Immediate(isolate->factory()->hash_table_map()));
  __ j(equal, &probe_dictionary);
  // The receiver's map is still in eax, compute the keyed lookup cache hash
  // based on 32 bits of the map pointer and the string hash.
  if (FLAG_debug_code) {
    __ cmp(eax, FieldOperand(receiver, HeapObject::kMapOffset));
    __ Check(equal, kMapIsNoLongerInEax);
  __ mov(ebx, eax);  // Keep the map around for later.
  __ shr(eax, KeyedLookupCache::kMapHashShift);
  __ mov(edi, FieldOperand(key, String::kHashFieldOffset));
  __ shr(edi, String::kHashShift);
  __ and_(eax, KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask);
  // Load the key (consisting of map and internalized string) from the cache and
  Label load_in_object_property;
  static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket;
  Label hit_on_nth_entry[kEntriesPerBucket];
  ExternalReference cache_keys =
      ExternalReference::keyed_lookup_cache_keys(masm->isolate());
  // Try all but the last entry of the bucket; each entry is a (map, name)
  // pointer pair, hence the pointer-size-times-two scaling.
  for (int i = 0; i < kEntriesPerBucket - 1; i++) {
    Label try_next_entry;
    __ shl(edi, kPointerSizeLog2 + 1);
    __ add(edi, Immediate(kPointerSize * i * 2));
    __ cmp(ebx, Operand::StaticArray(edi, times_1, cache_keys));
    __ j(not_equal, &try_next_entry);
    __ add(edi, Immediate(kPointerSize));
    __ cmp(key, Operand::StaticArray(edi, times_1, cache_keys));
    __ j(equal, &hit_on_nth_entry[i]);
    __ bind(&try_next_entry);
  // Last entry of the bucket: any mismatch goes straight to the slow case.
  __ lea(edi, Operand(eax, 1));
  __ shl(edi, kPointerSizeLog2 + 1);
  __ add(edi, Immediate(kPointerSize * (kEntriesPerBucket - 1) * 2));
  __ cmp(ebx, Operand::StaticArray(edi, times_1, cache_keys));
  __ j(not_equal, &slow);
  __ add(edi, Immediate(kPointerSize));
  __ cmp(key, Operand::StaticArray(edi, times_1, cache_keys));
  __ j(not_equal, &slow);
  // Cache hit: fetch the field offset for the entry that matched.
  // ebx : receiver's map
  // eax : lookup cache index
  ExternalReference cache_field_offsets =
      ExternalReference::keyed_lookup_cache_field_offsets(masm->isolate());
  for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
    __ bind(&hit_on_nth_entry[i]);
    __ add(eax, Immediate(i));
        Operand::StaticArray(eax, times_pointer_size, cache_field_offsets));
    __ movzx_b(eax, FieldOperand(ebx, Map::kInObjectPropertiesOffset));
    __ j(above_equal, &property_array_property);
    __ jmp(&load_in_object_property);
  // Load in-object property.
  __ bind(&load_in_object_property);
  __ movzx_b(eax, FieldOperand(ebx, Map::kInstanceSizeOffset));
  __ mov(eax, FieldOperand(receiver, eax, times_pointer_size, 0));
  __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
  // Load property array property.
  __ bind(&property_array_property);
  __ mov(eax, FieldOperand(receiver, JSObject::kPropertiesOffset));
      FieldOperand(eax, edi, times_pointer_size, FixedArray::kHeaderSize));
  __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
  // Do a quick inline probe of the receiver's dictionary, if it
  __ bind(&probe_dictionary);
  __ mov(eax, FieldOperand(receiver, JSObject::kMapOffset));
  __ movzx_b(eax, FieldOperand(eax, Map::kInstanceTypeOffset));
  GenerateGlobalInstanceTypeCheck(masm, eax, &slow);
  GenerateDictionaryLoad(masm, &slow, ebx, key, eax, edi, eax);
  __ IncrementCounter(counters->keyed_load_generic_symbol(), 1);
  // Key is a string with a cached array index: extract it and retry as smi.
  __ bind(&index_name);
  __ IndexFromHash(ebx, key);
  // Now jump to the place where smi keys are handled.
// Keyed load specialized for string receivers with array-index keys:
// delegates to StringCharAtGenerator, using a single miss label for the
// not-a-string, not-a-number and out-of-range cases.
void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
  // Return address is on the stack.
  Register receiver = ReceiverRegister();
  Register index = NameRegister();
  Register scratch = ebx;
  DCHECK(!scratch.is(receiver) && !scratch.is(index));
  Register result = eax;
  DCHECK(!result.is(scratch));
  StringCharAtGenerator char_at_generator(receiver, index, scratch, result,
                                          &miss,  // When not a string.
                                          &miss,  // When not a number.
                                          &miss,  // When index out of range.
                                          STRING_INDEX_IS_ARRAY_INDEX);
  char_at_generator.GenerateFast(masm);
  StubRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm, call_helper);
// Keyed load through an indexed interceptor: verifies the receiver has an
// indexed interceptor and no access checks, then tail-calls the
// kLoadElementWithInterceptor runtime entry with (receiver, key).
void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
  // Return address is on the stack.
  Register receiver = ReceiverRegister();
  Register key = NameRegister();
  Register scratch = eax;
  DCHECK(!scratch.is(receiver) && !scratch.is(key));
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &slow);
  // Check that the key is an array index, that is Uint32.
  __ test(key, Immediate(kSmiTagMask | kSmiSignMask));
  __ j(not_zero, &slow);
  // Get the map of the receiver.
  __ mov(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
  // Check that it has indexed interceptor and access checks
  // are not enabled for this object.
  __ movzx_b(scratch, FieldOperand(scratch, Map::kBitFieldOffset));
  __ and_(scratch, Immediate(kSlowCaseBitFieldMask));
  __ cmp(scratch, Immediate(1 << Map::kHasIndexedInterceptor));
  __ j(not_zero, &slow);
  // Everything is fine, call runtime.
  __ push(receiver);  // receiver
  __ push(scratch);  // return address
  // Perform tail call to the entry.
  ExternalReference ref = ExternalReference(
      IC_Utility(kLoadElementWithInterceptor), masm->isolate());
  __ TailCallExternalReference(ref, 2, 1);
// Keyed load from a sloppy-arguments object: first tries the mapped
// (aliased) parameters, then falls back to the unmapped backing store.
void KeyedLoadIC::GenerateSloppyArguments(MacroAssembler* masm) {
  // The return address is on the stack.
  Register receiver = ReceiverRegister();
  Register key = NameRegister();
  DCHECK(receiver.is(edx));
  Factory* factory = masm->isolate()->factory();
  Operand mapped_location = GenerateMappedArgumentsLookup(
      masm, receiver, key, ebx, eax, &notin, &slow);
  __ mov(eax, mapped_location);
  // The unmapped lookup expects that the parameter map is in ebx.
  Operand unmapped_location =
      GenerateUnmappedArgumentsLookup(masm, key, ebx, eax, &slow);
  // A hole in the backing store means the element is absent.
  __ cmp(unmapped_location, factory->the_hole_value());
  __ mov(eax, unmapped_location);
// Keyed store into a sloppy-arguments object: writes through the mapped
// (aliased) parameter slot if the key is mapped, otherwise through the
// unmapped backing store; emits a write barrier for either location.
void KeyedStoreIC::GenerateSloppyArguments(MacroAssembler* masm) {
  // Return address is on the stack.
  Register receiver = ReceiverRegister();
  Register name = NameRegister();
  Register value = ValueRegister();
  DCHECK(receiver.is(edx));
  DCHECK(name.is(ecx));
  DCHECK(value.is(eax));
  Operand mapped_location = GenerateMappedArgumentsLookup(
      masm, receiver, name, ebx, edi, &notin, &slow);
  __ mov(mapped_location, value);
  // Compute the slot address for the write barrier.
  __ lea(ecx, mapped_location);
  __ RecordWrite(ebx, ecx, edx, kDontSaveFPRegs);
  // The unmapped lookup expects that the parameter map is in ebx.
  Operand unmapped_location =
      GenerateUnmappedArgumentsLookup(masm, name, ebx, edi, &slow);
  __ mov(unmapped_location, value);
  __ lea(edi, unmapped_location);
  __ RecordWrite(ebx, edi, edx, kDontSaveFPRegs);
// Shared tail of the generic keyed store: handles the fast-object and
// fast-double element stores, hole checks, optional length increment for
// array-growing stores, and the three elements-kind transitions
// (smi->double, smi->object, double->object). |check_map| selects whether
// the elements map is verified; |increment_length| selects the grow variant.
static void KeyedStoreGenerateGenericHelper(
    MacroAssembler* masm, Label* fast_object, Label* fast_double, Label* slow,
    KeyedStoreCheckMap check_map, KeyedStoreIncrementLength increment_length) {
  Label transition_smi_elements;
  Label finish_object_store, non_double_value, transition_double_elements;
  Label fast_double_without_map_check;
  Register receiver = KeyedStoreIC::ReceiverRegister();
  Register key = KeyedStoreIC::NameRegister();
  Register value = KeyedStoreIC::ValueRegister();
  DCHECK(receiver.is(edx));
  DCHECK(value.is(eax));
  // ebx: FixedArray receiver->elements
  // Fast case: Do the store, could either Object or double.
  __ bind(fast_object);
  if (check_map == kCheckMap) {
    __ mov(edi, FieldOperand(ebx, HeapObject::kMapOffset));
    __ cmp(edi, masm->isolate()->factory()->fixed_array_map());
    __ j(not_equal, fast_double);
  // HOLECHECK: guards "A[i] = V"
  // We have to go to the runtime if the current value is the hole because
  // there may be a callback on the element
  Label holecheck_passed1;
  __ cmp(FixedArrayElementOperand(ebx, key),
         masm->isolate()->factory()->the_hole_value());
  __ j(not_equal, &holecheck_passed1);
  __ JumpIfDictionaryInPrototypeChain(receiver, ebx, edi, slow);
  // Reload elements; JumpIfDictionaryInPrototypeChain used ebx as scratch.
  __ mov(ebx, FieldOperand(receiver, JSObject::kElementsOffset));
  __ bind(&holecheck_passed1);
  // Smi stores don't require further checks.
  __ JumpIfNotSmi(value, &non_smi_value);
  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ add(FieldOperand(receiver, JSArray::kLengthOffset),
           Immediate(Smi::FromInt(1)));
  // It's irrelevant whether array is smi-only or not when writing a smi.
  __ mov(FixedArrayElementOperand(ebx, key), value);
  __ bind(&non_smi_value);
  // Escape to elements kind transition case.
  __ mov(edi, FieldOperand(receiver, HeapObject::kMapOffset));
  __ CheckFastObjectElements(edi, &transition_smi_elements);
  // Fast elements array, store the value to the elements backing store.
  __ bind(&finish_object_store);
  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ add(FieldOperand(receiver, JSArray::kLengthOffset),
           Immediate(Smi::FromInt(1)));
  __ mov(FixedArrayElementOperand(ebx, key), value);
  // Update write barrier for the elements array address.
  __ mov(edx, value);  // Preserve the value which is returned.
  __ RecordWriteArray(ebx, edx, key, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
  __ bind(fast_double);
  if (check_map == kCheckMap) {
    // Check for fast double array case. If this fails, call through to the
    __ cmp(edi, masm->isolate()->factory()->fixed_double_array_map());
    __ j(not_equal, slow);
    // If the value is a number, store it as a double in the FastDoubleElements
  // HOLECHECK: guards "A[i] double hole?"
  // We have to see if the double version of the hole is present. If so
  // go to the runtime.
  uint32_t offset = FixedDoubleArray::kHeaderSize + sizeof(kHoleNanLower32);
  __ cmp(FieldOperand(ebx, key, times_4, offset), Immediate(kHoleNanUpper32));
  __ j(not_equal, &fast_double_without_map_check);
  __ JumpIfDictionaryInPrototypeChain(receiver, ebx, edi, slow);
  // Reload elements; JumpIfDictionaryInPrototypeChain used ebx as scratch.
  __ mov(ebx, FieldOperand(receiver, JSObject::kElementsOffset));
  __ bind(&fast_double_without_map_check);
  __ StoreNumberToDoubleElements(value, ebx, key, edi, xmm0,
                                 &transition_double_elements);
  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ add(FieldOperand(receiver, JSArray::kLengthOffset),
           Immediate(Smi::FromInt(1)));
  __ bind(&transition_smi_elements);
  __ mov(ebx, FieldOperand(receiver, HeapObject::kMapOffset));
  // Transition the array appropriately depending on the value type.
  __ CheckMap(value, masm->isolate()->factory()->heap_number_map(),
              &non_double_value, DONT_DO_SMI_CHECK);
  // Value is a double. Transition FAST_SMI_ELEMENTS -> FAST_DOUBLE_ELEMENTS
  // and complete the store.
  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                         FAST_DOUBLE_ELEMENTS, ebx, edi, slow);
  AllocationSiteMode mode =
      AllocationSite::GetMode(FAST_SMI_ELEMENTS, FAST_DOUBLE_ELEMENTS);
  ElementsTransitionGenerator::GenerateSmiToDouble(masm, receiver, key, value,
  __ mov(ebx, FieldOperand(receiver, JSObject::kElementsOffset));
  __ jmp(&fast_double_without_map_check);
  __ bind(&non_double_value);
  // Value is not a double, FAST_SMI_ELEMENTS -> FAST_ELEMENTS
  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS, FAST_ELEMENTS, ebx,
  mode = AllocationSite::GetMode(FAST_SMI_ELEMENTS, FAST_ELEMENTS);
  ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
      masm, receiver, key, value, ebx, mode, slow);
  __ mov(ebx, FieldOperand(receiver, JSObject::kElementsOffset));
  __ jmp(&finish_object_store);
  __ bind(&transition_double_elements);
  // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a
  // HeapNumber. Make sure that the receiver is a Array with FAST_ELEMENTS and
  // transition array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS
  __ mov(ebx, FieldOperand(receiver, HeapObject::kMapOffset));
  __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS, FAST_ELEMENTS,
  mode = AllocationSite::GetMode(FAST_DOUBLE_ELEMENTS, FAST_ELEMENTS);
  ElementsTransitionGenerator::GenerateDoubleToObject(masm, receiver, key,
                                                      value, ebx, mode, slow);
  __ mov(ebx, FieldOperand(receiver, JSObject::kElementsOffset));
  __ jmp(&finish_object_store);
// Generic keyed store: filters out receivers that need access checks or are
// observed, requires a smi key, classifies the receiver (JSArray vs. other
// JSObject), handles the grow-by-one case, and then emits the two shared
// store tails via KeyedStoreGenerateGenericHelper.
void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
                                   StrictMode strict_mode) {
  // Return address is on the stack.
  Label slow, fast_object, fast_object_grow;
  Label fast_double, fast_double_grow;
  Label array, extra, check_if_double_array;
  Register receiver = ReceiverRegister();
  Register key = NameRegister();
  DCHECK(receiver.is(edx));
  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, &slow);
  // Get the map from the receiver.
  __ mov(edi, FieldOperand(receiver, HeapObject::kMapOffset));
  // Check that the receiver does not require access checks and is not observed.
  // The generic stub does not perform map checks or handle observed objects.
  __ test_b(FieldOperand(edi, Map::kBitFieldOffset),
            1 << Map::kIsAccessCheckNeeded | 1 << Map::kIsObserved);
  __ j(not_zero, &slow);
  // Check that the key is a smi.
  __ JumpIfNotSmi(key, &slow);
  __ CmpInstanceType(edi, JS_ARRAY_TYPE);
  // Check that the object is some kind of JSObject.
  __ CmpInstanceType(edi, FIRST_JS_OBJECT_TYPE);
  // Object case: Check key against length in the elements array.
  __ mov(ebx, FieldOperand(receiver, JSObject::kElementsOffset));
  // Check array bounds. Both the key and the length of FixedArray are smis.
  __ cmp(key, FieldOperand(ebx, FixedArray::kLengthOffset));
  __ j(below, &fast_object);
  // Slow case: call runtime.
  GenerateRuntimeSetProperty(masm, strict_mode);
  // Extra capacity case: Check if there is extra capacity to
  // perform the store and update the length. Used for adding one
  // element to the array by writing to array[array.length].
  // receiver is a JSArray.
  // ebx: receiver->elements, a FixedArray
  // flags: compare (key, receiver.length())
  // do not leave holes in the array:
  __ j(not_equal, &slow);
  // The store must fit within the backing store's capacity.
  __ cmp(key, FieldOperand(ebx, FixedArray::kLengthOffset));
  __ j(above_equal, &slow);
  __ mov(edi, FieldOperand(ebx, HeapObject::kMapOffset));
  __ cmp(edi, masm->isolate()->factory()->fixed_array_map());
  __ j(not_equal, &check_if_double_array);
  __ jmp(&fast_object_grow);
  __ bind(&check_if_double_array);
  __ cmp(edi, masm->isolate()->factory()->fixed_double_array_map());
  __ j(not_equal, &slow);
  __ jmp(&fast_double_grow);
  // Array case: Get the length and the elements array from the JS
  // array. Check that the array is in fast mode (and writable); if it
  // is the length is always a smi.
  // receiver is a JSArray.
  __ mov(ebx, FieldOperand(receiver, JSObject::kElementsOffset));
  // Check the key against the length in the array and fall through to the
  // common store code.
  __ cmp(key, FieldOperand(receiver, JSArray::kLengthOffset));  // Compare smis.
  __ j(above_equal, &extra);
  KeyedStoreGenerateGenericHelper(masm, &fast_object, &fast_double, &slow,
                                  kCheckMap, kDontIncrementLength);
  KeyedStoreGenerateGenericHelper(masm, &fast_object_grow, &fast_double_grow,
                                  &slow, kDontCheckMap, kIncrementLength);
// Megamorphic named load: probes the stub cache for a handler matching
// (receiver map, name); on a probe miss control continues below to runtime.
void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
  // The return address is on the stack.
  Register receiver = ReceiverRegister();
  Register name = NameRegister();
  DCHECK(receiver.is(edx));
  DCHECK(name.is(ecx));
  // Probe the stub cache.
  Code::Flags flags = Code::RemoveTypeAndHolderFromFlags(
      Code::ComputeHandlerFlags(Code::LOAD_IC));
  masm->isolate()->stub_cache()->GenerateProbe(masm, flags, receiver, name, ebx,
  // Cache miss: Jump to runtime.
// Named load for receivers with dictionary (slow-mode) properties: probes
// the property dictionary inline and falls back to the runtime GetProperty
// (not a miss) when the dictionary lookup fails.
void LoadIC::GenerateNormal(MacroAssembler* masm) {
  Register dictionary = eax;
  DCHECK(!dictionary.is(ReceiverRegister()));
  DCHECK(!dictionary.is(NameRegister()));
      FieldOperand(ReceiverRegister(), JSObject::kPropertiesOffset));
  GenerateDictionaryLoad(masm, &slow, dictionary, NameRegister(), edi, ebx,
  // Dictionary load failed, go slow (but don't miss).
  GenerateRuntimeGetProperty(masm);
// Pushes the load IC's runtime-call arguments (receiver and name) onto the
// stack. ebx is used as scratch and so must not alias either argument
// register (enforced by the DCHECK).
static void LoadIC_PushArgs(MacroAssembler* masm) {
  Register receiver = LoadIC::ReceiverRegister();
  Register name = LoadIC::NameRegister();
  DCHECK(!ebx.is(receiver) && !ebx.is(name));
// Load IC miss handler: bumps the load_miss counter, pushes (receiver, name)
// and tail-calls the kLoadIC_Miss runtime entry (2 arguments, 1 result).
void LoadIC::GenerateMiss(MacroAssembler* masm) {
  // Return address is on the stack.
  __ IncrementCounter(masm->isolate()->counters()->load_miss(), 1);
  LoadIC_PushArgs(masm);
  // Perform tail call to the entry.
  ExternalReference ref =
      ExternalReference(IC_Utility(kLoadIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 2, 1);
// Slow-path named load: pushes (receiver, name) and tail-calls the
// Runtime::kGetProperty function (2 arguments, 1 result).
void LoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
  // Return address is on the stack.
  LoadIC_PushArgs(masm);
  // Perform tail call to the entry.
  __ TailCallRuntime(Runtime::kGetProperty, 2, 1);
// Keyed load IC miss handler: bumps the keyed_load_miss counter, pushes
// (receiver, key) and tail-calls the kKeyedLoadIC_Miss runtime entry.
void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
  // Return address is on the stack.
  __ IncrementCounter(masm->isolate()->counters()->keyed_load_miss(), 1);
  LoadIC_PushArgs(masm);
  // Perform tail call to the entry.
  ExternalReference ref =
      ExternalReference(IC_Utility(kKeyedLoadIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 2, 1);
// IC register specifications
// Fixed registers of the IA32 IC calling convention. Callers and generated
// stubs rely on these exact assignments (see the DCHECKs throughout this
// file). SlotRegister/VectorRegister are only meaningful with vector ICs.
const Register LoadIC::ReceiverRegister() { return edx; }
const Register LoadIC::NameRegister() { return ecx; }
const Register LoadIC::SlotRegister() {
  DCHECK(FLAG_vector_ics);
const Register LoadIC::VectorRegister() {
  DCHECK(FLAG_vector_ics);
const Register StoreIC::ReceiverRegister() { return edx; }
const Register StoreIC::NameRegister() { return ecx; }
const Register StoreIC::ValueRegister() { return eax; }
const Register KeyedStoreIC::MapRegister() { return ebx; }
// Slow-path keyed load: pushes (receiver, key) and tail-calls the
// Runtime::kKeyedGetProperty function (2 arguments, 1 result).
void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
  // Return address is on the stack.
  LoadIC_PushArgs(masm);
  // Perform tail call to the entry.
  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
// Megamorphic named store: probes the stub cache for a STORE_IC handler
// matching (receiver map, name); on a probe miss control continues below
// to runtime.
void StoreIC::GenerateMegamorphic(MacroAssembler* masm) {
  // Return address is on the stack.
  Code::Flags flags = Code::RemoveTypeAndHolderFromFlags(
      Code::ComputeHandlerFlags(Code::STORE_IC));
  masm->isolate()->stub_cache()->GenerateProbe(masm, flags, ReceiverRegister(),
                                               NameRegister(), ebx, no_reg);
  // Cache miss: Jump to runtime.
// Helper shared by the store miss/slow generators: presumably pushes the
// (receiver, name, value) triple for a 3-argument runtime call — the push
// sequence itself is elided from this chunk.
static void StoreIC_PushArgs(MacroAssembler* masm) {
  Register receiver = StoreIC::ReceiverRegister();
  Register name = StoreIC::NameRegister();
  Register value = StoreIC::ValueRegister();
  // ebx is used as a scratch register below and must not alias any of the
  // argument registers.
  DCHECK(!ebx.is(receiver) && !ebx.is(name) && !ebx.is(value));
// StoreIC miss handler: pushes (receiver, name, value) and tail-calls the
// kStoreIC_Miss runtime entry (3 arguments, result size 1).
void StoreIC::GenerateMiss(MacroAssembler* masm) {
  // Return address is on the stack.
  StoreIC_PushArgs(masm);
  // Perform tail call to the entry.
  ExternalReference ref =
      ExternalReference(IC_Utility(kStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
// Stores to a receiver with dictionary-mode (slow) properties entirely in
// generated code, bailing out to restore_miss when the dictionary store
// cannot complete.
void StoreIC::GenerateNormal(MacroAssembler* masm) {
  Register receiver = ReceiverRegister();
  Register name = NameRegister();
  Register value = ValueRegister();
  Register dictionary = ebx;
  // Load the receiver's property dictionary into ebx.
  __ mov(dictionary, FieldOperand(receiver, JSObject::kPropertiesOffset));
  // A lot of registers are needed for storing to slow case
  // objects. Push and restore receiver but rely on
  // GenerateDictionaryStore preserving the value and name.
  // NOTE(review): the trailing arguments of this call, and the definition
  // of the restore_miss label, are elided from this chunk.
  GenerateDictionaryStore(masm, &restore_miss, dictionary, name, value,
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->store_normal_hit(), 1);
  // Bailout target when the dictionary store fails; counts the miss (the
  // receiver restore and miss tail-call are elided from this chunk).
  __ bind(&restore_miss);
  __ IncrementCounter(counters->store_normal_miss(), 1);
// Generic store fallback: passes receiver, name, value and the strict-mode
// smi to Runtime::kSetProperty (4 arguments, result size 1).
void StoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,
                                         StrictMode strict_mode) {
  // Return address is on the stack.
  DCHECK(!ebx.is(ReceiverRegister()) && !ebx.is(NameRegister()) &&
         !ebx.is(ValueRegister()));
  // NOTE(review): the pop of the return address into ebx is elided from
  // this chunk; ebx must hold it when re-pushed below.
  __ push(ReceiverRegister());
  __ push(NameRegister());
  __ push(ValueRegister());
  __ push(Immediate(Smi::FromInt(strict_mode)));
  __ push(ebx);  // return address
  // Do tail-call to runtime routine.
  __ TailCallRuntime(Runtime::kSetProperty, 4, 1);
// Keyed-store fallback: identical argument layout to the StoreIC version —
// receiver, key, value and the strict-mode smi go to Runtime::kSetProperty.
void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,
                                              StrictMode strict_mode) {
  // Return address is on the stack.
  DCHECK(!ebx.is(ReceiverRegister()) && !ebx.is(NameRegister()) &&
         !ebx.is(ValueRegister()));
  // NOTE(review): the pop of the return address into ebx is elided from
  // this chunk; ebx must hold it when re-pushed below.
  __ push(ReceiverRegister());
  __ push(NameRegister());
  __ push(ValueRegister());
  __ push(Immediate(Smi::FromInt(strict_mode)));
  __ push(ebx);  // return address
  // Do tail-call to runtime routine.
  __ TailCallRuntime(Runtime::kSetProperty, 4, 1);
// KeyedStoreIC miss handler: pushes (receiver, key, value) and tail-calls
// the kKeyedStoreIC_Miss runtime entry (3 arguments, result size 1).
void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) {
  // Return address is on the stack.
  StoreIC_PushArgs(masm);
  // Do tail-call to runtime routine.
  ExternalReference ref =
      ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
// Slow-path store: same calling convention as the miss handler, but
// targets the kStoreIC_Slow runtime entry.
void StoreIC::GenerateSlow(MacroAssembler* masm) {
  // Return address is on the stack.
  StoreIC_PushArgs(masm);
  // Do tail-call to runtime routine.
  ExternalReference ref(IC_Utility(kStoreIC_Slow), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
// Slow-path keyed store: same calling convention as the miss handler, but
// targets the kKeyedStoreIC_Slow runtime entry.
void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) {
  // Return address is on the stack.
  StoreIC_PushArgs(masm);
  // Do tail-call to runtime routine.
  ExternalReference ref(IC_Utility(kKeyedStoreIC_Slow), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
// Maps a comparison token to the ia32 condition code the compare IC emits.
// NOTE(review): the switch statement and most case labels are elided from
// this chunk; only fragments remain below.  no_condition is the fallthrough
// for unreachable operators.
Condition CompareIC::ComputeCondition(Token::Value op) {
    case Token::EQ_STRICT:
      return greater_equal;
      return no_condition;
// Returns true when the instruction following the call at |address| is the
// one-byte 'test al' marker, which flags an inlined smi check at the site.
bool CompareIC::HasInlinedSmiCode(Address address) {
  // The address of the instruction following the call.
  Address test_instruction_address =
      address + Assembler::kCallTargetAddressOffset;
  // If the instruction following the call is not a test al, nothing
  // was inlined at this site.
  return *test_instruction_address == Assembler::kTestAlByte;
// Enables or disables the inlined smi check at a patchable call site by
// rewriting the short conditional jump that guards it: enabling switches
// from carry-based to zero-based conditions; disabling does the reverse.
void PatchInlinedSmiCode(Address address, InlinedSmiCheck check) {
  // The address of the instruction following the call.
  Address test_instruction_address =
      address + Assembler::kCallTargetAddressOffset;
  // If the instruction following the call is not a test al, nothing
  // was inlined; the guard below asserts a nop marker instead (its early
  // return and closing brace are elided from this chunk).
  if (*test_instruction_address != Assembler::kTestAlByte) {
    DCHECK(*test_instruction_address == Assembler::kNopByte);
  // The delta byte is encoded directly after the test instruction.
  Address delta_address = test_instruction_address + 1;
  // The delta to the start of the map check instruction and the
  // condition code uses at the patched jump.
  uint8_t delta = *reinterpret_cast<uint8_t*>(delta_address);
  if (FLAG_trace_ic) {
    // Trace the patch site; delta promotes to int for the %d conversion.
    PrintF("[ patching ic at %p, test=%p, delta=%d\n", address,
           test_instruction_address, delta);
  // Patch with a short conditional jump. Enabling means switching from a short
  // jump-if-carry/not-carry to jump-if-zero/not-zero, whereas disabling is the
  // reverse operation of that.
  Address jmp_address = test_instruction_address - delta;
  // Sanity-check that the byte we are about to patch is one of the four
  // expected short-jump opcodes for the current direction.
  DCHECK((check == ENABLE_INLINED_SMI_CHECK)
             ? (*jmp_address == Assembler::kJncShortOpcode ||
                *jmp_address == Assembler::kJcShortOpcode)
             : (*jmp_address == Assembler::kJnzShortOpcode ||
                *jmp_address == Assembler::kJzShortOpcode));
  // NOTE(review): the 'Condition cc =' lead-in for the expression below is
  // elided from this chunk; cc is consumed by the final store.
  (check == ENABLE_INLINED_SMI_CHECK)
      ? (*jmp_address == Assembler::kJncShortOpcode ? not_zero : zero)
      : (*jmp_address == Assembler::kJnzShortOpcode ? not_carry : carry);
  // Rewrite the opcode byte: short-jcc prefix combined with the new
  // condition code.
  *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc);
1145 } // namespace v8::internal
1147 #endif // V8_TARGET_ARCH_IA32