1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
9 #include "src/codegen.h"
10 #include "src/ic-inl.h"
11 #include "src/runtime.h"
12 #include "src/stub-cache.h"
17 // ----------------------------------------------------------------------------
18 // Static IC stub generators.
21 #define __ ACCESS_MASM(masm)
// Jumps to |global_object| if the instance type held in |type| is one of
// the three global-object instance types (global object, builtins object,
// or global proxy); falls through otherwise. |type| is only compared, not
// modified.
24 static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm,
26 Label* global_object) {
28 // type: holds the receiver instance type on entry.
29 __ cmpb(type, Immediate(JS_GLOBAL_OBJECT_TYPE));
30 __ j(equal, global_object);
31 __ cmpb(type, Immediate(JS_BUILTINS_OBJECT_TYPE));
32 __ j(equal, global_object);
33 __ cmpb(type, Immediate(JS_GLOBAL_PROXY_TYPE));
34 __ j(equal, global_object);
38 // Helper function used to load a property from a dictionary backing storage.
39 // This function may return false negatives, so miss_label
40 // must always call a backup property load that is complete.
41 // This function is safe to call if name is not an internalized string,
42 // and will jump to the miss_label in that case.
43 // The generated code assumes that the receiver has slow properties,
44 // is not a global object and does not have interceptors.
45 static void GenerateDictionaryLoad(MacroAssembler* masm,
54 // elements - holds the property dictionary on entry and is unchanged.
56 // name - holds the name of the property on entry and is unchanged.
58 // r0 - used to hold the capacity of the property dictionary.
60 // r1 - used to hold the index into the property dictionary.
62 // result - holds the result on exit if the load succeeded.
66 // Probe the dictionary.
67 NameDictionaryLookupStub::GeneratePositiveLookup(masm,
75 // If probing finds an entry in the dictionary, r1 contains the
76 // index into the dictionary. Check that the value is a normal
// Offsets below are relative to the start of the dictionary's element
// area; subtracting kHeapObjectTag compensates for the tagged pointer in
// 'elements'.
79 const int kElementsStartOffset =
80 NameDictionary::kHeaderSize +
81 NameDictionary::kElementsStartIndex * kPointerSize;
82 const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
// A non-zero type field in the details word means the entry is not a
// NORMAL data property, so give up and take the miss path.
83 __ Test(Operand(elements, r1, times_pointer_size,
84 kDetailsOffset - kHeapObjectTag),
85 Smi::FromInt(PropertyDetails::TypeField::kMask));
86 __ j(not_zero, miss_label);
88 // Get the value at the masked, scaled index.
89 const int kValueOffset = kElementsStartOffset + kPointerSize;
91 Operand(elements, r1, times_pointer_size,
92 kValueOffset - kHeapObjectTag));
96 // Helper function used to store a property to a dictionary backing
97 // storage. This function may fail to store a property even though it
98 // is in the dictionary, so code at miss_label must always call a
99 // backup property store that is complete. This function is safe to
100 // call if name is not an internalized string, and will jump to the miss_label
101 // in that case. The generated code assumes that the receiver has slow
102 // properties, is not a global object and does not have interceptors.
103 static void GenerateDictionaryStore(MacroAssembler* masm,
112 // elements - holds the property dictionary on entry and is clobbered.
114 // name - holds the name of the property on entry and is unchanged.
116 // value - holds the value to store and is unchanged.
118 // scratch0 - used during the positive dictionary lookup and is clobbered.
120 // scratch1 - used for index into the property dictionary and is clobbered.
123 // Probe the dictionary.
124 NameDictionaryLookupStub::GeneratePositiveLookup(masm,
132 // If probing finds an entry in the dictionary, scratch0 contains the
133 // index into the dictionary. Check that the value is a normal
134 // property that is not read only.
136 const int kElementsStartOffset =
137 NameDictionary::kHeaderSize +
138 NameDictionary::kElementsStartIndex * kPointerSize;
139 const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
// Reject the store if the property is not NORMAL or carries the READ_ONLY
// attribute. The mask is pre-shifted by kSmiTagSize because the details
// word is stored as a smi.
140 const int kTypeAndReadOnlyMask =
141 (PropertyDetails::TypeField::kMask |
142 PropertyDetails::AttributesField::encode(READ_ONLY)) << kSmiTagSize;
143 __ Test(Operand(elements,
146 kDetailsOffset - kHeapObjectTag),
147 Smi::FromInt(kTypeAndReadOnlyMask));
148 __ j(not_zero, miss_label);
150 // Store the value at the masked, scaled index.
151 const int kValueOffset = kElementsStartOffset + kPointerSize;
152 __ leap(scratch1, Operand(elements,
155 kValueOffset - kHeapObjectTag));
156 __ movp(Operand(scratch1, 0), value);
158 // Update write barrier. Make sure not to clobber the value.
// The value is copied into scratch0 first so RecordWrite can clobber its
// value operand without destroying the caller-visible 'value' register.
159 __ movp(scratch0, value);
160 __ RecordWrite(elements, scratch1, scratch0, kDontSaveFPRegs);
164 // Checks the receiver for special cases (value type, slow case bits).
165 // Falls through for regular JS object.
166 static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
172 // receiver - holds the receiver and is unchanged.
173 // Scratch registers:
174 // map - used to hold the map of the receiver.
176 // Check that the object isn't a smi.
177 __ JumpIfSmi(receiver, slow);
179 // Check that the object is some kind of JS object EXCEPT JS Value type.
180 // In the case that the object is a value-wrapper object,
181 // we enter the runtime system to make sure that indexing
182 // into string objects work as intended.
183 DCHECK(JS_OBJECT_TYPE > JS_VALUE_TYPE);
184 __ CmpObjectType(receiver, JS_OBJECT_TYPE, map);
// Bail out to |slow| if the map requires access checks or has the given
// interceptor bit set; the generic stub handles neither case inline.
188 __ testb(FieldOperand(map, Map::kBitFieldOffset),
189 Immediate((1 << Map::kIsAccessCheckNeeded) |
190 (1 << interceptor_bit)));
191 __ j(not_zero, slow);
195 // Loads an indexed element from a fast case array.
196 // If not_fast_array is NULL, doesn't perform the elements map check.
197 static void GenerateFastArrayLoad(MacroAssembler* masm,
203 Label* not_fast_array,
204 Label* out_of_range) {
207 // receiver - holds the receiver on entry.
208 // Unchanged unless 'result' is the same register.
210 // key - holds the smi key on entry.
211 // Unchanged unless 'result' is the same register.
213 // elements - holds the elements of the receiver on exit.
215 // result - holds the result on exit if the load succeeded.
216 // Allowed to be the same as 'receiver' or 'key'.
217 // Unchanged on bailout so 'receiver' and 'key' can be safely
218 // used by further computation.
220 // Scratch registers:
222 // scratch - used to hold elements of the receiver and the loaded value.
224 __ movp(elements, FieldOperand(receiver, JSObject::kElementsOffset));
225 if (not_fast_array != NULL) {
226 // Check that the object is in fast mode and writable.
227 __ CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
228 Heap::kFixedArrayMapRootIndex);
229 __ j(not_equal, not_fast_array);
231 __ AssertFastElements(elements);
233 // Check that the key (index) is within bounds.
234 __ SmiCompare(key, FieldOperand(elements, FixedArray::kLengthOffset));
235 // Unsigned comparison rejects negative indices.
236 __ j(above_equal, out_of_range);
237 // Fast case: Do the load.
238 SmiIndex index = masm->SmiToIndex(scratch, key, kPointerSizeLog2);
239 __ movp(scratch, FieldOperand(elements,
242 FixedArray::kHeaderSize));
243 __ CompareRoot(scratch, Heap::kTheHoleValueRootIndex);
244 // In case the loaded value is the_hole we have to consult GetProperty
245 // to ensure the prototype chain is searched.
246 __ j(equal, out_of_range);
// Skip the final move when 'result' aliases 'scratch'.
247 if (!result.is(scratch)) {
248 __ movp(result, scratch);
253 // Checks whether a key is an array index string or a unique name.
254 // Falls through if the key is a unique name.
255 static void GenerateKeyNameCheck(MacroAssembler* masm,
262 // key - holds the key and is unchanged. Assumed to be non-smi.
263 // Scratch registers:
264 // map - used to hold the map of the key.
265 // hash - used to hold the hash of the key.
// Instance types above LAST_UNIQUE_NAME_TYPE cannot be unique names.
267 __ CmpObjectType(key, LAST_UNIQUE_NAME_TYPE, map);
268 __ j(above, not_unique);
269 STATIC_ASSERT(LAST_UNIQUE_NAME_TYPE == FIRST_NONSTRING_TYPE);
// Exactly LAST_UNIQUE_NAME_TYPE means a non-string unique name, which is
// unique by construction.
270 __ j(equal, &unique);
272 // Is the string an array index, with cached numeric value?
273 __ movl(hash, FieldOperand(key, Name::kHashFieldOffset));
274 __ testl(hash, Immediate(Name::kContainsCachedArrayIndexMask));
275 __ j(zero, index_string); // The value in hash is used at jump target.
277 // Is the string internalized? We already know it's a string so a single
278 // bit test is enough.
279 STATIC_ASSERT(kNotInternalizedTag != 0);
280 __ testb(FieldOperand(map, Map::kInstanceTypeOffset),
281 Immediate(kIsNotInternalizedMask));
282 __ j(not_zero, not_unique);
// Generic keyed load stub: dispatches on the kind of key. Smi keys take
// the fast-elements path (falling back to a number dictionary), name keys
// probe the keyed lookup cache and then the receiver's property
// dictionary; everything else goes to the runtime.
289 void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
290 // The return address is on the stack.
291 Label slow, check_name, index_smi, index_name, property_array_property;
292 Label probe_dictionary, check_number_dictionary;
294 Register receiver = ReceiverRegister();
295 Register key = NameRegister();
296 DCHECK(receiver.is(rdx));
299 // Check that the key is a smi.
300 __ JumpIfNotSmi(key, &check_name);
302 // Now the key is known to be a smi. This place is also jumped to from below
303 // where a numeric string is converted to a smi.
305 GenerateKeyedLoadReceiverCheck(
306 masm, receiver, rax, Map::kHasIndexedInterceptor, &slow);
308 // Check the receiver's map to see if it has fast elements.
309 __ CheckFastElements(rax, &check_number_dictionary);
311 GenerateFastArrayLoad(masm,
319 Counters* counters = masm->isolate()->counters();
320 __ IncrementCounter(counters->keyed_load_generic_smi(), 1);
323 __ bind(&check_number_dictionary);
324 __ SmiToInteger32(rbx, key);
325 __ movp(rax, FieldOperand(receiver, JSObject::kElementsOffset));
327 // Check whether the elements is a number dictionary.
328 // rbx: key as untagged int32
330 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
331 Heap::kHashTableMapRootIndex);
332 __ j(not_equal, &slow);
333 __ LoadFromNumberDictionary(&slow, rax, key, rbx, r9, rdi, rax);
337 // Slow case: Jump to runtime.
338 __ IncrementCounter(counters->keyed_load_generic_slow(), 1);
339 GenerateRuntimeGetProperty(masm);
341 __ bind(&check_name);
342 GenerateKeyNameCheck(masm, key, rax, rbx, &index_name, &slow);
344 GenerateKeyedLoadReceiverCheck(
345 masm, receiver, rax, Map::kHasNamedInterceptor, &slow);
347 // If the receiver is a fast-case object, check the keyed lookup
348 // cache. Otherwise probe the dictionary leaving result in key.
349 __ movp(rbx, FieldOperand(receiver, JSObject::kPropertiesOffset));
350 __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
351 Heap::kHashTableMapRootIndex);
352 __ j(equal, &probe_dictionary);
354 // Load the map of the receiver, compute the keyed lookup cache hash
355 // based on 32 bits of the map pointer and the string hash.
356 __ movp(rbx, FieldOperand(receiver, HeapObject::kMapOffset));
358 __ shrl(rax, Immediate(KeyedLookupCache::kMapHashShift));
359 __ movl(rdi, FieldOperand(key, String::kHashFieldOffset));
360 __ shrl(rdi, Immediate(String::kHashShift));
362 int mask = (KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask);
363 __ andp(rax, Immediate(mask));
365 // Load the key (consisting of map and internalized string) from the cache and
367 Label load_in_object_property;
368 static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket;
369 Label hit_on_nth_entry[kEntriesPerBucket];
370 ExternalReference cache_keys
371 = ExternalReference::keyed_lookup_cache_keys(masm->isolate());
// Probe every entry of the bucket: each cache entry is a (map, name)
// pair, two pointers wide, hence the *2 scaling of the offsets below.
373 for (int i = 0; i < kEntriesPerBucket - 1; i++) {
374 Label try_next_entry;
376 __ shlp(rdi, Immediate(kPointerSizeLog2 + 1));
377 __ LoadAddress(kScratchRegister, cache_keys);
378 int off = kPointerSize * i * 2;
379 __ cmpp(rbx, Operand(kScratchRegister, rdi, times_1, off));
380 __ j(not_equal, &try_next_entry);
381 __ cmpp(key, Operand(kScratchRegister, rdi, times_1, off + kPointerSize));
382 __ j(equal, &hit_on_nth_entry[i]);
383 __ bind(&try_next_entry);
// Last bucket entry: a mismatch here means a cache miss, so go slow.
386 int off = kPointerSize * (kEntriesPerBucket - 1) * 2;
387 __ cmpp(rbx, Operand(kScratchRegister, rdi, times_1, off));
388 __ j(not_equal, &slow);
389 __ cmpp(key, Operand(kScratchRegister, rdi, times_1, off + kPointerSize));
390 __ j(not_equal, &slow);
392 // Get field offset, which is a 32-bit integer.
393 ExternalReference cache_field_offsets
394 = ExternalReference::keyed_lookup_cache_field_offsets(masm->isolate());
397 for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
398 __ bind(&hit_on_nth_entry[i]);
400 __ addl(rax, Immediate(i));
402 __ LoadAddress(kScratchRegister, cache_field_offsets);
403 __ movl(rdi, Operand(kScratchRegister, rax, times_4, 0));
404 __ movzxbp(rax, FieldOperand(rbx, Map::kInObjectPropertiesOffset));
406 __ j(above_equal, &property_array_property);
408 __ jmp(&load_in_object_property);
412 // Load in-object property.
413 __ bind(&load_in_object_property);
414 __ movzxbp(rax, FieldOperand(rbx, Map::kInstanceSizeOffset));
416 __ movp(rax, FieldOperand(receiver, rax, times_pointer_size, 0));
417 __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
420 // Load property array property.
421 __ bind(&property_array_property);
422 __ movp(rax, FieldOperand(receiver, JSObject::kPropertiesOffset));
423 __ movp(rax, FieldOperand(rax, rdi, times_pointer_size,
424 FixedArray::kHeaderSize));
425 __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
428 // Do a quick inline probe of the receiver's dictionary, if it
430 __ bind(&probe_dictionary);
// Global-ish receivers need the full runtime lookup, so filter them out
// before the dictionary probe.
433 __ movp(rax, FieldOperand(receiver, JSObject::kMapOffset));
434 __ movb(rax, FieldOperand(rax, Map::kInstanceTypeOffset));
435 GenerateGlobalInstanceTypeCheck(masm, rax, &slow);
437 GenerateDictionaryLoad(masm, &slow, rbx, key, rax, rdi, rax);
438 __ IncrementCounter(counters->keyed_load_generic_symbol(), 1);
// A name with a cached array index: extract the index and retry as smi.
441 __ bind(&index_name);
442 __ IndexFromHash(rbx, key);
// Keyed load specialized for string receivers: delegates to the shared
// string char-at code generator; any failure mode (non-string receiver,
// non-number index, index out of range) goes to the miss label.
447 void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
448 // Return address is on the stack.
451 Register receiver = ReceiverRegister();
452 Register index = NameRegister();
453 Register scratch = rbx;
454 Register result = rax;
455 DCHECK(!scratch.is(receiver) && !scratch.is(index));
457 StringCharAtGenerator char_at_generator(receiver,
461 &miss, // When not a string.
462 &miss, // When not a number.
463 &miss, // When index out of range.
464 STRING_INDEX_IS_ARRAY_INDEX);
465 char_at_generator.GenerateFast(masm);
// The slow path of the generator may call into the runtime.
468 StubRuntimeCallHelper call_helper;
469 char_at_generator.GenerateSlow(masm, call_helper);
// Keyed load for receivers with an indexed interceptor: validates the
// receiver and key, then tail-calls the runtime to invoke the
// interceptor.
476 void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
477 // Return address is on the stack.
480 Register receiver = ReceiverRegister();
481 Register key = NameRegister();
482 Register scratch = rax;
483 DCHECK(!scratch.is(receiver) && !scratch.is(key));
485 // Check that the receiver isn't a smi.
486 __ JumpIfSmi(receiver, &slow);
488 // Check that the key is an array index, that is Uint32.
489 STATIC_ASSERT(kSmiValueSize <= 32);
490 __ JumpUnlessNonNegativeSmi(key, &slow);
492 // Get the map of the receiver.
493 __ movp(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
495 // Check that it has indexed interceptor and access checks
496 // are not enabled for this object.
// Mask the bit field down to the slow-case bits, then require that
// exactly the "has indexed interceptor" bit is set (and nothing else).
497 __ movb(scratch, FieldOperand(scratch, Map::kBitFieldOffset));
498 __ andb(scratch, Immediate(kSlowCaseBitFieldMask));
499 __ cmpb(scratch, Immediate(1 << Map::kHasIndexedInterceptor));
500 __ j(not_zero, &slow);
502 // Everything is fine, call runtime.
// Re-arrange the stack into runtime-call layout with the return address
// back on top.
503 __ PopReturnAddressTo(scratch);
504 __ Push(receiver); // receiver
506 __ PushReturnAddressFrom(scratch);
508 // Perform tail call to the entry.
509 __ TailCallExternalReference(
510 ExternalReference(IC_Utility(kLoadElementWithInterceptor),
// Shared tail of the generic keyed store. Performs the actual element
// store for the fast-object and fast-double cases, handling elements-kind
// transitions (SMI -> DOUBLE, SMI -> OBJECT, DOUBLE -> OBJECT) inline.
// |check_map| selects whether the elements map is re-validated here, and
// |increment_length| selects the grow variant that bumps the JSArray
// length; KeyedStoreIC::GenerateGeneric binds both variants.
519 static void KeyedStoreGenerateGenericHelper(
520 MacroAssembler* masm,
524 KeyedStoreCheckMap check_map,
525 KeyedStoreIncrementLength increment_length) {
526 Label transition_smi_elements;
527 Label finish_object_store, non_double_value, transition_double_elements;
528 Label fast_double_without_map_check;
529 Register receiver = KeyedStoreIC::ReceiverRegister();
530 Register key = KeyedStoreIC::NameRegister();
531 Register value = KeyedStoreIC::ValueRegister();
532 DCHECK(receiver.is(rdx));
534 DCHECK(value.is(rax));
535 // Fast case: Do the store, could be either Object or double.
536 __ bind(fast_object);
537 // rbx: receiver's elements array (a FixedArray)
538 // receiver is a JSArray.
539 // r9: map of receiver
540 if (check_map == kCheckMap) {
541 __ movp(rdi, FieldOperand(rbx, HeapObject::kMapOffset));
542 __ CompareRoot(rdi, Heap::kFixedArrayMapRootIndex);
543 __ j(not_equal, fast_double);
546 // HOLECHECK: guards "A[i] = V"
547 // We have to go to the runtime if the current value is the hole because
548 // there may be a callback on the element
549 Label holecheck_passed1;
550 __ movp(kScratchRegister, FieldOperand(rbx,
553 FixedArray::kHeaderSize));
554 __ CompareRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
555 __ j(not_equal, &holecheck_passed1);
556 __ JumpIfDictionaryInPrototypeChain(receiver, rdi, kScratchRegister, slow);
558 __ bind(&holecheck_passed1);
560 // Smi stores don't require further checks.
562 __ JumpIfNotSmi(value, &non_smi_value);
563 if (increment_length == kIncrementLength) {
564 // Add 1 to receiver->length.
565 __ leal(rdi, Operand(key, 1));
566 __ Integer32ToSmiField(FieldOperand(receiver, JSArray::kLengthOffset), rdi);
568 // It's irrelevant whether array is smi-only or not when writing a smi.
// No write barrier needed for smi stores.
569 __ movp(FieldOperand(rbx, key, times_pointer_size, FixedArray::kHeaderSize),
573 __ bind(&non_smi_value);
574 // Writing a non-smi, check whether array allows non-smi elements.
575 // r9: receiver's map
576 __ CheckFastObjectElements(r9, &transition_smi_elements);
578 __ bind(&finish_object_store);
579 if (increment_length == kIncrementLength) {
580 // Add 1 to receiver->length.
581 __ leal(rdi, Operand(key, 1));
582 __ Integer32ToSmiField(FieldOperand(receiver, JSArray::kLengthOffset), rdi);
584 __ movp(FieldOperand(rbx, key, times_pointer_size, FixedArray::kHeaderSize),
586 __ movp(rdx, value); // Preserve the value which is returned.
// Heap-object store into a FixedArray requires the write barrier.
588 rbx, rdx, key, kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK;
591 __ bind(fast_double);
592 if (check_map == kCheckMap) {
593 // Check for fast double array case. If this fails, call through to the
595 // rdi: elements array's map
596 __ CompareRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex);
597 __ j(not_equal, slow);
600 // HOLECHECK: guards "A[i] double hole?"
601 // We have to see if the double version of the hole is present. If so
602 // go to the runtime.
// Only the upper 32 bits distinguish the hole NaN, so compare just those.
603 uint32_t offset = FixedDoubleArray::kHeaderSize + sizeof(kHoleNanLower32);
604 __ cmpl(FieldOperand(rbx, key, times_8, offset), Immediate(kHoleNanUpper32));
605 __ j(not_equal, &fast_double_without_map_check);
606 __ JumpIfDictionaryInPrototypeChain(receiver, rdi, kScratchRegister, slow);
608 __ bind(&fast_double_without_map_check);
609 __ StoreNumberToDoubleElements(value, rbx, key, xmm0,
610 &transition_double_elements);
611 if (increment_length == kIncrementLength) {
612 // Add 1 to receiver->length.
613 __ leal(rdi, Operand(key, 1));
614 __ Integer32ToSmiField(FieldOperand(receiver, JSArray::kLengthOffset), rdi);
618 __ bind(&transition_smi_elements);
619 __ movp(rbx, FieldOperand(receiver, HeapObject::kMapOffset));
621 // Transition the array appropriately depending on the value type.
622 __ movp(r9, FieldOperand(value, HeapObject::kMapOffset));
623 __ CompareRoot(r9, Heap::kHeapNumberMapRootIndex);
624 __ j(not_equal, &non_double_value);
626 // Value is a double. Transition FAST_SMI_ELEMENTS ->
627 // FAST_DOUBLE_ELEMENTS and complete the store.
628 __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
629 FAST_DOUBLE_ELEMENTS,
633 AllocationSiteMode mode = AllocationSite::GetMode(FAST_SMI_ELEMENTS,
634 FAST_DOUBLE_ELEMENTS);
635 ElementsTransitionGenerator::GenerateSmiToDouble(
636 masm, receiver, key, value, rbx, mode, slow);
637 __ movp(rbx, FieldOperand(receiver, JSObject::kElementsOffset));
638 __ jmp(&fast_double_without_map_check);
640 __ bind(&non_double_value);
641 // Value is not a double, FAST_SMI_ELEMENTS -> FAST_ELEMENTS
642 __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
647 mode = AllocationSite::GetMode(FAST_SMI_ELEMENTS, FAST_ELEMENTS);
648 ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
649 masm, receiver, key, value, rbx, mode, slow);
650 __ movp(rbx, FieldOperand(receiver, JSObject::kElementsOffset));
651 __ jmp(&finish_object_store);
653 __ bind(&transition_double_elements);
654 // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a
655 // HeapNumber. Make sure that the receiver is a Array with FAST_ELEMENTS and
656 // transition array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS
657 __ movp(rbx, FieldOperand(receiver, HeapObject::kMapOffset));
658 __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS,
663 mode = AllocationSite::GetMode(FAST_DOUBLE_ELEMENTS, FAST_ELEMENTS);
664 ElementsTransitionGenerator::GenerateDoubleToObject(
665 masm, receiver, key, value, rbx, mode, slow);
666 __ movp(rbx, FieldOperand(receiver, JSObject::kElementsOffset));
667 __ jmp(&finish_object_store);
// Generic keyed store stub: validates receiver and smi key, dispatches on
// JSArray vs. plain JSObject and on in-bounds vs. one-past-the-end
// (grow) stores, then falls into KeyedStoreGenerateGenericHelper for the
// actual element write. Everything else goes to the runtime with
// |strict_mode| preserved.
671 void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
672 StrictMode strict_mode) {
673 // Return address is on the stack.
674 Label slow, slow_with_tagged_index, fast_object, fast_object_grow;
675 Label fast_double, fast_double_grow;
676 Label array, extra, check_if_double_array;
677 Register receiver = ReceiverRegister();
678 Register key = NameRegister();
679 DCHECK(receiver.is(rdx));
682 // Check that the object isn't a smi.
683 __ JumpIfSmi(receiver, &slow_with_tagged_index);
684 // Get the map from the receiver.
685 __ movp(r9, FieldOperand(receiver, HeapObject::kMapOffset));
686 // Check that the receiver does not require access checks and is not observed.
687 // The generic stub does not perform map checks or handle observed objects.
688 __ testb(FieldOperand(r9, Map::kBitFieldOffset),
689 Immediate(1 << Map::kIsAccessCheckNeeded | 1 << Map::kIsObserved));
690 __ j(not_zero, &slow_with_tagged_index);
691 // Check that the key is a smi.
692 __ JumpIfNotSmi(key, &slow_with_tagged_index);
// From here on the key is an untagged int32; the slow path below re-tags
// it before calling the runtime.
693 __ SmiToInteger32(key, key);
695 __ CmpInstanceType(r9, JS_ARRAY_TYPE);
697 // Check that the object is some kind of JSObject.
698 __ CmpInstanceType(r9, FIRST_JS_OBJECT_TYPE);
701 // Object case: Check key against length in the elements array.
702 __ movp(rbx, FieldOperand(receiver, JSObject::kElementsOffset));
703 // Check array bounds.
704 __ SmiCompareInteger32(FieldOperand(rbx, FixedArray::kLengthOffset), key);
706 __ j(above, &fast_object);
708 // Slow case: call runtime.
710 __ Integer32ToSmi(key, key);
711 __ bind(&slow_with_tagged_index);
712 GenerateRuntimeSetProperty(masm, strict_mode);
713 // Never returns to here.
715 // Extra capacity case: Check if there is extra capacity to
716 // perform the store and update the length. Used for adding one
717 // element to the array by writing to array[array.length].
719 // receiver is a JSArray.
720 // rbx: receiver's elements array (a FixedArray)
721 // flags: smicompare (receiver.length(), rbx)
722 __ j(not_equal, &slow); // do not leave holes in the array
723 __ SmiCompareInteger32(FieldOperand(rbx, FixedArray::kLengthOffset), key);
724 __ j(below_equal, &slow);
725 // Increment index to get new length.
726 __ movp(rdi, FieldOperand(rbx, HeapObject::kMapOffset));
727 __ CompareRoot(rdi, Heap::kFixedArrayMapRootIndex);
728 __ j(not_equal, &check_if_double_array);
729 __ jmp(&fast_object_grow);
731 __ bind(&check_if_double_array);
732 // rdi: elements array's map
733 __ CompareRoot(rdi, Heap::kFixedDoubleArrayMapRootIndex);
734 __ j(not_equal, &slow);
735 __ jmp(&fast_double_grow);
737 // Array case: Get the length and the elements array from the JS
738 // array. Check that the array is in fast mode (and writable); if it
739 // is the length is always a smi.
741 // receiver is a JSArray.
742 __ movp(rbx, FieldOperand(receiver, JSObject::kElementsOffset));
744 // Check the key against the length in the array, compute the
745 // address to store into and fall through to fast case.
746 __ SmiCompareInteger32(FieldOperand(receiver, JSArray::kLengthOffset), key);
747 __ j(below_equal, &extra);
// Emit both helper variants: in-bounds store (map check, no length
// change) and grow store (no map re-check, length incremented).
749 KeyedStoreGenerateGenericHelper(masm, &fast_object, &fast_double,
750 &slow, kCheckMap, kDontIncrementLength);
751 KeyedStoreGenerateGenericHelper(masm, &fast_object_grow, &fast_double_grow,
752 &slow, kDontCheckMap, kIncrementLength);
// Emits code locating a mapped (context-aliased) entry of a sloppy
// arguments object. Returns an Operand addressing the context slot that
// holds the value. Jumps to |unmapped_case| when the key lies outside the
// mapped range or the mapped entry is the hole, and to |slow_case| for
// invalid receivers or keys.
756 static Operand GenerateMappedArgumentsLookup(MacroAssembler* masm,
762 Label* unmapped_case,
764 Heap* heap = masm->isolate()->heap();
766 // Check that the receiver is a JSObject. Because of the elements
767 // map check later, we do not need to check for interceptors or
768 // whether it requires access checks.
769 __ JumpIfSmi(object, slow_case);
770 // Check that the object is some kind of JSObject.
771 __ CmpObjectType(object, FIRST_JS_RECEIVER_TYPE, scratch1);
772 __ j(below, slow_case);
774 // Check that the key is a positive smi.
775 Condition check = masm->CheckNonNegativeSmi(key);
776 __ j(NegateCondition(check), slow_case);
778 // Load the elements into scratch1 and check its map. If not, jump
779 // to the unmapped lookup with the parameter map in scratch1.
780 Handle<Map> arguments_map(heap->sloppy_arguments_elements_map());
781 __ movp(scratch1, FieldOperand(object, JSObject::kElementsOffset));
782 __ CheckMap(scratch1, arguments_map, slow_case, DONT_DO_SMI_CHECK);
784 // Check if element is in the range of mapped arguments.
// The first two slots of the parameter map are the context and the
// backing store, so the mapped-entry count is length - 2.
785 __ movp(scratch2, FieldOperand(scratch1, FixedArray::kLengthOffset));
786 __ SmiSubConstant(scratch2, scratch2, Smi::FromInt(2));
787 __ cmpp(key, scratch2);
788 __ j(greater_equal, unmapped_case);
790 // Load element index and check whether it is the hole.
791 const int kHeaderSize = FixedArray::kHeaderSize + 2 * kPointerSize;
792 __ SmiToInteger64(scratch3, key);
793 __ movp(scratch2, FieldOperand(scratch1,
797 __ CompareRoot(scratch2, Heap::kTheHoleValueRootIndex);
798 __ j(equal, unmapped_case);
800 // Load value from context and return it. We can reuse scratch1 because
801 // we do not jump to the unmapped lookup (which requires the parameter
803 __ movp(scratch1, FieldOperand(scratch1, FixedArray::kHeaderSize));
804 __ SmiToInteger64(scratch3, scratch2);
805 return FieldOperand(scratch1,
808 Context::kHeaderSize);
// Emits code locating an entry in the arguments backing store (the
// unmapped part of a sloppy arguments object). Returns an Operand
// addressing the element; jumps to |slow_case| if the backing store is
// not a plain FixedArray or the key is out of bounds. Clobbers
// |parameter_map| (reused as the backing-store register) and |scratch|.
812 static Operand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
814 Register parameter_map,
817 // Element is in arguments backing store, which is referenced by the
818 // second element of the parameter_map. The parameter_map register
819 // must be loaded with the parameter map of the arguments object and is
821 const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize;
822 Register backing_store = parameter_map;
823 __ movp(backing_store, FieldOperand(parameter_map, kBackingStoreOffset));
824 Handle<Map> fixed_array_map(masm->isolate()->heap()->fixed_array_map());
825 __ CheckMap(backing_store, fixed_array_map, slow_case, DONT_DO_SMI_CHECK);
826 __ movp(scratch, FieldOperand(backing_store, FixedArray::kLengthOffset));
827 __ cmpp(key, scratch);
828 __ j(greater_equal, slow_case);
829 __ SmiToInteger64(scratch, key);
830 return FieldOperand(backing_store,
833 FixedArray::kHeaderSize);
// Keyed load specialized for sloppy arguments objects: first tries the
// mapped (context-aliased) lookup, then falls back to the unmapped
// backing-store lookup; the result is returned in rax.
837 void KeyedLoadIC::GenerateSloppyArguments(MacroAssembler* masm) {
838 // The return address is on the stack.
839 Register receiver = ReceiverRegister();
840 Register key = NameRegister();
841 DCHECK(receiver.is(rdx));
845 Operand mapped_location =
846 GenerateMappedArgumentsLookup(
847 masm, receiver, key, rbx, rax, rdi, &notin, &slow);
848 __ movp(rax, mapped_location);
851 // The unmapped lookup expects that the parameter map is in rbx.
852 Operand unmapped_location =
853 GenerateUnmappedArgumentsLookup(masm, key, rbx, rax, &slow);
// A hole in the backing store means the argument was deleted; go slow.
854 __ CompareRoot(unmapped_location, Heap::kTheHoleValueRootIndex);
856 __ movp(rax, unmapped_location);
// Keyed store specialized for sloppy arguments objects: stores through
// the mapped (context-aliased) slot when the key is mapped, otherwise
// through the unmapped backing store; both paths record a write barrier
// via the address computed into r9.
863 void KeyedStoreIC::GenerateSloppyArguments(MacroAssembler* masm) {
864 // The return address is on the stack.
866 Register receiver = ReceiverRegister();
867 Register name = NameRegister();
868 Register value = ValueRegister();
869 DCHECK(receiver.is(rdx));
870 DCHECK(name.is(rcx));
871 DCHECK(value.is(rax));
873 Operand mapped_location = GenerateMappedArgumentsLookup(
874 masm, receiver, name, rbx, rdi, r8, &notin, &slow);
875 __ movp(mapped_location, value);
// Materialize the slot address for the write barrier.
876 __ leap(r9, mapped_location);
886 // The unmapped lookup expects that the parameter map is in rbx.
887 Operand unmapped_location =
888 GenerateUnmappedArgumentsLookup(masm, name, rbx, rdi, &slow);
889 __ movp(unmapped_location, value);
890 __ leap(r9, unmapped_location);
// Megamorphic named load: probes the isolate-wide stub cache for a
// handler matching (receiver map, name); the probe jumps to the handler
// on a hit.
904 void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
905 // The return address is on the stack.
906 Register receiver = ReceiverRegister();
907 Register name = NameRegister();
908 DCHECK(receiver.is(rdx));
909 DCHECK(name.is(rcx));
911 // Probe the stub cache.
912 Code::Flags flags = Code::RemoveTypeAndHolderFromFlags(
913 Code::ComputeHandlerFlags(Code::LOAD_IC));
914 masm->isolate()->stub_cache()->GenerateProbe(
915 masm, flags, receiver, name, rbx, rax);
// Named load for receivers with dictionary (slow-mode) properties:
// probes the receiver's property dictionary directly, falling back to the
// runtime if the lookup fails.
921 void LoadIC::GenerateNormal(MacroAssembler* masm) {
922 Register dictionary = rax;
923 DCHECK(!dictionary.is(ReceiverRegister()));
924 DCHECK(!dictionary.is(NameRegister()));
929 FieldOperand(ReceiverRegister(), JSObject::kPropertiesOffset));
930 GenerateDictionaryLoad(masm, &slow, dictionary, NameRegister(), rbx, rdi,
934 // Dictionary load failed, go slow (but don't miss).
936 GenerateRuntimeGetProperty(masm);
940 // A register that isn't one of the parameters to the load ic.
// Used to hold the return address while the IC arguments are pushed.
941 static const Register LoadIC_TempRegister() { return rbx; }
// A scratch register free of the keyed-load IC's parameter registers;
// holds the return address while arguments are pushed (see GenerateMiss).
944 static const Register KeyedLoadIC_TempRegister() {
// Load IC miss handler: counts the miss, then tail-calls the runtime
// miss entry with (receiver, name) as arguments.
949 void LoadIC::GenerateMiss(MacroAssembler* masm) {
950 // The return address is on the stack.
952 Counters* counters = masm->isolate()->counters();
953 __ IncrementCounter(counters->load_miss(), 1);
// Rearrange the stack into runtime-call layout: (receiver, name) with
// the return address back on top.
955 __ PopReturnAddressTo(LoadIC_TempRegister());
956 __ Push(ReceiverRegister()); // receiver
957 __ Push(NameRegister()); // name
958 __ PushReturnAddressFrom(LoadIC_TempRegister());
960 // Perform tail call to the entry.
961 ExternalReference ref =
962 ExternalReference(IC_Utility(kLoadIC_Miss), masm->isolate());
963 __ TailCallExternalReference(ref, 2, 1);
// Tail-calls Runtime::kGetProperty with (receiver, name) as arguments.
967 void LoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
968 // The return address is on the stack.
970 __ PopReturnAddressTo(LoadIC_TempRegister());
971 __ Push(ReceiverRegister()); // receiver
972 __ Push(NameRegister()); // name
973 __ PushReturnAddressFrom(LoadIC_TempRegister());
975 // Perform tail call to the entry.
976 __ TailCallRuntime(Runtime::kGetProperty, 2, 1);
// Keyed load IC miss handler: counts the miss, then tail-calls the
// runtime miss entry with (receiver, name) as arguments.
980 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
981 // The return address is on the stack.
982 Counters* counters = masm->isolate()->counters();
983 __ IncrementCounter(counters->keyed_load_miss(), 1);
985 __ PopReturnAddressTo(KeyedLoadIC_TempRegister());
986 __ Push(ReceiverRegister()); // receiver
987 __ Push(NameRegister()); // name
988 __ PushReturnAddressFrom(KeyedLoadIC_TempRegister());
990 // Perform tail call to the entry.
991 ExternalReference ref =
992 ExternalReference(IC_Utility(kKeyedLoadIC_Miss), masm->isolate());
993 __ TailCallExternalReference(ref, 2, 1);
997 // IC register specifications
// These accessors define the fixed x64 calling convention between IC
// dispatch and the hand-written stubs above: receiver in rdx, name in
// rcx.
998 const Register LoadIC::ReceiverRegister() { return rdx; }
999 const Register LoadIC::NameRegister() { return rcx; }
// Only meaningful when vector-based ICs are enabled (FLAG_vector_ics).
1002 const Register LoadIC::SlotRegister() {
1003 DCHECK(FLAG_vector_ics);
// Only meaningful when vector-based ICs are enabled (FLAG_vector_ics).
1008 const Register LoadIC::VectorRegister() {
1009 DCHECK(FLAG_vector_ics);
// StoreIC convention: receiver in rdx, name in rcx, value in rax.
1014 const Register StoreIC::ReceiverRegister() { return rdx; }
1015 const Register StoreIC::NameRegister() { return rcx; }
1016 const Register StoreIC::ValueRegister() { return rax; }
// Additional fixed register used by the keyed store IC.
// NOTE(review): the register returned here is not visible in this
// excerpt — confirm against the full file.
1019 const Register KeyedStoreIC::MapRegister() {
// Tail-calls Runtime::kKeyedGetProperty with (receiver, name) arguments.
1024 void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
1025 // The return address is on the stack.
1027 __ PopReturnAddressTo(KeyedLoadIC_TempRegister());
1028 __ Push(ReceiverRegister()); // receiver
1029 __ Push(NameRegister()); // name
1030 __ PushReturnAddressFrom(KeyedLoadIC_TempRegister());
1032 // Perform tail call to the entry.
1033 __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
1037 void StoreIC::GenerateMegamorphic(MacroAssembler* masm) {
1038 // The return address is on the stack.
1040 // Get the receiver from the stack and probe the stub cache.
1041 Code::Flags flags = Code::RemoveTypeAndHolderFromFlags(
1042 Code::ComputeHandlerFlags(Code::STORE_IC));
1043 masm->isolate()->stub_cache()->GenerateProbe(
1044 masm, flags, ReceiverRegister(), NameRegister(), rbx, no_reg);
1046 // Cache miss: Jump to runtime.
1051 static void StoreIC_PushArgs(MacroAssembler* masm) {
1052 Register receiver = StoreIC::ReceiverRegister();
1053 Register name = StoreIC::NameRegister();
1054 Register value = StoreIC::ValueRegister();
1056 DCHECK(!rbx.is(receiver) && !rbx.is(name) && !rbx.is(value));
1058 __ PopReturnAddressTo(rbx);
1062 __ PushReturnAddressFrom(rbx);
// Miss handler stub for StoreIC: pushes (receiver, name, value) and
// tail-calls the kStoreIC_Miss IC utility.
void StoreIC::GenerateMiss(MacroAssembler* masm) {
  // Return address is on the stack.
  StoreIC_PushArgs(masm);

  // Perform tail call to the entry.
  ExternalReference ref =
      ExternalReference(IC_Utility(kStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
1077 void StoreIC::GenerateNormal(MacroAssembler* masm) {
1078 Register receiver = ReceiverRegister();
1079 Register name = NameRegister();
1080 Register value = ValueRegister();
1081 Register dictionary = rbx;
1085 __ movp(dictionary, FieldOperand(receiver, JSObject::kPropertiesOffset));
1086 GenerateDictionaryStore(masm, &miss, dictionary, name, value, r8, r9);
1087 Counters* counters = masm->isolate()->counters();
1088 __ IncrementCounter(counters->store_normal_hit(), 1);
1092 __ IncrementCounter(counters->store_normal_miss(), 1);
// Generic slow path for StoreIC: forwards (receiver, name, value,
// strict_mode) to Runtime::kSetProperty via a tail call.
void StoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,
                                         StrictMode strict_mode) {
  // Return address is on the stack.
  // rbx parks the return address, so it must not alias any argument register.
  DCHECK(!rbx.is(ReceiverRegister()) && !rbx.is(NameRegister()) &&
         !rbx.is(ValueRegister()));

  __ PopReturnAddressTo(rbx);
  __ Push(ReceiverRegister());
  __ Push(NameRegister());
  __ Push(ValueRegister());
  __ Push(Smi::FromInt(strict_mode));  // baked into the stub at compile time
  __ PushReturnAddressFrom(rbx);

  // Do tail-call to runtime routine.
  __ TailCallRuntime(Runtime::kSetProperty, 4, 1);
}
// Generic slow path for KeyedStoreIC: same shape as the StoreIC variant —
// forwards (receiver, name, value, strict_mode) to Runtime::kSetProperty.
void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,
                                              StrictMode strict_mode) {
  // Return address is on the stack.
  // rbx parks the return address, so it must not alias any argument register.
  DCHECK(!rbx.is(ReceiverRegister()) && !rbx.is(NameRegister()) &&
         !rbx.is(ValueRegister()));

  __ PopReturnAddressTo(rbx);
  __ Push(ReceiverRegister());
  __ Push(NameRegister());
  __ Push(ValueRegister());
  __ Push(Smi::FromInt(strict_mode));  // Strict mode.
  __ PushReturnAddressFrom(rbx);

  // Do tail-call to runtime routine.
  __ TailCallRuntime(Runtime::kSetProperty, 4, 1);
}
// Slow-case stub for StoreIC: pushes (receiver, name, value) and tail-calls
// the kStoreIC_Slow IC utility.
void StoreIC::GenerateSlow(MacroAssembler* masm) {
  // Return address is on the stack.
  StoreIC_PushArgs(masm);

  // Do tail-call to runtime routine.
  ExternalReference ref(IC_Utility(kStoreIC_Slow), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
// Slow-case stub for KeyedStoreIC: pushes (receiver, name, value) and
// tail-calls the kKeyedStoreIC_Slow IC utility.
void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) {
  // Return address is on the stack.
  StoreIC_PushArgs(masm);

  // Do tail-call to runtime routine.
  ExternalReference ref(IC_Utility(kKeyedStoreIC_Slow), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
// Miss handler stub for KeyedStoreIC: pushes (receiver, name, value) and
// tail-calls the kKeyedStoreIC_Miss IC utility.
void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) {
  // Return address is on the stack.
  StoreIC_PushArgs(masm);

  // Do tail-call to runtime routine.
  ExternalReference ref =
      ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}
1167 Condition CompareIC::ComputeCondition(Token::Value op) {
1169 case Token::EQ_STRICT:
1179 return greater_equal;
1182 return no_condition;
// Returns true if the CompareIC call site at |address| still carries the
// inlined smi-check marker (a "test al" byte after the call), meaning the
// inlined smi code has not been patched away.
bool CompareIC::HasInlinedSmiCode(Address address) {
  // The address of the instruction following the call.
  Address test_instruction_address =
      address + Assembler::kCallTargetAddressOffset;

  // If the instruction following the call is not a test al, nothing
  // was inlined.
  return *test_instruction_address == Assembler::kTestAlByte;
}
1198 void PatchInlinedSmiCode(Address address, InlinedSmiCheck check) {
1199 // The address of the instruction following the call.
1200 Address test_instruction_address =
1201 address + Assembler::kCallTargetAddressOffset;
1203 // If the instruction following the call is not a test al, nothing
1205 if (*test_instruction_address != Assembler::kTestAlByte) {
1206 DCHECK(*test_instruction_address == Assembler::kNopByte);
1210 Address delta_address = test_instruction_address + 1;
1211 // The delta to the start of the map check instruction and the
1212 // condition code uses at the patched jump.
1213 uint8_t delta = *reinterpret_cast<uint8_t*>(delta_address);
1214 if (FLAG_trace_ic) {
1215 PrintF("[ patching ic at %p, test=%p, delta=%d\n",
1216 address, test_instruction_address, delta);
1219 // Patch with a short conditional jump. Enabling means switching from a short
1220 // jump-if-carry/not-carry to jump-if-zero/not-zero, whereas disabling is the
1221 // reverse operation of that.
1222 Address jmp_address = test_instruction_address - delta;
1223 DCHECK((check == ENABLE_INLINED_SMI_CHECK)
1224 ? (*jmp_address == Assembler::kJncShortOpcode ||
1225 *jmp_address == Assembler::kJcShortOpcode)
1226 : (*jmp_address == Assembler::kJnzShortOpcode ||
1227 *jmp_address == Assembler::kJzShortOpcode));
1228 Condition cc = (check == ENABLE_INLINED_SMI_CHECK)
1229 ? (*jmp_address == Assembler::kJncShortOpcode ? not_zero : zero)
1230 : (*jmp_address == Assembler::kJnzShortOpcode ? not_carry : carry);
1231 *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc);
1235 } } // namespace v8::internal
1237 #endif // V8_TARGET_ARCH_X64