// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "v8.h"

#if defined(V8_TARGET_ARCH_ARM)

#include "assembler-arm.h"
#include "code-stubs.h"
#include "codegen.h"
#include "ic-inl.h"
#include "runtime.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {


// ----------------------------------------------------------------------------
// Static IC stub generators.
//

#define __ ACCESS_MASM(masm)
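// The "__" shorthand above routes every emitted instruction through the
// MacroAssembler, so the stub generators below read like assembly listings.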


static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm,
                                            Register type,
                                            Label* global_object) {
  // Register usage:
  //   type: holds the receiver instance type on entry.
  __ cmp(type, Operand(JS_GLOBAL_OBJECT_TYPE));
  __ b(eq, global_object);
  __ cmp(type, Operand(JS_BUILTINS_OBJECT_TYPE));
  __ b(eq, global_object);
  __ cmp(type, Operand(JS_GLOBAL_PROXY_TYPE));
  __ b(eq, global_object);
}


// Generated code falls through if the receiver is a regular non-global
// JS object with slow properties and no interceptors.
static void GenerateStringDictionaryReceiverCheck(MacroAssembler* masm,
                                                  Register receiver,
                                                  Register elements,
                                                  Register t0,
                                                  Register t1,
                                                  Label* miss) {
  // Register usage:
  //   receiver: holds the receiver on entry and is unchanged.
  //   elements: holds the property dictionary on fall through.
  // Scratch registers:
  //   t0: used to hold the receiver map.
  //   t1: used to hold the receiver instance type, receiver bit mask and
  //       elements map.

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss);

  // Check that the receiver is a valid JS object.
  __ CompareObjectType(receiver, t0, t1, FIRST_SPEC_OBJECT_TYPE);
  __ b(lt, miss);

  // If this assert fails, we have to check upper bound too.
  STATIC_ASSERT(LAST_TYPE == LAST_SPEC_OBJECT_TYPE);

  GenerateGlobalInstanceTypeCheck(masm, t1, miss);

  // Check that the global object does not require access checks.
  __ ldrb(t1, FieldMemOperand(t0, Map::kBitFieldOffset));
  __ tst(t1, Operand((1 << Map::kIsAccessCheckNeeded) |
                     (1 << Map::kHasNamedInterceptor)));
  __ b(ne, miss);

  // Check that the properties array is a dictionary.
  __ ldr(elements, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
  __ ldr(t1, FieldMemOperand(elements, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
  __ cmp(t1, ip);
  __ b(ne, miss);
}


// Helper function used from LoadIC/CallIC GenerateNormal.
//
// elements: Property dictionary. It is not clobbered if a jump to the miss
//           label is done.
// name:     Property name. It is not clobbered if a jump to the miss label is
//           done.
// result:   Register for the result. It is only updated if a jump to the miss
//           label is not done. Can be the same as elements or name clobbering
//           one of these in the case of not jumping to the miss label.
// The two scratch registers need to be different from elements, name and
// result.
// The generated code assumes that the receiver has slow properties,
// is not a global object and does not have interceptors.
static void GenerateDictionaryLoad(MacroAssembler* masm,
                                   Label* miss,
                                   Register elements,
                                   Register name,
                                   Register result,
                                   Register scratch1,
                                   Register scratch2) {
  // Main use of the scratch registers.
  // scratch1: Used as temporary and to hold the capacity of the property
  //           dictionary.
  // scratch2: Used as temporary.
  Label done;

  // Probe the dictionary.
  StringDictionaryLookupStub::GeneratePositiveLookup(masm,
                                                     miss,
                                                     &done,
                                                     elements,
                                                     name,
                                                     scratch1,
                                                     scratch2);

  // If probing finds an entry check that the value is a normal
  // property.
  __ bind(&done);  // scratch2 == elements + 4 * index
  const int kElementsStartOffset = StringDictionary::kHeaderSize +
      StringDictionary::kElementsStartIndex * kPointerSize;
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
  __ ldr(scratch1, FieldMemOperand(scratch2, kDetailsOffset));
  __ tst(scratch1, Operand(PropertyDetails::TypeField::kMask << kSmiTagSize));
  __ b(ne, miss);

  // Get the value at the masked, scaled index and return.
  __ ldr(result,
         FieldMemOperand(scratch2, kElementsStartOffset + 1 * kPointerSize));
}


// Helper function used from StoreIC::GenerateNormal.
//
// elements: Property dictionary. It is not clobbered if a jump to the miss
//           label is done.
// name:     Property name. It is not clobbered if a jump to the miss label is
//           done.
// value:    The value to store.
// The two scratch registers need to be different from elements, name and
// result.
// The generated code assumes that the receiver has slow properties,
// is not a global object and does not have interceptors.
static void GenerateDictionaryStore(MacroAssembler* masm,
                                    Label* miss,
                                    Register elements,
                                    Register name,
                                    Register value,
                                    Register scratch1,
                                    Register scratch2) {
  // Main use of the scratch registers.
  // scratch1: Used as temporary and to hold the capacity of the property
  //           dictionary.
  // scratch2: Used as temporary.
  Label done;

  // Probe the dictionary.
  StringDictionaryLookupStub::GeneratePositiveLookup(masm,
                                                     miss,
                                                     &done,
                                                     elements,
                                                     name,
                                                     scratch1,
                                                     scratch2);

  // If probing finds an entry in the dictionary check that the value
  // is a normal property that is not read only.
  __ bind(&done);  // scratch2 == elements + 4 * index
  const int kElementsStartOffset = StringDictionary::kHeaderSize +
      StringDictionary::kElementsStartIndex * kPointerSize;
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
  const int kTypeAndReadOnlyMask =
      (PropertyDetails::TypeField::kMask |
       PropertyDetails::AttributesField::encode(READ_ONLY)) << kSmiTagSize;
  __ ldr(scratch1, FieldMemOperand(scratch2, kDetailsOffset));
  __ tst(scratch1, Operand(kTypeAndReadOnlyMask));
  __ b(ne, miss);

  // Store the value at the masked, scaled index and return.
  const int kValueOffset = kElementsStartOffset + kPointerSize;
  __ add(scratch2, scratch2, Operand(kValueOffset - kHeapObjectTag));
  __ str(value, MemOperand(scratch2));

  // Update the write barrier. Make sure not to clobber the value.
  __ mov(scratch1, value);
  __ RecordWrite(
      elements, scratch2, scratch1, kLRHasNotBeenSaved, kDontSaveFPRegs);
}
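

// Stub for loading the "length" property of a JSArray receiver: delegates
// to StubCompiler::GenerateLoadArrayLength and falls back to the generic
// LoadIC miss handler for anything else.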
void LoadIC::GenerateArrayLength(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- r0    : receiver
  //  -- sp[0] : receiver
  // -----------------------------------
  Label miss;

  StubCompiler::GenerateLoadArrayLength(masm, r0, r3, &miss);
  __ bind(&miss);
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
}
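

// Stub for loading the "length" property of a String receiver (optionally
// unwrapping a JSValue wrapper first); everything else misses.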
void LoadIC::GenerateStringLength(MacroAssembler* masm, bool support_wrappers) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- r0    : receiver
  //  -- sp[0] : receiver
  // -----------------------------------
  Label miss;

  StubCompiler::GenerateLoadStringLength(masm, r0, r1, r3, &miss,
                                         support_wrappers);
  // Cache miss: Jump to runtime.
  __ bind(&miss);
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
}


void LoadIC::GenerateFunctionPrototype(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- r0    : receiver
  //  -- sp[0] : receiver
  // -----------------------------------
  Label miss;

  StubCompiler::GenerateLoadFunctionPrototype(masm, r0, r1, r3, &miss);
  __ bind(&miss);
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
}


// Checks the receiver for special cases (value type, slow case bits).
// Falls through for regular JS object.
static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
                                           Register receiver,
                                           Register map,
                                           Register scratch,
                                           int interceptor_bit,
                                           Label* slow) {
  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, slow);
  // Get the map of the receiver.
  __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));

  // Check bit field.
  __ ldrb(scratch, FieldMemOperand(map, Map::kBitFieldOffset));
  __ tst(scratch,
         Operand((1 << Map::kIsAccessCheckNeeded) | (1 << interceptor_bit)));
  __ b(ne, slow);
  // Check that the object is some kind of JS object EXCEPT JS Value type.
  // In the case that the object is a value-wrapper object,
  // we enter the runtime system to make sure that indexing into string
  // objects works as intended.
  ASSERT(JS_OBJECT_TYPE > JS_VALUE_TYPE);
  __ ldrb(scratch, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ cmp(scratch, Operand(JS_OBJECT_TYPE));
  __ b(lt, slow);
}


// Loads an indexed element from a fast case array.
// If not_fast_array is NULL, doesn't perform the elements map check.
static void GenerateFastArrayLoad(MacroAssembler* masm,
                                  Register receiver,
                                  Register key,
                                  Register elements,
                                  Register scratch1,
                                  Register scratch2,
                                  Register result,
                                  Label* not_fast_array,
                                  Label* out_of_range) {
  // Register use:
  //
  // receiver - holds the receiver on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // key      - holds the smi key on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // elements - holds the elements of the receiver on exit.
  //
  // result   - holds the result on exit if the load succeeded.
  //            Allowed to be the same as 'receiver' or 'key'.
  //            Unchanged on bailout so 'receiver' and 'key' can be safely
  //            used by further computation.
  //
  // Scratch registers:
  //
  // scratch1 - used to hold elements map and elements length.
  //            Holds the elements map if not_fast_array branch is taken.
  //
  // scratch2 - used to hold the loaded value.

  __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  if (not_fast_array != NULL) {
    // Check that the object is in fast mode and writable.
    __ ldr(scratch1, FieldMemOperand(elements, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
    __ cmp(scratch1, ip);
    __ b(ne, not_fast_array);
  } else {
    __ AssertFastElements(elements);
  }
  // Check that the key (index) is within bounds.
  __ ldr(scratch1, FieldMemOperand(elements, FixedArray::kLengthOffset));
  __ cmp(key, Operand(scratch1));
  __ b(hs, out_of_range);
  // Fast case: Do the load.
  __ add(scratch1, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  // The key is a smi.
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
  __ ldr(scratch2,
         MemOperand(scratch1, key, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  __ cmp(scratch2, ip);
  // In case the loaded value is the_hole we have to consult GetProperty
  // to ensure the prototype chain is searched.
  __ b(eq, out_of_range);
  __ mov(result, scratch2);
}


// Checks whether a key is an array index string or a symbol string.
// Falls through if a key is a symbol.
static void GenerateKeyStringCheck(MacroAssembler* masm,
                                   Register key,
                                   Register map,
                                   Register hash,
                                   Label* index_string,
                                   Label* not_symbol) {
  // The key is not a smi.
  // Is it a string?
  __ CompareObjectType(key, map, hash, FIRST_NONSTRING_TYPE);
  __ b(ge, not_symbol);

  // Is the string an array index, with cached numeric value?
  __ ldr(hash, FieldMemOperand(key, String::kHashFieldOffset));
  __ tst(hash, Operand(String::kContainsCachedArrayIndexMask));
  __ b(eq, index_string);

  // Is the string a symbol?
  // map: key map
  __ ldrb(hash, FieldMemOperand(map, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kSymbolTag != 0);
  __ tst(hash, Operand(kIsSymbolMask));
  __ b(eq, not_symbol);
}


// Defined in ic.cc.
Object* CallIC_Miss(Arguments args);


// The generated code does not accept smi keys.
// The generated code falls through if both probes miss.
void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm,
                                               int argc,
                                               Code::Kind kind,
                                               Code::ExtraICState extra_state) {
  // ----------- S t a t e -------------
  //  -- r1    : receiver
  //  -- r2    : name
  // -----------------------------------
  Label number, non_number, non_string, boolean, probe, miss;

  // Probe the stub cache.
  Code::Flags flags = Code::ComputeFlags(kind,
                                         MONOMORPHIC,
                                         extra_state,
                                         NORMAL,
                                         argc);
  Isolate::Current()->stub_cache()->GenerateProbe(
      masm, flags, r1, r2, r3, r4, r5, r6);

  // If the stub cache probing failed, the receiver might be a value.
  // For value objects, we use the map of the prototype objects for
  // the corresponding JSValue for the cache and that is what we need
  // to probe.
  //
  // Check for number.
  __ JumpIfSmi(r1, &number);
  __ CompareObjectType(r1, r3, r3, HEAP_NUMBER_TYPE);
  __ b(ne, &non_number);
  __ bind(&number);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::NUMBER_FUNCTION_INDEX, r1);
  __ b(&probe);

  // Check for string.
  __ bind(&non_number);
  __ cmp(r3, Operand(FIRST_NONSTRING_TYPE));
  __ b(hs, &non_string);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::STRING_FUNCTION_INDEX, r1);
  __ b(&probe);

  // Check for boolean.
  __ bind(&non_string);
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ cmp(r1, ip);
  __ b(eq, &boolean);
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
  __ cmp(r1, ip);
  __ b(ne, &miss);
  __ bind(&boolean);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::BOOLEAN_FUNCTION_INDEX, r1);

  // Probe the stub cache for the value object.
  __ bind(&probe);
  Isolate::Current()->stub_cache()->GenerateProbe(
      masm, flags, r1, r2, r3, r4, r5, r6);

  __ bind(&miss);
}


static void GenerateFunctionTailCall(MacroAssembler* masm,
                                     int argc,
                                     Label* miss,
                                     Register scratch) {
  // r1: function

  // Check that the value isn't a smi.
  __ JumpIfSmi(r1, miss);

  // Check that the value is a JSFunction.
  __ CompareObjectType(r1, scratch, scratch, JS_FUNCTION_TYPE);
  __ b(ne, miss);

  // Invoke the function.
  ParameterCount actual(argc);
  __ InvokeFunction(r1, actual, JUMP_FUNCTION,
                    NullCallWrapper(), CALL_AS_METHOD);
}
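

// Looks the called function up in the receiver's property dictionary and
// tail-calls it; used when the receiver has slow (dictionary) properties.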
void CallICBase::GenerateNormal(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  // Get the receiver of the function from the stack into r1.
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));

  GenerateStringDictionaryReceiverCheck(masm, r1, r0, r3, r4, &miss);

  // r0: elements
  // Search the dictionary - put result in register r1.
  GenerateDictionaryLoad(masm, &miss, r0, r2, r1, r3, r4);

  GenerateFunctionTailCall(masm, argc, &miss, r4);

  __ bind(&miss);
}


void CallICBase::GenerateMiss(MacroAssembler* masm,
                              int argc,
                              IC::UtilityId id,
                              Code::ExtraICState extra_state) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Isolate* isolate = masm->isolate();

  if (id == IC::kCallIC_Miss) {
    __ IncrementCounter(isolate->counters()->call_miss(), 1, r3, r4);
  } else {
    __ IncrementCounter(isolate->counters()->keyed_call_miss(), 1, r3, r4);
  }

  // Get the receiver of the function from the stack.
  __ ldr(r3, MemOperand(sp, argc * kPointerSize));

  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Push the receiver and the name of the function.
    __ Push(r3, r2);

    // Call the entry.
    __ mov(r0, Operand(2));
    __ mov(r1, Operand(ExternalReference(IC_Utility(id), isolate)));

    CEntryStub stub(1);
    __ CallStub(&stub);

    // Move result to r1 and leave the internal frame.
    __ mov(r1, Operand(r0));
  }

  // Check if the receiver is a global object of some sort.
  // This can happen only for regular CallIC but not KeyedCallIC.
  if (id == IC::kCallIC_Miss) {
    Label invoke, global;
    __ ldr(r2, MemOperand(sp, argc * kPointerSize));  // receiver
    __ JumpIfSmi(r2, &invoke);
    __ CompareObjectType(r2, r3, r3, JS_GLOBAL_OBJECT_TYPE);
    __ b(eq, &global);
    __ cmp(r3, Operand(JS_BUILTINS_OBJECT_TYPE));
    __ b(ne, &invoke);

    // Patch the receiver on the stack.
    __ bind(&global);
    __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset));
    __ str(r2, MemOperand(sp, argc * kPointerSize));
    __ bind(&invoke);
  }

  // Invoke the function.
  CallKind call_kind = CallICBase::Contextual::decode(extra_state)
      ? CALL_AS_FUNCTION
      : CALL_AS_METHOD;
  ParameterCount actual(argc);
  __ InvokeFunction(r1,
                    actual,
                    JUMP_FUNCTION,
                    NullCallWrapper(),
                    call_kind);
}


void CallIC::GenerateMegamorphic(MacroAssembler* masm,
                                 int argc,
                                 Code::ExtraICState extra_ic_state) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------

  // Get the receiver of the function from the stack into r1.
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));
  GenerateMonomorphicCacheProbe(masm, argc, Code::CALL_IC, extra_ic_state);
  GenerateMiss(masm, argc, extra_ic_state);
}
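

// Generic keyed call stub: smi keys go through the fast-elements or
// number-dictionary loads, symbol keys through a dictionary lookup or the
// monomorphic stub cache, and everything else through the miss handler.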
void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------

  // Get the receiver of the function from the stack into r1.
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));

  Label do_call, slow_call, slow_load, slow_reload_receiver;
  Label check_number_dictionary, check_string, lookup_monomorphic_cache;
  Label index_smi, index_string;

  // Check that the key is a smi.
  __ JumpIfNotSmi(r2, &check_string);
  __ bind(&index_smi);
  // Now the key is known to be a smi. This place is also jumped to from below
  // where a numeric string is converted to a smi.

  GenerateKeyedLoadReceiverCheck(
      masm, r1, r0, r3, Map::kHasIndexedInterceptor, &slow_call);

  GenerateFastArrayLoad(
      masm, r1, r2, r4, r3, r0, r1, &check_number_dictionary, &slow_load);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->keyed_call_generic_smi_fast(), 1, r0, r3);

  __ bind(&do_call);
  // receiver in r1 is not used after this point.
  // r2: key
  // r1: function
  GenerateFunctionTailCall(masm, argc, &slow_call, r0);

  __ bind(&check_number_dictionary);
  // r2: key
  // r3: elements map
  // r4: elements
  // Check whether the elements is a number dictionary.
  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
  __ cmp(r3, ip);
  __ b(ne, &slow_load);
  __ mov(r0, Operand(r2, ASR, kSmiTagSize));
  // r0: untagged index
  __ LoadFromNumberDictionary(&slow_load, r4, r2, r1, r0, r3, r5);
  __ IncrementCounter(counters->keyed_call_generic_smi_dict(), 1, r0, r3);
  __ jmp(&do_call);

  __ bind(&slow_load);
  // This branch is taken when calling KeyedCallIC_Miss is neither required
  // nor beneficial.
  __ IncrementCounter(counters->keyed_call_generic_slow_load(), 1, r0, r3);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(r2);  // save the key
    __ Push(r1, r2);  // pass the receiver and the key
    __ CallRuntime(Runtime::kKeyedGetProperty, 2);
    __ pop(r2);  // restore the key
  }
  __ mov(r1, r0);
  __ jmp(&do_call);

  __ bind(&check_string);
  GenerateKeyStringCheck(masm, r2, r0, r3, &index_string, &slow_call);

  // The key is known to be a symbol.
  // If the receiver is a regular JS object with slow properties then do
  // a quick inline probe of the receiver's dictionary.
  // Otherwise do the monomorphic cache probe.
  GenerateKeyedLoadReceiverCheck(
      masm, r1, r0, r3, Map::kHasNamedInterceptor, &lookup_monomorphic_cache);

  __ ldr(r0, FieldMemOperand(r1, JSObject::kPropertiesOffset));
  __ ldr(r3, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
  __ cmp(r3, ip);
  __ b(ne, &lookup_monomorphic_cache);

  GenerateDictionaryLoad(masm, &slow_load, r0, r2, r1, r3, r4);
  __ IncrementCounter(counters->keyed_call_generic_lookup_dict(), 1, r0, r3);
  __ jmp(&do_call);

  __ bind(&lookup_monomorphic_cache);
  __ IncrementCounter(counters->keyed_call_generic_lookup_cache(), 1, r0, r3);
  GenerateMonomorphicCacheProbe(masm,
                                argc,
                                Code::KEYED_CALL_IC,
                                Code::kNoExtraICState);
  // Fall through on miss.

  __ bind(&slow_call);
  // This branch is taken if:
  // - the receiver requires boxing or access check,
  // - the key is neither smi nor symbol,
  // - the value loaded is not a function,
  // - there is hope that the runtime will create a monomorphic call stub
  //   that will get fetched next time.
  __ IncrementCounter(counters->keyed_call_generic_slow(), 1, r0, r3);
  GenerateMiss(masm, argc);

  __ bind(&index_string);
  __ IndexFromHash(r3, r2);
  // Now jump to the place where smi keys are handled.
  __ jmp(&index_smi);
}


void KeyedCallIC::GenerateNormal(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------

  // Check if the name is a string.
  Label miss;
  __ JumpIfSmi(r2, &miss);
  __ IsObjectJSStringType(r2, r0, &miss);

  CallICBase::GenerateNormal(masm, argc);
  __ bind(&miss);
  GenerateMiss(masm, argc);
}


// Defined in ic.cc.
Object* LoadIC_Miss(Arguments args);


void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- r0    : receiver
  //  -- sp[0] : receiver
  // -----------------------------------

  // Probe the stub cache.
  Code::Flags flags =
      Code::ComputeFlags(Code::LOAD_IC, MONOMORPHIC);
  Isolate::Current()->stub_cache()->GenerateProbe(
      masm, flags, r0, r2, r3, r4, r5, r6);

  // Cache miss: Jump to runtime.
  GenerateMiss(masm);
}


void LoadIC::GenerateNormal(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- r0    : receiver
  //  -- sp[0] : receiver
  // -----------------------------------
  Label miss;

  GenerateStringDictionaryReceiverCheck(masm, r0, r1, r3, r4, &miss);

  // r1: elements
  GenerateDictionaryLoad(masm, &miss, r1, r2, r0, r3, r4);
  __ Ret();

  // Cache miss: Jump to runtime.
  __ bind(&miss);
  GenerateMiss(masm);
}


void LoadIC::GenerateMiss(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- r0    : receiver
  //  -- sp[0] : receiver
  // -----------------------------------
  Isolate* isolate = masm->isolate();

  __ IncrementCounter(isolate->counters()->load_miss(), 1, r3, r4);

  __ mov(r3, r0);
  __ Push(r3, r2);

  // Perform tail call to the entry.
  ExternalReference ref =
      ExternalReference(IC_Utility(kLoadIC_Miss), isolate);
  __ TailCallExternalReference(ref, 2, 1);
}
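

// Probes a non-strict arguments object: for a mapped (aliased) parameter
// index this returns the MemOperand of the backing context slot; unmapped
// indices jump to unmapped_case with the parameter map left in scratch1, and
// any other failure jumps to slow_case.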
static MemOperand GenerateMappedArgumentsLookup(MacroAssembler* masm,
                                                Register object,
                                                Register key,
                                                Register scratch1,
                                                Register scratch2,
                                                Register scratch3,
                                                Label* unmapped_case,
                                                Label* slow_case) {
  Heap* heap = masm->isolate()->heap();

  // Check that the receiver is a JSObject. Because of the map check
  // later, we do not need to check for interceptors or whether it
  // requires access checks.
  __ JumpIfSmi(object, slow_case);
  // Check that the object is some kind of JSObject.
  __ CompareObjectType(object, scratch1, scratch2, FIRST_JS_RECEIVER_TYPE);
  __ b(lt, slow_case);

  // Check that the key is a positive smi.
  __ tst(key, Operand(0x80000001));
  __ b(ne, slow_case);

  // Load the elements into scratch1 and check its map.
  Handle<Map> arguments_map(heap->non_strict_arguments_elements_map());
  __ ldr(scratch1, FieldMemOperand(object, JSObject::kElementsOffset));
  __ CheckMap(scratch1, scratch2, arguments_map, slow_case, DONT_DO_SMI_CHECK);

  // Check if element is in the range of mapped arguments. If not, jump
  // to the unmapped lookup with the parameter map in scratch1.
  __ ldr(scratch2, FieldMemOperand(scratch1, FixedArray::kLengthOffset));
  __ sub(scratch2, scratch2, Operand(Smi::FromInt(2)));
  __ cmp(key, Operand(scratch2));
  __ b(cs, unmapped_case);

  // Load element index and check whether it is the hole.
  const int kOffset =
      FixedArray::kHeaderSize + 2 * kPointerSize - kHeapObjectTag;

  __ mov(scratch3, Operand(kPointerSize >> 1));
  __ mul(scratch3, key, scratch3);
  __ add(scratch3, scratch3, Operand(kOffset));

  __ ldr(scratch2, MemOperand(scratch1, scratch3));
  __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex);
  __ cmp(scratch2, scratch3);
  __ b(eq, unmapped_case);

  // Load value from context and return it. We can reuse scratch1 because
  // we do not jump to the unmapped lookup (which requires the parameter
  // map in scratch2).
  __ ldr(scratch1, FieldMemOperand(scratch1, FixedArray::kHeaderSize));
  __ mov(scratch3, Operand(kPointerSize >> 1));
  __ mul(scratch3, scratch2, scratch3);
  __ add(scratch3, scratch3, Operand(Context::kHeaderSize - kHeapObjectTag));
  return MemOperand(scratch1, scratch3);
}


static MemOperand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
                                                  Register key,
                                                  Register parameter_map,
                                                  Register scratch,
                                                  Label* slow_case) {
  // Element is in arguments backing store, which is referenced by the
  // second element of the parameter_map. The parameter_map register
  // must be loaded with the parameter map of the arguments object and is
  // overwritten.
  const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize;
  Register backing_store = parameter_map;
  __ ldr(backing_store, FieldMemOperand(parameter_map, kBackingStoreOffset));
  Handle<Map> fixed_array_map(masm->isolate()->heap()->fixed_array_map());
  __ CheckMap(backing_store, scratch, fixed_array_map, slow_case,
              DONT_DO_SMI_CHECK);
  __ ldr(scratch, FieldMemOperand(backing_store, FixedArray::kLengthOffset));
  __ cmp(key, Operand(scratch));
  __ b(cs, slow_case);
  __ mov(scratch, Operand(kPointerSize >> 1));
  __ mul(scratch, key, scratch);
  __ add(scratch,
         scratch,
         Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  return MemOperand(backing_store, scratch);
}


void KeyedLoadIC::GenerateNonStrictArguments(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- r0     : key
  //  -- r1     : receiver
  // -----------------------------------
  Label slow, notin;
  MemOperand mapped_location =
      GenerateMappedArgumentsLookup(masm, r1, r0, r2, r3, r4, &notin, &slow);
  __ ldr(r0, mapped_location);
  __ Ret();
  __ bind(&notin);
  // The unmapped lookup expects that the parameter map is in r2.
  MemOperand unmapped_location =
      GenerateUnmappedArgumentsLookup(masm, r0, r2, r3, &slow);
  __ ldr(r2, unmapped_location);
  __ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
  __ cmp(r2, r3);
  __ b(eq, &slow);
  __ mov(r0, r2);
  __ Ret();
  __ bind(&slow);
  GenerateMiss(masm, false);
}


void KeyedStoreIC::GenerateNonStrictArguments(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- r0     : value
  //  -- r1     : key
  //  -- r2     : receiver
  //  -- lr     : return address
  // -----------------------------------
  Label slow, notin;
  MemOperand mapped_location =
      GenerateMappedArgumentsLookup(masm, r2, r1, r3, r4, r5, &notin, &slow);
  __ str(r0, mapped_location);
  __ add(r6, r3, r5);  // Compute the address of the slot just written.
  __ mov(r9, r0);
  __ RecordWrite(r3, r6, r9, kLRHasNotBeenSaved, kDontSaveFPRegs);
  __ Ret();
  __ bind(&notin);
  // The unmapped lookup expects that the parameter map is in r3.
  MemOperand unmapped_location =
      GenerateUnmappedArgumentsLookup(masm, r1, r3, r4, &slow);
  __ str(r0, unmapped_location);
  __ add(r6, r3, r4);  // Compute the address of the slot just written.
  __ mov(r9, r0);
  __ RecordWrite(r3, r6, r9, kLRHasNotBeenSaved, kDontSaveFPRegs);
  __ Ret();
  __ bind(&slow);
  GenerateMiss(masm, false);
}


void KeyedCallIC::GenerateNonStrictArguments(MacroAssembler* masm,
                                             int argc) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label slow, notin;
  // Load the receiver of the function from the stack.
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));
  MemOperand mapped_location =
      GenerateMappedArgumentsLookup(masm, r1, r2, r3, r4, r5, &notin, &slow);
  __ ldr(r1, mapped_location);
  GenerateFunctionTailCall(masm, argc, &slow, r3);
  __ bind(&notin);
  // The unmapped lookup expects that the parameter map is in r3.
  MemOperand unmapped_location =
      GenerateUnmappedArgumentsLookup(masm, r2, r3, r4, &slow);
  __ ldr(r1, unmapped_location);
  __ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
  __ cmp(r1, r3);
  __ b(eq, &slow);
  GenerateFunctionTailCall(masm, argc, &slow, r3);
  __ bind(&slow);
  GenerateMiss(masm, argc);
}


// Defined in ic.cc.
Object* KeyedLoadIC_Miss(Arguments args);


void KeyedLoadIC::GenerateMiss(MacroAssembler* masm, bool force_generic) {
  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- r0     : key
  //  -- r1     : receiver
  // -----------------------------------
  Isolate* isolate = masm->isolate();

  __ IncrementCounter(isolate->counters()->keyed_load_miss(), 1, r3, r4);

  __ Push(r1, r0);

  // Perform tail call to the entry.
  ExternalReference ref = force_generic
      ? ExternalReference(IC_Utility(kKeyedLoadIC_MissForceGeneric), isolate)
      : ExternalReference(IC_Utility(kKeyedLoadIC_Miss), isolate);

  __ TailCallExternalReference(ref, 2, 1);
}


void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- r0     : key
  //  -- r1     : receiver
  // -----------------------------------

  __ Push(r1, r0);

  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
}
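

// Generic keyed load: smi keys take the fast-elements or number-dictionary
// path, string keys are looked up through the keyed lookup cache or the
// receiver's property dictionary, and anything else falls back to the
// runtime via the slow case.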
void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- r0     : key
  //  -- r1     : receiver
  // -----------------------------------
  Label slow, check_string, index_smi, index_string, property_array_property;
  Label probe_dictionary, check_number_dictionary;

  Register key = r0;
  Register receiver = r1;

  Isolate* isolate = masm->isolate();

  // Check that the key is a smi.
  __ JumpIfNotSmi(key, &check_string);
  __ bind(&index_smi);
  // Now the key is known to be a smi. This place is also jumped to from below
  // where a numeric string is converted to a smi.

  GenerateKeyedLoadReceiverCheck(
      masm, receiver, r2, r3, Map::kHasIndexedInterceptor, &slow);

  // Check the receiver's map to see if it has fast elements.
  __ CheckFastElements(r2, r3, &check_number_dictionary);

  GenerateFastArrayLoad(
      masm, receiver, key, r4, r3, r2, r0, NULL, &slow);
  __ IncrementCounter(isolate->counters()->keyed_load_generic_smi(), 1, r2, r3);
  __ Ret();

  __ bind(&check_number_dictionary);
  __ ldr(r4, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ ldr(r3, FieldMemOperand(r4, JSObject::kMapOffset));

  // Check whether the elements is a number dictionary.
  // r0: key
  // r3: elements map
  // r4: elements
  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
  __ cmp(r3, ip);
  __ b(ne, &slow);
  __ mov(r2, Operand(r0, ASR, kSmiTagSize));
  __ LoadFromNumberDictionary(&slow, r4, r0, r0, r2, r3, r5);
  __ Ret();

  // Slow case, key and receiver still in r0 and r1.
  __ bind(&slow);
  __ IncrementCounter(isolate->counters()->keyed_load_generic_slow(),
                      1, r2, r3);
  GenerateRuntimeGetProperty(masm);

  __ bind(&check_string);
  GenerateKeyStringCheck(masm, key, r2, r3, &index_string, &slow);

  GenerateKeyedLoadReceiverCheck(
      masm, receiver, r2, r3, Map::kHasNamedInterceptor, &slow);

  // If the receiver is a fast-case object, check the keyed lookup
  // cache. Otherwise probe the dictionary.
  __ ldr(r3, FieldMemOperand(r1, JSObject::kPropertiesOffset));
  __ ldr(r4, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
  __ cmp(r4, ip);
  __ b(eq, &probe_dictionary);

  // Load the map of the receiver, compute the keyed lookup cache hash
  // based on 32 bits of the map pointer and the string hash.
  __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ mov(r3, Operand(r2, ASR, KeyedLookupCache::kMapHashShift));
  __ ldr(r4, FieldMemOperand(r0, String::kHashFieldOffset));
  __ eor(r3, r3, Operand(r4, ASR, String::kHashShift));
  int mask = KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask;
  __ And(r3, r3, Operand(mask));

  // Load the key (consisting of map and symbol) from the cache and
  // check match.
  Label load_in_object_property;
  static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket;
  Label hit_on_nth_entry[kEntriesPerBucket];
  ExternalReference cache_keys =
      ExternalReference::keyed_lookup_cache_keys(isolate);

  __ mov(r4, Operand(cache_keys));
  __ add(r4, r4, Operand(r3, LSL, kPointerSizeLog2 + 1));

  for (int i = 0; i < kEntriesPerBucket - 1; i++) {
    Label try_next_entry;
    // Load map and move r4 to next entry.
    __ ldr(r5, MemOperand(r4, kPointerSize * 2, PostIndex));
    __ cmp(r2, r5);
    __ b(ne, &try_next_entry);
    __ ldr(r5, MemOperand(r4, -kPointerSize));  // Load symbol
    __ cmp(r0, r5);
    __ b(eq, &hit_on_nth_entry[i]);
    __ bind(&try_next_entry);
  }

  // Last entry: Load map and move r4 to symbol.
  __ ldr(r5, MemOperand(r4, kPointerSize, PostIndex));
  __ cmp(r2, r5);
  __ b(ne, &slow);
  __ ldr(r5, MemOperand(r4));
  __ cmp(r0, r5);
  __ b(ne, &slow);

  // Get field offset.
  // r2     : receiver's map
  // r3     : lookup cache index
  ExternalReference cache_field_offsets =
      ExternalReference::keyed_lookup_cache_field_offsets(isolate);

  // Hit on nth entry.
  for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
    __ bind(&hit_on_nth_entry[i]);
    __ mov(r4, Operand(cache_field_offsets));
    if (i != 0) {
      __ add(r3, r3, Operand(i));
    }
    __ ldr(r5, MemOperand(r4, r3, LSL, kPointerSizeLog2));
    __ ldrb(r6, FieldMemOperand(r2, Map::kInObjectPropertiesOffset));
    __ sub(r5, r5, r6, SetCC);
    __ b(ge, &property_array_property);
    if (i != 0) {
      __ jmp(&load_in_object_property);
    }
  }

  // Load in-object property.
  __ bind(&load_in_object_property);
  __ ldrb(r6, FieldMemOperand(r2, Map::kInstanceSizeOffset));
  __ add(r6, r6, r5);  // Index from start of object.
  __ sub(r1, r1, Operand(kHeapObjectTag));  // Remove the heap tag.
  __ ldr(r0, MemOperand(r1, r6, LSL, kPointerSizeLog2));
  __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(),
                      1, r2, r3);
  __ Ret();

  // Load property array property.
  __ bind(&property_array_property);
  __ ldr(r1, FieldMemOperand(r1, JSObject::kPropertiesOffset));
  __ add(r1, r1, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ ldr(r0, MemOperand(r1, r5, LSL, kPointerSizeLog2));
  __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(),
                      1, r2, r3);
  __ Ret();

  // Do a quick inline probe of the receiver's dictionary, if it
  // exists.
  __ bind(&probe_dictionary);
  // r3: elements
  __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset));
  GenerateGlobalInstanceTypeCheck(masm, r2, &slow);
  // Load the property to r0.
  GenerateDictionaryLoad(masm, &slow, r3, r0, r0, r2, r4);
  __ IncrementCounter(isolate->counters()->keyed_load_generic_symbol(),
                      1, r2, r3);
  __ Ret();

  __ bind(&index_string);
  __ IndexFromHash(r3, key);
  // Now jump to the place where smi keys are handled.
  __ jmp(&index_smi);
}


void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- r0     : key (index)
  //  -- r1     : receiver
  // -----------------------------------
  Label miss;

  Register receiver = r1;
  Register index = r0;
  Register scratch = r3;
  Register result = r0;

  StringCharAtGenerator char_at_generator(receiver,
                                          index,
                                          scratch,
                                          result,
                                          &miss,  // When not a string.
                                          &miss,  // When not a number.
                                          &miss,  // When index out of range.
                                          STRING_INDEX_IS_ARRAY_INDEX);
  char_at_generator.GenerateFast(masm);
  __ Ret();

  StubRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm, call_helper);

  __ bind(&miss);
  GenerateMiss(masm, false);
}


void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- r0     : key
  //  -- r1     : receiver
  // -----------------------------------
  Label slow;

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(r1, &slow);

  // Check that the key is an array index, that is Uint32.
  __ tst(r0, Operand(kSmiTagMask | kSmiSignMask));
  __ b(ne, &slow);

  // Get the map of the receiver.
  __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));

  // Check that it has indexed interceptor and access checks
  // are not enabled for this object.
  __ ldrb(r3, FieldMemOperand(r2, Map::kBitFieldOffset));
  __ and_(r3, r3, Operand(kSlowCaseBitFieldMask));
  __ cmp(r3, Operand(1 << Map::kHasIndexedInterceptor));
  __ b(ne, &slow);

  // Everything is fine, call runtime.
  __ Push(r1, r0);  // Receiver, key.

  // Perform tail call to the entry.
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(kKeyedLoadPropertyWithInterceptor),
                        masm->isolate()),
      2,
      1);

  __ bind(&slow);
  GenerateMiss(masm, false);
}


void KeyedStoreIC::GenerateMiss(MacroAssembler* masm, bool force_generic) {
  // ---------- S t a t e --------------
  //  -- r0     : value
  //  -- r1     : key
  //  -- r2     : receiver
  //  -- lr     : return address
  // -----------------------------------

  // Push receiver, key and value for runtime call.
  __ Push(r2, r1, r0);

  ExternalReference ref = force_generic
      ? ExternalReference(IC_Utility(kKeyedStoreIC_MissForceGeneric),
                          masm->isolate())
      : ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}


void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- r0     : value
  //  -- r1     : key
  //  -- r2     : receiver
  //  -- lr     : return address
  // -----------------------------------

  // Push receiver, key and value for runtime call.
  __ Push(r2, r1, r0);

  // The slow case calls into the runtime to complete the store without causing
  // an IC miss that would otherwise cause a transition to the generic stub.
  ExternalReference ref =
      ExternalReference(IC_Utility(kKeyedStoreIC_Slow), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}


void KeyedStoreIC::GenerateTransitionElementsSmiToDouble(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- r2     : receiver
  //  -- r3     : target map
  //  -- lr     : return address
  // -----------------------------------
  // Must return the modified receiver in r0.
  if (!FLAG_trace_elements_transitions) {
    Label fail;
    ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &fail);
    __ mov(r0, r2);
    __ Ret();
    __ bind(&fail);
  }

  __ push(r2);
  __ TailCallRuntime(Runtime::kTransitionElementsSmiToDouble, 1, 1);
}


void KeyedStoreIC::GenerateTransitionElementsDoubleToObject(
    MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- r2     : receiver
  //  -- r3     : target map
  //  -- lr     : return address
  // -----------------------------------
  // Must return the modified receiver in r0.
  if (!FLAG_trace_elements_transitions) {
    Label fail;
    ElementsTransitionGenerator::GenerateDoubleToObject(masm, &fail);
    __ mov(r0, r2);
    __ Ret();
    __ bind(&fail);
  }

  __ push(r2);
  __ TailCallRuntime(Runtime::kTransitionElementsDoubleToObject, 1, 1);
}


void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,
                                              StrictModeFlag strict_mode) {
  // ---------- S t a t e --------------
  //  -- r0     : value
  //  -- r1     : key
  //  -- r2     : receiver
  //  -- lr     : return address
  // -----------------------------------

  // Push receiver, key and value for runtime call.
  __ Push(r2, r1, r0);

  __ mov(r1, Operand(Smi::FromInt(NONE)));         // PropertyAttributes
  __ mov(r0, Operand(Smi::FromInt(strict_mode)));  // Strict mode.
  __ Push(r1, r0);

  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
}
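

// Generic keyed store: handles smi keys into fast smi-only, object or double
// elements, grows a JSArray by one element when storing at array.length, and
// transitions the elements kind when the stored value requires it; all other
// cases are punted to the runtime via the slow case.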
void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
                                   StrictModeFlag strict_mode) {
  // ---------- S t a t e --------------
  //  -- r0     : value
  //  -- r1     : key
  //  -- r2     : receiver
  //  -- lr     : return address
  // -----------------------------------
  Label slow, array, extra, check_if_double_array;
  Label fast_object_with_map_check, fast_object_without_map_check;
  Label fast_double_with_map_check, fast_double_without_map_check;
  Label transition_smi_elements, finish_object_store, non_double_value;
  Label transition_double_elements;

  // Register usage.
  Register value = r0;
  Register key = r1;
  Register receiver = r2;
  Register receiver_map = r3;
  Register elements_map = r6;
  Register elements = r7;  // Elements array of the receiver.
  // r4 and r5 are used as general scratch registers.

  // Check that the key is a smi.
  __ JumpIfNotSmi(key, &slow);
  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, &slow);
  // Get the map of the object.
  __ ldr(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  // Check that the receiver does not require access checks. We need
  // to do this because this generic stub does not perform map checks.
  __ ldrb(ip, FieldMemOperand(receiver_map, Map::kBitFieldOffset));
  __ tst(ip, Operand(1 << Map::kIsAccessCheckNeeded));
  __ b(ne, &slow);
  // Check if the object is a JS array or not.
  __ ldrb(r4, FieldMemOperand(receiver_map, Map::kInstanceTypeOffset));
  __ cmp(r4, Operand(JS_ARRAY_TYPE));
  __ b(eq, &array);
  // Check that the object is some kind of JSObject.
  __ cmp(r4, Operand(FIRST_JS_OBJECT_TYPE));
  __ b(lt, &slow);

  // Object case: Check key against length in the elements array.
  __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  // Check array bounds. Both the key and the length of FixedArray are smis.
  __ ldr(ip, FieldMemOperand(elements, FixedArray::kLengthOffset));
  __ cmp(key, Operand(ip));
  __ b(lo, &fast_object_with_map_check);

  // Slow case, handle jump to runtime.
  __ bind(&slow);
  // Entry registers are intact.
  // r0: value
  // r1: key
  // r2: receiver
  GenerateRuntimeSetProperty(masm, strict_mode);

  // Extra capacity case: Check if there is extra capacity to
  // perform the store and update the length. Used for adding one
  // element to the array by writing to array[array.length].
  __ bind(&extra);
  // Condition code from comparing key and array length is still available.
  __ b(ne, &slow);  // Only support writing to array[array.length].
  // Check for room in the elements backing store.
  // Both the key and the length of FixedArray are smis.
  __ ldr(ip, FieldMemOperand(elements, FixedArray::kLengthOffset));
  __ cmp(key, Operand(ip));
  __ b(hs, &slow);
  __ ldr(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset));
  __ cmp(elements_map,
         Operand(masm->isolate()->factory()->fixed_array_map()));
  __ b(ne, &check_if_double_array);
  // Calculate key + 1 as smi.
  STATIC_ASSERT(kSmiTag == 0);
  __ add(r4, key, Operand(Smi::FromInt(1)));
  __ str(r4, FieldMemOperand(receiver, JSArray::kLengthOffset));
  __ b(&fast_object_without_map_check);

  __ bind(&check_if_double_array);
  __ cmp(elements_map,
         Operand(masm->isolate()->factory()->fixed_double_array_map()));
  __ b(ne, &slow);
  // Add 1 to key, and go to common element store code for doubles.
  STATIC_ASSERT(kSmiTag == 0);
  __ add(r4, key, Operand(Smi::FromInt(1)));
  __ str(r4, FieldMemOperand(receiver, JSArray::kLengthOffset));
  __ jmp(&fast_double_without_map_check);

  // Array case: Get the length and the elements array from the JS
  // array. Check that the array is in fast mode (and writable); if it
  // is the length is always a smi.
  __ bind(&array);
  __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));

  // Check the key against the length in the array.
  __ ldr(ip, FieldMemOperand(receiver, JSArray::kLengthOffset));
  __ cmp(key, Operand(ip));
  __ b(hs, &extra);
  // Fall through to fast case.

  __ bind(&fast_object_with_map_check);
  Register scratch_value = r4;
  Register address = r5;
  __ ldr(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset));
  __ cmp(elements_map,
         Operand(masm->isolate()->factory()->fixed_array_map()));
  __ b(ne, &fast_double_with_map_check);
  __ bind(&fast_object_without_map_check);
  // Smi stores don't require further checks.
  Label non_smi_value;
  __ JumpIfNotSmi(value, &non_smi_value);
  // It's irrelevant whether array is smi-only or not when writing a smi.
  __ add(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ add(address, address, Operand(key, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ str(value, MemOperand(address));
  __ Ret();

  __ bind(&non_smi_value);
  // Escape to elements kind transition case.
  __ CheckFastObjectElements(receiver_map, scratch_value,
                             &transition_smi_elements);
  // Fast elements array, store the value to the elements backing store.
  __ bind(&finish_object_store);
  __ add(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ add(address, address, Operand(key, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ str(value, MemOperand(address));
  // Update write barrier for the elements array address.
  __ mov(scratch_value, value);  // Preserve the value which is returned.
  __ RecordWrite(elements,
                 address,
                 scratch_value,
                 kLRHasNotBeenSaved,
                 kDontSaveFPRegs,
                 EMIT_REMEMBERED_SET,
                 OMIT_SMI_CHECK);
  __ Ret();

  __ bind(&fast_double_with_map_check);
  // Check for fast double array case. If this fails, call through to the
  // runtime.
  __ cmp(elements_map,
         Operand(masm->isolate()->factory()->fixed_double_array_map()));
  __ b(ne, &slow);
  __ bind(&fast_double_without_map_check);
  __ StoreNumberToDoubleElements(value,
                                 key,
                                 receiver,
                                 elements,
                                 r3,
                                 r4,
                                 r5,
                                 r6,
                                 &transition_double_elements);
  __ Ret();

  __ bind(&transition_smi_elements);
  // Transition the array appropriately depending on the value type.
  __ ldr(r4, FieldMemOperand(value, HeapObject::kMapOffset));
  __ CompareRoot(r4, Heap::kHeapNumberMapRootIndex);
  __ b(ne, &non_double_value);

  // Value is a double. Transition FAST_SMI_ONLY_ELEMENTS ->
  // FAST_DOUBLE_ELEMENTS and complete the store.
  __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
                                         FAST_DOUBLE_ELEMENTS,
                                         receiver_map,
                                         r4,
                                         &slow);
  ASSERT(receiver_map.is(r3));  // Transition code expects map in r3.
  ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &slow);
  __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ jmp(&fast_double_without_map_check);

  __ bind(&non_double_value);
  // Value is not a double, FAST_SMI_ONLY_ELEMENTS -> FAST_ELEMENTS.
  __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
                                         FAST_ELEMENTS,
                                         receiver_map,
                                         r4,
                                         &slow);
  ASSERT(receiver_map.is(r3));  // Transition code expects map in r3.
  ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm);
  __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ jmp(&finish_object_store);

  __ bind(&transition_double_elements);
  // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a
  // HeapNumber. Make sure that the receiver is an Array with FAST_ELEMENTS
  // and transition the array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS.
  __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS,
                                         FAST_ELEMENTS,
                                         receiver_map,
                                         r4,
                                         &slow);
  ASSERT(receiver_map.is(r3));  // Transition code expects map in r3.
  ElementsTransitionGenerator::GenerateDoubleToObject(masm, &slow);
  __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ jmp(&finish_object_store);
}
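

// Megamorphic named store: probes the stub cache for a monomorphic store
// stub and falls back to the generic miss handler.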
void StoreIC::GenerateMegamorphic(MacroAssembler* masm,
                                  StrictModeFlag strict_mode) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------

  // Get the receiver from the stack and probe the stub cache.
  Code::Flags flags =
      Code::ComputeFlags(Code::STORE_IC, MONOMORPHIC, strict_mode);

  Isolate::Current()->stub_cache()->GenerateProbe(
      masm, flags, r1, r2, r3, r4, r5, r6);

  // Cache miss: Jump to runtime.
  GenerateMiss(masm);
}


void StoreIC::GenerateMiss(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------

  __ Push(r1, r2, r0);

  // Perform tail call to the entry.
  ExternalReference ref =
      ExternalReference(IC_Utility(kStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}


void StoreIC::GenerateArrayLength(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  //
  // This accepts as a receiver anything JSArray::SetElementsLength accepts
  // (currently anything except for external arrays which means anything with
  // elements of FixedArray type). Value must be a number, but only smis are
  // accepted as the most common case.

  Label miss;

  Register receiver = r1;
  Register value = r0;
  Register scratch = r3;

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Check that the object is a JS array.
  __ CompareObjectType(receiver, scratch, scratch, JS_ARRAY_TYPE);
  __ b(ne, &miss);

  // Check that elements are FixedArray.
  // We rely on StoreIC_ArrayLength below to deal with all types of
  // fast elements (including COW).
  __ ldr(scratch, FieldMemOperand(receiver, JSArray::kElementsOffset));
  __ CompareObjectType(scratch, scratch, scratch, FIXED_ARRAY_TYPE);
  __ b(ne, &miss);

  // Check that the array has fast properties, otherwise the length
  // property might have been redefined.
  __ ldr(scratch, FieldMemOperand(receiver, JSArray::kPropertiesOffset));
  __ ldr(scratch, FieldMemOperand(scratch, FixedArray::kMapOffset));
  __ CompareRoot(scratch, Heap::kHashTableMapRootIndex);
  __ b(eq, &miss);

  // Check that value is a smi.
  __ JumpIfNotSmi(value, &miss);

  // Prepare tail call to StoreIC_ArrayLength.
  __ Push(receiver, value);

  ExternalReference ref =
      ExternalReference(IC_Utility(kStoreIC_ArrayLength), masm->isolate());
  __ TailCallExternalReference(ref, 2, 1);

  __ bind(&miss);

  GenerateMiss(masm);
}


void StoreIC::GenerateNormal(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  GenerateStringDictionaryReceiverCheck(masm, r1, r3, r4, r5, &miss);

  GenerateDictionaryStore(masm, &miss, r3, r2, r0, r4, r5);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->store_normal_hit(),
                      1, r4, r5);
  __ Ret();

  __ bind(&miss);
  __ IncrementCounter(counters->store_normal_miss(), 1, r4, r5);
  GenerateMiss(masm);
}


void StoreIC::GenerateGlobalProxy(MacroAssembler* masm,
                                  StrictModeFlag strict_mode) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------

  __ Push(r1, r2, r0);

  __ mov(r1, Operand(Smi::FromInt(NONE)));  // PropertyAttributes
  __ mov(r0, Operand(Smi::FromInt(strict_mode)));
  __ Push(r1, r0);

  // Do tail-call to runtime routine.
  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
}


#undef __


Condition CompareIC::ComputeCondition(Token::Value op) {
  switch (op) {
    case Token::EQ_STRICT:
    case Token::EQ:
      return eq;
    case Token::LT:
      return lt;
    case Token::GT:
      return gt;
    case Token::LTE:
      return le;
    case Token::GTE:
      return ge;
    default:
      UNREACHABLE();
      return kNoCondition;
  }
}


void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
  HandleScope scope;
  Handle<Code> rewritten;
  State previous_state = GetState();
  State state = TargetState(previous_state, false, x, y);
  if (state == GENERIC) {
    CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS, r1, r0);
    rewritten = stub.GetCode();
  } else {
    ICCompareStub stub(op_, state);
    if (state == KNOWN_OBJECTS) {
      stub.set_known_map(Handle<Map>(Handle<JSObject>::cast(x)->map()));
    }
    rewritten = stub.GetCode();
  }
  set_target(*rewritten);

#ifdef DEBUG
  if (FLAG_trace_ic) {
    PrintF("[CompareIC (%s->%s)#%s]\n",
           GetStateName(previous_state),
           GetStateName(state),
           Token::Name(op_));
  }
#endif

  // Activate inlined smi code.
  if (previous_state == UNINITIALIZED) {
    PatchInlinedSmiCode(address());
  }
}
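

// Activates an inlined smi check at an IC call site: the delta encoded in
// the cmp instruction following the call locates the inlined check, whose
// cmp/branch pair is rewritten into a real smi test (tst rx, #kSmiTagMask)
// with the branch condition flipped.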
void PatchInlinedSmiCode(Address address) {
  Address cmp_instruction_address =
      address + Assembler::kCallTargetAddressOffset;

  // If the instruction following the call is not a cmp rx, #yyy, nothing
  // was inlined.
  Instr instr = Assembler::instr_at(cmp_instruction_address);
  if (!Assembler::IsCmpImmediate(instr)) {
    return;
  }

  // The delta to the start of the map check instruction and the
  // condition code used at the patched jump.
  int delta = Assembler::GetCmpImmediateRawImmediate(instr);
  delta +=
      Assembler::GetCmpImmediateRegister(instr).code() * kOff12Mask;
  // If the delta is 0 the instruction is cmp r0, #0 which also signals that
  // nothing was inlined.
  if (delta == 0) {
    return;
  }

#ifdef DEBUG
  if (FLAG_trace_ic) {
    PrintF("[ patching ic at %p, cmp=%p, delta=%d\n",
           address, cmp_instruction_address, delta);
  }
#endif

  Address patch_address =
      cmp_instruction_address - delta * Instruction::kInstrSize;
  Instr instr_at_patch = Assembler::instr_at(patch_address);
  Instr branch_instr =
      Assembler::instr_at(patch_address + Instruction::kInstrSize);
  ASSERT(Assembler::IsCmpRegister(instr_at_patch));
  ASSERT_EQ(Assembler::GetRn(instr_at_patch).code(),
            Assembler::GetRm(instr_at_patch).code());
  ASSERT(Assembler::IsBranch(branch_instr));
  if (Assembler::GetCondition(branch_instr) == eq) {
    // This is patching a "jump if not smi" site to be active.
    // Changing
    //   cmp rx, rx
    //   b eq, <target>
    // to
    //   tst rx, #kSmiTagMask
    //   b ne, <target>
    CodePatcher patcher(patch_address, 2);
    Register reg = Assembler::GetRn(instr_at_patch);
    patcher.masm()->tst(reg, Operand(kSmiTagMask));
    patcher.EmitCondition(ne);
  } else {
    ASSERT(Assembler::GetCondition(branch_instr) == ne);
    // This is patching a "jump if smi" site to be active.
    // Changing
    //   cmp rx, rx
    //   b ne, <target>
    // to
    //   tst rx, #kSmiTagMask
    //   b eq, <target>
    CodePatcher patcher(patch_address, 2);
    Register reg = Assembler::GetRn(instr_at_patch);
    patcher.masm()->tst(reg, Operand(kSmiTagMask));
    patcher.EmitCondition(eq);
  }
}


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM