1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
32 #if defined(V8_TARGET_ARCH_MIPS)
35 #include "code-stubs.h"
38 #include "stub-cache.h"
44 // ----------------------------------------------------------------------------
45 // Static IC stub generators.
48 #define __ ACCESS_MASM(masm)
// Branches to |global_object| when the instance type held in the 'type'
// register is one of the three global-object kinds; falls through for all
// other instance types.
// NOTE(review): intermediate lines (including the `Register type` parameter
// declaration) are elided in this listing — confirm against the full source.
51 static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm,
53 Label* global_object) {
55 // type: holds the receiver instance type on entry.
56 __ Branch(global_object, eq, type, Operand(JS_GLOBAL_OBJECT_TYPE));
57 __ Branch(global_object, eq, type, Operand(JS_BUILTINS_OBJECT_TYPE));
58 __ Branch(global_object, eq, type, Operand(JS_GLOBAL_PROXY_TYPE));
// Verifies that the receiver is a non-global JS object with dictionary
// (slow) properties and no interceptors or access checks; jumps to |miss|
// otherwise. On fall-through, |elements| holds the property dictionary
// (verified against the hash-table map).
// NOTE(review): the parameter list is elided in this listing.
62 // Generated code falls through if the receiver is a regular non-global
63 // JS object with slow properties and no interceptors.
64 static void GenerateStringDictionaryReceiverCheck(MacroAssembler* masm,
71 // receiver: holds the receiver on entry and is unchanged.
72 // elements: holds the property dictionary on fall through.
74 // scratch0: used to holds the receiver map.
75 // scratch1: used to holds the receiver instance type, receiver bit mask
78 // Check that the receiver isn't a smi.
79 __ JumpIfSmi(receiver, miss);
81 // Check that the receiver is a valid JS object.
82 __ GetObjectType(receiver, scratch0, scratch1);
83 __ Branch(miss, lt, scratch1, Operand(FIRST_SPEC_OBJECT_TYPE));
85 // If this assert fails, we have to check upper bound too.
86 STATIC_ASSERT(LAST_TYPE == LAST_SPEC_OBJECT_TYPE);
// Global objects are rejected: their properties need special handling.
88 GenerateGlobalInstanceTypeCheck(masm, scratch1, miss);
90 // Check that the global object does not require access checks.
91 __ lbu(scratch1, FieldMemOperand(scratch0, Map::kBitFieldOffset));
92 __ And(scratch1, scratch1, Operand((1 << Map::kIsAccessCheckNeeded) |
93 (1 << Map::kHasNamedInterceptor)));
94 __ Branch(miss, ne, scratch1, Operand(zero_reg));
// The properties must be a dictionary: the map of the properties array
// must be the hash-table map.
96 __ lw(elements, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
97 __ lw(scratch1, FieldMemOperand(elements, HeapObject::kMapOffset));
98 __ LoadRoot(scratch0, Heap::kHashTableMapRootIndex);
99 __ Branch(miss, ne, scratch1, Operand(scratch0));
// Probes the string dictionary in |elements| for |name| and, on a hit with
// a NORMAL property, loads the property value into |result|. Jumps to the
// miss label on lookup failure or when the property is not normal.
// NOTE(review): parameter declarations and some statements are elided in
// this listing.
103 // Helper function used from LoadIC/CallIC GenerateNormal.
105 // elements: Property dictionary. It is not clobbered if a jump to the miss
107 // name: Property name. It is not clobbered if a jump to the miss label is
109 // result: Register for the result. It is only updated if a jump to the miss
110 // label is not done. Can be the same as elements or name clobbering
111 // one of these in the case of not jumping to the miss label.
112 // The two scratch registers need to be different from elements, name and
114 // The generated code assumes that the receiver has slow properties,
115 // is not a global object and does not have interceptors.
116 // The address returned from GenerateStringDictionaryProbes() in scratch2
118 static void GenerateDictionaryLoad(MacroAssembler* masm,
125 // Main use of the scratch registers.
126 // scratch1: Used as temporary and to hold the capacity of the property
128 // scratch2: Used as temporary.
131 // Probe the dictionary.
132 StringDictionaryLookupStub::GeneratePositiveLookup(masm,
140 // If probing finds an entry check that the value is a normal
142 __ bind(&done); // scratch2 == elements + 4 * index.
143 const int kElementsStartOffset = StringDictionary::kHeaderSize +
144 StringDictionary::kElementsStartIndex * kPointerSize;
// Each dictionary entry is (key, value, details); details live at +2 words.
145 const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
146 __ lw(scratch1, FieldMemOperand(scratch2, kDetailsOffset));
// Property type bits must all be zero (NORMAL) or we miss.
149 Operand(PropertyDetails::TypeField::kMask << kSmiTagSize));
150 __ Branch(miss, ne, at, Operand(zero_reg));
152 // Get the value at the masked, scaled index and return.
154 FieldMemOperand(scratch2, kElementsStartOffset + 1 * kPointerSize));
// Probes the string dictionary in |elements| for |name| and, on a hit with
// a writable NORMAL property, stores |value| into the entry and emits the
// write barrier. Jumps to the miss label on lookup failure, non-normal
// type, or a read-only property.
// NOTE(review): parameter declarations and some statements are elided in
// this listing.
158 // Helper function used from StoreIC::GenerateNormal.
160 // elements: Property dictionary. It is not clobbered if a jump to the miss
162 // name: Property name. It is not clobbered if a jump to the miss label is
164 // value: The value to store.
165 // The two scratch registers need to be different from elements, name and
167 // The generated code assumes that the receiver has slow properties,
168 // is not a global object and does not have interceptors.
169 // The address returned from GenerateStringDictionaryProbes() in scratch2
171 static void GenerateDictionaryStore(MacroAssembler* masm,
178 // Main use of the scratch registers.
179 // scratch1: Used as temporary and to hold the capacity of the property
181 // scratch2: Used as temporary.
184 // Probe the dictionary.
185 StringDictionaryLookupStub::GeneratePositiveLookup(masm,
193 // If probing finds an entry in the dictionary check that the value
194 // is a normal property that is not read only.
195 __ bind(&done); // scratch2 == elements + 4 * index.
196 const int kElementsStartOffset = StringDictionary::kHeaderSize +
197 StringDictionary::kElementsStartIndex * kPointerSize;
198 const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
// Reject both non-NORMAL types and READ_ONLY attributes in one mask test.
199 const int kTypeAndReadOnlyMask =
200 (PropertyDetails::TypeField::kMask |
201 PropertyDetails::AttributesField::encode(READ_ONLY)) << kSmiTagSize;
202 __ lw(scratch1, FieldMemOperand(scratch2, kDetailsOffset));
203 __ And(at, scratch1, Operand(kTypeAndReadOnlyMask));
204 __ Branch(miss, ne, at, Operand(zero_reg));
206 // Store the value at the masked, scaled index and return.
207 const int kValueOffset = kElementsStartOffset + kPointerSize;
208 __ Addu(scratch2, scratch2, Operand(kValueOffset - kHeapObjectTag));
209 __ sw(value, MemOperand(scratch2));
211 // Update the write barrier. Make sure not to clobber the value.
212 __ mov(scratch1, value);
214 elements, scratch2, scratch1, kRAHasNotBeenSaved, kDontSaveFPRegs);
// LoadIC fast path for JSArray.length: delegates to the shared stub
// compiler helper and falls back to the generic LOAD_IC miss stub.
218 void LoadIC::GenerateArrayLength(MacroAssembler* masm) {
219 // ----------- S t a t e -------------
221 // -- ra : return address
223 // -- sp[0] : receiver
224 // -----------------------------------
227 StubCompiler::GenerateLoadArrayLength(masm, a0, a3, &miss);
229 StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
// LoadIC fast path for String.length; |support_wrappers| is forwarded to
// the stub compiler (usage elided in this listing). Misses go to the
// generic LOAD_IC miss stub.
233 void LoadIC::GenerateStringLength(MacroAssembler* masm, bool support_wrappers) {
234 // ----------- S t a t e -------------
236 // -- lr : return address
238 // -- sp[0] : receiver
239 // -----------------------------------
242 StubCompiler::GenerateLoadStringLength(masm, a0, a1, a3, &miss,
244 // Cache miss: Jump to runtime.
246 StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
// LoadIC fast path for Function.prototype: delegates to the shared stub
// compiler helper and falls back to the generic LOAD_IC miss stub.
250 void LoadIC::GenerateFunctionPrototype(MacroAssembler* masm) {
251 // ----------- S t a t e -------------
253 // -- lr : return address
255 // -- sp[0] : receiver
256 // -----------------------------------
259 StubCompiler::GenerateLoadFunctionPrototype(masm, a0, a1, a3, &miss);
261 StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
// Rejects (jumps to |slow| for) smis, receivers whose map has any of the
// "slow case" bit-field bits set, and anything below JS_OBJECT_TYPE
// (notably JSValue wrappers). Leaves the receiver's map in |map|.
// NOTE(review): the parameter list is elided in this listing.
265 // Checks the receiver for special cases (value type, slow case bits).
266 // Falls through for regular JS object.
267 static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
273 // Check that the object isn't a smi.
274 __ JumpIfSmi(receiver, slow);
275 // Get the map of the receiver.
276 __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
// Test interceptor/access-check bits against the mask chosen by the caller.
278 __ lbu(scratch, FieldMemOperand(map, Map::kBitFieldOffset));
279 __ And(at, scratch, Operand(KeyedLoadIC::kSlowCaseBitFieldMask))
280 __ Branch(slow, ne, at, Operand(zero_reg));
281 // Check that the object is some kind of JS object EXCEPT JS Value type.
282 // In the case that the object is a value-wrapper object,
283 // we enter the runtime system to make sure that indexing into string
284 // objects work as intended.
285 ASSERT(JS_OBJECT_TYPE > JS_VALUE_TYPE);
286 __ lbu(scratch, FieldMemOperand(map, Map::kInstanceTypeOffset));
287 __ Branch(slow, lt, scratch, Operand(JS_OBJECT_TYPE));
// Loads element |key| (a smi) from the fast elements of |receiver| into
// |result|. Bails out to |not_fast_array| when the elements are not a
// plain FixedArray (unless that label is NULL) and to |out_of_range| when
// the index is out of bounds or the slot holds the hole.
291 // Loads an indexed element from a fast case array.
292 // If not_fast_array is NULL, doesn't perform the elements map check.
293 static void GenerateFastArrayLoad(MacroAssembler* masm,
300 Label* not_fast_array,
301 Label* out_of_range) {
304 // receiver - holds the receiver on entry.
305 // Unchanged unless 'result' is the same register.
307 // key - holds the smi key on entry.
308 // Unchanged unless 'result' is the same register.
310 // elements - holds the elements of the receiver on exit.
312 // result - holds the result on exit if the load succeeded.
313 // Allowed to be the the same as 'receiver' or 'key'.
314 // Unchanged on bailout so 'receiver' and 'key' can be safely
315 // used by further computation.
317 // Scratch registers:
319 // scratch1 - used to hold elements map and elements length.
320 // Holds the elements map if not_fast_array branch is taken.
322 // scratch2 - used to hold the loaded value.
324 __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
325 if (not_fast_array != NULL) {
326 // Check that the object is in fast mode (not dictionary).
327 __ lw(scratch1, FieldMemOperand(elements, HeapObject::kMapOffset));
328 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex);
329 __ Branch(not_fast_array, ne, scratch1, Operand(at));
331 __ AssertFastElements(elements);
// Unsigned (hs) compare doubles as a negative-index check on smis.
334 // Check that the key (index) is within bounds.
335 __ lw(scratch1, FieldMemOperand(elements, FixedArray::kLengthOffset));
336 __ Branch(out_of_range, hs, key, Operand(scratch1));
338 // Fast case: Do the load.
339 __ Addu(scratch1, elements,
340 Operand(FixedArray::kHeaderSize - kHeapObjectTag));
// The smi key is already shifted by kSmiTagSize, so scale by the
// remaining kPointerSizeLog2 - kSmiTagSize bits.
342 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
343 __ sll(at, key, kPointerSizeLog2 - kSmiTagSize);
344 __ addu(at, at, scratch1);
345 __ lw(scratch2, MemOperand(at));
347 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
348 // In case the loaded value is the_hole we have to consult GetProperty
349 // to ensure the prototype chain is searched.
350 __ Branch(out_of_range, eq, scratch2, Operand(at));
351 __ mov(result, scratch2);
// Classifies a non-smi key: jumps to |index_string| when the string has a
// cached array index in its hash field, to |not_symbol| when the key is
// not a string or not a symbol, and falls through for symbol keys.
355 // Checks whether a key is an array index string or a symbol string.
356 // Falls through if a key is a symbol.
357 static void GenerateKeyStringCheck(MacroAssembler* masm,
363 // The key is not a smi.
365 __ GetObjectType(key, map, hash);
366 __ Branch(not_symbol, ge, hash, Operand(FIRST_NONSTRING_TYPE));
368 // Is the string an array index, with cached numeric value?
369 __ lw(hash, FieldMemOperand(key, String::kHashFieldOffset));
370 __ And(at, hash, Operand(String::kContainsCachedArrayIndexMask));
371 __ Branch(index_string, eq, at, Operand(zero_reg));
373 // Is the string a symbol?
375 __ lbu(hash, FieldMemOperand(map, Map::kInstanceTypeOffset));
376 STATIC_ASSERT(kSymbolTag != 0);
377 __ And(at, hash, Operand(kIsSymbolMask));
378 __ Branch(not_symbol, eq, at, Operand(zero_reg));
// Defined in ic.c (runtime fallback for call-IC misses).
383 Object* CallIC_Miss(Arguments args);
// Probes the megamorphic stub cache for the receiver in a1 / name in a2.
// If that misses and the receiver is a value (number, string, boolean),
// re-probes using the corresponding wrapper's global-function prototype
// as the lookup object. Falls through when both probes miss.
385 // The generated code does not accept smi keys.
386 // The generated code falls through if both probes miss.
387 void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm,
390 Code::ExtraICState extra_state) {
391 // ----------- S t a t e -------------
394 // -----------------------------------
395 Label number, non_number, non_string, boolean, probe, miss;
397 // Probe the stub cache.
398 Code::Flags flags = Code::ComputeFlags(kind,
403 Isolate::Current()->stub_cache()->GenerateProbe(
404 masm, flags, a1, a2, a3, t0, t1, t2);
406 // If the stub cache probing failed, the receiver might be a value.
407 // For value objects, we use the map of the prototype objects for
408 // the corresponding JSValue for the cache and that is what we need
// Check for number: smis and heap numbers both take the number path.
412 __ JumpIfSmi(a1, &number, t1);
413 __ GetObjectType(a1, a3, a3);
414 __ Branch(&non_number, ne, a3, Operand(HEAP_NUMBER_TYPE));
416 StubCompiler::GenerateLoadGlobalFunctionPrototype(
417 masm, Context::NUMBER_FUNCTION_INDEX, a1);
// Check for string (instance type below FIRST_NONSTRING_TYPE).
421 __ bind(&non_number);
422 __ Branch(&non_string, Ugreater_equal, a3, Operand(FIRST_NONSTRING_TYPE));
423 StubCompiler::GenerateLoadGlobalFunctionPrototype(
424 masm, Context::STRING_FUNCTION_INDEX, a1);
427 // Check for boolean.
428 __ bind(&non_string);
429 __ LoadRoot(t0, Heap::kTrueValueRootIndex);
430 __ Branch(&boolean, eq, a1, Operand(t0));
431 __ LoadRoot(t1, Heap::kFalseValueRootIndex);
432 __ Branch(&miss, ne, a1, Operand(t1));
434 StubCompiler::GenerateLoadGlobalFunctionPrototype(
435 masm, Context::BOOLEAN_FUNCTION_INDEX, a1);
437 // Probe the stub cache for the value object.
439 Isolate::Current()->stub_cache()->GenerateProbe(
440 masm, flags, a1, a2, a3, t0, t1, t2);
// Tail-calls the JSFunction in a1 with |argc| arguments; jumps to |miss|
// when a1 is a smi or not a JSFunction.
446 static void GenerateFunctionTailCall(MacroAssembler* masm,
452 // Check that the value isn't a smi.
453 __ JumpIfSmi(a1, miss);
455 // Check that the value is a JSFunction.
456 __ GetObjectType(a1, scratch, scratch);
457 __ Branch(miss, ne, scratch, Operand(JS_FUNCTION_TYPE));
459 // Invoke the function.
460 ParameterCount actual(argc);
461 __ InvokeFunction(a1, actual, JUMP_FUNCTION,
462 NullCallWrapper(), CALL_AS_METHOD);
// Call IC for receivers with dictionary (normal) properties: verifies the
// receiver, looks the callee up in its property dictionary, and tail-calls
// it. The miss fall-through (elided here) goes to the runtime.
466 void CallICBase::GenerateNormal(MacroAssembler* masm, int argc) {
467 // ----------- S t a t e -------------
469 // -- ra : return address
470 // -----------------------------------
473 // Get the receiver of the function from the stack into a1.
474 __ lw(a1, MemOperand(sp, argc * kPointerSize));
476 GenerateStringDictionaryReceiverCheck(masm, a1, a0, a3, t0, &miss);
479 // Search the dictionary - put result in register a1.
480 GenerateDictionaryLoad(masm, &miss, a0, a2, a1, a3, t0);
482 GenerateFunctionTailCall(masm, argc, &miss, t0);
484 // Cache miss: Jump to runtime.
// Slow path shared by CallIC and KeyedCallIC: bumps the appropriate miss
// counter, calls the IC_Utility runtime entry with (receiver, name), then
// re-reads the receiver — for plain CallIC, global receivers are patched
// to their global receiver object — and invokes the resolved function.
// NOTE(review): the tail of this function (InvokeFunction arguments and
// the closing brace) is elided in this listing.
489 void CallICBase::GenerateMiss(MacroAssembler* masm,
492 Code::ExtraICState extra_state) {
493 // ----------- S t a t e -------------
495 // -- ra : return address
496 // -----------------------------------
497 Isolate* isolate = masm->isolate();
499 if (id == IC::kCallIC_Miss) {
500 __ IncrementCounter(isolate->counters()->call_miss(), 1, a3, t0);
502 __ IncrementCounter(isolate->counters()->keyed_call_miss(), 1, a3, t0);
505 // Get the receiver of the function from the stack.
506 __ lw(a3, MemOperand(sp, argc*kPointerSize));
509 FrameScope scope(masm, StackFrame::INTERNAL);
511 // Push the receiver and the name of the function.
515 __ PrepareCEntryArgs(2);
516 __ PrepareCEntryFunction(ExternalReference(IC_Utility(id), isolate));
521 // Move result to a1 and leave the internal frame.
525 // Check if the receiver is a global object of some sort.
526 // This can happen only for regular CallIC but not KeyedCallIC.
527 if (id == IC::kCallIC_Miss) {
528 Label invoke, global;
529 __ lw(a2, MemOperand(sp, argc * kPointerSize));
530 __ JumpIfSmi(a2, &invoke);
531 __ GetObjectType(a2, a3, a3);
532 __ Branch(&global, eq, a3, Operand(JS_GLOBAL_OBJECT_TYPE));
533 __ Branch(&invoke, ne, a3, Operand(JS_BUILTINS_OBJECT_TYPE));
535 // Patch the receiver on the stack.
537 __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalReceiverOffset));
538 __ sw(a2, MemOperand(sp, argc * kPointerSize));
541 // Invoke the function.
// Call kind (method vs. function) is encoded in the extra IC state.
542 CallKind call_kind = CallICBase::Contextual::decode(extra_state)
545 ParameterCount actual(argc);
546 __ InvokeFunction(a1,
// Megamorphic CallIC entry: probes the monomorphic stub cache for the
// receiver/name pair and falls through to the miss handler.
554 void CallIC::GenerateMegamorphic(MacroAssembler* masm,
556 Code::ExtraICState extra_ic_state) {
557 // ----------- S t a t e -------------
559 // -- ra : return address
560 // -----------------------------------
562 // Get the receiver of the function from the stack into a1.
563 __ lw(a1, MemOperand(sp, argc * kPointerSize));
564 GenerateMonomorphicCacheProbe(masm, argc, Code::CALL_IC, extra_ic_state);
565 GenerateMiss(masm, argc, extra_ic_state);
// Megamorphic KeyedCallIC: dispatches on the key in a2. Smi keys try a
// fast-elements load, then a number dictionary, then the runtime
// KeyedGetProperty. String keys with cached array indices are converted to
// smis; symbol keys try the receiver's property dictionary and then the
// monomorphic stub cache. Everything else falls through to GenerateMiss.
569 void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
570 // ----------- S t a t e -------------
572 // -- ra : return address
573 // -----------------------------------
575 // Get the receiver of the function from the stack into a1.
576 __ lw(a1, MemOperand(sp, argc * kPointerSize));
578 Label do_call, slow_call, slow_load, slow_reload_receiver;
579 Label check_number_dictionary, check_string, lookup_monomorphic_cache;
580 Label index_smi, index_string;
582 // Check that the key is a smi.
583 __ JumpIfNotSmi(a2, &check_string);
585 // Now the key is known to be a smi. This place is also jumped to from below
586 // where a numeric string is converted to a smi.
588 GenerateKeyedLoadReceiverCheck(
589 masm, a1, a0, a3, Map::kHasIndexedInterceptor, &slow_call);
591 GenerateFastArrayLoad(
592 masm, a1, a2, t0, a3, a0, a1, &check_number_dictionary, &slow_load);
593 Counters* counters = masm->isolate()->counters();
594 __ IncrementCounter(counters->keyed_call_generic_smi_fast(), 1, a0, a3);
597 // receiver in a1 is not used after this point.
601 GenerateFunctionTailCall(masm, argc, &slow_call, a0);
603 __ bind(&check_number_dictionary);
606 // t0: elements pointer
607 // Check whether the elements is a number dictionary.
608 __ LoadRoot(at, Heap::kHashTableMapRootIndex);
609 __ Branch(&slow_load, ne, a3, Operand(at));
610 __ sra(a0, a2, kSmiTagSize);
611 // a0: untagged index
612 __ LoadFromNumberDictionary(&slow_load, t0, a2, a1, a0, a3, t1);
613 __ IncrementCounter(counters->keyed_call_generic_smi_dict(), 1, a0, a3);
617 // This branch is taken when calling KeyedCallIC_Miss is neither required
619 __ IncrementCounter(counters->keyed_call_generic_slow_load(), 1, a0, a3);
// Call the runtime to fetch the property; the key is preserved across
// the call so the result can still be invoked with it.
621 FrameScope scope(masm, StackFrame::INTERNAL);
622 __ push(a2); // Save the key.
623 __ Push(a1, a2); // Pass the receiver and the key.
624 __ CallRuntime(Runtime::kKeyedGetProperty, 2);
625 __ pop(a2); // Restore the key.
630 __ bind(&check_string);
631 GenerateKeyStringCheck(masm, a2, a0, a3, &index_string, &slow_call);
633 // The key is known to be a symbol.
634 // If the receiver is a regular JS object with slow properties then do
635 // a quick inline probe of the receiver's dictionary.
636 // Otherwise do the monomorphic cache probe.
637 GenerateKeyedLoadReceiverCheck(
638 masm, a1, a0, a3, Map::kHasNamedInterceptor, &lookup_monomorphic_cache);
640 __ lw(a0, FieldMemOperand(a1, JSObject::kPropertiesOffset));
641 __ lw(a3, FieldMemOperand(a0, HeapObject::kMapOffset));
642 __ LoadRoot(at, Heap::kHashTableMapRootIndex);
643 __ Branch(&lookup_monomorphic_cache, ne, a3, Operand(at));
645 GenerateDictionaryLoad(masm, &slow_load, a0, a2, a1, a3, t0);
646 __ IncrementCounter(counters->keyed_call_generic_lookup_dict(), 1, a0, a3);
649 __ bind(&lookup_monomorphic_cache);
650 __ IncrementCounter(counters->keyed_call_generic_lookup_cache(), 1, a0, a3);
651 GenerateMonomorphicCacheProbe(masm,
654 Code::kNoExtraICState);
655 // Fall through on miss.
658 // This branch is taken if:
659 // - the receiver requires boxing or access check,
660 // - the key is neither smi nor symbol,
661 // - the value loaded is not a function,
662 // - there is hope that the runtime will create a monomorphic call stub,
663 // that will get fetched next time.
664 __ IncrementCounter(counters->keyed_call_generic_slow(), 1, a0, a3);
665 GenerateMiss(masm, argc);
667 __ bind(&index_string);
668 __ IndexFromHash(a3, a2);
669 // Now jump to the place where smi keys are handled.
// KeyedCallIC for dictionary-mode receivers: only proceeds when the key in
// a2 is a (non-smi) string, then reuses the plain CallIC normal path.
674 void KeyedCallIC::GenerateNormal(MacroAssembler* masm, int argc) {
675 // ----------- S t a t e -------------
677 // -- ra : return address
678 // -----------------------------------
680 // Check if the name is a string.
682 __ JumpIfSmi(a2, &miss);
683 __ IsObjectJSStringType(a2, a0, &miss);
685 CallICBase::GenerateNormal(masm, argc);
687 GenerateMiss(masm, argc);
// Defined in ic.c (runtime fallback for load-IC misses).
692 Object* LoadIC_Miss(Arguments args);
// Megamorphic LoadIC entry: probes the stub cache for (receiver a0,
// name a2) and falls through to the runtime on a miss.
694 void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
695 // ----------- S t a t e -------------
697 // -- ra : return address
699 // -- sp[0] : receiver
700 // -----------------------------------
702 // Probe the stub cache.
703 Code::Flags flags = Code::ComputeFlags(Code::LOAD_IC, MONOMORPHIC);
704 Isolate::Current()->stub_cache()->GenerateProbe(
705 masm, flags, a0, a2, a3, t0, t1, t2);
707 // Cache miss: Jump to runtime.
// LoadIC for dictionary-mode receivers: verifies the receiver, loads the
// named property from its dictionary into v0, and falls through to the
// runtime on a miss.
712 void LoadIC::GenerateNormal(MacroAssembler* masm) {
713 // ----------- S t a t e -------------
715 // -- lr : return address
717 // -- sp[0] : receiver
718 // -----------------------------------
721 GenerateStringDictionaryReceiverCheck(masm, a0, a1, a3, t0, &miss);
724 GenerateDictionaryLoad(masm, &miss, a1, a2, v0, a3, t0);
727 // Cache miss: Jump to runtime.
// LoadIC miss handler: bumps the load-miss counter and tail-calls the
// kLoadIC_Miss runtime entry with (receiver, name).
733 void LoadIC::GenerateMiss(MacroAssembler* masm) {
734 // ----------- S t a t e -------------
736 // -- ra : return address
738 // -- sp[0] : receiver
739 // -----------------------------------
740 Isolate* isolate = masm->isolate();
// This is the plain LoadIC miss path (it tail-calls kLoadIC_Miss below),
// so count it under load_miss; keyed_load_miss belongs to
// KeyedLoadIC::GenerateMiss.
742 __ IncrementCounter(isolate->counters()->load_miss(), 1, a3, t0);
747 // Perform tail call to the entry.
748 ExternalReference ref = ExternalReference(IC_Utility(kLoadIC_Miss), isolate);
749 __ TailCallExternalReference(ref, 2, 1);
// Computes the address of a mapped (aliased) argument for a non-strict
// arguments object. Jumps to |slow_case| for smis/non-JSObjects/invalid
// keys and to |unmapped_case| (with the parameter map in scratch1) when
// the key is outside the mapped range or maps to the hole. On
// fall-through, returns a MemOperand addressing the value slot inside the
// context.
753 static MemOperand GenerateMappedArgumentsLookup(MacroAssembler* masm,
759 Label* unmapped_case,
761 // Check that the receiver is a JSObject. Because of the map check
762 // later, we do not need to check for interceptors or whether it
763 // requires access checks.
764 __ JumpIfSmi(object, slow_case);
765 // Check that the object is some kind of JSObject.
766 __ GetObjectType(object, scratch1, scratch2);
767 __ Branch(slow_case, lt, scratch2, Operand(FIRST_JS_RECEIVER_TYPE));
769 // Check that the key is a positive smi.
// Mask = sign bit (0x80000000, rejects negative indices) | smi tag bit
// (0x1). The previous mask 0x8000001 was missing a zero and tested bit 27
// instead of the sign bit, letting negative smi keys through.
770 __ And(scratch1, key, Operand(0x80000001));
771 __ Branch(slow_case, ne, scratch1, Operand(zero_reg));
773 // Load the elements into scratch1 and check its map.
774 __ lw(scratch1, FieldMemOperand(object, JSObject::kElementsOffset));
775 __ CheckMap(scratch1,
777 Heap::kNonStrictArgumentsElementsMapRootIndex,
780 // Check if element is in the range of mapped arguments. If not, jump
781 // to the unmapped lookup with the parameter map in scratch1.
// The first two parameter-map slots hold the context and the backing
// store, hence the length - 2 adjustment.
782 __ lw(scratch2, FieldMemOperand(scratch1, FixedArray::kLengthOffset));
783 __ Subu(scratch2, scratch2, Operand(Smi::FromInt(2)));
784 __ Branch(unmapped_case, Ugreater_equal, key, Operand(scratch2));
786 // Load element index and check whether it is the hole.
788 FixedArray::kHeaderSize + 2 * kPointerSize - kHeapObjectTag;
// key is a smi (value << 1); scaling by kPointerSize >> 1 yields the
// byte offset value * kPointerSize.
790 __ li(scratch3, Operand(kPointerSize >> 1));
791 __ Mul(scratch3, key, scratch3);
792 __ Addu(scratch3, scratch3, Operand(kOffset));
794 __ Addu(scratch2, scratch1, scratch3);
795 __ lw(scratch2, MemOperand(scratch2));
796 __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex);
797 __ Branch(unmapped_case, eq, scratch2, Operand(scratch3));
799 // Load value from context and return it. We can reuse scratch1 because
800 // we do not jump to the unmapped lookup (which requires the parameter
802 __ lw(scratch1, FieldMemOperand(scratch1, FixedArray::kHeaderSize));
803 __ li(scratch3, Operand(kPointerSize >> 1));
804 __ Mul(scratch3, scratch2, scratch3);
805 __ Addu(scratch3, scratch3, Operand(Context::kHeaderSize - kHeapObjectTag));
806 __ Addu(scratch2, scratch1, scratch3);
807 return MemOperand(scratch2);
// Computes the address of an element in the arguments backing store (the
// second slot of the parameter map). Jumps to the slow case when the
// backing store is not a FixedArray or the key is out of bounds; otherwise
// returns a MemOperand addressing the element.
811 static MemOperand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
813 Register parameter_map,
816 // Element is in arguments backing store, which is referenced by the
817 // second element of the parameter_map. The parameter_map register
818 // must be loaded with the parameter map of the arguments object and is
820 const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize;
// parameter_map is clobbered: it is reused to hold the backing store.
821 Register backing_store = parameter_map;
822 __ lw(backing_store, FieldMemOperand(parameter_map, kBackingStoreOffset));
823 __ CheckMap(backing_store,
825 Heap::kFixedArrayMapRootIndex,
828 __ lw(scratch, FieldMemOperand(backing_store, FixedArray::kLengthOffset));
829 __ Branch(slow_case, Ugreater_equal, key, Operand(scratch));
// key is a smi; scaling by kPointerSize >> 1 gives the byte offset.
830 __ li(scratch, Operand(kPointerSize >> 1));
831 __ Mul(scratch, key, scratch);
834 Operand(FixedArray::kHeaderSize - kHeapObjectTag));
835 __ Addu(scratch, backing_store, scratch);
836 return MemOperand(scratch);
// Keyed load from a non-strict arguments object: tries the mapped
// (aliased) lookup first, then the unmapped backing store; a hole in the
// backing store or any check failure goes to the generic miss.
840 void KeyedLoadIC::GenerateNonStrictArguments(MacroAssembler* masm) {
841 // ---------- S t a t e --------------
842 // -- lr : return address
845 // -----------------------------------
847 MemOperand mapped_location =
// "&notin" was corrupted to the HTML entity rendering "¬in" in this
// file; restored to the address-of the `notin` label.
848 GenerateMappedArgumentsLookup(masm, a1, a0, a2, a3, t0, &notin, &slow);
849 __ Ret(USE_DELAY_SLOT);
850 __ lw(v0, mapped_location);
852 // The unmapped lookup expects that the parameter map is in a2.
853 MemOperand unmapped_location =
854 GenerateUnmappedArgumentsLookup(masm, a0, a2, a3, &slow);
855 __ lw(a2, unmapped_location);
856 __ LoadRoot(a3, Heap::kTheHoleValueRootIndex);
857 __ Branch(&slow, eq, a2, Operand(a3));
858 __ Ret(USE_DELAY_SLOT);
861 GenerateMiss(masm, false);
// Keyed store into a non-strict arguments object: stores a0 through the
// mapped lookup (into the context) or the unmapped backing store, emits
// the write barrier for the stored slot, and returns the stored value.
865 void KeyedStoreIC::GenerateNonStrictArguments(MacroAssembler* masm) {
866 // ---------- S t a t e --------------
870 // -- lr : return address
871 // -----------------------------------
873 // Store address is returned in register (of MemOperand) mapped_location.
874 MemOperand mapped_location =
// "&notin" was corrupted to the HTML entity rendering "¬in" in this
// file; restored to the address-of the `notin` label.
875 GenerateMappedArgumentsLookup(masm, a2, a1, a3, t0, t1, &notin, &slow);
876 __ sw(a0, mapped_location);
878 ASSERT_EQ(mapped_location.offset(), 0);
879 __ RecordWrite(a3, mapped_location.rm(), t5,
880 kRAHasNotBeenSaved, kDontSaveFPRegs);
881 __ Ret(USE_DELAY_SLOT);
882 __ mov(v0, a0); // (In delay slot) return the value stored in v0.
884 // The unmapped lookup expects that the parameter map is in a3.
885 // Store address is returned in register (of MemOperand) unmapped_location.
886 MemOperand unmapped_location =
887 GenerateUnmappedArgumentsLookup(masm, a1, a3, t0, &slow);
888 __ sw(a0, unmapped_location);
890 ASSERT_EQ(unmapped_location.offset(), 0);
891 __ RecordWrite(a3, unmapped_location.rm(), t5,
892 kRAHasNotBeenSaved, kDontSaveFPRegs);
893 __ Ret(USE_DELAY_SLOT);
894 __ mov(v0, a0); // (In delay slot) return the value stored in v0.
896 GenerateMiss(masm, false);
// Keyed call where the receiver is a non-strict arguments object: loads
// the callee via the mapped lookup (or the unmapped backing store, hole
// check included) and tail-calls it; all failures go to GenerateMiss.
900 void KeyedCallIC::GenerateNonStrictArguments(MacroAssembler* masm,
902 // ----------- S t a t e -------------
904 // -- lr : return address
905 // -----------------------------------
908 __ lw(a1, MemOperand(sp, argc * kPointerSize));
909 MemOperand mapped_location =
// "&notin" was corrupted to the HTML entity rendering "¬in" in this
// file; restored to the address-of the `notin` label.
910 GenerateMappedArgumentsLookup(masm, a1, a2, a3, t0, t1, &notin, &slow);
911 __ lw(a1, mapped_location);
912 GenerateFunctionTailCall(masm, argc, &slow, a3);
914 // The unmapped lookup expects that the parameter map is in a3.
915 MemOperand unmapped_location =
916 GenerateUnmappedArgumentsLookup(masm, a2, a3, t0, &slow);
917 __ lw(a1, unmapped_location);
918 __ LoadRoot(a3, Heap::kTheHoleValueRootIndex);
919 __ Branch(&slow, eq, a1, Operand(a3));
920 GenerateFunctionTailCall(masm, argc, &slow, a3);
922 GenerateMiss(masm, argc);
// Defined in ic.c (runtime fallback for keyed-load-IC misses).
926 Object* KeyedLoadIC_Miss(Arguments args);
// KeyedLoadIC miss handler: bumps the keyed-load-miss counter and
// tail-calls either the force-generic or the regular miss runtime entry.
929 void KeyedLoadIC::GenerateMiss(MacroAssembler* masm, bool force_generic) {
930 // ---------- S t a t e --------------
931 // -- ra : return address
934 // -----------------------------------
935 Isolate* isolate = masm->isolate();
937 __ IncrementCounter(isolate->counters()->keyed_load_miss(), 1, a3, t0);
941 // Perform tail call to the entry.
942 ExternalReference ref = force_generic
943 ? ExternalReference(IC_Utility(kKeyedLoadIC_MissForceGeneric), isolate)
944 : ExternalReference(IC_Utility(kKeyedLoadIC_Miss), isolate);
946 __ TailCallExternalReference(ref, 2, 1);
// Tail-calls the generic Runtime::kKeyedGetProperty with the two stack
// arguments (receiver, key — pushes elided in this listing).
950 void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
951 // ---------- S t a t e --------------
952 // -- ra : return address
955 // -----------------------------------
959 __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
963 void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
964 // ---------- S t a t e --------------
965 // -- ra : return address
968 // -----------------------------------
969 Label slow, check_string, index_smi, index_string, property_array_property;
970 Label probe_dictionary, check_number_dictionary;
973 Register receiver = a1;
975 Isolate* isolate = masm->isolate();
977 // Check that the key is a smi.
978 __ JumpIfNotSmi(key, &check_string);
980 // Now the key is known to be a smi. This place is also jumped to from below
981 // where a numeric string is converted to a smi.
983 GenerateKeyedLoadReceiverCheck(
984 masm, receiver, a2, a3, Map::kHasIndexedInterceptor, &slow);
986 // Check the receiver's map to see if it has fast elements.
987 __ CheckFastElements(a2, a3, &check_number_dictionary);
989 GenerateFastArrayLoad(
990 masm, receiver, key, t0, a3, a2, v0, NULL, &slow);
992 __ IncrementCounter(isolate->counters()->keyed_load_generic_smi(), 1, a2, a3);
995 __ bind(&check_number_dictionary);
996 __ lw(t0, FieldMemOperand(receiver, JSObject::kElementsOffset));
997 __ lw(a3, FieldMemOperand(t0, JSObject::kMapOffset));
999 // Check whether the elements is a number dictionary.
1003 __ LoadRoot(at, Heap::kHashTableMapRootIndex);
1004 __ Branch(&slow, ne, a3, Operand(at));
1005 __ sra(a2, a0, kSmiTagSize);
1006 __ LoadFromNumberDictionary(&slow, t0, a0, v0, a2, a3, t1);
1009 // Slow case, key and receiver still in a0 and a1.
1011 __ IncrementCounter(isolate->counters()->keyed_load_generic_slow(),
1015 GenerateRuntimeGetProperty(masm);
1017 __ bind(&check_string);
1018 GenerateKeyStringCheck(masm, key, a2, a3, &index_string, &slow);
1020 GenerateKeyedLoadReceiverCheck(
1021 masm, receiver, a2, a3, Map::kHasIndexedInterceptor, &slow);
1024 // If the receiver is a fast-case object, check the keyed lookup
1025 // cache. Otherwise probe the dictionary.
1026 __ lw(a3, FieldMemOperand(a1, JSObject::kPropertiesOffset));
1027 __ lw(t0, FieldMemOperand(a3, HeapObject::kMapOffset));
1028 __ LoadRoot(at, Heap::kHashTableMapRootIndex);
1029 __ Branch(&probe_dictionary, eq, t0, Operand(at));
1031 // Load the map of the receiver, compute the keyed lookup cache hash
1032 // based on 32 bits of the map pointer and the string hash.
1033 __ lw(a2, FieldMemOperand(a1, HeapObject::kMapOffset));
1034 __ sra(a3, a2, KeyedLookupCache::kMapHashShift);
1035 __ lw(t0, FieldMemOperand(a0, String::kHashFieldOffset));
1036 __ sra(at, t0, String::kHashShift);
1037 __ xor_(a3, a3, at);
1038 int mask = KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask;
1039 __ And(a3, a3, Operand(mask));
1041 // Load the key (consisting of map and symbol) from the cache and
1043 Label load_in_object_property;
1044 static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket;
1045 Label hit_on_nth_entry[kEntriesPerBucket];
1046 ExternalReference cache_keys =
1047 ExternalReference::keyed_lookup_cache_keys(isolate);
1048 __ li(t0, Operand(cache_keys));
1049 __ sll(at, a3, kPointerSizeLog2 + 1);
1050 __ addu(t0, t0, at);
1052 for (int i = 0; i < kEntriesPerBucket - 1; i++) {
1053 Label try_next_entry;
1054 __ lw(t1, MemOperand(t0, kPointerSize * i * 2));
1055 __ Branch(&try_next_entry, ne, a2, Operand(t1));
1056 __ lw(t1, MemOperand(t0, kPointerSize * (i * 2 + 1)));
1057 __ Branch(&hit_on_nth_entry[i], eq, a0, Operand(t1));
1058 __ bind(&try_next_entry);
1061 __ lw(t1, MemOperand(t0, kPointerSize * (kEntriesPerBucket - 1) * 2));
1062 __ Branch(&slow, ne, a2, Operand(t1));
1063 __ lw(t1, MemOperand(t0, kPointerSize * ((kEntriesPerBucket - 1) * 2 + 1)));
1064 __ Branch(&slow, ne, a0, Operand(t1));
1066 // Get field offset.
1069 // a2 : receiver's map
1070 // a3 : lookup cache index
1071 ExternalReference cache_field_offsets =
1072 ExternalReference::keyed_lookup_cache_field_offsets(isolate);
1074 // Hit on nth entry.
1075 for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
1076 __ bind(&hit_on_nth_entry[i]);
1077 __ li(t0, Operand(cache_field_offsets));
1078 __ sll(at, a3, kPointerSizeLog2);
1079 __ addu(at, t0, at);
1080 __ lw(t1, MemOperand(at, kPointerSize * i));
1081 __ lbu(t2, FieldMemOperand(a2, Map::kInObjectPropertiesOffset));
1082 __ Subu(t1, t1, t2);
1083 __ Branch(&property_array_property, ge, t1, Operand(zero_reg));
1085 __ Branch(&load_in_object_property);
1089 // Load in-object property.
1090 __ bind(&load_in_object_property);
1091 __ lbu(t2, FieldMemOperand(a2, Map::kInstanceSizeOffset));
1092 __ addu(t2, t2, t1); // Index from start of object.
1093 __ Subu(a1, a1, Operand(kHeapObjectTag)); // Remove the heap tag.
1094 __ sll(at, t2, kPointerSizeLog2);
1095 __ addu(at, a1, at);
1096 __ lw(v0, MemOperand(at));
1097 __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(),
1103 // Load property array property.
1104 __ bind(&property_array_property);
1105 __ lw(a1, FieldMemOperand(a1, JSObject::kPropertiesOffset));
1106 __ Addu(a1, a1, FixedArray::kHeaderSize - kHeapObjectTag);
1107 __ sll(t0, t1, kPointerSizeLog2);
1108 __ Addu(t0, t0, a1);
1109 __ lw(v0, MemOperand(t0));
1110 __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(),
1117 // Do a quick inline probe of the receiver's dictionary, if it
1119 __ bind(&probe_dictionary);
1123 __ lw(a2, FieldMemOperand(a1, HeapObject::kMapOffset));
1124 __ lbu(a2, FieldMemOperand(a2, Map::kInstanceTypeOffset));
1125 GenerateGlobalInstanceTypeCheck(masm, a2, &slow);
1126 // Load the property to v0.
1127 GenerateDictionaryLoad(masm, &slow, a3, a0, v0, a2, t0);
1128 __ IncrementCounter(isolate->counters()->keyed_load_generic_symbol(),
1134 __ bind(&index_string);
1135 __ IndexFromHash(a3, key);
1136 // Now jump to the place where smi keys are handled.
1137 __ Branch(&index_smi);
// Fast path for keyed loads where the receiver is a string and the key is
// an index: returns the one-character string at that index via
// StringCharAtGenerator, falling back to the miss handler for non-string
// receivers, non-number keys, or out-of-range indices.
// NOTE(review): several constructor-argument lines are elided in this view
// (presumably passing `index`, `scratch` and `result` to the generator) —
// confirm against the full source.
1141 void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
1142   // ---------- S t a t e --------------
1143   //  -- ra     : return address
1144   //  -- a0     : key (index)
1146   // -----------------------------------
1149   Register receiver = a1;
1150   Register index = a0;
1151   Register scratch = a3;
1152   Register result = v0;
1154   StringCharAtGenerator char_at_generator(receiver,
1158                                           &miss,  // When not a string.
1159                                           &miss,  // When not a number.
1160                                           &miss,  // When index out of range.
1161                                           STRING_INDEX_IS_ARRAY_INDEX);
  // Inline fast path first; the slow path below handles the cases the fast
  // path bails out on (e.g. non-flat strings) via the runtime call helper.
1162   char_at_generator.GenerateFast(masm);
1165   StubRuntimeCallHelper call_helper;
1166   char_at_generator.GenerateSlow(masm, call_helper);
  // Any miss (bound in elided lines above — confirm) falls through to the
  // generic keyed-load miss stub; `false` = do not force the generic stub.
1169   GenerateMiss(masm, false);
// Stores a property by calling straight into the runtime: pushes the
// receiver/key/value triple plus PropertyAttributes and the strict-mode flag,
// then tail-calls Runtime::kSetProperty (5 arguments).
1173 void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,
1174                                               StrictModeFlag strict_mode) {
1175   // ---------- S t a t e --------------
1179   //  -- ra     : return address
1180   // -----------------------------------
1182   // Push receiver, key and value for runtime call.
1183   __ Push(a2, a1, a0);
1184   __ li(a1, Operand(Smi::FromInt(NONE)));          // PropertyAttributes.
1185   __ li(a0, Operand(Smi::FromInt(strict_mode)));   // Strict mode.
  // NOTE(review): kSetProperty takes 5 args but only 3 were pushed above;
  // the push of the two flag registers appears in an elided line — confirm.
1188   __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
// Generic keyed store: handles smi keys into fast-elements JSObjects and
// JSArrays, including growing a JSArray by one element, storing into
// FAST_DOUBLE_ELEMENTS backing stores, and transitioning the elements kind
// (smi-only -> double, smi-only -> object, double -> object) when the stored
// value requires it. Everything else escapes to the runtime via `slow`.
1192 void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
1193                                    StrictModeFlag strict_mode) {
1194   // ---------- S t a t e --------------
1198   //  -- ra     : return address
1199   // -----------------------------------
1200   Label slow, array, extra, check_if_double_array;
1201   Label fast_object_with_map_check, fast_object_without_map_check;
1202   Label fast_double_with_map_check, fast_double_without_map_check;
1203   Label transition_smi_elements, finish_object_store, non_double_value;
1204   Label transition_double_elements;
  // Register assignments for the whole stub. The `key` register declaration
  // (presumably a1) is in an elided line — confirm against the full source.
1207   Register value = a0;
1209   Register receiver = a2;
1210   Register receiver_map = a3;
1211   Register elements_map = t2;
1212   Register elements = t3;  // Elements array of the receiver.
1213   // t0 and t1 are used as general scratch registers.
1215   // Check that the key is a smi.
1216   __ JumpIfNotSmi(key, &slow);
1217   // Check that the object isn't a smi.
1218   __ JumpIfSmi(receiver, &slow);
1219   // Get the map of the object.
1220   __ lw(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset));
1221   // Check that the receiver does not require access checks.  We need
1222   // to do this because this generic stub does not perform map checks.
1223   __ lbu(t0, FieldMemOperand(receiver_map, Map::kBitFieldOffset));
1224   __ And(t0, t0, Operand(1 << Map::kIsAccessCheckNeeded));
1225   __ Branch(&slow, ne, t0, Operand(zero_reg));
1226   // Check if the object is a JS array or not.
1227   __ lbu(t0, FieldMemOperand(receiver_map, Map::kInstanceTypeOffset));
1228   __ Branch(&array, eq, t0, Operand(JS_ARRAY_TYPE));
1229   // Check that the object is some kind of JSObject.
1230   __ Branch(&slow, lt, t0, Operand(FIRST_JS_OBJECT_TYPE));
1232   // Object case: Check key against length in the elements array.
1233   __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
1234   // Check array bounds. Both the key and the length of FixedArray are smis.
1235   __ lw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));
  // Unsigned compare: smi-tagged key < smi-tagged length means in bounds.
1236   __ Branch(&fast_object_with_map_check, lo, key, Operand(t0));
1238   // Slow case, handle jump to runtime.
1240   // Entry registers are intact.
  // NOTE(review): the __ bind(&slow) appears in an elided line — confirm.
1244   GenerateRuntimeSetProperty(masm, strict_mode);
1246   // Extra capacity case: Check if there is extra capacity to
1247   // perform the store and update the length. Used for adding one
1248   // element to the array by writing to array[array.length].
1250   // Condition code from comparing key and array length is still available.
1251   // Only support writing to array[array.length].
1252   __ Branch(&slow, ne, key, Operand(t0));
1253   // Check for room in the elements backing store.
1254   // Both the key and the length of FixedArray are smis.
1255   __ lw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));
1256   __ Branch(&slow, hs, key, Operand(t0));
1257   __ lw(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset));
  // Elided line presumably contains the Branch mnemonic for this root-map
  // comparison — confirm against the full source.
1259       &check_if_double_array, ne, elements_map, Heap::kFixedArrayMapRootIndex);
1261   // Calculate key + 1 as smi.
1262   STATIC_ASSERT(kSmiTag == 0);
1263   __ Addu(t0, key, Operand(Smi::FromInt(1)));
1264   __ sw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1265   __ Branch(&fast_object_without_map_check);
1267   __ bind(&check_if_double_array);
1268   __ Branch(&slow, ne, elements_map, Heap::kFixedDoubleArrayMapRootIndex);
1269   // Add 1 to key, and go to common element store code for doubles.
1270   STATIC_ASSERT(kSmiTag == 0);
1271   __ Addu(t0, key, Operand(Smi::FromInt(1)));
1272   __ sw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1273   __ jmp(&fast_double_without_map_check);
1275   // Array case: Get the length and the elements array from the JS
1276   // array. Check that the array is in fast mode (and writable); if it
1277   // is the length is always a smi.
1279   __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
1281   // Check the key against the length in the array.
1282   __ lw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));
  // key >= length (unsigned): may still fit in capacity -> `extra` grows by 1.
1283   __ Branch(&extra, hs, key, Operand(t0));
1284   // Fall through to fast case.
1286   __ bind(&fast_object_with_map_check);
1287   Register scratch_value = t0;
1288   Register address = t1;
1289   __ lw(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset));
1290   __ Branch(&fast_double_with_map_check,
1293             Heap::kFixedArrayMapRootIndex);
1294   __ bind(&fast_object_without_map_check);
1295   // Smi stores don't require further checks.
1296   Label non_smi_value;
1297   __ JumpIfNotSmi(value, &non_smi_value);
1298   // It's irrelevant whether array is smi-only or not when writing a smi.
  // Compute &elements[key]: untag the array pointer, then scale the smi key
  // by pointer size (shift folds the smi untag and the log2 scaling).
1299   __ Addu(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1300   __ sll(scratch_value, key, kPointerSizeLog2 - kSmiTagSize);
1301   __ Addu(address, address, scratch_value);
1302   __ sw(value, MemOperand(address));
  // Smi store needs no write barrier; return value is set in the delay slot
  // (elided line — confirm).
1303   __ Ret(USE_DELAY_SLOT);
1306   __ bind(&non_smi_value);
1307   // Escape to elements kind transition case.
1308   __ CheckFastObjectElements(receiver_map, scratch_value,
1309                              &transition_smi_elements);
1310   // Fast elements array, store the value to the elements backing store.
1311   __ bind(&finish_object_store);
1312   __ Addu(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1313   __ sll(scratch_value, key, kPointerSizeLog2 - kSmiTagSize);
1314   __ Addu(address, address, scratch_value);
1315   __ sw(value, MemOperand(address));
1316   // Update write barrier for the elements array address.
1317   __ mov(v0, value);  // Preserve the value which is returned.
1318   __ RecordWrite(elements,
1323                  EMIT_REMEMBERED_SET,
1327   __ bind(&fast_double_with_map_check);
1328   // Check for fast double array case. If this fails, call through to the
1330   __ Branch(&slow, ne, elements_map, Heap::kFixedDoubleArrayMapRootIndex);
1331   __ bind(&fast_double_without_map_check);
  // Stores the (number) value into the FAST_DOUBLE_ELEMENTS backing store,
  // jumping to transition_double_elements if the value is a non-number object.
1332   __ StoreNumberToDoubleElements(value,
1340                                  &transition_double_elements);
1341   __ Ret(USE_DELAY_SLOT);
1344   __ bind(&transition_smi_elements);
1345   // Transition the array appropriately depending on the value type.
1346   __ lw(t0, FieldMemOperand(value, HeapObject::kMapOffset));
1347   __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
1348   __ Branch(&non_double_value, ne, t0, Operand(at));
1350   // Value is a double. Transition FAST_SMI_ONLY_ELEMENTS ->
1351   // FAST_DOUBLE_ELEMENTS and complete the store.
1352   __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
1353                                          FAST_DOUBLE_ELEMENTS,
1357   ASSERT(receiver_map.is(a3));  // Transition code expects map in a3
1358   ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &slow);
  // Elements pointer may have changed during the transition; reload it.
1359   __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
1360   __ jmp(&fast_double_without_map_check);
1362   __ bind(&non_double_value);
1363   // Value is not a double, FAST_SMI_ONLY_ELEMENTS -> FAST_ELEMENTS
1364   __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
1369   ASSERT(receiver_map.is(a3));  // Transition code expects map in a3
1370   ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm);
1371   __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
1372   __ jmp(&finish_object_store);
1374   __ bind(&transition_double_elements);
1375   // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a
1376   // HeapNumber. Make sure that the receiver is a Array with FAST_ELEMENTS and
1377   // transition array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS
1378   __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS,
1383   ASSERT(receiver_map.is(a3));  // Transition code expects map in a3
1384   ElementsTransitionGenerator::GenerateDoubleToObject(masm, &slow);
1385   __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
1386   __ jmp(&finish_object_store);
// Keyed load through an indexed interceptor: verifies the receiver is a
// non-smi object whose map has exactly the has-indexed-interceptor bit set
// (and no access checks / other slow-case bits), then tail-calls the
// interceptor runtime entry with (receiver, key).
1390 void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
1391   // ---------- S t a t e --------------
1392   //  -- ra     : return address
1395   // -----------------------------------
1398   // Check that the receiver isn't a smi.
1399   __ JumpIfSmi(a1, &slow);
1401   // Check that the key is an array index, that is Uint32.
  // Both the smi tag bit and the sign bit must be clear: a non-negative smi.
1402   __ And(t0, a0, Operand(kSmiTagMask | kSmiSignMask));
1403   __ Branch(&slow, ne, t0, Operand(zero_reg));
1405   // Get the map of the receiver.
1406   __ lw(a2, FieldMemOperand(a1, HeapObject::kMapOffset));
1408   // Check that it has indexed interceptor and access checks
1409   // are not enabled for this object.
1410   __ lbu(a3, FieldMemOperand(a2, Map::kBitFieldOffset));
1411   __ And(a3, a3, Operand(kSlowCaseBitFieldMask));
1412   __ Branch(&slow, ne, a3, Operand(1 << Map::kHasIndexedInterceptor));
1413   // Everything is fine, call runtime.
1414   __ Push(a1, a0);  // Receiver, key.
1416   // Perform tail call to the entry.
1417   __ TailCallExternalReference(ExternalReference(
1418        IC_Utility(kKeyedLoadPropertyWithInterceptor), masm->isolate()), 2, 1);
  // Slow path (label bound in an elided line — confirm): generic miss stub.
1421   GenerateMiss(masm, false);
// Keyed store miss handler: pushes (receiver, key, value) and tail-calls the
// miss runtime entry. `force_generic` selects the entry that transitions the
// IC straight to the generic stub instead of attempting specialization.
1425 void KeyedStoreIC::GenerateMiss(MacroAssembler* masm, bool force_generic) {
1426   // ---------- S t a t e --------------
1430   //  -- ra     : return address
1431   // -----------------------------------
1433   // Push receiver, key and value for runtime call.
1434   __ Push(a2, a1, a0);
1436   ExternalReference ref = force_generic
1437       ? ExternalReference(IC_Utility(kKeyedStoreIC_MissForceGeneric),
1439       : ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate());
1440   __ TailCallExternalReference(ref, 3, 1);
// Keyed store slow handler: like GenerateMiss, but routes to the runtime
// entry that completes the store without recording an IC miss, so the IC
// is not forced to transition to the generic stub.
1444 void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) {
1445   // ---------- S t a t e --------------
1449   //  -- ra     : return address
1450   // -----------------------------------
1452   // Push receiver, key and value for runtime call.
1453   // We can't use MultiPush as the order of the registers is important.
1454   __ Push(a2, a1, a0);
1456   // The slow case calls into the runtime to complete the store without causing
1457   // an IC miss that would otherwise cause a transition to the generic stub.
1458   ExternalReference ref =
1459       ExternalReference(IC_Utility(kKeyedStoreIC_Slow), masm->isolate());
1461   __ TailCallExternalReference(ref, 3, 1);
// Elements-kind transition stub: FAST_SMI_ONLY_ELEMENTS -> FAST_DOUBLE_ELEMENTS.
// When transition tracing is off, the transition is generated inline and the
// (modified) receiver is returned in v0; otherwise, or if the inline path
// fails, it tail-calls the runtime (which also emits the trace).
1465 void KeyedStoreIC::GenerateTransitionElementsSmiToDouble(MacroAssembler* masm) {
1466   // ---------- S t a t e --------------
1468   //  -- a3    : target map
1469   //  -- ra    : return address
1470   // -----------------------------------
1471   // Must return the modified receiver in v0.
1472   if (!FLAG_trace_elements_transitions) {
1474     ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &fail);
    // Return value moved into v0 in the branch delay slot (elided line — confirm).
1475     __ Ret(USE_DELAY_SLOT);
1481   __ TailCallRuntime(Runtime::kTransitionElementsSmiToDouble, 1, 1);
// Elements-kind transition stub: FAST_DOUBLE_ELEMENTS -> FAST_ELEMENTS.
// Mirrors GenerateTransitionElementsSmiToDouble: inline transition when not
// tracing (receiver returned in v0), runtime tail-call otherwise/on failure.
1485 void KeyedStoreIC::GenerateTransitionElementsDoubleToObject(
1486     MacroAssembler* masm) {
1487   // ---------- S t a t e --------------
1489   //  -- a3    : target map
1490   //  -- ra    : return address
1491   // -----------------------------------
1492   // Must return the modified receiver in v0.
1493   if (!FLAG_trace_elements_transitions) {
1495     ElementsTransitionGenerator::GenerateDoubleToObject(masm, &fail);
1496     __ Ret(USE_DELAY_SLOT);
1502   __ TailCallRuntime(Runtime::kTransitionElementsDoubleToObject, 1, 1);
// Megamorphic named store: probes the global stub cache for a monomorphic
// STORE_IC handler matching (receiver a1, name a2); on a cache miss falls
// through to the runtime miss handler.
1506 void StoreIC::GenerateMegamorphic(MacroAssembler* masm,
1507                                   StrictModeFlag strict_mode) {
1508   // ----------- S t a t e -------------
1512   //  -- ra    : return address
1513   // -----------------------------------
1515   // Get the receiver from the stack and probe the stub cache.
  // Flags variable declaration is in an elided line — confirm.
1517       Code::ComputeFlags(Code::STORE_IC, MONOMORPHIC, strict_mode);
1518   Isolate::Current()->stub_cache()->GenerateProbe(
1519       masm, flags, a1, a2, a3, t0, t1, t2);
1521   // Cache miss: Jump to runtime.
// Named store miss handler: pushes (receiver a1, name a2, value a0) and
// tail-calls the StoreIC_Miss runtime entry with 3 arguments.
1526 void StoreIC::GenerateMiss(MacroAssembler* masm) {
1527   // ----------- S t a t e -------------
1531   //  -- ra    : return address
1532   // -----------------------------------
1534   __ Push(a1, a2, a0);
1535   // Perform tail call to the entry.
1536   ExternalReference ref = ExternalReference(IC_Utility(kStoreIC_Miss),
1538   __ TailCallExternalReference(ref, 3, 1);
// Specialized store for `array.length = value`: verifies the receiver is a
// JSArray with FixedArray elements and fast properties and that the value is
// a smi, then tail-calls the StoreIC_ArrayLength runtime entry; anything
// else misses to the generic store miss handler.
1542 void StoreIC::GenerateArrayLength(MacroAssembler* masm) {
1543   // ----------- S t a t e -------------
1547   //  -- ra    : return address
1548   // -----------------------------------
1550   // This accepts as a receiver anything JSArray::SetElementsLength accepts
1551   // (currently anything except for external arrays which means anything with
1552   // elements of FixedArray type).  Value must be a number, but only smis are
1553   // accepted as the most common case.
1557   Register receiver = a1;
1558   Register value = a0;
1559   Register scratch = a3;
1561   // Check that the receiver isn't a smi.
1562   __ JumpIfSmi(receiver, &miss);
1564   // Check that the object is a JS array.
1565   __ GetObjectType(receiver, scratch, scratch);
1566   __ Branch(&miss, ne, scratch, Operand(JS_ARRAY_TYPE));
1568   // Check that elements are FixedArray.
1569   // We rely on StoreIC_ArrayLength below to deal with all types of
1570   // fast elements (including COW).
1571   __ lw(scratch, FieldMemOperand(receiver, JSArray::kElementsOffset));
1572   __ GetObjectType(scratch, scratch, scratch);
1573   __ Branch(&miss, ne, scratch, Operand(FIXED_ARRAY_TYPE));
1575   // Check that the array has fast properties, otherwise the length
1576   // property might have been redefined.
1577   __ lw(scratch, FieldMemOperand(receiver, JSArray::kPropertiesOffset));
1578   __ lw(scratch, FieldMemOperand(scratch, FixedArray::kMapOffset));
1579   __ LoadRoot(at, Heap::kHashTableMapRootIndex);
  // Hash-table properties => dictionary mode => `length` may be redefined.
1580   __ Branch(&miss, eq, scratch, Operand(at));
1582   // Check that value is a smi.
1583   __ JumpIfNotSmi(value, &miss);
1585   // Prepare tail call to StoreIC_ArrayLength.
1586   __ Push(receiver, value);
1588   ExternalReference ref = ExternalReference(IC_Utility(kStoreIC_ArrayLength),
1590   __ TailCallExternalReference(ref, 2, 1);
// Store to a receiver with a string-keyed property dictionary (normal mode):
// verifies the receiver shape, then stores directly into the dictionary,
// bumping hit/miss counters; failures fall through to the miss handler.
1598 void StoreIC::GenerateNormal(MacroAssembler* masm) {
1599   // ----------- S t a t e -------------
1603   //  -- ra    : return address
1604   // -----------------------------------
1607   GenerateStringDictionaryReceiverCheck(masm, a1, a3, t0, t1, &miss);
1609   GenerateDictionaryStore(masm, &miss, a3, a2, a0, t0, t1);
1610   Counters* counters = masm->isolate()->counters();
1611   __ IncrementCounter(counters->store_normal_hit(), 1, t0, t1);
  // Miss path (label bound in an elided line — confirm): count and fall
  // through to the generic miss stub.
1615   __ IncrementCounter(counters->store_normal_miss(), 1, t0, t1);
// Store through the global proxy: no fast path, just pushes the arguments
// plus PropertyAttributes and the strict-mode flag and tail-calls
// Runtime::kSetProperty (5 arguments).
1620 void StoreIC::GenerateGlobalProxy(MacroAssembler* masm,
1621                                   StrictModeFlag strict_mode) {
1622   // ----------- S t a t e -------------
1626   //  -- ra    : return address
1627   // -----------------------------------
1629   __ Push(a1, a2, a0);
1631   __ li(a1, Operand(Smi::FromInt(NONE)));  // PropertyAttributes.
1632   __ li(a0, Operand(Smi::FromInt(strict_mode)));
1635   // Do tail-call to runtime routine.
1636   __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
// Maps a comparison token to the processor condition used by the compare IC.
// NOTE(review): most switch cases are elided in this view; only the strict-
// equality case label and the unreachable fallback are visible — confirm the
// full mapping against the original source.
1643 Condition CompareIC::ComputeCondition(Token::Value op) {
1645     case Token::EQ_STRICT:
1658       return kNoCondition;
// Rewrites the compare IC's target stub based on the operand types just seen:
// a full CompareStub once the state goes GENERIC, otherwise a specialized
// ICCompareStub (with the receiver map pinned for KNOWN_OBJECTS). Also
// activates the inlined smi-check fast path on the first transition out of
// UNINITIALIZED.
1663 void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
1665   Handle<Code> rewritten;
1666   State previous_state = GetState();
1667   State state = TargetState(previous_state, false, x, y);
1668   if (state == GENERIC) {
1669     CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS, a1, a0);
1670     rewritten = stub.GetCode();
1672     ICCompareStub stub(op_, state);
1673     if (state == KNOWN_OBJECTS) {
1674       stub.set_known_map(Handle<Map>(Handle<JSObject>::cast(x)->map()));
1676     rewritten = stub.GetCode();
1678   set_target(*rewritten);
1681   if (FLAG_trace_ic) {
1682     PrintF("[CompareIC (%s->%s)#%s]\n",
1683            GetStateName(previous_state),
1684            GetStateName(state),
1689   // Activate inlined smi code.
1690   if (previous_state == UNINITIALIZED) {
1691     PatchInlinedSmiCode(address());
// Flips the inlined smi check at a compare-IC call site between "jump if
// smi" and "jump if not smi". The `andi at, rx, #delta` instruction placed
// right after the call encodes (in its immediate and rs fields) the distance
// back to the patchable check; a zero delta means nothing was inlined.
1696 void PatchInlinedSmiCode(Address address) {
1697   Address andi_instruction_address =
1698       address + Assembler::kCallTargetAddressOffset;
1700   // If the instruction following the call is not a andi at, rx, #yyy, nothing
1702   Instr instr = Assembler::instr_at(andi_instruction_address);
1703   if (!(Assembler::IsAndImmediate(instr) &&
1704         Assembler::GetRt(instr) == (uint32_t)zero_reg.code())) {
1708   // The delta to the start of the map check instruction and the
1709   // condition code uses at the patched jump.
1710   int delta = Assembler::GetImmediate16(instr);
  // The delta is split across the 16-bit immediate and the rs field to
  // cover distances larger than 16 bits.
1711   delta += Assembler::GetRs(instr) * kImm16Mask;
1712   // If the delta is 0 the instruction is andi at, zero_reg, #0 which also
1713   // signals that nothing was inlined.
1719   if (FLAG_trace_ic) {
1720     PrintF("[  patching ic at %p, andi=%p, delta=%d\n",
1721            address, andi_instruction_address, delta);
1725   Address patch_address =
1726       andi_instruction_address - delta * Instruction::kInstrSize;
1727   Instr instr_at_patch = Assembler::instr_at(patch_address);
1728   Instr branch_instr =
1729       Assembler::instr_at(patch_address + Instruction::kInstrSize);
1730   ASSERT(Assembler::IsAndImmediate(instr_at_patch));
1731   ASSERT_EQ(0, Assembler::GetImmediate16(instr_at_patch));
1732   ASSERT(Assembler::IsBranch(branch_instr));
1733   if (Assembler::IsBeq(branch_instr)) {
1734     // This is patching a "jump if not smi" site to be active.
1737     //   Branch <target>, eq, at, Operand(zero_reg)
1739     //   andi at, rx, #kSmiTagMask
1740     //   Branch <target>, ne, at, Operand(zero_reg)
    // Patch two instructions: the andi (real smi-tag mask) and the branch
    // condition (eq -> ne).
1741     CodePatcher patcher(patch_address, 2);
1742     Register reg = Register::from_code(Assembler::GetRs(instr_at_patch));
1743     patcher.masm()->andi(at, reg, kSmiTagMask);
1744     patcher.ChangeBranchCondition(ne);
1746     ASSERT(Assembler::IsBne(branch_instr));
1747     // This is patching a "jump if smi" site to be active.
1750     //   Branch <target>, ne, at, Operand(zero_reg)
1752     //   andi at, rx, #kSmiTagMask
1753     //   Branch <target>, eq, at, Operand(zero_reg)
1754     CodePatcher patcher(patch_address, 2);
1755     Register reg = Register::from_code(Assembler::GetRs(instr_at_patch));
1756     patcher.masm()->andi(at, reg, kSmiTagMask);
1757     patcher.ChangeBranchCondition(eq);
1762 } } // namespace v8::internal
1764 #endif // V8_TARGET_ARCH_MIPS