1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 #if V8_TARGET_ARCH_ARM
34 #include "stub-cache.h"
39 #define __ ACCESS_MASM(masm)
// Probes one table (primary or secondary) of the megamorphic stub cache.
// Computes the entry address from the unscaled |offset|, verifies that the
// cached key, receiver map and code flags all match, and on a hit jumps to
// the first instruction of the cached code object; any mismatch falls
// through to the miss path.
// NOTE(review): this excerpt elides several original lines (full parameter
// list, miss labels, some branches) — the visible body is incomplete.
42 static void ProbeTable(Isolate* isolate,
45 StubCache::Table table,
48 // Number of the cache entry, not scaled.
52 Register offset_scratch) {
// External references to the three parallel arrays (key, value, map) that
// make up the chosen stub-cache table.
53 ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
54 ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
55 ExternalReference map_offset(isolate->stub_cache()->map_reference(table));
57 uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address());
58 uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address());
59 uint32_t map_off_addr = reinterpret_cast<uint32_t>(map_offset.address());
61 // Check the relative positions of the address fields.
// The value/map arrays must sit at small, word-aligned offsets from the key
// array so the ldr instructions below can reach them with an immediate.
62 ASSERT(value_off_addr > key_off_addr);
63 ASSERT((value_off_addr - key_off_addr) % 4 == 0);
64 ASSERT((value_off_addr - key_off_addr) < (256 * 4));
65 ASSERT(map_off_addr > key_off_addr);
66 ASSERT((map_off_addr - key_off_addr) % 4 == 0);
67 ASSERT((map_off_addr - key_off_addr) < (256 * 4));
70 Register base_addr = scratch;
73 // Multiply by 3 because there are 3 fields per entry (name, code, map).
// offset_scratch = offset + (offset << 1) == offset * 3.
74 __ add(offset_scratch, offset, Operand(offset, LSL, 1));
76 // Calculate the base address of the entry.
77 __ mov(base_addr, Operand(key_offset));
78 __ add(base_addr, base_addr, Operand(offset_scratch, LSL, kPointerSizeLog2));
80 // Check that the key in the entry matches the name.
81 __ ldr(ip, MemOperand(base_addr, 0));
85 // Check the map matches.
86 __ ldr(ip, MemOperand(base_addr, map_off_addr - key_off_addr));
87 __ ldr(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset));
91 // Get the code entry from the cache.
92 Register code = scratch2;
94 __ ldr(code, MemOperand(base_addr, value_off_addr - key_off_addr));
96 // Check that the flags match what we're looking for.
// base_addr is no longer needed, so it is reused to hold the code flags.
97 Register flags_reg = base_addr;
99 __ ldr(flags_reg, FieldMemOperand(code, Code::kFlagsOffset));
100 // It's a nice optimization if this constant is encodable in the bic insn.
102 uint32_t mask = Code::kFlagsNotUsedInLookup;
103 ASSERT(__ ImmediateFitsAddrMode1Instruction(mask));
104 __ bic(flags_reg, flags_reg, Operand(mask));
105 __ cmp(flags_reg, Operand(flags));
// Debug-only flags force a miss in one of the two tables so the other
// table's path can be exercised in tests.
109 if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
111 } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
116 // Jump to the first instruction in the code stub.
// Writing to pc performs the jump; kHeaderSize - kHeapObjectTag skips the
// Code object header and removes the heap-object tag in one add.
117 __ add(pc, code, Operand(Code::kHeaderSize - kHeapObjectTag));
119 // Miss: fall through.
// Proves at code-generation time that |name| is NOT present on the receiver
// by probing its (slow-mode) property dictionary; branches to the miss label
// whenever absence cannot be guaranteed (interceptor present, access checks
// needed, not a JSObject, properties not a dictionary, or the probe finds
// the name).
// NOTE(review): the full parameter list is elided in this excerpt.
124 void StubCompiler::GenerateDictionaryNegativeLookup(MacroAssembler* masm,
130 ASSERT(name->IsUniqueName());
131 ASSERT(!receiver.is(scratch0));
// Both counters are bumped up front; the miss counter is decremented again
// at the end on the success path (see the DecrementCounter below).
132 Counters* counters = masm->isolate()->counters();
133 __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
134 __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
138 const int kInterceptorOrAccessCheckNeededMask =
139 (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);
141 // Bail out if the receiver has a named interceptor or requires access checks.
142 Register map = scratch1;
143 __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
144 __ ldrb(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
145 __ tst(scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
146 __ b(ne, miss_label);
148 // Check that receiver is a JSObject.
149 __ ldrb(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
150 __ cmp(scratch0, Operand(FIRST_SPEC_OBJECT_TYPE));
151 __ b(lt, miss_label);
153 // Load properties array.
154 Register properties = scratch0;
155 __ ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
156 // Check that the properties array is a dictionary.
157 __ ldr(map, FieldMemOperand(properties, HeapObject::kMapOffset));
158 Register tmp = properties;
159 __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
161 __ b(ne, miss_label);
163 // Restore the temporarily used register.
164 __ ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
// Emit the actual dictionary probe; its argument list is elided here.
167 NameDictionaryLookupStub::GenerateNegativeLookup(masm,
// Success: the name is provably absent, so undo the speculative miss count.
175 __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
// Emits the megamorphic stub-cache probe sequence: hashes the receiver's map
// together with the name's hash field and the code flags, then probes the
// primary table and, on a miss, the secondary table (via ProbeTable above).
// Falls through on a total miss so the caller can enter the runtime.
// NOTE(review): parameter list and the two ProbeTable call sites are elided
// in this excerpt.
179 void StubCache::GenerateProbe(MacroAssembler* masm,
187 Isolate* isolate = masm->isolate();
190 // Make sure that code is valid. The multiplying code relies on the
191 // entry size being 12.
// 12 bytes == 3 pointer-size fields (key, value, map) per entry on ARM.
192 ASSERT(sizeof(Entry) == 12);
194 // Make sure the flags does not name a specific type.
195 ASSERT(Code::ExtractTypeFromFlags(flags) == 0);
197 // Make sure that there are no register conflicts.
198 ASSERT(!scratch.is(receiver));
199 ASSERT(!scratch.is(name));
200 ASSERT(!extra.is(receiver));
201 ASSERT(!extra.is(name));
202 ASSERT(!extra.is(scratch));
203 ASSERT(!extra2.is(receiver));
204 ASSERT(!extra2.is(name));
205 ASSERT(!extra2.is(scratch));
206 ASSERT(!extra2.is(extra));
208 // Check scratch, extra and extra2 registers are valid.
209 ASSERT(!scratch.is(no_reg));
210 ASSERT(!extra.is(no_reg));
211 ASSERT(!extra2.is(no_reg));
212 ASSERT(!extra3.is(no_reg));
214 Counters* counters = masm->isolate()->counters();
215 __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1,
218 // Check that the receiver isn't a smi.
219 __ JumpIfSmi(receiver, &miss);
221 // Get the map of the receiver and compute the hash.
// scratch = name->hash_field + receiver->map (raw pointer arithmetic).
222 __ ldr(scratch, FieldMemOperand(name, Name::kHashFieldOffset));
223 __ ldr(ip, FieldMemOperand(receiver, HeapObject::kMapOffset));
224 __ add(scratch, scratch, Operand(ip));
225 uint32_t mask = kPrimaryTableSize - 1;
226 // We shift out the last two bits because they are not part of the hash and
227 // they are always 01 for maps.
228 __ mov(scratch, Operand(scratch, LSR, kHeapObjectTagSize));
229 // Mask down the eor argument to the minimum to keep the immediate
231 __ eor(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask));
232 // Prefer and_ to ubfx here because ubfx takes 2 cycles.
233 __ and_(scratch, scratch, Operand(mask));
235 // Probe the primary table.
247 // Primary miss: Compute hash for secondary probe.
// The secondary hash is derived from the primary one: subtract the name,
// add the flags, and mask to the (smaller) secondary table size.
248 __ sub(scratch, scratch, Operand(name, LSR, kHeapObjectTagSize));
249 uint32_t mask2 = kSecondaryTableSize - 1;
250 __ add(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask2));
251 __ and_(scratch, scratch, Operand(mask2));
253 // Probe the secondary table.
265 // Cache miss: Fall-through and let caller handle the miss by
266 // entering the runtime system.
268 __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1,
// Loads into |prototype| the prototype object of the global function at
// slot |index| of the native context: global object -> native context ->
// function -> initial map -> prototype.
273 void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
275 Register prototype) {
276 // Load the global or builtins object from the current context.
278 MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
279 // Load the native context from the global or builtins object.
281 FieldMemOperand(prototype, GlobalObject::kNativeContextOffset));
282 // Load the function from the native context.
283 __ ldr(prototype, MemOperand(prototype, Context::SlotOffset(index)));
284 // Load the initial map. The global functions all have initial maps.
286 FieldMemOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
287 // Load the prototype from the initial map.
288 __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
// Like GenerateLoadGlobalFunctionPrototype, but embeds the function's
// initial map directly in the generated code. First verifies at run time
// that the current native context still holds the same function at |index|
// (so the embedded map is still valid).
292 void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
293 MacroAssembler* masm,
297 Isolate* isolate = masm->isolate();
298 // Get the global function with the given index.
299 Handle<JSFunction> function(
300 JSFunction::cast(isolate->native_context()->get(index)));
302 // Check we're still in the same context.
// prototype doubles as a scratch register until the final loads.
303 Register scratch = prototype;
304 const int offset = Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX);
305 __ ldr(scratch, MemOperand(cp, offset));
306 __ ldr(scratch, FieldMemOperand(scratch, GlobalObject::kNativeContextOffset));
307 __ ldr(scratch, MemOperand(scratch, Context::SlotOffset(index)));
// Compare against the compile-time function; the compare/branch on a
// mismatch is elided in this excerpt.
308 __ Move(ip, function);
312 // Load its initial map. The global functions all have initial maps.
313 __ Move(prototype, Handle<Map>(function->initial_map()));
314 // Load the prototype from the initial map.
315 __ ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
// Loads a fast-mode property into |dst|. Negative |index| means an
// out-of-object property stored in the properties array; the in-object
// branch is elided in this excerpt. Doubles are not handled here.
319 void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
324 Representation representation) {
325 ASSERT(!representation.IsDouble());
326 int offset = index * kPointerSize;
328 // Calculate the offset into the properties array.
329 offset = offset + FixedArray::kHeaderSize;
330 __ ldr(dst, FieldMemOperand(src, JSObject::kPropertiesOffset));
333 __ ldr(dst, FieldMemOperand(src, offset));
// Loads the length of a JSArray receiver into r0 (the return-value
// register). Branches to |miss_label| if the receiver is a smi or not a
// JSArray.
337 void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
341 // Check that the receiver isn't a smi.
342 __ JumpIfSmi(receiver, miss_label);
344 // Check that the object is a JS array.
345 __ CompareObjectType(receiver, scratch, scratch, JS_ARRAY_TYPE);
346 __ b(ne, miss_label);
348 // Load length directly from the JS array.
349 __ ldr(r0, FieldMemOperand(receiver, JSArray::kLengthOffset));
// Loads the prototype of a function receiver into r0, branching to
// |miss_label| if the receiver has no usable prototype.
354 void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
359 __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
360 __ mov(r0, scratch1);
365 // Generate code to check that a global property cell is empty. Create
366 // the property cell at compilation time if no cell exists for the
// Branches (elided here) when the cell's value is not the hole, i.e. when
// the property has since been defined on the global object.
368 void StubCompiler::GenerateCheckPropertyCell(MacroAssembler* masm,
369 Handle<JSGlobalObject> global,
373 Handle<Cell> cell = JSGlobalObject::EnsurePropertyCell(global, name);
374 ASSERT(cell->value()->IsTheHole());
375 __ mov(scratch, Operand(cell));
376 __ ldr(scratch, FieldMemOperand(scratch, Cell::kValueOffset));
377 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
// Emits a negative lookup for |name| on the holder: global objects use a
// property-cell check, slow-mode non-proxy objects use a dictionary probe.
// Fast-properties holders need no code (absence is proven by the map check).
383 void StoreStubCompiler::GenerateNegativeHolderLookup(
384 MacroAssembler* masm,
385 Handle<JSObject> holder,
389 if (holder->IsJSGlobalObject()) {
390 GenerateCheckPropertyCell(
391 masm, Handle<JSGlobalObject>::cast(holder), name, scratch1(), miss);
392 } else if (!holder->HasFastProperties() && !holder->IsJSGlobalProxy()) {
393 GenerateDictionaryNegativeLookup(
394 masm, miss, holder_reg, name, scratch1(), scratch2());
399 // Generate StoreTransition code, value is passed in r0 register.
400 // When leaving generated code after success, the receiver_reg and name_reg
401 // may be clobbered. Upon branch to miss_label, the receiver and name
402 // registers have their original values.
// Stores a property while transitioning the receiver to a new map: checks
// the value against the new field's representation, extends the properties
// backing store via the runtime if needed, installs the transition map
// (with write barrier), then writes the value either in-object or into the
// properties array.
// NOTE(review): several lines (labels, some branch targets, write-barrier
// argument lists) are elided in this excerpt.
403 void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm,
404 Handle<JSObject> object,
405 LookupResult* lookup,
406 Handle<Map> transition,
408 Register receiver_reg,
409 Register storage_reg,
// The transitioned map's last-added descriptor describes the property
// being stored.
419 int descriptor = transition->LastAdded();
420 DescriptorArray* descriptors = transition->instance_descriptors();
421 PropertyDetails details = descriptors->GetDetails(descriptor);
422 Representation representation = details.representation();
423 ASSERT(!representation.IsNone());
// Representation check on the incoming value (r0):
425 if (details.type() == CONSTANT) {
// CONSTANT properties must match the descriptor's value exactly.
426 Handle<Object> constant(descriptors->GetValue(descriptor), masm->isolate());
427 __ Move(scratch1, constant);
428 __ cmp(value_reg, scratch1);
429 __ b(ne, miss_label);
430 } else if (representation.IsSmi()) {
431 __ JumpIfNotSmi(value_reg, miss_label);
432 } else if (representation.IsHeapObject()) {
433 __ JumpIfSmi(value_reg, miss_label);
434 } else if (representation.IsDouble()) {
// Doubles are boxed: allocate a HeapNumber in storage_reg and convert the
// value (smi or HeapNumber) into d0 before storing it.
435 Label do_store, heap_number;
436 __ LoadRoot(scratch3, Heap::kHeapNumberMapRootIndex);
437 __ AllocateHeapNumber(storage_reg, scratch1, scratch2, scratch3, slow);
439 __ JumpIfNotSmi(value_reg, &heap_number);
440 __ SmiUntag(scratch1, value_reg);
441 __ vmov(s0, scratch1);
442 __ vcvt_f64_s32(d0, s0);
445 __ bind(&heap_number);
446 __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex,
447 miss_label, DONT_DO_SMI_CHECK);
448 __ vldr(d0, FieldMemOperand(value_reg, HeapNumber::kValueOffset));
451 __ vstr(d0, FieldMemOperand(storage_reg, HeapNumber::kValueOffset));
454 // Stub never generated for non-global objects that require access
456 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
458 // Perform map transition for the receiver if necessary.
459 if (details.type() == FIELD &&
460 object->map()->unused_property_fields() == 0) {
461 // The properties must be extended before we can store the value.
462 // We jump to a runtime call that extends the properties array.
463 __ push(receiver_reg);
464 __ mov(r2, Operand(transition));
466 __ TailCallExternalReference(
467 ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
474 // Update the map of the object.
475 __ mov(scratch1, Operand(transition));
476 __ str(scratch1, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
478 // Update the write barrier for the map field.
479 __ RecordWriteField(receiver_reg,
480 HeapObject::kMapOffset,
// CONSTANT properties store nothing beyond the map change; return here.
488 if (details.type() == CONSTANT) {
489 ASSERT(value_reg.is(r0));
494 int index = transition->instance_descriptors()->GetFieldIndex(
495 transition->LastAdded());
497 // Adjust for the number of properties stored in the object. Even in the
498 // face of a transition we can use the old map here because the size of the
499 // object and the number of in-object properties is not going to change.
// Negative index after this adjustment means an in-object property;
// non-negative means a slot in the properties array.
500 index -= object->map()->inobject_properties();
502 // TODO(verwaest): Share this code as a code stub.
503 SmiCheck smi_check = representation.IsTagged()
504 ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
506 // Set the property straight into the object.
507 int offset = object->map()->instance_size() + (index * kPointerSize);
508 if (representation.IsDouble()) {
// For doubles the boxed HeapNumber (storage_reg) is what gets stored.
509 __ str(storage_reg, FieldMemOperand(receiver_reg, offset));
511 __ str(value_reg, FieldMemOperand(receiver_reg, offset));
514 if (!representation.IsSmi()) {
515 // Update the write barrier for the array address.
516 if (!representation.IsDouble()) {
517 __ mov(storage_reg, value_reg);
519 __ RecordWriteField(receiver_reg,
529 // Write to the properties array.
530 int offset = index * kPointerSize + FixedArray::kHeaderSize;
531 // Get the properties array
533 FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
534 if (representation.IsDouble()) {
535 __ str(storage_reg, FieldMemOperand(scratch1, offset));
537 __ str(value_reg, FieldMemOperand(scratch1, offset));
540 if (!representation.IsSmi()) {
541 // Update the write barrier for the array address.
542 if (!representation.IsDouble()) {
543 __ mov(storage_reg, value_reg);
545 __ RecordWriteField(scratch1,
556 // Return the value (register r0).
557 ASSERT(value_reg.is(r0));
563 // Generate StoreField code, value is passed in r0 register.
564 // When leaving generated code after success, the receiver_reg and name_reg
565 // may be clobbered. Upon branch to miss_label, the receiver and name
566 // registers have their original values.
// Stores into an existing field (no map transition): representation-checks
// the value, then writes it in-object or into the properties array, with a
// write barrier for non-smi values. Doubles are stored into the field's
// existing HeapNumber box.
// NOTE(review): labels and some branch/else arms are elided in this excerpt.
567 void StoreStubCompiler::GenerateStoreField(MacroAssembler* masm,
568 Handle<JSObject> object,
569 LookupResult* lookup,
570 Register receiver_reg,
579 // Stub never generated for non-global objects that require access
581 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
583 int index = lookup->GetFieldIndex().field_index();
585 // Adjust for the number of properties stored in the object. Even in the
586 // face of a transition we can use the old map here because the size of the
587 // object and the number of in-object properties is not going to change.
// Negative index after this adjustment = in-object field; otherwise a slot
// in the properties array.
588 index -= object->map()->inobject_properties();
590 Representation representation = lookup->representation();
591 ASSERT(!representation.IsNone());
592 if (representation.IsSmi()) {
593 __ JumpIfNotSmi(value_reg, miss_label);
594 } else if (representation.IsHeapObject()) {
595 __ JumpIfSmi(value_reg, miss_label);
596 } else if (representation.IsDouble()) {
597 // Load the double storage.
// scratch1 ends up holding the field's existing HeapNumber box.
599 int offset = object->map()->instance_size() + (index * kPointerSize);
600 __ ldr(scratch1, FieldMemOperand(receiver_reg, offset));
603 FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
604 int offset = index * kPointerSize + FixedArray::kHeaderSize;
605 __ ldr(scratch1, FieldMemOperand(scratch1, offset));
608 // Store the value into the storage.
// Convert the value (smi or HeapNumber) to a double in d0.
609 Label do_store, heap_number;
610 __ JumpIfNotSmi(value_reg, &heap_number);
611 __ SmiUntag(scratch2, value_reg);
612 __ vmov(s0, scratch2);
613 __ vcvt_f64_s32(d0, s0);
616 __ bind(&heap_number);
617 __ CheckMap(value_reg, scratch2, Heap::kHeapNumberMapRootIndex,
618 miss_label, DONT_DO_SMI_CHECK);
619 __ vldr(d0, FieldMemOperand(value_reg, HeapNumber::kValueOffset));
// Overwrite the payload of the existing box; no write barrier needed
// because no pointer field changes.
622 __ vstr(d0, FieldMemOperand(scratch1, HeapNumber::kValueOffset));
623 // Return the value (register r0).
624 ASSERT(value_reg.is(r0));
629 // TODO(verwaest): Share this code as a code stub.
630 SmiCheck smi_check = representation.IsTagged()
631 ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
633 // Set the property straight into the object.
634 int offset = object->map()->instance_size() + (index * kPointerSize);
635 __ str(value_reg, FieldMemOperand(receiver_reg, offset));
637 if (!representation.IsSmi()) {
638 // Skip updating write barrier if storing a smi.
639 __ JumpIfSmi(value_reg, &exit);
641 // Update the write barrier for the array address.
642 // Pass the now unused name_reg as a scratch register.
643 __ mov(name_reg, value_reg);
644 __ RecordWriteField(receiver_reg,
654 // Write to the properties array.
655 int offset = index * kPointerSize + FixedArray::kHeaderSize;
656 // Get the properties array
658 FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
659 __ str(value_reg, FieldMemOperand(scratch1, offset));
661 if (!representation.IsSmi()) {
662 // Skip updating write barrier if storing a smi.
663 __ JumpIfSmi(value_reg, &exit);
665 // Update the write barrier for the array address.
666 // Ok to clobber receiver_reg and name_reg, since we return.
667 __ mov(name_reg, value_reg);
668 __ RecordWriteField(scratch1,
679 // Return the value (register r0).
680 ASSERT(value_reg.is(r0));
// On the (bound) restore path, reloads the property name into the name
// register before continuing; emits nothing if the label was never used.
686 void StoreStubCompiler::GenerateRestoreName(MacroAssembler* masm,
689 if (!label->is_unused()) {
691 __ mov(this->name(), Operand(name));
// Pushes the argument block expected by the interceptor IC runtime entries:
// name, interceptor info, receiver, holder — in the index order asserted by
// the STATIC_ASSERTs below. Clobbers |name| as a scratch register.
// NOTE(review): the push instructions themselves are elided in this excerpt.
696 static void PushInterceptorArguments(MacroAssembler* masm,
700 Handle<JSObject> holder_obj) {
701 STATIC_ASSERT(StubCache::kInterceptorArgsNameIndex == 0);
702 STATIC_ASSERT(StubCache::kInterceptorArgsInfoIndex == 1);
703 STATIC_ASSERT(StubCache::kInterceptorArgsThisIndex == 2);
704 STATIC_ASSERT(StubCache::kInterceptorArgsHolderIndex == 3);
705 STATIC_ASSERT(StubCache::kInterceptorArgsLength == 4);
707 Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
// The interceptor info must be in old space so it can be embedded directly.
708 ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor));
709 Register scratch = name;
710 __ mov(scratch, Operand(interceptor));
// Pushes the interceptor arguments and calls the IC runtime entry |id| to
// invoke the named-property interceptor for a load.
717 static void CompileCallLoadPropertyWithInterceptor(
718 MacroAssembler* masm,
722 Handle<JSObject> holder_obj,
724 PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
725 __ CallExternalReference(
726 ExternalReference(IC_Utility(id), masm->isolate()),
727 StubCache::kInterceptorArgsLength);
731 // Generate call to api function.
// Pushes the call arguments, sets up the CallApiFunctionStub ABI registers
// (callee=r0, call_data=r4, holder=r2, api_function_address=r1) and tail
// calls the stub. The holder is resolved at compile time from the receiver
// map; call data is loaded indirectly if it lives in new space.
// NOTE(review): the argument-push instructions and the kHolderNotFound arm
// are elided in this excerpt.
732 void StubCompiler::GenerateFastApiCall(MacroAssembler* masm,
733 const CallOptimization& optimization,
734 Handle<Map> receiver_map,
740 ASSERT(!receiver.is(scratch_in));
742 // Write the arguments to stack frame.
743 for (int i = 0; i < argc; i++) {
// Arguments are visited last-to-first so they end up in call order.
744 Register arg = values[argc-1-i];
745 ASSERT(!receiver.is(arg));
746 ASSERT(!scratch_in.is(arg));
749 ASSERT(optimization.is_simple_api_call());
751 // Abi for CallApiFunctionStub.
752 Register callee = r0;
753 Register call_data = r4;
754 Register holder = r2;
755 Register api_function_address = r1;
757 // Put holder in place.
758 CallOptimization::HolderLookup holder_lookup;
759 Handle<JSObject> api_holder = optimization.LookupHolderOfExpectedType(
762 switch (holder_lookup) {
763 case CallOptimization::kHolderIsReceiver:
764 __ Move(holder, receiver);
766 case CallOptimization::kHolderFound:
767 __ Move(holder, api_holder);
769 case CallOptimization::kHolderNotFound:
774 Isolate* isolate = masm->isolate();
775 Handle<JSFunction> function = optimization.constant_function();
776 Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
777 Handle<Object> call_data_obj(api_call_info->data(), isolate);
779 // Put callee in place.
780 __ Move(callee, function);
782 bool call_data_undefined = false;
783 // Put call_data in place.
784 if (isolate->heap()->InNewSpace(*call_data_obj)) {
// New-space objects cannot be embedded in code; load the data field from
// the (old-space) CallHandlerInfo instead.
785 __ Move(call_data, api_call_info);
786 __ ldr(call_data, FieldMemOperand(call_data, CallHandlerInfo::kDataOffset));
787 } else if (call_data_obj->IsUndefined()) {
788 call_data_undefined = true;
789 __ LoadRoot(call_data, Heap::kUndefinedValueRootIndex);
791 __ Move(call_data, call_data_obj);
794 // Put api_function_address in place.
795 Address function_address = v8::ToCData<Address>(api_call_info->callback());
796 ApiFunction fun(function_address);
797 ExternalReference::Type type = ExternalReference::DIRECT_API_CALL;
798 ExternalReference ref = ExternalReference(&fun,
801 __ mov(api_function_address, Operand(ref));
804 CallApiFunctionStub stub(is_store, call_data_undefined, argc);
805 __ TailCallStub(&stub);
// Tail-calls the given code object (jump, not call — no return here).
809 void StubCompiler::GenerateTailCall(MacroAssembler* masm, Handle<Code> code) {
810 __ Jump(code, RelocInfo::CODE_TARGET);
815 #define __ ACCESS_MASM(masm())
// Walks the prototype chain from |type|'s map to |holder|, emitting map
// checks (or dictionary negative lookups for slow-mode objects) at each
// step, plus global-proxy access checks and global-object property-cell
// checks where required. Returns the register that holds the holder at the
// end of the walk.
// NOTE(review): parameter list, the loop increment/depth bookkeeping and a
// few branch targets are elided in this excerpt.
818 Register StubCompiler::CheckPrototypes(Handle<HeapType> type,
820 Handle<JSObject> holder,
826 PrototypeCheckType check) {
827 Handle<Map> receiver_map(IC::TypeToMap(*type, isolate()));
829 // Make sure there's no overlap between holder and object registers.
830 ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
831 ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
832 && !scratch2.is(scratch1));
834 // Keep track of the current object in register reg.
835 Register reg = object_reg;
// current is only known statically when the type is a constant; otherwise
// it stays null and the negative-lookup assert below is skipped.
838 Handle<JSObject> current = Handle<JSObject>::null();
839 if (type->IsConstant()) current = Handle<JSObject>::cast(type->AsConstant());
840 Handle<JSObject> prototype = Handle<JSObject>::null();
841 Handle<Map> current_map = receiver_map;
842 Handle<Map> holder_map(holder->map());
843 // Traverse the prototype chain and check the maps in the prototype chain for
844 // fast and global objects or do negative lookup for normal objects.
845 while (!current_map.is_identical_to(holder_map)) {
848 // Only global objects and objects that do not require access
849 // checks are allowed in stubs.
850 ASSERT(current_map->IsJSGlobalProxyMap() ||
851 !current_map->is_access_check_needed());
853 prototype = handle(JSObject::cast(current_map->prototype()));
854 if (current_map->is_dictionary_map() &&
855 !current_map->IsJSGlobalObjectMap() &&
856 !current_map->IsJSGlobalProxyMap()) {
// Slow-mode object: prove the name is absent instead of checking the map.
857 if (!name->IsUniqueName()) {
858 ASSERT(name->IsString());
859 name = factory()->InternalizeString(Handle<String>::cast(name));
861 ASSERT(current.is_null() ||
862 current->property_dictionary()->FindEntry(*name) ==
863 NameDictionary::kNotFound);
865 GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
// Advance to the prototype via the map loaded in scratch1.
868 __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
869 reg = holder_reg; // From now on the object will be in holder_reg.
870 __ ldr(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
872 Register map_reg = scratch1;
873 if (depth != 1 || check == CHECK_ALL_MAPS) {
874 // CheckMap implicitly loads the map of |reg| into |map_reg|.
875 __ CheckMap(reg, map_reg, current_map, miss, DONT_DO_SMI_CHECK);
877 __ ldr(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset));
880 // Check access rights to the global object. This has to happen after
881 // the map check so that we know that the object is actually a global
883 if (current_map->IsJSGlobalProxyMap()) {
884 __ CheckAccessGlobalProxy(reg, scratch2, miss);
885 } else if (current_map->IsJSGlobalObjectMap()) {
886 GenerateCheckPropertyCell(
887 masm(), Handle<JSGlobalObject>::cast(current), name,
891 reg = holder_reg; // From now on the object will be in holder_reg.
893 if (heap()->InNewSpace(*prototype)) {
894 // The prototype is in new space; we cannot store a reference to it
895 // in the code. Load it from the map.
896 __ ldr(reg, FieldMemOperand(map_reg, Map::kPrototypeOffset));
898 // The prototype is in old space; load it directly.
899 __ mov(reg, Operand(prototype));
903 // Go to the next object in the prototype chain.
905 current_map = handle(current->map());
908 // Log the check depth.
909 LOG(isolate(), IntEvent("check-maps-depth", depth + 1));
911 if (depth != 0 || check == CHECK_ALL_MAPS) {
912 // Check the holder map.
913 __ CheckMap(reg, scratch1, current_map, miss, DONT_DO_SMI_CHECK);
916 // Perform security check for access to the global object.
917 ASSERT(current_map->IsJSGlobalProxyMap() ||
918 !current_map->is_access_check_needed());
919 if (current_map->IsJSGlobalProxyMap()) {
920 __ CheckAccessGlobalProxy(reg, scratch1, miss);
923 // Return the register containing the holder.
// Emits the load-handler miss path: if the miss label was used, binds it
// (binding elided in this excerpt) and tail-calls the kind-specific miss
// builtin.
928 void LoadStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
929 if (!miss->is_unused()) {
933 TailCallBuiltin(masm(), MissBuiltin(kind()));
// Store-handler miss path: restores the name register first (stores may
// have clobbered it), then tail-calls the kind-specific miss builtin.
939 void StoreStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
940 if (!miss->is_unused()) {
943 GenerateRestoreName(masm(), miss, name);
944 TailCallBuiltin(masm(), MissBuiltin(kind()));
// Frontend for callback loads: runs the normal handler frontend, and for
// slow-mode non-global holders additionally probes the holder's property
// dictionary at run time to verify the stored value is still |callback|.
// Returns the register holding the holder.
// NOTE(review): probe labels and the mismatch branch are elided in this
// excerpt.
950 Register LoadStubCompiler::CallbackHandlerFrontend(
951 Handle<HeapType> type,
953 Handle<JSObject> holder,
955 Handle<Object> callback) {
958 Register reg = HandlerFrontendHeader(type, object_reg, holder, name, &miss);
960 if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) {
961 ASSERT(!reg.is(scratch2()));
962 ASSERT(!reg.is(scratch3()));
963 ASSERT(!reg.is(scratch4()));
965 // Load the properties dictionary.
966 Register dictionary = scratch4();
967 __ ldr(dictionary, FieldMemOperand(reg, JSObject::kPropertiesOffset));
969 // Probe the dictionary.
971 NameDictionaryLookupStub::GeneratePositiveLookup(masm(),
978 __ bind(&probe_done);
980 // If probing finds an entry in the dictionary, scratch3 contains the
981 // pointer into the dictionary. Check that the value is the callback.
982 Register pointer = scratch3();
983 const int kElementsStartOffset = NameDictionary::kHeaderSize +
984 NameDictionary::kElementsStartIndex * kPointerSize;
985 const int kValueOffset = kElementsStartOffset + kPointerSize;
986 __ ldr(scratch2(), FieldMemOperand(pointer, kValueOffset));
987 __ cmp(scratch2(), Operand(callback));
991 HandlerFrontendFooter(name, &miss);
// Loads a field property by tail-calling the shared (Keyed)LoadFieldStub,
// moving the holder into the receiver register first if necessary.
996 void LoadStubCompiler::GenerateLoadField(Register reg,
997 Handle<JSObject> holder,
999 Representation representation) {
1000 if (!reg.is(receiver())) __ mov(receiver(), reg);
1001 if (kind() == Code::LOAD_IC) {
1002 LoadFieldStub stub(field.is_inobject(holder),
1003 field.translate(holder),
1005 GenerateTailCall(masm(), stub.GetCode(isolate()));
1007 KeyedLoadFieldStub stub(field.is_inobject(holder),
1008 field.translate(holder),
1010 GenerateTailCall(masm(), stub.GetCode(isolate()));
// Loads a constant-function property: materializes |value| as the result
// and returns (materialization/return instructions elided in this excerpt).
1015 void LoadStubCompiler::GenerateLoadConstant(Handle<Object> value) {
1016 // Return the constant value.
// Loads a property through an ExecutableAccessorInfo getter: builds the
// PropertyCallbackArguments block on the stack in the asserted slot order,
// puts the getter's address in r2, and tail-calls CallApiGetterStub.
1022 void LoadStubCompiler::GenerateLoadCallback(
1024 Handle<ExecutableAccessorInfo> callback) {
1025 // Build AccessorInfo::args_ list on the stack and push property name below
1026 // the exit frame to make GC aware of them and store pointers to them.
1027 STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0);
1028 STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1);
1029 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2);
1030 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3);
1031 STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4);
1032 STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5);
1033 STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 6);
1034 ASSERT(!scratch2().is(reg));
1035 ASSERT(!scratch3().is(reg));
1036 ASSERT(!scratch4().is(reg));
1037 __ push(receiver());
1038 if (heap()->InNewSpace(callback->data())) {
// Data in new space cannot be embedded; load it from the AccessorInfo.
1039 __ Move(scratch3(), callback);
1040 __ ldr(scratch3(), FieldMemOperand(scratch3(),
1041 ExecutableAccessorInfo::kDataOffset));
1043 __ Move(scratch3(), Handle<Object>(callback->data(), isolate()));
1045 __ push(scratch3());
// Return value and its default are both initialized to undefined.
1046 __ LoadRoot(scratch3(), Heap::kUndefinedValueRootIndex);
1047 __ mov(scratch4(), scratch3());
1048 __ Push(scratch3(), scratch4());
1050 Operand(ExternalReference::isolate_address(isolate())));
1051 __ Push(scratch4(), reg);
1052 __ mov(scratch2(), sp); // scratch2 = PropertyAccessorInfo::args_
1055 // Abi for CallApiGetter
1056 Register getter_address_reg = r2;
1058 Address getter_address = v8::ToCData<Address>(callback->getter());
1059 ApiFunction fun(getter_address);
1060 ExternalReference::Type type = ExternalReference::DIRECT_GETTER_CALL;
1061 ExternalReference ref = ExternalReference(&fun, type, isolate());
1062 __ mov(getter_address_reg, Operand(ref));
1064 CallApiGetterStub stub;
1065 __ TailCallStub(&stub);
// Loads a property through a named-property interceptor. If the follow-up
// lookup (FIELD or a compatible CALLBACKS accessor) is cacheable, the
// interceptor is invoked inline inside an internal frame and, when it
// yields no result, the load continues in generated code; otherwise the
// whole load is delegated to the runtime.
// NOTE(review): some frame-exit/pop instructions are elided in this excerpt.
1069 void LoadStubCompiler::GenerateLoadInterceptor(
1070 Register holder_reg,
1071 Handle<Object> object,
1072 Handle<JSObject> interceptor_holder,
1073 LookupResult* lookup,
1074 Handle<Name> name) {
1075 ASSERT(interceptor_holder->HasNamedInterceptor());
1076 ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());
1078 // So far the most popular follow ups for interceptor loads are FIELD
1079 // and CALLBACKS, so inline only them, other cases may be added
1081 bool compile_followup_inline = false;
1082 if (lookup->IsFound() && lookup->IsCacheable()) {
1083 if (lookup->IsField()) {
1084 compile_followup_inline = true;
1085 } else if (lookup->type() == CALLBACKS &&
1086 lookup->GetCallbackObject()->IsExecutableAccessorInfo()) {
1087 ExecutableAccessorInfo* callback =
1088 ExecutableAccessorInfo::cast(lookup->GetCallbackObject());
1089 compile_followup_inline = callback->getter() != NULL &&
1090 callback->IsCompatibleReceiver(*object);
1094 if (compile_followup_inline) {
1095 // Compile the interceptor call, followed by inline code to load the
1096 // property from further up the prototype chain if the call fails.
1097 // Check that the maps haven't changed.
1098 ASSERT(holder_reg.is(receiver()) || holder_reg.is(scratch1()));
1100 // Preserve the receiver register explicitly whenever it is different from
1101 // the holder and it is needed should the interceptor return without any
1102 // result. The CALLBACKS case needs the receiver to be passed into C++ code,
1103 // the FIELD case might cause a miss during the prototype check.
// (sic) "perfrom" — typo preserved from the original identifier.
1104 bool must_perfrom_prototype_check = *interceptor_holder != lookup->holder();
1105 bool must_preserve_receiver_reg = !receiver().is(holder_reg) &&
1106 (lookup->type() == CALLBACKS || must_perfrom_prototype_check);
1108 // Save necessary data before invoking an interceptor.
1109 // Requires a frame to make GC aware of pushed pointers.
1111 FrameAndConstantPoolScope frame_scope(masm(), StackFrame::INTERNAL);
1112 if (must_preserve_receiver_reg) {
1113 __ Push(receiver(), holder_reg, this->name());
1115 __ Push(holder_reg, this->name());
1117 // Invoke an interceptor. Note: map checks from receiver to
1118 // interceptor's holder has been compiled before (see a caller
1120 CompileCallLoadPropertyWithInterceptor(
1121 masm(), receiver(), holder_reg, this->name(), interceptor_holder,
1122 IC::kLoadPropertyWithInterceptorOnly);
1124 // Check if interceptor provided a value for property. If it's
1125 // the case, return immediately.
// The no-result sentinel in r0 means the interceptor declined to handle
// the property.
1126 Label interceptor_failed;
1127 __ LoadRoot(scratch1(), Heap::kNoInterceptorResultSentinelRootIndex);
1128 __ cmp(r0, scratch1());
1129 __ b(eq, &interceptor_failed);
1130 frame_scope.GenerateLeaveFrame();
1133 __ bind(&interceptor_failed);
1134 __ pop(this->name());
1136 if (must_preserve_receiver_reg) {
1139 // Leave the internal frame.
1142 GenerateLoadPostInterceptor(holder_reg, interceptor_holder, name, lookup);
1143 } else { // !compile_followup_inline
1144 // Call the runtime system to load the interceptor.
1145 // Check that the maps haven't changed.
1146 PushInterceptorArguments(masm(), receiver(), holder_reg,
1147 this->name(), interceptor_holder);
1149 ExternalReference ref =
1150 ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad),
1152 __ TailCallExternalReference(ref, StubCache::kInterceptorArgsLength, 1);
// Jumps to |miss| unless |object| holds a boolean oddball.  The check
// compares |object| against the canonical true and false values.
// NOTE(review): the cmp/branch instructions between the two LoadRoots
// appear to have been elided from this excerpt — confirm against the
// full source.
1157 void StubCompiler::GenerateBooleanCheck(Register object, Label* miss) {
1159 // Check that the object is a boolean.
1160 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
1163 __ LoadRoot(ip, Heap::kFalseValueRootIndex);
// Compiles a store handler for a property backed by an
// ExecutableAccessorInfo setter.  After HandlerFrontend performs the map
// checks, five runtime arguments (receiver, holder, callback info, name,
// value) are pushed and the stub tail-calls kStoreCallbackProperty.
1170 Handle<Code> StoreStubCompiler::CompileStoreCallback(
1171 Handle<JSObject> object,
1172 Handle<JSObject> holder,
1174 Handle<ExecutableAccessorInfo> callback) {
1175 Register holder_reg = HandlerFrontend(
1176 IC::CurrentTypeOf(object, isolate()), receiver(), holder, name);
1178 // Stub never generated for non-global objects that require access checks.
1179 ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
1181 __ push(receiver()); // receiver
1182 __ push(holder_reg);
// NOTE(review): the push of ip (the callback info loaded below) appears
// to have been elided from this excerpt.
1183 __ mov(ip, Operand(callback)); // callback info
1185 __ mov(ip, Operand(name));
1186 __ Push(ip, value());
1188 // Do tail-call to the runtime system.
1189 ExternalReference store_callback_property =
1190 ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
// 5 = arguments pushed above, 1 = result size.
1191 __ TailCallExternalReference(store_callback_property, 5, 1);
1193 // Return the generated code.
1194 return GetCode(kind(), Code::FAST, name);
1199 #define __ ACCESS_MASM(masm)
// Static code snippet that stores a property by calling its JavaScript
// |setter| with the receiver and value on the stack, inside an internal
// frame.  The snippet returns the passed value rather than the setter's
// return value.  When |setter| is null, only the deoptimization
// continuation PC is recorded (the snippet then exists purely so the
// deoptimizer has a place to resume).
1202 void StoreStubCompiler::GenerateStoreViaSetter(
1203 MacroAssembler* masm,
1204 Handle<HeapType> type,
1206 Handle<JSFunction> setter) {
1207 // ----------- S t a t e -------------
1208 // -- lr : return address
1209 // -----------------------------------
1211 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
1213 // Save value register, so we can restore it later.
1216 if (!setter.is_null()) {
1217 // Call the JavaScript setter with receiver and value on the stack.
1218 if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
1219 // Swap in the global receiver.
1222 receiver, JSGlobalObject::kGlobalReceiverOffset));
1224 __ Push(receiver, value());
1225 ParameterCount actual(1);
1226 ParameterCount expected(setter);
1227 __ InvokeFunction(setter, expected, actual,
1228 CALL_FUNCTION, NullCallWrapper());
// NOTE(review): the `} else {` between the call path and the
// deopt-only path appears to have been elided from this excerpt.
1230 // If we generate a global code snippet for deoptimization only, remember
1231 // the place to continue after deoptimization.
1232 masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
1235 // We have to return the passed value, not the return value of the setter.
1238 // Restore context register.
1239 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
1246 #define __ ACCESS_MASM(masm())
// Compiles a store handler for a property guarded by a named interceptor:
// pushes receiver, name and value, then tail-calls the
// kStoreInterceptorProperty runtime entry with those 3 arguments.
1249 Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
1250 Handle<JSObject> object,
1251 Handle<Name> name) {
1252 __ Push(receiver(), this->name(), value());
1254 // Do tail-call to the runtime system.
1255 ExternalReference store_ic_property =
1256 ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate());
// 3 = arguments pushed above, 1 = result size.
1257 __ TailCallExternalReference(store_ic_property, 3, 1);
1259 // Return the generated code.
1260 return GetCode(kind(), Code::FAST, name);
// Compiles a load handler for a property known to be absent on |last|'s
// prototype chain: after NonexistentHandlerFrontend validates the chain,
// the stub returns undefined in r0.
1264 Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<HeapType> type,
1265 Handle<JSObject> last,
1266 Handle<Name> name) {
1267 NonexistentHandlerFrontend(type, last, name);
1269 // Return undefined if maps of the full prototype chain are still the
1270 // same and no global property with this name contains a value.
1271 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
1274 // Return the generated code.
1275 return GetCode(kind(), Code::FAST, name);
// Fixed register assignment for LoadIC handler stubs (r0 = receiver,
// r2 = name, remainder are scratch registers).
1279 Register* LoadStubCompiler::registers() {
1280 // receiver, name, scratch1, scratch2, scratch3, scratch4.
1281 static Register registers[] = { r0, r2, r3, r1, r4, r5 };
// Fixed register assignment for KeyedLoadIC handler stubs (r1 = receiver,
// r0 = name/key, remainder are scratch registers).
1287 Register* KeyedLoadStubCompiler::registers() {
1288 // receiver, name, scratch1, scratch2, scratch3, scratch4.
1289 static Register registers[] = { r1, r0, r2, r3, r4, r5 };
// Accessor for the fixed register carrying the value in store stubs.
// NOTE(review): the body (return statement) is elided in this excerpt —
// confirm which register it designates against the full source.
1293 Register StoreStubCompiler::value() {
// Fixed register assignment for StoreIC handler stubs (r1 = receiver,
// r2 = name, remainder are scratch registers).
1298 Register* StoreStubCompiler::registers() {
1299 // receiver, name, scratch1, scratch2, scratch3.
1300 static Register registers[] = { r1, r2, r3, r4, r5 };
// Fixed register assignment for KeyedStoreIC handler stubs (r2 = receiver,
// r1 = name/key, remainder are scratch registers).
1305 Register* KeyedStoreStubCompiler::registers() {
1306 // receiver, name, scratch1, scratch2, scratch3.
1307 static Register registers[] = { r2, r1, r3, r4, r5 };
1313 #define __ ACCESS_MASM(masm)
// Static code snippet that loads a property by calling its JavaScript
// |getter| with the receiver on the stack, inside an internal frame.
// When |getter| is null, only the deoptimization continuation PC is
// recorded (the snippet then exists purely so the deoptimizer has a
// place to resume).
1316 void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm,
1317 Handle<HeapType> type,
1319 Handle<JSFunction> getter) {
1320 // ----------- S t a t e -------------
1323 // -- lr : return address
1324 // -----------------------------------
1326 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
1328 if (!getter.is_null()) {
1329 // Call the JavaScript getter with the receiver on the stack.
1330 if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
1331 // Swap in the global receiver.
1334 receiver, JSGlobalObject::kGlobalReceiverOffset));
1337 ParameterCount actual(0);
1338 ParameterCount expected(getter);
1339 __ InvokeFunction(getter, expected, actual,
1340 CALL_FUNCTION, NullCallWrapper());
// NOTE(review): the `} else {` between the call path and the
// deopt-only path appears to have been elided from this excerpt.
1342 // If we generate a global code snippet for deoptimization only, remember
1343 // the place to continue after deoptimization.
1344 masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
1347 // Restore context register.
1348 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
1355 #define __ ACCESS_MASM(masm())
// Compiles a load handler for a global property that reads the value
// straight out of its PropertyCell.  If the property is deletable
// (!is_dont_delete) the loaded value is checked against the_hole and the
// stub misses when the cell has been emptied.
1358 Handle<Code> LoadStubCompiler::CompileLoadGlobal(
1359 Handle<HeapType> type,
1360 Handle<GlobalObject> global,
1361 Handle<PropertyCell> cell,
1363 bool is_dont_delete) {
1365 HandlerFrontendHeader(type, receiver(), global, name, &miss);
1367 // Get the value from the cell.
1368 __ mov(r3, Operand(cell));
1369 __ ldr(r4, FieldMemOperand(r3, Cell::kValueOffset));
1371 // Check for deleted property if property can actually be deleted.
1372 if (!is_dont_delete) {
// NOTE(review): the cmp/branch against the_hole appears to have been
// elided from this excerpt.
1373 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
1378 Counters* counters = isolate()->counters();
1379 __ IncrementCounter(counters->named_load_global_stub(), 1, r1, r3);
1383 HandlerFrontendFooter(name, &miss);
1385 // Return the generated code.
1386 return GetCode(kind(), Code::NORMAL, name);
// Compiles a polymorphic IC stub: compares the receiver's map against
// each non-deprecated map in |types| and jumps (condition eq) to the
// matching handler; smi receivers go to the Number case when one exists,
// otherwise to miss.  Keyed ICs with a PROPERTY check also verify the
// name matches first.  Falls through to the miss builtin when nothing
// matches.  The IC state is MONOMORPHIC when exactly one map is handled,
// POLYMORPHIC otherwise.
1390 Handle<Code> BaseLoadStoreStubCompiler::CompilePolymorphicIC(
1391 TypeHandleList* types,
1392 CodeHandleList* handlers,
1394 Code::StubType type,
1395 IcCheckType check) {
1398 if (check == PROPERTY &&
1399 (kind() == Code::KEYED_LOAD_IC || kind() == Code::KEYED_STORE_IC)) {
// Keyed ICs dispatch on the name as well as the map.
1400 __ cmp(this->name(), Operand(name));
1405 Label* smi_target = IncludesNumberType(types) ? &number_case : &miss;
1406 __ JumpIfSmi(receiver(), smi_target);
1408 Register map_reg = scratch1();
1410 int receiver_count = types->length();
1411 int number_of_handled_maps = 0;
1412 __ ldr(map_reg, FieldMemOperand(receiver(), HeapObject::kMapOffset));
1413 for (int current = 0; current < receiver_count; ++current) {
1414 Handle<HeapType> type = types->at(current);
1415 Handle<Map> map = IC::TypeToMap(*type, isolate());
// Deprecated maps are skipped entirely; their handler is unreachable.
1416 if (!map->is_deprecated()) {
1417 number_of_handled_maps++;
1418 __ mov(ip, Operand(map));
1419 __ cmp(map_reg, ip);
1420 if (type->Is(HeapType::Number())) {
1421 ASSERT(!number_case.is_unused());
// Smi receivers from JumpIfSmi above land here for the Number map.
1422 __ bind(&number_case);
1424 __ Jump(handlers->at(current), RelocInfo::CODE_TARGET, eq);
1427 ASSERT(number_of_handled_maps != 0);
1430 TailCallBuiltin(masm(), MissBuiltin(kind()));
1432 // Return the generated code.
1433 InlineCacheState state =
1434 number_of_handled_maps > 1 ? POLYMORPHIC : MONOMORPHIC;
1435 return GetICCode(kind(), type, name, state);
// Emits a tail call to the kStoreIC_ArrayLength runtime entry with the
// receiver and the new length value as its 2 arguments.
1439 void StoreStubCompiler::GenerateStoreArrayLength() {
1440 // Prepare tail call to StoreIC_ArrayLength.
1441 __ Push(receiver(), value());
1443 ExternalReference ref =
1444 ExternalReference(IC_Utility(IC::kStoreIC_ArrayLength),
// 2 = arguments pushed above, 1 = result size.
1446 __ TailCallExternalReference(ref, 2, 1);
// Compiles a polymorphic keyed-store stub: compares the receiver's map
// against each entry in |receiver_maps|.  A match with a null transition
// jumps straight to the handler; a match with a transition loads the
// target map into transition_map() and jumps to the handler
// unconditionally.  Non-matches fall through to the miss builtin.
1450 Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
1451 MapHandleList* receiver_maps,
1452 CodeHandleList* handler_stubs,
1453 MapHandleList* transitioned_maps) {
1455 __ JumpIfSmi(receiver(), &miss);
1457 int receiver_count = receiver_maps->length();
1458 __ ldr(scratch1(), FieldMemOperand(receiver(), HeapObject::kMapOffset));
1459 for (int i = 0; i < receiver_count; ++i) {
1460 __ mov(ip, Operand(receiver_maps->at(i)));
1461 __ cmp(scratch1(), ip);
1462 if (transitioned_maps->at(i).is_null()) {
// No map transition needed: dispatch directly on map equality.
1463 __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET, eq);
1466 __ b(ne, &next_map);
1467 __ mov(transition_map(), Operand(transitioned_maps->at(i)));
1468 __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET, al);
1474 TailCallBuiltin(masm(), MissBuiltin(kind()));
1476 // Return the generated code.
1478 kind(), Code::NORMAL, factory()->empty_string(), POLYMORPHIC);
1483 #define __ ACCESS_MASM(masm)
// Static stub body for loading an element from a dictionary-mode
// (NumberDictionary) elements backing store: untags the smi key, probes
// the dictionary, and falls back to the KeyedLoadIC slow builtin on probe
// failure or the miss builtin for non-smi keys.
// NOTE(review): the declaration of the `key` register (presumably r0)
// appears to have been elided from this excerpt — confirm against the
// full source.
1486 void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
1487 MacroAssembler* masm) {
1488 // ---------- S t a t e --------------
1489 // -- lr : return address
1492 // -----------------------------------
1496 Register receiver = r1;
1498 __ UntagAndJumpIfNotSmi(r2, key, &miss);
1499 __ ldr(r4, FieldMemOperand(receiver, JSObject::kElementsOffset));
1500 __ LoadFromNumberDictionary(&slow, r4, key, r0, r2, r3, r5);
1504 __ IncrementCounter(
1505 masm->isolate()->counters()->keyed_load_external_array_slow(),
1508 // ---------- S t a t e --------------
1509 // -- lr : return address
1512 // -----------------------------------
// Slow case: element exists but needs the runtime to load it.
1513 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Slow);
1515 // Miss case, call the runtime.
1518 // ---------- S t a t e --------------
1519 // -- lr : return address
1522 // -----------------------------------
1523 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss);
1529 } } // namespace v8::internal
1531 #endif // V8_TARGET_ARCH_ARM