// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_ARM64
#include "src/codegen.h"
#include "src/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)
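// ACCESS_MASM(masm) expands to "masm->", so a line such as
//   __ Ldr(result, GlobalObjectMemOperand());
// is shorthand for masm->Ldr(...). Every __-prefixed statement in this file
// emits ARM64 instructions into the MacroAssembler buffer at builtin
// generation time; nothing is executed here.
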
// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the native context.
  __ Ldr(result, GlobalObjectMemOperand());
  __ Ldr(result,
         FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the Array function from the native context.
  __ Ldr(result,
         MemOperand(result,
                    Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
}


// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the native context.
  __ Ldr(result, GlobalObjectMemOperand());
  __ Ldr(result,
         FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the InternalArray function from the native context.
  __ Ldr(result, ContextMemOperand(result,
                                   Context::INTERNAL_ARRAY_FUNCTION_INDEX));
}

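// Note: ContextMemOperand(reg, index) is just a MemOperand addressed at
// Context::SlotOffset(index), so this helper and GenerateLoadArrayFunction
// above perform the same two-step walk: current global object -> native
// context -> builtin function slot.

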
void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- x0                 : number of arguments excluding receiver
  //  -- x1                 : called function (only guaranteed when
  //                          extra_args requires it)
  //  -- cp                 : context
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[8 * (argc - 1)] : first argument (argc == x0)
  //  -- sp[8 * argc]       : receiver
  // -----------------------------------
  // Insert extra arguments.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    num_extra_args = 1;
    __ Push(x1);
  } else {
    DCHECK(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects x0 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ Add(x0, x0, num_extra_args + 1);
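  // (For example, a builtin called with two explicit arguments and
  // extra_args == NEEDS_CALLED_FUNCTION enters with x0 == 2; the Add above
  // produces x0 == 2 + 1 (num_extra_args) + 1 (receiver) == 4, the total
  // slot count the C entry code expects.)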
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}


void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_InternalArrayCode");
  Label generic_array_code;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, x1);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ Ldr(x10, FieldMemOperand(x1, JSFunction::kPrototypeOrInitialMapOffset));
    __ Tst(x10, kSmiTagMask);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction);
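    // (Smis carry kSmiTag == 0 in the low tag bit, so the Tst above sets the
    // Z flag exactly when x10 is a smi; asserting ne therefore checks that
    // x10 is a heap object before its type is compared against MAP_TYPE.)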
    __ CompareObjectType(x10, x11, x12, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_ArrayCode");
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, x1);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ Ldr(x10, FieldMemOperand(x1, JSFunction::kPrototypeOrInitialMapOffset));
    __ Tst(x10, kSmiTagMask);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
    __ CompareObjectType(x10, x11, x12, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
  }

  // Run the native code for the Array function called as a normal function.
  __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0                     : number of arguments
  //  -- x1                     : constructor function
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_StringConstructCode");
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_ctor_calls(), 1, x10, x11);

  Register argc = x0;
  Register function = x1;
  if (FLAG_debug_code) {
    __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, x10);
    __ Cmp(function, x10);
    __ Assert(eq, kUnexpectedStringFunction);
  }

  // Load the first argument into x0 and get rid of the rest.
  Label no_arguments;
  __ Cbz(argc, &no_arguments);
  // First arg = sp[(argc - 1) * 8].
  __ Sub(argc, argc, 1);
  __ Drop(argc, kXRegSize);
  // jssp now points to args[0]; load and then drop args[0] + receiver.
  Register arg = x0;
  __ Ldr(arg, MemOperand(jssp, 2 * kPointerSize, PostIndex));

  Register argument = x2;
  Label not_cached, argument_is_string;
  __ LookupNumberStringCache(arg,        // Input.
                             argument,   // Result.
                             x10,        // Scratch.
                             x11,        // Scratch.
                             x12,        // Scratch.
                             &not_cached);
  __ IncrementCounter(counters->string_ctor_cached_number(), 1, x10, x11);
  __ Bind(&argument_is_string);

  // ----------- S t a t e -------------
  //  -- x2     : argument converted to string
  //  -- x1     : constructor function
  //  -- lr     : return address
  // -----------------------------------

  Label gc_required;
  Register new_obj = x0;
  __ Allocate(JSValue::kSize, new_obj, x10, x11, &gc_required, TAG_OBJECT);

  // Initialize the String object.
  Register map = x3;
  __ LoadGlobalFunctionInitialMap(function, map, x10);
  if (FLAG_debug_code) {
    __ Ldrb(x4, FieldMemOperand(map, Map::kInstanceSizeOffset));
    __ Cmp(x4, JSValue::kSize >> kPointerSizeLog2);
    __ Assert(eq, kUnexpectedStringWrapperInstanceSize);
    __ Ldrb(x4, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
    __ Cmp(x4, 0);
    __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper);
  }
  __ Str(map, FieldMemOperand(new_obj, HeapObject::kMapOffset));

  Register empty = x3;
  __ LoadRoot(empty, Heap::kEmptyFixedArrayRootIndex);
  __ Str(empty, FieldMemOperand(new_obj, JSObject::kPropertiesOffset));
  __ Str(empty, FieldMemOperand(new_obj, JSObject::kElementsOffset));

  __ Str(argument, FieldMemOperand(new_obj, JSValue::kValueOffset));

  // Ensure the object is fully initialized.
  STATIC_ASSERT(JSValue::kSize == (4 * kPointerSize));
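  // (A JSValue is exactly four words: map, properties, elements, and value.
  // All four were stored above, so no filler is needed for the GC to walk
  // the object safely.)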
  __ Ret();

  // The argument was not found in the number to string cache. Check
  // if it's a string already before calling the conversion builtin.
  Label convert_argument;
  __ Bind(&not_cached);
  __ JumpIfSmi(arg, &convert_argument);

  // Is it a String?
  __ Ldr(x10, FieldMemOperand(x0, HeapObject::kMapOffset));
  __ Ldrb(x11, FieldMemOperand(x10, Map::kInstanceTypeOffset));
  __ Tbnz(x11, MaskToBit(kIsNotStringMask), &convert_argument);
  __ Mov(argument, arg);
  __ IncrementCounter(counters->string_ctor_string_value(), 1, x10, x11);
  __ B(&argument_is_string);

  // Invoke the conversion builtin and put the result into x2.
  __ Bind(&convert_argument);
  __ Push(function);  // Preserve the function.
  __ IncrementCounter(counters->string_ctor_conversions(), 1, x10, x11);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(arg);
    __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
  }
  __ Pop(function);
  __ Mov(argument, x0);
  __ B(&argument_is_string);

  // Load the empty string into x2, remove the receiver from the
  // stack, and jump back to the case where the argument is a string.
  __ Bind(&no_arguments);
  __ LoadRoot(argument, Heap::kempty_stringRootIndex);
  __ Drop(1);
  __ B(&argument_is_string);

  // At this point the argument is already a string. Call runtime to create a
  // string wrapper.
  __ Bind(&gc_required);
  __ IncrementCounter(counters->string_ctor_gc_required(), 1, x10, x11);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(argument);
    __ CallRuntime(Runtime::kNewStringWrapper, 1);
  }
  __ Ret();
}


static void CallRuntimePassFunction(MacroAssembler* masm,
                                    Runtime::FunctionId function_id) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  //   - Push a copy of the function onto the stack.
  //   - Push another copy as a parameter to the runtime call.
  __ Push(x1, x1);

  __ CallRuntime(function_id, 1);

  //   - Restore the function.
  __ Pop(x1);
}


static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(x2, FieldMemOperand(x2, SharedFunctionInfo::kCodeOffset));
  __ Add(x2, x2, Code::kHeaderSize - kHeapObjectTag);
  __ Br(x2);
}


static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
  __ Add(x0, x0, Code::kHeaderSize - kHeapObjectTag);
  __ Br(x0);
}


void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However, not
  // checking may delay installing ready functions, and always checking would
  // be quite expensive. A good compromise is to first check against the stack
  // limit as a cue for an interrupt signal.
  Label ok;
  __ CompareRoot(masm->StackPointer(), Heap::kStackLimitRootIndex);
  __ B(hs, &ok);

  CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
  GenerateTailCallToReturnedCode(masm);

  __ Bind(&ok);
  GenerateTailCallToSharedCode(masm);
}


static void Generate_Runtime_NewObject(MacroAssembler* masm,
                                       bool create_memento,
                                       Register original_constructor,
                                       Label* count_incremented,
                                       Label* allocated) {
  if (create_memento) {
    // Get the cell or allocation site.
    __ Peek(x4, 2 * kXRegSize);
    __ Push(x4);
    __ Push(x1);  // Argument for Runtime_NewObject.
    __ Push(original_constructor);
    __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 3);
    __ Mov(x4, x0);
    // If we ended up using the runtime, and we want a memento, then the
    // runtime call made it for us, and we shouldn't do the create count
    // increment.
    __ jmp(count_incremented);
  } else {
    __ Push(x1);  // Argument for Runtime_NewObject.
    __ Push(original_constructor);
    __ CallRuntime(Runtime::kNewObject, 2);
    __ Mov(x4, x0);
    __ jmp(allocated);
  }
}


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_memento) {
  // ----------- S t a t e -------------
  //  -- x0     : number of arguments
  //  -- x1     : constructor function
  //  -- x2     : allocation site or undefined
  //  -- x3     : original constructor
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  ASM_LOCATION("Builtins::Generate_JSConstructStubHelper");
  // Should never create mementos for api functions.
  DCHECK(!is_api_function || !create_memento);

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the three incoming parameters on the stack.
    if (create_memento) {
      __ AssertUndefinedOrAllocationSite(x2, x10);
      __ Push(x2);
    }

    Register argc = x0;
    Register constructor = x1;
    Register original_constructor = x3;
    // x1: constructor function
    __ SmiTag(argc);
    __ Push(argc, constructor);
    // sp[0]: constructor function
    // sp[1]: number of arguments (smi-tagged)

    Label rt_call, count_incremented, allocated, normal_new;
    __ Cmp(constructor, original_constructor);
    __ B(eq, &normal_new);
    Generate_Runtime_NewObject(masm, create_memento, original_constructor,
                               &count_incremented, &allocated);

    __ Bind(&normal_new);

    // Try to allocate the object without transitioning into C code. If any of
    // the preconditions is not met, the code bails out to the runtime call.
    if (FLAG_inline_new) {
      Label undo_allocation;
      ExternalReference debug_step_in_fp =
          ExternalReference::debug_step_in_fp_address(isolate);
      __ Mov(x2, Operand(debug_step_in_fp));
      __ Ldr(x2, MemOperand(x2));
      __ Cbnz(x2, &rt_call);
      // Load the initial map and verify that it is in fact a map.
      Register init_map = x2;
      __ Ldr(init_map,
             FieldMemOperand(constructor,
                             JSFunction::kPrototypeOrInitialMapOffset));
      __ JumpIfSmi(init_map, &rt_call);
      __ JumpIfNotObjectType(init_map, x10, x11, MAP_TYPE, &rt_call);

      // Check that the constructor is not constructing a JSFunction (see
      // comments in Runtime_NewObject in runtime.cc); in that case the
      // initial map's instance type would be JS_FUNCTION_TYPE.
      __ CompareInstanceType(init_map, x10, JS_FUNCTION_TYPE);
      __ B(eq, &rt_call);

      Register construction_count = x14;
      if (!is_api_function) {
        Label allocate;
        MemOperand bit_field3 =
            FieldMemOperand(init_map, Map::kBitField3Offset);
        // Check if slack tracking is enabled.
        __ Ldr(x4, bit_field3);
        __ DecodeField<Map::Counter>(construction_count, x4);
        __ Cmp(construction_count, Operand(Map::kSlackTrackingCounterEnd));
        __ B(lt, &allocate);
        // Decrease generous allocation count.
        __ Subs(x4, x4, Operand(1 << Map::Counter::kShift));
        __ Str(x4, bit_field3);
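        // (Map::Counter is a bit field packed into bit_field3; subtracting
        // 1 << Map::Counter::kShift decrements just that field in place,
        // leaving the neighbouring bit fields untouched as long as the
        // counter does not underflow.)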
        __ Cmp(construction_count, Operand(Map::kSlackTrackingCounterEnd));
        __ B(ne, &allocate);

        // Push the constructor and map to the stack, and the constructor
        // again as an argument to the runtime call.
        __ Push(constructor, init_map, constructor);
        __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
        __ Pop(init_map, constructor);
        __ Mov(construction_count, Operand(Map::kSlackTrackingCounterEnd - 1));
        __ Bind(&allocate);
      }

      // Now allocate the JSObject on the heap.
      Register obj_size = x3;
      Register new_obj = x4;
      __ Ldrb(obj_size, FieldMemOperand(init_map, Map::kInstanceSizeOffset));
      if (create_memento) {
        __ Add(x7, obj_size,
               Operand(AllocationMemento::kSize / kPointerSize));
        __ Allocate(x7, new_obj, x10, x11, &rt_call, SIZE_IN_WORDS);
      } else {
        __ Allocate(obj_size, new_obj, x10, x11, &rt_call, SIZE_IN_WORDS);
      }

      // Allocated the JSObject, now initialize the fields. Map is set to the
      // initial map, and properties and elements are set to the empty fixed
      // array.
      // NB. the object pointer is not tagged, so MemOperand is used.
      Register empty = x5;
      __ LoadRoot(empty, Heap::kEmptyFixedArrayRootIndex);
      __ Str(init_map, MemOperand(new_obj, JSObject::kMapOffset));
      STATIC_ASSERT(JSObject::kElementsOffset ==
          (JSObject::kPropertiesOffset + kPointerSize));
      __ Stp(empty, empty, MemOperand(new_obj, JSObject::kPropertiesOffset));

      Register first_prop = x5;
      __ Add(first_prop, new_obj, JSObject::kHeaderSize);

      // Fill all of the in-object properties with the appropriate filler.
      Register filler = x7;
      __ LoadRoot(filler, Heap::kUndefinedValueRootIndex);

      // Obtain number of pre-allocated property fields and in-object
      // properties.
      Register prealloc_fields = x10;
      Register inobject_props = x11;
      Register inst_sizes = x11;
      __ Ldr(inst_sizes, FieldMemOperand(init_map, Map::kInstanceSizesOffset));
      __ Ubfx(prealloc_fields, inst_sizes,
              Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte,
              kBitsPerByte);
      __ Ubfx(inobject_props, inst_sizes,
              Map::kInObjectPropertiesByte * kBitsPerByte, kBitsPerByte);

      // Calculate number of property fields in the object.
      Register prop_fields = x6;
      __ Sub(prop_fields, obj_size, JSObject::kHeaderSize / kPointerSize);

      if (!is_api_function) {
        Label no_inobject_slack_tracking;

        // Check if slack tracking is enabled.
        __ Cmp(construction_count, Operand(Map::kSlackTrackingCounterEnd));
        __ B(lt, &no_inobject_slack_tracking);
        construction_count = NoReg;

        // Fill the pre-allocated fields with undef.
        __ FillFields(first_prop, prealloc_fields, filler);

        // Update first_prop register to be the offset of the first field
        // after the pre-allocated fields.
        __ Add(first_prop, first_prop,
               Operand(prealloc_fields, LSL, kPointerSizeLog2));

        if (FLAG_debug_code) {
          Register obj_end = x14;
          __ Add(obj_end, new_obj, Operand(obj_size, LSL, kPointerSizeLog2));
          __ Cmp(first_prop, obj_end);
          __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields);
        }

        // Fill the remaining fields with the one pointer filler map.
        __ LoadRoot(filler, Heap::kOnePointerFillerMapRootIndex);
        __ Sub(prop_fields, prop_fields, prealloc_fields);

        __ Bind(&no_inobject_slack_tracking);
      }
      if (create_memento) {
        // Fill the pre-allocated fields with undef.
        __ FillFields(first_prop, prop_fields, filler);
        __ Add(first_prop, new_obj, Operand(obj_size, LSL, kPointerSizeLog2));
        __ LoadRoot(x14, Heap::kAllocationMementoMapRootIndex);
        DCHECK_EQ(0 * kPointerSize, AllocationMemento::kMapOffset);
        __ Str(x14, MemOperand(first_prop, kPointerSize, PostIndex));
        // Load the AllocationSite.
        __ Peek(x14, 2 * kXRegSize);
        DCHECK_EQ(1 * kPointerSize, AllocationMemento::kAllocationSiteOffset);
        __ Str(x14, MemOperand(first_prop, kPointerSize, PostIndex));
      } else {
        // Fill all of the property fields with undef.
        __ FillFields(first_prop, prop_fields, filler);
      }

      // Add the object tag to make the JSObject real, so that we can continue
      // and jump into the continuation code at any time from now on. Any
      // failures need to undo the allocation, so that the heap is in a
      // consistent state and verifiable.
      __ Add(new_obj, new_obj, kHeapObjectTag);
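      // (kHeapObjectTag is 1: a tagged heap pointer is the raw address plus
      // one. From here on, fields must be accessed with FieldMemOperand,
      // which subtracts the tag again, rather than the plain MemOperands
      // used above on the untagged pointer.)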

      // Check if a non-empty properties array is needed. Continue with the
      // allocated object if not, or fall through to the runtime call if it
      // is.
      Register element_count = x3;
      __ Ldrb(element_count,
              FieldMemOperand(init_map, Map::kUnusedPropertyFieldsOffset));
      // The instance sizes field contains both pre-allocated property fields
      // and in-object properties.
      __ Add(element_count, element_count, prealloc_fields);
      __ Subs(element_count, element_count, inobject_props);
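      // (That is: element_count = unused + pre-allocated - in-object, the
      // number of property slots that do not fit inside the object itself.
      // Zero means everything fits in-object; a negative result would
      // indicate corrupt counts, which the Assert below catches.)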

      // Done if no extra properties are to be allocated.
      __ B(eq, &allocated);
      __ Assert(pl, kPropertyAllocationCountFailed);

      // Scale the number of elements by pointer size and add the header for
      // FixedArrays to the start of the next object calculation from above.
      Register new_array = x5;
      Register array_size = x6;
      __ Add(array_size, element_count, FixedArray::kHeaderSize / kPointerSize);
      __ Allocate(array_size, new_array, x11, x12, &undo_allocation,
                  static_cast<AllocationFlags>(RESULT_CONTAINS_TOP |
                                               SIZE_IN_WORDS));

      Register array_map = x10;
      __ LoadRoot(array_map, Heap::kFixedArrayMapRootIndex);
      __ Str(array_map, MemOperand(new_array, FixedArray::kMapOffset));
      __ SmiTag(x0, element_count);
      __ Str(x0, MemOperand(new_array, FixedArray::kLengthOffset));

      // Initialize the fields to undefined.
      Register elements = x10;
      __ Add(elements, new_array, FixedArray::kHeaderSize);
      __ FillFields(elements, element_count, filler);

      // Store the initialized FixedArray into the properties field of the
      // JSObject.
      __ Add(new_array, new_array, kHeapObjectTag);
      __ Str(new_array, FieldMemOperand(new_obj, JSObject::kPropertiesOffset));

      // Continue with JSObject being successfully allocated.
      __ B(&allocated);

      // Undo the setting of the new top so that the heap is verifiable. For
      // example, the map's unused properties potentially do not match the
      // allocated object's unused properties.
      __ Bind(&undo_allocation);
      __ UndoAllocationInNewSpace(new_obj, x14);
    }

    // Allocate the new receiver object using the runtime call.
    __ Bind(&rt_call);
    Generate_Runtime_NewObject(masm, create_memento, constructor,
                               &count_incremented, &allocated);

    // Receiver for constructor call allocated.
    // x4: JSObject
    __ Bind(&allocated);

    if (create_memento) {
      __ Peek(x10, 2 * kXRegSize);
      __ JumpIfRoot(x10, Heap::kUndefinedValueRootIndex, &count_incremented);
      // x10 holds an AllocationSite. We are creating a memento from it, so we
      // need to increment the memento create count.
      __ Ldr(x5, FieldMemOperand(x10,
                                 AllocationSite::kPretenureCreateCountOffset));
      __ Add(x5, x5, Operand(Smi::FromInt(1)));
      __ Str(x5, FieldMemOperand(x10,
                                 AllocationSite::kPretenureCreateCountOffset));
      __ Bind(&count_incremented);
    }

    __ Push(x4, x4);

    // Reload the number of arguments from the stack.
    // Set it up in x0 for the function call below.
    // jssp[0]: receiver
    // jssp[1]: receiver
    // jssp[2]: constructor function
    // jssp[3]: number of arguments (smi-tagged)
    __ Peek(constructor, 2 * kXRegSize);  // Load constructor.
    __ Peek(argc, 3 * kXRegSize);         // Load number of arguments.
    __ SmiUntag(argc);

    // Set up pointer to last argument.
    __ Add(x2, fp, StandardFrameConstants::kCallerSPOffset);

    // Copy arguments and receiver to the expression stack.
    // Copy 2 values every loop to use ldp/stp.
    // x0: number of arguments
    // x1: constructor function
    // x2: address of last argument (caller sp)
    // jssp[0]: receiver
    // jssp[1]: receiver
    // jssp[2]: constructor function
    // jssp[3]: number of arguments (smi-tagged)
    // Compute the start address of the copy in x3.
    __ Add(x3, x2, Operand(argc, LSL, kPointerSizeLog2));
    Label loop, entry, done_copying_arguments;
    __ B(&entry);
    __ Bind(&loop);
    __ Ldp(x10, x11, MemOperand(x3, -2 * kPointerSize, PreIndex));
    __ Push(x11, x10);
    __ Bind(&entry);
    __ Cmp(x3, x2);
    __ B(gt, &loop);
    // Because we copied values 2 by 2 we may have copied one extra value.
    // Drop it if that is the case.
    __ B(eq, &done_copying_arguments);
    __ Drop(1);
    __ Bind(&done_copying_arguments);

    // Call the function.
    // x0: number of arguments
    // x1: constructor function
    if (is_api_function) {
      __ Ldr(cp, FieldMemOperand(constructor, JSFunction::kContextOffset));
      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      __ Call(code, RelocInfo::CODE_TARGET);
    } else {
      ParameterCount actual(argc);
      __ InvokeFunction(constructor, actual, CALL_FUNCTION, NullCallWrapper());
    }

    // Store offset of return address for deoptimizer.
    if (!is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore the context from the frame.
    // x0: result
    // jssp[0]: receiver
    // jssp[1]: constructor function
    // jssp[2]: number of arguments (smi-tagged)
    __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7.
    Label use_receiver, exit;

    // If the result is a smi, it is *not* an object in the ECMA sense.
    // x0: result
    // jssp[0]: receiver (newly allocated object)
    // jssp[1]: constructor function
    // jssp[2]: number of arguments (smi-tagged)
    __ JumpIfSmi(x0, &use_receiver);

    // If the type of the result (stored in its map) is less than
    // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
    __ JumpIfObjectType(x0, x1, x3, FIRST_SPEC_OBJECT_TYPE, &exit, ge);
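    // (This implements the JavaScript rule that a constructor returning a
    // non-object is ignored: "function C() { return 42; }" yields the freshly
    // allocated receiver from "new C()", whereas
    // "function C() { return {a: 1}; }" yields the returned object.)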

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ Bind(&use_receiver);
    __ Peek(x0, 0);

    // Remove the receiver from the stack, remove caller arguments, and
    // return.
    __ Bind(&exit);
    // x0: result
    // jssp[0]: receiver (newly allocated object)
    // jssp[1]: constructor function
    // jssp[2]: number of arguments (smi-tagged)
    __ Peek(x1, 2 * kXRegSize);

    // Leave construct frame.
  }

  __ DropBySMI(x1);
  __ Drop(1);
  __ IncrementCounter(isolate->counters()->constructed_objects(), 1, x1, x2);
  __ Ret();
}


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false);
}


void Builtins::Generate_JSConstructStubForDerived(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0     : number of arguments
  //  -- x1     : constructor function
  //  -- x2     : allocation site or undefined
  //  -- x3     : original constructor
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_JSConstructStubForDerived");

  // TODO(dslomov): support pretenuring
  CHECK(!FLAG_pretenuring_call_new);

  {
    FrameScope frame_scope(masm, StackFrame::CONSTRUCT);
    __ SmiTag(x4, x0);
    __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
    __ Push(x4, x3, x10);
    // sp[0]: receiver (the hole)
    // sp[1]: new.target
    // sp[2]: number of arguments (smi-tagged)

    // Set up pointer to last argument.
    __ Add(x2, fp, StandardFrameConstants::kCallerSPOffset);

    // Copy arguments and receiver to the expression stack.
    // Copy 2 values every loop to use ldp/stp.
    // x0: number of arguments
    // x1: constructor function
    // x2: address of last argument (caller sp)
    // jssp[0]: receiver
    // jssp[1]: new.target
    // jssp[2]: number of arguments (smi-tagged)
    // Compute the start address of the copy in x4.
    __ Add(x4, x2, Operand(x0, LSL, kPointerSizeLog2));
    Label loop, entry, done_copying_arguments;
    __ B(&entry);
    __ Bind(&loop);
    __ Ldp(x10, x11, MemOperand(x4, -2 * kPointerSize, PreIndex));
    __ Push(x11, x10);
    __ Bind(&entry);
    __ Cmp(x4, x2);
    __ B(gt, &loop);
    // Because we copied values 2 by 2 we may have copied one extra value.
    // Drop it if that is the case.
    __ B(eq, &done_copying_arguments);
    __ Drop(1);
    __ Bind(&done_copying_arguments);
    __ Add(x0, x0, Operand(1));  // new.target

    // Handle step-in for derived constructors.
    Label skip_step_in;
    ExternalReference debug_step_in_fp =
        ExternalReference::debug_step_in_fp_address(masm->isolate());
    __ Mov(x2, Operand(debug_step_in_fp));
    __ Ldr(x2, MemOperand(x2));
    __ Cbz(x2, &skip_step_in);

    __ Push(x1, x1);  // Preserve the function; pass it to the runtime.
    __ CallRuntime(Runtime::kHandleStepInForDerivedConstructors, 1);
    __ Pop(x1);

    __ Bind(&skip_step_in);

    // Call the function.
    // x0: number of arguments
    // x1: constructor function
    ParameterCount actual(x0);
    __ InvokeFunction(x1, actual, CALL_FUNCTION, NullCallWrapper());

    // Restore the context from the frame.
    // x0: result
    // jssp[0]: number of arguments (smi-tagged)
    __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    // Load number of arguments (smi).
    // x1: number of arguments
    __ Peek(x1, 0);

    // Leave construct frame.
  }

  __ DropBySMI(x1);
  __ Drop(1);
  __ Ret();
}


// Input:
//   x0: code entry.
//   x1: function.
//   x2: receiver.
//   x3: argc.
//   x4: argv.
// Output:
//   x0: result.
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from JSEntryStub::GenerateBody().
  Register function = x1;
  Register receiver = x2;
  Register argc = x3;
  Register argv = x4;

  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Clear the context before we push it when entering the internal frame.
  __ Mov(cp, 0);

  {
    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context from the function argument.
    __ Ldr(cp, FieldMemOperand(function, JSFunction::kContextOffset));

    __ InitializeRootRegister();

    // Push the function and the receiver onto the stack.
    __ Push(function, receiver);

    // Copy arguments to the stack in a loop, in reverse order.
    // x3: argc.
    // x4: argv.
    Label loop, entry;
    // Compute the copy end address.
    __ Add(x10, argv, Operand(argc, LSL, kPointerSizeLog2));

    __ B(&entry);
    __ Bind(&loop);
    __ Ldr(x11, MemOperand(argv, kPointerSize, PostIndex));
    __ Ldr(x12, MemOperand(x11));  // Dereference the handle.
    __ Push(x12);  // Push the argument.
    __ Bind(&entry);
    __ Cmp(x10, argv);
    __ B(ne, &loop);

    // Initialize all JavaScript callee-saved registers, since they will be
    // seen by the garbage collector as part of handlers.
    // The original values have been saved in JSEntryStub::GenerateBody().
    __ LoadRoot(x19, Heap::kUndefinedValueRootIndex);
    __ Mov(x20, x19);
    __ Mov(x21, x19);
    __ Mov(x22, x19);
    __ Mov(x23, x19);
    __ Mov(x24, x19);
    __ Mov(x25, x19);
    // Don't initialize the reserved registers.
    // x26 : root register (root).
    // x27 : context pointer (cp).
    // x28 : JS stack pointer (jssp).
    // x29 : frame pointer (fp).

    __ Mov(x0, argc);
    if (is_construct) {
      // No type feedback cell is available.
      __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);

      CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
      __ CallStub(&stub);
    } else {
      ParameterCount actual(x0);
      __ InvokeFunction(function, actual, CALL_FUNCTION, NullCallWrapper());
    }

    // Exit the JS internal frame and remove the parameters (except function),
    // and return.
  }

  // Result is in x0. Return.
  __ Ret();
}


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}


void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kCompileLazy);
  GenerateTailCallToReturnedCode(masm);
}


static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  Register function = x1;

  // Preserve the function. At the same time, push arguments for
  // kCompileOptimized.
  __ LoadObject(x10, masm->isolate()->factory()->ToBoolean(concurrent));
  __ Push(function, function, x10);

  __ CallRuntime(Runtime::kCompileOptimized, 2);

  // Restore the function.
  __ Pop(function);
}


void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  CallCompileOptimized(masm, false);
  GenerateTailCallToReturnedCode(masm);
}


void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  CallCompileOptimized(masm, true);
  GenerateTailCallToReturnedCode(masm);
}


static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code fast, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // The following caller-saved registers must be saved and restored when
  // calling through to the runtime:
  //   x0 - The address from which to resume execution.
  //   x1 - isolate
  //   lr - The return address for the JSFunction itself. It has not yet been
  //        preserved on the stack because the frame setup code was replaced
  //        with a call to this stub, to handle code ageing.
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ Push(x0, x1, fp, lr);
    __ Mov(x1, ExternalReference::isolate_address(masm->isolate()));
    __ CallCFunction(
        ExternalReference::get_make_code_young_function(masm->isolate()), 2);
    __ Pop(lr, fp, x1, x0);
  }

  // The calling function has been made young again, so return to execute the
  // real frame set-up code.
  __ Br(x0);
}


#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}                                                            \
void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR


void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the
  // fact that make_code_young doesn't do any garbage collection which allows
  // us to save/restore the registers without worrying about which of them
  // contain pointers.

  // The following caller-saved registers must be saved and restored when
  // calling through to the runtime:
  //   x0 - The address from which to resume execution.
  //   x1 - isolate
  //   lr - The return address for the JSFunction itself. It has not yet been
  //        preserved on the stack because the frame setup code was replaced
  //        with a call to this stub, to handle code ageing.
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ Push(x0, x1, fp, lr);
    __ Mov(x1, ExternalReference::isolate_address(masm->isolate()));
    __ CallCFunction(
        ExternalReference::get_mark_code_as_executed_function(
            masm->isolate()), 2);
    __ Pop(lr, fp, x1, x0);

    // Perform prologue operations usually performed by the young code stub.
    __ EmitFrameSetupForCodeAgePatching(masm);
  }

  // Jump to point after the code-age stub.
  __ Add(x0, x0, kNoCodeAgeSequenceLength);
  __ Br(x0);
}


void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}


static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification; this is important for compiled
    // stubs that tail call the runtime on deopts, passing their parameters in
    // registers.
    // TODO(jbramley): Is it correct (and appropriate) to use safepoint
    // registers here? According to the comment above, we should only need to
    // preserve the registers with parameters.
    __ PushXRegList(kSafepointSavedRegisters);
    // Pass the function and deoptimization type to the runtime system.
    __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
    __ PopXRegList(kSafepointSavedRegisters);
  }

  // Ignore state (pushed by Deoptimizer::EntryGenerator::Generate).
  __ Drop(1);

  // Jump to the miss handler. Deoptimizer::EntryGenerator::Generate loads
  // this address into lr before it jumps here.
  __ Br(lr);
}


void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}


void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}


static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass the deoptimization type to the runtime system.
    __ Mov(x0, Smi::FromInt(static_cast<int>(type)));
    __ Push(x0);
    __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
  }

  // Get the full codegen state from the stack and untag it.
  Register state = x6;
  __ Peek(state, 0);
  __ SmiUntag(state);

  // Switch on the state.
  Label with_tos_register, unknown_state;
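  // NO_REGISTERS means only the state word was pushed by the deoptimizer;
  // TOS_REG means the top-of-stack value (the accumulator) was pushed
  // underneath it as well, which is why the two cases below drop one and two
  // slots respectively.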
  __ CompareAndBranch(
      state, FullCodeGenerator::NO_REGISTERS, ne, &with_tos_register);
  __ Drop(1);  // Remove state.
  __ Ret();

  __ Bind(&with_tos_register);
  // Reload TOS register.
  __ Peek(x0, kPointerSize);
  __ CompareAndBranch(state, FullCodeGenerator::TOS_REG, ne, &unknown_state);
  __ Drop(2);  // Remove state and TOS.
  __ Ret();

  __ Bind(&unknown_state);
  __ Abort(kInvalidFullCodegenState);
}


void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}


void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ Ldr(x0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ Push(x0);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
  }

  // If the code object is null, just return to the unoptimized code.
  Label skip;
  __ CompareAndBranch(x0, Smi::FromInt(0), ne, &skip);
  __ Ret();

  __ Bind(&skip);

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ Ldr(x1, MemOperand(x0, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
  __ Ldrsw(w1, UntagSmiFieldMemOperand(x1, FixedArray::OffsetOfElementAt(
      DeoptimizationInputData::kOsrPcOffsetIndex)));

  // Compute the target address = code_obj + header_size + osr_offset
  // <entry_addr> = <code_obj> + #header_size + <osr_offset>
  __ Add(x0, x0, x1);
  __ Add(lr, x0, Code::kHeaderSize - kHeapObjectTag);
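  // (Ret() branches to lr, so loading lr with the OSR entry address turns
  // the "return" below into an absolute jump into the optimized code.)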

  // And "return" to the OSR entry point of the function.
  __ Ret();
}


void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  // We check the stack limit as an indicator that recompilation might be
  // done.
  Label ok;
  __ CompareRoot(jssp, Heap::kStackLimitRootIndex);
  __ B(hs, &ok);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kStackGuard, 0);
  }
  __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
          RelocInfo::CODE_TARGET);

  __ Bind(&ok);
  __ Ret();
}


void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  enum {
    call_type_JS_func = 0,
    call_type_func_proxy = 1,
    call_type_non_func = 2
  };
  Register argc = x0;
  Register function = x1;
  Register call_type = x4;
  Register scratch1 = x10;
  Register scratch2 = x11;
  Register receiver_type = x13;

  ASM_LOCATION("Builtins::Generate_FunctionCall");
  // 1. Make sure we have at least one argument.
  {
    Label done;
    __ Cbnz(argc, &done);
    __ LoadRoot(scratch1, Heap::kUndefinedValueRootIndex);
    __ Push(scratch1);
    __ Mov(argc, 1);
    __ Bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack, check
  //    if it is a function.
  Label slow, non_function;
  __ Peek(function, Operand(argc, LSL, kXRegSizeLog2));
  __ JumpIfSmi(function, &non_function);
  __ JumpIfNotObjectType(function, scratch1, receiver_type,
                         JS_FUNCTION_TYPE, &slow);

  // 3a. Patch the first argument if necessary when calling a function.
  Label shift_arguments;
  __ Mov(call_type, static_cast<int>(call_type_JS_func));
  { Label convert_to_object, use_global_proxy, patch_receiver;
    // Change context eagerly in case we need the global receiver.
    __ Ldr(cp, FieldMemOperand(function, JSFunction::kContextOffset));

    // Do not transform the receiver for strict mode functions.
    // Also do not transform the receiver for native functions (the compiler
    // hints are read from the SharedFunctionInfo below).
    __ Ldr(scratch1,
           FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
    __ Ldr(scratch2.W(),
           FieldMemOperand(scratch1, SharedFunctionInfo::kCompilerHintsOffset));
    __ TestAndBranchIfAnySet(
        scratch2.W(),
        (1 << SharedFunctionInfo::kStrictModeFunction) |
        (1 << SharedFunctionInfo::kNative),
        &shift_arguments);

    // Compute the receiver in sloppy mode.
    Register receiver = x2;
    __ Sub(scratch1, argc, 1);
    __ Peek(receiver, Operand(scratch1, LSL, kXRegSizeLog2));
    __ JumpIfSmi(receiver, &convert_to_object);

    __ JumpIfRoot(receiver, Heap::kUndefinedValueRootIndex,
                  &use_global_proxy);
    __ JumpIfRoot(receiver, Heap::kNullValueRootIndex, &use_global_proxy);

    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ JumpIfObjectType(receiver, scratch1, scratch2,
                        FIRST_SPEC_OBJECT_TYPE, &shift_arguments, ge);

    __ Bind(&convert_to_object);

    {
      // Enter an internal frame in order to preserve argument count.
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ SmiTag(argc);

      __ Push(argc, receiver);
      __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
      __ Mov(receiver, x0);

      __ Pop(argc);
      __ SmiUntag(argc);

      // Exit the internal frame.
    }

    // Restore the function and flag in the registers.
    __ Peek(function, Operand(argc, LSL, kXRegSizeLog2));
    __ Mov(call_type, static_cast<int>(call_type_JS_func));
    __ B(&patch_receiver);

    __ Bind(&use_global_proxy);
    __ Ldr(receiver, GlobalObjectMemOperand());
    __ Ldr(receiver,
           FieldMemOperand(receiver, GlobalObject::kGlobalProxyOffset));

    __ Bind(&patch_receiver);
    __ Sub(scratch1, argc, 1);
    __ Poke(receiver, Operand(scratch1, LSL, kXRegSizeLog2));

    __ B(&shift_arguments);
  }

  // 3b. Check for function proxy.
  __ Bind(&slow);
  __ Mov(call_type, static_cast<int>(call_type_func_proxy));
  __ Cmp(receiver_type, JS_FUNCTION_PROXY_TYPE);
  __ B(eq, &shift_arguments);
  __ Bind(&non_function);
  __ Mov(call_type, static_cast<int>(call_type_non_func));

  // 3c. Patch the first argument when calling a non-function. The
  //     CALL_NON_FUNCTION builtin expects the non-function callee as
  //     receiver, so overwrite the first argument which will ultimately
  //     become the receiver.
  // call type (0: JS function, 1: function proxy, 2: non-function)
  __ Sub(scratch1, argc, 1);
  __ Poke(function, Operand(scratch1, LSL, kXRegSizeLog2));

  // 4. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver). Adjust argument count to make
  //    the original first argument the new receiver.
  // call type (0: JS function, 1: function proxy, 2: non-function)
  __ Bind(&shift_arguments);
  { Label loop;
    // Calculate the copy start address (destination). Copy end address is
    // jssp.
    __ Add(scratch2, jssp, Operand(argc, LSL, kPointerSizeLog2));
    __ Sub(scratch1, scratch2, kPointerSize);

    __ Bind(&loop);
    __ Ldr(x12, MemOperand(scratch1, -kPointerSize, PostIndex));
    __ Str(x12, MemOperand(scratch2, -kPointerSize, PostIndex));
    __ Cmp(scratch1, jssp);
    __ B(ge, &loop);
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ Sub(argc, argc, 1);
    __ Drop(1);
  }
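  // (For example, with argc == 2 and a stack of receiver, arg1, arg2 with
  // arg2 on top, the loop rewrites it to arg1, arg2, arg2; dropping the
  // duplicated top slot and decrementing argc leaves arg1, arg2 with
  // argc == 1, i.e. arg1 has become the receiver.)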

  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
  //     or a function proxy via CALL_FUNCTION_PROXY.
  // call type (0: JS function, 1: function proxy, 2: non-function)
  { Label js_function, non_proxy;
    __ Cbz(call_type, &js_function);
    // Expected number of arguments is 0 for CALL_NON_FUNCTION.
    __ Mov(x2, 0);
    __ Cmp(call_type, static_cast<int>(call_type_func_proxy));
    __ B(ne, &non_proxy);

    __ Push(function);  // Re-add proxy object as additional argument.
    __ Add(argc, argc, 1);
    __ GetBuiltinFunction(function, Builtins::CALL_FUNCTION_PROXY);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    __ Bind(&non_proxy);
    __ GetBuiltinFunction(function, Builtins::CALL_NON_FUNCTION);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
    __ Bind(&js_function);
  }

  // 5b. Get the code to call from the function and check that the number of
  //     expected arguments matches what we're providing. If so, jump
  //     (tail-call) to the code in register x3 without checking arguments.
  __ Ldr(x3, FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
  __ Ldrsw(x2,
           FieldMemOperand(x3,
                           SharedFunctionInfo::kFormalParameterCountOffset));
  Label dont_adapt_args;
  __ Cmp(x2, argc);  // Check formal and actual parameter counts.
  __ B(eq, &dont_adapt_args);
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET);
  __ Bind(&dont_adapt_args);

  __ Ldr(x3, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
  ParameterCount expected(0);
  __ InvokeCode(x3, expected, expected, JUMP_FUNCTION, NullCallWrapper());
}


void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  ASM_LOCATION("Builtins::Generate_FunctionApply");
  const int kIndexOffset =
      StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize);
  const int kLimitOffset =
      StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize);
  const int kArgsOffset = 2 * kPointerSize;
  const int kReceiverOffset = 3 * kPointerSize;
  const int kFunctionOffset = 4 * kPointerSize;

  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);

    Register args = x12;
    Register receiver = x14;
    Register function = x15;

    // Get the length of the arguments via a builtin call.
    __ Ldr(function, MemOperand(fp, kFunctionOffset));
    __ Ldr(args, MemOperand(fp, kArgsOffset));
    __ Push(function, args);
    __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
    Register argc = x0;

    // Check the stack for overflow.
    // We are not trying to catch interruptions (e.g. debug break and
    // preemption) here, so the "real stack limit" is checked.
    Label enough_stack_space;
    __ LoadRoot(x10, Heap::kRealStackLimitRootIndex);
    __ Ldr(function, MemOperand(fp, kFunctionOffset));
    // Make x10 the space we have left. The stack might already be overflowed
    // here which will cause x10 to become negative.
    // TODO(jbramley): Check that the stack usage here is safe.
    __ Sub(x10, jssp, x10);
    // Check if the arguments will overflow the stack.
    __ Cmp(x10, Operand::UntagSmiAndScale(argc, kPointerSizeLog2));
    __ B(gt, &enough_stack_space);
    // There is not enough stack space, so use a builtin to throw an
    // appropriate error.
    __ Push(function, argc);
    __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
    // We should never return from the STACK_OVERFLOW builtin.
    if (__ emit_debug_code()) {
      __ Unreachable();
    }

    __ Bind(&enough_stack_space);
    // Push current limit and index.
    __ Mov(x1, 0);  // Initial index.
    __ Push(argc, x1);

    Label push_receiver;
    __ Ldr(receiver, MemOperand(fp, kReceiverOffset));

    // Check that the function is a JS function. Otherwise it must be a proxy.
    // When it is not, the function proxy will be invoked later.
    __ JumpIfNotObjectType(function, x10, x11, JS_FUNCTION_TYPE,
                           &push_receiver);

    // Change context eagerly to get the right global object if necessary.
    __ Ldr(cp, FieldMemOperand(function, JSFunction::kContextOffset));
    // Load the shared function info.
    __ Ldr(x2, FieldMemOperand(function,
                               JSFunction::kSharedFunctionInfoOffset));

    // Compute and push the receiver.
    // Do not transform the receiver for strict mode functions.
    Label convert_receiver_to_object, use_global_proxy;
    __ Ldr(w10, FieldMemOperand(x2, SharedFunctionInfo::kCompilerHintsOffset));
    __ Tbnz(x10, SharedFunctionInfo::kStrictModeFunction, &push_receiver);
    // Do not transform the receiver for native functions.
    __ Tbnz(x10, SharedFunctionInfo::kNative, &push_receiver);

    // Compute the receiver in sloppy mode.
    __ JumpIfSmi(receiver, &convert_receiver_to_object);
    __ JumpIfRoot(receiver, Heap::kNullValueRootIndex, &use_global_proxy);
    __ JumpIfRoot(receiver, Heap::kUndefinedValueRootIndex,
                  &use_global_proxy);

    // Check if the receiver is already a JavaScript object.
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ JumpIfObjectType(receiver, x10, x11, FIRST_SPEC_OBJECT_TYPE,
                        &push_receiver, ge);

    // Call a builtin to convert the receiver to a regular object.
    __ Bind(&convert_receiver_to_object);
    __ Push(receiver);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
    __ Mov(receiver, x0);
    __ B(&push_receiver);

    __ Bind(&use_global_proxy);
    __ Ldr(x10, GlobalObjectMemOperand());
    __ Ldr(receiver, FieldMemOperand(x10, GlobalObject::kGlobalProxyOffset));

    // Push the receiver.
    __ Bind(&push_receiver);
    __ Push(receiver);

    // Copy all arguments from the array to the stack.
    Label entry, loop;
    Register current = x0;
    __ Ldr(current, MemOperand(fp, kIndexOffset));
    __ B(&entry);

    __ Bind(&loop);
    // Load the current argument from the arguments array and push it.
    // TODO(all): Couldn't we optimize this for JS arrays?
    __ Ldr(x1, MemOperand(fp, kArgsOffset));
    __ Push(x1, current);

    // Call the runtime to access the property in the arguments array.
    __ CallRuntime(Runtime::kGetProperty, 2);
    __ Push(x0);

    // Advance the index on the frame.
    __ Ldr(current, MemOperand(fp, kIndexOffset));
    __ Add(current, current, Smi::FromInt(1));
    __ Str(current, MemOperand(fp, kIndexOffset));

    // Test if the copy loop has finished copying all the elements from the
    // arguments object.
    __ Bind(&entry);
    __ Ldr(x1, MemOperand(fp, kLimitOffset));
    __ Cmp(current, x1);
    __ B(ne, &loop);

    // At the end of the loop, the number of arguments is stored in 'current',
    // represented as a smi.

    function = x1;  // From now on we want the function to be kept in x1.
    __ Ldr(function, MemOperand(fp, kFunctionOffset));

    // Call the function.
    Label call_proxy;
    ParameterCount actual(current);
    __ SmiUntag(current);
    __ JumpIfNotObjectType(function, x10, x11, JS_FUNCTION_TYPE, &call_proxy);
    __ InvokeFunction(function, actual, CALL_FUNCTION, NullCallWrapper());
    frame_scope.GenerateLeaveFrame();
    __ Drop(3);
    __ Ret();

    // Call the function proxy.
    __ Bind(&call_proxy);
    __ Push(function);  // Add function proxy as last argument.
    __ Add(x0, x0, 1);
    __ Mov(x2, 0);
    __ GetBuiltinFunction(x1, Builtins::CALL_FUNCTION_PROXY);
    __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
  }
  __ Drop(3);
  __ Ret();
}


static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
                                      Label* stack_overflow) {
  // ----------- S t a t e -------------
  //  -- x0 : actual number of arguments
  //  -- x1 : function (passed through to callee)
  //  -- x2 : expected number of arguments
  // -----------------------------------
  // Check the stack for overflow.
  // We are not trying to catch interruptions (e.g. debug break and
  // preemption) here, so the "real stack limit" is checked.
  __ LoadRoot(x10, Heap::kRealStackLimitRootIndex);
  // Make x10 the space we have left. The stack might already be overflowed
  // here which will cause x10 to become negative.
  __ Sub(x10, jssp, x10);
  // Check if the arguments will overflow the stack.
  __ Cmp(x10, Operand(x2, LSL, kPointerSizeLog2));
  __ B(le, stack_overflow);
}


static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ SmiTag(x10, x0);
  __ Mov(x11, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ Push(lr, fp);
  __ Push(x11, x1, x10);
  __ Add(fp, jssp,
         StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize);
}


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then drop the parameters and the receiver.
  __ Ldr(x10, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                               kPointerSize)));
  __ Mov(jssp, fp);
  __ Pop(fp, lr);
  __ DropBySMI(x10, kXRegSize);
  __ Drop(1);
}


void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  ASM_LOCATION("Builtins::Generate_ArgumentsAdaptorTrampoline");
  // ----------- S t a t e -------------
  //  -- x0 : actual number of arguments
  //  -- x1 : function (passed through to callee)
  //  -- x2 : expected number of arguments
  // -----------------------------------

  Label stack_overflow;
  ArgumentAdaptorStackCheck(masm, &stack_overflow);

  Register argc_actual = x0;    // Excluding the receiver.
  Register argc_expected = x2;  // Excluding the receiver.
  Register function = x1;
  Register code_entry = x3;

  Label invoke, dont_adapt_arguments;

  Label enough, too_few;
  __ Ldr(code_entry, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
  __ Cmp(argc_actual, argc_expected);
  __ B(lt, &too_few);
  __ Cmp(argc_expected, SharedFunctionInfo::kDontAdaptArgumentsSentinel);
  __ B(eq, &dont_adapt_arguments);

  {  // Enough parameters: actual >= expected
    EnterArgumentsAdaptorFrame(masm);

    Register copy_start = x10;
    Register copy_end = x11;
    Register copy_to = x12;
    Register scratch1 = x13, scratch2 = x14;

    __ Lsl(argc_expected, argc_expected, kPointerSizeLog2);

    // Adjust for fp, lr, and the receiver.
    __ Add(copy_start, fp, 3 * kPointerSize);
    __ Add(copy_start, copy_start, Operand(argc_actual, LSL, kPointerSizeLog2));
    __ Sub(copy_end, copy_start, argc_expected);
    __ Sub(copy_end, copy_end, kPointerSize);
    __ Mov(copy_to, jssp);

    // Claim space for the arguments, the receiver, and one extra slot.
    // The extra slot ensures we do not write under jssp. It will be popped
    // later.
    __ Add(scratch1, argc_expected, 2 * kPointerSize);
    __ Claim(scratch1, 1);

    // Copy the arguments (including the receiver) to the new stack frame.
    Label copy_2_by_2;
    __ Bind(&copy_2_by_2);
    __ Ldp(scratch1, scratch2,
           MemOperand(copy_start, - 2 * kPointerSize, PreIndex));
    __ Stp(scratch1, scratch2,
           MemOperand(copy_to, - 2 * kPointerSize, PreIndex));
    __ Cmp(copy_start, copy_end);
    __ B(hi, &copy_2_by_2);

    // Correct the space allocated for the extra slot.
    __ Drop(1);

    __ B(&invoke);
  }

  {  // Too few parameters: actual < expected
    EnterArgumentsAdaptorFrame(masm);

    Register copy_from = x10;
    Register copy_end = x11;
    Register copy_to = x12;
    Register scratch1 = x13, scratch2 = x14;

    __ Lsl(argc_expected, argc_expected, kPointerSizeLog2);
    __ Lsl(argc_actual, argc_actual, kPointerSizeLog2);

    // Adjust for fp, lr, and the receiver.
    __ Add(copy_from, fp, 3 * kPointerSize);
    __ Add(copy_from, copy_from, argc_actual);
    __ Mov(copy_to, jssp);
    __ Sub(copy_end, copy_to, 1 * kPointerSize);  // Adjust for the receiver.
    __ Sub(copy_end, copy_end, argc_actual);

    // Claim space for the arguments, the receiver, and one extra slot.
    // The extra slot ensures we do not write under jssp. It will be popped
    // later.
    __ Add(scratch1, argc_expected, 2 * kPointerSize);
    __ Claim(scratch1, 1);

    // Copy the arguments (including the receiver) to the new stack frame.
    Label copy_2_by_2;
    __ Bind(&copy_2_by_2);
    __ Ldp(scratch1, scratch2,
           MemOperand(copy_from, - 2 * kPointerSize, PreIndex));
    __ Stp(scratch1, scratch2,
           MemOperand(copy_to, - 2 * kPointerSize, PreIndex));
    __ Cmp(copy_to, copy_end);
    __ B(hi, &copy_2_by_2);

    __ Mov(copy_to, copy_end);

    // Fill the remaining expected arguments with undefined.
    __ LoadRoot(scratch1, Heap::kUndefinedValueRootIndex);
    __ Add(copy_end, jssp, kPointerSize);

    Label fill;
    __ Bind(&fill);
    __ Stp(scratch1, scratch1,
           MemOperand(copy_to, - 2 * kPointerSize, PreIndex));
    __ Cmp(copy_to, copy_end);
    __ B(hi, &fill);

    // Correct the space allocated for the extra slot.
    __ Drop(1);
  }

  // Arguments have been adapted. Now call the entry point.
  __ Bind(&invoke);
  __ Call(code_entry);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Ret();

  // Call the entry point without adapting the arguments.
  __ Bind(&dont_adapt_arguments);
  __ Jump(code_entry);
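  // (Unlike the Call(code_entry) in the adapted path, which must return here
  // so the adaptor frame can be torn down, this Jump is a tail call: no
  // adaptor frame exists and the callee returns directly to our caller.)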

  __ Bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    EnterArgumentsAdaptorFrame(masm);
    __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
    __ Unreachable();
  }
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM64