// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_ARM64

#include "src/arm64/frames-arm64.h"
#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen/full-codegen.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the native context.
  __ Ldr(result, GlobalObjectMemOperand());
  __ Ldr(result,
         FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the Array function from the native context.
  __ Ldr(result,
         MemOperand(result,
                    Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
}

// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the native context.
  __ Ldr(result, GlobalObjectMemOperand());
  __ Ldr(result,
         FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the InternalArray function from the native context.
  __ Ldr(result, ContextMemOperand(result,
                                   Context::INTERNAL_ARRAY_FUNCTION_INDEX));
}

void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- x0                 : number of arguments excluding receiver
  //  -- x1                 : called function (only guaranteed when
  //                          extra_args requires it)
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[8 * (argc - 1)] : first argument (argc == x0)
  //  -- sp[8 * argc]       : receiver
  // -----------------------------------
  __ AssertFunction(x1);

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  // TODO(bmeurer): Can we make this more robust?
  __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));
  // Insert extra arguments.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    num_extra_args = 1;
    __ Push(x1);
  } else {
    DCHECK(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects x0 to contain the number of arguments
  // including the receiver and the extra arguments.
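  // For example, with no extra arguments, a builtin invoked as fn(a, b, c)
  // reaches the C++ entry point with x0 == 4: the three arguments plus the
  // receiver.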
  __ Add(x0, x0, num_extra_args + 1);
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}

void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_InternalArrayCode");
  Label generic_array_code;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, x1);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ Ldr(x10, FieldMemOperand(x1, JSFunction::kPrototypeOrInitialMapOffset));
    __ Tst(x10, kSmiTagMask);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction);
    __ CompareObjectType(x10, x11, x12, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_ArrayCode");
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, x1);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ Ldr(x10, FieldMemOperand(x1, JSFunction::kPrototypeOrInitialMapOffset));
    __ Tst(x10, kSmiTagMask);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
    __ CompareObjectType(x10, x11, x12, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
  }

  // Run the native code for the Array function called as a normal function.
  __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
  __ Mov(x3, x1);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

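// A sketch of the JavaScript-level behaviour implemented below: String()
// called as a function (not as a constructor) performs ToString on its
// argument, with a special case for symbols, e.g.:
//   String()            // -> ""
//   String(42)          // -> "42"
//   String(Symbol("a")) // -> "Symbol(a)" (via %SymbolDescriptiveString)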
void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0                     : number of arguments
  //  -- x1                     : constructor function
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_StringConstructor");

  // 1. Load the first argument into x0 and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  {
    __ Cbz(x0, &no_arguments);
    __ Sub(x0, x0, 1);
    __ Drop(x0);
    __ Ldr(x0, MemOperand(jssp, 2 * kPointerSize, PostIndex));
  }

  // 2a. At least one argument, return x0 if it's a string, otherwise
  // dispatch to appropriate conversion.
  Label to_string, symbol_descriptive_string;
  {
    __ JumpIfSmi(x0, &to_string);
    STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
    __ CompareObjectType(x0, x1, x1, FIRST_NONSTRING_TYPE);
    __ B(hi, &to_string);
    __ B(eq, &symbol_descriptive_string);
    __ Ret();
  }

  // 2b. No arguments, return the empty string (and pop the receiver).
  __ Bind(&no_arguments);
  {
    __ LoadRoot(x0, Heap::kempty_stringRootIndex);
    __ Drop(1);
    __ Ret();
  }

  // 3a. Convert x0 to a string.
  __ Bind(&to_string);
  {
    ToStringStub stub(masm->isolate());
    __ TailCallStub(&stub);
  }

  // 3b. Convert symbol in x0 to a string.
  __ Bind(&symbol_descriptive_string);
  {
    __ Push(x0);
    __ TailCallRuntime(Runtime::kSymbolDescriptiveString, 1, 1);
  }
}

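// In contrast, new String(value) (handled below) allocates a JSValue wrapper
// object, e.g.:
//   typeof new String("x")     // -> "object"
//   new String("x").valueOf()  // -> "x"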
void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0                     : number of arguments
  //  -- x1                     : constructor function
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_StringConstructor_ConstructStub");

  // 1. Load the first argument into x2 and get rid of the rest (including the
  // receiver).
  {
    Label no_arguments, done;
    __ Cbz(x0, &no_arguments);
    __ Sub(x0, x0, 1);
    __ Drop(x0);
    __ Ldr(x2, MemOperand(jssp, 2 * kPointerSize, PostIndex));
    __ B(&done);
    __ Bind(&no_arguments);
    __ LoadRoot(x2, Heap::kempty_stringRootIndex);
    __ Drop(1);
    __ Bind(&done);
  }

  // 2. Make sure x2 is a string.
  {
    Label convert, done_convert;
    __ JumpIfSmi(x2, &convert);
    __ JumpIfObjectType(x2, x3, x3, FIRST_NONSTRING_TYPE, &done_convert, lo);
    __ Bind(&convert);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      ToStringStub stub(masm->isolate());
      __ Push(x1);
      __ Mov(x0, x2);
      __ CallStub(&stub);
      __ Mov(x2, x0);
      __ Pop(x1);
    }
    __ Bind(&done_convert);
  }

  // 3. Allocate a JSValue wrapper for the string.
  {
    // ----------- S t a t e -------------
    //  -- x1 : constructor function
    //  -- x2 : the first argument
    //  -- lr : return address
    // -----------------------------------

    Label allocate, done_allocate;
    __ Allocate(JSValue::kSize, x0, x3, x4, &allocate, TAG_OBJECT);
    __ Bind(&done_allocate);

    // Initialize the JSValue in x0.
    __ LoadGlobalFunctionInitialMap(x1, x3, x4);
    __ Str(x3, FieldMemOperand(x0, HeapObject::kMapOffset));
    __ LoadRoot(x3, Heap::kEmptyFixedArrayRootIndex);
    __ Str(x3, FieldMemOperand(x0, JSObject::kPropertiesOffset));
    __ Str(x3, FieldMemOperand(x0, JSObject::kElementsOffset));
    __ Str(x2, FieldMemOperand(x0, JSValue::kValueOffset));
    STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
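    // The wrapper thus consists of exactly four tagged words,
    //   [map][properties][elements][value],
    // which is what the STATIC_ASSERT above checks.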
    __ Ret();

    // Fallback to the runtime to allocate in new space.
    __ Bind(&allocate);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(x1, x2);
      __ Push(Smi::FromInt(JSValue::kSize));
      __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
      __ Pop(x2, x1);
    }
    __ B(&done_allocate);
  }
}

static void CallRuntimePassFunction(MacroAssembler* masm,
                                    Runtime::FunctionId function_id) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  //   - Push a copy of the function onto the stack.
  //   - Push another copy as a parameter to the runtime call.
  __ Push(x1, x1);

  __ CallRuntime(function_id, 1);

  //   - Restore receiver.
  __ Pop(x1);
}

static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(x2, FieldMemOperand(x2, SharedFunctionInfo::kCodeOffset));
  __ Add(x2, x2, Code::kHeaderSize - kHeapObjectTag);
  __ Br(x2);
}


static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
  __ Add(x0, x0, Code::kHeaderSize - kHeapObjectTag);
  __ Br(x0);
}

void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However, not
  // checking may delay installing ready functions, and always checking would
  // be quite expensive. A good compromise is to first check against the stack
  // limit as a cue for an interrupt signal.
  Label ok;
  __ CompareRoot(masm->StackPointer(), Heap::kStackLimitRootIndex);
  __ B(hs, &ok);

  CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
  GenerateTailCallToReturnedCode(masm);

  __ Bind(&ok);
  GenerateTailCallToSharedCode(masm);
}

static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function) {
  // ----------- S t a t e -------------
  //  -- x0     : number of arguments
  //  -- x1     : constructor function
  //  -- x2     : allocation site or undefined
  //  -- x3     : original constructor
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  ASM_LOCATION("Builtins::Generate_JSConstructStubHelper");

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the four incoming parameters on the stack.
    Register argc = x0;
    Register constructor = x1;
    Register allocation_site = x2;
    Register original_constructor = x3;

    // Preserve the incoming parameters on the stack.
    __ AssertUndefinedOrAllocationSite(allocation_site, x10);
    __ SmiTag(argc);
    __ Push(allocation_site, argc, constructor, original_constructor);
    // sp[0]: new.target
    // sp[1]: Constructor function.
    // sp[2]: number of arguments (smi-tagged)
    // sp[3]: allocation site

    // Try to allocate the object without transitioning into C code. If any of
    // the preconditions is not met, the code bails out to the runtime call.
    Label rt_call, allocated;
    if (FLAG_inline_new) {
      ExternalReference debug_step_in_fp =
          ExternalReference::debug_step_in_fp_address(isolate);
      __ Mov(x2, Operand(debug_step_in_fp));
      __ Ldr(x2, MemOperand(x2));
      __ Cbnz(x2, &rt_call);

      // Fall back to runtime if the original constructor and function differ.
      __ Cmp(constructor, original_constructor);
      __ B(ne, &rt_call);

      // Load the initial map and verify that it is in fact a map.
      Register init_map = x2;
      __ Ldr(init_map,
             FieldMemOperand(constructor,
                             JSFunction::kPrototypeOrInitialMapOffset));
      __ JumpIfSmi(init_map, &rt_call);
      __ JumpIfNotObjectType(init_map, x10, x11, MAP_TYPE, &rt_call);

      // Check that the constructor is not constructing a JSFunction (see
      // comments in Runtime_NewObject in runtime.cc). In which case the
      // initial map's instance type would be JS_FUNCTION_TYPE.
      __ CompareInstanceType(init_map, x10, JS_FUNCTION_TYPE);
      __ B(eq, &rt_call);

      Register constructon_count = x14;
      if (!is_api_function) {
        Label allocate;
        MemOperand bit_field3 =
            FieldMemOperand(init_map, Map::kBitField3Offset);
        // Check if slack tracking is enabled.
        __ Ldr(x4, bit_field3);
        __ DecodeField<Map::Counter>(constructon_count, x4);
        __ Cmp(constructon_count, Operand(Map::kSlackTrackingCounterEnd));
        __ B(lt, &allocate);
        // Decrease generous allocation count.
        __ Subs(x4, x4, Operand(1 << Map::Counter::kShift));
        __ Str(x4, bit_field3);
        __ Cmp(constructon_count, Operand(Map::kSlackTrackingCounterEnd));
        __ B(ne, &allocate);

        // Push the constructor and map to the stack, and the constructor again
        // as argument to the runtime call.
        __ Push(constructor, init_map, constructor);
        __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
        __ Pop(init_map, constructor);
        __ Mov(constructon_count, Operand(Map::kSlackTrackingCounterEnd - 1));
        __ Bind(&allocate);
      }
      // Now allocate the JSObject on the heap.
      Label rt_call_reload_new_target;
      Register obj_size = x3;
      Register new_obj = x4;
      __ Ldrb(obj_size, FieldMemOperand(init_map, Map::kInstanceSizeOffset));
      __ Allocate(obj_size, new_obj, x10, x11, &rt_call_reload_new_target,
                  SIZE_IN_WORDS);

      // Allocated the JSObject, now initialize the fields. Map is set to
      // initial map and properties and elements are set to empty fixed array.
      // NB. the object pointer is not tagged, so MemOperand is used.
      Register empty = x5;
      __ LoadRoot(empty, Heap::kEmptyFixedArrayRootIndex);
      __ Str(init_map, MemOperand(new_obj, JSObject::kMapOffset));
      STATIC_ASSERT(JSObject::kElementsOffset ==
                    (JSObject::kPropertiesOffset + kPointerSize));
      __ Stp(empty, empty, MemOperand(new_obj, JSObject::kPropertiesOffset));

      Register first_prop = x5;
      __ Add(first_prop, new_obj, JSObject::kHeaderSize);

      // Fill all of the in-object properties with the appropriate filler.
      Register filler = x7;
      __ LoadRoot(filler, Heap::kUndefinedValueRootIndex);

      // Obtain number of pre-allocated property fields and in-object
      // properties.
      Register unused_props = x10;
      Register inobject_props = x11;
      Register inst_sizes_or_attrs = x11;
      Register prealloc_fields = x10;
      __ Ldr(inst_sizes_or_attrs,
             FieldMemOperand(init_map, Map::kInstanceAttributesOffset));
      __ Ubfx(unused_props, inst_sizes_or_attrs,
              Map::kUnusedPropertyFieldsByte * kBitsPerByte, kBitsPerByte);
      __ Ldr(inst_sizes_or_attrs,
             FieldMemOperand(init_map, Map::kInstanceSizesOffset));
      __ Ubfx(
          inobject_props, inst_sizes_or_attrs,
          Map::kInObjectPropertiesOrConstructorFunctionIndexByte * kBitsPerByte,
          kBitsPerByte);
      __ Sub(prealloc_fields, inobject_props, unused_props);

      // Calculate number of property fields in the object.
      Register prop_fields = x6;
      __ Sub(prop_fields, obj_size, JSObject::kHeaderSize / kPointerSize);

      if (!is_api_function) {
        Label no_inobject_slack_tracking;

        // Check if slack tracking is enabled.
        __ Cmp(constructon_count, Operand(Map::kSlackTrackingCounterEnd));
        __ B(lt, &no_inobject_slack_tracking);
        constructon_count = NoReg;

        // Fill the pre-allocated fields with undef.
        __ FillFields(first_prop, prealloc_fields, filler);

        // Update first_prop register to be the offset of the first field after
        // pre-allocated fields.
        __ Add(first_prop, first_prop,
               Operand(prealloc_fields, LSL, kPointerSizeLog2));

        if (FLAG_debug_code) {
          Register obj_end = x14;
          __ Add(obj_end, new_obj, Operand(obj_size, LSL, kPointerSizeLog2));
          __ Cmp(first_prop, obj_end);
          __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields);
        }

        // Fill the remaining fields with one pointer filler map.
        __ LoadRoot(filler, Heap::kOnePointerFillerMapRootIndex);
        __ Sub(prop_fields, prop_fields, prealloc_fields);

        __ bind(&no_inobject_slack_tracking);
      }

      // Fill all of the property fields with undef.
      __ FillFields(first_prop, prop_fields, filler);
      first_prop = NoReg;
      filler = NoReg;

      // Add the object tag to make the JSObject real, so that we can continue
      // and jump into the continuation code at any time from now on.
      __ Add(new_obj, new_obj, kHeapObjectTag);
      // Continue with JSObject being successfully allocated.
      __ B(&allocated);

      // Reload the original constructor and fall-through.
      __ Bind(&rt_call_reload_new_target);
      __ Peek(x3, 0 * kXRegSize);
    }

    // Allocate the new receiver object using the runtime call.
    // x1: constructor function
    // x3: original constructor
    __ Bind(&rt_call);
    __ Push(constructor, original_constructor);  // arguments 1-2
    __ CallRuntime(Runtime::kNewObject, 2);
    __ Mov(x4, x0);

    // Receiver for constructor call allocated.
    // x4: JSObject
    __ Bind(&allocated);

    // Restore the parameters.
    __ Pop(original_constructor);
    __ Pop(constructor);

    // Reload the number of arguments from the stack.
    // Set it up in x0 for the function call below.
    // jssp[0]: number of arguments (smi-tagged)
    __ Peek(argc, 0);  // Load number of arguments.
    __ SmiUntag(argc);

    __ Push(original_constructor, x4, x4);

    // Set up pointer to last argument.
    __ Add(x2, fp, StandardFrameConstants::kCallerSPOffset);

    // Copy arguments and receiver to the expression stack.
    // Copy 2 values every loop to use ldp/stp.
    // x0: number of arguments
    // x1: constructor function
    // x2: address of last argument (caller sp)
    // jssp[0]: receiver
    // jssp[1]: receiver
    // jssp[2]: new.target
    // jssp[3]: number of arguments (smi-tagged)
    // Compute the start address of the copy in x3.
    __ Add(x3, x2, Operand(argc, LSL, kPointerSizeLog2));
    Label loop, entry, done_copying_arguments;
    __ B(&entry);
    __ Bind(&loop);
    __ Ldp(x10, x11, MemOperand(x3, -2 * kPointerSize, PreIndex));
    __ Push(x11, x10);
    __ Bind(&entry);
    __ Cmp(x3, x2);
    __ B(gt, &loop);
    // Because we copied values 2 by 2 we may have copied one extra value.
    // Drop it if that is the case.
    __ B(eq, &done_copying_arguments);
    __ Drop(1);
    __ Bind(&done_copying_arguments);
    // Call the function.
    // x0: number of arguments
    // x1: constructor function
    if (is_api_function) {
      __ Ldr(cp, FieldMemOperand(constructor, JSFunction::kContextOffset));
      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      __ Call(code, RelocInfo::CODE_TARGET);
    } else {
      ParameterCount actual(argc);
      __ InvokeFunction(constructor, actual, CALL_FUNCTION, NullCallWrapper());
    }

    // Store offset of return address for deoptimizer.
    if (!is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore the context from the frame.
    // x0: result
    // jssp[0]: receiver
    // jssp[1]: new.target
    // jssp[2]: number of arguments (smi-tagged)
    __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7
    // on page 74.
    Label use_receiver, exit;

    // If the result is a smi, it is *not* an object in the ECMA sense.
    // x0: result
    // jssp[0]: receiver (newly allocated object)
    // jssp[1]: number of arguments (smi-tagged)
    __ JumpIfSmi(x0, &use_receiver);

    // If the type of the result (stored in its map) is less than
    // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
    __ JumpIfObjectType(x0, x1, x3, FIRST_SPEC_OBJECT_TYPE, &exit, ge);

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ Bind(&use_receiver);
    __ Peek(x0, 0);

    // Remove the receiver from the stack, remove caller arguments, and
    // return.
    __ Bind(&exit);
    // x0: result
    // jssp[0]: receiver (newly allocated object)
    // jssp[1]: new.target (original constructor)
    // jssp[2]: number of arguments (smi-tagged)
    __ Peek(x1, 2 * kXRegSize);

    // Leave construct frame.
  }

  __ DropBySMI(x1);
  __ Drop(1);
  __ IncrementCounter(isolate->counters()->constructed_objects(), 1, x1, x2);
  __ Ret();
}

void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true);
}

void Builtins::Generate_JSConstructStubForDerived(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0     : number of arguments
  //  -- x1     : constructor function
  //  -- x2     : allocation site or undefined
  //  -- x3     : original constructor
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  ASM_LOCATION("Builtins::Generate_JSConstructStubForDerived");

  {
    FrameScope frame_scope(masm, StackFrame::CONSTRUCT);

    __ AssertUndefinedOrAllocationSite(x2, x10);
    __ Mov(x4, x0);
    __ SmiTag(x4);
    __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
    __ Push(x2, x4, x3, x10);
    // sp[0]: receiver (the hole)
    // sp[1]: new.target
    // sp[2]: number of arguments
    // sp[3]: allocation site

    // Set up pointer to last argument.
    __ Add(x2, fp, StandardFrameConstants::kCallerSPOffset);

    // Copy arguments and receiver to the expression stack.
    // Copy 2 values every loop to use ldp/stp.
    // x0: number of arguments
    // x1: constructor function
    // x2: address of last argument (caller sp)
    // jssp[0]: receiver
    // jssp[1]: new.target
    // jssp[2]: number of arguments (smi-tagged)
    // Compute the start address of the copy in x4.
    __ Add(x4, x2, Operand(x0, LSL, kPointerSizeLog2));
    Label loop, entry, done_copying_arguments;
    __ B(&entry);
    __ Bind(&loop);
    __ Ldp(x10, x11, MemOperand(x4, -2 * kPointerSize, PreIndex));
    __ Push(x11, x10);
    __ Bind(&entry);
    __ Cmp(x4, x2);
    __ B(gt, &loop);
    // Because we copied values 2 by 2 we may have copied one extra value.
    // Drop it if that is the case.
    __ B(eq, &done_copying_arguments);
    __ Drop(1);
    __ Bind(&done_copying_arguments);

    // Handle step in.
    Label skip_step_in;
    ExternalReference debug_step_in_fp =
        ExternalReference::debug_step_in_fp_address(masm->isolate());
    __ Mov(x2, Operand(debug_step_in_fp));
    __ Ldr(x2, MemOperand(x2));
    __ Cbz(x2, &skip_step_in);

    __ Push(x0, x1, x1);
    __ CallRuntime(Runtime::kHandleStepInForDerivedConstructors, 1);
    __ Pop(x1, x0);

    __ bind(&skip_step_in);

    // Call the function.
    // x0: number of arguments
    // x1: constructor function
    ParameterCount actual(x0);
    __ InvokeFunction(x1, actual, CALL_FUNCTION, NullCallWrapper());

    // Restore the context from the frame.
    // x0: result
    // jssp[0]: number of arguments (smi-tagged)
    __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    // Load number of arguments (smi), skipping over new.target.
    __ Peek(x1, kPointerSize);

    // Leave construct frame
  }

  __ DropBySMI(x1);
  __ Drop(1);
  __ Ret();
}

enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };


// Clobbers x10, x15; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm,
                                        const int calleeOffset, Register argc,
                                        IsTagged argc_is_tagged) {
  Register function = x15;

  // Check the stack for overflow.
  // We are not trying to catch interruptions (e.g. debug break and
  // preemption) here, so the "real stack limit" is checked.
  Label enough_stack_space;
  __ LoadRoot(x10, Heap::kRealStackLimitRootIndex);
  __ Ldr(function, MemOperand(fp, calleeOffset));
  // Make x10 the space we have left. The stack might already be overflowed
  // here which will cause x10 to become negative.
  // TODO(jbramley): Check that the stack usage here is safe.
  __ Sub(x10, jssp, x10);
  // Check if the arguments will overflow the stack.
  if (argc_is_tagged == kArgcIsSmiTagged) {
    __ Cmp(x10, Operand::UntagSmiAndScale(argc, kPointerSizeLog2));
  } else {
    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
    __ Cmp(x10, Operand(argc, LSL, kPointerSizeLog2));
  }
  __ B(gt, &enough_stack_space);
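  // For example, with argc == 10 untagged arguments and kPointerSize == 8,
  // the branch above is taken only if jssp is more than 80 bytes above the
  // real stack limit.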
  // There is not enough stack space, so use a builtin to throw an appropriate
  // error.
  if (argc_is_tagged == kArgcIsUntaggedInt) {
    __ SmiTag(argc);
  }
  __ Push(function, argc);
  __ CallRuntime(Runtime::kThrowStackOverflow, 0);
  // We should never return from the APPLY_OVERFLOW builtin.
  if (__ emit_debug_code()) {
    __ Unreachable();
  }

  __ Bind(&enough_stack_space);
}

static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from JSEntryStub::GenerateBody().
  Register new_target = x0;
  Register function = x1;
  Register receiver = x2;
  Register argc = x3;
  Register argv = x4;
  Register scratch = x10;

  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Clear the context before we push it when entering the internal frame.
  __ Mov(cp, 0);

  {
    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context (we need to use the caller context from the isolate).
    __ Mov(scratch, Operand(ExternalReference(Isolate::kContextAddress,
                                              masm->isolate())));
    __ Ldr(cp, MemOperand(scratch));

    __ InitializeRootRegister();

    // Push the function and the receiver onto the stack.
    __ Push(function, receiver);

    // Check if we have enough stack space to push all arguments.
    // The function is the first thing that was pushed above after entering
    // the internal frame.
    const int kFunctionOffset =
        InternalFrameConstants::kCodeOffset - kPointerSize;
    Generate_CheckStackOverflow(masm, kFunctionOffset, argc,
                                kArgcIsUntaggedInt);

    // Copy arguments to the stack in a loop, in reverse order.
    // x3: argc.
    // x4: argv.
    Label loop, entry;
    // Compute the copy end address.
    __ Add(scratch, argv, Operand(argc, LSL, kPointerSizeLog2));

    __ B(&entry);
    __ Bind(&loop);
    __ Ldr(x11, MemOperand(argv, kPointerSize, PostIndex));
    __ Ldr(x12, MemOperand(x11));  // Dereference the handle.
    __ Push(x12);                  // Push the argument.
    __ Bind(&entry);
    __ Cmp(scratch, argv);
    __ B(ne, &loop);

    __ Mov(scratch, argc);
    __ Mov(argc, new_target);
    __ Mov(new_target, scratch);
    // x0: argc.
    // x3: new.target.

    // Initialize all JavaScript callee-saved registers, since they will be
    // seen by the garbage collector as part of handlers.
    // The original values have been saved in JSEntryStub::GenerateBody().
    __ LoadRoot(x19, Heap::kUndefinedValueRootIndex);
    __ Mov(x20, x19);
    __ Mov(x21, x19);
    __ Mov(x22, x19);
    __ Mov(x23, x19);
    __ Mov(x24, x19);
    __ Mov(x25, x19);
    // Don't initialize the reserved registers.
    // x26 : root register (root).
    // x27 : context pointer (cp).
    // x28 : JS stack pointer (jssp).
    // x29 : frame pointer (fp).

    Handle<Code> builtin = is_construct
                               ? masm->isolate()->builtins()->Construct()
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the JS internal frame and remove the parameters (except function),
    // and return.
  }

  // Result is in x0. Return.
  __ Ret();
}

void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}

// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right. The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   - x1: the JS function object being called.
//   - cp: our context.
//   - fp: our caller's frame pointer.
//   - jssp: stack pointer.
//   - lr: return address.
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-arm64.h for its layout.
// TODO(rmcilroy): We will need to include the current bytecode pointer in the
// frame.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ Push(lr, fp, cp, x1);
  __ Add(fp, jssp, StandardFrameConstants::kFixedFrameSizeFromFp);

  // Get the bytecode array from the function object and load the pointer to
  // the first entry into kInterpreterBytecodeRegister.
  __ Ldr(x0, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(kInterpreterBytecodeArrayRegister,
         FieldMemOperand(x0, SharedFunctionInfo::kFunctionDataOffset));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister,
                    kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, x0, x0,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }
  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size from the BytecodeArray object.
    __ Ldr(w11, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                                BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    DCHECK(jssp.Is(__ StackPointer()));
    __ Sub(x10, jssp, Operand(x11));
    __ CompareRoot(x10, Heap::kRealStackLimitRootIndex);
    __ B(hs, &ok);
    __ CallRuntime(Runtime::kThrowStackOverflow, 0);
    __ Bind(&ok);

    // If ok, push undefined as the initial value for all register file
    // entries. Note: there should always be at least one stack slot for the
    // return register in the register file.
    Label loop_header;
    __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
    // TODO(rmcilroy): Ensure we always have an even number of registers to
    // allow stack to be 16-byte aligned (and remove need for jssp).
    __ Lsr(x11, x11, kPointerSizeLog2);
    __ PushMultipleTimes(x10, x11);
    __ Bind(&loop_header);
  }
  // TODO(rmcilroy): List of things not currently dealt with here but done in
  // fullcodegen's prologue:
  //  - Support profiler (specifically profiling_counter).
  //  - Call ProfileEntryHookStub when isolate has a function_entry_hook.
  //  - Allow simulator stop operations if FLAG_stop_at is set.
  //  - Deal with sloppy mode functions which need to replace the
  //    receiver with the global proxy when called as functions (without an
  //    explicit receiver object).
  //  - Code aging of the BytecodeArray object.
  //  - Supporting FLAG_trace.
  //
  // The following items are also not done here, and will probably be done
  // using explicit bytecodes instead:
  //  - Allocating a new local context if applicable.
  //  - Setting up a local binding to the this function, which is used in
  //    derived constructors with super calls.
  //  - Setting new.target if required.
  //  - Dealing with REST parameters (only if
  //    https://codereview.chromium.org/1235153006 doesn't land by then).
  //  - Dealing with argument objects.
  // Perform stack guard check.
  {
    Label ok;
    __ CompareRoot(jssp, Heap::kStackLimitRootIndex);
    __ B(hs, &ok);
    __ CallRuntime(Runtime::kStackGuard, 0);
    __ Bind(&ok);
  }

  // Load accumulator, register file, bytecode offset, dispatch table into
  // registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ Sub(kInterpreterRegisterFileRegister, fp,
         Operand(kPointerSize + StandardFrameConstants::kFixedFrameSizeFromFp));
  __ Mov(kInterpreterBytecodeOffsetRegister,
         Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
  __ LoadRoot(kInterpreterDispatchTableRegister,
              Heap::kInterpreterTableRootIndex);
  __ Add(kInterpreterDispatchTableRegister, kInterpreterDispatchTableRegister,
         Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  // Dispatch to the first bytecode handler for the function.
  __ Ldrb(x1, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
  __ Mov(x1, Operand(x1, LSL, kPointerSizeLog2));
  __ Ldr(ip0, MemOperand(kInterpreterDispatchTableRegister, x1));
  // TODO(rmcilroy): Make dispatch table point to code entries to avoid
  // untagging and header removal.
  __ Add(ip0, ip0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Call(ip0);
}

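// In effect, the dispatch sequence above computes roughly:
//   handler = dispatch_table[bytecode_array[bytecode_offset]];
//   call(handler_code_entry);
// with the accumulator, register file pointer, bytecode offset and dispatch
// table held in the fixed kInterpreter* registers.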
void Builtins::Generate_InterpreterExitTrampoline(MacroAssembler* masm) {
  // TODO(rmcilroy): List of things not currently dealt with here but done in
  // fullcodegen's EmitReturnSequence.
  //  - Supporting FLAG_trace for Runtime::TraceExit.
  //  - Support profiler (specifically decrementing profiling_counter
  //    appropriately and calling out to HandleInterrupts if necessary).

  // The return value is in accumulator, which is already in x0.

  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);

  // Drop receiver + arguments and return.
  __ Ldr(w1, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                             BytecodeArray::kParameterSizeOffset));
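  // The parameter size appears to be a byte count, hence the unit size of 1
  // in the Drop below: e.g. three tagged parameters on arm64 would drop
  // 3 * 8 == 24 bytes.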
  __ Drop(x1, 1);
  __ Ret();
}


void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kCompileLazy);
  GenerateTailCallToReturnedCode(masm);
}

static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  Register function = x1;

  // Preserve function. At the same time, push arguments for
  // kCompileOptimized.
  __ LoadObject(x10, masm->isolate()->factory()->ToBoolean(concurrent));
  __ Push(function, function, x10);

  __ CallRuntime(Runtime::kCompileOptimized, 2);

  // Restore receiver.
  __ Pop(function);
}


void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  CallCompileOptimized(masm, false);
  GenerateTailCallToReturnedCode(masm);
}


void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  CallCompileOptimized(masm, true);
  GenerateTailCallToReturnedCode(masm);
}

static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code fast, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // The following caller-saved registers must be saved and restored when
  // calling through to the runtime:
  //   x0 - The address from which to resume execution.
  //   x1 - isolate
  //   lr - The return address for the JSFunction itself. It has not yet been
  //        preserved on the stack because the frame setup code was replaced
  //        with a call to this stub, to handle code ageing.
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ Push(x0, x1, fp, lr);
    __ Mov(x1, ExternalReference::isolate_address(masm->isolate()));
    __ CallCFunction(
        ExternalReference::get_make_code_young_function(masm->isolate()), 2);
    __ Pop(lr, fp, x1, x0);
  }

  // The calling function has been made young again, so return to execute the
  // real frame set-up code.
  __ Ret();
}

#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                  \
  void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }                                                           \
  void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(  \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR

void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the
  // fact that make_code_young doesn't do any garbage collection which allows
  // us to save/restore the registers without worrying about which of them
  // contain pointers.

  // The following caller-saved registers must be saved and restored when
  // calling through to the runtime:
  //   x0 - The address from which to resume execution.
  //   x1 - isolate
  //   lr - The return address for the JSFunction itself. It has not yet been
  //        preserved on the stack because the frame setup code was replaced
  //        with a call to this stub, to handle code ageing.
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ Push(x0, x1, fp, lr);
    __ Mov(x1, ExternalReference::isolate_address(masm->isolate()));
    __ CallCFunction(
        ExternalReference::get_mark_code_as_executed_function(
            masm->isolate()), 2);
    __ Pop(lr, fp, x1, x0);

    // Perform prologue operations usually performed by the young code stub.
    __ EmitFrameSetupForCodeAgePatching(masm);
  }

  // Jump to point after the code-age stub.
  __ Add(x0, x0, kNoCodeAgeSequenceLength);
  __ Br(x0);
}

void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}


void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
  Generate_MarkCodeAsExecutedOnce(masm);
}

static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    // TODO(jbramley): Is it correct (and appropriate) to use safepoint
    // registers here? According to the comment above, we should only need to
    // preserve the registers with parameters.
    __ PushXRegList(kSafepointSavedRegisters);
    // Pass the function and deoptimization type to the runtime system.
    __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
    __ PopXRegList(kSafepointSavedRegisters);
  }

  // Ignore state (pushed by Deoptimizer::EntryGenerator::Generate).
  __ Drop(1);

  // Jump to the miss handler. Deoptimizer::EntryGenerator::Generate loads this
  // into lr before it jumps here.
  __ Br(lr);
}


void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}


void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}

static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass the deoptimization type to the runtime system.
    __ Mov(x0, Smi::FromInt(static_cast<int>(type)));
    __ Push(x0);
    __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
  }

  // Get the full codegen state from the stack and untag it.
  Register state = x6;
  __ Peek(state, 0);
  __ SmiUntag(state);

  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ CompareAndBranch(
      state, FullCodeGenerator::NO_REGISTERS, ne, &with_tos_register);
  __ Drop(1);  // Remove state.
  __ Ret();

  __ Bind(&with_tos_register);
  // Reload TOS register.
  __ Peek(x0, kPointerSize);
  __ CompareAndBranch(state, FullCodeGenerator::TOS_REG, ne, &unknown_state);
  __ Drop(2);  // Remove state and TOS.
  __ Ret();

  __ Bind(&unknown_state);
  __ Abort(kInvalidFullCodegenState);
}


void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}


void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}

void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ Ldr(x0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ Push(x0);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
  }

  // If the code object is null, just return to the unoptimized code.
  Label skip;
  __ CompareAndBranch(x0, Smi::FromInt(0), ne, &skip);
  __ Ret();

  __ Bind(&skip);

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ Ldr(x1, MemOperand(x0, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
  __ Ldrsw(w1, UntagSmiFieldMemOperand(x1, FixedArray::OffsetOfElementAt(
                   DeoptimizationInputData::kOsrPcOffsetIndex)));

  // Compute the target address = code_obj + header_size + osr_offset
  // <entry_addr> = <code_obj> + #header_size + <osr_offset>
  __ Add(x0, x0, x1);
  __ Add(lr, x0, Code::kHeaderSize - kHeapObjectTag);

  // And "return" to the OSR entry point of the function.
  __ Ret();
}

void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  // We check the stack limit as indicator that recompilation might be done.
  Label ok;
  __ CompareRoot(jssp, Heap::kStackLimitRootIndex);
  __ B(hs, &ok);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kStackGuard, 0);
  }
  __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
          RelocInfo::CODE_TARGET);

  __ Bind(&ok);
  __ Ret();
}

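// The builtin below backs Function.prototype.call: the callable arrives as
// the receiver, so e.g. f.call(thisArg, a, b) enters with f as receiver and
// (thisArg, a, b) on the stack; shifting everything down one slot makes
// thisArg the new receiver and leaves (a, b) as the arguments for f.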
void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  Register argc = x0;
  Register function = x1;
  Register scratch1 = x10;
  Register scratch2 = x11;

  ASM_LOCATION("Builtins::Generate_FunctionCall");
  // 1. Make sure we have at least one argument.
  {
    Label done;
    __ Cbnz(argc, &done);
    __ LoadRoot(scratch1, Heap::kUndefinedValueRootIndex);
    __ Push(scratch1);
    __ Mov(argc, 1);
    __ Bind(&done);
  }

  // 2. Get the callable to call (passed as receiver) from the stack.
  __ Peek(function, Operand(argc, LSL, kXRegSizeLog2));

  // 3. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver). Adjust argument count to make
  //    the original first argument the new receiver.
  {
    Label loop;
    // Calculate the copy start address (destination). Copy end address is
    // jssp.
    __ Add(scratch2, jssp, Operand(argc, LSL, kPointerSizeLog2));
    __ Sub(scratch1, scratch2, kPointerSize);

    __ Bind(&loop);
    __ Ldr(x12, MemOperand(scratch1, -kPointerSize, PostIndex));
    __ Str(x12, MemOperand(scratch2, -kPointerSize, PostIndex));
    __ Cmp(scratch1, jssp);
    __ B(ge, &loop);
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ Sub(argc, argc, 1);
    __ Drop(1);
  }

  // 4. Call the callable.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}

static void Generate_PushAppliedArguments(MacroAssembler* masm,
                                          const int vectorOffset,
                                          const int argumentsOffset,
                                          const int indexOffset,
                                          const int limitOffset) {
  Label entry, loop;
  Register receiver = LoadDescriptor::ReceiverRegister();
  Register key = LoadDescriptor::NameRegister();
  Register slot = LoadDescriptor::SlotRegister();
  Register vector = LoadWithVectorDescriptor::VectorRegister();

  __ Ldr(key, MemOperand(fp, indexOffset));
  __ B(&entry);

  // Load the current argument from the arguments array.
  __ Bind(&loop);
  __ Ldr(receiver, MemOperand(fp, argumentsOffset));

  // Use inline caching to speed up access to arguments.
  int slot_index = TypeFeedbackVector::PushAppliedArgumentsIndex();
  __ Mov(slot, Operand(Smi::FromInt(slot_index)));
  __ Ldr(vector, MemOperand(fp, vectorOffset));
  Handle<Code> ic =
      KeyedLoadICStub(masm->isolate(), LoadICState(kNoExtraICState)).GetCode();
  __ Call(ic, RelocInfo::CODE_TARGET);

  // Push the nth argument.
  __ Push(x0);

  __ Ldr(key, MemOperand(fp, indexOffset));
  __ Add(key, key, Smi::FromInt(1));
  __ Str(key, MemOperand(fp, indexOffset));

  // Test if the copy loop has finished copying all the elements from the
  // arguments object.
  __ Bind(&entry);
  __ Ldr(x1, MemOperand(fp, limitOffset));
  __ Cmp(key, x1);
  __ B(ne, &loop);

  // On exit, the pushed arguments count is in x0, untagged.
  __ Mov(x0, key);
  __ SmiUntag(x0);
}

static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) {
  const int kFormalParameters = targetIsArgument ? 3 : 2;
  const int kStackSize = kFormalParameters + 1;

  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);

    const int kArgumentsOffset = kFPOnStackSize + kPCOnStackSize;
    const int kReceiverOffset = kArgumentsOffset + kPointerSize;
    const int kFunctionOffset = kReceiverOffset + kPointerSize;
    const int kVectorOffset =
        InternalFrameConstants::kCodeOffset - 1 * kPointerSize;
    const int kIndexOffset = kVectorOffset - (2 * kPointerSize);
    const int kLimitOffset = kVectorOffset - (1 * kPointerSize);

    Register args = x12;
    Register receiver = x14;
    Register function = x15;
    Register apply_function = x1;

    // Push the vector.
    __ Ldr(apply_function,
           FieldMemOperand(apply_function,
                           JSFunction::kSharedFunctionInfoOffset));
    __ Ldr(apply_function,
           FieldMemOperand(apply_function,
                           SharedFunctionInfo::kFeedbackVectorOffset));
    __ Push(apply_function);

    // Get the length of the arguments via a builtin call.
    __ Ldr(function, MemOperand(fp, kFunctionOffset));
    __ Ldr(args, MemOperand(fp, kArgumentsOffset));
    __ Push(function, args);
    if (targetIsArgument) {
      __ InvokeBuiltin(Context::REFLECT_APPLY_PREPARE_BUILTIN_INDEX,
                       CALL_FUNCTION);
    } else {
      __ InvokeBuiltin(Context::APPLY_PREPARE_BUILTIN_INDEX, CALL_FUNCTION);
    }
    Register argc = x0;

    Generate_CheckStackOverflow(masm, kFunctionOffset, argc, kArgcIsSmiTagged);

    // Push current limit, index and receiver.
    __ Mov(x1, 0);  // Initial index.
    __ Ldr(receiver, MemOperand(fp, kReceiverOffset));
    __ Push(argc, x1, receiver);

    // Copy all arguments from the array to the stack.
    Generate_PushAppliedArguments(masm, kVectorOffset, kArgumentsOffset,
                                  kIndexOffset, kLimitOffset);

    // At the end of the loop, the number of arguments is stored in x0,
    // untagged.

    // Call the callable.
    // TODO(bmeurer): This should be a tail call according to ES6.
    __ Ldr(x1, MemOperand(fp, kFunctionOffset));
    __ Call(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }
  __ Drop(kStackSize);
  __ Ret();
}

static void Generate_ConstructHelper(MacroAssembler* masm) {
  const int kFormalParameters = 3;
  const int kStackSize = kFormalParameters + 1;

  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);

    const int kNewTargetOffset = kFPOnStackSize + kPCOnStackSize;
    const int kArgumentsOffset = kNewTargetOffset + kPointerSize;
    const int kFunctionOffset = kArgumentsOffset + kPointerSize;
    const int kVectorOffset =
        InternalFrameConstants::kCodeOffset - 1 * kPointerSize;
    const int kIndexOffset = kVectorOffset - (2 * kPointerSize);
    const int kLimitOffset = kVectorOffset - (1 * kPointerSize);

    // Is x11 safe to use?
    Register newTarget = x11;
    Register args = x12;
    Register function = x15;
    Register construct_function = x1;

    // Push the vector.
    __ Ldr(construct_function,
           FieldMemOperand(construct_function,
                           JSFunction::kSharedFunctionInfoOffset));
    __ Ldr(construct_function,
           FieldMemOperand(construct_function,
                           SharedFunctionInfo::kFeedbackVectorOffset));
    __ Push(construct_function);

    // If newTarget is not supplied, set it to constructor.
    Label validate_arguments;
    __ Ldr(x0, MemOperand(fp, kNewTargetOffset));
    __ CompareRoot(x0, Heap::kUndefinedValueRootIndex);
    __ B(ne, &validate_arguments);
    __ Ldr(x0, MemOperand(fp, kFunctionOffset));
    __ Str(x0, MemOperand(fp, kNewTargetOffset));

    // Validate arguments.
    __ Bind(&validate_arguments);
    __ Ldr(function, MemOperand(fp, kFunctionOffset));
    __ Ldr(args, MemOperand(fp, kArgumentsOffset));
    __ Ldr(newTarget, MemOperand(fp, kNewTargetOffset));
    __ Push(function, args, newTarget);
    __ InvokeBuiltin(Context::REFLECT_CONSTRUCT_PREPARE_BUILTIN_INDEX,
                     CALL_FUNCTION);
    Register argc = x0;

    Generate_CheckStackOverflow(masm, kFunctionOffset, argc, kArgcIsSmiTagged);

    // Push current limit and index & constructor function as callee.
    __ Mov(x1, 0);  // Initial index.
    __ Push(argc, x1, function);

    // Copy all arguments from the array to the stack.
    Generate_PushAppliedArguments(masm, kVectorOffset, kArgumentsOffset,
                                  kIndexOffset, kLimitOffset);

    // Use undefined feedback vector.
    __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
    __ Ldr(x1, MemOperand(fp, kFunctionOffset));
    __ Ldr(x4, MemOperand(fp, kNewTargetOffset));

    // Call the function.
    CallConstructStub stub(masm->isolate(), SUPER_CONSTRUCTOR_CALL);
    __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);

    // Leave internal frame.
  }
  __ Drop(kStackSize);
  __ Ret();
}

void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  ASM_LOCATION("Builtins::Generate_FunctionApply");
  Generate_ApplyHelper(masm, false);
}


void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  ASM_LOCATION("Builtins::Generate_ReflectApply");
  Generate_ApplyHelper(masm, true);
}


void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  ASM_LOCATION("Builtins::Generate_ReflectConstruct");
  Generate_ConstructHelper(masm);
}

static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
                                      Label* stack_overflow) {
  // ----------- S t a t e -------------
  //  -- x0 : actual number of arguments
  //  -- x1 : function (passed through to callee)
  //  -- x2 : expected number of arguments
  // -----------------------------------
  // Check the stack for overflow.
  // We are not trying to catch interruptions (e.g. debug break and
  // preemption) here, so the "real stack limit" is checked.
  Label enough_stack_space;
  __ LoadRoot(x10, Heap::kRealStackLimitRootIndex);
  // Make x10 the space we have left. The stack might already be overflowed
  // here which will cause x10 to become negative.
  __ Sub(x10, jssp, x10);
  // Check if the arguments will overflow the stack.
  __ Cmp(x10, Operand(x2, LSL, kPointerSizeLog2));
  __ B(le, stack_overflow);
}

static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ SmiTag(x10, x0);
  __ Mov(x11, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ Push(lr, fp);
  __ Push(x11, x1, x10);
  __ Add(fp, jssp,
         StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize);
}

static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then drop the parameters and the receiver.
  __ Ldr(x10, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                               kPointerSize)));
  __ Mov(jssp, fp);
  __ Pop(fp, lr);
  __ DropBySMI(x10, kXRegSize);
  __ Drop(1);
}

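// For example, an adapted call that pushed five arguments leaves here by
// reloading the smi-tagged count saved by EnterArgumentsAdaptorFrame,
// dropping those five slots plus the receiver, and returning with the result
// still in x0.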
// static
void Builtins::Generate_CallFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the function to call (checked to be a JSFunction)
  // -----------------------------------

  Label convert, convert_global_proxy, convert_to_object, done_convert;
  __ AssertFunction(x1);
  // TODO(bmeurer): Throw a TypeError if function's [[FunctionKind]] internal
  // slot is "classConstructor".
  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
  __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));
  __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
  __ Ldr(w3, FieldMemOperand(x2, SharedFunctionInfo::kCompilerHintsOffset));
  __ TestAndBranchIfAnySet(w3,
                           (1 << SharedFunctionInfo::kNative) |
                               (1 << SharedFunctionInfo::kStrictModeFunction),
                           &done_convert);
  {
    __ Peek(x3, Operand(x0, LSL, kXRegSizeLog2));

    // ----------- S t a t e -------------
    //  -- x0 : the number of arguments (not including the receiver)
    //  -- x1 : the function to call (checked to be a JSFunction)
    //  -- x2 : the shared function info.
    //  -- x3 : the receiver
    //  -- cp : the function context.
    // -----------------------------------

    Label convert_receiver;
    __ JumpIfSmi(x3, &convert_to_object);
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ CompareObjectType(x3, x4, x4, FIRST_JS_RECEIVER_TYPE);
    __ B(hs, &done_convert);
    __ JumpIfRoot(x3, Heap::kUndefinedValueRootIndex, &convert_global_proxy);
    __ JumpIfNotRoot(x3, Heap::kNullValueRootIndex, &convert_to_object);
    __ Bind(&convert_global_proxy);
    {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(x3);
    }
    __ B(&convert_receiver);
    __ Bind(&convert_to_object);
    {
      // Convert receiver using ToObject.
      // TODO(bmeurer): Inline the allocation here to avoid building the frame
      // in the fast case? (fall back to AllocateInNewSpace?)
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ SmiTag(x0);
      __ Push(x0, x1);
      __ Mov(x0, x3);
      ToObjectStub stub(masm->isolate());
      __ CallStub(&stub);
      __ Mov(x3, x0);
      __ Pop(x1, x0);
      __ SmiUntag(x0);
    }
    __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
    __ Bind(&convert_receiver);
    __ Poke(x3, Operand(x0, LSL, kXRegSizeLog2));
  }
  __ Bind(&done_convert);

  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the function to call (checked to be a JSFunction)
  //  -- x2 : the shared function info.
  //  -- cp : the function context.
  // -----------------------------------

  __ Ldrsw(
      x2, FieldMemOperand(x2, SharedFunctionInfo::kFormalParameterCountOffset));
  __ Ldr(x3, FieldMemOperand(x1, JSFunction::kCodeEntryOffset));
  ParameterCount actual(x0);
  ParameterCount expected(x2);
  __ InvokeCode(x3, expected, actual, JUMP_FUNCTION, NullCallWrapper());
}

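// The receiver conversion above implements the sloppy-mode part of ES6
// 9.2.1.2 OrdinaryCallBindThis: e.g. f.call(null) runs f with the global
// proxy as its receiver, and f.call(42) runs it with a Number wrapper object,
// while strict-mode and native functions see their this argument unchanged.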
// static
void Builtins::Generate_Call(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the target to call (can be any Object).
  // -----------------------------------

  Label non_smi, non_jsfunction, non_function;
  __ JumpIfSmi(x1, &non_function);
  __ Bind(&non_smi);
  __ CompareObjectType(x1, x2, x2, JS_FUNCTION_TYPE);
  __ B(ne, &non_jsfunction);
  __ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET);
  __ Bind(&non_jsfunction);
  __ Cmp(x2, JS_FUNCTION_PROXY_TYPE);
  __ B(ne, &non_function);

  // 1. Call to function proxy.
  // TODO(neis): This doesn't match the ES6 spec for [[Call]] on proxies.
  __ Ldr(x1, FieldMemOperand(x1, JSFunctionProxy::kCallTrapOffset));
  __ AssertNotSmi(x1);
  __ B(&non_smi);

  // 2. Call to something else, which might have a [[Call]] internal method (if
  // not we raise an exception).
  __ Bind(&non_function);
  // TODO(bmeurer): I wonder why we prefer to have slow API calls? This could
  // be awesome instead; i.e. a trivial improvement would be to call into the
  // runtime and just deal with the API function there instead of returning a
  // delegate from a runtime call that just jumps back to the runtime once
  // called. Or, bonus points, call directly into the C API function here, as
  // we do in some Crankshaft fast cases.
  // Overwrite the original receiver with the (original) target.
  __ Poke(x1, Operand(x0, LSL, kXRegSizeLog2));
  {
    // Determine the delegate for the target (if any).
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ SmiTag(x0);
    __ Push(x0, x1);
    __ CallRuntime(Runtime::kGetFunctionDelegate, 1);
    __ Mov(x1, x0);
    __ Pop(x0);
    __ SmiUntag(x0);
  }
  // The delegate is always a regular function.
  __ AssertFunction(x1);
  __ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the constructor to call (checked to be a JSFunction)
  //  -- x3 : the original constructor (checked to be a JSFunction)
  // -----------------------------------
  __ AssertFunction(x1);
  __ AssertFunction(x3);

  // Calling convention for function specific ConstructStubs require
  // x2 to contain either an AllocationSite or undefined.
  __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point).
  __ Ldr(x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
  __ Ldr(x4, FieldMemOperand(x4, SharedFunctionInfo::kConstructStubOffset));
  __ Add(x4, x4, Code::kHeaderSize - kHeapObjectTag);
  __ Br(x4);
}

// static
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x1 : the constructor to call (can be any Object)
  //  -- x3 : the original constructor (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  Label slow;
  __ JumpIfSmi(x1, &slow);
  __ CompareObjectType(x1, x5, x5, JS_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->ConstructFunction(),
          RelocInfo::CODE_TARGET, eq);
  __ Cmp(x5, Operand(JS_FUNCTION_PROXY_TYPE));
  __ B(ne, &slow);

  // TODO(neis): This doesn't match the ES6 spec for [[Construct]] on proxies.
  __ Ldr(x1, FieldMemOperand(x1, JSFunctionProxy::kConstructTrapOffset));
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);

  __ Bind(&slow);
  {
    // Determine the delegate for the target (if any).
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ SmiTag(x0);
    __ Push(x0, x1);
    __ CallRuntime(Runtime::kGetConstructorDelegate, 1);
    __ Mov(x1, x0);
    __ Pop(x0);
    __ SmiUntag(x0);
  }
  // The delegate is always a regular function.
  __ AssertFunction(x1);
  __ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_PushArgsAndCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- x0 : the number of arguments (not including the receiver)
  //  -- x2 : the address of the first argument to be pushed. Subsequent
  //          arguments should be consecutive above this, in the same order as
  //          they are to be pushed onto the stack.
  //  -- x1 : the target to call (can be any Object).
  // -----------------------------------

  // Find the address of the last argument.
  __ add(x3, x0, Operand(1));  // Add one for receiver.
  __ lsl(x3, x3, kPointerSizeLog2);
  __ sub(x4, x2, x3);

  // Push the arguments.
  Label loop_header, loop_check;
  __ Mov(x5, jssp);
  __ Claim(x3, 1);
  __ B(&loop_check);
  __ Bind(&loop_header);
  // TODO(rmcilroy): Push two at a time once we ensure we keep stack aligned.
  __ Ldr(x3, MemOperand(x2, -kPointerSize, PostIndex));
  __ Str(x3, MemOperand(x5, -kPointerSize, PreIndex));
  __ Bind(&loop_check);
  __ Cmp(x2, x4);
  __ B(gt, &loop_header);

  // Call the target.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}

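// For example, with x0 == 2 the code above claims three slots (two arguments
// plus the receiver) and copies them downwards from the address in x2, so
// the stack matches the layout that the Call builtin expects.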
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  ASM_LOCATION("Builtins::Generate_ArgumentsAdaptorTrampoline");
  // ----------- S t a t e -------------
  //  -- x0 : actual number of arguments
  //  -- x1 : function (passed through to callee)
  //  -- x2 : expected number of arguments
  // -----------------------------------

  Label stack_overflow;
  ArgumentAdaptorStackCheck(masm, &stack_overflow);

  Register argc_actual = x0;    // Excluding the receiver.
  Register argc_expected = x2;  // Excluding the receiver.
  Register function = x1;
  Register code_entry = x3;

  Label invoke, dont_adapt_arguments;

  Label enough, too_few;
  __ Ldr(code_entry, FieldMemOperand(function, JSFunction::kCodeEntryOffset));
  __ Cmp(argc_actual, argc_expected);
  __ B(lt, &too_few);
  __ Cmp(argc_expected, SharedFunctionInfo::kDontAdaptArgumentsSentinel);
  __ B(eq, &dont_adapt_arguments);
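  // kDontAdaptArgumentsSentinel marks functions that are declared to cope
  // with any actual argument count, so no adaptor frame is needed for them.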
  {  // Enough parameters: actual >= expected
    EnterArgumentsAdaptorFrame(masm);

    Register copy_start = x10;
    Register copy_end = x11;
    Register copy_to = x12;
    Register scratch1 = x13, scratch2 = x14;

    __ Lsl(scratch2, argc_expected, kPointerSizeLog2);

    // Adjust for fp, lr, and the receiver.
    __ Add(copy_start, fp, 3 * kPointerSize);
    __ Add(copy_start, copy_start, Operand(argc_actual, LSL, kPointerSizeLog2));
    __ Sub(copy_end, copy_start, scratch2);
    __ Sub(copy_end, copy_end, kPointerSize);
    __ Mov(copy_to, jssp);

    // Claim space for the arguments, the receiver, and one extra slot.
    // The extra slot ensures we do not write under jssp. It will be popped
    // later.
    __ Add(scratch1, scratch2, 2 * kPointerSize);
    __ Claim(scratch1, 1);

    // Copy the arguments (including the receiver) to the new stack frame.
    Label copy_2_by_2;
    __ Bind(&copy_2_by_2);
    __ Ldp(scratch1, scratch2,
           MemOperand(copy_start, -2 * kPointerSize, PreIndex));
    __ Stp(scratch1, scratch2,
           MemOperand(copy_to, -2 * kPointerSize, PreIndex));
    __ Cmp(copy_start, copy_end);
    __ B(hi, &copy_2_by_2);

    // Correct the space allocated for the extra slot.
    __ Drop(1);

    __ B(&invoke);
  }
  {  // Too few parameters: Actual < expected
    __ Bind(&too_few);

    Register copy_from = x10;
    Register copy_end = x11;
    Register copy_to = x12;
    Register scratch1 = x13, scratch2 = x14;

    // If the function is strong we need to throw an error.
    Label no_strong_error;
    __ Ldr(scratch1,
           FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
    __ Ldr(scratch2.W(),
           FieldMemOperand(scratch1, SharedFunctionInfo::kCompilerHintsOffset));
    __ TestAndBranchIfAllClear(scratch2.W(),
                               (1 << SharedFunctionInfo::kStrongModeFunction),
                               &no_strong_error);

    // What we really care about is the required number of arguments.
    DCHECK_EQ(kPointerSize, kInt64Size);
    __ Ldr(scratch2.W(),
           FieldMemOperand(scratch1, SharedFunctionInfo::kLengthOffset));
    __ Cmp(argc_actual, Operand(scratch2, LSR, 1));
    __ B(ge, &no_strong_error);

    {
      FrameScope frame(masm, StackFrame::MANUAL);
      EnterArgumentsAdaptorFrame(masm);
      __ CallRuntime(Runtime::kThrowStrongModeTooFewArguments, 0);
    }

    __ Bind(&no_strong_error);
    EnterArgumentsAdaptorFrame(masm);

    __ Lsl(scratch2, argc_expected, kPointerSizeLog2);
    __ Lsl(argc_actual, argc_actual, kPointerSizeLog2);

    // Adjust for fp, lr, and the receiver.
    __ Add(copy_from, fp, 3 * kPointerSize);
    __ Add(copy_from, copy_from, argc_actual);
    __ Mov(copy_to, jssp);
    __ Sub(copy_end, copy_to, 1 * kPointerSize);  // Adjust for the receiver.
    __ Sub(copy_end, copy_end, argc_actual);

    // Claim space for the arguments, the receiver, and one extra slot.
    // The extra slot ensures we do not write under jssp. It will be popped
    // later.
    __ Add(scratch1, scratch2, 2 * kPointerSize);
    __ Claim(scratch1, 1);

    // Copy the arguments (including the receiver) to the new stack frame.
    Label copy_2_by_2;
    __ Bind(&copy_2_by_2);
    __ Ldp(scratch1, scratch2,
           MemOperand(copy_from, -2 * kPointerSize, PreIndex));
    __ Stp(scratch1, scratch2,
           MemOperand(copy_to, -2 * kPointerSize, PreIndex));
    __ Cmp(copy_to, copy_end);
    __ B(hi, &copy_2_by_2);

    __ Mov(copy_to, copy_end);

    // Fill the remaining expected arguments with undefined.
    __ LoadRoot(scratch1, Heap::kUndefinedValueRootIndex);
    __ Add(copy_end, jssp, kPointerSize);

    Label fill;
    __ Bind(&fill);
    __ Stp(scratch1, scratch1,
           MemOperand(copy_to, -2 * kPointerSize, PreIndex));
    __ Cmp(copy_to, copy_end);
    __ B(hi, &fill);

    // Correct the space allocated for the extra slot.
    __ Drop(1);
  }
  // Arguments have been adapted. Now call the entry point.
  __ Bind(&invoke);
  __ Mov(argc_actual, argc_expected);
  // x0 : expected number of arguments
  // x1 : function (passed through to callee)
  __ Call(code_entry);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Ret();

  // Call the entry point without adapting the arguments.
  __ Bind(&dont_adapt_arguments);
  __ Jump(code_entry);

  __ Bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    EnterArgumentsAdaptorFrame(masm);
    __ CallRuntime(Runtime::kThrowStackOverflow, 0);
    __ Unreachable();
  }
}

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_ARM64