// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_X64

#include "src/code-factory.h"
#include "src/codegen.h"
#include "src/deoptimizer.h"
#include "src/full-codegen/full-codegen.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)


void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- rax                 : number of arguments excluding receiver
  //  -- rdi                 : called function (only guaranteed when
  //                           extra_args requires it)
  //  -- rsp[0]              : return address
  //  -- rsp[8]              : last argument
  //  -- ...
  //  -- rsp[8 * argc]       : first argument (argc == rax)
  //  -- rsp[8 * (argc + 1)] : receiver
  // -----------------------------------

  // Insert extra arguments.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    num_extra_args = 1;
    __ PopReturnAddressTo(kScratchRegister);
    __ Push(rdi);
    __ PushReturnAddressFrom(kScratchRegister);
  } else {
    DCHECK(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects rax to contain the number of arguments
  // including the receiver and the extra arguments.
  __ addp(rax, Immediate(num_extra_args + 1));
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()), 1);
}
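
// Worked example (illustrative, not from the original source): with
// extra_args == NEEDS_CALLED_FUNCTION and, say, rax == 2 user arguments,
// the adaptor pushes rdi as one extra argument and then computes
//   rax = 2 + (1 + 1) = 4,
// i.e. two arguments plus the receiver plus the pushed function, which is
// the count JumpToExternalReference expects.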


static void CallRuntimePassFunction(
    MacroAssembler* masm, Runtime::FunctionId function_id) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack.
  __ Push(rdi);
  // Function is also the parameter to the runtime call.
  __ Push(rdi);

  __ CallRuntime(function_id, 1);
  // Restore the function to rdi.
  __ Pop(rdi);
}


static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ movp(kScratchRegister,
          FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movp(kScratchRegister,
          FieldOperand(kScratchRegister, SharedFunctionInfo::kCodeOffset));
  __ leap(kScratchRegister, FieldOperand(kScratchRegister, Code::kHeaderSize));
  __ jmp(kScratchRegister);
}
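
// Illustrative note on the address arithmetic above (not from the original
// source): FieldOperand(reg, off) compensates for the heap object tag, so
// with kHeapObjectTag == 1 the leap computes
//   entry = code_object_address - 1 + Code::kHeaderSize,
// i.e. the first instruction past the Code object's header, which the jmp
// then executes.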


static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
  __ leap(rax, FieldOperand(rax, Code::kHeaderSize));
  __ jmp(rax);
}


void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive. A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
  __ j(above_equal, &ok);

  CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
  GenerateTailCallToReturnedCode(masm);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_memento) {
  // ----------- S t a t e -------------
  //  -- rax: number of arguments
  //  -- rdi: constructor function
  //  -- rbx: allocation site or undefined
  //  -- rdx: original constructor
  // -----------------------------------

  // Should never create mementos for api functions.
  DCHECK(!is_api_function || !create_memento);

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ AssertUndefinedOrAllocationSite(rbx);
    __ Push(rbx);
    __ Integer32ToSmi(rax, rax);
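    // Illustrative sketch of the smi encoding assumed here (not from the
    // original source): on x64 a smi keeps its 32-bit payload in the upper
    // half of the word, so Integer32ToSmi is effectively rax <<= 32; an
    // argument count of 3 is stored on the stack as 0x0000000300000000.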
    __ Push(rax);
    __ Push(rdi);
    __ Push(rdx);

    // Try to allocate the object without transitioning into C code. If any of
    // the preconditions is not met, the code bails out to the runtime call.
    Label rt_call, allocated;
    if (FLAG_inline_new) {
      ExternalReference debug_step_in_fp =
          ExternalReference::debug_step_in_fp_address(masm->isolate());
      __ Move(kScratchRegister, debug_step_in_fp);
      __ cmpp(Operand(kScratchRegister, 0), Immediate(0));
      __ j(not_equal, &rt_call);

      // Fall back to runtime if the original constructor and function differ.
      __ cmpp(rdx, rdi);
      __ j(not_equal, &rt_call);

      // Verified that the constructor is a JSFunction.
      // Load the initial map and verify that it is in fact a map.
      __ movp(rax, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
      // Will both indicate a NULL and a Smi.
      DCHECK(kSmiTag == 0);
      __ JumpIfSmi(rax, &rt_call);
      // rax: initial map (if proven valid below)
      __ CmpObjectType(rax, MAP_TYPE, rbx);
      __ j(not_equal, &rt_call);

      // Check that the constructor is not constructing a JSFunction (see
      // comments in Runtime_NewObject in runtime.cc), in which case the
      // initial map's instance type would be JS_FUNCTION_TYPE.
      __ CmpInstanceType(rax, JS_FUNCTION_TYPE);
      __ j(equal, &rt_call);
      if (!is_api_function) {
        Label allocate;
        // The code below relies on these assumptions.
        STATIC_ASSERT(Map::Counter::kShift + Map::Counter::kSize == 32);
        // Check if slack tracking is enabled.
        __ movl(rsi, FieldOperand(rax, Map::kBitField3Offset));
        __ shrl(rsi, Immediate(Map::Counter::kShift));
        __ cmpl(rsi, Immediate(Map::kSlackTrackingCounterEnd));
        __ j(less, &allocate);
        // Decrease generous allocation count.
        __ subl(FieldOperand(rax, Map::kBitField3Offset),
                Immediate(1 << Map::Counter::kShift));

        __ cmpl(rsi, Immediate(Map::kSlackTrackingCounterEnd));
        __ j(not_equal, &allocate);

        __ Push(rax);
        __ Push(rdx);
        __ Push(rdi);

        __ Push(rdi);  // constructor
        __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);

        __ Pop(rdi);
        __ Pop(rdx);
        __ Pop(rax);
        __ movl(rsi, Immediate(Map::kSlackTrackingCounterEnd - 1));

        __ bind(&allocate);
      }

      // Now allocate the JSObject on the heap.
      __ movzxbp(rdi, FieldOperand(rax, Map::kInstanceSizeOffset));
      __ shlp(rdi, Immediate(kPointerSizeLog2));
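      // Illustrative arithmetic (not from the original source): the map's
      // instance size field is in words, so the shift converts it to bytes;
      // e.g. an instance size of 5 words becomes 5 << kPointerSizeLog2 == 40
      // bytes on x64.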
      if (create_memento) {
        __ addp(rdi, Immediate(AllocationMemento::kSize));
      }
      // rdi: size of new object
      __ Allocate(rdi,
                  rbx,
                  rdi,
                  no_reg,
                  &rt_call,
                  NO_ALLOCATION_FLAGS);
      Factory* factory = masm->isolate()->factory();
      // Allocated the JSObject, now initialize the fields.
      // rax: initial map
      // rbx: JSObject (not HeapObject tagged - the actual address).
      // rdi: start of next object (including memento if create_memento)
      __ movp(Operand(rbx, JSObject::kMapOffset), rax);
      __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
      __ movp(Operand(rbx, JSObject::kPropertiesOffset), rcx);
      __ movp(Operand(rbx, JSObject::kElementsOffset), rcx);
      // Set extra fields in the newly allocated object.
      // rax: initial map
      // rbx: JSObject
      // rdi: start of next object (including memento if create_memento)
      // rsi: slack tracking counter (non-API function case)
      __ leap(rcx, Operand(rbx, JSObject::kHeaderSize));
      __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
      if (!is_api_function) {
        Label no_inobject_slack_tracking;

        // Check if slack tracking is enabled.
        __ cmpl(rsi, Immediate(Map::kSlackTrackingCounterEnd));
        __ j(less, &no_inobject_slack_tracking);

        // Allocate object with a slack.
        __ movzxbp(
            rsi,
            FieldOperand(
                rax, Map::kInObjectPropertiesOrConstructorFunctionIndexOffset));
        __ movzxbp(rax, FieldOperand(rax, Map::kUnusedPropertyFieldsOffset));
        __ subp(rsi, rax);
        __ leap(rsi,
                Operand(rbx, rsi, times_pointer_size, JSObject::kHeaderSize));
        // rsi: offset of first field after pre-allocated fields
        if (FLAG_debug_code) {
          __ cmpp(rsi, rdi);
          __ Assert(less_equal,
                    kUnexpectedNumberOfPreAllocatedPropertyFields);
        }
        __ InitializeFieldsWithFiller(rcx, rsi, rdx);
        __ LoadRoot(rdx, Heap::kOnePointerFillerMapRootIndex);
        // Fill the remaining fields with one pointer filler map.

        __ bind(&no_inobject_slack_tracking);
      }
      if (create_memento) {
        __ leap(rsi, Operand(rdi, -AllocationMemento::kSize));
        __ InitializeFieldsWithFiller(rcx, rsi, rdx);

        // Fill in memento fields if necessary.
        // rsi: points to the allocated but uninitialized memento.
        __ Move(Operand(rsi, AllocationMemento::kMapOffset),
                factory->allocation_memento_map());
        // Get the cell or undefined.
        __ movp(rdx, Operand(rsp, 3 * kPointerSize));
        __ AssertUndefinedOrAllocationSite(rdx);
        __ movp(Operand(rsi, AllocationMemento::kAllocationSiteOffset), rdx);
      } else {
        __ InitializeFieldsWithFiller(rcx, rdi, rdx);
      }

      // Add the object tag to make the JSObject real, so that we can continue
      // and jump into the continuation code at any time from now on.
      // rbx: JSObject (untagged)
      __ orp(rbx, Immediate(kHeapObjectTag));

      // Continue with JSObject being successfully allocated
      // rbx: JSObject (tagged)
      __ jmp(&allocated);
    }

    // Allocate the new receiver object using the runtime call.
    // rdx: original constructor
    __ bind(&rt_call);
    int offset = kPointerSize;
    if (create_memento) {
      // Get the cell or allocation site.
      __ movp(rdi, Operand(rsp, kPointerSize * 3));
      __ Push(rdi);  // argument 1: allocation site
      offset += kPointerSize;
    }

    // Must restore rsi (context) and rdi (constructor) before calling runtime.
    __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
    __ movp(rdi, Operand(rsp, offset));
    __ Push(rdi);  // argument 2/1: constructor function
    __ Push(rdx);  // argument 3/2: original constructor
    if (create_memento) {
      __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 3);
    } else {
      __ CallRuntime(Runtime::kNewObject, 2);
    }
    __ movp(rbx, rax);  // store result in rbx

    // Runtime_NewObjectWithAllocationSite increments allocation count.
    // Skip the increment.
    Label count_incremented;
    if (create_memento) {
      __ jmp(&count_incremented);
    }

    // New object allocated.
    // rbx: newly allocated object
    __ bind(&allocated);

    if (create_memento) {
      __ movp(rcx, Operand(rsp, 3 * kPointerSize));
      __ Cmp(rcx, masm->isolate()->factory()->undefined_value());
      __ j(equal, &count_incremented);
      // rcx is an AllocationSite. We are creating a memento from it, so we
      // need to increment the memento create count.
      __ SmiAddConstant(
          FieldOperand(rcx, AllocationSite::kPretenureCreateCountOffset),
          Smi::FromInt(1));
      __ bind(&count_incremented);
    }

    // Restore the parameters.
    __ Pop(rdx);
    __ Pop(rdi);

    // Retrieve smi-tagged arguments count from the stack.
    __ movp(rax, Operand(rsp, 0));
    __ SmiToInteger32(rax, rax);

    // Push new.target onto the construct frame. This is stored just below the
    // receiver on the stack.
    __ Push(rdx);

    // Push the allocated receiver to the stack. We need two copies
    // because we may have to return the original one and the calling
    // conventions dictate that the called function pops the receiver.
    __ Push(rbx);
    __ Push(rbx);

    // Set up pointer to last argument.
    __ leap(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    Label loop, entry;
    __ movp(rcx, rax);
    __ jmp(&entry);
    __ bind(&loop);
    __ Push(Operand(rbx, rcx, times_pointer_size, 0));
    __ bind(&entry);
    __ decp(rcx);
    __ j(greater_equal, &loop);

    // Call the function.
    if (is_api_function) {
      __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      __ Call(code, RelocInfo::CODE_TARGET);
    } else {
      ParameterCount actual(rax);
      __ InvokeFunction(rdi, actual, CALL_FUNCTION, NullCallWrapper());
    }

    // Store offset of return address for deoptimizer.
    if (!is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7.
    Label use_receiver, exit;
    // If the result is a smi, it is *not* an object in the ECMA sense.
    __ JumpIfSmi(rax, &use_receiver);

    // If the type of the result (stored in its map) is less than
    // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
    __ j(above_equal, &exit);

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ movp(rax, Operand(rsp, 0));

    // Restore the arguments count and leave the construct frame. The
    // arguments count is stored below the receiver and the new.target.
    __ bind(&exit);
    __ movp(rbx, Operand(rsp, 2 * kPointerSize));

    // Leave construct frame.
  }

  // Remove caller arguments from the stack and return.
  __ PopReturnAddressTo(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ PushReturnAddressFrom(rcx);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->constructed_objects(), 1);
  __ ret(0);
}
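
// Illustrative arithmetic for the return sequence above (not from the
// original source): rbx holds the smi-tagged argument count; SmiToIndex
// untags it into an index register and the leap computes
//   rsp += argc * kPointerSize + 1 * kPointerSize,
// dropping the arguments plus the receiver. E.g. argc == 3 pops 32 bytes
// before the return address is pushed back.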


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false);
}


void Builtins::Generate_JSConstructStubForDerived(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax: number of arguments
  //  -- rdi: constructor function
  //  -- rbx: allocation site or undefined
  //  -- rdx: original constructor
  // -----------------------------------

  {
    FrameScope frame_scope(masm, StackFrame::CONSTRUCT);

    // Preserve allocation site.
    __ AssertUndefinedOrAllocationSite(rbx);
    __ Push(rbx);

    // Store a smi-tagged arguments count on the stack.
    __ Integer32ToSmi(rax, rax);
    __ Push(rax);
    __ SmiToInteger32(rax, rax);

    // Push new.target.
    __ Push(rdx);

    // receiver is the hole.
    __ Push(masm->isolate()->factory()->the_hole_value());

    // Set up pointer to last argument.
    __ leap(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    Label loop, entry;
    __ movp(rcx, rax);
    __ jmp(&entry);
    __ bind(&loop);
    __ Push(Operand(rbx, rcx, times_pointer_size, 0));
    __ bind(&entry);
    __ decp(rcx);
    __ j(greater_equal, &loop);

    // Handle step in.
    Label skip_step_in;
    ExternalReference debug_step_in_fp =
        ExternalReference::debug_step_in_fp_address(masm->isolate());
    __ Move(kScratchRegister, debug_step_in_fp);
    __ cmpp(Operand(kScratchRegister, 0), Immediate(0));
    __ j(equal, &skip_step_in);

    __ Push(rax);
    __ Push(rdi);
    __ Push(rdi);
    __ CallRuntime(Runtime::kHandleStepInForDerivedConstructors, 1);
    __ Pop(rdi);
    __ Pop(rax);

    __ bind(&skip_step_in);

    // Call the function.
    ParameterCount actual(rax);
    __ InvokeFunction(rdi, actual, CALL_FUNCTION, NullCallWrapper());

    // Restore context from the frame.
    __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));

    // Get arguments count, skipping over new.target.
    __ movp(rbx, Operand(rsp, kPointerSize));
  }  // Leave construct frame.

  // Remove caller arguments from the stack and return.
  __ PopReturnAddressTo(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ PushReturnAddressFrom(rcx);
  __ ret(0);
}


enum IsTagged { kRaxIsSmiTagged, kRaxIsUntaggedInt };


// Clobbers rcx, rdx, kScratchRegister; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm,
                                        const int calleeOffset,
                                        IsTagged rax_is_tagged) {
  // rax : the number of items to be pushed to the stack
  //
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
  __ movp(rcx, rsp);
  // Make rcx the space we have left. The stack might already be overflowed
  // here which will cause rcx to become negative.
  __ subp(rcx, kScratchRegister);
  // Make rdx the space we need for the array when it is unrolled onto the
  // stack.
  if (rax_is_tagged == kRaxIsSmiTagged) {
    __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2);
  } else {
    DCHECK(rax_is_tagged == kRaxIsUntaggedInt);
    __ movp(rdx, rax);
    __ shlq(rdx, Immediate(kPointerSizeLog2));
  }
  // Check if the arguments will overflow the stack.
  __ cmpp(rcx, rdx);
  __ j(greater, &okay);  // Signed comparison.

  // Out of stack space.
  __ Push(Operand(rbp, calleeOffset));
  if (rax_is_tagged == kRaxIsUntaggedInt) {
    __ Integer32ToSmi(rax, rax);
  }
  __ Push(rax);
  __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);

  __ bind(&okay);
}
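
// Illustrative sketch of the check above (not from the original source):
// with 1000 untagged arguments, rdx = 1000 << 3 = 8000 bytes of needed
// space; unless rcx (bytes left above the real stack limit) is strictly
// greater, the code pushes the callee and the smi-tagged count and raises
// STACK_OVERFLOW instead of growing past the limit.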


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Expects five C++ function parameters.
  // - Address entry (ignored)
  // - JSFunction* function
  // - Object* receiver
  // - int argc
  // - Object*** argv
  // (see Handle::Invoke in execution.cc).

  // Open a C++ scope for the FrameScope.
  {
    // Platform specific argument handling. After this, the stack contains
    // an internal frame and the pushed function and receiver, and
    // registers rax and rbx hold the argument count and argument array,
    // while rdi holds the function pointer and rsi the context.

#ifdef _WIN64
    // MSVC parameters in:
    // rcx        : entry (ignored)
    // rdx        : function
    // r8         : receiver
    // r9         : argc
    // [rsp+0x20] : argv

    // Clear the context before we push it when entering the internal frame.
    __ Set(rsi, 0);

    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Load the function context into rsi.
    __ movp(rsi, FieldOperand(rdx, JSFunction::kContextOffset));

    // Push the function and the receiver onto the stack.
    __ Push(rdx);
    __ Push(r8);

    // Load the number of arguments and setup pointer to the arguments.
    __ movp(rax, r9);
    // Load the previous frame pointer to access C argument on stack
    __ movp(kScratchRegister, Operand(rbp, 0));
    __ movp(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
    // Load the function pointer into rdi.
    __ movp(rdi, rdx);
#else  // _WIN64
    // GCC parameters in:
    // rdi : entry (ignored)
    // rsi : function
    // rdx : receiver
    // rcx : argc
    // r8  : argv

    __ movp(rdi, rsi);
    // rdi : function

    // Clear the context before we push it when entering the internal frame.
    __ Set(rsi, 0);

    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Push the function and receiver and setup the context.
    __ Push(rdi);
    __ Push(rdx);
    __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

    // Load the number of arguments and setup pointer to the arguments.
    __ movp(rax, rcx);
    __ movp(rbx, r8);
#endif  // _WIN64

    // Current stack contents:
    // [rsp + 2 * kPointerSize ... ] : Internal frame
    // [rsp + kPointerSize]          : function
    // [rsp]                         : receiver
    // Current register contents:
    // rax : argc
    // rbx : argv
    // rsi : context
    // rdi : function

    // Check if we have enough stack space to push all arguments.
    // The function is the first thing that was pushed above after entering
    // the internal frame.
    const int kFunctionOffset =
        InternalFrameConstants::kCodeOffset - kRegisterSize;
    // Expects argument count in rax. Clobbers rcx, rdx.
    Generate_CheckStackOverflow(masm, kFunctionOffset, kRaxIsUntaggedInt);

    // Copy arguments to the stack in a loop.
    // Register rbx points to array of pointers to handle locations.
    // Push the values of these handles.
    Label loop, entry;
    __ Set(rcx, 0);  // Set loop variable to 0.
    __ jmp(&entry, Label::kNear);
    __ bind(&loop);
    __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
    __ Push(Operand(kScratchRegister, 0));  // dereference handle
    __ addp(rcx, Immediate(1));
    __ bind(&entry);
    __ cmpp(rcx, rax);
    __ j(not_equal, &loop);
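    // Illustrative note (not from the original source): argv is an array of
    // handle locations, so each iteration loads Operand(rbx, rcx * 8) to get
    // a handle (an Object**) and then pushes Operand(kScratchRegister, 0),
    // the Object* it points at; rcx counts up until it equals rax.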

    // Invoke the code.
    if (is_construct) {
      // No type feedback cell is available
      __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
      // Expects rdi to hold function pointer.
      CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
      __ CallStub(&stub);
    } else {
      ParameterCount actual(rax);
      // Function must be in rdi.
      __ InvokeFunction(rdi, actual, CALL_FUNCTION, NullCallWrapper());
    }

    // Exit the internal frame. Notice that this also removes the empty
    // context and the function left on the stack by the code
    // invocation.
  }

  // TODO(X64): Is argument correct? Is there a receiver to remove?
  __ ret(1 * kPointerSize);  // Remove receiver.
}


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}


// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right. The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   o rdi: the JS function object being called
//   o rsi: our context
//   o rbp: the caller's frame pointer
//   o rsp: stack pointer (pointing to return address)
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-x64.h for its layout.
// TODO(rmcilroy): We will need to include the current bytecode pointer in the
// frame.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ pushq(rbp);  // Caller's frame pointer.
  __ movp(rbp, rsp);
  __ Push(rsi);  // Callee's context.
  __ Push(rdi);  // Callee's JS function.

  // Get the bytecode array from the function object and load it into r14.
  __ movp(r14, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movp(r14, FieldOperand(r14, SharedFunctionInfo::kFunctionDataOffset));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ AssertNotSmi(r14);
    __ CmpObjectType(r14, BYTECODE_ARRAY_TYPE, rax);
    __ Assert(equal, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size from the BytecodeArray object.
    __ movl(rcx, FieldOperand(r14, BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ movp(rdx, rsp);
    __ subp(rdx, rcx);
    __ CompareRoot(rdx, Heap::kRealStackLimitRootIndex);
    __ j(above_equal, &ok, Label::kNear);
    __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file
    // entries.
    // Note: there should always be at least one stack slot for the return
    // register in the register file.
    Label loop_header;
    __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
    __ bind(&loop_header);
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    __ Push(rdx);
    // Continue loop if not done.
    __ subp(rcx, Immediate(kPointerSize));
    __ j(not_equal, &loop_header, Label::kNear);
  }
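  // Illustrative arithmetic for the loop above (not from the original
  // source): for a frame size of 24 bytes, undefined is pushed three times,
  // rcx dropping by kPointerSize (8) each iteration until it reaches zero.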

  // TODO(rmcilroy): List of things not currently dealt with here but done in
  // fullcodegen's prologue:
  //  - Support profiler (specifically profiling_counter).
  //  - Call ProfileEntryHookStub when isolate has a function_entry_hook.
  //  - Allow simulator stop operations if FLAG_stop_at is set.
  //  - Deal with sloppy mode functions which need to replace the
  //    receiver with the global proxy when called as functions (without an
  //    explicit receiver object).
  //  - Code aging of the BytecodeArray object.
  //  - Supporting FLAG_trace.
  //
  // The following items are also not done here, and will probably be done
  // using explicit bytecodes instead:
  //  - Allocating a new local context if applicable.
  //  - Setting up a local binding to the this function, which is used in
  //    derived constructors with super calls.
  //  - Setting new.target if required.
  //  - Dealing with REST parameters (only if
  //    https://codereview.chromium.org/1235153006 doesn't land by then).
  //  - Dealing with argument objects.

  // Perform stack guard check.
  {
    Label ok;
    __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
    __ j(above_equal, &ok, Label::kNear);
    __ CallRuntime(Runtime::kStackGuard, 0);
    __ bind(&ok);
  }

  // Load bytecode offset and dispatch table into registers.
  __ movp(r12, Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));
  __ LoadRoot(r15, Heap::kInterpreterTableRootIndex);
  __ addp(r15, Immediate(FixedArray::kHeaderSize - kHeapObjectTag));

  // Dispatch to the first bytecode handler for the function.
  __ movzxbp(rax, Operand(r14, r12, times_1, 0));
  __ movp(rax, Operand(r15, rax, times_pointer_size, 0));
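  // Illustrative sketch (not from the original source): if the first
  // bytecode byte at r14 + r12 is, say, 7, rax now holds entry 7 of the
  // dispatch table, the tagged Code object for that bytecode's handler;
  // the lines below convert it to a raw entry address before transferring
  // control.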
  // TODO(rmcilroy): Make dispatch table point to code entries to avoid
  // untagging and header removal.
  __ addp(rax, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ call(rax);
}


void Builtins::Generate_InterpreterExitTrampoline(MacroAssembler* masm) {
  // TODO(rmcilroy): List of things not currently dealt with here but done in
  // fullcodegen's EmitReturnSequence.
  //  - Supporting FLAG_trace for Runtime::TraceExit.
  //  - Support profiler (specifically decrementing profiling_counter
  //    appropriately and calling out to HandleInterrupts if necessary).

  // Load the return value (stored in interpreter register 0) into rax.
  __ movp(rax, Operand(rbp, -kPointerSize -
                                StandardFrameConstants::kFixedFrameSizeFromFp));
  // Leave the frame (also dropping the register file).
  __ leave();
  // Return dropping receiver + arguments.
  // TODO(rmcilroy): Get number of arguments from BytecodeArray.
  __ Ret(1 * kPointerSize, rcx);
}


void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kCompileLazy);
  GenerateTailCallToReturnedCode(masm);
}


static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack.
  __ Push(rdi);
  // Function is also the parameter to the runtime call.
  __ Push(rdi);
  // Whether to compile in a background thread.
  __ Push(masm->isolate()->factory()->ToBoolean(concurrent));

  __ CallRuntime(Runtime::kCompileOptimized, 2);
  // Restore the function to rdi.
  __ Pop(rdi);
}


void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  CallCompileOptimized(masm, false);
  GenerateTailCallToReturnedCode(masm);
}


void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  CallCompileOptimized(masm, true);
  GenerateTailCallToReturnedCode(masm);
}


static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do
  // stack crawls in MakeCodeYoung. This seems a bit fragile.

  // Re-execute the code that was patched back to the young age when
  // the stub returns.
  __ subp(Operand(rsp, 0), Immediate(5));
  __ Pushad();
  __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
  __ movp(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
  {  // NOLINT
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(2);
    __ CallCFunction(
        ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  }
  __ Popad();
  __ ret(0);
}


#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                  \
  void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }                                                           \
  void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(  \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR


void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the
  // fact that make_code_young doesn't do any garbage collection which allows
  // us to save/restore the registers without worrying about which of them
  // contain pointers.
  __ Pushad();
  __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
  __ movp(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
  __ subp(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength));
  {  // NOLINT
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(2);
    __ CallCFunction(
        ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
        2);
  }
  __ Popad();

  // Perform prologue operations usually performed by the young code stub.
  __ PopReturnAddressTo(kScratchRegister);
  __ pushq(rbp);  // Caller's frame pointer.
  __ movp(rbp, rsp);
  __ Push(rsi);  // Callee's context.
  __ Push(rdi);  // Callee's JS Function.
  __ PushReturnAddressFrom(kScratchRegister);

  // Jump to point after the code-age stub.
  __ ret(0);
}


void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}


void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
  Generate_MarkCodeAsExecutedOnce(masm);
}


static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    __ Pushad();
    __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
    __ Popad();
    // Tear down internal frame.
  }

  __ DropUnderReturnAddress(1);  // Ignore state offset
  __ ret(0);  // Return to IC Miss stub, continuation still on stack.
}


void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}


void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}


static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Pass the deoptimization type to the runtime system.
    __ Push(Smi::FromInt(static_cast<int>(type)));

    __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
    // Tear down internal frame.
  }

  // Get the full codegen state from the stack and untag it.
  __ SmiToInteger32(kScratchRegister, Operand(rsp, kPCOnStackSize));

  // Switch on the state.
  Label not_no_registers, not_tos_rax;
  __ cmpp(kScratchRegister, Immediate(FullCodeGenerator::NO_REGISTERS));
  __ j(not_equal, &not_no_registers, Label::kNear);
  __ ret(1 * kPointerSize);  // Remove state.

  __ bind(&not_no_registers);
  __ movp(rax, Operand(rsp, kPCOnStackSize + kPointerSize));
  __ cmpp(kScratchRegister, Immediate(FullCodeGenerator::TOS_REG));
  __ j(not_equal, &not_tos_rax, Label::kNear);
  __ ret(2 * kPointerSize);  // Remove state, rax.

  __ bind(&not_tos_rax);
  __ Abort(kNoCasesLeft);
}


void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}


void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  // Stack Layout:
  // rsp[0]           : Return address
  // rsp[8]           : Argument n
  // rsp[16]          : Argument n-1
  //  ...
  // rsp[8 * n]       : Argument 1
  // rsp[8 * (n + 1)] : Receiver (function to call)
  //
  // rax contains the number of arguments, n, not counting the receiver.
  //
  // 1. Make sure we have at least one argument.
  {
    Label done;
    __ testp(rax, rax);
    __ j(not_zero, &done);
    __ PopReturnAddressTo(rbx);
    __ Push(masm->isolate()->factory()->undefined_value());
    __ PushReturnAddressFrom(rbx);
    __ incp(rax);
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack, check
  //    if it is a function.
  Label slow, non_function;
  StackArgumentsAccessor args(rsp, rax);
  __ movp(rdi, args.GetReceiverOperand());
  __ JumpIfSmi(rdi, &non_function);
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
  __ j(not_equal, &slow);

  // 3a. Patch the first argument if necessary when calling a function.
  Label shift_arguments;
  __ Set(rdx, 0);  // indicate regular JS_FUNCTION
  { Label convert_to_object, use_global_proxy, patch_receiver;
    // Change context eagerly in case we need the global receiver.
    __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

    // Do not transform the receiver for strict mode functions.
    __ movp(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ testb(FieldOperand(rbx, SharedFunctionInfo::kStrictModeByteOffset),
             Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
    __ j(not_equal, &shift_arguments);

    // Do not transform the receiver for natives.
    // SharedFunctionInfo is already loaded into rbx.
    __ testb(FieldOperand(rbx, SharedFunctionInfo::kNativeByteOffset),
             Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
    __ j(not_zero, &shift_arguments);

    // Compute the receiver in sloppy mode.
    __ movp(rbx, args.GetArgumentOperand(1));
    __ JumpIfSmi(rbx, &convert_to_object, Label::kNear);

    __ CompareRoot(rbx, Heap::kNullValueRootIndex);
    __ j(equal, &use_global_proxy);
    __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
    __ j(equal, &use_global_proxy);

    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx);
    __ j(above_equal, &shift_arguments);

    __ bind(&convert_to_object);
    {
      // Enter an internal frame in order to preserve argument count.
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Integer32ToSmi(rax, rax);
      __ Push(rax);

      __ movp(rax, rbx);
      ToObjectStub stub(masm->isolate());
      __ CallStub(&stub);
      __ movp(rbx, rax);
      __ Set(rdx, 0);  // indicate regular JS_FUNCTION

      __ Pop(rax);
      __ SmiToInteger32(rax, rax);
    }

    // Restore the function to rdi.
    __ movp(rdi, args.GetReceiverOperand());
    __ jmp(&patch_receiver, Label::kNear);

    __ bind(&use_global_proxy);
    __ movp(rbx,
            Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
    __ movp(rbx, FieldOperand(rbx, GlobalObject::kGlobalProxyOffset));

    __ bind(&patch_receiver);
    __ movp(args.GetArgumentOperand(1), rbx);

    __ jmp(&shift_arguments);
  }

  // 3b. Check for function proxy.
  __ bind(&slow);
  __ Set(rdx, 1);  // indicate function proxy
  __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE);
  __ j(equal, &shift_arguments);
  __ bind(&non_function);
  __ Set(rdx, 2);  // indicate non-function

  // 3c. Patch the first argument when calling a non-function. The
  //     CALL_NON_FUNCTION builtin expects the non-function callee as
  //     receiver, so overwrite the first argument which will ultimately
  //     become the receiver.
  __ movp(args.GetArgumentOperand(1), rdi);

  // 4. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver). Adjust argument count to make
  //    the original first argument the new receiver.
  __ bind(&shift_arguments);
  {
    Label loop;
    __ movp(rcx, rax);
    StackArgumentsAccessor args(rsp, rcx);
    __ bind(&loop);
    __ movp(rbx, args.GetArgumentOperand(1));
    __ movp(args.GetArgumentOperand(0), rbx);
    __ decp(rcx);
    __ j(not_zero, &loop);  // While non-zero.
    __ DropUnderReturnAddress(1, rbx);  // Drop one slot under return address.
    __ decp(rax);  // One fewer argument (first argument is new receiver).
  }
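  // Illustrative sketch (not from the original source): with rax == 2 and
  // the stack holding [ret, arg2, arg1, receiver], the loop copies each
  // argument one slot down (overwriting the original receiver),
  // DropUnderReturnAddress removes the freed slot under the return address,
  // and rax becomes 1: the old first argument is the new receiver.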

  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
  //     or a function proxy via CALL_FUNCTION_PROXY.
  { Label function, non_proxy;
    __ testp(rdx, rdx);
    __ j(zero, &function);
    // Expected number of arguments is 0 for CALL_NON_FUNCTION.
    __ Set(rbx, 0);
    __ cmpp(rdx, Immediate(1));
    __ j(not_equal, &non_proxy);

    __ PopReturnAddressTo(rdx);
    __ Push(rdi);  // re-add proxy object as additional argument
    __ PushReturnAddressFrom(rdx);
    __ incp(rax);
    __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
    __ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
           RelocInfo::CODE_TARGET);

    __ bind(&non_proxy);
    __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
    __ bind(&function);
  }

  // 5b. Get the code to call from the function and check that the number of
  //     expected arguments matches what we're providing. If so, jump
  //     (tail-call) to the code in register rdx without checking arguments.
  __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ LoadSharedFunctionInfoSpecialField(rbx, rdx,
      SharedFunctionInfo::kFormalParameterCountOffset);
  __ movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
  __ cmpp(rax, rbx);
  __ j(not_equal,
       masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
       RelocInfo::CODE_TARGET);

  ParameterCount expected(0);
  __ InvokeCode(rdx, expected, expected, JUMP_FUNCTION, NullCallWrapper());
}


static void Generate_PushAppliedArguments(MacroAssembler* masm,
                                          const int argumentsOffset,
                                          const int indexOffset,
                                          const int limitOffset) {
  Register receiver = LoadDescriptor::ReceiverRegister();
  Register key = LoadDescriptor::NameRegister();
  Register slot = LoadDescriptor::SlotRegister();
  Register vector = LoadWithVectorDescriptor::VectorRegister();

  // Copy all arguments from the array to the stack.
  Label entry, loop;
  __ movp(key, Operand(rbp, indexOffset));
  __ jmp(&entry);
  __ bind(&loop);
  __ movp(receiver, Operand(rbp, argumentsOffset));  // load arguments

  // Use inline caching to speed up access to arguments.
  Code::Kind kinds[] = {Code::KEYED_LOAD_IC};
  FeedbackVectorSpec spec(0, 1, kinds);
  Handle<TypeFeedbackVector> feedback_vector =
      masm->isolate()->factory()->NewTypeFeedbackVector(&spec);
  int index = feedback_vector->GetIndex(FeedbackVectorICSlot(0));
  __ Move(slot, Smi::FromInt(index));
  __ Move(vector, feedback_vector);
  Handle<Code> ic =
      KeyedLoadICStub(masm->isolate(), LoadICState(kNoExtraICState)).GetCode();
  __ Call(ic, RelocInfo::CODE_TARGET);
  // It is important that we do not have a test instruction after the
  // call. A test instruction after the call is used to indicate that
  // we have generated an inline version of the keyed load. In this
  // case, we know that we are not generating a test instruction next.

  // Push the nth argument.
  __ Push(rax);

  // Update the index on the stack and in register key.
  __ movp(key, Operand(rbp, indexOffset));
  __ SmiAddConstant(key, key, Smi::FromInt(1));
  __ movp(Operand(rbp, indexOffset), key);

  __ bind(&entry);
  __ cmpp(key, Operand(rbp, limitOffset));
  __ j(not_equal, &loop);

  // On exit, the pushed arguments count is in rax, untagged
  __ SmiToInteger64(rax, key);
}


// Used by FunctionApply and ReflectApply
static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) {
  const int kFormalParameters = targetIsArgument ? 3 : 2;
  const int kStackSize = kFormalParameters + 1;

  // Stack at entry:
  // rsp     : return address
  // rsp[8]  : arguments
  // rsp[16] : receiver ("this")
  // rsp[24] : function
  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);
    // Stack frame:
    // rbp     : Old base pointer
    // rbp[8]  : return address
    // rbp[16] : function arguments
    // rbp[24] : receiver
    // rbp[32] : function
    static const int kArgumentsOffset = kFPOnStackSize + kPCOnStackSize;
    static const int kReceiverOffset = kArgumentsOffset + kPointerSize;
    static const int kFunctionOffset = kReceiverOffset + kPointerSize;
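    // Illustrative arithmetic (not from the original source): with
    // kFPOnStackSize == kPCOnStackSize == kPointerSize == 8, these constants
    // come out as kArgumentsOffset == 16, kReceiverOffset == 24 and
    // kFunctionOffset == 32, matching the rbp-relative layout in the comment
    // above.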

    __ Push(Operand(rbp, kFunctionOffset));
    __ Push(Operand(rbp, kArgumentsOffset));
    if (targetIsArgument) {
      __ InvokeBuiltin(Builtins::REFLECT_APPLY_PREPARE, CALL_FUNCTION);
    } else {
      __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
    }

    Generate_CheckStackOverflow(masm, kFunctionOffset, kRaxIsSmiTagged);

    // Push current index and limit.
    const int kLimitOffset =
        StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize;
    const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
    __ Push(rax);  // limit
    __ Push(Immediate(0));  // index

    // Get the receiver.
    __ movp(rbx, Operand(rbp, kReceiverOffset));

    // Check that the function is a JS function (otherwise it must be a proxy).
    Label push_receiver;
    __ movp(rdi, Operand(rbp, kFunctionOffset));
    __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
    __ j(not_equal, &push_receiver);

    // Change context eagerly to get the right global object if necessary.
    __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

    // Do not transform the receiver for strict mode functions.
    Label call_to_object, use_global_proxy;
    __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ testb(FieldOperand(rdx, SharedFunctionInfo::kStrictModeByteOffset),
             Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
    __ j(not_equal, &push_receiver);

    // Do not transform the receiver for natives.
    __ testb(FieldOperand(rdx, SharedFunctionInfo::kNativeByteOffset),
             Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
    __ j(not_equal, &push_receiver);

    // Compute the receiver in sloppy mode.
    __ JumpIfSmi(rbx, &call_to_object, Label::kNear);
    __ CompareRoot(rbx, Heap::kNullValueRootIndex);
    __ j(equal, &use_global_proxy);
    __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
    __ j(equal, &use_global_proxy);

    // If given receiver is already a JavaScript object then there's no
    // reason for converting it.
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx);
    __ j(above_equal, &push_receiver);

    // Convert the receiver to an object.
    __ bind(&call_to_object);
    __ movp(rax, rbx);
    ToObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    __ movp(rbx, rax);
    __ jmp(&push_receiver, Label::kNear);

    __ bind(&use_global_proxy);
    __ movp(rbx,
            Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
    __ movp(rbx, FieldOperand(rbx, GlobalObject::kGlobalProxyOffset));

    // Push the receiver.
    __ bind(&push_receiver);
    __ Push(rbx);

    // Loop over the arguments array, pushing each value to the stack
    Generate_PushAppliedArguments(
        masm, kArgumentsOffset, kIndexOffset, kLimitOffset);

    // Call the function.
    Label call_proxy;
    ParameterCount actual(rax);
    __ movp(rdi, Operand(rbp, kFunctionOffset));
    __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
    __ j(not_equal, &call_proxy);
    __ InvokeFunction(rdi, actual, CALL_FUNCTION, NullCallWrapper());

    frame_scope.GenerateLeaveFrame();
    __ ret(kStackSize * kPointerSize);  // remove this, receiver, and arguments

    // Call the function proxy.
    __ bind(&call_proxy);
    __ Push(rdi);  // add function proxy as last argument
    __ incp(rax);
    __ Set(rbx, 0);
    __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
    __ call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    // Leave internal frame.
  }
  __ ret(kStackSize * kPointerSize);  // remove this, receiver, and arguments
}


// Used by ReflectConstruct
static void Generate_ConstructHelper(MacroAssembler* masm) {
  const int kFormalParameters = 3;
  const int kStackSize = kFormalParameters + 1;

  // Stack at entry:
  // rsp     : return address
  // rsp[8]  : original constructor (new.target)
  // rsp[16] : arguments
  // rsp[24] : constructor
  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);
    // Stack frame:
    // rbp     : Old base pointer
    // rbp[8]  : return address
    // rbp[16] : original constructor (new.target)
    // rbp[24] : arguments
    // rbp[32] : constructor
    static const int kNewTargetOffset = kFPOnStackSize + kPCOnStackSize;
    static const int kArgumentsOffset = kNewTargetOffset + kPointerSize;
    static const int kFunctionOffset = kArgumentsOffset + kPointerSize;

    // If newTarget is not supplied, set it to constructor
    Label validate_arguments;
    __ movp(rax, Operand(rbp, kNewTargetOffset));
    __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
    __ j(not_equal, &validate_arguments, Label::kNear);
    __ movp(rax, Operand(rbp, kFunctionOffset));
    __ movp(Operand(rbp, kNewTargetOffset), rax);

    // Validate arguments
    __ bind(&validate_arguments);
    __ Push(Operand(rbp, kFunctionOffset));
    __ Push(Operand(rbp, kArgumentsOffset));
    __ Push(Operand(rbp, kNewTargetOffset));
    __ InvokeBuiltin(Builtins::REFLECT_CONSTRUCT_PREPARE, CALL_FUNCTION);

    Generate_CheckStackOverflow(masm, kFunctionOffset, kRaxIsSmiTagged);

    // Push current index and limit.
    const int kLimitOffset =
        StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize;
    const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
    __ Push(rax);  // limit
    __ Push(Immediate(0));  // index
    // Push the constructor function as callee.
    __ Push(Operand(rbp, kFunctionOffset));

    // Loop over the arguments array, pushing each value to the stack
    Generate_PushAppliedArguments(
        masm, kArgumentsOffset, kIndexOffset, kLimitOffset);

    // Use undefined feedback vector
    __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
    __ movp(rdi, Operand(rbp, kFunctionOffset));
    __ movp(rcx, Operand(rbp, kNewTargetOffset));

    // Call the function.
    CallConstructStub stub(masm->isolate(), SUPER_CONSTRUCTOR_CALL);
    __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);

    // Leave internal frame.
  }
  // remove this, target, arguments and newTarget
  __ ret(kStackSize * kPointerSize);
}


void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  Generate_ApplyHelper(masm, false);
}


void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  Generate_ApplyHelper(masm, true);
}


void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  Generate_ConstructHelper(masm);
}


void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_array_code;

  // Get the InternalArray function.
  __ LoadGlobalFunction(Context::INTERNAL_ARRAY_FUNCTION_INDEX, rdi);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, kUnexpectedInitialMapForInternalArrayFunction);
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // tail call a stub
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rdi);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForArrayFunction);
  }

  // Run the native code for the Array function called as a normal function.
  // tail call a stub
  __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax                 : number of arguments
  //  -- rdi                 : constructor function
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_ctor_calls(), 1);

  if (FLAG_debug_code) {
    __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, rcx);
    __ cmpp(rdi, rcx);
    __ Assert(equal, kUnexpectedStringFunction);
  }

  // Load the first argument into rax and get rid of the rest
  // (including the receiver).
  StackArgumentsAccessor args(rsp, rax);
  Label no_arguments;
  __ testp(rax, rax);
  __ j(zero, &no_arguments);
  __ movp(rbx, args.GetArgumentOperand(1));
  __ PopReturnAddressTo(rcx);
  __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
  __ PushReturnAddressFrom(rcx);
  __ movp(rax, rbx);

  // Lookup the argument in the number to string cache.
  Label not_cached, argument_is_string;
  __ LookupNumberStringCache(rax,  // Input.
                             rbx,  // Result.
                             rcx,  // Scratch 1.
                             rdx,  // Scratch 2.
                             &not_cached);
  __ IncrementCounter(counters->string_ctor_cached_number(), 1);
  __ bind(&argument_is_string);

  // ----------- S t a t e -------------
  //  -- rbx    : argument converted to string
  //  -- rdi    : constructor function
  //  -- rsp[0] : return address
  // -----------------------------------

  // Allocate a JSValue and put the tagged pointer into rax.
  Label gc_required;
  __ Allocate(JSValue::kSize,
              rax,  // Result.
              rcx,  // New allocation top (we ignore it).
              no_reg,
              &gc_required,
              TAG_OBJECT);

  // Set the map.
  __ LoadGlobalFunctionInitialMap(rdi, rcx);
  if (FLAG_debug_code) {
    __ cmpb(FieldOperand(rcx, Map::kInstanceSizeOffset),
            Immediate(JSValue::kSize >> kPointerSizeLog2));
    __ Assert(equal, kUnexpectedStringWrapperInstanceSize);
    __ cmpb(FieldOperand(rcx, Map::kUnusedPropertyFieldsOffset), Immediate(0));
    __ Assert(equal, kUnexpectedUnusedPropertiesOfStringWrapper);
  }
  __ movp(FieldOperand(rax, HeapObject::kMapOffset), rcx);

  // Set properties and elements.
  __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
  __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), rcx);
  __ movp(FieldOperand(rax, JSObject::kElementsOffset), rcx);

  // Set the value.
  __ movp(FieldOperand(rax, JSValue::kValueOffset), rbx);

  // Ensure the object is fully initialized.
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
1547 // We're done. Return.
1550 // The argument was not found in the number to string cache. Check
1551 // if it's a string already before calling the conversion builtin.
1552 Label convert_argument;
1553 __ bind(¬_cached);
1554 STATIC_ASSERT(kSmiTag == 0);
1555 __ JumpIfSmi(rax, &convert_argument);
1556 Condition is_string = masm->IsObjectStringType(rax, rbx, rcx);
1557 __ j(NegateCondition(is_string), &convert_argument);
1559 __ IncrementCounter(counters->string_ctor_string_value(), 1);
1560 __ jmp(&argument_is_string);
1562 // Invoke the conversion builtin and put the result into rbx.
1563 __ bind(&convert_argument);
1564 __ IncrementCounter(counters->string_ctor_conversions(), 1);
1566 FrameScope scope(masm, StackFrame::INTERNAL);
1567 __ Push(rdi); // Preserve the function.
1569 __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
1573 __ jmp(&argument_is_string);
1575 // Load the empty string into rbx, remove the receiver from the
1576 // stack, and jump back to the case where the argument is a string.
1577 __ bind(&no_arguments);
1578 __ LoadRoot(rbx, Heap::kempty_stringRootIndex);
1579 __ PopReturnAddressTo(rcx);
1580 __ leap(rsp, Operand(rsp, kPointerSize));
1581 __ PushReturnAddressFrom(rcx);
1582 __ jmp(&argument_is_string);
1584 // At this point the argument is already a string. Call runtime to
1585 // create a string wrapper.
1586 __ bind(&gc_required);
1587 __ IncrementCounter(counters->string_ctor_gc_required(), 1);
1589 FrameScope scope(masm, StackFrame::INTERNAL);
1591 __ CallRuntime(Runtime::kNewStringWrapper, 1);


static void ArgumentsAdaptorStackCheck(MacroAssembler* masm,
                                       Label* stack_overflow) {
  // ----------- S t a t e -------------
  //  -- rax : actual number of arguments
  //  -- rbx : expected number of arguments
  //  -- rdi : function (passed through to callee)
  // -----------------------------------
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  __ LoadRoot(rdx, Heap::kRealStackLimitRootIndex);
  __ movp(rcx, rsp);
  // Make rcx the space we have left. The stack might already be overflowed
  // here which will cause rcx to become negative.
  __ subp(rcx, rdx);
  // Make rdx the space we need for the array when it is unrolled onto the
  // stack.
  __ movp(rdx, rbx);
  __ shlp(rdx, Immediate(kPointerSizeLog2));
  // Check if the arguments will overflow the stack.
  __ cmpp(rcx, rdx);
  __ j(less_equal, stack_overflow);  // Signed comparison.
}


static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ pushq(rbp);
  __ movp(rbp, rsp);

  // Store the arguments adaptor context sentinel.
  __ Push(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));

  // Push the function on the stack.
  __ Push(rdi);

  // Preserve the number of arguments on the stack. Must preserve rax,
  // rbx and rcx because these registers are used when copying the
  // arguments and the receiver.
  __ Integer32ToSmi(r8, rax);
  __ Push(r8);
}


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // Retrieve the number of arguments from the stack. Number is a Smi.
  __ movp(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));

  // Leave the frame.
  __ movp(rsp, rbp);
  __ popq(rbp);

  // Remove caller arguments from the stack.
  __ PopReturnAddressTo(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ PushReturnAddressFrom(rcx);
}


void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : actual number of arguments
  //  -- rbx : expected number of arguments
  //  -- rdi : function (passed through to callee)
  // -----------------------------------

  Label invoke, dont_adapt_arguments;
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->arguments_adaptors(), 1);

  Label stack_overflow;
  ArgumentsAdaptorStackCheck(masm, &stack_overflow);

  Label enough, too_few;
  __ movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
  __ cmpp(rax, rbx);
  __ j(less, &too_few);
  __ cmpp(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ j(equal, &dont_adapt_arguments);

  {  // Enough parameters: Actual >= expected.
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);

    // Copy receiver and all expected arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ leap(rax, Operand(rbp, rax, times_pointer_size, offset));
    __ Set(r8, -1);  // account for receiver

    Label copy;
    __ bind(&copy);
    __ incp(r8);
    __ Push(Operand(rax, 0));
    __ subp(rax, Immediate(kPointerSize));
    __ cmpp(r8, rbx);
    __ j(less, &copy);
    __ jmp(&invoke);
  }

  {  // Too few parameters: Actual < expected.
    __ bind(&too_few);

    // If the function is strong we need to throw an error.
    Label no_strong_error;
    __ movp(kScratchRegister,
            FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ testb(FieldOperand(kScratchRegister,
                          SharedFunctionInfo::kStrongModeByteOffset),
             Immediate(1 << SharedFunctionInfo::kStrongModeBitWithinByte));
    __ j(equal, &no_strong_error, Label::kNear);

    // What we really care about is the required number of arguments.

    if (kPointerSize == kInt32Size) {
      __ movp(
          kScratchRegister,
          FieldOperand(kScratchRegister, SharedFunctionInfo::kLengthOffset));
      __ SmiToInteger32(kScratchRegister, kScratchRegister);
    } else {
      // See comment near kLengthOffset in src/objects.h
      __ movsxlq(
          kScratchRegister,
          FieldOperand(kScratchRegister, SharedFunctionInfo::kLengthOffset));
      __ shrq(kScratchRegister, Immediate(1));
    }

    __ cmpp(rax, kScratchRegister);
    __ j(greater_equal, &no_strong_error, Label::kNear);

    {
      FrameScope frame(masm, StackFrame::MANUAL);
      EnterArgumentsAdaptorFrame(masm);
      __ CallRuntime(Runtime::kThrowStrongModeTooFewArguments, 0);
    }

    __ bind(&no_strong_error);
    EnterArgumentsAdaptorFrame(masm);

    // Copy receiver and all actual arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ leap(rdi, Operand(rbp, rax, times_pointer_size, offset));
    __ Set(r8, -1);  // account for receiver

    Label copy;
    __ bind(&copy);
    __ incp(r8);
    __ Push(Operand(rdi, 0));
    __ subp(rdi, Immediate(kPointerSize));
    __ cmpp(r8, rax);
    __ j(less, &copy);

    // Fill remaining expected arguments with undefined values.
    Label fill;
    __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
    __ bind(&fill);
    __ incp(r8);
    __ Push(kScratchRegister);
    __ cmpp(r8, rbx);
    __ j(less, &fill);

    // Restore function pointer.
    __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  }

  // Call the entry point.
  __ bind(&invoke);
  __ call(rdx);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Leave frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ ret(0);

  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ jmp(rdx);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    EnterArgumentsAdaptorFrame(masm);
    __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
    __ int3();
  }
}


void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ Push(rax);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
  }

  Label skip;
  // If the code object is null, just return to the unoptimized code.
  __ cmpp(rax, Immediate(0));
  __ j(not_equal, &skip, Label::kNear);
  __ ret(0);

  __ bind(&skip);

  // Load deoptimization data from the code object.
  __ movp(rbx, Operand(rax, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  __ SmiToInteger32(rbx, Operand(rbx, FixedArray::OffsetOfElementAt(
      DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));

  // Compute the target address = code_obj + header_size + osr_offset
  __ leap(rax, Operand(rax, rbx, times_1, Code::kHeaderSize - kHeapObjectTag));
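  // Illustrative arithmetic (not from the original source): if the code
  // object is at address C and the deopt data records an OSR pc offset of
  // 0x40, the computed entry is C + 0x40 + Code::kHeaderSize - 1, i.e. the
  // instruction 0x40 bytes past the start of the untagged instruction
  // stream.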

  // Overwrite the return address on the stack.
  __ movq(StackOperandForReturnAddress(0), rax);

  // And "return" to the OSR entry point of the function.
  __ ret(0);
}


void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  // We check the stack limit as indicator that recompilation might be done.
  Label ok;
  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
  __ j(above_equal, &ok);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kStackGuard, 0);
  }
  __ jmp(masm->isolate()->builtins()->OnStackReplacement(),
         RelocInfo::CODE_TARGET);

  __ bind(&ok);
  __ ret(0);
}

#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_X64