1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
9 #include "src/code-factory.h"
10 #include "src/codegen.h"
11 #include "src/deoptimizer.h"
12 #include "src/full-codegen.h"
18 #define __ ACCESS_MASM(masm)
// Emits the adaptor that routes a builtin call into a C++ runtime function:
// optionally inserts extra arguments under the return address, adjusts rax to
// the runtime calling convention (argc including receiver + extras), and
// tail-calls through JumpToExternalReference.
// NOTE(review): this view is a sampled chunk — the `CFunctionId id` parameter
// line (referenced below at the ExternalReference) and the Push emitted
// between PopReturnAddressTo/PushReturnAddressFrom are not visible here, as
// is the `} else {` joining the NEEDS_CALLED_FUNCTION and NO_EXTRA_ARGUMENTS
// arms. Code kept byte-identical; confirm against the full file.
21 void Builtins::Generate_Adaptor(MacroAssembler* masm,
23 BuiltinExtraArguments extra_args) {
24 // ----------- S t a t e -------------
25 // -- rax : number of arguments excluding receiver
26 // -- rdi : called function (only guaranteed when
27 // extra_args requires it)
29 // -- rsp[0] : return address
30 // -- rsp[8] : last argument
32 // -- rsp[8 * argc] : first argument (argc == rax)
33 // -- rsp[8 * (argc + 1)] : receiver
34 // -----------------------------------
36 // Insert extra arguments.
37 int num_extra_args = 0;
38 if (extra_args == NEEDS_CALLED_FUNCTION) {
// NOTE(review): the lines that bump num_extra_args and push the called
// function between the two return-address moves are elided in this chunk.
40 __ PopReturnAddressTo(kScratchRegister);
42 __ PushReturnAddressFrom(kScratchRegister);
44 DCHECK(extra_args == NO_EXTRA_ARGUMENTS);
47 // JumpToExternalReference expects rax to contain the number of arguments
48 // including the receiver and the extra arguments.
49 __ addp(rax, Immediate(num_extra_args + 1));
50 __ JumpToExternalReference(ExternalReference(id, masm->isolate()), 1);
// Calls the given runtime function with the current JSFunction (rdi) as its
// single argument, inside a fresh INTERNAL frame. Used by the lazy-compile
// and optimization-queue builtins below; the runtime call's result is left
// in rax for GenerateTailCallToReturnedCode.
// NOTE(review): the Push(rdi) lines implied by the two comments below are
// elided in this sampled chunk — code kept byte-identical.
54 static void CallRuntimePassFunction(
55 MacroAssembler* masm, Runtime::FunctionId function_id) {
56 FrameScope scope(masm, StackFrame::INTERNAL);
57 // Push a copy of the function onto the stack.
59 // Function is also the parameter to the runtime call.
62 __ CallRuntime(function_id, 1);
// Tail-calls the unoptimized code attached to the current function's
// SharedFunctionInfo: loads rdi->shared->code, skips the Code header to get
// the entry point, and jumps there. Clobbers only kScratchRegister, so all
// argument registers are preserved for the callee.
68 static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
69 __ movp(kScratchRegister,
70 FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
71 __ movp(kScratchRegister,
72 FieldOperand(kScratchRegister, SharedFunctionInfo::kCodeOffset));
73 __ leap(kScratchRegister, FieldOperand(kScratchRegister, Code::kHeaderSize));
74 __ jmp(kScratchRegister);
// Tail-calls the Code object a preceding runtime call returned in rax, by
// converting rax to the code entry address (past the Code header).
// NOTE(review): the final `jmp(rax)` line is elided in this sampled chunk.
78 static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
79 __ leap(rax, FieldOperand(rax, Code::kHeaderSize));
// Builtin entry for functions whose optimized code is being produced
// concurrently: if the stack limit is hit, try to install any ready
// optimized code via the runtime; otherwise fall through to the existing
// unoptimized shared code.
// NOTE(review): the `Label ok;` declaration and its `bind(&ok)` before the
// shared-code tail call are elided in this sampled chunk.
84 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
85 // Checking whether the queued function is ready for install is optional,
86 // since we come across interrupts and stack checks elsewhere. However,
87 // not checking may delay installing ready functions, and always checking
88 // would be quite expensive. A good compromise is to first check against
89 // stack limit as a cue for an interrupt signal.
91 __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
92 __ j(above_equal, &ok);
94 CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
95 GenerateTailCallToReturnedCode(masm);
98 GenerateTailCallToSharedCode(masm);
// Shared body of the JS construct stubs (generic and API variants).
// Fast path: inline-allocate the new JSObject from the constructor's initial
// map (optionally with a trailing AllocationMemento), initialize its fields,
// allocate an out-of-object properties FixedArray if the map requires one,
// then invoke the constructor. Slow path (&rt_call): allocate via
// Runtime::kNewObject / kNewObjectWithAllocationSite. Finally implements the
// ECMA-262 13.2.2 rule: keep the constructor's return value only if it is a
// spec object, otherwise return the receiver; pops the caller's arguments on
// return.
// NOTE(review): this chunk is a sampled view of the function — many emitted
// instructions (pushes, label binds, the openings of the two __ Allocate(...)
// calls, #ifdef DEBUG code, the else-arms of several if()s, and closing
// braces) are not visible here. Code kept byte-identical; any restyle or fix
// must be done against the full file.
102 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
103 bool is_api_function,
104 bool create_memento) {
105 // ----------- S t a t e -------------
106 // -- rax: number of arguments
107 // -- rdi: constructor function
108 // -- rbx: allocation site or undefined
109 // -----------------------------------
111 // Should never create mementos for api functions.
112 DCHECK(!is_api_function || !create_memento);
114 // Enter a construct frame.
116 FrameScope scope(masm, StackFrame::CONSTRUCT);
118 if (create_memento) {
119 __ AssertUndefinedOrAllocationSite(rbx);
123 // Store a smi-tagged arguments count on the stack.
124 __ Integer32ToSmi(rax, rax);
127 // Push the function to invoke on the stack.
130 // Try to allocate the object without transitioning into C code. If any of
131 // the preconditions is not met, the code bails out to the runtime call.
132 Label rt_call, allocated;
133 if (FLAG_inline_new) {
134 Label undo_allocation;
// Bail out to the runtime if a debugger step-in is pending, so the
// debugger sees the construct call.
136 ExternalReference debug_step_in_fp =
137 ExternalReference::debug_step_in_fp_address(masm->isolate());
138 __ Move(kScratchRegister, debug_step_in_fp);
139 __ cmpp(Operand(kScratchRegister, 0), Immediate(0));
140 __ j(not_equal, &rt_call);
142 // Verified that the constructor is a JSFunction.
143 // Load the initial map and verify that it is in fact a map.
145 __ movp(rax, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
146 // Will both indicate a NULL and a Smi
147 DCHECK(kSmiTag == 0);
148 __ JumpIfSmi(rax, &rt_call);
150 // rax: initial map (if proven valid below)
151 __ CmpObjectType(rax, MAP_TYPE, rbx);
152 __ j(not_equal, &rt_call);
154 // Check that the constructor is not constructing a JSFunction (see
155 // comments in Runtime_NewObject in runtime.cc). In which case the
156 // initial map's instance type would be JS_FUNCTION_TYPE.
159 __ CmpInstanceType(rax, JS_FUNCTION_TYPE);
160 __ j(equal, &rt_call);
162 if (!is_api_function) {
// NOTE(review): the `Label allocate;` declaration targeted by the jumps
// below is elided in this sampled chunk.
164 // The code below relies on these assumptions.
165 STATIC_ASSERT(JSFunction::kNoSlackTracking == 0);
166 STATIC_ASSERT(Map::ConstructionCount::kShift +
167 Map::ConstructionCount::kSize == 32);
168 // Check if slack tracking is enabled.
169 __ movl(rsi, FieldOperand(rax, Map::kBitField3Offset));
170 __ shrl(rsi, Immediate(Map::ConstructionCount::kShift));
171 __ j(zero, &allocate); // JSFunction::kNoSlackTracking
172 // Decrease generous allocation count.
173 __ subl(FieldOperand(rax, Map::kBitField3Offset),
174 Immediate(1 << Map::ConstructionCount::kShift));
176 __ cmpl(rsi, Immediate(JSFunction::kFinishSlackTracking));
177 __ j(not_equal, &allocate);
182 __ Push(rdi); // constructor
183 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
187 __ xorl(rsi, rsi); // JSFunction::kNoSlackTracking
192 // Now allocate the JSObject on the heap.
193 __ movzxbp(rdi, FieldOperand(rax, Map::kInstanceSizeOffset));
194 __ shlp(rdi, Immediate(kPointerSizeLog2));
195 if (create_memento) {
196 __ addp(rdi, Immediate(AllocationMemento::kSize));
198 // rdi: size of new object
// NOTE(review): the opening of the `__ Allocate(rdi, rbx, rdi, ...)` call
// that this argument line belongs to is elided in this chunk.
204 NO_ALLOCATION_FLAGS);
205 Factory* factory = masm->isolate()->factory();
206 // Allocated the JSObject, now initialize the fields.
208 // rbx: JSObject (not HeapObject tagged - the actual address).
209 // rdi: start of next object (including memento if create_memento)
210 __ movp(Operand(rbx, JSObject::kMapOffset), rax);
211 __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
212 __ movp(Operand(rbx, JSObject::kPropertiesOffset), rcx);
213 __ movp(Operand(rbx, JSObject::kElementsOffset), rcx);
214 // Set extra fields in the newly allocated object.
217 // rdi: start of next object (including memento if create_memento)
218 // rsi: slack tracking counter (non-API function case)
219 __ leap(rcx, Operand(rbx, JSObject::kHeaderSize));
220 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
221 if (!is_api_function) {
222 Label no_inobject_slack_tracking;
224 // Check if slack tracking is enabled.
225 __ cmpl(rsi, Immediate(JSFunction::kNoSlackTracking));
226 __ j(equal, &no_inobject_slack_tracking);
228 // Allocate object with a slack.
230 FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset));
232 Operand(rbx, rsi, times_pointer_size, JSObject::kHeaderSize));
233 // rsi: offset of first field after pre-allocated fields
234 if (FLAG_debug_code) {
236 __ Assert(less_equal,
237 kUnexpectedNumberOfPreAllocatedPropertyFields);
239 __ InitializeFieldsWithFiller(rcx, rsi, rdx);
240 __ LoadRoot(rdx, Heap::kOnePointerFillerMapRootIndex);
241 // Fill the remaining fields with one pointer filler map.
243 __ bind(&no_inobject_slack_tracking);
245 if (create_memento) {
246 __ leap(rsi, Operand(rdi, -AllocationMemento::kSize));
247 __ InitializeFieldsWithFiller(rcx, rsi, rdx);
249 // Fill in memento fields if necessary.
250 // rsi: points to the allocated but uninitialized memento.
251 __ Move(Operand(rsi, AllocationMemento::kMapOffset),
252 factory->allocation_memento_map());
253 // Get the cell or undefined.
254 __ movp(rdx, Operand(rsp, kPointerSize*2));
255 __ movp(Operand(rsi, AllocationMemento::kAllocationSiteOffset), rdx);
257 __ InitializeFieldsWithFiller(rcx, rdi, rdx);
260 // Add the object tag to make the JSObject real, so that we can continue
261 // and jump into the continuation code at any time from now on. Any
262 // failures need to undo the allocation, so that the heap is in a
263 // consistent state and verifiable.
266 // rdi: start of next object
267 __ orp(rbx, Immediate(kHeapObjectTag));
269 // Check if a non-empty properties array is needed.
270 // Allocate and initialize a FixedArray if it is.
273 // rdi: start of next object
274 // Calculate total properties described map.
275 __ movzxbp(rdx, FieldOperand(rax, Map::kUnusedPropertyFieldsOffset));
277 FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset));
279 // Calculate unused properties past the end of the in-object properties.
280 __ movzxbp(rcx, FieldOperand(rax, Map::kInObjectPropertiesOffset));
282 // Done if no extra properties are to be allocated.
283 __ j(zero, &allocated);
284 __ Assert(positive, kPropertyAllocationCountFailed);
286 // Scale the number of elements by pointer size and add the header for
287 // FixedArrays to the start of the next object calculation from above.
289 // rdi: start of next object (will be start of FixedArray)
290 // rdx: number of elements in properties array
// NOTE(review): several argument lines of this Allocate call are elided.
291 __ Allocate(FixedArray::kHeaderSize,
298 RESULT_CONTAINS_TOP);
300 // Initialize the FixedArray.
303 // rdx: number of elements
304 // rax: start of next object
305 __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex);
306 __ movp(Operand(rdi, HeapObject::kMapOffset), rcx); // setup the map
307 __ Integer32ToSmi(rdx, rdx);
308 __ movp(Operand(rdi, FixedArray::kLengthOffset), rdx); // and length
310 // Initialize the fields to undefined.
313 // rax: start of next object
314 // rdx: number of elements
316 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
317 __ leap(rcx, Operand(rdi, FixedArray::kHeaderSize));
// NOTE(review): the fill loop's label declaration/bind and the loop's
// compare/branch back-edge are elided around these two instructions.
320 __ movp(Operand(rcx, 0), rdx);
321 __ addp(rcx, Immediate(kPointerSize));
327 // Store the initialized FixedArray into the properties field of
331 __ orp(rdi, Immediate(kHeapObjectTag)); // add the heap tag
332 __ movp(FieldOperand(rbx, JSObject::kPropertiesOffset), rdi);
335 // Continue with JSObject being successfully allocated
339 // Undo the setting of the new top so that the heap is verifiable. For
340 // example, the map's unused properties potentially do not match the
341 // allocated objects unused properties.
342 // rbx: JSObject (previous new top)
343 __ bind(&undo_allocation);
344 __ UndoAllocationInNewSpace(rbx);
347 // Allocate the new receiver object using the runtime call.
348 // rdi: function (constructor)
// NOTE(review): the `__ bind(&rt_call);` and the declaration of the local
// `offset` assigned below are elided in this chunk.
351 if (create_memento) {
352 // Get the cell or allocation site.
353 __ movp(rdi, Operand(rsp, kPointerSize*2));
355 offset = kPointerSize;
358 // Must restore rsi (context) and rdi (constructor) before calling runtime.
359 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
360 __ movp(rdi, Operand(rsp, offset));
362 if (create_memento) {
363 __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 2);
365 __ CallRuntime(Runtime::kNewObject, 1);
367 __ movp(rbx, rax); // store result in rbx
369 // If we ended up using the runtime, and we want a memento, then the
370 // runtime call made it for us, and we shouldn't do create count
372 Label count_incremented;
373 if (create_memento) {
374 __ jmp(&count_incremented);
377 // New object allocated.
378 // rbx: newly allocated object
// NOTE(review): the `__ bind(&allocated);` for the fast-path join point is
// elided here; the memento-count increment below runs on the fast path only.
381 if (create_memento) {
382 __ movp(rcx, Operand(rsp, kPointerSize*2));
383 __ Cmp(rcx, masm->isolate()->factory()->undefined_value());
384 __ j(equal, &count_incremented);
385 // rcx is an AllocationSite. We are creating a memento from it, so we
386 // need to increment the memento create count.
388 FieldOperand(rcx, AllocationSite::kPretenureCreateCountOffset),
390 __ bind(&count_incremented);
393 // Retrieve the function from the stack.
396 // Retrieve smi-tagged arguments count from the stack.
397 __ movp(rax, Operand(rsp, 0));
398 __ SmiToInteger32(rax, rax);
400 // Push the allocated receiver to the stack. We need two copies
401 // because we may have to return the original one and the calling
402 // conventions dictate that the called function pops the receiver.
406 // Set up pointer to last argument.
407 __ leap(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));
409 // Copy arguments and receiver to the expression stack.
// NOTE(review): the loop label, loop-counter setup, and decrement for this
// argument-copy loop are elided around the Push/branch pair below.
414 __ Push(Operand(rbx, rcx, times_pointer_size, 0));
417 __ j(greater_equal, &loop);
419 // Call the function.
420 if (is_api_function) {
421 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
423 masm->isolate()->builtins()->HandleApiCallConstruct();
424 __ Call(code, RelocInfo::CODE_TARGET);
426 ParameterCount actual(rax);
427 __ InvokeFunction(rdi, actual, CALL_FUNCTION, NullCallWrapper());
430 // Store offset of return address for deoptimizer.
431 if (!is_api_function) {
432 masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
435 // Restore context from the frame.
436 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
438 // If the result is an object (in the ECMA sense), we should get rid
439 // of the receiver and use the result; see ECMA-262 section 13.2.2-7
441 Label use_receiver, exit;
442 // If the result is a smi, it is *not* an object in the ECMA sense.
443 __ JumpIfSmi(rax, &use_receiver);
445 // If the type of the result (stored in its map) is less than
446 // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
447 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
448 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
449 __ j(above_equal, &exit);
451 // Throw away the result of the constructor invocation and use the
452 // on-stack receiver as the result.
453 __ bind(&use_receiver);
454 __ movp(rax, Operand(rsp, 0));
456 // Restore the arguments count and leave the construct frame.
458 __ movp(rbx, Operand(rsp, kPointerSize)); // Get arguments count.
460 // Leave construct frame.
463 // Remove caller arguments from the stack and return.
464 __ PopReturnAddressTo(rcx);
465 SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
466 __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
467 __ PushReturnAddressFrom(rcx);
468 Counters* counters = masm->isolate()->counters();
469 __ IncrementCounter(counters->constructed_objects(), 1);
// Generic construct stub: non-API constructor, creating an allocation
// memento only when pretenuring of 'new' calls is enabled by flag.
474 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
475 Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
// API construct stub: api-function path, never creates a memento (see the
// DCHECK in the helper).
479 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
480 Generate_JSConstructStubHelper(masm, true, false);
// Shared body of the JS entry trampolines (call and construct variants):
// moves the C++ entry parameters into the JS calling convention, opens an
// INTERNAL frame, copies the argv handle array onto the stack (dereferencing
// each handle), then either calls the construct stub or invokes the function
// directly.
// NOTE(review): sampled chunk — the `bool is_construct` parameter line, the
// #ifdef _WIN64 / #else / #endif platform split the MSVC/GCC comment blocks
// belong to, several parameter-shuffling moves, and the is_construct dispatch
// around the CallConstructStub/InvokeFunction pair are elided. Code kept
// byte-identical.
484 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
486 ProfileEntryHookStub::MaybeCallEntryHook(masm);
488 // Expects five C++ function parameters.
489 // - Address entry (ignored)
490 // - JSFunction* function (
491 // - Object* receiver
494 // (see Handle::Invoke in execution.cc).
496 // Open a C++ scope for the FrameScope.
498 // Platform specific argument handling. After this, the stack contains
499 // an internal frame and the pushed function and receiver, and
500 // register rax and rbx holds the argument count and argument array,
501 // while rdi holds the function pointer and rsi the context.
504 // MSVC parameters in:
505 // rcx : entry (ignored)
511 // Clear the context before we push it when entering the internal frame.
513 // Enter an internal frame.
514 FrameScope scope(masm, StackFrame::INTERNAL);
516 // Load the function context into rsi.
517 __ movp(rsi, FieldOperand(rdx, JSFunction::kContextOffset));
519 // Push the function and the receiver onto the stack.
523 // Load the number of arguments and setup pointer to the arguments.
525 // Load the previous frame pointer to access C argument on stack
526 __ movp(kScratchRegister, Operand(rbp, 0));
527 __ movp(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
528 // Load the function pointer into rdi.
531 // GCC parameters in:
532 // rdi : entry (ignored)
541 // Clear the context before we push it when entering the internal frame.
543 // Enter an internal frame.
544 FrameScope scope(masm, StackFrame::INTERNAL);
546 // Push the function and receiver and setup the context.
549 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
551 // Load the number of arguments and setup pointer to the arguments.
556 // Current stack contents:
557 // [rsp + 2 * kPointerSize ... ] : Internal frame
558 // [rsp + kPointerSize] : function
560 // Current register contents:
566 // Copy arguments to the stack in a loop.
567 // Register rbx points to array of pointers to handle locations.
568 // Push the values of these handles.
// NOTE(review): the `Label loop, entry;` declaration, the jump to &entry,
// and the `bind` lines for this copy loop are elided around the body below.
570 __ Set(rcx, 0); // Set loop variable to 0.
573 __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
574 __ Push(Operand(kScratchRegister, 0)); // dereference handle
575 __ addp(rcx, Immediate(1));
578 __ j(not_equal, &loop);
582 // No type feedback cell is available
583 __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
584 // Expects rdi to hold function pointer.
585 CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
588 ParameterCount actual(rax);
589 // Function must be in rdi.
590 __ InvokeFunction(rdi, actual, CALL_FUNCTION, NullCallWrapper());
592 // Exit the internal frame. Notice that this also removes the empty
593 // context and the function left on the stack by the code
597 // TODO(X64): Is argument correct? Is there a receiver to remove?
598 __ ret(1 * kPointerSize); // Remove receiver.
// Entry trampoline for ordinary (non-construct) JS calls from C++.
602 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
603 Generate_JSEntryTrampolineHelper(masm, false);
// Entry trampoline for construct ('new') calls from C++.
607 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
608 Generate_JSEntryTrampolineHelper(masm, true);
// Lazy-compilation builtin: compile the function via the runtime, then
// tail-call the Code object the runtime returned in rax.
612 void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
613 CallRuntimePassFunction(masm, Runtime::kCompileLazy);
614 GenerateTailCallToReturnedCode(masm);
// Calls Runtime::kCompileOptimized with the current function and a boolean
// selecting concurrent (background) vs. synchronous compilation, inside an
// INTERNAL frame.
// NOTE(review): sampled chunk — the `bool concurrent` parameter line and the
// Push(rdi) lines implied by the comments below are elided; code kept
// byte-identical.
618 static void CallCompileOptimized(MacroAssembler* masm,
620 FrameScope scope(masm, StackFrame::INTERNAL);
621 // Push a copy of the function onto the stack.
623 // Function is also the parameter to the runtime call.
625 // Whether to compile in a background thread.
626 __ Push(masm->isolate()->factory()->ToBoolean(concurrent));
628 __ CallRuntime(Runtime::kCompileOptimized, 2);
// Synchronous optimized-compilation builtin: compile now, then tail-call the
// returned code.
634 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
635 CallCompileOptimized(masm, false);
636 GenerateTailCallToReturnedCode(masm);
// Concurrent optimized-compilation builtin: queue a background compile, then
// tail-call the code the runtime returned.
640 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
641 CallCompileOptimized(masm, true);
642 GenerateTailCallToReturnedCode(masm);
// Shared body of the Make*CodeYoungAgain builtins: calls the C function that
// rewrites the code-age prologue back to "young", then re-executes the
// patched prologue. Registers are saved/restored around the C call rather
// than building a frame (see comment below).
// NOTE(review): sampled chunk — the Pushad/Popad register save/restore, the
// subp explanation comment, the CallCFunction opening that the
// ExternalReference argument line belongs to, and the final jump back into
// the patched code are elided. Code kept byte-identical.
646 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
647 // For now, we are relying on the fact that make_code_young doesn't do any
648 // garbage collection which allows us to save/restore the registers without
649 // worrying about which of them contain pointers. We also don't build an
650 // internal frame to make the code faster, since we shouldn't have to do stack
651 // crawls in MakeCodeYoung. This seems a bit fragile.
653 // Re-execute the code that was patched back to the young age when
655 __ subp(Operand(rsp, 0), Immediate(5));
657 __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
658 __ movp(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
660 FrameScope scope(masm, StackFrame::MANUAL);
661 __ PrepareCallCFunction(2);
663 ExternalReference::get_make_code_young_function(masm->isolate()), 2);
// Stamps out one pair of builtins (even/odd marking) per code age in
// CODE_AGE_LIST; each simply delegates to GenerateMakeCodeYoungAgainCommon.
// NOTE(review): sampled chunk — the closing `}` + line-continuation of each
// generated function body is elided from the macro text shown here.
670 #define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \
671 void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
672 MacroAssembler* masm) { \
673 GenerateMakeCodeYoungAgainCommon(masm); \
675 void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \
676 MacroAssembler* masm) { \
677 GenerateMakeCodeYoungAgainCommon(masm); \
679 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
680 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
// Marks the calling code object as executed-once via a C helper (backing up
// over the short call that got us here to find the code start), then emits
// the standard frame-building prologue the young-code stub would have run,
// and jumps past the code-age stub.
// NOTE(review): sampled chunk — the register save/restore, the CallCFunction
// opening for the ExternalReference argument below, and the final jump
// (plus the `Push(rbp)`/`movp(rbp, rsp)` pairing around line 703) are
// partially elided. Code kept byte-identical.
683 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
684 // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
685 // that make_code_young doesn't do any garbage collection which allows us to
686 // save/restore the registers without worrying about which of them contain
689 __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
690 __ movp(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
691 __ subp(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength));
693 FrameScope scope(masm, StackFrame::MANUAL);
694 __ PrepareCallCFunction(2);
696 ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
701 // Perform prologue operations usually performed by the young code stub.
702 __ PopReturnAddressTo(kScratchRegister);
703 __ pushq(rbp); // Caller's frame pointer.
705 __ Push(rsi); // Callee's context.
706 __ Push(rdi); // Callee's JS Function.
707 __ PushReturnAddressFrom(kScratchRegister);
709 // Jump to point after the code-age stub.
// Second execution marking is equivalent to making the code young again.
714 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
715 GenerateMakeCodeYoungAgainCommon(masm);
// After a stub deopt: preserve all registers (optionally FP registers per
// save_doubles), notify the runtime of the stub failure, then return to the
// IC miss handler whose continuation is still on the stack.
// NOTE(review): sampled chunk — the Pushad/Popad surrounding the runtime
// call (referenced by the "Preserve registers" comment) are elided. Code
// kept byte-identical.
719 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
720 SaveFPRegsMode save_doubles) {
721 // Enter an internal frame.
723 FrameScope scope(masm, StackFrame::INTERNAL);
725 // Preserve registers across notification, this is important for compiled
726 // stubs that tail call the runtime on deopts passing their parameters in
729 __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
731 // Tear down internal frame.
734 __ DropUnderReturnAddress(1); // Ignore state offset
735 __ ret(0); // Return to IC Miss stub, continuation still on stack.
// Stub-failure notification without saving FP registers.
739 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
740 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
// Stub-failure notification that also preserves FP (double) registers.
744 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
745 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
// After deoptimization: tells the runtime which bailout type occurred, then
// resumes the full-codegen frame according to the state word the deoptimizer
// left on the stack — NO_REGISTERS pops just the state, TOS_REG additionally
// restores rax from the stack; any other state is a bug (Abort).
749 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
750 Deoptimizer::BailoutType type) {
751 // Enter an internal frame.
753 FrameScope scope(masm, StackFrame::INTERNAL);
755 // Pass the deoptimization type to the runtime system.
756 __ Push(Smi::FromInt(static_cast<int>(type)));
758 __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
759 // Tear down internal frame.
762 // Get the full codegen state from the stack and untag it.
763 __ SmiToInteger32(kScratchRegister, Operand(rsp, kPCOnStackSize));
765 // Switch on the state.
766 Label not_no_registers, not_tos_rax;
767 __ cmpp(kScratchRegister, Immediate(FullCodeGenerator::NO_REGISTERS));
768 __ j(not_equal, &not_no_registers, Label::kNear);
769 __ ret(1 * kPointerSize); // Remove state.
771 __ bind(&not_no_registers);
772 __ movp(rax, Operand(rsp, kPCOnStackSize + kPointerSize));
773 __ cmpp(kScratchRegister, Immediate(FullCodeGenerator::TOS_REG));
774 __ j(not_equal, &not_tos_rax, Label::kNear);
775 __ ret(2 * kPointerSize); // Remove state, rax.
777 __ bind(&not_tos_rax);
778 __ Abort(kNoCasesLeft);
// Eager-deopt notification builtin.
782 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
783 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
// Soft-deopt notification builtin.
787 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
788 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
// Lazy-deopt notification builtin.
792 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
793 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
// Implements Function.prototype.call-style invocation: ensures at least one
// argument (pushing undefined if needed), loads the callee from the receiver
// slot, classifies it (rdx = 0 JS function / 1 proxy / 2 non-function),
// applies sloppy-mode receiver conversion where required, shifts all
// arguments down one slot so the first argument becomes the receiver, then
// dispatches: proxies and non-functions go through the arguments adaptor to
// CALL_FUNCTION_PROXY / CALL_NON_FUNCTION, plain functions are invoked
// directly (via the adaptor if the formal parameter count mismatches).
// NOTE(review): sampled chunk — several label binds (&done, &slow,
// &function), push/pop pairs around the TO_OBJECT builtin call, the loop
// header of the argument-shift loop, and the opening of the final
// mismatch-Jump are elided. Code kept byte-identical.
797 void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
799 // rsp[0] : Return address
800 // rsp[8] : Argument n
801 // rsp[16] : Argument n-1
803 // rsp[8 * n] : Argument 1
804 // rsp[8 * (n + 1)] : Receiver (function to call)
806 // rax contains the number of arguments, n, not counting the receiver.
808 // 1. Make sure we have at least one argument.
// NOTE(review): the `Label done;`, the testp of rax, and `bind(&done)` for
// this zero-argument fixup are elided around the lines below.
811 __ j(not_zero, &done);
812 __ PopReturnAddressTo(rbx);
813 __ Push(masm->isolate()->factory()->undefined_value());
814 __ PushReturnAddressFrom(rbx);
819 // 2. Get the function to call (passed as receiver) from the stack, check
820 // if it is a function.
821 Label slow, non_function;
822 StackArgumentsAccessor args(rsp, rax);
823 __ movp(rdi, args.GetReceiverOperand());
824 __ JumpIfSmi(rdi, &non_function);
825 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
826 __ j(not_equal, &slow);
828 // 3a. Patch the first argument if necessary when calling a function.
829 Label shift_arguments;
830 __ Set(rdx, 0); // indicate regular JS_FUNCTION
831 { Label convert_to_object, use_global_proxy, patch_receiver;
832 // Change context eagerly in case we need the global receiver.
833 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
835 // Do not transform the receiver for strict mode functions.
836 __ movp(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
837 __ testb(FieldOperand(rbx, SharedFunctionInfo::kStrictModeByteOffset),
838 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
839 __ j(not_equal, &shift_arguments);
841 // Do not transform the receiver for natives.
842 // SharedFunctionInfo is already loaded into rbx.
843 __ testb(FieldOperand(rbx, SharedFunctionInfo::kNativeByteOffset),
844 Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
845 __ j(not_zero, &shift_arguments);
847 // Compute the receiver in sloppy mode.
848 __ movp(rbx, args.GetArgumentOperand(1));
849 __ JumpIfSmi(rbx, &convert_to_object, Label::kNear);
851 __ CompareRoot(rbx, Heap::kNullValueRootIndex);
852 __ j(equal, &use_global_proxy);
853 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
854 __ j(equal, &use_global_proxy);
856 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
857 __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx);
858 __ j(above_equal, &shift_arguments);
860 __ bind(&convert_to_object);
862 // Enter an internal frame in order to preserve argument count.
863 FrameScope scope(masm, StackFrame::INTERNAL);
864 __ Integer32ToSmi(rax, rax);
// NOTE(review): the Push(rax)/Push(rbx) before and the pop after the
// TO_OBJECT call are elided in this chunk.
868 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
870 __ Set(rdx, 0); // indicate regular JS_FUNCTION
873 __ SmiToInteger32(rax, rax);
876 // Restore the function to rdi.
877 __ movp(rdi, args.GetReceiverOperand());
878 __ jmp(&patch_receiver, Label::kNear);
880 __ bind(&use_global_proxy);
882 Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
883 __ movp(rbx, FieldOperand(rbx, GlobalObject::kGlobalProxyOffset));
885 __ bind(&patch_receiver);
886 __ movp(args.GetArgumentOperand(1), rbx);
888 __ jmp(&shift_arguments);
891 // 3b. Check for function proxy.
893 __ Set(rdx, 1); // indicate function proxy
894 __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE);
895 __ j(equal, &shift_arguments);
896 __ bind(&non_function);
897 __ Set(rdx, 2); // indicate non-function
899 // 3c. Patch the first argument when calling a non-function. The
900 // CALL_NON_FUNCTION builtin expects the non-function callee as
901 // receiver, so overwrite the first argument which will ultimately
902 // become the receiver.
903 __ movp(args.GetArgumentOperand(1), rdi);
905 // 4. Shift arguments and return address one slot down on the stack
906 // (overwriting the original receiver). Adjust argument count to make
907 // the original first argument the new receiver.
908 __ bind(&shift_arguments);
// NOTE(review): the `Label loop;` declaration, rcx initialization, bind,
// and decrement of rcx for this shift loop are elided around the body below.
911 StackArgumentsAccessor args(rsp, rcx);
913 __ movp(rbx, args.GetArgumentOperand(1));
914 __ movp(args.GetArgumentOperand(0), rbx);
916 __ j(not_zero, &loop); // While non-zero.
917 __ DropUnderReturnAddress(1, rbx); // Drop one slot under return address.
918 __ decp(rax); // One fewer argument (first argument is new receiver).
921 // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
922 // or a function proxy via CALL_FUNCTION_PROXY.
923 { Label function, non_proxy;
925 __ j(zero, &function);
927 __ cmpp(rdx, Immediate(1));
928 __ j(not_equal, &non_proxy);
930 __ PopReturnAddressTo(rdx);
931 __ Push(rdi); // re-add proxy object as additional argument
932 __ PushReturnAddressFrom(rdx);
934 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
935 __ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
936 RelocInfo::CODE_TARGET);
939 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
940 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
941 RelocInfo::CODE_TARGET);
945 // 5b. Get the code to call from the function and check that the number of
946 // expected arguments matches what we're providing. If so, jump
947 // (tail-call) to the code in register edx without checking arguments.
948 __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
949 __ LoadSharedFunctionInfoSpecialField(rbx, rdx,
950 SharedFunctionInfo::kFormalParameterCountOffset);
951 __ movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
// NOTE(review): the cmpp(rax, rbx) / j(not_equal, ...) opening of this
// mismatch tail-jump to the adaptor is elided before the lines below.
954 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
955 RelocInfo::CODE_TARGET);
957 ParameterCount expected(0);
958 __ InvokeCode(rdx, expected, expected, JUMP_FUNCTION, NullCallWrapper());
962 void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
964 // rsp : return address
965 // rsp[8] : arguments
966 // rsp[16] : receiver ("this")
967 // rsp[24] : function
969 FrameScope frame_scope(masm, StackFrame::INTERNAL);
971 // rbp : Old base pointer
972 // rbp[8] : return address
973 // rbp[16] : function arguments
974 // rbp[24] : receiver
975 // rbp[32] : function
976 static const int kArgumentsOffset = kFPOnStackSize + kPCOnStackSize;
977 static const int kReceiverOffset = kArgumentsOffset + kPointerSize;
978 static const int kFunctionOffset = kReceiverOffset + kPointerSize;
980 __ Push(Operand(rbp, kFunctionOffset));
981 __ Push(Operand(rbp, kArgumentsOffset));
982 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
984 // Check the stack for overflow. We are not trying to catch
985 // interruptions (e.g. debug break and preemption) here, so the "real stack
986 // limit" is checked.
988 __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
990 // Make rcx the space we have left. The stack might already be overflowed
991 // here which will cause rcx to become negative.
992 __ subp(rcx, kScratchRegister);
993 // Make rdx the space we need for the array when it is unrolled onto the
995 __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2);
996 // Check if the arguments will overflow the stack.
998 __ j(greater, &okay); // Signed comparison.
1000 // Out of stack space.
1001 __ Push(Operand(rbp, kFunctionOffset));
1003 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
1005 // End of stack check.
1007 // Push current index and limit.
1008 const int kLimitOffset =
1009 StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize;
1010 const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
1011 __ Push(rax); // limit
1012 __ Push(Immediate(0)); // index
1014 // Get the receiver.
1015 __ movp(rbx, Operand(rbp, kReceiverOffset));
1017 // Check that the function is a JS function (otherwise it must be a proxy).
1018 Label push_receiver;
1019 __ movp(rdi, Operand(rbp, kFunctionOffset));
1020 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
1021 __ j(not_equal, &push_receiver);
1023 // Change context eagerly to get the right global object if necessary.
1024 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
1026 // Do not transform the receiver for strict mode functions.
1027 Label call_to_object, use_global_proxy;
1028 __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
1029 __ testb(FieldOperand(rdx, SharedFunctionInfo::kStrictModeByteOffset),
1030 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
1031 __ j(not_equal, &push_receiver);
1033 // Do not transform the receiver for natives.
1034 __ testb(FieldOperand(rdx, SharedFunctionInfo::kNativeByteOffset),
1035 Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
1036 __ j(not_equal, &push_receiver);
1038 // Compute the receiver in sloppy mode.
1039 __ JumpIfSmi(rbx, &call_to_object, Label::kNear);
1040 __ CompareRoot(rbx, Heap::kNullValueRootIndex);
1041 __ j(equal, &use_global_proxy);
1042 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
1043 __ j(equal, &use_global_proxy);
1045 // If given receiver is already a JavaScript object then there's no
1046 // reason for converting it.
1047 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
1048 __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx);
1049 __ j(above_equal, &push_receiver);
1051 // Convert the receiver to an object.
1052 __ bind(&call_to_object);
1054 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1056 __ jmp(&push_receiver, Label::kNear);
1058 __ bind(&use_global_proxy);
1060 Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
1061 __ movp(rbx, FieldOperand(rbx, GlobalObject::kGlobalProxyOffset));
1063 // Push the receiver.
1064 __ bind(&push_receiver);
1067 // Copy all arguments from the array to the stack.
1069 Register receiver = LoadDescriptor::ReceiverRegister();
1070 Register key = LoadDescriptor::NameRegister();
1071 __ movp(key, Operand(rbp, kIndexOffset));
1074 __ movp(receiver, Operand(rbp, kArgumentsOffset)); // load arguments
1076 // Use inline caching to speed up access to arguments.
1077 if (FLAG_vector_ics) {
1078 // TODO(mvstanton): Vector-based ics need additional infrastructure to
1079 // be embedded here. For now, just call the runtime.
1082 __ CallRuntime(Runtime::kGetProperty, 2);
1084 Handle<Code> ic = CodeFactory::KeyedLoadIC(masm->isolate()).code();
1085 __ Call(ic, RelocInfo::CODE_TARGET);
1086 // It is important that we do not have a test instruction after the
1087 // call. A test instruction after the call is used to indicate that
1088 // we have generated an inline version of the keyed load. In this
1089 // case, we know that we are not generating a test instruction next.
1092 // Push the nth argument.
1095 // Update the index on the stack and in register key.
1096 __ movp(key, Operand(rbp, kIndexOffset));
1097 __ SmiAddConstant(key, key, Smi::FromInt(1));
1098 __ movp(Operand(rbp, kIndexOffset), key);
1101 __ cmpp(key, Operand(rbp, kLimitOffset));
1102 __ j(not_equal, &loop);
1104 // Call the function.
1106 ParameterCount actual(rax);
1107 __ SmiToInteger32(rax, key);
1108 __ movp(rdi, Operand(rbp, kFunctionOffset));
1109 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
1110 __ j(not_equal, &call_proxy);
1111 __ InvokeFunction(rdi, actual, CALL_FUNCTION, NullCallWrapper());
1113 frame_scope.GenerateLeaveFrame();
1114 __ ret(3 * kPointerSize); // remove this, receiver, and arguments
1116 // Call the function proxy.
1117 __ bind(&call_proxy);
1118 __ Push(rdi); // add function proxy as last argument
1121 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
1122 __ call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1123 RelocInfo::CODE_TARGET);
1125 // Leave internal frame.
1127 __ ret(3 * kPointerSize); // remove this, receiver, and arguments
// Builtin entry for InternalArray called as a normal function. Loads the
// InternalArray constructor into rdi, verifies (debug builds only) that its
// initial-map slot holds an actual Map, then tail-calls
// InternalArrayConstructorStub, which emits the real construction code.
// NOTE(review): the embedded numbering shows gaps (e.g. 1151-1152), so some
// original lines are elided from this excerpt.
1131 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
1132 // ----------- S t a t e -------------
1134 // -- rsp[0] : return address
1135 // -- rsp[8] : last argument
1136 // -----------------------------------
1137 Label generic_array_code;
1139 // Get the InternalArray function.
1140 __ LoadGlobalFunction(Context::INTERNAL_ARRAY_FUNCTION_INDEX, rdi);
1142 if (FLAG_debug_code) {
1143 // Initial map for the builtin InternalArray functions should be maps.
1144 __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
1145 // Will both indicate a NULL and a Smi.
1146 STATIC_ASSERT(kSmiTag == 0);
// CheckSmi yields the "is a Smi" condition; negate it so Check() asserts the
// slot is neither NULL (0, which is Smi-tagged) nor any other Smi.
1147 Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
1148 __ Check(not_smi, kUnexpectedInitialMapForInternalArrayFunction);
// Abort unless the heap object in rbx is actually a Map.
1149 __ CmpObjectType(rbx, MAP_TYPE, rcx);
1150 __ Check(equal, kUnexpectedInitialMapForInternalArrayFunction);
1153 // Run the native code for the InternalArray function called as a normal
// function: delegate to the stub via a tail call (no frame is built here).
1156 InternalArrayConstructorStub stub(masm->isolate());
1157 __ TailCallStub(&stub);
// Builtin entry for Array called as a normal function. Mirrors
// Generate_InternalArrayCode: loads the Array constructor into rdi, sanity
// checks its initial map in debug builds, then tail-calls
// ArrayConstructorStub with rbx holding undefined (no AllocationSite).
// NOTE(review): gaps in the embedded numbering (e.g. 1181-1182) indicate some
// original lines are elided from this excerpt.
1161 void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
1162 // ----------- S t a t e -------------
1164 // -- rsp[0] : return address
1165 // -- rsp[8] : last argument
1166 // -----------------------------------
1167 Label generic_array_code;
1169 // Get the Array function.
1170 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rdi);
1172 if (FLAG_debug_code) {
1173 // Initial map for the builtin Array functions should be maps.
1174 __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
1175 // Will both indicate a NULL and a Smi.
1176 STATIC_ASSERT(kSmiTag == 0);
// Negated CheckSmi rejects both NULL (0 is Smi-tagged) and real Smis.
1177 Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
1178 __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
1179 __ CmpObjectType(rbx, MAP_TYPE, rcx);
1180 __ Check(equal, kUnexpectedInitialMapForArrayFunction);
1183 // Run the native code for the Array function called as a normal function.
// rbx = undefined signals "no AllocationSite feedback" to the stub.
1185 __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
1186 ArrayConstructorStub stub(masm->isolate());
1187 __ TailCallStub(&stub);
// Builtin entry for the String constructor called as a function
// (String(x), not new String(x) -- despite the JSValue allocation path, this
// is the code the trampoline dispatches to for the string constructor).
// Fast paths: number->string cache hit, and argument already a string;
// slow paths call the TO_STRING builtin / Runtime::kNewStringWrapper.
// FIX(review): line 1273 previously read "__ bind(¬_cached);" -- the "¬"
// is mojibake for the HTML entity "&not", i.e. the original token was
// "&not_cached", matching the label declared on line 1221. Restored below.
// NOTE(review): gaps in the embedded numbering (e.g. 1201, 1204, 1206-1207,
// 1211-1212, 1218-1219, 1223-1226, ...) show elided original lines, including
// the declarations/bindings of labels such as no_arguments and gc_required.
1191 void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
1192 // ----------- S t a t e -------------
1193 // -- rax : number of arguments
1194 // -- rdi : constructor function
1195 // -- rsp[0] : return address
1196 // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1197 // -- rsp[(argc + 1) * 8] : receiver
1198 // -----------------------------------
1199 Counters* counters = masm->isolate()->counters();
1200 __ IncrementCounter(counters->string_ctor_calls(), 1);
1202 if (FLAG_debug_code) {
// Debug check: rdi should be the canonical String function from the context.
1203 __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, rcx);
1205 __ Assert(equal, kUnexpectedStringFunction);
1208 // Load the first argument into rax and get rid of the rest
1209 // (including the receiver).
1210 StackArgumentsAccessor args(rsp, rax);
1213 __ j(zero, &no_arguments);
1214 __ movp(rbx, args.GetArgumentOperand(1));
// Drop all arguments plus the receiver while preserving the return address.
1215 __ PopReturnAddressTo(rcx);
1216 __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
1217 __ PushReturnAddressFrom(rcx);
1220 // Lookup the argument in the number to string cache.
1221 Label not_cached, argument_is_string;
1222 __ LookupNumberStringCache(rax, // Input.
1227 __ IncrementCounter(counters->string_ctor_cached_number(), 1);
1228 __ bind(&argument_is_string);
1230 // ----------- S t a t e -------------
1231 // -- rbx : argument converted to string
1232 // -- rdi : constructor function
1233 // -- rsp[0] : return address
1234 // -----------------------------------
1236 // Allocate a JSValue and put the tagged pointer into rax.
1238 __ Allocate(JSValue::kSize,
1240 rcx, // New allocation top (we ignore it).
// Set the map: fetch the String function's initial map into rcx.
1246 __ LoadGlobalFunctionInitialMap(rdi, rcx);
1247 if (FLAG_debug_code) {
1248 __ cmpb(FieldOperand(rcx, Map::kInstanceSizeOffset),
1249 Immediate(JSValue::kSize >> kPointerSizeLog2));
1250 __ Assert(equal, kUnexpectedStringWrapperInstanceSize);
1251 __ cmpb(FieldOperand(rcx, Map::kUnusedPropertyFieldsOffset), Immediate(0));
1252 __ Assert(equal, kUnexpectedUnusedPropertiesOfStringWrapper);
1254 __ movp(FieldOperand(rax, HeapObject::kMapOffset), rcx);
1256 // Set properties and elements.
1257 __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
1258 __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), rcx);
1259 __ movp(FieldOperand(rax, JSObject::kElementsOffset), rcx);
// Store the wrapped string value into the JSValue.
1262 __ movp(FieldOperand(rax, JSValue::kValueOffset), rbx);
1264 // Ensure the object is fully initialized.
1265 STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
1267 // We're done. Return.
1270 // The argument was not found in the number to string cache. Check
1271 // if it's a string already before calling the conversion builtin.
1272 Label convert_argument;
1273 __ bind(&not_cached);
1274 STATIC_ASSERT(kSmiTag == 0);
1275 __ JumpIfSmi(rax, &convert_argument);
1276 Condition is_string = masm->IsObjectStringType(rax, rbx, rcx);
1277 __ j(NegateCondition(is_string), &convert_argument);
1279 __ IncrementCounter(counters->string_ctor_string_value(), 1);
1280 __ jmp(&argument_is_string);
1282 // Invoke the conversion builtin and put the result into rbx.
1283 __ bind(&convert_argument);
1284 __ IncrementCounter(counters->string_ctor_conversions(), 1);
// The builtin call needs a frame; rdi is caller-saved across it.
1286 FrameScope scope(masm, StackFrame::INTERNAL);
1287 __ Push(rdi); // Preserve the function.
1289 __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
1293 __ jmp(&argument_is_string);
1295 // Load the empty string into rbx, remove the receiver from the
1296 // stack, and jump back to the case where the argument is a string.
1297 __ bind(&no_arguments);
1298 __ LoadRoot(rbx, Heap::kempty_stringRootIndex);
1299 __ PopReturnAddressTo(rcx);
1300 __ leap(rsp, Operand(rsp, kPointerSize));
1301 __ PushReturnAddressFrom(rcx);
1302 __ jmp(&argument_is_string);
1304 // At this point the argument is already a string. Call runtime to
1305 // create a string wrapper.
1306 __ bind(&gc_required);
1307 __ IncrementCounter(counters->string_ctor_gc_required(), 1);
1309 FrameScope scope(masm, StackFrame::INTERNAL);
1311 __ CallRuntime(Runtime::kNewStringWrapper, 1);
// Emits a stack-limit check for the arguments adaptor: jumps to
// |stack_overflow| if pushing rbx expected arguments would run below the
// "real" (non-interrupt) stack limit. Clobbers rdx.
// NOTE(review): gaps in the embedded numbering (1329, 1332, 1334-1335) show
// the instructions that compute the remaining space are elided here.
1317 static void ArgumentsAdaptorStackCheck(MacroAssembler* masm,
1318 Label* stack_overflow) {
1319 // ----------- S t a t e -------------
1320 // -- rax : actual number of arguments
1321 // -- rbx : expected number of arguments
1322 // -- rdi: function (passed through to callee)
1323 // -----------------------------------
1324 // Check the stack for overflow. We are not trying to catch
1325 // interruptions (e.g. debug break and preemption) here, so the "real stack
1326 // limit" is checked.
1328 __ LoadRoot(rdx, Heap::kRealStackLimitRootIndex);
1330 // Make rcx the space we have left. The stack might already be overflowed
1331 // here which will cause rcx to become negative.
// NOTE(review): the rcx computation (presumably rsp minus the limit) is on
// elided lines -- confirm against the full source.
1333 // Make rdx the space we need for the array when it is unrolled onto the
// stack: scale the expected argument count to bytes.
1336 __ shlp(rdx, Immediate(kPointerSizeLog2));
1337 // Check if the arguments will overflow the stack.
// If remaining space <= required space, take the caller's overflow path.
1339 __ j(less_equal, stack_overflow); // Signed comparison.
// Builds an arguments-adaptor frame: pushes the ARGUMENTS_ADAPTOR context
// sentinel and prepares the Smi-tagged actual argument count for storage in
// the frame. Keeps rax, rbx and rcx intact for the subsequent copy loops.
// NOTE(review): numbering gaps (1344-1346, 1349, 1351-1352, 1357) show the
// frame-pointer setup and the function/r8 pushes are elided from this view.
1343 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
1347 // Store the arguments adaptor context sentinel.
1348 __ Push(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
1350 // Push the function on the stack.
1353 // Preserve the number of arguments on the stack. Must preserve rax,
1354 // rbx and rcx because these registers are used when copying the
1355 // arguments and the receiver.
// Smi-tag the actual count into r8 (the push that saves it is elided here).
1356 __ Integer32ToSmi(r8, rax);
// Tears down an arguments-adaptor frame and pops the caller-pushed arguments
// (plus the receiver) off the stack, leaving the return address on top.
// NOTE(review): numbering gaps (1364-1368) show the frame teardown
// instructions are elided from this view.
1361 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1362 // Retrieve the number of arguments from the stack. Number is a Smi.
1363 __ movp(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));
1369 // Remove caller arguments from the stack.
1370 __ PopReturnAddressTo(rcx);
// rbx holds a Smi count; convert to a scaled index so rsp can be advanced
// past all arguments plus the receiver (the extra 1 * kPointerSize).
1371 SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
1372 __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
1373 __ PushReturnAddressFrom(rcx);
// Trampoline that adapts a call's actual argument count (rax) to the
// callee's expected count (rbx) before jumping to the function's code entry
// (rdx). Three cases: actual >= expected (copy expected args), actual <
// expected (copy actual args, pad with undefined), and the
// kDontAdaptArgumentsSentinel fast path (jump straight to the code entry).
// NOTE(review): numbering gaps (e.g. 1399-1416, 1425-1428, 1437-1442,
// 1448-1450) show the copy-loop labels/branches and the final calls are
// elided from this excerpt.
1377 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
1378 // ----------- S t a t e -------------
1379 // -- rax : actual number of arguments
1380 // -- rbx : expected number of arguments
1381 // -- rdi: function (passed through to callee)
1382 // -----------------------------------
1384 Label invoke, dont_adapt_arguments;
1385 Counters* counters = masm->isolate()->counters();
1386 __ IncrementCounter(counters->arguments_adaptors(), 1);
1388 Label stack_overflow;
// Bail out early if the expected arguments would not fit on the stack.
1389 ArgumentsAdaptorStackCheck(masm, &stack_overflow);
1391 Label enough, too_few;
// rdx = the callee's code entry, used by all three dispatch cases.
1392 __ movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
1394 __ j(less, &too_few);
1395 __ cmpp(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
1396 __ j(equal, &dont_adapt_arguments);
1398 { // Enough parameters: Actual >= expected.
1400 EnterArgumentsAdaptorFrame(masm);
1402 // Copy receiver and all expected arguments.
1403 const int offset = StandardFrameConstants::kCallerSPOffset;
// rax now points at the receiver slot in the caller's frame.
1404 __ leap(rax, Operand(rbp, rax, times_pointer_size, offset));
1405 __ Set(r8, -1); // account for receiver
// Copy loop body: push the next argument, walk rax downward.
1410 __ Push(Operand(rax, 0));
1411 __ subp(rax, Immediate(kPointerSize));
1417 { // Too few parameters: Actual < expected.
1419 EnterArgumentsAdaptorFrame(masm);
1421 // Copy receiver and all actual arguments.
1422 const int offset = StandardFrameConstants::kCallerSPOffset;
// rdi temporarily serves as the copy cursor here (restored below).
1423 __ leap(rdi, Operand(rbp, rax, times_pointer_size, offset));
1424 __ Set(r8, -1); // account for receiver
1429 __ Push(Operand(rdi, 0));
1430 __ subp(rdi, Immediate(kPointerSize));
1434 // Fill remaining expected arguments with undefined values.
1436 __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
1439 __ Push(kScratchRegister);
1443 // Restore function pointer.
// rdi was clobbered as the copy cursor; reload it from the adaptor frame.
1444 __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1447 // Call the entry point.
1451 // Store offset of return address for deoptimizer.
1452 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
1454 // Leave frame and return.
1455 LeaveArgumentsAdaptorFrame(masm);
1458 // -------------------------------------------
1459 // Dont adapt arguments.
1460 // -------------------------------------------
1461 __ bind(&dont_adapt_arguments);
// Stack-overflow path: build a frame manually, then report the overflow
// (InvokeBuiltin does not return normally here).
1464 __ bind(&stack_overflow);
1466 FrameScope frame(masm, StackFrame::MANUAL);
1467 EnterArgumentsAdaptorFrame(masm);
1468 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
// On-stack replacement (OSR): asks the runtime to compile an optimized
// version of the function in the current JavaScript frame. If one is
// produced, rewrites the return address on the stack so that returning
// jumps into the optimized code at the OSR entry point; otherwise control
// returns to the unoptimized code unchanged.
// NOTE(review): numbering gaps (1477, 1480, 1482-1484, 1488-1491, ...) show
// the frame-scope braces, the &skip label binding and the final ret are
// elided from this excerpt.
1474 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
1475 // Lookup the function in the JavaScript frame.
1476 __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1478 FrameScope scope(masm, StackFrame::INTERNAL);
1479 // Pass function as argument.
// Returns the optimized Code object in rax, or null if none was produced.
1481 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
1485 // If the code object is null, just return to the unoptimized code.
1486 __ cmpp(rax, Immediate(0));
1487 __ j(not_equal, &skip, Label::kNear);
1492 // Load deoptimization data from the code object.
// Untagged access: offsets are adjusted by -kHeapObjectTag manually here.
1493 __ movp(rbx, Operand(rax, Code::kDeoptimizationDataOffset - kHeapObjectTag));
1495 // Load the OSR entrypoint offset from the deoptimization data.
1496 __ SmiToInteger32(rbx, Operand(rbx, FixedArray::OffsetOfElementAt(
1497 DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));
1499 // Compute the target address = code_obj + header_size + osr_offset
1500 __ leap(rax, Operand(rax, rbx, times_1, Code::kHeaderSize - kHeapObjectTag));
1502 // Overwrite the return address on the stack.
1503 __ movq(StackOperandForReturnAddress(0), rax);
1505 // And "return" to the OSR entry point of the function.
// Emitted after a back-edge stack check: if rsp is at or above the
// (interruptible) stack limit there is nothing to do; otherwise call the
// StackGuard runtime entry, then tail-jump to the OnStackReplacement
// builtin to attempt OSR into optimized code.
// NOTE(review): numbering gaps (1512, 1515, 1518) show the &ok label
// declaration/binding and a ret are elided from this excerpt.
1510 void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
1511 // We check the stack limit as indicator that recompilation might be done.
1513 __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
1514 __ j(above_equal, &ok);
1516 FrameScope scope(masm, StackFrame::INTERNAL);
1517 __ CallRuntime(Runtime::kStackGuard, 0);
// Tail-jump: OnStackReplacement finishes this builtin's work.
1519 __ jmp(masm->isolate()->builtins()->OnStackReplacement(),
1520 RelocInfo::CODE_TARGET);
1529 } } // namespace v8::internal
1531 #endif // V8_TARGET_ARCH_X64