// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if V8_TARGET_ARCH_X64

#include "codegen.h"
#include "deoptimizer.h"
#include "full-codegen.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)
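// Shorthand used throughout this file: "__" expands to ACCESS_MASM(masm),
// so the code-generation sequences below read like assembly listings.
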
void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- rax                 : number of arguments excluding receiver
  //  -- rdi                 : called function (only guaranteed when
  //                           extra_args requires it)
  //  -- rsp[0]              : return address
  //  -- rsp[8]              : last argument
  //  -- ...
  //  -- rsp[8 * argc]       : first argument (argc == rax)
  //  -- rsp[8 * (argc + 1)] : receiver
  // -----------------------------------

  // Insert extra arguments.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    num_extra_args = 1;
    __ PopReturnAddressTo(kScratchRegister);
    __ push(rdi);
    __ PushReturnAddressFrom(kScratchRegister);
  } else {
    ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects rax to contain the number of arguments
  // including the receiver and the extra arguments.
  __ addq(rax, Immediate(num_extra_args + 1));
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()), 1);
}

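// Calls a runtime function with the current function (rdi) as its only
// argument, keeping a copy of rdi on the stack so it survives the call.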
static void CallRuntimePassFunction(
    MacroAssembler* masm, Runtime::FunctionId function_id) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack.
  __ push(rdi);
  // Function is also the parameter to the runtime call.
  __ push(rdi);

  __ CallRuntime(function_id, 1);
  // Restore the function.
  __ pop(rdi);
}

static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ movp(kScratchRegister,
          FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movp(kScratchRegister,
          FieldOperand(kScratchRegister, SharedFunctionInfo::kCodeOffset));
  __ lea(kScratchRegister, FieldOperand(kScratchRegister, Code::kHeaderSize));
  __ jmp(kScratchRegister);
}

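// Tail-calls the Code object that the preceding runtime call returned in rax,
// skipping past the Code header to its first instruction.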
static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
  __ lea(rax, FieldOperand(rax, Code::kHeaderSize));
  __ jmp(rax);
}

void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive. A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
  __ j(above_equal, &ok);

  CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
  GenerateTailCallToReturnedCode(masm);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}

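// Shared body of the JSConstructStub variants below. It allocates the new
// receiver (inline when FLAG_inline_new allows it, otherwise through
// Runtime::kNewObject), copies the arguments, invokes the constructor and
// applies the ECMA-262 rule that a non-object result is replaced by the
// receiver.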
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool count_constructions) {
  // ----------- S t a t e -------------
  //  -- rax: number of arguments
  //  -- rdi: constructor function
  // -----------------------------------

  // Should never count constructions for api objects.
  ASSERT(!is_api_function || !count_constructions);

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Store a smi-tagged arguments count on the stack.
    __ Integer32ToSmi(rax, rax);
    __ push(rax);

    // Push the function to invoke on the stack.
    __ push(rdi);

    // Try to allocate the object without transitioning into C code. If any of
    // the preconditions is not met, the code bails out to the runtime call.
    Label rt_call, allocated;
    if (FLAG_inline_new) {
      Label undo_allocation;

#ifdef ENABLE_DEBUGGER_SUPPORT
      ExternalReference debug_step_in_fp =
          ExternalReference::debug_step_in_fp_address(masm->isolate());
      __ Move(kScratchRegister, debug_step_in_fp);
      __ cmpq(Operand(kScratchRegister, 0), Immediate(0));
      __ j(not_equal, &rt_call);
#endif

      // Verified that the constructor is a JSFunction.
      // Load the initial map and verify that it is in fact a map.
      __ movp(rax, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
      // Will both indicate a NULL and a Smi.
      ASSERT(kSmiTag == 0);
      __ JumpIfSmi(rax, &rt_call);
      // rax: initial map (if proven valid below)
      __ CmpObjectType(rax, MAP_TYPE, rbx);
      __ j(not_equal, &rt_call);

      // Check that the constructor is not constructing a JSFunction (see
      // comments in Runtime_NewObject in runtime.cc). In which case the
      // initial map's instance type would be JS_FUNCTION_TYPE.
      __ CmpInstanceType(rax, JS_FUNCTION_TYPE);
      __ j(equal, &rt_call);

      if (count_constructions) {
        Label allocate;
        // Decrease generous allocation count.
        __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
        __ decb(FieldOperand(rcx,
                             SharedFunctionInfo::kConstructionCountOffset));
        __ j(not_zero, &allocate);

        __ push(rax);
        __ push(rdi);

        __ push(rdi);  // constructor
        // The call will replace the stub, so the countdown is only done once.
        __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);

        __ pop(rdi);
        __ pop(rax);

        __ bind(&allocate);
      }

      // Now allocate the JSObject on the heap.
      __ movzxbq(rdi, FieldOperand(rax, Map::kInstanceSizeOffset));
      __ shl(rdi, Immediate(kPointerSizeLog2));
      // rdi: size of new object
      __ Allocate(rdi,
                  rbx,
                  rdi,
                  no_reg,
                  &rt_call,
                  NO_ALLOCATION_FLAGS);
      // Allocated the JSObject, now initialize the fields.
      // rax: initial map
      // rbx: JSObject (not HeapObject tagged - the actual address).
      // rdi: start of next object
      __ movp(Operand(rbx, JSObject::kMapOffset), rax);
      __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
      __ movp(Operand(rbx, JSObject::kPropertiesOffset), rcx);
      __ movp(Operand(rbx, JSObject::kElementsOffset), rcx);
      // Set extra fields in the newly allocated object.
      // rax: initial map
      // rbx: JSObject
      // rdi: start of next object
      __ lea(rcx, Operand(rbx, JSObject::kHeaderSize));
      __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
      if (count_constructions) {
        __ movzxbq(rsi,
                   FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset));
        __ lea(rsi,
               Operand(rbx, rsi, times_pointer_size, JSObject::kHeaderSize));
        // rsi: offset of first field after pre-allocated fields
        if (FLAG_debug_code) {
          __ cmpq(rsi, rdi);
          __ Assert(less_equal,
                    kUnexpectedNumberOfPreAllocatedPropertyFields);
        }
        __ InitializeFieldsWithFiller(rcx, rsi, rdx);
        __ LoadRoot(rdx, Heap::kOnePointerFillerMapRootIndex);
      }
      __ InitializeFieldsWithFiller(rcx, rdi, rdx);

      // Add the object tag to make the JSObject real, so that we can continue
      // and jump into the continuation code at any time from now on. Any
      // failures need to undo the allocation, so that the heap is in a
      // consistent state and verifiable.
      // rax: initial map
      // rbx: JSObject
      // rdi: start of next object
      __ or_(rbx, Immediate(kHeapObjectTag));

      // Check if a non-empty properties array is needed.
      // Allocate and initialize a FixedArray if it is.
      // rax: initial map
      // rbx: JSObject
      // rdi: start of next object
      // Calculate the total number of properties described by the map.
      __ movzxbq(rdx, FieldOperand(rax, Map::kUnusedPropertyFieldsOffset));
      __ movzxbq(rcx,
                 FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset));
      __ addq(rdx, rcx);
      // Calculate unused properties past the end of the in-object properties.
      __ movzxbq(rcx, FieldOperand(rax, Map::kInObjectPropertiesOffset));
      __ subq(rdx, rcx);
      // Done if no extra properties are to be allocated.
      __ j(zero, &allocated);
      __ Assert(positive, kPropertyAllocationCountFailed);

      // Scale the number of elements by pointer size and add the header for
      // FixedArrays to the start of the next object calculation from above.
      // rbx: JSObject
      // rdi: start of next object (will be start of FixedArray)
      // rdx: number of elements in properties array
      __ Allocate(FixedArray::kHeaderSize,
                  times_pointer_size,
                  rdx,
                  rdi,
                  rax,
                  no_reg,
                  &undo_allocation,
                  RESULT_CONTAINS_TOP);

      // Initialize the FixedArray.
      // rbx: JSObject
      // rdi: FixedArray
      // rdx: number of elements
      // rax: start of next object
      __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex);
      __ movp(Operand(rdi, HeapObject::kMapOffset), rcx);  // setup the map
      __ Integer32ToSmi(rdx, rdx);
      __ movp(Operand(rdi, FixedArray::kLengthOffset), rdx);  // and length

      // Initialize the fields to undefined.
      // rbx: JSObject
      // rdi: FixedArray
      // rax: start of next object
      // rdx: number of elements
      { Label loop, entry;
        __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
        __ lea(rcx, Operand(rdi, FixedArray::kHeaderSize));
        __ jmp(&entry);
        __ bind(&loop);
        __ movp(Operand(rcx, 0), rdx);
        __ addq(rcx, Immediate(kPointerSize));
        __ bind(&entry);
        __ cmpq(rcx, rax);
        __ j(below, &loop);
      }

      // Store the initialized FixedArray into the properties field of
      // the JSObject.
      // rbx: JSObject
      // rdi: FixedArray
      __ or_(rdi, Immediate(kHeapObjectTag));  // add the heap tag
      __ movp(FieldOperand(rbx, JSObject::kPropertiesOffset), rdi);

      // Continue with JSObject being successfully allocated.
      // rbx: JSObject
      __ jmp(&allocated);

      // Undo the setting of the new top so that the heap is verifiable. For
      // example, the map's unused properties potentially do not match the
      // allocated objects unused properties.
      // rbx: JSObject (previous new top)
      __ bind(&undo_allocation);
      __ UndoAllocationInNewSpace(rbx);
    }

    // Allocate the new receiver object using the runtime call.
    // rdi: function (constructor)
    __ bind(&rt_call);
    // Must restore rdi (constructor) before calling runtime.
    __ movp(rdi, Operand(rsp, 0));
    __ push(rdi);
    __ CallRuntime(Runtime::kNewObject, 1);
    __ movp(rbx, rax);  // store result in rbx

    // New object allocated.
    // rbx: newly allocated object
    __ bind(&allocated);
    // Retrieve the function from the stack.
    __ pop(rdi);

    // Retrieve smi-tagged arguments count from the stack.
    __ movp(rax, Operand(rsp, 0));
    __ SmiToInteger32(rax, rax);

    // Push the allocated receiver to the stack. We need two copies
    // because we may have to return the original one and the calling
    // conventions dictate that the called function pops the receiver.
    __ push(rbx);
    __ push(rbx);

    // Set up pointer to last argument.
    __ lea(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    Label loop, entry;
    __ movp(rcx, rax);
    __ jmp(&entry);
    __ bind(&loop);
    __ push(Operand(rbx, rcx, times_pointer_size, 0));
    __ bind(&entry);
    __ decq(rcx);
    __ j(greater_equal, &loop);

    // Call the function.
    if (is_api_function) {
      __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      __ Call(code, RelocInfo::CODE_TARGET);
    } else {
      ParameterCount actual(rax);
      __ InvokeFunction(rdi, actual, CALL_FUNCTION, NullCallWrapper());
    }

    // Store offset of return address for deoptimizer.
    if (!is_api_function && !count_constructions) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7.
    Label use_receiver, exit;
    // If the result is a smi, it is *not* an object in the ECMA sense.
    __ JumpIfSmi(rax, &use_receiver);

    // If the type of the result (stored in its map) is less than
    // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
    __ j(above_equal, &exit);

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ movp(rax, Operand(rsp, 0));

    // Restore the arguments count and leave the construct frame.
    __ bind(&exit);
    __ movp(rbx, Operand(rsp, kPointerSize));  // Get arguments count.

    // Leave construct frame.
  }

  // Remove caller arguments from the stack and return.
  __ PopReturnAddressTo(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ PushReturnAddressFrom(rcx);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->constructed_objects(), 1);
  __ ret(0);
}

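// The three construct stub builtins below differ only in the flags passed to
// the helper: the countdown variant tracks the generous pre-allocation of
// in-object properties, and the API variant calls through
// HandleApiCallConstruct instead of invoking the function directly.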
void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true);
}


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false);
}

static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Expects five C++ function parameters.
  // - Address entry (ignored)
  // - JSFunction* function
  // - Object* receiver
  // - int argc
  // - Object*** argv
  // (see Handle::Invoke in execution.cc).

  // Open a C++ scope for the FrameScope.
  {
    // Platform specific argument handling. After this, the stack contains
    // an internal frame and the pushed function and receiver, and
    // register rax and rbx holds the argument count and argument array,
    // while rdi holds the function pointer and rsi the context.
#ifdef _WIN64
    // MSVC parameters in:
    // rcx        : entry (ignored)
    // rdx        : function
    // r8         : receiver
    // r9         : argc
    // [rsp+0x20] : argv

    // Clear the context before we push it when entering the internal frame.
    __ Set(rsi, 0);
    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Load the function context into rsi.
    __ movp(rsi, FieldOperand(rdx, JSFunction::kContextOffset));

    // Push the function and the receiver onto the stack.
    __ push(rdx);
    __ push(r8);

    // Load the number of arguments and setup pointer to the arguments.
    __ movp(rax, r9);
    // Load the previous frame pointer to access C arguments on the stack.
    __ movp(kScratchRegister, Operand(rbp, 0));
    __ movp(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
    // Load the function pointer into rdi.
    __ movp(rdi, rdx);
#else  // _WIN64
    // GCC parameters in:
    // rdi : entry (ignored)
    // rsi : function
    // rdx : receiver
    // rcx : argc
    // r8  : argv

    __ movp(rdi, rsi);
    // rdi : function

    // Clear the context before we push it when entering the internal frame.
    __ Set(rsi, 0);
    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Push the function and receiver and setup the context.
    __ push(rdi);
    __ push(rdx);
    __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

    // Load the number of arguments and setup pointer to the arguments.
    __ movp(rax, rcx);
    __ movp(rbx, r8);
#endif  // _WIN64

    // Current stack contents:
    // [rsp + 2 * kPointerSize ... ] : Internal frame
    // [rsp + kPointerSize]          : function
    // [rsp]                         : receiver
    // Current register contents:
    // rax : argc
    // rbx : argv
    // rsi : context
    // rdi : function

    // Copy arguments to the stack in a loop.
    // Register rbx points to array of pointers to handle locations.
    // Push the values of these handles.
    Label loop, entry;
    __ Set(rcx, 0);  // Set loop variable to 0.
    __ jmp(&entry);
    __ bind(&loop);
    __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
    __ push(Operand(kScratchRegister, 0));  // dereference handle
    __ addq(rcx, Immediate(1));
    __ bind(&entry);
    __ cmpq(rcx, rax);
    __ j(not_equal, &loop);

    // Invoke the code.
    if (is_construct) {
      // No type feedback cell is available.
      Handle<Object> undefined_sentinel(
          masm->isolate()->factory()->undefined_value());
      __ Move(rbx, undefined_sentinel);
      // Expects rdi to hold function pointer.
      CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
      __ CallStub(&stub);
    } else {
      ParameterCount actual(rax);
      // Function must be in rdi.
      __ InvokeFunction(rdi, actual, CALL_FUNCTION, NullCallWrapper());
    }
    // Exit the internal frame. Notice that this also removes the empty
    // context and the function left on the stack by the code
    // invocation.
  }

  // TODO(X64): Is argument correct? Is there a receiver to remove?
  __ ret(1 * kPointerSize);  // Remove receiver.
}

void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}

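// The compile builtins below hand the current function (rdi) to the compiler
// via a runtime call and then tail-call whatever code object the runtime
// returns.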
void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kCompileUnoptimized);
  GenerateTailCallToReturnedCode(masm);
}

static void CallCompileOptimized(MacroAssembler* masm,
                                 bool concurrent) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack.
  __ push(rdi);
  // Function is also the parameter to the runtime call.
  __ push(rdi);
  // Whether to compile in a background thread.
  __ Push(masm->isolate()->factory()->ToBoolean(concurrent));

  __ CallRuntime(Runtime::kCompileOptimized, 2);
  // Restore the function.
  __ pop(rdi);
}

void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  CallCompileOptimized(masm, false);
  GenerateTailCallToReturnedCode(masm);
}


void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  CallCompileOptimized(masm, true);
  GenerateTailCallToReturnedCode(masm);
}

static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do
  // stack crawls in MakeCodeYoung. This seems a bit fragile.

  // Re-execute the code that was patched back to the young age when
  // the stub returns.
  __ subq(Operand(rsp, 0), Immediate(5));
  __ Pushad();
  __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
  __ movp(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(2);
    __ CallCFunction(
        ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  }
  __ Popad();
  __ ret(0);
}

#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}                                                            \
void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR

void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the
  // fact that make_code_young doesn't do any garbage collection which allows
  // us to save/restore the registers without worrying about which of them
  // contain pointers.
  __ Pushad();
  __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
  __ movp(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
  __ subq(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength));
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(2);
    __ CallCFunction(
        ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
        2);
  }
  __ Popad();

  // Perform prologue operations usually performed by the young code stub.
  __ PopReturnAddressTo(kScratchRegister);
  __ push(rbp);  // Caller's frame pointer.
  __ movp(rbp, rsp);
  __ push(rsi);  // Callee's context.
  __ push(rdi);  // Callee's JS Function.
  __ PushReturnAddressFrom(kScratchRegister);

  // Jump to point after the code-age stub.
  __ ret(0);
}

void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}

static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    __ Pushad();
    __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
    __ Popad();
    // Tear down internal frame.
  }

  __ pop(MemOperand(rsp, 0));  // Ignore state offset
  __ ret(0);  // Return to IC Miss stub, continuation still on stack.
}

void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}


void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}

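// Notifies the runtime that a deoptimization happened and then dispatches on
// the full-codegen state left on the stack: NO_REGISTERS drops only the state
// word, TOS_REG additionally restores the accumulator (rax) from the stack.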
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Pass the deoptimization type to the runtime system.
    __ Push(Smi::FromInt(static_cast<int>(type)));

    __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
    // Tear down internal frame.
  }

  // Get the full codegen state from the stack and untag it.
  __ SmiToInteger32(kScratchRegister, Operand(rsp, kPCOnStackSize));

  // Switch on the state.
  Label not_no_registers, not_tos_rax;
  __ cmpq(kScratchRegister, Immediate(FullCodeGenerator::NO_REGISTERS));
  __ j(not_equal, &not_no_registers, Label::kNear);
  __ ret(1 * kPointerSize);  // Remove state.

  __ bind(&not_no_registers);
  __ movp(rax, Operand(rsp, kPCOnStackSize + kPointerSize));
  __ cmpq(kScratchRegister, Immediate(FullCodeGenerator::TOS_REG));
  __ j(not_equal, &not_tos_rax, Label::kNear);
  __ ret(2 * kPointerSize);  // Remove state, rax.

  __ bind(&not_tos_rax);
  __ Abort(kNoCasesLeft);
}

void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}

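// Implements Function.prototype.call: ensures there is at least one argument,
// normalizes the receiver according to the callee's mode, shifts the
// arguments down over the old receiver and dispatches to the function, a
// function proxy or CALL_NON_FUNCTION.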
void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  // Stack Layout:
  //   rsp[0]           : Return address
  //   rsp[8]           : Argument n
  //   rsp[16]          : Argument n-1
  //   ...
  //   rsp[8 * n]       : Argument 1
  //   rsp[8 * (n + 1)] : Receiver (function to call)
  //
  // rax contains the number of arguments, n, not counting the receiver.
  //
  // 1. Make sure we have at least one argument.
  { Label done;
    __ testq(rax, rax);
    __ j(not_zero, &done);
    __ PopReturnAddressTo(rbx);
    __ Push(masm->isolate()->factory()->undefined_value());
    __ PushReturnAddressFrom(rbx);
    __ incq(rax);
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack, check
  //    if it is a function.
  Label slow, non_function;
  StackArgumentsAccessor args(rsp, rax);
  __ movp(rdi, args.GetReceiverOperand());
  __ JumpIfSmi(rdi, &non_function);
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
  __ j(not_equal, &slow);

  // 3a. Patch the first argument if necessary when calling a function.
  Label shift_arguments;
  __ Set(rdx, 0);  // indicate regular JS_FUNCTION
  { Label convert_to_object, use_global_receiver, patch_receiver;
    // Change context eagerly in case we need the global receiver.
    __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

    // Do not transform the receiver for strict mode functions.
    __ movp(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ testb(FieldOperand(rbx, SharedFunctionInfo::kStrictModeByteOffset),
             Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
    __ j(not_equal, &shift_arguments);

    // Do not transform the receiver for natives.
    // SharedFunctionInfo is already loaded into rbx.
    __ testb(FieldOperand(rbx, SharedFunctionInfo::kNativeByteOffset),
             Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
    __ j(not_zero, &shift_arguments);

    // Compute the receiver in non-strict mode.
    __ movp(rbx, args.GetArgumentOperand(1));
    __ JumpIfSmi(rbx, &convert_to_object, Label::kNear);

    __ CompareRoot(rbx, Heap::kNullValueRootIndex);
    __ j(equal, &use_global_receiver);
    __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
    __ j(equal, &use_global_receiver);

    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx);
    __ j(above_equal, &shift_arguments);

    __ bind(&convert_to_object);
    {
      // Enter an internal frame in order to preserve argument count.
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Integer32ToSmi(rax, rax);
      __ push(rax);

      __ push(rbx);
      __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
      __ movp(rbx, rax);
      __ Set(rdx, 0);  // indicate regular JS_FUNCTION

      __ pop(rax);
      __ SmiToInteger32(rax, rax);
    }

    // Restore the function to rdi.
    __ movp(rdi, args.GetReceiverOperand());
    __ jmp(&patch_receiver, Label::kNear);

    __ bind(&use_global_receiver);
    __ movp(rbx,
            Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
    __ movp(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));

    __ bind(&patch_receiver);
    __ movp(args.GetArgumentOperand(1), rbx);

    __ jmp(&shift_arguments);
  }

  // 3b. Check for function proxy.
  __ bind(&slow);
  __ Set(rdx, 1);  // indicate function proxy
  __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE);
  __ j(equal, &shift_arguments);
  __ bind(&non_function);
  __ Set(rdx, 2);  // indicate non-function

  // 3c. Patch the first argument when calling a non-function. The
  //     CALL_NON_FUNCTION builtin expects the non-function callee as
  //     receiver, so overwrite the first argument which will ultimately
  //     become the receiver.
  __ movp(args.GetArgumentOperand(1), rdi);

  // 4. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver). Adjust argument count to make
  //    the original first argument the new receiver.
  __ bind(&shift_arguments);
  { Label loop;
    __ movp(rcx, rax);
    __ bind(&loop);
    __ movp(rbx, Operand(rsp, rcx, times_pointer_size, 0));
    __ movp(Operand(rsp, rcx, times_pointer_size, 1 * kPointerSize), rbx);
    __ decq(rcx);
    __ j(not_sign, &loop);  // While non-negative (to copy return address).
    __ pop(rbx);  // Discard copy of return address.
    __ decq(rax);  // One fewer argument (first argument is new receiver).
  }

  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
  //     or a function proxy via CALL_FUNCTION_PROXY.
  { Label function, non_proxy;
    __ testq(rdx, rdx);
    __ j(zero, &function);
    __ Set(rbx, 0);
    __ cmpq(rdx, Immediate(1));
    __ j(not_equal, &non_proxy);

    __ PopReturnAddressTo(rdx);
    __ push(rdi);  // re-add proxy object as additional argument
    __ PushReturnAddressFrom(rdx);
    __ incq(rax);
    __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
    __ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
           RelocInfo::CODE_TARGET);

    __ bind(&non_proxy);
    __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
    __ bind(&function);
  }

  // 5b. Get the code to call from the function and check that the number of
  //     expected arguments matches what we're providing. If so, jump
  //     (tail-call) to the code in register rdx without checking arguments.
  __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movsxlq(rbx,
             FieldOperand(rdx,
                          SharedFunctionInfo::kFormalParameterCountOffset));
  __ movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
  __ cmpq(rax, rbx);
  __ j(not_equal,
       masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
       RelocInfo::CODE_TARGET);

  ParameterCount expected(0);
  __ InvokeCode(rdx, expected, expected, JUMP_FUNCTION, NullCallWrapper());
}

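// Implements Function.prototype.apply: after an up-front stack-overflow
// check, the arguments array is pushed one element at a time (via the keyed
// load IC) before invoking the function or a function proxy.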
void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  // Stack at entry:
  // rsp     : return address
  // rsp[8]  : arguments
  // rsp[16] : receiver ("this")
  // rsp[24] : function
  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);
    // Stack frame:
    // rbp     : Old base pointer
    // rbp[8]  : return address
    // rbp[16] : function arguments
    // rbp[24] : receiver
    // rbp[32] : function
    static const int kArgumentsOffset = kFPOnStackSize + kPCOnStackSize;
    static const int kReceiverOffset = kArgumentsOffset + kPointerSize;
    static const int kFunctionOffset = kReceiverOffset + kPointerSize;

    __ push(Operand(rbp, kFunctionOffset));
    __ push(Operand(rbp, kArgumentsOffset));
    __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);

    // Check the stack for overflow. We are not trying to catch
    // interruptions (e.g. debug break and preemption) here, so the "real stack
    // limit" is checked.
    Label okay;
    __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
    __ movp(rcx, rsp);
    // Make rcx the space we have left. The stack might already be overflowed
    // here which will cause rcx to become negative.
    __ subq(rcx, kScratchRegister);
    // Make rdx the space we need for the array when it is unrolled onto the
    // stack.
    __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2);
    // Check if the arguments will overflow the stack.
    __ cmpq(rcx, rdx);
    __ j(greater, &okay);  // Signed comparison.

    // Out of stack space.
    __ push(Operand(rbp, kFunctionOffset));
    __ push(rax);
    __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
    __ bind(&okay);
    // End of stack check.

    // Push current index and limit.
    const int kLimitOffset =
        StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize;
    const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
    __ push(rax);  // limit
    __ push(Immediate(0));  // index

    // Get the receiver.
    __ movp(rbx, Operand(rbp, kReceiverOffset));

    // Check that the function is a JS function (otherwise it must be a proxy).
    Label push_receiver;
    __ movp(rdi, Operand(rbp, kFunctionOffset));
    __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
    __ j(not_equal, &push_receiver);

    // Change context eagerly to get the right global object if necessary.
    __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

    // Do not transform the receiver for strict mode functions.
    Label call_to_object, use_global_receiver;
    __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ testb(FieldOperand(rdx, SharedFunctionInfo::kStrictModeByteOffset),
             Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
    __ j(not_equal, &push_receiver);

    // Do not transform the receiver for natives.
    __ testb(FieldOperand(rdx, SharedFunctionInfo::kNativeByteOffset),
             Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
    __ j(not_equal, &push_receiver);

    // Compute the receiver in non-strict mode.
    __ JumpIfSmi(rbx, &call_to_object, Label::kNear);
    __ CompareRoot(rbx, Heap::kNullValueRootIndex);
    __ j(equal, &use_global_receiver);
    __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
    __ j(equal, &use_global_receiver);

    // If given receiver is already a JavaScript object then there's no
    // reason for converting it.
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx);
    __ j(above_equal, &push_receiver);

    // Convert the receiver to an object.
    __ bind(&call_to_object);
    __ push(rbx);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
    __ movp(rbx, rax);
    __ jmp(&push_receiver, Label::kNear);

    __ bind(&use_global_receiver);
    __ movp(rbx,
            Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
    __ movp(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));

    // Push the receiver.
    __ bind(&push_receiver);
    __ push(rbx);

    // Copy all arguments from the array to the stack.
    Label entry, loop;
    __ movp(rax, Operand(rbp, kIndexOffset));
    __ jmp(&entry);
    __ bind(&loop);
    __ movp(rdx, Operand(rbp, kArgumentsOffset));  // load arguments

    // Use inline caching to speed up access to arguments.
    Handle<Code> ic =
        masm->isolate()->builtins()->KeyedLoadIC_Initialize();
    __ Call(ic, RelocInfo::CODE_TARGET);
    // It is important that we do not have a test instruction after the
    // call. A test instruction after the call is used to indicate that
    // we have generated an inline version of the keyed load. In this
    // case, we know that we are not generating a test instruction next.

    // Push the nth argument.
    __ push(rax);

    // Update the index on the stack and in register rax.
    __ movp(rax, Operand(rbp, kIndexOffset));
    __ SmiAddConstant(rax, rax, Smi::FromInt(1));
    __ movp(Operand(rbp, kIndexOffset), rax);

    __ bind(&entry);
    __ cmpq(rax, Operand(rbp, kLimitOffset));
    __ j(not_equal, &loop);

    // Call the function.
    Label call_proxy;
    ParameterCount actual(rax);
    __ SmiToInteger32(rax, rax);
    __ movp(rdi, Operand(rbp, kFunctionOffset));
    __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
    __ j(not_equal, &call_proxy);
    __ InvokeFunction(rdi, actual, CALL_FUNCTION, NullCallWrapper());

    frame_scope.GenerateLeaveFrame();
    __ ret(3 * kPointerSize);  // remove this, receiver, and arguments

    // Call the function proxy.
    __ bind(&call_proxy);
    __ push(rdi);  // add function proxy as last argument
    __ incq(rax);
    __ Set(rbx, 0);
    __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
    __ call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    // Leave internal frame.
  }
  __ ret(3 * kPointerSize);  // remove this, receiver, and arguments
}

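// The Array builtins below only check (in debug mode) that the function's
// initial map looks sane and then tail-call the matching constructor stub.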
void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_array_code;

  // Get the InternalArray function.
  __ LoadGlobalFunction(Context::INTERNAL_ARRAY_FUNCTION_INDEX, rdi);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, kUnexpectedInitialMapForInternalArrayFunction);
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rdi);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForArrayFunction);
  }

  // Run the native code for the Array function called as a normal function.
  Handle<Object> undefined_sentinel(
      masm->isolate()->heap()->undefined_value(),
      masm->isolate());
  __ Move(rbx, undefined_sentinel);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax                 : number of arguments
  //  -- rdi                 : constructor function
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_ctor_calls(), 1);

  if (FLAG_debug_code) {
    __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, rcx);
    __ cmpq(rdi, rcx);
    __ Assert(equal, kUnexpectedStringFunction);
  }

  // Load the first argument into rax and get rid of the rest
  // (including the receiver).
  StackArgumentsAccessor args(rsp, rax);
  Label no_arguments;
  __ testq(rax, rax);
  __ j(zero, &no_arguments);
  __ movp(rbx, args.GetArgumentOperand(1));
  __ PopReturnAddressTo(rcx);
  __ lea(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
  __ PushReturnAddressFrom(rcx);
  __ movp(rax, rbx);

  // Lookup the argument in the number to string cache.
  Label not_cached, argument_is_string;
  __ LookupNumberStringCache(rax,  // Input.
                             rbx,  // Result.
                             rcx,  // Scratch 1.
                             rdx,  // Scratch 2.
                             &not_cached);
  __ IncrementCounter(counters->string_ctor_cached_number(), 1);
  __ bind(&argument_is_string);

  // ----------- S t a t e -------------
  //  -- rbx    : argument converted to string
  //  -- rdi    : constructor function
  //  -- rsp[0] : return address
  // -----------------------------------

  // Allocate a JSValue and put the tagged pointer into rax.
  Label gc_required;
  __ Allocate(JSValue::kSize,
              rax,  // Result.
              rcx,  // New allocation top (we ignore it).
              no_reg,
              &gc_required,
              TAG_OBJECT);

  // Set the map.
  __ LoadGlobalFunctionInitialMap(rdi, rcx);
  if (FLAG_debug_code) {
    __ cmpb(FieldOperand(rcx, Map::kInstanceSizeOffset),
            Immediate(JSValue::kSize >> kPointerSizeLog2));
    __ Assert(equal, kUnexpectedStringWrapperInstanceSize);
    __ cmpb(FieldOperand(rcx, Map::kUnusedPropertyFieldsOffset), Immediate(0));
    __ Assert(equal, kUnexpectedUnusedPropertiesOfStringWrapper);
  }
  __ movp(FieldOperand(rax, HeapObject::kMapOffset), rcx);

  // Set properties and elements.
  __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
  __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), rcx);
  __ movp(FieldOperand(rax, JSObject::kElementsOffset), rcx);

  // Set the value.
  __ movp(FieldOperand(rax, JSValue::kValueOffset), rbx);

  // Ensure the object is fully initialized.
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);

  // We're done. Return.
  __ ret(0);

  // The argument was not found in the number to string cache. Check
  // if it's a string already before calling the conversion builtin.
  Label convert_argument;
  __ bind(&not_cached);
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfSmi(rax, &convert_argument);
  Condition is_string = masm->IsObjectStringType(rax, rbx, rcx);
  __ j(NegateCondition(is_string), &convert_argument);
  __ movp(rbx, rax);
  __ IncrementCounter(counters->string_ctor_string_value(), 1);
  __ jmp(&argument_is_string);

  // Invoke the conversion builtin and put the result into rbx.
  __ bind(&convert_argument);
  __ IncrementCounter(counters->string_ctor_conversions(), 1);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(rdi);  // Preserve the function.
    __ push(rax);
    __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
    __ pop(rdi);
  }
  __ movp(rbx, rax);
  __ jmp(&argument_is_string);

  // Load the empty string into rbx, remove the receiver from the
  // stack, and jump back to the case where the argument is a string.
  __ bind(&no_arguments);
  __ LoadRoot(rbx, Heap::kempty_stringRootIndex);
  __ PopReturnAddressTo(rcx);
  __ lea(rsp, Operand(rsp, kPointerSize));
  __ PushReturnAddressFrom(rcx);
  __ jmp(&argument_is_string);

  // At this point the argument is already a string. Call runtime to
  // create a string wrapper.
  __ bind(&gc_required);
  __ IncrementCounter(counters->string_ctor_gc_required(), 1);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(rbx);
    __ CallRuntime(Runtime::kNewStringWrapper, 1);
  }
  __ ret(0);
}

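// Helpers for entering and leaving an arguments adaptor frame. The adaptor
// frame records the actual argument count (as a Smi) so the adapted frame
// can be torn down without knowing the callee's formal parameter count.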
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ push(rbp);
  __ movp(rbp, rsp);

  // Store the arguments adaptor context sentinel.
  __ Push(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));

  // Push the function on the stack.
  __ push(rdi);

  // Preserve the number of arguments on the stack. Must preserve rax,
  // rbx and rcx because these registers are used when copying the
  // arguments and the receiver.
  __ Integer32ToSmi(r8, rax);
  __ push(r8);
}

static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // Retrieve the number of arguments from the stack. Number is a Smi.
  __ movp(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));

  // Leave the frame.
  __ movp(rsp, rbp);
  __ pop(rbp);

  // Remove caller arguments from the stack.
  __ PopReturnAddressTo(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ PushReturnAddressFrom(rcx);
}

void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : actual number of arguments
  //  -- rbx : expected number of arguments
  //  -- rdi : function (passed through to callee)
  // -----------------------------------

  Label invoke, dont_adapt_arguments;
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->arguments_adaptors(), 1);

  Label enough, too_few;
  __ movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
  __ cmpq(rax, rbx);
  __ j(less, &too_few);
  __ cmpq(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ j(equal, &dont_adapt_arguments);

  {  // Enough parameters: Actual >= expected.
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);

    // Copy receiver and all expected arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ lea(rax, Operand(rbp, rax, times_pointer_size, offset));
    __ Set(r8, -1);  // account for receiver

    Label copy;
    __ bind(&copy);
    __ incq(r8);
    __ push(Operand(rax, 0));
    __ subq(rax, Immediate(kPointerSize));
    __ cmpq(r8, rbx);
    __ j(less, &copy);
    __ jmp(&invoke);
  }

  {  // Too few parameters: Actual < expected.
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);

    // Copy receiver and all actual arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ lea(rdi, Operand(rbp, rax, times_pointer_size, offset));
    __ Set(r8, -1);  // account for receiver

    Label copy;
    __ bind(&copy);
    __ incq(r8);
    __ push(Operand(rdi, 0));
    __ subq(rdi, Immediate(kPointerSize));
    __ cmpq(r8, rax);
    __ j(less, &copy);

    // Fill remaining expected arguments with undefined values.
    Label fill;
    __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
    __ bind(&fill);
    __ incq(r8);
    __ push(kScratchRegister);
    __ cmpq(r8, rbx);
    __ j(less, &fill);

    // Restore function pointer.
    __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  }

  // Call the entry point.
  __ bind(&invoke);
  __ call(rdx);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Leave frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ ret(0);

  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ jmp(rdx);
}

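// On-stack replacement: asks the runtime for optimized code for the function
// in the current JavaScript frame and, if it gets some, rewrites the return
// address to point at the code's OSR entry point.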
void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ push(rax);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
  }

  Label skip;
  // If the code object is null, just return to the unoptimized code.
  __ cmpq(rax, Immediate(0));
  __ j(not_equal, &skip, Label::kNear);
  __ ret(0);

  __ bind(&skip);

  // Load deoptimization data from the code object.
  __ movp(rbx, Operand(rax, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  __ SmiToInteger32(rbx, Operand(rbx, FixedArray::OffsetOfElementAt(
      DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));

  // Compute the target address = code_obj + header_size + osr_offset.
  __ lea(rax, Operand(rax, rbx, times_1, Code::kHeaderSize - kHeapObjectTag));

  // Overwrite the return address on the stack.
  __ movq(StackOperandForReturnAddress(0), rax);

  // And "return" to the OSR entry point of the function.
  __ ret(0);
}

void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  // We check the stack limit as indicator that recompilation might be done.
  Label ok;
  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
  __ j(above_equal, &ok);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kStackGuard, 0);
  }
  __ jmp(masm->isolate()->builtins()->OnStackReplacement(),
         RelocInfo::CODE_TARGET);

  __ bind(&ok);
  __ ret(0);
}

#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64