1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
9 #include "src/code-factory.h"
10 #include "src/codegen.h"
11 #include "src/deoptimizer.h"
12 #include "src/full-codegen.h"
18 #define __ ACCESS_MASM(masm)
// Adaptor that forwards a builtin call to its C++ implementation via
// JumpToExternalReference. Optionally pushes extra arguments (the called
// function) under the existing arguments before the tail-jump.
// NOTE(review): several original lines are elided in this view (the `else`
// branch of the extra-args check and the closing braces, among others) —
// the visible text is not the complete function body.
21 void Builtins::Generate_Adaptor(MacroAssembler* masm,
23 BuiltinExtraArguments extra_args) {
24 // ----------- S t a t e -------------
25 // -- rax : number of arguments excluding receiver
26 // -- rdi : called function (only guaranteed when
27 // extra_args requires it)
29 // -- rsp[0] : return address
30 // -- rsp[8] : last argument
32 // -- rsp[8 * argc] : first argument (argc == rax)
33 // -- rsp[8 * (argc + 1)] : receiver
34 // -----------------------------------
36 // Insert extra arguments.
37 int num_extra_args = 0;
38 if (extra_args == NEEDS_CALLED_FUNCTION) {
// Pop the return address so the extra argument can be pushed beneath it,
// then restore it. NOTE(review): the push of rdi between these two lines
// appears to be elided from this view — confirm against the full file.
40 __ PopReturnAddressTo(kScratchRegister);
42 __ PushReturnAddressFrom(kScratchRegister);
44 DCHECK(extra_args == NO_EXTRA_ARGUMENTS);
47 // JumpToExternalReference expects rax to contain the number of arguments
48 // including the receiver and the extra arguments.
49 __ addp(rax, Immediate(num_extra_args + 1));
50 __ JumpToExternalReference(ExternalReference(id, masm->isolate()), 1);
// Calls the given runtime function with the JSFunction (expected in rdi per
// the callers in this file) as its single argument, inside an internal frame
// so GC can walk the stack. NOTE(review): the push of rdi between the frame
// setup and the CallRuntime appears elided from this view.
54 static void CallRuntimePassFunction(
55 MacroAssembler* masm, Runtime::FunctionId function_id) {
56 FrameScope scope(masm, StackFrame::INTERNAL);
57 // Push a copy of the function onto the stack.
59 // Function is also the parameter to the runtime call.
62 __ CallRuntime(function_id, 1);
// Tail-calls the code object stored on the SharedFunctionInfo of the
// JSFunction in rdi: loads SharedFunctionInfo, then its code, skips the
// Code header, and jumps to the first instruction.
68 static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
69 __ movp(kScratchRegister,
70 FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
71 __ movp(kScratchRegister,
72 FieldOperand(kScratchRegister, SharedFunctionInfo::kCodeOffset));
// leap past the Code object header to the start of the instructions.
73 __ leap(kScratchRegister, FieldOperand(kScratchRegister, Code::kHeaderSize));
74 __ jmp(kScratchRegister);
// Tail-calls the Code object that a preceding runtime call returned in rax,
// skipping the Code header. NOTE(review): the final jmp through rax is
// elided from this view.
78 static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
79 __ leap(rax, FieldOperand(rax, Code::kHeaderSize));
// Builtin executed while a function sits in the optimization queue. If the
// stack limit has been hit (likely interrupt pending), try to install the
// optimized code now; otherwise fall back to the shared (unoptimized) code.
84 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
85 // Checking whether the queued function is ready for install is optional,
86 // since we come across interrupts and stack checks elsewhere. However,
87 // not checking may delay installing ready functions, and always checking
88 // would be quite expensive. A good compromise is to first check against
89 // stack limit as a cue for an interrupt signal.
// NOTE(review): the declaration of the `ok` label is elided from this view.
91 __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
92 __ j(above_equal, &ok);
94 CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
95 GenerateTailCallToReturnedCode(masm);
// Stack limit not hit: just run the unoptimized shared code.
98 GenerateTailCallToSharedCode(masm);
// Slow path of object construction: allocates the new object via the
// runtime (with or without an AllocationMemento). Result ends up in rbx.
// When a memento was created, the runtime already incremented the
// allocation-site count, so the fast path's increment is skipped by
// jumping to `count_incremented`.
// NOTE(review): the `create_memento` parameter, the `offset` declaration
// and several pushes before the CallRuntime are elided from this view.
102 static void Generate_Runtime_NewObject(MacroAssembler* masm,
104 Register original_constructor,
105 Label* count_incremented,
108 if (create_memento) {
109 // Get the cell or allocation site.
110 __ movp(rdi, Operand(rsp, kPointerSize * 2));
// With a memento the constructor sits one slot further down the stack.
112 offset = kPointerSize;
115 // Must restore rsi (context) and rdi (constructor) before calling runtime.
116 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
117 __ movp(rdi, Operand(rsp, offset));
119 __ Push(original_constructor);
120 if (create_memento) {
121 __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 3);
123 __ CallRuntime(Runtime::kNewObject, 2);
125 __ movp(rbx, rax); // store result in rbx
127 // Runtime_NewObjectWithAllocationSite increments allocation count.
128 // Skip the increment.
129 if (create_memento) {
130 __ jmp(count_incremented);
// Shared body of the JS construct stubs (generic and API variants).
// Allocates the receiver (inline when FLAG_inline_new and the preconditions
// hold, else via the runtime), copies the arguments, invokes the
// constructor, and applies the ECMA-262 13.2.2 rule for the return value.
// NOTE(review): many lines of the original function are elided from this
// view (label binds, pushes, else-branches, closing braces); inline
// comments below only describe what is visible.
137 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
138 bool is_api_function,
139 bool create_memento) {
140 // ----------- S t a t e -------------
141 // -- rax: number of arguments
142 // -- rdi: constructor function
143 // -- rbx: allocation site or undefined
144 // -- rdx: original constructor
145 // -----------------------------------
147 // Should never create mementos for api functions.
148 DCHECK(!is_api_function || !create_memento);
150 // Enter a construct frame.
152 FrameScope scope(masm, StackFrame::CONSTRUCT);
154 if (create_memento) {
155 __ AssertUndefinedOrAllocationSite(rbx);
159 // Store a smi-tagged arguments count on the stack.
160 __ Integer32ToSmi(rax, rax);
163 // Push the function to invoke on the stack.
166 Label rt_call, normal_new, allocated, count_incremented;
// Presumably guarded by a comparison of rdi and rdx (elided) — when the
// original constructor equals the called function, take the normal path.
168 __ j(equal, &normal_new);
170 Generate_Runtime_NewObject(masm, create_memento, rdx, &count_incremented,
173 __ bind(&normal_new);
174 // Try to allocate the object without transitioning into C code. If any of
175 // the preconditions is not met, the code bails out to the runtime call.
176 if (FLAG_inline_new) {
177 Label undo_allocation;
// Bail out to the runtime while the debugger's step-in is active.
179 ExternalReference debug_step_in_fp =
180 ExternalReference::debug_step_in_fp_address(masm->isolate());
181 __ Move(kScratchRegister, debug_step_in_fp);
182 __ cmpp(Operand(kScratchRegister, 0), Immediate(0));
183 __ j(not_equal, &rt_call);
185 // Verified that the constructor is a JSFunction.
186 // Load the initial map and verify that it is in fact a map.
188 __ movp(rax, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
189 // Will both indicate a NULL and a Smi
190 DCHECK(kSmiTag == 0);
191 __ JumpIfSmi(rax, &rt_call);
193 // rax: initial map (if proven valid below)
194 __ CmpObjectType(rax, MAP_TYPE, rbx);
195 __ j(not_equal, &rt_call);
197 // Check that the constructor is not constructing a JSFunction (see
198 // comments in Runtime_NewObject in runtime.cc). In which case the
199 // initial map's instance type would be JS_FUNCTION_TYPE.
202 __ CmpInstanceType(rax, JS_FUNCTION_TYPE);
203 __ j(equal, &rt_call);
204 if (!is_api_function) {
206 // The code below relies on these assumptions.
207 STATIC_ASSERT(Map::Counter::kShift + Map::Counter::kSize == 32);
208 // Check if slack tracking is enabled.
209 __ movl(rsi, FieldOperand(rax, Map::kBitField3Offset));
210 __ shrl(rsi, Immediate(Map::Counter::kShift));
211 __ cmpl(rsi, Immediate(Map::kSlackTrackingCounterEnd));
212 __ j(less, &allocate);
213 // Decrease generous allocation count.
214 __ subl(FieldOperand(rax, Map::kBitField3Offset),
215 Immediate(1 << Map::Counter::kShift));
// If the counter just reached the end, finalize the instance size now.
217 __ cmpl(rsi, Immediate(Map::kSlackTrackingCounterEnd));
218 __ j(not_equal, &allocate);
223 __ Push(rdi); // constructor
224 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
// Slack tracking counter is reloaded as "finished" for the code below.
228 __ movl(rsi, Immediate(Map::kSlackTrackingCounterEnd - 1));
233 // Now allocate the JSObject on the heap.
234 __ movzxbp(rdi, FieldOperand(rax, Map::kInstanceSizeOffset));
235 __ shlp(rdi, Immediate(kPointerSizeLog2));
236 if (create_memento) {
237 __ addp(rdi, Immediate(AllocationMemento::kSize));
239 // rdi: size of new object
// NOTE(review): the __ Allocate(...) call itself is elided; only its last
// argument line is visible below.
245 NO_ALLOCATION_FLAGS);
246 Factory* factory = masm->isolate()->factory();
247 // Allocated the JSObject, now initialize the fields.
249 // rbx: JSObject (not HeapObject tagged - the actual address).
250 // rdi: start of next object (including memento if create_memento)
251 __ movp(Operand(rbx, JSObject::kMapOffset), rax);
252 __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
253 __ movp(Operand(rbx, JSObject::kPropertiesOffset), rcx);
254 __ movp(Operand(rbx, JSObject::kElementsOffset), rcx);
255 // Set extra fields in the newly allocated object.
258 // rdi: start of next object (including memento if create_memento)
259 // rsi: slack tracking counter (non-API function case)
260 __ leap(rcx, Operand(rbx, JSObject::kHeaderSize));
261 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
262 if (!is_api_function) {
263 Label no_inobject_slack_tracking;
265 // Check if slack tracking is enabled.
266 __ cmpl(rsi, Immediate(Map::kSlackTrackingCounterEnd));
267 __ j(less, &no_inobject_slack_tracking);
269 // Allocate object with a slack.
271 FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset));
273 Operand(rbx, rsi, times_pointer_size, JSObject::kHeaderSize));
274 // rsi: offset of first field after pre-allocated fields
275 if (FLAG_debug_code) {
// Debug-mode sanity check: pre-allocated fields must fit in the object.
277 __ Assert(less_equal,
278 kUnexpectedNumberOfPreAllocatedPropertyFields);
280 __ InitializeFieldsWithFiller(rcx, rsi, rdx);
281 __ LoadRoot(rdx, Heap::kOnePointerFillerMapRootIndex);
282 // Fill the remaining fields with one pointer filler map.
284 __ bind(&no_inobject_slack_tracking);
286 if (create_memento) {
// Initialize fields up to the memento, then fill the memento itself.
287 __ leap(rsi, Operand(rdi, -AllocationMemento::kSize));
288 __ InitializeFieldsWithFiller(rcx, rsi, rdx);
290 // Fill in memento fields if necessary.
291 // rsi: points to the allocated but uninitialized memento.
292 __ Move(Operand(rsi, AllocationMemento::kMapOffset),
293 factory->allocation_memento_map());
294 // Get the cell or undefined.
295 __ movp(rdx, Operand(rsp, kPointerSize*2));
296 __ movp(Operand(rsi, AllocationMemento::kAllocationSiteOffset), rdx);
// No memento: fill every remaining field up to the object end (rdi).
298 __ InitializeFieldsWithFiller(rcx, rdi, rdx);
301 // Add the object tag to make the JSObject real, so that we can continue
302 // and jump into the continuation code at any time from now on. Any
303 // failures need to undo the allocation, so that the heap is in a
304 // consistent state and verifiable.
307 // rdi: start of next object
308 __ orp(rbx, Immediate(kHeapObjectTag));
310 // Check if a non-empty properties array is needed.
311 // Allocate and initialize a FixedArray if it is.
314 // rdi: start of next object
315 // Calculate total properties described map.
316 __ movzxbp(rdx, FieldOperand(rax, Map::kUnusedPropertyFieldsOffset));
318 FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset));
320 // Calculate unused properties past the end of the in-object properties.
321 __ movzxbp(rcx, FieldOperand(rax, Map::kInObjectPropertiesOffset));
323 // Done if no extra properties are to be allocated.
324 __ j(zero, &allocated);
325 __ Assert(positive, kPropertyAllocationCountFailed);
327 // Scale the number of elements by pointer size and add the header for
328 // FixedArrays to the start of the next object calculation from above.
330 // rdi: start of next object (will be start of FixedArray)
331 // rdx: number of elements in properties array
332 __ Allocate(FixedArray::kHeaderSize,
// RESULT_CONTAINS_TOP: the allocation top pointer is part of the result.
339 RESULT_CONTAINS_TOP);
341 // Initialize the FixedArray.
344 // rdx: number of elements
345 // rax: start of next object
346 __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex);
347 __ movp(Operand(rdi, HeapObject::kMapOffset), rcx); // setup the map
348 __ Integer32ToSmi(rdx, rdx);
349 __ movp(Operand(rdi, FixedArray::kLengthOffset), rdx); // and length
351 // Initialize the fields to undefined.
354 // rax: start of next object
355 // rdx: number of elements
357 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
358 __ leap(rcx, Operand(rdi, FixedArray::kHeaderSize));
// Store-and-advance loop body (loop label/branch elided from this view).
361 __ movp(Operand(rcx, 0), rdx);
362 __ addp(rcx, Immediate(kPointerSize));
368 // Store the initialized FixedArray into the properties field of
372 __ orp(rdi, Immediate(kHeapObjectTag)); // add the heap tag
373 __ movp(FieldOperand(rbx, JSObject::kPropertiesOffset), rdi);
376 // Continue with JSObject being successfully allocated
380 // Undo the setting of the new top so that the heap is verifiable. For
381 // example, the map's unused properties potentially do not match the
382 // allocated objects unused properties.
383 // rbx: JSObject (previous new top)
384 __ bind(&undo_allocation);
385 __ UndoAllocationInNewSpace(rbx);
388 // Allocate the new receiver object using the runtime call.
389 // rdi: function (constructor)
391 Generate_Runtime_NewObject(masm, create_memento, rdi, &count_incremented,
394 // New object allocated.
395 // rbx: newly allocated object
398 if (create_memento) {
399 __ movp(rcx, Operand(rsp, kPointerSize*2));
400 __ Cmp(rcx, masm->isolate()->factory()->undefined_value());
401 __ j(equal, &count_incremented);
402 // rcx is an AllocationSite. We are creating a memento from it, so we
403 // need to increment the memento create count.
// NOTE(review): the SmiAddConstant operation on this field is partially
// elided; only the operand line is visible.
405 FieldOperand(rcx, AllocationSite::kPretenureCreateCountOffset),
407 __ bind(&count_incremented);
410 // Retrieve the function from the stack.
413 // Retrieve smi-tagged arguments count from the stack.
414 __ movp(rax, Operand(rsp, 0));
415 __ SmiToInteger32(rax, rax);
417 // Push the allocated receiver to the stack. We need two copies
418 // because we may have to return the original one and the calling
419 // conventions dictate that the called function pops the receiver.
423 // Set up pointer to last argument.
424 __ leap(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));
426 // Copy arguments and receiver to the expression stack.
// Argument-copy loop (label and counter setup elided from this view).
431 __ Push(Operand(rbx, rcx, times_pointer_size, 0));
434 __ j(greater_equal, &loop);
436 // Call the function.
437 if (is_api_function) {
438 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
440 masm->isolate()->builtins()->HandleApiCallConstruct();
441 __ Call(code, RelocInfo::CODE_TARGET);
// Non-API path: invoke the constructor as a normal JS call.
443 ParameterCount actual(rax);
444 __ InvokeFunction(rdi, actual, CALL_FUNCTION, NullCallWrapper());
447 // Store offset of return address for deoptimizer.
448 if (!is_api_function) {
449 masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
452 // Restore context from the frame.
453 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
455 // If the result is an object (in the ECMA sense), we should get rid
456 // of the receiver and use the result; see ECMA-262 section 13.2.2-7
458 Label use_receiver, exit;
459 // If the result is a smi, it is *not* an object in the ECMA sense.
460 __ JumpIfSmi(rax, &use_receiver);
462 // If the type of the result (stored in its map) is less than
463 // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
464 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
465 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
466 __ j(above_equal, &exit);
468 // Throw away the result of the constructor invocation and use the
469 // on-stack receiver as the result.
470 __ bind(&use_receiver);
471 __ movp(rax, Operand(rsp, 0));
473 // Restore the arguments count and leave the construct frame.
475 __ movp(rbx, Operand(rsp, kPointerSize)); // Get arguments count.
477 // Leave construct frame.
480 // Remove caller arguments from the stack and return.
481 __ PopReturnAddressTo(rcx);
// rbx holds the smi-tagged argc; scale it and also pop the receiver slot.
482 SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
483 __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
484 __ PushReturnAddressFrom(rcx);
485 Counters* counters = masm->isolate()->counters();
486 __ IncrementCounter(counters->constructed_objects(), 1);
// Generic construct stub: may create allocation mementos when
// pretenuring of 'new' calls is enabled by flag.
491 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
492 Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
// Construct stub for API functions: never creates mementos (see the
// DCHECK in the helper).
496 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
497 Generate_JSConstructStubHelper(masm, true, false);
// Construct stub used for derived-class constructors: the receiver slot
// is filled with the hole, arguments plus new.target are pushed, the
// debugger step-in case is handled via the runtime, and the constructor
// is then invoked as a regular call.
// NOTE(review): several lines (loop setup, pushes of rax/rdx, label
// declarations, closing braces) are elided from this view.
501 void Builtins::Generate_JSConstructStubForDerived(MacroAssembler* masm) {
502 // ----------- S t a t e -------------
503 // -- rax: number of arguments
504 // -- rdi: constructor function
505 // -- rbx: allocation site or undefined
506 // -- rdx: original constructor
507 // -----------------------------------
508 // TODO(dslomov): support pretenuring
509 CHECK(!FLAG_pretenuring_call_new);
512 FrameScope frame_scope(masm, StackFrame::CONSTRUCT);
514 // Store a smi-tagged arguments count on the stack.
515 __ Integer32ToSmi(rax, rax);
// The smi-tagged count is pushed (push elided from view), then rax is
// untagged again for the copy loop below.
517 __ SmiToInteger32(rax, rax);
522 // receiver is the hole.
523 __ Push(masm->isolate()->factory()->the_hole_value());
525 // Set up pointer to last argument.
526 __ leap(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));
528 // Copy arguments and receiver to the expression stack.
// Argument-copy loop (label and counter setup elided from this view).
533 __ Push(Operand(rbx, rcx, times_pointer_size, 0));
536 __ j(greater_equal, &loop);
538 __ incp(rax); // Pushed new.target.
// Handle stepping into the constructor when the debugger is active.
542 ExternalReference debug_step_in_fp =
543 ExternalReference::debug_step_in_fp_address(masm->isolate());
544 __ Move(kScratchRegister, debug_step_in_fp);
545 __ cmpp(Operand(kScratchRegister, 0), Immediate(0));
546 __ j(equal, &skip_step_in);
551 __ CallRuntime(Runtime::kHandleStepInForDerivedConstructors, 1);
555 __ bind(&skip_step_in);
557 // Call the function.
558 ParameterCount actual(rax);
559 __ InvokeFunction(rdi, actual, CALL_FUNCTION, NullCallWrapper());
561 // Restore context from the frame.
562 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
564 __ movp(rbx, Operand(rsp, 0)); // Get arguments count.
565 } // Leave construct frame.
567 // Remove caller arguments from the stack and return.
568 __ PopReturnAddressTo(rcx);
// rbx holds the smi-tagged argc; scale it and also pop the receiver slot.
569 SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
570 __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
571 __ PushReturnAddressFrom(rcx);
// Entry trampoline from C++ into JS: sets up an internal frame, moves the
// C calling-convention parameters (MSVC vs. System V handled separately)
// into the registers the JS calling convention expects, pushes the
// arguments by dereferencing the handle array, and invokes the function
// either via the construct stub or as a normal call.
// NOTE(review): many lines (the is_construct parameter, #ifdef _WIN64
// branches, pushes, loop labels, closing braces) are elided from this view.
576 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
578 ProfileEntryHookStub::MaybeCallEntryHook(masm);
580 // Expects five C++ function parameters.
581 // - Address entry (ignored)
582 // - JSFunction* function (
583 // - Object* receiver
586 // (see Handle::Invoke in execution.cc).
588 // Open a C++ scope for the FrameScope.
590 // Platform specific argument handling. After this, the stack contains
591 // an internal frame and the pushed function and receiver, and
592 // register rax and rbx holds the argument count and argument array,
593 // while rdi holds the function pointer and rsi the context.
596 // MSVC parameters in:
597 // rcx : entry (ignored)
603 // Clear the context before we push it when entering the internal frame.
605 // Enter an internal frame.
606 FrameScope scope(masm, StackFrame::INTERNAL);
608 // Load the function context into rsi.
609 __ movp(rsi, FieldOperand(rdx, JSFunction::kContextOffset));
611 // Push the function and the receiver onto the stack.
615 // Load the number of arguments and setup pointer to the arguments.
617 // Load the previous frame pointer to access C argument on stack
618 __ movp(kScratchRegister, Operand(rbp, 0));
619 __ movp(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
620 // Load the function pointer into rdi.
623 // GCC parameters in:
624 // rdi : entry (ignored)
633 // Clear the context before we push it when entering the internal frame.
635 // Enter an internal frame.
636 FrameScope scope(masm, StackFrame::INTERNAL);
638 // Push the function and receiver and setup the context.
641 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
643 // Load the number of arguments and setup pointer to the arguments.
648 // Current stack contents:
649 // [rsp + 2 * kPointerSize ... ] : Internal frame
650 // [rsp + kPointerSize] : function
652 // Current register contents:
658 // Copy arguments to the stack in a loop.
659 // Register rbx points to array of pointers to handle locations.
660 // Push the values of these handles.
662 __ Set(rcx, 0); // Set loop variable to 0.
665 __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
666 __ Push(Operand(kScratchRegister, 0)); // dereference handle
667 __ addp(rcx, Immediate(1));
// Loop until rcx reaches the argument count (comparison elided from view).
670 __ j(not_equal, &loop);
674 // No type feedback cell is available
675 __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
676 // Expects rdi to hold function pointer.
677 CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
// Non-construct path: plain invocation of the function.
680 ParameterCount actual(rax);
681 // Function must be in rdi.
682 __ InvokeFunction(rdi, actual, CALL_FUNCTION, NullCallWrapper());
684 // Exit the internal frame. Notice that this also removes the empty
685 // context and the function left on the stack by the code
689 // TODO(X64): Is argument correct? Is there a receiver to remove?
690 __ ret(1 * kPointerSize); // Remove receiver.
// Entry trampoline for a plain (non-construct) call from C++.
694 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
695 Generate_JSEntryTrampolineHelper(masm, false);
// Entry trampoline for a construct ('new') call from C++.
699 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
700 Generate_JSEntryTrampolineHelper(masm, true);
// Lazily compiles the function via the runtime, then tail-calls the code
// object the runtime returned in rax.
704 void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
705 CallRuntimePassFunction(masm, Runtime::kCompileLazy);
706 GenerateTailCallToReturnedCode(masm);
// Calls Runtime::kCompileOptimized with the function and a boolean
// selecting concurrent (background-thread) compilation, inside an
// internal frame. NOTE(review): the `concurrent` parameter declaration
// and the pushes of rdi are elided from this view.
710 static void CallCompileOptimized(MacroAssembler* masm,
712 FrameScope scope(masm, StackFrame::INTERNAL);
713 // Push a copy of the function onto the stack.
715 // Function is also the parameter to the runtime call.
717 // Whether to compile in a background thread.
718 __ Push(masm->isolate()->factory()->ToBoolean(concurrent));
720 __ CallRuntime(Runtime::kCompileOptimized, 2);
// Synchronous (non-concurrent) optimized compilation, then tail-call the
// returned code.
726 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
727 CallCompileOptimized(masm, false);
728 GenerateTailCallToReturnedCode(masm);
// Concurrent (background-thread) optimized compilation, then tail-call
// the returned code.
732 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
733 CallCompileOptimized(masm, true);
734 GenerateTailCallToReturnedCode(masm);
// Common tail for the code-aging builtins: calls the C function that
// patches the caller's code back to its "young" form, then re-executes
// the patched sequence.
// NOTE(review): the pushad/popad register save/restore and the final jump
// back into the patched code are elided from this view.
738 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
739 // For now, we are relying on the fact that make_code_young doesn't do any
740 // garbage collection which allows us to save/restore the registers without
741 // worrying about which of them contain pointers. We also don't build an
742 // internal frame to make the code faster, since we shouldn't have to do stack
743 // crawls in MakeCodeYoung. This seems a bit fragile.
745 // Re-execute the code that was patched back to the young age when
// Back up the return address by the length of the call sequence so the
// patched code is re-executed from its start.
747 __ subp(Operand(rsp, 0), Immediate(5));
749 __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
750 __ movp(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
752 FrameScope scope(masm, StackFrame::MANUAL);
753 __ PrepareCallCFunction(2);
// NOTE(review): the CallCFunction line preceding this argument is elided.
755 ExternalReference::get_make_code_young_function(masm->isolate()), 2);
// Stamps out the even/odd-marking MakeCodeYoungAgain builtins for every
// code age via CODE_AGE_LIST; each generated builtin simply delegates to
// GenerateMakeCodeYoungAgainCommon. (No comments are inserted inside the
// macro body below because of the backslash line continuations.)
762 #define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \
763 void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
764 MacroAssembler* masm) { \
765 GenerateMakeCodeYoungAgainCommon(masm); \
767 void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \
768 MacroAssembler* masm) { \
769 GenerateMakeCodeYoungAgainCommon(masm); \
771 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
772 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
// Marks the calling code object as executed once: calls the C marking
// function, then performs the frame-prologue work the young-code stub
// would normally do, and jumps past the code-age stub in the caller.
// NOTE(review): register save/restore, the CallCFunction line and the
// final jump are elided from this view.
775 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
776 // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
777 // that make_code_young doesn't do any garbage collection which allows us to
778 // save/restore the registers without worrying about which of them contain
781 __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
782 __ movp(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
// Point arg_reg_1 at the start of the call instruction that got us here.
783 __ subp(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength));
785 FrameScope scope(masm, StackFrame::MANUAL);
786 __ PrepareCallCFunction(2);
788 ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
793 // Perform prologue operations usually performed by the young code stub.
794 __ PopReturnAddressTo(kScratchRegister);
795 __ pushq(rbp); // Caller's frame pointer.
797 __ Push(rsi); // Callee's context.
798 __ Push(rdi); // Callee's JS Function.
799 __ PushReturnAddressFrom(kScratchRegister);
801 // Jump to point after the code-age stub.
// Marking code as executed twice behaves like making it young again.
806 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
807 GenerateMakeCodeYoungAgainCommon(masm);
// Notifies the runtime that a stub failed (deopted), preserving all
// registers across the call, then returns to the IC miss stub whose
// continuation is still on the stack.
// NOTE(review): the pushad/popad pair around the runtime call is elided
// from this view.
811 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
812 SaveFPRegsMode save_doubles) {
813 // Enter an internal frame.
815 FrameScope scope(masm, StackFrame::INTERNAL);
817 // Preserve registers across notification, this is important for compiled
818 // stubs that tail call the runtime on deopts passing their parameters in
821 __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
823 // Tear down internal frame.
826 __ DropUnderReturnAddress(1); // Ignore state offset
827 __ ret(0); // Return to IC Miss stub, continuation still on stack.
// Stub-failure notification without saving FP registers.
831 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
832 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
// Stub-failure notification that also preserves FP registers.
836 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
837 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
// Notifies the runtime of a deoptimization of the given bailout type,
// then dispatches on the full-codegen state left on the stack:
// NO_REGISTERS drops just the state slot; TOS_REG additionally restores
// rax from the stack before dropping both slots.
841 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
842 Deoptimizer::BailoutType type) {
843 // Enter an internal frame.
845 FrameScope scope(masm, StackFrame::INTERNAL);
847 // Pass the deoptimization type to the runtime system.
848 __ Push(Smi::FromInt(static_cast<int>(type)));
850 __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
851 // Tear down internal frame.
854 // Get the full codegen state from the stack and untag it.
855 __ SmiToInteger32(kScratchRegister, Operand(rsp, kPCOnStackSize));
857 // Switch on the state.
858 Label not_no_registers, not_tos_rax;
859 __ cmpp(kScratchRegister, Immediate(FullCodeGenerator::NO_REGISTERS));
860 __ j(not_equal, &not_no_registers, Label::kNear);
861 __ ret(1 * kPointerSize); // Remove state.
863 __ bind(&not_no_registers);
// TOS_REG state: the value of rax was saved on the stack; restore it.
864 __ movp(rax, Operand(rsp, kPCOnStackSize + kPointerSize));
865 __ cmpp(kScratchRegister, Immediate(FullCodeGenerator::TOS_REG));
866 __ j(not_equal, &not_tos_rax, Label::kNear);
867 __ ret(2 * kPointerSize); // Remove state, rax.
869 __ bind(&not_tos_rax);
870 __ Abort(kNoCasesLeft);
// Eager-deopt notification.
874 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
875 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
// Soft-deopt notification.
879 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
880 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
// Lazy-deopt notification.
884 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
885 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
// Implements Function.prototype.call-style invocation: ensures at least
// one argument, classifies the callee (JS function / proxy / non-function
// via rdx = 0/1/2), patches the receiver per sloppy/strict/native rules,
// shifts arguments down over the original receiver, and finally invokes
// the callee (directly, or through the ArgumentsAdaptorTrampoline for
// proxies and non-functions, or with argument-count check for functions).
// NOTE(review): many lines (label binds, the argc test, pushes of rax/rdi
// around the TO_OBJECT builtin, closing braces) are elided from this view.
889 void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
891 // rsp[0] : Return address
892 // rsp[8] : Argument n
893 // rsp[16] : Argument n-1
895 // rsp[8 * n] : Argument 1
896 // rsp[8 * (n + 1)] : Receiver (function to call)
898 // rax contains the number of arguments, n, not counting the receiver.
900 // 1. Make sure we have at least one argument.
// If argc is zero, push undefined as the (only) argument.
903 __ j(not_zero, &done);
904 __ PopReturnAddressTo(rbx);
905 __ Push(masm->isolate()->factory()->undefined_value());
906 __ PushReturnAddressFrom(rbx);
911 // 2. Get the function to call (passed as receiver) from the stack, check
912 // if it is a function.
913 Label slow, non_function;
914 StackArgumentsAccessor args(rsp, rax);
915 __ movp(rdi, args.GetReceiverOperand());
916 __ JumpIfSmi(rdi, &non_function);
917 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
918 __ j(not_equal, &slow);
920 // 3a. Patch the first argument if necessary when calling a function.
921 Label shift_arguments;
922 __ Set(rdx, 0); // indicate regular JS_FUNCTION
923 { Label convert_to_object, use_global_proxy, patch_receiver;
924 // Change context eagerly in case we need the global receiver.
925 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
927 // Do not transform the receiver for strict mode functions.
928 __ movp(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
929 __ testb(FieldOperand(rbx, SharedFunctionInfo::kStrictModeByteOffset),
930 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
931 __ j(not_equal, &shift_arguments);
933 // Do not transform the receiver for natives.
934 // SharedFunctionInfo is already loaded into rbx.
935 __ testb(FieldOperand(rbx, SharedFunctionInfo::kNativeByteOffset),
936 Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
937 __ j(not_zero, &shift_arguments);
939 // Compute the receiver in sloppy mode.
940 __ movp(rbx, args.GetArgumentOperand(1));
941 __ JumpIfSmi(rbx, &convert_to_object, Label::kNear);
// null and undefined receivers are replaced by the global proxy.
943 __ CompareRoot(rbx, Heap::kNullValueRootIndex);
944 __ j(equal, &use_global_proxy);
945 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
946 __ j(equal, &use_global_proxy);
948 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
949 __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx);
950 __ j(above_equal, &shift_arguments);
952 __ bind(&convert_to_object);
954 // Enter an internal frame in order to preserve argument count.
955 FrameScope scope(masm, StackFrame::INTERNAL);
// Preserve the smi-tagged argc across the ToObject builtin call.
956 __ Integer32ToSmi(rax, rax);
960 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
962 __ Set(rdx, 0); // indicate regular JS_FUNCTION
965 __ SmiToInteger32(rax, rax);
968 // Restore the function to rdi.
969 __ movp(rdi, args.GetReceiverOperand());
970 __ jmp(&patch_receiver, Label::kNear);
972 __ bind(&use_global_proxy);
974 Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
975 __ movp(rbx, FieldOperand(rbx, GlobalObject::kGlobalProxyOffset));
977 __ bind(&patch_receiver);
978 __ movp(args.GetArgumentOperand(1), rbx);
980 __ jmp(&shift_arguments);
983 // 3b. Check for function proxy.
// `slow` path: callee is a heap object but not a JS_FUNCTION.
985 __ Set(rdx, 1); // indicate function proxy
986 __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE);
987 __ j(equal, &shift_arguments);
988 __ bind(&non_function);
989 __ Set(rdx, 2); // indicate non-function
991 // 3c. Patch the first argument when calling a non-function. The
992 // CALL_NON_FUNCTION builtin expects the non-function callee as
993 // receiver, so overwrite the first argument which will ultimately
994 // become the receiver.
995 __ movp(args.GetArgumentOperand(1), rdi);
997 // 4. Shift arguments and return address one slot down on the stack
998 // (overwriting the original receiver). Adjust argument count to make
999 // the original first argument the new receiver.
1000 __ bind(&shift_arguments);
// Copy loop: each argument is moved one slot toward the stack top.
1003 StackArgumentsAccessor args(rsp, rcx);
1005 __ movp(rbx, args.GetArgumentOperand(1));
1006 __ movp(args.GetArgumentOperand(0), rbx);
1008 __ j(not_zero, &loop); // While non-zero.
1009 __ DropUnderReturnAddress(1, rbx); // Drop one slot under return address.
1010 __ decp(rax); // One fewer argument (first argument is new receiver).
1013 // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
1014 // or a function proxy via CALL_FUNCTION_PROXY.
1015 { Label function, non_proxy;
1017 __ j(zero, &function);
// rdx == 1 means proxy, rdx == 2 means non-function (set above).
1019 __ cmpp(rdx, Immediate(1));
1020 __ j(not_equal, &non_proxy);
1022 __ PopReturnAddressTo(rdx);
1023 __ Push(rdi); // re-add proxy object as additional argument
1024 __ PushReturnAddressFrom(rdx);
1026 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
1027 __ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1028 RelocInfo::CODE_TARGET);
1030 __ bind(&non_proxy);
1031 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
1032 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1033 RelocInfo::CODE_TARGET);
1037 // 5b. Get the code to call from the function and check that the number of
1038 // expected arguments matches what we're providing. If so, jump
1039 // (tail-call) to the code in register edx without checking arguments.
1040 __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
1041 __ LoadSharedFunctionInfoSpecialField(rbx, rdx,
1042 SharedFunctionInfo::kFormalParameterCountOffset);
1043 __ movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
// Mismatched argc goes through the arguments adaptor (comparison elided).
1046 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1047 RelocInfo::CODE_TARGET);
1049 ParameterCount expected(0);
1050 __ InvokeCode(rdx, expected, expected, JUMP_FUNCTION, NullCallWrapper());
// Checks that pushing rax items will not overflow the real stack limit;
// on overflow, pushes the callee (at calleeOffset from rbp) and invokes
// the STACK_OVERFLOW builtin. NOTE(review): the `okay` label declaration,
// the rcx setup from rsp, the rcx/rdx comparison and the label bind are
// elided from this view.
1054 static void Generate_CheckStackOverflow(MacroAssembler* masm,
1055 const int calleeOffset) {
1056 // rax : the number of items to be pushed to the stack
1058 // Check the stack for overflow. We are not trying to catch
1059 // interruptions (e.g. debug break and preemption) here, so the "real stack
1060 // limit" is checked.
1062 __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
1064 // Make rcx the space we have left. The stack might already be overflowed
1065 // here which will cause rcx to become negative.
1066 __ subp(rcx, kScratchRegister);
1067 // Make rdx the space we need for the array when it is unrolled onto the
// rdx = rax (a positive smi) << kPointerSizeLog2, i.e. the byte count.
1069 __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2);
1070 // Check if the arguments will overflow the stack.
1072 __ j(greater, &okay); // Signed comparison.
1074 // Out of stack space.
1075 __ Push(Operand(rbp, calleeOffset));
1077 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
// Pushes the elements of an applied-arguments array onto the stack one at
// a time, loading each element through the (keyed-load IC or runtime)
// property-access machinery and maintaining the smi index in a frame slot.
// On exit rax holds the untagged count of pushed arguments.
// NOTE(review): the loop/entry label declarations, the push of rax after
// each load, and the jump-to-entry are elided from this view.
1083 static void Generate_PushAppliedArguments(MacroAssembler* masm,
1084 const int argumentsOffset,
1085 const int indexOffset,
1086 const int limitOffset) {
1087 Register receiver = LoadDescriptor::ReceiverRegister();
1088 Register key = LoadDescriptor::NameRegister();
1090 // Copy all arguments from the array to the stack.
1092 __ movp(key, Operand(rbp, indexOffset));
// Loop body: reload the arguments object and fetch arguments[key].
1095 __ movp(receiver, Operand(rbp, argumentsOffset)); // load arguments
1097 // Use inline caching to speed up access to arguments.
1098 if (FLAG_vector_ics) {
1099 // TODO(mvstanton): Vector-based ics need additional infrastructure to
1100 // be embedded here. For now, just call the runtime.
1103 __ CallRuntime(Runtime::kGetProperty, 2);
1105 Handle<Code> ic = CodeFactory::KeyedLoadIC(masm->isolate()).code();
1106 __ Call(ic, RelocInfo::CODE_TARGET);
1107 // It is important that we do not have a test instruction after the
1108 // call. A test instruction after the call is used to indicate that
1109 // we have generated an inline version of the keyed load. In this
1110 // case, we know that we are not generating a test instruction next.
1113 // Push the nth argument.
1116 // Update the index on the stack and in register key.
1117 __ movp(key, Operand(rbp, indexOffset));
1118 __ SmiAddConstant(key, key, Smi::FromInt(1));
1119 __ movp(Operand(rbp, indexOffset), key);
// Continue while the index has not reached the limit.
1122 __ cmpp(key, Operand(rbp, limitOffset));
1123 __ j(not_equal, &loop);
1125 // On exit, the pushed arguments count is in rax, untagged
1126 __ SmiToInteger64(rax, key);
1130 // Used by FunctionApply and ReflectApply
// Implements Function.prototype.apply / Reflect.apply: validates the
// arguments list, checks for stack overflow, computes the receiver per
// sloppy/strict/native rules, pushes the applied arguments, and invokes the
// target (or the CALL_FUNCTION_PROXY builtin via the adaptor trampoline when
// the target is a function proxy).
1131 static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) {
// Reflect.apply takes (target, thisArgument, argumentsList); apply takes
// (thisArg, argArray) — hence 3 vs. 2 formal parameters.
1132 const int kFormalParameters = targetIsArgument ? 3 : 2;
1133 const int kStackSize = kFormalParameters + 1;
1136 // rsp : return address
1137 // rsp[8] : arguments
1138 // rsp[16] : receiver ("this")
1139 // rsp[24] : function
1141 FrameScope frame_scope(masm, StackFrame::INTERNAL);
1143 // rbp : Old base pointer
1144 // rbp[8] : return address
1145 // rbp[16] : function arguments
1146 // rbp[24] : receiver
1147 // rbp[32] : function
1148 static const int kArgumentsOffset = kFPOnStackSize + kPCOnStackSize;
1149 static const int kReceiverOffset = kArgumentsOffset + kPointerSize;
1150 static const int kFunctionOffset = kReceiverOffset + kPointerSize;
1152 __ Push(Operand(rbp, kFunctionOffset));
1153 __ Push(Operand(rbp, kArgumentsOffset));
1154 if (targetIsArgument) {
1155 __ InvokeBuiltin(Builtins::REFLECT_APPLY_PREPARE, CALL_FUNCTION);
1157 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
// The PREPARE builtin leaves the argument count (a Smi) in rax.
1160 Generate_CheckStackOverflow(masm, kFunctionOffset);
1162 // Push current index and limit.
1163 const int kLimitOffset =
1164 StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize;
1165 const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
1166 __ Push(rax); // limit
1167 __ Push(Immediate(0)); // index
1169 // Get the receiver.
1170 __ movp(rbx, Operand(rbp, kReceiverOffset));
1172 // Check that the function is a JS function (otherwise it must be a proxy).
1173 Label push_receiver;
1174 __ movp(rdi, Operand(rbp, kFunctionOffset));
1175 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
1176 __ j(not_equal, &push_receiver);
1178 // Change context eagerly to get the right global object if necessary.
1179 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
1181 // Do not transform the receiver for strict mode functions.
1182 Label call_to_object, use_global_proxy;
1183 __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
1184 __ testb(FieldOperand(rdx, SharedFunctionInfo::kStrictModeByteOffset),
1185 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
1186 __ j(not_equal, &push_receiver);
1188 // Do not transform the receiver for natives.
1189 __ testb(FieldOperand(rdx, SharedFunctionInfo::kNativeByteOffset),
1190 Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
1191 __ j(not_equal, &push_receiver);
1193 // Compute the receiver in sloppy mode.
1194 __ JumpIfSmi(rbx, &call_to_object, Label::kNear);
1195 __ CompareRoot(rbx, Heap::kNullValueRootIndex);
1196 __ j(equal, &use_global_proxy);
1197 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
1198 __ j(equal, &use_global_proxy);
1200 // If given receiver is already a JavaScript object then there's no
1201 // reason for converting it.
1202 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
1203 __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx);
1204 __ j(above_equal, &push_receiver);
1206 // Convert the receiver to an object.
1207 __ bind(&call_to_object);
1209 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1211 __ jmp(&push_receiver, Label::kNear);
1213 __ bind(&use_global_proxy);
// NOTE(review): the movp that pairs with this Operand (loading the global
// object into rbx) is elided in this chunk — confirm against the full file.
1215 Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
1216 __ movp(rbx, FieldOperand(rbx, GlobalObject::kGlobalProxyOffset));
1218 // Push the receiver.
1219 __ bind(&push_receiver);
1222 // Loop over the arguments array, pushing each value to the stack
1223 Generate_PushAppliedArguments(
1224 masm, kArgumentsOffset, kIndexOffset, kLimitOffset);
1226 // Call the function.
1228 ParameterCount actual(rax);
1229 __ movp(rdi, Operand(rbp, kFunctionOffset));
1230 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
1231 __ j(not_equal, &call_proxy);
1232 __ InvokeFunction(rdi, actual, CALL_FUNCTION, NullCallWrapper());
1234 frame_scope.GenerateLeaveFrame();
1235 __ ret(kStackSize * kPointerSize); // remove this, receiver, and arguments
1237 // Call the function proxy.
1238 __ bind(&call_proxy);
1239 __ Push(rdi); // add function proxy as last argument
1242 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
1243 __ call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1244 RelocInfo::CODE_TARGET);
1246 // Leave internal frame.
1248 __ ret(kStackSize * kPointerSize); // remove this, receiver, and arguments
1252 // Used by ReflectConstruct
// Implements Reflect.construct(target, argumentsList[, newTarget]):
// defaults new.target to the constructor when undefined, validates the
// arguments via REFLECT_CONSTRUCT_PREPARE, checks stack overflow, pushes the
// applied arguments, and invokes CallConstructStub as a super-constructor
// call with an undefined feedback vector.
1253 static void Generate_ConstructHelper(MacroAssembler* masm) {
1254 const int kFormalParameters = 3;
1255 const int kStackSize = kFormalParameters + 1;
1258 // rsp : return address
1259 // rsp[8] : original constructor (new.target)
1260 // rsp[16] : arguments
1261 // rsp[24] : constructor
1263 FrameScope frame_scope(masm, StackFrame::INTERNAL);
1265 // rbp : Old base pointer
1266 // rbp[8] : return address
1267 // rbp[16] : original constructor (new.target)
1268 // rbp[24] : arguments
1269 // rbp[32] : constructor
1270 static const int kNewTargetOffset = kFPOnStackSize + kPCOnStackSize;
1271 static const int kArgumentsOffset = kNewTargetOffset + kPointerSize;
1272 static const int kFunctionOffset = kArgumentsOffset + kPointerSize;
1274 // If newTarget is not supplied, set it to constructor
1275 Label validate_arguments;
1276 __ movp(rax, Operand(rbp, kNewTargetOffset));
1277 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
1278 __ j(not_equal, &validate_arguments, Label::kNear);
1279 __ movp(rax, Operand(rbp, kFunctionOffset));
1280 __ movp(Operand(rbp, kNewTargetOffset), rax);
1282 // Validate arguments
1283 __ bind(&validate_arguments);
1284 __ Push(Operand(rbp, kFunctionOffset));
1285 __ Push(Operand(rbp, kArgumentsOffset));
1286 __ Push(Operand(rbp, kNewTargetOffset));
1287 __ InvokeBuiltin(Builtins::REFLECT_CONSTRUCT_PREPARE, CALL_FUNCTION);
// The PREPARE builtin leaves the argument count (a Smi) in rax.
1289 Generate_CheckStackOverflow(masm, kFunctionOffset);
1291 // Push current index and limit.
1292 const int kLimitOffset =
1293 StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize;
1294 const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
1295 __ Push(rax); // limit
1296 __ Push(Immediate(0)); // index
1297 // Push newTarget and callee functions
1298 __ Push(Operand(rbp, kNewTargetOffset));
1299 __ Push(Operand(rbp, kFunctionOffset));
1301 // Loop over the arguments array, pushing each value to the stack
1302 Generate_PushAppliedArguments(
1303 masm, kArgumentsOffset, kIndexOffset, kLimitOffset);
1305 // Use undefined feedback vector
1306 __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
1307 __ movp(rdi, Operand(rbp, kFunctionOffset));
1309 // Call the function.
1310 CallConstructStub stub(masm->isolate(), SUPER_CONSTRUCTOR_CALL);
1311 __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
1315 // Leave internal frame.
1317 // remove this, target, arguments and newTarget
1318 __ ret(kStackSize * kPointerSize);
// Function.prototype.apply: 2 formal parameters (thisArg, argArray).
1322 void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
1323 Generate_ApplyHelper(masm, false);
// Reflect.apply: 3 formal parameters (target, thisArgument, argumentsList).
1327 void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
1328 Generate_ApplyHelper(masm, true);
// Reflect.construct: delegates to the construct helper above.
1332 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
1333 Generate_ConstructHelper(masm);
// Builtin entry for the InternalArray constructor called as a normal
// function: loads the InternalArray function into rdi, optionally verifies
// its initial map in debug builds, then tail-calls the constructor stub.
1337 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
1338 // ----------- S t a t e -------------
1340 // -- rsp[0] : return address
1341 // -- rsp[8] : last argument
1342 // -----------------------------------
1343 Label generic_array_code;
1345 // Get the InternalArray function.
1346 __ LoadGlobalFunction(Context::INTERNAL_ARRAY_FUNCTION_INDEX, rdi);
1348 if (FLAG_debug_code) {
1349 // Initial map for the builtin InternalArray functions should be maps.
1350 __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
1351 // Will both indicate a NULL and a Smi.
1352 STATIC_ASSERT(kSmiTag == 0);
1353 Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
1354 __ Check(not_smi, kUnexpectedInitialMapForInternalArrayFunction);
1355 __ CmpObjectType(rbx, MAP_TYPE, rcx);
1356 __ Check(equal, kUnexpectedInitialMapForInternalArrayFunction);
1359 // Run the native code for the InternalArray function called as a normal
1362 InternalArrayConstructorStub stub(masm->isolate());
1363 __ TailCallStub(&stub);
// Builtin entry for the Array constructor called as a normal function:
// mirrors Generate_InternalArrayCode but also passes an undefined feedback
// vector (rbx) to the ArrayConstructorStub.
1367 void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
1368 // ----------- S t a t e -------------
1370 // -- rsp[0] : return address
1371 // -- rsp[8] : last argument
1372 // -----------------------------------
1373 Label generic_array_code;
1375 // Get the Array function.
1376 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rdi);
1378 if (FLAG_debug_code) {
1379 // Initial map for the builtin Array functions should be maps.
1380 __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
1381 // Will both indicate a NULL and a Smi.
1382 STATIC_ASSERT(kSmiTag == 0);
1383 Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
1384 __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
1385 __ CmpObjectType(rbx, MAP_TYPE, rcx);
1386 __ Check(equal, kUnexpectedInitialMapForArrayFunction);
1390 // Run the native code for the Array function called as a normal function.
// Undefined feedback vector: no type feedback is collected on this path.
1392 __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
1393 ArrayConstructorStub stub(masm->isolate());
1394 __ TailCallStub(&stub);
// Builtin entry for the String constructor called as a function (not as a
// constructor): converts the first argument to a string (via the number-to-
// string cache, a direct string check, or the TO_STRING builtin) and wraps it
// in a freshly allocated JSValue.
// FIX(review): the bind below used the mojibake label `¬_cached` — an HTML
// named-character-reference decoding artifact where `&not` in `&not_cached`
// became U+00AC `¬`. Restored to `&not_cached`, matching the declaration of
// `Label not_cached` above.
1398 void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
1399 // ----------- S t a t e -------------
1400 // -- rax : number of arguments
1401 // -- rdi : constructor function
1402 // -- rsp[0] : return address
1403 // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1404 // -- rsp[(argc + 1) * 8] : receiver
1405 // -----------------------------------
1406 Counters* counters = masm->isolate()->counters();
1407 __ IncrementCounter(counters->string_ctor_calls(), 1);
1409 if (FLAG_debug_code) {
1410 __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, rcx);
1412 __ Assert(equal, kUnexpectedStringFunction);
1415 // Load the first argument into rax and get rid of the rest
1416 // (including the receiver).
1417 StackArgumentsAccessor args(rsp, rax);
1420 __ j(zero, &no_arguments);
1421 __ movp(rbx, args.GetArgumentOperand(1));
1422 __ PopReturnAddressTo(rcx);
1423 __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
1424 __ PushReturnAddressFrom(rcx);
1427 // Lookup the argument in the number to string cache.
1428 Label not_cached, argument_is_string;
1429 __ LookupNumberStringCache(rax, // Input.
1434 __ IncrementCounter(counters->string_ctor_cached_number(), 1);
1435 __ bind(&argument_is_string);
1437 // ----------- S t a t e -------------
1438 // -- rbx : argument converted to string
1439 // -- rdi : constructor function
1440 // -- rsp[0] : return address
1441 // -----------------------------------
1443 // Allocate a JSValue and put the tagged pointer into rax.
1445 __ Allocate(JSValue::kSize,
1447 rcx, // New allocation top (we ignore it).
// Initialize the map: load the String function's initial map and verify
// (debug only) that its instance size and unused-property count match a
// plain JSValue wrapper.
1453 __ LoadGlobalFunctionInitialMap(rdi, rcx);
1454 if (FLAG_debug_code) {
1455 __ cmpb(FieldOperand(rcx, Map::kInstanceSizeOffset),
1456 Immediate(JSValue::kSize >> kPointerSizeLog2));
1457 __ Assert(equal, kUnexpectedStringWrapperInstanceSize);
1458 __ cmpb(FieldOperand(rcx, Map::kUnusedPropertyFieldsOffset), Immediate(0));
1459 __ Assert(equal, kUnexpectedUnusedPropertiesOfStringWrapper);
1461 __ movp(FieldOperand(rax, HeapObject::kMapOffset), rcx);
1463 // Set properties and elements.
1464 __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
1465 __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), rcx);
1466 __ movp(FieldOperand(rax, JSObject::kElementsOffset), rcx);
// Store the wrapped string value.
1469 __ movp(FieldOperand(rax, JSValue::kValueOffset), rbx);
1471 // Ensure the object is fully initialized.
1472 STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
1474 // We're done. Return.
1477 // The argument was not found in the number to string cache. Check
1478 // if it's a string already before calling the conversion builtin.
1479 Label convert_argument;
1480 __ bind(&not_cached);
1481 STATIC_ASSERT(kSmiTag == 0);
1482 __ JumpIfSmi(rax, &convert_argument);
1483 Condition is_string = masm->IsObjectStringType(rax, rbx, rcx);
1484 __ j(NegateCondition(is_string), &convert_argument);
1486 __ IncrementCounter(counters->string_ctor_string_value(), 1);
1487 __ jmp(&argument_is_string);
1489 // Invoke the conversion builtin and put the result into rbx.
1490 __ bind(&convert_argument);
1491 __ IncrementCounter(counters->string_ctor_conversions(), 1);
1493 FrameScope scope(masm, StackFrame::INTERNAL);
1494 __ Push(rdi); // Preserve the function.
1496 __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
1500 __ jmp(&argument_is_string);
1502 // Load the empty string into rbx, remove the receiver from the
1503 // stack, and jump back to the case where the argument is a string.
1504 __ bind(&no_arguments);
1505 __ LoadRoot(rbx, Heap::kempty_stringRootIndex);
1506 __ PopReturnAddressTo(rcx);
1507 __ leap(rsp, Operand(rsp, kPointerSize));
1508 __ PushReturnAddressFrom(rcx);
1509 __ jmp(&argument_is_string);
1511 // At this point the argument is already a string. Call runtime to
1512 // create a string wrapper.
// NOTE(review): `no_arguments` and `gc_required` label declarations are
// elided in this chunk — confirm against the full file.
1513 __ bind(&gc_required);
1514 __ IncrementCounter(counters->string_ctor_gc_required(), 1);
1516 FrameScope scope(masm, StackFrame::INTERNAL);
1518 __ CallRuntime(Runtime::kNewStringWrapper, 1);
// Emits a stack-overflow check for the arguments adaptor: jumps to
// stack_overflow when there is not enough room for rbx expected arguments.
// NOTE(review): the subtraction seeding rcx and the move of rbx into rdx are
// elided in this chunk — confirm against the full file.
1524 static void ArgumentsAdaptorStackCheck(MacroAssembler* masm,
1525 Label* stack_overflow) {
1526 // ----------- S t a t e -------------
1527 // -- rax : actual number of arguments
1528 // -- rbx : expected number of arguments
1529 // -- rdi: function (passed through to callee)
1530 // -----------------------------------
1531 // Check the stack for overflow. We are not trying to catch
1532 // interruptions (e.g. debug break and preemption) here, so the "real stack
1533 // limit" is checked.
1535 __ LoadRoot(rdx, Heap::kRealStackLimitRootIndex);
1537 // Make rcx the space we have left. The stack might already be overflowed
1538 // here which will cause rcx to become negative.
1540 // Make rdx the space we need for the array when it is unrolled onto the
1543 __ shlp(rdx, Immediate(kPointerSizeLog2));
1544 // Check if the arguments will overflow the stack.
1546 __ j(less_equal, stack_overflow); // Signed comparison.
// Builds an arguments adaptor frame: pushes the ARGUMENTS_ADAPTOR context
// sentinel and the function, and saves the (Smi-tagged) argument count in r8
// so rax/rbx/rcx stay free for the argument-copy loops.
1550 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
1554 // Store the arguments adaptor context sentinel.
1555 __ Push(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
1557 // Push the function on the stack.
1560 // Preserve the number of arguments on the stack. Must preserve rax,
1561 // rbx and rcx because these registers are used when copying the
1562 // arguments and the receiver.
1563 __ Integer32ToSmi(r8, rax);
// Tears down an arguments adaptor frame and pops the caller's arguments:
// reads the saved Smi argument count, then drops count+1 slots (arguments
// plus receiver) while preserving the return address in rcx.
1568 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1569 // Retrieve the number of arguments from the stack. Number is a Smi.
1570 __ movp(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));
1576 // Remove caller arguments from the stack.
1577 __ PopReturnAddressTo(rcx);
1578 SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
// +1 * kPointerSize also skips the receiver slot.
1579 __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
1580 __ PushReturnAddressFrom(rcx);
// Adapts the actual argument count (rax) to the callee's expected count
// (rbx): copies arguments when enough were passed, pads with undefined when
// too few, skips adaptation entirely for the don't-adapt sentinel, and
// raises STACK_OVERFLOW when the adapted frame would not fit.
1584 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
1585 // ----------- S t a t e -------------
1586 // -- rax : actual number of arguments
1587 // -- rbx : expected number of arguments
1588 // -- rdi: function (passed through to callee)
1589 // -----------------------------------
1591 Label invoke, dont_adapt_arguments;
1592 Counters* counters = masm->isolate()->counters();
1593 __ IncrementCounter(counters->arguments_adaptors(), 1);
1595 Label stack_overflow;
1596 ArgumentsAdaptorStackCheck(masm, &stack_overflow);
1598 Label enough, too_few;
// rdx = callee entry point, used by the jumps below.
1599 __ movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
1601 __ j(less, &too_few);
1602 __ cmpp(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
1603 __ j(equal, &dont_adapt_arguments);
1605 { // Enough parameters: Actual >= expected.
1607 EnterArgumentsAdaptorFrame(masm);
1609 // Copy receiver and all expected arguments.
1610 const int offset = StandardFrameConstants::kCallerSPOffset;
1611 __ leap(rax, Operand(rbp, rax, times_pointer_size, offset));
1612 __ Set(r8, -1); // account for receiver
// Copy loop: push arguments top-down, walking rax toward the receiver.
// NOTE(review): the loop label and termination compare are elided here.
1617 __ Push(Operand(rax, 0));
1618 __ subp(rax, Immediate(kPointerSize));
1624 { // Too few parameters: Actual < expected.
1626 EnterArgumentsAdaptorFrame(masm);
1628 // Copy receiver and all actual arguments.
1629 const int offset = StandardFrameConstants::kCallerSPOffset;
1630 __ leap(rdi, Operand(rbp, rax, times_pointer_size, offset));
1631 __ Set(r8, -1); // account for receiver
1636 __ Push(Operand(rdi, 0));
1637 __ subp(rdi, Immediate(kPointerSize));
1641 // Fill remaining expected arguments with undefined values.
1643 __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
1646 __ Push(kScratchRegister);
1650 // Restore function pointer.
// rdi was clobbered as the copy cursor above; reload it from the frame.
1651 __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1654 // Call the entry point.
1658 // Store offset of return address for deoptimizer.
1659 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
1661 // Leave frame and return.
1662 LeaveArgumentsAdaptorFrame(masm);
1665 // -------------------------------------------
1666 // Dont adapt arguments.
1667 // -------------------------------------------
1668 __ bind(&dont_adapt_arguments);
1671 __ bind(&stack_overflow);
1673 FrameScope frame(masm, StackFrame::MANUAL);
1674 EnterArgumentsAdaptorFrame(masm);
1675 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
// On-stack replacement: asks the runtime to compile an OSR version of the
// function in the current JavaScript frame; if code is produced, rewrites
// the return address to the code's OSR entry point so returning "jumps"
// into the optimized code.
1681 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
1682 // Lookup the function in the JavaScript frame.
1683 __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1685 FrameScope scope(masm, StackFrame::INTERNAL);
1686 // Pass function as argument.
1688 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
1692 // If the code object is null, just return to the unoptimized code.
1693 __ cmpp(rax, Immediate(0));
// NOTE(review): the `skip` label declaration/bind and the plain-return path
// are elided in this chunk — confirm against the full file.
1694 __ j(not_equal, &skip, Label::kNear);
1699 // Load deoptimization data from the code object.
1700 __ movp(rbx, Operand(rax, Code::kDeoptimizationDataOffset - kHeapObjectTag));
1702 // Load the OSR entrypoint offset from the deoptimization data.
1703 __ SmiToInteger32(rbx, Operand(rbx, FixedArray::OffsetOfElementAt(
1704 DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));
1706 // Compute the target address = code_obj + header_size + osr_offset
1707 __ leap(rax, Operand(rax, rbx, times_1, Code::kHeaderSize - kHeapObjectTag));
1709 // Overwrite the return address on the stack.
1710 __ movq(StackOperandForReturnAddress(0), rax);
1712 // And "return" to the OSR entry point of the function.
// After a failed stack check in optimizable code: if the stack limit has
// been hit, call the StackGuard runtime (which may request recompilation),
// then tail-jump to the OnStackReplacement builtin; otherwise fall through.
1717 void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
1718 // We check the stack limit as indicator that recompilation might be done.
// NOTE(review): the `ok` label declaration/bind and the final ret are
// elided in this chunk — confirm against the full file.
1720 __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
1721 __ j(above_equal, &ok);
1723 FrameScope scope(masm, StackFrame::INTERNAL);
1724 __ CallRuntime(Runtime::kStackGuard, 0);
1726 __ jmp(masm->isolate()->builtins()->OnStackReplacement(),
1727 RelocInfo::CODE_TARGET);
1736 } } // namespace v8::internal
1738 #endif // V8_TARGET_ARCH_X64