// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_PPC

#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen/full-codegen.h"
#include "src/interpreter/bytecodes.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)

void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- r3                 : number of arguments excluding receiver
  //  -- r4                 : called function (only guaranteed when
  //                          extra_args requires it)
  //  -- cp                 : context
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument (argc == r3)
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------

  // Insert extra arguments.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    __ push(r4);
    num_extra_args = 1;
  } else {
    DCHECK(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects r3 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ addi(r3, r3, Operand(num_extra_args + 1));
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}
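
// Illustration (not generated code): for a builtin with
// extra_args == NEEDS_CALLED_FUNCTION and two JS arguments, the code above
// pushes r4 and passes r3 = 2 + 1 (extra arg) + 1 (receiver) = 4 to the C++
// builtin via JumpToExternalReference.
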
// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the native context.
  __ LoadP(result,
           MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ LoadP(result, FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the InternalArray function from the native context.
  __ LoadP(result,
           MemOperand(result, Context::SlotOffset(
                                  Context::INTERNAL_ARRAY_FUNCTION_INDEX)));
}

// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the native context.
  __ LoadP(result,
           MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ LoadP(result, FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the Array function from the native context.
  __ LoadP(result,
           MemOperand(result, Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
}

void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, r4);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ LoadP(r5, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
    __ TestIfSmi(r5, r0);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction, cr0);
    __ CompareObjectType(r5, r6, r7, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}
void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, r4);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ LoadP(r5, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
    __ TestIfSmi(r5, r0);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction, cr0);
    __ CompareObjectType(r5, r6, r7, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
  }

  // Run the native code for the Array function called as a normal function.
  __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}
void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3                     : number of arguments
  //  -- r4                     : constructor function
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_ctor_calls(), 1, r5, r6);

  Register function = r4;
  if (FLAG_debug_code) {
    __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, r5);
    __ cmp(function, r5);
    __ Assert(eq, kUnexpectedStringFunction);
  }
  // Load the first argument into r3 and get rid of the rest.
  Label no_arguments;
  __ cmpi(r3, Operand::Zero());
  __ beq(&no_arguments);
  // First args = sp[(argc - 1) * 4].
  __ subi(r3, r3, Operand(1));
  __ ShiftLeftImm(r3, r3, Operand(kPointerSizeLog2));
  __ add(sp, sp, r3);
  __ LoadP(r3, MemOperand(sp));
  // sp now points to args[0]; drop args[0] + receiver.
  __ Drop(2);
  Register argument = r5;
  Label not_cached, argument_is_string;
  __ LookupNumberStringCache(r3,        // Input.
                             argument,  // Result.
                             r6,        // Scratch.
                             r7,        // Scratch.
                             r8,        // Scratch.
                             &not_cached);
  __ IncrementCounter(counters->string_ctor_cached_number(), 1, r6, r7);
  __ bind(&argument_is_string);

  // ----------- S t a t e -------------
  //  -- r5     : argument converted to string
  //  -- r4     : constructor function
  //  -- lr     : return address
  // -----------------------------------

  Label gc_required;
  __ Allocate(JSValue::kSize,
              r3,  // Result.
              r6,  // Scratch.
              r7,  // Scratch.
              &gc_required, TAG_OBJECT);
  // Initialize the String object.
  Register map = r6;
  __ LoadGlobalFunctionInitialMap(function, map, r7);
  if (FLAG_debug_code) {
    __ lbz(r7, FieldMemOperand(map, Map::kInstanceSizeOffset));
    __ cmpi(r7, Operand(JSValue::kSize >> kPointerSizeLog2));
    __ Assert(eq, kUnexpectedStringWrapperInstanceSize);
    __ lbz(r7, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
    __ cmpi(r7, Operand::Zero());
    __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper);
  }
  __ StoreP(map, FieldMemOperand(r3, HeapObject::kMapOffset), r0);

  __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
  __ StoreP(r6, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0);
  __ StoreP(r6, FieldMemOperand(r3, JSObject::kElementsOffset), r0);

  __ StoreP(argument, FieldMemOperand(r3, JSValue::kValueOffset), r0);

  // Ensure the object is fully initialized.
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);

  __ Ret();
  // The argument was not found in the number to string cache. Check
  // if it's a string already before calling the conversion builtin.
  Label convert_argument;
  __ bind(&not_cached);
  __ JumpIfSmi(r3, &convert_argument);

  // Is it a String?
  __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ lbz(r6, FieldMemOperand(r5, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  __ andi(r0, r6, Operand(kIsNotStringMask));
  __ bne(&convert_argument, cr0);
  __ mr(argument, r3);
  __ IncrementCounter(counters->string_ctor_conversions(), 1, r6, r7);
  __ b(&argument_is_string);
  // Invoke the conversion builtin and put the result into r5.
  __ bind(&convert_argument);
  __ push(function);  // Preserve the function.
  __ IncrementCounter(counters->string_ctor_conversions(), 1, r6, r7);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ push(r3);
    __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
  }
  __ pop(function);
  __ mr(argument, r3);
  __ b(&argument_is_string);
  // Load the empty string into r5, remove the receiver from the
  // stack, and jump back to the case where the argument is a string.
  __ bind(&no_arguments);
  __ LoadRoot(argument, Heap::kempty_stringRootIndex);
  __ Drop(1);
  __ b(&argument_is_string);
  // At this point the argument is already a string. Call runtime to
  // create a string wrapper.
  __ bind(&gc_required);
  __ IncrementCounter(counters->string_ctor_gc_required(), 1, r6, r7);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ push(argument);
    __ CallRuntime(Runtime::kNewStringWrapper, 1);
  }
  __ Ret();
}
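
// In JS terms the builtin above implements the fast path of `new String(x)`:
// it allocates a JSValue wrapper from the String function's initial map and
// stores the argument string in its value field, first converting the
// argument (or consulting the number-to-string cache) when it is not already
// a string.
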
static void CallRuntimePassFunction(MacroAssembler* masm,
                                    Runtime::FunctionId function_id) {
  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack.
  // Push function as parameter to the runtime call.
  __ Push(r4, r4);

  __ CallRuntime(function_id, 1);
  // Restore receiver.
  __ Pop(r4);
}

static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ LoadP(ip, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(ip, FieldMemOperand(ip, SharedFunctionInfo::kCodeOffset));
  __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}

static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
  __ addi(ip, r3, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}

void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive. A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  __ cmpl(sp, ip);
  __ bge(&ok);

  CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
  GenerateTailCallToReturnedCode(masm);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}
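
// Rough control flow of the builtin above (illustration only):
//   if (sp < stack_limit) {            // interrupt or stack check pending
//     code = TryInstallOptimizedCode(function);
//     tail_call(code);                 // optimized code if now ready
//   } else {
//     tail_call(shared_info->code());  // keep running unoptimized code
//   }
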
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_memento) {
  // ----------- S t a t e -------------
  //  -- r3     : number of arguments
  //  -- r4     : constructor function
  //  -- r5     : allocation site or undefined
  //  -- r6     : original constructor
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  // Should never create mementos for api functions.
  DCHECK(!is_api_function || !create_memento);

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ AssertUndefinedOrAllocationSite(r5, r7);
    __ SmiTag(r3);
    __ Push(r5, r3, r4, r6);
    // Try to allocate the object without transitioning into C code. If any of
    // the preconditions is not met, the code bails out to the runtime call.
    Label rt_call, allocated;
    if (FLAG_inline_new) {
      ExternalReference debug_step_in_fp =
          ExternalReference::debug_step_in_fp_address(isolate);
      __ mov(r5, Operand(debug_step_in_fp));
      __ LoadP(r5, MemOperand(r5));
      __ cmpi(r5, Operand::Zero());
      __ bne(&rt_call);

      // Fall back to runtime if the original constructor and function differ.
      __ cmp(r4, r6);
      __ bne(&rt_call);

      // Load the initial map and verify that it is in fact a map.
      // r4: constructor function
      __ LoadP(r5,
               FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
      __ JumpIfSmi(r5, &rt_call);
      __ CompareObjectType(r5, r8, r7, MAP_TYPE);
      __ bne(&rt_call);

      // Check that the constructor is not constructing a JSFunction (see
      // comments in Runtime_NewObject in runtime.cc). In which case the
      // initial map's instance type would be JS_FUNCTION_TYPE.
      // r4: constructor function
      // r5: initial map
      __ CompareInstanceType(r5, r8, JS_FUNCTION_TYPE);
      __ beq(&rt_call);

      if (!is_api_function) {
        Label allocate;
        MemOperand bit_field3 = FieldMemOperand(r5, Map::kBitField3Offset);
        // Check if slack tracking is enabled.
        __ lwz(r7, bit_field3);
        __ DecodeField<Map::Counter>(r11, r7);
        __ cmpi(r11, Operand(Map::kSlackTrackingCounterEnd));
        __ blt(&allocate);
        // Decrease generous allocation count.
        __ Add(r7, r7, -(1 << Map::Counter::kShift), r0);
        __ stw(r7, bit_field3);
        __ cmpi(r11, Operand(Map::kSlackTrackingCounterEnd));
        __ bne(&allocate);

        __ push(r4);

        __ Push(r5, r4);  // r4 = constructor
        __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);

        __ Pop(r4, r5);

        __ bind(&allocate);
      }
      // Now allocate the JSObject on the heap.
      // r4: constructor function
      // r5: initial map
      Label rt_call_reload_new_target;
      __ lbz(r6, FieldMemOperand(r5, Map::kInstanceSizeOffset));
      if (create_memento) {
        __ addi(r6, r6, Operand(AllocationMemento::kSize / kPointerSize));
      }

      __ Allocate(r6, r7, r8, r9, &rt_call_reload_new_target, SIZE_IN_WORDS);

      // Allocated the JSObject, now initialize the fields. Map is set to
      // initial map and properties and elements are set to empty fixed array.
      // r4: constructor function
      // r5: initial map
      // r6: object size (including memento if create_memento)
      // r7: JSObject (not tagged)
      __ LoadRoot(r9, Heap::kEmptyFixedArrayRootIndex);
      __ mr(r8, r7);
      __ StoreP(r5, MemOperand(r8, JSObject::kMapOffset));
      __ StoreP(r9, MemOperand(r8, JSObject::kPropertiesOffset));
      __ StoreP(r9, MemOperand(r8, JSObject::kElementsOffset));
      __ addi(r8, r8, Operand(JSObject::kElementsOffset + kPointerSize));

      __ ShiftLeftImm(r9, r6, Operand(kPointerSizeLog2));
      __ add(r9, r7, r9);  // End of object.
      // Fill all the in-object properties with the appropriate filler.
      // r4: constructor function
      // r5: initial map
      // r6: object size (in words, including memento if create_memento)
      // r7: JSObject (not tagged)
      // r8: First in-object property of JSObject (not tagged)
      DCHECK_EQ(3 * kPointerSize, JSObject::kHeaderSize);
      __ LoadRoot(r10, Heap::kUndefinedValueRootIndex);

      if (!is_api_function) {
        Label no_inobject_slack_tracking;

        // Check if slack tracking is enabled.
        __ cmpi(r11, Operand(Map::kSlackTrackingCounterEnd));
        __ blt(&no_inobject_slack_tracking);

        // Allocate object with a slack.
        __ lbz(r3, FieldMemOperand(r5, Map::kInObjectPropertiesOffset));
        __ lbz(r5, FieldMemOperand(r5, Map::kUnusedPropertyFieldsOffset));
        __ sub(r3, r3, r5);
        if (FLAG_debug_code) {
          __ ShiftLeftImm(r0, r3, Operand(kPointerSizeLog2));
          __ add(r0, r8, r0);
          // r0: offset of first field after pre-allocated fields
          __ cmp(r0, r9);
          __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields);
        }
        {
          Label done;
          __ cmpi(r3, Operand::Zero());
          __ beq(&done);
          __ InitializeNFieldsWithFiller(r8, r3, r10);
          __ bind(&done);
        }
        // To allow for truncation.
        __ LoadRoot(r10, Heap::kOnePointerFillerMapRootIndex);
        // Fill the remaining fields with one pointer filler map.

        __ bind(&no_inobject_slack_tracking);
      }
      if (create_memento) {
        __ subi(r3, r9, Operand(AllocationMemento::kSize));
        __ InitializeFieldsWithFiller(r8, r3, r10);

        // Fill in memento fields.
        // r8: points to the allocated but uninitialized memento.
        __ LoadRoot(r10, Heap::kAllocationMementoMapRootIndex);
        __ StoreP(r10, MemOperand(r8, AllocationMemento::kMapOffset));
        // Load the AllocationSite
        __ LoadP(r10, MemOperand(sp, 3 * kPointerSize));
        __ AssertUndefinedOrAllocationSite(r10, r3);
        __ StoreP(r10,
                  MemOperand(r8, AllocationMemento::kAllocationSiteOffset));
        __ addi(r8, r8, Operand(AllocationMemento::kAllocationSiteOffset +
                                kPointerSize));
      } else {
        __ InitializeFieldsWithFiller(r8, r9, r10);
      }

      // Add the object tag to make the JSObject real, so that we can continue
      // and jump into the continuation code at any time from now on.
      __ addi(r7, r7, Operand(kHeapObjectTag));

      // Continue with JSObject being successfully allocated
      // r7: JSObject
      __ b(&allocated);

      // Reload the original constructor and fall-through.
      __ bind(&rt_call_reload_new_target);
      __ LoadP(r6, MemOperand(sp, 0 * kPointerSize));
    }
    // Allocate the new receiver object using the runtime call.
    // r4: constructor function
    // r6: original constructor
    __ bind(&rt_call);
    if (create_memento) {
      // Get the cell or allocation site.
      __ LoadP(r5, MemOperand(sp, 3 * kPointerSize));
      __ Push(r5, r4, r6);
      __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 3);
    } else {
      __ Push(r4, r6);
      __ CallRuntime(Runtime::kNewObject, 2);
    }
    __ mr(r7, r3);

    // Runtime_NewObjectWithAllocationSite increments allocation count.
    // Skip the increment.
    Label count_incremented;
    if (create_memento) {
      __ b(&count_incremented);
    }

    // Receiver for constructor call allocated.
    // r7: JSObject
    __ bind(&allocated);

    if (create_memento) {
      __ LoadP(r5, MemOperand(sp, 3 * kPointerSize));
      __ LoadRoot(r8, Heap::kUndefinedValueRootIndex);
      __ cmp(r5, r8);
      __ beq(&count_incremented);
      // r5 is an AllocationSite. We are creating a memento from it, so we
      // need to increment the memento create count.
      __ LoadP(
          r6, FieldMemOperand(r5, AllocationSite::kPretenureCreateCountOffset));
      __ AddSmiLiteral(r6, r6, Smi::FromInt(1), r0);
      __ StoreP(
          r6, FieldMemOperand(r5, AllocationSite::kPretenureCreateCountOffset),
          r0);
      __ bind(&count_incremented);
    }
    // Restore the parameters.
    __ Pop(r4, ip);

    // Retrieve smi-tagged arguments count from the stack.
    __ LoadP(r6, MemOperand(sp));

    // Push new.target onto the construct frame. This is stored just below the
    // receiver on the stack.
    __ Push(ip, r7, r7);

    // Set up pointer to last argument.
    __ addi(r5, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    // r4: constructor function
    // r5: address of last argument (caller sp)
    // r6: number of arguments (smi-tagged)
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: new.target
    // sp[3]: number of arguments (smi-tagged)
    Label loop, no_args;
    __ SmiUntag(r3, r6, SetRC);
    __ beq(&no_args, cr0);
    __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
    __ sub(sp, sp, ip);
    __ mtctr(r3);
    __ bind(&loop);
    __ subi(ip, ip, Operand(kPointerSize));
    __ LoadPX(r0, MemOperand(r5, ip));
    __ StorePX(r0, MemOperand(sp, ip));
    __ bdnz(&loop);
    __ bind(&no_args);
    // Call the function.
    // r3: number of arguments
    // r4: constructor function
    if (is_api_function) {
      __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
      Handle<Code> code = masm->isolate()->builtins()->HandleApiCallConstruct();
      __ Call(code, RelocInfo::CODE_TARGET);
    } else {
      ParameterCount actual(r3);
      __ InvokeFunction(r4, actual, CALL_FUNCTION, NullCallWrapper());
    }

    // Store offset of return address for deoptimizer.
    if (!is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    // r3: result
    // sp[0]: receiver
    // sp[1]: new.target
    // sp[2]: number of arguments (smi-tagged)
    __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7
    // on page 74.
    Label use_receiver, exit;

    // If the result is a smi, it is *not* an object in the ECMA sense.
    // r3: result
    // sp[0]: receiver
    // sp[1]: new.target
    // sp[2]: number of arguments (smi-tagged)
    __ JumpIfSmi(r3, &use_receiver);
    // If the type of the result (stored in its map) is less than
    // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
    __ CompareObjectType(r3, r4, r6, FIRST_SPEC_OBJECT_TYPE);
    __ bge(&exit);

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ LoadP(r3, MemOperand(sp));

    // Remove receiver from the stack, remove caller arguments, and
    // return.
    __ bind(&exit);
    // r3: result
    // sp[0]: receiver (newly allocated object)
    // sp[1]: new.target (original constructor)
    // sp[2]: number of arguments (smi-tagged)
    __ LoadP(r4, MemOperand(sp, 2 * kPointerSize));

    // Leave construct frame.
  }

  __ SmiToPtrArrayOffset(r4, r4);
  __ add(sp, sp, r4);
  __ addi(sp, sp, Operand(kPointerSize));
  __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r4, r5);
  __ blr();
}
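
// Summary (illustration only): the stub above allocates the receiver inline
// from the constructor's initial map when FLAG_inline_new allows it
// (optionally appending an AllocationMemento), and otherwise calls
// Runtime::kNewObject / Runtime::kNewObjectWithAllocationSite. After the
// constructor returns, a non-object result is replaced by the allocated
// receiver, matching ES5 section 13.2.2.
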
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false);
}

void Builtins::Generate_JSConstructStubForDerived(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3     : number of arguments
  //  -- r4     : constructor function
  //  -- r5     : allocation site or undefined
  //  -- r6     : original constructor
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  {
    FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);

    __ AssertUndefinedOrAllocationSite(r5, r7);

    // Smi-tagged arguments count.
    __ mr(r7, r3);
    __ SmiTag(r7, SetRC);

    // receiver is the hole.
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);

    // allocation site, smi arguments count, new.target, receiver
    __ Push(r5, r7, r6, ip);
    // Set up pointer to last argument.
    __ addi(r5, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    // r3: number of arguments
    // r4: constructor function
    // r5: address of last argument (caller sp)
    // r7: number of arguments (smi-tagged)
    // cr0: compare against zero of arguments
    // sp[0]: receiver
    // sp[1]: new.target
    // sp[2]: number of arguments (smi-tagged)
    Label loop, no_args;
    __ beq(&no_args, cr0);
    __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
    __ mtctr(r3);
    __ bind(&loop);
    __ subi(ip, ip, Operand(kPointerSize));
    __ LoadPX(r0, MemOperand(r5, ip));
    __ push(r0);
    __ bdnz(&loop);
    __ bind(&no_args);

    Label skip_step_in;
    ExternalReference debug_step_in_fp =
        ExternalReference::debug_step_in_fp_address(masm->isolate());
    __ mov(r5, Operand(debug_step_in_fp));
    __ LoadP(r5, MemOperand(r5));
    __ and_(r0, r5, r5, SetRC);
    __ beq(&skip_step_in, cr0);

    __ Push(r3, r4, r4);
    __ CallRuntime(Runtime::kHandleStepInForDerivedConstructors, 1);
    __ Pop(r3, r4);

    __ bind(&skip_step_in);
    // Call the function.
    // r3: number of arguments
    // r4: constructor function
    ParameterCount actual(r3);
    __ InvokeFunction(r4, actual, CALL_FUNCTION, NullCallWrapper());

    // Restore context from the frame.
    // r3: result
    // sp[0]: number of arguments (smi-tagged)
    __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Get arguments count, skipping over new.target.
    __ LoadP(r4, MemOperand(sp, kPointerSize));

    // Leave construct frame.
  }

  __ SmiToPtrArrayOffset(r4, r4);
  __ add(sp, sp, r4);
  __ addi(sp, sp, Operand(kPointerSize));
  __ blr();
}

enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };

// Clobbers r5; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm,
                                        const int calleeOffset, Register argc,
                                        IsTagged argc_is_tagged) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
  // Make r5 the space we have left. The stack might already be overflowed
  // here which will cause r5 to become negative.
  __ sub(r5, sp, r5);
  // Check if the arguments will overflow the stack.
  if (argc_is_tagged == kArgcIsSmiTagged) {
    __ SmiToPtrArrayOffset(r0, argc);
  } else {
    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
    __ ShiftLeftImm(r0, argc, Operand(kPointerSizeLog2));
  }
  __ cmp(r5, r0);
  __ bgt(&okay);  // Signed comparison.

  // Out of stack space.
  __ LoadP(r4, MemOperand(fp, calleeOffset));
  if (argc_is_tagged == kArgcIsUntaggedInt) {
    __ SmiTag(argc);
  }
  __ Push(r4, argc);
  __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);

  __ bind(&okay);
}
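
// C-like sketch of the check above (illustration only):
//   intptr_t space_left = sp - real_stack_limit;  // may already be negative
//   if (space_left > argc * kPointerSize) goto okay;
//   Push(caller_frame[calleeOffset], Smi(argc));
//   InvokeBuiltin(STACK_OVERFLOW);  // throws, does not return
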
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from Generate_JS_Entry
  // r3: code entry
  // r4: function
  // r5: receiver
  // r6: argc
  // r7: argv
  // r0,r8-r9, cp may be clobbered
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Clear the context before we push it when entering the internal frame.
  __ li(cp, Operand::Zero());

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context from the function argument.
    __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));

    __ InitializeRootRegister();

    // Push the function and the receiver onto the stack.
    __ Push(r4, r5);

    // Check if we have enough stack space to push all arguments.
    // The function is the first thing that was pushed above after entering
    // the internal frame.
    const int kFunctionOffset =
        InternalFrameConstants::kCodeOffset - kPointerSize;
    Generate_CheckStackOverflow(masm, kFunctionOffset, r6, kArgcIsUntaggedInt);
    // Copy arguments to the stack in a loop.
    // r4: function
    // r6: argc
    // r7: argv, i.e. points to first arg
    Label loop, entry;
    __ ShiftLeftImm(r0, r6, Operand(kPointerSizeLog2));
    __ add(r5, r7, r0);
    // r5 points past last arg.
    __ b(&entry);
    __ bind(&loop);
    __ LoadP(r8, MemOperand(r7));  // read next parameter
    __ addi(r7, r7, Operand(kPointerSize));
    __ LoadP(r0, MemOperand(r8));  // dereference handle
    __ push(r0);                   // push parameter
    __ bind(&entry);
    __ cmp(r7, r5);
    __ bne(&loop);

    // Initialize all JavaScript callee-saved registers, since they will be
    // seen by the garbage collector as part of handlers.
    __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
    __ mr(r14, r7);
    __ mr(r15, r7);
    __ mr(r16, r7);
    __ mr(r17, r7);

    // Invoke the code and pass argc as r3.
    __ mr(r3, r6);
    if (is_construct) {
      // No type feedback cell is available
      __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
      CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
      __ CallStub(&stub);
    } else {
      ParameterCount actual(r3);
      __ InvokeFunction(r4, actual, CALL_FUNCTION, NullCallWrapper());
    }
    // Exit the JS frame and remove the parameters (except function), and
    // return.
  }
  __ blr();

  // r3: result
}

void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}
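
// Illustration only: argv passed to the trampoline is an array of handles,
// so the copy loop above dereferences each slot once before pushing it,
// roughly:
//   for (Object** p = argv; p != argv + argc; p++) push(**p);
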
// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right. The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   o r4: the JS function object being called.
//   o cp: our context
//   o pp: the caller's constant pool pointer (if enabled)
//   o fp: the caller's frame pointer
//   o sp: stack pointer
//   o lr: return address
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-ppc.h for its layout.
// TODO(rmcilroy): We will need to include the current bytecode pointer in the
// frame.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ PushFixedFrame(r4);
  __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));

  // Get the bytecode array from the function object and load the pointer to
  // the first entry into kInterpreterBytecodeArrayRegister.
  __ LoadP(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(kInterpreterBytecodeArrayRegister,
           FieldMemOperand(r3, SharedFunctionInfo::kFunctionDataOffset));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ TestIfSmi(kInterpreterBytecodeArrayRegister, r0);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r3, no_reg,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }
  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size from the BytecodeArray object.
    __ LoadP(r5, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                                 BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ sub(r6, sp, r5);
    __ LoadRoot(r0, Heap::kRealStackLimitRootIndex);
    __ cmpl(r6, r0);
    __ bge(&ok);
    __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file
    // entries.
    // Note: there should always be at least one stack slot for the return
    // register in the register file.
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    Label loop_header;
    __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
    __ ShiftRightImm(r5, r5, Operand(kPointerSizeLog2));
    __ mtctr(r5);
    __ bind(&loop_header);
    __ push(r6);
    __ bdnz(&loop_header);
  }
  // TODO(rmcilroy): List of things not currently dealt with here but done in
  // fullcodegen's prologue:
  //  - Support profiler (specifically profiling_counter).
  //  - Call ProfileEntryHookStub when isolate has a function_entry_hook.
  //  - Allow simulator stop operations if FLAG_stop_at is set.
  //  - Deal with sloppy mode functions which need to replace the
  //    receiver with the global proxy when called as functions (without an
  //    explicit receiver object).
  //  - Code aging of the BytecodeArray object.
  //  - Supporting FLAG_trace.
  //
  // The following items are also not done here, and will probably be done
  // using explicit bytecodes instead:
  //  - Allocating a new local context if applicable.
  //  - Setting up a local binding to the this function, which is used in
  //    derived constructors with super calls.
  //  - Setting new.target if required.
  //  - Dealing with REST parameters (only if
  //    https://codereview.chromium.org/1235153006 doesn't land by then).
  //  - Dealing with argument objects.

  // Perform stack guard check.
  {
    Label ok;
    __ LoadRoot(r0, Heap::kStackLimitRootIndex);
    __ cmpl(sp, r0);
    __ bge(&ok);
    __ CallRuntime(Runtime::kStackGuard, 0);
    __ bind(&ok);
  }
  // Load bytecode offset and dispatch table into registers.
  __ mov(kInterpreterBytecodeOffsetRegister,
         Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
  __ LoadRoot(kInterpreterDispatchTableRegister,
              Heap::kInterpreterTableRootIndex);
  __ addi(kInterpreterDispatchTableRegister, kInterpreterDispatchTableRegister,
          Operand(FixedArray::kHeaderSize - kHeapObjectTag));

  // Dispatch to the first bytecode handler for the function.
  __ lbzx(r3, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
  __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
  __ LoadPX(ip, MemOperand(kInterpreterDispatchTableRegister, ip));
  // TODO(rmcilroy): Make dispatch table point to code entries to avoid
  // untagging and header removal.
  __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(ip);
}
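
// Dispatch sketch (illustration only): the load/jump sequence above is, in
// effect,
//   handler = dispatch_table[bytecode_array[bytecode_offset]];
//   goto handler->instruction_start();
// and each bytecode handler is expected to repeat the same sequence after
// advancing kInterpreterBytecodeOffsetRegister.
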
void Builtins::Generate_InterpreterExitTrampoline(MacroAssembler* masm) {
  // TODO(rmcilroy): List of things not currently dealt with here but done in
  // fullcodegen's EmitReturnSequence.
  //  - Supporting FLAG_trace for Runtime::TraceExit.
  //  - Support profiler (specifically decrementing profiling_counter
  //    appropriately and calling out to HandleInterrupts if necessary).

  // Load return value into r3.
  __ LoadP(r3,
           MemOperand(fp, -kPointerSize -
                              StandardFrameConstants::kFixedFrameSizeFromFp));
  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);
  // Drop receiver + arguments.
  __ Drop(1);  // TODO(rmcilroy): Get number of arguments from BytecodeArray.
  __ blr();
}

void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kCompileLazy);
  GenerateTailCallToReturnedCode(masm);
}

static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack.
  // Push function as parameter to the runtime call.
  __ Push(r4, r4);
  // Whether to compile in a background thread.
  __ LoadRoot(
      r0, concurrent ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
  __ push(r0);

  __ CallRuntime(Runtime::kCompileOptimized, 2);
  // Restore receiver.
  __ Pop(r4);
}

void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  CallCompileOptimized(masm, false);
  GenerateTailCallToReturnedCode(masm);
}


void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  CallCompileOptimized(masm, true);
  GenerateTailCallToReturnedCode(masm);
}

static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // Point r3 at the start of the PlatformCodeAge sequence.
  __ mr(r3, ip);

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   r3 - contains return address (beginning of patch sequence)
  //   r4 - isolate
  //   lr - return address
  FrameScope scope(masm, StackFrame::MANUAL);
  __ mflr(r0);
  __ MultiPush(r0.bit() | r3.bit() | r4.bit() | fp.bit());
  __ PrepareCallCFunction(2, 0, r5);
  __ mov(r4, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  __ MultiPop(r0.bit() | r3.bit() | r4.bit() | fp.bit());
  __ mtlr(r0);
  __ mr(ip, r3);
  __ Jump(ip);
}

#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                  \
  void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }                                                           \
  void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(  \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
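
// For a code age named, say, Quadragenarian in CODE_AGE_LIST (name used here
// for illustration; see the CODE_AGE_LIST macro for the actual entries), the
// macro above expands to Generate_MakeQuadragenarianCodeYoungAgainEvenMarking
// and ...OddMarking, both of which simply defer to
// GenerateMakeCodeYoungAgainCommon.
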
void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // Point r3 at the start of the PlatformCodeAge sequence.
  __ mr(r3, ip);

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   r3 - contains return address (beginning of patch sequence)
  //   r4 - isolate
  //   lr - return address
  FrameScope scope(masm, StackFrame::MANUAL);
  __ mflr(r0);
  __ MultiPush(r0.bit() | r3.bit() | r4.bit() | fp.bit());
  __ PrepareCallCFunction(2, 0, r5);
  __ mov(r4, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
      2);
  __ MultiPop(r0.bit() | r3.bit() | r4.bit() | fp.bit());
  __ mtlr(r0);
  __ mr(ip, r3);

  // Perform prologue operations usually performed by the young code stub.
  __ PushFixedFrame(r4);
  __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));

  // Jump to point after the code-age stub.
  __ addi(r3, ip, Operand(kNoCodeAgeSequenceLength));
  __ Jump(r3);
}

void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}


void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
  Generate_MarkCodeAsExecutedOnce(masm);
}

static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    __ MultiPush(kJSCallerSaved | kCalleeSaved);
    // Pass the function and deoptimization type to the runtime system.
    __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
    __ MultiPop(kJSCallerSaved | kCalleeSaved);
  }

  __ addi(sp, sp, Operand(kPointerSize));  // Ignore state
  __ blr();                                // Jump to miss handler
}

void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}


void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}

static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Pass the function and deoptimization type to the runtime system.
    __ LoadSmiLiteral(r3, Smi::FromInt(static_cast<int>(type)));
    __ push(r3);
    __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
  }

  // Get the full codegen state from the stack and untag it -> r9.
  __ LoadP(r9, MemOperand(sp, 0 * kPointerSize));
  __ SmiUntag(r9);
  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ cmpi(r9, Operand(FullCodeGenerator::NO_REGISTERS));
  __ bne(&with_tos_register);
  __ addi(sp, sp, Operand(1 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&with_tos_register);
  __ LoadP(r3, MemOperand(sp, 1 * kPointerSize));
  __ cmpi(r9, Operand(FullCodeGenerator::TOS_REG));
  __ bne(&unknown_state);
  __ addi(sp, sp, Operand(2 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&unknown_state);
  __ stop("no cases left");
}
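
// State handling sketch (illustration only): the deoptimizer leaves a smi
// state (and possibly a top-of-stack value) on the stack, so the code above
// behaves like
//   switch (state) {
//     case NO_REGISTERS: drop(state); return;
//     case TOS_REG:      r3 = tos; drop(state, tos); return;
//     default:           unreachable("no cases left");
//   }
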
void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}

void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ push(r3);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
  }

  // If the code object is null, just return to the unoptimized code.
  Label skip;
  __ CmpSmiLiteral(r3, Smi::FromInt(0), r0);
  __ bne(&skip);
  __ Ret();

  __ bind(&skip);

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ LoadP(r4, FieldMemOperand(r3, Code::kDeoptimizationDataOffset));

  {
    ConstantPoolUnavailableScope constant_pool_unavailable(masm);
    __ addi(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag));  // Code start

    if (FLAG_enable_embedded_constant_pool) {
      __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r3);
    }

    // Load the OSR entrypoint offset from the deoptimization data.
    // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
    __ LoadP(r4, FieldMemOperand(
                     r4, FixedArray::OffsetOfElementAt(
                             DeoptimizationInputData::kOsrPcOffsetIndex)));
    __ SmiUntag(r4);

    // Compute the target address = code start + osr_offset
    __ add(r0, r3, r4);

    // And "return" to the OSR entry point of the function.
    __ mtlr(r0);
    __ blr();
  }
}
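
// Illustration only: the tail of the builtin effectively performs
//   lr = code->instruction_start() + deopt_data[kOsrPcOffsetIndex];
//   return;  // "returns" into the optimized code at the OSR entry point
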
void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  // We check the stack limit as indicator that recompilation might be done.
  Label ok;
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  __ cmpl(sp, ip);
  __ bge(&ok);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kStackGuard, 0);
  }
  __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
          RelocInfo::CODE_TARGET);

  __ bind(&ok);
  __ Ret();
}

void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // r3: actual number of arguments
  {
    Label done;
    __ cmpi(r3, Operand::Zero());
    __ bne(&done);
    __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
    __ push(r5);
    __ addi(r3, r3, Operand(1));
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack, check
  //    if it is a function.
  // r3: actual number of arguments
  Label slow, non_function;
  __ ShiftLeftImm(r4, r3, Operand(kPointerSizeLog2));
  __ add(r4, sp, r4);
  __ LoadP(r4, MemOperand(r4));
  __ JumpIfSmi(r4, &non_function);
  __ CompareObjectType(r4, r5, r5, JS_FUNCTION_TYPE);
  __ bne(&slow);
  // 3a. Patch the first argument if necessary when calling a function.
  // r3: actual number of arguments
  // r4: function
  Label shift_arguments;
  __ li(r7, Operand::Zero());  // indicate regular JS_FUNCTION
  {
    Label convert_to_object, use_global_proxy, patch_receiver;
    // Change context eagerly in case we need the global receiver.
    __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));

    // Do not transform the receiver for strict mode functions.
    __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
    __ lwz(r6, FieldMemOperand(r5, SharedFunctionInfo::kCompilerHintsOffset));
    __ TestBit(r6,
#if V8_TARGET_ARCH_PPC64
               SharedFunctionInfo::kStrictModeFunction,
#else
               SharedFunctionInfo::kStrictModeFunction + kSmiTagSize,
#endif
               r0);
    __ bne(&shift_arguments, cr0);

    // Do not transform the receiver for native (Compilerhints already in r6).
    __ TestBit(r6,
#if V8_TARGET_ARCH_PPC64
               SharedFunctionInfo::kNative,
#else
               SharedFunctionInfo::kNative + kSmiTagSize,
#endif
               r0);
    __ bne(&shift_arguments, cr0);
    // Compute the receiver in sloppy mode.
    __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
    __ add(r5, sp, ip);
    __ LoadP(r5, MemOperand(r5, -kPointerSize));
    // r3: actual number of arguments
    // r4: function
    // r5: first argument
    __ JumpIfSmi(r5, &convert_to_object);

    __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
    __ cmp(r5, r6);
    __ beq(&use_global_proxy);
    __ LoadRoot(r6, Heap::kNullValueRootIndex);
    __ cmp(r5, r6);
    __ beq(&use_global_proxy);

    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CompareObjectType(r5, r6, r6, FIRST_SPEC_OBJECT_TYPE);
    __ bge(&shift_arguments);

    __ bind(&convert_to_object);

    {
      // Enter an internal frame in order to preserve argument count.
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ SmiTag(r3);
      __ push(r3);
      __ mr(r3, r5);
      ToObjectStub stub(masm->isolate());
      __ CallStub(&stub);
      __ mr(r5, r3);

      __ pop(r3);
      __ SmiUntag(r3);

      // Exit the internal frame.
    }

    // Restore the function to r4, and the flag to r7.
    __ ShiftLeftImm(r7, r3, Operand(kPointerSizeLog2));
    __ add(r7, sp, r7);
    __ LoadP(r4, MemOperand(r7));
    __ li(r7, Operand::Zero());
    __ b(&patch_receiver);

    __ bind(&use_global_proxy);
    __ LoadP(r5, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
    __ LoadP(r5, FieldMemOperand(r5, GlobalObject::kGlobalProxyOffset));

    __ bind(&patch_receiver);
    __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
    __ add(r6, sp, ip);
    __ StoreP(r5, MemOperand(r6, -kPointerSize));

    __ b(&shift_arguments);
  }
  // 3b. Check for function proxy.
  __ bind(&slow);
  __ li(r7, Operand(1, RelocInfo::NONE32));  // indicate function proxy
  __ cmpi(r5, Operand(JS_FUNCTION_PROXY_TYPE));
  __ beq(&shift_arguments);
  __ bind(&non_function);
  __ li(r7, Operand(2, RelocInfo::NONE32));  // indicate non-function

  // 3c. Patch the first argument when calling a non-function. The
  //     CALL_NON_FUNCTION builtin expects the non-function callee as
  //     receiver, so overwrite the first argument which will ultimately
  //     become the receiver.
  // r3: actual number of arguments
  // r4: function
  // r7: call type (0: JS function, 1: function proxy, 2: non-function)
  __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
  __ add(r5, sp, ip);
  __ StoreP(r4, MemOperand(r5, -kPointerSize));

  // 4. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver). Adjust argument count to make
  //    the original first argument the new receiver.
  // r3: actual number of arguments
  // r4: function
  // r7: call type (0: JS function, 1: function proxy, 2: non-function)
  __ bind(&shift_arguments);
  {
    Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
    __ add(r5, sp, ip);

    __ bind(&loop);
    __ LoadP(ip, MemOperand(r5, -kPointerSize));
    __ StoreP(ip, MemOperand(r5));
    __ subi(r5, r5, Operand(kPointerSize));
    __ cmp(r5, sp);
    __ bne(&loop);
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ subi(r3, r3, Operand(1));
    __ Drop(1);
  }
  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
  //     or a function proxy via CALL_FUNCTION_PROXY.
  // r3: actual number of arguments
  // r4: function
  // r7: call type (0: JS function, 1: function proxy, 2: non-function)
  {
    Label function, non_proxy;
    __ cmpi(r7, Operand::Zero());
    __ beq(&function);
    // Expected number of arguments is 0 for CALL_NON_FUNCTION.
    __ li(r5, Operand::Zero());
    __ cmpi(r7, Operand(1));
    __ bne(&non_proxy);

    __ push(r4);  // re-add proxy object as additional argument
    __ addi(r3, r3, Operand(1));
    __ GetBuiltinFunction(r4, Builtins::CALL_FUNCTION_PROXY);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    __ bind(&non_proxy);
    __ GetBuiltinFunction(r4, Builtins::CALL_NON_FUNCTION);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
    __ bind(&function);
  }

  // 5b. Get the code to call from the function and check that the number of
  //     expected arguments matches what we're providing. If so, jump
  //     (tail-call) to the code in register ip without checking arguments.
  // r3: actual number of arguments
  // r4: function
  __ LoadP(r6, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ LoadWordArith(
      r5, FieldMemOperand(r6, SharedFunctionInfo::kFormalParameterCountOffset));
#if !V8_TARGET_ARCH_PPC64
  __ SmiUntag(r5);
#endif
  __ cmp(r5, r3);  // Check formal and actual parameter counts.
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET, ne);

  __ LoadP(ip, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
  ParameterCount expected(0);
  __ InvokeCode(ip, expected, expected, JUMP_FUNCTION, NullCallWrapper());
}
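
// Rough behavior of the builtin above (illustration only): ensure a receiver
// slot exists, coerce the receiver to an object for sloppy-mode calls, then
// shift every argument down one slot so the first argument becomes the new
// receiver, routing proxies and non-functions through CALL_FUNCTION_PROXY
// and CALL_NON_FUNCTION respectively.
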
static void Generate_PushAppliedArguments(MacroAssembler* masm,
                                          const int argumentsOffset,
                                          const int indexOffset,
                                          const int limitOffset) {
  Register receiver = LoadDescriptor::ReceiverRegister();
  Register key = LoadDescriptor::NameRegister();
  Register slot = LoadDescriptor::SlotRegister();
  Register vector = LoadWithVectorDescriptor::VectorRegister();

  // Copy all arguments from the array to the stack.
  Label entry, loop;
  __ LoadP(key, MemOperand(fp, indexOffset));
  __ b(&entry);
  __ bind(&loop);
  __ LoadP(receiver, MemOperand(fp, argumentsOffset));

  // Use inline caching to speed up access to arguments.
  FeedbackVectorSpec spec(0, Code::KEYED_LOAD_IC);
  Handle<TypeFeedbackVector> feedback_vector =
      masm->isolate()->factory()->NewTypeFeedbackVector(&spec);
  int index = feedback_vector->GetIndex(FeedbackVectorICSlot(0));
  __ LoadSmiLiteral(slot, Smi::FromInt(index));
  __ Move(vector, feedback_vector);
  Handle<Code> ic =
      KeyedLoadICStub(masm->isolate(), LoadICState(kNoExtraICState)).GetCode();
  __ Call(ic, RelocInfo::CODE_TARGET);

  // Push the nth argument.
  __ push(r3);

  // Update the index on the stack and in register key.
  __ LoadP(key, MemOperand(fp, indexOffset));
  __ AddSmiLiteral(key, key, Smi::FromInt(1), r0);
  __ StoreP(key, MemOperand(fp, indexOffset));

  // Test if the copy loop has finished copying all the elements from the
  // arguments object.
  __ bind(&entry);
  __ LoadP(r0, MemOperand(fp, limitOffset));
  __ cmp(key, r0);
  __ bne(&loop);

  // On exit, the pushed arguments count is in r3, untagged.
  __ SmiUntag(r3, key);
}
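
// Loop sketch (illustration only):
//   for (key = frame[indexOffset]; key != frame[limitOffset]; key++)
//     push(KeyedLoadIC(frame[argumentsOffset], key));  // arguments[key]
//   r3 = SmiUntag(key);  // number of arguments pushed
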
// Used by FunctionApply and ReflectApply
static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) {
  const int kFormalParameters = targetIsArgument ? 3 : 2;
  const int kStackSize = kFormalParameters + 1;

  {
    FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL);
    const int kArgumentsOffset = kFPOnStackSize + kPCOnStackSize;
    const int kReceiverOffset = kArgumentsOffset + kPointerSize;
    const int kFunctionOffset = kReceiverOffset + kPointerSize;

    __ LoadP(r3, MemOperand(fp, kFunctionOffset));  // get the function
    __ push(r3);
    __ LoadP(r3, MemOperand(fp, kArgumentsOffset));  // get the args array
    __ push(r3);
    if (targetIsArgument) {
      __ InvokeBuiltin(Builtins::REFLECT_APPLY_PREPARE, CALL_FUNCTION);
    } else {
      __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
    }

    Generate_CheckStackOverflow(masm, kFunctionOffset, r3, kArgcIsSmiTagged);

    // Push current limit and index.
    const int kIndexOffset =
        StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize);
    const int kLimitOffset =
        StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize);
    __ li(r4, Operand::Zero());
    __ Push(r3, r4);  // limit and initial index.

    // Get the receiver.
    __ LoadP(r3, MemOperand(fp, kReceiverOffset));

    // Check that the function is a JS function (otherwise it must be a proxy).
    Label push_receiver;
    __ LoadP(r4, MemOperand(fp, kFunctionOffset));
    __ CompareObjectType(r4, r5, r5, JS_FUNCTION_TYPE);
    __ bne(&push_receiver);
    // Change context eagerly to get the right global object if necessary.
    __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
    // Load the shared function info while the function is still in r4.
    __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));

    // Compute the receiver.
    // Do not transform the receiver for strict mode functions.
    Label call_to_object, use_global_proxy;
    __ lwz(r5, FieldMemOperand(r5, SharedFunctionInfo::kCompilerHintsOffset));
    __ TestBit(r5,
#if V8_TARGET_ARCH_PPC64
               SharedFunctionInfo::kStrictModeFunction,
#else
               SharedFunctionInfo::kStrictModeFunction + kSmiTagSize,
#endif
               r0);
    __ bne(&push_receiver, cr0);

    // Do not transform the receiver for native functions.
    __ TestBit(r5,
#if V8_TARGET_ARCH_PPC64
               SharedFunctionInfo::kNative,
#else
               SharedFunctionInfo::kNative + kSmiTagSize,
#endif
               r0);
    __ bne(&push_receiver, cr0);

    // Compute the receiver in sloppy mode.
    __ JumpIfSmi(r3, &call_to_object);
    __ LoadRoot(r4, Heap::kNullValueRootIndex);
    __ cmp(r3, r4);
    __ beq(&use_global_proxy);
    __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
    __ cmp(r3, r4);
    __ beq(&use_global_proxy);

    // Check if the receiver is already a JavaScript object.
    // r3: receiver
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CompareObjectType(r3, r4, r4, FIRST_SPEC_OBJECT_TYPE);
    __ bge(&push_receiver);

    // Convert the receiver to a regular object.
    // r3: receiver
    __ bind(&call_to_object);
    ToObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    __ b(&push_receiver);

    __ bind(&use_global_proxy);
    __ LoadP(r3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
    __ LoadP(r3, FieldMemOperand(r3, GlobalObject::kGlobalProxyOffset));

    // Push the receiver.
    // r3: receiver
    __ bind(&push_receiver);
    __ push(r3);

    // Copy all arguments from the array to the stack.
    Generate_PushAppliedArguments(masm, kArgumentsOffset, kIndexOffset,
                                  kLimitOffset);
    // Call the function.
    Label call_proxy;
    ParameterCount actual(r3);
    __ LoadP(r4, MemOperand(fp, kFunctionOffset));
    __ CompareObjectType(r4, r5, r5, JS_FUNCTION_TYPE);
    __ bne(&call_proxy);
    __ InvokeFunction(r4, actual, CALL_FUNCTION, NullCallWrapper());

    __ LeaveFrame(StackFrame::INTERNAL, kStackSize * kPointerSize);
    __ blr();

    // Call the function proxy.
    __ bind(&call_proxy);
    __ push(r4);  // add function proxy as last argument
    __ addi(r3, r3, Operand(1));
    __ li(r5, Operand::Zero());
    __ GetBuiltinFunction(r4, Builtins::CALL_FUNCTION_PROXY);
    __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    // Tear down the internal frame and remove function, receiver and args.
  }
  __ addi(sp, sp, Operand(kStackSize * kPointerSize));
  __ blr();
}

static void Generate_ConstructHelper(MacroAssembler* masm) {
  const int kFormalParameters = 3;
  const int kStackSize = kFormalParameters + 1;

  {
    FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL);
    const int kNewTargetOffset = kFPOnStackSize + kPCOnStackSize;
    const int kArgumentsOffset = kNewTargetOffset + kPointerSize;
    const int kFunctionOffset = kArgumentsOffset + kPointerSize;

    // If newTarget is not supplied, set it to constructor
    Label validate_arguments;
    __ LoadP(r3, MemOperand(fp, kNewTargetOffset));
    __ CompareRoot(r3, Heap::kUndefinedValueRootIndex);
    __ bne(&validate_arguments);
    __ LoadP(r3, MemOperand(fp, kFunctionOffset));
    __ StoreP(r3, MemOperand(fp, kNewTargetOffset));

    // Validate arguments
    __ bind(&validate_arguments);
    __ LoadP(r3, MemOperand(fp, kFunctionOffset));  // get the function
    __ push(r3);
    __ LoadP(r3, MemOperand(fp, kArgumentsOffset));  // get the args array
    __ push(r3);
    __ LoadP(r3, MemOperand(fp, kNewTargetOffset));  // get the new.target
    __ push(r3);
    __ InvokeBuiltin(Builtins::REFLECT_CONSTRUCT_PREPARE, CALL_FUNCTION);

    Generate_CheckStackOverflow(masm, kFunctionOffset, r3, kArgcIsSmiTagged);

    // Push current limit and index.
    const int kIndexOffset =
        StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize);
    const int kLimitOffset =
        StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize);
    __ li(r4, Operand::Zero());
    __ Push(r3, r4);  // limit and initial index.
    // Push the constructor function as callee
    __ LoadP(r3, MemOperand(fp, kFunctionOffset));
    __ push(r3);

    // Copy all arguments from the array to the stack.
    Generate_PushAppliedArguments(masm, kArgumentsOffset, kIndexOffset,
                                  kLimitOffset);

    // Use undefined feedback vector
    __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
    __ LoadP(r4, MemOperand(fp, kFunctionOffset));
    __ LoadP(r7, MemOperand(fp, kNewTargetOffset));

    // Call the function.
    CallConstructStub stub(masm->isolate(), SUPER_CONSTRUCTOR_CALL);
    __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);

    // Leave internal frame.
  }
  __ addi(sp, sp, Operand(kStackSize * kPointerSize));
  __ blr();
}

void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  Generate_ApplyHelper(masm, false);
}


void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  Generate_ApplyHelper(masm, true);
}


void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  Generate_ConstructHelper(masm);
}

static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
                                      Label* stack_overflow) {
  // ----------- S t a t e -------------
  //  -- r3 : actual number of arguments
  //  -- r4 : function (passed through to callee)
  //  -- r5 : expected number of arguments
  // -----------------------------------
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  __ LoadRoot(r8, Heap::kRealStackLimitRootIndex);
  // Make r8 the space we have left. The stack might already be overflowed
  // here which will cause r8 to become negative.
  __ sub(r8, sp, r8);
  // Check if the arguments will overflow the stack.
  __ ShiftLeftImm(r0, r5, Operand(kPointerSizeLog2));
  __ cmp(r8, r0);
  __ ble(stack_overflow);  // Signed comparison.
}

static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ SmiTag(r3);
  __ LoadSmiLiteral(r7, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ mflr(r0);
  __ push(r0);
  if (FLAG_enable_embedded_constant_pool) {
    __ Push(fp, kConstantPoolRegister, r7, r4, r3);
  } else {
    __ Push(fp, r7, r4, r3);
  }
  __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                          kPointerSize));
}

static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then tear down the parameters.
  __ LoadP(r4, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                                kPointerSize)));
  int stack_adjustment = kPointerSize;  // adjust for receiver
  __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR, stack_adjustment);
  __ SmiToPtrArrayOffset(r0, r4);
  __ add(sp, sp, r0);
}
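
// Teardown sketch (illustration only): the adaptor frame keeps the actual
// argument count (as a smi) just below the fixed frame, so leaving amounts to
//   argc = untag(frame[-(kFixedFrameSizeFromFp + kPointerSize)]);
//   LeaveFrame();               // plus one slot for the receiver
//   sp += argc * kPointerSize;  // drop the copied arguments
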
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : actual number of arguments
  //  -- r4 : function (passed through to callee)
  //  -- r5 : expected number of arguments
  // -----------------------------------

  Label stack_overflow;
  ArgumentAdaptorStackCheck(masm, &stack_overflow);
  Label invoke, dont_adapt_arguments;

  Label enough, too_few;
  __ LoadP(ip, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
  __ cmp(r3, r5);
  __ blt(&too_few);
  __ cmpi(r5, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ beq(&dont_adapt_arguments);
  {  // Enough parameters: actual >= expected
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into r3 and copy end address into r5.
    // r3: actual number of arguments as a smi
    // r4: function
    // r5: expected number of arguments
    // ip: code entry to call
    __ SmiToPtrArrayOffset(r3, r3);
    __ add(r3, r3, fp);
    // adjust for return address and receiver
    __ addi(r3, r3, Operand(2 * kPointerSize));
    __ ShiftLeftImm(r5, r5, Operand(kPointerSizeLog2));
    __ sub(r5, r3, r5);

    // Copy the arguments (including the receiver) to the new stack frame.
    // r3: copy start address
    // r4: function
    // r5: copy end address
    // ip: code entry to call

    Label copy;
    __ bind(&copy);
    __ LoadP(r0, MemOperand(r3, 0));
    __ push(r0);
    __ cmp(r3, r5);  // Compare before moving to next argument.
    __ subi(r3, r3, Operand(kPointerSize));
    __ bne(&copy);

    __ b(&invoke);
  }
  {  // Too few parameters: Actual < expected
    __ bind(&too_few);

    // If the function is strong we need to throw an error.
    Label no_strong_error;
    __ LoadP(r7, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
    __ lwz(r8, FieldMemOperand(r7, SharedFunctionInfo::kCompilerHintsOffset));
    __ TestBit(r8,
#if V8_TARGET_ARCH_PPC64
               SharedFunctionInfo::kStrongModeFunction,
#else
               SharedFunctionInfo::kStrongModeFunction + kSmiTagSize,
#endif
               r0);
    __ beq(&no_strong_error, cr0);

    // What we really care about is the required number of arguments.
    __ lwz(r7, FieldMemOperand(r7, SharedFunctionInfo::kLengthOffset));
#if V8_TARGET_ARCH_PPC64
    // See comment near kLengthOffset in src/objects.h
    __ srawi(r7, r7, kSmiTagSize);
#else
    __ SmiUntag(r7);
#endif
    __ cmp(r3, r7);
    __ bge(&no_strong_error);

    {
      FrameScope frame(masm, StackFrame::MANUAL);
      EnterArgumentsAdaptorFrame(masm);
      __ CallRuntime(Runtime::kThrowStrongModeTooFewArguments, 0);
    }

    __ bind(&no_strong_error);
    EnterArgumentsAdaptorFrame(masm);
    // Calculate copy start address into r0 and copy end address is fp.
    // r3: actual number of arguments as a smi
    // r4: function
    // r5: expected number of arguments
    // ip: code entry to call
    __ SmiToPtrArrayOffset(r3, r3);
    __ add(r3, r3, fp);

    // Copy the arguments (including the receiver) to the new stack frame.
    // r3: copy start address
    // r4: function
    // r5: expected number of arguments
    // ip: code entry to call
    Label copy;
    __ bind(&copy);
    // Adjust load for return address and receiver.
    __ LoadP(r0, MemOperand(r3, 2 * kPointerSize));
    __ push(r0);
    __ cmp(r3, fp);  // Compare before moving to next argument.
    __ subi(r3, r3, Operand(kPointerSize));
    __ bne(&copy);

    // Fill the remaining expected arguments with undefined.
    // r4: function
    // r5: expected number of arguments
    // ip: code entry to call
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
    __ ShiftLeftImm(r5, r5, Operand(kPointerSizeLog2));
    __ sub(r5, fp, r5);
    // Adjust for frame.
    __ subi(r5, r5, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                            2 * kPointerSize));

    Label fill;
    __ bind(&fill);
    __ push(r0);
    __ cmp(sp, r5);
    __ bne(&fill);
  }
  // Call the entry point.
  __ bind(&invoke);
  __ CallJSEntry(ip);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ blr();

  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ JumpToJSEntry(ip);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    EnterArgumentsAdaptorFrame(masm);
    __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
    __ bkpt(0);
  }
}


#undef __
}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_PPC