// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen/full-codegen.h"
#include "src/interpreter/bytecodes.h"
#include "src/runtime/runtime.h"

#if V8_TARGET_ARCH_PPC

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)
void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- r3                 : number of arguments excluding receiver
  //  -- r4                 : called function (only guaranteed when
  //                          extra_args requires it)
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument (argc == r3)
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------

  // Insert extra arguments.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    __ push(r4);
    num_extra_args = 1;
  } else {
    DCHECK(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects r3 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ addi(r3, r3, Operand(num_extra_args + 1));
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}
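
// A rough sketch of what the emitted adaptor does for the
// NEEDS_CALLED_FUNCTION case (illustrative pseudo-assembly, not the exact
// generated sequence):
//   push r4              ; called function becomes an extra argument
//   addi r3, r3, 2       ; argc += 1 extra argument + 1 receiver
//   b    <C entry for id>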
// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the native context.
  __ LoadP(result,
           MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ LoadP(result, FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the InternalArray function from the native context.
  __ LoadP(result,
           MemOperand(result, Context::SlotOffset(
                                  Context::INTERNAL_ARRAY_FUNCTION_INDEX)));
}
// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the native context.
  __ LoadP(result,
           MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ LoadP(result, FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the Array function from the native context.
  __ LoadP(
      result,
      MemOperand(result, Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
}
void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, r4);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ LoadP(r5, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
    __ TestIfSmi(r5, r0);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction, cr0);
    __ CompareObjectType(r5, r6, r7, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // tail call a stub
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}
void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, r4);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ LoadP(r5, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
    __ TestIfSmi(r5, r0);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction, cr0);
    __ CompareObjectType(r5, r6, r7, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
  }

  // Run the native code for the Array function called as a normal function.
  // tail call a stub
  __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}
void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3                     : number of arguments
  //  -- r4                     : constructor function
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_ctor_calls(), 1, r5, r6);

  Register function = r4;
  if (FLAG_debug_code) {
    __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, r5);
    __ cmp(function, r5);
    __ Assert(eq, kUnexpectedStringFunction);
  }

  // Load the first argument into r3 and get rid of the rest.
  Label no_arguments;
  __ cmpi(r3, Operand::Zero());
  __ beq(&no_arguments);
  // First arg = sp[(argc - 1) * 4].
  __ subi(r3, r3, Operand(1));
  __ ShiftLeftImm(r3, r3, Operand(kPointerSizeLog2));
  __ add(sp, sp, r3);
  __ LoadP(r3, MemOperand(sp));
  // sp now points to args[0]; drop args[0] + receiver.
  __ Drop(2);

  Register argument = r5;
  Label not_cached, argument_is_string;
  __ LookupNumberStringCache(r3,  // Input.
                             r5,  // Result.
                             r6,  // Scratch.
                             r7,  // Scratch.
                             &not_cached);
  __ IncrementCounter(counters->string_ctor_cached_number(), 1, r6, r7);
  __ bind(&argument_is_string);

  // ----------- S t a t e -------------
  //  -- r5 : argument converted to string
  //  -- r4 : constructor function
  //  -- lr : return address
  // -----------------------------------

  Label gc_required;
  __ Allocate(JSValue::kSize,
              r3,  // Result.
              r6,  // Scratch.
              r7,  // Scratch.
              &gc_required, TAG_OBJECT);

  // Initialising the String Object.
  Register map = r6;
  __ LoadGlobalFunctionInitialMap(function, map, r7);
  if (FLAG_debug_code) {
    __ lbz(r7, FieldMemOperand(map, Map::kInstanceSizeOffset));
    __ cmpi(r7, Operand(JSValue::kSize >> kPointerSizeLog2));
    __ Assert(eq, kUnexpectedStringWrapperInstanceSize);
    __ lbz(r7, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
    __ cmpi(r7, Operand::Zero());
    __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper);
  }
  __ StoreP(map, FieldMemOperand(r3, HeapObject::kMapOffset), r0);

  __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
  __ StoreP(r6, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0);
  __ StoreP(r6, FieldMemOperand(r3, JSObject::kElementsOffset), r0);

  __ StoreP(argument, FieldMemOperand(r3, JSValue::kValueOffset), r0);

  // Ensure the object is fully initialized.
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
  __ Ret();

  // The argument was not found in the number to string cache. Check
  // if it's a string already before calling the conversion builtin.
  Label convert_argument;
  __ bind(&not_cached);
  __ JumpIfSmi(r3, &convert_argument);

  // Is it a String?
  __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ lbz(r6, FieldMemOperand(r5, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  __ andi(r0, r6, Operand(kIsNotStringMask));
  __ bne(&convert_argument, cr0);
  __ mr(argument, r3);
  __ IncrementCounter(counters->string_ctor_conversions(), 1, r6, r7);
  __ b(&argument_is_string);

  // Invoke the conversion builtin and put the result into r5.
  __ bind(&convert_argument);
  __ push(function);  // Preserve the function.
  __ IncrementCounter(counters->string_ctor_conversions(), 1, r6, r7);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ push(r3);
    __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
  }
  __ pop(function);
  __ mr(argument, r3);
  __ b(&argument_is_string);

  // Load the empty string into r5, remove the receiver from the
  // stack, and jump back to the case where the argument is a string.
  __ bind(&no_arguments);
  __ LoadRoot(argument, Heap::kempty_stringRootIndex);
  __ Drop(1);
  __ b(&argument_is_string);

  // At this point the argument is already a string. Call runtime to
  // create a string wrapper.
  __ bind(&gc_required);
  __ IncrementCounter(counters->string_ctor_gc_required(), 1, r6, r7);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ push(argument);
    __ CallRuntime(Runtime::kNewStringWrapper, 1);
  }
  __ Ret();
}
static void CallRuntimePassFunction(MacroAssembler* masm,
                                    Runtime::FunctionId function_id) {
  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack.
  // Push function as parameter to the runtime call.
  __ Push(r4, r4);

  __ CallRuntime(function_id, 1);
  // Restore receiver.
  __ Pop(r4);
}
static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ LoadP(ip, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(ip, FieldMemOperand(ip, SharedFunctionInfo::kCodeOffset));
  __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}


static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
  __ addi(ip, r3, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}
void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive. A good compromise is to first check against
  // the stack limit as a cue for an interrupt signal.
  Label ok;
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  __ cmpl(sp, ip);
  __ bge(&ok);

  CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
  GenerateTailCallToReturnedCode(masm);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}
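
// The policy above, sketched in C-like pseudocode (illustrative only):
//   if (sp >= stack_limit) {
//     tail_call(shared_unoptimized_code);   // no interrupt pending
//   } else {
//     tail_call(runtime(TryInstallOptimizedCode, function));
//   }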
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_memento) {
  // ----------- S t a t e -------------
  //  -- r3     : number of arguments
  //  -- r4     : constructor function
  //  -- r5     : allocation site or undefined
  //  -- r6     : original constructor
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  // Should never create mementos for api functions.
  DCHECK(!is_api_function || !create_memento);

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ AssertUndefinedOrAllocationSite(r5, r7);
    __ SmiTag(r3);
    __ Push(r5, r3, r4, r6);
    // Try to allocate the object without transitioning into C code. If any of
    // the preconditions is not met, the code bails out to the runtime call.
    Label rt_call, allocated;
    if (FLAG_inline_new) {
      ExternalReference debug_step_in_fp =
          ExternalReference::debug_step_in_fp_address(isolate);
      __ mov(r5, Operand(debug_step_in_fp));
      __ LoadP(r5, MemOperand(r5));
      __ cmpi(r5, Operand::Zero());
      __ bne(&rt_call);

      // Fall back to runtime if the original constructor and function differ.
      __ cmp(r4, r6);
      __ bne(&rt_call);

      // Load the initial map and verify that it is in fact a map.
      // r4: constructor function
      __ LoadP(r5,
               FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
      __ JumpIfSmi(r5, &rt_call);
      __ CompareObjectType(r5, r8, r7, MAP_TYPE);
      __ bne(&rt_call);

      // Check that the constructor is not constructing a JSFunction (see
      // comments in Runtime_NewObject in runtime.cc). In which case the
      // initial map's instance type would be JS_FUNCTION_TYPE.
      // r4: constructor function
      // r5: initial map
      __ CompareInstanceType(r5, r8, JS_FUNCTION_TYPE);
      __ beq(&rt_call);

      if (!is_api_function) {
        Label allocate;
        MemOperand bit_field3 = FieldMemOperand(r5, Map::kBitField3Offset);
        // Check if slack tracking is enabled.
        __ lwz(r7, bit_field3);
        __ DecodeField<Map::Counter>(r11, r7);
        __ cmpi(r11, Operand(Map::kSlackTrackingCounterEnd));
        __ blt(&allocate);
        // Decrease generous allocation count.
        __ Add(r7, r7, -(1 << Map::Counter::kShift), r0);
        __ stw(r7, bit_field3);
        __ cmpi(r11, Operand(Map::kSlackTrackingCounterEnd));
        __ bne(&allocate);

        __ push(r4);

        __ Push(r5, r4);  // r4 = constructor
        __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);

        __ Pop(r4, r5);

        __ bind(&allocate);
      }
      // Now allocate the JSObject on the heap.
      // r4: constructor function
      // r5: initial map
      Label rt_call_reload_new_target;
      __ lbz(r6, FieldMemOperand(r5, Map::kInstanceSizeOffset));
      if (create_memento) {
        __ addi(r6, r6, Operand(AllocationMemento::kSize / kPointerSize));
      }

      __ Allocate(r6, r7, r8, r9, &rt_call_reload_new_target, SIZE_IN_WORDS);

      // Allocated the JSObject, now initialize the fields. Map is set to
      // initial map and properties and elements are set to empty fixed array.
      // r4: constructor function
      // r5: initial map
      // r6: object size (including memento if create_memento)
      // r7: JSObject (not tagged)
      __ LoadRoot(r9, Heap::kEmptyFixedArrayRootIndex);
      __ mr(r8, r7);
      __ StoreP(r5, MemOperand(r8, JSObject::kMapOffset));
      __ StoreP(r9, MemOperand(r8, JSObject::kPropertiesOffset));
      __ StoreP(r9, MemOperand(r8, JSObject::kElementsOffset));
      __ addi(r8, r8, Operand(JSObject::kElementsOffset + kPointerSize));

      __ ShiftLeftImm(r9, r6, Operand(kPointerSizeLog2));
      __ add(r9, r7, r9);  // End of object.

      // Fill all the in-object properties with the appropriate filler.
      // r4: constructor function
      // r5: initial map
      // r6: object size (in words, including memento if create_memento)
      // r7: JSObject (not tagged)
      // r8: first in-object property of the JSObject (not tagged)
      // r9: end of object
      DCHECK_EQ(3 * kPointerSize, JSObject::kHeaderSize);
      __ LoadRoot(r10, Heap::kUndefinedValueRootIndex);
      if (!is_api_function) {
        Label no_inobject_slack_tracking;

        // Check if slack tracking is enabled.
        __ cmpi(r11, Operand(Map::kSlackTrackingCounterEnd));
        __ blt(&no_inobject_slack_tracking);

        // Allocate object with a slack.
        __ lbz(r3,
               FieldMemOperand(
                   r5, Map::kInObjectPropertiesOrConstructorFunctionIndexOffset));
        __ lbz(r5, FieldMemOperand(r5, Map::kUnusedPropertyFieldsOffset));
        __ sub(r3, r3, r5);
        if (FLAG_debug_code) {
          __ ShiftLeftImm(r0, r3, Operand(kPointerSizeLog2));
          __ add(r0, r8, r0);
          // r0: offset of first field after pre-allocated fields
          __ cmp(r0, r9);
          __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields);
        }
        {
          Label done;
          __ cmpi(r3, Operand::Zero());
          __ beq(&done);
          __ InitializeNFieldsWithFiller(r8, r3, r10);
          __ bind(&done);
        }
        // To allow for truncation.
        __ LoadRoot(r10, Heap::kOnePointerFillerMapRootIndex);
        // Fill the remaining fields with one pointer filler map.

        __ bind(&no_inobject_slack_tracking);
      }

      if (create_memento) {
        __ subi(r3, r9, Operand(AllocationMemento::kSize));
        __ InitializeFieldsWithFiller(r8, r3, r10);

        // Fill in memento fields.
        // r8: points to the allocated but uninitialized memento.
        __ LoadRoot(r10, Heap::kAllocationMementoMapRootIndex);
        __ StoreP(r10, MemOperand(r8, AllocationMemento::kMapOffset));
        // Load the AllocationSite
        __ LoadP(r10, MemOperand(sp, 3 * kPointerSize));
        __ AssertUndefinedOrAllocationSite(r10, r3);
        __ StoreP(r10,
                  MemOperand(r8, AllocationMemento::kAllocationSiteOffset));
        __ addi(r8, r8, Operand(AllocationMemento::kAllocationSiteOffset +
                                kPointerSize));
      } else {
        __ InitializeFieldsWithFiller(r8, r9, r10);
      }

      // Add the object tag to make the JSObject real, so that we can continue
      // and jump into the continuation code at any time from now on.
      __ addi(r7, r7, Operand(kHeapObjectTag));

      // Continue with JSObject being successfully allocated
      // r7: JSObject
      __ b(&allocated);
      // Reload the original constructor and fall-through.
      __ bind(&rt_call_reload_new_target);
      __ LoadP(r6, MemOperand(sp, 0 * kPointerSize));
    }

    // Allocate the new receiver object using the runtime call.
    // r4: constructor function
    // r6: original constructor
    __ bind(&rt_call);
    if (create_memento) {
      // Get the cell or allocation site.
      __ LoadP(r5, MemOperand(sp, 3 * kPointerSize));
      __ Push(r5, r4, r6);
      __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 3);
    } else {
      __ Push(r4, r6);
      __ CallRuntime(Runtime::kNewObject, 2);
    }
    __ mr(r7, r3);

    // Runtime_NewObjectWithAllocationSite increments allocation count.
    // Skip the increment.
    Label count_incremented;
    if (create_memento) {
      __ b(&count_incremented);
    }

    // Receiver for constructor call allocated.
    // r7: JSObject
    __ bind(&allocated);

    if (create_memento) {
      __ LoadP(r5, MemOperand(sp, 3 * kPointerSize));
      __ LoadRoot(r8, Heap::kUndefinedValueRootIndex);
      __ cmp(r5, r8);
      __ beq(&count_incremented);
      // r5 is an AllocationSite. We are creating a memento from it, so we
      // need to increment the memento create count.
      __ LoadP(
          r6, FieldMemOperand(r5, AllocationSite::kPretenureCreateCountOffset));
      __ AddSmiLiteral(r6, r6, Smi::FromInt(1), r0);
      __ StoreP(
          r6, FieldMemOperand(r5, AllocationSite::kPretenureCreateCountOffset),
          r0);
      __ bind(&count_incremented);
    }
    // Restore the parameters.
    __ Pop(r4, ip);

    // Retrieve smi-tagged arguments count from the stack.
    __ LoadP(r6, MemOperand(sp));

    // Push new.target onto the construct frame. This is stored just below the
    // receiver on the stack.
    __ Push(ip, r7, r7);

    // Set up pointer to last argument.
    __ addi(r5, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    // r4: constructor function
    // r5: address of last argument (caller sp)
    // r6: number of arguments (smi-tagged)
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: new.target
    // sp[3]: number of arguments (smi-tagged)
    Label loop, no_args;
    __ SmiUntag(r3, r6, SetRC);
    __ beq(&no_args, cr0);
    __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
    __ sub(sp, sp, ip);
    __ mtctr(r3);
    __ bind(&loop);
    __ subi(ip, ip, Operand(kPointerSize));
    __ LoadPX(r0, MemOperand(r5, ip));
    __ StorePX(r0, MemOperand(sp, ip));
    __ bdnz(&loop);
    __ bind(&no_args);
    // Call the function.
    // r3: number of arguments
    // r4: constructor function
    if (is_api_function) {
      __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
      Handle<Code> code = masm->isolate()->builtins()->HandleApiCallConstruct();
      __ Call(code, RelocInfo::CODE_TARGET);
    } else {
      ParameterCount actual(r3);
      __ InvokeFunction(r4, actual, CALL_FUNCTION, NullCallWrapper());
    }

    // Store offset of return address for deoptimizer.
    if (!is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    // r3: result
    // sp[0]: receiver
    // sp[1]: new.target
    // sp[2]: number of arguments (smi-tagged)
    __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7.
    Label use_receiver, exit;

    // If the result is a smi, it is *not* an object in the ECMA sense.
    // r3: result
    // sp[0]: receiver
    // sp[1]: new.target
    // sp[2]: number of arguments (smi-tagged)
    __ JumpIfSmi(r3, &use_receiver);

    // If the type of the result (stored in its map) is less than
    // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
    __ CompareObjectType(r3, r4, r6, FIRST_SPEC_OBJECT_TYPE);
    __ bge(&exit);

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ LoadP(r3, MemOperand(sp));

    // Remove receiver from the stack, remove caller arguments, and
    // return.
    __ bind(&exit);
    // r3: result
    // sp[0]: receiver (newly allocated object)
    // sp[1]: new.target (original constructor)
    // sp[2]: number of arguments (smi-tagged)
    __ LoadP(r4, MemOperand(sp, 2 * kPointerSize));

    // Leave construct frame.
  }

  __ SmiToPtrArrayOffset(r4, r4);
  __ add(sp, sp, r4);
  __ addi(sp, sp, Operand(kPointerSize));
  __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r4, r5);
  __ blr();
}
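
// The construct-stub result selection above follows ECMA-262 section
// 13.2.2-7; roughly, at the JS level (illustrative sketch only):
//   var result = [[Call]](constructor, receiver, args);
//   return IsSpecObject(result) ? result : receiver;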
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false);
}
void Builtins::Generate_JSConstructStubForDerived(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3     : number of arguments
  //  -- r4     : constructor function
  //  -- r5     : allocation site or undefined
  //  -- r6     : original constructor
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  {
    FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);

    __ AssertUndefinedOrAllocationSite(r5, r7);

    // Smi-tagged arguments count.
    __ mr(r7, r3);
    __ SmiTag(r7, SetRC);

    // receiver is the hole.
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);

    // allocation site, smi arguments count, new.target, receiver
    __ Push(r5, r7, r6, ip);

    // Set up pointer to last argument.
    __ addi(r5, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    // r3: number of arguments
    // r4: constructor function
    // r5: address of last argument (caller sp)
    // r7: number of arguments (smi-tagged)
    // cr0: compare against zero of arguments
    // sp[0]: receiver
    // sp[1]: new.target
    // sp[2]: number of arguments (smi-tagged)
    Label loop, no_args;
    __ beq(&no_args, cr0);
    __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
    __ sub(sp, sp, ip);
    __ mtctr(r3);
    __ bind(&loop);
    __ subi(ip, ip, Operand(kPointerSize));
    __ LoadPX(r0, MemOperand(r5, ip));
    __ StorePX(r0, MemOperand(sp, ip));
    __ bdnz(&loop);
    __ bind(&no_args);

    Label skip_step_in;
    ExternalReference debug_step_in_fp =
        ExternalReference::debug_step_in_fp_address(masm->isolate());
    __ mov(r5, Operand(debug_step_in_fp));
    __ LoadP(r5, MemOperand(r5));
    __ and_(r0, r5, r5, SetRC);
    __ beq(&skip_step_in, cr0);

    __ Push(r3, r4, r4);
    __ CallRuntime(Runtime::kHandleStepInForDerivedConstructors, 1);
    __ Pop(r3, r4);

    __ bind(&skip_step_in);

    // Call the function.
    // r3: number of arguments
    // r4: constructor function
    ParameterCount actual(r3);
    __ InvokeFunction(r4, actual, CALL_FUNCTION, NullCallWrapper());

    // Restore context from the frame.
    // r3: result
    // sp[0]: number of arguments (smi-tagged)
    __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Get arguments count, skipping over new.target.
    __ LoadP(r4, MemOperand(sp, kPointerSize));

    // Leave construct frame.
  }

  __ SmiToPtrArrayOffset(r4, r4);
  __ add(sp, sp, r4);
  __ addi(sp, sp, Operand(kPointerSize));
  __ blr();
}
enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };
// Clobbers r5; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm,
                                        const int calleeOffset, Register argc,
                                        IsTagged argc_is_tagged) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
  // Make r5 the space we have left. The stack might already be overflowed
  // here which will cause r5 to become negative.
  __ sub(r5, sp, r5);
  // Check if the arguments will overflow the stack.
  if (argc_is_tagged == kArgcIsSmiTagged) {
    __ SmiToPtrArrayOffset(r0, argc);
  } else {
    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
    __ ShiftLeftImm(r0, argc, Operand(kPointerSizeLog2));
  }
  __ cmp(r5, r0);
  __ bgt(&okay);  // Signed comparison.

  // Out of stack space.
  __ LoadP(r4, MemOperand(fp, calleeOffset));
  if (argc_is_tagged == kArgcIsUntaggedInt) {
    __ SmiTag(argc);
  }
  __ Push(r4, argc);
  __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);

  __ bind(&okay);
}
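
// The check above amounts to the following C-like sketch (not generated
// code; "room" may be negative if the stack is already overflowed):
//   intptr_t room = sp - real_stack_limit;
//   if (room <= argc * kPointerSize)  // signed comparison
//     InvokeBuiltin(STACK_OVERFLOW);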
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from Generate_JS_Entry
  // r3: code entry
  // r4: function
  // r5: receiver
  // r6: argc
  // r7: argv
  // r0,r8-r9, cp may be clobbered
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Clear the context before we push it when entering the internal frame.
  __ li(cp, Operand::Zero());

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context from the function argument.
    __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));

    __ InitializeRootRegister();

    // Push the function and the receiver onto the stack.
    __ Push(r4, r5);

    // Check if we have enough stack space to push all arguments.
    // The function is the first thing that was pushed above after entering
    // the internal frame.
    const int kFunctionOffset =
        InternalFrameConstants::kCodeOffset - kPointerSize;
    Generate_CheckStackOverflow(masm, kFunctionOffset, r6, kArgcIsUntaggedInt);

    // Copy arguments to the stack in a loop.
    // r4: function
    // r6: argc
    // r7: argv, i.e. points to first arg
    Label loop, entry;
    __ ShiftLeftImm(r0, r6, Operand(kPointerSizeLog2));
    __ add(r5, r7, r0);
    // r5 points past last arg.
    __ b(&entry);
    __ bind(&loop);
    __ LoadP(r8, MemOperand(r7));  // read next parameter
    __ addi(r7, r7, Operand(kPointerSize));
    __ LoadP(r0, MemOperand(r8));  // dereference handle
    __ push(r0);                   // push parameter
    __ bind(&entry);
    __ cmp(r7, r5);
    __ bne(&loop);

    // Initialize all JavaScript callee-saved registers, since they will be seen
    // by the garbage collector as part of handlers.
    __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
    __ mr(r14, r7);
    __ mr(r15, r7);
    __ mr(r16, r7);
    __ mr(r17, r7);

    // Invoke the code and pass argc as r3.
    __ mr(r3, r6);
    if (is_construct) {
      // No type feedback cell is available
      __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
      CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
      __ CallStub(&stub);
    } else {
      ParameterCount actual(r3);
      __ InvokeFunction(r4, actual, CALL_FUNCTION, NullCallWrapper());
    }
    // Exit the JS frame and remove the parameters (except function), and
    // return.
  }
  __ blr();
}
void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}
// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right. The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   o r4: the JS function object being called.
//   o cp: our context
//   o pp: the caller's constant pool pointer (if enabled)
//   o fp: the caller's frame pointer
//   o sp: stack pointer
//   o lr: return address
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-ppc.h for its layout.
// TODO(rmcilroy): We will need to include the current bytecode pointer in the
// frame.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ PushFixedFrame(r4);
  __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
  // Get the bytecode array from the function object and load it into
  // kInterpreterBytecodeArrayRegister.
  __ LoadP(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(kInterpreterBytecodeArrayRegister,
           FieldMemOperand(r3, SharedFunctionInfo::kFunctionDataOffset));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ TestIfSmi(kInterpreterBytecodeArrayRegister, r0);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r3, no_reg,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size from the BytecodeArray object.
    __ LoadP(r5, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                                 BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ sub(r6, sp, r5);
    __ LoadRoot(r0, Heap::kRealStackLimitRootIndex);
    __ cmpl(r6, r0);
    __ bge(&ok);
    __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file entries.
    // Note: there should always be at least one stack slot for the return
    // register in the register file.
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    Label loop_header;
    __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
    __ ShiftRightImm(r5, r5, Operand(kPointerSizeLog2));
    __ mtctr(r5);
    __ bind(&loop_header);
    __ push(r6);
    __ bdnz(&loop_header);
  }
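
  // The register-file initialization above is, in effect (sketch only):
  //   for (int i = 0; i < frame_size_in_bytes / kPointerSize; i++)
  //     push(undefined_value);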
  // TODO(rmcilroy): List of things not currently dealt with here but done in
  // fullcodegen's prologue:
  //  - Support profiler (specifically profiling_counter).
  //  - Call ProfileEntryHookStub when isolate has a function_entry_hook.
  //  - Allow simulator stop operations if FLAG_stop_at is set.
  //  - Deal with sloppy mode functions which need to replace the
  //    receiver with the global proxy when called as functions (without an
  //    explicit receiver object).
  //  - Code aging of the BytecodeArray object.
  //  - Supporting FLAG_trace.
  //
  // The following items are also not done here, and will probably be done
  // using explicit bytecodes instead:
  //  - Allocating a new local context if applicable.
  //  - Setting up a local binding to the this function, which is used in
  //    derived constructors with super calls.
  //  - Setting new.target if required.
  //  - Dealing with REST parameters (only if
  //    https://codereview.chromium.org/1235153006 doesn't land by then).
  //  - Dealing with argument objects.
  // Perform stack guard check.
  {
    Label ok;
    __ LoadRoot(r0, Heap::kStackLimitRootIndex);
    __ cmpl(sp, r0);
    __ bge(&ok);
    __ CallRuntime(Runtime::kStackGuard, 0);
    __ bind(&ok);
  }

  // Load bytecode offset and dispatch table into registers.
  __ mov(kInterpreterBytecodeOffsetRegister,
         Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
  __ LoadRoot(kInterpreterDispatchTableRegister,
              Heap::kInterpreterTableRootIndex);
  __ addi(kInterpreterDispatchTableRegister, kInterpreterDispatchTableRegister,
          Operand(FixedArray::kHeaderSize - kHeapObjectTag));

  // Dispatch to the first bytecode handler for the function.
  __ lbzx(r3, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
  __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
  __ LoadPX(ip, MemOperand(kInterpreterDispatchTableRegister, ip));
  // TODO(rmcilroy): Make dispatch table point to code entries to avoid
  // untagging and header removal.
  __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Call(ip);
}
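
// Bytecode dispatch above boils down to this sketch (illustrative only):
//   bytecode = bytecode_array[offset];              // lbzx
//   handler  = dispatch_table[bytecode];            // LoadPX
//   call(handler + Code::kHeaderSize - kHeapObjectTag);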
void Builtins::Generate_InterpreterExitTrampoline(MacroAssembler* masm) {
  // TODO(rmcilroy): List of things not currently dealt with here but done in
  // fullcodegen's EmitReturnSequence.
  //  - Supporting FLAG_trace for Runtime::TraceExit.
  //  - Support profiler (specifically decrementing profiling_counter
  //    appropriately and calling out to HandleInterrupts if necessary).

  // Load return value into r3.
  __ LoadP(r3,
           MemOperand(fp, -kPointerSize -
                              StandardFrameConstants::kFixedFrameSizeFromFp));
  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);
  // Drop receiver + arguments.
  __ Drop(1);  // TODO(rmcilroy): Get number of arguments from BytecodeArray.
  __ blr();
}
void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kCompileLazy);
  GenerateTailCallToReturnedCode(masm);
}


static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack.
  // Push function as parameter to the runtime call.
  __ Push(r4, r4);
  // Whether to compile in a background thread.
  __ LoadRoot(
      r0, concurrent ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
  __ push(r0);

  __ CallRuntime(Runtime::kCompileOptimized, 2);
  // Restore receiver.
  __ Pop(r4);
}


void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  CallCompileOptimized(masm, false);
  GenerateTailCallToReturnedCode(masm);
}


void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  CallCompileOptimized(masm, true);
  GenerateTailCallToReturnedCode(masm);
}
static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // Point r3 at the start of the PlatformCodeAge sequence.
  __ mr(r3, ip);

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   r3 - contains return address (beginning of patch sequence)
  //   r4 - isolate
  //   lr - return address
  FrameScope scope(masm, StackFrame::MANUAL);
  __ mflr(r0);
  __ MultiPush(r0.bit() | r3.bit() | r4.bit() | fp.bit());
  __ PrepareCallCFunction(2, 0, r5);
  __ mov(r4, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  __ MultiPop(r0.bit() | r3.bit() | r4.bit() | fp.bit());
  __ mtlr(r0);
  __ mr(ip, r3);
  __ Jump(ip);
}

#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                  \
  void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }                                                           \
  void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(  \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // Point r3 at the start of the PlatformCodeAge sequence.
  __ mr(r3, ip);

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   r3 - contains return address (beginning of patch sequence)
  //   r4 - isolate
  //   lr - return address
  FrameScope scope(masm, StackFrame::MANUAL);
  __ mflr(r0);
  __ MultiPush(r0.bit() | r3.bit() | r4.bit() | fp.bit());
  __ PrepareCallCFunction(2, 0, r5);
  __ mov(r4, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
      2);
  __ MultiPop(r0.bit() | r3.bit() | r4.bit() | fp.bit());
  __ mtlr(r0);
  __ mr(ip, r3);

  // Perform prologue operations usually performed by the young code stub.
  __ PushFixedFrame(r4);
  __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));

  // Jump to point after the code-age stub.
  __ addi(r3, ip, Operand(kNoCodeAgeSequenceLength));
  __ Jump(r3);
}


void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}


void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
  Generate_MarkCodeAsExecutedOnce(masm);
}
static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    __ MultiPush(kJSCallerSaved | kCalleeSaved);
    // Pass the function and deoptimization type to the runtime system.
    __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
    __ MultiPop(kJSCallerSaved | kCalleeSaved);
  }

  __ addi(sp, sp, Operand(kPointerSize));  // Ignore state
  __ blr();                                // Jump to miss handler
}


void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}


void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Pass the function and deoptimization type to the runtime system.
    __ LoadSmiLiteral(r3, Smi::FromInt(static_cast<int>(type)));
    __ push(r3);
    __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
  }

  // Get the full codegen state from the stack and untag it -> r9.
  __ LoadP(r9, MemOperand(sp, 0 * kPointerSize));
  __ SmiUntag(r9);
  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ cmpi(r9, Operand(FullCodeGenerator::NO_REGISTERS));
  __ bne(&with_tos_register);
  __ addi(sp, sp, Operand(1 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&with_tos_register);
  __ LoadP(r3, MemOperand(sp, 1 * kPointerSize));
  __ cmpi(r9, Operand(FullCodeGenerator::TOS_REG));
  __ bne(&unknown_state);
  __ addi(sp, sp, Operand(2 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&unknown_state);
  __ stop("no cases left");
}
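
// The state switch above, as a sketch (illustrative only):
//   switch (state) {
//     case NO_REGISTERS: sp += 1 * kPointerSize; return;  // drop state
//     case TOS_REG: r3 = sp[1]; sp += 2 * kPointerSize; return;
//     default: unreachable("no cases left");
//   }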
void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}
void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ push(r3);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
  }

  // If the code object is null, just return to the unoptimized code.
  Label skip;
  __ CmpSmiLiteral(r3, Smi::FromInt(0), r0);
  __ bne(&skip);
  __ Ret();

  __ bind(&skip);

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ LoadP(r4, FieldMemOperand(r3, Code::kDeoptimizationDataOffset));

  {
    ConstantPoolUnavailableScope constant_pool_unavailable(masm);
    __ addi(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag));  // Code start

    if (FLAG_enable_embedded_constant_pool) {
      __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r3);
    }

    // Load the OSR entrypoint offset from the deoptimization data.
    // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
    __ LoadP(r4, FieldMemOperand(
                     r4, FixedArray::OffsetOfElementAt(
                             DeoptimizationInputData::kOsrPcOffsetIndex)));
    __ SmiUntag(r4);

    // Compute the target address = code start + osr_offset
    __ add(r0, r3, r4);

    // And "return" to the OSR entry point of the function.
    __ mtlr(r0);
    __ blr();
  }
}


void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  // We check the stack limit as indicator that recompilation might be done.
  Label ok;
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  __ cmpl(sp, ip);
  __ bge(&ok);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kStackGuard, 0);
  }
  __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
          RelocInfo::CODE_TARGET);

  __ bind(&ok);
  __ Ret();
}
void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // r3: actual number of arguments
  {
    Label done;
    __ cmpi(r3, Operand::Zero());
    __ bne(&done);
    __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
    __ push(r5);
    __ addi(r3, r3, Operand(1));
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack, check
  //    if it is a function.
  // r3: actual number of arguments
  Label slow, non_function;
  __ ShiftLeftImm(r4, r3, Operand(kPointerSizeLog2));
  __ add(r4, sp, r4);
  __ LoadP(r4, MemOperand(r4));
  __ JumpIfSmi(r4, &non_function);
  __ CompareObjectType(r4, r5, r5, JS_FUNCTION_TYPE);
  __ bne(&slow);

  // 3a. Patch the first argument if necessary when calling a function.
  // r3: actual number of arguments
  // r4: function
  Label shift_arguments;
  __ li(r7, Operand::Zero());  // indicate regular JS_FUNCTION
  {
    Label convert_to_object, use_global_proxy, patch_receiver;
    // Change context eagerly in case we need the global receiver.
    __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));

    // Do not transform the receiver for strict mode functions.
    __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
    __ lwz(r6, FieldMemOperand(r5, SharedFunctionInfo::kCompilerHintsOffset));
    __ TestBit(r6,
#if V8_TARGET_ARCH_PPC64
               SharedFunctionInfo::kStrictModeFunction,
#else
               SharedFunctionInfo::kStrictModeFunction + kSmiTagSize,
#endif
               r0);
    __ bne(&shift_arguments, cr0);

    // Do not transform the receiver for native (Compilerhints already in r6).
    __ TestBit(r6,
#if V8_TARGET_ARCH_PPC64
               SharedFunctionInfo::kNative,
#else
               SharedFunctionInfo::kNative + kSmiTagSize,
#endif
               r0);
    __ bne(&shift_arguments, cr0);

    // Compute the receiver in sloppy mode.
    __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
    __ add(r5, sp, ip);
    __ LoadP(r5, MemOperand(r5, -kPointerSize));
    // r3: actual number of arguments
    // r4: function
    // r5: first argument
    __ JumpIfSmi(r5, &convert_to_object);

    __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
    __ cmp(r5, r6);
    __ beq(&use_global_proxy);
    __ LoadRoot(r6, Heap::kNullValueRootIndex);
    __ cmp(r5, r6);
    __ beq(&use_global_proxy);

    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CompareObjectType(r5, r6, r6, FIRST_SPEC_OBJECT_TYPE);
    __ bge(&shift_arguments);

    __ bind(&convert_to_object);

    {
      // Enter an internal frame in order to preserve argument count.
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ SmiTag(r3);
      __ Push(r3, r5);

      ToObjectStub stub(masm->isolate());
      __ CallStub(&stub);
      __ mr(r5, r3);

      __ pop(r3);
      __ SmiUntag(r3);

      // Exit the internal frame.
    }

    // Restore the function to r4, and the flag to r7.
    __ ShiftLeftImm(r7, r3, Operand(kPointerSizeLog2));
    __ add(r7, sp, r7);
    __ LoadP(r4, MemOperand(r7));
    __ li(r7, Operand::Zero());
    __ b(&patch_receiver);

    __ bind(&use_global_proxy);
    __ LoadP(r5, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
    __ LoadP(r5, FieldMemOperand(r5, GlobalObject::kGlobalProxyOffset));

    __ bind(&patch_receiver);
    __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
    __ add(r6, sp, ip);
    __ StoreP(r5, MemOperand(r6, -kPointerSize));

    __ b(&shift_arguments);
  }

  // 3b. Check for function proxy.
  __ bind(&slow);
  __ li(r7, Operand(1, RelocInfo::NONE32));  // indicate function proxy
  __ cmpi(r5, Operand(JS_FUNCTION_PROXY_TYPE));
  __ beq(&shift_arguments);
  __ bind(&non_function);
  __ li(r7, Operand(2, RelocInfo::NONE32));  // indicate non-function

  // 3c. Patch the first argument when calling a non-function. The
  //     CALL_NON_FUNCTION builtin expects the non-function callee as
  //     receiver, so overwrite the first argument which will ultimately
  //     become the receiver.
  // r3: actual number of arguments
  // r4: function
  // r7: call type (0: JS function, 1: function proxy, 2: non-function)
  __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
  __ add(r5, sp, ip);
  __ StoreP(r4, MemOperand(r5, -kPointerSize));

  // 4. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver). Adjust argument count to make
  //    the original first argument the new receiver.
  // r3: actual number of arguments
  // r4: function
  // r7: call type (0: JS function, 1: function proxy, 2: non-function)
  __ bind(&shift_arguments);
  {
    Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
    __ add(r5, sp, ip);

    __ bind(&loop);
    __ LoadP(ip, MemOperand(r5, -kPointerSize));
    __ StoreP(ip, MemOperand(r5));
    __ subi(r5, r5, Operand(kPointerSize));
    __ cmp(r5, sp);
    __ bne(&loop);
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ subi(r3, r3, Operand(1));
    __ pop();
  }
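
  // The shift loop above is, in effect (sketch only):
  //   for (slot = &receiver; slot != sp; slot -= kPointerSize)
  //     *slot = *(slot - kPointerSize);
  // Every argument moves one slot towards the old receiver; the duplicate
  // left at the top of the stack is popped and argc is decremented.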
  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
  //     or a function proxy via CALL_FUNCTION_PROXY.
  // r3: actual number of arguments
  // r4: function
  // r7: call type (0: JS function, 1: function proxy, 2: non-function)
  {
    Label function, non_proxy;
    __ cmpi(r7, Operand::Zero());
    __ beq(&function);
    // Expected number of arguments is 0 for CALL_NON_FUNCTION.
    __ li(r5, Operand::Zero());
    __ cmpi(r7, Operand(1));
    __ bne(&non_proxy);

    __ push(r4);  // re-add proxy object as additional argument
    __ addi(r3, r3, Operand(1));
    __ GetBuiltinFunction(r4, Builtins::CALL_FUNCTION_PROXY);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    __ bind(&non_proxy);
    __ GetBuiltinFunction(r4, Builtins::CALL_NON_FUNCTION);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
    __ bind(&function);
  }

  // 5b. Get the code to call from the function and check that the number of
  //     expected arguments matches what we're providing. If so, jump
  //     (tail-call) to the code in register ip without checking arguments.
  // r3: actual number of arguments
  // r4: function
  __ LoadP(r6, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ LoadWordArith(
      r5, FieldMemOperand(r6, SharedFunctionInfo::kFormalParameterCountOffset));
#if !V8_TARGET_ARCH_PPC64
  __ SmiUntag(r5);
#endif
  __ cmp(r5, r3);  // Check formal and actual parameter counts.
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET, ne);

  __ LoadP(ip, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
  ParameterCount expected(0);
  __ InvokeCode(ip, expected, expected, JUMP_FUNCTION, NullCallWrapper());
}
static void Generate_PushAppliedArguments(MacroAssembler* masm,
                                          const int argumentsOffset,
                                          const int indexOffset,
                                          const int limitOffset) {
  Register receiver = LoadDescriptor::ReceiverRegister();
  Register key = LoadDescriptor::NameRegister();
  Register slot = LoadDescriptor::SlotRegister();
  Register vector = LoadWithVectorDescriptor::VectorRegister();

  // Copy all arguments from the array to the stack.
  Label entry, loop;
  __ LoadP(key, MemOperand(fp, indexOffset));
  __ b(&entry);
  __ bind(&loop);
  __ LoadP(receiver, MemOperand(fp, argumentsOffset));

  // Use inline caching to speed up access to arguments.
  Code::Kind kinds[] = {Code::KEYED_LOAD_IC};
  FeedbackVectorSpec spec(0, 1, kinds);
  Handle<TypeFeedbackVector> feedback_vector =
      masm->isolate()->factory()->NewTypeFeedbackVector(&spec);
  int index = feedback_vector->GetIndex(FeedbackVectorICSlot(0));
  __ LoadSmiLiteral(slot, Smi::FromInt(index));
  __ Move(vector, feedback_vector);
  Handle<Code> ic =
      KeyedLoadICStub(masm->isolate(), LoadICState(kNoExtraICState)).GetCode();
  __ Call(ic, RelocInfo::CODE_TARGET);

  // Push the nth argument.
  __ push(r3);

  // Update the index on the stack and in register key.
  __ LoadP(key, MemOperand(fp, indexOffset));
  __ AddSmiLiteral(key, key, Smi::FromInt(1), r0);
  __ StoreP(key, MemOperand(fp, indexOffset));

  // Test if the copy loop has finished copying all the elements from the
  // arguments object.
  __ bind(&entry);
  __ LoadP(r0, MemOperand(fp, limitOffset));
  __ cmp(key, r0);
  __ bne(&loop);

  // On exit, the pushed arguments count is in r3, untagged
  __ SmiUntag(r3, key);
}
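
// The helper above amounts to this loop (sketch; key and limit live as smis
// in the frame slots indexOffset and limitOffset):
//   for (key = index; key != limit; key += 1)
//     push(KeyedLoadIC(arguments_object, key));
//   r3 = SmiUntag(key);  // number of arguments pushed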
// Used by FunctionApply and ReflectApply
static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) {
  const int kFormalParameters = targetIsArgument ? 3 : 2;
  const int kStackSize = kFormalParameters + 1;

  {
    FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL);
    const int kArgumentsOffset = kFPOnStackSize + kPCOnStackSize;
    const int kReceiverOffset = kArgumentsOffset + kPointerSize;
    const int kFunctionOffset = kReceiverOffset + kPointerSize;

    __ LoadP(r3, MemOperand(fp, kFunctionOffset));  // get the function
    __ push(r3);
    __ LoadP(r3, MemOperand(fp, kArgumentsOffset));  // get the args array
    __ push(r3);
    if (targetIsArgument) {
      __ InvokeBuiltin(Builtins::REFLECT_APPLY_PREPARE, CALL_FUNCTION);
    } else {
      __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
    }

    Generate_CheckStackOverflow(masm, kFunctionOffset, r3, kArgcIsSmiTagged);

    // Push current limit and index.
    const int kIndexOffset =
        StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize);
    const int kLimitOffset =
        StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize);
    __ li(r4, Operand::Zero());
    __ Push(r3, r4);  // limit and initial index.

    // Get the receiver.
    __ LoadP(r3, MemOperand(fp, kReceiverOffset));

    // Check that the function is a JS function (otherwise it must be a proxy).
    Label push_receiver;
    __ LoadP(r4, MemOperand(fp, kFunctionOffset));
    __ CompareObjectType(r4, r5, r5, JS_FUNCTION_TYPE);
    __ bne(&push_receiver);

    // Change context eagerly to get the right global object if necessary.
    __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
    // Load the shared function info while the function is still in r4.
    __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));

    // Compute the receiver.
    // Do not transform the receiver for strict mode functions.
    Label call_to_object, use_global_proxy;
    __ lwz(r5, FieldMemOperand(r5, SharedFunctionInfo::kCompilerHintsOffset));
    __ TestBit(r5,
#if V8_TARGET_ARCH_PPC64
               SharedFunctionInfo::kStrictModeFunction,
#else
               SharedFunctionInfo::kStrictModeFunction + kSmiTagSize,
#endif
               r0);
    __ bne(&push_receiver, cr0);

    // Do not transform the receiver for native (Compilerhints already in r5).
    __ TestBit(r5,
#if V8_TARGET_ARCH_PPC64
               SharedFunctionInfo::kNative,
#else
               SharedFunctionInfo::kNative + kSmiTagSize,
#endif
               r0);
    __ bne(&push_receiver, cr0);

    // Compute the receiver in sloppy mode.
    __ JumpIfSmi(r3, &call_to_object);
    __ LoadRoot(r4, Heap::kNullValueRootIndex);
    __ cmp(r3, r4);
    __ beq(&use_global_proxy);
    __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
    __ cmp(r3, r4);
    __ beq(&use_global_proxy);

    // Check if the receiver is already a JavaScript object.
    // r3: receiver
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CompareObjectType(r3, r4, r4, FIRST_SPEC_OBJECT_TYPE);
    __ bge(&push_receiver);

    // Convert the receiver to a regular object.
    // r3: receiver
    __ bind(&call_to_object);
    ToObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    __ b(&push_receiver);

    __ bind(&use_global_proxy);
    __ LoadP(r3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
    __ LoadP(r3, FieldMemOperand(r3, GlobalObject::kGlobalProxyOffset));

    // Push the receiver.
    // r3: receiver
    __ bind(&push_receiver);
    __ push(r3);

    // Copy all arguments from the array to the stack.
    Generate_PushAppliedArguments(masm, kArgumentsOffset, kIndexOffset,
                                  kLimitOffset);

    // Call the function.
    Label call_proxy;
    ParameterCount actual(r3);
    __ LoadP(r4, MemOperand(fp, kFunctionOffset));
    __ CompareObjectType(r4, r5, r5, JS_FUNCTION_TYPE);
    __ bne(&call_proxy);
    __ InvokeFunction(r4, actual, CALL_FUNCTION, NullCallWrapper());

    __ LeaveFrame(StackFrame::INTERNAL, kStackSize * kPointerSize);
    __ blr();

    // Call the function proxy.
    __ bind(&call_proxy);
    __ push(r4);  // add function proxy as last argument
    __ addi(r3, r3, Operand(1));
    __ li(r5, Operand::Zero());
    __ GetBuiltinFunction(r4, Builtins::CALL_FUNCTION_PROXY);
    __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    // Tear down the internal frame and remove function, receiver and args.
  }
  __ addi(sp, sp, Operand(kStackSize * kPointerSize));
  __ blr();
}
static void Generate_ConstructHelper(MacroAssembler* masm) {
  const int kFormalParameters = 3;
  const int kStackSize = kFormalParameters + 1;

  {
    FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL);
    const int kNewTargetOffset = kFPOnStackSize + kPCOnStackSize;
    const int kArgumentsOffset = kNewTargetOffset + kPointerSize;
    const int kFunctionOffset = kArgumentsOffset + kPointerSize;

    // If newTarget is not supplied, set it to constructor
    Label validate_arguments;
    __ LoadP(r3, MemOperand(fp, kNewTargetOffset));
    __ CompareRoot(r3, Heap::kUndefinedValueRootIndex);
    __ bne(&validate_arguments);
    __ LoadP(r3, MemOperand(fp, kFunctionOffset));
    __ StoreP(r3, MemOperand(fp, kNewTargetOffset));

    // Validate arguments
    __ bind(&validate_arguments);
    __ LoadP(r3, MemOperand(fp, kFunctionOffset));  // get the function
    __ push(r3);
    __ LoadP(r3, MemOperand(fp, kArgumentsOffset));  // get the args array
    __ push(r3);
    __ LoadP(r3, MemOperand(fp, kNewTargetOffset));  // get the new.target
    __ push(r3);
    __ InvokeBuiltin(Builtins::REFLECT_CONSTRUCT_PREPARE, CALL_FUNCTION);

    Generate_CheckStackOverflow(masm, kFunctionOffset, r3, kArgcIsSmiTagged);

    // Push current limit and index.
    const int kIndexOffset =
        StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize);
    const int kLimitOffset =
        StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize);
    __ li(r4, Operand::Zero());
    __ Push(r3, r4);  // limit and initial index.
    // Push the constructor function as callee.
    __ LoadP(r3, MemOperand(fp, kFunctionOffset));
    __ push(r3);

    // Copy all arguments from the array to the stack.
    Generate_PushAppliedArguments(masm, kArgumentsOffset, kIndexOffset,
                                  kLimitOffset);

    // Use undefined feedback vector
    __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
    __ LoadP(r4, MemOperand(fp, kFunctionOffset));
    __ LoadP(r7, MemOperand(fp, kNewTargetOffset));

    // Call the function.
    CallConstructStub stub(masm->isolate(), SUPER_CONSTRUCTOR_CALL);
    __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);

    // Leave internal frame.
  }
  __ addi(sp, sp, Operand(kStackSize * kPointerSize));
  __ blr();
}
void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  Generate_ApplyHelper(masm, false);
}


void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  Generate_ApplyHelper(masm, true);
}


void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  Generate_ConstructHelper(masm);
}
static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
                                      Label* stack_overflow) {
  // ----------- S t a t e -------------
  //  -- r3 : actual number of arguments
  //  -- r4 : function (passed through to callee)
  //  -- r5 : expected number of arguments
  // -----------------------------------
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  __ LoadRoot(r8, Heap::kRealStackLimitRootIndex);
  // Make r8 the space we have left. The stack might already be overflowed
  // here which will cause r8 to become negative.
  __ sub(r8, sp, r8);
  // Check if the arguments will overflow the stack.
  __ ShiftLeftImm(r0, r5, Operand(kPointerSizeLog2));
  __ cmp(r8, r0);
  __ ble(stack_overflow);  // Signed comparison.
}
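
// Same idea as Generate_CheckStackOverflow, but keyed to the *expected*
// argument count in r5 (sketch, not generated code):
//   if (sp - real_stack_limit <= expected_argc * kPointerSize)
//     goto stack_overflow;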
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ SmiTag(r3);
  __ LoadSmiLiteral(r7, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ mflr(r0);
  __ push(r0);
  if (FLAG_enable_embedded_constant_pool) {
    __ Push(fp, kConstantPoolRegister, r7, r4, r3);
  } else {
    __ Push(fp, r7, r4, r3);
  }
  __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                          kPointerSize));
}


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then tear down the parameters.
  __ LoadP(r4, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                                kPointerSize)));
  int stack_adjustment = kPointerSize;  // adjust for receiver
  __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR, stack_adjustment);
  __ SmiToPtrArrayOffset(r0, r4);
  __ add(sp, sp, r0);
  __ blr();
}
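
// Teardown sketch (illustrative): reload the smi-tagged actual argc that
// EnterArgumentsAdaptorFrame saved in the frame, drop the frame plus the
// receiver, then drop the actual arguments:
//   r4 = frame[argc_slot]; LeaveFrame(+receiver); sp += argc * kPointerSize;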
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : actual number of arguments
  //  -- r4 : function (passed through to callee)
  //  -- r5 : expected number of arguments
  // -----------------------------------

  Label stack_overflow;
  ArgumentAdaptorStackCheck(masm, &stack_overflow);
  Label invoke, dont_adapt_arguments;

  Label enough, too_few;
  __ LoadP(ip, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
  __ cmp(r3, r5);
  __ blt(&too_few);
  __ cmpi(r5, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ beq(&dont_adapt_arguments);

  {  // Enough parameters: actual >= expected
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into r3 and copy end address into r5.
    // r3: actual number of arguments as a smi
    // r4: function
    // r5: expected number of arguments
    // ip: code entry to call
    __ SmiToPtrArrayOffset(r3, r3);
    __ add(r3, r3, fp);
    // adjust for return address and receiver
    __ addi(r3, r3, Operand(2 * kPointerSize));
    __ ShiftLeftImm(r5, r5, Operand(kPointerSizeLog2));
    __ sub(r5, r3, r5);

    // Copy the arguments (including the receiver) to the new stack frame.
    // r3: copy start address
    // r4: function
    // r5: copy end address
    // ip: code entry to call

    Label copy;
    __ bind(&copy);
    __ LoadP(r0, MemOperand(r3, 0));
    __ push(r0);
    __ cmp(r3, r5);  // Compare before moving to next argument.
    __ subi(r3, r3, Operand(kPointerSize));
    __ bne(&copy);

    __ b(&invoke);
  }

  {  // Too few parameters: Actual < expected
    __ bind(&too_few);

    // If the function is strong we need to throw an error.
    Label no_strong_error;
    __ LoadP(r7, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
    __ lwz(r8, FieldMemOperand(r7, SharedFunctionInfo::kCompilerHintsOffset));
    __ TestBit(r8,
#if V8_TARGET_ARCH_PPC64
               SharedFunctionInfo::kStrongModeFunction,
#else
               SharedFunctionInfo::kStrongModeFunction + kSmiTagSize,
#endif
               r0);
    __ beq(&no_strong_error, cr0);

    // What we really care about is the required number of arguments.
    __ lwz(r7, FieldMemOperand(r7, SharedFunctionInfo::kLengthOffset));
#if V8_TARGET_ARCH_PPC64
    // See comment near kLengthOffset in src/objects.h
    __ srawi(r7, r7, kSmiTagSize);
#else
    __ SmiUntag(r7);
#endif
    __ cmp(r3, r7);
    __ bge(&no_strong_error);

    {
      FrameScope frame(masm, StackFrame::MANUAL);
      EnterArgumentsAdaptorFrame(masm);
      __ CallRuntime(Runtime::kThrowStrongModeTooFewArguments, 0);
    }

    __ bind(&no_strong_error);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into r3; the copy end address is fp.
    // r3: actual number of arguments as a smi
    // r4: function
    // r5: expected number of arguments
    // ip: code entry to call
    __ SmiToPtrArrayOffset(r3, r3);
    __ add(r3, r3, fp);

    // Copy the arguments (including the receiver) to the new stack frame.
    // r3: copy start address
    // r4: function
    // r5: expected number of arguments
    // ip: code entry to call
    Label copy;
    __ bind(&copy);
    // Adjust load for return address and receiver.
    __ LoadP(r0, MemOperand(r3, 2 * kPointerSize));
    __ push(r0);
    __ cmp(r3, fp);  // Compare before moving to next argument.
    __ subi(r3, r3, Operand(kPointerSize));
    __ bne(&copy);

    // Fill the remaining expected arguments with undefined.
    // r4: function
    // r5: expected number of arguments
    // ip: code entry to call
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
    __ ShiftLeftImm(r5, r5, Operand(kPointerSizeLog2));
    __ sub(r5, fp, r5);
    // Adjust for frame.
    __ subi(r5, r5, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                            2 * kPointerSize));

    Label fill;
    __ bind(&fill);
    __ push(r0);
    __ cmp(sp, r5);
    __ bne(&fill);
  }

  // Call the entry point.
  __ bind(&invoke);
  __ CallJSEntry(ip);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ blr();


  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ JumpToJSEntry(ip);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    EnterArgumentsAdaptorFrame(masm);
    __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
    __ bkpt(0);
  }
}
#undef __
}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_PPC