1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
9 #include "src/codegen.h"
10 #include "src/debug.h"
11 #include "src/deoptimizer.h"
12 #include "src/full-codegen.h"
13 #include "src/runtime/runtime.h"
19 #define __ ACCESS_MASM(masm)
// Builtins::Generate_Adaptor — trampoline that forwards a builtin call to
// the C++ runtime function identified by `id`. It bumps r3 to the full
// argument count (receiver + extra args) and tail-jumps to the external
// reference; it never returns here.
// NOTE(review): this listing is a sampled extract — the embedded original
// line numbers jump (37->41, 47->51), so the body of the
// NEEDS_CALLED_FUNCTION branch and the matching `} else {` are missing
// from this view. Confirm against the full source before editing logic.
22 void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id,
23 BuiltinExtraArguments extra_args) {
24 // ----------- S t a t e -------------
25 // -- r3 : number of arguments excluding receiver
26 // -- r4 : called function (only guaranteed when
27 // extra_args requires it)
29 // -- sp[0] : last argument
31 // -- sp[4 * (argc - 1)] : first argument (argc == r0)
32 // -- sp[4 * argc] : receiver
33 // -----------------------------------
35 // Insert extra arguments.
36 int num_extra_args = 0;
37 if (extra_args == NEEDS_CALLED_FUNCTION) {
41 DCHECK(extra_args == NO_EXTRA_ARGUMENTS);
44 // JumpToExternalReference expects r0 to contain the number of arguments
45 // including the receiver and the extra arguments.
// r3 += num_extra_args + 1: +1 accounts for the receiver.
46 __ addi(r3, r3, Operand(num_extra_args + 1));
47 __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
51 // Load the built-in InternalArray function from the current context.
// Helper: leaves the InternalArray JSFunction in `result` by walking
// cp (context) -> global object -> native context -> function slot.
// NOTE(review): sampled listing — the second parameter (presumably
// `Register result`) and the first LoadP into `result` are on lines
// missing from this extract; verify against the full source.
52 static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
54 // Load the native context.
57 MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
58 __ LoadP(result, FieldMemOperand(result, GlobalObject::kNativeContextOffset));
59 // Load the InternalArray function from the native context.
61 MemOperand(result, Context::SlotOffset(
62 Context::INTERNAL_ARRAY_FUNCTION_INDEX)));
66 // Load the built-in Array function from the current context.
// Helper: leaves the Array JSFunction in `result`, mirroring
// GenerateLoadInternalArrayFunction but reading ARRAY_FUNCTION_INDEX.
// NOTE(review): sampled listing — the LoadP opcodes opening the two
// load sequences are on lines missing from this extract.
67 static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
68 // Load the native context.
71 MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
72 __ LoadP(result, FieldMemOperand(result, GlobalObject::kNativeContextOffset));
73 // Load the Array function from the native context.
76 MemOperand(result, Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
// Builtins::Generate_InternalArrayCode — entry point for calling the
// InternalArray function as a normal function. Loads the function into
// r4, optionally verifies its initial map in debug builds, then tail
// calls InternalArrayConstructorStub.
// NOTE(review): sampled listing — e.g. the TestIfSmi preceding the first
// Assert (original line ~94) is missing from this view.
80 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
81 // ----------- S t a t e -------------
82 // -- r3 : number of arguments
83 // -- lr : return address
84 // -- sp[...]: constructor arguments
85 // -----------------------------------
86 Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
88 // Get the InternalArray function.
89 GenerateLoadInternalArrayFunction(masm, r4);
91 if (FLAG_debug_code) {
92 // Initial map for the builtin InternalArray functions should be maps.
93 __ LoadP(r5, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
// Assert the initial map is a non-smi of MAP_TYPE (debug builds only).
95 __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction, cr0);
96 __ CompareObjectType(r5, r6, r7, MAP_TYPE);
97 __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
100 // Run the native code for the InternalArray function called as a normal
103 InternalArrayConstructorStub stub(masm->isolate());
104 __ TailCallStub(&stub);
// Builtins::Generate_ArrayCode — entry point for calling the Array
// function as a normal function. Parallels Generate_InternalArrayCode:
// loads the Array function into r4, debug-checks its initial map, loads
// undefined into r5, and tail calls ArrayConstructorStub.
// NOTE(review): sampled listing — intervening lines are missing between
// some statements (embedded line numbers jump).
108 void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
109 // ----------- S t a t e -------------
110 // -- r3 : number of arguments
111 // -- lr : return address
112 // -- sp[...]: constructor arguments
113 // -----------------------------------
114 Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
116 // Get the Array function.
117 GenerateLoadArrayFunction(masm, r4);
119 if (FLAG_debug_code) {
120 // Initial map for the builtin Array functions should be maps.
121 __ LoadP(r5, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
122 __ TestIfSmi(r5, r0);
123 __ Assert(ne, kUnexpectedInitialMapForArrayFunction, cr0);
124 __ CompareObjectType(r5, r6, r7, MAP_TYPE);
125 __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
129 // Run the native code for the Array function called as a normal function.
// r5 = undefined acts as the AllocationSite argument for the stub.
131 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
132 ArrayConstructorStub stub(masm->isolate());
133 __ TailCallStub(&stub);
// Builtins::Generate_StringConstructCode — implements `new String(arg)`.
// Fast path: look the argument up in the number-string cache, allocate a
// JSValue wrapper, and store the string into its value slot. Slow paths
// convert the argument via the TO_STRING builtin, substitute the empty
// string when there are no arguments, or fall back to
// Runtime::kNewStringWrapper when allocation needs GC.
// NOTE(review): sampled listing — several statements are missing from
// this extract (e.g. the Drop after loading args, parts of the
// LookupNumberStringCache argument list, the returns closing each
// FrameScope); treat control flow here as indicative, not complete.
137 void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
138 // ----------- S t a t e -------------
139 // -- r3 : number of arguments
140 // -- r4 : constructor function
141 // -- lr : return address
142 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
143 // -- sp[argc * 4] : receiver
144 // -----------------------------------
145 Counters* counters = masm->isolate()->counters();
146 __ IncrementCounter(counters->string_ctor_calls(), 1, r5, r6);
148 Register function = r4;
149 if (FLAG_debug_code) {
// Debug-only: verify r4 really is the global String function.
150 __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, r5);
151 __ cmp(function, r5);
152 __ Assert(eq, kUnexpectedStringFunction);
155 // Load the first arguments in r3 and get rid of the rest.
157 __ cmpi(r3, Operand::Zero());
158 __ beq(&no_arguments);
159 // First args = sp[(argc - 1) * 4].
160 __ subi(r3, r3, Operand(1));
161 __ ShiftLeftImm(r3, r3, Operand(kPointerSizeLog2));
163 __ LoadP(r3, MemOperand(sp));
164 // sp now point to args[0], drop args[0] + receiver.
167 Register argument = r5;
168 Label not_cached, argument_is_string;
// Fast path: number->string cache hit leaves the string in `argument`.
169 __ LookupNumberStringCache(r3, // Input.
175 __ IncrementCounter(counters->string_ctor_cached_number(), 1, r6, r7);
176 __ bind(&argument_is_string);
178 // ----------- S t a t e -------------
179 // -- r5 : argument converted to string
180 // -- r4 : constructor function
181 // -- lr : return address
182 // -----------------------------------
// Allocate the JSValue wrapper; bail to &gc_required on failure.
185 __ Allocate(JSValue::kSize,
189 &gc_required, TAG_OBJECT);
191 // Initialising the String Object.
193 __ LoadGlobalFunctionInitialMap(function, map, r7);
194 if (FLAG_debug_code) {
// Debug-only: the wrapper map must describe exactly JSValue::kSize
// with zero unused property fields.
195 __ lbz(r7, FieldMemOperand(map, Map::kInstanceSizeOffset));
196 __ cmpi(r7, Operand(JSValue::kSize >> kPointerSizeLog2));
197 __ Assert(eq, kUnexpectedStringWrapperInstanceSize);
198 __ lbz(r7, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
199 __ cmpi(r7, Operand::Zero());
200 __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper);
202 __ StoreP(map, FieldMemOperand(r3, HeapObject::kMapOffset), r0);
204 __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
205 __ StoreP(r6, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0);
206 __ StoreP(r6, FieldMemOperand(r3, JSObject::kElementsOffset), r0);
208 __ StoreP(argument, FieldMemOperand(r3, JSValue::kValueOffset), r0);
210 // Ensure the object is fully initialized.
211 STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
215 // The argument was not found in the number to string cache. Check
216 // if it's a string already before calling the conversion builtin.
217 Label convert_argument;
218 __ bind(&not_cached);
219 __ JumpIfSmi(r3, &convert_argument);
222 __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset));
223 __ lbz(r6, FieldMemOperand(r5, Map::kInstanceTypeOffset));
224 STATIC_ASSERT(kNotStringTag != 0);
225 __ andi(r0, r6, Operand(kIsNotStringMask));
226 __ bne(&convert_argument, cr0);
228 __ IncrementCounter(counters->string_ctor_conversions(), 1, r6, r7);
229 __ b(&argument_is_string);
231 // Invoke the conversion builtin and put the result into r5.
232 __ bind(&convert_argument);
233 __ push(function); // Preserve the function.
234 __ IncrementCounter(counters->string_ctor_conversions(), 1, r6, r7);
236 FrameScope scope(masm, StackFrame::INTERNAL);
238 __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
242 __ b(&argument_is_string);
244 // Load the empty string into r5, remove the receiver from the
245 // stack, and jump back to the case where the argument is a string.
246 __ bind(&no_arguments);
247 __ LoadRoot(argument, Heap::kempty_stringRootIndex);
249 __ b(&argument_is_string);
251 // At this point the argument is already a string. Call runtime to
252 // create a string wrapper.
253 __ bind(&gc_required);
254 __ IncrementCounter(counters->string_ctor_gc_required(), 1, r6, r7);
256 FrameScope scope(masm, StackFrame::INTERNAL);
258 __ CallRuntime(Runtime::kNewStringWrapper, 1);
// CallRuntimePassFunction — inside an internal frame, passes the current
// function (r4, per the pushes hidden between the sampled lines) as the
// single argument to the given runtime function.
// NOTE(review): sampled listing — the push instructions (original lines
// ~269-270) are missing from this extract.
264 static void CallRuntimePassFunction(MacroAssembler* masm,
265 Runtime::FunctionId function_id) {
266 FrameScope scope(masm, StackFrame::INTERNAL);
267 // Push a copy of the function onto the stack.
268 // Push function as parameter to the runtime call.
271 __ CallRuntime(function_id, 1);
// GenerateTailCallToSharedCode — tail-jumps to the unoptimized code held
// in r4's SharedFunctionInfo: ip = shared->code + header offset; jump.
277 static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
278 __ LoadP(ip, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
279 __ LoadP(ip, FieldMemOperand(ip, SharedFunctionInfo::kCodeOffset));
// Skip the Code object header to reach the first instruction.
280 __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
281 __ JumpToJSEntry(ip);
// GenerateTailCallToReturnedCode — tail-jumps into the Code object the
// preceding runtime call returned in r3 (entry = r3 + header offset).
285 static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
286 __ addi(ip, r3, Operand(Code::kHeaderSize - kHeapObjectTag));
287 __ JumpToJSEntry(ip);
// Builtins::Generate_InOptimizationQueue — while a function waits in the
// optimization queue, use the stack-limit check as a cheap cue: if an
// interrupt is pending, try installing optimized code via
// kTryInstallOptimizedCode; otherwise fall back to the shared
// (unoptimized) code.
// NOTE(review): sampled listing — the cmpl/branch between the LoadRoot
// and the runtime call (original lines ~299-301) is missing here.
291 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
292 // Checking whether the queued function is ready for install is optional,
293 // since we come across interrupts and stack checks elsewhere. However,
294 // not checking may delay installing ready functions, and always checking
295 // would be quite expensive. A good compromise is to first check against
296 // stack limit as a cue for an interrupt signal.
298 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
302 CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
303 GenerateTailCallToReturnedCode(masm);
306 GenerateTailCallToSharedCode(masm);
// Generate_Runtime_NewObject — slow-path object allocation via the
// runtime. With create_memento, calls kNewObjectWithAllocationSite
// (which also bumps the allocation count, hence the jump to
// count_incremented); otherwise calls kNewObject.
// NOTE(review): sampled listing — the parameter list is incomplete here
// (a bool create_memento parameter and a trailing label are on missing
// lines ~311/314), as is the `} else {` and the tail storing the result.
310 static void Generate_Runtime_NewObject(MacroAssembler* masm,
312 Register original_constructor,
313 Label* count_incremented,
315 // ----------- S t a t e -------------
316 // -- r4: argument for Runtime_NewObject
317 // -----------------------------------
318 Register result = r7;
320 if (create_memento) {
321 // Get the cell or allocation site.
322 __ LoadP(r5, MemOperand(sp, 2 * kPointerSize));
323 __ Push(r5, r4, original_constructor);
324 __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 3);
326 // Runtime_NewObjectWithAllocationSite increments allocation count.
327 // Skip the increment.
328 __ b(count_incremented);
330 __ Push(r4, original_constructor);
331 __ CallRuntime(Runtime::kNewObject, 2);
// Generate_JSConstructStubHelper — the construct stub: allocates the
// receiver (inline fast path when FLAG_inline_new, else via
// Generate_Runtime_NewObject), copies the arguments onto the expression
// stack, invokes the constructor, and applies the ECMA-262 13.2.2-7 rule
// for choosing between the returned value and the receiver.
// NOTE(review): this is a sampled extract of a long function — the
// embedded line numbers skip frequently (e.g. 360->364, 385->388,
// 415->420), so branch targets, pushes/pops, and loop bodies are partly
// missing. Do not reason about exact control flow from this view alone.
338 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
339 bool is_api_function,
340 bool create_memento) {
341 // ----------- S t a t e -------------
342 // -- r3 : number of arguments
343 // -- r4 : constructor function
344 // -- r5 : allocation site or undefined
345 // -- r6 : original constructor
346 // -- lr : return address
347 // -- sp[...]: constructor arguments
348 // -----------------------------------
350 // Should never create mementos for api functions.
351 DCHECK(!is_api_function || !create_memento);
353 Isolate* isolate = masm->isolate();
355 // Enter a construct frame.
357 FrameScope scope(masm, StackFrame::CONSTRUCT);
359 if (create_memento) {
360 __ AssertUndefinedOrAllocationSite(r5, r7);
364 // Preserve the two incoming parameters on the stack.
368 Label rt_call, allocated, normal_new, count_incremented;
372 // Original constructor and function are different.
373 Generate_Runtime_NewObject(masm, create_memento, r6, &count_incremented,
375 __ bind(&normal_new);
377 // Try to allocate the object without transitioning into C code. If any of
378 // the preconditions is not met, the code bails out to the runtime call.
379 if (FLAG_inline_new) {
380 Label undo_allocation;
// Bail to the runtime if a debugger step-in frame pointer is set.
381 ExternalReference debug_step_in_fp =
382 ExternalReference::debug_step_in_fp_address(isolate);
383 __ mov(r5, Operand(debug_step_in_fp));
384 __ LoadP(r5, MemOperand(r5));
385 __ cmpi(r5, Operand::Zero());
388 // Load the initial map and verify that it is in fact a map.
389 // r4: constructor function
391 FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
392 __ JumpIfSmi(r5, &rt_call);
393 __ CompareObjectType(r5, r6, r7, MAP_TYPE);
396 // Check that the constructor is not constructing a JSFunction (see
397 // comments in Runtime_NewObject in runtime.cc). In which case the
398 // initial map's instance type would be JS_FUNCTION_TYPE.
399 // r4: constructor function
401 __ CompareInstanceType(r5, r6, JS_FUNCTION_TYPE);
404 if (!is_api_function) {
406 MemOperand bit_field3 = FieldMemOperand(r5, Map::kBitField3Offset);
407 // Check if slack tracking is enabled.
408 __ lwz(r7, bit_field3);
409 __ DecodeField<Map::Counter>(r11, r7);
410 __ cmpi(r11, Operand(Map::kSlackTrackingCounterEnd));
412 // Decrease generous allocation count.
413 __ Add(r7, r7, -(1 << Map::Counter::kShift), r0);
414 __ stw(r7, bit_field3);
415 __ cmpi(r11, Operand(Map::kSlackTrackingCounterEnd));
// When the tracking counter is exhausted, finalize the instance size.
420 __ Push(r5, r4); // r4 = constructor
421 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
428 // Now allocate the JSObject on the heap.
429 // r4: constructor function
431 __ lbz(r6, FieldMemOperand(r5, Map::kInstanceSizeOffset));
432 if (create_memento) {
// Reserve extra words for the trailing AllocationMemento.
433 __ addi(r6, r6, Operand(AllocationMemento::kSize / kPointerSize));
436 __ Allocate(r6, r7, r8, r9, &rt_call, SIZE_IN_WORDS);
438 // Allocated the JSObject, now initialize the fields. Map is set to
439 // initial map and properties and elements are set to empty fixed array.
440 // r4: constructor function
442 // r6: object size (not including memento if create_memento)
443 // r7: JSObject (not tagged)
444 __ LoadRoot(r9, Heap::kEmptyFixedArrayRootIndex);
446 __ StoreP(r5, MemOperand(r8, JSObject::kMapOffset));
447 __ StoreP(r9, MemOperand(r8, JSObject::kPropertiesOffset));
448 __ StoreP(r9, MemOperand(r8, JSObject::kElementsOffset));
449 __ addi(r8, r8, Operand(JSObject::kElementsOffset + kPointerSize));
451 __ ShiftLeftImm(r9, r6, Operand(kPointerSizeLog2));
452 __ add(r9, r7, r9); // End of object.
454 // Fill all the in-object properties with the appropriate filler.
455 // r4: constructor function
457 // r6: object size (in words, including memento if create_memento)
458 // r7: JSObject (not tagged)
459 // r8: First in-object property of JSObject (not tagged)
461 DCHECK_EQ(3 * kPointerSize, JSObject::kHeaderSize);
462 __ LoadRoot(r10, Heap::kUndefinedValueRootIndex);
464 if (!is_api_function) {
465 Label no_inobject_slack_tracking;
467 // Check if slack tracking is enabled.
468 __ cmpi(r11, Operand(Map::kSlackTrackingCounterEnd));
469 __ blt(&no_inobject_slack_tracking);
471 // Allocate object with a slack.
472 __ lbz(r3, FieldMemOperand(r5, Map::kPreAllocatedPropertyFieldsOffset));
473 if (FLAG_debug_code) {
474 __ ShiftLeftImm(r0, r3, Operand(kPointerSizeLog2));
476 // r0: offset of first field after pre-allocated fields
478 __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields);
482 __ cmpi(r3, Operand::Zero());
// Initialize only the pre-allocated fields with undefined ...
484 __ InitializeNFieldsWithFiller(r8, r3, r10);
487 // To allow for truncation.
// ... and the slack with the one-pointer filler map.
488 __ LoadRoot(r10, Heap::kOnePointerFillerMapRootIndex);
489 // Fill the remaining fields with one pointer filler map.
491 __ bind(&no_inobject_slack_tracking);
494 if (create_memento) {
// Leave AllocationMemento::kSize words for the memento, fill the rest.
495 __ subi(r3, r9, Operand(AllocationMemento::kSize));
496 __ InitializeFieldsWithFiller(r8, r3, r10);
498 // Fill in memento fields.
499 // r8: points to the allocated but uninitialized memento.
500 __ LoadRoot(r10, Heap::kAllocationMementoMapRootIndex);
501 __ StoreP(r10, MemOperand(r8, AllocationMemento::kMapOffset));
502 // Load the AllocationSite
503 __ LoadP(r10, MemOperand(sp, 2 * kPointerSize));
505 MemOperand(r8, AllocationMemento::kAllocationSiteOffset));
506 __ addi(r8, r8, Operand(AllocationMemento::kAllocationSiteOffset +
509 __ InitializeFieldsWithFiller(r8, r9, r10);
512 // Add the object tag to make the JSObject real, so that we can continue
513 // and jump into the continuation code at any time from now on. Any
514 // failures need to undo the allocation, so that the heap is in a
515 // consistent state and verifiable.
516 __ addi(r7, r7, Operand(kHeapObjectTag));
518 // Check if a non-empty properties array is needed. Continue with
519 // allocated object if not fall through to runtime call if it is.
520 // r4: constructor function
522 // r8: start of next object (not tagged)
523 __ lbz(r6, FieldMemOperand(r5, Map::kUnusedPropertyFieldsOffset));
524 // The field instance sizes contains both pre-allocated property fields
525 // and in-object properties.
526 __ lbz(r0, FieldMemOperand(r5, Map::kPreAllocatedPropertyFieldsOffset));
528 __ lbz(r0, FieldMemOperand(r5, Map::kInObjectPropertiesOffset));
529 __ sub(r6, r6, r0, LeaveOE, SetRC);
531 // Done if no extra properties are to be allocated.
532 __ beq(&allocated, cr0);
533 __ Assert(ge, kPropertyAllocationCountFailed, cr0);
535 // Scale the number of elements by pointer size and add the header for
536 // FixedArrays to the start of the next object calculation from above.
538 // r6: number of elements in properties array
540 // r8: start of next object
541 __ addi(r3, r6, Operand(FixedArray::kHeaderSize / kPointerSize));
// Allocate the backing FixedArray directly after the object
// (RESULT_CONTAINS_TOP); undo everything on failure.
543 r3, r8, r9, r5, &undo_allocation,
544 static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | SIZE_IN_WORDS));
546 // Initialize the FixedArray.
548 // r6: number of elements in properties array
550 // r8: FixedArray (not tagged)
551 __ LoadRoot(r9, Heap::kFixedArrayMapRootIndex);
553 DCHECK_EQ(0 * kPointerSize, JSObject::kMapOffset);
554 __ StoreP(r9, MemOperand(r5));
555 DCHECK_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
557 __ StoreP(r3, MemOperand(r5, kPointerSize));
558 __ addi(r5, r5, Operand(2 * kPointerSize));
560 // Initialize the fields to undefined.
561 // r4: constructor function
562 // r5: First element of FixedArray (not tagged)
563 // r6: number of elements in properties array
565 // r8: FixedArray (not tagged)
566 DCHECK_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
569 __ cmpi(r6, Operand::Zero());
571 if (!is_api_function || create_memento) {
572 __ LoadRoot(r10, Heap::kUndefinedValueRootIndex);
573 } else if (FLAG_debug_code) {
// Debug-only: r10 should already hold undefined on this path.
574 __ LoadRoot(r11, Heap::kUndefinedValueRootIndex);
576 __ Assert(eq, kUndefinedValueNotLoaded);
578 __ InitializeNFieldsWithFiller(r5, r6, r10);
582 // Store the initialized FixedArray into the properties field of
584 // r4: constructor function
586 // r8: FixedArray (not tagged)
587 __ addi(r8, r8, Operand(kHeapObjectTag)); // Add the heap tag.
588 __ StoreP(r8, FieldMemOperand(r7, JSObject::kPropertiesOffset), r0);
590 // Continue with JSObject being successfully allocated
591 // r4: constructor function
595 // Undo the setting of the new top so that the heap is verifiable. For
596 // example, the map's unused properties potentially do not match the
597 // allocated objects unused properties.
598 // r7: JSObject (previous new top)
599 __ bind(&undo_allocation);
600 __ UndoAllocationInNewSpace(r7, r8);
603 // Allocate the new receiver object using the runtime call.
604 // r4: constructor function
606 Generate_Runtime_NewObject(masm, create_memento, r4, &count_incremented,
609 // Receiver for constructor call allocated.
613 if (create_memento) {
614 __ LoadP(r5, MemOperand(sp, kPointerSize * 2));
615 __ LoadRoot(r8, Heap::kUndefinedValueRootIndex);
617 __ beq(&count_incremented);
618 // r5 is an AllocationSite. We are creating a memento from it, so we
619 // need to increment the memento create count.
621 r6, FieldMemOperand(r5, AllocationSite::kPretenureCreateCountOffset));
622 __ AddSmiLiteral(r6, r6, Smi::FromInt(1), r0);
624 r6, FieldMemOperand(r5, AllocationSite::kPretenureCreateCountOffset),
626 __ bind(&count_incremented);
631 // Reload the number of arguments and the constructor from the stack.
634 // sp[2]: constructor function
635 // sp[3]: number of arguments (smi-tagged)
636 __ LoadP(r4, MemOperand(sp, 2 * kPointerSize));
637 __ LoadP(r6, MemOperand(sp, 3 * kPointerSize));
639 // Set up pointer to last argument.
640 __ addi(r5, fp, Operand(StandardFrameConstants::kCallerSPOffset));
642 // Set up number of arguments for function call below
645 // Copy arguments and receiver to the expression stack.
646 // r3: number of arguments
647 // r4: constructor function
648 // r5: address of last argument (caller sp)
649 // r6: number of arguments (smi-tagged)
652 // sp[2]: constructor function
653 // sp[3]: number of arguments (smi-tagged)
655 __ cmpi(r3, Operand::Zero());
657 __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
// Argument-copy loop body (loop head/tail on missing lines).
660 __ subi(ip, ip, Operand(kPointerSize));
661 __ LoadPX(r0, MemOperand(r5, ip));
666 // Call the function.
667 // r3: number of arguments
668 // r4: constructor function
669 if (is_api_function) {
670 __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
671 Handle<Code> code = masm->isolate()->builtins()->HandleApiCallConstruct();
672 __ Call(code, RelocInfo::CODE_TARGET);
674 ParameterCount actual(r3);
675 __ InvokeFunction(r4, actual, CALL_FUNCTION, NullCallWrapper());
678 // Store offset of return address for deoptimizer.
679 if (!is_api_function) {
680 masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
683 // Restore context from the frame.
686 // sp[1]: constructor function
687 // sp[2]: number of arguments (smi-tagged)
688 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
690 // If the result is an object (in the ECMA sense), we should get rid
691 // of the receiver and use the result; see ECMA-262 section 13.2.2-7
693 Label use_receiver, exit;
695 // If the result is a smi, it is *not* an object in the ECMA sense.
697 // sp[0]: receiver (newly allocated object)
698 // sp[1]: constructor function
699 // sp[2]: number of arguments (smi-tagged)
700 __ JumpIfSmi(r3, &use_receiver);
702 // If the type of the result (stored in its map) is less than
703 // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
704 __ CompareObjectType(r3, r4, r6, FIRST_SPEC_OBJECT_TYPE);
707 // Throw away the result of the constructor invocation and use the
708 // on-stack receiver as the result.
709 __ bind(&use_receiver);
710 __ LoadP(r3, MemOperand(sp));
712 // Remove receiver from the stack, remove caller arguments, and
716 // sp[0]: receiver (newly allocated object)
717 // sp[1]: constructor function
718 // sp[2]: number of arguments (smi-tagged)
719 __ LoadP(r4, MemOperand(sp, 2 * kPointerSize));
721 // Leave construct frame.
// Pop the caller's smi-tagged argc worth of arguments plus receiver.
724 __ SmiToPtrArrayOffset(r4, r4);
726 __ addi(sp, sp, Operand(kPointerSize));
727 __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r4, r5);
// Generic construct stub: pretenuring (memento creation) is controlled
// by FLAG_pretenuring_call_new; not an API function.
732 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
733 Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
// API-function construct stub: is_api_function=true, never creates
// mementos (matches the DCHECK in the helper).
737 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
738 Generate_JSConstructStubHelper(masm, true, false);
// Builtins::Generate_JSConstructStubForDerived — construct stub for
// derived-class constructors (ES6 classes): receiver is the hole,
// new.target is pushed, debugger step-in is handled via
// kHandleStepInForDerivedConstructors, then the constructor is invoked.
// NOTE(review): sampled listing — loop heads, pushes, and the frame-exit
// sequence sit on lines missing from this extract (e.g. 764->768,
// 786->791).
742 void Builtins::Generate_JSConstructStubForDerived(MacroAssembler* masm) {
743 // ----------- S t a t e -------------
744 // -- r3 : number of arguments
745 // -- r4 : constructor function
746 // -- r5 : allocation site or undefined
747 // -- r6 : original constructor
748 // -- lr : return address
749 // -- sp[...]: constructor arguments
750 // -----------------------------------
752 // TODO(dslomov): support pretenuring
753 CHECK(!FLAG_pretenuring_call_new);
756 FrameScope scope(masm, StackFrame::CONSTRUCT);
758 // Smi-tagged arguments count.
760 __ SmiTag(r7, SetRC);
762 // receiver is the hole.
763 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
765 // smi arguments count, new.target, receiver
768 // Set up pointer to last argument.
769 __ addi(r5, fp, Operand(StandardFrameConstants::kCallerSPOffset));
771 // Copy arguments and receiver to the expression stack.
772 // r3: number of arguments
773 // r4: constructor function
774 // r5: address of last argument (caller sp)
775 // r7: number of arguments (smi-tagged)
776 // cr0: compare against zero of arguments
779 // sp[2]: number of arguments (smi-tagged)
781 __ beq(&no_args, cr0);
782 __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
// Argument-copy loop body (loop head/tail on missing lines).
785 __ subi(ip, ip, Operand(kPointerSize));
786 __ LoadPX(r0, MemOperand(r5, ip));
// Account for new.target in the argument count.
791 __ addi(r3, r3, Operand(1));
// Handle debugger step-in: if the step-in frame pointer is non-zero,
// call the runtime before invoking the constructor.
795 ExternalReference debug_step_in_fp =
796 ExternalReference::debug_step_in_fp_address(masm->isolate());
797 __ mov(r5, Operand(debug_step_in_fp));
798 __ LoadP(r5, MemOperand(r5));
799 __ and_(r0, r5, r5, SetRC);
800 __ beq(&skip_step_in, cr0);
803 __ CallRuntime(Runtime::kHandleStepInForDerivedConstructors, 1);
806 __ bind(&skip_step_in);
808 // Call the function.
809 // r3: number of arguments
810 // r4: constructor function
811 ParameterCount actual(r3);
812 __ InvokeFunction(r4, actual, CALL_FUNCTION, NullCallWrapper());
814 // Restore context from the frame.
816 // sp[0]: number of arguments (smi-tagged)
817 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
818 __ LoadP(r4, MemOperand(sp, 0));
820 // Leave construct frame.
// Pop the smi-tagged argc worth of arguments plus receiver.
823 __ SmiToPtrArrayOffset(r4, r4);
825 __ addi(sp, sp, Operand(kPointerSize));
// Generate_JSEntryTrampolineHelper — bridge from Generate_JS_Entry into
// JS: sets up the context, copies argv handles onto the stack
// (dereferencing each handle), initializes JS callee-saved registers to
// undefined, and invokes the code either as a construct call (via
// CallConstructStub) or a normal call (InvokeFunction), depending on the
// is_construct flag on the missing parameter line.
// NOTE(review): sampled listing — the `bool is_construct` parameter
// (~line 831), the entry/loop labels, and the return epilogue are on
// missing lines.
830 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
832 // Called from Generate_JS_Entry
838 // r0,r8-r9, cp may be clobbered
839 ProfileEntryHookStub::MaybeCallEntryHook(masm);
841 // Clear the context before we push it when entering the internal frame.
842 __ li(cp, Operand::Zero());
844 // Enter an internal frame.
846 FrameScope scope(masm, StackFrame::INTERNAL);
848 // Set up the context from the function argument.
849 __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
851 __ InitializeRootRegister();
853 // Push the function and the receiver onto the stack.
857 // Copy arguments to the stack in a loop.
860 // r7: argv, i.e. points to first arg
862 __ ShiftLeftImm(r0, r6, Operand(kPointerSizeLog2));
864 // r5 points past last arg.
// Loop body: each argv slot is a Handle; load it, then dereference.
867 __ LoadP(r8, MemOperand(r7)); // read next parameter
868 __ addi(r7, r7, Operand(kPointerSize));
869 __ LoadP(r0, MemOperand(r8)); // dereference handle
870 __ push(r0); // push parameter
875 // Initialize all JavaScript callee-saved registers, since they will be seen
876 // by the garbage collector as part of handlers.
877 __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
883 // Invoke the code and pass argc as r3.
886 // No type feedback cell is available
887 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
888 CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
891 ParameterCount actual(r3);
892 __ InvokeFunction(r4, actual, CALL_FUNCTION, NullCallWrapper());
894 // Exit the JS frame and remove the parameters (except function), and
// Entry trampoline for a normal (non-construct) call.
903 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
904 Generate_JSEntryTrampolineHelper(masm, false);
// Entry trampoline for a construct (`new`) call.
908 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
909 Generate_JSEntryTrampolineHelper(masm, true);
// Lazily compile the function via Runtime::kCompileLazy, then tail-call
// the returned Code object.
913 void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
914 CallRuntimePassFunction(masm, Runtime::kCompileLazy);
915 GenerateTailCallToReturnedCode(masm);
// CallCompileOptimized — calls Runtime::kCompileOptimized with the
// function plus a boolean (true root = concurrent background compile).
// NOTE(review): sampled listing — the pushes of the function and of r0
// (original lines ~923, 927-928) are missing from this extract.
919 static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
920 FrameScope scope(masm, StackFrame::INTERNAL);
921 // Push a copy of the function onto the stack.
922 // Push function as parameter to the runtime call.
924 // Whether to compile in a background thread.
926 r0, concurrent ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
929 __ CallRuntime(Runtime::kCompileOptimized, 2);
// Synchronous (non-concurrent) optimized compile, then tail-call result.
935 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
936 CallCompileOptimized(masm, false);
937 GenerateTailCallToReturnedCode(masm);
// Concurrent (background-thread) optimized compile, then tail-call result.
941 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
942 CallCompileOptimized(masm, true);
943 GenerateTailCallToReturnedCode(masm);
// GenerateMakeCodeYoungAgainCommon — calls the C function
// get_make_code_young_function with (return address, isolate) to reset a
// code object's age, preserving r0/r3/r4/fp around the C call. Uses a
// MANUAL frame to avoid a stack crawl.
// NOTE(review): sampled listing — the instruction pointing r3 at the
// PlatformCodeAge sequence (~955), the CallCFunction opener (~967), and
// the final jump (~970) are on missing lines.
947 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
948 // For now, we are relying on the fact that make_code_young doesn't do any
949 // garbage collection which allows us to save/restore the registers without
950 // worrying about which of them contain pointers. We also don't build an
951 // internal frame to make the code faster, since we shouldn't have to do stack
952 // crawls in MakeCodeYoung. This seems a bit fragile.
954 // Point r3 at the start of the PlatformCodeAge sequence.
957 // The following registers must be saved and restored when calling through to
959 // r3 - contains return address (beginning of patch sequence)
961 // lr - return address
962 FrameScope scope(masm, StackFrame::MANUAL);
964 __ MultiPush(r0.bit() | r3.bit() | r4.bit() | fp.bit());
965 __ PrepareCallCFunction(2, 0, r5);
966 __ mov(r4, Operand(ExternalReference::isolate_address(masm->isolate())));
968 ExternalReference::get_make_code_young_function(masm->isolate()), 2);
969 __ MultiPop(r0.bit() | r3.bit() | r4.bit() | fp.bit());
// Expands, for every code age C in CODE_AGE_LIST, a pair of builtins
// (even/odd marking) that both delegate to
// GenerateMakeCodeYoungAgainCommon.
// NOTE(review): sampled listing — the closing `}` lines of each expanded
// function (original 979, 983) are missing from this extract.
975 #define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \
976 void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
977 MacroAssembler* masm) { \
978 GenerateMakeCodeYoungAgainCommon(masm); \
980 void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \
981 MacroAssembler* masm) { \
982 GenerateMakeCodeYoungAgainCommon(masm); \
984 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
985 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
// Builtins::Generate_MarkCodeAsExecutedOnce — like the make-code-young
// path, but calls get_mark_code_as_executed_function, then performs the
// young-code prologue itself (PushFixedFrame + fp setup) and jumps past
// the code-age stub sequence.
// NOTE(review): sampled listing — the instruction loading ip/r3 with the
// sequence address and the final Jump are on missing lines.
988 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
989 // For now, we are relying on the fact that make_code_young doesn't do any
990 // garbage collection which allows us to save/restore the registers without
991 // worrying about which of them contain pointers. We also don't build an
992 // internal frame to make the code faster, since we shouldn't have to do stack
993 // crawls in MakeCodeYoung. This seems a bit fragile.
995 // Point r3 at the start of the PlatformCodeAge sequence.
998 // The following registers must be saved and restored when calling through to
1000 // r3 - contains return address (beginning of patch sequence)
1002 // lr - return address
1003 FrameScope scope(masm, StackFrame::MANUAL);
1005 __ MultiPush(r0.bit() | r3.bit() | r4.bit() | fp.bit());
1006 __ PrepareCallCFunction(2, 0, r5);
1007 __ mov(r4, Operand(ExternalReference::isolate_address(masm->isolate())));
1009 ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
1011 __ MultiPop(r0.bit() | r3.bit() | r4.bit() | fp.bit());
1015 // Perform prologue operations usually performed by the young code stub.
1016 __ PushFixedFrame(r4);
1017 __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
1019 // Jump to point after the code-age stub.
1020 __ addi(r3, ip, Operand(kNoCodeAgeSequenceLength));
// Marking code executed twice reuses the make-code-young path.
1025 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
1026 GenerateMakeCodeYoungAgainCommon(masm);
// Generate_NotifyStubFailureHelper — notifies the runtime of a stub
// failure while preserving all JS caller-saved and callee-saved
// registers (deopting stubs pass parameters in registers), then drops
// the state word from the stack and returns to the miss handler.
1030 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
1031 SaveFPRegsMode save_doubles) {
1033 FrameScope scope(masm, StackFrame::INTERNAL);
1035 // Preserve registers across notification, this is important for compiled
1036 // stubs that tail call the runtime on deopts passing their parameters in
1038 __ MultiPush(kJSCallerSaved | kCalleeSaved);
1039 // Pass the function and deoptimization type to the runtime system.
1040 __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
1041 __ MultiPop(kJSCallerSaved | kCalleeSaved);
1044 __ addi(sp, sp, Operand(kPointerSize)); // Ignore state
1045 __ blr(); // Jump to miss handler
// Stub-failure notification without saving FP registers.
1049 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
1050 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
// Stub-failure notification that also saves/restores FP registers.
1054 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
1055 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
// Generate_NotifyDeoptimizedHelper — calls Runtime::kNotifyDeoptimized
// with the bailout type, then resumes according to the full-codegen
// state left on the stack: NO_REGISTERS pops one word, TOS_REG restores
// r3 from the stack and pops two; any other state is a fatal stop.
// NOTE(review): sampled listing — the push of r3 before the runtime call
// and the Ret()s after each sp adjustment sit on missing lines.
1059 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
1060 Deoptimizer::BailoutType type) {
1062 FrameScope scope(masm, StackFrame::INTERNAL);
1063 // Pass the function and deoptimization type to the runtime system.
1064 __ LoadSmiLiteral(r3, Smi::FromInt(static_cast<int>(type)));
1066 __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
1069 // Get the full codegen state from the stack and untag it -> r9.
1070 __ LoadP(r9, MemOperand(sp, 0 * kPointerSize));
1072 // Switch on the state.
1073 Label with_tos_register, unknown_state;
1074 __ cmpi(r9, Operand(FullCodeGenerator::NO_REGISTERS));
1075 __ bne(&with_tos_register);
1076 __ addi(sp, sp, Operand(1 * kPointerSize)); // Remove state.
1079 __ bind(&with_tos_register);
// TOS_REG: the top-of-stack value must be restored into r3.
1080 __ LoadP(r3, MemOperand(sp, 1 * kPointerSize));
1081 __ cmpi(r9, Operand(FullCodeGenerator::TOS_REG));
1082 __ bne(&unknown_state);
1083 __ addi(sp, sp, Operand(2 * kPointerSize)); // Remove state.
1086 __ bind(&unknown_state);
1087 __ stop("no cases left");
// Eager deoptimization notification.
1091 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
1092 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
// Soft deoptimization notification.
1096 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
1097 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
// Lazy deoptimization notification.
1101 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
1102 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
// On-stack replacement: asks the runtime to compile the current frame's
// function for OSR, and if a code object is produced, jumps into it at the
// OSR entry point recorded in its deoptimization data. If no code is
// produced (Smi 0), execution returns to the unoptimized code.
1106 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
1107 // Lookup the function in the JavaScript frame.
1108 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1110 FrameScope scope(masm, StackFrame::INTERNAL);
1111 // Pass function as argument.
1113 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
1116 // If the code object is null, just return to the unoptimized code.
1118 __ CmpSmiLiteral(r3, Smi::FromInt(0), r0);
1124 // Load deoptimization data from the code object.
1125 // <deopt_data> = <code>[#deoptimization_data_offset]
1126 __ LoadP(r4, FieldMemOperand(r3, Code::kDeoptimizationDataOffset));
// r3 now points at the first instruction of the new code object.
1129 __ addi(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag)); // Code start
1131 // Load the OSR entrypoint offset from the deoptimization data.
1132 // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
1133 __ LoadP(r4, FieldMemOperand(
1134 r4, FixedArray::OffsetOfElementAt(
1135 DeoptimizationInputData::kOsrPcOffsetIndex)));
1138 // Compute the target address = code start + osr_offset
1141 // And "return" to the OSR entry point of the function.
// Runs after a back-edge stack check: if the stack limit was hit (used as
// a signal that recompilation may be requested), calls Runtime::kStackGuard,
// then tail-jumps to the OnStackReplacement builtin.
1148 void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
1149 // We check the stack limit as indicator that recompilation might be done.
1151 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
1155 FrameScope scope(masm, StackFrame::INTERNAL);
1156 __ CallRuntime(Runtime::kStackGuard, 0);
1158 __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
1159 RelocInfo::CODE_TARGET);
// Function.prototype.call-style invocation trampoline.
// Ensures there is at least one argument, loads the callee (passed as the
// receiver) from the stack, patches the first argument (the receiver) for
// sloppy-mode non-native functions, shifts all arguments down one slot to
// drop the callee, and finally dispatches either directly to the function,
// or to CALL_FUNCTION_PROXY / CALL_NON_FUNCTION via the arguments adaptor.
// Register protocol throughout: r3 = argc, r4 = callee, r7 = call type
// (0: JS function, 1: function proxy, 2: non-function).
1166 void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
1167 // 1. Make sure we have at least one argument.
1168 // r3: actual number of arguments
1171 __ cmpi(r3, Operand::Zero());
// Push undefined as the missing receiver and bump the argument count.
1173 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
1175 __ addi(r3, r3, Operand(1));
1179 // 2. Get the function to call (passed as receiver) from the stack, check
1180 // if it is a function.
1181 // r3: actual number of arguments
1182 Label slow, non_function;
1183 __ ShiftLeftImm(r4, r3, Operand(kPointerSizeLog2));
1185 __ LoadP(r4, MemOperand(r4));
1186 __ JumpIfSmi(r4, &non_function);
1187 __ CompareObjectType(r4, r5, r5, JS_FUNCTION_TYPE);
1190 // 3a. Patch the first argument if necessary when calling a function.
1191 // r3: actual number of arguments
1193 Label shift_arguments;
1194 __ li(r7, Operand::Zero()); // indicate regular JS_FUNCTION
1196 Label convert_to_object, use_global_proxy, patch_receiver;
1197 // Change context eagerly in case we need the global receiver.
1198 __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
1200 // Do not transform the receiver for strict mode functions.
1201 __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
1202 __ lwz(r6, FieldMemOperand(r5, SharedFunctionInfo::kCompilerHintsOffset));
1204 #if V8_TARGET_ARCH_PPC64
1205 SharedFunctionInfo::kStrictModeFunction,
1207 SharedFunctionInfo::kStrictModeFunction + kSmiTagSize,
1210 __ bne(&shift_arguments, cr0);
1212 // Do not transform the receiver for native (Compilerhints already in r6).
1214 #if V8_TARGET_ARCH_PPC64
1215 SharedFunctionInfo::kNative,
1217 SharedFunctionInfo::kNative + kSmiTagSize,
1220 __ bne(&shift_arguments, cr0);
1222 // Compute the receiver in sloppy mode.
1223 __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
1225 __ LoadP(r5, MemOperand(r5, -kPointerSize));
1226 // r3: actual number of arguments
1228 // r5: first argument
// A smi receiver must be boxed; undefined/null map to the global proxy.
1229 __ JumpIfSmi(r5, &convert_to_object);
1231 __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
1233 __ beq(&use_global_proxy);
1234 __ LoadRoot(r6, Heap::kNullValueRootIndex);
1236 __ beq(&use_global_proxy);
// Receivers that are already spec objects need no transformation.
1238 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
1239 __ CompareObjectType(r5, r6, r6, FIRST_SPEC_OBJECT_TYPE);
1240 __ bge(&shift_arguments);
1242 __ bind(&convert_to_object);
1245 // Enter an internal frame in order to preserve argument count.
1246 FrameScope scope(masm, StackFrame::INTERNAL);
1249 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1255 // Exit the internal frame.
1258 // Restore the function to r4, and the flag to r7.
1259 __ ShiftLeftImm(r7, r3, Operand(kPointerSizeLog2));
1261 __ LoadP(r4, MemOperand(r7));
1262 __ li(r7, Operand::Zero());
1263 __ b(&patch_receiver);
1265 __ bind(&use_global_proxy);
1266 __ LoadP(r5, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
1267 __ LoadP(r5, FieldMemOperand(r5, GlobalObject::kGlobalProxyOffset));
// Write the (possibly converted) receiver back into its argument slot.
1269 __ bind(&patch_receiver);
1270 __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
1272 __ StoreP(r5, MemOperand(r6, -kPointerSize));
1274 __ b(&shift_arguments);
1277 // 3b. Check for function proxy.
1279 __ li(r7, Operand(1, RelocInfo::NONE32)); // indicate function proxy
1280 __ cmpi(r5, Operand(JS_FUNCTION_PROXY_TYPE));
1281 __ beq(&shift_arguments);
1282 __ bind(&non_function);
1283 __ li(r7, Operand(2, RelocInfo::NONE32)); // indicate non-function
1285 // 3c. Patch the first argument when calling a non-function. The
1286 // CALL_NON_FUNCTION builtin expects the non-function callee as
1287 // receiver, so overwrite the first argument which will ultimately
1288 // become the receiver.
1289 // r3: actual number of arguments
1291 // r7: call type (0: JS function, 1: function proxy, 2: non-function)
1292 __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
1294 __ StoreP(r4, MemOperand(r5, -kPointerSize));
1296 // 4. Shift arguments and return address one slot down on the stack
1297 // (overwriting the original receiver). Adjust argument count to make
1298 // the original first argument the new receiver.
1299 // r3: actual number of arguments
1301 // r7: call type (0: JS function, 1: function proxy, 2: non-function)
1302 __ bind(&shift_arguments);
1305 // Calculate the copy start address (destination). Copy end address is sp.
1306 __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
// Copy loop body: move each slot one position down the stack.
1310 __ LoadP(ip, MemOperand(r5, -kPointerSize));
1311 __ StoreP(ip, MemOperand(r5));
1312 __ subi(r5, r5, Operand(kPointerSize));
1315 // Adjust the actual number of arguments and remove the top element
1316 // (which is a copy of the last argument).
1317 __ subi(r3, r3, Operand(1));
1321 // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
1322 // or a function proxy via CALL_FUNCTION_PROXY.
1323 // r3: actual number of arguments
1325 // r7: call type (0: JS function, 1: function proxy, 2: non-function)
1327 Label function, non_proxy;
1328 __ cmpi(r7, Operand::Zero());
1330 // Expected number of arguments is 0 for CALL_NON_FUNCTION.
1331 __ li(r5, Operand::Zero());
1332 __ cmpi(r7, Operand(1));
1335 __ push(r4); // re-add proxy object as additional argument
1336 __ addi(r3, r3, Operand(1));
1337 __ GetBuiltinFunction(r4, Builtins::CALL_FUNCTION_PROXY);
1338 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1339 RelocInfo::CODE_TARGET);
1341 __ bind(&non_proxy);
1342 __ GetBuiltinFunction(r4, Builtins::CALL_NON_FUNCTION);
1343 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1344 RelocInfo::CODE_TARGET);
1348 // 5b. Get the code to call from the function and check that the number of
1349 // expected arguments matches what we're providing. If so, jump
1350 // (tail-call) to the code in register edx without checking arguments.
1351 // r3: actual number of arguments
1353 __ LoadP(r6, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
1355 r5, FieldMemOperand(r6, SharedFunctionInfo::kFormalParameterCountOffset));
1356 #if !V8_TARGET_ARCH_PPC64
1359 __ cmp(r5, r3); // Check formal and actual parameter counts.
// Mismatched counts go through the arguments adaptor trampoline.
1360 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1361 RelocInfo::CODE_TARGET, ne);
1363 __ LoadP(ip, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
1364 ParameterCount expected(0);
1365 __ InvokeCode(ip, expected, expected, JUMP_FUNCTION, NullCallWrapper());
// Checks whether pushing the (smi-tagged, in r3) argument count worth of
// slots would overflow the real stack limit; on overflow, reloads the
// callee from the frame at calleeOffset and invokes the STACK_OVERFLOW
// builtin. The "real" limit is used so debug-break/preemption interrupts
// are not caught here.
1369 static void Generate_CheckStackOverflow(MacroAssembler* masm,
1370 const int calleeOffset) {
1371 // Check the stack for overflow. We are not trying to catch
1372 // interruptions (e.g. debug break and preemption) here, so the "real stack
1373 // limit" is checked.
1375 __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
1376 // Make r5 the space we have left. The stack might already be overflowed
1377 // here which will cause r5 to become negative.
1379 // Check if the arguments will overflow the stack.
1380 __ SmiToPtrArrayOffset(r0, r3);
1382 __ bgt(&okay); // Signed comparison.
1384 // Out of stack space.
1385 __ LoadP(r4, MemOperand(fp, calleeOffset));
1387 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
// Pushes the elements of an applied arguments object onto the stack, one at
// a time, using Runtime::kGetProperty for each element. The current index
// and the loop limit live in frame slots (indexOffset / limitOffset), and
// the arguments object itself at argumentsOffset. On exit the number of
// pushed arguments is in r3 (untagged).
1393 static void Generate_PushAppliedArguments(MacroAssembler* masm,
1394 const int argumentsOffset,
1395 const int indexOffset,
1396 const int limitOffset) {
// Load the current (smi) index from its frame slot.
1398 __ LoadP(r3, MemOperand(fp, indexOffset));
1401 // Load the current argument from the arguments array and push it to the
1403 // r3: current argument index
1405 __ LoadP(r4, MemOperand(fp, argumentsOffset));
1408 // Call the runtime to access the property in the arguments array.
1409 __ CallRuntime(Runtime::kGetProperty, 2);
1412 // Use inline caching to access the arguments.
// Increment the index (as a smi) and store it back to the frame slot.
1413 __ LoadP(r3, MemOperand(fp, indexOffset));
1414 __ AddSmiLiteral(r3, r3, Smi::FromInt(1), r0);
1415 __ StoreP(r3, MemOperand(fp, indexOffset));
1417 // Test if the copy loop has finished copying all the elements from the
1418 // arguments object.
1420 __ LoadP(r4, MemOperand(fp, limitOffset));
1424 // On exit, the pushed arguments count is in r0, untagged
1429 // Used by FunctionApply and ReflectApply
1430 static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) {
1431 const int kFormalParameters = targetIsArgument ? 3 : 2;
1432 const int kStackSize = kFormalParameters + 1;
1435 FrameScope frame_scope(masm, StackFrame::INTERNAL);
1436 const int kArgumentsOffset = kFPOnStackSize + kPCOnStackSize;
1437 const int kReceiverOffset = kArgumentsOffset + kPointerSize;
1438 const int kFunctionOffset = kReceiverOffset + kPointerSize;
1440 __ LoadP(r3, MemOperand(fp, kFunctionOffset)); // get the function
1442 __ LoadP(r3, MemOperand(fp, kArgumentsOffset)); // get the args array
1444 if (targetIsArgument) {
1445 __ InvokeBuiltin(Builtins::REFLECT_APPLY_PREPARE, CALL_FUNCTION);
1447 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
1450 Generate_CheckStackOverflow(masm, kFunctionOffset);
1452 // Push current limit and index.
1453 const int kIndexOffset =
1454 StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize);
1455 const int kLimitOffset =
1456 StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize);
1457 __ li(r4, Operand::Zero());
1458 __ Push(r3, r4); // limit and initial index.
1460 // Get the receiver.
1461 __ LoadP(r3, MemOperand(fp, kReceiverOffset));
1463 // Check that the function is a JS function (otherwise it must be a proxy).
1464 Label push_receiver;
1465 __ LoadP(r4, MemOperand(fp, kFunctionOffset));
1466 __ CompareObjectType(r4, r5, r5, JS_FUNCTION_TYPE);
1467 __ bne(&push_receiver);
1469 // Change context eagerly to get the right global object if necessary.
1470 __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
1471 // Load the shared function info while the function is still in r4.
1472 __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
1474 // Compute the receiver.
1475 // Do not transform the receiver for strict mode functions.
1476 Label call_to_object, use_global_proxy;
1477 __ lwz(r5, FieldMemOperand(r5, SharedFunctionInfo::kCompilerHintsOffset));
1479 #if V8_TARGET_ARCH_PPC64
1480 SharedFunctionInfo::kStrictModeFunction,
1482 SharedFunctionInfo::kStrictModeFunction + kSmiTagSize,
1485 __ bne(&push_receiver, cr0);
1487 // Do not transform the receiver for strict mode functions.
1489 #if V8_TARGET_ARCH_PPC64
1490 SharedFunctionInfo::kNative,
1492 SharedFunctionInfo::kNative + kSmiTagSize,
1495 __ bne(&push_receiver, cr0);
1497 // Compute the receiver in sloppy mode.
1498 __ JumpIfSmi(r3, &call_to_object);
1499 __ LoadRoot(r4, Heap::kNullValueRootIndex);
1501 __ beq(&use_global_proxy);
1502 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
1504 __ beq(&use_global_proxy);
1506 // Check if the receiver is already a JavaScript object.
1508 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
1509 __ CompareObjectType(r3, r4, r4, FIRST_SPEC_OBJECT_TYPE);
1510 __ bge(&push_receiver);
1512 // Convert the receiver to a regular object.
1514 __ bind(&call_to_object);
1516 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1517 __ b(&push_receiver);
1519 __ bind(&use_global_proxy);
1520 __ LoadP(r3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
1521 __ LoadP(r3, FieldMemOperand(r3, GlobalObject::kGlobalProxyOffset));
1523 // Push the receiver.
1525 __ bind(&push_receiver);
1528 // Copy all arguments from the array to the stack.
1529 Generate_PushAppliedArguments(masm, kArgumentsOffset, kIndexOffset,
1532 // Call the function.
1534 ParameterCount actual(r3);
1535 __ LoadP(r4, MemOperand(fp, kFunctionOffset));
1536 __ CompareObjectType(r4, r5, r5, JS_FUNCTION_TYPE);
1537 __ bne(&call_proxy);
1538 __ InvokeFunction(r4, actual, CALL_FUNCTION, NullCallWrapper());
1540 __ LeaveFrame(StackFrame::INTERNAL, kStackSize * kPointerSize);
1543 // Call the function proxy.
1544 __ bind(&call_proxy);
1545 __ push(r4); // add function proxy as last argument
1546 __ addi(r3, r3, Operand(1));
1547 __ li(r5, Operand::Zero());
1548 __ GetBuiltinFunction(r4, Builtins::CALL_FUNCTION_PROXY);
1549 __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1550 RelocInfo::CODE_TARGET);
1552 // Tear down the internal frame and remove function, receiver and args.
1554 __ addi(sp, sp, Operand(kStackSize * kPointerSize));
// Implementation of Reflect.construct: defaults new.target to the
// constructor when undefined, validates arguments via the
// REFLECT_CONSTRUCT_PREPARE builtin, checks for stack overflow, pushes
// new.target, the callee, and the applied arguments, then calls through
// CallConstructStub with SUPER_CONSTRUCTOR_CALL semantics (using the
// undefined feedback vector).
1559 static void Generate_ConstructHelper(MacroAssembler* masm) {
1560 const int kFormalParameters = 3;
1561 const int kStackSize = kFormalParameters + 1;
1564 FrameScope frame_scope(masm, StackFrame::INTERNAL);
// Frame-relative offsets of the three incoming parameters.
1565 const int kNewTargetOffset = kFPOnStackSize + kPCOnStackSize;
1566 const int kArgumentsOffset = kNewTargetOffset + kPointerSize;
1567 const int kFunctionOffset = kArgumentsOffset + kPointerSize;
1569 // If newTarget is not supplied, set it to constructor
1570 Label validate_arguments;
1571 __ LoadP(r3, MemOperand(fp, kNewTargetOffset));
1572 __ CompareRoot(r3, Heap::kUndefinedValueRootIndex);
1573 __ bne(&validate_arguments);
1574 __ LoadP(r3, MemOperand(fp, kFunctionOffset));
1575 __ StoreP(r3, MemOperand(fp, kNewTargetOffset));
1577 // Validate arguments
1578 __ bind(&validate_arguments);
1579 __ LoadP(r3, MemOperand(fp, kFunctionOffset)); // get the function
1581 __ LoadP(r3, MemOperand(fp, kArgumentsOffset)); // get the args array
1583 __ LoadP(r3, MemOperand(fp, kNewTargetOffset)); // get the new.target
1585 __ InvokeBuiltin(Builtins::REFLECT_CONSTRUCT_PREPARE, CALL_FUNCTION);
1587 Generate_CheckStackOverflow(masm, kFunctionOffset);
1589 // Push current limit and index.
1590 const int kIndexOffset =
1591 StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize);
1592 const int kLimitOffset =
1593 StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize);
1594 __ li(r4, Operand::Zero());
1595 __ Push(r3, r4); // limit and initial index.
1596 // Push newTarget and callee functions
1597 __ LoadP(r3, MemOperand(fp, kNewTargetOffset));
1599 __ LoadP(r3, MemOperand(fp, kFunctionOffset));
1602 // Copy all arguments from the array to the stack.
1603 Generate_PushAppliedArguments(masm, kArgumentsOffset, kIndexOffset,
1606 // Use undefined feedback vector
1607 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
1608 __ LoadP(r4, MemOperand(fp, kFunctionOffset));
1610 // Call the function.
1611 CallConstructStub stub(masm->isolate(), SUPER_CONSTRUCTOR_CALL);
1612 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
1616 // Leave internal frame.
1618 __ addi(sp, sp, Operand(kStackSize * kPointerSize));
// Function.prototype.apply — shared helper with targetIsArgument == false.
1623 void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
1624 Generate_ApplyHelper(masm, false);
// Reflect.apply — shared helper with targetIsArgument == true.
1628 void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
1629 Generate_ApplyHelper(masm, true);
// Reflect.construct — delegates to the construct helper.
1633 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
1634 Generate_ConstructHelper(masm);
// Branches to stack_overflow if pushing r5 (expected argument count) slots
// would cross the real stack limit. Clobbers r8 and r0; leaves r3/r4/r5
// (the adaptor's calling convention registers) untouched.
1638 static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
1639 Label* stack_overflow) {
1640 // ----------- S t a t e -------------
1641 // -- r3 : actual number of arguments
1642 // -- r4 : function (passed through to callee)
1643 // -- r5 : expected number of arguments
1644 // -----------------------------------
1645 // Check the stack for overflow. We are not trying to catch
1646 // interruptions (e.g. debug break and preemption) here, so the "real stack
1647 // limit" is checked.
1648 __ LoadRoot(r8, Heap::kRealStackLimitRootIndex)
1649 // Make r8 the space we have left. The stack might already be overflowed
1650 // here which will cause r8 to become negative.
1652 // Check if the arguments will overflow the stack.
1653 __ ShiftLeftImm(r0, r5, Operand(kPointerSizeLog2));
1655 __ ble(stack_overflow); // Signed comparison.
// Builds an ARGUMENTS_ADAPTOR frame: pushes fp, the frame-type marker,
// the function (r4) and the actual argument count (r3), then points fp
// past the fixed frame part.
1659 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
1661 __ LoadSmiLiteral(r7, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
1664 __ Push(fp, r7, r4, r3);
1665 __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
// Tears down an ARGUMENTS_ADAPTOR frame. Reads back the saved (smi)
// argument count into r4, leaves the frame (also dropping the receiver
// slot), and removes the pushed parameters, preserving the result in r3.
1670 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1671 // ----------- S t a t e -------------
1672 // -- r3 : result being passed through
1673 // -----------------------------------
1674 // Get the number of arguments passed (as a smi), tear down the frame and
1675 // then tear down the parameters.
1676 __ LoadP(r4, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
1678 int stack_adjustment = kPointerSize; // adjust for receiver
1679 __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR, stack_adjustment);
1680 __ SmiToPtrArrayOffset(r0, r4);
// Adapts the actual argument count (r3) to the callee's expected count
// (r5) before jumping to the code entry in ip. Three paths:
//  - expected == kDontAdaptArgumentsSentinel: jump straight to the callee;
//  - actual >= expected: copy the top `expected` arguments (plus receiver)
//    into a fresh adaptor frame;
//  - actual < expected: copy the actual arguments and pad the remainder
//    with undefined.
// A stack-overflow check guards the copy; on overflow the STACK_OVERFLOW
// builtin is invoked from inside a manually entered adaptor frame.
1685 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
1686 // ----------- S t a t e -------------
1687 // -- r3 : actual number of arguments
1688 // -- r4 : function (passed through to callee)
1689 // -- r5 : expected number of arguments
1690 // -----------------------------------
1692 Label stack_overflow;
1693 ArgumentAdaptorStackCheck(masm, &stack_overflow);
1694 Label invoke, dont_adapt_arguments;
1696 Label enough, too_few;
1697 __ LoadP(ip, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
// The sentinel expected count means the callee accepts any argument count.
1700 __ cmpi(r5, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
1701 __ beq(&dont_adapt_arguments);
1703 { // Enough parameters: actual >= expected
1705 EnterArgumentsAdaptorFrame(masm);
1707 // Calculate copy start address into r3 and copy end address into r5.
1708 // r3: actual number of arguments as a smi
1710 // r5: expected number of arguments
1711 // ip: code entry to call
1712 __ SmiToPtrArrayOffset(r3, r3);
1714 // adjust for return address and receiver
1715 __ addi(r3, r3, Operand(2 * kPointerSize));
1716 __ ShiftLeftImm(r5, r5, Operand(kPointerSizeLog2));
1719 // Copy the arguments (including the receiver) to the new stack frame.
1720 // r3: copy start address
1722 // r5: copy end address
1723 // ip: code entry to call
// Copy loop: walk r3 down from the copy start towards the end address r5.
1727 __ LoadP(r0, MemOperand(r3, 0));
1729 __ cmp(r3, r5); // Compare before moving to next argument.
1730 __ subi(r3, r3, Operand(kPointerSize));
1736 { // Too few parameters: Actual < expected
1738 EnterArgumentsAdaptorFrame(masm);
1740 // Calculate copy start address into r0 and copy end address is fp.
1741 // r3: actual number of arguments as a smi
1743 // r5: expected number of arguments
1744 // ip: code entry to call
1745 __ SmiToPtrArrayOffset(r3, r3);
1748 // Copy the arguments (including the receiver) to the new stack frame.
1749 // r3: copy start address
1751 // r5: expected number of arguments
1752 // ip: code entry to call
1755 // Adjust load for return address and receiver.
1756 __ LoadP(r0, MemOperand(r3, 2 * kPointerSize));
1758 __ cmp(r3, fp); // Compare before moving to next argument.
1759 __ subi(r3, r3, Operand(kPointerSize));
1762 // Fill the remaining expected arguments with undefined.
1764 // r5: expected number of arguments
1765 // ip: code entry to call
1766 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
1767 __ ShiftLeftImm(r5, r5, Operand(kPointerSizeLog2));
1769 // Adjust for frame.
1770 __ subi(r5, r5, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
1780 // Call the entry point.
1784 // Store offset of return address for deoptimizer.
1785 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
1787 // Exit frame and return.
1788 LeaveArgumentsAdaptorFrame(masm);
1792 // -------------------------------------------
1793 // Dont adapt arguments.
1794 // -------------------------------------------
1795 __ bind(&dont_adapt_arguments);
1796 __ JumpToJSEntry(ip);
1798 __ bind(&stack_overflow);
1800 FrameScope frame(masm, StackFrame::MANUAL);
1801 EnterArgumentsAdaptorFrame(masm);
1802 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
1810 } // namespace v8::internal
1812 #endif // V8_TARGET_ARCH_PPC