// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_PPC

#include "src/codegen.h"
#include "src/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)
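
// `__` is V8's conventional shorthand for emitting code through the current
// MacroAssembler: `__ addi(r3, r3, Operand(1))` expands to
// `masm->addi(r3, r3, Operand(1))`.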

void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- r3                 : number of arguments excluding receiver
  //  -- r4                 : called function (only guaranteed when
  //                          extra_args requires it)
  //  -- cp                 : context
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument (argc == r3)
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------
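
  // Note: the byte offsets written as "4 * ..." in the state comment above
  // assume 4-byte pointers; the code below always scales by kPointerSizeLog2,
  // so the same sequence is correct on both 32-bit and 64-bit PPC. r3 doubles
  // as the first-argument and return-value register in the PPC ABI.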

  // Insert extra arguments.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    num_extra_args = 1;
    __ push(r4);
  } else {
    DCHECK(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects r3 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ addi(r3, r3, Operand(num_extra_args + 1));
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}


// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the native context.
  __ LoadP(result,
           MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ LoadP(result, FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the InternalArray function from the native context.
  __ LoadP(result,
           MemOperand(result, Context::SlotOffset(
                                  Context::INTERNAL_ARRAY_FUNCTION_INDEX)));
}


// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the native context.
  __ LoadP(result,
           MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ LoadP(result, FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the Array function from the native context.
  __ LoadP(result,
           MemOperand(result, Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
}


void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, r4);

  if (FLAG_debug_code) {
    // The initial map for the builtin InternalArray function should be a map.
    __ LoadP(r5, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
    __ TestIfSmi(r5, r0);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction, cr0);
    __ CompareObjectType(r5, r6, r7, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, r4);

  if (FLAG_debug_code) {
    // The initial map for the builtin Array function should be a map.
    __ LoadP(r5, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
    __ TestIfSmi(r5, r0);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction, cr0);
    __ CompareObjectType(r5, r6, r7, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
  }

  // Run the native code for the Array function called as a normal function.
  __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}
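

// Entry point for `new String(value)`: the argument is converted to a string
// (via the number-to-string cache or the TO_STRING builtin) and then boxed
// in a JSValue wrapper whose map is the String function's initial map.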
void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3                     : number of arguments
  //  -- r4                     : constructor function
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_ctor_calls(), 1, r5, r6);

  Register function = r4;
  if (FLAG_debug_code) {
    __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, r5);
    __ cmp(function, r5);
    __ Assert(eq, kUnexpectedStringFunction);
  }

  // Load the first argument into r3 and get rid of the rest.
  Label no_arguments;
  __ cmpi(r3, Operand::Zero());
  __ beq(&no_arguments);
  // First argument is at sp[(argc - 1) * kPointerSize].
  __ subi(r3, r3, Operand(1));
  __ ShiftLeftImm(r3, r3, Operand(kPointerSizeLog2));
  __ add(sp, sp, r3);
  __ LoadP(r3, MemOperand(sp));
  // sp now points to args[0]; drop args[0] and the receiver.
  __ Drop(2);

  Register argument = r5;
  Label not_cached, argument_is_string;
  __ LookupNumberStringCache(r3,        // Input.
                             argument,  // Result.
                             r6,        // Scratch.
                             r7,        // Scratch.
                             r8,        // Scratch.
                             &not_cached);
  __ IncrementCounter(counters->string_ctor_cached_number(), 1, r6, r7);
  __ bind(&argument_is_string);

  // ----------- S t a t e -------------
  //  -- r5     : argument converted to string
  //  -- r4     : constructor function
  //  -- lr     : return address
  // -----------------------------------

  Label gc_required;
  __ Allocate(JSValue::kSize,
              r3,  // Result.
              r6,  // Scratch.
              r7,  // Scratch.
              &gc_required, TAG_OBJECT);

  // Initialize the String object.
  Register map = r6;
  __ LoadGlobalFunctionInitialMap(function, map, r7);
  if (FLAG_debug_code) {
    __ lbz(r7, FieldMemOperand(map, Map::kInstanceSizeOffset));
    __ cmpi(r7, Operand(JSValue::kSize >> kPointerSizeLog2));
    __ Assert(eq, kUnexpectedStringWrapperInstanceSize);
    __ lbz(r7, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
    __ cmpi(r7, Operand::Zero());
    __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper);
  }
  __ StoreP(map, FieldMemOperand(r3, HeapObject::kMapOffset), r0);

  __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
  __ StoreP(r6, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0);
  __ StoreP(r6, FieldMemOperand(r3, JSObject::kElementsOffset), r0);

  __ StoreP(argument, FieldMemOperand(r3, JSValue::kValueOffset), r0);

  // Ensure the object is fully initialized.
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
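  // A JSValue wrapper is exactly four pointer-sized fields: map, properties,
  // elements and value. The four stores above therefore initialize every
  // field of the new object, which is what this assertion checks.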
  __ Ret();

  // The argument was not found in the number to string cache. Check
  // if it's a string already before calling the conversion builtin.
  Label convert_argument;
  __ bind(&not_cached);
  __ JumpIfSmi(r3, &convert_argument);

  // Is it a String?
  __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ lbz(r6, FieldMemOperand(r5, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  __ andi(r0, r6, Operand(kIsNotStringMask));
  __ bne(&convert_argument, cr0);
  __ mr(argument, r3);
  __ IncrementCounter(counters->string_ctor_conversions(), 1, r6, r7);
  __ b(&argument_is_string);

  // Invoke the conversion builtin and put the result into r5.
  __ bind(&convert_argument);
  __ push(function);  // Preserve the function.
  __ IncrementCounter(counters->string_ctor_conversions(), 1, r6, r7);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ push(r3);
    __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
  }
  __ pop(function);
  __ mr(argument, r3);
  __ b(&argument_is_string);

  // Load the empty string into r5, remove the receiver from the
  // stack, and jump back to the case where the argument is a string.
  __ bind(&no_arguments);
  __ LoadRoot(argument, Heap::kempty_stringRootIndex);
  __ Drop(1);
  __ b(&argument_is_string);

  // At this point the argument is already a string. Call runtime to
  // create a string wrapper.
  __ bind(&gc_required);
  __ IncrementCounter(counters->string_ctor_gc_required(), 1, r6, r7);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ push(argument);
    __ CallRuntime(Runtime::kNewStringWrapper, 1);
  }
  __ Ret();
}


static void CallRuntimePassFunction(MacroAssembler* masm,
                                    Runtime::FunctionId function_id) {
  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack.
  // Push function as parameter to the runtime call.
  __ Push(r4, r4);

  __ CallRuntime(function_id, 1);
  // Restore receiver.
  __ Pop(r4);
}


static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ LoadP(ip, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(ip, FieldMemOperand(ip, SharedFunctionInfo::kCodeOffset));
  __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}


static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
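  // The runtime call made by our caller (e.g. CallRuntimePassFunction) leaves
  // the new Code object in r3, the PPC return-value register; skip the Code
  // header to reach its first instruction.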
  __ addi(ip, r3, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}


void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive. A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  __ cmpl(sp, ip);
  __ bge(&ok);

  CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
  GenerateTailCallToReturnedCode(masm);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}
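

// Slow path used by the construct stub below: allocate the new receiver by
// calling into the runtime instead of inlining the allocation.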
static void Generate_Runtime_NewObject(MacroAssembler* masm,
                                       bool create_memento,
                                       Register original_constructor,
                                       Label* count_incremented,
                                       Label* allocated) {
  // ----------- S t a t e -------------
  //  -- r4: argument for Runtime_NewObject
  // -----------------------------------
  Register result = r7;

  if (create_memento) {
    // Get the cell or allocation site.
    __ LoadP(r5, MemOperand(sp, 2 * kPointerSize));
    __ Push(r5, r4, original_constructor);
    __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 3);
    __ mr(result, r3);
    // Runtime_NewObjectWithAllocationSite increments allocation count.
    // Skip the increment.
    __ b(count_incremented);
  } else {
    __ Push(r4, original_constructor);
    __ CallRuntime(Runtime::kNewObject, 2);
    __ mr(result, r3);
    __ b(allocated);
  }
}


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_memento) {
  // ----------- S t a t e -------------
  //  -- r3     : number of arguments
  //  -- r4     : constructor function
  //  -- r5     : allocation site or undefined
  //  -- r6     : original constructor
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  // Should never create mementos for api functions.
  DCHECK(!is_api_function || !create_memento);

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);

    if (create_memento) {
      __ AssertUndefinedOrAllocationSite(r5, r7);
      __ push(r5);
    }

    // Preserve the two incoming parameters on the stack.
    __ SmiTag(r3);
    __ Push(r3, r4);

    Label rt_call, allocated, normal_new, count_incremented;
    __ cmp(r4, r6);
    __ beq(&normal_new);

    // Original constructor and function are different.
    Generate_Runtime_NewObject(masm, create_memento, r6, &count_incremented,
                               &allocated);
    __ bind(&normal_new);

    // Try to allocate the object without transitioning into C code. If any of
    // the preconditions is not met, the code bails out to the runtime call.
    if (FLAG_inline_new) {
      Label undo_allocation;
      ExternalReference debug_step_in_fp =
          ExternalReference::debug_step_in_fp_address(isolate);
      __ mov(r5, Operand(debug_step_in_fp));
      __ LoadP(r5, MemOperand(r5));
      __ cmpi(r5, Operand::Zero());
      __ bne(&rt_call);

      // Load the initial map and verify that it is in fact a map.
      // r4: constructor function
      __ LoadP(r5,
               FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
      __ JumpIfSmi(r5, &rt_call);
      __ CompareObjectType(r5, r6, r7, MAP_TYPE);
      __ bne(&rt_call);

      // Check that the constructor is not constructing a JSFunction (see
      // comments in Runtime_NewObject in runtime.cc). In which case the
      // initial map's instance type would be JS_FUNCTION_TYPE.
      // r4: constructor function
      // r5: initial map
      __ CompareInstanceType(r5, r6, JS_FUNCTION_TYPE);
      __ beq(&rt_call);

      if (!is_api_function) {
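        // "Slack tracking": for the first allocations made with this map, V8
        // over-allocates the object and later shrinks the instance size once
        // it has observed how many in-object properties are actually used.
        // The tracking counter lives in the map's bit field 3.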
        Label allocate;
        MemOperand bit_field3 = FieldMemOperand(r5, Map::kBitField3Offset);
        // Check if slack tracking is enabled.
        __ lwz(r7, bit_field3);
        __ DecodeField<Map::Counter>(r11, r7);
        __ cmpi(r11, Operand(Map::kSlackTrackingCounterEnd));
        __ blt(&allocate);
        // Decrease generous allocation count.
        __ Add(r7, r7, -(1 << Map::Counter::kShift), r0);
        __ stw(r7, bit_field3);
        __ cmpi(r11, Operand(Map::kSlackTrackingCounterEnd));
        __ bne(&allocate);

        __ push(r4);

        __ Push(r5, r4);  // r4 = constructor
        // The slack tracking counter just reached its end: finalize the
        // map's instance size.
        __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);

        __ Pop(r4, r5);

        __ bind(&allocate);
      }

      // Now allocate the JSObject on the heap.
      // r4: constructor function
      // r5: initial map
      __ lbz(r6, FieldMemOperand(r5, Map::kInstanceSizeOffset));
      if (create_memento) {
        __ addi(r6, r6, Operand(AllocationMemento::kSize / kPointerSize));
      }

      __ Allocate(r6, r7, r8, r9, &rt_call, SIZE_IN_WORDS);

      // Allocated the JSObject, now initialize the fields. Map is set to
      // initial map and properties and elements are set to empty fixed array.
      // r4: constructor function
      // r5: initial map
      // r6: object size (not including memento if create_memento)
      // r7: JSObject (not tagged)
      __ LoadRoot(r9, Heap::kEmptyFixedArrayRootIndex);
      __ mr(r8, r7);
      __ StoreP(r5, MemOperand(r8, JSObject::kMapOffset));
      __ StoreP(r9, MemOperand(r8, JSObject::kPropertiesOffset));
      __ StoreP(r9, MemOperand(r8, JSObject::kElementsOffset));
      __ addi(r8, r8, Operand(JSObject::kElementsOffset + kPointerSize));

      __ ShiftLeftImm(r9, r6, Operand(kPointerSizeLog2));
      __ add(r9, r7, r9);  // End of object.

      // Fill all the in-object properties with the appropriate filler.
      // r4: constructor function
      // r5: initial map
      // r6: object size (in words, including memento if create_memento)
      // r7: JSObject (not tagged)
      // r8: First in-object property of JSObject (not tagged)
      // r9: End of object
      DCHECK_EQ(3 * kPointerSize, JSObject::kHeaderSize);
      __ LoadRoot(r10, Heap::kUndefinedValueRootIndex);

      if (!is_api_function) {
        Label no_inobject_slack_tracking;

        // Check if slack tracking is enabled.
        __ cmpi(r11, Operand(Map::kSlackTrackingCounterEnd));
        __ blt(&no_inobject_slack_tracking);

        // Allocate object with a slack.
        __ lbz(r3, FieldMemOperand(r5, Map::kPreAllocatedPropertyFieldsOffset));
        if (FLAG_debug_code) {
          __ ShiftLeftImm(r0, r3, Operand(kPointerSizeLog2));
          __ add(r0, r8, r0);
          // r0: offset of first field after pre-allocated fields
          __ cmp(r0, r9);
          __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields);
        }
        Label no_field_fill;
        __ cmpi(r3, Operand::Zero());
        __ beq(&no_field_fill);
        __ InitializeNFieldsWithFiller(r8, r3, r10);
        __ bind(&no_field_fill);
        // To allow for truncation.
        __ LoadRoot(r10, Heap::kOnePointerFillerMapRootIndex);
        // Fill the remaining fields with the one-pointer filler map.

        __ bind(&no_inobject_slack_tracking);
      }

      if (create_memento) {
        __ subi(r3, r9, Operand(AllocationMemento::kSize));
        __ InitializeFieldsWithFiller(r8, r3, r10);

        // Fill in memento fields.
        // r8: points to the allocated but uninitialized memento.
        __ LoadRoot(r10, Heap::kAllocationMementoMapRootIndex);
        __ StoreP(r10, MemOperand(r8, AllocationMemento::kMapOffset));
        // Load the AllocationSite.
        __ LoadP(r10, MemOperand(sp, 2 * kPointerSize));
        __ StoreP(r10,
                  MemOperand(r8, AllocationMemento::kAllocationSiteOffset));
        __ addi(r8, r8, Operand(AllocationMemento::kAllocationSiteOffset +
                                kPointerSize));
      } else {
        __ InitializeFieldsWithFiller(r8, r9, r10);
      }

      // Add the object tag to make the JSObject real, so that we can continue
      // and jump into the continuation code at any time from now on. Any
      // failures need to undo the allocation, so that the heap is in a
      // consistent state and verifiable.
      __ addi(r7, r7, Operand(kHeapObjectTag));

      // Check if a non-empty properties array is needed. Continue with
      // allocated object if not; fall through to runtime call if it is.
      // r4: constructor function
      // r7: JSObject
      // r8: start of next object (not tagged)
      __ lbz(r6, FieldMemOperand(r5, Map::kUnusedPropertyFieldsOffset));
      // The field instance sizes contains both pre-allocated property fields
      // and in-object properties.
      __ lbz(r0, FieldMemOperand(r5, Map::kPreAllocatedPropertyFieldsOffset));
      __ add(r6, r6, r0);
      __ lbz(r0, FieldMemOperand(r5, Map::kInObjectPropertiesOffset));
      __ sub(r6, r6, r0, LeaveOE, SetRC);

      // Done if no extra properties are to be allocated.
      __ beq(&allocated, cr0);
      __ Assert(ge, kPropertyAllocationCountFailed, cr0);

      // Scale the number of elements by pointer size and add the header for
      // FixedArrays to the start of the next object calculation from above.
      // r4: constructor function
      // r6: number of elements in properties array
      // r7: JSObject
      // r8: start of next object
      __ addi(r3, r6, Operand(FixedArray::kHeaderSize / kPointerSize));
      __ Allocate(
          r3, r8, r9, r5, &undo_allocation,
          static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | SIZE_IN_WORDS));

      // Initialize the FixedArray.
      // r4: constructor function
      // r6: number of elements in properties array
      // r7: JSObject
      // r8: FixedArray (not tagged)
      __ LoadRoot(r9, Heap::kFixedArrayMapRootIndex);
      __ mr(r5, r8);
      DCHECK_EQ(0 * kPointerSize, JSObject::kMapOffset);
      __ StoreP(r9, MemOperand(r5));
      DCHECK_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
      __ SmiTag(r3, r6);
      __ StoreP(r3, MemOperand(r5, kPointerSize));
      __ addi(r5, r5, Operand(2 * kPointerSize));

      // Initialize the fields to undefined.
      // r4: constructor function
      // r5: First element of FixedArray (not tagged)
      // r6: number of elements in properties array
      // r7: JSObject
      // r8: FixedArray (not tagged)
      DCHECK_EQ(2 * kPointerSize, FixedArray::kHeaderSize);

      Label done;
      __ cmpi(r6, Operand::Zero());
      __ beq(&done);
      if (!is_api_function || create_memento) {
        __ LoadRoot(r10, Heap::kUndefinedValueRootIndex);
      } else if (FLAG_debug_code) {
        __ LoadRoot(r11, Heap::kUndefinedValueRootIndex);
        __ cmp(r10, r11);
        __ Assert(eq, kUndefinedValueNotLoaded);
      }
      __ InitializeNFieldsWithFiller(r5, r6, r10);
      __ bind(&done);

      // Store the initialized FixedArray into the properties field of
      // the JSObject.
      // r4: constructor function
      // r7: JSObject
      // r8: FixedArray (not tagged)
      __ addi(r8, r8, Operand(kHeapObjectTag));  // Add the heap tag.
      __ StoreP(r8, FieldMemOperand(r7, JSObject::kPropertiesOffset), r0);

      // Continue with JSObject being successfully allocated.
      // r7: JSObject
      __ b(&allocated);

      // Undo the setting of the new top so that the heap is verifiable. For
      // example, the map's unused properties potentially do not match the
      // allocated object's unused properties.
      // r7: JSObject (previous new top)
      __ bind(&undo_allocation);
      __ UndoAllocationInNewSpace(r7, r8);
    }

    // Allocate the new receiver object using the runtime call.
    // r4: constructor function
    __ bind(&rt_call);
    Generate_Runtime_NewObject(masm, create_memento, r4, &count_incremented,
                               &allocated);

    // Receiver for constructor call allocated.
    // r7: JSObject
    __ bind(&allocated);

    if (create_memento) {
      __ LoadP(r5, MemOperand(sp, kPointerSize * 2));
      __ LoadRoot(r8, Heap::kUndefinedValueRootIndex);
      __ cmp(r5, r8);
      __ beq(&count_incremented);
      // r5 is an AllocationSite. We are creating a memento from it, so we
      // need to increment the memento create count.
      __ LoadP(
          r6, FieldMemOperand(r5, AllocationSite::kPretenureCreateCountOffset));
      __ AddSmiLiteral(r6, r6, Smi::FromInt(1), r0);
      __ StoreP(
          r6, FieldMemOperand(r5, AllocationSite::kPretenureCreateCountOffset),
          r0);
      __ bind(&count_incremented);
    }

    __ Push(r7, r7);

    // Reload the number of arguments and the constructor from the stack.
    // sp[0]: receiver (newly allocated object)
    // sp[1]: receiver (newly allocated object)
    // sp[2]: constructor function
    // sp[3]: number of arguments (smi-tagged)
    __ LoadP(r4, MemOperand(sp, 2 * kPointerSize));
    __ LoadP(r6, MemOperand(sp, 3 * kPointerSize));

    // Set up pointer to last argument.
    __ addi(r5, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Set up number of arguments for function call below.
    __ SmiUntag(r3, r6);

    // Copy arguments and receiver to the expression stack.
    // r3: number of arguments
    // r4: constructor function
    // r5: address of last argument (caller sp)
    // r6: number of arguments (smi-tagged)
    // sp[0]: receiver (newly allocated object)
    // sp[1]: receiver (newly allocated object)
    // sp[2]: constructor function
    // sp[3]: number of arguments (smi-tagged)
    Label loop, no_args;
    __ cmpi(r3, Operand::Zero());
    __ beq(&no_args);
    __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
    __ mtctr(r3);
    __ bind(&loop);
    __ subi(ip, ip, Operand(kPointerSize));
    __ LoadPX(r0, MemOperand(r5, ip));
    __ push(r0);
    __ bdnz(&loop);
    __ bind(&no_args);

    // Call the function.
    // r3: number of arguments
    // r4: constructor function
    if (is_api_function) {
      __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
      Handle<Code> code = masm->isolate()->builtins()->HandleApiCallConstruct();
      __ Call(code, RelocInfo::CODE_TARGET);
    } else {
      ParameterCount actual(r3);
      __ InvokeFunction(r4, actual, CALL_FUNCTION, NullCallWrapper());
    }

    // Store offset of return address for deoptimizer.
    if (!is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    // r3: result
    // sp[0]: receiver
    // sp[1]: constructor function
    // sp[2]: number of arguments (smi-tagged)
    __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7
    // on page 74.
    Label use_receiver, exit;

    // If the result is a smi, it is *not* an object in the ECMA sense.
    // r3: result
    // sp[0]: receiver (newly allocated object)
    // sp[1]: constructor function
    // sp[2]: number of arguments (smi-tagged)
    __ JumpIfSmi(r3, &use_receiver);

    // If the type of the result (stored in its map) is less than
    // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
    __ CompareObjectType(r3, r4, r6, FIRST_SPEC_OBJECT_TYPE);
    __ bge(&exit);

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ LoadP(r3, MemOperand(sp));

    // Remove receiver from the stack, remove caller arguments, and
    // return.
    __ bind(&exit);
    // r3: result
    // sp[0]: receiver (newly allocated object)
    // sp[1]: constructor function
    // sp[2]: number of arguments (smi-tagged)
    __ LoadP(r4, MemOperand(sp, 2 * kPointerSize));

    // Leave construct frame.
  }

  __ SmiToPtrArrayOffset(r4, r4);
  __ add(sp, sp, r4);
  __ addi(sp, sp, Operand(kPointerSize));
  __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r4, r5);
  __ blr();
}


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false);
}
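

// Construct stub used when the original constructor differs from the
// function being invoked (e.g. super() calls in derived class constructors).
// No receiver is allocated here; the hole value is pushed in its place.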
void Builtins::Generate_JSConstructStubForDerived(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3     : number of arguments
  //  -- r4     : constructor function
  //  -- r5     : allocation site or undefined
  //  -- r6     : original constructor
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  // TODO(dslomov): support pretenuring
  CHECK(!FLAG_pretenuring_call_new);

  {
    FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);

    // Smi-tagged arguments count.
    __ mr(r7, r3);
    __ SmiTag(r7, SetRC);

    // The receiver is the hole.
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
    __ Push(r7, ip);

    // Set up pointer to last argument.
    __ addi(r5, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    // r3: number of arguments
    // r4: constructor function
    // r5: address of last argument (caller sp)
    // r7: number of arguments (smi-tagged)
    // cr0: set by the SmiTag above; eq iff there are zero arguments
    // sp[0]: receiver
    // sp[1]: number of arguments (smi-tagged)
    Label loop, no_args;
    __ beq(&no_args, cr0);
    __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
    __ mtctr(r3);
    __ bind(&loop);
    __ subi(ip, ip, Operand(kPointerSize));
    __ LoadPX(r0, MemOperand(r5, ip));
    __ push(r0);
    __ bdnz(&loop);
    __ bind(&no_args);

    // Call the function.
    // r3: number of arguments
    // r4: constructor function
    ParameterCount actual(r3);
    __ InvokeFunction(r4, actual, CALL_FUNCTION, NullCallWrapper());

    // Restore context from the frame.
    // r3: result
    // sp[0]: number of arguments (smi-tagged)
    __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
    __ LoadP(r4, MemOperand(sp, 0));

    // Leave construct frame.
  }

  __ SmiToPtrArrayOffset(r4, r4);
  __ add(sp, sp, r4);
  __ addi(sp, sp, Operand(kPointerSize));
  __ blr();
}


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from Generate_JS_Entry
  // r3: code entry
  // r4: function
  // r5: receiver
  // r6: argc
  // r7: argv
  // r0,r8-r9, cp may be clobbered
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Clear the context before we push it when entering the internal frame.
  __ li(cp, Operand::Zero());

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context from the function argument.
    __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));

    __ InitializeRootRegister();

    // Push the function and the receiver onto the stack.
    __ Push(r4, r5);

    // Copy arguments to the stack in a loop.
    // r4: function
    // r6: argc
    // r7: argv, i.e. points to first arg
    Label loop, entry;
    __ ShiftLeftImm(r0, r6, Operand(kPointerSizeLog2));
    __ add(r5, r7, r0);
    // r5 points past last arg.
    __ b(&entry);
    __ bind(&loop);
    __ LoadP(r8, MemOperand(r7));  // read next parameter
    __ addi(r7, r7, Operand(kPointerSize));
    __ LoadP(r0, MemOperand(r8));  // dereference handle
    __ push(r0);                   // push parameter
    __ bind(&entry);
    __ cmp(r7, r5);
    __ bne(&loop);

    // Initialize all JavaScript callee-saved registers, since they will be seen
    // by the garbage collector as part of handlers.
    __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
    __ mr(r14, r7);
    __ mr(r15, r7);
    __ mr(r16, r7);
    __ mr(r17, r7);

    // Invoke the code and pass argc as r3.
    __ mr(r3, r6);
    if (is_construct) {
      // No type feedback cell is available
      __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
      CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
      __ CallStub(&stub);
    } else {
      ParameterCount actual(r3);
      __ InvokeFunction(r4, actual, CALL_FUNCTION, NullCallWrapper());
    }
    // Exit the JS frame and remove the parameters (except function), and
    // return.
  }
  __ blr();

  // r3: result
}
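

// The two trampolines below are the targets of the JSEntry stub: they receive
// a raw argc/argv pair from C++ code, copy the arguments (dereferencing the
// handles) onto the JS stack, and invoke the function either as a normal call
// or as a construct call.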
void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}


void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kCompileLazy);
  GenerateTailCallToReturnedCode(masm);
}


static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack.
  // Push function as parameter to the runtime call.
  __ Push(r4, r4);
  // Whether to compile in a background thread.
  __ Push(masm->isolate()->factory()->ToBoolean(concurrent));

  __ CallRuntime(Runtime::kCompileOptimized, 2);
  // Restore receiver.
  __ pop(r4);
}


void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  CallCompileOptimized(masm, false);
  GenerateTailCallToReturnedCode(masm);
}


void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  CallCompileOptimized(masm, true);
  GenerateTailCallToReturnedCode(masm);
}


static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // Point r3 at the start of the PlatformCodeAge sequence.
  __ mr(r3, ip);

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   r3 - contains return address (beginning of patch sequence)
  //   r4 - isolate
  //   lr - return address
  FrameScope scope(masm, StackFrame::MANUAL);
  __ mflr(r0);
  __ MultiPush(r0.bit() | r3.bit() | r4.bit() | fp.bit());
  __ PrepareCallCFunction(2, 0, r5);
  __ mov(r4, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  __ MultiPop(r0.bit() | r3.bit() | r4.bit() | fp.bit());
  __ mtlr(r0);
  __ mr(ip, r3);
  __ Jump(ip);
}


#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                  \
  void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }                                                           \
  void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(  \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
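
// CODE_AGE_LIST is an X-macro: it invokes the generator above once per code
// age, so each age gets an even-marking and an odd-marking builtin.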


void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // Point r3 at the start of the PlatformCodeAge sequence.
  __ mr(r3, ip);

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   r3 - contains return address (beginning of patch sequence)
  //   r4 - isolate
  //   lr - return address
  FrameScope scope(masm, StackFrame::MANUAL);
  __ mflr(r0);
  __ MultiPush(r0.bit() | r3.bit() | r4.bit() | fp.bit());
  __ PrepareCallCFunction(2, 0, r5);
  __ mov(r4, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
      2);
  __ MultiPop(r0.bit() | r3.bit() | r4.bit() | fp.bit());
  __ mtlr(r0);
  __ mr(ip, r3);

  // Perform prologue operations usually performed by the young code stub.
  __ PushFixedFrame(r4);
  __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));

  // Jump to point after the code-age stub.
  __ addi(r3, ip, Operand(kNoCodeAgeSequenceLength));
  __ Jump(r3);
}


void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}


static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    __ MultiPush(kJSCallerSaved | kCalleeSaved);
    // Pass the function and deoptimization type to the runtime system.
    __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
    __ MultiPop(kJSCallerSaved | kCalleeSaved);
  }

  __ addi(sp, sp, Operand(kPointerSize));  // Ignore state
  __ blr();                                // Jump to miss handler
}


void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}


void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}


static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Pass the function and deoptimization type to the runtime system.
    __ LoadSmiLiteral(r3, Smi::FromInt(static_cast<int>(type)));
    __ push(r3);
    __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
  }

  // Get the full codegen state from the stack and untag it -> r9.
  __ LoadP(r9, MemOperand(sp, 0 * kPointerSize));
  __ SmiUntag(r9);
  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ cmpi(r9, Operand(FullCodeGenerator::NO_REGISTERS));
  __ bne(&with_tos_register);
  __ addi(sp, sp, Operand(1 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&with_tos_register);
  __ LoadP(r3, MemOperand(sp, 1 * kPointerSize));
  __ cmpi(r9, Operand(FullCodeGenerator::TOS_REG));
  __ bne(&unknown_state);
  __ addi(sp, sp, Operand(2 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&unknown_state);
  __ stop("no cases left");
}


void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}


void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ push(r3);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
  }

  // If the code object is null, just return to the unoptimized code.
  Label skip;
  __ CmpSmiLiteral(r3, Smi::FromInt(0), r0);
  __ bne(&skip);
  __ Ret();

  __ bind(&skip);

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ LoadP(r4, FieldMemOperand(r3, Code::kDeoptimizationDataOffset));

#if V8_OOL_CONSTANT_POOL
  {
    ConstantPoolUnavailableScope constant_pool_unavailable(masm);
    __ LoadP(kConstantPoolRegister,
             FieldMemOperand(r3, Code::kConstantPoolOffset));
#endif

    // Load the OSR entrypoint offset from the deoptimization data.
    // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
    __ LoadP(r4, FieldMemOperand(
                     r4, FixedArray::OffsetOfElementAt(
                             DeoptimizationInputData::kOsrPcOffsetIndex)));
    __ SmiUntag(r4);
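
    // The OSR pc offset is stored as a smi in the DeoptimizationInputData
    // fixed array, hence the untag above before it is used as a byte offset.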

    // Compute the target address = code_obj + header_size + osr_offset
    // <entry_addr> = <code_obj> + #header_size + <osr_offset>
    __ addi(r0, r3, Operand(Code::kHeaderSize - kHeapObjectTag));
    __ add(r0, r0, r4);

    // And "return" to the OSR entry point of the function.
    __ mtlr(r0);
#if V8_OOL_CONSTANT_POOL
  }
#endif
  __ blr();
}


void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  // We check the stack limit as an indicator that recompilation might be done.
  Label ok;
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  __ cmpl(sp, ip);
  __ bge(&ok);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kStackGuard, 0);
  }
  __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
          RelocInfo::CODE_TARGET);

  __ bind(&ok);
  __ Ret();
}


void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // r3: actual number of arguments
  {
    Label done;
    __ cmpi(r3, Operand::Zero());
    __ bne(&done);
    __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
    __ push(r5);
    __ addi(r3, r3, Operand(1));
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack, check
  //    if it is a function.
  // r3: actual number of arguments
  Label slow, non_function;
  __ ShiftLeftImm(r4, r3, Operand(kPointerSizeLog2));
  __ add(r4, sp, r4);
  __ LoadP(r4, MemOperand(r4));
  __ JumpIfSmi(r4, &non_function);
  __ CompareObjectType(r4, r5, r5, JS_FUNCTION_TYPE);
  __ bne(&slow);

  // 3a. Patch the first argument if necessary when calling a function.
  // r3: actual number of arguments
  // r4: function
  Label shift_arguments;
  __ li(r7, Operand::Zero());  // indicate regular JS_FUNCTION
  {
    Label convert_to_object, use_global_proxy, patch_receiver;
    // Change context eagerly in case we need the global receiver.
    __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));

    // Do not transform the receiver for strict mode functions.
    __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
    __ lwz(r6, FieldMemOperand(r5, SharedFunctionInfo::kCompilerHintsOffset));
    __ TestBit(r6,
#if V8_TARGET_ARCH_PPC64
               SharedFunctionInfo::kStrictModeFunction,
#else
               SharedFunctionInfo::kStrictModeFunction + kSmiTagSize,
#endif
               r0);
    __ bne(&shift_arguments, cr0);

    // Do not transform the receiver for native (compiler hints already in r6).
    __ TestBit(r6,
#if V8_TARGET_ARCH_PPC64
               SharedFunctionInfo::kNative,
#else
               SharedFunctionInfo::kNative + kSmiTagSize,
#endif
               r0);
    __ bne(&shift_arguments, cr0);

    // Compute the receiver in sloppy mode.
    __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
    __ add(r5, sp, ip);
    __ LoadP(r5, MemOperand(r5, -kPointerSize));
    // r3: actual number of arguments
    // r4: function
    // r5: first argument
    __ JumpIfSmi(r5, &convert_to_object);

    __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
    __ cmp(r5, r6);
    __ beq(&use_global_proxy);
    __ LoadRoot(r6, Heap::kNullValueRootIndex);
    __ cmp(r5, r6);
    __ beq(&use_global_proxy);

    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CompareObjectType(r5, r6, r6, FIRST_SPEC_OBJECT_TYPE);
    __ bge(&shift_arguments);

    __ bind(&convert_to_object);

    {
      // Enter an internal frame in order to preserve argument count.
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ SmiTag(r3);
      __ Push(r3, r5);
      __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
      __ mr(r5, r3);

      __ pop(r3);
      __ SmiUntag(r3);

      // Exit the internal frame.
    }

    // Restore the function to r4, and the flag to r7.
    __ ShiftLeftImm(r7, r3, Operand(kPointerSizeLog2));
    __ add(r7, sp, r7);
    __ LoadP(r4, MemOperand(r7));
    __ li(r7, Operand::Zero());
    __ b(&patch_receiver);

    __ bind(&use_global_proxy);
    __ LoadP(r5, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
    __ LoadP(r5, FieldMemOperand(r5, GlobalObject::kGlobalProxyOffset));

    __ bind(&patch_receiver);
    __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
    __ add(r6, sp, ip);
    __ StoreP(r5, MemOperand(r6, -kPointerSize));

    __ b(&shift_arguments);
  }

  // 3b. Check for function proxy.
  __ bind(&slow);
  __ li(r7, Operand(1, RelocInfo::NONE32));  // indicate function proxy
  __ cmpi(r5, Operand(JS_FUNCTION_PROXY_TYPE));
  __ beq(&shift_arguments);
  __ bind(&non_function);
  __ li(r7, Operand(2, RelocInfo::NONE32));  // indicate non-function

  // 3c. Patch the first argument when calling a non-function.  The
  //     CALL_NON_FUNCTION builtin expects the non-function callee as
  //     receiver, so overwrite the first argument which will ultimately
  //     become the receiver.
  // r3: actual number of arguments
  // r4: function
  // r7: call type (0: JS function, 1: function proxy, 2: non-function)
  __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
  __ add(r5, sp, ip);
  __ StoreP(r4, MemOperand(r5, -kPointerSize));

  // 4. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver).  Adjust argument count to make
  //    the original first argument the new receiver.
  // r3: actual number of arguments
  // r4: function
  // r7: call type (0: JS function, 1: function proxy, 2: non-function)
  __ bind(&shift_arguments);
  {
    Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
    __ add(r5, sp, ip);

    __ bind(&loop);
    __ LoadP(ip, MemOperand(r5, -kPointerSize));
    __ StoreP(ip, MemOperand(r5));
    __ subi(r5, r5, Operand(kPointerSize));
    __ cmp(r5, sp);
    __ bne(&loop);
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ subi(r3, r3, Operand(1));
    __ pop();
  }

  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
  //     or a function proxy via CALL_FUNCTION_PROXY.
  // r3: actual number of arguments
  // r4: function
  // r7: call type (0: JS function, 1: function proxy, 2: non-function)
  {
    Label function, non_proxy;
    __ cmpi(r7, Operand::Zero());
    __ beq(&function);
    // Expected number of arguments is 0 for CALL_NON_FUNCTION.
    __ li(r5, Operand::Zero());
    __ cmpi(r7, Operand(1));
    __ bne(&non_proxy);

    __ push(r4);  // re-add proxy object as additional argument
    __ addi(r3, r3, Operand(1));
    __ GetBuiltinFunction(r4, Builtins::CALL_FUNCTION_PROXY);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    __ bind(&non_proxy);
    __ GetBuiltinFunction(r4, Builtins::CALL_NON_FUNCTION);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
    __ bind(&function);
  }

  // 5b. Get the code to call from the function and check that the number of
  //     expected arguments matches what we're providing.  If so, jump
  //     (tail-call) to the code in register ip without checking arguments.
  // r3: actual number of arguments
  // r4: function
  __ LoadP(r6, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ LoadWordArith(
      r5, FieldMemOperand(r6, SharedFunctionInfo::kFormalParameterCountOffset));
#if !V8_TARGET_ARCH_PPC64
  __ SmiUntag(r5);
#endif
  __ cmp(r5, r3);  // Check formal and actual parameter counts.
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET, ne);

  __ LoadP(ip, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
  ParameterCount expected(0);
  __ InvokeCode(ip, expected, expected, JUMP_FUNCTION, NullCallWrapper());
}


void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  const int kIndexOffset =
      StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize);
  const int kLimitOffset =
      StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize);
  const int kArgsOffset = 2 * kPointerSize;
  const int kRecvOffset = 3 * kPointerSize;
  const int kFunctionOffset = 4 * kPointerSize;
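
  // These offsets are relative to the fp of the internal frame set up below:
  // kFunctionOffset, kRecvOffset and kArgsOffset address the apply call's
  // function, receiver and arguments object above the frame, while
  // kIndexOffset and kLimitOffset are expression-stack slots that hold the
  // current copy index and the argument count.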

  {
    FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL);

    __ LoadP(r3, MemOperand(fp, kFunctionOffset));  // get the function
    __ push(r3);
    __ LoadP(r3, MemOperand(fp, kArgsOffset));  // get the args array
    __ push(r3);
    __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);

    // Check the stack for overflow. We are not trying to catch
    // interruptions (e.g. debug break and preemption) here, so the "real stack
    // limit" is checked.
    Label okay;
    __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
    // Make r5 the space we have left. The stack might already be overflowed
    // here which will cause r5 to become negative.
    __ sub(r5, sp, r5);
    // Check if the arguments will overflow the stack.
    __ SmiToPtrArrayOffset(r0, r3);
    __ cmp(r5, r0);
    __ bgt(&okay);  // Signed comparison.

    // Out of stack space.
    __ LoadP(r4, MemOperand(fp, kFunctionOffset));
    __ Push(r4, r3);
    __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
    // End of stack check.

    // Push current limit and index.
    __ bind(&okay);
    __ li(r4, Operand::Zero());
    __ Push(r3, r4);  // limit and initial index.

    // Get the receiver.
    __ LoadP(r3, MemOperand(fp, kRecvOffset));

    // Check that the function is a JS function (otherwise it must be a proxy).
    Label push_receiver;
    __ LoadP(r4, MemOperand(fp, kFunctionOffset));
    __ CompareObjectType(r4, r5, r5, JS_FUNCTION_TYPE);
    __ bne(&push_receiver);

    // Change context eagerly to get the right global object if necessary.
    __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
    // Load the shared function info while the function is still in r4.
    __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));

    // Compute the receiver.
    // Do not transform the receiver for strict mode functions.
    Label call_to_object, use_global_proxy;
    __ lwz(r5, FieldMemOperand(r5, SharedFunctionInfo::kCompilerHintsOffset));
    __ TestBit(r5,
#if V8_TARGET_ARCH_PPC64
               SharedFunctionInfo::kStrictModeFunction,
#else
               SharedFunctionInfo::kStrictModeFunction + kSmiTagSize,
#endif
               r0);
    __ bne(&push_receiver, cr0);

    // Do not transform the receiver for native functions.
    __ TestBit(r5,
#if V8_TARGET_ARCH_PPC64
               SharedFunctionInfo::kNative,
#else
               SharedFunctionInfo::kNative + kSmiTagSize,
#endif
               r0);
    __ bne(&push_receiver, cr0);

    // Compute the receiver in sloppy mode.
    __ JumpIfSmi(r3, &call_to_object);
    __ LoadRoot(r4, Heap::kNullValueRootIndex);
    __ cmp(r3, r4);
    __ beq(&use_global_proxy);
    __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
    __ cmp(r3, r4);
    __ beq(&use_global_proxy);

    // Check if the receiver is already a JavaScript object.
    // r3: receiver
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CompareObjectType(r3, r4, r4, FIRST_SPEC_OBJECT_TYPE);
    __ bge(&push_receiver);

    // Convert the receiver to a regular object.
    // r3: receiver
    __ bind(&call_to_object);
    __ push(r3);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
    __ b(&push_receiver);

    __ bind(&use_global_proxy);
    __ LoadP(r3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
    __ LoadP(r3, FieldMemOperand(r3, GlobalObject::kGlobalProxyOffset));

    // Push the receiver.
    // r3: receiver
    __ bind(&push_receiver);
    __ push(r3);

    // Copy all arguments from the array to the stack.
    Label entry, loop;
    __ LoadP(r3, MemOperand(fp, kIndexOffset));
    __ b(&entry);

    // Load the current argument from the arguments array and push it to the
    // stack.
    // r3: current argument index
    __ bind(&loop);
    __ LoadP(r4, MemOperand(fp, kArgsOffset));
    __ Push(r4, r3);

    // Call the runtime to access the property in the arguments array.
    __ CallRuntime(Runtime::kGetProperty, 2);
    __ push(r3);

    // Update the index on the stack and in register r3.
    __ LoadP(r3, MemOperand(fp, kIndexOffset));
    __ AddSmiLiteral(r3, r3, Smi::FromInt(1), r0);
    __ StoreP(r3, MemOperand(fp, kIndexOffset));

    // Test if the copy loop has finished copying all the elements from the
    // arguments object.
    __ bind(&entry);
    __ LoadP(r4, MemOperand(fp, kLimitOffset));
    __ cmp(r3, r4);
    __ bne(&loop);

    // Call the function.
    Label call_proxy;
    ParameterCount actual(r3);
    __ SmiUntag(r3);
    __ LoadP(r4, MemOperand(fp, kFunctionOffset));
    __ CompareObjectType(r4, r5, r5, JS_FUNCTION_TYPE);
    __ bne(&call_proxy);
    __ InvokeFunction(r4, actual, CALL_FUNCTION, NullCallWrapper());

    __ LeaveFrame(StackFrame::INTERNAL, 3 * kPointerSize);
    __ blr();

    // Call the function proxy.
    __ bind(&call_proxy);
    __ push(r4);  // add function proxy as last argument
    __ addi(r3, r3, Operand(1));
    __ li(r5, Operand::Zero());
    __ GetBuiltinFunction(r4, Builtins::CALL_FUNCTION_PROXY);
    __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    // Tear down the internal frame and remove function, receiver and args.
  }
  __ addi(sp, sp, Operand(3 * kPointerSize));
  __ blr();
}


static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
                                      Label* stack_overflow) {
  // ----------- S t a t e -------------
  //  -- r3 : actual number of arguments
  //  -- r4 : function (passed through to callee)
  //  -- r5 : expected number of arguments
  // -----------------------------------
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  __ LoadRoot(r8, Heap::kRealStackLimitRootIndex);
  // Make r8 the space we have left. The stack might already be overflowed
  // here which will cause r8 to become negative.
  __ sub(r8, sp, r8);
  // Check if the arguments will overflow the stack.
  __ ShiftLeftImm(r0, r5, Operand(kPointerSizeLog2));
  __ cmp(r8, r0);
  __ ble(stack_overflow);  // Signed comparison.
}


static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ SmiTag(r3);
  __ LoadSmiLiteral(r7, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ mflr(r0);
  __ push(r0);
#if V8_OOL_CONSTANT_POOL
  __ Push(fp, kConstantPoolRegister, r7, r4, r3);
#else
  __ Push(fp, r7, r4, r3);
#endif
  __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                          kPointerSize));
}


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then tear down the parameters.
  __ LoadP(r4, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                                kPointerSize)));
  int stack_adjustment = kPointerSize;  // adjust for receiver
  __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR, stack_adjustment);
  __ SmiToPtrArrayOffset(r0, r4);
  __ add(sp, sp, r0);
}


void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : actual number of arguments
  //  -- r4 : function (passed through to callee)
  //  -- r5 : expected number of arguments
  // -----------------------------------

  Label stack_overflow;
  ArgumentAdaptorStackCheck(masm, &stack_overflow);
  Label invoke, dont_adapt_arguments;

  Label enough, too_few;
  __ LoadP(ip, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
  __ cmp(r3, r5);
  __ blt(&too_few);
  __ cmpi(r5, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ beq(&dont_adapt_arguments);

  {  // Enough parameters: actual >= expected
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into r3 and copy end address into r5.
    // r3: actual number of arguments as a smi
    // r4: function
    // r5: expected number of arguments
    // ip: code entry to call
    __ SmiToPtrArrayOffset(r3, r3);
    __ add(r3, r3, fp);
    // adjust for return address and receiver
    __ addi(r3, r3, Operand(2 * kPointerSize));
    __ ShiftLeftImm(r5, r5, Operand(kPointerSizeLog2));
    __ sub(r5, r3, r5);

    // Copy the arguments (including the receiver) to the new stack frame.
    // r3: copy start address
    // r4: function
    // r5: copy end address
    // ip: code entry to call

    Label copy;
    __ bind(&copy);
    __ LoadP(r0, MemOperand(r3, 0));
    __ push(r0);
    __ cmp(r3, r5);  // Compare before moving to next argument.
    __ subi(r3, r3, Operand(kPointerSize));
    __ bne(&copy);

    __ b(&invoke);
  }

  {  // Too few parameters: Actual < expected
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into r3; the copy end address is fp.
    // r3: actual number of arguments as a smi
    // r4: function
    // r5: expected number of arguments
    // ip: code entry to call
    __ SmiToPtrArrayOffset(r3, r3);
    __ add(r3, r3, fp);

    // Copy the arguments (including the receiver) to the new stack frame.
    // r3: copy start address
    // r4: function
    // r5: expected number of arguments
    // ip: code entry to call
    Label copy;
    __ bind(&copy);
    // Adjust load for return address and receiver.
    __ LoadP(r0, MemOperand(r3, 2 * kPointerSize));
    __ push(r0);
    __ cmp(r3, fp);  // Compare before moving to next argument.
    __ subi(r3, r3, Operand(kPointerSize));
    __ bne(&copy);

    // Fill the remaining expected arguments with undefined.
    // r4: function
    // r5: expected number of arguments
    // ip: code entry to call
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
    __ ShiftLeftImm(r5, r5, Operand(kPointerSizeLog2));
    __ sub(r5, fp, r5);
    // Adjust for frame.
    __ subi(r5, r5, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                            2 * kPointerSize));

    Label fill;
    __ bind(&fill);
    __ push(r0);
    __ cmp(sp, r5);
    __ bne(&fill);
  }

  // Call the entry point.
  __ bind(&invoke);
  __ CallJSEntry(ip);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ blr();


  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ JumpToJSEntry(ip);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    EnterArgumentsAdaptorFrame(masm);
    __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
    __ bkpt(0);
  }
}


#undef __
} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_PPC