// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_MIPS

#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen/full-codegen.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)
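
// Every `__ foo(...)` below expands to `masm->foo(...)` via the ACCESS_MASM
// macro, so each statement emits one or more MIPS instructions into the
// MacroAssembler's buffer.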


void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- a0                 : number of arguments excluding receiver
  //  -- a1                 : called function (only guaranteed when
  //  --                      extra_args requires it)
  //  -- cp                 : context
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------

  // Insert extra arguments.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    num_extra_args = 1;
    __ push(a1);
  } else {
    DCHECK(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects a0 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ Addu(a0, a0, num_extra_args + 1);
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}
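
// The adaptor is the generic bridge from JS calls into C++ builtins: push any
// required extra arguments, fix up the argument count in a0, and tail-jump to
// the C++ entry point. A rough sketch of the stack seen by the C++ side in the
// NEEDS_CALLED_FUNCTION case, inferred from the state comment above:
//
//   sp[0]              : called function (the extra argument)
//   sp[1 * 4]          : last JS argument
//   ...
//   sp[argc * 4]       : first JS argument
//   sp[(argc + 1) * 4] : receiver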


// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the native context.
  __ lw(result,
        MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ lw(result,
        FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the InternalArray function from the native context.
  __ lw(result,
        MemOperand(result,
                   Context::SlotOffset(
                       Context::INTERNAL_ARRAY_FUNCTION_INDEX)));
}


// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the native context.
  __ lw(result,
        MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ lw(result,
        FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the Array function from the native context.
  __ lw(result,
        MemOperand(result,
                   Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
}
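
// Both loaders follow the same two-hop pattern: current context -> global
// object -> native context, then a fixed function slot. Context::SlotOffset
// just converts a slot index into a byte offset, roughly
// Context::kHeaderSize + index * kPointerSize - kHeapObjectTag.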


void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, a1);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(a2, t0);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction,
              t0, Operand(zero_reg));
    __ GetObjectType(a2, a3, t0);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction,
              t0, Operand(MAP_TYPE));
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // Tail call a stub.
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, a1);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(a2, t0);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction1,
              t0, Operand(zero_reg));
    __ GetObjectType(a2, a3, t0);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction2,
              t0, Operand(MAP_TYPE));
  }

  // Run the native code for the Array function called as a normal function.
  // Tail call a stub.
  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}
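
// In both array builtins the FLAG_debug_code block checks the same invariant
// two ways: SmiTst writes zero to t0 for a smi, so the `ne` assert rejects smi
// initial maps, and the GetObjectType/`eq` assert then requires MAP_TYPE. The
// constructor stubs do the actual argc-based dispatch.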


void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : constructor function
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Load the first argument into a0 and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  {
    __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
    __ Subu(a0, a0, Operand(1));
    __ sll(a0, a0, kPointerSizeLog2);
    __ Addu(sp, a0, sp);
    __ lw(a0, MemOperand(sp));
    __ Drop(2);
  }

  // 2a. At least one argument, return a0 if it's a string, otherwise
  // dispatch to appropriate conversion.
  Label to_string, symbol_descriptive_string;
  {
    __ JumpIfSmi(a0, &to_string);
    __ GetObjectType(a0, a1, a1);
    STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
    __ Subu(a1, a1, Operand(FIRST_NONSTRING_TYPE));
    __ Branch(&symbol_descriptive_string, eq, a1, Operand(zero_reg));
    __ Branch(&to_string, gt, a1, Operand(zero_reg));
    __ Ret(USE_DELAY_SLOT);
    __ mov(v0, a0);
  }

  // 2b. No arguments, return the empty string (and pop the receiver).
  __ bind(&no_arguments);
  {
    __ LoadRoot(v0, Heap::kempty_stringRootIndex);
    __ DropAndRet(1);
  }

  // 3a. Convert a0 to a string.
  __ bind(&to_string);
  {
    ToStringStub stub(masm->isolate());
    __ TailCallStub(&stub);
  }

  // 3b. Convert symbol in a0 to a string.
  __ bind(&symbol_descriptive_string);
  {
    __ Push(a0);
    __ TailCallRuntime(Runtime::kSymbolDescriptiveString, 1, 1);
  }
}
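
// Note the MIPS branch-delay-slot idiom used above: `__ Ret(USE_DELAY_SLOT)`
// followed by `__ mov(v0, a0)` puts the move into the delay slot of the
// emitted `jr ra`, so it still executes before control leaves the builtin:
//
//   jr   ra
//   mov  v0, a0    # delay slot: set the return value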


void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : constructor function
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Load the first argument into a0 and get rid of the rest (including the
  // receiver).
  {
    Label no_arguments, done;
    __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
    __ Subu(a0, a0, Operand(1));
    __ sll(a0, a0, kPointerSizeLog2);
    __ Addu(sp, a0, sp);
    __ lw(a0, MemOperand(sp));
    __ Drop(2);
    __ jmp(&done);
    __ bind(&no_arguments);
    __ LoadRoot(a0, Heap::kempty_stringRootIndex);
    __ Drop(1);
    __ bind(&done);
  }

  // 2. Make sure a0 is a string.
  {
    Label convert, done_convert;
    __ JumpIfSmi(a0, &convert);
    __ GetObjectType(a0, a2, a2);
    __ And(t0, a2, Operand(kIsNotStringMask));
    __ Branch(&done_convert, eq, t0, Operand(zero_reg));
    __ bind(&convert);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      ToStringStub stub(masm->isolate());
      __ Push(a1);
      __ CallStub(&stub);
      __ Move(a0, v0);
      __ Pop(a1);
    }
    __ bind(&done_convert);
  }

  // 3. Allocate a JSValue wrapper for the string.
  {
    // ----------- S t a t e -------------
    //  -- a0 : the first argument
    //  -- a1 : constructor function
    //  -- ra : return address
    // -----------------------------------

    Label allocate, done_allocate;
    __ Allocate(JSValue::kSize, v0, a2, a3, &allocate, TAG_OBJECT);
    __ bind(&done_allocate);

    // Initialize the JSValue in v0.
    __ LoadGlobalFunctionInitialMap(a1, a2, a3);
    __ sw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ LoadRoot(a3, Heap::kEmptyFixedArrayRootIndex);
    __ sw(a3, FieldMemOperand(v0, JSObject::kPropertiesOffset));
    __ sw(a3, FieldMemOperand(v0, JSObject::kElementsOffset));
    __ Ret(USE_DELAY_SLOT);
    __ sw(a0, FieldMemOperand(v0, JSValue::kValueOffset));
    STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);

    // Fallback to the runtime to allocate in new space.
    __ bind(&allocate);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Move(a2, Smi::FromInt(JSValue::kSize));
      __ Push(a0, a1, a2);
      __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
      __ Pop(a0, a1);
    }
    __ jmp(&done_allocate);
  }
}
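
// The JSValue wrapper allocated above is exactly four words, which is what the
// STATIC_ASSERT pins down; the stores give it the standard JSObject layout:
//
//   [0] map         (the constructor's initial map)
//   [1] properties  (empty fixed array)
//   [2] elements    (empty fixed array)
//   [3] value       (the wrapped string, JSValue::kValueOffset)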


static void CallRuntimePassFunction(
    MacroAssembler* masm, Runtime::FunctionId function_id) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack.
  // Push function as parameter to the runtime call.
  __ Push(a1, a1);

  __ CallRuntime(function_id, 1);
  // Restore receiver.
  __ Pop(a1);
}


static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset));
  __ Addu(at, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}


static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
  __ Addu(at, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}
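
// Both tail-call helpers turn a tagged Code object into a raw instruction
// address: adding Code::kHeaderSize skips the object header and subtracting
// kHeapObjectTag drops the heap-object tag, leaving the address of the first
// instruction, which is then jumped to through `at`.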


void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive. A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ LoadRoot(t0, Heap::kStackLimitRootIndex);
  __ Branch(&ok, hs, sp, Operand(t0));

  CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
  GenerateTailCallToReturnedCode(masm);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- a1     : constructor function
  //  -- a2     : allocation site or undefined
  //  -- a3     : original constructor
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ AssertUndefinedOrAllocationSite(a2, t0);
    __ SmiTag(a0);
    __ Push(a2, a0, a1, a3);

    // Try to allocate the object without transitioning into C code. If any of
    // the preconditions is not met, the code bails out to the runtime call.
    Label rt_call, allocated;
    if (FLAG_inline_new) {
      ExternalReference debug_step_in_fp =
          ExternalReference::debug_step_in_fp_address(isolate);
      __ li(a2, Operand(debug_step_in_fp));
      __ lw(a2, MemOperand(a2));
      __ Branch(&rt_call, ne, a2, Operand(zero_reg));

      // Fall back to runtime if the original constructor and function differ.
      __ Branch(&rt_call, ne, a1, Operand(a3));

      // Load the initial map and verify that it is in fact a map.
      // a1: constructor function
      __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
      __ JumpIfSmi(a2, &rt_call);
      __ GetObjectType(a2, t5, t4);
      __ Branch(&rt_call, ne, t4, Operand(MAP_TYPE));

      // Check that the constructor is not constructing a JSFunction (see
      // comments in Runtime_NewObject in runtime.cc). In which case the
      // initial map's instance type would be JS_FUNCTION_TYPE.
      // a1: constructor function
      // a2: initial map
      __ lbu(t5, FieldMemOperand(a2, Map::kInstanceTypeOffset));
      __ Branch(&rt_call, eq, t5, Operand(JS_FUNCTION_TYPE));

      if (!is_api_function) {
        Label allocate;
        MemOperand bit_field3 = FieldMemOperand(a2, Map::kBitField3Offset);
        // Check if slack tracking is enabled.
        __ lw(t0, bit_field3);
        __ DecodeField<Map::Counter>(t2, t0);
        __ Branch(&allocate, lt, t2, Operand(Map::kSlackTrackingCounterEnd));
        // Decrease generous allocation count.
        __ Subu(t0, t0, Operand(1 << Map::Counter::kShift));
        __ Branch(USE_DELAY_SLOT, &allocate, ne, t2,
                  Operand(Map::kSlackTrackingCounterEnd));
        __ sw(t0, bit_field3);  // In delay slot.

        __ Push(a1, a2, a1);  // a1 = Constructor.
        __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);

        __ Pop(a1, a2);
        __ li(t2, Operand(Map::kSlackTrackingCounterEnd - 1));

        __ bind(&allocate);
      }

      // Now allocate the JSObject on the heap.
      // a1: constructor function
      // a2: initial map
      Label rt_call_reload_new_target;
      __ lbu(a3, FieldMemOperand(a2, Map::kInstanceSizeOffset));

      __ Allocate(a3, t4, t5, t6, &rt_call_reload_new_target, SIZE_IN_WORDS);

      // Allocated the JSObject, now initialize the fields. Map is set to
      // initial map and properties and elements are set to empty fixed array.
      // a1: constructor function
      // a2: initial map
      // a3: object size
      // t4: JSObject (not tagged)
      __ LoadRoot(t6, Heap::kEmptyFixedArrayRootIndex);
      __ mov(t5, t4);
      __ sw(a2, MemOperand(t5, JSObject::kMapOffset));
      __ sw(t6, MemOperand(t5, JSObject::kPropertiesOffset));
      __ sw(t6, MemOperand(t5, JSObject::kElementsOffset));
      __ Addu(t5, t5, Operand(3 * kPointerSize));
      DCHECK_EQ(0 * kPointerSize, JSObject::kMapOffset);
      DCHECK_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
      DCHECK_EQ(2 * kPointerSize, JSObject::kElementsOffset);

      // Fill all the in-object properties with appropriate filler.
      // a1: constructor function
      // a2: initial map
      // a3: object size (in words)
      // t4: JSObject (not tagged)
      // t5: First in-object property of JSObject (not tagged)
      // t2: slack tracking counter (non-API function case)
      DCHECK_EQ(3 * kPointerSize, JSObject::kHeaderSize);

      // Use t7 to hold undefined, which is used in several places below.
      __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);

      if (!is_api_function) {
        Label no_inobject_slack_tracking;

        // Check if slack tracking is enabled.
        __ Branch(&no_inobject_slack_tracking, lt, t2,
                  Operand(Map::kSlackTrackingCounterEnd));

        // Allocate object with a slack.
        __ lbu(
            a0,
            FieldMemOperand(
                a2, Map::kInObjectPropertiesOrConstructorFunctionIndexOffset));
        __ lbu(a2, FieldMemOperand(a2, Map::kUnusedPropertyFieldsOffset));
        __ subu(a0, a0, a2);
        __ sll(at, a0, kPointerSizeLog2);
        __ addu(a0, t5, at);
        // a0: offset of first field after pre-allocated fields
        if (FLAG_debug_code) {
          __ sll(at, a3, kPointerSizeLog2);
          __ Addu(t6, t4, Operand(at));  // End of object.
          __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields,
                    a0, Operand(t6));
        }
        __ InitializeFieldsWithFiller(t5, a0, t7);
        // To allow for truncation.
        __ LoadRoot(t7, Heap::kOnePointerFillerMapRootIndex);
        // Fill the remaining fields with one pointer filler map.

        __ bind(&no_inobject_slack_tracking);
      }

      __ sll(at, a3, kPointerSizeLog2);
      __ Addu(a0, t4, Operand(at));  // End of object.
      __ InitializeFieldsWithFiller(t5, a0, t7);

      // Add the object tag to make the JSObject real, so that we can continue
      // and jump into the continuation code at any time from now on.
      __ Addu(t4, t4, Operand(kHeapObjectTag));

      // Continue with JSObject being successfully allocated.
      // t4: JSObject
      __ jmp(&allocated);

      // Reload the original constructor and fall-through.
      __ bind(&rt_call_reload_new_target);
      __ lw(a3, MemOperand(sp, 0 * kPointerSize));
    }

    // Allocate the new receiver object using the runtime call.
    // a1: constructor function
    // a3: original constructor
    __ bind(&rt_call);

    __ Push(a1, a3);  // arguments 2-3 / 1-2
    __ CallRuntime(Runtime::kNewObject, 2);
    __ mov(t4, v0);

    // Receiver for constructor call allocated.
    // t4: JSObject
    __ bind(&allocated);

    // Restore the parameters.
    __ Pop(a3);  // new.target
    __ Pop(a1);

    // Retrieve smi-tagged arguments count from the stack.
    __ lw(a0, MemOperand(sp));
    __ SmiUntag(a0);

    __ Push(a3, t4, t4);

    // Set up pointer to last argument.
    __ Addu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    // a0: number of arguments
    // a1: constructor function
    // a2: address of last argument (caller sp)
    // a3: number of arguments (smi-tagged)
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: new.target
    // sp[3]: number of arguments (smi-tagged)
    Label loop, entry;
    __ SmiTag(a3, a0);
    __ jmp(&entry);
    __ bind(&loop);
    __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize);
    __ Addu(t0, a2, Operand(t0));
    __ lw(t1, MemOperand(t0));
    __ push(t1);
    __ bind(&entry);
    __ Addu(a3, a3, Operand(-2));
    __ Branch(&loop, greater_equal, a3, Operand(zero_reg));

    // Call the function.
    // a0: number of arguments
    // a1: constructor function
    if (is_api_function) {
      __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      __ Call(code, RelocInfo::CODE_TARGET);
    } else {
      ParameterCount actual(a0);
      __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper());
    }

    // Store offset of return address for deoptimizer.
    if (!is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7
    // on page 74.
    Label use_receiver, exit;

    // If the result is a smi, it is *not* an object in the ECMA sense.
    // v0: result
    // sp[0]: receiver (newly allocated object)
    // sp[1]: new.target
    // sp[2]: number of arguments (smi-tagged)
    __ JumpIfSmi(v0, &use_receiver);

    // If the type of the result (stored in its map) is less than
    // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
    __ GetObjectType(v0, a1, a3);
    __ Branch(&exit, greater_equal, a3, Operand(FIRST_SPEC_OBJECT_TYPE));

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ lw(v0, MemOperand(sp));

    // Remove receiver from the stack, remove caller arguments, and
    // return.
    __ bind(&exit);
    // v0: result
    // sp[0]: receiver (newly allocated object)
    // sp[1]: new.target (original constructor)
    // sp[2]: number of arguments (smi-tagged)
    __ lw(a1, MemOperand(sp, 2 * kPointerSize));

    // Leave construct frame.
  }

  __ sll(t0, a1, kPointerSizeLog2 - 1);
  __ Addu(sp, sp, t0);
  __ Addu(sp, sp, kPointerSize);
  __ IncrementCounter(isolate->counters()->constructed_objects(), 1, a1, a2);
  __ Ret();
}
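
// A note on the slack-tracking dance above: while the map's Counter field is
// still counting down, objects are allocated at the initial (generous) size
// and the unused in-object fields are filled with the one-pointer filler map
// so the heap stays iterable; once the counter expires,
// Runtime::kFinalizeInstanceSize shrinks the instance size used for future
// allocations.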


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true);
}


void Builtins::Generate_JSConstructStubForDerived(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- a1     : constructor function
  //  -- a2     : allocation site or undefined
  //  -- a3     : original constructor
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  {
    FrameScope frame_scope(masm, StackFrame::CONSTRUCT);

    __ AssertUndefinedOrAllocationSite(a2, t0);
    __ push(a2);

    __ mov(t0, a0);
    __ SmiTag(t0);
    __ push(t0);  // Smi-tagged arguments count.

    // Push new.target.
    __ push(a3);

    // receiver is the hole.
    __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
    __ push(at);

    // Set up pointer to last argument.
    __ Addu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    // a0: number of arguments
    // a1: constructor function
    // a2: address of last argument (caller sp)
    // t0: number of arguments (smi-tagged)
    // sp[0]: receiver
    // sp[1]: new.target
    // sp[2]: number of arguments (smi-tagged)
    Label loop, entry;
    __ Branch(&entry);
    __ bind(&loop);
    __ sll(at, t0, kPointerSizeLog2 - 1);
    __ Addu(at, a2, Operand(at));
    __ lw(at, MemOperand(at));
    __ push(at);
    __ bind(&entry);
    __ Subu(t0, t0, Operand(2));
    __ Branch(&loop, ge, t0, Operand(zero_reg));

    // Handle step in.
    Label skip_step_in;
    ExternalReference debug_step_in_fp =
        ExternalReference::debug_step_in_fp_address(masm->isolate());
    __ li(a2, Operand(debug_step_in_fp));
    __ lw(a2, MemOperand(a2));
    __ Branch(&skip_step_in, eq, a2, Operand(zero_reg));

    __ Push(a0, a1, a1);
    __ CallRuntime(Runtime::kHandleStepInForDerivedConstructors, 1);
    __ Pop(a0, a1);

    __ bind(&skip_step_in);

    // Call the function.
    // a0: number of arguments
    // a1: constructor function
    ParameterCount actual(a0);
    __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper());

    // Restore context from the frame.
    // v0: result
    // sp[0]: new.target
    // sp[1]: number of arguments (smi-tagged)
    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
    __ lw(a1, MemOperand(sp, kPointerSize));

    // Leave construct frame.
  }

  __ sll(at, a1, kPointerSizeLog2 - 1);
  __ Addu(sp, sp, Operand(at));
  __ Addu(sp, sp, Operand(kPointerSize));
  __ Jump(ra);
}
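
// The argument copy loops in both construct stubs decrement the smi-tagged
// count by 2 rather than 1: with kSmiTagSize == 1, a smi-encoded integer n is
// stored as n << 1, so subtracting 2 steps the logical index down by one per
// iteration, while `sll(..., kPointerSizeLog2 - 1)` scales the smi directly to
// a byte offset.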


enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };


// Clobbers a2; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm,
                                        const int calleeOffset, Register argc,
                                        IsTagged argc_is_tagged) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
  // Make a2 the space we have left. The stack might already be overflowed
  // here which will cause a2 to become negative.
  __ Subu(a2, sp, a2);
  // Check if the arguments will overflow the stack.
  if (argc_is_tagged == kArgcIsSmiTagged) {
    __ sll(t3, argc, kPointerSizeLog2 - kSmiTagSize);
  } else {
    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
    __ sll(t3, argc, kPointerSizeLog2);
  }
  // Signed comparison.
  __ Branch(&okay, gt, a2, Operand(t3));

  // Out of stack space.
  __ lw(a1, MemOperand(fp, calleeOffset));
  if (argc_is_tagged == kArgcIsUntaggedInt) {
    __ SmiTag(argc);
  }
  __ Push(a1, argc);
  __ CallRuntime(Runtime::kThrowStackOverflow, 0);

  __ bind(&okay);
}
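
// Example: with 1000 untagged arguments on this 32-bit target, t3 becomes
// 1000 << 2 = 4000 bytes of prospective stack use; the branch above only
// proceeds when sp - real_stack_limit > 4000, and otherwise falls through
// into Runtime::kThrowStackOverflow with the callee and smi-tagged argc
// pushed as arguments.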


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from JSEntryStub::GenerateBody

  // ----------- S t a t e -------------
  //  -- a0: code entry
  //  -- a1: function
  //  -- a2: receiver_pointer
  //  -- a3: argc
  //  -- s0: argv
  // -----------------------------------
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Clear the context before we push it when entering the JS frame.
  __ mov(cp, zero_reg);

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context from the function argument.
    __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

    // Push the function and the receiver onto the stack.
    __ Push(a1, a2);

    // Check if we have enough stack space to push all arguments.
    // The function is the first thing that was pushed above after entering
    // the internal frame.
    const int kFunctionOffset =
        InternalFrameConstants::kCodeOffset - kPointerSize;
    // Clobbers a2.
    Generate_CheckStackOverflow(masm, kFunctionOffset, a3, kArgcIsUntaggedInt);

    // Copy arguments to the stack in a loop.
    // a3: argc
    // s0: argv, i.e. points to first arg
    Label loop, entry;
    __ sll(t0, a3, kPointerSizeLog2);
    __ addu(t2, s0, t0);
    __ b(&entry);
    __ nop();  // Branch delay slot nop.
    // t2 points past last arg.
    __ bind(&loop);
    __ lw(t0, MemOperand(s0));  // Read next parameter.
    __ addiu(s0, s0, kPointerSize);
    __ lw(t0, MemOperand(t0));  // Dereference handle.
    __ push(t0);  // Push parameter.
    __ bind(&entry);
    __ Branch(&loop, ne, s0, Operand(t2));

    // Initialize all JavaScript callee-saved registers, since they will be
    // seen by the garbage collector as part of handlers.
    __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
    __ mov(s1, t0);
    __ mov(s2, t0);
    __ mov(s3, t0);
    __ mov(s4, t0);
    __ mov(s5, t0);
    // s6 holds the root address. Do not clobber.
    // s7 is cp. Do not init.

    // Invoke the code and pass argc as a0.
    __ mov(a0, a3);
    if (is_construct) {
      // No type feedback cell is available
      __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
      CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
      __ CallStub(&stub);
    } else {
      __ Call(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
    }

    // Leave internal frame.
  }

  __ Jump(ra);
}


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}
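
// Note that the entry trampoline reads each argument through two loads: argv
// holds handle locations (pointers to Object* slots), so the first lw fetches
// the handle and the second dereferences it to obtain the tagged pointer that
// is actually pushed onto the JS stack.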


// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right. The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   o a1: the JS function object being called.
//   o cp: our context
//   o fp: the caller's frame pointer
//   o sp: stack pointer
//   o ra: return address
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-mips.h for its layout.
// TODO(rmcilroy): We will need to include the current bytecode pointer in the
// frame.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);

  __ Push(ra, fp, cp, a1);
  __ Addu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));

  // Get the bytecode array from the function object and load the pointer to
  // the first entry into kInterpreterBytecodeRegister.
  __ lw(a0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(kInterpreterBytecodeArrayRegister,
        FieldMemOperand(a0, SharedFunctionInfo::kFunctionDataOffset));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ SmiTst(kInterpreterBytecodeArrayRegister, t0);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, t0,
              Operand(zero_reg));
    __ GetObjectType(kInterpreterBytecodeArrayRegister, t0, t0);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, t0,
              Operand(BYTECODE_ARRAY_TYPE));
  }

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size from the BytecodeArray object.
    __ lw(t0, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                              BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ Subu(t1, sp, Operand(t0));
    __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
    __ Branch(&ok, hs, t1, Operand(a2));
    __ CallRuntime(Runtime::kThrowStackOverflow, 0);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file entries.
    Label loop_header;
    Label loop_check;
    __ LoadRoot(t1, Heap::kUndefinedValueRootIndex);
    __ Branch(&loop_check);
    __ bind(&loop_header);
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    __ push(t1);
    // Continue loop if not done.
    __ bind(&loop_check);
    __ Subu(t0, t0, Operand(kPointerSize));
    __ Branch(&loop_header, ge, t0, Operand(zero_reg));
  }

  // TODO(rmcilroy): List of things not currently dealt with here but done in
  // fullcodegen's prologue:
  //  - Support profiler (specifically profiling_counter).
  //  - Call ProfileEntryHookStub when isolate has a function_entry_hook.
  //  - Allow simulator stop operations if FLAG_stop_at is set.
  //  - Deal with sloppy mode functions which need to replace the
  //    receiver with the global proxy when called as functions (without an
  //    explicit receiver object).
  //  - Code aging of the BytecodeArray object.
  //  - Supporting FLAG_trace.
  //
  // The following items are also not done here, and will probably be done
  // using explicit bytecodes instead:
  //  - Allocating a new local context if applicable.
  //  - Setting up a local binding to the this function, which is used in
  //    derived constructors with super calls.
  //  - Setting new.target if required.
  //  - Dealing with REST parameters (only if
  //    https://codereview.chromium.org/1235153006 doesn't land by then).
  //  - Dealing with argument objects.

  // Perform stack guard check.
  {
    Label ok;
    __ LoadRoot(at, Heap::kStackLimitRootIndex);
    __ Branch(&ok, hs, sp, Operand(at));
    __ CallRuntime(Runtime::kStackGuard, 0);
    __ bind(&ok);
  }

  // Load bytecode offset and dispatch table into registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ Subu(
      kInterpreterRegisterFileRegister, fp,
      Operand(kPointerSize + StandardFrameConstants::kFixedFrameSizeFromFp));
  __ li(kInterpreterBytecodeOffsetRegister,
        Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
  __ LoadRoot(kInterpreterDispatchTableRegister,
              Heap::kInterpreterTableRootIndex);
  __ Addu(kInterpreterDispatchTableRegister, kInterpreterDispatchTableRegister,
          Operand(FixedArray::kHeaderSize - kHeapObjectTag));

  // Dispatch to the first bytecode handler for the function.
  __ Addu(a0, kInterpreterBytecodeArrayRegister,
          kInterpreterBytecodeOffsetRegister);
  __ lbu(a0, MemOperand(a0));
  __ sll(at, a0, kPointerSizeLog2);
  __ Addu(at, kInterpreterDispatchTableRegister, at);
  __ lw(at, MemOperand(at));
  // TODO(rmcilroy): Make dispatch table point to code entries to avoid
  // untagging and header removal.
  __ Addu(at, at, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Call(at);
}
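
// Dispatch works by indexing the table with the raw bytecode: load the byte at
// bytecode_array + offset, scale it by kPointerSizeLog2, fetch the handler
// code object from the dispatch table, and jump past its header. Roughly:
//
//   handler = dispatch_table[bytecode[offset]];
//   goto handler + Code::kHeaderSize - kHeapObjectTag;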


void Builtins::Generate_InterpreterExitTrampoline(MacroAssembler* masm) {
  // TODO(rmcilroy): List of things not currently dealt with here but done in
  // fullcodegen's EmitReturnSequence.
  //  - Supporting FLAG_trace for Runtime::TraceExit.
  //  - Support profiler (specifically decrementing profiling_counter
  //    appropriately and calling out to HandleInterrupts if necessary).

  // The return value is in accumulator, which is already in v0.

  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);

  // Drop receiver + arguments and return.
  __ lw(at, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                            BytecodeArray::kParameterSizeOffset));
  __ Addu(sp, sp, at);
  __ Jump(ra);
}


void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kCompileLazy);
  GenerateTailCallToReturnedCode(masm);
}


static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack.
  // Push function as parameter to the runtime call.
  __ Push(a1, a1);
  // Whether to compile in a background thread.
  __ LoadRoot(
      at, concurrent ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
  __ push(at);

  __ CallRuntime(Runtime::kCompileOptimized, 2);
  // Restore receiver.
  __ Pop(a1);
}


void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  CallCompileOptimized(masm, false);
  GenerateTailCallToReturnedCode(masm);
}


void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  CallCompileOptimized(masm, true);
  GenerateTailCallToReturnedCode(masm);
}


static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do
  // stack crawls in MakeCodeYoung. This seems a bit fragile.

  // Set a0 to point to the head of the PlatformCodeAge sequence.
  __ Subu(a0, a0,
      Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   a0 - contains return address (beginning of patch sequence)
  //   a1 - isolate
  RegList saved_regs =
      (a0.bit() | a1.bit() | ra.bit() | fp.bit()) & ~sp.bit();
  FrameScope scope(masm, StackFrame::MANUAL);
  __ MultiPush(saved_regs);
  __ PrepareCallCFunction(2, 0, a2);
  __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  __ MultiPop(saved_regs);
  __ Jump(a0);
}

#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}                                                            \
void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
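
// CODE_AGE_LIST expands the generator macro once per tracked code age, so each
// age gets an Even/Odd marking pair of builtins that all funnel into
// GenerateMakeCodeYoungAgainCommon; the marking parity appears to matter only
// to the GC's alternating mark cycles, not to the code emitted here.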


void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the
  // fact that make_code_young doesn't do any garbage collection which allows
  // us to save/restore the registers without worrying about which of them
  // contain pointers.

  // Set a0 to point to the head of the PlatformCodeAge sequence.
  __ Subu(a0, a0,
      Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   a0 - contains return address (beginning of patch sequence)
  //   a1 - isolate
  RegList saved_regs =
      (a0.bit() | a1.bit() | ra.bit() | fp.bit()) & ~sp.bit();
  FrameScope scope(masm, StackFrame::MANUAL);
  __ MultiPush(saved_regs);
  __ PrepareCallCFunction(2, 0, a2);
  __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
      2);
  __ MultiPop(saved_regs);

  // Perform prologue operations usually performed by the young code stub.
  __ Push(ra, fp, cp, a1);
  __ Addu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));

  // Jump to point after the code-age stub.
  __ Addu(a0, a0, Operand(kNoCodeAgeSequenceLength));
  __ Jump(a0);
}


void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}


void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
  Generate_MarkCodeAsExecutedOnce(masm);
}


static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    __ MultiPush(kJSCallerSaved | kCalleeSaved);
    // Pass the function and deoptimization type to the runtime system.
    __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
    __ MultiPop(kJSCallerSaved | kCalleeSaved);
  }

  __ Addu(sp, sp, Operand(kPointerSize));  // Ignore state.
  __ Jump(ra);  // Jump to miss handler.
}


void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}


void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}


static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass the function and deoptimization type to the runtime system.
    __ li(a0, Operand(Smi::FromInt(static_cast<int>(type))));
    __ push(a0);
    __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
  }

  // Get the full codegen state from the stack and untag it -> t2.
  __ lw(t2, MemOperand(sp, 0 * kPointerSize));
  __ SmiUntag(t2);
  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ Branch(&with_tos_register,
            ne, t2, Operand(FullCodeGenerator::NO_REGISTERS));
  __ Ret(USE_DELAY_SLOT);
  // Safe to fill delay slot; Addu will emit one instruction.
  __ Addu(sp, sp, Operand(1 * kPointerSize));  // Remove state.

  __ bind(&with_tos_register);
  __ lw(v0, MemOperand(sp, 1 * kPointerSize));
  __ Branch(&unknown_state, ne, t2, Operand(FullCodeGenerator::TOS_REG));

  __ Ret(USE_DELAY_SLOT);
  // Safe to fill delay slot; Addu will emit one instruction.
  __ Addu(sp, sp, Operand(2 * kPointerSize));  // Remove state.

  __ bind(&unknown_state);
  __ stop("no cases left");
}


void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}
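
// The deopt notification above is essentially a two-case switch on the
// FullCodeGenerator state the deoptimizer pushed: NO_REGISTERS drops one
// stack slot; TOS_REG additionally restores v0 from the slot above the state
// word before dropping two; any other value is unreachable and traps.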


void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ push(a0);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
  }

  // If the code object is null, just return to the unoptimized code.
  __ Ret(eq, v0, Operand(Smi::FromInt(0)));

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ lw(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
  __ lw(a1, MemOperand(a1, FixedArray::OffsetOfElementAt(
      DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));
  __ SmiUntag(a1);

  // Compute the target address = code_obj + header_size + osr_offset
  // <entry_addr> = <code_obj> + #header_size + <osr_offset>
  __ addu(v0, v0, a1);
  __ addiu(ra, v0, Code::kHeaderSize - kHeapObjectTag);

  // And "return" to the OSR entry point of the function.
  __ Ret();
}


void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  // We check the stack limit as indicator that recompilation might be done.
  Label ok;
  __ LoadRoot(at, Heap::kStackLimitRootIndex);
  __ Branch(&ok, hs, sp, Operand(at));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kStackGuard, 0);
  }
  __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
          RelocInfo::CODE_TARGET);

  __ bind(&ok);
  __ Ret();
}


void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // a0: actual number of arguments
  {
    Label done;
    __ Branch(&done, ne, a0, Operand(zero_reg));
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ Addu(a0, a0, Operand(1));
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack.
  // a0: actual number of arguments
  __ sll(at, a0, kPointerSizeLog2);
  __ addu(at, sp, at);
  __ lw(a1, MemOperand(at));

  // 3. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver). Adjust argument count to make
  //    the original first argument the new receiver.
  // a0: actual number of arguments
  // a1: function
  {
    Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ sll(at, a0, kPointerSizeLog2);
    __ addu(a2, sp, at);

    __ bind(&loop);
    __ lw(at, MemOperand(a2, -kPointerSize));
    __ sw(at, MemOperand(a2));
    __ Subu(a2, a2, Operand(kPointerSize));
    __ Branch(&loop, ne, a2, Operand(sp));
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ Subu(a0, a0, Operand(1));
    __ Pop();
  }

  // 4. Call the callable.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
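
// The shift loop above implements Function.prototype.call's receiver drop
// without allocating: every argument slides one slot toward the stack top,
// overwriting the original receiver, and the now-duplicated last argument is
// popped off before dispatching to the generic Call builtin.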


static void Generate_PushAppliedArguments(MacroAssembler* masm,
                                          const int vectorOffset,
                                          const int argumentsOffset,
                                          const int indexOffset,
                                          const int limitOffset) {
  Label entry, loop;
  Register receiver = LoadDescriptor::ReceiverRegister();
  Register key = LoadDescriptor::NameRegister();
  Register slot = LoadDescriptor::SlotRegister();
  Register vector = LoadWithVectorDescriptor::VectorRegister();

  __ lw(key, MemOperand(fp, indexOffset));
  __ Branch(&entry);

  // Load the current argument from the arguments array.
  __ bind(&loop);
  __ lw(receiver, MemOperand(fp, argumentsOffset));

  // Use inline caching to speed up access to arguments.
  int slot_index = TypeFeedbackVector::PushAppliedArgumentsIndex();
  __ li(slot, Operand(Smi::FromInt(slot_index)));
  __ lw(vector, MemOperand(fp, vectorOffset));
  Handle<Code> ic =
      KeyedLoadICStub(masm->isolate(), LoadICState(kNoExtraICState)).GetCode();
  __ Call(ic, RelocInfo::CODE_TARGET);

  __ push(v0);

  // Use inline caching to access the arguments.
  __ lw(key, MemOperand(fp, indexOffset));
  __ Addu(key, key, Operand(1 << kSmiTagSize));
  __ sw(key, MemOperand(fp, indexOffset));

  // Test if the copy loop has finished copying all the elements from the
  // arguments object.
  __ bind(&entry);
  __ lw(a1, MemOperand(fp, limitOffset));
  __ Branch(&loop, ne, key, Operand(a1));

  // On exit, the pushed arguments count is in a0, untagged.
  __ mov(a0, key);
  __ SmiUntag(a0);
}


// Used by FunctionApply and ReflectApply
static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) {
  const int kFormalParameters = targetIsArgument ? 3 : 2;
  const int kStackSize = kFormalParameters + 1;

  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);
    const int kArgumentsOffset = kFPOnStackSize + kPCOnStackSize;
    const int kReceiverOffset = kArgumentsOffset + kPointerSize;
    const int kFunctionOffset = kReceiverOffset + kPointerSize;
    const int kVectorOffset =
        InternalFrameConstants::kCodeOffset - 1 * kPointerSize;

    // Push the vector.
    __ lw(a1, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
    __ lw(a1, FieldMemOperand(a1, SharedFunctionInfo::kFeedbackVectorOffset));
    __ Push(a1);

    __ lw(a0, MemOperand(fp, kFunctionOffset));  // Get the function.
    __ lw(a1, MemOperand(fp, kArgumentsOffset));  // Get the args array.
    __ Push(a0, a1);
    // Returns (in v0) number of arguments to copy to stack as Smi.
    if (targetIsArgument) {
      __ InvokeBuiltin(Context::REFLECT_APPLY_PREPARE_BUILTIN_INDEX,
                       CALL_FUNCTION);
    } else {
      __ InvokeBuiltin(Context::APPLY_PREPARE_BUILTIN_INDEX, CALL_FUNCTION);
    }

    // Returns the result in v0.
    Generate_CheckStackOverflow(masm, kFunctionOffset, v0, kArgcIsSmiTagged);

    // Push current limit and index.
    const int kIndexOffset = kVectorOffset - (2 * kPointerSize);
    const int kLimitOffset = kVectorOffset - (1 * kPointerSize);
    __ mov(a1, zero_reg);
    __ lw(a2, MemOperand(fp, kReceiverOffset));
    __ Push(v0, a1, a2);  // limit, initial index and receiver.

    // Copy all arguments from the array to the stack.
    Generate_PushAppliedArguments(masm, kVectorOffset, kArgumentsOffset,
                                  kIndexOffset, kLimitOffset);

    // Call the callable.
    // TODO(bmeurer): This should be a tail call according to ES6.
    __ lw(a1, MemOperand(fp, kFunctionOffset));
    __ Call(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);

    // Tear down the internal frame and remove function, receiver and args.
  }

  __ Ret(USE_DELAY_SLOT);
  __ Addu(sp, sp, Operand(kStackSize * kPointerSize));  // In delay slot.
}


static void Generate_ConstructHelper(MacroAssembler* masm) {
  const int kFormalParameters = 3;
  const int kStackSize = kFormalParameters + 1;

  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);
    const int kNewTargetOffset = kFPOnStackSize + kPCOnStackSize;
    const int kArgumentsOffset = kNewTargetOffset + kPointerSize;
    const int kFunctionOffset = kArgumentsOffset + kPointerSize;
    const int kVectorOffset =
        InternalFrameConstants::kCodeOffset - 1 * kPointerSize;

    // Push the vector.
    __ lw(a1, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
    __ lw(a1, FieldMemOperand(a1, SharedFunctionInfo::kFeedbackVectorOffset));
    __ Push(a1);

    // If newTarget is not supplied, set it to constructor
    Label validate_arguments;
    __ lw(a0, MemOperand(fp, kNewTargetOffset));
    __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
    __ Branch(&validate_arguments, ne, a0, Operand(at));
    __ lw(a0, MemOperand(fp, kFunctionOffset));
    __ sw(a0, MemOperand(fp, kNewTargetOffset));

    // Validate arguments
    __ bind(&validate_arguments);
    __ lw(a0, MemOperand(fp, kFunctionOffset));  // get the function
    __ push(a0);
    __ lw(a0, MemOperand(fp, kArgumentsOffset));  // get the args array
    __ push(a0);
    __ lw(a0, MemOperand(fp, kNewTargetOffset));  // get the new.target
    __ push(a0);
    // Returns argument count in v0.
    __ InvokeBuiltin(Context::REFLECT_CONSTRUCT_PREPARE_BUILTIN_INDEX,
                     CALL_FUNCTION);

    // Returns result in v0.
    Generate_CheckStackOverflow(masm, kFunctionOffset, v0, kArgcIsSmiTagged);

    // Push current limit and index.
    const int kIndexOffset = kVectorOffset - (2 * kPointerSize);
    const int kLimitOffset = kVectorOffset - (1 * kPointerSize);
    __ push(v0);  // limit
    __ mov(a1, zero_reg);  // initial index
    __ push(a1);
    // Push the constructor function as callee.
    __ lw(a0, MemOperand(fp, kFunctionOffset));
    __ push(a0);

    // Copy all arguments from the array to the stack.
    Generate_PushAppliedArguments(masm, kVectorOffset, kArgumentsOffset,
                                  kIndexOffset, kLimitOffset);

    // Use undefined feedback vector
    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
    __ lw(a1, MemOperand(fp, kFunctionOffset));
    __ lw(t0, MemOperand(fp, kNewTargetOffset));

    // Call the function.
    CallConstructStub stub(masm->isolate(), SUPER_CONSTRUCTOR_CALL);
    __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);

    // Leave internal frame.
  }

  __ jr(ra);
  __ Addu(sp, sp, Operand(kStackSize * kPointerSize));  // In delay slot.
}


void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  Generate_ApplyHelper(masm, false);
}


void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  Generate_ApplyHelper(masm, true);
}


void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  Generate_ConstructHelper(masm);
}
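
// The kFunctionOffset/kArgumentsOffset/kNewTargetOffset constants in the two
// helpers above mirror the caller's push order: relative to fp, the incoming
// parameters sit just above the saved fp and return address, with the
// arguments array closest in the apply case (new.target closest in the
// construct case) and the target function farthest away.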


static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
                                      Label* stack_overflow) {
  // ----------- S t a t e -------------
  //  -- a0 : actual number of arguments
  //  -- a1 : function (passed through to callee)
  //  -- a2 : expected number of arguments
  // -----------------------------------
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  __ LoadRoot(t1, Heap::kRealStackLimitRootIndex);
  // Make t1 the space we have left. The stack might already be overflowed
  // here which will cause t1 to become negative.
  __ subu(t1, sp, t1);
  // Check if the arguments will overflow the stack.
  __ sll(at, a2, kPointerSizeLog2);
  // Signed comparison.
  __ Branch(stack_overflow, le, t1, Operand(at));
}


static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ sll(a0, a0, kSmiTagSize);
  __ li(t0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ MultiPush(a0.bit() | a1.bit() | t0.bit() | fp.bit() | ra.bit());
  __ Addu(fp, sp,
      Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
}


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- v0 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then tear down the parameters.
  __ lw(a1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                             kPointerSize)));
  __ mov(sp, fp);
  __ MultiPop(fp.bit() | ra.bit());
  __ sll(t0, a1, kPointerSizeLog2 - kSmiTagSize);
  __ Addu(sp, sp, t0);
  // Adjust for the receiver.
  __ Addu(sp, sp, Operand(kPointerSize));
}
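
// EnterArgumentsAdaptorFrame leaves fp pointing at a frame whose slot at
// -(kFixedFrameSizeFromFp + kPointerSize) holds the smi-tagged actual
// argument count pushed above; LeaveArgumentsAdaptorFrame reads it back to
// pop the right number of parameter slots plus the receiver.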


// static
void Builtins::Generate_CallFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSFunction)
  // -----------------------------------

  Label convert, convert_global_proxy, convert_to_object, done_convert;
  __ AssertFunction(a1);
  // TODO(bmeurer): Throw a TypeError if function's [[FunctionKind]] internal
  // slot is "classConstructor".
  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
  STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset ==
                SharedFunctionInfo::kStrictModeByteOffset);
  __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
  __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
  __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kNativeByteOffset));
  __ And(at, a3, Operand((1 << SharedFunctionInfo::kNativeBitWithinByte) |
                         (1 << SharedFunctionInfo::kStrictModeBitWithinByte)));
  __ Branch(&done_convert, ne, at, Operand(zero_reg));
  {
    __ sll(at, a0, kPointerSizeLog2);
    __ addu(at, sp, at);
    __ lw(a3, MemOperand(at));

    // ----------- S t a t e -------------
    //  -- a0 : the number of arguments (not including the receiver)
    //  -- a1 : the function to call (checked to be a JSFunction)
    //  -- a2 : the shared function info.
    //  -- a3 : the receiver
    //  -- cp : the function context.
    // -----------------------------------

    Label convert_receiver;
    __ JumpIfSmi(a3, &convert_to_object);
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ GetObjectType(a3, t0, t0);
    __ Branch(&done_convert, hs, t0, Operand(FIRST_JS_RECEIVER_TYPE));
    __ JumpIfRoot(a3, Heap::kUndefinedValueRootIndex, &convert_global_proxy);
    __ JumpIfNotRoot(a3, Heap::kNullValueRootIndex, &convert_to_object);
    __ bind(&convert_global_proxy);
    {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(a3);
    }
    __ Branch(&convert_receiver);
    __ bind(&convert_to_object);
    {
      // Convert receiver using ToObject.
      // TODO(bmeurer): Inline the allocation here to avoid building the frame
      // in the fast case? (fall back to AllocateInNewSpace?)
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ sll(a0, a0, kSmiTagSize);  // Smi tagged.
      __ Push(a0, a1);
      __ mov(a0, a3);
      ToObjectStub stub(masm->isolate());
      __ CallStub(&stub);
      __ mov(a3, v0);
      __ Pop(a0, a1);
      __ sra(a0, a0, kSmiTagSize);  // Un-tag.
    }
    __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
    __ bind(&convert_receiver);
    __ sll(at, a0, kPointerSizeLog2);
    __ addu(at, sp, at);
    __ sw(a3, MemOperand(at));
  }
  __ bind(&done_convert);

  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSFunction)
  //  -- a2 : the shared function info.
  //  -- cp : the function context.
  // -----------------------------------

  __ lw(a2,
        FieldMemOperand(a2, SharedFunctionInfo::kFormalParameterCountOffset));
  __ sra(a2, a2, kSmiTagSize);  // Un-tag.
  __ lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  ParameterCount actual(a0);
  ParameterCount expected(a2);
  __ InvokeCode(a3, expected, actual, JUMP_FUNCTION, NullCallWrapper());
}


// static
void Builtins::Generate_Call(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the target to call (can be any Object).
  // -----------------------------------

  Label non_smi, non_function;
  __ JumpIfSmi(a1, &non_function);
  __ bind(&non_smi);
  __ GetObjectType(a1, a2, a2);
  __ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET,
          eq, a2, Operand(JS_FUNCTION_TYPE));
  __ Branch(&non_function, ne, a2, Operand(JS_FUNCTION_PROXY_TYPE));

  // 1. Call to function proxy.
  // TODO(neis): This doesn't match the ES6 spec for [[Call]] on proxies.
  __ lw(a1, FieldMemOperand(a1, JSFunctionProxy::kCallTrapOffset));
  __ AssertNotSmi(a1);
  __ Branch(&non_smi);

  // 2. Call to something else, which might have a [[Call]] internal method (if
  // not we raise an exception).
  __ bind(&non_function);
  // TODO(bmeurer): I wonder why we prefer to have slow API calls? This could
  // be awesome instead; i.e. a trivial improvement would be to call into the
  // runtime and just deal with the API function there instead of returning a
  // delegate from a runtime call that just jumps back to the runtime once
  // called. Or, bonus points, call directly into the C API function here, as
  // we do in some Crankshaft fast cases.
  // Overwrite the original receiver with the (original) target.
  __ sll(at, a0, kPointerSizeLog2);
  __ addu(at, sp, at);
  __ sw(a1, MemOperand(at));
  {
    // Determine the delegate for the target (if any).
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ sll(a0, a0, kSmiTagSize);  // Smi tagged.
    __ Push(a0, a1);
    __ CallRuntime(Runtime::kGetFunctionDelegate, 1);
    __ mov(a1, v0);
    __ Pop(a0);
    __ sra(a0, a0, kSmiTagSize);  // Un-tag.
  }
  // The delegate is always a regular function.
  __ AssertFunction(a1);
  __ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET);
}


// static
void Builtins::Generate_PushArgsAndCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a2 : the address of the first argument to be pushed. Subsequent
  //          arguments should be consecutive above this, in the same order as
  //          they are to be pushed onto the stack.
  //  -- a1 : the target to call (can be any Object).

  // Find the address of the last argument.
  __ Addu(a3, a0, Operand(1));  // Add one for receiver.
  __ sll(a3, a3, kPointerSizeLog2);
  __ Subu(a3, a2, Operand(a3));

  // Push the arguments.
  Label loop_header, loop_check;
  __ Branch(&loop_check);
  __ bind(&loop_header);
  __ lw(t0, MemOperand(a2));
  __ Addu(a2, a2, Operand(-kPointerSize));
  __ push(t0);
  __ bind(&loop_check);
  __ Branch(&loop_header, gt, a2, Operand(a3));

  // Call the target.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
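
// The push loop walks a2 downward one word per iteration, pushing a0 + 1
// values in total (the arguments plus the receiver); a3 was set one word past
// the final slot, so the `gt` comparison in loop_check stops the copy exactly
// when a2 reaches it.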


void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // State setup as expected by MacroAssembler::InvokePrologue.
  // ----------- S t a t e -------------
  //  -- a0: actual arguments count
  //  -- a1: function (passed through to callee)
  //  -- a2: expected arguments count
  // -----------------------------------

  Label stack_overflow;
  ArgumentAdaptorStackCheck(masm, &stack_overflow);
  Label invoke, dont_adapt_arguments;

  Label enough, too_few;
  __ lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  __ Branch(&dont_adapt_arguments, eq,
      a2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  // We use Uless as the number of arguments should always be greater than 0.
  __ Branch(&too_few, Uless, a0, Operand(a2));

  {  // Enough parameters: actual >= expected.
    // a0: actual number of arguments as a smi
    // a1: function
    // a2: expected number of arguments
    // a3: code entry to call
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into a0 and copy end address into t1.
    __ sll(a0, a0, kPointerSizeLog2 - kSmiTagSize);
    __ Addu(a0, fp, a0);
    // Adjust for return address and receiver.
    __ Addu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address.
    __ sll(t1, a2, kPointerSizeLog2);
    __ subu(t1, a0, t1);

    // Copy the arguments (including the receiver) to the new stack frame.
    // a0: copy start address
    // a1: function
    // a2: expected number of arguments
    // a3: code entry to call
    // t1: copy end address

    Label copy;
    __ bind(&copy);
    __ lw(t0, MemOperand(a0));
    __ push(t0);
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(t1));
    __ addiu(a0, a0, -kPointerSize);  // In delay slot.

    __ jmp(&invoke);
  }

  {  // Too few parameters: Actual < expected.
    __ bind(&too_few);

    // If the function is strong we need to throw an error.
    Label no_strong_error;
    __ lw(t1, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
    __ lw(t2, FieldMemOperand(t1, SharedFunctionInfo::kCompilerHintsOffset));
    __ And(t3, t2, Operand(1 << (SharedFunctionInfo::kStrongModeFunction +
                                 kSmiTagSize)));
    __ Branch(&no_strong_error, eq, t3, Operand(zero_reg));

    // What we really care about is the required number of arguments.
    __ lw(t2, FieldMemOperand(t1, SharedFunctionInfo::kLengthOffset));
    __ SmiUntag(t2);
    __ Branch(&no_strong_error, ge, a0, Operand(t2));

    {
      FrameScope frame(masm, StackFrame::MANUAL);
      EnterArgumentsAdaptorFrame(masm);
      __ CallRuntime(Runtime::kThrowStrongModeTooFewArguments, 0);
    }

    __ bind(&no_strong_error);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into a0 and copy end address into t3.
    // a0: actual number of arguments as a smi
    // a1: function
    // a2: expected number of arguments
    // a3: code entry to call
    __ sll(a0, a0, kPointerSizeLog2 - kSmiTagSize);
    __ Addu(a0, fp, a0);
    // Adjust for return address and receiver.
    __ Addu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address. Also adjust for return address.
    __ Addu(t3, fp, kPointerSize);

    // Copy the arguments (including the receiver) to the new stack frame.
    // a0: copy start address
    // a1: function
    // a2: expected number of arguments
    // a3: code entry to call
    // t3: copy end address
    Label copy;
    __ bind(&copy);
    __ lw(t0, MemOperand(a0));  // Adjusted above for return addr and receiver.
    __ Subu(sp, sp, kPointerSize);
    __ Subu(a0, a0, kPointerSize);
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(t3));
    __ sw(t0, MemOperand(sp));  // In the delay slot.

    // Fill the remaining expected arguments with undefined.
    // a1: function
    // a2: expected number of arguments
    // a3: code entry to call
    __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
    __ sll(t2, a2, kPointerSizeLog2);
    __ Subu(t1, fp, Operand(t2));
    // Adjust for frame.
    __ Subu(t1, t1, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                            2 * kPointerSize));

    Label fill;
    __ bind(&fill);
    __ Subu(sp, sp, kPointerSize);
    __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(t1));
    __ sw(t0, MemOperand(sp));
  }

  // Call the entry point.
  __ bind(&invoke);
  __ mov(a0, a2);
  // a0 : expected number of arguments
  // a1 : function (passed through to callee)
  __ Call(a3);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Ret();


  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ Jump(a3);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    EnterArgumentsAdaptorFrame(masm);
    __ CallRuntime(Runtime::kThrowStackOverflow, 0);
    __ break_(0xCC);
  }
}

#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_MIPS