1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
9 #if V8_TARGET_ARCH_MIPS
11 #include "src/codegen.h"
12 #include "src/debug/debug.h"
13 #include "src/deoptimizer.h"
14 #include "src/full-codegen/full-codegen.h"
15 #include "src/interpreter/bytecodes.h"
16 #include "src/runtime/runtime.h"
23 #define __ ACCESS_MASM(masm)
26 void Builtins::Generate_Adaptor(MacroAssembler* masm,
28 BuiltinExtraArguments extra_args) {
29 // ----------- S t a t e -------------
30 // -- a0 : number of arguments excluding receiver
31 // -- a1 : called function (only guaranteed when
32 // -- extra_args requires it)
34 // -- sp[0] : last argument
36 // -- sp[4 * (argc - 1)] : first argument
37 // -- sp[4 * argc] : receiver
38 // -----------------------------------
40 // Insert extra arguments.
41 int num_extra_args = 0;
42 if (extra_args == NEEDS_CALLED_FUNCTION) {
46 DCHECK(extra_args == NO_EXTRA_ARGUMENTS);
49 // JumpToExternalReference expects a0 to contain the number of arguments
50 // including the receiver and the extra arguments.
51 __ Addu(a0, a0, num_extra_args + 1);
52 __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
56 // Load the built-in InternalArray function from the current context.
57 static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
59 // Load the native context.
62 MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
64 FieldMemOperand(result, GlobalObject::kNativeContextOffset));
65 // Load the InternalArray function from the native context.
69 Context::INTERNAL_ARRAY_FUNCTION_INDEX)));
73 // Load the built-in Array function from the current context.
74 static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
75 // Load the native context.
78 MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
80 FieldMemOperand(result, GlobalObject::kNativeContextOffset));
81 // Load the Array function from the native context.
84 Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
88 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
89 // ----------- S t a t e -------------
90 // -- a0 : number of arguments
91 // -- ra : return address
92 // -- sp[...]: constructor arguments
93 // -----------------------------------
94 Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
96 // Get the InternalArray function.
97 GenerateLoadInternalArrayFunction(masm, a1);
99 if (FLAG_debug_code) {
100 // Initial map for the builtin InternalArray functions should be maps.
101 __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
103 __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction,
104 t0, Operand(zero_reg));
105 __ GetObjectType(a2, a3, t0);
106 __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction,
107 t0, Operand(MAP_TYPE));
110 // Run the native code for the InternalArray function called as a normal
113 InternalArrayConstructorStub stub(masm->isolate());
114 __ TailCallStub(&stub);
118 void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
119 // ----------- S t a t e -------------
120 // -- a0 : number of arguments
121 // -- ra : return address
122 // -- sp[...]: constructor arguments
123 // -----------------------------------
124 Label generic_array_code;
126 // Get the Array function.
127 GenerateLoadArrayFunction(masm, a1);
129 if (FLAG_debug_code) {
130 // Initial map for the builtin Array functions should be maps.
131 __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
133 __ Assert(ne, kUnexpectedInitialMapForArrayFunction1,
134 t0, Operand(zero_reg));
135 __ GetObjectType(a2, a3, t0);
136 __ Assert(eq, kUnexpectedInitialMapForArrayFunction2,
137 t0, Operand(MAP_TYPE));
140 // Run the native code for the Array function called as a normal function.
143 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
144 ArrayConstructorStub stub(masm->isolate());
145 __ TailCallStub(&stub);
149 void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
150 // ----------- S t a t e -------------
151 // -- a0 : number of arguments
152 // -- a1 : constructor function
153 // -- ra : return address
154 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
155 // -- sp[argc * 4] : receiver
156 // -----------------------------------
157 Counters* counters = masm->isolate()->counters();
158 __ IncrementCounter(counters->string_ctor_calls(), 1, a2, a3);
160 Register function = a1;
161 if (FLAG_debug_code) {
162 __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, a2);
163 __ Assert(eq, kUnexpectedStringFunction, function, Operand(a2));
166 // Load the first argument into a0 and get rid of the rest.
168 __ Branch(&no_arguments, eq, a0, Operand(zero_reg));
169 // First arg = sp[(argc - 1) * 4].
170 __ Subu(a0, a0, Operand(1));
171 __ sll(a0, a0, kPointerSizeLog2);
173 __ lw(a0, MemOperand(sp));
174 // sp now points to args[0]; drop args[0] and the receiver.
177 Register argument = a2;
178 Label not_cached, argument_is_string;
179 __ LookupNumberStringCache(a0, // Input.
185 __ IncrementCounter(counters->string_ctor_cached_number(), 1, a3, t0);
186 __ bind(&argument_is_string);
188 // ----------- S t a t e -------------
189 // -- a2 : argument converted to string
190 // -- a1 : constructor function
191 // -- ra : return address
192 // -----------------------------------
195 __ Allocate(JSValue::kSize,
202 // Initialize the String object.
204 __ LoadGlobalFunctionInitialMap(function, map, t0);
205 if (FLAG_debug_code) {
206 __ lbu(t0, FieldMemOperand(map, Map::kInstanceSizeOffset));
207 __ Assert(eq, kUnexpectedStringWrapperInstanceSize,
208 t0, Operand(JSValue::kSize >> kPointerSizeLog2));
209 __ lbu(t0, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
210 __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper,
211 t0, Operand(zero_reg));
213 __ sw(map, FieldMemOperand(v0, HeapObject::kMapOffset));
215 __ LoadRoot(a3, Heap::kEmptyFixedArrayRootIndex);
216 __ sw(a3, FieldMemOperand(v0, JSObject::kPropertiesOffset));
217 __ sw(a3, FieldMemOperand(v0, JSObject::kElementsOffset));
219 __ sw(argument, FieldMemOperand(v0, JSValue::kValueOffset));
221 // Ensure the object is fully initialized.
222 STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
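// The four stores above (map, properties, elements and value) cover every
// word of the JSValue, so no filler is needed before returning it.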
226 // The argument was not found in the number to string cache. Check
227 // if it's a string already before calling the conversion builtin.
228 Label convert_argument;
229 __ bind(&not_cached);
230 __ JumpIfSmi(a0, &convert_argument);
233 __ lw(a2, FieldMemOperand(a0, HeapObject::kMapOffset));
234 __ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset));
235 STATIC_ASSERT(kNotStringTag != 0);
236 __ And(t0, a3, Operand(kIsNotStringMask));
237 __ Branch(&convert_argument, ne, t0, Operand(zero_reg));
238 __ mov(argument, a0);
239 __ IncrementCounter(counters->string_ctor_conversions(), 1, a3, t0);
240 __ Branch(&argument_is_string);
242 // Invoke the conversion builtin and put the result into a2.
243 __ bind(&convert_argument);
244 __ push(function); // Preserve the function.
245 __ IncrementCounter(counters->string_ctor_conversions(), 1, a3, t0);
247 FrameScope scope(masm, StackFrame::INTERNAL);
249 __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
252 __ mov(argument, v0);
253 __ Branch(&argument_is_string);
255 // Load the empty string into a2, remove the receiver from the
256 // stack, and jump back to the case where the argument is a string.
257 __ bind(&no_arguments);
258 __ LoadRoot(argument, Heap::kempty_stringRootIndex);
260 __ Branch(&argument_is_string);
262 // At this point the argument is already a string. Call runtime to
263 // create a string wrapper.
264 __ bind(&gc_required);
265 __ IncrementCounter(counters->string_ctor_gc_required(), 1, a3, t0);
267 FrameScope scope(masm, StackFrame::INTERNAL);
269 __ CallRuntime(Runtime::kNewStringWrapper, 1);
275 static void CallRuntimePassFunction(
276 MacroAssembler* masm, Runtime::FunctionId function_id) {
277 FrameScope scope(masm, StackFrame::INTERNAL);
278 // Push a copy of the function onto the stack.
279 // Push function as parameter to the runtime call.
282 __ CallRuntime(function_id, 1);
283 // Restore receiver.
288 static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
289 __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
290 __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset));
291 __ Addu(at, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
296 static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
297 __ Addu(at, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
302 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
303 // Checking whether the queued function is ready for install is optional,
304 // since we come across interrupts and stack checks elsewhere. However,
305 // not checking may delay installing ready functions, and always checking
306 // would be quite expensive. A good compromise is to first check against
307 // stack limit as a cue for an interrupt signal.
309 __ LoadRoot(t0, Heap::kStackLimitRootIndex);
310 __ Branch(&ok, hs, sp, Operand(t0));
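// sp is below the stack limit, so an interrupt is pending anyway; use this
// as the cue to try installing optimized code via the runtime now.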
312 CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
313 GenerateTailCallToReturnedCode(masm);
316 GenerateTailCallToSharedCode(masm);
320 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
321 bool is_api_function,
322 bool create_memento) {
323 // ----------- S t a t e -------------
324 // -- a0 : number of arguments
325 // -- a1 : constructor function
326 // -- a2 : allocation site or undefined
327 // -- a3 : original constructor
328 // -- ra : return address
329 // -- sp[...]: constructor arguments
330 // -----------------------------------
332 // Should never create mementos for api functions.
333 DCHECK(!is_api_function || !create_memento);
335 Isolate* isolate = masm->isolate();
337 // Enter a construct frame.
339 FrameScope scope(masm, StackFrame::CONSTRUCT);
341 // Preserve the incoming parameters on the stack.
342 __ AssertUndefinedOrAllocationSite(a2, t0);
344 __ Push(a2, a0, a1, a3);
346 // Try to allocate the object without transitioning into C code. If any of
347 // the preconditions is not met, the code bails out to the runtime call.
348 Label rt_call, allocated;
349 if (FLAG_inline_new) {
350 ExternalReference debug_step_in_fp =
351 ExternalReference::debug_step_in_fp_address(isolate);
352 __ li(a2, Operand(debug_step_in_fp));
353 __ lw(a2, MemOperand(a2));
354 __ Branch(&rt_call, ne, a2, Operand(zero_reg));
356 // Fall back to runtime if the original constructor and function differ.
357 __ Branch(&rt_call, ne, a1, Operand(a3));
359 // Load the initial map and verify that it is in fact a map.
360 // a1: constructor function
361 __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
362 __ JumpIfSmi(a2, &rt_call);
363 __ GetObjectType(a2, t5, t4);
364 __ Branch(&rt_call, ne, t4, Operand(MAP_TYPE));
366 // Check that the constructor is not constructing a JSFunction (see
367 // comments in Runtime_NewObject in runtime.cc), in which case the
368 // initial map's instance type would be JS_FUNCTION_TYPE.
369 // a1: constructor function
371 __ lbu(t5, FieldMemOperand(a2, Map::kInstanceTypeOffset));
372 __ Branch(&rt_call, eq, t5, Operand(JS_FUNCTION_TYPE));
374 if (!is_api_function) {
376 MemOperand bit_field3 = FieldMemOperand(a2, Map::kBitField3Offset);
377 // Check if slack tracking is enabled.
378 __ lw(t0, bit_field3);
379 __ DecodeField<Map::Counter>(t2, t0);
380 __ Branch(&allocate, lt, t2, Operand(Map::kSlackTrackingCounterEnd));
381 // Decrease generous allocation count.
382 __ Subu(t0, t0, Operand(1 << Map::Counter::kShift));
383 __ Branch(USE_DELAY_SLOT, &allocate, ne, t2,
384 Operand(Map::kSlackTrackingCounterEnd));
385 __ sw(t0, bit_field3); // In delay slot.
387 __ Push(a1, a2, a1); // a1 = Constructor.
388 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
391 __ li(t2, Operand(Map::kSlackTrackingCounterEnd - 1));
396 // Now allocate the JSObject on the heap.
397 // a1: constructor function
399 Label rt_call_reload_new_target;
400 __ lbu(a3, FieldMemOperand(a2, Map::kInstanceSizeOffset));
401 if (create_memento) {
402 __ Addu(a3, a3, Operand(AllocationMemento::kSize / kPointerSize));
405 __ Allocate(a3, t4, t5, t6, &rt_call_reload_new_target, SIZE_IN_WORDS);
407 // Allocated the JSObject, now initialize the fields. Map is set to
408 // initial map and properties and elements are set to empty fixed array.
409 // a1: constructor function
411 // a3: object size (including memento if create_memento)
412 // t4: JSObject (not tagged)
413 __ LoadRoot(t6, Heap::kEmptyFixedArrayRootIndex);
415 __ sw(a2, MemOperand(t5, JSObject::kMapOffset));
416 __ sw(t6, MemOperand(t5, JSObject::kPropertiesOffset));
417 __ sw(t6, MemOperand(t5, JSObject::kElementsOffset));
418 __ Addu(t5, t5, Operand(3 * kPointerSize));
419 DCHECK_EQ(0 * kPointerSize, JSObject::kMapOffset);
420 DCHECK_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
421 DCHECK_EQ(2 * kPointerSize, JSObject::kElementsOffset);
423 // Fill all the in-object properties with appropriate filler.
424 // a1: constructor function
426 // a3: object size (in words, including memento if create_memento)
427 // t4: JSObject (not tagged)
428 // t5: First in-object property of JSObject (not tagged)
429 // t2: slack tracking counter (non-API function case)
430 DCHECK_EQ(3 * kPointerSize, JSObject::kHeaderSize);
432 // Use t7 to hold undefined, which is used in several places below.
433 __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
435 if (!is_api_function) {
436 Label no_inobject_slack_tracking;
438 // Check if slack tracking is enabled.
439 __ Branch(&no_inobject_slack_tracking, lt, t2,
440 Operand(Map::kSlackTrackingCounterEnd));
442 // Allocate object with a slack.
443 __ lbu(a0, FieldMemOperand(a2, Map::kInObjectPropertiesOffset));
444 __ lbu(a2, FieldMemOperand(a2, Map::kUnusedPropertyFieldsOffset));
446 __ sll(at, a0, kPointerSizeLog2);
448 // a0: offset of first field after pre-allocated fields
449 if (FLAG_debug_code) {
450 __ sll(at, a3, kPointerSizeLog2);
451 __ Addu(t6, t4, Operand(at)); // End of object.
452 __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields,
455 __ InitializeFieldsWithFiller(t5, a0, t7);
456 // To allow for truncation.
457 __ LoadRoot(t7, Heap::kOnePointerFillerMapRootIndex);
458 // Fill the remaining fields with one pointer filler map.
460 __ bind(&no_inobject_slack_tracking);
463 if (create_memento) {
464 __ Subu(a0, a3, Operand(AllocationMemento::kSize / kPointerSize));
465 __ sll(a0, a0, kPointerSizeLog2);
466 __ Addu(a0, t4, Operand(a0)); // End of object.
467 __ InitializeFieldsWithFiller(t5, a0, t7);
469 // Fill in memento fields.
470 // t5: points to the allocated but uninitialized memento.
471 __ LoadRoot(t7, Heap::kAllocationMementoMapRootIndex);
472 DCHECK_EQ(0 * kPointerSize, AllocationMemento::kMapOffset);
473 __ sw(t7, MemOperand(t5));
474 __ Addu(t5, t5, kPointerSize);
475 // Load the AllocationSite.
476 __ lw(t7, MemOperand(sp, 3 * kPointerSize));
477 __ AssertUndefinedOrAllocationSite(a2, t0);
478 DCHECK_EQ(1 * kPointerSize, AllocationMemento::kAllocationSiteOffset);
479 __ sw(t7, MemOperand(t5));
480 __ Addu(t5, t5, kPointerSize);
482 __ sll(at, a3, kPointerSizeLog2);
483 __ Addu(a0, t4, Operand(at)); // End of object.
484 __ InitializeFieldsWithFiller(t5, a0, t7);
487 // Add the object tag to make the JSObject real, so that we can continue
488 // and jump into the continuation code at any time from now on.
489 __ Addu(t4, t4, Operand(kHeapObjectTag));
491 // Continue with JSObject being successfully allocated.
495 // Reload the original constructor and fall-through.
496 __ bind(&rt_call_reload_new_target);
497 __ lw(a3, MemOperand(sp, 0 * kPointerSize));
500 // Allocate the new receiver object using the runtime call.
501 // a1: constructor function
502 // a3: original constructor
504 if (create_memento) {
505 // Get the cell or allocation site.
506 __ lw(a2, MemOperand(sp, 3 * kPointerSize));
507 __ push(a2); // argument 1: allocation site
510 __ Push(a1, a3); // arguments 2-3 / 1-2
511 if (create_memento) {
512 __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 3);
514 __ CallRuntime(Runtime::kNewObject, 2);
518 // Runtime_NewObjectWithAllocationSite increments allocation count.
519 // Skip the increment.
520 Label count_incremented;
521 if (create_memento) {
522 __ jmp(&count_incremented);
525 // Receiver for constructor call allocated.
529 if (create_memento) {
530 __ lw(a2, MemOperand(sp, 3 * kPointerSize));
531 __ LoadRoot(t5, Heap::kUndefinedValueRootIndex);
532 __ Branch(&count_incremented, eq, a2, Operand(t5));
533 // a2 is an AllocationSite. We are creating a memento from it, so we
534 // need to increment the memento create count.
535 __ lw(a3, FieldMemOperand(a2,
536 AllocationSite::kPretenureCreateCountOffset));
537 __ Addu(a3, a3, Operand(Smi::FromInt(1)));
538 __ sw(a3, FieldMemOperand(a2,
539 AllocationSite::kPretenureCreateCountOffset));
540 __ bind(&count_incremented);
543 // Restore the parameters.
544 __ Pop(a3); // new.target
547 // Retrieve smi-tagged arguments count from the stack.
548 __ lw(a0, MemOperand(sp));
553 // Set up pointer to last argument.
554 __ Addu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset));
556 // Copy arguments and receiver to the expression stack.
557 // a0: number of arguments
558 // a1: constructor function
559 // a2: address of last argument (caller sp)
560 // a3: number of arguments (smi-tagged)
564 // sp[3]: number of arguments (smi-tagged)
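// Note: a3 is smi-tagged, so the loop below decrements it by 2 (one smi)
// per argument while indexing the caller's frame through a2.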
569 __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize);
570 __ Addu(t0, a2, Operand(t0));
571 __ lw(t1, MemOperand(t0));
574 __ Addu(a3, a3, Operand(-2));
575 __ Branch(&loop, greater_equal, a3, Operand(zero_reg));
577 // Call the function.
578 // a0: number of arguments
579 // a1: constructor function
580 if (is_api_function) {
581 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
583 masm->isolate()->builtins()->HandleApiCallConstruct();
584 __ Call(code, RelocInfo::CODE_TARGET);
586 ParameterCount actual(a0);
587 __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper());
590 // Store offset of return address for deoptimizer.
591 if (!is_api_function) {
592 masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
595 // Restore context from the frame.
596 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
598 // If the result is an object (in the ECMA sense), we should get rid
599 // of the receiver and use the result; see ECMA-262 section 13.2.2-7
601 Label use_receiver, exit;
603 // If the result is a smi, it is *not* an object in the ECMA sense.
605 // sp[0]: receiver (newly allocated object)
607 // sp[2]: number of arguments (smi-tagged)
608 __ JumpIfSmi(v0, &use_receiver);
610 // If the type of the result (stored in its map) is less than
611 // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
612 __ GetObjectType(v0, a1, a3);
613 __ Branch(&exit, greater_equal, a3, Operand(FIRST_SPEC_OBJECT_TYPE));
615 // Throw away the result of the constructor invocation and use the
616 // on-stack receiver as the result.
617 __ bind(&use_receiver);
618 __ lw(v0, MemOperand(sp));
620 // Remove receiver from the stack, remove caller arguments, and
624 // sp[0]: receiver (newly allocated object)
625 // sp[1]: new.target (original constructor)
626 // sp[2]: number of arguments (smi-tagged)
627 __ lw(a1, MemOperand(sp, 2 * kPointerSize));
629 // Leave construct frame.
632 __ sll(t0, a1, kPointerSizeLog2 - 1);
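// a1 holds the smi-tagged argument count; shifting by kPointerSizeLog2 - 1
// (i.e. kPointerSizeLog2 - kSmiTagSize) yields the arguments' size in bytes.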
634 __ Addu(sp, sp, kPointerSize);
635 __ IncrementCounter(isolate->counters()->constructed_objects(), 1, a1, a2);
640 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
641 Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
645 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
646 Generate_JSConstructStubHelper(masm, true, false);
650 void Builtins::Generate_JSConstructStubForDerived(MacroAssembler* masm) {
651 // ----------- S t a t e -------------
652 // -- a0 : number of arguments
653 // -- a1 : constructor function
654 // -- a2 : allocation site or undefined
655 // -- a3 : original constructor
656 // -- ra : return address
657 // -- sp[...]: constructor arguments
658 // -----------------------------------
661 FrameScope frame_scope(masm, StackFrame::CONSTRUCT);
663 __ AssertUndefinedOrAllocationSite(a2, t0);
668 __ push(t0); // Smi-tagged arguments count.
673 // receiver is the hole.
674 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
677 // Set up pointer to last argument.
678 __ Addu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset));
680 // Copy arguments and receiver to the expression stack.
681 // a0: number of arguments
682 // a1: constructor function
683 // a2: address of last argument (caller sp)
684 // t0: number of arguments (smi-tagged)
687 // sp[2]: number of arguments (smi-tagged)
691 __ sll(at, t0, kPointerSizeLog2 - 1);
692 __ Addu(at, a2, Operand(at));
693 __ lw(at, MemOperand(at));
696 __ Subu(t0, t0, Operand(2));
697 __ Branch(&loop, ge, t0, Operand(zero_reg));
701 ExternalReference debug_step_in_fp =
702 ExternalReference::debug_step_in_fp_address(masm->isolate());
703 __ li(a2, Operand(debug_step_in_fp));
704 __ lw(a2, MemOperand(a2));
705 __ Branch(&skip_step_in, eq, a2, Operand(zero_reg));
708 __ CallRuntime(Runtime::kHandleStepInForDerivedConstructors, 1);
711 __ bind(&skip_step_in);
713 // Call the function.
714 // a0: number of arguments
715 // a1: constructor function
716 ParameterCount actual(a0);
717 __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper());
719 // Restore context from the frame.
722 // sp[1]: number of arguments (smi-tagged)
723 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
724 __ lw(a1, MemOperand(sp, kPointerSize));
726 // Leave construct frame.
729 __ sll(at, a1, kPointerSizeLog2 - 1);
730 __ Addu(sp, sp, Operand(at));
731 __ Addu(sp, sp, Operand(kPointerSize));
736 enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };
739 // Clobbers a2; preserves all other registers.
740 static void Generate_CheckStackOverflow(MacroAssembler* masm,
741 const int calleeOffset, Register argc,
742 IsTagged argc_is_tagged) {
743 // Check the stack for overflow. We are not trying to catch
744 // interruptions (e.g. debug break and preemption) here, so the "real stack
745 // limit" is checked.
747 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
748 // Make a2 the space we have left. The stack might already have overflowed
749 // here, which will cause a2 to become negative.
751 // Check if the arguments will overflow the stack.
752 if (argc_is_tagged == kArgcIsSmiTagged) {
753 __ sll(t3, argc, kPointerSizeLog2 - kSmiTagSize);
755 DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
756 __ sll(t3, argc, kPointerSizeLog2);
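// Either way t3 ends up holding argc * kPointerSize, the number of bytes
// the arguments will occupy on the stack.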
758 // Signed comparison.
759 __ Branch(&okay, gt, a2, Operand(t3));
761 // Out of stack space.
762 __ lw(a1, MemOperand(fp, calleeOffset));
763 if (argc_is_tagged == kArgcIsUntaggedInt) {
767 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
773 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
775 // Called from JSEntryStub::GenerateBody
777 // ----------- S t a t e -------------
780 // -- a2: receiver_pointer
783 // -----------------------------------
784 ProfileEntryHookStub::MaybeCallEntryHook(masm);
786 // Clear the context before we push it when entering the JS frame.
787 __ mov(cp, zero_reg);
789 // Enter an internal frame.
791 FrameScope scope(masm, StackFrame::INTERNAL);
793 // Set up the context from the function argument.
794 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
796 // Push the function and the receiver onto the stack.
799 // Check if we have enough stack space to push all arguments.
800 // The function is the first thing that was pushed above after entering
801 // the internal frame.
802 const int kFunctionOffset =
803 InternalFrameConstants::kCodeOffset - kPointerSize;
805 Generate_CheckStackOverflow(masm, kFunctionOffset, a3, kArgcIsUntaggedInt);
807 // Copy arguments to the stack in a loop.
809 // s0: argv, i.e. points to first arg
811 __ sll(t0, a3, kPointerSizeLog2);
814 __ nop(); // Branch delay slot nop.
815 // t2 points past last arg.
817 __ lw(t0, MemOperand(s0)); // Read next parameter.
818 __ addiu(s0, s0, kPointerSize);
819 __ lw(t0, MemOperand(t0)); // Dereference handle.
820 __ push(t0); // Push parameter.
822 __ Branch(&loop, ne, s0, Operand(t2));
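// The loop above terminates once s0 has advanced past the last argv slot
// (s0 == t2), i.e. every argument handle has been dereferenced and pushed.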
824 // Initialize all JavaScript callee-saved registers, since they will be seen
825 // by the garbage collector as part of handlers.
826 __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
832 // s6 holds the root address. Do not clobber.
833 // s7 is cp. Do not init.
835 // Invoke the code and pass argc as a0.
838 // No type feedback cell is available
839 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
840 CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
843 ParameterCount actual(a0);
844 __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper());
847 // Leave internal frame.
854 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
855 Generate_JSEntryTrampolineHelper(masm, false);
859 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
860 Generate_JSEntryTrampolineHelper(masm, true);
864 // Generate code for entering a JS function with the interpreter.
865 // On entry to the function the receiver and arguments have been pushed on the
866 // stack left to right. The actual argument count matches the formal parameter
867 // count expected by the function.
869 // The live registers are:
870 // o a1: the JS function object being called.
872 // o fp: the caller's frame pointer
873 // o sp: stack pointer
874 // o ra: return address
876 // The function builds a JS frame. Please see JavaScriptFrameConstants in
877 // frames-mips.h for its layout.
878 // TODO(rmcilroy): We will need to include the current bytecode pointer in the
880 void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
881 // Open a frame scope to indicate that there is a frame on the stack. The
882 // MANUAL indicates that the scope shouldn't actually generate code to set up
883 // the frame (that is done below).
884 FrameScope frame_scope(masm, StackFrame::MANUAL);
886 __ Push(ra, fp, cp, a1);
887 __ Addu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
889 // Get the bytecode array from the function object and load it into
890 // kInterpreterBytecodeArrayRegister.
891 __ lw(a0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
892 __ lw(kInterpreterBytecodeArrayRegister,
893 FieldMemOperand(a0, SharedFunctionInfo::kFunctionDataOffset));
895 if (FLAG_debug_code) {
896 // Check function data field is actually a BytecodeArray object.
897 __ SmiTst(kInterpreterBytecodeArrayRegister, t0);
898 __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, t0,
900 __ GetObjectType(kInterpreterBytecodeArrayRegister, t0, t0);
901 __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, t0,
902 Operand(BYTECODE_ARRAY_TYPE));
905 // Allocate the local and temporary register file on the stack.
907 // Load frame size from the BytecodeArray object.
908 __ lw(t0, FieldMemOperand(kInterpreterBytecodeArrayRegister,
909 BytecodeArray::kFrameSizeOffset));
911 // Do a stack check to ensure we don't go over the limit.
913 __ Subu(t1, sp, Operand(t0));
914 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
915 __ Branch(&ok, hs, t1, Operand(a2));
916 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
919 // If ok, push undefined as the initial value for all register file entries.
920 // Note: there should always be at least one stack slot for the return
921 // register in the register file.
923 __ LoadRoot(t1, Heap::kUndefinedValueRootIndex);
924 __ bind(&loop_header);
925 // TODO(rmcilroy): Consider doing more than one push per loop iteration.
927 // Continue loop if not done.
928 __ Subu(t0, t0, Operand(kPointerSize));
929 __ Branch(&loop_header, ge, t0, Operand(zero_reg));
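// t0 counts the remaining frame size in bytes; one undefined value (t1) is
// pushed per register-file slot until t0 goes negative.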
932 // TODO(rmcilroy): List of things not currently dealt with here but done in
933 // fullcodegen's prologue:
934 // - Support profiler (specifically profiling_counter).
935 // - Call ProfileEntryHookStub when isolate has a function_entry_hook.
936 // - Allow simulator stop operations if FLAG_stop_at is set.
937 // - Deal with sloppy mode functions which need to replace the
938 // receiver with the global proxy when called as functions (without an
939 // explicit receiver object).
940 // - Code aging of the BytecodeArray object.
941 // - Supporting FLAG_trace.
943 // The following items are also not done here, and will probably be done using
944 // explicit bytecodes instead:
945 // - Allocating a new local context if applicable.
946 // - Setting up a local binding to the this function, which is used in
947 // derived constructors with super calls.
948 // - Setting new.target if required.
949 // - Dealing with REST parameters (only if
950 // https://codereview.chromium.org/1235153006 doesn't land by then).
951 // - Dealing with argument objects.
953 // Perform stack guard check.
956 __ LoadRoot(at, Heap::kStackLimitRootIndex);
957 __ Branch(&ok, hs, sp, Operand(at));
958 __ CallRuntime(Runtime::kStackGuard, 0);
962 // Load bytecode offset and dispatch table into registers.
963 __ li(kInterpreterBytecodeOffsetRegister,
964 Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
965 __ LoadRoot(kInterpreterDispatchTableRegister,
966 Heap::kInterpreterTableRootIndex);
967 __ Addu(kInterpreterDispatchTableRegister, kInterpreterDispatchTableRegister,
968 Operand(FixedArray::kHeaderSize - kHeapObjectTag));
970 // Dispatch to the first bytecode handler for the function.
971 __ Addu(a0, kInterpreterBytecodeArrayRegister,
972 kInterpreterBytecodeOffsetRegister);
973 __ lbu(a0, MemOperand(a0));
974 __ sll(at, a0, kPointerSizeLog2);
975 __ Addu(at, kInterpreterDispatchTableRegister, at);
976 __ lw(at, MemOperand(at));
977 // TODO(rmcilroy): Make dispatch table point to code entries to avoid untagging
978 // and header removal.
979 __ Addu(at, at, Operand(Code::kHeaderSize - kHeapObjectTag));
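// at now holds the handler's entry point: dispatch_table[bytecode] with the
// Code header skipped; jumping to it executes the first bytecode.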
984 void Builtins::Generate_InterpreterExitTrampoline(MacroAssembler* masm) {
985 // TODO(rmcilroy): List of things not currently dealt with here but done in
986 // fullcodegen's EmitReturnSequence.
987 // - Supporting FLAG_trace for Runtime::TraceExit.
988 // - Support profiler (specifically decrementing profiling_counter
989 // appropriately and calling out to HandleInterrupts if necessary).
991 // Load return value into v0.
992 __ lw(v0, MemOperand(fp, -kPointerSize -
993 StandardFrameConstants::kFixedFrameSizeFromFp));
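// The return value lives in the first register-file slot, immediately below
// the fixed part of the frame.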
994 // Leave the frame (also dropping the register file).
995 __ LeaveFrame(StackFrame::JAVA_SCRIPT);
996 // Drop receiver + arguments.
997 __ Drop(1); // TODO(rmcilroy): Get number of arguments from BytecodeArray.
1002 void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
1003 CallRuntimePassFunction(masm, Runtime::kCompileLazy);
1004 GenerateTailCallToReturnedCode(masm);
1008 static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
1009 FrameScope scope(masm, StackFrame::INTERNAL);
1010 // Push a copy of the function onto the stack.
1011 // Push function as parameter to the runtime call.
1013 // Whether to compile in a background thread.
1015 at, concurrent ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
1018 __ CallRuntime(Runtime::kCompileOptimized, 2);
1019 // Restore receiver.
1024 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
1025 CallCompileOptimized(masm, false);
1026 GenerateTailCallToReturnedCode(masm);
1030 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
1031 CallCompileOptimized(masm, true);
1032 GenerateTailCallToReturnedCode(masm);
1037 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
1038 // For now, we are relying on the fact that make_code_young doesn't do any
1039 // garbage collection which allows us to save/restore the registers without
1040 // worrying about which of them contain pointers. We also don't build an
1041 // internal frame to make the code faster, since we shouldn't have to do stack
1042 // crawls in MakeCodeYoung. This seems a bit fragile.
1044 // Set a0 to point to the head of the PlatformCodeAge sequence.
1046 Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));
1048 // The following registers must be saved and restored when calling through to
1050 // a0 - contains return address (beginning of patch sequence)
1052 RegList saved_regs =
1053 (a0.bit() | a1.bit() | ra.bit() | fp.bit()) & ~sp.bit();
1054 FrameScope scope(masm, StackFrame::MANUAL);
1055 __ MultiPush(saved_regs);
1056 __ PrepareCallCFunction(2, 0, a2);
1057 __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
1059 ExternalReference::get_make_code_young_function(masm->isolate()), 2);
1060 __ MultiPop(saved_regs);
1064 #define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \
1065 void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
1066 MacroAssembler* masm) { \
1067 GenerateMakeCodeYoungAgainCommon(masm); \
1069 void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \
1070 MacroAssembler* masm) { \
1071 GenerateMakeCodeYoungAgainCommon(masm); \
1073 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
1074 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
1077 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
1078 // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
1079 // that make_code_young doesn't do any garbage collection which allows us to
1080 // save/restore the registers without worrying about which of them contain
1083 // Set a0 to point to the head of the PlatformCodeAge sequence.
1085 Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));
1087 // The following registers must be saved and restored when calling through to
1089 // a0 - contains return address (beginning of patch sequence)
1091 RegList saved_regs =
1092 (a0.bit() | a1.bit() | ra.bit() | fp.bit()) & ~sp.bit();
1093 FrameScope scope(masm, StackFrame::MANUAL);
1094 __ MultiPush(saved_regs);
1095 __ PrepareCallCFunction(2, 0, a2);
1096 __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
1098 ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
1100 __ MultiPop(saved_regs);
1102 // Perform prologue operations usually performed by the young code stub.
1103 __ Push(ra, fp, cp, a1);
1104 __ Addu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
1106 // Jump to point after the code-age stub.
1107 __ Addu(a0, a0, Operand(kNoCodeAgeSequenceLength));
1112 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
1113 GenerateMakeCodeYoungAgainCommon(masm);
1117 void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
1118 Generate_MarkCodeAsExecutedOnce(masm);
1122 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
1123 SaveFPRegsMode save_doubles) {
1125 FrameScope scope(masm, StackFrame::INTERNAL);
1127 // Preserve registers across notification; this is important for compiled
1128 // stubs that tail call the runtime on deopts passing their parameters in
1130 __ MultiPush(kJSCallerSaved | kCalleeSaved);
1131 // Pass the function and deoptimization type to the runtime system.
1132 __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
1133 __ MultiPop(kJSCallerSaved | kCalleeSaved);
1136 __ Addu(sp, sp, Operand(kPointerSize)); // Ignore state
1137 __ Jump(ra); // Jump to miss handler
1141 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
1142 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
1146 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
1147 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
1151 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
1152 Deoptimizer::BailoutType type) {
1154 FrameScope scope(masm, StackFrame::INTERNAL);
1155 // Pass the function and deoptimization type to the runtime system.
1156 __ li(a0, Operand(Smi::FromInt(static_cast<int>(type))));
1158 __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
1161 // Get the full codegen state from the stack and untag it -> t2.
1162 __ lw(t2, MemOperand(sp, 0 * kPointerSize));
1164 // Switch on the state.
1165 Label with_tos_register, unknown_state;
1166 __ Branch(&with_tos_register,
1167 ne, t2, Operand(FullCodeGenerator::NO_REGISTERS));
1168 __ Ret(USE_DELAY_SLOT);
1169 // Safe to fill delay slot; Addu will emit one instruction.
1170 __ Addu(sp, sp, Operand(1 * kPointerSize)); // Remove state.
1172 __ bind(&with_tos_register);
1173 __ lw(v0, MemOperand(sp, 1 * kPointerSize));
1174 __ Branch(&unknown_state, ne, t2, Operand(FullCodeGenerator::TOS_REG));
1176 __ Ret(USE_DELAY_SLOT);
1177 // Safe to fill delay slot; Addu will emit one instruction.
1178 __ Addu(sp, sp, Operand(2 * kPointerSize)); // Remove state.
1180 __ bind(&unknown_state);
1181 __ stop("no cases left");
1185 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
1186 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
1190 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
1191 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
1195 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
1196 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
1200 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
1201 // Lookup the function in the JavaScript frame.
1202 __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1204 FrameScope scope(masm, StackFrame::INTERNAL);
1205 // Pass function as argument.
1207 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
1210 // If the code object is null, just return to the unoptimized code.
1211 __ Ret(eq, v0, Operand(Smi::FromInt(0)));
1213 // Load deoptimization data from the code object.
1214 // <deopt_data> = <code>[#deoptimization_data_offset]
1215 __ lw(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag));
1217 // Load the OSR entrypoint offset from the deoptimization data.
1218 // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
1219 __ lw(a1, MemOperand(a1, FixedArray::OffsetOfElementAt(
1220 DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));
1223 // Compute the target address = code_obj + header_size + osr_offset
1224 // <entry_addr> = <code_obj> + #header_size + <osr_offset>
1225 __ addu(v0, v0, a1);
1226 __ addiu(ra, v0, Code::kHeaderSize - kHeapObjectTag);
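// ra = code start + Code::kHeaderSize + osr_offset, so "returning" through
// ra enters the optimized code at the OSR entry point.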
1228 // And "return" to the OSR entry point of the function.
1233 void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
1234 // We check the stack limit as an indicator that recompilation might be done.
1236 __ LoadRoot(at, Heap::kStackLimitRootIndex);
1237 __ Branch(&ok, hs, sp, Operand(at));
1239 FrameScope scope(masm, StackFrame::INTERNAL);
1240 __ CallRuntime(Runtime::kStackGuard, 0);
1242 __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
1243 RelocInfo::CODE_TARGET);
1250 void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
1251 // 1. Make sure we have at least one argument.
1252 // a0: actual number of arguments
1254 __ Branch(&done, ne, a0, Operand(zero_reg));
1255 __ LoadRoot(t2, Heap::kUndefinedValueRootIndex);
1257 __ Addu(a0, a0, Operand(1));
1261 // 2. Get the function to call (passed as receiver) from the stack, check
1262 // if it is a function.
1263 // a0: actual number of arguments
1264 Label slow, non_function;
1265 __ sll(at, a0, kPointerSizeLog2);
1266 __ addu(at, sp, at);
1267 __ lw(a1, MemOperand(at));
1268 __ JumpIfSmi(a1, &non_function);
1269 __ GetObjectType(a1, a2, a2);
1270 __ Branch(&slow, ne, a2, Operand(JS_FUNCTION_TYPE));
1272 // 3a. Patch the first argument if necessary when calling a function.
1273 // a0: actual number of arguments
1275 Label shift_arguments;
1276 __ li(t0, Operand(0, RelocInfo::NONE32)); // Indicate regular JS_FUNCTION.
1277 { Label convert_to_object, use_global_proxy, patch_receiver;
1278 // Change context eagerly in case we need the global receiver.
1279 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
1281 // Do not transform the receiver for strict mode functions.
1282 __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
1283 __ lw(a3, FieldMemOperand(a2, SharedFunctionInfo::kCompilerHintsOffset));
1284 __ And(t3, a3, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
1286 __ Branch(&shift_arguments, ne, t3, Operand(zero_reg));
1288 // Do not transform the receiver for native (Compilerhints already in a3).
1289 __ And(t3, a3, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
1290 __ Branch(&shift_arguments, ne, t3, Operand(zero_reg));
1292 // Compute the receiver in sloppy mode.
1293 // Load first argument in a2. a2 = -kPointerSize(sp + n_args << 2).
1294 __ sll(at, a0, kPointerSizeLog2);
1295 __ addu(a2, sp, at);
1296 __ lw(a2, MemOperand(a2, -kPointerSize));
1297 // a0: actual number of arguments
1299 // a2: first argument
1300 __ JumpIfSmi(a2, &convert_to_object, t2);
1302 __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
1303 __ Branch(&use_global_proxy, eq, a2, Operand(a3));
1304 __ LoadRoot(a3, Heap::kNullValueRootIndex);
1305 __ Branch(&use_global_proxy, eq, a2, Operand(a3));
1307 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
1308 __ GetObjectType(a2, a3, a3);
1309 __ Branch(&shift_arguments, ge, a3, Operand(FIRST_SPEC_OBJECT_TYPE));
1311 __ bind(&convert_to_object);
1312 // Enter an internal frame in order to preserve argument count.
1314 FrameScope scope(masm, StackFrame::INTERNAL);
1315 __ sll(a0, a0, kSmiTagSize); // Smi tagged.
1318 ToObjectStub stub(masm->isolate());
1323 __ sra(a0, a0, kSmiTagSize); // Un-tag.
1324 // Leave internal frame.
1327 // Restore the function to a1, and the flag to t0.
1328 __ sll(at, a0, kPointerSizeLog2);
1329 __ addu(at, sp, at);
1330 __ lw(a1, MemOperand(at));
1331 __ Branch(USE_DELAY_SLOT, &patch_receiver);
1332 __ li(t0, Operand(0, RelocInfo::NONE32)); // In delay slot.
1334 __ bind(&use_global_proxy);
1335 __ lw(a2, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
1336 __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalProxyOffset));
1338 __ bind(&patch_receiver);
1339 __ sll(at, a0, kPointerSizeLog2);
1340 __ addu(a3, sp, at);
1341 __ sw(a2, MemOperand(a3, -kPointerSize));
1343 __ Branch(&shift_arguments);
1346 // 3b. Check for function proxy.
1348 __ li(t0, Operand(1, RelocInfo::NONE32)); // Indicate function proxy.
1349 __ Branch(&shift_arguments, eq, a2, Operand(JS_FUNCTION_PROXY_TYPE));
1351 __ bind(&non_function);
1352 __ li(t0, Operand(2, RelocInfo::NONE32)); // Indicate non-function.
1354 // 3c. Patch the first argument when calling a non-function. The
1355 // CALL_NON_FUNCTION builtin expects the non-function callee as
1356 // receiver, so overwrite the first argument which will ultimately
1357 // become the receiver.
1358 // a0: actual number of arguments
1360 // t0: call type (0: JS function, 1: function proxy, 2: non-function)
1361 __ sll(at, a0, kPointerSizeLog2);
1362 __ addu(a2, sp, at);
1363 __ sw(a1, MemOperand(a2, -kPointerSize));
1365 // 4. Shift arguments and return address one slot down on the stack
1366 // (overwriting the original receiver). Adjust argument count to make
1367 // the original first argument the new receiver.
1368 // a0: actual number of arguments
1370 // t0: call type (0: JS function, 1: function proxy, 2: non-function)
1371 __ bind(&shift_arguments);
1373 // Calculate the copy start address (destination). Copy end address is sp.
1374 __ sll(at, a0, kPointerSizeLog2);
1375 __ addu(a2, sp, at);
1378 __ lw(at, MemOperand(a2, -kPointerSize));
1379 __ sw(at, MemOperand(a2));
1380 __ Subu(a2, a2, Operand(kPointerSize));
1381 __ Branch(&loop, ne, a2, Operand(sp));
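// The loop walks a2 from the receiver slot down to sp, copying each word
// from the slot below it; the original receiver is overwritten and the
// original first argument becomes the new receiver.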
1382 // Adjust the actual number of arguments and remove the top element
1383 // (which is a copy of the last argument).
1384 __ Subu(a0, a0, Operand(1));
1388 // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
1389 // or a function proxy via CALL_FUNCTION_PROXY.
1390 // a0: actual number of arguments
1392 // t0: call type (0: JS function, 1: function proxy, 2: non-function)
1393 { Label function, non_proxy;
1394 __ Branch(&function, eq, t0, Operand(zero_reg));
1395 // Expected number of arguments is 0 for CALL_NON_FUNCTION.
1396 __ mov(a2, zero_reg);
1397 __ Branch(&non_proxy, ne, t0, Operand(1));
1399 __ push(a1); // Re-add proxy object as additional argument.
1400 __ Addu(a0, a0, Operand(1));
1401 __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY);
1402 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1403 RelocInfo::CODE_TARGET);
1405 __ bind(&non_proxy);
1406 __ GetBuiltinFunction(a1, Builtins::CALL_NON_FUNCTION);
1407 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1408 RelocInfo::CODE_TARGET);
1412 // 5b. Get the code to call from the function and check that the number of
1413 // expected arguments matches what we're providing. If so, jump
1414 // (tail-call) to the code in register a3 without checking arguments.
1415 // a0: actual number of arguments
1417 __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
1419 FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
1420 __ sra(a2, a2, kSmiTagSize);
1421 // Check formal and actual parameter counts.
1422 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1423 RelocInfo::CODE_TARGET, ne, a2, Operand(a0));
1425 __ lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
1426 ParameterCount expected(0);
1427 __ InvokeCode(a3, expected, expected, JUMP_FUNCTION, NullCallWrapper());
1431 static void Generate_PushAppliedArguments(MacroAssembler* masm,
1432 const int argumentsOffset,
1433 const int indexOffset,
1434 const int limitOffset) {
1436 Register receiver = LoadDescriptor::ReceiverRegister();
1437 Register key = LoadDescriptor::NameRegister();
1438 Register slot = LoadDescriptor::SlotRegister();
1439 Register vector = LoadWithVectorDescriptor::VectorRegister();
1441 __ lw(key, MemOperand(fp, indexOffset));
1444 // Load the current argument from the arguments array.
1446 __ lw(receiver, MemOperand(fp, argumentsOffset));
1448 // Use inline caching to speed up access to arguments.
1449 Code::Kind kinds[] = {Code::KEYED_LOAD_IC};
1450 FeedbackVectorSpec spec(0, 1, kinds);
1451 Handle<TypeFeedbackVector> feedback_vector =
1452 masm->isolate()->factory()->NewTypeFeedbackVector(&spec);
1453 int index = feedback_vector->GetIndex(FeedbackVectorICSlot(0));
1454 __ li(slot, Operand(Smi::FromInt(index)));
1455 __ li(vector, feedback_vector);
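// The keyed load IC expects the smi-tagged slot index in the slot register
// and the feedback vector itself in the vector register.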
1457 KeyedLoadICStub(masm->isolate(), LoadICState(kNoExtraICState)).GetCode();
1458 __ Call(ic, RelocInfo::CODE_TARGET);
1462 // Use inline caching to access the arguments.
1463 __ lw(key, MemOperand(fp, indexOffset));
1464 __ Addu(key, key, Operand(1 << kSmiTagSize));
1465 __ sw(key, MemOperand(fp, indexOffset));
1467 // Test if the copy loop has finished copying all the elements from the
1468 // arguments object.
1470 __ lw(a1, MemOperand(fp, limitOffset));
1471 __ Branch(&loop, ne, key, Operand(a1));
1473 // On exit, the pushed arguments count is in a0, untagged
1479 // Used by FunctionApply and ReflectApply
1480 static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) {
1481 const int kFormalParameters = targetIsArgument ? 3 : 2;
1482 const int kStackSize = kFormalParameters + 1;
1485 FrameScope frame_scope(masm, StackFrame::INTERNAL);
1486 const int kArgumentsOffset = kFPOnStackSize + kPCOnStackSize;
1487 const int kReceiverOffset = kArgumentsOffset + kPointerSize;
1488 const int kFunctionOffset = kReceiverOffset + kPointerSize;
1490 __ lw(a0, MemOperand(fp, kFunctionOffset)); // Get the function.
1492 __ lw(a0, MemOperand(fp, kArgumentsOffset)); // Get the args array.
1494 // Returns (in v0) number of arguments to copy to stack as Smi.
1495 if (targetIsArgument) {
1496 __ InvokeBuiltin(Builtins::REFLECT_APPLY_PREPARE, CALL_FUNCTION);
1498 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
1501 // Returns the result in v0.
1502 Generate_CheckStackOverflow(masm, kFunctionOffset, v0, kArgcIsSmiTagged);
1504 // Push current limit and index.
1505 const int kIndexOffset =
1506 StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize);
1507 const int kLimitOffset =
1508 StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize);
1509 __ mov(a1, zero_reg);
1510 __ Push(v0, a1); // Limit and initial index.
1512 // Get the receiver.
1513 __ lw(a0, MemOperand(fp, kReceiverOffset));
1515 // Check that the function is a JS function (otherwise it must be a proxy).
1516 Label push_receiver;
1517 __ lw(a1, MemOperand(fp, kFunctionOffset));
1518 __ GetObjectType(a1, a2, a2);
1519 __ Branch(&push_receiver, ne, a2, Operand(JS_FUNCTION_TYPE));
1521 // Change context eagerly to get the right global object if necessary.
1522 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
1523 // Load the shared function info while the function is still in a1.
1524 __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
1526 // Compute the receiver.
1527 // Do not transform the receiver for strict mode functions.
1528 Label call_to_object, use_global_proxy;
1529 __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCompilerHintsOffset));
1530 __ And(t3, a2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
1532 __ Branch(&push_receiver, ne, t3, Operand(zero_reg));
1534 // Do not transform the receiver for native (Compilerhints already in a2).
1535 __ And(t3, a2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
1536 __ Branch(&push_receiver, ne, t3, Operand(zero_reg));
1538 // Compute the receiver in sloppy mode.
1539 __ JumpIfSmi(a0, &call_to_object);
1540 __ LoadRoot(a1, Heap::kNullValueRootIndex);
1541 __ Branch(&use_global_proxy, eq, a0, Operand(a1));
1542 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
1543 __ Branch(&use_global_proxy, eq, a0, Operand(a2));
1545 // Check if the receiver is already a JavaScript object.
1547 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
1548 __ GetObjectType(a0, a1, a1);
1549 __ Branch(&push_receiver, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
1551 // Convert the receiver to a regular object.
1553 __ bind(&call_to_object);
1554 ToObjectStub stub(masm->isolate());
1556 __ mov(a0, v0); // Put object in a0 to match other paths to push_receiver.
1557 __ Branch(&push_receiver);
1559 __ bind(&use_global_proxy);
1560 __ lw(a0, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
1561 __ lw(a0, FieldMemOperand(a0, GlobalObject::kGlobalProxyOffset));
1563 // Push the receiver.
1565 __ bind(&push_receiver);
1568 // Copy all arguments from the array to the stack.
1569 Generate_PushAppliedArguments(
1570 masm, kArgumentsOffset, kIndexOffset, kLimitOffset);
1572 // Call the function.
1574 ParameterCount actual(a0);
1575 __ lw(a1, MemOperand(fp, kFunctionOffset));
1576 __ GetObjectType(a1, a2, a2);
1577 __ Branch(&call_proxy, ne, a2, Operand(JS_FUNCTION_TYPE));
1579 __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper());
1581 frame_scope.GenerateLeaveFrame();
1582 __ Ret(USE_DELAY_SLOT);
1583 __ Addu(sp, sp, Operand(kStackSize * kPointerSize)); // In delay slot.
1585 // Call the function proxy.
1586 __ bind(&call_proxy);
1587 __ push(a1); // Add function proxy as last argument.
1588 __ Addu(a0, a0, Operand(1));
1589 __ li(a2, Operand(0, RelocInfo::NONE32));
1590 __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY);
1591 __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1592 RelocInfo::CODE_TARGET);
1593 // Tear down the internal frame and remove function, receiver and args.
1596 __ Ret(USE_DELAY_SLOT);
1597 __ Addu(sp, sp, Operand(kStackSize * kPointerSize)); // In delay slot.
1601 static void Generate_ConstructHelper(MacroAssembler* masm) {
1602 const int kFormalParameters = 3;
1603 const int kStackSize = kFormalParameters + 1;
1606 FrameScope frame_scope(masm, StackFrame::INTERNAL);
1607 const int kNewTargetOffset = kFPOnStackSize + kPCOnStackSize;
1608 const int kArgumentsOffset = kNewTargetOffset + kPointerSize;
1609 const int kFunctionOffset = kArgumentsOffset + kPointerSize;
1611 // If new.target is not supplied, set it to the constructor.
1612 Label validate_arguments;
1613 __ lw(a0, MemOperand(fp, kNewTargetOffset));
1614 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1615 __ Branch(&validate_arguments, ne, a0, Operand(at));
1616 __ lw(a0, MemOperand(fp, kFunctionOffset));
1617 __ sw(a0, MemOperand(fp, kNewTargetOffset));
1619 // Validate arguments
1620 __ bind(&validate_arguments);
1621 __ lw(a0, MemOperand(fp, kFunctionOffset)); // get the function
1623 __ lw(a0, MemOperand(fp, kArgumentsOffset)); // get the args array
1625 __ lw(a0, MemOperand(fp, kNewTargetOffset)); // get the new.target
1627 // Returns argument count in v0.
1628 __ InvokeBuiltin(Builtins::REFLECT_CONSTRUCT_PREPARE, CALL_FUNCTION);
1630 // Returns result in v0.
1631 Generate_CheckStackOverflow(masm, kFunctionOffset, v0, kArgcIsSmiTagged);
1633 // Push current limit and index.
1634 const int kIndexOffset =
1635 StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize);
1636 const int kLimitOffset =
1637 StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize);
1638 __ push(v0); // limit
1639 __ mov(a1, zero_reg); // initial index
1641 // Push the constructor function as callee.
1642 __ lw(a0, MemOperand(fp, kFunctionOffset));
1645 // Copy all arguments from the array to the stack.
1646 Generate_PushAppliedArguments(
1647 masm, kArgumentsOffset, kIndexOffset, kLimitOffset);
1649 // Use undefined feedback vector
1650 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
1651 __ lw(a1, MemOperand(fp, kFunctionOffset));
1652 __ lw(t0, MemOperand(fp, kNewTargetOffset));
1654 // Call the function.
1655 CallConstructStub stub(masm->isolate(), SUPER_CONSTRUCTOR_CALL);
1656 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
1658 // Leave internal frame.
1661 __ Addu(sp, sp, Operand(kStackSize * kPointerSize)); // In delay slot.
1665 void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
1666 Generate_ApplyHelper(masm, false);
1670 void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
1671 Generate_ApplyHelper(masm, true);
1675 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
1676 Generate_ConstructHelper(masm);
1680 static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
1681 Label* stack_overflow) {
1682 // ----------- S t a t e -------------
1683 // -- a0 : actual number of arguments
1684 // -- a1 : function (passed through to callee)
1685 // -- a2 : expected number of arguments
1686 // -----------------------------------
1687 // Check the stack for overflow. We are not trying to catch
1688 // interruptions (e.g. debug break and preemption) here, so the "real stack
1689 // limit" is checked.
1690 __ LoadRoot(t1, Heap::kRealStackLimitRootIndex);
1691 // Make t1 the space we have left. The stack might already have overflowed
1692 // here, which will cause t1 to become negative.
1693 __ subu(t1, sp, t1);
1694 // Check if the arguments will overflow the stack.
1695 __ sll(at, a2, kPointerSizeLog2);
1696 // Signed comparison.
1697 __ Branch(stack_overflow, le, t1, Operand(at));
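// If the space left (t1) is not strictly greater than the bytes needed for
// the expected arguments, the adapted frame would overflow the stack.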
1701 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
1702 __ sll(a0, a0, kSmiTagSize);
1703 __ li(t0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
1704 __ MultiPush(a0.bit() | a1.bit() | t0.bit() | fp.bit() | ra.bit());
1706 Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
1710 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1711 // ----------- S t a t e -------------
1712 // -- v0 : result being passed through
1713 // -----------------------------------
1714 // Get the number of arguments passed (as a smi), tear down the frame and
1715 // then tear down the parameters.
1716 __ lw(a1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
1719 __ MultiPop(fp.bit() | ra.bit());
1720 __ sll(t0, a1, kPointerSizeLog2 - kSmiTagSize);
1721 __ Addu(sp, sp, t0);
1722 // Adjust for the receiver.
1723 __ Addu(sp, sp, Operand(kPointerSize));
1727 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
1728 // State setup as expected by MacroAssembler::InvokePrologue.
1729 // ----------- S t a t e -------------
1730 // -- a0: actual arguments count
1731 // -- a1: function (passed through to callee)
1732 // -- a2: expected arguments count
1733 // -----------------------------------
1735 Label stack_overflow;
1736 ArgumentAdaptorStackCheck(masm, &stack_overflow);
1737 Label invoke, dont_adapt_arguments;
1739 Label enough, too_few;
1740 __ lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
1741 __ Branch(&dont_adapt_arguments, eq,
1742 a2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
1743 // We use Uless as the number of arguments should always be greater than 0.
1744 __ Branch(&too_few, Uless, a0, Operand(a2));
1746 { // Enough parameters: actual >= expected.
1747 // a0: actual number of arguments as a smi
1749 // a2: expected number of arguments
1750 // a3: code entry to call
1752 EnterArgumentsAdaptorFrame(masm);
1754 // Calculate copy start address into a0 and copy end address into a2.
1755 __ sll(a0, a0, kPointerSizeLog2 - kSmiTagSize);
1756 __ Addu(a0, fp, a0);
1757 // Adjust for return address and receiver.
1758 __ Addu(a0, a0, Operand(2 * kPointerSize));
1759 // Compute copy end address.
1760 __ sll(a2, a2, kPointerSizeLog2);
1761 __ subu(a2, a0, a2);
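// a2 now marks the copy end address (a0 minus expected * kPointerSize); the
// copy loop below walks a0 down to a2.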
1763 // Copy the arguments (including the receiver) to the new stack frame.
1764 // a0: copy start address
1766 // a2: copy end address
1767 // a3: code entry to call
1771 __ lw(t0, MemOperand(a0));
1773 __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a2));
1774 __ addiu(a0, a0, -kPointerSize); // In delay slot.
1779 { // Too few parameters: Actual < expected.
1782 // If the function is strong we need to throw an error.
1783 Label no_strong_error;
1784 __ lw(t1, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
1785 __ lw(t2, FieldMemOperand(t1, SharedFunctionInfo::kCompilerHintsOffset));
1786 __ And(t3, t2, Operand(1 << (SharedFunctionInfo::kStrongModeFunction +
1788 __ Branch(&no_strong_error, eq, t3, Operand(zero_reg));
1790 // What we really care about is the required number of arguments.
1791 __ lw(t2, FieldMemOperand(t1, SharedFunctionInfo::kLengthOffset));
1793 __ Branch(&no_strong_error, ge, a0, Operand(t2));
1796 FrameScope frame(masm, StackFrame::MANUAL);
1797 EnterArgumentsAdaptorFrame(masm);
1798 __ CallRuntime(Runtime::kThrowStrongModeTooFewArguments, 0);
1801 __ bind(&no_strong_error);
1802 EnterArgumentsAdaptorFrame(masm);
1804 // Calculate the copy start address into a0; the copy end address is fp.
1805 // a0: actual number of arguments as a smi
1807 // a2: expected number of arguments
1808 // a3: code entry to call
1809 __ sll(a0, a0, kPointerSizeLog2 - kSmiTagSize);
1810 __ Addu(a0, fp, a0);
1811 // Adjust for return address and receiver.
1812 __ Addu(a0, a0, Operand(2 * kPointerSize));
1813 // Compute copy end address. Also adjust for return address.
1814 __ Addu(t3, fp, kPointerSize);
1816 // Copy the arguments (including the receiver) to the new stack frame.
1817 // a0: copy start address
1819 // a2: expected number of arguments
1820 // a3: code entry to call
1821 // t3: copy end address
1824 __ lw(t0, MemOperand(a0)); // Adjusted above for return addr and receiver.
1825 __ Subu(sp, sp, kPointerSize);
1826 __ Subu(a0, a0, kPointerSize);
1827 __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(t3));
1828 __ sw(t0, MemOperand(sp)); // In the delay slot.
1830 // Fill the remaining expected arguments with undefined.
1832 // a2: expected number of arguments
1833 // a3: code entry to call
1834 __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
1835 __ sll(t2, a2, kPointerSizeLog2);
1836 __ Subu(a2, fp, Operand(t2));
1837 // Adjust for frame.
1838 __ Subu(a2, a2, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
1843 __ Subu(sp, sp, kPointerSize);
1844 __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(a2));
1845 __ sw(t0, MemOperand(sp));
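// The fill loop above pushes undefined until sp reaches a2, the lowest
// address of the expected-argument area computed earlier.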
1848 // Call the entry point.
1853 // Store offset of return address for deoptimizer.
1854 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
1856 // Exit frame and return.
1857 LeaveArgumentsAdaptorFrame(masm);
1861 // -------------------------------------------
1862 // Don't adapt arguments.
1863 // -------------------------------------------
1864 __ bind(&dont_adapt_arguments);
1867 __ bind(&stack_overflow);
1869 FrameScope frame(masm, StackFrame::MANUAL);
1870 EnterArgumentsAdaptorFrame(masm);
1871 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
1879 } // namespace internal
1882 #endif // V8_TARGET_ARCH_MIPS