// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_MIPS

#include "src/codegen.h"
#include "src/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen.h"
#include "src/runtime.h"
#include "src/stub-cache.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)
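
// Note: each "__ op(...)" line below expands to "masm->op(...)" via the
// ACCESS_MASM macro, so e.g. "__ sll(s1, s0, kPointerSizeLog2)" emits a
// shift instruction through the MacroAssembler passed in as |masm|.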


void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- a0                 : number of arguments excluding receiver
  //  -- a1                 : called function (only guaranteed when
  //  --                      extra_args requires it)
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------

  // Insert extra arguments.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    __ push(a1);
    num_extra_args = 1;
  } else {
    ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects s0 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ Addu(s0, a0, num_extra_args + 1);
  __ sll(s1, s0, kPointerSizeLog2);
  __ Subu(s1, s1, kPointerSize);
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}
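
// Worked example for the arithmetic above: with a0 = 2 arguments and no
// extra arguments, s0 = 2 + 0 + 1 = 3 (count including the receiver) and
// s1 = 3 * 4 - 4 = 8, the byte offset of the receiver, i.e. sp[4 * argc].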


// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the native context.
  __ lw(result,
        MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ lw(result,
        FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the InternalArray function from the native context.
  __ lw(result,
        MemOperand(result,
                   Context::SlotOffset(
                       Context::INTERNAL_ARRAY_FUNCTION_INDEX)));
}


// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the native context.
  __ lw(result,
        MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ lw(result,
        FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the Array function from the native context.
  __ lw(result,
        MemOperand(result,
                   Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
}
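
// Both loaders above walk the same chain, reusing |result| as scratch at
// each step: cp (current context) -> global object -> native context ->
// builtin function slot.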


void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, a1);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(a2, t0);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction,
              t0, Operand(zero_reg));
    __ GetObjectType(a2, a3, t0);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction,
              t0, Operand(MAP_TYPE));
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, a1);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(a2, t0);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction1,
              t0, Operand(zero_reg));
    __ GetObjectType(a2, a3, t0);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction2,
              t0, Operand(MAP_TYPE));
  }

  // Run the native code for the Array function called as a normal function.
  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : constructor function
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_ctor_calls(), 1, a2, a3);

  Register function = a1;
  if (FLAG_debug_code) {
    __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, a2);
    __ Assert(eq, kUnexpectedStringFunction, function, Operand(a2));
  }

  // Load the first argument into a0 and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  __ Branch(&no_arguments, eq, a0, Operand(zero_reg));
  // First argument is at sp[(argc - 1) * 4].
  __ Subu(a0, a0, Operand(1));
  __ sll(a0, a0, kPointerSizeLog2);
  __ Addu(sp, a0, sp);
  __ lw(a0, MemOperand(sp));
  // sp now points to args[0]; drop args[0] and the receiver.
  __ Drop(2);

  Register argument = a2;
  Label not_cached, argument_is_string;
  __ LookupNumberStringCache(a0,        // Input.
                             argument,  // Result.
                             a3,        // Scratch.
                             t0,        // Scratch.
                             t1,        // Scratch.
                             &not_cached);
  __ IncrementCounter(counters->string_ctor_cached_number(), 1, a3, t0);
  __ bind(&argument_is_string);

  // ----------- S t a t e -------------
  //  -- a2     : argument converted to string
  //  -- a1     : constructor function
  //  -- ra     : return address
  // -----------------------------------

  Label gc_required;
  __ Allocate(JSValue::kSize,
              v0,  // Result.
              a3,  // Scratch.
              t0,  // Scratch.
              &gc_required,
              TAG_OBJECT);

  // Initialising the String Object.
  Register map = a3;
  __ LoadGlobalFunctionInitialMap(function, map, t0);
  if (FLAG_debug_code) {
    __ lbu(t0, FieldMemOperand(map, Map::kInstanceSizeOffset));
    __ Assert(eq, kUnexpectedStringWrapperInstanceSize,
              t0, Operand(JSValue::kSize >> kPointerSizeLog2));
    __ lbu(t0, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
    __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper,
              t0, Operand(zero_reg));
  }
  __ sw(map, FieldMemOperand(v0, HeapObject::kMapOffset));

  __ LoadRoot(a3, Heap::kEmptyFixedArrayRootIndex);
  __ sw(a3, FieldMemOperand(v0, JSObject::kPropertiesOffset));
  __ sw(a3, FieldMemOperand(v0, JSObject::kElementsOffset));

  __ sw(argument, FieldMemOperand(v0, JSValue::kValueOffset));

  // Ensure the object is fully initialized.
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
  __ Ret();

  // The argument was not found in the number to string cache. Check
  // if it's a string already before calling the conversion builtin.
  Label convert_argument;
  __ bind(&not_cached);
  __ JumpIfSmi(a0, &convert_argument);

  // Is the argument a string?
  __ lw(a2, FieldMemOperand(a0, HeapObject::kMapOffset));
  __ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  __ And(t0, a3, Operand(kIsNotStringMask));
  __ Branch(&convert_argument, ne, t0, Operand(zero_reg));
  __ mov(argument, a0);
  __ IncrementCounter(counters->string_ctor_conversions(), 1, a3, t0);
  __ Branch(&argument_is_string);

  // Invoke the conversion builtin and put the result into a2.
  __ bind(&convert_argument);
  __ push(function);  // Preserve the function.
  __ IncrementCounter(counters->string_ctor_conversions(), 1, a3, t0);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(a0);
    __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
  }
  __ pop(function);
  __ mov(argument, v0);
  __ Branch(&argument_is_string);

  // Load the empty string into a2, remove the receiver from the
  // stack, and jump back to the case where the argument is a string.
  __ bind(&no_arguments);
  __ LoadRoot(argument, Heap::kempty_stringRootIndex);
  __ Drop(1);
  __ Branch(&argument_is_string);

  // At this point the argument is already a string. Call runtime to
  // create a string wrapper.
  __ bind(&gc_required);
  __ IncrementCounter(counters->string_ctor_gc_required(), 1, a3, t0);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(argument);
    __ CallRuntime(Runtime::kNewStringWrapper, 1);
  }
  __ Ret();
}
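
// Recap of the fast path above: a number that hits the number-to-string
// cache never reaches the TO_STRING builtin, and an argument that is already
// a string is wrapped directly; only other values pay for the conversion call.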


static void CallRuntimePassFunction(
    MacroAssembler* masm, Runtime::FunctionId function_id) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack.
  // Push the function as a parameter to the runtime call.
  __ Push(a1, a1);

  __ CallRuntime(function_id, 1);
  // Restore the receiver.
  __ Pop(a1);
}


static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset));
  __ Addu(at, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}


static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
  __ Addu(at, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}


void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive. A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ LoadRoot(t0, Heap::kStackLimitRootIndex);
  __ Branch(&ok, hs, sp, Operand(t0));

  CallRuntimePassFunction(masm, Runtime::kHiddenTryInstallOptimizedCode);
  GenerateTailCallToReturnedCode(masm);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_memento) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- a1     : constructor function
  //  -- a2     : allocation site or undefined
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  // Should never create mementos for api functions.
  ASSERT(!is_api_function || !create_memento);

  Isolate* isolate = masm->isolate();

  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- a1     : constructor function
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    if (create_memento) {
      __ AssertUndefinedOrAllocationSite(a2, a3);
      __ push(a2);
    }

    // Preserve the two incoming parameters on the stack.
    __ sll(a0, a0, kSmiTagSize);  // Tag arguments count.
    __ MultiPushReversed(a0.bit() | a1.bit());
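
    // Smi-tagging note: on this 32-bit target a small integer is stored
    // shifted left by kSmiTagSize (1) with a zero tag bit, so the sll above
    // turns e.g. argc = 2 into the smi representation 4 (binary 100).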

    Label rt_call, allocated;
    // Try to allocate the object without transitioning into C code. If any of
    // the preconditions is not met, the code bails out to the runtime call.
    if (FLAG_inline_new) {
      Label undo_allocation;
      ExternalReference debug_step_in_fp =
          ExternalReference::debug_step_in_fp_address(isolate);
      __ li(a2, Operand(debug_step_in_fp));
      __ lw(a2, MemOperand(a2));
      __ Branch(&rt_call, ne, a2, Operand(zero_reg));

      // Load the initial map and verify that it is in fact a map.
      // a1: constructor function
      __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
      __ JumpIfSmi(a2, &rt_call);
      __ GetObjectType(a2, a3, t4);
      __ Branch(&rt_call, ne, t4, Operand(MAP_TYPE));

      // Check that the constructor is not constructing a JSFunction (see
      // comments in Runtime_NewObject in runtime.cc), in which case the
      // initial map's instance type would be JS_FUNCTION_TYPE.
      // a1: constructor function
      // a2: initial map
      __ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset));
      __ Branch(&rt_call, eq, a3, Operand(JS_FUNCTION_TYPE));

      if (!is_api_function) {
        Label allocate;
        MemOperand bit_field3 = FieldMemOperand(a2, Map::kBitField3Offset);
        // Check if slack tracking is enabled.
        __ lw(t0, bit_field3);
        __ DecodeField<Map::ConstructionCount>(t2, t0);
        __ Branch(&allocate, eq, t2, Operand(JSFunction::kNoSlackTracking));
        // Decrease generous allocation count.
        __ Subu(t0, t0, Operand(1 << Map::ConstructionCount::kShift));
        __ Branch(USE_DELAY_SLOT,
                  &allocate, ne, t2, Operand(JSFunction::kFinishSlackTracking));
        __ sw(t0, bit_field3);  // In delay slot.

        __ Push(a1, a2, a1);  // a1 = Constructor.
        __ CallRuntime(Runtime::kHiddenFinalizeInstanceSize, 1);

        __ Pop(a1, a2);

        // Slack tracking counter is kNoSlackTracking after runtime call.
        ASSERT(JSFunction::kNoSlackTracking == 0);
        __ mov(t2, zero_reg);

        __ bind(&allocate);
      }
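
      // Slack-tracking recap (rough sketch of the protocol above): the map's
      // ConstructionCount counts down on each allocation; once it reaches
      // kFinishSlackTracking the runtime shrinks the instance size, and
      // kNoSlackTracking (0) means the initial map is final.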

      // Now allocate the JSObject on the heap.
      // a1: constructor function
      // a2: initial map
      __ lbu(a3, FieldMemOperand(a2, Map::kInstanceSizeOffset));
      if (create_memento) {
        __ Addu(a3, a3, Operand(AllocationMemento::kSize / kPointerSize));
      }

      __ Allocate(a3, t4, t5, t6, &rt_call, SIZE_IN_WORDS);

      // Allocated the JSObject, now initialize the fields. Map is set to
      // initial map and properties and elements are set to empty fixed array.
      // a1: constructor function
      // a2: initial map
      // a3: object size (not including memento if create_memento)
      // t4: JSObject (not tagged)
      __ LoadRoot(t6, Heap::kEmptyFixedArrayRootIndex);
      __ mov(t5, t4);
      __ sw(a2, MemOperand(t5, JSObject::kMapOffset));
      __ sw(t6, MemOperand(t5, JSObject::kPropertiesOffset));
      __ sw(t6, MemOperand(t5, JSObject::kElementsOffset));
      __ Addu(t5, t5, Operand(3 * kPointerSize));
      ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
      ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
      ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset);

      // Fill all the in-object properties with appropriate filler.
      // a1: constructor function
      // a2: initial map
      // a3: object size (in words, including memento if create_memento)
      // t4: JSObject (not tagged)
      // t5: First in-object property of JSObject (not tagged)
      // t2: slack tracking counter (non-API function case)
      ASSERT_EQ(3 * kPointerSize, JSObject::kHeaderSize);

      // Use t7 to hold undefined, which is used in several places below.
      __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);

      if (!is_api_function) {
        Label no_inobject_slack_tracking;

        // Check if slack tracking is enabled.
        __ Branch(&no_inobject_slack_tracking,
                  eq, t2, Operand(JSFunction::kNoSlackTracking));

        // Allocate object with a slack.
        __ lbu(a0, FieldMemOperand(a2, Map::kPreAllocatedPropertyFieldsOffset));
        __ sll(at, a0, kPointerSizeLog2);
        __ addu(a0, t5, at);
        // a0: offset of first field after pre-allocated fields
        if (FLAG_debug_code) {
          __ sll(at, a3, kPointerSizeLog2);
          __ Addu(t6, t4, Operand(at));  // End of object.
          __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields,
                    a0, Operand(t6));
        }
        __ InitializeFieldsWithFiller(t5, a0, t7);
        // To allow for truncation.
        __ LoadRoot(t7, Heap::kOnePointerFillerMapRootIndex);
        // Fill the remaining fields with one pointer filler map.

        __ bind(&no_inobject_slack_tracking);
      }

      if (create_memento) {
        __ Subu(a0, a3, Operand(AllocationMemento::kSize / kPointerSize));
        __ sll(a0, a0, kPointerSizeLog2);
        __ Addu(a0, t4, Operand(a0));  // End of object.
        __ InitializeFieldsWithFiller(t5, a0, t7);

        // Fill in memento fields.
        // t5: points to the allocated but uninitialized memento.
        __ LoadRoot(t7, Heap::kAllocationMementoMapRootIndex);
        ASSERT_EQ(0 * kPointerSize, AllocationMemento::kMapOffset);
        __ sw(t7, MemOperand(t5));
        __ Addu(t5, t5, kPointerSize);
        // Load the AllocationSite.
        __ lw(t7, MemOperand(sp, 2 * kPointerSize));
        ASSERT_EQ(1 * kPointerSize, AllocationMemento::kAllocationSiteOffset);
        __ sw(t7, MemOperand(t5));
        __ Addu(t5, t5, kPointerSize);
      } else {
        __ sll(at, a3, kPointerSizeLog2);
        __ Addu(a0, t4, Operand(at));  // End of object.
        __ InitializeFieldsWithFiller(t5, a0, t7);
      }
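
      // Memento layout note: an AllocationMemento is two words (map plus
      // AllocationSite pointer) placed directly after the object's last
      // field, which is why the filler loop in the memento case stops
      // AllocationMemento::kSize short of the allocation end before the
      // two stores.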

      // Add the object tag to make the JSObject real, so that we can continue
      // and jump into the continuation code at any time from now on. Any
      // failures need to undo the allocation, so that the heap is in a
      // consistent state and verifiable.
      __ Addu(t4, t4, Operand(kHeapObjectTag));

      // Check if a non-empty properties array is needed. Continue with
      // allocated object if not; fall through to runtime call if it is.
      // a1: constructor function
      // t4: JSObject
      // t5: start of next object (not tagged)
      __ lbu(a3, FieldMemOperand(a2, Map::kUnusedPropertyFieldsOffset));
      // The instance sizes field contains both pre-allocated property fields
      // and in-object properties.
      __ lbu(t6, FieldMemOperand(a2, Map::kPreAllocatedPropertyFieldsOffset));
      __ Addu(a3, a3, Operand(t6));
      __ lbu(t6, FieldMemOperand(a2, Map::kInObjectPropertiesOffset));
      __ subu(a3, a3, t6);

      // Done if no extra properties are to be allocated.
      __ Branch(&allocated, eq, a3, Operand(zero_reg));
      __ Assert(greater_equal, kPropertyAllocationCountFailed,
                a3, Operand(zero_reg));

      // Scale the number of elements by pointer size and add the header for
      // FixedArrays to the start of the next object calculation from above.
      // a1: constructor function
      // a3: number of elements in properties array
      // t4: JSObject
      // t5: start of next object
      __ Addu(a0, a3, Operand(FixedArray::kHeaderSize / kPointerSize));
      __ Allocate(
          a0,
          t5,
          t6,
          a2,
          &undo_allocation,
          static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | SIZE_IN_WORDS));

      // Initialize the FixedArray.
      // a1: constructor
      // a3: number of elements in properties array (untagged)
      // t4: JSObject
      // t5: start of next object
      __ LoadRoot(t6, Heap::kFixedArrayMapRootIndex);
      __ mov(a2, t5);
      __ sw(t6, MemOperand(a2, JSObject::kMapOffset));
      __ sll(a0, a3, kSmiTagSize);
      __ sw(a0, MemOperand(a2, FixedArray::kLengthOffset));
      __ Addu(a2, a2, Operand(2 * kPointerSize));

      ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
      ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset);

      // Initialize the fields to undefined.
      // a1: constructor function
      // a2: First element of FixedArray (not tagged)
      // a3: number of elements in properties array
      // t4: JSObject
      // t5: FixedArray (not tagged)
      __ sll(t3, a3, kPointerSizeLog2);
      __ addu(t6, a2, t3);  // End of object.
      ASSERT_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
      { Label loop, entry;
        if (!is_api_function || create_memento) {
          __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
        } else if (FLAG_debug_code) {
          __ LoadRoot(t2, Heap::kUndefinedValueRootIndex);
          __ Assert(eq, kUndefinedValueNotLoaded, t7, Operand(t2));
        }
        __ jmp(&entry);
        __ bind(&loop);
        __ sw(t7, MemOperand(a2));
        __ addiu(a2, a2, kPointerSize);
        __ bind(&entry);
        __ Branch(&loop, less, a2, Operand(t6));
      }

      // Store the initialized FixedArray into the properties field of
      // the JSObject.
      // a1: constructor function
      // t4: JSObject
      // t5: FixedArray (not tagged)
      __ Addu(t5, t5, Operand(kHeapObjectTag));  // Add the heap tag.
      __ sw(t5, FieldMemOperand(t4, JSObject::kPropertiesOffset));

      // Continue with JSObject being successfully allocated.
      // a1: constructor function
      // t4: JSObject
      __ jmp(&allocated);

      // Undo the setting of the new top so that the heap is verifiable. For
      // example, the map's unused properties potentially do not match the
      // allocated objects' unused properties.
      // t4: JSObject (previous new top)
      __ bind(&undo_allocation);
      __ UndoAllocationInNewSpace(t4, t5);
    }

    // Allocate the new receiver object using the runtime call.
    // a1: constructor function
    __ bind(&rt_call);
    if (create_memento) {
      // Get the cell or allocation site.
      __ lw(a2, MemOperand(sp, 2 * kPointerSize));
      __ push(a2);
    }

    __ push(a1);  // Argument for Runtime_NewObject.
    if (create_memento) {
      __ CallRuntime(Runtime::kHiddenNewObjectWithAllocationSite, 2);
    } else {
      __ CallRuntime(Runtime::kHiddenNewObject, 1);
    }
    __ mov(t4, v0);

    // If we ended up using the runtime, and we want a memento, then the
    // runtime call made it for us, and we shouldn't do the create count
    // increment.
    Label count_incremented;
    if (create_memento) {
      __ jmp(&count_incremented);
    }

    // Receiver for constructor call allocated.
    // t4: JSObject
    __ bind(&allocated);

    if (create_memento) {
      __ lw(a2, MemOperand(sp, kPointerSize * 2));
      __ LoadRoot(t5, Heap::kUndefinedValueRootIndex);
      __ Branch(&count_incremented, eq, a2, Operand(t5));
      // a2 is an AllocationSite. We are creating a memento from it, so we
      // need to increment the memento create count.
      __ lw(a3, FieldMemOperand(a2,
                                AllocationSite::kPretenureCreateCountOffset));
      __ Addu(a3, a3, Operand(Smi::FromInt(1)));
      __ sw(a3, FieldMemOperand(a2,
                                AllocationSite::kPretenureCreateCountOffset));
      __ bind(&count_incremented);
    }

    __ Push(t4, t4);

    // Reload the number of arguments from the stack.
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: constructor function
    // sp[3]: number of arguments (smi-tagged)
    __ lw(a1, MemOperand(sp, 2 * kPointerSize));
    __ lw(a3, MemOperand(sp, 3 * kPointerSize));

    // Set up pointer to last argument.
    __ Addu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Set up number of arguments for function call below.
    __ srl(a0, a3, kSmiTagSize);

    // Copy arguments and receiver to the expression stack.
    // a0: number of arguments
    // a1: constructor function
    // a2: address of last argument (caller sp)
    // a3: number of arguments (smi-tagged)
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: constructor function
    // sp[3]: number of arguments (smi-tagged)
    Label loop, entry;
    __ jmp(&entry);
    __ bind(&loop);
    __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize);
    __ Addu(t0, a2, Operand(t0));
    __ lw(t1, MemOperand(t0));
    __ push(t1);
    __ bind(&entry);
    __ Addu(a3, a3, Operand(-2));
    __ Branch(&loop, greater_equal, a3, Operand(zero_reg));
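
    // Loop note: a3 holds the smi-tagged argument count, so the
    // Addu(a3, a3, Operand(-2)) above subtracts a raw 2, i.e. one
    // smi-encoded argument, per iteration.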

    // Call the function.
    // a0: number of arguments
    // a1: constructor function
    if (is_api_function) {
      __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      __ Call(code, RelocInfo::CODE_TARGET);
    } else {
      ParameterCount actual(a0);
      __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper());
    }

    // Store offset of return address for deoptimizer.
    if (!is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7
    // on page 74.
    Label use_receiver, exit;

    // If the result is a smi, it is *not* an object in the ECMA sense.
    // v0: result
    // sp[0]: receiver (newly allocated object)
    // sp[1]: constructor function
    // sp[2]: number of arguments (smi-tagged)
    __ JumpIfSmi(v0, &use_receiver);

    // If the type of the result (stored in its map) is less than
    // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
    __ GetObjectType(v0, a1, a3);
    __ Branch(&exit, greater_equal, a3, Operand(FIRST_SPEC_OBJECT_TYPE));

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ lw(v0, MemOperand(sp));

    // Remove receiver from the stack, remove caller arguments, and
    // return.
    __ bind(&exit);
    // v0: result
    // sp[0]: receiver (newly allocated object)
    // sp[1]: constructor function
    // sp[2]: number of arguments (smi-tagged)
    __ lw(a1, MemOperand(sp, 2 * kPointerSize));

    // Leave construct frame.
  }

  __ sll(t0, a1, kPointerSizeLog2 - 1);
  __ Addu(sp, sp, t0);
  __ Addu(sp, sp, kPointerSize);
  __ IncrementCounter(isolate->counters()->constructed_objects(), 1, a1, a2);
  __ Ret();
}
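
// Note on the epilogue above: a1 holds the smi-tagged argument count, so the
// one-bit shift scales the raw value (argc * 2) to argc * 4 bytes of
// arguments; the extra kPointerSize pops the receiver as well.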


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false);
}


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from JSEntryStub::GenerateBody

  // ----------- S t a t e -------------
  //  -- a0: code entry
  //  -- a1: function
  //  -- a2: receiver_pointer
  //  -- a3: argc
  //  -- s0: argv
  // -----------------------------------
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Clear the context before we push it when entering the JS frame.
  __ mov(cp, zero_reg);

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context from the function argument.
    __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

    // Push the function and the receiver onto the stack.
    __ Push(a1, a2);

    // Copy arguments to the stack in a loop.
    // a3: argc
    // s0: argv, i.e. points to first arg
    Label loop, entry;
    __ sll(t0, a3, kPointerSizeLog2);
    __ addu(t2, s0, t0);
    __ b(&entry);
    __ nop();  // Branch delay slot nop.
    // t2 points past last arg.
    __ bind(&loop);
    __ lw(t0, MemOperand(s0));  // Read next parameter.
    __ addiu(s0, s0, kPointerSize);
    __ lw(t0, MemOperand(t0));  // Dereference handle.
    __ push(t0);  // Push parameter.
    __ bind(&entry);
    __ Branch(&loop, ne, s0, Operand(t2));

    // Initialize all JavaScript callee-saved registers, since they will be
    // seen by the garbage collector as part of handlers.
    __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
    __ mov(s1, t0);
    __ mov(s2, t0);
    __ mov(s3, t0);
    __ mov(s4, t0);
    __ mov(s5, t0);
    // s6 holds the root address. Do not clobber.
    // s7 is cp. Do not init.

    // Invoke the code and pass argc as a0.
    __ mov(a0, a3);
    if (is_construct) {
      // No type feedback cell is available.
      __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
      CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
      __ CallStub(&stub);
    } else {
      ParameterCount actual(a0);
      __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper());
    }

    // Leave internal frame.
  }

  __ Jump(ra);
}
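
// Note on the copy loop above: every argv slot holds a Handle, a pointer to
// a pointer to the actual object, hence the two lw instructions per
// parameter (read the slot, then dereference the handle).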


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}


void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kHiddenCompileUnoptimized);
  GenerateTailCallToReturnedCode(masm);
}


static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack.
  // Push the function as a parameter to the runtime call.
  __ Push(a1, a1);
  // Whether to compile in a background thread.
  __ Push(masm->isolate()->factory()->ToBoolean(concurrent));

  __ CallRuntime(Runtime::kHiddenCompileOptimized, 2);
  // Restore the receiver.
  __ Pop(a1);
}


void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  CallCompileOptimized(masm, false);
  GenerateTailCallToReturnedCode(masm);
}


void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  CallCompileOptimized(masm, true);
  GenerateTailCallToReturnedCode(masm);
}


static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do
  // stack crawls in MakeCodeYoung. This seems a bit fragile.

  // Set a0 to point to the head of the PlatformCodeAge sequence.
  __ Subu(a0, a0,
          Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));

  // The following registers must be saved and restored when calling through
  // to the runtime:
  //   a0 - contains return address (beginning of patch sequence)
  //   a1 - isolate
  RegList saved_regs =
      (a0.bit() | a1.bit() | ra.bit() | fp.bit()) & ~sp.bit();
  FrameScope scope(masm, StackFrame::MANUAL);
  __ MultiPush(saved_regs);
  __ PrepareCallCFunction(2, 0, a2);
  __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  __ MultiPop(saved_regs);
  __ Jump(a0);
}


#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}                                                            \
void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR


void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the
  // fact that make_code_young doesn't do any garbage collection which allows
  // us to save/restore the registers without worrying about which of them
  // contain pointers.

  // Set a0 to point to the head of the PlatformCodeAge sequence.
  __ Subu(a0, a0,
          Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));

  // The following registers must be saved and restored when calling through
  // to the runtime:
  //   a0 - contains return address (beginning of patch sequence)
  //   a1 - isolate
  RegList saved_regs =
      (a0.bit() | a1.bit() | ra.bit() | fp.bit()) & ~sp.bit();
  FrameScope scope(masm, StackFrame::MANUAL);
  __ MultiPush(saved_regs);
  __ PrepareCallCFunction(2, 0, a2);
  __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
      2);
  __ MultiPop(saved_regs);

  // Perform prologue operations usually performed by the young code stub.
  __ Push(ra, fp, cp, a1);
  __ Addu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));

  // Jump to point after the code-age stub.
  __ Addu(a0, a0, Operand(kNoCodeAgeSequenceLength));
  __ Jump(a0);
}


void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}


static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification; this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    __ MultiPush(kJSCallerSaved | kCalleeSaved);
    // Pass the function and deoptimization type to the runtime system.
    __ CallRuntime(Runtime::kHiddenNotifyStubFailure, 0, save_doubles);
    __ MultiPop(kJSCallerSaved | kCalleeSaved);
  }

  __ Addu(sp, sp, Operand(kPointerSize));  // Ignore state.
  __ Jump(ra);  // Jump to miss handler.
}


void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}


void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}


static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass the function and deoptimization type to the runtime system.
    __ li(a0, Operand(Smi::FromInt(static_cast<int>(type))));
    __ push(a0);
    __ CallRuntime(Runtime::kHiddenNotifyDeoptimized, 1);
  }

  // Get the full codegen state from the stack and untag it -> t2.
  __ lw(t2, MemOperand(sp, 0 * kPointerSize));
  __ SmiUntag(t2);
  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ Branch(&with_tos_register,
            ne, t2, Operand(FullCodeGenerator::NO_REGISTERS));
  __ Ret(USE_DELAY_SLOT);
  // Safe to fill delay slot: Addu will emit one instruction.
  __ Addu(sp, sp, Operand(1 * kPointerSize));  // Remove state.

  __ bind(&with_tos_register);
  __ lw(v0, MemOperand(sp, 1 * kPointerSize));
  __ Branch(&unknown_state, ne, t2, Operand(FullCodeGenerator::TOS_REG));

  __ Ret(USE_DELAY_SLOT);
  // Safe to fill delay slot: Addu will emit one instruction.
  __ Addu(sp, sp, Operand(2 * kPointerSize));  // Remove state.

  __ bind(&unknown_state);
  __ stop("no cases left");
}
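
// Delay-slot note: on MIPS the instruction after a branch executes before
// the branch takes effect, so Ret(USE_DELAY_SLOT) above lets the one-word
// Addu that follows ride in that slot instead of costing a separate cycle.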


void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}


void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ push(a0);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
  }

  // If the code object is null, just return to the unoptimized code.
  __ Ret(eq, v0, Operand(Smi::FromInt(0)));

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ lw(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
  __ lw(a1, MemOperand(a1, FixedArray::OffsetOfElementAt(
      DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));
  __ SmiUntag(a1);

  // Compute the target address = code_obj + header_size + osr_offset
  // <entry_addr> = <code_obj> + #header_size + <osr_offset>
  __ addu(v0, v0, a1);
  __ addiu(ra, v0, Code::kHeaderSize - kHeapObjectTag);

  // And "return" to the OSR entry point of the function.
  __ Ret();
}


void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  // We check the stack limit as indicator that recompilation might be done.
  Label ok;
  __ LoadRoot(at, Heap::kStackLimitRootIndex);
  __ Branch(&ok, hs, sp, Operand(at));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kHiddenStackGuard, 0);
  }
  __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
          RelocInfo::CODE_TARGET);

  __ bind(&ok);
  __ Ret();
}


void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // a0: actual number of arguments
  { Label done;
    __ Branch(&done, ne, a0, Operand(zero_reg));
    __ LoadRoot(t2, Heap::kUndefinedValueRootIndex);
    __ push(t2);
    __ Addu(a0, a0, Operand(1));
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack, check
  //    if it is a function.
  // a0: actual number of arguments
  Label slow, non_function;
  __ sll(at, a0, kPointerSizeLog2);
  __ addu(at, sp, at);
  __ lw(a1, MemOperand(at));
  __ JumpIfSmi(a1, &non_function);
  __ GetObjectType(a1, a2, a2);
  __ Branch(&slow, ne, a2, Operand(JS_FUNCTION_TYPE));

  // 3a. Patch the first argument if necessary when calling a function.
  // a0: actual number of arguments
  // a1: function
  Label shift_arguments;
  __ li(t0, Operand(0, RelocInfo::NONE32));  // Indicate regular JS_FUNCTION.
  { Label convert_to_object, use_global_receiver, patch_receiver;
    // Change context eagerly in case we need the global receiver.
    __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

    // Do not transform the receiver for strict mode functions.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
    __ lw(a3, FieldMemOperand(a2, SharedFunctionInfo::kCompilerHintsOffset));
    __ And(t3, a3, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
                                 kSmiTagSize)));
    __ Branch(&shift_arguments, ne, t3, Operand(zero_reg));

    // Do not transform the receiver for native (Compilerhints already in a3).
    __ And(t3, a3, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
    __ Branch(&shift_arguments, ne, t3, Operand(zero_reg));

    // Compute the receiver in sloppy mode.
    // Load first argument in a2. a2 = -kPointerSize(sp + n_args << 2).
    __ sll(at, a0, kPointerSizeLog2);
    __ addu(a2, sp, at);
    __ lw(a2, MemOperand(a2, -kPointerSize));
    // a0: actual number of arguments
    // a1: function
    // a2: first argument
    __ JumpIfSmi(a2, &convert_to_object, t2);

    __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
    __ Branch(&use_global_receiver, eq, a2, Operand(a3));
    __ LoadRoot(a3, Heap::kNullValueRootIndex);
    __ Branch(&use_global_receiver, eq, a2, Operand(a3));

    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ GetObjectType(a2, a3, a3);
    __ Branch(&shift_arguments, ge, a3, Operand(FIRST_SPEC_OBJECT_TYPE));

    __ bind(&convert_to_object);
    // Enter an internal frame in order to preserve argument count.
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ sll(a0, a0, kSmiTagSize);  // Smi tagged.
      __ Push(a0, a2);
      __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
      __ mov(a2, v0);

      __ pop(a0);
      __ sra(a0, a0, kSmiTagSize);  // Un-tag.
      // Leave internal frame.
    }

    // Restore the function to a1, and the flag to t0.
    __ sll(at, a0, kPointerSizeLog2);
    __ addu(at, sp, at);
    __ lw(a1, MemOperand(at));
    __ li(t0, Operand(0, RelocInfo::NONE32));
    __ Branch(&patch_receiver);

    __ bind(&use_global_receiver);
    __ lw(a2, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
    __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalReceiverOffset));

    __ bind(&patch_receiver);
    __ sll(at, a0, kPointerSizeLog2);
    __ addu(a3, sp, at);
    __ sw(a2, MemOperand(a3, -kPointerSize));

    __ Branch(&shift_arguments);
  }

  // 3b. Check for function proxy.
  __ bind(&slow);
  __ li(t0, Operand(1, RelocInfo::NONE32));  // Indicate function proxy.
  __ Branch(&shift_arguments, eq, a2, Operand(JS_FUNCTION_PROXY_TYPE));

  __ bind(&non_function);
  __ li(t0, Operand(2, RelocInfo::NONE32));  // Indicate non-function.

  // 3c. Patch the first argument when calling a non-function. The
  //     CALL_NON_FUNCTION builtin expects the non-function callee as
  //     receiver, so overwrite the first argument which will ultimately
  //     become the receiver.
  // a0: actual number of arguments
  // a1: function
  // t0: call type (0: JS function, 1: function proxy, 2: non-function)
  __ sll(at, a0, kPointerSizeLog2);
  __ addu(a2, sp, at);
  __ sw(a1, MemOperand(a2, -kPointerSize));

  // 4. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver). Adjust argument count to make
  //    the original first argument the new receiver.
  // a0: actual number of arguments
  // a1: function
  // t0: call type (0: JS function, 1: function proxy, 2: non-function)
  __ bind(&shift_arguments);
  { Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ sll(at, a0, kPointerSizeLog2);
    __ addu(a2, sp, at);

    __ bind(&loop);
    __ lw(at, MemOperand(a2, -kPointerSize));
    __ sw(at, MemOperand(a2));
    __ Subu(a2, a2, Operand(kPointerSize));
    __ Branch(&loop, ne, a2, Operand(sp));
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ Subu(a0, a0, Operand(1));
    __ Pop();
  }

  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
  //     or a function proxy via CALL_FUNCTION_PROXY.
  // a0: actual number of arguments
  // a1: function
  // t0: call type (0: JS function, 1: function proxy, 2: non-function)
  { Label function, non_proxy;
    __ Branch(&function, eq, t0, Operand(zero_reg));
    // Expected number of arguments is 0 for CALL_NON_FUNCTION.
    __ mov(a2, zero_reg);
    __ Branch(&non_proxy, ne, t0, Operand(1));

    __ push(a1);  // Re-add proxy object as additional argument.
    __ Addu(a0, a0, Operand(1));
    __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    __ bind(&non_proxy);
    __ GetBuiltinFunction(a1, Builtins::CALL_NON_FUNCTION);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
    __ bind(&function);
  }

  // 5b. Get the code to call from the function and check that the number of
  //     expected arguments matches what we're providing. If so, jump
  //     (tail-call) to the code in register a3 without checking arguments.
  // a0: actual number of arguments
  // a1: function
  __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a2,
        FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
  __ sra(a2, a2, kSmiTagSize);
  // Check formal and actual parameter counts.
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET, ne, a2, Operand(a0));

  __ lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  ParameterCount expected(0);
  __ InvokeCode(a3, expected, expected, JUMP_FUNCTION, NullCallWrapper());
}


void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  const int kIndexOffset =
      StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize);
  const int kLimitOffset =
      StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize);
  const int kArgsOffset = 2 * kPointerSize;
  const int kRecvOffset = 3 * kPointerSize;
  const int kFunctionOffset = 4 * kPointerSize;

  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);
    __ lw(a0, MemOperand(fp, kFunctionOffset));  // Get the function.
    __ push(a0);
    __ lw(a0, MemOperand(fp, kArgsOffset));  // Get the args array.
    __ push(a0);
    // Returns (in v0) number of arguments to copy to stack as Smi.
    __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);

    // Check the stack for overflow. We are not trying to catch
    // interruptions (e.g. debug break and preemption) here, so the "real
    // stack limit" is checked.
    Label okay;
    __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
    // Make a2 the space we have left. The stack might already be overflowed
    // here, which will cause a2 to become negative.
    __ subu(a2, sp, a2);
    // Check if the arguments will overflow the stack.
    __ sll(t3, v0, kPointerSizeLog2 - kSmiTagSize);
    __ Branch(&okay, gt, a2, Operand(t3));  // Signed comparison.

    // Out of stack space.
    __ lw(a1, MemOperand(fp, kFunctionOffset));
    __ Push(a1, v0);
    __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
    // End of stack check.

    // Push current limit and index.
    __ bind(&okay);
    __ mov(a1, zero_reg);
    __ Push(v0, a1);  // Limit and initial index.

    // Get the receiver.
    __ lw(a0, MemOperand(fp, kRecvOffset));

    // Check that the function is a JS function (otherwise it must be a proxy).
    Label push_receiver;
    __ lw(a1, MemOperand(fp, kFunctionOffset));
    __ GetObjectType(a1, a2, a2);
    __ Branch(&push_receiver, ne, a2, Operand(JS_FUNCTION_TYPE));

    // Change context eagerly to get the right global object if necessary.
    __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
    // Load the shared function info while the function is still in a1.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));

    // Compute the receiver.
    // Do not transform the receiver for strict mode functions.
    Label call_to_object, use_global_receiver;
    __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCompilerHintsOffset));
    __ And(t3, a2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
                                 kSmiTagSize)));
    __ Branch(&push_receiver, ne, t3, Operand(zero_reg));

    // Do not transform the receiver for native (Compilerhints already in a2).
    __ And(t3, a2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
    __ Branch(&push_receiver, ne, t3, Operand(zero_reg));

    // Compute the receiver in sloppy mode.
    __ JumpIfSmi(a0, &call_to_object);
    __ LoadRoot(a1, Heap::kNullValueRootIndex);
    __ Branch(&use_global_receiver, eq, a0, Operand(a1));
    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
    __ Branch(&use_global_receiver, eq, a0, Operand(a2));

    // Check if the receiver is already a JavaScript object.
    // a0: receiver
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ GetObjectType(a0, a1, a1);
    __ Branch(&push_receiver, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));

    // Convert the receiver to a regular object.
    // a0: receiver
    __ bind(&call_to_object);
    __ push(a0);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
    __ mov(a0, v0);  // Put object in a0 to match other paths to push_receiver.
    __ Branch(&push_receiver);

    __ bind(&use_global_receiver);
    __ lw(a0, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
    __ lw(a0, FieldMemOperand(a0, GlobalObject::kGlobalReceiverOffset));

    // Push the receiver.
    // a0: receiver
    __ bind(&push_receiver);
    __ push(a0);

    // Copy all arguments from the array to the stack.
    Label loop, entry;
    __ lw(a0, MemOperand(fp, kIndexOffset));
    __ Branch(&entry);

    // Load the current argument from the arguments array and push it to the
    // stack.
    // a0: current argument index
    __ bind(&loop);
    __ lw(a1, MemOperand(fp, kArgsOffset));
    __ Push(a1, a0);

    // Call the runtime to access the property in the arguments array.
    __ CallRuntime(Runtime::kGetProperty, 2);
    __ push(v0);

    // Use inline caching to access the arguments.
    __ lw(a0, MemOperand(fp, kIndexOffset));
    __ Addu(a0, a0, Operand(1 << kSmiTagSize));
    __ sw(a0, MemOperand(fp, kIndexOffset));

    // Test if the copy loop has finished copying all the elements from the
    // arguments object.
    __ bind(&entry);
    __ lw(a1, MemOperand(fp, kLimitOffset));
    __ Branch(&loop, ne, a0, Operand(a1));

    // Call the function.
    Label call_proxy;
    ParameterCount actual(a0);
    __ sra(a0, a0, kSmiTagSize);
    __ lw(a1, MemOperand(fp, kFunctionOffset));
    __ GetObjectType(a1, a2, a2);
    __ Branch(&call_proxy, ne, a2, Operand(JS_FUNCTION_TYPE));

    __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper());

    frame_scope.GenerateLeaveFrame();
    __ Ret(USE_DELAY_SLOT);
    __ Addu(sp, sp, Operand(3 * kPointerSize));  // In delay slot.

    // Call the function proxy.
    __ bind(&call_proxy);
    __ push(a1);  // Add function proxy as last argument.
    __ Addu(a0, a0, Operand(1));
    __ li(a2, Operand(0, RelocInfo::NONE32));
    __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY);
    __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
    // Tear down the internal frame and remove function, receiver and args.
  }

  __ Ret(USE_DELAY_SLOT);
  __ Addu(sp, sp, Operand(3 * kPointerSize));  // In delay slot.
}


static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
                                      Label* stack_overflow) {
  // ----------- S t a t e -------------
  //  -- a0 : actual number of arguments
  //  -- a1 : function (passed through to callee)
  //  -- a2 : expected number of arguments
  // -----------------------------------
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real
  // stack limit" is checked.
  __ LoadRoot(t1, Heap::kRealStackLimitRootIndex);
  // Make t1 the space we have left. The stack might already be overflowed
  // here, which will cause t1 to become negative.
  __ subu(t1, sp, t1);
  // Check if the arguments will overflow the stack.
  __ sll(at, a2, kPointerSizeLog2);
  // Signed comparison.
  __ Branch(stack_overflow, le, t1, Operand(at));
}
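
// Stack-check note: after the subu, t1 holds sp - real_stack_limit, so the
// adaptor bails out when the a2 * kPointerSize bytes needed for the expected
// arguments meet or exceed the remaining space (the signed "le" above).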


static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ sll(a0, a0, kSmiTagSize);
  __ li(t0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ MultiPush(a0.bit() | a1.bit() | t0.bit() | fp.bit() | ra.bit());
  __ Addu(fp, sp,
          Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                  kPointerSize));
}


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- v0 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then tear down the parameters.
  __ lw(a1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                             kPointerSize)));
  __ mov(sp, fp);
  __ MultiPop(fp.bit() | ra.bit());
  __ sll(t0, a1, kPointerSizeLog2 - kSmiTagSize);
  __ Addu(sp, sp, t0);
  // Adjust for the receiver.
  __ Addu(sp, sp, Operand(kPointerSize));
}


void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // State setup as expected by MacroAssembler::InvokePrologue.
  // ----------- S t a t e -------------
  //  -- a0: actual arguments count
  //  -- a1: function (passed through to callee)
  //  -- a2: expected arguments count
  // -----------------------------------

  Label stack_overflow;
  ArgumentAdaptorStackCheck(masm, &stack_overflow);
  Label invoke, dont_adapt_arguments;

  Label enough, too_few;
  __ lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  __ Branch(&dont_adapt_arguments, eq,
            a2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  // We use Uless as the number of arguments should always be greater than 0.
  __ Branch(&too_few, Uless, a0, Operand(a2));

  {  // Enough parameters: actual >= expected.
    // a0: actual number of arguments as a smi
    // a1: function
    // a2: expected number of arguments
    // a3: code entry to call
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into a0 and copy end address into a2.
    __ sll(a0, a0, kPointerSizeLog2 - kSmiTagSize);
    __ Addu(a0, fp, a0);
    // Adjust for return address and receiver.
    __ Addu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address.
    __ sll(a2, a2, kPointerSizeLog2);
    __ subu(a2, a0, a2);

    // Copy the arguments (including the receiver) to the new stack frame.
    // a0: copy start address
    // a1: function
    // a2: copy end address
    // a3: code entry to call

    Label copy;
    __ bind(&copy);
    __ lw(t0, MemOperand(a0));
    __ push(t0);
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a2));
    __ addiu(a0, a0, -kPointerSize);  // In delay slot.

    __ jmp(&invoke);
  }

  {  // Too few parameters: actual < expected.
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into a0; the copy end address is fp.
    // a0: actual number of arguments as a smi
    // a1: function
    // a2: expected number of arguments
    // a3: code entry to call
    __ sll(a0, a0, kPointerSizeLog2 - kSmiTagSize);
    __ Addu(a0, fp, a0);
    // Adjust for return address and receiver.
    __ Addu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address. Also adjust for return address.
    __ Addu(t3, fp, kPointerSize);

    // Copy the arguments (including the receiver) to the new stack frame.
    // a0: copy start address
    // a1: function
    // a2: expected number of arguments
    // a3: code entry to call
    // t3: copy end address
    Label copy;
    __ bind(&copy);
    __ lw(t0, MemOperand(a0));  // Adjusted above for return addr and receiver.
    __ Subu(sp, sp, kPointerSize);
    __ Subu(a0, a0, kPointerSize);
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(t3));
    __ sw(t0, MemOperand(sp));  // In the delay slot.

    // Fill the remaining expected arguments with undefined.
    // a1: function
    // a2: expected number of arguments
    // a3: code entry to call
    __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
    __ sll(t2, a2, kPointerSizeLog2);
    __ Subu(a2, fp, Operand(t2));
    // Adjust for frame.
    __ Subu(a2, a2, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                            2 * kPointerSize));

    Label fill;
    __ bind(&fill);
    __ Subu(sp, sp, kPointerSize);
    __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(a2));
    __ sw(t0, MemOperand(sp));
  }

  // Call the entry point.
  __ bind(&invoke);

  __ Call(a3);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Ret();


  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ Jump(a3);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    EnterArgumentsAdaptorFrame(masm);
    __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
    __ break_(0xCC);
  }
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_MIPS