// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_MIPS

#include "src/codegen.h"
#include "src/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)
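
// ACCESS_MASM(masm) expands to masm->, so each `__ op(...)` line below is a
// call on the MacroAssembler; the generators read like MIPS assembly while
// actually emitting machine code at runtime.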

void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- a0                 : number of arguments excluding receiver
  //  -- a1                 : called function (only guaranteed when
  //  --                      extra_args requires it)
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------

  // Insert extra arguments.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    num_extra_args = 1;
    __ push(a1);
  } else {
    DCHECK(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects a0 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ Addu(a0, a0, num_extra_args + 1);
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}


// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the native context.
  __ lw(result,
        MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ lw(result,
        FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the InternalArray function from the native context.
  __ lw(result,
        MemOperand(result,
                   Context::SlotOffset(
                       Context::INTERNAL_ARRAY_FUNCTION_INDEX)));
}


// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the native context.
  __ lw(result,
        MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ lw(result,
        FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the Array function from the native context.
  __ lw(result,
        MemOperand(result,
                   Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
}
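
// Both loaders above follow the same three-hop chain: the current context
// (cp) points to the global object, the global object holds its native
// context, and the native context keeps each builtin function in a fixed
// slot. Roughly:
//   result = cp[GLOBAL_OBJECT_INDEX]->native_context()[<function slot>]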


void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, a1);

  if (FLAG_debug_code) {
    // The initial map for the builtin InternalArray function should be a map.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(a2, t0);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction,
              t0, Operand(zero_reg));
    __ GetObjectType(a2, a3, t0);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction,
              t0, Operand(MAP_TYPE));
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // Tail call a stub.
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, a1);

  if (FLAG_debug_code) {
    // The initial map for the builtin Array function should be a map.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(a2, t0);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction1,
              t0, Operand(zero_reg));
    __ GetObjectType(a2, a3, t0);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction2,
              t0, Operand(MAP_TYPE));
  }

  // Run the native code for the Array function called as a normal function.
  // Tail call a stub.
  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}


void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : constructor function
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_ctor_calls(), 1, a2, a3);

  Register function = a1;
  if (FLAG_debug_code) {
    __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, a2);
    __ Assert(eq, kUnexpectedStringFunction, function, Operand(a2));
  }

  // Load the first argument into a0 and get rid of the rest.
  Label no_arguments;
  __ Branch(&no_arguments, eq, a0, Operand(zero_reg));
  // The first argument is at sp[(argc - 1) * 4].
  __ Subu(a0, a0, Operand(1));
  __ sll(a0, a0, kPointerSizeLog2);
  __ Addu(sp, a0, sp);
  __ lw(a0, MemOperand(sp));
  // sp now points to args[0]; drop args[0] and the receiver.
  __ Drop(2);

  Register argument = a2;
  Label not_cached, argument_is_string;
  __ LookupNumberStringCache(a0,        // Input.
                             argument,  // Result.
                             a3,        // Scratch.
                             t0,        // Scratch.
                             t1,        // Scratch.
                             &not_cached);
  __ IncrementCounter(counters->string_ctor_cached_number(), 1, a3, t0);
  __ bind(&argument_is_string);

  // ----------- S t a t e -------------
  //  -- a2     : argument converted to string
  //  -- a1     : constructor function
  //  -- ra     : return address
  // -----------------------------------

  Label gc_required;
  __ Allocate(JSValue::kSize,
              v0,  // Result.
              a3,  // Scratch.
              t0,  // Scratch.
              &gc_required,
              TAG_OBJECT);

  // Initialize the String object.
  Register map = a3;
  __ LoadGlobalFunctionInitialMap(function, map, t0);
  if (FLAG_debug_code) {
    __ lbu(t0, FieldMemOperand(map, Map::kInstanceSizeOffset));
    __ Assert(eq, kUnexpectedStringWrapperInstanceSize,
              t0, Operand(JSValue::kSize >> kPointerSizeLog2));
    __ lbu(t0, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
    __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper,
              t0, Operand(zero_reg));
  }
  __ sw(map, FieldMemOperand(v0, HeapObject::kMapOffset));

  __ LoadRoot(a3, Heap::kEmptyFixedArrayRootIndex);
  __ sw(a3, FieldMemOperand(v0, JSObject::kPropertiesOffset));
  __ sw(a3, FieldMemOperand(v0, JSObject::kElementsOffset));

  __ sw(argument, FieldMemOperand(v0, JSValue::kValueOffset));

  // Ensure the object is fully initialized.
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
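  // A JSValue wrapper is exactly four words: map, properties, elements, and
  // the boxed value. All four fields were written above, so the object can
  // be handed to the GC-visible world without any filler.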
  __ Ret();

  // The argument was not found in the number to string cache. Check
  // if it's a string already before calling the conversion builtin.
  Label convert_argument;
  __ bind(&not_cached);
  __ JumpIfSmi(a0, &convert_argument);

  // Is it a String?
  __ lw(a2, FieldMemOperand(a0, HeapObject::kMapOffset));
  __ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  __ And(t0, a3, Operand(kIsNotStringMask));
  __ Branch(&convert_argument, ne, t0, Operand(zero_reg));
  __ mov(argument, a0);
  __ IncrementCounter(counters->string_ctor_conversions(), 1, a3, t0);
  __ Branch(&argument_is_string);

  // Invoke the conversion builtin and put the result into a2.
  __ bind(&convert_argument);
  __ push(function);  // Preserve the function.
  __ IncrementCounter(counters->string_ctor_conversions(), 1, a3, t0);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(a0);
    __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
  }
  __ pop(function);
  __ mov(argument, v0);
  __ Branch(&argument_is_string);

  // Load the empty string into a2, remove the receiver from the
  // stack, and jump back to the case where the argument is a string.
  __ bind(&no_arguments);
  __ LoadRoot(argument, Heap::kempty_stringRootIndex);
  __ Drop(1);
  __ Branch(&argument_is_string);

  // At this point the argument is already a string. Call runtime to
  // create a string wrapper.
  __ bind(&gc_required);
  __ IncrementCounter(counters->string_ctor_gc_required(), 1, a3, t0);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(argument);
    __ CallRuntime(Runtime::kNewStringWrapper, 1);
  }
  __ Ret();
}


static void CallRuntimePassFunction(
    MacroAssembler* masm, Runtime::FunctionId function_id) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack.
  // Push the function as a parameter to the runtime call.
  __ Push(a1, a1);

  __ CallRuntime(function_id, 1);
  // Restore receiver.
  __ Pop(a1);
}


static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset));
  __ Addu(at, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}


static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
  __ Addu(at, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}


void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive. A good compromise is to first check against
  // the stack limit as a cue for an interrupt signal.
  Label ok;
  __ LoadRoot(t0, Heap::kStackLimitRootIndex);
  __ Branch(&ok, hs, sp, Operand(t0));

  CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
  GenerateTailCallToReturnedCode(masm);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_memento) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- a1     : constructor function
  //  -- a2     : allocation site or undefined
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  // Should never create mementos for api functions.
  DCHECK(!is_api_function || !create_memento);

  Isolate* isolate = masm->isolate();

  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- a1     : constructor function
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    if (create_memento) {
      __ AssertUndefinedOrAllocationSite(a2, a3);
      __ push(a2);
    }

    // Preserve the two incoming parameters on the stack.
    __ sll(a0, a0, kSmiTagSize);  // Tag arguments count.
    __ MultiPushReversed(a0.bit() | a1.bit());
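
    // On 32-bit targets a Smi is the integer shifted left by kSmiTagSize
    // (one bit) with a zero tag bit, so the single sll above tags the count;
    // heap pointers are distinguished by a low tag bit of 1.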

    Label rt_call, allocated;
    // Try to allocate the object without transitioning into C code. If any of
    // the preconditions is not met, the code bails out to the runtime call.
    if (FLAG_inline_new) {
      Label undo_allocation;
      ExternalReference debug_step_in_fp =
          ExternalReference::debug_step_in_fp_address(isolate);
      __ li(a2, Operand(debug_step_in_fp));
      __ lw(a2, MemOperand(a2));
      __ Branch(&rt_call, ne, a2, Operand(zero_reg));

      // Load the initial map and verify that it is in fact a map.
      // a1: constructor function
      __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
      __ JumpIfSmi(a2, &rt_call);
      __ GetObjectType(a2, a3, t4);
      __ Branch(&rt_call, ne, t4, Operand(MAP_TYPE));

      // Check that the constructor is not constructing a JSFunction (see
      // comments in Runtime_NewObject in runtime.cc). In which case the
      // initial map's instance type would be JS_FUNCTION_TYPE.
      // a1: constructor function
      // a2: initial map
      __ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset));
      __ Branch(&rt_call, eq, a3, Operand(JS_FUNCTION_TYPE));

      if (!is_api_function) {
        Label allocate;
        MemOperand bit_field3 = FieldMemOperand(a2, Map::kBitField3Offset);
        // Check if slack tracking is enabled.
        __ lw(t0, bit_field3);
        __ DecodeField<Map::ConstructionCount>(t2, t0);
        __ Branch(&allocate, eq, t2, Operand(JSFunction::kNoSlackTracking));
        // Decrease generous allocation count.
        __ Subu(t0, t0, Operand(1 << Map::ConstructionCount::kShift));
        __ Branch(USE_DELAY_SLOT,
                  &allocate, ne, t2, Operand(JSFunction::kFinishSlackTracking));
        __ sw(t0, bit_field3);  // In delay slot.
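
        // MIPS executes the instruction in a branch's delay slot whether or
        // not the branch is taken; USE_DELAY_SLOT places the following
        // instruction in that slot instead of a nop, so the decremented
        // count is written back on both paths.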

        __ Push(a1, a2, a1);  // a1 = Constructor.
        __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);

        __ Pop(a1, a2);
        // Slack tracking counter is kNoSlackTracking after runtime call.
        DCHECK(JSFunction::kNoSlackTracking == 0);
        __ mov(t2, zero_reg);

        __ bind(&allocate);
      }

      // Now allocate the JSObject on the heap.
      // a1: constructor function
      // a2: initial map
      __ lbu(a3, FieldMemOperand(a2, Map::kInstanceSizeOffset));
      if (create_memento) {
        __ Addu(a3, a3, Operand(AllocationMemento::kSize / kPointerSize));
      }

      __ Allocate(a3, t4, t5, t6, &rt_call, SIZE_IN_WORDS);

      // Allocated the JSObject, now initialize the fields. Map is set to
      // initial map and properties and elements are set to empty fixed array.
      // a1: constructor function
      // a2: initial map
      // a3: object size (not including memento if create_memento)
      // t4: JSObject (not tagged)
      __ LoadRoot(t6, Heap::kEmptyFixedArrayRootIndex);
      __ mov(t5, t4);
      __ sw(a2, MemOperand(t5, JSObject::kMapOffset));
      __ sw(t6, MemOperand(t5, JSObject::kPropertiesOffset));
      __ sw(t6, MemOperand(t5, JSObject::kElementsOffset));
      __ Addu(t5, t5, Operand(3 * kPointerSize));
      DCHECK_EQ(0 * kPointerSize, JSObject::kMapOffset);
      DCHECK_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
      DCHECK_EQ(2 * kPointerSize, JSObject::kElementsOffset);

      // Fill all the in-object properties with appropriate filler.
      // a1: constructor function
      // a2: initial map
      // a3: object size (in words, including memento if create_memento)
      // t4: JSObject (not tagged)
      // t5: First in-object property of JSObject (not tagged)
      // t2: slack tracking counter (non-API function case)
      DCHECK_EQ(3 * kPointerSize, JSObject::kHeaderSize);

      // Use t7 to hold undefined, which is used in several places below.
      __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);

      if (!is_api_function) {
        Label no_inobject_slack_tracking;

        // Check if slack tracking is enabled.
        __ Branch(&no_inobject_slack_tracking,
                  eq, t2, Operand(JSFunction::kNoSlackTracking));

        // Allocate object with a slack.
        __ lbu(a0, FieldMemOperand(a2, Map::kPreAllocatedPropertyFieldsOffset));
        __ sll(at, a0, kPointerSizeLog2);
        __ addu(a0, t5, at);
        // a0: offset of first field after pre-allocated fields
        if (FLAG_debug_code) {
          __ sll(at, a3, kPointerSizeLog2);
          __ Addu(t6, t4, Operand(at));  // End of object.
          __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields,
                    a0, Operand(t6));
        }
        __ InitializeFieldsWithFiller(t5, a0, t7);
        // To allow for truncation.
        __ LoadRoot(t7, Heap::kOnePointerFillerMapRootIndex);
        // Fill the remaining fields with one pointer filler map.

        __ bind(&no_inobject_slack_tracking);
      }

      if (create_memento) {
        __ Subu(a0, a3, Operand(AllocationMemento::kSize / kPointerSize));
        __ sll(a0, a0, kPointerSizeLog2);
        __ Addu(a0, t4, Operand(a0));  // End of object.
        __ InitializeFieldsWithFiller(t5, a0, t7);

        // Fill in memento fields.
        // t5: points to the allocated but uninitialized memento.
        __ LoadRoot(t7, Heap::kAllocationMementoMapRootIndex);
        DCHECK_EQ(0 * kPointerSize, AllocationMemento::kMapOffset);
        __ sw(t7, MemOperand(t5));
        __ Addu(t5, t5, kPointerSize);
        // Load the AllocationSite.
        __ lw(t7, MemOperand(sp, 2 * kPointerSize));
        DCHECK_EQ(1 * kPointerSize, AllocationMemento::kAllocationSiteOffset);
        __ sw(t7, MemOperand(t5));
        __ Addu(t5, t5, kPointerSize);
      } else {
        __ sll(at, a3, kPointerSizeLog2);
        __ Addu(a0, t4, Operand(at));  // End of object.
        __ InitializeFieldsWithFiller(t5, a0, t7);
      }

      // Add the object tag to make the JSObject real, so that we can continue
      // and jump into the continuation code at any time from now on. Any
      // failures need to undo the allocation, so that the heap is in a
      // consistent state and verifiable.
      __ Addu(t4, t4, Operand(kHeapObjectTag));
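
      // Until now the new object was manipulated as a raw address in t4;
      // adding kHeapObjectTag (low bit 1) turns it into a tagged pointer
      // that the rest of V8, including the GC, treats as a heap object.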

      // Check if a non-empty properties array is needed. Continue with the
      // allocated object if not; fall through to the runtime call if it is.
      // a1: constructor function
      // t4: JSObject
      // t5: start of next object (not tagged)
      __ lbu(a3, FieldMemOperand(a2, Map::kUnusedPropertyFieldsOffset));
      // The instance-size fields contain both pre-allocated property fields
      // and in-object properties.
      __ lbu(t6, FieldMemOperand(a2, Map::kPreAllocatedPropertyFieldsOffset));
      __ Addu(a3, a3, Operand(t6));
      __ lbu(t6, FieldMemOperand(a2, Map::kInObjectPropertiesOffset));
      __ subu(a3, a3, t6);

      // Done if no extra properties are to be allocated.
      __ Branch(&allocated, eq, a3, Operand(zero_reg));
      __ Assert(greater_equal, kPropertyAllocationCountFailed,
                a3, Operand(zero_reg));

      // Scale the number of elements by pointer size and add the header for
      // FixedArrays to the start of the next object calculation from above.
      // a1: constructor function
      // a3: number of elements in properties array
      // t4: JSObject
      // t5: start of next object
      __ Addu(a0, a3, Operand(FixedArray::kHeaderSize / kPointerSize));
      __ Allocate(
          a0,
          t5,
          t6,
          a2,
          &undo_allocation,
          static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | SIZE_IN_WORDS));

      // Initialize the FixedArray.
      // a1: constructor function
      // a3: number of elements in properties array (untagged)
      // t4: JSObject
      // t5: start of next object
      __ LoadRoot(t6, Heap::kFixedArrayMapRootIndex);
      __ mov(a2, t5);
      __ sw(t6, MemOperand(a2, JSObject::kMapOffset));
      __ sll(a0, a3, kSmiTagSize);
      __ sw(a0, MemOperand(a2, FixedArray::kLengthOffset));
      __ Addu(a2, a2, Operand(2 * kPointerSize));

      DCHECK_EQ(0 * kPointerSize, JSObject::kMapOffset);
      DCHECK_EQ(1 * kPointerSize, FixedArray::kLengthOffset);

      // Initialize the fields to undefined.
      // a1: constructor function
      // a2: First element of FixedArray (not tagged)
      // a3: number of elements in properties array
      // t4: JSObject
      // t5: FixedArray (not tagged)
      __ sll(t3, a3, kPointerSizeLog2);
      __ addu(t6, a2, t3);  // End of object.
      DCHECK_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
      { Label loop, entry;
        if (!is_api_function || create_memento) {
          __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
        } else if (FLAG_debug_code) {
          __ LoadRoot(t2, Heap::kUndefinedValueRootIndex);
          __ Assert(eq, kUndefinedValueNotLoaded, t7, Operand(t2));
        }
        __ jmp(&entry);
        __ bind(&loop);
        __ sw(t7, MemOperand(a2));
        __ addiu(a2, a2, kPointerSize);
        __ bind(&entry);
        __ Branch(&loop, less, a2, Operand(t6));
      }

      // Store the initialized FixedArray into the properties field of
      // the JSObject.
      // a1: constructor function
      // t4: JSObject
      // t5: FixedArray (not tagged)
      __ Addu(t5, t5, Operand(kHeapObjectTag));  // Add the heap tag.
      __ sw(t5, FieldMemOperand(t4, JSObject::kPropertiesOffset));

      // Continue with JSObject being successfully allocated.
      // a1: constructor function
      // t4: JSObject
      __ jmp(&allocated);

      // Undo the setting of the new top so that the heap is verifiable. For
      // example, the map's unused properties potentially do not match the
      // allocated objects unused properties.
      // t4: JSObject (previous new top)
      __ bind(&undo_allocation);
      __ UndoAllocationInNewSpace(t4, t5);
    }

    // Allocate the new receiver object using the runtime call.
    // a1: constructor function
    __ bind(&rt_call);
    if (create_memento) {
      // Get the cell or allocation site.
      __ lw(a2, MemOperand(sp, 2 * kPointerSize));
      __ push(a2);
    }

    __ push(a1);  // Argument for Runtime_NewObject.
    if (create_memento) {
      __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 2);
    } else {
      __ CallRuntime(Runtime::kNewObject, 1);
    }
    __ mov(t4, v0);

    // If we ended up using the runtime, and we want a memento, then the
    // runtime call made it for us, and we shouldn't do create count
    // incrementation.
    Label count_incremented;
    if (create_memento) {
      __ jmp(&count_incremented);
    }

    // Receiver for constructor call allocated.
    // t4: JSObject
    __ bind(&allocated);

    if (create_memento) {
      __ lw(a2, MemOperand(sp, kPointerSize * 2));
      __ LoadRoot(t5, Heap::kUndefinedValueRootIndex);
      __ Branch(&count_incremented, eq, a2, Operand(t5));
      // a2 is an AllocationSite. We are creating a memento from it, so we
      // need to increment the memento create count.
      __ lw(a3, FieldMemOperand(a2,
                                AllocationSite::kPretenureCreateCountOffset));
      __ Addu(a3, a3, Operand(Smi::FromInt(1)));
      __ sw(a3, FieldMemOperand(a2,
                                AllocationSite::kPretenureCreateCountOffset));
      __ bind(&count_incremented);
    }

    __ Push(t4, t4);

    // Reload the number of arguments from the stack.
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: constructor function
    // sp[3]: number of arguments (smi-tagged)
    __ lw(a1, MemOperand(sp, 2 * kPointerSize));
    __ lw(a3, MemOperand(sp, 3 * kPointerSize));

    // Set up pointer to last argument.
    __ Addu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Set up number of arguments for function call below.
    __ srl(a0, a3, kSmiTagSize);

    // Copy arguments and receiver to the expression stack.
    // a0: number of arguments
    // a1: constructor function
    // a2: address of last argument (caller sp)
    // a3: number of arguments (smi-tagged)
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: constructor function
    // sp[3]: number of arguments (smi-tagged)
    Label loop, entry;
    __ jmp(&entry);
    __ bind(&loop);
    __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize);
    __ Addu(t0, a2, Operand(t0));
    __ lw(t1, MemOperand(t0));
    __ push(t1);
    __ bind(&entry);
    __ Addu(a3, a3, Operand(-2));
    __ Branch(&loop, greater_equal, a3, Operand(zero_reg));
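
    // a3 counts down in smi form: subtracting 2 subtracts Smi(1), and the
    // shift by kPointerSizeLog2 - kSmiTagSize converts a smi index directly
    // into a byte offset without untagging it first.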

    // Call the function.
    // a0: number of arguments
    // a1: constructor function
    if (is_api_function) {
      __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      __ Call(code, RelocInfo::CODE_TARGET);
    } else {
      ParameterCount actual(a0);
      __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper());
    }

    // Store offset of return address for deoptimizer.
    if (!is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7
    // on page 74.
    Label use_receiver, exit;

    // If the result is a smi, it is *not* an object in the ECMA sense.
    // v0: result
    // sp[0]: receiver (newly allocated object)
    // sp[1]: constructor function
    // sp[2]: number of arguments (smi-tagged)
    __ JumpIfSmi(v0, &use_receiver);

    // If the type of the result (stored in its map) is less than
    // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
    __ GetObjectType(v0, a1, a3);
    __ Branch(&exit, greater_equal, a3, Operand(FIRST_SPEC_OBJECT_TYPE));

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ lw(v0, MemOperand(sp));

    // Remove receiver from the stack, remove caller arguments, and
    // return.
    __ bind(&exit);
    // v0: result
    // sp[0]: receiver (newly allocated object)
    // sp[1]: constructor function
    // sp[2]: number of arguments (smi-tagged)
    __ lw(a1, MemOperand(sp, 2 * kPointerSize));

    // Leave construct frame.
  }

  __ sll(t0, a1, kPointerSizeLog2 - 1);
  __ Addu(sp, sp, t0);
  __ Addu(sp, sp, kPointerSize);
  __ IncrementCounter(isolate->counters()->constructed_objects(), 1, a1, a2);
  __ Ret();
}


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false);
}


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from JSEntryStub::GenerateBody

  // ----------- S t a t e -------------
  //  -- a0: code entry
  //  -- a1: function
  //  -- a2: receiver_pointer
  //  -- a3: argc
  //  -- s0: argv
  // -----------------------------------
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Clear the context before we push it when entering the JS frame.
  __ mov(cp, zero_reg);

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context from the function argument.
    __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

    // Push the function and the receiver onto the stack.
    __ Push(a1, a2);

    // Copy arguments to the stack in a loop.
    // a3: argc
    // s0: argv, i.e. points to first arg
    Label loop, entry;
    __ sll(t0, a3, kPointerSizeLog2);
    __ addu(t2, s0, t0);
    __ b(&entry);
    __ nop();  // Branch delay slot nop.
    // t2 points past last arg.
    __ bind(&loop);
    __ lw(t0, MemOperand(s0));  // Read next parameter.
    __ addiu(s0, s0, kPointerSize);
    __ lw(t0, MemOperand(t0));  // Dereference handle.
    __ push(t0);  // Push parameter.
    __ bind(&entry);
    __ Branch(&loop, ne, s0, Operand(t2));
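
    // Each argv slot holds a handle (a pointer to an Object* location)
    // rather than the object itself, which is why every parameter is loaded
    // twice: once to fetch the handle and once to dereference it.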

    // Initialize all JavaScript callee-saved registers, since they will be
    // seen by the garbage collector as part of handlers.
    __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
    __ mov(s1, t0);
    __ mov(s2, t0);
    __ mov(s3, t0);
    __ mov(s4, t0);
    __ mov(s5, t0);
    // s6 holds the root address. Do not clobber.
    // s7 is cp. Do not init.

    // Invoke the code and pass argc as a0.
    __ mov(a0, a3);
    if (is_construct) {
      // No type feedback cell is available.
      __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
      CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
      __ CallStub(&stub);
    } else {
      ParameterCount actual(a0);
      __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper());
    }

    // Leave internal frame.
  }

  __ Jump(ra);
}


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}


void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kCompileLazy);
  GenerateTailCallToReturnedCode(masm);
}


static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack.
  // Push the function as a parameter to the runtime call.
  __ Push(a1, a1);
  // Whether to compile in a background thread.
  __ Push(masm->isolate()->factory()->ToBoolean(concurrent));

  __ CallRuntime(Runtime::kCompileOptimized, 2);
  // Restore receiver.
  __ Pop(a1);
}


void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  CallCompileOptimized(masm, false);
  GenerateTailCallToReturnedCode(masm);
}


void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  CallCompileOptimized(masm, true);
  GenerateTailCallToReturnedCode(masm);
}


static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do
  // stack crawls in MakeCodeYoung. This seems a bit fragile.

  // Set a0 to point to the head of the PlatformCodeAge sequence.
  __ Subu(a0, a0,
          Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));

  // The following registers must be saved and restored when calling through
  // to the runtime:
  //   a0 - contains return address (beginning of patch sequence)
  //   a1 - isolate
  RegList saved_regs =
      (a0.bit() | a1.bit() | ra.bit() | fp.bit()) & ~sp.bit();
  FrameScope scope(masm, StackFrame::MANUAL);
  __ MultiPush(saved_regs);
  __ PrepareCallCFunction(2, 0, a2);
  __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  __ MultiPop(saved_regs);
  __ Jump(a0);
}

#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}                                                            \
void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR


void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the
  // fact that make_code_young doesn't do any garbage collection which allows
  // us to save/restore the registers without worrying about which of them
  // contain pointers.

  // Set a0 to point to the head of the PlatformCodeAge sequence.
  __ Subu(a0, a0,
          Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));

  // The following registers must be saved and restored when calling through
  // to the runtime:
  //   a0 - contains return address (beginning of patch sequence)
  //   a1 - isolate
  RegList saved_regs =
      (a0.bit() | a1.bit() | ra.bit() | fp.bit()) & ~sp.bit();
  FrameScope scope(masm, StackFrame::MANUAL);
  __ MultiPush(saved_regs);
  __ PrepareCallCFunction(2, 0, a2);
  __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
      2);
  __ MultiPop(saved_regs);

  // Perform prologue operations usually performed by the young code stub.
  __ Push(ra, fp, cp, a1);
  __ Addu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));

  // Jump to point after the code-age stub.
  __ Addu(a0, a0, Operand(kNoCodeAgeSequenceLength));
  __ Jump(a0);
}


void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}


static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    __ MultiPush(kJSCallerSaved | kCalleeSaved);
    // Pass the function and deoptimization type to the runtime system.
    __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
    __ MultiPop(kJSCallerSaved | kCalleeSaved);
  }

  __ Addu(sp, sp, Operand(kPointerSize));  // Ignore state.
  __ Jump(ra);  // Jump to miss handler.
}


void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}


void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}


static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass the function and deoptimization type to the runtime system.
    __ li(a0, Operand(Smi::FromInt(static_cast<int>(type))));
    __ push(a0);
    __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
  }

  // Get the full codegen state from the stack and untag it -> t2.
  __ lw(t2, MemOperand(sp, 0 * kPointerSize));
  __ SmiUntag(t2);
  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ Branch(&with_tos_register,
            ne, t2, Operand(FullCodeGenerator::NO_REGISTERS));
  __ Ret(USE_DELAY_SLOT);
  // Safe to fill the delay slot: Addu emits a single instruction.
  __ Addu(sp, sp, Operand(1 * kPointerSize));  // Remove state.

  __ bind(&with_tos_register);
  __ lw(v0, MemOperand(sp, 1 * kPointerSize));
  __ Branch(&unknown_state, ne, t2, Operand(FullCodeGenerator::TOS_REG));

  __ Ret(USE_DELAY_SLOT);
  // Safe to fill the delay slot: Addu emits a single instruction.
  __ Addu(sp, sp, Operand(2 * kPointerSize));  // Remove state.
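
  // With NO_REGISTERS only the state word is on top of the stack; with
  // TOS_REG the deoptimizer also saved the top-of-stack value, so v0 is
  // reloaded and two words are removed instead of one.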

  __ bind(&unknown_state);
  __ stop("no cases left");
}


void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}


void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ push(a0);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
  }

  // If the code object is null, just return to the unoptimized code.
  __ Ret(eq, v0, Operand(Smi::FromInt(0)));

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ lw(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
  __ lw(a1, MemOperand(a1, FixedArray::OffsetOfElementAt(
      DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));
  __ SmiUntag(a1);

  // Compute the target address = code_obj + header_size + osr_offset
  // <entry_addr> = <code_obj> + #header_size + <osr_offset>
  __ addu(v0, v0, a1);
  __ addiu(ra, v0, Code::kHeaderSize - kHeapObjectTag);

  // And "return" to the OSR entry point of the function.
  __ Ret();
}


void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  // We check the stack limit as indicator that recompilation might be done.
  Label ok;
  __ LoadRoot(at, Heap::kStackLimitRootIndex);
  __ Branch(&ok, hs, sp, Operand(at));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kStackGuard, 0);
  }
  __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
          RelocInfo::CODE_TARGET);

  __ bind(&ok);
  __ Ret();
}


void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // a0: actual number of arguments
  { Label done;
    __ Branch(&done, ne, a0, Operand(zero_reg));
    __ LoadRoot(t2, Heap::kUndefinedValueRootIndex);
    __ push(t2);
    __ Addu(a0, a0, Operand(1));
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack, check
  //    if it is a function.
  // a0: actual number of arguments
  Label slow, non_function;
  __ sll(at, a0, kPointerSizeLog2);
  __ addu(at, sp, at);
  __ lw(a1, MemOperand(at));
  __ JumpIfSmi(a1, &non_function);
  __ GetObjectType(a1, a2, a2);
  __ Branch(&slow, ne, a2, Operand(JS_FUNCTION_TYPE));

  // 3a. Patch the first argument if necessary when calling a function.
  // a0: actual number of arguments
  // a1: function
  Label shift_arguments;
  __ li(t0, Operand(0, RelocInfo::NONE32));  // Indicate regular JS_FUNCTION.
  { Label convert_to_object, use_global_proxy, patch_receiver;
    // Change context eagerly in case we need the global receiver.
    __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

    // Do not transform the receiver for strict mode functions.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
    __ lw(a3, FieldMemOperand(a2, SharedFunctionInfo::kCompilerHintsOffset));
    __ And(t3, a3, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
                                 kSmiTagSize)));
    __ Branch(&shift_arguments, ne, t3, Operand(zero_reg));

    // Do not transform the receiver for native (Compilerhints already in a3).
    __ And(t3, a3, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
    __ Branch(&shift_arguments, ne, t3, Operand(zero_reg));

    // Compute the receiver in sloppy mode.
    // Load first argument in a2. a2 = -kPointerSize(sp + n_args << 2).
    __ sll(at, a0, kPointerSizeLog2);
    __ addu(a2, sp, at);
    __ lw(a2, MemOperand(a2, -kPointerSize));
    // a0: actual number of arguments
    // a1: function
    // a2: first argument
    __ JumpIfSmi(a2, &convert_to_object, t2);

    __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
    __ Branch(&use_global_proxy, eq, a2, Operand(a3));
    __ LoadRoot(a3, Heap::kNullValueRootIndex);
    __ Branch(&use_global_proxy, eq, a2, Operand(a3));

    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ GetObjectType(a2, a3, a3);
    __ Branch(&shift_arguments, ge, a3, Operand(FIRST_SPEC_OBJECT_TYPE));

    __ bind(&convert_to_object);
    // Enter an internal frame in order to preserve argument count.
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ sll(a0, a0, kSmiTagSize);  // Smi tagged.
      __ Push(a0, a2);
      __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
      __ mov(a2, v0);

      __ pop(a0);
      __ sra(a0, a0, kSmiTagSize);  // Un-tag.
      // Leave internal frame.
    }

    // Restore the function to a1, and the flag to t0.
    __ sll(at, a0, kPointerSizeLog2);
    __ addu(at, sp, at);
    __ lw(a1, MemOperand(at));
    __ Branch(USE_DELAY_SLOT, &patch_receiver);
    __ li(t0, Operand(0, RelocInfo::NONE32));  // In delay slot.

    __ bind(&use_global_proxy);
    __ lw(a2, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
    __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalProxyOffset));

    __ bind(&patch_receiver);
    __ sll(at, a0, kPointerSizeLog2);
    __ addu(a3, sp, at);
    __ sw(a2, MemOperand(a3, -kPointerSize));

    __ Branch(&shift_arguments);
  }

  // 3b. Check for function proxy.
  __ bind(&slow);
  __ li(t0, Operand(1, RelocInfo::NONE32));  // Indicate function proxy.
  __ Branch(&shift_arguments, eq, a2, Operand(JS_FUNCTION_PROXY_TYPE));

  __ bind(&non_function);
  __ li(t0, Operand(2, RelocInfo::NONE32));  // Indicate non-function.

  // 3c. Patch the first argument when calling a non-function. The
  //     CALL_NON_FUNCTION builtin expects the non-function callee as
  //     receiver, so overwrite the first argument which will ultimately
  //     become the receiver.
  // a0: actual number of arguments
  // a1: function
  // t0: call type (0: JS function, 1: function proxy, 2: non-function)
  __ sll(at, a0, kPointerSizeLog2);
  __ addu(a2, sp, at);
  __ sw(a1, MemOperand(a2, -kPointerSize));

  // 4. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver). Adjust argument count to make
  //    the original first argument the new receiver.
  // a0: actual number of arguments
  // a1: function
  // t0: call type (0: JS function, 1: function proxy, 2: non-function)
  __ bind(&shift_arguments);
  { Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ sll(at, a0, kPointerSizeLog2);
    __ addu(a2, sp, at);

    __ bind(&loop);
    __ lw(at, MemOperand(a2, -kPointerSize));
    __ sw(at, MemOperand(a2));
    __ Subu(a2, a2, Operand(kPointerSize));
    __ Branch(&loop, ne, a2, Operand(sp));
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ Subu(a0, a0, Operand(1));
    __ Pop();
  }

  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
  //     or a function proxy via CALL_FUNCTION_PROXY.
  // a0: actual number of arguments
  // a1: function
  // t0: call type (0: JS function, 1: function proxy, 2: non-function)
  { Label function, non_proxy;
    __ Branch(&function, eq, t0, Operand(zero_reg));
    // Expected number of arguments is 0 for CALL_NON_FUNCTION.
    __ mov(a2, zero_reg);
    __ Branch(&non_proxy, ne, t0, Operand(1));

    __ push(a1);  // Re-add proxy object as additional argument.
    __ Addu(a0, a0, Operand(1));
    __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    __ bind(&non_proxy);
    __ GetBuiltinFunction(a1, Builtins::CALL_NON_FUNCTION);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
    __ bind(&function);
  }

  // 5b. Get the code to call from the function and check that the number of
  //     expected arguments matches what we're providing. If so, jump
  //     (tail-call) to the code in register a3 without checking arguments.
  // a0: actual number of arguments
  // a1: function
  __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a2,
        FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
  __ sra(a2, a2, kSmiTagSize);
  // Check formal and actual parameter counts.
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET, ne, a2, Operand(a0));

  __ lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  ParameterCount expected(0);
  __ InvokeCode(a3, expected, expected, JUMP_FUNCTION, NullCallWrapper());
}


void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  const int kIndexOffset =
      StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize);
  const int kLimitOffset =
      StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize);
  const int kArgsOffset = 2 * kPointerSize;
  const int kRecvOffset = 3 * kPointerSize;
  const int kFunctionOffset = 4 * kPointerSize;
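
  // These offsets are fp-relative once the internal frame below is entered:
  // the function, receiver, and arguments object were pushed by the caller
  // and sit above the frame, while the index and limit slots live on the
  // expression stack at negative offsets.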

  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);
    __ lw(a0, MemOperand(fp, kFunctionOffset));  // Get the function.
    __ push(a0);
    __ lw(a0, MemOperand(fp, kArgsOffset));  // Get the args array.
    __ push(a0);
    // Returns (in v0) number of arguments to copy to stack as Smi.
    __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);

    // Check the stack for overflow. We are not trying to catch
    // interruptions (e.g. debug break and preemption) here, so the "real
    // stack limit" is checked.
    Label okay;
    __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
    // Make a2 the space we have left. The stack might already be overflowed
    // here which will cause a2 to become negative.
    __ subu(a2, sp, a2);
    // Check if the arguments will overflow the stack.
    __ sll(t3, v0, kPointerSizeLog2 - kSmiTagSize);
    __ Branch(&okay, gt, a2, Operand(t3));  // Signed comparison.

    // Out of stack space.
    __ lw(a1, MemOperand(fp, kFunctionOffset));
    __ Push(a1, v0);
    __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
    // End of stack check.

    // Push current limit and index.
    __ bind(&okay);
    __ mov(a1, zero_reg);
    __ Push(v0, a1);  // Limit and initial index.

    // Get the receiver.
    __ lw(a0, MemOperand(fp, kRecvOffset));

    // Check that the function is a JS function (otherwise it must be a
    // proxy).
    Label push_receiver;
    __ lw(a1, MemOperand(fp, kFunctionOffset));
    __ GetObjectType(a1, a2, a2);
    __ Branch(&push_receiver, ne, a2, Operand(JS_FUNCTION_TYPE));

    // Change context eagerly to get the right global object if necessary.
    __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
    // Load the shared function info while the function is still in a1.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));

    // Compute the receiver.
    // Do not transform the receiver for strict mode functions.
    Label call_to_object, use_global_proxy;
    __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCompilerHintsOffset));
    __ And(t3, a2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
                                 kSmiTagSize)));
    __ Branch(&push_receiver, ne, t3, Operand(zero_reg));

    // Do not transform the receiver for native (Compilerhints already in a2).
    __ And(t3, a2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
    __ Branch(&push_receiver, ne, t3, Operand(zero_reg));

    // Compute the receiver in sloppy mode.
    __ JumpIfSmi(a0, &call_to_object);
    __ LoadRoot(a1, Heap::kNullValueRootIndex);
    __ Branch(&use_global_proxy, eq, a0, Operand(a1));
    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
    __ Branch(&use_global_proxy, eq, a0, Operand(a2));

    // Check if the receiver is already a JavaScript object.
    // a0: receiver
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ GetObjectType(a0, a1, a1);
    __ Branch(&push_receiver, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));

    // Convert the receiver to a regular object.
    // a0: receiver
    __ bind(&call_to_object);
    __ push(a0);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
    __ mov(a0, v0);  // Put object in a0 to match other paths to push_receiver.
    __ Branch(&push_receiver);

    __ bind(&use_global_proxy);
    __ lw(a0, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
    __ lw(a0, FieldMemOperand(a0, GlobalObject::kGlobalProxyOffset));

    // Push the receiver.
    // a0: receiver
    __ bind(&push_receiver);
    __ push(a0);

    // Copy all arguments from the array to the stack.
    Label entry, loop;
    __ lw(a0, MemOperand(fp, kIndexOffset));
    __ Branch(&entry);

    // Load the current argument from the arguments array and push it to the
    // stack.
    // a0: current argument index
    __ bind(&loop);
    __ lw(a1, MemOperand(fp, kArgsOffset));
    __ Push(a1, a0);

    // Call the runtime to access the property in the arguments array.
    __ CallRuntime(Runtime::kGetProperty, 2);
    __ push(v0);

    // Advance the index in its slot on the frame.
    __ lw(a0, MemOperand(fp, kIndexOffset));
    __ Addu(a0, a0, Operand(1 << kSmiTagSize));
    __ sw(a0, MemOperand(fp, kIndexOffset));

    // Test if the copy loop has finished copying all the elements from the
    // arguments object.
    __ bind(&entry);
    __ lw(a1, MemOperand(fp, kLimitOffset));
    __ Branch(&loop, ne, a0, Operand(a1));

    // Call the function.
    Label call_proxy;
    ParameterCount actual(a0);
    __ sra(a0, a0, kSmiTagSize);
    __ lw(a1, MemOperand(fp, kFunctionOffset));
    __ GetObjectType(a1, a2, a2);
    __ Branch(&call_proxy, ne, a2, Operand(JS_FUNCTION_TYPE));

    __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper());

    frame_scope.GenerateLeaveFrame();
    __ Ret(USE_DELAY_SLOT);
    __ Addu(sp, sp, Operand(3 * kPointerSize));  // In delay slot.

    // Call the function proxy.
    __ bind(&call_proxy);
    __ push(a1);  // Add function proxy as last argument.
    __ Addu(a0, a0, Operand(1));
    __ li(a2, Operand(0, RelocInfo::NONE32));
    __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY);
    __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
    // Tear down the internal frame and remove function, receiver and args.
  }

  __ Ret(USE_DELAY_SLOT);
  __ Addu(sp, sp, Operand(3 * kPointerSize));  // In delay slot.
}


static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
                                      Label* stack_overflow) {
  // ----------- S t a t e -------------
  //  -- a0 : actual number of arguments
  //  -- a1 : function (passed through to callee)
  //  -- a2 : expected number of arguments
  // -----------------------------------
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  __ LoadRoot(t1, Heap::kRealStackLimitRootIndex);
  // Make t1 the space we have left. The stack might already be overflowed
  // here which will cause t1 to become negative.
  __ subu(t1, sp, t1);
  // Check if the arguments will overflow the stack.
  __ sll(at, a2, kPointerSizeLog2);
  // Signed comparison.
  __ Branch(stack_overflow, le, t1, Operand(at));
}
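
// The check above is conservative: t1 is the number of bytes left between
// sp and the real stack limit, `at` is the number of bytes the expected
// arguments will occupy, and the signed comparison also catches the case
// where sp has already passed the limit and t1 went negative.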


static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ sll(a0, a0, kSmiTagSize);
  __ li(t0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ MultiPush(a0.bit() | a1.bit() | t0.bit() | fp.bit() | ra.bit());
  __ Addu(fp, sp,
      Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
}


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- v0 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then tear down the parameters.
  __ lw(a1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                             kPointerSize)));
  __ mov(sp, fp);
  __ MultiPop(fp.bit() | ra.bit());
  __ sll(t0, a1, kPointerSizeLog2 - kSmiTagSize);
  __ Addu(sp, sp, t0);
  // Adjust for the receiver.
  __ Addu(sp, sp, Operand(kPointerSize));
}
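
// On exit the adaptor restores the caller's frame and then drops the a1
// (smi) arguments it pushed: shifting a smi left by
// kPointerSizeLog2 - kSmiTagSize converts it straight into a byte count,
// and one extra word removes the receiver.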


void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // State setup as expected by MacroAssembler::InvokePrologue.
  // ----------- S t a t e -------------
  //  -- a0: actual arguments count
  //  -- a1: function (passed through to callee)
  //  -- a2: expected arguments count
  // -----------------------------------

  Label stack_overflow;
  ArgumentAdaptorStackCheck(masm, &stack_overflow);
  Label invoke, dont_adapt_arguments;

  Label enough, too_few;
  __ lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  __ Branch(&dont_adapt_arguments, eq,
            a2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  // We use an unsigned comparison (Uless) since the argument counts are
  // never negative.
  __ Branch(&too_few, Uless, a0, Operand(a2));

  {  // Enough parameters: actual >= expected.
    // a0: actual number of arguments as a smi
    // a1: function
    // a2: expected number of arguments
    // a3: code entry to call
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into a0 and copy end address into a2.
    __ sll(a0, a0, kPointerSizeLog2 - kSmiTagSize);
    __ Addu(a0, fp, a0);
    // Adjust for return address and receiver.
    __ Addu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address.
    __ sll(a2, a2, kPointerSizeLog2);
    __ subu(a2, a0, a2);

    // Copy the arguments (including the receiver) to the new stack frame.
    // a0: copy start address
    // a1: function
    // a2: copy end address
    // a3: code entry to call

    Label copy;
    __ bind(&copy);
    __ lw(t0, MemOperand(a0));
    __ push(t0);
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a2));
    __ addiu(a0, a0, -kPointerSize);  // In delay slot.

    __ jmp(&invoke);
  }

  {  // Too few parameters: actual < expected.
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into a0; the copy end address is fp.
    // a0: actual number of arguments as a smi
    // a1: function
    // a2: expected number of arguments
    // a3: code entry to call
    __ sll(a0, a0, kPointerSizeLog2 - kSmiTagSize);
    __ Addu(a0, fp, a0);
    // Adjust for return address and receiver.
    __ Addu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address. Also adjust for return address.
    __ Addu(t3, fp, kPointerSize);

    // Copy the arguments (including the receiver) to the new stack frame.
    // a0: copy start address
    // a1: function
    // a2: expected number of arguments
    // a3: code entry to call
    // t3: copy end address
    Label copy;
    __ bind(&copy);
    __ lw(t0, MemOperand(a0));  // Adjusted above for return addr and receiver.
    __ Subu(sp, sp, kPointerSize);
    __ Subu(a0, a0, kPointerSize);
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(t3));
    __ sw(t0, MemOperand(sp));  // In the delay slot.

    // Fill the remaining expected arguments with undefined.
    // a1: function
    // a2: expected number of arguments
    // a3: code entry to call
    __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
    __ sll(t2, a2, kPointerSizeLog2);
    __ Subu(a2, fp, Operand(t2));
    // Adjust for frame.
    __ Subu(a2, a2, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                            2 * kPointerSize));

    Label fill;
    __ bind(&fill);
    __ Subu(sp, sp, kPointerSize);
    __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(a2));
    __ sw(t0, MemOperand(sp));
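
    // This loop grows the stack one word at a time, storing undefined from
    // the branch delay slot, until sp reaches the end address computed in a2.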
  }

  // Call the entry point.
  __ bind(&invoke);

  __ Call(a3);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Ret();


  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ Jump(a3);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    EnterArgumentsAdaptorFrame(masm);
    __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
    __ break_(0xCC);
  }
}


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_MIPS