// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "v8.h"

#if V8_TARGET_ARCH_MIPS

#include "codegen.h"
#include "debug.h"
#include "deoptimizer.h"
#include "full-codegen.h"
#include "runtime.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)

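// Note: through ACCESS_MASM, each `__ op(...)` below expands to
// masm->op(...), so these builtin generators read like pseudo-assembly
// emitted through the MacroAssembler.
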
void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- a0                 : number of arguments excluding receiver
  //  -- a1                 : called function (only guaranteed when
  //  --                      extra_args requires it)
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------

  // Insert extra arguments.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    num_extra_args = 1;
    __ push(a1);
  } else {
    ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects s0 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ Addu(s0, a0, num_extra_args + 1);
  __ sll(s1, s0, kPointerSizeLog2);
  __ Subu(s1, s1, kPointerSize);
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}

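
// The two loader helpers below follow the same indirection chain:
// current context -> global object -> native context -> function slot.
// Only the |result| register is clobbered.
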
// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the native context.
  __ lw(result,
        MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ lw(result,
        FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the InternalArray function from the native context.
  __ lw(result,
        MemOperand(result,
                   Context::SlotOffset(
                       Context::INTERNAL_ARRAY_FUNCTION_INDEX)));
}


// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the native context.
  __ lw(result,
        MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ lw(result,
        FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the Array function from the native context.
  __ lw(result,
        MemOperand(result,
                   Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
}

void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, a1);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(a2, t0);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction,
              t0, Operand(zero_reg));
    __ GetObjectType(a2, a3, t0);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction,
              t0, Operand(MAP_TYPE));
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, a1);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(a2, t0);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction1,
              t0, Operand(zero_reg));
    __ GetObjectType(a2, a3, t0);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction2,
              t0, Operand(MAP_TYPE));
  }

  // Run the native code for the Array function called as a normal function.
  // Tail call a stub.
  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

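
// Generate_StringConstructCode implements `new String(value)`: the argument
// is converted to a string (via the number-string cache, an is-string check,
// or the TO_STRING builtin) and then boxed in a JSValue wrapper.
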
void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : constructor function
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_ctor_calls(), 1, a2, a3);

  Register function = a1;
  if (FLAG_debug_code) {
    __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, a2);
    __ Assert(eq, kUnexpectedStringFunction, function, Operand(a2));
  }

  // Load the first argument in a0 and get rid of the rest.
  Label no_arguments;
  __ Branch(&no_arguments, eq, a0, Operand(zero_reg));
  // First argument = sp[(argc - 1) * 4].
  __ Subu(a0, a0, Operand(1));
  __ sll(a0, a0, kPointerSizeLog2);
  __ Addu(sp, a0, sp);
  __ lw(a0, MemOperand(sp));
  // sp now points to args[0]; drop args[0] + receiver.
  __ Drop(2);

  Register argument = a2;
  Label not_cached, argument_is_string;
  __ LookupNumberStringCache(a0,        // Input.
                             argument,  // Result.
                             a3,        // Scratch.
                             t0,        // Scratch.
                             t1,        // Scratch.
                             &not_cached);
  __ IncrementCounter(counters->string_ctor_cached_number(), 1, a3, t0);
  __ bind(&argument_is_string);

  // ----------- S t a t e -------------
  //  -- a2     : argument converted to string
  //  -- a1     : constructor function
  //  -- ra     : return address
  // -----------------------------------

  Label gc_required;
  __ Allocate(JSValue::kSize,
              v0,  // Result.
              a3,  // Scratch.
              t0,  // Scratch.
              &gc_required,
              TAG_OBJECT);

  // Initialize the String object.
  Register map = a3;
  __ LoadGlobalFunctionInitialMap(function, map, t0);
  if (FLAG_debug_code) {
    __ lbu(t0, FieldMemOperand(map, Map::kInstanceSizeOffset));
    __ Assert(eq, kUnexpectedStringWrapperInstanceSize,
              t0, Operand(JSValue::kSize >> kPointerSizeLog2));
    __ lbu(t0, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
    __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper,
              t0, Operand(zero_reg));
  }
  __ sw(map, FieldMemOperand(v0, HeapObject::kMapOffset));

  __ LoadRoot(a3, Heap::kEmptyFixedArrayRootIndex);
  __ sw(a3, FieldMemOperand(v0, JSObject::kPropertiesOffset));
  __ sw(a3, FieldMemOperand(v0, JSObject::kElementsOffset));

  __ sw(argument, FieldMemOperand(v0, JSValue::kValueOffset));

  // Ensure the object is fully initialized.
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);

  __ Ret();

  // The argument was not found in the number to string cache. Check
  // if it's a string already before calling the conversion builtin.
  Label convert_argument;
  __ bind(&not_cached);
  __ JumpIfSmi(a0, &convert_argument);

  // Is it a String?
  __ lw(a2, FieldMemOperand(a0, HeapObject::kMapOffset));
  __ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  __ And(t0, a3, Operand(kIsNotStringMask));
  __ Branch(&convert_argument, ne, t0, Operand(zero_reg));
  __ mov(argument, a0);
  __ IncrementCounter(counters->string_ctor_conversions(), 1, a3, t0);
  __ Branch(&argument_is_string);

  // Invoke the conversion builtin and put the result into a2.
  __ bind(&convert_argument);
  __ push(function);  // Preserve the function.
  __ IncrementCounter(counters->string_ctor_conversions(), 1, a3, t0);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(a0);
    __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
  }
  __ pop(function);
  __ mov(argument, v0);
  __ Branch(&argument_is_string);

  // Load the empty string into a2, remove the receiver from the
  // stack, and jump back to the case where the argument is a string.
  __ bind(&no_arguments);
  __ LoadRoot(argument, Heap::kempty_stringRootIndex);
  __ Drop(1);
  __ Branch(&argument_is_string);

  // At this point the argument is already a string. Call runtime to
  // create a string wrapper.
  __ bind(&gc_required);
  __ IncrementCounter(counters->string_ctor_gc_required(), 1, a3, t0);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(argument);
    __ CallRuntime(Runtime::kNewStringWrapper, 1);
  }
  __ Ret();
}

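
// The helpers below implement the lazy (re)compilation dance: call into the
// runtime with the function as argument, then tail-call either the code
// object the runtime returned in v0 or the code currently attached to the
// function's SharedFunctionInfo.
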
static void CallRuntimePassFunction(
    MacroAssembler* masm, Runtime::FunctionId function_id) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack.
  // Push call kind information and function as parameter to the runtime call.
  __ Push(a1, a1);

  __ CallRuntime(function_id, 1);
  // Restore call kind information and receiver.
  __ Pop(a1);
}


static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset));
  __ Addu(at, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}


static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
  __ Addu(at, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}

void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive. A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ LoadRoot(t0, Heap::kStackLimitRootIndex);
  __ Branch(&ok, hs, sp, Operand(t0));

  CallRuntimePassFunction(masm, Runtime::kHiddenTryInstallOptimizedCode);
  GenerateTailCallToReturnedCode(masm);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}

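
// Common body for the JSConstructStub* builtins. It first tries to allocate
// the receiver inline (optionally running the slack-tracking countdown and
// appending an AllocationMemento) and falls back to the
// Runtime::kHiddenNewObject* calls when inlined allocation is disabled or a
// precondition fails; t4 holds the receiver in either case.
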
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool count_constructions,
                                           bool create_memento) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- a1     : constructor function
  //  -- a2     : allocation site or undefined
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  // Should never count constructions for api objects.
  ASSERT(!is_api_function || !count_constructions);

  // Should never create mementos for api functions.
  ASSERT(!is_api_function || !create_memento);

  // Should never create mementos before slack tracking is finished.
  ASSERT(!count_constructions || !create_memento);

  Isolate* isolate = masm->isolate();

  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- a1     : constructor function
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    if (create_memento) {
      __ AssertUndefinedOrAllocationSite(a2, a3);
      __ push(a2);
    }

    // Preserve the two incoming parameters on the stack.
    __ sll(a0, a0, kSmiTagSize);  // Tag arguments count.
    __ MultiPushReversed(a0.bit() | a1.bit());

    // Use t7 to hold undefined, which is used in several places below.
    __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);

    Label rt_call, allocated;
    // Try to allocate the object without transitioning into C code. If any of
    // the preconditions is not met, the code bails out to the runtime call.
    if (FLAG_inline_new) {
      Label undo_allocation;
#ifdef ENABLE_DEBUGGER_SUPPORT
      ExternalReference debug_step_in_fp =
          ExternalReference::debug_step_in_fp_address(isolate);
      __ li(a2, Operand(debug_step_in_fp));
      __ lw(a2, MemOperand(a2));
      __ Branch(&rt_call, ne, a2, Operand(zero_reg));
#endif

      // Load the initial map and verify that it is in fact a map.
      // a1: constructor function
      __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
      __ JumpIfSmi(a2, &rt_call);
      __ GetObjectType(a2, a3, t4);
      __ Branch(&rt_call, ne, t4, Operand(MAP_TYPE));

      // Check that the constructor is not constructing a JSFunction (see
      // comments in Runtime_NewObject in runtime.cc), in which case the
      // initial map's instance type would be JS_FUNCTION_TYPE.
      // a1: constructor function
      // a2: initial map
      __ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset));
      __ Branch(&rt_call, eq, a3, Operand(JS_FUNCTION_TYPE));

      if (count_constructions) {
        Label allocate;
        // Decrease generous allocation count.
        __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
        MemOperand constructor_count =
            FieldMemOperand(a3, SharedFunctionInfo::kConstructionCountOffset);
        __ lbu(t0, constructor_count);
        __ Subu(t0, t0, Operand(1));
        __ sb(t0, constructor_count);
        __ Branch(&allocate, ne, t0, Operand(zero_reg));

        __ Push(a1, a2, a1);  // a1 = Constructor.
        // The call will replace the stub, so the countdown is only done once.
        __ CallRuntime(Runtime::kHiddenFinalizeInstanceSize, 1);

        __ Pop(a1, a2);

        __ bind(&allocate);
      }

      // Now allocate the JSObject on the heap.
      // a1: constructor function
      // a2: initial map
      __ lbu(a3, FieldMemOperand(a2, Map::kInstanceSizeOffset));
      if (create_memento) {
        __ Addu(a3, a3, Operand(AllocationMemento::kSize / kPointerSize));
      }

      __ Allocate(a3, t4, t5, t6, &rt_call, SIZE_IN_WORDS);

      // Allocated the JSObject, now initialize the fields. Map is set to
      // initial map and properties and elements are set to empty fixed array.
      // a1: constructor function
      // a2: initial map
      // a3: object size (not including memento if create_memento)
      // t4: JSObject (not tagged)
      __ LoadRoot(t6, Heap::kEmptyFixedArrayRootIndex);
      __ mov(t5, t4);
      __ sw(a2, MemOperand(t5, JSObject::kMapOffset));
      __ sw(t6, MemOperand(t5, JSObject::kPropertiesOffset));
      __ sw(t6, MemOperand(t5, JSObject::kElementsOffset));
      __ Addu(t5, t5, Operand(3 * kPointerSize));
      ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
      ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
      ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset);

      // Fill all the in-object properties with appropriate filler.
      // a1: constructor function
      // a2: initial map
      // a3: object size (in words, including memento if create_memento)
      // t4: JSObject (not tagged)
      // t5: First in-object property of JSObject (not tagged)
      ASSERT_EQ(3 * kPointerSize, JSObject::kHeaderSize);

      if (count_constructions) {
        __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
        __ lw(a0, FieldMemOperand(a2, Map::kInstanceSizesOffset));
        __ Ext(a0, a0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte,
               kBitsPerByte);
        __ sll(at, a0, kPointerSizeLog2);
        __ addu(a0, t5, at);
        __ sll(at, a3, kPointerSizeLog2);
        __ Addu(t6, t4, Operand(at));  // End of object.
        // a0: offset of first field after pre-allocated fields
        if (FLAG_debug_code) {
          __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields,
                    a0, Operand(t6));
        }
        __ InitializeFieldsWithFiller(t5, a0, t7);
        // To allow for truncation.
        __ LoadRoot(t7, Heap::kOnePointerFillerMapRootIndex);
        __ InitializeFieldsWithFiller(t5, t6, t7);
      } else if (create_memento) {
        __ Subu(t7, a3, Operand(AllocationMemento::kSize / kPointerSize));
        __ sll(at, t7, kPointerSizeLog2);
        __ Addu(a0, t4, Operand(at));  // End of object.
        __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
        __ InitializeFieldsWithFiller(t5, a0, t7);

        // Fill in memento fields.
        // t5: points to the allocated but uninitialized memento.
        __ LoadRoot(t7, Heap::kAllocationMementoMapRootIndex);
        ASSERT_EQ(0 * kPointerSize, AllocationMemento::kMapOffset);
        __ sw(t7, MemOperand(t5));
        __ Addu(t5, t5, kPointerSize);
        // Load the AllocationSite.
        __ lw(t7, MemOperand(sp, 2 * kPointerSize));
        ASSERT_EQ(1 * kPointerSize, AllocationMemento::kAllocationSiteOffset);
        __ sw(t7, MemOperand(t5));
        __ Addu(t5, t5, kPointerSize);
      } else {
        __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
        __ sll(at, a3, kPointerSizeLog2);
        __ Addu(a0, t4, Operand(at));  // End of object.
        __ InitializeFieldsWithFiller(t5, a0, t7);
      }

      // Add the object tag to make the JSObject real, so that we can continue
      // and jump into the continuation code at any time from now on. Any
      // failures need to undo the allocation, so that the heap is in a
      // consistent state and verifiable.
      __ Addu(t4, t4, Operand(kHeapObjectTag));

      // Check if a non-empty properties array is needed. Continue with
      // allocated object if not; fall through to runtime call if it is.
      // a1: constructor function
      // t4: JSObject
      // t5: start of next object (not tagged)
      __ lbu(a3, FieldMemOperand(a2, Map::kUnusedPropertyFieldsOffset));
      // The instance sizes field contains both pre-allocated property fields
      // and in-object properties.
      __ lw(a0, FieldMemOperand(a2, Map::kInstanceSizesOffset));
      __ Ext(t6, a0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte,
             kBitsPerByte);
      __ Addu(a3, a3, Operand(t6));
      __ Ext(t6, a0, Map::kInObjectPropertiesByte * kBitsPerByte,
             kBitsPerByte);
      __ subu(a3, a3, t6);

      // Done if no extra properties are to be allocated.
      __ Branch(&allocated, eq, a3, Operand(zero_reg));
      __ Assert(greater_equal, kPropertyAllocationCountFailed,
                a3, Operand(zero_reg));

      // Scale the number of elements by pointer size and add the header for
      // FixedArrays to the start of the next object calculation from above.
      // a1: constructor function
      // a3: number of elements in properties array
      // t4: JSObject
      // t5: start of next object
      __ Addu(a0, a3, Operand(FixedArray::kHeaderSize / kPointerSize));
      __ Allocate(
          a0,
          t5,
          t6,
          a2,
          &undo_allocation,
          static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | SIZE_IN_WORDS));

      // Initialize the FixedArray.
      // a1: constructor function
      // a3: number of elements in properties array (untagged)
      // t4: JSObject
      // t5: start of next object
      __ LoadRoot(t6, Heap::kFixedArrayMapRootIndex);
      __ mov(a2, t5);
      __ sw(t6, MemOperand(a2, JSObject::kMapOffset));
      __ sll(a0, a3, kSmiTagSize);
      __ sw(a0, MemOperand(a2, FixedArray::kLengthOffset));
      __ Addu(a2, a2, Operand(2 * kPointerSize));

      ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
      ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset);

      // Initialize the fields to undefined.
      // a1: constructor function
      // a2: First element of FixedArray (not tagged)
      // a3: number of elements in properties array
      // t4: JSObject
      // t5: FixedArray (not tagged)
      __ sll(t3, a3, kPointerSizeLog2);
      __ addu(t6, a2, t3);  // End of object.
      ASSERT_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
      { Label loop, entry;
        if (count_constructions) {
          __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
        } else if (FLAG_debug_code) {
          __ LoadRoot(t8, Heap::kUndefinedValueRootIndex);
          __ Assert(eq, kUndefinedValueNotLoaded, t7, Operand(t8));
        }
        __ jmp(&entry);
        __ bind(&loop);
        __ sw(t7, MemOperand(a2));
        __ addiu(a2, a2, kPointerSize);
        __ bind(&entry);
        __ Branch(&loop, less, a2, Operand(t6));
      }

      // Store the initialized FixedArray into the properties field of
      // the JSObject.
      // a1: constructor function
      // t4: JSObject
      // t5: FixedArray (not tagged)
      __ Addu(t5, t5, Operand(kHeapObjectTag));  // Add the heap tag.
      __ sw(t5, FieldMemOperand(t4, JSObject::kPropertiesOffset));

      // Continue with JSObject being successfully allocated.
      // a1: constructor function
      // t4: JSObject
      __ jmp(&allocated);

      // Undo the setting of the new top so that the heap is verifiable. For
      // example, the map's unused properties potentially do not match the
      // allocated object's unused properties.
      // t4: JSObject (previous new top)
      __ bind(&undo_allocation);
      __ UndoAllocationInNewSpace(t4, t5);
    }

    // Allocate the new receiver object using the runtime call.
    // a1: constructor function
    __ bind(&rt_call);
    if (create_memento) {
      // Get the cell or allocation site.
      __ lw(a2, MemOperand(sp, 2 * kPointerSize));
      __ push(a2);
    }

    __ push(a1);  // Argument for Runtime_NewObject.
    if (create_memento) {
      __ CallRuntime(Runtime::kHiddenNewObjectWithAllocationSite, 2);
    } else {
      __ CallRuntime(Runtime::kHiddenNewObject, 1);
    }
    __ mov(t4, v0);

    // If we ended up using the runtime, and we want a memento, then the
    // runtime call made it for us, and we shouldn't do create count
    // increment.
    Label count_incremented;
    if (create_memento) {
      __ jmp(&count_incremented);
    }

    // Receiver for constructor call allocated.
    // t4: JSObject
    __ bind(&allocated);

    if (create_memento) {
      __ lw(a2, MemOperand(sp, kPointerSize * 2));
      __ LoadRoot(t5, Heap::kUndefinedValueRootIndex);
      __ Branch(&count_incremented, eq, a2, Operand(t5));
      // a2 is an AllocationSite. We are creating a memento from it, so we
      // need to increment the memento create count.
      __ lw(a3, FieldMemOperand(a2,
                                AllocationSite::kPretenureCreateCountOffset));
      __ Addu(a3, a3, Operand(Smi::FromInt(1)));
      __ sw(a3, FieldMemOperand(a2,
                                AllocationSite::kPretenureCreateCountOffset));
      __ bind(&count_incremented);
    }

    __ Push(t4, t4);

    // Reload the number of arguments from the stack.
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: constructor function
    // sp[3]: number of arguments (smi-tagged)
    __ lw(a1, MemOperand(sp, 2 * kPointerSize));
    __ lw(a3, MemOperand(sp, 3 * kPointerSize));

    // Set up pointer to last argument.
    __ Addu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Set up number of arguments for function call below.
    __ srl(a0, a3, kSmiTagSize);

    // Copy arguments and receiver to the expression stack.
    // a0: number of arguments
    // a1: constructor function
    // a2: address of last argument (caller sp)
    // a3: number of arguments (smi-tagged)
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: constructor function
    // sp[3]: number of arguments (smi-tagged)
    Label loop, entry;
    __ jmp(&entry);
    __ bind(&loop);
    __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize);
    __ Addu(t0, a2, Operand(t0));
    __ lw(t1, MemOperand(t0));
    __ push(t1);
    __ bind(&entry);
    __ Addu(a3, a3, Operand(-2));  // Decrement the smi-tagged index by one.
    __ Branch(&loop, greater_equal, a3, Operand(zero_reg));

    // Call the function.
    // a0: number of arguments
    // a1: constructor function
    if (is_api_function) {
      __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      __ Call(code, RelocInfo::CODE_TARGET);
    } else {
      ParameterCount actual(a0);
      __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper());
    }

    // Store offset of return address for deoptimizer.
    if (!is_api_function && !count_constructions) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7
    // on page 74.
    Label use_receiver, exit;

    // If the result is a smi, it is *not* an object in the ECMA sense.
    // v0: result
    // sp[0]: receiver (newly allocated object)
    // sp[1]: constructor function
    // sp[2]: number of arguments (smi-tagged)
    __ JumpIfSmi(v0, &use_receiver);

    // If the type of the result (stored in its map) is less than
    // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
    __ GetObjectType(v0, a1, a3);
    __ Branch(&exit, greater_equal, a3, Operand(FIRST_SPEC_OBJECT_TYPE));

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ lw(v0, MemOperand(sp));

    // Remove receiver from the stack, remove caller arguments, and
    // return.
    __ bind(&exit);
    // v0: result
    // sp[0]: receiver (newly allocated object)
    // sp[1]: constructor function
    // sp[2]: number of arguments (smi-tagged)
    __ lw(a1, MemOperand(sp, 2 * kPointerSize));

    // Leave construct frame.
  }

  __ sll(t0, a1, kPointerSizeLog2 - 1);  // a1 is smi-tagged argc.
  __ Addu(sp, sp, t0);
  __ Addu(sp, sp, kPointerSize);
  __ IncrementCounter(isolate->counters()->constructed_objects(), 1, a1, a2);
  __ Ret();
}

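
// The three construct-stub entry points differ only in the flags they pass
// to the helper: the countdown variant decrements the slack-tracking
// construction counter, the generic variant may create allocation mementos
// when pretenuring of call-new is enabled, and the api variant dispatches
// through HandleApiCallConstruct instead of InvokeFunction.
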
void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true, false);
}


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, FLAG_pretenuring_call_new);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false, false);
}

static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from JSEntryStub::GenerateBody

  // ----------- S t a t e -------------
  //  -- a0: code entry
  //  -- a1: function
  //  -- a2: receiver_pointer
  //  -- a3: argc
  //  -- s0: argv
  // -----------------------------------
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Clear the context before we push it when entering the JS frame.
  __ mov(cp, zero_reg);

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context from the function argument.
    __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

    // Push the function and the receiver onto the stack.
    __ Push(a1, a2);

    // Copy arguments to the stack in a loop.
    // a3: argc
    // s0: argv, i.e. points to first arg
    Label loop, entry;
    __ sll(t0, a3, kPointerSizeLog2);
    __ addu(t2, s0, t0);
    __ b(&entry);
    __ nop();   // Branch delay slot nop.
    // t2 points past last arg.
    __ bind(&loop);
    __ lw(t0, MemOperand(s0));  // Read next parameter.
    __ addiu(s0, s0, kPointerSize);
    __ lw(t0, MemOperand(t0));  // Dereference handle.
    __ push(t0);  // Push parameter.
    __ bind(&entry);
    __ Branch(&loop, ne, s0, Operand(t2));

    // Initialize all JavaScript callee-saved registers, since they will be seen
    // by the garbage collector as part of handlers.
    __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
    __ mov(s1, t0);
    __ mov(s2, t0);
    __ mov(s3, t0);
    __ mov(s4, t0);
    __ mov(s5, t0);
    // s6 holds the root address. Do not clobber.
    // s7 is cp. Do not init.

    // Invoke the code and pass argc as a0.
    __ mov(a0, a3);
    if (is_construct) {
      // No type feedback cell is available.
      __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
      CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
      __ CallStub(&stub);
    } else {
      ParameterCount actual(a0);
      __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper());
    }

    // Leave internal frame.
  }

  __ Jump(ra);
}


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}

void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kHiddenCompileUnoptimized);
  GenerateTailCallToReturnedCode(masm);
}


static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack.
  // Push function as parameter to the runtime call.
  __ Push(a1, a1);
  // Whether to compile in a background thread.
  __ Push(masm->isolate()->factory()->ToBoolean(concurrent));

  __ CallRuntime(Runtime::kHiddenCompileOptimized, 2);
  // Restore receiver.
  __ Pop(a1);
}


void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  CallCompileOptimized(masm, false);
  GenerateTailCallToReturnedCode(masm);
}


void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  CallCompileOptimized(masm, true);
  GenerateTailCallToReturnedCode(masm);
}

static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // Set a0 to point to the head of the PlatformCodeAge sequence.
  __ Subu(a0, a0,
      Operand((kNoCodeAgeSequenceLength - 1) * Assembler::kInstrSize));

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   a0 - contains return address (beginning of patch sequence)
  //   a1 - isolate
  RegList saved_regs =
      (a0.bit() | a1.bit() | ra.bit() | fp.bit()) & ~sp.bit();
  FrameScope scope(masm, StackFrame::MANUAL);
  __ MultiPush(saved_regs);
  __ PrepareCallCFunction(2, 0, a2);
  __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  __ MultiPop(saved_regs);
  __ Jump(a0);
}

#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}                                                            \
void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR

void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
  // that make_code_young doesn't do any garbage collection which allows us to
  // save/restore the registers without worrying about which of them contain
  // pointers.

  // Set a0 to point to the head of the PlatformCodeAge sequence.
  __ Subu(a0, a0,
      Operand((kNoCodeAgeSequenceLength - 1) * Assembler::kInstrSize));

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   a0 - contains return address (beginning of patch sequence)
  //   a1 - isolate
  RegList saved_regs =
      (a0.bit() | a1.bit() | ra.bit() | fp.bit()) & ~sp.bit();
  FrameScope scope(masm, StackFrame::MANUAL);
  __ MultiPush(saved_regs);
  __ PrepareCallCFunction(2, 0, a2);
  __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
      2);
  __ MultiPop(saved_regs);

  // Perform prologue operations usually performed by the young code stub.
  __ Push(ra, fp, cp, a1);
  __ Addu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));

  // Jump to point after the code-age stub.
  __ Addu(a0, a0, Operand(kNoCodeAgeSequenceLength * Assembler::kInstrSize));
  __ Jump(a0);
}


void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}

static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    __ MultiPush(kJSCallerSaved | kCalleeSaved);
    // Pass the function and deoptimization type to the runtime system.
    __ CallRuntime(Runtime::kHiddenNotifyStubFailure, 0, save_doubles);
    __ MultiPop(kJSCallerSaved | kCalleeSaved);
  }

  __ Addu(sp, sp, Operand(kPointerSize));  // Ignore state.
  __ Jump(ra);  // Jump to miss handler.
}


void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}


void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}

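
// After a deopt notification the full-codegen state is on top of the stack:
// one word encoding the state itself, plus (in the TOS_REG case) the saved
// top-of-stack value that has to be rematerialized into v0 before returning.
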
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass the function and deoptimization type to the runtime system.
    __ li(a0, Operand(Smi::FromInt(static_cast<int>(type))));
    __ push(a0);
    __ CallRuntime(Runtime::kHiddenNotifyDeoptimized, 1);
  }

  // Get the full codegen state from the stack and untag it -> t2.
  __ lw(t2, MemOperand(sp, 0 * kPointerSize));
  __ SmiUntag(t2);
  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ Branch(&with_tos_register,
            ne, t2, Operand(FullCodeGenerator::NO_REGISTERS));
  __ Ret(USE_DELAY_SLOT);
  // Safe to fill delay slot: Addu will emit one instruction.
  __ Addu(sp, sp, Operand(1 * kPointerSize));  // Remove state.

  __ bind(&with_tos_register);
  __ lw(v0, MemOperand(sp, 1 * kPointerSize));
  __ Branch(&unknown_state, ne, t2, Operand(FullCodeGenerator::TOS_REG));

  __ Ret(USE_DELAY_SLOT);
  // Safe to fill delay slot: Addu will emit one instruction.
  __ Addu(sp, sp, Operand(2 * kPointerSize));  // Remove state.

  __ bind(&unknown_state);
  __ stop("no cases left");
}


void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}

void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ push(a0);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
  }

  // If the code object is null, just return to the unoptimized code.
  __ Ret(eq, v0, Operand(Smi::FromInt(0)));

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ lw(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
  __ lw(a1, MemOperand(a1, FixedArray::OffsetOfElementAt(
      DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));
  __ SmiUntag(a1);

  // Compute the target address = code_obj + header_size + osr_offset
  // <entry_addr> = <code_obj> + #header_size + <osr_offset>
  __ addu(v0, v0, a1);
  __ addiu(ra, v0, Code::kHeaderSize - kHeapObjectTag);

  // And "return" to the OSR entry point of the function.
  __ Ret();
}

void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  // We check the stack limit as an indicator that recompilation might be done.
  Label ok;
  __ LoadRoot(at, Heap::kStackLimitRootIndex);
  __ Branch(&ok, hs, sp, Operand(at));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kHiddenStackGuard, 0);
  }
  __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
          RelocInfo::CODE_TARGET);

  __ bind(&ok);
  __ Ret();
}

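
// Generate_FunctionCall implements Function.prototype.call: patch the
// receiver according to the callee's strict/native flags, shift all
// arguments down over the receiver slot, then dispatch to the function, a
// function proxy, or CALL_NON_FUNCTION; t0 encodes which case was taken.
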
void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // a0: actual number of arguments
  { Label done;
    __ Branch(&done, ne, a0, Operand(zero_reg));
    __ LoadRoot(t2, Heap::kUndefinedValueRootIndex);
    __ push(t2);
    __ Addu(a0, a0, Operand(1));
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack, check
  //    if it is a function.
  // a0: actual number of arguments
  Label slow, non_function;
  __ sll(at, a0, kPointerSizeLog2);
  __ addu(at, sp, at);
  __ lw(a1, MemOperand(at));
  __ JumpIfSmi(a1, &non_function);
  __ GetObjectType(a1, a2, a2);
  __ Branch(&slow, ne, a2, Operand(JS_FUNCTION_TYPE));

  // 3a. Patch the first argument if necessary when calling a function.
  // a0: actual number of arguments
  // a1: function
  Label shift_arguments;
  __ li(t0, Operand(0, RelocInfo::NONE32));  // Indicate regular JS_FUNCTION.
  { Label convert_to_object, use_global_receiver, patch_receiver;
    // Change context eagerly in case we need the global receiver.
    __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

    // Do not transform the receiver for strict mode functions.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
    __ lw(a3, FieldMemOperand(a2, SharedFunctionInfo::kCompilerHintsOffset));
    __ And(t3, a3, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
                                 kSmiTagSize)));
    __ Branch(&shift_arguments, ne, t3, Operand(zero_reg));

    // Do not transform the receiver for native (Compilerhints already in a3).
    __ And(t3, a3, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
    __ Branch(&shift_arguments, ne, t3, Operand(zero_reg));

    // Compute the receiver in sloppy mode.
    // Load first argument in a2. a2 = -kPointerSize(sp + n_args << 2).
    __ sll(at, a0, kPointerSizeLog2);
    __ addu(a2, sp, at);
    __ lw(a2, MemOperand(a2, -kPointerSize));
    // a0: actual number of arguments
    // a1: function
    // a2: first argument
    __ JumpIfSmi(a2, &convert_to_object, t2);

    __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
    __ Branch(&use_global_receiver, eq, a2, Operand(a3));
    __ LoadRoot(a3, Heap::kNullValueRootIndex);
    __ Branch(&use_global_receiver, eq, a2, Operand(a3));

    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ GetObjectType(a2, a3, a3);
    __ Branch(&shift_arguments, ge, a3, Operand(FIRST_SPEC_OBJECT_TYPE));

    __ bind(&convert_to_object);
    // Enter an internal frame in order to preserve argument count.
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ sll(a0, a0, kSmiTagSize);  // Smi tagged.
      __ Push(a0, a2);
      __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
      __ mov(a2, v0);

      __ pop(a0);
      __ sra(a0, a0, kSmiTagSize);  // Un-tag.
      // Leave internal frame.
    }

    // Restore the function to a1, and the flag to t0.
    __ sll(at, a0, kPointerSizeLog2);
    __ addu(at, sp, at);
    __ lw(a1, MemOperand(at));
    __ li(t0, Operand(0, RelocInfo::NONE32));
    __ Branch(&patch_receiver);

    __ bind(&use_global_receiver);
    __ lw(a2, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
    __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalReceiverOffset));

    __ bind(&patch_receiver);
    __ sll(at, a0, kPointerSizeLog2);
    __ addu(a3, sp, at);
    __ sw(a2, MemOperand(a3, -kPointerSize));

    __ Branch(&shift_arguments);
  }

  // 3b. Check for function proxy.
  __ bind(&slow);
  __ li(t0, Operand(1, RelocInfo::NONE32));  // Indicate function proxy.
  __ Branch(&shift_arguments, eq, a2, Operand(JS_FUNCTION_PROXY_TYPE));

  __ bind(&non_function);
  __ li(t0, Operand(2, RelocInfo::NONE32));  // Indicate non-function.

  // 3c. Patch the first argument when calling a non-function. The
  //     CALL_NON_FUNCTION builtin expects the non-function callee as
  //     receiver, so overwrite the first argument which will ultimately
  //     become the receiver.
  // a0: actual number of arguments
  // a1: function
  // t0: call type (0: JS function, 1: function proxy, 2: non-function)
  __ sll(at, a0, kPointerSizeLog2);
  __ addu(a2, sp, at);
  __ sw(a1, MemOperand(a2, -kPointerSize));

  // 4. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver). Adjust argument count to make
  //    the original first argument the new receiver.
  // a0: actual number of arguments
  // a1: function
  // t0: call type (0: JS function, 1: function proxy, 2: non-function)
  __ bind(&shift_arguments);
  { Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ sll(at, a0, kPointerSizeLog2);
    __ addu(a2, sp, at);

    __ bind(&loop);
    __ lw(at, MemOperand(a2, -kPointerSize));
    __ sw(at, MemOperand(a2));
    __ Subu(a2, a2, Operand(kPointerSize));
    __ Branch(&loop, ne, a2, Operand(sp));
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ Subu(a0, a0, Operand(1));
    __ Pop();
  }

  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
  //     or a function proxy via CALL_FUNCTION_PROXY.
  // a0: actual number of arguments
  // a1: function
  // t0: call type (0: JS function, 1: function proxy, 2: non-function)
  { Label function, non_proxy;
    __ Branch(&function, eq, t0, Operand(zero_reg));
    // Expected number of arguments is 0 for CALL_NON_FUNCTION.
    __ mov(a2, zero_reg);
    __ Branch(&non_proxy, ne, t0, Operand(1));

    __ push(a1);  // Re-add proxy object as additional argument.
    __ Addu(a0, a0, Operand(1));
    __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    __ bind(&non_proxy);
    __ GetBuiltinFunction(a1, Builtins::CALL_NON_FUNCTION);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
    __ bind(&function);
  }

  // 5b. Get the code to call from the function and check that the number of
  //     expected arguments matches what we're providing. If so, jump
  //     (tail-call) to the code without checking arguments.
  // a0: actual number of arguments
  // a1: function
  __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a2,
        FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
  __ sra(a2, a2, kSmiTagSize);
  // Check formal and actual parameter counts.
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET, ne, a2, Operand(a0));

  __ lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  ParameterCount expected(0);
  __ InvokeCode(a3, expected, expected, JUMP_FUNCTION, NullCallWrapper());
}

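
// Generate_FunctionApply implements Function.prototype.apply. The k*Offset
// constants describe the caller's frame: function, receiver and arguments
// object sit above the frame pointer, while two expression-stack slots hold
// the copy limit and the current argument index (both smi-tagged).
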
void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  const int kIndexOffset    =
      StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize);
  const int kLimitOffset    =
      StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize);
  const int kArgsOffset     = 2 * kPointerSize;
  const int kRecvOffset     = 3 * kPointerSize;
  const int kFunctionOffset = 4 * kPointerSize;

  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);
    __ lw(a0, MemOperand(fp, kFunctionOffset));  // Get the function.
    __ push(a0);
    __ lw(a0, MemOperand(fp, kArgsOffset));  // Get the args array.
    __ push(a0);
    // Returns (in v0) number of arguments to copy to stack as Smi.
    __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);

    // Check the stack for overflow. We are not trying to catch
    // interruptions (e.g. debug break and preemption) here, so the "real stack
    // limit" is checked.
    Label okay;
    __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
    // Make a2 the space we have left. The stack might already be overflowed
    // here which will cause a2 to become negative.
    __ subu(a2, sp, a2);
    // Check if the arguments will overflow the stack.
    __ sll(t3, v0, kPointerSizeLog2 - kSmiTagSize);
    __ Branch(&okay, gt, a2, Operand(t3));  // Signed comparison.

    // Out of stack space.
    __ lw(a1, MemOperand(fp, kFunctionOffset));
    __ Push(a1, v0);
    __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
    // End of stack check.

    // Push current limit and index.
    __ bind(&okay);
    __ mov(a1, zero_reg);
    __ Push(v0, a1);  // Limit and initial index.

    // Get the receiver.
    __ lw(a0, MemOperand(fp, kRecvOffset));

    // Check that the function is a JS function (otherwise it must be a proxy).
    Label push_receiver;
    __ lw(a1, MemOperand(fp, kFunctionOffset));
    __ GetObjectType(a1, a2, a2);
    __ Branch(&push_receiver, ne, a2, Operand(JS_FUNCTION_TYPE));

    // Change context eagerly to get the right global object if necessary.
    __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
    // Load the shared function info while the function is still in a1.
    __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));

    // Compute the receiver.
    // Do not transform the receiver for strict mode functions.
    Label call_to_object, use_global_receiver;
    __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCompilerHintsOffset));
    __ And(t3, a2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
                                 kSmiTagSize)));
    __ Branch(&push_receiver, ne, t3, Operand(zero_reg));

    // Do not transform the receiver for native (Compilerhints already in a2).
    __ And(t3, a2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
    __ Branch(&push_receiver, ne, t3, Operand(zero_reg));

    // Compute the receiver in sloppy mode.
    __ JumpIfSmi(a0, &call_to_object);
    __ LoadRoot(a1, Heap::kNullValueRootIndex);
    __ Branch(&use_global_receiver, eq, a0, Operand(a1));
    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
    __ Branch(&use_global_receiver, eq, a0, Operand(a2));

    // Check if the receiver is already a JavaScript object.
    // a0: receiver
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ GetObjectType(a0, a1, a1);
    __ Branch(&push_receiver, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));

    // Convert the receiver to a regular object.
    // a0: receiver
    __ bind(&call_to_object);
    __ push(a0);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
    __ mov(a0, v0);  // Put object in a0 to match other paths to push_receiver.
    __ Branch(&push_receiver);

    __ bind(&use_global_receiver);
    __ lw(a0, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
    __ lw(a0, FieldMemOperand(a0, GlobalObject::kGlobalReceiverOffset));

    // Push the receiver.
    // a0: receiver
    __ bind(&push_receiver);
    __ push(a0);

    // Copy all arguments from the array to the stack.
    Label entry, loop;
    __ lw(a0, MemOperand(fp, kIndexOffset));
    __ Branch(&entry);

    // Load the current argument from the arguments array and push it to the
    // stack.
    // a0: current argument index
    __ bind(&loop);
    __ lw(a1, MemOperand(fp, kArgsOffset));
    __ Push(a1, a0);

    // Call the runtime to access the property in the arguments array.
    __ CallRuntime(Runtime::kGetProperty, 2);
    __ push(v0);

    // Use inline caching to access the arguments.
    __ lw(a0, MemOperand(fp, kIndexOffset));
    __ Addu(a0, a0, Operand(1 << kSmiTagSize));
    __ sw(a0, MemOperand(fp, kIndexOffset));

    // Test if the copy loop has finished copying all the elements from the
    // arguments object.
    __ bind(&entry);
    __ lw(a1, MemOperand(fp, kLimitOffset));
    __ Branch(&loop, ne, a0, Operand(a1));

    // Call the function.
    Label call_proxy;
    ParameterCount actual(a0);
    __ sra(a0, a0, kSmiTagSize);
    __ lw(a1, MemOperand(fp, kFunctionOffset));
    __ GetObjectType(a1, a2, a2);
    __ Branch(&call_proxy, ne, a2, Operand(JS_FUNCTION_TYPE));

    __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper());

    frame_scope.GenerateLeaveFrame();
    __ Ret(USE_DELAY_SLOT);
    __ Addu(sp, sp, Operand(3 * kPointerSize));  // In delay slot.

    // Call the function proxy.
    __ bind(&call_proxy);
    __ push(a1);  // Add function proxy as last argument.
    __ Addu(a0, a0, Operand(1));
    __ li(a2, Operand(0, RelocInfo::NONE32));
    __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY);
    __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
    // Tear down the internal frame and remove function, receiver and args.
  }

  __ Ret(USE_DELAY_SLOT);
  __ Addu(sp, sp, Operand(3 * kPointerSize));  // In delay slot.
}

static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ sll(a0, a0, kSmiTagSize);
  __ li(t0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ MultiPush(a0.bit() | a1.bit() | t0.bit() | fp.bit() | ra.bit());
  __ Addu(fp, sp,
      Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
}


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- v0 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then tear down the parameters.
  __ lw(a1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                             kPointerSize)));
  __ mov(sp, fp);
  __ MultiPop(fp.bit() | ra.bit());
  __ sll(t0, a1, kPointerSizeLog2 - kSmiTagSize);
  __ Addu(sp, sp, t0);
  // Adjust for the receiver.
  __ Addu(sp, sp, Operand(kPointerSize));
}

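
// The arguments adaptor is entered when the actual argument count differs
// from the callee's expected count: it builds an ARGUMENTS_ADAPTOR frame,
// copies the actual arguments (padding with undefined when too few), and
// then calls the callee's code entry.
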
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // State setup as expected by MacroAssembler::InvokePrologue.
  // ----------- S t a t e -------------
  //  -- a0: actual arguments count
  //  -- a1: function (passed through to callee)
  //  -- a2: expected arguments count
  // -----------------------------------

  Label invoke, dont_adapt_arguments;

  Label enough, too_few;
  __ lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  __ Branch(&dont_adapt_arguments, eq,
      a2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  // We use Uless as the number of arguments should always be greater than 0.
  __ Branch(&too_few, Uless, a0, Operand(a2));

  {  // Enough parameters: actual >= expected.
    // a0: actual number of arguments as a smi
    // a1: function
    // a2: expected number of arguments
    // a3: code entry to call
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into a0 and copy end address into a2.
    __ sll(a0, a0, kPointerSizeLog2 - kSmiTagSize);
    __ Addu(a0, fp, a0);
    // Adjust for return address and receiver.
    __ Addu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address.
    __ sll(a2, a2, kPointerSizeLog2);
    __ subu(a2, a0, a2);

    // Copy the arguments (including the receiver) to the new stack frame.
    // a0: copy start address
    // a1: function
    // a2: copy end address
    // a3: code entry to call

    Label copy;
    __ bind(&copy);
    __ lw(t0, MemOperand(a0));
    __ push(t0);
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a2));
    __ addiu(a0, a0, -kPointerSize);  // In delay slot.

    __ jmp(&invoke);
  }

  {  // Too few parameters: actual < expected.
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into a0 and copy end address is fp.
    // a0: actual number of arguments as a smi
    // a1: function
    // a2: expected number of arguments
    // a3: code entry to call
    __ sll(a0, a0, kPointerSizeLog2 - kSmiTagSize);
    __ Addu(a0, fp, a0);
    // Adjust for return address and receiver.
    __ Addu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address. Also adjust for return address.
    __ Addu(t3, fp, kPointerSize);

    // Copy the arguments (including the receiver) to the new stack frame.
    // a0: copy start address
    // a1: function
    // a2: expected number of arguments
    // a3: code entry to call
    // t3: copy end address

    Label copy;
    __ bind(&copy);
    __ lw(t0, MemOperand(a0));  // Adjusted above for return addr and receiver.
    __ Subu(sp, sp, kPointerSize);
    __ Subu(a0, a0, kPointerSize);
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(t3));
    __ sw(t0, MemOperand(sp));  // In the delay slot.

    // Fill the remaining expected arguments with undefined.
    // a1: function
    // a2: expected number of arguments
    // a3: code entry to call
    __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
    __ sll(t2, a2, kPointerSizeLog2);
    __ Subu(a2, fp, Operand(t2));
    // Adjust for frame.
    __ Subu(a2, a2, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                            2 * kPointerSize));

    Label fill;
    __ bind(&fill);
    __ Subu(sp, sp, kPointerSize);
    __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(a2));
    __ sw(t0, MemOperand(sp));
  }

  // Call the entry point.
  __ bind(&invoke);

  __ Call(a3);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Ret();


  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ Jump(a3);
}

#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_MIPS