// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_ARM

#include "src/codegen.h"
#include "src/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

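// Note on the "__" macro: ACCESS_MASM is V8's standard indirection for
// generated-code files; in normal builds "__ op(...)" simply expands to
// "masm->op(...)", and tracing builds can redefine it to annotate the
// emitted code. (Summary comment added for readability.)
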
void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- r0                 : number of arguments excluding receiver
  //  -- r1                 : called function (only guaranteed when
  //                          extra_args requires it)
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument (argc == r0)
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------

  // Insert extra arguments.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    num_extra_args = 1;
    __ push(r1);
  } else {
    DCHECK(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects r0 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ add(r0, r0, Operand(num_extra_args + 1));
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}

// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the native context.
  __ ldr(result,
         MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ ldr(result,
         FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the InternalArray function from the native context.
  __ ldr(result,
         MemOperand(result,
                    Context::SlotOffset(
                        Context::INTERNAL_ARRAY_FUNCTION_INDEX)));
}

// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the native context.
  __ ldr(result,
         MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ ldr(result,
         FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the Array function from the native context.
  __ ldr(result,
         MemOperand(result,
                    Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
}

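// Both loaders above perform the same two-step context lookup; sketched in
// pseudo-C for clarity (illustrative only, not part of the original source):
//
//   result = cp[Context::GLOBAL_OBJECT_INDEX];   // current global object
//   result = result->native_context;             // its native context
//   result = native_context[FUNCTION_INDEX];     // the builtin function
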
void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, r1);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(r2);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction);
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // tail call a stub
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, r1);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(r2);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
  }

  // Run the native code for the Array function called as a normal function.
  // tail call a stub
  __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

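// The builtin below implements 'new String(...)'. Its JavaScript-level
// behaviour is roughly (illustrative sketch, not from the original source):
//
//   new String()      -> String wrapper object around the empty string
//   new String(value) -> String wrapper object around ToString(value)
//
// The fast path consults the number-to-string cache; other inputs fall
// through to the TO_STRING builtin, and a failed allocation goes to the
// runtime.
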
void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0                     : number of arguments
  //  -- r1                     : constructor function
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_ctor_calls(), 1, r2, r3);

  Register function = r1;
  if (FLAG_debug_code) {
    __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, r2);
    __ cmp(function, Operand(r2));
    __ Assert(eq, kUnexpectedStringFunction);
  }

  // Load the first argument in r0 and get rid of the rest.
  Label no_arguments;
  __ cmp(r0, Operand::Zero());
  __ b(eq, &no_arguments);
  // First args = sp[(argc - 1) * 4].
  __ sub(r0, r0, Operand(1));
  __ ldr(r0, MemOperand(sp, r0, LSL, kPointerSizeLog2, PreIndex));
  // sp now points to args[0], drop args[0] + receiver.
  __ Drop(2);

  Register argument = r2;
  Label not_cached, argument_is_string;
  __ LookupNumberStringCache(r0,        // Input.
                             argument,  // Result.
                             r3,        // Scratch.
                             r4,        // Scratch.
                             r5,        // Scratch.
                             &not_cached);
  __ IncrementCounter(counters->string_ctor_cached_number(), 1, r3, r4);
  __ bind(&argument_is_string);

  // ----------- S t a t e -------------
  //  -- r2     : argument converted to string
  //  -- r1     : constructor function
  //  -- lr     : return address
  // -----------------------------------

  Label gc_required;
  __ Allocate(JSValue::kSize,
              r0,  // Result.
              r3,  // Scratch.
              r4,  // Scratch.
              &gc_required,
              TAG_OBJECT);

  // Initialising the String Object.
  Register map = r3;
  __ LoadGlobalFunctionInitialMap(function, map, r4);
  if (FLAG_debug_code) {
    __ ldrb(r4, FieldMemOperand(map, Map::kInstanceSizeOffset));
    __ cmp(r4, Operand(JSValue::kSize >> kPointerSizeLog2));
    __ Assert(eq, kUnexpectedStringWrapperInstanceSize);
    __ ldrb(r4, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
    __ cmp(r4, Operand::Zero());
    __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper);
  }
  __ str(map, FieldMemOperand(r0, HeapObject::kMapOffset));

  __ LoadRoot(r3, Heap::kEmptyFixedArrayRootIndex);
  __ str(r3, FieldMemOperand(r0, JSObject::kPropertiesOffset));
  __ str(r3, FieldMemOperand(r0, JSObject::kElementsOffset));

  __ str(argument, FieldMemOperand(r0, JSValue::kValueOffset));

  // Ensure the object is fully initialized.
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);

  __ Ret();

  // The argument was not found in the number to string cache. Check
  // if it's a string already before calling the conversion builtin.
  Label convert_argument;
  __ bind(&not_cached);
  __ JumpIfSmi(r0, &convert_argument);

  // Is it a String?
  __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  __ tst(r3, Operand(kIsNotStringMask));
  __ b(ne, &convert_argument);
  __ mov(argument, r0);
  __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4);
  __ b(&argument_is_string);

  // Invoke the conversion builtin and put the result into r2.
  __ bind(&convert_argument);
  __ push(function);  // Preserve the function.
  __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ push(r0);
    __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
  }
  __ pop(function);
  __ mov(argument, r0);
  __ b(&argument_is_string);

  // Load the empty string into r2, remove the receiver from the
  // stack, and jump back to the case where the argument is a string.
  __ bind(&no_arguments);
  __ LoadRoot(argument, Heap::kempty_stringRootIndex);
  __ Drop(1);
  __ b(&argument_is_string);

  // At this point the argument is already a string. Call runtime to
  // create a string wrapper.
  __ bind(&gc_required);
  __ IncrementCounter(counters->string_ctor_gc_required(), 1, r3, r4);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ push(argument);
    __ CallRuntime(Runtime::kNewStringWrapper, 1);
  }
  __ Ret();
}

static void CallRuntimePassFunction(
    MacroAssembler* masm, Runtime::FunctionId function_id) {
  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack.
  __ push(r1);
  // Push function as parameter to the runtime call.
  __ Push(r1);

  __ CallRuntime(function_id, 1);
  // Restore receiver.
  __ pop(r1);
}

static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCodeOffset));
  __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(r2);
}

static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
  __ add(r0, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(r0);
}

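// CallRuntimePassFunction leaves the runtime call's result (a Code object)
// in r0, and GenerateTailCallToReturnedCode then jumps past the Code header
// to its first instruction. A typical pairing is Generate_CompileLazy below:
//
//   CallRuntimePassFunction(masm, Runtime::kCompileLazy);
//   GenerateTailCallToReturnedCode(masm);
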
void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive. A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  __ cmp(sp, Operand(ip));
  __ b(hs, &ok);

  CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
  GenerateTailCallToReturnedCode(masm);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}

static void Generate_Runtime_NewObject(MacroAssembler* masm,
                                       bool create_memento,
                                       Register original_constructor,
                                       Label* count_incremented,
                                       Label* allocated) {
  if (create_memento) {
    // Get the cell or allocation site.
    __ ldr(r2, MemOperand(sp, 2 * kPointerSize));
    __ push(r2);
  }

  __ push(r1);                    // argument for Runtime_NewObject
  __ push(original_constructor);  // original constructor
  if (create_memento) {
    __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 3);
  } else {
    __ CallRuntime(Runtime::kNewObject, 2);
  }
  __ mov(r4, r0);

  // Runtime_NewObjectWithAllocationSite increments allocation count.
  // Skip the increment.
  if (create_memento) {
    __ jmp(count_incremented);
  } else {
    __ jmp(allocated);
  }
}

static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_memento) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- r1     : constructor function
  //  -- r2     : allocation site or undefined
  //  -- r3     : original constructor
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  // Should never create mementos for api functions.
  DCHECK(!is_api_function || !create_memento);

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);

    if (create_memento) {
      __ AssertUndefinedOrAllocationSite(r2, r4);
      __ push(r2);
    }

    // Preserve the two incoming parameters on the stack.
    __ SmiTag(r0);
    __ push(r0);  // Smi-tagged arguments count.
    __ push(r1);  // Constructor function.

    Label rt_call, allocated, normal_new, count_incremented;
    __ cmp(r1, r3);
    __ b(eq, &normal_new);

    // Original constructor and function are different.
    Generate_Runtime_NewObject(masm, create_memento, r3, &count_incremented,
                               &allocated);
    __ bind(&normal_new);

    // Try to allocate the object without transitioning into C code. If any of
    // the preconditions is not met, the code bails out to the runtime call.
    if (FLAG_inline_new) {
      Label undo_allocation;
      ExternalReference debug_step_in_fp =
          ExternalReference::debug_step_in_fp_address(isolate);
      __ mov(r2, Operand(debug_step_in_fp));
      __ ldr(r2, MemOperand(r2));
      __ tst(r2, r2);
      __ b(ne, &rt_call);

      // Load the initial map and verify that it is in fact a map.
      // r1: constructor function
      __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
      __ JumpIfSmi(r2, &rt_call);
      __ CompareObjectType(r2, r3, r4, MAP_TYPE);
      __ b(ne, &rt_call);

      // Check that the constructor is not constructing a JSFunction (see
      // comments in Runtime_NewObject in runtime.cc). In which case the
      // initial map's instance type would be JS_FUNCTION_TYPE.
      // r1: constructor function
      // r2: initial map
      __ CompareInstanceType(r2, r3, JS_FUNCTION_TYPE);
      __ b(eq, &rt_call);

      if (!is_api_function) {
        Label allocate;
        MemOperand bit_field3 = FieldMemOperand(r2, Map::kBitField3Offset);
        // Check if slack tracking is enabled.
        __ ldr(r4, bit_field3);
        __ DecodeField<Map::Counter>(r3, r4);
        __ cmp(r3, Operand(Map::kSlackTrackingCounterEnd));
        __ b(lt, &allocate);
        // Decrease generous allocation count.
        __ sub(r4, r4, Operand(1 << Map::Counter::kShift));
        __ str(r4, bit_field3);
        __ cmp(r3, Operand(Map::kSlackTrackingCounterEnd));
        __ b(ne, &allocate);

        __ push(r1);

        __ Push(r2, r1);  // r1 = constructor
        __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);

        __ pop(r2);
        __ pop(r1);

        __ bind(&allocate);
      }

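      // Slack tracking summary (assuming the Map::Counter encoding used by
      // this V8 version): a fresh initial map carries a small construction
      // counter in bit field 3. Each inline allocation decrements it, and
      // once it reaches kSlackTrackingCounterEnd the runtime call above
      // (Runtime::kFinalizeInstanceSize) shrinks the instance size to what
      // constructed objects actually use.
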
      // Now allocate the JSObject on the heap.
      // r1: constructor function
      // r2: initial map
      __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset));
      if (create_memento) {
        __ add(r3, r3, Operand(AllocationMemento::kSize / kPointerSize));
      }

      __ Allocate(r3, r4, r5, r6, &rt_call, SIZE_IN_WORDS);

      // Allocated the JSObject, now initialize the fields. Map is set to
      // initial map and properties and elements are set to empty fixed array.
      // r1: constructor function
      // r2: initial map
      // r3: object size (not including memento if create_memento)
      // r4: JSObject (not tagged)
      __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
      __ mov(r5, r4);
      DCHECK_EQ(0 * kPointerSize, JSObject::kMapOffset);
      __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
      DCHECK_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
      __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
      DCHECK_EQ(2 * kPointerSize, JSObject::kElementsOffset);
      __ str(r6, MemOperand(r5, kPointerSize, PostIndex));

      // Fill all the in-object properties with the appropriate filler.
      // r1: constructor function
      // r2: initial map
      // r3: object size (in words, including memento if create_memento)
      // r4: JSObject (not tagged)
      // r5: First in-object property of JSObject (not tagged)
      DCHECK_EQ(3 * kPointerSize, JSObject::kHeaderSize);
      __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);

      if (!is_api_function) {
        Label no_inobject_slack_tracking;

        // Check if slack tracking is enabled.
        __ ldr(ip, FieldMemOperand(r2, Map::kBitField3Offset));
        __ DecodeField<Map::Counter>(ip);
        __ cmp(ip, Operand(Map::kSlackTrackingCounterEnd));
        __ b(lt, &no_inobject_slack_tracking);

        // Allocate object with a slack.
        __ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset));
        __ Ubfx(r0, r0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte,
                kBitsPerByte);
        __ add(r0, r5, Operand(r0, LSL, kPointerSizeLog2));
        // r0: offset of first field after pre-allocated fields
        if (FLAG_debug_code) {
          __ add(ip, r4, Operand(r3, LSL, kPointerSizeLog2));  // End of object.
          __ cmp(r0, ip);
          __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields);
        }
        __ InitializeFieldsWithFiller(r5, r0, r6);
        // To allow for truncation.
        __ LoadRoot(r6, Heap::kOnePointerFillerMapRootIndex);
        // Fill the remaining fields with one pointer filler map.

        __ bind(&no_inobject_slack_tracking);
      }

      if (create_memento) {
        __ sub(ip, r3, Operand(AllocationMemento::kSize / kPointerSize));
        __ add(r0, r4, Operand(ip, LSL, kPointerSizeLog2));  // End of object.
        __ InitializeFieldsWithFiller(r5, r0, r6);

        // Fill in memento fields.
        // r5: points to the allocated but uninitialized memento.
        __ LoadRoot(r6, Heap::kAllocationMementoMapRootIndex);
        DCHECK_EQ(0 * kPointerSize, AllocationMemento::kMapOffset);
        __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
        // Load the AllocationSite
        __ ldr(r6, MemOperand(sp, 2 * kPointerSize));
        DCHECK_EQ(1 * kPointerSize, AllocationMemento::kAllocationSiteOffset);
        __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
      } else {
        __ add(r0, r4, Operand(r3, LSL, kPointerSizeLog2));  // End of object.
        __ InitializeFieldsWithFiller(r5, r0, r6);
      }

      // Add the object tag to make the JSObject real, so that we can continue
      // and jump into the continuation code at any time from now on. Any
      // failures need to undo the allocation, so that the heap is in a
      // consistent state and verifiable.
      __ add(r4, r4, Operand(kHeapObjectTag));

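      // Layout of the object just allocated, reconstructed from the DCHECKs
      // above (word offsets from the untagged start in r4):
      //   [0]   map (r2)
      //   [1]   properties (empty fixed array)
      //   [2]   elements (empty fixed array)
      //   [3..] in-object fields, filled with undefined or the one-pointer
      //         filler map
      //   [end] optional AllocationMemento {map, allocation site}
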
      // Check if a non-empty properties array is needed. Continue with
      // allocated object if not; fall through to runtime call if it is.
      // r1: constructor function
      // r4: JSObject
      // r5: start of next object (not tagged)
      __ ldrb(r3, FieldMemOperand(r2, Map::kUnusedPropertyFieldsOffset));
      // The field instance sizes contains both pre-allocated property fields
      // and in-object properties.
      __ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset));
      __ Ubfx(r6, r0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte,
              kBitsPerByte);
      __ add(r3, r3, Operand(r6));
      __ Ubfx(r6, r0, Map::kInObjectPropertiesByte * kBitsPerByte,
              kBitsPerByte);
      __ sub(r3, r3, Operand(r6), SetCC);

      // Done if no extra properties are to be allocated.
      __ b(eq, &allocated);
      __ Assert(pl, kPropertyAllocationCountFailed);

      // Scale the number of elements by pointer size and add the header for
      // FixedArrays to the start of the next object calculation from above.
      // r1: constructor function
      // r3: number of elements in properties array
      // r4: JSObject
      // r5: start of next object
      __ add(r0, r3, Operand(FixedArray::kHeaderSize / kPointerSize));
      __ Allocate(
          r0,
          r5,
          r6,
          r2,
          &undo_allocation,
          static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | SIZE_IN_WORDS));

      // Initialize the FixedArray.
      // r1: constructor function
      // r3: number of elements in properties array
      // r4: JSObject
      // r5: FixedArray (not tagged)
      __ LoadRoot(r6, Heap::kFixedArrayMapRootIndex);
      __ mov(r2, r5);
      DCHECK_EQ(0 * kPointerSize, JSObject::kMapOffset);
      __ str(r6, MemOperand(r2, kPointerSize, PostIndex));
      DCHECK_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
      __ SmiTag(r0, r3);
      __ str(r0, MemOperand(r2, kPointerSize, PostIndex));

      // Initialize the fields to undefined.
      // r1: constructor function
      // r2: First element of FixedArray (not tagged)
      // r3: number of elements in properties array
      // r4: JSObject
      // r5: FixedArray (not tagged)
      __ add(r6, r2, Operand(r3, LSL, kPointerSizeLog2));  // End of object.
      DCHECK_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
      { Label loop, entry;
        __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
        __ b(&entry);
        __ bind(&loop);
        __ str(r0, MemOperand(r2, kPointerSize, PostIndex));
        __ bind(&entry);
        __ cmp(r2, r6);
        __ b(lt, &loop);
      }

      // Store the initialized FixedArray into the properties field of
      // the JSObject.
      // r1: constructor function
      // r4: JSObject
      // r5: FixedArray (not tagged)
      __ add(r5, r5, Operand(kHeapObjectTag));  // Add the heap tag.
      __ str(r5, FieldMemOperand(r4, JSObject::kPropertiesOffset));

      // Continue with JSObject being successfully allocated
      // r1: constructor function
      // r4: JSObject
      __ jmp(&allocated);

      // Undo the setting of the new top so that the heap is verifiable. For
      // example, the map's unused properties potentially do not match the
      // allocated objects unused properties.
      // r4: JSObject (previous new top)
      __ bind(&undo_allocation);
      __ UndoAllocationInNewSpace(r4, r5);
    }

    // Allocate the new receiver object using the runtime call.
    // r1: constructor function
    __ bind(&rt_call);
    Generate_Runtime_NewObject(masm, create_memento, r1, &count_incremented,
                               &allocated);

    // Receiver for constructor call allocated.
    // r4: JSObject
    __ bind(&allocated);

    if (create_memento) {
      __ ldr(r2, MemOperand(sp, kPointerSize * 2));
      __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
      __ cmp(r2, r5);
      __ b(eq, &count_incremented);
      // r2 is an AllocationSite. We are creating a memento from it, so we
      // need to increment the memento create count.
      __ ldr(r3, FieldMemOperand(r2,
                                 AllocationSite::kPretenureCreateCountOffset));
      __ add(r3, r3, Operand(Smi::FromInt(1)));
      __ str(r3, FieldMemOperand(r2,
                                 AllocationSite::kPretenureCreateCountOffset));
      __ bind(&count_incremented);
    }

    __ push(r4);
    __ push(r4);

    // Reload the number of arguments and the constructor from the stack.
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: constructor function
    // sp[3]: number of arguments (smi-tagged)
    __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
    __ ldr(r3, MemOperand(sp, 3 * kPointerSize));

    // Set up pointer to last argument.
    __ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Set up number of arguments for function call below
    __ SmiUntag(r0, r3);

    // Copy arguments and receiver to the expression stack.
    // r0: number of arguments
    // r1: constructor function
    // r2: address of last argument (caller sp)
    // r3: number of arguments (smi-tagged)
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: constructor function
    // sp[3]: number of arguments (smi-tagged)
    Label loop, entry;
    __ b(&entry);
    __ bind(&loop);
    __ ldr(ip, MemOperand(r2, r3, LSL, kPointerSizeLog2 - 1));
    __ push(ip);
    __ bind(&entry);
    __ sub(r3, r3, Operand(2), SetCC);
    __ b(ge, &loop);

    // Call the function.
    // r0: number of arguments
    // r1: constructor function
    if (is_api_function) {
      __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      __ Call(code, RelocInfo::CODE_TARGET);
    } else {
      ParameterCount actual(r0);
      __ InvokeFunction(r1, actual, CALL_FUNCTION, NullCallWrapper());
    }

    // Store offset of return address for deoptimizer.
    if (!is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    // r0: result
    // sp[0]: receiver
    // sp[1]: constructor function
    // sp[2]: number of arguments (smi-tagged)
    __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7
    // on page 74.
    Label use_receiver, exit;

    // If the result is a smi, it is *not* an object in the ECMA sense.
    // r0: result
    // sp[0]: receiver (newly allocated object)
    // sp[1]: constructor function
    // sp[2]: number of arguments (smi-tagged)
    __ JumpIfSmi(r0, &use_receiver);

    // If the type of the result (stored in its map) is less than
    // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
    __ CompareObjectType(r0, r1, r3, FIRST_SPEC_OBJECT_TYPE);
    __ b(ge, &exit);

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ ldr(r0, MemOperand(sp));

    // Remove receiver from the stack, remove caller arguments, and
    // return.
    __ bind(&exit);
    // r0: result
    // sp[0]: receiver (newly allocated object)
    // sp[1]: constructor function
    // sp[2]: number of arguments (smi-tagged)
    __ ldr(r1, MemOperand(sp, 2 * kPointerSize));

    // Leave construct frame.
  }

  __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - 1));
  __ add(sp, sp, Operand(kPointerSize));
  __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r1, r2);
  __ Jump(lr);
}

void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false);
}

void Builtins::Generate_JSConstructStubForDerived(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- r1     : constructor function
  //  -- r2     : allocation site or undefined
  //  -- r3     : original constructor
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  // TODO(dslomov): support pretenuring
  CHECK(!FLAG_pretenuring_call_new);

  {
    FrameScope frame_scope(masm, StackFrame::CONSTRUCT);

    __ mov(r4, r0);
    __ SmiTag(r4);
    __ push(r4);  // Smi-tagged arguments count.

    // Push new.target.
    __ push(r3);

    // receiver is the hole.
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
    __ push(ip);

    // Set up pointer to last argument.
    __ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    // r0: number of arguments
    // r1: constructor function
    // r2: address of last argument (caller sp)
    // r4: number of arguments (smi-tagged)
    // sp[0]: receiver
    // sp[1]: new.target
    // sp[2]: number of arguments (smi-tagged)
    Label loop, entry;
    __ b(&entry);
    __ bind(&loop);
    __ ldr(ip, MemOperand(r2, r4, LSL, kPointerSizeLog2 - 1));
    __ push(ip);
    __ bind(&entry);
    __ sub(r4, r4, Operand(2), SetCC);
    __ b(ge, &loop);

    __ add(r0, r0, Operand(1));

    // Handle step in.
    Label skip_step_in;
    ExternalReference debug_step_in_fp =
        ExternalReference::debug_step_in_fp_address(masm->isolate());
    __ mov(r2, Operand(debug_step_in_fp));
    __ ldr(r2, MemOperand(r2));
    __ tst(r2, r2);
    __ b(eq, &skip_step_in);

    __ Push(r0);
    __ Push(r1);
    __ Push(r1);
    __ CallRuntime(Runtime::kHandleStepInForDerivedConstructors, 1);
    __ Pop(r1);
    __ Pop(r0);

    __ bind(&skip_step_in);

    // Call the function.
    // r0: number of arguments
    // r1: constructor function
    ParameterCount actual(r0);
    __ InvokeFunction(r1, actual, CALL_FUNCTION, NullCallWrapper());

    // Restore context from the frame.
    // r0: result
    // sp[0]: number of arguments (smi-tagged)
    __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
    __ ldr(r1, MemOperand(sp, 0));

    // Leave construct frame.
  }

  __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - 1));
  __ add(sp, sp, Operand(kPointerSize));
  __ Jump(lr);
}

static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from Generate_JS_Entry
  // r0: code entry
  // r1: function
  // r2: receiver
  // r3: argc
  // r4: argv
  // r5-r6, r8 (if not FLAG_enable_ool_constant_pool) and cp may be clobbered
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Clear the context before we push it when entering the internal frame.
  __ mov(cp, Operand::Zero());

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context from the function argument.
    __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

    __ InitializeRootRegister();

    // Push the function and the receiver onto the stack.
    __ push(r1);
    __ push(r2);

    // Copy arguments to the stack in a loop.
    // r1: function
    // r3: argc
    // r4: argv, i.e. points to first arg
    Label loop, entry;
    __ add(r2, r4, Operand(r3, LSL, kPointerSizeLog2));
    // r2 points past last arg.
    __ b(&entry);
    __ bind(&loop);
    __ ldr(r0, MemOperand(r4, kPointerSize, PostIndex));  // read next parameter
    __ ldr(r0, MemOperand(r0));  // dereference handle
    __ push(r0);  // push parameter
    __ bind(&entry);
    __ cmp(r4, r2);
    __ b(ne, &loop);

    // Initialize all JavaScript callee-saved registers, since they will be seen
    // by the garbage collector as part of handlers.
    __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
    __ mov(r5, Operand(r4));
    __ mov(r6, Operand(r4));
    if (!FLAG_enable_ool_constant_pool) {
      __ mov(r8, Operand(r4));
    }
    if (kR9Available == 1) {
      __ mov(r9, Operand(r4));
    }

    // Invoke the code and pass argc as r0.
    __ mov(r0, Operand(r3));
    if (is_construct) {
      // No type feedback cell is available
      __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
      CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
      __ CallStub(&stub);
    } else {
      ParameterCount actual(r0);
      __ InvokeFunction(r1, actual, CALL_FUNCTION, NullCallWrapper());
    }
    // Exit the JS frame and remove the parameters (except function), and
    // return.
    // Respect ABI stack constraint.
  }
  __ Jump(lr);

  // r0: result
}

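// Note: the entries in argv are handle locations rather than raw object
// pointers, which is why the copy loop above loads twice -- once to fetch
// the slot and once to dereference the handle.
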
void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}

void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kCompileLazy);
  GenerateTailCallToReturnedCode(masm);
}

static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack.
  __ push(r1);
  // Push function as parameter to the runtime call.
  __ Push(r1);
  // Whether to compile in a background thread.
  __ Push(masm->isolate()->factory()->ToBoolean(concurrent));

  __ CallRuntime(Runtime::kCompileOptimized, 2);
  // Restore receiver.
  __ pop(r1);
}


void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  CallCompileOptimized(masm, false);
  GenerateTailCallToReturnedCode(masm);
}


void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  CallCompileOptimized(masm, true);
  GenerateTailCallToReturnedCode(masm);
}

static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   r0 - contains return address (beginning of patch sequence)
  //   r1 - isolate
  FrameScope scope(masm, StackFrame::MANUAL);
  __ stm(db_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit());
  __ PrepareCallCFunction(2, 0, r2);
  __ mov(r1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  __ ldm(ia_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit());
  __ mov(pc, r0);
}

#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}                                                            \
void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR

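// CODE_AGE_LIST expands the macro above once per code age, so each age gets
// an even-marking and an odd-marking "make young again" builtin, all of
// which funnel into GenerateMakeCodeYoungAgainCommon.
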
void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
  // that make_code_young doesn't do any garbage collection which allows us to
  // save/restore the registers without worrying about which of them contain
  // pointers.

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   r0 - contains return address (beginning of patch sequence)
  //   r1 - isolate
  FrameScope scope(masm, StackFrame::MANUAL);
  __ stm(db_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit());
  __ PrepareCallCFunction(2, 0, r2);
  __ mov(r1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(ExternalReference::get_mark_code_as_executed_function(
        masm->isolate()), 2);
  __ ldm(ia_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit());

  // Perform prologue operations usually performed by the young code stub.
  __ PushFixedFrame(r1);
  __ add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));

  // Jump to point after the code-age stub.
  __ add(r0, r0, Operand(kNoCodeAgeSequenceLength));
  __ mov(pc, r0);
}


void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}

static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    __ stm(db_w, sp, kJSCallerSaved | kCalleeSaved);
    // Pass the function and deoptimization type to the runtime system.
    __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
    __ ldm(ia_w, sp, kJSCallerSaved | kCalleeSaved);
  }

  __ add(sp, sp, Operand(kPointerSize));  // Ignore state
  __ mov(pc, lr);  // Jump to miss handler
}


void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}


void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}

static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Pass the function and deoptimization type to the runtime system.
    __ mov(r0, Operand(Smi::FromInt(static_cast<int>(type))));
    __ push(r0);
    __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
  }

  // Get the full codegen state from the stack and untag it -> r6.
  __ ldr(r6, MemOperand(sp, 0 * kPointerSize));
  __ SmiUntag(r6);
  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ cmp(r6, Operand(FullCodeGenerator::NO_REGISTERS));
  __ b(ne, &with_tos_register);
  __ add(sp, sp, Operand(1 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&with_tos_register);
  __ ldr(r0, MemOperand(sp, 1 * kPointerSize));
  __ cmp(r6, Operand(FullCodeGenerator::TOS_REG));
  __ b(ne, &unknown_state);
  __ add(sp, sp, Operand(2 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&unknown_state);
  __ stop("no cases left");
}


void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}

void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ push(r0);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
  }

  // If the code object is null, just return to the unoptimized code.
  Label skip;
  __ cmp(r0, Operand(Smi::FromInt(0)));
  __ b(ne, &skip);
  __ Ret();

  __ bind(&skip);

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ ldr(r1, FieldMemOperand(r0, Code::kDeoptimizationDataOffset));

  { ConstantPoolUnavailableScope constant_pool_unavailable(masm);
    if (FLAG_enable_ool_constant_pool) {
      __ ldr(pp, FieldMemOperand(r0, Code::kConstantPoolOffset));
    }

    // Load the OSR entrypoint offset from the deoptimization data.
    // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
    __ ldr(r1, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(
        DeoptimizationInputData::kOsrPcOffsetIndex)));

    // Compute the target address = code_obj + header_size + osr_offset
    // <entry_addr> = <code_obj> + #header_size + <osr_offset>
    __ add(r0, r0, Operand::SmiUntag(r1));
    __ add(lr, r0, Operand(Code::kHeaderSize - kHeapObjectTag));

    // And "return" to the OSR entry point of the function.
    __ Ret();
  }
}

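// The entry address computed above, in pseudo-C (illustrative):
//
//   entry = code_obj + (Code::kHeaderSize - kHeapObjectTag)
//         + Smi::value(deopt_data[kOsrPcOffsetIndex]);
//
// It is placed in lr, so the Ret() above "returns" directly to the OSR
// entry point of the optimized code.
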
void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  // We check the stack limit as indicator that recompilation might be done.
  Label ok;
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  __ cmp(sp, Operand(ip));
  __ b(hs, &ok);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kStackGuard, 0);
  }
  __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
          RelocInfo::CODE_TARGET);

  __ bind(&ok);
  __ Ret();
}

void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // r0: actual number of arguments
  { Label done;
    __ cmp(r0, Operand::Zero());
    __ b(ne, &done);
    __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
    __ push(r2);
    __ add(r0, r0, Operand(1));
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack, check
  //    if it is a function.
  // r0: actual number of arguments
  Label slow, non_function;
  __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
  __ JumpIfSmi(r1, &non_function);
  __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
  __ b(ne, &slow);

  // 3a. Patch the first argument if necessary when calling a function.
  // r0: actual number of arguments
  // r1: function
  Label shift_arguments;
  __ mov(r4, Operand::Zero());  // indicate regular JS_FUNCTION
  { Label convert_to_object, use_global_proxy, patch_receiver;
    // Change context eagerly in case we need the global receiver.
    __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

    // Do not transform the receiver for strict mode functions.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
    __ ldr(r3, FieldMemOperand(r2, SharedFunctionInfo::kCompilerHintsOffset));
    __ tst(r3, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
                             kSmiTagSize)));
    __ b(ne, &shift_arguments);

    // Do not transform the receiver for native (Compilerhints already in r3).
    __ tst(r3, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
    __ b(ne, &shift_arguments);

    // Compute the receiver in sloppy mode.
    __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));
    __ ldr(r2, MemOperand(r2, -kPointerSize));
    // r0: actual number of arguments
    // r1: function
    // r2: first argument
    __ JumpIfSmi(r2, &convert_to_object);

    __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
    __ cmp(r2, r3);
    __ b(eq, &use_global_proxy);
    __ LoadRoot(r3, Heap::kNullValueRootIndex);
    __ cmp(r2, r3);
    __ b(eq, &use_global_proxy);

    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CompareObjectType(r2, r3, r3, FIRST_SPEC_OBJECT_TYPE);
    __ b(ge, &shift_arguments);

    __ bind(&convert_to_object);

    {
      // Enter an internal frame in order to preserve argument count.
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ SmiTag(r0);
      __ push(r0);

      __ push(r2);
      __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
      __ mov(r2, r0);

      __ pop(r0);
      __ SmiUntag(r0);

      // Exit the internal frame.
    }

    // Restore the function to r1, and the flag to r4.
    __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
    __ mov(r4, Operand::Zero());
    __ jmp(&patch_receiver);

    __ bind(&use_global_proxy);
    __ ldr(r2, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
    __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalProxyOffset));

    __ bind(&patch_receiver);
    __ add(r3, sp, Operand(r0, LSL, kPointerSizeLog2));
    __ str(r2, MemOperand(r3, -kPointerSize));

    __ jmp(&shift_arguments);
  }

  // 3b. Check for function proxy.
  __ bind(&slow);
  __ mov(r4, Operand(1, RelocInfo::NONE32));  // indicate function proxy
  __ cmp(r2, Operand(JS_FUNCTION_PROXY_TYPE));
  __ b(eq, &shift_arguments);
  __ bind(&non_function);
  __ mov(r4, Operand(2, RelocInfo::NONE32));  // indicate non-function

  // 3c. Patch the first argument when calling a non-function. The
  //     CALL_NON_FUNCTION builtin expects the non-function callee as
  //     receiver, so overwrite the first argument which will ultimately
  //     become the receiver.
  // r0: actual number of arguments
  // r1: function
  // r4: call type (0: JS function, 1: function proxy, 2: non-function)
  __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));
  __ str(r1, MemOperand(r2, -kPointerSize));

  // 4. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver). Adjust argument count to make
  //    the original first argument the new receiver.
  // r0: actual number of arguments
  // r1: function
  // r4: call type (0: JS function, 1: function proxy, 2: non-function)
  __ bind(&shift_arguments);
  { Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));

    __ bind(&loop);
    __ ldr(ip, MemOperand(r2, -kPointerSize));
    __ str(ip, MemOperand(r2));
    __ sub(r2, r2, Operand(kPointerSize));
    __ cmp(r2, sp);
    __ b(ne, &loop);
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ sub(r0, r0, Operand(1));
    __ pop();
  }

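  // Effect of the shift loop for argc == 2 (illustrative sketch):
  //
  //   before: sp[0] = arg2, sp[4] = arg1, sp[8] = receiver
  //   after:  sp[0] = arg2, sp[4] = arg1
  //
  // Every slot moved one word up, the leftover copy of the last argument
  // was popped, and r0 was decremented, so the original first argument now
  // sits in the receiver position.
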
  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
  //     or a function proxy via CALL_FUNCTION_PROXY.
  // r0: actual number of arguments
  // r1: function
  // r4: call type (0: JS function, 1: function proxy, 2: non-function)
  { Label function, non_proxy;
    __ tst(r4, r4);
    __ b(eq, &function);
    // Expected number of arguments is 0 for CALL_NON_FUNCTION.
    __ mov(r2, Operand::Zero());
    __ cmp(r4, Operand(1));
    __ b(ne, &non_proxy);

    __ push(r1);  // re-add proxy object as additional argument
    __ add(r0, r0, Operand(1));
    __ GetBuiltinFunction(r1, Builtins::CALL_FUNCTION_PROXY);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    __ bind(&non_proxy);
    __ GetBuiltinFunction(r1, Builtins::CALL_NON_FUNCTION);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
    __ bind(&function);
  }

  // 5b. Get the code to call from the function and check that the number of
  //     expected arguments matches what we're providing. If so, jump
  //     (tail-call) to the code in register r3 without checking arguments.
  // r0: actual number of arguments
  // r1: function
  __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r2,
         FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
  __ SmiUntag(r2);
  __ cmp(r2, r0);  // Check formal and actual parameter counts.
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET,
          ne);

  __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
  ParameterCount expected(0);
  __ InvokeCode(r3, expected, expected, JUMP_FUNCTION, NullCallWrapper());
}

void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  const int kIndexOffset =
      StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize);
  const int kLimitOffset =
      StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize);
  const int kArgsOffset = 2 * kPointerSize;
  const int kRecvOffset = 3 * kPointerSize;
  const int kFunctionOffset = 4 * kPointerSize;

  {
    FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL);

    __ ldr(r0, MemOperand(fp, kFunctionOffset));  // get the function
    __ push(r0);
    __ ldr(r0, MemOperand(fp, kArgsOffset));  // get the args array
    __ push(r0);
    __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);

    // Check the stack for overflow. We are not trying to catch
    // interruptions (e.g. debug break and preemption) here, so the "real stack
    // limit" is checked.
    Label okay;
    __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
    // Make r2 the space we have left. The stack might already be overflowed
    // here which will cause r2 to become negative.
    __ sub(r2, sp, r2);
    // Check if the arguments will overflow the stack.
    __ cmp(r2, Operand::PointerOffsetFromSmiKey(r0));
    __ b(gt, &okay);  // Signed comparison.

    // Out of stack space.
    __ ldr(r1, MemOperand(fp, kFunctionOffset));
    __ Push(r1, r0);
    __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
    // End of stack check.

    // Push current limit and index.
    __ bind(&okay);
    __ push(r0);  // limit
    __ mov(r1, Operand::Zero());  // initial index
    __ push(r1);

    // Get the receiver.
    __ ldr(r0, MemOperand(fp, kRecvOffset));

    // Check that the function is a JS function (otherwise it must be a proxy).
    Label push_receiver;
    __ ldr(r1, MemOperand(fp, kFunctionOffset));
    __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
    __ b(ne, &push_receiver);

    // Change context eagerly to get the right global object if necessary.
    __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
    // Load the shared function info while the function is still in r1.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));

    // Compute the receiver.
    // Do not transform the receiver for strict mode functions.
    Label call_to_object, use_global_proxy;
    __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCompilerHintsOffset));
    __ tst(r2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
                             kSmiTagSize)));
    __ b(ne, &push_receiver);

    // Do not transform the receiver for native (Compilerhints already in r2).
    __ tst(r2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
    __ b(ne, &push_receiver);

    // Compute the receiver in sloppy mode.
    __ JumpIfSmi(r0, &call_to_object);
    __ LoadRoot(r1, Heap::kNullValueRootIndex);
    __ cmp(r0, r1);
    __ b(eq, &use_global_proxy);
    __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
    __ cmp(r0, r1);
    __ b(eq, &use_global_proxy);

    // Check if the receiver is already a JavaScript object.
    // r0: receiver
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
    __ b(ge, &push_receiver);

    // Convert the receiver to a regular object.
    // r0: receiver
    __ bind(&call_to_object);
    __ push(r0);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
    __ b(&push_receiver);

    __ bind(&use_global_proxy);
    __ ldr(r0, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
    __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalProxyOffset));

    // Push the receiver.
    // r0: receiver
    __ bind(&push_receiver);
    __ push(r0);

    // Copy all arguments from the array to the stack.
    Label entry, loop;
    __ ldr(r0, MemOperand(fp, kIndexOffset));
    __ b(&entry);

    // Load the current argument from the arguments array and push it to the
    // stack.
    // r0: current argument index
    __ bind(&loop);
    __ ldr(r1, MemOperand(fp, kArgsOffset));
    __ Push(r1, r0);

    // Call the runtime to access the property in the arguments array.
    __ CallRuntime(Runtime::kGetProperty, 2);
    __ push(r0);

    // Update the index on the stack and in register r0.
    __ ldr(r0, MemOperand(fp, kIndexOffset));
    __ add(r0, r0, Operand(1 << kSmiTagSize));
    __ str(r0, MemOperand(fp, kIndexOffset));

    // Test if the copy loop has finished copying all the elements from the
    // arguments object.
    __ bind(&entry);
    __ ldr(r1, MemOperand(fp, kLimitOffset));
    __ cmp(r0, r1);
    __ b(ne, &loop);

    // Call the function.
    Label call_proxy;
    ParameterCount actual(r0);
    __ SmiUntag(r0);
    __ ldr(r1, MemOperand(fp, kFunctionOffset));
    __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
    __ b(ne, &call_proxy);
    __ InvokeFunction(r1, actual, CALL_FUNCTION, NullCallWrapper());

    frame_scope.GenerateLeaveFrame();
    __ add(sp, sp, Operand(3 * kPointerSize));
    __ Jump(lr);

    // Call the function proxy.
    __ bind(&call_proxy);
    __ push(r1);  // add function proxy as last argument
    __ add(r0, r0, Operand(1));
    __ mov(r2, Operand::Zero());
    __ GetBuiltinFunction(r1, Builtins::CALL_FUNCTION_PROXY);
    __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    // Tear down the internal frame and remove function, receiver and args.
  }
  __ add(sp, sp, Operand(3 * kPointerSize));
  __ Jump(lr);
}

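// Note: Function.prototype.apply above keeps the loop index and limit in
// expression-stack slots (kIndexOffset/kLimitOffset) rather than registers,
// presumably because the Runtime::kGetProperty call in the loop body can
// re-enter JavaScript and clobber register state.
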
static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
                                      Label* stack_overflow) {
  // ----------- S t a t e -------------
  //  -- r0 : actual number of arguments
  //  -- r1 : function (passed through to callee)
  //  -- r2 : expected number of arguments
  // -----------------------------------
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
  // Make r5 the space we have left. The stack might already be overflowed
  // here which will cause r5 to become negative.
  __ sub(r5, sp, r5);
  // Check if the arguments will overflow the stack.
  __ cmp(r5, Operand(r2, LSL, kPointerSizeLog2));
  __ b(le, stack_overflow);  // Signed comparison.
}


static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ SmiTag(r0);
  __ mov(r4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ stm(db_w, sp, r0.bit() | r1.bit() | r4.bit() |
                   (FLAG_enable_ool_constant_pool ? pp.bit() : 0) |
                   fp.bit() | lr.bit());
  __ add(fp, sp,
         Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
}


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then tear down the parameters.
  __ ldr(r1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                              kPointerSize)));

  __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR);
  __ add(sp, sp, Operand::PointerOffsetFromSmiKey(r1));
  __ add(sp, sp, Operand(kPointerSize));  // adjust for receiver
}

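// Adaptor frame sketch right after EnterArgumentsAdaptorFrame, reconstructed
// from the stm above (descending addresses, ool constant pool slot omitted;
// treat as illustrative):
//
//   lr, fp, ARGUMENTS_ADAPTOR marker (smi), r1 (function),
//   r0 (smi-tagged argc)  <- sp
//
// fp ends up pointing at the saved-fp slot, which is why
// LeaveArgumentsAdaptorFrame reloads argc from
// fp - (kFixedFrameSizeFromFp + kPointerSize).
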
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : actual number of arguments
  //  -- r1 : function (passed through to callee)
  //  -- r2 : expected number of arguments
  // -----------------------------------

  Label stack_overflow;
  ArgumentAdaptorStackCheck(masm, &stack_overflow);
  Label invoke, dont_adapt_arguments;

  Label enough, too_few;
  __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
  __ cmp(r0, r2);
  __ b(lt, &too_few);
  __ cmp(r2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ b(eq, &dont_adapt_arguments);

  {  // Enough parameters: actual >= expected
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into r0 and copy end address into r2.
    // r0: actual number of arguments as a smi
    // r1: function
    // r2: expected number of arguments
    // r3: code entry to call
    __ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0));
    // adjust for return address and receiver
    __ add(r0, r0, Operand(2 * kPointerSize));
    __ sub(r2, r0, Operand(r2, LSL, kPointerSizeLog2));

    // Copy the arguments (including the receiver) to the new stack frame.
    // r0: copy start address
    // r1: function
    // r2: copy end address
    // r3: code entry to call

    Label copy;
    __ bind(&copy);
    __ ldr(ip, MemOperand(r0, 0));
    __ push(ip);
    __ cmp(r0, r2);  // Compare before moving to next argument.
    __ sub(r0, r0, Operand(kPointerSize));
    __ b(ne, &copy);

    __ b(&invoke);
  }

  {  // Too few parameters: Actual < expected
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into r0 and copy end address is fp.
    // r0: actual number of arguments as a smi
    // r1: function
    // r2: expected number of arguments
    // r3: code entry to call
    __ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0));

    // Copy the arguments (including the receiver) to the new stack frame.
    // r0: copy start address
    // r1: function
    // r2: expected number of arguments
    // r3: code entry to call
    Label copy;
    __ bind(&copy);
    // Adjust load for return address and receiver.
    __ ldr(ip, MemOperand(r0, 2 * kPointerSize));
    __ push(ip);
    __ cmp(r0, fp);  // Compare before moving to next argument.
    __ sub(r0, r0, Operand(kPointerSize));
    __ b(ne, &copy);

    // Fill the remaining expected arguments with undefined.
    // r1: function
    // r2: expected number of arguments
    // r3: code entry to call
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ sub(r2, fp, Operand(r2, LSL, kPointerSizeLog2));
    // Adjust for frame.
    __ sub(r2, r2, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                           2 * kPointerSize));

    Label fill;
    __ bind(&fill);
    __ push(ip);
    __ cmp(sp, r2);
    __ b(ne, &fill);
  }

  // Call the entry point.
  __ bind(&invoke);
  __ Call(r3);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Jump(lr);


  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ Jump(r3);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    EnterArgumentsAdaptorFrame(masm);
    __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
    __ bkpt(0);
  }
}

#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM