// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_ARM

#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen/full-codegen.h"
#include "src/interpreter/bytecodes.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)
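
// ACCESS_MASM expands to "masm->", so with the __ shorthand the code
// generators below read like assembly listings while still being ordinary
// MacroAssembler method calls.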
void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- r0                 : number of arguments excluding receiver
  //  -- r1                 : called function (only guaranteed when
  //                          extra_args requires it)
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument (argc == r0)
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------

  // Insert extra arguments.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    __ push(r1);
    num_extra_args = 1;
  } else {
    DCHECK(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects r0 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ add(r0, r0, Operand(num_extra_args + 1));
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}

// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the native context.
  __ ldr(result,
         MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ ldr(result,
         FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the InternalArray function from the native context.
  __ ldr(result,
         MemOperand(result, Context::SlotOffset(
                                Context::INTERNAL_ARRAY_FUNCTION_INDEX)));
}

// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the native context.
  __ ldr(result,
         MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ ldr(result,
         FieldMemOperand(result, GlobalObject::kNativeContextOffset));
  // Load the Array function from the native context.
  __ ldr(result,
         MemOperand(result,
                    Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
}

void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, r1);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(r2);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction);
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function: tail call a stub.
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, r1);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(r2);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
  }

  // Run the native code for the Array function called as a normal function:
  // tail call a stub.
  __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0                     : number of arguments
  //  -- r1                     : constructor function
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_ctor_calls(), 1, r2, r3);

  Register function = r1;
  if (FLAG_debug_code) {
    __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, r2);
    __ cmp(function, Operand(r2));
    __ Assert(eq, kUnexpectedStringFunction);
  }

  // Load the first argument into r0 and get rid of the rest.
  Label no_arguments;
  __ cmp(r0, Operand::Zero());
  __ b(eq, &no_arguments);
  // First argument = sp[(argc - 1) * 4].
  __ sub(r0, r0, Operand(1));
  __ ldr(r0, MemOperand(sp, r0, LSL, kPointerSizeLog2, PreIndex));
  // sp now points to args[0]; drop args[0] together with the receiver.
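  // (The PreIndex addressing mode above writes the computed address
  // sp + (argc - 1) * kPointerSize back into sp, so the single ldr both loads
  // the first argument and repositions sp for the Drop below.)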
  __ Drop(2);

  Register argument = r2;
  Label not_cached, argument_is_string;
  __ LookupNumberStringCache(r0,        // Input.
                             argument,  // Result.
                             r3,        // Scratch.
                             r4,        // Scratch.
                             r5,        // Scratch.
                             &not_cached);
  __ IncrementCounter(counters->string_ctor_cached_number(), 1, r3, r4);
  __ bind(&argument_is_string);

  // ----------- S t a t e -------------
  //  -- r2     : argument converted to string
  //  -- r1     : constructor function
  //  -- lr     : return address
  // -----------------------------------

  Label gc_required;
  __ Allocate(JSValue::kSize,
              r0,  // Result.
              r3,  // Scratch.
              r4,  // Scratch.
              &gc_required,
              TAG_OBJECT);

  // Initialize the String object.
  Register map = r3;
  __ LoadGlobalFunctionInitialMap(function, map, r4);
  if (FLAG_debug_code) {
    __ ldrb(r4, FieldMemOperand(map, Map::kInstanceSizeOffset));
    __ cmp(r4, Operand(JSValue::kSize >> kPointerSizeLog2));
    __ Assert(eq, kUnexpectedStringWrapperInstanceSize);
    __ ldrb(r4, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
    __ cmp(r4, Operand::Zero());
    __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper);
  }
  __ str(map, FieldMemOperand(r0, HeapObject::kMapOffset));

  __ LoadRoot(r3, Heap::kEmptyFixedArrayRootIndex);
  __ str(r3, FieldMemOperand(r0, JSObject::kPropertiesOffset));
  __ str(r3, FieldMemOperand(r0, JSObject::kElementsOffset));

  __ str(argument, FieldMemOperand(r0, JSValue::kValueOffset));

  // Ensure the object is fully initialized.
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
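  // (Map, properties, elements and value are exactly the four words asserted
  // above, so every field of the wrapper has been written at this point.)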
  __ Ret();

  // The argument was not found in the number to string cache. Check
  // if it's a string already before calling the conversion builtin.
  Label convert_argument;
  __ bind(&not_cached);
  __ JumpIfSmi(r0, &convert_argument);

  // Is it a String?
  __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  __ tst(r3, Operand(kIsNotStringMask));
  __ b(ne, &convert_argument);
  __ mov(argument, r0);
  __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4);
  __ b(&argument_is_string);

  // Invoke the conversion builtin and put the result into r2.
  __ bind(&convert_argument);
  __ push(function);  // Preserve the function.
  __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ push(r0);
    __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
  }
  __ pop(function);
  __ mov(argument, r0);
  __ b(&argument_is_string);

  // Load the empty string into r2, remove the receiver from the
  // stack, and jump back to the case where the argument is a string.
  __ bind(&no_arguments);
  __ LoadRoot(argument, Heap::kempty_stringRootIndex);
  __ Drop(1);
  __ b(&argument_is_string);

  // At this point the argument is already a string. Call runtime to
  // create a string wrapper.
  __ bind(&gc_required);
  __ IncrementCounter(counters->string_ctor_gc_required(), 1, r3, r4);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ push(argument);
    __ CallRuntime(Runtime::kNewStringWrapper, 1);
  }
  __ Ret();
}

static void CallRuntimePassFunction(
    MacroAssembler* masm, Runtime::FunctionId function_id) {
  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack.
  __ push(r1);
  // Push function as parameter to the runtime call.
  __ Push(r1);

  __ CallRuntime(function_id, 1);
  // Restore receiver.
  __ pop(r1);
}

static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCodeOffset));
  __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(r2);
}

static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
  __ add(r0, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(r0);
}

void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive. A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  __ cmp(sp, Operand(ip));
  __ b(hs, &ok);

  CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
  GenerateTailCallToReturnedCode(masm);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}

static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_memento) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- r1     : constructor function
  //  -- r2     : allocation site or undefined
  //  -- r3     : original constructor
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  // Should never create mementos for api functions.
  DCHECK(!is_api_function || !create_memento);

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ AssertUndefinedOrAllocationSite(r2, r4);
    __ push(r2);
    __ SmiTag(r0);
    __ push(r0);
    __ push(r1);
    __ push(r3);

    // Try to allocate the object without transitioning into C code. If any of
    // the preconditions is not met, the code bails out to the runtime call.
    Label rt_call, allocated;
    if (FLAG_inline_new) {
      ExternalReference debug_step_in_fp =
          ExternalReference::debug_step_in_fp_address(isolate);
      __ mov(r2, Operand(debug_step_in_fp));
      __ ldr(r2, MemOperand(r2));
      __ tst(r2, r2);
      __ b(ne, &rt_call);

      // Fall back to runtime if the original constructor and function differ.
      __ cmp(r1, r3);
      __ b(ne, &rt_call);

      // Load the initial map and verify that it is in fact a map.
      // r1: constructor function
      __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
      __ JumpIfSmi(r2, &rt_call);
      __ CompareObjectType(r2, r5, r4, MAP_TYPE);
      __ b(ne, &rt_call);

      // Check that the constructor is not constructing a JSFunction (see
      // comments in Runtime_NewObject in runtime.cc). In which case the
      // initial map's instance type would be JS_FUNCTION_TYPE.
      // r1: constructor function
      // r2: initial map
      __ CompareInstanceType(r2, r5, JS_FUNCTION_TYPE);
      __ b(eq, &rt_call);

      if (!is_api_function) {
        Label allocate;
        MemOperand bit_field3 = FieldMemOperand(r2, Map::kBitField3Offset);
        // Check if slack tracking is enabled.
        __ ldr(r4, bit_field3);
        __ DecodeField<Map::Counter>(r3, r4);
        __ cmp(r3, Operand(Map::kSlackTrackingCounterEnd));
        __ b(lt, &allocate);
        // Decrease generous allocation count.
        __ sub(r4, r4, Operand(1 << Map::Counter::kShift));
        __ str(r4, bit_field3);
        __ cmp(r3, Operand(Map::kSlackTrackingCounterEnd));
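        // (Map::Counter counts tracked allocations down towards
        // kSlackTrackingCounterEnd; hitting it here means this was the last
        // tracked allocation, so finalize the instance size first.)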
        __ b(ne, &allocate);

        __ push(r1);

        __ Push(r2, r1);  // r1 = constructor
        __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);

        __ pop(r2);
        __ pop(r1);

        __ bind(&allocate);
      }

      // Now allocate the JSObject on the heap.
      // r1: constructor function
      // r2: initial map
      Label rt_call_reload_new_target;
      __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset));
      if (create_memento) {
        __ add(r3, r3, Operand(AllocationMemento::kSize / kPointerSize));
      }

      __ Allocate(r3, r4, r5, r6, &rt_call_reload_new_target, SIZE_IN_WORDS);

      // Allocated the JSObject, now initialize the fields. Map is set to
      // initial map and properties and elements are set to empty fixed array.
      // r1: constructor function
      // r2: initial map
      // r3: object size (including memento if create_memento)
      // r4: JSObject (not tagged)
      __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
      __ mov(r5, r4);
      DCHECK_EQ(0 * kPointerSize, JSObject::kMapOffset);
      __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
      DCHECK_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
      __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
      DCHECK_EQ(2 * kPointerSize, JSObject::kElementsOffset);
      __ str(r6, MemOperand(r5, kPointerSize, PostIndex));

      // Fill all the in-object properties with the appropriate filler.
      // r1: constructor function
      // r2: initial map
      // r3: object size (in words, including memento if create_memento)
      // r4: JSObject (not tagged)
      // r5: First in-object property of JSObject (not tagged)
      DCHECK_EQ(3 * kPointerSize, JSObject::kHeaderSize);
      __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);

      if (!is_api_function) {
        Label no_inobject_slack_tracking;

        // Check if slack tracking is enabled.
        __ ldr(ip, FieldMemOperand(r2, Map::kBitField3Offset));
        __ DecodeField<Map::Counter>(ip);
        __ cmp(ip, Operand(Map::kSlackTrackingCounterEnd));
        __ b(lt, &no_inobject_slack_tracking);

        // Allocate object with a slack.
        __ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset));
        __ Ubfx(r0, r0, Map::kInObjectPropertiesByte * kBitsPerByte,
                kBitsPerByte);
        __ ldr(r2, FieldMemOperand(r2, Map::kInstanceAttributesOffset));
        __ Ubfx(r2, r2, Map::kUnusedPropertyFieldsByte * kBitsPerByte,
                kBitsPerByte);
        __ sub(r0, r0, Operand(r2));
        __ add(r0, r5, Operand(r0, LSL, kPointerSizeLog2));
        // r0: offset of first field after pre-allocated fields
        if (FLAG_debug_code) {
          __ add(ip, r4, Operand(r3, LSL, kPointerSizeLog2));  // End of object.
          __ cmp(r0, ip);
          __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields);
        }
        __ InitializeFieldsWithFiller(r5, r0, r6);
        // To allow for truncation.
        __ LoadRoot(r6, Heap::kOnePointerFillerMapRootIndex);
        // Fill the remaining fields with one pointer filler map.

        __ bind(&no_inobject_slack_tracking);
      }

      if (create_memento) {
        __ sub(ip, r3, Operand(AllocationMemento::kSize / kPointerSize));
        __ add(r0, r4, Operand(ip, LSL, kPointerSizeLog2));  // End of object.
        __ InitializeFieldsWithFiller(r5, r0, r6);

        // Fill in memento fields.
        // r5: points to the allocated but uninitialized memento.
        __ LoadRoot(r6, Heap::kAllocationMementoMapRootIndex);
        DCHECK_EQ(0 * kPointerSize, AllocationMemento::kMapOffset);
        __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
        // Load the AllocationSite.
        __ ldr(r6, MemOperand(sp, 3 * kPointerSize));
        __ AssertUndefinedOrAllocationSite(r6, r0);
        DCHECK_EQ(1 * kPointerSize, AllocationMemento::kAllocationSiteOffset);
        __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
      } else {
        __ add(r0, r4, Operand(r3, LSL, kPointerSizeLog2));  // End of object.
        __ InitializeFieldsWithFiller(r5, r0, r6);
      }

      // Add the object tag to make the JSObject real, so that we can continue
      // and jump into the continuation code at any time from now on.
      __ add(r4, r4, Operand(kHeapObjectTag));

      // Continue with JSObject being successfully allocated.
      // r4: JSObject
      __ jmp(&allocated);

      // Reload the original constructor and fall-through.
      __ bind(&rt_call_reload_new_target);
      __ ldr(r3, MemOperand(sp, 0 * kPointerSize));
    }

    // Allocate the new receiver object using the runtime call.
    // r1: constructor function
    // r3: original constructor
    __ bind(&rt_call);
    if (create_memento) {
      // Get the cell or allocation site.
      __ ldr(r2, MemOperand(sp, 3 * kPointerSize));
      __ push(r2);  // argument 1: allocation site
    }

    __ push(r1);  // argument 2/1: constructor function
    __ push(r3);  // argument 3/2: original constructor
    if (create_memento) {
      __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 3);
    } else {
      __ CallRuntime(Runtime::kNewObject, 2);
    }
    __ mov(r4, r0);

    // Runtime_NewObjectWithAllocationSite increments allocation count.
    // Skip the increment.
    Label count_incremented;
    if (create_memento) {
      __ jmp(&count_incremented);
    }

    // Receiver for constructor call allocated.
    // r4: JSObject
    __ bind(&allocated);

    if (create_memento) {
      __ ldr(r2, MemOperand(sp, 3 * kPointerSize));
      __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
      __ cmp(r2, r5);
      __ b(eq, &count_incremented);
      // r2 is an AllocationSite. We are creating a memento from it, so we
      // need to increment the memento create count.
      __ ldr(r3, FieldMemOperand(r2,
                                 AllocationSite::kPretenureCreateCountOffset));
      __ add(r3, r3, Operand(Smi::FromInt(1)));
      __ str(r3, FieldMemOperand(r2,
                                 AllocationSite::kPretenureCreateCountOffset));
      __ bind(&count_incremented);
    }

    // Restore the parameters.
    __ pop(r3);
    __ pop(r1);

    // Retrieve smi-tagged arguments count from the stack.
    __ ldr(r0, MemOperand(sp));
    __ SmiUntag(r0);

    // Push new.target onto the construct frame. This is stored just below the
    // receiver on the stack.
    __ push(r3);
    __ push(r4);
    __ push(r4);

    // Set up pointer to last argument.
    __ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    // r0: number of arguments
    // r1: constructor function
    // r2: address of last argument (caller sp)
    // r3: number of arguments (smi-tagged)
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: new.target
    // sp[3]: number of arguments (smi-tagged)
    Label loop, entry;
    __ SmiTag(r3, r0);
    __ b(&entry);
    __ bind(&loop);
    __ ldr(ip, MemOperand(r2, r3, LSL, kPointerSizeLog2 - 1));
    __ push(ip);
    __ bind(&entry);
    __ sub(r3, r3, Operand(2), SetCC);
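    // (r3 holds a smi: LSL #(kPointerSizeLog2 - 1) in the load above turns the
    // smi-tagged count straight into a byte offset, and subtracting 2
    // decrements the count by one smi.)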
    __ b(ge, &loop);

    // Call the function.
    // r0: number of arguments
    // r1: constructor function
    if (is_api_function) {
      __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      __ Call(code, RelocInfo::CODE_TARGET);
    } else {
      ParameterCount actual(r0);
      __ InvokeFunction(r1, actual, CALL_FUNCTION, NullCallWrapper());
    }

    // Store offset of return address for deoptimizer.
    if (!is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    // r0: result
    // sp[0]: receiver
    // sp[1]: new.target
    // sp[2]: number of arguments (smi-tagged)
    __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7.
    Label use_receiver, exit;

    // If the result is a smi, it is *not* an object in the ECMA sense.
    // r0: result
    // sp[0]: receiver
    // sp[1]: new.target
    // sp[2]: number of arguments (smi-tagged)
    __ JumpIfSmi(r0, &use_receiver);

    // If the type of the result (stored in its map) is less than
    // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
    __ CompareObjectType(r0, r1, r3, FIRST_SPEC_OBJECT_TYPE);
    __ b(ge, &exit);

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ ldr(r0, MemOperand(sp));

    // Remove receiver from the stack, remove caller arguments, and
    // return.
    __ bind(&exit);
    // r0: result
    // sp[0]: receiver (newly allocated object)
    // sp[1]: new.target (original constructor)
    // sp[2]: number of arguments (smi-tagged)
    __ ldr(r1, MemOperand(sp, 2 * kPointerSize));

    // Leave construct frame.
  }

  __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - 1));
  __ add(sp, sp, Operand(kPointerSize));
  __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r1, r2);
  __ Jump(lr);
}

void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false);
}

void Builtins::Generate_JSConstructStubForDerived(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- r1     : constructor function
  //  -- r2     : allocation site or undefined
  //  -- r3     : original constructor
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  {
    FrameScope frame_scope(masm, StackFrame::CONSTRUCT);

    __ AssertUndefinedOrAllocationSite(r2, r4);
    __ push(r2);

    __ mov(r4, r0);
    __ SmiTag(r4);
    __ push(r4);  // Smi-tagged arguments count.

    // Push new.target.
    __ push(r3);

    // receiver is the hole.
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
    __ push(ip);

    // Set up pointer to last argument.
    __ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    // r0: number of arguments
    // r1: constructor function
    // r2: address of last argument (caller sp)
    // r4: number of arguments (smi-tagged)
    // sp[0]: receiver
    // sp[1]: new.target
    // sp[2]: number of arguments (smi-tagged)
    Label loop, entry;
    __ b(&entry);
    __ bind(&loop);
    __ ldr(ip, MemOperand(r2, r4, LSL, kPointerSizeLog2 - 1));
    __ push(ip);
    __ bind(&entry);
    __ sub(r4, r4, Operand(2), SetCC);
    __ b(ge, &loop);

    // Handle step in.
    Label skip_step_in;
    ExternalReference debug_step_in_fp =
        ExternalReference::debug_step_in_fp_address(masm->isolate());
    __ mov(r2, Operand(debug_step_in_fp));
    __ ldr(r2, MemOperand(r2));
    __ tst(r2, r2);
    __ b(eq, &skip_step_in);

    __ Push(r0);
    __ Push(r1);
    __ Push(r1);
    __ CallRuntime(Runtime::kHandleStepInForDerivedConstructors, 1);
    __ Pop(r1);
    __ Pop(r0);

    __ bind(&skip_step_in);

    // Call the function.
    // r0: number of arguments
    // r1: constructor function
    ParameterCount actual(r0);
    __ InvokeFunction(r1, actual, CALL_FUNCTION, NullCallWrapper());

    // Restore context from the frame.
    // r0: result
    // sp[0]: number of arguments (smi-tagged)
    __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Get arguments count, skipping over new.target.
    __ ldr(r1, MemOperand(sp, kPointerSize));

    // Leave construct frame.
  }

  __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - 1));
  __ add(sp, sp, Operand(kPointerSize));
  __ Jump(lr);
}

enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };


// Clobbers r2; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm,
                                        const int calleeOffset, Register argc,
                                        IsTagged argc_is_tagged) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
  // Make r2 the space we have left. The stack might already be overflowed
  // here which will cause r2 to become negative.
  __ sub(r2, sp, r2);
  // Check if the arguments will overflow the stack.
  if (argc_is_tagged == kArgcIsSmiTagged) {
    __ cmp(r2, Operand::PointerOffsetFromSmiKey(argc));
  } else {
    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
    __ cmp(r2, Operand(argc, LSL, kPointerSizeLog2));
  }
  __ b(gt, &okay);  // Signed comparison.
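  // (The comparison is deliberately signed: on a stack that is already past
  // the limit, r2 went negative above and the gt branch correctly falls
  // through to the overflow path.)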
  // Out of stack space.
  __ ldr(r1, MemOperand(fp, calleeOffset));
  if (argc_is_tagged == kArgcIsUntaggedInt) {
    __ SmiTag(argc);
  }
  __ Push(r1, argc);
  __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);

  __ bind(&okay);
}

static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from Generate_JS_Entry
  // r0: code entry
  // r1: function
  // r2: receiver
  // r3: argc
  // r4: argv
  // r5-r6, r8 (if !FLAG_enable_embedded_constant_pool) and cp may be clobbered
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Clear the context before we push it when entering the internal frame.
  __ mov(cp, Operand::Zero());

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context from the function argument.
    __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

    __ InitializeRootRegister();

    // Push the function and the receiver onto the stack.
    __ push(r1);
    __ push(r2);

    // Check if we have enough stack space to push all arguments.
    // The function is the first thing that was pushed above after entering
    // the internal frame.
    const int kFunctionOffset =
        InternalFrameConstants::kCodeOffset - kPointerSize;
    Generate_CheckStackOverflow(masm, kFunctionOffset, r3, kArgcIsUntaggedInt);

    // Copy arguments to the stack in a loop.
    // r1: function
    // r3: argc
    // r4: argv, i.e. points to first arg
    Label loop, entry;
    __ add(r2, r4, Operand(r3, LSL, kPointerSizeLog2));
    // r2 points past last arg.
    __ b(&entry);
    __ bind(&loop);
    __ ldr(r0, MemOperand(r4, kPointerSize, PostIndex));  // read next parameter
    __ ldr(r0, MemOperand(r0));                           // dereference handle
    __ push(r0);                                          // push parameter
    __ bind(&entry);
    __ cmp(r4, r2);
    __ b(ne, &loop);

    // Initialize all JavaScript callee-saved registers, since they will be
    // seen by the garbage collector as part of handlers.
    __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
    __ mov(r5, Operand(r4));
    __ mov(r6, Operand(r4));
    if (!FLAG_enable_embedded_constant_pool) {
      __ mov(r8, Operand(r4));
    }
    if (kR9Available == 1) {
      __ mov(r9, Operand(r4));
    }

    // Invoke the code and pass argc as r0.
    __ mov(r0, Operand(r3));
    if (is_construct) {
      // No type feedback cell is available.
      __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
      CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
      __ CallStub(&stub);
    } else {
      ParameterCount actual(r0);
      __ InvokeFunction(r1, actual, CALL_FUNCTION, NullCallWrapper());
    }
    // Exit the JS frame and remove the parameters (except function), and
    // return.
    // Respect ABI stack constraint.
  }
  __ Jump(lr);
}

void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}

// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right. The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   o r1: the JS function object being called.
//   o cp: our context
//   o pp: the caller's constant pool pointer (if enabled)
//   o fp: the caller's frame pointer
//   o sp: stack pointer
//   o lr: return address
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-arm.h for its layout.
// TODO(rmcilroy): We will need to include the current bytecode pointer in the
// frame.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ PushFixedFrame(r1);
  __ add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
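  // (PushFixedFrame saves lr, fp, the constant pool pointer when embedded
  // constant pools are enabled, cp and the function, i.e. it lays out the
  // fixed part of a standard JS frame by hand; fp then points at the saved fp
  // slot.)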
  // Get the bytecode array from the function object and load it into
  // kInterpreterBytecodeArrayRegister.
  __ ldr(r0, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(kInterpreterBytecodeArrayRegister,
         FieldMemOperand(r0, SharedFunctionInfo::kFunctionDataOffset));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ SmiTst(kInterpreterBytecodeArrayRegister);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r0, no_reg,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size from the BytecodeArray object.
    __ ldr(r4, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                               BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ sub(r9, sp, Operand(r4));
    __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
    __ cmp(r9, Operand(r2));
    __ b(hs, &ok);
    __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file entries.
    // Note: there should always be at least one stack slot for the return
    // register in the register file.
    Label loop_header;
    __ LoadRoot(r9, Heap::kUndefinedValueRootIndex);
    __ bind(&loop_header);
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    __ push(r9);
    // Continue loop if not done.
    __ sub(r4, r4, Operand(kPointerSize), SetCC);
    __ b(&loop_header, ne);
  }

  // TODO(rmcilroy): List of things not currently dealt with here but done in
  // fullcodegen's prologue:
  //  - Support profiler (specifically profiling_counter).
  //  - Call ProfileEntryHookStub when isolate has a function_entry_hook.
  //  - Allow simulator stop operations if FLAG_stop_at is set.
  //  - Deal with sloppy mode functions which need to replace the
  //    receiver with the global proxy when called as functions (without an
  //    explicit receiver object).
  //  - Code aging of the BytecodeArray object.
  //  - Supporting FLAG_trace.
  //
  // The following items are also not done here, and will probably be done
  // using explicit bytecodes instead:
  //  - Allocating a new local context if applicable.
  //  - Setting up a local binding to the this function, which is used in
  //    derived constructors with super calls.
  //  - Setting new.target if required.
  //  - Dealing with REST parameters (only if
  //    https://codereview.chromium.org/1235153006 doesn't land by then).
  //  - Dealing with argument objects.

  // Perform stack guard check.
  {
    Label ok;
    __ LoadRoot(ip, Heap::kStackLimitRootIndex);
    __ cmp(sp, Operand(ip));
    __ b(hs, &ok);
    __ CallRuntime(Runtime::kStackGuard, 0);
    __ bind(&ok);
  }

  // Load bytecode offset and dispatch table into registers.
  __ mov(kInterpreterBytecodeOffsetRegister,
         Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
  __ LoadRoot(kInterpreterDispatchTableRegister,
              Heap::kInterpreterTableRootIndex);
  __ add(kInterpreterDispatchTableRegister, kInterpreterDispatchTableRegister,
         Operand(FixedArray::kHeaderSize - kHeapObjectTag));
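  // (The dispatch table is a FixedArray of code objects; after the add above
  // the register points at its first element, so a raw bytecode value can be
  // used directly as an index.)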
  // Dispatch to the first bytecode handler for the function.
  __ ldrb(r0, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
  __ ldr(ip, MemOperand(kInterpreterDispatchTableRegister, r0, LSL,
                        kPointerSizeLog2));
  // TODO(rmcilroy): Make dispatch table point to code entries to avoid
  // untagging and header removal.
  __ add(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Call(ip);
}

void Builtins::Generate_InterpreterExitTrampoline(MacroAssembler* masm) {
  // TODO(rmcilroy): List of things not currently dealt with here but done in
  // fullcodegen's EmitReturnSequence.
  //  - Supporting FLAG_trace for Runtime::TraceExit.
  //  - Support profiler (specifically decrementing profiling_counter
  //    appropriately and calling out to HandleInterrupts if necessary).

  // Load return value into r0.
  __ ldr(r0, MemOperand(fp, -kPointerSize -
                                StandardFrameConstants::kFixedFrameSizeFromFp));
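  // (That slot is the first register file entry, which sits immediately below
  // the fixed frame header and holds the function's return value.)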
  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);
  // Drop receiver + arguments.
  __ Drop(1);  // TODO(rmcilroy): Get number of arguments from BytecodeArray.
  __ Jump(lr);
}

void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kCompileLazy);
  GenerateTailCallToReturnedCode(masm);
}

static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack.
  __ push(r1);
  // Push function as parameter to the runtime call.
  __ Push(r1);
  // Whether to compile in a background thread.
  __ LoadRoot(
      ip, concurrent ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
  __ push(ip);

  __ CallRuntime(Runtime::kCompileOptimized, 2);
  // Restore receiver.
  __ pop(r1);
}

void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  CallCompileOptimized(masm, false);
  GenerateTailCallToReturnedCode(masm);
}


void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  CallCompileOptimized(masm, true);
  GenerateTailCallToReturnedCode(masm);
}

static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do
  // stack crawls in MakeCodeYoung. This seems a bit fragile.

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   r0 - contains return address (beginning of patch sequence)
  //   r1 - isolate
  FrameScope scope(masm, StackFrame::MANUAL);
  __ stm(db_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit());
  __ PrepareCallCFunction(2, 0, r2);
  __ mov(r1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  __ ldm(ia_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit());
  __ mov(pc, r0);
}

#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                  \
  void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }                                                           \
  void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(  \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR

void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the
  // fact that make_code_young doesn't do any garbage collection which allows
  // us to save/restore the registers without worrying about which of them
  // contain pointers.

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   r0 - contains return address (beginning of patch sequence)
  //   r1 - isolate
  FrameScope scope(masm, StackFrame::MANUAL);
  __ stm(db_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit());
  __ PrepareCallCFunction(2, 0, r2);
  __ mov(r1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
      2);
  __ ldm(ia_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit());

  // Perform prologue operations usually performed by the young code stub.
  __ PushFixedFrame(r1);
  __ add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));

  // Jump to point after the code-age stub.
  __ add(r0, r0, Operand(kNoCodeAgeSequenceLength));
  __ mov(pc, r0);
}

void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}


void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
  Generate_MarkCodeAsExecutedOnce(masm);
}

static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    __ stm(db_w, sp, kJSCallerSaved | kCalleeSaved);
    // Pass the function and deoptimization type to the runtime system.
    __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
    __ ldm(ia_w, sp, kJSCallerSaved | kCalleeSaved);
  }

  __ add(sp, sp, Operand(kPointerSize));  // Ignore state.
  __ mov(pc, lr);  // Jump to miss handler.
}

void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}


void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}

static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Pass the function and deoptimization type to the runtime system.
    __ mov(r0, Operand(Smi::FromInt(static_cast<int>(type))));
    __ push(r0);
    __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
  }

  // Get the full codegen state from the stack and untag it -> r6.
  __ ldr(r6, MemOperand(sp, 0 * kPointerSize));
  __ SmiUntag(r6);
  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ cmp(r6, Operand(FullCodeGenerator::NO_REGISTERS));
  __ b(ne, &with_tos_register);
  __ add(sp, sp, Operand(1 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&with_tos_register);
  __ ldr(r0, MemOperand(sp, 1 * kPointerSize));
  __ cmp(r6, Operand(FullCodeGenerator::TOS_REG));
  __ b(ne, &unknown_state);
  __ add(sp, sp, Operand(2 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&unknown_state);
  __ stop("no cases left");
}

void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}

void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ push(r0);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
  }

  // If the code object is null, just return to the unoptimized code.
  Label skip;
  __ cmp(r0, Operand(Smi::FromInt(0)));
  __ b(ne, &skip);
  __ Ret();

  __ bind(&skip);

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ ldr(r1, FieldMemOperand(r0, Code::kDeoptimizationDataOffset));

  { ConstantPoolUnavailableScope constant_pool_unavailable(masm);
    __ add(r0, r0, Operand(Code::kHeaderSize - kHeapObjectTag));  // Code start

    if (FLAG_enable_embedded_constant_pool) {
      __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r0);
    }

    // Load the OSR entrypoint offset from the deoptimization data.
    // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
    __ ldr(r1, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(
                                       DeoptimizationInputData::kOsrPcOffsetIndex)));

    // Compute the target address = code start + osr_offset
    __ add(lr, r0, Operand::SmiUntag(r1));

    // And "return" to the OSR entry point of the function.
    __ Ret();
  }
}

void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  // We check the stack limit as indicator that recompilation might be done.
  Label ok;
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  __ cmp(sp, Operand(ip));
  __ b(hs, &ok);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kStackGuard, 0);
  }
  __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
          RelocInfo::CODE_TARGET);

  __ bind(&ok);
  __ Ret();
}

void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // r0: actual number of arguments
  {
    Label done;
    __ cmp(r0, Operand::Zero());
    __ b(ne, &done);
    __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
    __ push(r2);
    __ add(r0, r0, Operand(1));
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack, check
  //    if it is a function.
  // r0: actual number of arguments
  Label slow, non_function;
  __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
  __ JumpIfSmi(r1, &non_function);
  __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
  __ b(ne, &slow);

  // 3a. Patch the first argument if necessary when calling a function.
  // r0: actual number of arguments
  // r1: function
  Label shift_arguments;
  __ mov(r4, Operand::Zero());  // indicate regular JS_FUNCTION
  {
    Label convert_to_object, use_global_proxy, patch_receiver;
    // Change context eagerly in case we need the global receiver.
    __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

    // Do not transform the receiver for strict mode functions.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
    __ ldr(r3, FieldMemOperand(r2, SharedFunctionInfo::kCompilerHintsOffset));
    __ tst(r3, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
                             kSmiTagSize)));
    __ b(ne, &shift_arguments);

    // Do not transform the receiver for native (CompilerHints already in r3).
    __ tst(r3, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
    __ b(ne, &shift_arguments);

    // Compute the receiver in sloppy mode.
    __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));
    __ ldr(r2, MemOperand(r2, -kPointerSize));
    // r0: actual number of arguments
    // r1: function
    // r2: first argument
    __ JumpIfSmi(r2, &convert_to_object);

    __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
    __ cmp(r2, r3);
    __ b(eq, &use_global_proxy);
    __ LoadRoot(r3, Heap::kNullValueRootIndex);
    __ cmp(r2, r3);
    __ b(eq, &use_global_proxy);

    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CompareObjectType(r2, r3, r3, FIRST_SPEC_OBJECT_TYPE);
    __ b(ge, &shift_arguments);

    __ bind(&convert_to_object);

    {
      // Enter an internal frame in order to preserve argument count.
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ SmiTag(r0);
      __ push(r0);

      __ push(r2);
      __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
      __ mov(r2, r0);

      __ pop(r0);
      __ SmiUntag(r0);

      // Exit the internal frame.
    }

    // Restore the function to r1, and the flag to r4.
    __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
    __ mov(r4, Operand::Zero());
    __ jmp(&patch_receiver);

    __ bind(&use_global_proxy);
    __ ldr(r2, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
    __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalProxyOffset));

    __ bind(&patch_receiver);
    __ add(r3, sp, Operand(r0, LSL, kPointerSizeLog2));
    __ str(r2, MemOperand(r3, -kPointerSize));

    __ jmp(&shift_arguments);
  }

  // 3b. Check for function proxy.
  __ bind(&slow);
  __ mov(r4, Operand(1, RelocInfo::NONE32));  // indicate function proxy
  __ cmp(r2, Operand(JS_FUNCTION_PROXY_TYPE));
  __ b(eq, &shift_arguments);
  __ bind(&non_function);
  __ mov(r4, Operand(2, RelocInfo::NONE32));  // indicate non-function

  // 3c. Patch the first argument when calling a non-function. The
  //     CALL_NON_FUNCTION builtin expects the non-function callee as
  //     receiver, so overwrite the first argument which will ultimately
  //     become the receiver.
  // r0: actual number of arguments
  // r1: function
  // r4: call type (0: JS function, 1: function proxy, 2: non-function)
  __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));
  __ str(r1, MemOperand(r2, -kPointerSize));

  // 4. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver). Adjust argument count to make
  //    the original first argument the new receiver.
  // r0: actual number of arguments
  // r1: function
  // r4: call type (0: JS function, 1: function proxy, 2: non-function)
  __ bind(&shift_arguments);
  {
    Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));

    __ bind(&loop);
    __ ldr(ip, MemOperand(r2, -kPointerSize));
    __ str(ip, MemOperand(r2));
    __ sub(r2, r2, Operand(kPointerSize));
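    // (Each iteration moves one slot up by a word, walking r2 down from the
    // old receiver slot towards sp.)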
    __ cmp(r2, sp);
    __ b(ne, &loop);
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ sub(r0, r0, Operand(1));
    __ pop();
  }

  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
  //     or a function proxy via CALL_FUNCTION_PROXY.
  // r0: actual number of arguments
  // r1: function
  // r4: call type (0: JS function, 1: function proxy, 2: non-function)
  {
    Label function, non_proxy;
    __ tst(r4, r4);
    __ b(eq, &function);
    // Expected number of arguments is 0 for CALL_NON_FUNCTION.
    __ mov(r2, Operand::Zero());
    __ cmp(r4, Operand(1));
    __ b(ne, &non_proxy);

    __ push(r1);  // re-add proxy object as additional argument
    __ add(r0, r0, Operand(1));
    __ GetBuiltinFunction(r1, Builtins::CALL_FUNCTION_PROXY);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    __ bind(&non_proxy);
    __ GetBuiltinFunction(r1, Builtins::CALL_NON_FUNCTION);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
    __ bind(&function);
  }

  // 5b. Get the code to call from the function and check that the number of
  //     expected arguments matches what we're providing. If so, jump
  //     (tail-call) to the code in register r3 without checking arguments.
  // r0: actual number of arguments
  // r1: function
  __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r2,
         FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
  __ SmiUntag(r2);
  __ cmp(r2, r0);  // Check formal and actual parameter counts.
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET, ne);

  __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
  ParameterCount expected(0);
  __ InvokeCode(r3, expected, expected, JUMP_FUNCTION, NullCallWrapper());
}

static void Generate_PushAppliedArguments(MacroAssembler* masm,
                                          const int argumentsOffset,
                                          const int indexOffset,
                                          const int limitOffset) {
  Label entry, loop;
  Register receiver = LoadDescriptor::ReceiverRegister();
  Register key = LoadDescriptor::NameRegister();
  Register slot = LoadDescriptor::SlotRegister();
  Register vector = LoadWithVectorDescriptor::VectorRegister();

  __ ldr(key, MemOperand(fp, indexOffset));
  __ b(&entry);

  // Load the current argument from the arguments array.
  __ bind(&loop);
  __ ldr(receiver, MemOperand(fp, argumentsOffset));

  // Use inline caching to speed up access to arguments.
  FeedbackVectorSpec spec(0, Code::KEYED_LOAD_IC);
  Handle<TypeFeedbackVector> feedback_vector =
      masm->isolate()->factory()->NewTypeFeedbackVector(&spec);
  int index = feedback_vector->GetIndex(FeedbackVectorICSlot(0));
  __ mov(slot, Operand(Smi::FromInt(index)));
  __ Move(vector, feedback_vector);
  Handle<Code> ic =
      KeyedLoadICStub(masm->isolate(), LoadICState(kNoExtraICState)).GetCode();
  __ Call(ic, RelocInfo::CODE_TARGET);

  // Push the nth argument.
  __ push(r0);

  __ ldr(key, MemOperand(fp, indexOffset));
  __ add(key, key, Operand(1 << kSmiTagSize));
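  // (1 << kSmiTagSize is the smi constant 1, so this advances the smi-tagged
  // index by one element.)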
  __ str(key, MemOperand(fp, indexOffset));

  // Test if the copy loop has finished copying all the elements from the
  // arguments object.
  __ bind(&entry);
  __ ldr(r1, MemOperand(fp, limitOffset));
  __ cmp(key, r1);
  __ b(ne, &loop);

  // On exit, the pushed arguments count is in r0, untagged.
  __ ldr(r0, MemOperand(fp, indexOffset));
  __ SmiUntag(r0);
}

// Used by FunctionApply and ReflectApply.
static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) {
  const int kFormalParameters = targetIsArgument ? 3 : 2;
  const int kStackSize = kFormalParameters + 1;

  {
    FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL);
    const int kArgumentsOffset = kFPOnStackSize + kPCOnStackSize;
    const int kReceiverOffset = kArgumentsOffset + kPointerSize;
    const int kFunctionOffset = kReceiverOffset + kPointerSize;
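    // (These offsets locate apply's own incoming parameters relative to fp:
    // the arguments array sits just above the saved fp and return address,
    // with the receiver and then the function above it.)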
    __ ldr(r0, MemOperand(fp, kFunctionOffset));  // get the function
    __ push(r0);
    __ ldr(r0, MemOperand(fp, kArgumentsOffset));  // get the args array
    __ push(r0);
    if (targetIsArgument) {
      __ InvokeBuiltin(Builtins::REFLECT_APPLY_PREPARE, CALL_FUNCTION);
    } else {
      __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
    }

    Generate_CheckStackOverflow(masm, kFunctionOffset, r0, kArgcIsSmiTagged);

    // Push current limit and index.
    const int kIndexOffset =
        StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize);
    const int kLimitOffset =
        StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize);
    __ push(r0);  // limit
    __ mov(r1, Operand::Zero());  // initial index
    __ push(r1);

    // Get the receiver.
    __ ldr(r0, MemOperand(fp, kReceiverOffset));

    // Check that the function is a JS function (otherwise it must be a proxy).
    Label push_receiver;
    __ ldr(r1, MemOperand(fp, kFunctionOffset));
    __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
    __ b(ne, &push_receiver);

    // Change context eagerly to get the right global object if necessary.
    __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
    // Load the shared function info while the function is still in r1.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));

    // Compute the receiver.
    // Do not transform the receiver for strict mode functions.
    Label call_to_object, use_global_proxy;
    __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCompilerHintsOffset));
    __ tst(r2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
                             kSmiTagSize)));
    __ b(ne, &push_receiver);

    // Do not transform the receiver for native (CompilerHints already in r2).
    __ tst(r2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
    __ b(ne, &push_receiver);

    // Compute the receiver in sloppy mode.
    __ JumpIfSmi(r0, &call_to_object);
    __ LoadRoot(r1, Heap::kNullValueRootIndex);
    __ cmp(r0, r1);
    __ b(eq, &use_global_proxy);
    __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
    __ cmp(r0, r1);
    __ b(eq, &use_global_proxy);

    // Check if the receiver is already a JavaScript object.
    // r0: receiver
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
    __ b(ge, &push_receiver);

    // Convert the receiver to a regular object.
    // r0: receiver
    __ bind(&call_to_object);
    __ push(r0);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
    __ b(&push_receiver);

    __ bind(&use_global_proxy);
    __ ldr(r0, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
    __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalProxyOffset));

    // Push the receiver.
    // r0: receiver
    __ bind(&push_receiver);
    __ push(r0);

    // Copy all arguments from the array to the stack.
    Generate_PushAppliedArguments(
        masm, kArgumentsOffset, kIndexOffset, kLimitOffset);

    // Call the function.
    Label call_proxy;
    ParameterCount actual(r0);
    __ ldr(r1, MemOperand(fp, kFunctionOffset));
    __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
    __ b(ne, &call_proxy);
    __ InvokeFunction(r1, actual, CALL_FUNCTION, NullCallWrapper());

    frame_scope.GenerateLeaveFrame();
    __ add(sp, sp, Operand(kStackSize * kPointerSize));
    __ Jump(lr);

    // Call the function proxy.
    __ bind(&call_proxy);
    __ push(r1);  // add function proxy as last argument
    __ add(r0, r0, Operand(1));
    __ mov(r2, Operand::Zero());
    __ GetBuiltinFunction(r1, Builtins::CALL_FUNCTION_PROXY);
    __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    // Tear down the internal frame and remove function, receiver and args.
  }
  __ add(sp, sp, Operand(kStackSize * kPointerSize));
  __ Jump(lr);
}

static void Generate_ConstructHelper(MacroAssembler* masm) {
  const int kFormalParameters = 3;
  const int kStackSize = kFormalParameters + 1;

  {
    FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL);
    const int kNewTargetOffset = kFPOnStackSize + kPCOnStackSize;
    const int kArgumentsOffset = kNewTargetOffset + kPointerSize;
    const int kFunctionOffset = kArgumentsOffset + kPointerSize;

    // If newTarget is not supplied, set it to the constructor.
    Label validate_arguments;
    __ ldr(r0, MemOperand(fp, kNewTargetOffset));
    __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
    __ b(ne, &validate_arguments);
    __ ldr(r0, MemOperand(fp, kFunctionOffset));
    __ str(r0, MemOperand(fp, kNewTargetOffset));

    // Validate arguments.
    __ bind(&validate_arguments);
    __ ldr(r0, MemOperand(fp, kFunctionOffset));  // get the function
    __ push(r0);
    __ ldr(r0, MemOperand(fp, kArgumentsOffset));  // get the args array
    __ push(r0);
    __ ldr(r0, MemOperand(fp, kNewTargetOffset));  // get the new.target
    __ push(r0);
    __ InvokeBuiltin(Builtins::REFLECT_CONSTRUCT_PREPARE, CALL_FUNCTION);

    Generate_CheckStackOverflow(masm, kFunctionOffset, r0, kArgcIsSmiTagged);

    // Push current limit and index.
    const int kIndexOffset =
        StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize);
    const int kLimitOffset =
        StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize);
    __ push(r0);  // limit
    __ mov(r1, Operand::Zero());  // initial index
    __ push(r1);
    // Push the constructor function as callee.
    __ ldr(r0, MemOperand(fp, kFunctionOffset));
    __ push(r0);

    // Copy all arguments from the array to the stack.
    Generate_PushAppliedArguments(
        masm, kArgumentsOffset, kIndexOffset, kLimitOffset);

    // Use undefined feedback vector.
    __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
    __ ldr(r1, MemOperand(fp, kFunctionOffset));
    __ ldr(r4, MemOperand(fp, kNewTargetOffset));

    // Call the function.
    CallConstructStub stub(masm->isolate(), SUPER_CONSTRUCTOR_CALL);
    __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);

    // Leave internal frame.
  }
  __ add(sp, sp, Operand(kStackSize * kPointerSize));
  __ Jump(lr);
}

void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  Generate_ApplyHelper(masm, false);
}


void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  Generate_ApplyHelper(masm, true);
}


void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  Generate_ConstructHelper(masm);
}

static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
                                      Label* stack_overflow) {
  // ----------- S t a t e -------------
  //  -- r0 : actual number of arguments
  //  -- r1 : function (passed through to callee)
  //  -- r2 : expected number of arguments
  // -----------------------------------
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
  // Make r5 the space we have left. The stack might already be overflowed
  // here which will cause r5 to become negative.
  __ sub(r5, sp, r5);
  // Check if the arguments will overflow the stack.
  __ cmp(r5, Operand(r2, LSL, kPointerSizeLog2));
  __ b(le, stack_overflow);  // Signed comparison.
}

static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ SmiTag(r0);
  __ mov(r4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ stm(db_w, sp, r0.bit() | r1.bit() | r4.bit() |
                       (FLAG_enable_embedded_constant_pool ? pp.bit() : 0) |
                       fp.bit() | lr.bit());
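  // (stm stores lower-numbered registers at lower addresses, so from the new
  // stack top upwards the frame reads: smi-tagged argc, function, adaptor
  // marker, optional pp, saved fp, saved lr.)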
  __ add(fp, sp,
         Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
}

static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then tear down the parameters.
  __ ldr(r1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                              kPointerSize)));

  __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR);
  __ add(sp, sp, Operand::PointerOffsetFromSmiKey(r1));
  __ add(sp, sp, Operand(kPointerSize));  // adjust for receiver
}

void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : actual number of arguments
  //  -- r1 : function (passed through to callee)
  //  -- r2 : expected number of arguments
  // -----------------------------------

  Label stack_overflow;
  ArgumentAdaptorStackCheck(masm, &stack_overflow);
  Label invoke, dont_adapt_arguments;

  Label enough, too_few;
  __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
  __ cmp(r0, r2);
  __ b(lt, &too_few);
  __ cmp(r2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ b(eq, &dont_adapt_arguments);

  {  // Enough parameters: actual >= expected
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into r0 and copy end address into r2.
    // r0: actual number of arguments as a smi
    // r1: function
    // r2: expected number of arguments
    // r3: code entry to call
    __ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0));
    // adjust for return address and receiver
    __ add(r0, r0, Operand(2 * kPointerSize));
    __ sub(r2, r0, Operand(r2, LSL, kPointerSizeLog2));
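    // (The copy loop below walks r0 down to r2 inclusive, transferring
    // expected + 1 slots: the receiver plus the expected arguments.)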
    // Copy the arguments (including the receiver) to the new stack frame.
    // r0: copy start address
    // r1: function
    // r2: copy end address
    // r3: code entry to call

    Label copy;
    __ bind(&copy);
    __ ldr(ip, MemOperand(r0, 0));
    __ push(ip);
    __ cmp(r0, r2);  // Compare before moving to next argument.
    __ sub(r0, r0, Operand(kPointerSize));
    __ b(ne, &copy);

    __ b(&invoke);
  }

  {  // Too few parameters: Actual < expected
    __ bind(&too_few);

    // If the function is strong we need to throw an error.
    Label no_strong_error;
    __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
    __ ldr(r5, FieldMemOperand(r4, SharedFunctionInfo::kCompilerHintsOffset));
    __ tst(r5, Operand(1 << (SharedFunctionInfo::kStrongModeFunction +
                             kSmiTagSize)));
    __ b(eq, &no_strong_error);

    // What we really care about is the required number of arguments.
    __ ldr(r4, FieldMemOperand(r4, SharedFunctionInfo::kLengthOffset));
    __ cmp(r0, Operand::SmiUntag(r4));
    __ b(ge, &no_strong_error);

    {
      FrameScope frame(masm, StackFrame::MANUAL);
      EnterArgumentsAdaptorFrame(masm);
      __ CallRuntime(Runtime::kThrowStrongModeTooFewArguments, 0);
    }

    __ bind(&no_strong_error);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into r0 and copy end address is fp.
    // r0: actual number of arguments as a smi
    // r1: function
    // r2: expected number of arguments
    // r3: code entry to call
    __ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0));

    // Copy the arguments (including the receiver) to the new stack frame.
    // r0: copy start address
    // r1: function
    // r2: expected number of arguments
    // r3: code entry to call
    Label copy;
    __ bind(&copy);
    // Adjust load for return address and receiver.
    __ ldr(ip, MemOperand(r0, 2 * kPointerSize));
    __ push(ip);
    __ cmp(r0, fp);  // Compare before moving to next argument.
    __ sub(r0, r0, Operand(kPointerSize));
    __ b(ne, &copy);

    // Fill the remaining expected arguments with undefined.
    // r1: function
    // r2: expected number of arguments
    // r3: code entry to call
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ sub(r2, fp, Operand(r2, LSL, kPointerSizeLog2));
    // Adjust for frame.
    __ sub(r2, r2, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                           2 * kPointerSize));

    Label fill;
    __ bind(&fill);
    __ push(ip);
    __ cmp(sp, r2);
    __ b(ne, &fill);
  }

  // Call the entry point.
  __ bind(&invoke);
  __ Call(r3);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Jump(lr);


  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ Jump(r3);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    EnterArgumentsAdaptorFrame(masm);
    __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
    __ bkpt(0);
  }
}

#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_ARM