1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
7 #include "src/codegen.h"
8 #include "src/debug/debug.h"
9 #include "src/deoptimizer.h"
10 #include "src/full-codegen/full-codegen.h"
11 #include "src/runtime/runtime.h"
17 #define __ ACCESS_MASM(masm)
20 void Builtins::Generate_Adaptor(MacroAssembler* masm,
22 BuiltinExtraArguments extra_args) {
23 // ----------- S t a t e -------------
24 // -- r0 : number of arguments excluding receiver
25 // -- r1 : called function (only guaranteed when
26 // extra_args requires it)
28 // -- sp[0] : last argument
30 // -- sp[4 * (argc - 1)] : first argument (argc == r0)
31 // -- sp[4 * argc] : receiver
32 // -----------------------------------
34 // Insert extra arguments.
35 int num_extra_args = 0;
36 if (extra_args == NEEDS_CALLED_FUNCTION) {
40 DCHECK(extra_args == NO_EXTRA_ARGUMENTS);
43 // JumpToExternalReference expects r0 to contain the number of arguments
44 // including the receiver and the extra arguments.
45 __ add(r0, r0, Operand(num_extra_args + 1));
46 __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
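  // Illustration: a builtin called with two JS arguments and
  // extra_args == NEEDS_CALLED_FUNCTION pushes the called function as one
  // extra argument, so r0 becomes 2 + 1 + 1 = 4 -- arguments plus the extra
  // argument plus the receiver, which is the count the C++ builtin expects.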
50 // Load the built-in InternalArray function from the current context.
51 static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
53 // Load the native context.
56 MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
58 FieldMemOperand(result, GlobalObject::kNativeContextOffset));
59 // Load the InternalArray function from the native context.
63 Context::INTERNAL_ARRAY_FUNCTION_INDEX)));
67 // Load the built-in Array function from the current context.
68 static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
69 // Load the native context.
72 MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
74 FieldMemOperand(result, GlobalObject::kNativeContextOffset));
75 // Load the Array function from the native context.
78 Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
82 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
83 // ----------- S t a t e -------------
84 // -- r0 : number of arguments
85 // -- lr : return address
86 // -- sp[...]: constructor arguments
87 // -----------------------------------
88 Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
90 // Get the InternalArray function.
91 GenerateLoadInternalArrayFunction(masm, r1);
93 if (FLAG_debug_code) {
94   // Initial map for the builtin InternalArray function should be a map.
95 __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
97 __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction);
98 __ CompareObjectType(r2, r3, r4, MAP_TYPE);
99 __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
102 // Run the native code for the InternalArray function called as a normal
105 InternalArrayConstructorStub stub(masm->isolate());
106 __ TailCallStub(&stub);
110 void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
111 // ----------- S t a t e -------------
112 // -- r0 : number of arguments
113 // -- lr : return address
114 // -- sp[...]: constructor arguments
115 // -----------------------------------
116 Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
118 // Get the Array function.
119 GenerateLoadArrayFunction(masm, r1);
121 if (FLAG_debug_code) {
122   // Initial map for the builtin Array function should be a map.
123 __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
125 __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
126 __ CompareObjectType(r2, r3, r4, MAP_TYPE);
127 __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
131 // Run the native code for the Array function called as a normal function.
133 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
134 ArrayConstructorStub stub(masm->isolate());
135 __ TailCallStub(&stub);
139 void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
140 // ----------- S t a t e -------------
141 // -- r0 : number of arguments
142 // -- r1 : constructor function
143 // -- lr : return address
144 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
145 // -- sp[argc * 4] : receiver
146 // -----------------------------------
147 Counters* counters = masm->isolate()->counters();
148 __ IncrementCounter(counters->string_ctor_calls(), 1, r2, r3);
150 Register function = r1;
151 if (FLAG_debug_code) {
152 __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, r2);
153 __ cmp(function, Operand(r2));
154 __ Assert(eq, kUnexpectedStringFunction);
157   // Load the first argument into r0 and get rid of the rest.
159 __ cmp(r0, Operand::Zero());
160 __ b(eq, &no_arguments);
161   // First arg = sp[(argc - 1) * 4].
162 __ sub(r0, r0, Operand(1));
163 __ ldr(r0, MemOperand(sp, r0, LSL, kPointerSizeLog2, PreIndex));
164   // sp now points to args[0]; drop args[0] and the receiver.
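  // Worked example (kPointerSize == 4): for argc == 3 the sub leaves r0 == 2,
  // so the PreIndex load reads args[0] at sp + 2 * 4 and writes that address
  // back into sp; only args[0] and the receiver then remain to be dropped.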
167 Register argument = r2;
168 Label not_cached, argument_is_string;
169 __ LookupNumberStringCache(r0, // Input.
175 __ IncrementCounter(counters->string_ctor_cached_number(), 1, r3, r4);
176 __ bind(&argument_is_string);
178 // ----------- S t a t e -------------
179 // -- r2 : argument converted to string
180 // -- r1 : constructor function
181 // -- lr : return address
182 // -----------------------------------
185 __ Allocate(JSValue::kSize,
192   // Initialising the String object.
194 __ LoadGlobalFunctionInitialMap(function, map, r4);
195 if (FLAG_debug_code) {
196 __ ldrb(r4, FieldMemOperand(map, Map::kInstanceSizeOffset));
197 __ cmp(r4, Operand(JSValue::kSize >> kPointerSizeLog2));
198 __ Assert(eq, kUnexpectedStringWrapperInstanceSize);
199 __ ldrb(r4, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
200 __ cmp(r4, Operand::Zero());
201 __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper);
203 __ str(map, FieldMemOperand(r0, HeapObject::kMapOffset));
205 __ LoadRoot(r3, Heap::kEmptyFixedArrayRootIndex);
206 __ str(r3, FieldMemOperand(r0, JSObject::kPropertiesOffset));
207 __ str(r3, FieldMemOperand(r0, JSObject::kElementsOffset));
209 __ str(argument, FieldMemOperand(r0, JSValue::kValueOffset));
211 // Ensure the object is fully initialized.
212 STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
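  // The four stores above (map, properties, elements, value) each write one
  // word, so with JSValue::kSize == 4 * kPointerSize the whole object is
  // initialized and the assert catches any silent growth of the layout.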
216 // The argument was not found in the number to string cache. Check
217 // if it's a string already before calling the conversion builtin.
218 Label convert_argument;
219   __ bind(&not_cached);
220 __ JumpIfSmi(r0, &convert_argument);
223 __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
224 __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceTypeOffset));
225 STATIC_ASSERT(kNotStringTag != 0);
226 __ tst(r3, Operand(kIsNotStringMask));
227 __ b(ne, &convert_argument);
228 __ mov(argument, r0);
229 __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4);
230 __ b(&argument_is_string);
232 // Invoke the conversion builtin and put the result into r2.
233 __ bind(&convert_argument);
234 __ push(function); // Preserve the function.
235 __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4);
237 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
238 ToStringStub stub(masm->isolate());
242 __ mov(argument, r0);
243 __ b(&argument_is_string);
245 // Load the empty string into r2, remove the receiver from the
246 // stack, and jump back to the case where the argument is a string.
247 __ bind(&no_arguments);
248 __ LoadRoot(argument, Heap::kempty_stringRootIndex);
250 __ b(&argument_is_string);
252 // At this point the argument is already a string. Call runtime to
253 // create a string wrapper.
254 __ bind(&gc_required);
255 __ IncrementCounter(counters->string_ctor_gc_required(), 1, r3, r4);
257 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
259 __ CallRuntime(Runtime::kNewStringWrapper, 1);
265 static void CallRuntimePassFunction(
266 MacroAssembler* masm, Runtime::FunctionId function_id) {
267 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
268 // Push a copy of the function onto the stack.
270 // Push function as parameter to the runtime call.
273 __ CallRuntime(function_id, 1);
279 static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
280 __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
281 __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCodeOffset));
282 __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
287 static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
288 __ add(r0, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
293 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
294 // Checking whether the queued function is ready for install is optional,
295 // since we come across interrupts and stack checks elsewhere. However,
296 // not checking may delay installing ready functions, and always checking
297 // would be quite expensive. A good compromise is to first check against
298 // stack limit as a cue for an interrupt signal.
300 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
301 __ cmp(sp, Operand(ip));
304 CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
305 GenerateTailCallToReturnedCode(masm);
308 GenerateTailCallToSharedCode(masm);
312 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
313 bool is_api_function,
314 bool create_memento) {
315 // ----------- S t a t e -------------
316 // -- r0 : number of arguments
317 // -- r1 : constructor function
318 // -- r2 : allocation site or undefined
319 // -- r3 : original constructor
320 // -- lr : return address
321 // -- sp[...]: constructor arguments
322 // -----------------------------------
324 // Should never create mementos for api functions.
325 DCHECK(!is_api_function || !create_memento);
327 Isolate* isolate = masm->isolate();
329 // Enter a construct frame.
331 FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);
333 // Preserve the incoming parameters on the stack.
334 __ AssertUndefinedOrAllocationSite(r2, r4);
341 // Try to allocate the object without transitioning into C code. If any of
342 // the preconditions is not met, the code bails out to the runtime call.
343 Label rt_call, allocated;
344 if (FLAG_inline_new) {
345 ExternalReference debug_step_in_fp =
346 ExternalReference::debug_step_in_fp_address(isolate);
347 __ mov(r2, Operand(debug_step_in_fp));
348 __ ldr(r2, MemOperand(r2));
352 // Fall back to runtime if the original constructor and function differ.
356 // Load the initial map and verify that it is in fact a map.
357 // r1: constructor function
358 __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
359 __ JumpIfSmi(r2, &rt_call);
360 __ CompareObjectType(r2, r5, r4, MAP_TYPE);
363 // Check that the constructor is not constructing a JSFunction (see
364       // comments in Runtime_NewObject in runtime.cc), in which case the
365 // initial map's instance type would be JS_FUNCTION_TYPE.
366 // r1: constructor function
368 __ CompareInstanceType(r2, r5, JS_FUNCTION_TYPE);
371 if (!is_api_function) {
373 MemOperand bit_field3 = FieldMemOperand(r2, Map::kBitField3Offset);
374 // Check if slack tracking is enabled.
375 __ ldr(r4, bit_field3);
376 __ DecodeField<Map::Counter>(r3, r4);
377 __ cmp(r3, Operand(Map::kSlackTrackingCounterEnd));
379 // Decrease generous allocation count.
380 __ sub(r4, r4, Operand(1 << Map::Counter::kShift));
381 __ str(r4, bit_field3);
382 __ cmp(r3, Operand(Map::kSlackTrackingCounterEnd));
387 __ Push(r2, r1); // r1 = constructor
388 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
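      // Slack tracking in a nutshell: while the counter in bit field 3 is
      // running, every construction decrements it; when it reaches the end
      // value the runtime finalizes the instance size so the initial map stops
      // reserving in-object property slots that were never used.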
396 // Now allocate the JSObject on the heap.
397 // r1: constructor function
399 Label rt_call_reload_new_target;
400 __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset));
401 if (create_memento) {
402 __ add(r3, r3, Operand(AllocationMemento::kSize / kPointerSize));
405 __ Allocate(r3, r4, r5, r6, &rt_call_reload_new_target, SIZE_IN_WORDS);
407       // Allocated the JSObject; now initialize the fields. The map is set
408       // to the initial map, and properties and elements are set to the
          // empty fixed array.
409 // r1: constructor function
411 // r3: object size (including memento if create_memento)
412 // r4: JSObject (not tagged)
413 __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
415 DCHECK_EQ(0 * kPointerSize, JSObject::kMapOffset);
416 __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
417 DCHECK_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
418 __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
419 DCHECK_EQ(2 * kPointerSize, JSObject::kElementsOffset);
420 __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
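      // The three PostIndex stores fill the map, properties and elements
      // slots and leave r5 at JSObject::kHeaderSize past the start, i.e. at
      // the first in-object property, ready for the filler loop below.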
422 // Fill all the in-object properties with the appropriate filler.
423 // r1: constructor function
425 // r3: object size (in words, including memento if create_memento)
426 // r4: JSObject (not tagged)
427 // r5: First in-object property of JSObject (not tagged)
428 DCHECK_EQ(3 * kPointerSize, JSObject::kHeaderSize);
429 __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
431 if (!is_api_function) {
432 Label no_inobject_slack_tracking;
434 // Check if slack tracking is enabled.
435 __ ldr(ip, FieldMemOperand(r2, Map::kBitField3Offset));
436 __ DecodeField<Map::Counter>(ip);
437 __ cmp(ip, Operand(Map::kSlackTrackingCounterEnd));
438 __ b(lt, &no_inobject_slack_tracking);
440 // Allocate object with a slack.
441 __ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset));
442 __ Ubfx(r0, r0, Map::kInObjectPropertiesOrConstructorFunctionIndexByte *
445 __ ldr(r2, FieldMemOperand(r2, Map::kInstanceAttributesOffset));
446 __ Ubfx(r2, r2, Map::kUnusedPropertyFieldsByte * kBitsPerByte,
448 __ sub(r0, r0, Operand(r2));
449 __ add(r0, r5, Operand(r0, LSL, kPointerSizeLog2));
450 // r0: offset of first field after pre-allocated fields
451 if (FLAG_debug_code) {
452 __ add(ip, r4, Operand(r3, LSL, kPointerSizeLog2)); // End of object.
454 __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields);
456 __ InitializeFieldsWithFiller(r5, r0, r6);
457 // To allow for truncation.
458 __ LoadRoot(r6, Heap::kOnePointerFillerMapRootIndex);
459 // Fill the remaining fields with one pointer filler map.
461 __ bind(&no_inobject_slack_tracking);
464 if (create_memento) {
465 __ sub(ip, r3, Operand(AllocationMemento::kSize / kPointerSize));
466 __ add(r0, r4, Operand(ip, LSL, kPointerSizeLog2)); // End of object.
467 __ InitializeFieldsWithFiller(r5, r0, r6);
469 // Fill in memento fields.
470 // r5: points to the allocated but uninitialized memento.
471 __ LoadRoot(r6, Heap::kAllocationMementoMapRootIndex);
472 DCHECK_EQ(0 * kPointerSize, AllocationMemento::kMapOffset);
473 __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
474 // Load the AllocationSite
475 __ ldr(r6, MemOperand(sp, 3 * kPointerSize));
476 __ AssertUndefinedOrAllocationSite(r6, r0);
477 DCHECK_EQ(1 * kPointerSize, AllocationMemento::kAllocationSiteOffset);
478 __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
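        // The memento is two words -- its map and the allocation site --
        // laid out immediately after the object's last in-object property,
        // as the DCHECKs above pin down.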
480 __ add(r0, r4, Operand(r3, LSL, kPointerSizeLog2)); // End of object.
481 __ InitializeFieldsWithFiller(r5, r0, r6);
484 // Add the object tag to make the JSObject real, so that we can continue
485 // and jump into the continuation code at any time from now on.
486 __ add(r4, r4, Operand(kHeapObjectTag));
488 // Continue with JSObject being successfully allocated
492 // Reload the original constructor and fall-through.
493 __ bind(&rt_call_reload_new_target);
494 __ ldr(r3, MemOperand(sp, 0 * kPointerSize));
497 // Allocate the new receiver object using the runtime call.
498 // r1: constructor function
499 // r3: original constructor
501 if (create_memento) {
502 // Get the cell or allocation site.
503 __ ldr(r2, MemOperand(sp, 3 * kPointerSize));
504 __ push(r2); // argument 1: allocation site
507 __ push(r1); // argument 2/1: constructor function
508 __ push(r3); // argument 3/2: original constructor
509 if (create_memento) {
510 __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 3);
512 __ CallRuntime(Runtime::kNewObject, 2);
516 // Runtime_NewObjectWithAllocationSite increments allocation count.
517 // Skip the increment.
518 Label count_incremented;
519 if (create_memento) {
520 __ jmp(&count_incremented);
523 // Receiver for constructor call allocated.
527 if (create_memento) {
528 __ ldr(r2, MemOperand(sp, 3 * kPointerSize));
529 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
531 __ b(eq, &count_incremented);
532 // r2 is an AllocationSite. We are creating a memento from it, so we
533 // need to increment the memento create count.
534 __ ldr(r3, FieldMemOperand(r2,
535 AllocationSite::kPretenureCreateCountOffset));
536 __ add(r3, r3, Operand(Smi::FromInt(1)));
537 __ str(r3, FieldMemOperand(r2,
538 AllocationSite::kPretenureCreateCountOffset));
539 __ bind(&count_incremented);
542 // Restore the parameters.
546 // Retrieve smi-tagged arguments count from the stack.
547 __ ldr(r0, MemOperand(sp));
550 // Push new.target onto the construct frame. This is stored just below the
551 // receiver on the stack.
556 // Set up pointer to last argument.
557 __ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset));
559 // Copy arguments and receiver to the expression stack.
560 // r0: number of arguments
561 // r1: constructor function
562 // r2: address of last argument (caller sp)
563 // r3: number of arguments (smi-tagged)
567 // sp[3]: number of arguments (smi-tagged)
572 __ ldr(ip, MemOperand(r2, r3, LSL, kPointerSizeLog2 - 1));
575 __ sub(r3, r3, Operand(2), SetCC);
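    // Note: r3 is a smi (value << 1 on 32-bit ARM), so scaling it by
    // kPointerSizeLog2 - 1 in the load above gives a byte offset, and
    // subtracting 2 per iteration decrements the untagged count by one; the
    // SetCC flags drive the copy loop's conditional branch.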
578 // Call the function.
579 // r0: number of arguments
580 // r1: constructor function
581 if (is_api_function) {
582 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
584 masm->isolate()->builtins()->HandleApiCallConstruct();
585 __ Call(code, RelocInfo::CODE_TARGET);
587 ParameterCount actual(r0);
588 __ InvokeFunction(r1, actual, CALL_FUNCTION, NullCallWrapper());
591 // Store offset of return address for deoptimizer.
592 if (!is_api_function) {
593 masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
596 // Restore context from the frame.
600 // sp[2]: number of arguments (smi-tagged)
601 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
603 // If the result is an object (in the ECMA sense), we should get rid
604 // of the receiver and use the result; see ECMA-262 section 13.2.2-7
606 Label use_receiver, exit;
608 // If the result is a smi, it is *not* an object in the ECMA sense.
612 // sp[2]: number of arguments (smi-tagged)
613 __ JumpIfSmi(r0, &use_receiver);
615 // If the type of the result (stored in its map) is less than
616 // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
617 __ CompareObjectType(r0, r1, r3, FIRST_SPEC_OBJECT_TYPE);
620 // Throw away the result of the constructor invocation and use the
621 // on-stack receiver as the result.
622 __ bind(&use_receiver);
623 __ ldr(r0, MemOperand(sp));
625     // Remove receiver from the stack, remove caller arguments, and return.
629 // sp[0]: receiver (newly allocated object)
630 // sp[1]: new.target (original constructor)
631 // sp[2]: number of arguments (smi-tagged)
632 __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
634 // Leave construct frame.
637 __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - 1));
638 __ add(sp, sp, Operand(kPointerSize));
639 __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r1, r2);
644 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
645 Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
649 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
650 Generate_JSConstructStubHelper(masm, true, false);
654 void Builtins::Generate_JSConstructStubForDerived(MacroAssembler* masm) {
655 // ----------- S t a t e -------------
656 // -- r0 : number of arguments
657 // -- r1 : constructor function
658 // -- r2 : allocation site or undefined
659 // -- r3 : original constructor
660 // -- lr : return address
661 // -- sp[...]: constructor arguments
662 // -----------------------------------
665 FrameScope frame_scope(masm, StackFrame::CONSTRUCT);
667 __ AssertUndefinedOrAllocationSite(r2, r4);
672 __ push(r4); // Smi-tagged arguments count.
677 // receiver is the hole.
678 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
681 // Set up pointer to last argument.
682 __ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset));
684 // Copy arguments and receiver to the expression stack.
685 // r0: number of arguments
686 // r1: constructor function
687 // r2: address of last argument (caller sp)
688 // r4: number of arguments (smi-tagged)
691 // sp[2]: number of arguments (smi-tagged)
695 __ ldr(ip, MemOperand(r2, r4, LSL, kPointerSizeLog2 - 1));
698 __ sub(r4, r4, Operand(2), SetCC);
703 ExternalReference debug_step_in_fp =
704 ExternalReference::debug_step_in_fp_address(masm->isolate());
705 __ mov(r2, Operand(debug_step_in_fp));
706 __ ldr(r2, MemOperand(r2));
708 __ b(eq, &skip_step_in);
713 __ CallRuntime(Runtime::kHandleStepInForDerivedConstructors, 1);
717 __ bind(&skip_step_in);
719 // Call the function.
720 // r0: number of arguments
721 // r1: constructor function
722 ParameterCount actual(r0);
723 __ InvokeFunction(r1, actual, CALL_FUNCTION, NullCallWrapper());
725 // Restore context from the frame.
727 // sp[0]: number of arguments (smi-tagged)
728 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
729 // Get arguments count, skipping over new.target.
730 __ ldr(r1, MemOperand(sp, kPointerSize));
732 // Leave construct frame.
735 __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - 1));
736 __ add(sp, sp, Operand(kPointerSize));
741 enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };
744 // Clobbers r2; preserves all other registers.
745 static void Generate_CheckStackOverflow(MacroAssembler* masm,
746 const int calleeOffset, Register argc,
747 IsTagged argc_is_tagged) {
748 // Check the stack for overflow. We are not trying to catch
749 // interruptions (e.g. debug break and preemption) here, so the "real stack
750 // limit" is checked.
752 __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
753 // Make r2 the space we have left. The stack might already be overflowed
754 // here which will cause r2 to become negative.
756 // Check if the arguments will overflow the stack.
757 if (argc_is_tagged == kArgcIsSmiTagged) {
758 __ cmp(r2, Operand::PointerOffsetFromSmiKey(argc));
760 DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
761 __ cmp(r2, Operand(argc, LSL, kPointerSizeLog2));
763 __ b(gt, &okay); // Signed comparison.
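  // In other words: r2 holds the space left above the real stack limit, and
  // the branch only takes the fast path if that space is strictly greater
  // than the bytes the arguments will occupy.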
765 // Out of stack space.
766 __ ldr(r1, MemOperand(fp, calleeOffset));
767 if (argc_is_tagged == kArgcIsUntaggedInt) {
771 __ InvokeBuiltin(Context::STACK_OVERFLOW_BUILTIN_INDEX, CALL_FUNCTION);
777 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
779 // Called from Generate_JS_Entry
785 // r5-r6, r8 (if !FLAG_enable_embedded_constant_pool) and cp may be clobbered
786 ProfileEntryHookStub::MaybeCallEntryHook(masm);
788 // Clear the context before we push it when entering the internal frame.
789 __ mov(cp, Operand::Zero());
791 // Enter an internal frame.
793 FrameScope scope(masm, StackFrame::INTERNAL);
795 // Set up the context from the function argument.
796 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
798 __ InitializeRootRegister();
800 // Push the function and the receiver onto the stack.
804 // Check if we have enough stack space to push all arguments.
805 // The function is the first thing that was pushed above after entering
806 // the internal frame.
807 const int kFunctionOffset =
808 InternalFrameConstants::kCodeOffset - kPointerSize;
810 Generate_CheckStackOverflow(masm, kFunctionOffset, r3, kArgcIsUntaggedInt);
812 // Copy arguments to the stack in a loop.
815 // r4: argv, i.e. points to first arg
817 __ add(r2, r4, Operand(r3, LSL, kPointerSizeLog2));
818 // r2 points past last arg.
821 __ ldr(r0, MemOperand(r4, kPointerSize, PostIndex)); // read next parameter
822 __ ldr(r0, MemOperand(r0)); // dereference handle
823 __ push(r0); // push parameter
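    // Each argv slot holds a handle (a pointer to an object slot), hence the
    // second ldr to dereference it before pushing the object itself.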
828 // Initialize all JavaScript callee-saved registers, since they will be seen
829 // by the garbage collector as part of handlers.
830 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
831 __ mov(r5, Operand(r4));
832 __ mov(r6, Operand(r4));
833 if (!FLAG_enable_embedded_constant_pool) {
834 __ mov(r8, Operand(r4));
836 if (kR9Available == 1) {
837 __ mov(r9, Operand(r4));
840 // Invoke the code and pass argc as r0.
841 __ mov(r0, Operand(r3));
843 // No type feedback cell is available
844 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
845 CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
848 ParameterCount actual(r0);
849 __ InvokeFunction(r1, actual, CALL_FUNCTION, NullCallWrapper());
851 // Exit the JS frame and remove the parameters (except function), and
853 // Respect ABI stack constraint.
861 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
862 Generate_JSEntryTrampolineHelper(masm, false);
866 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
867 Generate_JSEntryTrampolineHelper(masm, true);
871 // Generate code for entering a JS function with the interpreter.
872 // On entry to the function the receiver and arguments have been pushed on the
873 // stack left to right. The actual argument count matches the formal parameter
874 // count expected by the function.
876 // The live registers are:
877 // o r1: the JS function object being called.
879 // o pp: the caller's constant pool pointer (if enabled)
880 // o fp: the caller's frame pointer
881 // o sp: stack pointer
882 // o lr: return address
884 // The function builds a JS frame. Please see JavaScriptFrameConstants in
885 // frames-arm.h for its layout.
886 // TODO(rmcilroy): We will need to include the current bytecode pointer in
    // the frame.
888 void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
889 // Open a frame scope to indicate that there is a frame on the stack. The
890 // MANUAL indicates that the scope shouldn't actually generate code to set up
891 // the frame (that is done below).
892 FrameScope frame_scope(masm, StackFrame::MANUAL);
893 __ PushFixedFrame(r1);
894 __ add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
896 // Get the bytecode array from the function object and load the pointer to the
897   // first entry into kInterpreterBytecodeArrayRegister.
898 __ ldr(r0, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
899 __ ldr(kInterpreterBytecodeArrayRegister,
900 FieldMemOperand(r0, SharedFunctionInfo::kFunctionDataOffset));
902 if (FLAG_debug_code) {
903 // Check function data field is actually a BytecodeArray object.
904 __ SmiTst(kInterpreterBytecodeArrayRegister);
905 __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
906 __ CompareObjectType(kInterpreterBytecodeArrayRegister, r0, no_reg,
907 BYTECODE_ARRAY_TYPE);
908 __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
911 // Allocate the local and temporary register file on the stack.
913 // Load frame size from the BytecodeArray object.
914 __ ldr(r4, FieldMemOperand(kInterpreterBytecodeArrayRegister,
915 BytecodeArray::kFrameSizeOffset));
917 // Do a stack check to ensure we don't go over the limit.
919 __ sub(r9, sp, Operand(r4));
920 __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
921 __ cmp(r9, Operand(r2));
923 __ InvokeBuiltin(Context::STACK_OVERFLOW_BUILTIN_INDEX, CALL_FUNCTION);
926 // If ok, push undefined as the initial value for all register file entries.
929 __ LoadRoot(r9, Heap::kUndefinedValueRootIndex);
930 __ b(&loop_check, al);
931 __ bind(&loop_header);
932 // TODO(rmcilroy): Consider doing more than one push per loop iteration.
934 // Continue loop if not done.
935 __ bind(&loop_check);
936 __ sub(r4, r4, Operand(kPointerSize), SetCC);
937 __ b(&loop_header, ge);
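    // One undefined value is pushed per kPointerSize of frame size, so when
    // the counter in r4 underflows every local and temporary register slot
    // has been initialized to undefined.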
940 // TODO(rmcilroy): List of things not currently dealt with here but done in
941 // fullcodegen's prologue:
942 // - Support profiler (specifically profiling_counter).
943 // - Call ProfileEntryHookStub when isolate has a function_entry_hook.
944 // - Allow simulator stop operations if FLAG_stop_at is set.
945 // - Deal with sloppy mode functions which need to replace the
946 // receiver with the global proxy when called as functions (without an
947 // explicit receiver object).
948 // - Code aging of the BytecodeArray object.
949 // - Supporting FLAG_trace.
951 // The following items are also not done here, and will probably be done using
952 // explicit bytecodes instead:
953 // - Allocating a new local context if applicable.
954 // - Setting up a local binding to the this function, which is used in
955 // derived constructors with super calls.
956 // - Setting new.target if required.
957 // - Dealing with REST parameters (only if
958 // https://codereview.chromium.org/1235153006 doesn't land by then).
959 // - Dealing with argument objects.
961 // Perform stack guard check.
964 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
965 __ cmp(sp, Operand(ip));
967 __ CallRuntime(Runtime::kStackGuard, 0);
971 // Load accumulator, register file, bytecode offset, dispatch table into
973 __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
974 __ sub(kInterpreterRegisterFileRegister, fp,
975 Operand(kPointerSize + StandardFrameConstants::kFixedFrameSizeFromFp));
976 __ mov(kInterpreterBytecodeOffsetRegister,
977 Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
978 __ LoadRoot(kInterpreterDispatchTableRegister,
979 Heap::kInterpreterTableRootIndex);
980 __ add(kInterpreterDispatchTableRegister, kInterpreterDispatchTableRegister,
981 Operand(FixedArray::kHeaderSize - kHeapObjectTag));
983 // Dispatch to the first bytecode handler for the function.
984 __ ldrb(r1, MemOperand(kInterpreterBytecodeArrayRegister,
985 kInterpreterBytecodeOffsetRegister));
986   __ ldr(ip, MemOperand(kInterpreterDispatchTableRegister, r1, LSL,
                            kPointerSizeLog2));
988   // TODO(rmcilroy): Make dispatch table point to code entries to avoid
      // untagging
989 // and header removal.
990 __ add(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
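  // Dispatch: the first bytecode (r1) indexes the dispatch table, and the
  // untagging/header adjustment turns the handler Code object into the
  // address of its first instruction before jumping to it.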
995 void Builtins::Generate_InterpreterExitTrampoline(MacroAssembler* masm) {
996 // TODO(rmcilroy): List of things not currently dealt with here but done in
997 // fullcodegen's EmitReturnSequence.
998 // - Supporting FLAG_trace for Runtime::TraceExit.
999 // - Support profiler (specifically decrementing profiling_counter
1000 // appropriately and calling out to HandleInterrupts if necessary).
1002   // The return value is in the accumulator, which is already in r0.
1004 // Leave the frame (also dropping the register file).
1005 __ LeaveFrame(StackFrame::JAVA_SCRIPT);
1007 // Drop receiver + arguments and return.
1008 __ ldr(ip, FieldMemOperand(kInterpreterBytecodeArrayRegister,
1009 BytecodeArray::kParameterSizeOffset));
1010 __ add(sp, sp, ip, LeaveCC);
1015 void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
1016 CallRuntimePassFunction(masm, Runtime::kCompileLazy);
1017 GenerateTailCallToReturnedCode(masm);
1021 static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
1022 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
1023 // Push a copy of the function onto the stack.
1025 // Push function as parameter to the runtime call.
1027 // Whether to compile in a background thread.
1029 ip, concurrent ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
1032 __ CallRuntime(Runtime::kCompileOptimized, 2);
1033 // Restore receiver.
1038 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
1039 CallCompileOptimized(masm, false);
1040 GenerateTailCallToReturnedCode(masm);
1044 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
1045 CallCompileOptimized(masm, true);
1046 GenerateTailCallToReturnedCode(masm);
1050 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
1051 // For now, we are relying on the fact that make_code_young doesn't do any
1052 // garbage collection which allows us to save/restore the registers without
1053 // worrying about which of them contain pointers. We also don't build an
1054 // internal frame to make the code faster, since we shouldn't have to do stack
1055 // crawls in MakeCodeYoung. This seems a bit fragile.
1057 // The following registers must be saved and restored when calling through to
1059 // r0 - contains return address (beginning of patch sequence)
1061 FrameScope scope(masm, StackFrame::MANUAL);
1062 __ stm(db_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit());
1063 __ PrepareCallCFunction(2, 0, r2);
1064 __ mov(r1, Operand(ExternalReference::isolate_address(masm->isolate())));
1066 ExternalReference::get_make_code_young_function(masm->isolate()), 2);
1067 __ ldm(ia_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit());
1071 #define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \
1072 void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
1073 MacroAssembler* masm) { \
1074 GenerateMakeCodeYoungAgainCommon(masm); \
1076 void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \
1077 MacroAssembler* masm) { \
1078 GenerateMakeCodeYoungAgainCommon(masm); \
1080 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
1081 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
1084 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
1085 // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
1086 // that make_code_young doesn't do any garbage collection which allows us to
1087   // save/restore the registers without worrying about which of them
       // contain pointers.
1090 // The following registers must be saved and restored when calling through to
1092 // r0 - contains return address (beginning of patch sequence)
1094 FrameScope scope(masm, StackFrame::MANUAL);
1095 __ stm(db_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit());
1096 __ PrepareCallCFunction(2, 0, r2);
1097 __ mov(r1, Operand(ExternalReference::isolate_address(masm->isolate())));
1098 __ CallCFunction(ExternalReference::get_mark_code_as_executed_function(
1099 masm->isolate()), 2);
1100 __ ldm(ia_w, sp, r0.bit() | r1.bit() | fp.bit() | lr.bit());
1102 // Perform prologue operations usually performed by the young code stub.
1103 __ PushFixedFrame(r1);
1104 __ add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
1106 // Jump to point after the code-age stub.
1107 __ add(r0, r0, Operand(kNoCodeAgeSequenceLength));
1112 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
1113 GenerateMakeCodeYoungAgainCommon(masm);
1117 void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
1118 Generate_MarkCodeAsExecutedOnce(masm);
1122 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
1123 SaveFPRegsMode save_doubles) {
1125 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
1127 // Preserve registers across notification, this is important for compiled
1128 // stubs that tail call the runtime on deopts passing their parameters in
1130 __ stm(db_w, sp, kJSCallerSaved | kCalleeSaved);
1131 // Pass the function and deoptimization type to the runtime system.
1132 __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
1133 __ ldm(ia_w, sp, kJSCallerSaved | kCalleeSaved);
1136 __ add(sp, sp, Operand(kPointerSize)); // Ignore state
1137 __ mov(pc, lr); // Jump to miss handler
1141 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
1142 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
1146 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
1147 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
1151 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
1152 Deoptimizer::BailoutType type) {
1154 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
1155 // Pass the function and deoptimization type to the runtime system.
1156 __ mov(r0, Operand(Smi::FromInt(static_cast<int>(type))));
1158 __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
1161 // Get the full codegen state from the stack and untag it -> r6.
1162 __ ldr(r6, MemOperand(sp, 0 * kPointerSize));
1164 // Switch on the state.
1165 Label with_tos_register, unknown_state;
1166 __ cmp(r6, Operand(FullCodeGenerator::NO_REGISTERS));
1167 __ b(ne, &with_tos_register);
1168 __ add(sp, sp, Operand(1 * kPointerSize)); // Remove state.
1171 __ bind(&with_tos_register);
1172 __ ldr(r0, MemOperand(sp, 1 * kPointerSize));
1173 __ cmp(r6, Operand(FullCodeGenerator::TOS_REG));
1174 __ b(ne, &unknown_state);
1175 __ add(sp, sp, Operand(2 * kPointerSize)); // Remove state.
1178 __ bind(&unknown_state);
1179 __ stop("no cases left");
1183 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
1184 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
1188 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
1189 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
1193 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
1194 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
1198 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
1199 // Lookup the function in the JavaScript frame.
1200 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1202 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
1203 // Pass function as argument.
1205 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
1208 // If the code object is null, just return to the unoptimized code.
1210 __ cmp(r0, Operand(Smi::FromInt(0)));
1216 // Load deoptimization data from the code object.
1217 // <deopt_data> = <code>[#deoptimization_data_offset]
1218 __ ldr(r1, FieldMemOperand(r0, Code::kDeoptimizationDataOffset));
1220 { ConstantPoolUnavailableScope constant_pool_unavailable(masm);
1221 __ add(r0, r0, Operand(Code::kHeaderSize - kHeapObjectTag)); // Code start
1223 if (FLAG_enable_embedded_constant_pool) {
1224 __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r0);
1227 // Load the OSR entrypoint offset from the deoptimization data.
1228 // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
1229 __ ldr(r1, FieldMemOperand(r1, FixedArray::OffsetOfElementAt(
1230 DeoptimizationInputData::kOsrPcOffsetIndex)));
1232 // Compute the target address = code start + osr_offset
1233 __ add(lr, r0, Operand::SmiUntag(r1));
1235 // And "return" to the OSR entry point of the function.
1241 void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
1242   // We check the stack limit as an indicator that recompilation might be
       // done.
1244 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
1245 __ cmp(sp, Operand(ip));
1248 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
1249 __ CallRuntime(Runtime::kStackGuard, 0);
1251 __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
1252 RelocInfo::CODE_TARGET);
1260 void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
1261 // 1. Make sure we have at least one argument.
1262 // r0: actual number of arguments
1265 __ cmp(r0, Operand::Zero());
1267 __ PushRoot(Heap::kUndefinedValueRootIndex);
1268 __ add(r0, r0, Operand(1));
1272 // 2. Get the callable to call (passed as receiver) from the stack.
1273 // r0: actual number of arguments
1274 __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
1276 // 3. Shift arguments and return address one slot down on the stack
1277 // (overwriting the original receiver). Adjust argument count to make
1278 // the original first argument the new receiver.
1279 // r0: actual number of arguments
1283 // Calculate the copy start address (destination). Copy end address is sp.
1284 __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));
1287 __ ldr(ip, MemOperand(r2, -kPointerSize));
1288 __ str(ip, MemOperand(r2));
1289 __ sub(r2, r2, Operand(kPointerSize));
1292 // Adjust the actual number of arguments and remove the top element
1293 // (which is a copy of the last argument).
1294 __ sub(r0, r0, Operand(1));
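  // Illustration: for f.call(obj, a, b) the stack holds f as the receiver and
  // obj, a, b as arguments (r0 == 3); after the shift r1 holds f, obj has
  // become the receiver, a and b remain as arguments and r0 == 2.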
1298 // 4. Call the callable.
1299 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1303 static void Generate_PushAppliedArguments(MacroAssembler* masm,
1304 const int argumentsOffset,
1305 const int indexOffset,
1306 const int limitOffset) {
1308 Register receiver = LoadDescriptor::ReceiverRegister();
1309 Register key = LoadDescriptor::NameRegister();
1310 Register slot = LoadDescriptor::SlotRegister();
1311 Register vector = LoadWithVectorDescriptor::VectorRegister();
1313 __ ldr(key, MemOperand(fp, indexOffset));
1316 // Load the current argument from the arguments array.
1318 __ ldr(receiver, MemOperand(fp, argumentsOffset));
1320 // Use inline caching to speed up access to arguments.
1321 Code::Kind kinds[] = {Code::KEYED_LOAD_IC};
1322 FeedbackVectorSpec spec(0, 1, kinds);
1323 Handle<TypeFeedbackVector> feedback_vector =
1324 masm->isolate()->factory()->NewTypeFeedbackVector(&spec);
1325 int index = feedback_vector->GetIndex(FeedbackVectorICSlot(0));
1326 __ mov(slot, Operand(Smi::FromInt(index)));
1327 __ Move(vector, feedback_vector);
1329 KeyedLoadICStub(masm->isolate(), LoadICState(kNoExtraICState)).GetCode();
1330 __ Call(ic, RelocInfo::CODE_TARGET);
1332 // Push the nth argument.
1335 __ ldr(key, MemOperand(fp, indexOffset));
1336 __ add(key, key, Operand(1 << kSmiTagSize));
1337 __ str(key, MemOperand(fp, indexOffset));
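    // Adding 1 << kSmiTagSize bumps the smi-tagged index by exactly one, so
    // the loop visits the applied arguments one element at a time.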
1339 // Test if the copy loop has finished copying all the elements from the
1340 // arguments object.
1342 __ ldr(r1, MemOperand(fp, limitOffset));
1346 // On exit, the pushed arguments count is in r0, untagged
1352 // Used by FunctionApply and ReflectApply
1353 static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) {
1354 const int kFormalParameters = targetIsArgument ? 3 : 2;
1355 const int kStackSize = kFormalParameters + 1;
1358 FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL);
1359 const int kArgumentsOffset = kFPOnStackSize + kPCOnStackSize;
1360 const int kReceiverOffset = kArgumentsOffset + kPointerSize;
1361 const int kFunctionOffset = kReceiverOffset + kPointerSize;
1363 __ ldr(r0, MemOperand(fp, kFunctionOffset)); // get the function
1364 __ ldr(r1, MemOperand(fp, kArgumentsOffset)); // get the args array
1366 if (targetIsArgument) {
1367 __ InvokeBuiltin(Context::REFLECT_APPLY_PREPARE_BUILTIN_INDEX,
1370 __ InvokeBuiltin(Context::APPLY_PREPARE_BUILTIN_INDEX, CALL_FUNCTION);
1373 Generate_CheckStackOverflow(masm, kFunctionOffset, r0, kArgcIsSmiTagged);
1375 // Push current limit and index.
1376 const int kIndexOffset =
1377 StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize);
1378 const int kLimitOffset =
1379 StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize);
1380 __ mov(r1, Operand::Zero());
1381 __ ldr(r2, MemOperand(fp, kReceiverOffset));
1382 __ Push(r0, r1, r2); // limit, initial index and receiver.
1384 // Copy all arguments from the array to the stack.
1385 Generate_PushAppliedArguments(masm, kArgumentsOffset, kIndexOffset,
1388 // Call the callable.
1389 // TODO(bmeurer): This should be a tail call according to ES6.
1390 __ ldr(r1, MemOperand(fp, kFunctionOffset));
1391 __ Call(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1393 // Tear down the internal frame and remove function, receiver and args.
1395 __ add(sp, sp, Operand(kStackSize * kPointerSize));
1400 static void Generate_ConstructHelper(MacroAssembler* masm) {
1401 const int kFormalParameters = 3;
1402 const int kStackSize = kFormalParameters + 1;
1405 FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL);
1406 const int kNewTargetOffset = kFPOnStackSize + kPCOnStackSize;
1407 const int kArgumentsOffset = kNewTargetOffset + kPointerSize;
1408 const int kFunctionOffset = kArgumentsOffset + kPointerSize;
1410 // If newTarget is not supplied, set it to constructor
1411 Label validate_arguments;
1412 __ ldr(r0, MemOperand(fp, kNewTargetOffset));
1413 __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
1414 __ b(ne, &validate_arguments);
1415 __ ldr(r0, MemOperand(fp, kFunctionOffset));
1416 __ str(r0, MemOperand(fp, kNewTargetOffset));
1418 // Validate arguments
1419 __ bind(&validate_arguments);
1420 __ ldr(r0, MemOperand(fp, kFunctionOffset)); // get the function
1422 __ ldr(r0, MemOperand(fp, kArgumentsOffset)); // get the args array
1424 __ ldr(r0, MemOperand(fp, kNewTargetOffset)); // get the new.target
1426 __ InvokeBuiltin(Context::REFLECT_CONSTRUCT_PREPARE_BUILTIN_INDEX,
1429 Generate_CheckStackOverflow(masm, kFunctionOffset, r0, kArgcIsSmiTagged);
1431 // Push current limit and index.
1432 const int kIndexOffset =
1433 StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize);
1434 const int kLimitOffset =
1435 StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize);
1436 __ push(r0); // limit
1437 __ mov(r1, Operand::Zero()); // initial index
1439 // Push the constructor function as callee.
1440 __ ldr(r0, MemOperand(fp, kFunctionOffset));
1443 // Copy all arguments from the array to the stack.
1444 Generate_PushAppliedArguments(
1445 masm, kArgumentsOffset, kIndexOffset, kLimitOffset);
1447 // Use undefined feedback vector
1448 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
1449 __ ldr(r1, MemOperand(fp, kFunctionOffset));
1450 __ ldr(r4, MemOperand(fp, kNewTargetOffset));
1452 // Call the function.
1453 CallConstructStub stub(masm->isolate(), SUPER_CONSTRUCTOR_CALL);
1454 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
1456 // Leave internal frame.
1458 __ add(sp, sp, Operand(kStackSize * kPointerSize));
1463 void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
1464 Generate_ApplyHelper(masm, false);
1468 void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
1469 Generate_ApplyHelper(masm, true);
1473 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
1474 Generate_ConstructHelper(masm);
1478 static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
1479 Label* stack_overflow) {
1480 // ----------- S t a t e -------------
1481 // -- r0 : actual number of arguments
1482 // -- r1 : function (passed through to callee)
1483 // -- r2 : expected number of arguments
1484 // -----------------------------------
1485 // Check the stack for overflow. We are not trying to catch
1486 // interruptions (e.g. debug break and preemption) here, so the "real stack
1487 // limit" is checked.
1488 __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
1489 // Make r5 the space we have left. The stack might already be overflowed
1490 // here which will cause r5 to become negative.
1492 // Check if the arguments will overflow the stack.
1493 __ cmp(r5, Operand(r2, LSL, kPointerSizeLog2));
1494 __ b(le, stack_overflow); // Signed comparison.
1498 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
1500 __ mov(r4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
1501 __ stm(db_w, sp, r0.bit() | r1.bit() | r4.bit() |
1502 (FLAG_enable_embedded_constant_pool ? pp.bit() : 0) |
1503 fp.bit() | lr.bit());
1505 Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
1509 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1510 // ----------- S t a t e -------------
1511 // -- r0 : result being passed through
1512 // -----------------------------------
1513 // Get the number of arguments passed (as a smi), tear down the frame and
1514 // then tear down the parameters.
1515 __ ldr(r1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
1518 __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR);
1519 __ add(sp, sp, Operand::PointerOffsetFromSmiKey(r1));
1520 __ add(sp, sp, Operand(kPointerSize)); // adjust for receiver
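  // Operand::PointerOffsetFromSmiKey(r1) converts the smi-tagged argument
  // count into a byte offset, and the extra kPointerSize drops the receiver.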
1525 void Builtins::Generate_CallFunction(MacroAssembler* masm) {
1526 // ----------- S t a t e -------------
1527 // -- r0 : the number of arguments (not including the receiver)
1528 // -- r1 : the function to call (checked to be a JSFunction)
1529 // -----------------------------------
1531 Label convert, convert_global_proxy, convert_to_object, done_convert;
1532 __ AssertFunction(r1);
1533 // TODO(bmeurer): Throw a TypeError if function's [[FunctionKind]] internal
1534 // slot is "classConstructor".
1535 // Enter the context of the function; ToObject has to run in the function
1536 // context, and we also need to take the global proxy from the function
1537 // context in case of conversion.
1538 // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
1539 STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset ==
1540 SharedFunctionInfo::kStrictModeByteOffset);
1541 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
1542 __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
1543 // We need to convert the receiver for non-native sloppy mode functions.
1544 __ ldrb(r3, FieldMemOperand(r2, SharedFunctionInfo::kNativeByteOffset));
1545 __ tst(r3, Operand((1 << SharedFunctionInfo::kNativeBitWithinByte) |
1546 (1 << SharedFunctionInfo::kStrictModeBitWithinByte)));
1547 __ b(ne, &done_convert);
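  // Because the native and strict-mode bits share a byte (see the
  // STATIC_ASSERT above), a single ldrb plus tst covers both: receiver
  // conversion is only needed for functions that are neither native nor
  // strict.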
1549 __ ldr(r3, MemOperand(sp, r0, LSL, kPointerSizeLog2));
1551 // ----------- S t a t e -------------
1552 // -- r0 : the number of arguments (not including the receiver)
1553 // -- r1 : the function to call (checked to be a JSFunction)
1554 // -- r2 : the shared function info.
1555 // -- r3 : the receiver
1556 // -- cp : the function context.
1557 // -----------------------------------
1559 Label convert_receiver;
1560 __ JumpIfSmi(r3, &convert_to_object);
1561 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
1562 __ CompareObjectType(r3, r4, r4, FIRST_JS_RECEIVER_TYPE);
1563 __ b(hs, &done_convert);
1564 __ JumpIfRoot(r3, Heap::kUndefinedValueRootIndex, &convert_global_proxy);
1565 __ JumpIfNotRoot(r3, Heap::kNullValueRootIndex, &convert_to_object);
1566 __ bind(&convert_global_proxy);
1568 // Patch receiver to global proxy.
1569 __ LoadGlobalProxy(r3);
1571 __ b(&convert_receiver);
1572 __ bind(&convert_to_object);
1574 // Convert receiver using ToObject.
1575 // TODO(bmeurer): Inline the allocation here to avoid building the frame
1576 // in the fast case? (fall back to AllocateInNewSpace?)
1577 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
1581 ToObjectStub stub(masm->isolate());
1587 __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
1588 __ bind(&convert_receiver);
1589 __ str(r3, MemOperand(sp, r0, LSL, kPointerSizeLog2));
1591 __ bind(&done_convert);
1593 // ----------- S t a t e -------------
1594 // -- r0 : the number of arguments (not including the receiver)
1595 // -- r1 : the function to call (checked to be a JSFunction)
1596 // -- r2 : the shared function info.
1597 // -- cp : the function context.
1598 // -----------------------------------
1601 FieldMemOperand(r2, SharedFunctionInfo::kFormalParameterCountOffset));
1603 __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
1604 ParameterCount actual(r0);
1605 ParameterCount expected(r2);
1606 __ InvokeCode(r3, expected, actual, JUMP_FUNCTION, NullCallWrapper());
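  // InvokeCode compares the expected and actual argument counts and, on a
  // mismatch, routes the call through the ArgumentsAdaptorTrampoline
  // generated below.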
1611 void Builtins::Generate_Call(MacroAssembler* masm) {
1612 // ----------- S t a t e -------------
1613 // -- r0 : the number of arguments (not including the receiver)
1614 // -- r1 : the target to call (can be any Object).
1615 // -----------------------------------
1617 Label non_smi, non_function;
1618 __ JumpIfSmi(r1, &non_function);
1620 __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
1621 __ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET,
1623 __ cmp(r2, Operand(JS_FUNCTION_PROXY_TYPE));
1624 __ b(ne, &non_function);
1626 // 1. Call to function proxy.
1627 // TODO(neis): This doesn't match the ES6 spec for [[Call]] on proxies.
1628 __ ldr(r1, FieldMemOperand(r1, JSFunctionProxy::kCallTrapOffset));
1629 __ AssertNotSmi(r1);
1632 // 2. Call to something else, which might have a [[Call]] internal method (if
1633 // not we raise an exception).
1634 __ bind(&non_function);
1635 // TODO(bmeurer): I wonder why we prefer to have slow API calls? This could
1636 // be awesome instead; i.e. a trivial improvement would be to call into the
1637 // runtime and just deal with the API function there instead of returning a
1638 // delegate from a runtime call that just jumps back to the runtime once
1639 // called. Or, bonus points, call directly into the C API function here, as
1640 // we do in some Crankshaft fast cases.
1641 // Overwrite the original receiver with the (original) target.
1642 __ str(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
1644 // Determine the delegate for the target (if any).
1645 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
1648 __ CallRuntime(Runtime::kGetFunctionDelegate, 1);
1653 // The delegate is always a regular function.
1654 __ AssertFunction(r1);
1655 __ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET);
1659 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
1660 // ----------- S t a t e -------------
1661 // -- r0 : actual number of arguments
1662 // -- r1 : function (passed through to callee)
1663 // -- r2 : expected number of arguments
1664 // -----------------------------------
1666 Label stack_overflow;
1667 ArgumentAdaptorStackCheck(masm, &stack_overflow);
1668 Label invoke, dont_adapt_arguments;
1670 Label enough, too_few;
1671 __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
1674 __ cmp(r2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
1675 __ b(eq, &dont_adapt_arguments);
1677 { // Enough parameters: actual >= expected
1679 EnterArgumentsAdaptorFrame(masm);
1681 // Calculate copy start address into r0 and copy end address into r4.
1682 // r0: actual number of arguments as a smi
1684 // r2: expected number of arguments
1685 // r3: code entry to call
1686 __ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0));
1687 // adjust for return address and receiver
1688 __ add(r0, r0, Operand(2 * kPointerSize));
1689 __ sub(r4, r0, Operand(r2, LSL, kPointerSizeLog2));
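    // The copy loop below runs from r0 down to r4 inclusive, i.e. it
    // transfers the receiver plus exactly the expected number of arguments;
    // any actual arguments beyond the expected count are simply not copied.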
1691 // Copy the arguments (including the receiver) to the new stack frame.
1692 // r0: copy start address
1694 // r2: expected number of arguments
1695 // r3: code entry to call
1696 // r4: copy end address
1700 __ ldr(ip, MemOperand(r0, 0));
1702 __ cmp(r0, r4); // Compare before moving to next argument.
1703 __ sub(r0, r0, Operand(kPointerSize));
1709 { // Too few parameters: Actual < expected
1712 // If the function is strong we need to throw an error.
1713 Label no_strong_error;
1714 __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
1715 __ ldr(r5, FieldMemOperand(r4, SharedFunctionInfo::kCompilerHintsOffset));
1716 __ tst(r5, Operand(1 << (SharedFunctionInfo::kStrongModeFunction +
1718 __ b(eq, &no_strong_error);
1720 // What we really care about is the required number of arguments.
1721 __ ldr(r4, FieldMemOperand(r4, SharedFunctionInfo::kLengthOffset));
1722 __ cmp(r0, Operand::SmiUntag(r4));
1723 __ b(ge, &no_strong_error);
1726 FrameScope frame(masm, StackFrame::MANUAL);
1727 EnterArgumentsAdaptorFrame(masm);
1728 __ CallRuntime(Runtime::kThrowStrongModeTooFewArguments, 0);
1731 __ bind(&no_strong_error);
1732 EnterArgumentsAdaptorFrame(masm);
1734     // Calculate copy start address into r0; the copy end address is fp.
1735 // r0: actual number of arguments as a smi
1737 // r2: expected number of arguments
1738 // r3: code entry to call
1739 __ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0));
1741 // Copy the arguments (including the receiver) to the new stack frame.
1742 // r0: copy start address
1744 // r2: expected number of arguments
1745 // r3: code entry to call
1748 // Adjust load for return address and receiver.
1749 __ ldr(ip, MemOperand(r0, 2 * kPointerSize));
1751 __ cmp(r0, fp); // Compare before moving to next argument.
1752 __ sub(r0, r0, Operand(kPointerSize));
1755 // Fill the remaining expected arguments with undefined.
1757 // r2: expected number of arguments
1758 // r3: code entry to call
1759 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1760 __ sub(r4, fp, Operand(r2, LSL, kPointerSizeLog2));
1761 // Adjust for frame.
1762 __ sub(r4, r4, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
1772 // Call the entry point.
1775 // r0 : expected number of arguments
1776 // r1 : function (passed through to callee)
1779 // Store offset of return address for deoptimizer.
1780 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
1782 // Exit frame and return.
1783 LeaveArgumentsAdaptorFrame(masm);
1787 // -------------------------------------------
1789   // Don't adapt arguments.
1789 // -------------------------------------------
1790 __ bind(&dont_adapt_arguments);
1793 __ bind(&stack_overflow);
1795 FrameScope frame(masm, StackFrame::MANUAL);
1796 EnterArgumentsAdaptorFrame(masm);
1797 __ InvokeBuiltin(Context::STACK_OVERFLOW_BUILTIN_INDEX, CALL_FUNCTION);
1805 } // namespace internal
1808 #endif // V8_TARGET_ARCH_ARM