*
* v8::Locker is a scoped lock object. While it's
* active (i.e. between its construction and destruction) the current thread is
- * allowed to use the locked isolate. V8 guarantees that an isolate can be
- * locked by at most one thread at any time. In other words, the scope of a
- * v8::Locker is a critical section.
+ * allowed to use the locked isolate. V8 guarantees that an isolate can be locked
+ * by at most one thread at any time. In other words, the scope of a v8::Locker is
+ * a critical section.
*
* Sample usage:
* \code
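* // A minimal sketch, assuming the conventional pattern (the sample body is
* // elided in this excerpt):
* {
*   v8::Locker locker;
*   // Code using V8 goes here.
* } // Destructor releases the lock here.
* \endcode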
static void StopPreemption();
/**
- * Returns whether or not the locker for a given isolate, or default isolate
- * if NULL is given, is locked by the current thread.
+ * Returns whether or not the locker for a given isolate, or default isolate if NULL is given,
+ * is locked by the current thread.
*/
static bool IsLocked(Isolate* isolate = NULL);
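// A hedged usage sketch (assumed, not from the original header): IsLocked
// pairs naturally with a scoped Locker on the same isolate, e.g.
//
//   v8::Isolate* isolate = v8::Isolate::GetCurrent();
//   {
//     v8::Locker locker(isolate);
//     // v8::Locker::IsLocked(isolate) now returns true on this thread.
//   }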
__ bind(&convert_argument);
__ push(function); // Preserve the function.
__ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4);
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ push(r0);
- __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
- }
+ __ EnterInternalFrame();
+ __ push(r0);
+ __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
+ __ LeaveInternalFrame();
__ pop(function);
__ mov(argument, r0);
__ b(&argument_is_string);
// create a string wrapper.
__ bind(&gc_required);
__ IncrementCounter(counters->string_ctor_gc_required(), 1, r3, r4);
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ push(argument);
- __ CallRuntime(Runtime::kNewStringWrapper, 1);
- }
+ __ EnterInternalFrame();
+ __ push(argument);
+ __ CallRuntime(Runtime::kNewStringWrapper, 1);
+ __ LeaveInternalFrame();
__ Ret();
}
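// Schematic of the recurring change in this patch (illustrative sketch, not
// code from the tree): a scoped FrameScope, whose constructor and destructor
// emitted the frame setup and teardown, is replaced by explicit paired calls:
//
//   {                                               // old form
//     FrameScope scope(masm, StackFrame::INTERNAL);
//     __ CallRuntime(...);
//   }
//
//   __ EnterInternalFrame();                        // new form
//   __ CallRuntime(...);
//   __ LeaveInternalFrame();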
Isolate* isolate = masm->isolate();
// Enter a construct frame.
- {
- FrameScope scope(masm, StackFrame::CONSTRUCT);
-
- // Preserve the two incoming parameters on the stack.
- __ mov(r0, Operand(r0, LSL, kSmiTagSize));
- __ push(r0); // Smi-tagged arguments count.
- __ push(r1); // Constructor function.
-
- // Try to allocate the object without transitioning into C code. If any of
- // the preconditions is not met, the code bails out to the runtime call.
- Label rt_call, allocated;
- if (FLAG_inline_new) {
- Label undo_allocation;
+ __ EnterConstructFrame();
+
+ // Preserve the two incoming parameters on the stack.
+ __ mov(r0, Operand(r0, LSL, kSmiTagSize));
+ __ push(r0); // Smi-tagged arguments count.
+ __ push(r1); // Constructor function.
+
+ // Try to allocate the object without transitioning into C code. If any of the
+ // preconditions is not met, the code bails out to the runtime call.
+ Label rt_call, allocated;
+ if (FLAG_inline_new) {
+ Label undo_allocation;
#ifdef ENABLE_DEBUGGER_SUPPORT
- ExternalReference debug_step_in_fp =
- ExternalReference::debug_step_in_fp_address(isolate);
- __ mov(r2, Operand(debug_step_in_fp));
- __ ldr(r2, MemOperand(r2));
- __ tst(r2, r2);
- __ b(ne, &rt_call);
+ ExternalReference debug_step_in_fp =
+ ExternalReference::debug_step_in_fp_address(isolate);
+ __ mov(r2, Operand(debug_step_in_fp));
+ __ ldr(r2, MemOperand(r2));
+ __ tst(r2, r2);
+ __ b(ne, &rt_call);
#endif
- // Load the initial map and verify that it is in fact a map.
- // r1: constructor function
- __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
- __ tst(r2, Operand(kSmiTagMask));
- __ b(eq, &rt_call);
- __ CompareObjectType(r2, r3, r4, MAP_TYPE);
- __ b(ne, &rt_call);
-
- // Check that the constructor is not constructing a JSFunction (see
- // comments in Runtime_NewObject in runtime.cc). In which case the
- // initial map's instance type would be JS_FUNCTION_TYPE.
- // r1: constructor function
- // r2: initial map
- __ CompareInstanceType(r2, r3, JS_FUNCTION_TYPE);
- __ b(eq, &rt_call);
+ // Load the initial map and verify that it is in fact a map.
+ // r1: constructor function
+ __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
+ __ tst(r2, Operand(kSmiTagMask));
+ __ b(eq, &rt_call);
+ __ CompareObjectType(r2, r3, r4, MAP_TYPE);
+ __ b(ne, &rt_call);
- if (count_constructions) {
- Label allocate;
- // Decrease generous allocation count.
- __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
- MemOperand constructor_count =
- FieldMemOperand(r3, SharedFunctionInfo::kConstructionCountOffset);
- __ ldrb(r4, constructor_count);
- __ sub(r4, r4, Operand(1), SetCC);
- __ strb(r4, constructor_count);
- __ b(ne, &allocate);
-
- __ Push(r1, r2);
-
- __ push(r1); // constructor
- // The call will replace the stub, so the countdown is only done once.
- __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
-
- __ pop(r2);
- __ pop(r1);
-
- __ bind(&allocate);
- }
+ // Check that the constructor is not constructing a JSFunction (see comments
+ // in Runtime_NewObject in runtime.cc). In which case the initial map's
+ // instance type would be JS_FUNCTION_TYPE.
+ // r1: constructor function
+ // r2: initial map
+ __ CompareInstanceType(r2, r3, JS_FUNCTION_TYPE);
+ __ b(eq, &rt_call);
+
+ if (count_constructions) {
+ Label allocate;
+ // Decrease generous allocation count.
+ __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
+ MemOperand constructor_count =
+ FieldMemOperand(r3, SharedFunctionInfo::kConstructionCountOffset);
+ __ ldrb(r4, constructor_count);
+ __ sub(r4, r4, Operand(1), SetCC);
+ __ strb(r4, constructor_count);
+ __ b(ne, &allocate);
+
+ __ Push(r1, r2);
+
+ __ push(r1); // constructor
+ // The call will replace the stub, so the countdown is only done once.
+ __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
+
+ __ pop(r2);
+ __ pop(r1);
+
+ __ bind(&allocate);
+ }
- // Now allocate the JSObject on the heap.
- // r1: constructor function
- // r2: initial map
- __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset));
- __ AllocateInNewSpace(r3, r4, r5, r6, &rt_call, SIZE_IN_WORDS);
-
- // Allocated the JSObject, now initialize the fields. Map is set to
- // initial map and properties and elements are set to empty fixed array.
- // r1: constructor function
- // r2: initial map
- // r3: object size
- // r4: JSObject (not tagged)
- __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
- __ mov(r5, r4);
- ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
- __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
- ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
- __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
- ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset);
- __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
-
- // Fill all the in-object properties with the appropriate filler.
- // r1: constructor function
- // r2: initial map
- // r3: object size (in words)
- // r4: JSObject (not tagged)
- // r5: First in-object property of JSObject (not tagged)
- __ add(r6, r4, Operand(r3, LSL, kPointerSizeLog2)); // End of object.
- ASSERT_EQ(3 * kPointerSize, JSObject::kHeaderSize);
- { Label loop, entry;
- if (count_constructions) {
- // To allow for truncation.
- __ LoadRoot(r7, Heap::kOnePointerFillerMapRootIndex);
- } else {
- __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
- }
- __ b(&entry);
- __ bind(&loop);
- __ str(r7, MemOperand(r5, kPointerSize, PostIndex));
- __ bind(&entry);
- __ cmp(r5, r6);
- __ b(lt, &loop);
- }
+ // Now allocate the JSObject on the heap.
+ // r1: constructor function
+ // r2: initial map
+ __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset));
+ __ AllocateInNewSpace(r3, r4, r5, r6, &rt_call, SIZE_IN_WORDS);
- // Add the object tag to make the JSObject real, so that we can continue
- // and jump into the continuation code at any time from now on. Any
- // failures need to undo the allocation, so that the heap is in a
- // consistent state and verifiable.
- __ add(r4, r4, Operand(kHeapObjectTag));
-
- // Check if a non-empty properties array is needed. Continue with
- // allocated object if not fall through to runtime call if it is.
- // r1: constructor function
- // r4: JSObject
- // r5: start of next object (not tagged)
- __ ldrb(r3, FieldMemOperand(r2, Map::kUnusedPropertyFieldsOffset));
- // The field instance sizes contains both pre-allocated property fields
- // and in-object properties.
- __ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset));
- __ Ubfx(r6, r0, Map::kPreAllocatedPropertyFieldsByte * 8, 8);
- __ add(r3, r3, Operand(r6));
- __ Ubfx(r6, r0, Map::kInObjectPropertiesByte * 8, 8);
- __ sub(r3, r3, Operand(r6), SetCC);
-
- // Done if no extra properties are to be allocated.
- __ b(eq, &allocated);
- __ Assert(pl, "Property allocation count failed.");
-
- // Scale the number of elements by pointer size and add the header for
- // FixedArrays to the start of the next object calculation from above.
- // r1: constructor
- // r3: number of elements in properties array
- // r4: JSObject
- // r5: start of next object
- __ add(r0, r3, Operand(FixedArray::kHeaderSize / kPointerSize));
- __ AllocateInNewSpace(
- r0,
- r5,
- r6,
- r2,
- &undo_allocation,
- static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | SIZE_IN_WORDS));
-
- // Initialize the FixedArray.
- // r1: constructor
- // r3: number of elements in properties array
- // r4: JSObject
- // r5: FixedArray (not tagged)
- __ LoadRoot(r6, Heap::kFixedArrayMapRootIndex);
- __ mov(r2, r5);
- ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
- __ str(r6, MemOperand(r2, kPointerSize, PostIndex));
- ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
- __ mov(r0, Operand(r3, LSL, kSmiTagSize));
- __ str(r0, MemOperand(r2, kPointerSize, PostIndex));
-
- // Initialize the fields to undefined.
- // r1: constructor function
- // r2: First element of FixedArray (not tagged)
- // r3: number of elements in properties array
- // r4: JSObject
- // r5: FixedArray (not tagged)
- __ add(r6, r2, Operand(r3, LSL, kPointerSizeLog2)); // End of object.
- ASSERT_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
- { Label loop, entry;
- if (count_constructions) {
- __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
- } else if (FLAG_debug_code) {
- __ LoadRoot(r8, Heap::kUndefinedValueRootIndex);
- __ cmp(r7, r8);
- __ Assert(eq, "Undefined value not loaded.");
- }
- __ b(&entry);
- __ bind(&loop);
- __ str(r7, MemOperand(r2, kPointerSize, PostIndex));
- __ bind(&entry);
- __ cmp(r2, r6);
- __ b(lt, &loop);
+  // The JSObject is allocated; now initialize the fields. The map is set to
+  // the initial map, and properties and elements to the empty fixed array.
+ // r1: constructor function
+ // r2: initial map
+ // r3: object size
+ // r4: JSObject (not tagged)
+ __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
+ __ mov(r5, r4);
+ ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
+ __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
+ ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
+ __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
+ ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset);
+ __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
+
+ // Fill all the in-object properties with the appropriate filler.
+ // r1: constructor function
+ // r2: initial map
+ // r3: object size (in words)
+ // r4: JSObject (not tagged)
+ // r5: First in-object property of JSObject (not tagged)
+ __ add(r6, r4, Operand(r3, LSL, kPointerSizeLog2)); // End of object.
+ ASSERT_EQ(3 * kPointerSize, JSObject::kHeaderSize);
+ { Label loop, entry;
+ if (count_constructions) {
+ // To allow for truncation.
+ __ LoadRoot(r7, Heap::kOnePointerFillerMapRootIndex);
+ } else {
+ __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
}
-
- // Store the initialized FixedArray into the properties field of
- // the JSObject
- // r1: constructor function
- // r4: JSObject
- // r5: FixedArray (not tagged)
- __ add(r5, r5, Operand(kHeapObjectTag)); // Add the heap tag.
- __ str(r5, FieldMemOperand(r4, JSObject::kPropertiesOffset));
-
- // Continue with JSObject being successfully allocated
- // r1: constructor function
- // r4: JSObject
- __ jmp(&allocated);
-
- // Undo the setting of the new top so that the heap is verifiable. For
- // example, the map's unused properties potentially do not match the
- // allocated objects unused properties.
- // r4: JSObject (previous new top)
- __ bind(&undo_allocation);
- __ UndoAllocationInNewSpace(r4, r5);
+ __ b(&entry);
+ __ bind(&loop);
+ __ str(r7, MemOperand(r5, kPointerSize, PostIndex));
+ __ bind(&entry);
+ __ cmp(r5, r6);
+ __ b(lt, &loop);
}
- // Allocate the new receiver object using the runtime call.
- // r1: constructor function
- __ bind(&rt_call);
- __ push(r1); // argument for Runtime_NewObject
- __ CallRuntime(Runtime::kNewObject, 1);
- __ mov(r4, r0);
+ // Add the object tag to make the JSObject real, so that we can continue and
+ // jump into the continuation code at any time from now on. Any failures
+ // need to undo the allocation, so that the heap is in a consistent state
+ // and verifiable.
+ __ add(r4, r4, Operand(kHeapObjectTag));
- // Receiver for constructor call allocated.
+  // Check if a non-empty properties array is needed. Continue with the
+  // allocated object if not; fall through to the runtime call if it is.
+ // r1: constructor function
+ // r4: JSObject
+ // r5: start of next object (not tagged)
+ __ ldrb(r3, FieldMemOperand(r2, Map::kUnusedPropertyFieldsOffset));
+  // The instance sizes field of the map contains both the number of
+  // pre-allocated property fields and the number of in-object properties.
+ __ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset));
+ __ Ubfx(r6, r0, Map::kPreAllocatedPropertyFieldsByte * 8, 8);
+ __ add(r3, r3, Operand(r6));
+ __ Ubfx(r6, r0, Map::kInObjectPropertiesByte * 8, 8);
+ __ sub(r3, r3, Operand(r6), SetCC);
+
+ // Done if no extra properties are to be allocated.
+ __ b(eq, &allocated);
+ __ Assert(pl, "Property allocation count failed.");
+
+  // Compute the allocation size in words (the number of elements plus the
+  // FixedArray header) and allocate from the start-of-next-object address
+  // computed above.
+ // r1: constructor
+ // r3: number of elements in properties array
// r4: JSObject
- __ bind(&allocated);
- __ push(r4);
-
- // Push the function and the allocated receiver from the stack.
- // sp[0]: receiver (newly allocated object)
- // sp[1]: constructor function
- // sp[2]: number of arguments (smi-tagged)
- __ ldr(r1, MemOperand(sp, kPointerSize));
- __ push(r1); // Constructor function.
- __ push(r4); // Receiver.
-
- // Reload the number of arguments from the stack.
+ // r5: start of next object
+ __ add(r0, r3, Operand(FixedArray::kHeaderSize / kPointerSize));
+ __ AllocateInNewSpace(
+ r0,
+ r5,
+ r6,
+ r2,
+ &undo_allocation,
+ static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | SIZE_IN_WORDS));
+
+ // Initialize the FixedArray.
+ // r1: constructor
+ // r3: number of elements in properties array
+ // r4: JSObject
+ // r5: FixedArray (not tagged)
+ __ LoadRoot(r6, Heap::kFixedArrayMapRootIndex);
+ __ mov(r2, r5);
+ ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
+ __ str(r6, MemOperand(r2, kPointerSize, PostIndex));
+ ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
+ __ mov(r0, Operand(r3, LSL, kSmiTagSize));
+ __ str(r0, MemOperand(r2, kPointerSize, PostIndex));
+
+ // Initialize the fields to undefined.
// r1: constructor function
- // sp[0]: receiver
- // sp[1]: constructor function
- // sp[2]: receiver
- // sp[3]: constructor function
- // sp[4]: number of arguments (smi-tagged)
- __ ldr(r3, MemOperand(sp, 4 * kPointerSize));
-
- // Setup pointer to last argument.
- __ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset));
-
- // Setup number of arguments for function call below
- __ mov(r0, Operand(r3, LSR, kSmiTagSize));
-
- // Copy arguments and receiver to the expression stack.
- // r0: number of arguments
- // r2: address of last argument (caller sp)
+ // r2: First element of FixedArray (not tagged)
+ // r3: number of elements in properties array
+ // r4: JSObject
+ // r5: FixedArray (not tagged)
+ __ add(r6, r2, Operand(r3, LSL, kPointerSizeLog2)); // End of object.
+ ASSERT_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
+ { Label loop, entry;
+ if (count_constructions) {
+ __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
+ } else if (FLAG_debug_code) {
+ __ LoadRoot(r8, Heap::kUndefinedValueRootIndex);
+ __ cmp(r7, r8);
+ __ Assert(eq, "Undefined value not loaded.");
+ }
+ __ b(&entry);
+ __ bind(&loop);
+ __ str(r7, MemOperand(r2, kPointerSize, PostIndex));
+ __ bind(&entry);
+ __ cmp(r2, r6);
+ __ b(lt, &loop);
+ }
+
+ // Store the initialized FixedArray into the properties field of
+ // the JSObject
// r1: constructor function
- // r3: number of arguments (smi-tagged)
- // sp[0]: receiver
- // sp[1]: constructor function
- // sp[2]: receiver
- // sp[3]: constructor function
- // sp[4]: number of arguments (smi-tagged)
- Label loop, entry;
- __ b(&entry);
- __ bind(&loop);
- __ ldr(ip, MemOperand(r2, r3, LSL, kPointerSizeLog2 - 1));
- __ push(ip);
- __ bind(&entry);
- __ sub(r3, r3, Operand(2), SetCC);
- __ b(ge, &loop);
+ // r4: JSObject
+ // r5: FixedArray (not tagged)
+ __ add(r5, r5, Operand(kHeapObjectTag)); // Add the heap tag.
+ __ str(r5, FieldMemOperand(r4, JSObject::kPropertiesOffset));
- // Call the function.
- // r0: number of arguments
+ // Continue with JSObject being successfully allocated
// r1: constructor function
- if (is_api_function) {
- __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
- Handle<Code> code =
- masm->isolate()->builtins()->HandleApiCallConstruct();
- ParameterCount expected(0);
- __ InvokeCode(code, expected, expected,
- RelocInfo::CODE_TARGET, CALL_FUNCTION, CALL_AS_METHOD);
- } else {
- ParameterCount actual(r0);
- __ InvokeFunction(r1, actual, CALL_FUNCTION,
- NullCallWrapper(), CALL_AS_METHOD);
- }
+ // r4: JSObject
+ __ jmp(&allocated);
+
+ // Undo the setting of the new top so that the heap is verifiable. For
+ // example, the map's unused properties potentially do not match the
+  // allocated object's unused properties.
+ // r4: JSObject (previous new top)
+ __ bind(&undo_allocation);
+ __ UndoAllocationInNewSpace(r4, r5);
+ }
- // Pop the function from the stack.
- // sp[0]: constructor function
- // sp[2]: receiver
- // sp[3]: constructor function
- // sp[4]: number of arguments (smi-tagged)
- __ pop();
+ // Allocate the new receiver object using the runtime call.
+ // r1: constructor function
+ __ bind(&rt_call);
+ __ push(r1); // argument for Runtime_NewObject
+ __ CallRuntime(Runtime::kNewObject, 1);
+ __ mov(r4, r0);
- // Restore context from the frame.
- // r0: result
- // sp[0]: receiver
- // sp[1]: constructor function
- // sp[2]: number of arguments (smi-tagged)
- __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
-
- // If the result is an object (in the ECMA sense), we should get rid
- // of the receiver and use the result; see ECMA-262 section 13.2.2-7
- // on page 74.
- Label use_receiver, exit;
-
- // If the result is a smi, it is *not* an object in the ECMA sense.
- // r0: result
- // sp[0]: receiver (newly allocated object)
- // sp[1]: constructor function
- // sp[2]: number of arguments (smi-tagged)
- __ tst(r0, Operand(kSmiTagMask));
- __ b(eq, &use_receiver);
-
- // If the type of the result (stored in its map) is less than
- // FIRST_JS_OBJECT_TYPE, it is not an object in the ECMA sense.
- __ CompareObjectType(r0, r3, r3, FIRST_JS_OBJECT_TYPE);
- __ b(ge, &exit);
-
- // Throw away the result of the constructor invocation and use the
- // on-stack receiver as the result.
- __ bind(&use_receiver);
- __ ldr(r0, MemOperand(sp));
-
- // Remove receiver from the stack, remove caller arguments, and
- // return.
- __ bind(&exit);
- // r0: result
- // sp[0]: receiver (newly allocated object)
- // sp[1]: constructor function
- // sp[2]: number of arguments (smi-tagged)
- __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
-
- // Leave construct frame.
+ // Receiver for constructor call allocated.
+ // r4: JSObject
+ __ bind(&allocated);
+ __ push(r4);
+
+  // Push the function (reloaded from the stack) and the allocated receiver.
+ // sp[0]: receiver (newly allocated object)
+ // sp[1]: constructor function
+ // sp[2]: number of arguments (smi-tagged)
+ __ ldr(r1, MemOperand(sp, kPointerSize));
+ __ push(r1); // Constructor function.
+ __ push(r4); // Receiver.
+
+ // Reload the number of arguments from the stack.
+ // r1: constructor function
+ // sp[0]: receiver
+ // sp[1]: constructor function
+ // sp[2]: receiver
+ // sp[3]: constructor function
+ // sp[4]: number of arguments (smi-tagged)
+ __ ldr(r3, MemOperand(sp, 4 * kPointerSize));
+
+  // Set up the pointer to the last argument.
+ __ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset));
+
+  // Set up the number of arguments for the function call below.
+ __ mov(r0, Operand(r3, LSR, kSmiTagSize));
+
+ // Copy arguments and receiver to the expression stack.
+ // r0: number of arguments
+ // r2: address of last argument (caller sp)
+ // r1: constructor function
+ // r3: number of arguments (smi-tagged)
+ // sp[0]: receiver
+ // sp[1]: constructor function
+ // sp[2]: receiver
+ // sp[3]: constructor function
+ // sp[4]: number of arguments (smi-tagged)
+ Label loop, entry;
+ __ b(&entry);
+ __ bind(&loop);
+ __ ldr(ip, MemOperand(r2, r3, LSL, kPointerSizeLog2 - 1));
+ __ push(ip);
+ __ bind(&entry);
+ __ sub(r3, r3, Operand(2), SetCC);
+ __ b(ge, &loop);
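+
+  // Worked example of the smi-scaled addressing above: r3 holds a smi-tagged
+  // count, i.e. n << kSmiTagSize (n << 1 on 32-bit ARM), so the additional
+  // LSL by kPointerSizeLog2 - 1 == 1 yields n * 4, the byte offset of the
+  // n-th argument. For n == 3: smi value 6, shifted once -> 12 == 3 words.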
+
+ // Call the function.
+ // r0: number of arguments
+ // r1: constructor function
+ if (is_api_function) {
+ __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
+ Handle<Code> code =
+ masm->isolate()->builtins()->HandleApiCallConstruct();
+ ParameterCount expected(0);
+ __ InvokeCode(code, expected, expected,
+ RelocInfo::CODE_TARGET, CALL_FUNCTION, CALL_AS_METHOD);
+ } else {
+ ParameterCount actual(r0);
+ __ InvokeFunction(r1, actual, CALL_FUNCTION,
+ NullCallWrapper(), CALL_AS_METHOD);
}
+ // Pop the function from the stack.
+ // sp[0]: constructor function
+ // sp[2]: receiver
+ // sp[3]: constructor function
+ // sp[4]: number of arguments (smi-tagged)
+ __ pop();
+
+ // Restore context from the frame.
+ // r0: result
+ // sp[0]: receiver
+ // sp[1]: constructor function
+ // sp[2]: number of arguments (smi-tagged)
+ __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
+
+ // If the result is an object (in the ECMA sense), we should get rid
+ // of the receiver and use the result; see ECMA-262 section 13.2.2-7
+ // on page 74.
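+  // For example, a constructor that returns a number or a string leaves the
+  // freshly allocated receiver as the value of the 'new' expression, while a
+  // constructor that returns another object makes that object the value.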
+ Label use_receiver, exit;
+
+ // If the result is a smi, it is *not* an object in the ECMA sense.
+ // r0: result
+ // sp[0]: receiver (newly allocated object)
+ // sp[1]: constructor function
+ // sp[2]: number of arguments (smi-tagged)
+ __ tst(r0, Operand(kSmiTagMask));
+ __ b(eq, &use_receiver);
+
+ // If the type of the result (stored in its map) is less than
+ // FIRST_JS_OBJECT_TYPE, it is not an object in the ECMA sense.
+ __ CompareObjectType(r0, r3, r3, FIRST_JS_OBJECT_TYPE);
+ __ b(ge, &exit);
+
+ // Throw away the result of the constructor invocation and use the
+ // on-stack receiver as the result.
+ __ bind(&use_receiver);
+ __ ldr(r0, MemOperand(sp));
+
+ // Remove receiver from the stack, remove caller arguments, and
+ // return.
+ __ bind(&exit);
+ // r0: result
+ // sp[0]: receiver (newly allocated object)
+ // sp[1]: constructor function
+ // sp[2]: number of arguments (smi-tagged)
+ __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
+ __ LeaveConstructFrame();
__ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - 1));
__ add(sp, sp, Operand(kPointerSize));
__ IncrementCounter(isolate->counters()->constructed_objects(), 1, r1, r2);
// r4: argv
// r5-r7, cp may be clobbered
- // Clear the context before we push it when entering the internal frame.
+ // Clear the context before we push it when entering the JS frame.
__ mov(cp, Operand(0, RelocInfo::NONE));
// Enter an internal frame.
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
+ __ EnterInternalFrame();
- // Set up the context from the function argument.
- __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
+ // Set up the context from the function argument.
+ __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
- // Set up the roots register.
- ExternalReference roots_address =
- ExternalReference::roots_address(masm->isolate());
- __ mov(r10, Operand(roots_address));
+ // Set up the roots register.
+ ExternalReference roots_address =
+ ExternalReference::roots_address(masm->isolate());
+ __ mov(r10, Operand(roots_address));
- // Push the function and the receiver onto the stack.
- __ push(r1);
- __ push(r2);
+ // Push the function and the receiver onto the stack.
+ __ push(r1);
+ __ push(r2);
- // Copy arguments to the stack in a loop.
- // r1: function
- // r3: argc
- // r4: argv, i.e. points to first arg
- Label loop, entry;
- __ add(r2, r4, Operand(r3, LSL, kPointerSizeLog2));
- // r2 points past last arg.
- __ b(&entry);
- __ bind(&loop);
- __ ldr(r0, MemOperand(r4, kPointerSize, PostIndex)); // read next parameter
- __ ldr(r0, MemOperand(r0)); // dereference handle
- __ push(r0); // push parameter
- __ bind(&entry);
- __ cmp(r4, r2);
- __ b(ne, &loop);
-
- // Initialize all JavaScript callee-saved registers, since they will be seen
- // by the garbage collector as part of handlers.
- __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
- __ mov(r5, Operand(r4));
- __ mov(r6, Operand(r4));
- __ mov(r7, Operand(r4));
- if (kR9Available == 1) {
- __ mov(r9, Operand(r4));
- }
-
- // Invoke the code and pass argc as r0.
- __ mov(r0, Operand(r3));
- if (is_construct) {
- __ Call(masm->isolate()->builtins()->JSConstructCall(),
- RelocInfo::CODE_TARGET);
- } else {
- ParameterCount actual(r0);
- __ InvokeFunction(r1, actual, CALL_FUNCTION,
- NullCallWrapper(), CALL_AS_METHOD);
- }
+ // Copy arguments to the stack in a loop.
+ // r1: function
+ // r3: argc
+ // r4: argv, i.e. points to first arg
+ Label loop, entry;
+ __ add(r2, r4, Operand(r3, LSL, kPointerSizeLog2));
+ // r2 points past last arg.
+ __ b(&entry);
+ __ bind(&loop);
+ __ ldr(r0, MemOperand(r4, kPointerSize, PostIndex)); // read next parameter
+ __ ldr(r0, MemOperand(r0)); // dereference handle
+ __ push(r0); // push parameter
+ __ bind(&entry);
+ __ cmp(r4, r2);
+ __ b(ne, &loop);
+
+ // Initialize all JavaScript callee-saved registers, since they will be seen
+ // by the garbage collector as part of handlers.
+ __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
+ __ mov(r5, Operand(r4));
+ __ mov(r6, Operand(r4));
+ __ mov(r7, Operand(r4));
+ if (kR9Available == 1) {
+ __ mov(r9, Operand(r4));
+ }
- // Exit the JS frame and remove the parameters (except function), and
- // return.
- // Respect ABI stack constraint.
+ // Invoke the code and pass argc as r0.
+ __ mov(r0, Operand(r3));
+ if (is_construct) {
+ __ Call(masm->isolate()->builtins()->JSConstructCall(),
+ RelocInfo::CODE_TARGET);
+ } else {
+ ParameterCount actual(r0);
+ __ InvokeFunction(r1, actual, CALL_FUNCTION,
+ NullCallWrapper(), CALL_AS_METHOD);
}
+
+  // Exit the JS frame, remove the parameters (except the function), and return.
+ // Respect ABI stack constraint.
+ __ LeaveInternalFrame();
__ Jump(lr);
// r0: result
void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
// Enter an internal frame.
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
+ __ EnterInternalFrame();
- // Preserve the function.
- __ push(r1);
- // Push call kind information.
- __ push(r5);
+ // Preserve the function.
+ __ push(r1);
+ // Push call kind information.
+ __ push(r5);
- // Push the function on the stack as the argument to the runtime function.
- __ push(r1);
- __ CallRuntime(Runtime::kLazyCompile, 1);
- // Calculate the entry point.
- __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
+ // Push the function on the stack as the argument to the runtime function.
+ __ push(r1);
+ __ CallRuntime(Runtime::kLazyCompile, 1);
+ // Calculate the entry point.
+ __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
- // Restore call kind information.
- __ pop(r5);
- // Restore saved function.
- __ pop(r1);
+ // Restore call kind information.
+ __ pop(r5);
+ // Restore saved function.
+ __ pop(r1);
- // Tear down internal frame.
- }
+ // Tear down temporary frame.
+ __ LeaveInternalFrame();
// Do a tail-call of the compiled function.
__ Jump(r2);
void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
// Enter an internal frame.
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
+ __ EnterInternalFrame();
- // Preserve the function.
- __ push(r1);
- // Push call kind information.
- __ push(r5);
+ // Preserve the function.
+ __ push(r1);
+ // Push call kind information.
+ __ push(r5);
- // Push the function on the stack as the argument to the runtime function.
- __ push(r1);
- __ CallRuntime(Runtime::kLazyRecompile, 1);
- // Calculate the entry point.
- __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
+ // Push the function on the stack as the argument to the runtime function.
+ __ push(r1);
+ __ CallRuntime(Runtime::kLazyRecompile, 1);
+ // Calculate the entry point.
+ __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
- // Restore call kind information.
- __ pop(r5);
- // Restore saved function.
- __ pop(r1);
+ // Restore call kind information.
+ __ pop(r5);
+ // Restore saved function.
+ __ pop(r1);
- // Tear down internal frame.
- }
+ // Tear down temporary frame.
+ __ LeaveInternalFrame();
// Do a tail-call of the compiled function.
__ Jump(r2);
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
Deoptimizer::BailoutType type) {
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- // Pass the function and deoptimization type to the runtime system.
- __ mov(r0, Operand(Smi::FromInt(static_cast<int>(type))));
- __ push(r0);
- __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
- }
+ __ EnterInternalFrame();
+ // Pass the function and deoptimization type to the runtime system.
+ __ mov(r0, Operand(Smi::FromInt(static_cast<int>(type))));
+ __ push(r0);
+ __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
+ __ LeaveInternalFrame();
// Get the full codegen state from the stack and untag it -> r6.
__ ldr(r6, MemOperand(sp, 0 * kPointerSize));
// the registers without worrying about which of them contain
// pointers. This seems a bit fragile.
__ stm(db_w, sp, kJSCallerSaved | kCalleeSaved | lr.bit() | fp.bit());
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ CallRuntime(Runtime::kNotifyOSR, 0);
- }
+ __ EnterInternalFrame();
+ __ CallRuntime(Runtime::kNotifyOSR, 0);
+ __ LeaveInternalFrame();
__ ldm(ia_w, sp, kJSCallerSaved | kCalleeSaved | lr.bit() | fp.bit());
__ Ret();
}
// Lookup the function in the JavaScript frame and push it as an
// argument to the on-stack replacement function.
__ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ push(r0);
- __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
- }
+ __ EnterInternalFrame();
+ __ push(r0);
+ __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
+ __ LeaveInternalFrame();
// If the result was -1 it means that we couldn't optimize the
// function. Just return and continue in the unoptimized version.
__ b(ge, &shift_arguments);
__ bind(&convert_to_object);
+ __ EnterInternalFrame(); // In order to preserve argument count.
+ __ mov(r0, Operand(r0, LSL, kSmiTagSize)); // Smi-tagged.
+ __ push(r0);
- {
- // Enter an internal frame in order to preserve argument count.
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ mov(r0, Operand(r0, LSL, kSmiTagSize)); // Smi-tagged.
- __ push(r0);
-
- __ push(r2);
- __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
- __ mov(r2, r0);
-
- __ pop(r0);
- __ mov(r0, Operand(r0, ASR, kSmiTagSize));
-
- // Exit the internal frame.
- }
+ __ push(r2);
+ __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
+ __ mov(r2, r0);
+ __ pop(r0);
+ __ mov(r0, Operand(r0, ASR, kSmiTagSize));
+ __ LeaveInternalFrame();
// Restore the function to r1.
__ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
__ jmp(&patch_receiver);
const int kRecvOffset = 3 * kPointerSize;
const int kFunctionOffset = 4 * kPointerSize;
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
-
- __ ldr(r0, MemOperand(fp, kFunctionOffset)); // get the function
- __ push(r0);
- __ ldr(r0, MemOperand(fp, kArgsOffset)); // get the args array
- __ push(r0);
- __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
-
- // Check the stack for overflow. We are not trying need to catch
- // interruptions (e.g. debug break and preemption) here, so the "real stack
- // limit" is checked.
- Label okay;
- __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
- // Make r2 the space we have left. The stack might already be overflowed
- // here which will cause r2 to become negative.
- __ sub(r2, sp, r2);
- // Check if the arguments will overflow the stack.
- __ cmp(r2, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
- __ b(gt, &okay); // Signed comparison.
-
- // Out of stack space.
- __ ldr(r1, MemOperand(fp, kFunctionOffset));
- __ push(r1);
- __ push(r0);
- __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
- // End of stack check.
-
- // Push current limit and index.
- __ bind(&okay);
- __ push(r0); // limit
- __ mov(r1, Operand(0, RelocInfo::NONE)); // initial index
- __ push(r1);
-
- // Change context eagerly to get the right global object if necessary.
- __ ldr(r0, MemOperand(fp, kFunctionOffset));
- __ ldr(cp, FieldMemOperand(r0, JSFunction::kContextOffset));
- // Load the shared function info while the function is still in r0.
- __ ldr(r1, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
-
- // Compute the receiver.
- Label call_to_object, use_global_receiver, push_receiver;
- __ ldr(r0, MemOperand(fp, kRecvOffset));
+ __ EnterInternalFrame();
- // Do not transform the receiver for strict mode functions.
- __ ldr(r2, FieldMemOperand(r1, SharedFunctionInfo::kCompilerHintsOffset));
- __ tst(r2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
- kSmiTagSize)));
- __ b(ne, &push_receiver);
-
- // Do not transform the receiver for strict mode functions.
- __ tst(r2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
- __ b(ne, &push_receiver);
-
- // Compute the receiver in non-strict mode.
- __ tst(r0, Operand(kSmiTagMask));
- __ b(eq, &call_to_object);
- __ LoadRoot(r1, Heap::kNullValueRootIndex);
- __ cmp(r0, r1);
- __ b(eq, &use_global_receiver);
- __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
- __ cmp(r0, r1);
- __ b(eq, &use_global_receiver);
-
- // Check if the receiver is already a JavaScript object.
- // r0: receiver
- STATIC_ASSERT(LAST_JS_OBJECT_TYPE + 1 == LAST_TYPE);
- STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
- __ CompareObjectType(r0, r1, r1, FIRST_JS_OBJECT_TYPE);
- __ b(ge, &push_receiver);
-
- // Convert the receiver to a regular object.
- // r0: receiver
- __ bind(&call_to_object);
- __ push(r0);
- __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
- __ b(&push_receiver);
-
- // Use the current global receiver object as the receiver.
- __ bind(&use_global_receiver);
- const int kGlobalOffset =
- Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
- __ ldr(r0, FieldMemOperand(cp, kGlobalOffset));
- __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalContextOffset));
- __ ldr(r0, FieldMemOperand(r0, kGlobalOffset));
- __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));
-
- // Push the receiver.
- // r0: receiver
- __ bind(&push_receiver);
- __ push(r0);
-
- // Copy all arguments from the array to the stack.
- Label entry, loop;
- __ ldr(r0, MemOperand(fp, kIndexOffset));
- __ b(&entry);
-
- // Load the current argument from the arguments array and push it to the
- // stack.
- // r0: current argument index
- __ bind(&loop);
- __ ldr(r1, MemOperand(fp, kArgsOffset));
- __ push(r1);
- __ push(r0);
-
- // Call the runtime to access the property in the arguments array.
- __ CallRuntime(Runtime::kGetProperty, 2);
- __ push(r0);
+ __ ldr(r0, MemOperand(fp, kFunctionOffset)); // get the function
+ __ push(r0);
+ __ ldr(r0, MemOperand(fp, kArgsOffset)); // get the args array
+ __ push(r0);
+ __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
+
+  // Check the stack for overflow. We are not trying to catch
+ // interruptions (e.g. debug break and preemption) here, so the "real stack
+ // limit" is checked.
+ Label okay;
+ __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
+  // Make r2 the space we have left. The stack might already have overflowed
+  // here, which will cause r2 to become negative.
+ __ sub(r2, sp, r2);
+ // Check if the arguments will overflow the stack.
+ __ cmp(r2, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ b(gt, &okay); // Signed comparison.
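+
+  // Worked example (assuming 32-bit ARM): kPointerSizeLog2 - kSmiTagSize == 1,
+  // so the smi-tagged count n << 1 shifted once more gives n * 4 bytes of
+  // required stack; for 3 arguments that is 12 bytes. The signed comparison
+  // also fails correctly when the stack already overflowed and r2 is negative.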
+
+ // Out of stack space.
+ __ ldr(r1, MemOperand(fp, kFunctionOffset));
+ __ push(r1);
+ __ push(r0);
+ __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
+ // End of stack check.
+
+ // Push current limit and index.
+ __ bind(&okay);
+ __ push(r0); // limit
+ __ mov(r1, Operand(0, RelocInfo::NONE)); // initial index
+ __ push(r1);
+
+ // Change context eagerly to get the right global object if necessary.
+ __ ldr(r0, MemOperand(fp, kFunctionOffset));
+ __ ldr(cp, FieldMemOperand(r0, JSFunction::kContextOffset));
+ // Load the shared function info while the function is still in r0.
+ __ ldr(r1, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
+
+ // Compute the receiver.
+ Label call_to_object, use_global_receiver, push_receiver;
+ __ ldr(r0, MemOperand(fp, kRecvOffset));
+
+ // Do not transform the receiver for strict mode functions.
+ __ ldr(r2, FieldMemOperand(r1, SharedFunctionInfo::kCompilerHintsOffset));
+ __ tst(r2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
+ kSmiTagSize)));
+ __ b(ne, &push_receiver);
+
+  // Do not transform the receiver for natives (compiler hints already in r2).
+ __ tst(r2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
+ __ b(ne, &push_receiver);
+
+ // Compute the receiver in non-strict mode.
+ __ tst(r0, Operand(kSmiTagMask));
+ __ b(eq, &call_to_object);
+ __ LoadRoot(r1, Heap::kNullValueRootIndex);
+ __ cmp(r0, r1);
+ __ b(eq, &use_global_receiver);
+ __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
+ __ cmp(r0, r1);
+ __ b(eq, &use_global_receiver);
+
+ // Check if the receiver is already a JavaScript object.
+ // r0: receiver
+ STATIC_ASSERT(LAST_JS_OBJECT_TYPE + 1 == LAST_TYPE);
+ STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
+ __ CompareObjectType(r0, r1, r1, FIRST_JS_OBJECT_TYPE);
+ __ b(ge, &push_receiver);
+
+ // Convert the receiver to a regular object.
+ // r0: receiver
+ __ bind(&call_to_object);
+ __ push(r0);
+ __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
+ __ b(&push_receiver);
+
+ // Use the current global receiver object as the receiver.
+ __ bind(&use_global_receiver);
+ const int kGlobalOffset =
+ Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
+ __ ldr(r0, FieldMemOperand(cp, kGlobalOffset));
+ __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalContextOffset));
+ __ ldr(r0, FieldMemOperand(r0, kGlobalOffset));
+ __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));
+
+ // Push the receiver.
+ // r0: receiver
+ __ bind(&push_receiver);
+ __ push(r0);
- // Use inline caching to access the arguments.
- __ ldr(r0, MemOperand(fp, kIndexOffset));
- __ add(r0, r0, Operand(1 << kSmiTagSize));
- __ str(r0, MemOperand(fp, kIndexOffset));
+ // Copy all arguments from the array to the stack.
+ Label entry, loop;
+ __ ldr(r0, MemOperand(fp, kIndexOffset));
+ __ b(&entry);
- // Test if the copy loop has finished copying all the elements from the
- // arguments object.
- __ bind(&entry);
- __ ldr(r1, MemOperand(fp, kLimitOffset));
- __ cmp(r0, r1);
- __ b(ne, &loop);
+ // Load the current argument from the arguments array and push it to the
+ // stack.
+ // r0: current argument index
+ __ bind(&loop);
+ __ ldr(r1, MemOperand(fp, kArgsOffset));
+ __ push(r1);
+ __ push(r0);
- // Invoke the function.
- ParameterCount actual(r0);
- __ mov(r0, Operand(r0, ASR, kSmiTagSize));
- __ ldr(r1, MemOperand(fp, kFunctionOffset));
- __ InvokeFunction(r1, actual, CALL_FUNCTION,
- NullCallWrapper(), CALL_AS_METHOD);
+ // Call the runtime to access the property in the arguments array.
+ __ CallRuntime(Runtime::kGetProperty, 2);
+ __ push(r0);
- // Tear down the internal frame and remove function, receiver and args.
- }
+  // Advance the smi-tagged index by one.
+ __ ldr(r0, MemOperand(fp, kIndexOffset));
+ __ add(r0, r0, Operand(1 << kSmiTagSize));
+ __ str(r0, MemOperand(fp, kIndexOffset));
+ // Test if the copy loop has finished copying all the elements from the
+ // arguments object.
+ __ bind(&entry);
+ __ ldr(r1, MemOperand(fp, kLimitOffset));
+ __ cmp(r0, r1);
+ __ b(ne, &loop);
+
+ // Invoke the function.
+ ParameterCount actual(r0);
+ __ mov(r0, Operand(r0, ASR, kSmiTagSize));
+ __ ldr(r1, MemOperand(fp, kFunctionOffset));
+ __ InvokeFunction(r1, actual, CALL_FUNCTION,
+ NullCallWrapper(), CALL_AS_METHOD);
+
+ // Tear down the internal frame and remove function, receiver and args.
+ __ LeaveInternalFrame();
__ add(sp, sp, Operand(3 * kPointerSize));
__ Jump(lr);
}
__ vmov(d0, r0, r1);
__ vmov(d1, r2, r3);
}
- {
- AllowExternalCallThatCantCauseGC scope(masm);
- __ CallCFunction(
- ExternalReference::double_fp_operation(op, masm->isolate()), 0, 2);
- }
+ // Call C routine that may not cause GC or other trouble.
+ __ CallCFunction(ExternalReference::double_fp_operation(op, masm->isolate()),
+ 0, 2);
// Store answer in the overwritable heap number. Double returned in
// registers r0 and r1 or in d0.
if (masm->use_eabi_hardfloat()) {
// This stub does not handle the inlined cases (Smis, Booleans, undefined).
// The stub returns zero for false, and a non-zero value for true.
void ToBooleanStub::Generate(MacroAssembler* masm) {
- // This stub overrides SometimesSetsUpAFrame() to return false. That means
- // we cannot call anything that could cause a GC from this stub.
// This stub uses VFP3 instructions.
CpuFeatures::Scope scope(VFP3);
__ jmp(&heapnumber_allocated);
__ bind(&slow_allocate_heapnumber);
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ push(r0);
- __ CallRuntime(Runtime::kNumberAlloc, 0);
- __ mov(r1, Operand(r0));
- __ pop(r0);
- }
+ __ EnterInternalFrame();
+ __ push(r0);
+ __ CallRuntime(Runtime::kNumberAlloc, 0);
+ __ mov(r1, Operand(r0));
+ __ pop(r0);
+ __ LeaveInternalFrame();
__ bind(&heapnumber_allocated);
__ ldr(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
__ jmp(&heapnumber_allocated);
__ bind(&slow_allocate_heapnumber);
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ push(r0); // Push the heap number, not the untagged int32.
- __ CallRuntime(Runtime::kNumberAlloc, 0);
- __ mov(r2, r0); // Move the new heap number into r2.
- // Get the heap number into r0, now that the new heap number is in r2.
- __ pop(r0);
- }
+ __ EnterInternalFrame();
+ __ push(r0); // Push the heap number, not the untagged int32.
+ __ CallRuntime(Runtime::kNumberAlloc, 0);
+ __ mov(r2, r0); // Move the new heap number into r2.
+ // Get the heap number into r0, now that the new heap number is in r2.
+ __ pop(r0);
+ __ LeaveInternalFrame();
// Convert the heap number in r0 to an untagged integer in r1.
// This can't go slow-case because it's the same number we already
__ LoadRoot(r5, Heap::kHeapNumberMapRootIndex);
__ AllocateHeapNumber(r0, scratch0, scratch1, r5, &skip_cache);
__ vstr(d2, FieldMemOperand(r0, HeapNumber::kValueOffset));
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ push(r0);
- __ CallRuntime(RuntimeFunction(), 1);
- }
+ __ EnterInternalFrame();
+ __ push(r0);
+ __ CallRuntime(RuntimeFunction(), 1);
+ __ LeaveInternalFrame();
__ vldr(d2, FieldMemOperand(r0, HeapNumber::kValueOffset));
__ Ret();
// We return the value in d2 without adding it to the cache, but
// we cause a scavenging GC so that future allocations will succeed.
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
-
- // Allocate an aligned object larger than a HeapNumber.
- ASSERT(4 * kPointerSize >= HeapNumber::kSize);
- __ mov(scratch0, Operand(4 * kPointerSize));
- __ push(scratch0);
- __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace);
- }
+ __ EnterInternalFrame();
+
+ // Allocate an aligned object larger than a HeapNumber.
+ ASSERT(4 * kPointerSize >= HeapNumber::kSize);
+ __ mov(scratch0, Operand(4 * kPointerSize));
+ __ push(scratch0);
+ __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace);
+ __ LeaveInternalFrame();
__ Ret();
}
}
__ push(lr);
__ PrepareCallCFunction(1, 1, scratch);
__ SetCallCDoubleArguments(double_base, exponent);
- {
- AllowExternalCallThatCantCauseGC scope(masm);
- __ CallCFunction(
- ExternalReference::power_double_int_function(masm->isolate()),
- 1, 1);
- __ pop(lr);
- __ GetCFunctionDoubleResult(double_result);
- }
+ __ CallCFunction(
+ ExternalReference::power_double_int_function(masm->isolate()),
+ 1, 1);
+ __ pop(lr);
+ __ GetCFunctionDoubleResult(double_result);
__ vstr(double_result,
FieldMemOperand(heapnumber, HeapNumber::kValueOffset));
__ mov(r0, heapnumber);
__ push(lr);
__ PrepareCallCFunction(0, 2, scratch);
__ SetCallCDoubleArguments(double_base, double_exponent);
- {
- AllowExternalCallThatCantCauseGC scope(masm);
- __ CallCFunction(
- ExternalReference::power_double_double_function(masm->isolate()),
- 0, 2);
- __ pop(lr);
- __ GetCFunctionDoubleResult(double_result);
- }
+ __ CallCFunction(
+ ExternalReference::power_double_double_function(masm->isolate()),
+ 0, 2);
+ __ pop(lr);
+ __ GetCFunctionDoubleResult(double_result);
__ vstr(double_result,
FieldMemOperand(heapnumber, HeapNumber::kValueOffset));
__ mov(r0, heapnumber);
__ sub(r6, r6, Operand(kPointerSize));
// Enter the exit frame that transitions from JavaScript to C++.
- FrameScope scope(masm, StackFrame::MANUAL);
__ EnterExitFrame(save_doubles_);
// Set up argc and the builtin function in callee-saved registers.
}
__ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
} else {
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ Push(r0, r1);
- __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
- }
+ __ EnterInternalFrame();
+ __ Push(r0, r1);
+ __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
+ __ LeaveInternalFrame();
__ cmp(r0, Operand(0));
__ LoadRoot(r0, Heap::kTrueValueRootIndex, eq);
__ LoadRoot(r0, Heap::kFalseValueRootIndex, ne);
// Call the runtime system in a fresh internal frame.
ExternalReference miss =
ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate());
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ Push(r1, r0);
- __ mov(ip, Operand(Smi::FromInt(op_)));
- __ push(ip);
- __ CallExternalReference(miss, 3);
- }
+ __ EnterInternalFrame();
+ __ Push(r1, r0);
+ __ mov(ip, Operand(Smi::FromInt(op_)));
+ __ push(ip);
+ __ CallExternalReference(miss, 3);
+ __ LeaveInternalFrame();
// Compute the entry point of the rewritten stub.
__ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
// Restore registers.
void StringDictionaryLookupStub::Generate(MacroAssembler* masm) {
- // This stub overrides SometimesSetsUpAFrame() to return false. That means
- // we cannot call anything that could cause a GC from this stub.
// Registers:
// result: StringDictionary to probe
// r1: key
void Generate(MacroAssembler* masm);
- virtual bool SometimesSetsUpAFrame() { return false; }
-
private:
Register tos_;
Major MajorKey() { return ToBoolean; }
Register r0,
Register r1);
- virtual bool SometimesSetsUpAFrame() { return false; }
-
private:
static const int kInlinedProbes = 4;
static const int kTotalProbes = 20;
}
#endif
- Major MajorKey() { return StringDictionaryLookup; }
+ Major MajorKey() { return StringDictionaryNegativeLookup; }
int MinorKey() {
return LookupModeBits::encode(mode_);
// Platform-specific RuntimeCallHelper functions.
void StubRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
- masm->EnterFrame(StackFrame::INTERNAL);
- ASSERT(!masm->has_frame());
- masm->set_has_frame(true);
+ masm->EnterInternalFrame();
}
void StubRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
- masm->LeaveFrame(StackFrame::INTERNAL);
- ASSERT(masm->has_frame());
- masm->set_has_frame(false);
+ masm->LeaveInternalFrame();
}
static void Generate_DebugBreakCallHelper(MacroAssembler* masm,
RegList object_regs,
RegList non_object_regs) {
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
-
- // Store the registers containing live values on the expression stack to
- // make sure that these are correctly updated during GC. Non object values
- // are stored as a smi causing it to be untouched by GC.
- ASSERT((object_regs & ~kJSCallerSaved) == 0);
- ASSERT((non_object_regs & ~kJSCallerSaved) == 0);
- ASSERT((object_regs & non_object_regs) == 0);
- if ((object_regs | non_object_regs) != 0) {
- for (int i = 0; i < kNumJSCallerSaved; i++) {
- int r = JSCallerSavedCode(i);
- Register reg = { r };
- if ((non_object_regs & (1 << r)) != 0) {
- if (FLAG_debug_code) {
- __ tst(reg, Operand(0xc0000000));
- __ Assert(eq, "Unable to encode value as smi");
- }
- __ mov(reg, Operand(reg, LSL, kSmiTagSize));
+ __ EnterInternalFrame();
+
+ // Store the registers containing live values on the expression stack to
+  // make sure that these are correctly updated during GC. Non-object values
+  // are stored as smis, so the GC leaves them untouched.
+ ASSERT((object_regs & ~kJSCallerSaved) == 0);
+ ASSERT((non_object_regs & ~kJSCallerSaved) == 0);
+ ASSERT((object_regs & non_object_regs) == 0);
+ if ((object_regs | non_object_regs) != 0) {
+ for (int i = 0; i < kNumJSCallerSaved; i++) {
+ int r = JSCallerSavedCode(i);
+ Register reg = { r };
+ if ((non_object_regs & (1 << r)) != 0) {
+ if (FLAG_debug_code) {
+ __ tst(reg, Operand(0xc0000000));
+ __ Assert(eq, "Unable to encode value as smi");
}
+ __ mov(reg, Operand(reg, LSL, kSmiTagSize));
}
- __ stm(db_w, sp, object_regs | non_object_regs);
}
+ __ stm(db_w, sp, object_regs | non_object_regs);
+ }
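+
+  // Worked example of the encoding above: the raw value 42 (0x2a) becomes
+  // 0x54 after the shift, which the GC reads as the smi 42 and leaves alone.
+  // The 0xc0000000 test ensures the top two bits are clear, so the shift
+  // loses nothing and the value is recovered by the LSR below.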
#ifdef DEBUG
- __ RecordComment("// Calling from debug break to runtime - come in - over");
+ __ RecordComment("// Calling from debug break to runtime - come in - over");
#endif
- __ mov(r0, Operand(0, RelocInfo::NONE)); // no arguments
- __ mov(r1, Operand(ExternalReference::debug_break(masm->isolate())));
-
- CEntryStub ceb(1);
- __ CallStub(&ceb);
-
- // Restore the register values from the expression stack.
- if ((object_regs | non_object_regs) != 0) {
- __ ldm(ia_w, sp, object_regs | non_object_regs);
- for (int i = 0; i < kNumJSCallerSaved; i++) {
- int r = JSCallerSavedCode(i);
- Register reg = { r };
- if ((non_object_regs & (1 << r)) != 0) {
- __ mov(reg, Operand(reg, LSR, kSmiTagSize));
- }
- if (FLAG_debug_code &&
- (((object_regs |non_object_regs) & (1 << r)) == 0)) {
- __ mov(reg, Operand(kDebugZapValue));
- }
+ __ mov(r0, Operand(0, RelocInfo::NONE)); // no arguments
+ __ mov(r1, Operand(ExternalReference::debug_break(masm->isolate())));
+
+ CEntryStub ceb(1);
+ __ CallStub(&ceb);
+
+ // Restore the register values from the expression stack.
+ if ((object_regs | non_object_regs) != 0) {
+ __ ldm(ia_w, sp, object_regs | non_object_regs);
+ for (int i = 0; i < kNumJSCallerSaved; i++) {
+ int r = JSCallerSavedCode(i);
+ Register reg = { r };
+ if ((non_object_regs & (1 << r)) != 0) {
+ __ mov(reg, Operand(reg, LSR, kSmiTagSize));
+ }
+ if (FLAG_debug_code &&
+        (((object_regs | non_object_regs) & (1 << r)) == 0)) {
+ __ mov(reg, Operand(kDebugZapValue));
}
}
-
- // Leave the internal frame.
}
+ __ LeaveInternalFrame();
+
// Now that the break point has been handled, resume normal execution by
// jumping to the target address intended by the caller, which was
// overwritten by the address of DebugBreakXXX.
__ mov(r5, Operand(ExternalReference::isolate_address()));
__ str(r5, MemOperand(sp, 1 * kPointerSize)); // Isolate.
// Call Deoptimizer::New().
- {
- AllowExternalCallThatCantCauseGC scope(masm());
- __ CallCFunction(ExternalReference::new_deoptimizer_function(isolate), 6);
- }
+ __ CallCFunction(ExternalReference::new_deoptimizer_function(isolate), 6);
// Preserve "deoptimizer" object in register r0 and get the input
// frame descriptor pointer to r1 (deoptimizer->input_);
// r0: deoptimizer object; r1: scratch.
__ PrepareCallCFunction(1, r1);
// Call Deoptimizer::ComputeOutputFrames().
- {
- AllowExternalCallThatCantCauseGC scope(masm());
- __ CallCFunction(
- ExternalReference::compute_output_frames_function(isolate), 1);
- }
+ __ CallCFunction(
+ ExternalReference::compute_output_frames_function(isolate), 1);
__ pop(r0); // Restore deoptimizer object (class Deoptimizer).
// Replace the current (input) frame with the output frames.
__ bind(&ok);
}
- // Open a frame scope to indicate that there is a frame on the stack. The
- // MANUAL indicates that the scope shouldn't actually generate code to set up
- // the frame (that is done below).
- FrameScope frame_scope(masm_, StackFrame::MANUAL);
-
int locals_count = scope()->num_stack_slots();
__ Push(lr, fp, cp, r1);
// Get the receiver of the function from the stack.
__ ldr(r3, MemOperand(sp, argc * kPointerSize));
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
+ __ EnterInternalFrame();
- // Push the receiver and the name of the function.
- __ Push(r3, r2);
+ // Push the receiver and the name of the function.
+ __ Push(r3, r2);
- // Call the entry.
- __ mov(r0, Operand(2));
- __ mov(r1, Operand(ExternalReference(IC_Utility(id), isolate)));
+ // Call the entry.
+ __ mov(r0, Operand(2));
+ __ mov(r1, Operand(ExternalReference(IC_Utility(id), isolate)));
- CEntryStub stub(1);
- __ CallStub(&stub);
+ CEntryStub stub(1);
+ __ CallStub(&stub);
- // Move result to r1 and leave the internal frame.
- __ mov(r1, Operand(r0));
- }
+ // Move result to r1 and leave the internal frame.
+ __ mov(r1, Operand(r0));
+ __ LeaveInternalFrame();
// Check if the receiver is a global object of some sort.
// This can happen only for a regular CallIC, not for a KeyedCallIC.
// This branch is taken when calling KeyedCallIC_Miss is neither required
// nor beneficial.
__ IncrementCounter(counters->keyed_call_generic_slow_load(), 1, r0, r3);
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ push(r2); // save the key
- __ Push(r1, r2); // pass the receiver and the key
- __ CallRuntime(Runtime::kKeyedGetProperty, 2);
- __ pop(r2); // restore the key
- }
+ __ EnterInternalFrame();
+ __ push(r2); // save the key
+ __ Push(r1, r2); // pass the receiver and the key
+ __ CallRuntime(Runtime::kKeyedGetProperty, 2);
+ __ pop(r2); // restore the key
+ __ LeaveInternalFrame();
__ mov(r1, r0);
__ jmp(&do_call);
status_ = GENERATING;
CpuFeatures::Scope scope1(VFP3);
CpuFeatures::Scope scope2(ARMv7);
-
- // Open a frame scope to indicate that there is a frame on the stack. The
- // NONE indicates that the scope shouldn't actually generate code to set up
- // the frame (that is done in GeneatePrologue).
- FrameScope frame_scope(masm_, StackFrame::NONE);
-
return GeneratePrologue() &&
GenerateBody() &&
GenerateDeferredCode() &&
MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
: Assembler(arg_isolate, buffer, size),
generating_stub_(false),
- allow_stub_calls_(true),
- has_frame_(false) {
+ allow_stub_calls_(true) {
if (isolate() != NULL) {
code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
isolate());
InvokeFlag flag,
const CallWrapper& call_wrapper,
CallKind call_kind) {
- // You can't call a function without a valid frame.
- ASSERT(flag == JUMP_FUNCTION || has_frame());
-
Label done;
InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag,
RelocInfo::Mode rmode,
InvokeFlag flag,
CallKind call_kind) {
- // You can't call a function without a valid frame.
- ASSERT(flag == JUMP_FUNCTION || has_frame());
-
Label done;
InvokePrologue(expected, actual, code, no_reg, &done, flag,
InvokeFlag flag,
const CallWrapper& call_wrapper,
CallKind call_kind) {
- // You can't call a function without a valid frame.
- ASSERT(flag == JUMP_FUNCTION || has_frame());
-
// Contract with called JS functions requires that function is passed in r1.
ASSERT(fun.is(r1));
const ParameterCount& actual,
InvokeFlag flag,
CallKind call_kind) {
- // You can't call a function without a valid frame.
- ASSERT(flag == JUMP_FUNCTION || has_frame());
-
ASSERT(function->is_compiled());
// Get the function and set up the context.
#ifdef ENABLE_DEBUGGER_SUPPORT
void MacroAssembler::DebugBreak() {
+ ASSERT(allow_stub_calls());
mov(r0, Operand(0, RelocInfo::NONE));
mov(r1, Operand(ExternalReference(Runtime::kDebugBreak, isolate())));
CEntryStub ces(1);
- ASSERT(AllowThisStubCall(&ces));
Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}
#endif
void MacroAssembler::CallStub(CodeStub* stub, Condition cond) {
- ASSERT(AllowThisStubCall(stub)); // Stub calls are not allowed in some stubs.
+ ASSERT(allow_stub_calls()); // Stub calls are not allowed in some stubs.
Call(stub->GetCode(), RelocInfo::CODE_TARGET, cond);
}
MaybeObject* MacroAssembler::TryCallStub(CodeStub* stub, Condition cond) {
- ASSERT(AllowThisStubCall(stub)); // Stub calls are not allowed in some stubs.
+ ASSERT(allow_stub_calls()); // Stub calls are not allowed in some stubs.
Object* result;
{ MaybeObject* maybe_result = stub->TryGetCode();
if (!maybe_result->ToObject(&result)) return maybe_result;
void MacroAssembler::TailCallStub(CodeStub* stub, Condition cond) {
- ASSERT(stub->CompilingCallsToThisStubIsGCSafe() || allow_stub_calls_);
+ ASSERT(allow_stub_calls()); // Stub calls are not allowed in some stubs.
Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond);
}
MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub, Condition cond) {
+ ASSERT(allow_stub_calls()); // Stub calls are not allowed in some stubs.
Object* result;
{ MaybeObject* maybe_result = stub->TryGetCode();
if (!maybe_result->ToObject(&result)) return maybe_result;
}
-bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
- if (!has_frame_ && stub->SometimesSetsUpAFrame()) return false;
- return stub->CompilingCallsToThisStubIsGCSafe() || allow_stub_calls_;
-}
-
-
void MacroAssembler::IllegalOperation(int num_arguments) {
if (num_arguments > 0) {
add(sp, sp, Operand(num_arguments * kPointerSize));
void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
InvokeFlag flag,
const CallWrapper& call_wrapper) {
- // You can't call a builtin without a valid frame.
- ASSERT(flag == JUMP_FUNCTION || has_frame());
-
GetBuiltinEntry(r2, id);
if (flag == CALL_FUNCTION) {
call_wrapper.BeforeCall(CallSize(r2));
RecordComment(msg);
}
#endif
+ // Disable stub call restrictions to always allow calls to abort.
+ AllowStubCallsScope allow_scope(this, true);
mov(r0, Operand(p0));
push(r0);
mov(r0, Operand(Smi::FromInt(p1 - p0)));
push(r0);
- // Disable stub call restrictions to always allow calls to abort.
- if (!has_frame_) {
- // We don't actually want to generate a pile of code for this, so just
- // claim there is a stack frame, without generating one.
- FrameScope scope(this, StackFrame::NONE);
- CallRuntime(Runtime::kAbort, 2);
- } else {
- CallRuntime(Runtime::kAbort, 2);
- }
+ CallRuntime(Runtime::kAbort, 2);
// will not return here
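The AllowStubCallsScope used above follows the usual save-and-restore pattern: override the flag for one region, restore the previous value on exit. A standalone sketch of the pattern (ScopedFlag is a hypothetical stand-in, not the V8 class):

#include <cstdlib>

// Scoped flag override: set a new value on entry, restore the old on exit.
class ScopedFlag {
 public:
  ScopedFlag(bool* flag, bool value) : flag_(flag), old_(*flag) {
    *flag_ = value;
  }
  ~ScopedFlag() { *flag_ = old_; }
 private:
  bool* flag_;
  bool old_;
};

int main() {
  bool allow_stub_calls = false;
  {
    ScopedFlag allow(&allow_stub_calls, true);
    // ... the equivalent of the CallRuntime(Runtime::kAbort, 2) above
    // runs inside this scope with stub calls forced on ...
  }
  // allow_stub_calls is false again here, whatever happened inside.
  return allow_stub_calls ? EXIT_FAILURE : EXIT_SUCCESS;
}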
if (is_const_pool_blocked()) {
// If the calling code cares about the exact number of
Register scratch,
int num_reg_arguments,
int num_double_arguments) {
- ASSERT(has_frame());
// Make sure that the stack is aligned before calling a C function unless
// running in the simulator. The simulator has its own alignment check which
// provides more information.
#define V8_ARM_MACRO_ASSEMBLER_ARM_H_
#include "assembler.h"
-#include "frames.h"
#include "v8globals.h"
namespace v8 {
const Register fpscr_flags,
const Condition cond = al);
+
+ // ---------------------------------------------------------------------------
+ // Activation frames
+
+ void EnterInternalFrame() { EnterFrame(StackFrame::INTERNAL); }
+ void LeaveInternalFrame() { LeaveFrame(StackFrame::INTERNAL); }
+
+ void EnterConstructFrame() { EnterFrame(StackFrame::CONSTRUCT); }
+ void LeaveConstructFrame() { LeaveFrame(StackFrame::CONSTRUCT); }
+
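An aside for reviewers: the Enter/Leave pairs restored here are explicit calls, where the FrameScope being reverted was an RAII guard. A minimal standalone C++ sketch of that trade-off (Masm and FrameGuard are hypothetical stand-ins, not the V8 classes):

#include <cstdio>

struct Masm {
  void EnterFrame(const char* type) { std::printf("enter %s\n", type); }
  void LeaveFrame(const char* type) { std::printf("leave %s\n", type); }
};

// RAII variant: the destructor pairs the Leave with the Enter automatically.
class FrameGuard {
 public:
  FrameGuard(Masm* m, const char* type) : m_(m), type_(type) {
    m_->EnterFrame(type_);
  }
  ~FrameGuard() { m_->LeaveFrame(type_); }
 private:
  Masm* m_;
  const char* type_;
};

int main() {
  Masm masm;
  {
    FrameGuard guard(&masm, "INTERNAL");  // Leave happens at scope exit.
  }
  masm.EnterFrame("INTERNAL");  // Explicit style, as restored by this patch;
  masm.LeaveFrame("INTERNAL");  // the author must pair the calls by hand.
  return 0;
}

The RAII form cannot forget the Leave on an early exit; the explicit form keeps the emitted-code order obvious, which appears to be what this revert opts for.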
// Enter exit frame.
// stack_space - extra stack space, used for alignment before call to C.
void EnterExitFrame(bool save_doubles, int stack_space = 0);
bool generating_stub() { return generating_stub_; }
void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }
bool allow_stub_calls() { return allow_stub_calls_; }
- void set_has_frame(bool value) { has_frame_ = value; }
- bool has_frame() { return has_frame_; }
- inline bool AllowThisStubCall(CodeStub* stub);
// EABI variant for double arguments in use.
bool use_eabi_hardfloat() {
void LoadInstanceDescriptors(Register map, Register descriptors);
- // Activation support.
- void EnterFrame(StackFrame::Type type);
- void LeaveFrame(StackFrame::Type type);
-
private:
void CallCFunctionHelper(Register function,
ExternalReference function_reference,
const CallWrapper& call_wrapper,
CallKind call_kind);
+ // Activation support.
+ void EnterFrame(StackFrame::Type type);
+ void LeaveFrame(StackFrame::Type type);
+
void InitializeNewString(Register string,
Register length,
Heap::RootListIndex map_index,
bool generating_stub_;
bool allow_stub_calls_;
- bool has_frame_;
// This handle will be patched with the code object on installation.
Handle<Object> code_object_;
// Isolate.
__ mov(r3, Operand(ExternalReference::isolate_address()));
- {
- AllowExternalCallThatCantCauseGC scope(masm_);
- ExternalReference function =
- ExternalReference::re_case_insensitive_compare_uc16(masm_->isolate());
- __ CallCFunction(function, argument_count);
- }
+ ExternalReference function =
+ ExternalReference::re_case_insensitive_compare_uc16(masm_->isolate());
+ __ CallCFunction(function, argument_count);
// Check if function returned non-zero for success or zero for failure.
__ cmp(r0, Operand(0, RelocInfo::NONE));
// Entry code:
__ bind(&entry_label_);
-
- // Tell the system that we have a stack frame. Because the type is MANUAL, no
- // code is generated.
- FrameScope scope(masm_, StackFrame::MANUAL);
-
- // Actually emit code to start a new stack frame.
// Push arguments
// Save callee-save registers.
// Start new stack frame.
miss_label);
// Call a runtime function to load the interceptor property.
- FrameScope scope(masm, StackFrame::INTERNAL);
+ __ EnterInternalFrame();
// Save the name_ register across the call.
__ push(name_);
// Restore the name_ register.
__ pop(name_);
-
- // Leave the internal frame.
+ __ LeaveInternalFrame();
}
void LoadWithInterceptor(MacroAssembler* masm,
JSObject* holder_obj,
Register scratch,
Label* interceptor_succeeded) {
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ Push(holder, name_);
-
- CompileCallLoadPropertyWithInterceptor(masm,
- receiver,
- holder,
- name_,
- holder_obj);
-
- __ pop(name_); // Restore the name.
- __ pop(receiver); // Restore the holder.
- }
+ __ EnterInternalFrame();
+ __ Push(holder, name_);
+
+ CompileCallLoadPropertyWithInterceptor(masm,
+ receiver,
+ holder,
+ name_,
+ holder_obj);
+
+ __ pop(name_); // Restore the name.
+ __ pop(receiver); // Restore the holder.
+ __ LeaveInternalFrame();
// If interceptor returns no-result sentinel, call the constant function.
__ LoadRoot(scratch, Heap::kNoInterceptorResultSentinelRootIndex);
// Save necessary data before invoking an interceptor.
// Requires a frame to make GC aware of pushed pointers.
- {
- FrameScope frame_scope(masm(), StackFrame::INTERNAL);
-
- if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
- // CALLBACKS case needs a receiver to be passed into C++ callback.
- __ Push(receiver, holder_reg, name_reg);
- } else {
- __ Push(holder_reg, name_reg);
- }
+ __ EnterInternalFrame();
- // Invoke an interceptor. Note: map checks from receiver to
- // interceptor's holder has been compiled before (see a caller
- // of this method.)
- CompileCallLoadPropertyWithInterceptor(masm(),
- receiver,
- holder_reg,
- name_reg,
- interceptor_holder);
-
- // Check if interceptor provided a value for property. If it's
- // the case, return immediately.
- Label interceptor_failed;
- __ LoadRoot(scratch1, Heap::kNoInterceptorResultSentinelRootIndex);
- __ cmp(r0, scratch1);
- __ b(eq, &interceptor_failed);
- frame_scope.GenerateLeaveFrame();
- __ Ret();
+ if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
+ // CALLBACKS case needs a receiver to be passed into C++ callback.
+ __ Push(receiver, holder_reg, name_reg);
+ } else {
+ __ Push(holder_reg, name_reg);
+ }
- __ bind(&interceptor_failed);
- __ pop(name_reg);
- __ pop(holder_reg);
- if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
- __ pop(receiver);
- }
+ // Invoke an interceptor. Note: map checks from receiver to
+ // interceptor's holder has been compiled before (see a caller
+ // of this method.)
+ CompileCallLoadPropertyWithInterceptor(masm(),
+ receiver,
+ holder_reg,
+ name_reg,
+ interceptor_holder);
+
+ // Check if interceptor provided a value for property. If it's
+ // the case, return immediately.
+ Label interceptor_failed;
+ __ LoadRoot(scratch1, Heap::kNoInterceptorResultSentinelRootIndex);
+ __ cmp(r0, scratch1);
+ __ b(eq, &interceptor_failed);
+ __ LeaveInternalFrame();
+ __ Ret();
- // Leave the internal frame.
+ __ bind(&interceptor_failed);
+ __ pop(name_reg);
+ __ pop(holder_reg);
+ if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
+ __ pop(receiver);
}
+ __ LeaveInternalFrame();
+
// Check that the maps from interceptor's holder to lookup's holder
// haven't changed. And load lookup's holder into |holder| register.
if (interceptor_holder != lookup->holder()) {
// We pass all arguments to the generator, but it may not use all of
// them. This works because the first arguments are on top of the
// stack.
- ASSERT(!masm.has_frame());
g(&masm, functions[i].name, functions[i].extra_args);
// Move the code into the object heap.
CodeDesc desc;
// Update the static counter each time a new code stub is generated.
masm->isolate()->counters()->code_stubs()->Increment();
- // Nested stubs are not allowed for leaves.
- AllowStubCallsScope allow_scope(masm, false);
+ // Nested stubs are not allowed for leaves.
+ AllowStubCallsScope allow_scope(masm, AllowsStubCalls());
// Generate the code for the stub.
masm->set_generating_stub(true);
- NoCurrentFrameScope scope(masm);
Generate(masm);
}
V(KeyedLoadExternalArray) \
V(KeyedStoreExternalArray) \
V(DebuggerStatement) \
- V(StringDictionaryLookup)
+ V(StringDictionaryNegativeLookup)
// List of code stubs only used on ARM platforms.
#ifdef V8_TARGET_ARCH_ARM
virtual ~CodeStub() {}
- // See comment above, where Instanceof is defined.
- virtual bool CompilingCallsToThisStubIsGCSafe() {
- return MajorKey() <= Instanceof;
- }
-
- virtual bool SometimesSetsUpAFrame() { return true; }
-
protected:
static const int kMajorBits = 6;
static const int kMinorBits = kBitsPerInt - kSmiTagSize - kMajorBits;
MajorKeyBits::encode(MajorKey());
}
+ // See comment above, where Instanceof is defined.
+ bool AllowsStubCalls() { return MajorKey() <= Instanceof; }
+
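The `MajorKey() <= Instanceof` comparison relies on the stub majors being declared in a deliberate order, with everything up to and including Instanceof considered safe to call from within another stub. A hedged standalone sketch of the idea (enum values illustrative only, not the real major list):

#include <cassert>

// Stubs declared in "safe to nest first" order; the comparison gates nesting.
enum Major { CallFunction, StackCheck, Instanceof, RecordWrite, NUM_MAJORS };

bool AllowsStubCalls(Major key) { return key <= Instanceof; }

int main() {
  assert(AllowsStubCalls(StackCheck));    // early major: may nest stub calls
  assert(!AllowsStubCalls(RecordWrite));  // later major: must stay leaf-like
  return 0;
}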
class MajorKeyBits: public BitField<uint32_t, 0, kMajorBits> {};
class MinorKeyBits: public BitField<uint32_t, kMajorBits, kMinorBits> {};
void Generate(MacroAssembler* masm);
void SaveDoubles() { save_doubles_ = true; }
- // The version of this stub that doesn't save doubles is generated ahead of
- // time, so it's OK to call it from other stubs that can't cope with GC during
- // their code generation.
- virtual bool CompilingCallsToThisStubIsGCSafe() { return !save_doubles_; }
-
private:
void GenerateCore(MacroAssembler* masm,
Label* throw_normal_exception,
}
}
-
-// We rely on this function not causing a GC. It is called from generated code
-// without having a real stack frame in place.
Deoptimizer* Deoptimizer::New(JSFunction* function,
BailoutType type,
unsigned bailout_id,
}
-// We rely on this function not causing a GC. It is called from generated code
-// without having a real stack frame in place.
void Deoptimizer::DoComputeOutputFrames() {
if (bailout_type_ == OSR) {
DoComputeOsrOutputFrame();
enum Type {
NONE = 0,
STACK_FRAME_TYPE_LIST(DECLARE_TYPE)
- NUMBER_OF_TYPES,
- // Used by FrameScope to indicate that the stack frame is constructed
- // manually and the FrameScope does not need to emit code.
- MANUAL
+ NUMBER_OF_TYPES
};
#undef DECLARE_TYPE
ASSERT(!is_api_function || !count_constructions);
// Enter a construct frame.
- {
- FrameScope scope(masm, StackFrame::CONSTRUCT);
+ __ EnterConstructFrame();
- // Store a smi-tagged arguments count on the stack.
- __ SmiTag(eax);
- __ push(eax);
+ // Store a smi-tagged arguments count on the stack.
+ __ SmiTag(eax);
+ __ push(eax);
- // Push the function to invoke on the stack.
- __ push(edi);
+ // Push the function to invoke on the stack.
+ __ push(edi);
- // Try to allocate the object without transitioning into C code. If any of
- // the preconditions is not met, the code bails out to the runtime call.
- Label rt_call, allocated;
- if (FLAG_inline_new) {
- Label undo_allocation;
+ // Try to allocate the object without transitioning into C code. If any of the
+ // preconditions is not met, the code bails out to the runtime call.
+ Label rt_call, allocated;
+ if (FLAG_inline_new) {
+ Label undo_allocation;
#ifdef ENABLE_DEBUGGER_SUPPORT
- ExternalReference debug_step_in_fp =
- ExternalReference::debug_step_in_fp_address(masm->isolate());
- __ cmp(Operand::StaticVariable(debug_step_in_fp), Immediate(0));
- __ j(not_equal, &rt_call);
+ ExternalReference debug_step_in_fp =
+ ExternalReference::debug_step_in_fp_address(masm->isolate());
+ __ cmp(Operand::StaticVariable(debug_step_in_fp), Immediate(0));
+ __ j(not_equal, &rt_call);
#endif
- // Verified that the constructor is a JSFunction.
- // Load the initial map and verify that it is in fact a map.
- // edi: constructor
- __ mov(eax, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
- // Will both indicate a NULL and a Smi
- __ test(eax, Immediate(kSmiTagMask));
- __ j(zero, &rt_call);
- // edi: constructor
- // eax: initial map (if proven valid below)
- __ CmpObjectType(eax, MAP_TYPE, ebx);
- __ j(not_equal, &rt_call);
-
- // Check that the constructor is not constructing a JSFunction (see
- // comments in Runtime_NewObject in runtime.cc). In which case the initial
- // map's instance type would be JS_FUNCTION_TYPE.
- // edi: constructor
- // eax: initial map
- __ CmpInstanceType(eax, JS_FUNCTION_TYPE);
- __ j(equal, &rt_call);
-
- if (count_constructions) {
- Label allocate;
- // Decrease generous allocation count.
- __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
- __ dec_b(FieldOperand(ecx,
- SharedFunctionInfo::kConstructionCountOffset));
- __ j(not_zero, &allocate);
-
- __ push(eax);
- __ push(edi);
+ // Verified that the constructor is a JSFunction.
+ // Load the initial map and verify that it is in fact a map.
+ // edi: constructor
+ __ mov(eax, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
+ // This check catches both a NULL pointer and a Smi.
+ __ test(eax, Immediate(kSmiTagMask));
+ __ j(zero, &rt_call);
+ // edi: constructor
+ // eax: initial map (if proven valid below)
+ __ CmpObjectType(eax, MAP_TYPE, ebx);
+ __ j(not_equal, &rt_call);
+
+ // Check that the constructor is not constructing a JSFunction (see comments
+ // in Runtime_NewObject in runtime.cc). In which case the initial map's
+ // instance type would be JS_FUNCTION_TYPE.
+ // edi: constructor
+ // eax: initial map
+ __ CmpInstanceType(eax, JS_FUNCTION_TYPE);
+ __ j(equal, &rt_call);
+
+ if (count_constructions) {
+ Label allocate;
+ // Decrease generous allocation count.
+ __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
+ __ dec_b(FieldOperand(ecx, SharedFunctionInfo::kConstructionCountOffset));
+ __ j(not_zero, &allocate);
- __ push(edi); // constructor
- // The call will replace the stub, so the countdown is only done once.
- __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
+ __ push(eax);
+ __ push(edi);
- __ pop(edi);
- __ pop(eax);
+ __ push(edi); // constructor
+ // The call will replace the stub, so the countdown is only done once.
+ __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
- __ bind(&allocate);
- }
+ __ pop(edi);
+ __ pop(eax);
- // Now allocate the JSObject on the heap.
- // edi: constructor
- // eax: initial map
- __ movzx_b(edi, FieldOperand(eax, Map::kInstanceSizeOffset));
- __ shl(edi, kPointerSizeLog2);
- __ AllocateInNewSpace(edi,
- ebx,
- edi,
- no_reg,
- &rt_call,
- NO_ALLOCATION_FLAGS);
- // Allocated the JSObject, now initialize the fields.
- // eax: initial map
- // ebx: JSObject
- // edi: start of next object
- __ mov(Operand(ebx, JSObject::kMapOffset), eax);
- Factory* factory = masm->isolate()->factory();
- __ mov(ecx, factory->empty_fixed_array());
- __ mov(Operand(ebx, JSObject::kPropertiesOffset), ecx);
- __ mov(Operand(ebx, JSObject::kElementsOffset), ecx);
- // Set extra fields in the newly allocated object.
- // eax: initial map
- // ebx: JSObject
- // edi: start of next object
- { Label loop, entry;
- // To allow for truncation.
- if (count_constructions) {
- __ mov(edx, factory->one_pointer_filler_map());
- } else {
- __ mov(edx, factory->undefined_value());
- }
- __ lea(ecx, Operand(ebx, JSObject::kHeaderSize));
- __ jmp(&entry);
- __ bind(&loop);
- __ mov(Operand(ecx, 0), edx);
- __ add(Operand(ecx), Immediate(kPointerSize));
- __ bind(&entry);
- __ cmp(ecx, Operand(edi));
- __ j(less, &loop);
- }
+ __ bind(&allocate);
+ }
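The countdown above is the in-object slack tracking the comments describe: allocate generously until the constructor has run a few times, then finalize the instance size once and replace the stub. A standalone sketch of the policy (all names hypothetical, not the runtime's actual bookkeeping):

struct SharedInfo {
  int construction_count;  // generous-allocation countdown
  int instance_size;       // bytes; shrunk once the countdown hits zero
};

// Hypothetical finalization: shrink to the size actually used so far.
void FinalizeInstanceSize(SharedInfo* info, int used_size) {
  info->instance_size = used_size;
}

int AllocateSize(SharedInfo* info, int used_size) {
  if (info->construction_count > 0 && --info->construction_count == 0) {
    FinalizeInstanceSize(info, used_size);  // done once; the stub is replaced
  }
  return info->instance_size;  // bytes to allocate for this object
}

int main() {
  SharedInfo info = {2, 64};
  AllocateSize(&info, 40);  // countdown 2 -> 1, still generous (64)
  AllocateSize(&info, 40);  // countdown 1 -> 0, finalized to 40
  return AllocateSize(&info, 40) == 40 ? 0 : 1;
}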
- // Add the object tag to make the JSObject real, so that we can continue
- // and jump into the continuation code at any time from now on. Any
- // failures need to undo the allocation, so that the heap is in a
- // consistent state and verifiable.
- // eax: initial map
- // ebx: JSObject
- // edi: start of next object
- __ or_(Operand(ebx), Immediate(kHeapObjectTag));
-
- // Check if a non-empty properties array is needed.
- // Allocate and initialize a FixedArray if it is.
- // eax: initial map
- // ebx: JSObject
- // edi: start of next object
- // Calculate the total number of properties described by the map.
- __ movzx_b(edx, FieldOperand(eax, Map::kUnusedPropertyFieldsOffset));
- __ movzx_b(ecx,
- FieldOperand(eax, Map::kPreAllocatedPropertyFieldsOffset));
- __ add(edx, Operand(ecx));
- // Calculate unused properties past the end of the in-object properties.
- __ movzx_b(ecx, FieldOperand(eax, Map::kInObjectPropertiesOffset));
- __ sub(edx, Operand(ecx));
- // Done if no extra properties are to be allocated.
- __ j(zero, &allocated);
- __ Assert(positive, "Property allocation count failed.");
-
- // Scale the number of elements by pointer size and add the header for
- // FixedArrays to the start of the next object calculation from above.
- // ebx: JSObject
- // edi: start of next object (will be start of FixedArray)
- // edx: number of elements in properties array
- __ AllocateInNewSpace(FixedArray::kHeaderSize,
- times_pointer_size,
- edx,
- edi,
- ecx,
- no_reg,
- &undo_allocation,
- RESULT_CONTAINS_TOP);
-
- // Initialize the FixedArray.
- // ebx: JSObject
- // edi: FixedArray
- // edx: number of elements
- // ecx: start of next object
- __ mov(eax, factory->fixed_array_map());
- __ mov(Operand(edi, FixedArray::kMapOffset), eax); // setup the map
- __ SmiTag(edx);
- __ mov(Operand(edi, FixedArray::kLengthOffset), edx); // and length
-
- // Initialize the fields to undefined.
- // ebx: JSObject
- // edi: FixedArray
- // ecx: start of next object
- { Label loop, entry;
+ // Now allocate the JSObject on the heap.
+ // edi: constructor
+ // eax: initial map
+ __ movzx_b(edi, FieldOperand(eax, Map::kInstanceSizeOffset));
+ __ shl(edi, kPointerSizeLog2);
+ __ AllocateInNewSpace(edi, ebx, edi, no_reg, &rt_call, NO_ALLOCATION_FLAGS);
+ // Allocated the JSObject, now initialize the fields.
+ // eax: initial map
+ // ebx: JSObject
+ // edi: start of next object
+ __ mov(Operand(ebx, JSObject::kMapOffset), eax);
+ Factory* factory = masm->isolate()->factory();
+ __ mov(ecx, factory->empty_fixed_array());
+ __ mov(Operand(ebx, JSObject::kPropertiesOffset), ecx);
+ __ mov(Operand(ebx, JSObject::kElementsOffset), ecx);
+ // Set extra fields in the newly allocated object.
+ // eax: initial map
+ // ebx: JSObject
+ // edi: start of next object
+ { Label loop, entry;
+ // To allow for truncation.
+ if (count_constructions) {
+ __ mov(edx, factory->one_pointer_filler_map());
+ } else {
__ mov(edx, factory->undefined_value());
- __ lea(eax, Operand(edi, FixedArray::kHeaderSize));
- __ jmp(&entry);
- __ bind(&loop);
- __ mov(Operand(eax, 0), edx);
- __ add(Operand(eax), Immediate(kPointerSize));
- __ bind(&entry);
- __ cmp(eax, Operand(ecx));
- __ j(below, &loop);
}
+ __ lea(ecx, Operand(ebx, JSObject::kHeaderSize));
+ __ jmp(&entry);
+ __ bind(&loop);
+ __ mov(Operand(ecx, 0), edx);
+ __ add(Operand(ecx), Immediate(kPointerSize));
+ __ bind(&entry);
+ __ cmp(ecx, Operand(edi));
+ __ j(less, &loop);
+ }
- // Store the initialized FixedArray into the properties field of
- // the JSObject
- // ebx: JSObject
- // edi: FixedArray
- __ or_(Operand(edi), Immediate(kHeapObjectTag)); // add the heap tag
- __ mov(FieldOperand(ebx, JSObject::kPropertiesOffset), edi);
+ // Add the object tag to make the JSObject real, so that we can continue and
+ // jump into the continuation code at any time from now on. Any failures
+ // need to undo the allocation, so that the heap is in a consistent state
+ // and verifiable.
+ // eax: initial map
+ // ebx: JSObject
+ // edi: start of next object
+ __ or_(Operand(ebx), Immediate(kHeapObjectTag));
+
+ // Check if a non-empty properties array is needed.
+ // Allocate and initialize a FixedArray if it is.
+ // eax: initial map
+ // ebx: JSObject
+ // edi: start of next object
+ // Calculate the total number of properties described by the map.
+ __ movzx_b(edx, FieldOperand(eax, Map::kUnusedPropertyFieldsOffset));
+ __ movzx_b(ecx, FieldOperand(eax, Map::kPreAllocatedPropertyFieldsOffset));
+ __ add(edx, Operand(ecx));
+ // Calculate unused properties past the end of the in-object properties.
+ __ movzx_b(ecx, FieldOperand(eax, Map::kInObjectPropertiesOffset));
+ __ sub(edx, Operand(ecx));
+ // Done if no extra properties are to be allocated.
+ __ j(zero, &allocated);
+ __ Assert(positive, "Property allocation count failed.");
+
+ // Scale the number of elements by pointer size and add the header for
+ // FixedArrays to the start of the next object calculation from above.
+ // ebx: JSObject
+ // edi: start of next object (will be start of FixedArray)
+ // edx: number of elements in properties array
+ __ AllocateInNewSpace(FixedArray::kHeaderSize,
+ times_pointer_size,
+ edx,
+ edi,
+ ecx,
+ no_reg,
+ &undo_allocation,
+ RESULT_CONTAINS_TOP);
+
+ // Initialize the FixedArray.
+ // ebx: JSObject
+ // edi: FixedArray
+ // edx: number of elements
+ // ecx: start of next object
+ __ mov(eax, factory->fixed_array_map());
+ __ mov(Operand(edi, FixedArray::kMapOffset), eax); // setup the map
+ __ SmiTag(edx);
+ __ mov(Operand(edi, FixedArray::kLengthOffset), edx); // and length
+
+ // Initialize the fields to undefined.
+ // ebx: JSObject
+ // edi: FixedArray
+ // ecx: start of next object
+ { Label loop, entry;
+ __ mov(edx, factory->undefined_value());
+ __ lea(eax, Operand(edi, FixedArray::kHeaderSize));
+ __ jmp(&entry);
+ __ bind(&loop);
+ __ mov(Operand(eax, 0), edx);
+ __ add(Operand(eax), Immediate(kPointerSize));
+ __ bind(&entry);
+ __ cmp(eax, Operand(ecx));
+ __ j(below, &loop);
+ }
+ // Store the initialized FixedArray into the properties field of
+ // the JSObject
+ // ebx: JSObject
+ // edi: FixedArray
+ __ or_(Operand(edi), Immediate(kHeapObjectTag)); // add the heap tag
+ __ mov(FieldOperand(ebx, JSObject::kPropertiesOffset), edi);
- // Continue with JSObject being successfully allocated
- // ebx: JSObject
- __ jmp(&allocated);
- // Undo the setting of the new top so that the heap is verifiable. For
- // example, the map's unused properties potentially do not match the
- // allocated objects unused properties.
- // ebx: JSObject (previous new top)
- __ bind(&undo_allocation);
- __ UndoAllocationInNewSpace(ebx);
- }
+ // Continue with JSObject being successfully allocated
+ // ebx: JSObject
+ __ jmp(&allocated);
- // Allocate the new receiver object using the runtime call.
- __ bind(&rt_call);
- // Must restore edi (constructor) before calling runtime.
- __ mov(edi, Operand(esp, 0));
- // edi: function (constructor)
- __ push(edi);
- __ CallRuntime(Runtime::kNewObject, 1);
- __ mov(ebx, Operand(eax)); // store result in ebx
+ // Undo the setting of the new top so that the heap is verifiable. For
+ // example, the map's unused properties potentially do not match the
+ // allocated object's unused properties.
+ // ebx: JSObject (previous new top)
+ __ bind(&undo_allocation);
+ __ UndoAllocationInNewSpace(ebx);
+ }
- // New object allocated.
- // ebx: newly allocated object
- __ bind(&allocated);
- // Retrieve the function from the stack.
- __ pop(edi);
+ // Allocate the new receiver object using the runtime call.
+ __ bind(&rt_call);
+ // Must restore edi (constructor) before calling runtime.
+ __ mov(edi, Operand(esp, 0));
+ // edi: function (constructor)
+ __ push(edi);
+ __ CallRuntime(Runtime::kNewObject, 1);
+ __ mov(ebx, Operand(eax)); // store result in ebx
- // Retrieve smi-tagged arguments count from the stack.
- __ mov(eax, Operand(esp, 0));
- __ SmiUntag(eax);
+ // New object allocated.
+ // ebx: newly allocated object
+ __ bind(&allocated);
+ // Retrieve the function from the stack.
+ __ pop(edi);
- // Push the allocated receiver to the stack. We need two copies
- // because we may have to return the original one and the calling
- // conventions dictate that the called function pops the receiver.
- __ push(ebx);
- __ push(ebx);
+ // Retrieve smi-tagged arguments count from the stack.
+ __ mov(eax, Operand(esp, 0));
+ __ SmiUntag(eax);
- // Setup pointer to last argument.
- __ lea(ebx, Operand(ebp, StandardFrameConstants::kCallerSPOffset));
+ // Push the allocated receiver to the stack. We need two copies
+ // because we may have to return the original one and the calling
+ // conventions dictate that the called function pops the receiver.
+ __ push(ebx);
+ __ push(ebx);
- // Copy arguments and receiver to the expression stack.
- Label loop, entry;
- __ mov(ecx, Operand(eax));
- __ jmp(&entry);
- __ bind(&loop);
- __ push(Operand(ebx, ecx, times_4, 0));
- __ bind(&entry);
- __ dec(ecx);
- __ j(greater_equal, &loop);
-
- // Call the function.
- if (is_api_function) {
- __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
- Handle<Code> code =
- masm->isolate()->builtins()->HandleApiCallConstruct();
- ParameterCount expected(0);
- __ InvokeCode(code, expected, expected, RelocInfo::CODE_TARGET,
- CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
- } else {
- ParameterCount actual(eax);
- __ InvokeFunction(edi, actual, CALL_FUNCTION,
- NullCallWrapper(), CALL_AS_METHOD);
- }
+ // Setup pointer to last argument.
+ __ lea(ebx, Operand(ebp, StandardFrameConstants::kCallerSPOffset));
+
+ // Copy arguments and receiver to the expression stack.
+ Label loop, entry;
+ __ mov(ecx, Operand(eax));
+ __ jmp(&entry);
+ __ bind(&loop);
+ __ push(Operand(ebx, ecx, times_4, 0));
+ __ bind(&entry);
+ __ dec(ecx);
+ __ j(greater_equal, &loop);
- // Restore context from the frame.
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
+ // Call the function.
+ if (is_api_function) {
+ __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
+ Handle<Code> code =
+ masm->isolate()->builtins()->HandleApiCallConstruct();
+ ParameterCount expected(0);
+ __ InvokeCode(code, expected, expected, RelocInfo::CODE_TARGET,
+ CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
+ } else {
+ ParameterCount actual(eax);
+ __ InvokeFunction(edi, actual, CALL_FUNCTION,
+ NullCallWrapper(), CALL_AS_METHOD);
+ }
- // If the result is an object (in the ECMA sense), we should get rid
- // of the receiver and use the result; see ECMA-262 section 13.2.2-7
- // on page 74.
- Label use_receiver, exit;
+ // Restore context from the frame.
+ __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
- // If the result is a smi, it is *not* an object in the ECMA sense.
- __ test(eax, Immediate(kSmiTagMask));
- __ j(zero, &use_receiver);
+ // If the result is an object (in the ECMA sense), we should get rid
+ // of the receiver and use the result; see ECMA-262 section 13.2.2-7
+ // on page 74.
+ Label use_receiver, exit;
- // If the type of the result (stored in its map) is less than
- // FIRST_JS_OBJECT_TYPE, it is not an object in the ECMA sense.
- __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
- __ j(above_equal, &exit);
+ // If the result is a smi, it is *not* an object in the ECMA sense.
+ __ test(eax, Immediate(kSmiTagMask));
+ __ j(zero, &use_receiver);
- // Throw away the result of the constructor invocation and use the
- // on-stack receiver as the result.
- __ bind(&use_receiver);
- __ mov(eax, Operand(esp, 0));
+ // If the type of the result (stored in its map) is less than
+ // FIRST_JS_OBJECT_TYPE, it is not an object in the ECMA sense.
+ __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
+ __ j(above_equal, &exit);
- // Restore the arguments count and leave the construct frame.
- __ bind(&exit);
- __ mov(ebx, Operand(esp, kPointerSize)); // get arguments count
+ // Throw away the result of the constructor invocation and use the
+ // on-stack receiver as the result.
+ __ bind(&use_receiver);
+ __ mov(eax, Operand(esp, 0));
- // Leave construct frame.
- }
+ // Restore the arguments count and leave the construct frame.
+ __ bind(&exit);
+ __ mov(ebx, Operand(esp, kPointerSize)); // get arguments count
+ __ LeaveConstructFrame();
// Remove caller arguments from the stack and return.
ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
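The ASSERT pins down the smi encoding the epilogue depends on: with kSmiTag == 0 and kSmiTagSize == 1, an integer n is stored as n << 1, so a smi-tagged argument count read off the stack is already scaled by two. A standalone sketch of that arithmetic (ia32's 4-byte pointers assumed):

#include <cassert>
#include <cstdint>

constexpr intptr_t kSmiTag = 0;  // tag bit value for a smi
constexpr int kSmiTagSize = 1;   // one tag bit, as the ASSERT requires

constexpr intptr_t SmiTag(intptr_t n) { return (n << kSmiTagSize) | kSmiTag; }
constexpr intptr_t SmiUntag(intptr_t s) { return s >> kSmiTagSize; }

int main() {
  const intptr_t argc = 3;
  const intptr_t tagged = SmiTag(argc);  // 6: already argc * 2
  // With 4-byte pointers, one scaled address can drop all the arguments,
  // since tagged * 2 == argc * kPointerSize.
  assert(tagged * 2 == argc * 4);
  assert(SmiUntag(tagged) == argc);
  return 0;
}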
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
bool is_construct) {
- // Clear the context before we push it when entering the internal frame.
+ // Clear the context before we push it when entering the JS frame.
__ Set(esi, Immediate(0));
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
+ // Enter an internal frame.
+ __ EnterInternalFrame();
- // Load the previous frame pointer (ebx) to access C arguments
- __ mov(ebx, Operand(ebp, 0));
+ // Load the previous frame pointer (ebx) to access C arguments
+ __ mov(ebx, Operand(ebp, 0));
- // Get the function from the frame and setup the context.
- __ mov(ecx, Operand(ebx, EntryFrameConstants::kFunctionArgOffset));
- __ mov(esi, FieldOperand(ecx, JSFunction::kContextOffset));
+ // Get the function from the frame and setup the context.
+ __ mov(ecx, Operand(ebx, EntryFrameConstants::kFunctionArgOffset));
+ __ mov(esi, FieldOperand(ecx, JSFunction::kContextOffset));
- // Push the function and the receiver onto the stack.
- __ push(ecx);
- __ push(Operand(ebx, EntryFrameConstants::kReceiverArgOffset));
+ // Push the function and the receiver onto the stack.
+ __ push(ecx);
+ __ push(Operand(ebx, EntryFrameConstants::kReceiverArgOffset));
- // Load the number of arguments and setup pointer to the arguments.
- __ mov(eax, Operand(ebx, EntryFrameConstants::kArgcOffset));
- __ mov(ebx, Operand(ebx, EntryFrameConstants::kArgvOffset));
+ // Load the number of arguments and setup pointer to the arguments.
+ __ mov(eax, Operand(ebx, EntryFrameConstants::kArgcOffset));
+ __ mov(ebx, Operand(ebx, EntryFrameConstants::kArgvOffset));
- // Copy arguments to the stack in a loop.
- Label loop, entry;
- __ Set(ecx, Immediate(0));
- __ jmp(&entry);
- __ bind(&loop);
- __ mov(edx, Operand(ebx, ecx, times_4, 0)); // push parameter from argv
- __ push(Operand(edx, 0)); // dereference handle
- __ inc(Operand(ecx));
- __ bind(&entry);
- __ cmp(ecx, Operand(eax));
- __ j(not_equal, &loop);
-
- // Get the function from the stack and call it.
- // kPointerSize for the receiver.
- __ mov(edi, Operand(esp, eax, times_4, kPointerSize));
-
- // Invoke the code.
- if (is_construct) {
- __ call(masm->isolate()->builtins()->JSConstructCall(),
- RelocInfo::CODE_TARGET);
- } else {
- ParameterCount actual(eax);
- __ InvokeFunction(edi, actual, CALL_FUNCTION,
- NullCallWrapper(), CALL_AS_METHOD);
- }
+ // Copy arguments to the stack in a loop.
+ Label loop, entry;
+ __ Set(ecx, Immediate(0));
+ __ jmp(&entry);
+ __ bind(&loop);
+ __ mov(edx, Operand(ebx, ecx, times_4, 0)); // push parameter from argv
+ __ push(Operand(edx, 0)); // dereference handle
+ __ inc(Operand(ecx));
+ __ bind(&entry);
+ __ cmp(ecx, Operand(eax));
+ __ j(not_equal, &loop);
- // Exit the internal frame. Notice that this also removes the empty
- // context and the function left on the stack by the code
- // invocation.
+ // Get the function from the stack and call it.
+ __ mov(edi, Operand(esp, eax, times_4, +1 * kPointerSize)); // +1 ~ receiver
+
+ // Invoke the code.
+ if (is_construct) {
+ __ call(masm->isolate()->builtins()->JSConstructCall(),
+ RelocInfo::CODE_TARGET);
+ } else {
+ ParameterCount actual(eax);
+ __ InvokeFunction(edi, actual, CALL_FUNCTION,
+ NullCallWrapper(), CALL_AS_METHOD);
}
- __ ret(kPointerSize); // Remove receiver.
+
+ // Exit the JS frame. Notice that this also removes the empty
+ // context and the function left on the stack by the code
+ // invocation.
+ __ LeaveInternalFrame();
+ __ ret(1 * kPointerSize); // remove receiver
}
void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
+ // Enter an internal frame.
+ __ EnterInternalFrame();
- // Push a copy of the function.
- __ push(edi);
- // Push call kind information.
- __ push(ecx);
+ // Push a copy of the function.
+ __ push(edi);
+ // Push call kind information.
+ __ push(ecx);
- __ push(edi); // Function is also the parameter to the runtime call.
- __ CallRuntime(Runtime::kLazyCompile, 1);
+ __ push(edi); // Function is also the parameter to the runtime call.
+ __ CallRuntime(Runtime::kLazyCompile, 1);
- // Restore call kind information.
- __ pop(ecx);
- // Restore receiver.
- __ pop(edi);
+ // Restore call kind information.
+ __ pop(ecx);
+ // Restore receiver.
+ __ pop(edi);
- // Tear down internal frame.
- }
+ // Tear down temporary frame.
+ __ LeaveInternalFrame();
// Do a tail-call of the compiled function.
__ lea(eax, FieldOperand(eax, Code::kHeaderSize));
void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
+ // Enter an internal frame.
+ __ EnterInternalFrame();
- // Push a copy of the function onto the stack.
- __ push(edi);
- // Push call kind information.
- __ push(ecx);
+ // Push a copy of the function onto the stack.
+ __ push(edi);
+ // Push call kind information.
+ __ push(ecx);
- __ push(edi); // Function is also the parameter to the runtime call.
- __ CallRuntime(Runtime::kLazyRecompile, 1);
+ __ push(edi); // Function is also the parameter to the runtime call.
+ __ CallRuntime(Runtime::kLazyRecompile, 1);
- // Restore call kind information.
- __ pop(ecx);
- // Restore receiver.
- __ pop(edi);
+ // Restore call kind information.
+ __ pop(ecx);
+ // Restore receiver.
+ __ pop(edi);
- // Tear down internal frame.
- }
+ // Tear down temporary frame.
+ __ LeaveInternalFrame();
// Do a tail-call of the compiled function.
__ lea(eax, FieldOperand(eax, Code::kHeaderSize));
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
Deoptimizer::BailoutType type) {
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
+ // Enter an internal frame.
+ __ EnterInternalFrame();
- // Pass the function and deoptimization type to the runtime system.
- __ push(Immediate(Smi::FromInt(static_cast<int>(type))));
- __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
+ // Pass the function and deoptimization type to the runtime system.
+ __ push(Immediate(Smi::FromInt(static_cast<int>(type))));
+ __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
- // Tear down internal frame.
- }
+ // Tear down temporary frame.
+ __ LeaveInternalFrame();
// Get the full codegen state from the stack and untag it.
__ mov(ecx, Operand(esp, 1 * kPointerSize));
// the registers without worrying about which of them contain
// pointers. This seems a bit fragile.
__ pushad();
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ CallRuntime(Runtime::kNotifyOSR, 0);
- }
+ __ EnterInternalFrame();
+ __ CallRuntime(Runtime::kNotifyOSR, 0);
+ __ LeaveInternalFrame();
__ popad();
__ ret(0);
}
__ j(above_equal, &shift_arguments);
__ bind(&convert_to_object);
+ __ EnterInternalFrame(); // In order to preserve argument count.
+ __ SmiTag(eax);
+ __ push(eax);
- { // In order to preserve argument count.
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ SmiTag(eax);
- __ push(eax);
-
- __ push(ebx);
- __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
- __ mov(ebx, eax);
-
- __ pop(eax);
- __ SmiUntag(eax);
- }
+ __ push(ebx);
+ __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
+ __ mov(ebx, eax);
+ __ pop(eax);
+ __ SmiUntag(eax);
+ __ LeaveInternalFrame();
// Restore the function to edi.
__ mov(edi, Operand(esp, eax, times_4, 1 * kPointerSize));
__ jmp(&patch_receiver);
void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
-
- __ push(Operand(ebp, 4 * kPointerSize)); // push this
- __ push(Operand(ebp, 2 * kPointerSize)); // push arguments
- __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
-
- // Check the stack for overflow. We are not trying to catch
- // interruptions (e.g. debug break and preemption) here, so the "real stack
- // limit" is checked.
- Label okay;
- ExternalReference real_stack_limit =
- ExternalReference::address_of_real_stack_limit(masm->isolate());
- __ mov(edi, Operand::StaticVariable(real_stack_limit));
- // Make ecx the space we have left. The stack might already be overflowed
- // here which will cause ecx to become negative.
- __ mov(ecx, Operand(esp));
- __ sub(ecx, Operand(edi));
- // Make edx the space we need for the array when it is unrolled onto the
- // stack.
- __ mov(edx, Operand(eax));
- __ shl(edx, kPointerSizeLog2 - kSmiTagSize);
- // Check if the arguments will overflow the stack.
- __ cmp(ecx, Operand(edx));
- __ j(greater, &okay); // Signed comparison.
-
- // Out of stack space.
- __ push(Operand(ebp, 4 * kPointerSize)); // push this
- __ push(eax);
- __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
- __ bind(&okay);
- // End of stack check.
-
- // Push current index and limit.
- const int kLimitOffset =
- StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize;
- const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
- __ push(eax); // limit
- __ push(Immediate(0)); // index
-
- // Change context eagerly to get the right global object if
- // necessary.
- __ mov(edi, Operand(ebp, 4 * kPointerSize));
- __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
-
- // Compute the receiver.
- Label call_to_object, use_global_receiver, push_receiver;
- __ mov(ebx, Operand(ebp, 3 * kPointerSize));
-
- // Do not transform the receiver for strict mode functions.
- __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
- __ test_b(FieldOperand(ecx, SharedFunctionInfo::kStrictModeByteOffset),
- 1 << SharedFunctionInfo::kStrictModeBitWithinByte);
- __ j(not_equal, &push_receiver);
-
- Factory* factory = masm->isolate()->factory();
-
- // Do not transform the receiver for natives (shared already in ecx).
- __ test_b(FieldOperand(ecx, SharedFunctionInfo::kNativeByteOffset),
- 1 << SharedFunctionInfo::kNativeBitWithinByte);
- __ j(not_equal, &push_receiver);
-
- // Compute the receiver in non-strict mode.
- // Call ToObject on the receiver if it is not an object, or use the
- // global object if it is null or undefined.
- __ test(ebx, Immediate(kSmiTagMask));
- __ j(zero, &call_to_object);
- __ cmp(ebx, factory->null_value());
- __ j(equal, &use_global_receiver);
- __ cmp(ebx, factory->undefined_value());
- __ j(equal, &use_global_receiver);
- STATIC_ASSERT(LAST_JS_OBJECT_TYPE + 1 == LAST_TYPE);
- STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
- __ CmpObjectType(ebx, FIRST_JS_OBJECT_TYPE, ecx);
- __ j(above_equal, &push_receiver);
-
- __ bind(&call_to_object);
- __ push(ebx);
- __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
- __ mov(ebx, Operand(eax));
- __ jmp(&push_receiver);
-
- // Use the current global receiver object as the receiver.
- __ bind(&use_global_receiver);
- const int kGlobalOffset =
- Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
- __ mov(ebx, FieldOperand(esi, kGlobalOffset));
- __ mov(ebx, FieldOperand(ebx, GlobalObject::kGlobalContextOffset));
- __ mov(ebx, FieldOperand(ebx, kGlobalOffset));
- __ mov(ebx, FieldOperand(ebx, GlobalObject::kGlobalReceiverOffset));
+ __ EnterInternalFrame();
+
+ __ push(Operand(ebp, 4 * kPointerSize)); // push this
+ __ push(Operand(ebp, 2 * kPointerSize)); // push arguments
+ __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
+
+ // Check the stack for overflow. We are not trying to catch
+ // interruptions (e.g. debug break and preemption) here, so the "real stack
+ // limit" is checked.
+ Label okay;
+ ExternalReference real_stack_limit =
+ ExternalReference::address_of_real_stack_limit(masm->isolate());
+ __ mov(edi, Operand::StaticVariable(real_stack_limit));
+ // Make ecx the space we have left. The stack might already be overflowed
+ // here which will cause ecx to become negative.
+ __ mov(ecx, Operand(esp));
+ __ sub(ecx, Operand(edi));
+ // Make edx the space we need for the array when it is unrolled onto the
+ // stack.
+ __ mov(edx, Operand(eax));
+ __ shl(edx, kPointerSizeLog2 - kSmiTagSize);
+ // Check if the arguments will overflow the stack.
+ __ cmp(ecx, Operand(edx));
+ __ j(greater, &okay); // Signed comparison.
+
+ // Out of stack space.
+ __ push(Operand(ebp, 4 * kPointerSize)); // push this
+ __ push(eax);
+ __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
+ __ bind(&okay);
+ // End of stack check.
+
+ // Push current index and limit.
+ const int kLimitOffset =
+ StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize;
+ const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
+ __ push(eax); // limit
+ __ push(Immediate(0)); // index
+
+ // Change context eagerly to get the right global object if
+ // necessary.
+ __ mov(edi, Operand(ebp, 4 * kPointerSize));
+ __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
+
+ // Compute the receiver.
+ Label call_to_object, use_global_receiver, push_receiver;
+ __ mov(ebx, Operand(ebp, 3 * kPointerSize));
+
+ // Do not transform the receiver for strict mode functions.
+ __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
+ __ test_b(FieldOperand(ecx, SharedFunctionInfo::kStrictModeByteOffset),
+ 1 << SharedFunctionInfo::kStrictModeBitWithinByte);
+ __ j(not_equal, &push_receiver);
- // Push the receiver.
- __ bind(&push_receiver);
- __ push(ebx);
+ Factory* factory = masm->isolate()->factory();
- // Copy all arguments from the array to the stack.
- Label entry, loop;
- __ mov(eax, Operand(ebp, kIndexOffset));
- __ jmp(&entry);
- __ bind(&loop);
- __ mov(edx, Operand(ebp, 2 * kPointerSize)); // load arguments
+ // Do not transform the receiver for natives (shared already in ecx).
+ __ test_b(FieldOperand(ecx, SharedFunctionInfo::kNativeByteOffset),
+ 1 << SharedFunctionInfo::kNativeBitWithinByte);
+ __ j(not_equal, &push_receiver);
+
+ // Compute the receiver in non-strict mode.
+ // Call ToObject on the receiver if it is not an object, or use the
+ // global object if it is null or undefined.
+ __ test(ebx, Immediate(kSmiTagMask));
+ __ j(zero, &call_to_object);
+ __ cmp(ebx, factory->null_value());
+ __ j(equal, &use_global_receiver);
+ __ cmp(ebx, factory->undefined_value());
+ __ j(equal, &use_global_receiver);
+ STATIC_ASSERT(LAST_JS_OBJECT_TYPE + 1 == LAST_TYPE);
+ STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
+ __ CmpObjectType(ebx, FIRST_JS_OBJECT_TYPE, ecx);
+ __ j(above_equal, &push_receiver);
+
+ __ bind(&call_to_object);
+ __ push(ebx);
+ __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
+ __ mov(ebx, Operand(eax));
+ __ jmp(&push_receiver);
+
+ // Use the current global receiver object as the receiver.
+ __ bind(&use_global_receiver);
+ const int kGlobalOffset =
+ Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
+ __ mov(ebx, FieldOperand(esi, kGlobalOffset));
+ __ mov(ebx, FieldOperand(ebx, GlobalObject::kGlobalContextOffset));
+ __ mov(ebx, FieldOperand(ebx, kGlobalOffset));
+ __ mov(ebx, FieldOperand(ebx, GlobalObject::kGlobalReceiverOffset));
+
+ // Push the receiver.
+ __ bind(&push_receiver);
+ __ push(ebx);
+
+ // Copy all arguments from the array to the stack.
+ Label entry, loop;
+ __ mov(eax, Operand(ebp, kIndexOffset));
+ __ jmp(&entry);
+ __ bind(&loop);
+ __ mov(edx, Operand(ebp, 2 * kPointerSize)); // load arguments
- // Use inline caching to speed up access to arguments.
- Handle<Code> ic = masm->isolate()->builtins()->KeyedLoadIC_Initialize();
- __ call(ic, RelocInfo::CODE_TARGET);
- // It is important that we do not have a test instruction after the
- // call. A test instruction after the call is used to indicate that
- // we have generated an inline version of the keyed load. In this
- // case, we know that we are not generating a test instruction next.
+ // Use inline caching to speed up access to arguments.
+ Handle<Code> ic = masm->isolate()->builtins()->KeyedLoadIC_Initialize();
+ __ call(ic, RelocInfo::CODE_TARGET);
+ // It is important that we do not have a test instruction after the
+ // call. A test instruction after the call is used to indicate that
+ // we have generated an inline version of the keyed load. In this
+ // case, we know that we are not generating a test instruction next.
- // Push the nth argument.
- __ push(eax);
+ // Push the nth argument.
+ __ push(eax);
- // Update the index on the stack and in register eax.
- __ mov(eax, Operand(ebp, kIndexOffset));
- __ add(Operand(eax), Immediate(1 << kSmiTagSize));
- __ mov(Operand(ebp, kIndexOffset), eax);
+ // Update the index on the stack and in register eax.
+ __ mov(eax, Operand(ebp, kIndexOffset));
+ __ add(Operand(eax), Immediate(1 << kSmiTagSize));
+ __ mov(Operand(ebp, kIndexOffset), eax);
- __ bind(&entry);
- __ cmp(eax, Operand(ebp, kLimitOffset));
- __ j(not_equal, &loop);
+ __ bind(&entry);
+ __ cmp(eax, Operand(ebp, kLimitOffset));
+ __ j(not_equal, &loop);
- // Invoke the function.
- ParameterCount actual(eax);
- __ SmiUntag(eax);
- __ mov(edi, Operand(ebp, 4 * kPointerSize));
- __ InvokeFunction(edi, actual, CALL_FUNCTION,
- NullCallWrapper(), CALL_AS_METHOD);
+ // Invoke the function.
+ ParameterCount actual(eax);
+ __ SmiUntag(eax);
+ __ mov(edi, Operand(ebp, 4 * kPointerSize));
+ __ InvokeFunction(edi, actual, CALL_FUNCTION,
+ NullCallWrapper(), CALL_AS_METHOD);
- // Leave the internal frame.
- }
+ __ LeaveInternalFrame();
__ ret(3 * kPointerSize); // remove this, receiver, and arguments
}
// Invoke the conversion builtin and put the result into ebx.
__ bind(&convert_argument);
__ IncrementCounter(counters->string_ctor_conversions(), 1);
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ push(edi); // Preserve the function.
- __ push(eax);
- __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
- __ pop(edi);
- }
+ __ EnterInternalFrame();
+ __ push(edi); // Preserve the function.
+ __ push(eax);
+ __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
+ __ pop(edi);
+ __ LeaveInternalFrame();
__ mov(ebx, eax);
__ jmp(&argument_is_string);
// create a string wrapper.
__ bind(&gc_required);
__ IncrementCounter(counters->string_ctor_gc_required(), 1);
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ push(ebx);
- __ CallRuntime(Runtime::kNewStringWrapper, 1);
- }
+ __ EnterInternalFrame();
+ __ push(ebx);
+ __ CallRuntime(Runtime::kNewStringWrapper, 1);
+ __ LeaveInternalFrame();
__ ret(0);
}
void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
CpuFeatures::TryForceFeatureScope scope(SSE2);
- if (!CpuFeatures::IsSupported(SSE2) && FLAG_debug_code) {
+ if (!CpuFeatures::IsSupported(SSE2)) {
__ Abort("Unreachable code: Cannot optimize without SSE2 support.");
return;
}
// Pass the function to optimize as the argument to the on-stack
// replacement runtime function.
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ push(eax);
- __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
- }
+ __ EnterInternalFrame();
+ __ push(eax);
+ __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
+ __ LeaveInternalFrame();
// If the result was -1 it means that we couldn't optimize the
// function. Just return and continue in the unoptimized version.
__ j(above_equal, &ok, Label::kNear);
StackCheckStub stub;
__ TailCallStub(&stub);
- if (FLAG_debug_code) {
- __ Abort("Unreachable code: returned from tail call.");
- }
+ __ Abort("Unreachable code: returned from tail call.");
__ bind(&ok);
__ ret(0);
// NOTE: The stub does not handle the inlined cases (Smis, Booleans, undefined).
void ToBooleanStub::Generate(MacroAssembler* masm) {
- // This stub overrides SometimesSetsUpAFrame() to return false. That means
- // we cannot call anything that could cause a GC from this stub.
Label false_result, true_result, not_string;
__ mov(eax, Operand(esp, 1 * kPointerSize));
Factory* factory = masm->isolate()->factory();
__ jmp(&heapnumber_allocated);
__ bind(&slow_allocate_heapnumber);
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ push(edx);
- __ CallRuntime(Runtime::kNumberAlloc, 0);
- __ pop(edx);
- }
+ __ EnterInternalFrame();
+ __ push(edx);
+ __ CallRuntime(Runtime::kNumberAlloc, 0);
+ __ pop(edx);
+ __ LeaveInternalFrame();
__ bind(&heapnumber_allocated);
// eax: allocated 'empty' number
__ jmp(&heapnumber_allocated);
__ bind(&slow_allocate_heapnumber);
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- // Push the original HeapNumber on the stack. The integer value can't
- // be stored since it's untagged and not in the smi range (so we can't
- // smi-tag it). We'll recalculate the value after the GC instead.
- __ push(ebx);
- __ CallRuntime(Runtime::kNumberAlloc, 0);
- // New HeapNumber is in eax.
- __ pop(edx);
- }
+ __ EnterInternalFrame();
+ // Push the original HeapNumber on the stack. The integer value can't
+ // be stored since it's untagged and not in the smi range (so we can't
+ // smi-tag it). We'll recalculate the value after the GC instead.
+ __ push(ebx);
+ __ CallRuntime(Runtime::kNumberAlloc, 0);
+ // New HeapNumber is in eax.
+ __ pop(edx);
+ __ LeaveInternalFrame();
// IntegerConvert uses ebx and edi as scratch registers.
// This conversion won't go slow-case.
IntegerConvert(masm, edx, CpuFeatures::IsSupported(SSE3), slow);
__ add(Operand(esp), Immediate(kDoubleSize));
// We return the value in xmm1 without adding it to the cache, but
// we cause a scavenging GC so that future allocations will succeed.
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- // Allocate an unused object bigger than a HeapNumber.
- __ push(Immediate(Smi::FromInt(2 * kDoubleSize)));
- __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace);
- }
+ __ EnterInternalFrame();
+ // Allocate an unused object bigger than a HeapNumber.
+ __ push(Immediate(Smi::FromInt(2 * kDoubleSize)));
+ __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace);
+ __ LeaveInternalFrame();
__ Ret();
}
__ bind(&runtime_call);
__ AllocateHeapNumber(eax, edi, no_reg, &skip_cache);
__ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm1);
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ push(eax);
- __ CallRuntime(RuntimeFunction(), 1);
- }
+ __ EnterInternalFrame();
+ __ push(eax);
+ __ CallRuntime(RuntimeFunction(), 1);
+ __ LeaveInternalFrame();
__ movdbl(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
__ Ret();
}
__ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
} else {
// Call the builtin and convert 0/1 to true/false.
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ push(object);
- __ push(function);
- __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
- }
+ __ EnterInternalFrame();
+ __ push(object);
+ __ push(function);
+ __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
+ __ LeaveInternalFrame();
Label true_value, done;
__ test(eax, Operand(eax));
__ j(zero, &true_value, Label::kNear);
__ push(eax);
__ push(ecx);
- {
- // Call the runtime system in a fresh internal frame.
- ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss),
- masm->isolate());
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ push(edx);
- __ push(eax);
- __ push(Immediate(Smi::FromInt(op_)));
- __ CallExternalReference(miss, 3);
- }
+ // Call the runtime system in a fresh internal frame.
+ ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss),
+ masm->isolate());
+ __ EnterInternalFrame();
+ __ push(edx);
+ __ push(eax);
+ __ push(Immediate(Smi::FromInt(op_)));
+ __ CallExternalReference(miss, 3);
+ __ LeaveInternalFrame();
// Compute the entry point of the rewritten stub.
__ lea(edi, FieldOperand(eax, Code::kHeaderSize));
void StringDictionaryLookupStub::Generate(MacroAssembler* masm) {
- // This stub overrides SometimesSetsUpAFrame() to return false. That means
- // we cannot call anything that could cause a GC from this stub.
// Stack frame on entry:
// esp[0 * kPointerSize]: return address.
// esp[1 * kPointerSize]: key's hash.
void Generate(MacroAssembler* masm);
- virtual bool SometimesSetsUpAFrame() { return false; }
-
private:
Major MajorKey() { return ToBoolean; }
int MinorKey() { return 0; }
Register r0,
Register r1);
- virtual bool SometimesSetsUpAFrame() { return false; }
-
private:
static const int kInlinedProbes = 4;
static const int kTotalProbes = 20;
}
#endif
- Major MajorKey() { return StringDictionaryLookup; }
+ Major MajorKey() { return StringDictionaryNegativeLookup; }
int MinorKey() {
return DictionaryBits::encode(dictionary_.code()) |
// Platform-specific RuntimeCallHelper functions.
void StubRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
- masm->EnterFrame(StackFrame::INTERNAL);
- ASSERT(!masm->has_frame());
- masm->set_has_frame(true);
+ masm->EnterInternalFrame();
}
void StubRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
- masm->LeaveFrame(StackFrame::INTERNAL);
- ASSERT(masm->has_frame());
- masm->set_has_frame(false);
+ masm->LeaveInternalFrame();
}
RegList non_object_regs,
bool convert_call_to_jmp) {
// Enter an internal frame.
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
-
- // Store the registers containing live values on the expression stack to
- // make sure that these are correctly updated during GC. Non object values
- // are stored as a smi causing it to be untouched by GC.
- ASSERT((object_regs & ~kJSCallerSaved) == 0);
- ASSERT((non_object_regs & ~kJSCallerSaved) == 0);
- ASSERT((object_regs & non_object_regs) == 0);
- for (int i = 0; i < kNumJSCallerSaved; i++) {
- int r = JSCallerSavedCode(i);
- Register reg = { r };
- if ((object_regs & (1 << r)) != 0) {
- __ push(reg);
- }
- if ((non_object_regs & (1 << r)) != 0) {
- if (FLAG_debug_code) {
- __ test(reg, Immediate(0xc0000000));
- __ Assert(zero, "Unable to encode value as smi");
- }
- __ SmiTag(reg);
- __ push(reg);
+ __ EnterInternalFrame();
+
+ // Store the registers containing live values on the expression stack to
+ // make sure that these are correctly updated during GC. Non-object values
+ // are stored as smis, so the GC leaves them untouched.
+ ASSERT((object_regs & ~kJSCallerSaved) == 0);
+ ASSERT((non_object_regs & ~kJSCallerSaved) == 0);
+ ASSERT((object_regs & non_object_regs) == 0);
+ for (int i = 0; i < kNumJSCallerSaved; i++) {
+ int r = JSCallerSavedCode(i);
+ Register reg = { r };
+ if ((object_regs & (1 << r)) != 0) {
+ __ push(reg);
+ }
+ if ((non_object_regs & (1 << r)) != 0) {
+ if (FLAG_debug_code) {
+ __ test(reg, Immediate(0xc0000000));
+ __ Assert(zero, "Unable to encode value as smi");
}
+ __ SmiTag(reg);
+ __ push(reg);
}
+ }
#ifdef DEBUG
- __ RecordComment("// Calling from debug break to runtime - come in - over");
+ __ RecordComment("// Calling from debug break to runtime - come in - over");
#endif
- __ Set(eax, Immediate(0)); // No arguments.
- __ mov(ebx, Immediate(ExternalReference::debug_break(masm->isolate())));
-
- CEntryStub ceb(1);
- __ CallStub(&ceb);
-
- // Restore the register values containing object pointers from the
- // expression stack.
- for (int i = kNumJSCallerSaved; --i >= 0;) {
- int r = JSCallerSavedCode(i);
- Register reg = { r };
- if (FLAG_debug_code) {
- __ Set(reg, Immediate(kDebugZapValue));
- }
- if ((object_regs & (1 << r)) != 0) {
- __ pop(reg);
- }
- if ((non_object_regs & (1 << r)) != 0) {
- __ pop(reg);
- __ SmiUntag(reg);
- }
+ __ Set(eax, Immediate(0)); // No arguments.
+ __ mov(ebx, Immediate(ExternalReference::debug_break(masm->isolate())));
+
+ CEntryStub ceb(1);
+ __ CallStub(&ceb);
+
+ // Restore the register values containing object pointers from the expression
+ // stack.
+ for (int i = kNumJSCallerSaved; --i >= 0;) {
+ int r = JSCallerSavedCode(i);
+ Register reg = { r };
+ if (FLAG_debug_code) {
+ __ Set(reg, Immediate(kDebugZapValue));
+ }
+ if ((object_regs & (1 << r)) != 0) {
+ __ pop(reg);
+ }
+ if ((non_object_regs & (1 << r)) != 0) {
+ __ pop(reg);
+ __ SmiUntag(reg);
}
-
- // Get rid of the internal frame.
}
+ // Get rid of the internal frame.
+ __ LeaveInternalFrame();
+
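Smi-tagging the non-object registers before pushing them is what makes the pushed words safe for the GC: a smi has a clear low bit, so the collector never treats it as a heap pointer. A standalone sketch of the invariant (32-bit layout as on ia32; kHeapObjectTag matches the real V8 constant, the rest is illustrative):

#include <cassert>
#include <cstdint>

// On ia32 a heap pointer carries a 1 in its low bit; a smi carries a 0.
constexpr uint32_t kHeapObjectTag = 1;

bool LooksLikeHeapPointer(uint32_t word) {
  return (word & 1u) == kHeapObjectTag;
}

int main() {
  uint32_t raw = 0x12345678;  // raw register value; top bits clear, which is
                              // what the "Unable to encode value as smi"
                              // Assert above verifies
  uint32_t smi = raw << 1;    // SmiTag
  assert(!LooksLikeHeapPointer(smi));  // GC will not trace this word
  assert((smi >> 1) == raw);           // SmiUntag restores the value
  return 0;
}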
// If this call did not replace a call but patched other code then there will
// be an unwanted return address left on the stack. Here we get rid of that.
if (convert_call_to_jmp) {
__ mov(Operand(esp, 4 * kPointerSize), edx); // Fp-to-sp delta.
__ mov(Operand(esp, 5 * kPointerSize),
Immediate(ExternalReference::isolate_address()));
- {
- AllowExternalCallThatCantCauseGC scope(masm());
- __ CallCFunction(ExternalReference::new_deoptimizer_function(isolate), 6);
- }
+ __ CallCFunction(ExternalReference::new_deoptimizer_function(isolate), 6);
// Preserve deoptimizer object in register eax and get the input
// frame descriptor pointer.
__ push(eax);
__ PrepareCallCFunction(1, ebx);
__ mov(Operand(esp, 0 * kPointerSize), eax);
- {
- AllowExternalCallThatCantCauseGC scope(masm());
- __ CallCFunction(
- ExternalReference::compute_output_frames_function(isolate), 1);
- }
+ __ CallCFunction(
+ ExternalReference::compute_output_frames_function(isolate), 1);
__ pop(eax);
// Replace the current frame with the output frames.
__ bind(&ok);
}
- // Open a frame scope to indicate that there is a frame on the stack. The
- // MANUAL indicates that the scope shouldn't actually generate code to set up
- // the frame (that is done below).
- FrameScope frame_scope(masm_, StackFrame::MANUAL);
-
__ push(ebp); // Caller's frame pointer.
__ mov(ebp, esp);
__ push(esi); // Callee's context.
// Get the receiver of the function from the stack; 1 ~ return address.
__ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
+ // Enter an internal frame.
+ __ EnterInternalFrame();
- // Push the receiver and the name of the function.
- __ push(edx);
- __ push(ecx);
+ // Push the receiver and the name of the function.
+ __ push(edx);
+ __ push(ecx);
- // Call the entry.
- CEntryStub stub(1);
- __ mov(eax, Immediate(2));
- __ mov(ebx, Immediate(ExternalReference(IC_Utility(id), masm->isolate())));
- __ CallStub(&stub);
+ // Call the entry.
+ CEntryStub stub(1);
+ __ mov(eax, Immediate(2));
+ __ mov(ebx, Immediate(ExternalReference(IC_Utility(id), masm->isolate())));
+ __ CallStub(&stub);
- // Move result to edi and exit the internal frame.
- __ mov(edi, eax);
- }
+ // Move result to edi and exit the internal frame.
+ __ mov(edi, eax);
+ __ LeaveInternalFrame();
// Check if the receiver is a global object of some sort.
// This can happen only for regular CallIC but not KeyedCallIC.
// This branch is taken when calling KeyedCallIC_Miss is neither required
// nor beneficial.
__ IncrementCounter(counters->keyed_call_generic_slow_load(), 1);
-
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ push(ecx); // save the key
- __ push(edx); // pass the receiver
- __ push(ecx); // pass the key
- __ CallRuntime(Runtime::kKeyedGetProperty, 2);
- __ pop(ecx); // restore the key
- // Leave the internal frame.
- }
-
+ __ EnterInternalFrame();
+ __ push(ecx); // save the key
+ __ push(edx); // pass the receiver
+ __ push(ecx); // pass the key
+ __ CallRuntime(Runtime::kKeyedGetProperty, 2);
+ __ pop(ecx); // restore the key
+ __ LeaveInternalFrame();
__ mov(edi, eax);
__ jmp(&do_call);
ASSERT(is_unused());
status_ = GENERATING;
CpuFeatures::Scope scope(SSE2);
-
- // Open a frame scope to indicate that there is a frame on the stack. The
- // MANUAL indicates that the scope shouldn't actually generate code to set up
- // the frame (that is done in GeneratePrologue).
- FrameScope frame_scope(masm_, StackFrame::MANUAL);
-
return GeneratePrologue() &&
GenerateBody() &&
GenerateDeferredCode() &&
MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
: Assembler(arg_isolate, buffer, size),
generating_stub_(false),
- allow_stub_calls_(true),
- has_frame_(false) {
+ allow_stub_calls_(true) {
if (isolate() != NULL) {
code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
isolate());
void MacroAssembler::CallStub(CodeStub* stub, unsigned ast_id) {
- ASSERT(AllowThisStubCall(stub)); // Calls are not allowed in some stubs.
+ ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
}
MaybeObject* MacroAssembler::TryCallStub(CodeStub* stub) {
- ASSERT(AllowThisStubCall(stub)); // Calls are not allowed in some stubs.
+ ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
Object* result;
{ MaybeObject* maybe_result = stub->TryGetCode();
if (!maybe_result->ToObject(&result)) return maybe_result;
void MacroAssembler::TailCallStub(CodeStub* stub) {
- ASSERT(stub->CompilingCallsToThisStubIsGCSafe() || allow_stub_calls_);
+ ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
}
MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub) {
+ ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
Object* result;
{ MaybeObject* maybe_result = stub->TryGetCode();
if (!maybe_result->ToObject(&result)) return maybe_result;
}
-bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
- if (!has_frame_ && stub->SometimesSetsUpAFrame()) return false;
- return stub->CompilingCallsToThisStubIsGCSafe() || allow_stub_calls_;
-}
-
-
void MacroAssembler::IllegalOperation(int num_arguments) {
if (num_arguments > 0) {
add(Operand(esp), Immediate(num_arguments * kPointerSize));
InvokeFlag flag,
const CallWrapper& call_wrapper,
CallKind call_kind) {
- // You can't call a function without a valid frame.
- ASSERT(flag == JUMP_FUNCTION || has_frame());
-
Label done;
InvokePrologue(expected, actual, Handle<Code>::null(), code,
&done, flag, Label::kNear, call_wrapper,
InvokeFlag flag,
const CallWrapper& call_wrapper,
CallKind call_kind) {
- // You can't call a function without a valid frame.
- ASSERT(flag == JUMP_FUNCTION || has_frame());
-
Label done;
Operand dummy(eax);
InvokePrologue(expected, actual, code, dummy, &done, flag, Label::kNear,
InvokeFlag flag,
const CallWrapper& call_wrapper,
CallKind call_kind) {
- // You can't call a function without a valid frame.
- ASSERT(flag == JUMP_FUNCTION || has_frame());
-
ASSERT(fun.is(edi));
mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
InvokeFlag flag,
const CallWrapper& call_wrapper,
CallKind call_kind) {
- // You can't call a function without a valid frame.
- ASSERT(flag == JUMP_FUNCTION || has_frame());
-
ASSERT(function->is_compiled());
// Get the function and setup the context.
mov(edi, Immediate(Handle<JSFunction>(function)));
void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
InvokeFlag flag,
const CallWrapper& call_wrapper) {
- // You can't call a builtin without a valid frame.
- ASSERT(flag == JUMP_FUNCTION || has_frame());
+ // Calls are not allowed in some stubs.
+ ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());
// Rely on the assertion to check that the number of provided
// arguments match the expected number of arguments. Fake a
expected, expected, flag, call_wrapper, CALL_AS_METHOD);
}
-
void MacroAssembler::GetBuiltinFunction(Register target,
Builtins::JavaScript id) {
// Load the JavaScript builtin function from the builtins object.
JSBuiltinsObject::OffsetOfFunctionWithId(id)));
}
-
void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
ASSERT(!target.is(edi));
// Load the JavaScript builtin function from the builtins object.
RecordComment(msg);
}
#endif
+ // Disable stub call restrictions to always allow calls to abort.
+ AllowStubCallsScope allow_scope(this, true);
push(eax);
push(Immediate(p0));
push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
- // Disable stub call restrictions to always allow calls to abort.
- if (!has_frame_) {
- // We don't actually want to generate a pile of code for this, so just
- // claim there is a stack frame, without generating one.
- FrameScope scope(this, StackFrame::NONE);
- CallRuntime(Runtime::kAbort, 2);
- } else {
- CallRuntime(Runtime::kAbort, 2);
- }
+ CallRuntime(Runtime::kAbort, 2);
// will not return here
int3();
}
void MacroAssembler::CallCFunction(Register function,
int num_arguments) {
- ASSERT(has_frame());
// Check stack alignment.
if (emit_debug_code()) {
CheckStackAlignment();
#define V8_IA32_MACRO_ASSEMBLER_IA32_H_
#include "assembler.h"
-#include "frames.h"
#include "v8globals.h"
namespace v8 {
void DebugBreak();
#endif
+ // ---------------------------------------------------------------------------
+ // Activation frames
+
+ void EnterInternalFrame() { EnterFrame(StackFrame::INTERNAL); }
+ void LeaveInternalFrame() { LeaveFrame(StackFrame::INTERNAL); }
+
+ void EnterConstructFrame() { EnterFrame(StackFrame::CONSTRUCT); }
+ void LeaveConstructFrame() { LeaveFrame(StackFrame::CONSTRUCT); }
+
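// A hedged sketch of what the EnterFrame/LeaveFrame pair behind these
// helpers is expected to emit on ia32; the exact marker layout is an
// assumption for illustration only, the real sequence lives in
// MacroAssembler::EnterFrame.
//
//   void MacroAssembler::EnterFrame(StackFrame::Type type) {
//     push(ebp);
//     mov(ebp, esp);
//     push(esi);                            // Context.
//     push(Immediate(Smi::FromInt(type)));  // Frame type marker.
//   }
//   void MacroAssembler::LeaveFrame(StackFrame::Type type) {
//     leave();  // mov esp, ebp; pop ebp -- drops everything pushed above.
//   }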
// Enter specific kind of exit frame. Expects the number of
// arguments in register eax and sets up the number of arguments in
// register edi and the pointer to the first argument in register
bool generating_stub() { return generating_stub_; }
void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }
bool allow_stub_calls() { return allow_stub_calls_; }
- void set_has_frame(bool value) { has_frame_ = value; }
- bool has_frame() { return has_frame_; }
- inline bool AllowThisStubCall(CodeStub* stub);
// ---------------------------------------------------------------------------
// String utilities.
return SafepointRegisterStackIndex(reg.code());
}
- // Activation support.
- void EnterFrame(StackFrame::Type type);
- void LeaveFrame(StackFrame::Type type);
-
private:
bool generating_stub_;
bool allow_stub_calls_;
- bool has_frame_;
// This handle will be patched with the code object on installation.
Handle<Object> code_object_;
const CallWrapper& call_wrapper = NullCallWrapper(),
CallKind call_kind = CALL_AS_METHOD);
+ // Activation support.
+ void EnterFrame(StackFrame::Type type);
+ void LeaveFrame(StackFrame::Type type);
+
void EnterExitFramePrologue();
void EnterExitFrameEpilogue(int argc, bool save_doubles);
__ add(edx, Operand(esi));
__ mov(Operand(esp, 0 * kPointerSize), edx);
- {
- AllowExternalCallThatCantCauseGC scope(masm_);
- ExternalReference compare =
- ExternalReference::re_case_insensitive_compare_uc16(masm_->isolate());
- __ CallCFunction(compare, argument_count);
- }
+ ExternalReference compare =
+ ExternalReference::re_case_insensitive_compare_uc16(masm_->isolate());
+ __ CallCFunction(compare, argument_count);
// Pop original values before reacting on result value.
__ pop(ebx);
__ pop(backtrack_stackpointer());
// Entry code:
__ bind(&entry_label_);
-
- // Tell the system that we have a stack frame. Because the type is MANUAL,
- // no code is generated.
- FrameScope scope(masm_, StackFrame::MANUAL);
-
- // Actually emit code to start a new stack frame.
+ // Start new stack frame.
__ push(ebp);
__ mov(ebp, esp);
// Save callee-save registers. Order here should correspond to order of
scratch1, scratch2, scratch3, name,
miss_label);
- FrameScope scope(masm, StackFrame::INTERNAL);
+ __ EnterInternalFrame();
// Save the name_ register across the call.
__ push(name_);
// Restore the name_ register.
__ pop(name_);
-
- // Leave the internal frame.
+ __ LeaveInternalFrame();
}
void LoadWithInterceptor(MacroAssembler* masm,
Register holder,
JSObject* holder_obj,
Label* interceptor_succeeded) {
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ push(holder); // Save the holder.
- __ push(name_); // Save the name.
-
- CompileCallLoadPropertyWithInterceptor(masm,
- receiver,
- holder,
- name_,
- holder_obj);
-
- __ pop(name_); // Restore the name.
- __ pop(receiver); // Restore the holder.
- // Leave the internal frame.
- }
+ __ EnterInternalFrame();
+ __ push(holder); // Save the holder.
+ __ push(name_); // Save the name.
+
+ CompileCallLoadPropertyWithInterceptor(masm,
+ receiver,
+ holder,
+ name_,
+ holder_obj);
+
+ __ pop(name_); // Restore the name.
+ __ pop(receiver); // Restore the holder.
+ __ LeaveInternalFrame();
__ cmp(eax, masm->isolate()->factory()->no_interceptor_result_sentinel());
__ j(not_equal, interceptor_succeeded);
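// In effect, the compare-and-branch above implements the following hedged
// sketch, where result stands for eax after the runtime call:
//
//   if (result != no_interceptor_result_sentinel) goto interceptor_succeeded;
//   // Fall through: the interceptor produced no value.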
// Save necessary data before invoking an interceptor.
// Requires a frame to make GC aware of pushed pointers.
- {
- FrameScope frame_scope(masm(), StackFrame::INTERNAL);
-
- if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
- // CALLBACKS case needs a receiver to be passed into C++ callback.
- __ push(receiver);
- }
- __ push(holder_reg);
- __ push(name_reg);
+ __ EnterInternalFrame();
- // Invoke an interceptor. Note: map checks from receiver to
- // interceptor's holder have been compiled before (see a caller
- // of this method).
- CompileCallLoadPropertyWithInterceptor(masm(),
- receiver,
- holder_reg,
- name_reg,
- interceptor_holder);
-
- // Check if the interceptor provided a value for the property.
- // If it did, return immediately.
- Label interceptor_failed;
- __ cmp(eax, factory()->no_interceptor_result_sentinel());
- __ j(equal, &interceptor_failed);
- frame_scope.GenerateLeaveFrame();
- __ ret(0);
-
- __ bind(&interceptor_failed);
- __ pop(name_reg);
- __ pop(holder_reg);
- if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
- __ pop(receiver);
- }
+ if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
+ // CALLBACKS case needs a receiver to be passed into C++ callback.
+ __ push(receiver);
+ }
+ __ push(holder_reg);
+ __ push(name_reg);
+
+ // Invoke an interceptor. Note: map checks from receiver to
+ // interceptor's holder have been compiled before (see a caller
+ // of this method).
+ CompileCallLoadPropertyWithInterceptor(masm(),
+ receiver,
+ holder_reg,
+ name_reg,
+ interceptor_holder);
+
+ // Check if the interceptor provided a value for the property.
+ // If it did, return immediately.
+ Label interceptor_failed;
+ __ cmp(eax, factory()->no_interceptor_result_sentinel());
+ __ j(equal, &interceptor_failed);
+ __ LeaveInternalFrame();
+ __ ret(0);
- // Leave the internal frame.
+ __ bind(&interceptor_failed);
+ __ pop(name_reg);
+ __ pop(holder_reg);
+ if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
+ __ pop(receiver);
}
+ __ LeaveInternalFrame();
+
// Check that the maps from interceptor's holder to lookup's holder
// haven't changed. And load lookup's holder into holder_reg.
if (interceptor_holder != lookup->holder()) {
namespace v8 {
namespace internal {
-class FrameScope {
- public:
- explicit FrameScope(MacroAssembler* masm, StackFrame::Type type)
- : masm_(masm), type_(type) {
- ASSERT(!masm->has_frame());
- masm->set_has_frame(true);
- if (type != StackFrame::MANUAL && type_ != StackFrame::NONE) {
- masm->EnterFrame(type);
- }
- }
-
- ~FrameScope() {
- if (type_ != StackFrame::MANUAL && type_ != StackFrame::NONE) {
- masm_->LeaveFrame(type_);
- }
- masm_->set_has_frame(false);
- }
-
- // Normally we generate the leave-frame code when this object goes
- // out of scope. Sometimes we may need to generate the code somewhere else
- // in addition. Calling this will achieve that, but the object stays in
- // scope, the MacroAssembler is still marked as being in a frame scope, and
- // the code will be generated again when it goes out of scope.
- void GenerateLeaveFrame() {
- masm_->LeaveFrame(type_);
- }
-
- private:
- MacroAssembler* masm_;
- StackFrame::Type type_;
-};
-
-
-class AllowExternalCallThatCantCauseGC: public FrameScope {
- public:
- explicit AllowExternalCallThatCantCauseGC(MacroAssembler* masm)
- : FrameScope(masm, StackFrame::NONE) { }
-};
-
-
-class NoCurrentFrameScope {
- public:
- explicit NoCurrentFrameScope(MacroAssembler* masm)
- : masm_(masm), saved_(masm->has_frame()) {
- masm->set_has_frame(false);
- }
-
- ~NoCurrentFrameScope() {
- masm_->set_has_frame(saved_);
- }
-
- private:
- MacroAssembler* masm_;
- bool saved_;
-};
-
-
// Support for "structured" code comments.
#ifdef DEBUG
// Should never count constructions for api objects.
ASSERT(!is_api_function || !count_constructions);
- // Enter a construct frame.
- {
- FrameScope scope(masm, StackFrame::CONSTRUCT);
+ // Enter a construct frame.
+ __ EnterConstructFrame();
- // Store a smi-tagged arguments count on the stack.
- __ Integer32ToSmi(rax, rax);
- __ push(rax);
+ // Store a smi-tagged arguments count on the stack.
+ __ Integer32ToSmi(rax, rax);
+ __ push(rax);
- // Push the function to invoke on the stack.
- __ push(rdi);
+ // Push the function to invoke on the stack.
+ __ push(rdi);
- // Try to allocate the object without transitioning into C code. If any of
- // the preconditions is not met, the code bails out to the runtime call.
- Label rt_call, allocated;
- if (FLAG_inline_new) {
- Label undo_allocation;
+ // Try to allocate the object without transitioning into C code. If any of the
+ // preconditions is not met, the code bails out to the runtime call.
+ Label rt_call, allocated;
+ if (FLAG_inline_new) {
+ Label undo_allocation;
#ifdef ENABLE_DEBUGGER_SUPPORT
- ExternalReference debug_step_in_fp =
- ExternalReference::debug_step_in_fp_address(masm->isolate());
- __ movq(kScratchRegister, debug_step_in_fp);
- __ cmpq(Operand(kScratchRegister, 0), Immediate(0));
- __ j(not_equal, &rt_call);
+ ExternalReference debug_step_in_fp =
+ ExternalReference::debug_step_in_fp_address(masm->isolate());
+ __ movq(kScratchRegister, debug_step_in_fp);
+ __ cmpq(Operand(kScratchRegister, 0), Immediate(0));
+ __ j(not_equal, &rt_call);
#endif
- // Verified that the constructor is a JSFunction.
- // Load the initial map and verify that it is in fact a map.
- // rdi: constructor
- __ movq(rax, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
- // Will both indicate a NULL and a Smi
- ASSERT(kSmiTag == 0);
- __ JumpIfSmi(rax, &rt_call);
- // rdi: constructor
- // rax: initial map (if proven valid below)
- __ CmpObjectType(rax, MAP_TYPE, rbx);
- __ j(not_equal, &rt_call);
-
- // Check that the constructor is not constructing a JSFunction (see
- // comments in Runtime_NewObject in runtime.cc), in which case the initial
- // map's instance type would be JS_FUNCTION_TYPE.
- // rdi: constructor
- // rax: initial map
- __ CmpInstanceType(rax, JS_FUNCTION_TYPE);
- __ j(equal, &rt_call);
-
- if (count_constructions) {
- Label allocate;
- // Decrease generous allocation count.
- __ movq(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
- __ decb(FieldOperand(rcx,
- SharedFunctionInfo::kConstructionCountOffset));
- __ j(not_zero, &allocate);
-
- __ push(rax);
- __ push(rdi);
+ // Verified that the constructor is a JSFunction.
+ // Load the initial map and verify that it is in fact a map.
+ // rdi: constructor
+ __ movq(rax, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
+ // Will both indicate a NULL and a Smi
+ ASSERT(kSmiTag == 0);
+ __ JumpIfSmi(rax, &rt_call);
+ // rdi: constructor
+ // rax: initial map (if proven valid below)
+ __ CmpObjectType(rax, MAP_TYPE, rbx);
+ __ j(not_equal, &rt_call);
+
+ // Check that the constructor is not constructing a JSFunction (see comments
+ // in Runtime_NewObject in runtime.cc), in which case the initial map's
+ // instance type would be JS_FUNCTION_TYPE.
+ // rdi: constructor
+ // rax: initial map
+ __ CmpInstanceType(rax, JS_FUNCTION_TYPE);
+ __ j(equal, &rt_call);
+
+ if (count_constructions) {
+ Label allocate;
+ // Decrease generous allocation count.
+ __ movq(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
+ __ decb(FieldOperand(rcx, SharedFunctionInfo::kConstructionCountOffset));
+ __ j(not_zero, &allocate);
- __ push(rdi); // constructor
- // The call will replace the stub, so the countdown is only done once.
- __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
+ __ push(rax);
+ __ push(rdi);
- __ pop(rdi);
- __ pop(rax);
+ __ push(rdi); // constructor
+ // The call will replace the stub, so the countdown is only done once.
+ __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
- __ bind(&allocate);
- }
+ __ pop(rdi);
+ __ pop(rax);
- // Now allocate the JSObject on the heap.
- __ movzxbq(rdi, FieldOperand(rax, Map::kInstanceSizeOffset));
- __ shl(rdi, Immediate(kPointerSizeLog2));
- // rdi: size of new object
- __ AllocateInNewSpace(rdi,
- rbx,
- rdi,
- no_reg,
- &rt_call,
- NO_ALLOCATION_FLAGS);
- // Allocated the JSObject, now initialize the fields.
- // rax: initial map
- // rbx: JSObject (not HeapObject tagged - the actual address).
- // rdi: start of next object
- __ movq(Operand(rbx, JSObject::kMapOffset), rax);
- __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
- __ movq(Operand(rbx, JSObject::kPropertiesOffset), rcx);
- __ movq(Operand(rbx, JSObject::kElementsOffset), rcx);
- // Set extra fields in the newly allocated object.
- // rax: initial map
- // rbx: JSObject
- // rdi: start of next object
- { Label loop, entry;
- // To allow for truncation.
- if (count_constructions) {
- __ LoadRoot(rdx, Heap::kOnePointerFillerMapRootIndex);
- } else {
- __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
- }
- __ lea(rcx, Operand(rbx, JSObject::kHeaderSize));
- __ jmp(&entry);
- __ bind(&loop);
- __ movq(Operand(rcx, 0), rdx);
- __ addq(rcx, Immediate(kPointerSize));
- __ bind(&entry);
- __ cmpq(rcx, rdi);
- __ j(less, &loop);
- }
+ __ bind(&allocate);
+ }
- // Add the object tag to make the JSObject real, so that we can continue
- // and jump into the continuation code at any time from now on. Any
- // failures need to undo the allocation, so that the heap is in a
- // consistent state and verifiable.
- // rax: initial map
- // rbx: JSObject
- // rdi: start of next object
- __ or_(rbx, Immediate(kHeapObjectTag));
-
- // Check if a non-empty properties array is needed.
- // Allocate and initialize a FixedArray if it is.
- // rax: initial map
- // rbx: JSObject
- // rdi: start of next object
- // Calculate the total number of properties described by the map.
- __ movzxbq(rdx, FieldOperand(rax, Map::kUnusedPropertyFieldsOffset));
- __ movzxbq(rcx,
- FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset));
- __ addq(rdx, rcx);
- // Calculate unused properties past the end of the in-object properties.
- __ movzxbq(rcx, FieldOperand(rax, Map::kInObjectPropertiesOffset));
- __ subq(rdx, rcx);
- // Done if no extra properties are to be allocated.
- __ j(zero, &allocated);
- __ Assert(positive, "Property allocation count failed.");
-
- // Scale the number of elements by pointer size and add the header for
- // FixedArrays to the start of the next object calculation from above.
- // rbx: JSObject
- // rdi: start of next object (will be start of FixedArray)
- // rdx: number of elements in properties array
- __ AllocateInNewSpace(FixedArray::kHeaderSize,
- times_pointer_size,
- rdx,
- rdi,
- rax,
- no_reg,
- &undo_allocation,
- RESULT_CONTAINS_TOP);
-
- // Initialize the FixedArray.
- // rbx: JSObject
- // rdi: FixedArray
- // rdx: number of elements
- // rax: start of next object
- __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex);
- __ movq(Operand(rdi, HeapObject::kMapOffset), rcx); // setup the map
- __ Integer32ToSmi(rdx, rdx);
- __ movq(Operand(rdi, FixedArray::kLengthOffset), rdx); // and length
-
- // Initialize the fields to undefined.
- // rbx: JSObject
- // rdi: FixedArray
- // rax: start of next object
- // rdx: number of elements
- { Label loop, entry;
+ // Now allocate the JSObject on the heap.
+ __ movzxbq(rdi, FieldOperand(rax, Map::kInstanceSizeOffset));
+ __ shl(rdi, Immediate(kPointerSizeLog2));
+ // rdi: size of new object
+ __ AllocateInNewSpace(rdi,
+ rbx,
+ rdi,
+ no_reg,
+ &rt_call,
+ NO_ALLOCATION_FLAGS);
+ // Allocated the JSObject, now initialize the fields.
+ // rax: initial map
+ // rbx: JSObject (not HeapObject tagged - the actual address).
+ // rdi: start of next object
+ __ movq(Operand(rbx, JSObject::kMapOffset), rax);
+ __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
+ __ movq(Operand(rbx, JSObject::kPropertiesOffset), rcx);
+ __ movq(Operand(rbx, JSObject::kElementsOffset), rcx);
+ // Set extra fields in the newly allocated object.
+ // rax: initial map
+ // rbx: JSObject
+ // rdi: start of next object
+ { Label loop, entry;
+ // To allow for truncation.
+ if (count_constructions) {
+ __ LoadRoot(rdx, Heap::kOnePointerFillerMapRootIndex);
+ } else {
__ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
- __ lea(rcx, Operand(rdi, FixedArray::kHeaderSize));
- __ jmp(&entry);
- __ bind(&loop);
- __ movq(Operand(rcx, 0), rdx);
- __ addq(rcx, Immediate(kPointerSize));
- __ bind(&entry);
- __ cmpq(rcx, rax);
- __ j(below, &loop);
}
+ __ lea(rcx, Operand(rbx, JSObject::kHeaderSize));
+ __ jmp(&entry);
+ __ bind(&loop);
+ __ movq(Operand(rcx, 0), rdx);
+ __ addq(rcx, Immediate(kPointerSize));
+ __ bind(&entry);
+ __ cmpq(rcx, rdi);
+ __ j(less, &loop);
+ }
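// The loop above is, in effect, the following hedged C++ sketch over
// untagged addresses (rbx is the new object, rdi the allocation end, and
// filler is the root loaded into rdx):
//
//   for (Object** p = rbx + JSObject::kHeaderSize; p < rdi; ++p) {
//     *p = filler;  // One-pointer filler map or undefined, per above.
//   }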
- // Store the initialized FixedArray into the properties field of
- // the JSObject
- // rbx: JSObject
- // rdi: FixedArray
- __ or_(rdi, Immediate(kHeapObjectTag)); // add the heap tag
- __ movq(FieldOperand(rbx, JSObject::kPropertiesOffset), rdi);
+ // Add the object tag to make the JSObject real, so that we can continue and
+ // jump into the continuation code at any time from now on. Any failures
+ // need to undo the allocation, so that the heap is in a consistent state
+ // and verifiable.
+ // rax: initial map
+ // rbx: JSObject
+ // rdi: start of next object
+ __ or_(rbx, Immediate(kHeapObjectTag));
+
+ // Check if a non-empty properties array is needed.
+ // Allocate and initialize a FixedArray if it is.
+ // rax: initial map
+ // rbx: JSObject
+ // rdi: start of next object
+ // Calculate the total number of properties described by the map.
+ __ movzxbq(rdx, FieldOperand(rax, Map::kUnusedPropertyFieldsOffset));
+ __ movzxbq(rcx, FieldOperand(rax, Map::kPreAllocatedPropertyFieldsOffset));
+ __ addq(rdx, rcx);
+ // Calculate unused properties past the end of the in-object properties.
+ __ movzxbq(rcx, FieldOperand(rax, Map::kInObjectPropertiesOffset));
+ __ subq(rdx, rcx);
+ // Done if no extra properties are to be allocated.
+ __ j(zero, &allocated);
+ __ Assert(positive, "Property allocation count failed.");
+
+ // Scale the number of elements by pointer size and add the header for
+ // FixedArrays to the start of the next object calculation from above.
+ // rbx: JSObject
+ // rdi: start of next object (will be start of FixedArray)
+ // rdx: number of elements in properties array
+ __ AllocateInNewSpace(FixedArray::kHeaderSize,
+ times_pointer_size,
+ rdx,
+ rdi,
+ rax,
+ no_reg,
+ &undo_allocation,
+ RESULT_CONTAINS_TOP);
+
+ // Initialize the FixedArray.
+ // rbx: JSObject
+ // rdi: FixedArray
+ // rdx: number of elements
+ // rax: start of next object
+ __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex);
+ __ movq(Operand(rdi, HeapObject::kMapOffset), rcx); // setup the map
+ __ Integer32ToSmi(rdx, rdx);
+ __ movq(Operand(rdi, FixedArray::kLengthOffset), rdx); // and length
+
+ // Initialize the fields to undefined.
+ // rbx: JSObject
+ // rdi: FixedArray
+ // rax: start of next object
+ // rdx: number of elements
+ { Label loop, entry;
+ __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
+ __ lea(rcx, Operand(rdi, FixedArray::kHeaderSize));
+ __ jmp(&entry);
+ __ bind(&loop);
+ __ movq(Operand(rcx, 0), rdx);
+ __ addq(rcx, Immediate(kPointerSize));
+ __ bind(&entry);
+ __ cmpq(rcx, rax);
+ __ j(below, &loop);
+ }
+ // Store the initialized FixedArray into the properties field of
+ // the JSObject
+ // rbx: JSObject
+ // rdi: FixedArray
+ __ or_(rdi, Immediate(kHeapObjectTag)); // add the heap tag
+ __ movq(FieldOperand(rbx, JSObject::kPropertiesOffset), rdi);
- // Continue with JSObject being successfully allocated
- // rbx: JSObject
- __ jmp(&allocated);
- // Undo the setting of the new top so that the heap is verifiable. For
- // example, the map's unused properties potentially do not match the
- // allocated object's unused properties.
- // rbx: JSObject (previous new top)
- __ bind(&undo_allocation);
- __ UndoAllocationInNewSpace(rbx);
- }
+ // Continue with JSObject being successfully allocated
+ // rbx: JSObject
+ __ jmp(&allocated);
- // Allocate the new receiver object using the runtime call.
- // rdi: function (constructor)
- __ bind(&rt_call);
- // Must restore rdi (constructor) before calling runtime.
- __ movq(rdi, Operand(rsp, 0));
- __ push(rdi);
- __ CallRuntime(Runtime::kNewObject, 1);
- __ movq(rbx, rax); // store result in rbx
+ // Undo the setting of the new top so that the heap is verifiable. For
+ // example, the map's unused properties potentially do not match the
+ // allocated object's unused properties.
+ // rbx: JSObject (previous new top)
+ __ bind(&undo_allocation);
+ __ UndoAllocationInNewSpace(rbx);
+ }
- // New object allocated.
- // rbx: newly allocated object
- __ bind(&allocated);
- // Retrieve the function from the stack.
- __ pop(rdi);
+ // Allocate the new receiver object using the runtime call.
+ // rdi: function (constructor)
+ __ bind(&rt_call);
+ // Must restore rdi (constructor) before calling runtime.
+ __ movq(rdi, Operand(rsp, 0));
+ __ push(rdi);
+ __ CallRuntime(Runtime::kNewObject, 1);
+ __ movq(rbx, rax); // store result in rbx
- // Retrieve smi-tagged arguments count from the stack.
- __ movq(rax, Operand(rsp, 0));
- __ SmiToInteger32(rax, rax);
+ // New object allocated.
+ // rbx: newly allocated object
+ __ bind(&allocated);
+ // Retrieve the function from the stack.
+ __ pop(rdi);
- // Push the allocated receiver to the stack. We need two copies
- // because we may have to return the original one and the calling
- // conventions dictate that the called function pops the receiver.
- __ push(rbx);
- __ push(rbx);
+ // Retrieve smi-tagged arguments count from the stack.
+ __ movq(rax, Operand(rsp, 0));
+ __ SmiToInteger32(rax, rax);
- // Set up the pointer to the last argument.
- __ lea(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));
+ // Push the allocated receiver to the stack. We need two copies
+ // because we may have to return the original one and the calling
+ // conventions dictate that the called function pops the receiver.
+ __ push(rbx);
+ __ push(rbx);
- // Copy arguments and receiver to the expression stack.
- Label loop, entry;
- __ movq(rcx, rax);
- __ jmp(&entry);
- __ bind(&loop);
- __ push(Operand(rbx, rcx, times_pointer_size, 0));
- __ bind(&entry);
- __ decq(rcx);
- __ j(greater_equal, &loop);
-
- // Call the function.
- if (is_api_function) {
- __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
- Handle<Code> code =
- masm->isolate()->builtins()->HandleApiCallConstruct();
- ParameterCount expected(0);
- __ InvokeCode(code, expected, expected, RelocInfo::CODE_TARGET,
- CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
- } else {
- ParameterCount actual(rax);
- __ InvokeFunction(rdi, actual, CALL_FUNCTION,
- NullCallWrapper(), CALL_AS_METHOD);
- }
+ // Set up the pointer to the last argument.
+ __ lea(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));
- // Restore context from the frame.
- __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
+ // Copy arguments and receiver to the expression stack.
+ Label loop, entry;
+ __ movq(rcx, rax);
+ __ jmp(&entry);
+ __ bind(&loop);
+ __ push(Operand(rbx, rcx, times_pointer_size, 0));
+ __ bind(&entry);
+ __ decq(rcx);
+ __ j(greater_equal, &loop);
+
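// Equivalent hedged sketch of the copy loop above, with args standing for
// the last-argument pointer in rbx and argc for the untagged count in rax;
// arguments land on the stack in the order the caller pushed them:
//
//   for (int i = argc - 1; i >= 0; --i) push(args[i]);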
+ // Call the function.
+ if (is_api_function) {
+ __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
+ Handle<Code> code =
+ masm->isolate()->builtins()->HandleApiCallConstruct();
+ ParameterCount expected(0);
+ __ InvokeCode(code, expected, expected, RelocInfo::CODE_TARGET,
+ CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
+ } else {
+ ParameterCount actual(rax);
+ __ InvokeFunction(rdi, actual, CALL_FUNCTION,
+ NullCallWrapper(), CALL_AS_METHOD);
+ }
- // If the result is an object (in the ECMA sense), we should get rid
- // of the receiver and use the result; see ECMA-262 section 13.2.2-7
- // on page 74.
- Label use_receiver, exit;
- // If the result is a smi, it is *not* an object in the ECMA sense.
- __ JumpIfSmi(rax, &use_receiver);
+ // Restore context from the frame.
+ __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
- // If the type of the result (stored in its map) is less than
- // FIRST_JS_OBJECT_TYPE, it is not an object in the ECMA sense.
- __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
- __ j(above_equal, &exit);
+ // If the result is an object (in the ECMA sense), we should get rid
+ // of the receiver and use the result; see ECMA-262 section 13.2.2-7
+ // on page 74.
+ Label use_receiver, exit;
+ // If the result is a smi, it is *not* an object in the ECMA sense.
+ __ JumpIfSmi(rax, &use_receiver);
- // Throw away the result of the constructor invocation and use the
- // on-stack receiver as the result.
- __ bind(&use_receiver);
- __ movq(rax, Operand(rsp, 0));
+ // If the type of the result (stored in its map) is less than
+ // FIRST_JS_OBJECT_TYPE, it is not an object in the ECMA sense.
+ __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
+ __ j(above_equal, &exit);
- // Restore the arguments count and leave the construct frame.
- __ bind(&exit);
- __ movq(rbx, Operand(rsp, kPointerSize)); // get arguments count
+ // Throw away the result of the constructor invocation and use the
+ // on-stack receiver as the result.
+ __ bind(&use_receiver);
+ __ movq(rax, Operand(rsp, 0));
- // Leave the construct frame.
- }
+ // Restore the arguments count and leave the construct frame.
+ __ bind(&exit);
+ __ movq(rbx, Operand(rsp, kPointerSize)); // get arguments count
+ __ LeaveConstructFrame();
// Remove caller arguments from the stack and return.
__ pop(rcx);
// - Object*** argv
// (see Handle::Invoke in execution.cc).
- // Open a C++ scope for the FrameScope.
- {
- // Platform-specific argument handling. After this, the stack contains
- // an internal frame and the pushed function and receiver, and
- // registers rax and rbx hold the argument count and argument array,
- // while rdi holds the function pointer and rsi the context.
-
+ // Platform-specific argument handling. After this, the stack contains
+ // an internal frame and the pushed function and receiver, and
+ // registers rax and rbx hold the argument count and argument array,
+ // while rdi holds the function pointer and rsi the context.
#ifdef _WIN64
- // MSVC parameters in:
- // rcx : entry (ignored)
- // rdx : function
- // r8 : receiver
- // r9 : argc
- // [rsp+0x20] : argv
-
- // Clear the context before we push it when entering the internal frame.
- __ Set(rsi, 0);
- // Enter an internal frame.
- FrameScope scope(masm, StackFrame::INTERNAL);
-
- // Load the function context into rsi.
- __ movq(rsi, FieldOperand(rdx, JSFunction::kContextOffset));
-
- // Push the function and the receiver onto the stack.
- __ push(rdx);
- __ push(r8);
-
- // Load the number of arguments and set up the pointer to the arguments.
- __ movq(rax, r9);
- // Load the previous frame pointer to access C arguments on the stack.
- __ movq(kScratchRegister, Operand(rbp, 0));
- __ movq(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
- // Load the function pointer into rdi.
- __ movq(rdi, rdx);
+ // MSVC parameters in:
+ // rcx : entry (ignored)
+ // rdx : function
+ // r8 : receiver
+ // r9 : argc
+ // [rsp+0x20] : argv
+
+ // Clear the context before we push it when entering the JS frame.
+ __ Set(rsi, 0);
+ __ EnterInternalFrame();
+
+ // Load the function context into rsi.
+ __ movq(rsi, FieldOperand(rdx, JSFunction::kContextOffset));
+
+ // Push the function and the receiver onto the stack.
+ __ push(rdx);
+ __ push(r8);
+
+ // Load the number of arguments and set up the pointer to the arguments.
+ __ movq(rax, r9);
+ // Load the previous frame pointer to access C arguments on the stack.
+ __ movq(kScratchRegister, Operand(rbp, 0));
+ __ movq(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
+ // Load the function pointer into rdi.
+ __ movq(rdi, rdx);
#else // _WIN64
- // GCC parameters in:
- // rdi : entry (ignored)
- // rsi : function
- // rdx : receiver
- // rcx : argc
- // r8 : argv
-
- __ movq(rdi, rsi);
- // rdi : function
-
- // Clear the context before we push it when entering the internal frame.
- __ Set(rsi, 0);
- // Enter an internal frame.
- FrameScope scope(masm, StackFrame::INTERNAL);
-
- // Push the function and receiver and set up the context.
- __ push(rdi);
- __ push(rdx);
- __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
+ // GCC parameters in:
+ // rdi : entry (ignored)
+ // rsi : function
+ // rdx : receiver
+ // rcx : argc
+ // r8 : argv
+
+ __ movq(rdi, rsi);
+ // rdi : function
+
+ // Clear the context before we push it when entering the JS frame.
+ __ Set(rsi, 0);
+ // Enter an internal frame.
+ __ EnterInternalFrame();
+
+ // Push the function and receiver and set up the context.
+ __ push(rdi);
+ __ push(rdx);
+ __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
- // Load the number of arguments and set up the pointer to the arguments.
- __ movq(rax, rcx);
- __ movq(rbx, r8);
+ // Load the number of arguments and set up the pointer to the arguments.
+ __ movq(rax, rcx);
+ __ movq(rbx, r8);
#endif // _WIN64
- // Current stack contents:
- // [rsp + 2 * kPointerSize ... ]: Internal frame
- // [rsp + kPointerSize] : function
- // [rsp] : receiver
- // Current register contents:
- // rax : argc
- // rbx : argv
- // rsi : context
- // rdi : function
-
- // Copy arguments to the stack in a loop.
- // Register rbx points to array of pointers to handle locations.
- // Push the values of these handles.
- Label loop, entry;
- __ Set(rcx, 0); // Set loop variable to 0.
- __ jmp(&entry);
- __ bind(&loop);
- __ movq(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
- __ push(Operand(kScratchRegister, 0)); // dereference handle
- __ addq(rcx, Immediate(1));
- __ bind(&entry);
- __ cmpq(rcx, rax);
- __ j(not_equal, &loop);
-
- // Invoke the code.
- if (is_construct) {
- // Expects rdi to hold function pointer.
- __ Call(masm->isolate()->builtins()->JSConstructCall(),
- RelocInfo::CODE_TARGET);
- } else {
- ParameterCount actual(rax);
- // Function must be in rdi.
- __ InvokeFunction(rdi, actual, CALL_FUNCTION,
- NullCallWrapper(), CALL_AS_METHOD);
- }
- // Exit the internal frame. Notice that this also removes the empty
- // context and the function left on the stack by the code
- // invocation.
+ // Current stack contents:
+ // [rsp + 2 * kPointerSize ... ]: Internal frame
+ // [rsp + kPointerSize] : function
+ // [rsp] : receiver
+ // Current register contents:
+ // rax : argc
+ // rbx : argv
+ // rsi : context
+ // rdi : function
+
+ // Copy arguments to the stack in a loop.
+ // Register rbx points to array of pointers to handle locations.
+ // Push the values of these handles.
+ Label loop, entry;
+ __ Set(rcx, 0); // Set loop variable to 0.
+ __ jmp(&entry);
+ __ bind(&loop);
+ __ movq(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
+ __ push(Operand(kScratchRegister, 0)); // dereference handle
+ __ addq(rcx, Immediate(1));
+ __ bind(&entry);
+ __ cmpq(rcx, rax);
+ __ j(not_equal, &loop);
+
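// The loop above pushes the dereferenced handles, i.e. roughly:
//
//   for (int i = 0; i < argc; i++) push(*argv[i]);  // argv holds Object**
//                                                   // handle locations.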
+ // Invoke the code.
+ if (is_construct) {
+ // Expects rdi to hold function pointer.
+ __ Call(masm->isolate()->builtins()->JSConstructCall(),
+ RelocInfo::CODE_TARGET);
+ } else {
+ ParameterCount actual(rax);
+ // Function must be in rdi.
+ __ InvokeFunction(rdi, actual, CALL_FUNCTION,
+ NullCallWrapper(), CALL_AS_METHOD);
}
+ // Exit the JS frame. Notice that this also removes the empty
+ // context and the function left on the stack by the code
+ // invocation.
+ __ LeaveInternalFrame();
// TODO(X64): Is argument correct? Is there a receiver to remove?
- __ ret(1 * kPointerSize); // Remove receiver.
+ __ ret(1 * kPointerSize); // remove receiver
}
void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
// Enter an internal frame.
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
+ __ EnterInternalFrame();
- // Push a copy of the function onto the stack.
- __ push(rdi);
- // Push call kind information.
- __ push(rcx);
+ // Push a copy of the function onto the stack.
+ __ push(rdi);
+ // Push call kind information.
+ __ push(rcx);
- __ push(rdi); // Function is also the parameter to the runtime call.
- __ CallRuntime(Runtime::kLazyCompile, 1);
+ __ push(rdi); // Function is also the parameter to the runtime call.
+ __ CallRuntime(Runtime::kLazyCompile, 1);
- // Restore call kind information.
- __ pop(rcx);
- // Restore receiver.
- __ pop(rdi);
+ // Restore call kind information.
+ __ pop(rcx);
+ // Restore receiver.
+ __ pop(rdi);
- // Tear down internal frame.
- }
+ // Tear down temporary frame.
+ __ LeaveInternalFrame();
// Do a tail-call of the compiled function.
__ lea(rax, FieldOperand(rax, Code::kHeaderSize));
void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
// Enter an internal frame.
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
+ __ EnterInternalFrame();
- // Push a copy of the function onto the stack.
- __ push(rdi);
- // Push call kind information.
- __ push(rcx);
+ // Push a copy of the function onto the stack.
+ __ push(rdi);
+ // Push call kind information.
+ __ push(rcx);
- __ push(rdi); // Function is also the parameter to the runtime call.
- __ CallRuntime(Runtime::kLazyRecompile, 1);
+ __ push(rdi); // Function is also the parameter to the runtime call.
+ __ CallRuntime(Runtime::kLazyRecompile, 1);
- // Restore call kind information.
- __ pop(rcx);
- // Restore function.
- __ pop(rdi);
+ // Restore call kind information.
+ __ pop(rcx);
+ // Restore function.
+ __ pop(rdi);
- // Tear down internal frame.
- }
+ // Tear down temporary frame.
+ __ LeaveInternalFrame();
// Do a tail-call of the compiled function.
__ lea(rax, FieldOperand(rax, Code::kHeaderSize));
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
Deoptimizer::BailoutType type) {
// Enter an internal frame.
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
+ __ EnterInternalFrame();
- // Pass the deoptimization type to the runtime system.
- __ Push(Smi::FromInt(static_cast<int>(type)));
+ // Pass the deoptimization type to the runtime system.
+ __ Push(Smi::FromInt(static_cast<int>(type)));
- __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
- // Tear down internal frame.
- }
+ __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
+ // Tear down temporary frame.
+ __ LeaveInternalFrame();
// Get the full codegen state from the stack and untag it.
__ SmiToInteger32(rcx, Operand(rsp, 1 * kPointerSize));
// the registers without worrying about which of them contain
// pointers. This seems a bit fragile.
__ Pushad();
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ CallRuntime(Runtime::kNotifyOSR, 0);
- }
+ __ EnterInternalFrame();
+ __ CallRuntime(Runtime::kNotifyOSR, 0);
+ __ LeaveInternalFrame();
__ Popad();
__ ret(0);
}
__ j(above_equal, &shift_arguments);
__ bind(&convert_to_object);
- {
- // Enter an internal frame in order to preserve argument count.
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ Integer32ToSmi(rax, rax);
- __ push(rax);
-
- __ push(rbx);
- __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
- __ movq(rbx, rax);
+ __ EnterInternalFrame(); // In order to preserve argument count.
+ __ Integer32ToSmi(rax, rax);
+ __ push(rax);
- __ pop(rax);
- __ SmiToInteger32(rax, rax);
- }
+ __ push(rbx);
+ __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
+ __ movq(rbx, rax);
+ __ pop(rax);
+ __ SmiToInteger32(rax, rax);
+ __ LeaveInternalFrame();
// Restore the function to rdi.
__ movq(rdi, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize));
__ jmp(&patch_receiver, Label::kNear);
// rsp+8: arguments
// rsp+16: receiver ("this")
// rsp+24: function
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- // Stack frame:
- // rbp: Old base pointer
- // rbp[1]: return address
- // rbp[2]: function arguments
- // rbp[3]: receiver
- // rbp[4]: function
- static const int kArgumentsOffset = 2 * kPointerSize;
- static const int kReceiverOffset = 3 * kPointerSize;
- static const int kFunctionOffset = 4 * kPointerSize;
- __ push(Operand(rbp, kFunctionOffset));
- __ push(Operand(rbp, kArgumentsOffset));
- __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
-
- // Check the stack for overflow. We are not trying to catch
- // interruptions (e.g. debug break and preemption) here, so the "real stack
- // limit" is checked.
- Label okay;
- __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
- __ movq(rcx, rsp);
- // Make rcx the space we have left. The stack might already have overflowed
- // here, which will cause rcx to become negative.
- __ subq(rcx, kScratchRegister);
- // Make rdx the space we need for the array when it is unrolled onto the
- // stack.
- __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2);
- // Check if the arguments will overflow the stack.
- __ cmpq(rcx, rdx);
- __ j(greater, &okay); // Signed comparison.
-
- // Out of stack space.
- __ push(Operand(rbp, kFunctionOffset));
- __ push(rax);
- __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
- __ bind(&okay);
- // End of stack check.
-
- // Push current index and limit.
- const int kLimitOffset =
- StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize;
- const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
- __ push(rax); // limit
- __ push(Immediate(0)); // index
-
- // Change context eagerly to get the right global object if
- // necessary.
- __ movq(rdi, Operand(rbp, kFunctionOffset));
- __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
-
- // Compute the receiver.
- Label call_to_object, use_global_receiver, push_receiver;
- __ movq(rbx, Operand(rbp, kReceiverOffset));
-
- // Do not transform the receiver for strict mode functions.
- __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
- __ testb(FieldOperand(rdx, SharedFunctionInfo::kStrictModeByteOffset),
- Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
- __ j(not_equal, &push_receiver);
-
- // Do not transform the receiver for natives.
- __ testb(FieldOperand(rdx, SharedFunctionInfo::kNativeByteOffset),
- Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
- __ j(not_equal, &push_receiver);
-
- // Compute the receiver in non-strict mode.
- __ JumpIfSmi(rbx, &call_to_object, Label::kNear);
- __ CompareRoot(rbx, Heap::kNullValueRootIndex);
- __ j(equal, &use_global_receiver);
- __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
- __ j(equal, &use_global_receiver);
-
- // If given receiver is already a JavaScript object then there's no
- // reason for converting it.
- STATIC_ASSERT(LAST_JS_OBJECT_TYPE + 1 == LAST_TYPE);
- STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
- __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, rcx);
- __ j(above_equal, &push_receiver);
-
- // Convert the receiver to an object.
- __ bind(&call_to_object);
- __ push(rbx);
- __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
- __ movq(rbx, rax);
- __ jmp(&push_receiver, Label::kNear);
-
- // Use the current global receiver object as the receiver.
- __ bind(&use_global_receiver);
- const int kGlobalOffset =
- Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
- __ movq(rbx, FieldOperand(rsi, kGlobalOffset));
- __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalContextOffset));
- __ movq(rbx, FieldOperand(rbx, kGlobalOffset));
- __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
-
- // Push the receiver.
- __ bind(&push_receiver);
- __ push(rbx);
-
- // Copy all arguments from the array to the stack.
- Label entry, loop;
- __ movq(rax, Operand(rbp, kIndexOffset));
- __ jmp(&entry);
- __ bind(&loop);
- __ movq(rdx, Operand(rbp, kArgumentsOffset)); // load arguments
-
- // Use inline caching to speed up access to arguments.
- Handle<Code> ic =
- masm->isolate()->builtins()->KeyedLoadIC_Initialize();
- __ Call(ic, RelocInfo::CODE_TARGET);
- // It is important that we do not have a test instruction after the
- // call. A test instruction after the call is used to indicate that
- // we have generated an inline version of the keyed load. In this
- // case, we know that we are not generating a test instruction next.
-
- // Push the nth argument.
- __ push(rax);
+ __ EnterInternalFrame();
+ // Stack frame:
+ // rbp: Old base pointer
+ // rbp[1]: return address
+ // rbp[2]: function arguments
+ // rbp[3]: receiver
+ // rbp[4]: function
+ static const int kArgumentsOffset = 2 * kPointerSize;
+ static const int kReceiverOffset = 3 * kPointerSize;
+ static const int kFunctionOffset = 4 * kPointerSize;
+ __ push(Operand(rbp, kFunctionOffset));
+ __ push(Operand(rbp, kArgumentsOffset));
+ __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
+
+ // Check the stack for overflow. We are not trying to catch
+ // interruptions (e.g. debug break and preemption) here, so the "real stack
+ // limit" is checked.
+ Label okay;
+ __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
+ __ movq(rcx, rsp);
+ // Make rcx the space we have left. The stack might already have overflowed
+ // here, which will cause rcx to become negative.
+ __ subq(rcx, kScratchRegister);
+ // Make rdx the space we need for the array when it is unrolled onto the
+ // stack.
+ __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2);
+ // Check if the arguments will overflow the stack.
+ __ cmpq(rcx, rdx);
+ __ j(greater, &okay); // Signed comparison.
+
+ // Out of stack space.
+ __ push(Operand(rbp, kFunctionOffset));
+ __ push(rax);
+ __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
+ __ bind(&okay);
+ // End of stack check.
+
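// The whole check amounts to this hedged sketch (signed arithmetic, so a
// stack that has already overflowed makes space_left negative and takes
// the overflow path):
//
//   intptr_t space_left = rsp - real_stack_limit;  // rcx
//   intptr_t space_needed = argc * kPointerSize;   // rdx
//   if (space_left <= space_needed) InvokeBuiltin(APPLY_OVERFLOW);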
+ // Push current index and limit.
+ const int kLimitOffset =
+ StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize;
+ const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
+ __ push(rax); // limit
+ __ push(Immediate(0)); // index
+
+ // Change context eagerly to get the right global object if
+ // necessary.
+ __ movq(rdi, Operand(rbp, kFunctionOffset));
+ __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
+
+ // Compute the receiver.
+ Label call_to_object, use_global_receiver, push_receiver;
+ __ movq(rbx, Operand(rbp, kReceiverOffset));
+
+ // Do not transform the receiver for strict mode functions.
+ __ movq(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
+ __ testb(FieldOperand(rdx, SharedFunctionInfo::kStrictModeByteOffset),
+ Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
+ __ j(not_equal, &push_receiver);
+
+ // Do not transform the receiver for natives.
+ __ testb(FieldOperand(rdx, SharedFunctionInfo::kNativeByteOffset),
+ Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
+ __ j(not_equal, &push_receiver);
+
+ // Compute the receiver in non-strict mode.
+ __ JumpIfSmi(rbx, &call_to_object, Label::kNear);
+ __ CompareRoot(rbx, Heap::kNullValueRootIndex);
+ __ j(equal, &use_global_receiver);
+ __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
+ __ j(equal, &use_global_receiver);
+
+ // If given receiver is already a JavaScript object then there's no
+ // reason for converting it.
+ STATIC_ASSERT(LAST_JS_OBJECT_TYPE + 1 == LAST_TYPE);
+ STATIC_ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
+ __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, rcx);
+ __ j(above_equal, &push_receiver);
+
+ // Convert the receiver to an object.
+ __ bind(&call_to_object);
+ __ push(rbx);
+ __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
+ __ movq(rbx, rax);
+ __ jmp(&push_receiver, Label::kNear);
+
+ // Use the current global receiver object as the receiver.
+ __ bind(&use_global_receiver);
+ const int kGlobalOffset =
+ Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
+ __ movq(rbx, FieldOperand(rsi, kGlobalOffset));
+ __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalContextOffset));
+ __ movq(rbx, FieldOperand(rbx, kGlobalOffset));
+ __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
+
+ // Push the receiver.
+ __ bind(&push_receiver);
+ __ push(rbx);
+
+ // Copy all arguments from the array to the stack.
+ Label entry, loop;
+ __ movq(rax, Operand(rbp, kIndexOffset));
+ __ jmp(&entry);
+ __ bind(&loop);
+ __ movq(rdx, Operand(rbp, kArgumentsOffset)); // load arguments
+
+ // Use inline caching to speed up access to arguments.
+ Handle<Code> ic =
+ masm->isolate()->builtins()->KeyedLoadIC_Initialize();
+ __ Call(ic, RelocInfo::CODE_TARGET);
+ // It is important that we do not have a test instruction after the
+ // call. A test instruction after the call is used to indicate that
+ // we have generated an inline version of the keyed load. In this
+ // case, we know that we are not generating a test instruction next.
+
+ // Push the nth argument.
+ __ push(rax);
- // Update the index on the stack and in register rax.
- __ movq(rax, Operand(rbp, kIndexOffset));
- __ SmiAddConstant(rax, rax, Smi::FromInt(1));
- __ movq(Operand(rbp, kIndexOffset), rax);
+ // Update the index on the stack and in register rax.
+ __ movq(rax, Operand(rbp, kIndexOffset));
+ __ SmiAddConstant(rax, rax, Smi::FromInt(1));
+ __ movq(Operand(rbp, kIndexOffset), rax);
- __ bind(&entry);
- __ cmpq(rax, Operand(rbp, kLimitOffset));
- __ j(not_equal, &loop);
+ __ bind(&entry);
+ __ cmpq(rax, Operand(rbp, kLimitOffset));
+ __ j(not_equal, &loop);
- // Invoke the function.
- ParameterCount actual(rax);
- __ SmiToInteger32(rax, rax);
- __ movq(rdi, Operand(rbp, kFunctionOffset));
- __ InvokeFunction(rdi, actual, CALL_FUNCTION,
- NullCallWrapper(), CALL_AS_METHOD);
+ // Invoke the function.
+ ParameterCount actual(rax);
+ __ SmiToInteger32(rax, rax);
+ __ movq(rdi, Operand(rbp, kFunctionOffset));
+ __ InvokeFunction(rdi, actual, CALL_FUNCTION,
+ NullCallWrapper(), CALL_AS_METHOD);
- // Leave internal frame.
- }
+ __ LeaveInternalFrame();
__ ret(3 * kPointerSize); // remove function, receiver, and arguments
}
// Pass the function to optimize as the argument to the on-stack
// replacement runtime function.
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ push(rax);
- __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
- }
+ __ EnterInternalFrame();
+ __ push(rax);
+ __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
+ __ LeaveInternalFrame();
// If the result was -1 it means that we couldn't optimize the
// function. Just return and continue in the unoptimized version.
StackCheckStub stub;
__ TailCallStub(&stub);
- if (FLAG_debug_code) {
- __ Abort("Unreachable code: returned from tail call.");
- }
+ __ Abort("Unreachable code: returned from tail call.");
__ bind(&ok);
__ ret(0);
void ToBooleanStub::Generate(MacroAssembler* masm) {
- // This stub overrides SometimesSetsUpAFrame() to return false. That means
- // we cannot call anything that could cause a GC from this stub.
Label false_result, true_result, not_string;
__ movq(rax, Operand(rsp, 1 * kPointerSize));
__ jmp(&heapnumber_allocated);
__ bind(&slow_allocate_heapnumber);
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ push(rax);
- __ CallRuntime(Runtime::kNumberAlloc, 0);
- __ movq(rcx, rax);
- __ pop(rax);
- }
+ __ EnterInternalFrame();
+ __ push(rax);
+ __ CallRuntime(Runtime::kNumberAlloc, 0);
+ __ movq(rcx, rax);
+ __ pop(rax);
+ __ LeaveInternalFrame();
__ bind(&heapnumber_allocated);
// rcx: allocated 'empty' number
__ addq(rsp, Immediate(kDoubleSize));
// We return the value in xmm1 without adding it to the cache, but
// we cause a scavenging GC so that future allocations will succeed.
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- // Allocate an unused object bigger than a HeapNumber.
- __ Push(Smi::FromInt(2 * kDoubleSize));
- __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace);
- }
+ __ EnterInternalFrame();
+ // Allocate an unused object bigger than a HeapNumber.
+ __ Push(Smi::FromInt(2 * kDoubleSize));
+ __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace);
+ __ LeaveInternalFrame();
__ Ret();
}
__ bind(&runtime_call);
__ AllocateHeapNumber(rax, rdi, &skip_cache);
__ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm1);
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ push(rax);
- __ CallRuntime(RuntimeFunction(), 1);
- }
+ __ EnterInternalFrame();
+ __ push(rax);
+ __ CallRuntime(RuntimeFunction(), 1);
+ __ LeaveInternalFrame();
__ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
__ Ret();
}
// Call the runtime system in a fresh internal frame.
ExternalReference miss =
ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate());
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ push(rdx);
- __ push(rax);
- __ Push(Smi::FromInt(op_));
- __ CallExternalReference(miss, 3);
- }
+ __ EnterInternalFrame();
+ __ push(rdx);
+ __ push(rax);
+ __ Push(Smi::FromInt(op_));
+ __ CallExternalReference(miss, 3);
+ __ LeaveInternalFrame();
// Compute the entry point of the rewritten stub.
__ lea(rdi, FieldOperand(rax, Code::kHeaderSize));
void StringDictionaryLookupStub::Generate(MacroAssembler* masm) {
- // This stub overrides SometimesSetsUpAFrame() to return false. That means
- // we cannot call anything that could cause a GC from this stub.
// Stack frame on entry:
// esp[0 * kPointerSize]: return address.
// esp[1 * kPointerSize]: key's hash.
void Generate(MacroAssembler* masm);
- virtual bool SometimesSetsUpAFrame() { return false; }
-
private:
Major MajorKey() { return ToBoolean; }
int MinorKey() { return 0; }
Register r0,
Register r1);
- virtual bool SometimesSetsUpAFrame() { return false; }
-
private:
static const int kInlinedProbes = 4;
static const int kTotalProbes = 20;
}
#endif
- Major MajorKey() { return StringDictionaryLookup; }
+ Major MajorKey() { return StringDictionaryNegativeLookup; }
int MinorKey() {
return DictionaryBits::encode(dictionary_.code()) |
// Platform-specific RuntimeCallHelper functions.
void StubRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
- masm->EnterFrame(StackFrame::INTERNAL);
- ASSERT(!masm->has_frame());
- masm->set_has_frame(true);
+ masm->EnterInternalFrame();
}
void StubRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
- masm->LeaveFrame(StackFrame::INTERNAL);
- ASSERT(masm->has_frame());
- masm->set_has_frame(false);
+ masm->LeaveInternalFrame();
}
RegList non_object_regs,
bool convert_call_to_jmp) {
// Enter an internal frame.
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
-
- // Store the registers containing live values on the expression stack to
- // make sure that these are correctly updated during GC. Non object values
- // are stored as as two smis causing it to be untouched by GC.
- ASSERT((object_regs & ~kJSCallerSaved) == 0);
- ASSERT((non_object_regs & ~kJSCallerSaved) == 0);
- ASSERT((object_regs & non_object_regs) == 0);
- for (int i = 0; i < kNumJSCallerSaved; i++) {
- int r = JSCallerSavedCode(i);
- Register reg = { r };
- ASSERT(!reg.is(kScratchRegister));
- if ((object_regs & (1 << r)) != 0) {
- __ push(reg);
- }
- // Store the 64-bit value as two smis.
- if ((non_object_regs & (1 << r)) != 0) {
- __ movq(kScratchRegister, reg);
- __ Integer32ToSmi(reg, reg);
- __ push(reg);
- __ sar(kScratchRegister, Immediate(32));
- __ Integer32ToSmi(kScratchRegister, kScratchRegister);
- __ push(kScratchRegister);
- }
+ __ EnterInternalFrame();
+
+ // Store the registers containing live values on the expression stack to
+ // make sure that these are correctly updated during GC. Non-object values
+ // are stored as two smis, causing them to be untouched by GC.
+ ASSERT((object_regs & ~kJSCallerSaved) == 0);
+ ASSERT((non_object_regs & ~kJSCallerSaved) == 0);
+ ASSERT((object_regs & non_object_regs) == 0);
+ for (int i = 0; i < kNumJSCallerSaved; i++) {
+ int r = JSCallerSavedCode(i);
+ Register reg = { r };
+ ASSERT(!reg.is(kScratchRegister));
+ if ((object_regs & (1 << r)) != 0) {
+ __ push(reg);
}
+ // Store the 64-bit value as two smis.
+ if ((non_object_regs & (1 << r)) != 0) {
+ __ movq(kScratchRegister, reg);
+ __ Integer32ToSmi(reg, reg);
+ __ push(reg);
+ __ sar(kScratchRegister, Immediate(32));
+ __ Integer32ToSmi(kScratchRegister, kScratchRegister);
+ __ push(kScratchRegister);
+ }
+ }
#ifdef DEBUG
- __ RecordComment("// Calling from debug break to runtime - come in - over");
+ __ RecordComment("// Calling from debug break to runtime - come in - over");
#endif
- __ Set(rax, 0); // No arguments (argc == 0).
- __ movq(rbx, ExternalReference::debug_break(masm->isolate()));
-
- CEntryStub ceb(1);
- __ CallStub(&ceb);
-
- // Restore the register values from the expression stack.
- for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
- int r = JSCallerSavedCode(i);
- Register reg = { r };
- if (FLAG_debug_code) {
- __ Set(reg, kDebugZapValue);
- }
- if ((object_regs & (1 << r)) != 0) {
- __ pop(reg);
- }
- // Reconstruct the 64-bit value from two smis.
- if ((non_object_regs & (1 << r)) != 0) {
- __ pop(kScratchRegister);
- __ SmiToInteger32(kScratchRegister, kScratchRegister);
- __ shl(kScratchRegister, Immediate(32));
- __ pop(reg);
- __ SmiToInteger32(reg, reg);
- __ or_(reg, kScratchRegister);
- }
+ __ Set(rax, 0); // No arguments (argc == 0).
+ __ movq(rbx, ExternalReference::debug_break(masm->isolate()));
+
+ CEntryStub ceb(1);
+ __ CallStub(&ceb);
+
+ // Restore the register values from the expression stack.
+ for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
+ int r = JSCallerSavedCode(i);
+ Register reg = { r };
+ if (FLAG_debug_code) {
+ __ Set(reg, kDebugZapValue);
+ }
+ if ((object_regs & (1 << r)) != 0) {
+ __ pop(reg);
+ }
+ // Reconstruct the 64-bit value from two smis.
+ if ((non_object_regs & (1 << r)) != 0) {
+ __ pop(kScratchRegister);
+ __ SmiToInteger32(kScratchRegister, kScratchRegister);
+ __ shl(kScratchRegister, Immediate(32));
+ __ pop(reg);
+ __ SmiToInteger32(reg, reg);
+ __ or_(reg, kScratchRegister);
}
-
- // Get rid of the internal frame.
}
+ // Get rid of the internal frame.
+ __ LeaveInternalFrame();
+
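The save/restore loops above rely on x64 smi tagging: a smi keeps its 32-bit payload in the upper half of a 64-bit word, so a raw 64-bit register value can be hidden from the GC as two smis. A minimal standalone sketch of that round trip (plain C++ mirroring Integer32ToSmi/SmiToInteger32, under the assumption that kSmiShift == 32 on x64):

#include <cassert>
#include <cstdint>

// x64-style smi tagging (assuming kSmiShift == 32): the payload lives
// in the upper 32 bits, so the low bits never look like a pointer tag.
static inline int64_t Integer32ToSmi(int32_t value) {
  return static_cast<int64_t>(
      static_cast<uint64_t>(static_cast<uint32_t>(value)) << 32);
}
static inline int32_t SmiToInteger32(int64_t smi) {
  return static_cast<int32_t>(static_cast<uint64_t>(smi) >> 32);
}

int main() {
  uint64_t raw = 0xDEADBEEFCAFEBABEull;  // arbitrary non-pointer bits

  // Store: split into two smis (low half, then high half as in sar 32).
  int64_t low_smi = Integer32ToSmi(static_cast<int32_t>(raw));
  int64_t high_smi = Integer32ToSmi(static_cast<int32_t>(raw >> 32));

  // Reconstruct: untag each half, shift the high part up, then or them,
  // matching the SmiToInteger32 / shl / or sequence above.
  uint64_t rebuilt =
      (static_cast<uint64_t>(
           static_cast<uint32_t>(SmiToInteger32(high_smi))) << 32) |
      static_cast<uint32_t>(SmiToInteger32(low_smi));

  assert(rebuilt == raw);
  return 0;
}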
// If this call did not replace a call but patched other code then there will
// be an unwanted return address left on the stack. Here we get rid of that.
if (convert_call_to_jmp) {
Isolate* isolate = masm()->isolate();
- {
- AllowExternalCallThatCantCauseGC scope(masm());
- __ CallCFunction(ExternalReference::new_deoptimizer_function(isolate), 6);
- }
+ __ CallCFunction(ExternalReference::new_deoptimizer_function(isolate), 6);
// Preserve deoptimizer object in register rax and get the input
// frame descriptor pointer.
__ movq(rbx, Operand(rax, Deoptimizer::input_offset()));
__ PrepareCallCFunction(2);
__ movq(arg1, rax);
__ LoadAddress(arg2, ExternalReference::isolate_address());
- {
- AllowExternalCallThatCantCauseGC scope(masm());
- __ CallCFunction(
- ExternalReference::compute_output_frames_function(isolate), 2);
- }
+ __ CallCFunction(
+ ExternalReference::compute_output_frames_function(isolate), 2);
__ pop(rax);
// Replace the current frame with the output frames.
__ bind(&ok);
}
- // Open a frame scope to indicate that there is a frame on the stack. The
- // MANUAL indicates that the scope shouldn't actually generate code to set up
- // the frame (that is done below).
- FrameScope frame_scope(masm_, StackFrame::MANUAL);
-
__ push(rbp); // Caller's frame pointer.
__ movq(rbp, rsp);
__ push(rsi); // Callee's context.
__ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
// Enter an internal frame.
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
+ __ EnterInternalFrame();
- // Push the receiver and the name of the function.
- __ push(rdx);
- __ push(rcx);
+ // Push the receiver and the name of the function.
+ __ push(rdx);
+ __ push(rcx);
- // Call the entry.
- CEntryStub stub(1);
- __ Set(rax, 2);
- __ LoadAddress(rbx, ExternalReference(IC_Utility(id), masm->isolate()));
- __ CallStub(&stub);
+ // Call the entry.
+ CEntryStub stub(1);
+ __ Set(rax, 2);
+ __ LoadAddress(rbx, ExternalReference(IC_Utility(id), masm->isolate()));
+ __ CallStub(&stub);
- // Move result to rdi and exit the internal frame.
- __ movq(rdi, rax);
- }
+ // Move result to rdi and exit the internal frame.
+ __ movq(rdi, rax);
+ __ LeaveInternalFrame();
// Check if the receiver is a global object of some sort.
// This can happen only for regular CallIC but not KeyedCallIC.
// This branch is taken when calling KeyedCallIC_Miss is neither required
// nor beneficial.
__ IncrementCounter(counters->keyed_call_generic_slow_load(), 1);
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ push(rcx); // save the key
- __ push(rdx); // pass the receiver
- __ push(rcx); // pass the key
- __ CallRuntime(Runtime::kKeyedGetProperty, 2);
- __ pop(rcx); // restore the key
- }
+ __ EnterInternalFrame();
+ __ push(rcx); // save the key
+ __ push(rdx); // pass the receiver
+ __ push(rcx); // pass the key
+ __ CallRuntime(Runtime::kKeyedGetProperty, 2);
+ __ pop(rcx); // restore the key
+ __ LeaveInternalFrame();
__ movq(rdi, rax);
__ jmp(&do_call);
HPhase phase("Code generation", chunk());
ASSERT(is_unused());
status_ = GENERATING;
-
- // Open a frame scope to indicate that there is a frame on the stack. The
- // MANUAL indicates that the scope shouldn't actually generate code to set up
- // the frame (that is done in GeneatePrologue).
- FrameScope frame_scope(masm_, StackFrame::MANUAL);
-
return GeneratePrologue() &&
GenerateBody() &&
GenerateDeferredCode() &&
: Assembler(arg_isolate, buffer, size),
generating_stub_(false),
allow_stub_calls_(true),
- has_frame_(false),
root_array_available_(true) {
if (isolate() != NULL) {
code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
Label L;
j(cc, &L, Label::kNear);
Abort(msg);
- // Control will not return here.
+ // Will not return here.
bind(&L);
}
RecordComment(msg);
}
#endif
+ // Disable stub call restrictions to always allow calls to abort.
+ AllowStubCallsScope allow_scope(this, true);
+
push(rax);
movq(kScratchRegister, p0, RelocInfo::NONE);
push(kScratchRegister);
reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(p1 - p0))),
RelocInfo::NONE);
push(kScratchRegister);
-
- if (!has_frame_) {
- // We don't actually want to generate a pile of code for this, so just
- // claim there is a stack frame, without generating one.
- FrameScope scope(this, StackFrame::NONE);
- CallRuntime(Runtime::kAbort, 2);
- } else {
- CallRuntime(Runtime::kAbort, 2);
- }
- // Control will not return here.
+ CallRuntime(Runtime::kAbort, 2);
+ // Will not return here.
int3();
}
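The AllowStubCallsScope used above is a save-and-restore guard around the allow_stub_calls_ flag, so Abort can emit its runtime call even from stubs that normally forbid stub calls. A minimal standalone sketch of that guard pattern (hypothetical names, not the V8 class):

#include <cassert>

// Hypothetical assembler carrying the allow_stub_calls flag seen above.
struct Masm {
  bool allow_stub_calls = false;
  void set_allow_stub_calls(bool v) { allow_stub_calls = v; }
};

// Scope guard: force the flag on, then restore the previous value on
// exit, so the restriction is only lifted for the duration of Abort.
class AllowStubCallsGuard {
 public:
  AllowStubCallsGuard(Masm* masm, bool allow)
      : masm_(masm), old_(masm->allow_stub_calls) {
    masm_->set_allow_stub_calls(allow);
  }
  ~AllowStubCallsGuard() { masm_->set_allow_stub_calls(old_); }
 private:
  Masm* masm_;
  bool old_;
};

int main() {
  Masm masm;  // stub-call restrictions in effect
  {
    AllowStubCallsGuard guard(&masm, true);
    assert(masm.allow_stub_calls);  // calls permitted inside the scope
  }
  assert(!masm.allow_stub_calls);   // restriction restored afterwards
  return 0;
}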
void MacroAssembler::CallStub(CodeStub* stub, unsigned ast_id) {
- ASSERT(AllowThisStubCall(stub)); // Calls are not allowed in some stubs
+ ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
Call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
}
MaybeObject* MacroAssembler::TryCallStub(CodeStub* stub) {
- ASSERT(AllowThisStubCall(stub)); // Calls are not allowed in some stubs.
+ ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
MaybeObject* result = stub->TryGetCode();
if (!result->IsFailure()) {
call(Handle<Code>(Code::cast(result->ToObjectUnchecked())),
void MacroAssembler::TailCallStub(CodeStub* stub) {
- ASSERT(stub->CompilingCallsToThisStubIsGCSafe() || allow_stub_calls_);
+ ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
}
MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub) {
+ ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
MaybeObject* result = stub->TryGetCode();
if (!result->IsFailure()) {
jmp(Handle<Code>(Code::cast(result->ToObjectUnchecked())),
}
-bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
- if (!has_frame_ && stub->SometimesSetsUpAFrame()) return false;
- return stub->CompilingCallsToThisStubIsGCSafe() || allow_stub_calls_;
-}
-
-
void MacroAssembler::IllegalOperation(int num_arguments) {
if (num_arguments > 0) {
addq(rsp, Immediate(num_arguments * kPointerSize));
void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
InvokeFlag flag,
const CallWrapper& call_wrapper) {
- // You can't call a builtin without a valid frame.
- ASSERT(flag == JUMP_FUNCTION || has_frame());
+ // Calls are not allowed in some stubs.
+ ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());
// Rely on the assertion to check that the number of provided
// arguments match the expected number of arguments. Fake a
#ifdef ENABLE_DEBUGGER_SUPPORT
void MacroAssembler::DebugBreak() {
+ ASSERT(allow_stub_calls());
Set(rax, 0); // No arguments.
LoadAddress(rbx, ExternalReference(Runtime::kDebugBreak, isolate()));
CEntryStub ces(1);
- ASSERT(AllowThisStubCall(&ces));
Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}
#endif // ENABLE_DEBUGGER_SUPPORT
InvokeFlag flag,
const CallWrapper& call_wrapper,
CallKind call_kind) {
- // You can't call a function without a valid frame.
- ASSERT(flag == JUMP_FUNCTION || has_frame());
-
Label done;
InvokePrologue(expected,
actual,
InvokeFlag flag,
const CallWrapper& call_wrapper,
CallKind call_kind) {
- // You can't call a function without a valid frame.
- ASSERT(flag == JUMP_FUNCTION || has_frame());
-
Label done;
Register dummy = rax;
InvokePrologue(expected,
InvokeFlag flag,
const CallWrapper& call_wrapper,
CallKind call_kind) {
- // You can't call a function without a valid frame.
- ASSERT(flag == JUMP_FUNCTION || has_frame());
-
ASSERT(function.is(rdi));
movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
movq(rsi, FieldOperand(function, JSFunction::kContextOffset));
InvokeFlag flag,
const CallWrapper& call_wrapper,
CallKind call_kind) {
- // You can't call a function without a valid frame.
- ASSERT(flag == JUMP_FUNCTION || has_frame());
-
ASSERT(function->is_compiled());
// Get the function and setup the context.
Move(rdi, Handle<JSFunction>(function));
void MacroAssembler::CallCFunction(Register function, int num_arguments) {
- ASSERT(has_frame());
// Check stack alignment.
if (emit_debug_code()) {
CheckStackAlignment();
#define V8_X64_MACRO_ASSEMBLER_X64_H_
#include "assembler.h"
-#include "frames.h"
#include "v8globals.h"
namespace v8 {
ScaleFactor scale;
};
-
// MacroAssembler implements a collection of frequently used macros.
class MacroAssembler: public Assembler {
public:
void DebugBreak();
#endif
+ // ---------------------------------------------------------------------------
+ // Activation frames
+
+ void EnterInternalFrame() { EnterFrame(StackFrame::INTERNAL); }
+ void LeaveInternalFrame() { LeaveFrame(StackFrame::INTERNAL); }
+
+ void EnterConstructFrame() { EnterFrame(StackFrame::CONSTRUCT); }
+ void LeaveConstructFrame() { LeaveFrame(StackFrame::CONSTRUCT); }
+
// Enter specific kind of exit frame; either in normal or
// debug mode. Expects the number of arguments in register rax and
// sets up the number of arguments in register rdi and the pointer
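A simplified model of what the EnterFrame/LeaveFrame pair behind these helpers does: save the caller's frame pointer, establish a new one, and push the context plus a smi-tagged frame-type marker so the stack walker can classify the frame (the real x64 implementation also records the code object). The sketch below models the stack as a vector; the frame-type constant is hypothetical.

#include <cassert>
#include <cstdint>
#include <vector>

// x64-style smi: payload in the upper 32 bits (assuming kSmiShift == 32).
static inline int64_t SmiFromInt(int32_t v) {
  return static_cast<int64_t>(static_cast<uint64_t>(
      static_cast<uint32_t>(v)) << 32);
}

struct StackModel {
  std::vector<int64_t> slots;  // grows upward here; the real stack grows down
  size_t fp = 0;               // frame pointer as a slot index

  // Mirrors the spirit of EnterFrame: save the old fp, set the new fp,
  // push the context and a smi-tagged frame-type marker.
  void EnterFrame(int32_t type, int64_t context) {
    slots.push_back(static_cast<int64_t>(fp));  // push rbp
    fp = slots.size() - 1;                      // movq rbp, rsp
    slots.push_back(context);                   // push rsi
    slots.push_back(SmiFromInt(type));          // Push(Smi::FromInt(type))
  }

  // Mirrors LeaveFrame: restore rsp from rbp, then pop the saved fp.
  void LeaveFrame() {
    slots.resize(fp + 1);                       // movq rsp, rbp
    fp = static_cast<size_t>(slots.back());     // pop rbp
    slots.pop_back();
  }
};

int main() {
  StackModel s;
  s.EnterFrame(/*kHypotheticalInternalType=*/4, /*context=*/0);
  s.LeaveFrame();
  assert(s.slots.empty());
  return 0;
}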
bool generating_stub() { return generating_stub_; }
void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }
bool allow_stub_calls() { return allow_stub_calls_; }
- void set_has_frame(bool value) { has_frame_ = value; }
- bool has_frame() { return has_frame_; }
- inline bool AllowThisStubCall(CodeStub* stub);
static int SafepointRegisterStackIndex(Register reg) {
return SafepointRegisterStackIndex(reg.code());
}
- // Activation support.
- void EnterFrame(StackFrame::Type type);
- void LeaveFrame(StackFrame::Type type);
-
private:
// Order general registers are pushed by Pushad.
// rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r14, r15.
bool generating_stub_;
bool allow_stub_calls_;
- bool has_frame_;
bool root_array_available_;
// Returns a register holding the smi value. The register MUST NOT be
const CallWrapper& call_wrapper = NullCallWrapper(),
CallKind call_kind = CALL_AS_METHOD);
+ // Activation support.
+ void EnterFrame(StackFrame::Type type);
+ void LeaveFrame(StackFrame::Type type);
+
void EnterExitFramePrologue(bool save_rax);
// Allocates arg_stack_space * kPointerSize memory (not GCed) on the stack
// Isolate.
__ LoadAddress(rcx, ExternalReference::isolate_address());
#endif
-
- { // NOLINT: Can't find a way to open this scope without confusing the
- // linter.
- AllowExternalCallThatCantCauseGC scope(&masm_);
- ExternalReference compare =
- ExternalReference::re_case_insensitive_compare_uc16(masm_.isolate());
- __ CallCFunction(compare, num_arguments);
- }
+ ExternalReference compare =
+ ExternalReference::re_case_insensitive_compare_uc16(masm_.isolate());
+ __ CallCFunction(compare, num_arguments);
// Restore original values before reacting on result value.
__ Move(code_object_pointer(), masm_.CodeObject());
// registers we need.
// Entry code:
__ bind(&entry_label_);
-
- // Tell the system that we have a stack frame. Because the type is MANUAL, no
- // is generated.
- FrameScope scope(&masm_, StackFrame::MANUAL);
-
- // Actually emit code to start a new stack frame.
+ // Start a new stack frame.
__ push(rbp);
__ movq(rbp, rsp);
// Save parameters and callee-save registers. Order here should correspond
scratch1, scratch2, scratch3, name,
miss_label);
- FrameScope scope(masm, StackFrame::INTERNAL);
+ __ EnterInternalFrame();
// Save the name_ register across the call.
__ push(name_);
// Restore the name_ register.
__ pop(name_);
-
- // Leave the internal frame.
+ __ LeaveInternalFrame();
}
void LoadWithInterceptor(MacroAssembler* masm,
Register holder,
JSObject* holder_obj,
Label* interceptor_succeeded) {
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ push(holder); // Save the holder.
- __ push(name_); // Save the name.
-
- CompileCallLoadPropertyWithInterceptor(masm,
- receiver,
- holder,
- name_,
- holder_obj);
-
- __ pop(name_); // Restore the name.
- __ pop(receiver); // Restore the holder.
- // Leave the internal frame.
- }
+ __ EnterInternalFrame();
+ __ push(holder); // Save the holder.
+ __ push(name_); // Save the name.
+
+ CompileCallLoadPropertyWithInterceptor(masm,
+ receiver,
+ holder,
+ name_,
+ holder_obj);
+
+ __ pop(name_); // Restore the name.
+ __ pop(receiver); // Restore the holder.
+ __ LeaveInternalFrame();
__ CompareRoot(rax, Heap::kNoInterceptorResultSentinelRootIndex);
__ j(not_equal, interceptor_succeeded);
// Save necessary data before invoking an interceptor.
// Requires a frame to make GC aware of pushed pointers.
- {
- FrameScope frame_scope(masm(), StackFrame::INTERNAL);
-
- if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
- // CALLBACKS case needs a receiver to be passed into C++ callback.
- __ push(receiver);
- }
- __ push(holder_reg);
- __ push(name_reg);
+ __ EnterInternalFrame();
- // Invoke an interceptor. Note: map checks from receiver to
- // interceptor's holder has been compiled before (see a caller
- // of this method.)
- CompileCallLoadPropertyWithInterceptor(masm(),
- receiver,
- holder_reg,
- name_reg,
- interceptor_holder);
-
- // Check if interceptor provided a value for property. If it's
- // the case, return immediately.
- Label interceptor_failed;
- __ CompareRoot(rax, Heap::kNoInterceptorResultSentinelRootIndex);
- __ j(equal, &interceptor_failed);
- frame_scope.GenerateLeaveFrame();
- __ ret(0);
-
- __ bind(&interceptor_failed);
- __ pop(name_reg);
- __ pop(holder_reg);
- if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
- __ pop(receiver);
- }
+ if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
+ // CALLBACKS case needs a receiver to be passed into C++ callback.
+ __ push(receiver);
+ }
+ __ push(holder_reg);
+ __ push(name_reg);
+
+ // Invoke an interceptor. Note: map checks from the receiver to the
+ // interceptor's holder have been compiled before (see a caller of
+ // this method).
+ CompileCallLoadPropertyWithInterceptor(masm(),
+ receiver,
+ holder_reg,
+ name_reg,
+ interceptor_holder);
+
+ // Check if the interceptor provided a value for the property. If
+ // so, return immediately.
+ Label interceptor_failed;
+ __ CompareRoot(rax, Heap::kNoInterceptorResultSentinelRootIndex);
+ __ j(equal, &interceptor_failed);
+ __ LeaveInternalFrame();
+ __ ret(0);
- // Leave the internal frame.
+ __ bind(&interceptor_failed);
+ __ pop(name_reg);
+ __ pop(holder_reg);
+ if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
+ __ pop(receiver);
}
+ __ LeaveInternalFrame();
+
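One consequence of explicit pairing, visible in the interceptor code above: every exit path must leave the frame itself (the success path leaves before ret(0); the failure path leaves after restoring registers). A minimal standalone sketch of the multi-exit pattern (hypothetical types, not V8 code):

#include <cassert>

struct Masm {
  int depth = 0;
  void EnterInternalFrame() { ++depth; }
  void LeaveInternalFrame() { assert(depth > 0); --depth; }
};

// With explicit pairing, each return path balances the frame on its
// own, just like the success and interceptor_failed paths above.
bool TryLoad(Masm* masm, bool interceptor_has_result) {
  masm->EnterInternalFrame();
  if (interceptor_has_result) {
    masm->LeaveInternalFrame();  // success path: leave, then return
    return true;
  }
  masm->LeaveInternalFrame();    // failure path: leave, then fall through
  return false;
}

int main() {
  Masm masm;
  TryLoad(&masm, true);
  TryLoad(&masm, false);
  assert(masm.depth == 0);  // frame balanced on both paths
  return 0;
}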
// Check that the maps from interceptor's holder to lookup's holder
// haven't changed. And load lookup's holder into |holder| register.
if (interceptor_holder != lookup->holder()) {