VirtualFrame::SpilledScope spilled_scope;
ASSERT(boilerplate->IsBoilerplate());
- // Create a new closure.
- frame_->EmitPush(cp);
__ mov(r0, Operand(boilerplate));
- frame_->EmitPush(r0);
- frame_->CallRuntime(Runtime::kNewClosure, 2);
- frame_->EmitPush(r0);
+ // Use the fast case closure allocation code that allocates in new
+ // space for nested functions that don't need literals cloning.
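+ // The stub takes the boilerplate as its only stack argument, reads the
+ // current context directly from cp, and returns the new closure in r0
+ // (see FastNewClosureStub::Generate below).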
+ if (scope()->is_function_scope() && boilerplate->NumberOfLiterals() == 0) {
+ FastNewClosureStub stub;
+ frame_->EmitPush(r0);
+ frame_->CallStub(&stub, 1);
+ frame_->EmitPush(r0);
+ } else {
+ // Create a new closure.
+ frame_->EmitPush(cp);
+ frame_->EmitPush(r0);
+ frame_->CallRuntime(Runtime::kNewClosure, 2);
+ frame_->EmitPush(r0);
+ }
}
}
+void FastNewClosureStub::Generate(MacroAssembler* masm) {
+ // Clone the boilerplate in new space. Set the context to the
+ // current context in cp.
+ Label gc;
+
+ // Pop the boilerplate function from the stack.
+ __ pop(r3);
+
+ // Attempt to allocate new JSFunction in new space.
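+ // r0 receives the tagged allocation; r1 and r2 are used as scratch
+ // registers. The size argument is given in words here, hence the
+ // division by kPointerSize.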
+ __ AllocateInNewSpace(JSFunction::kSize / kPointerSize,
+ r0,
+ r1,
+ r2,
+ &gc,
+ TAG_OBJECT);
+
+ // Compute the function map in the current global context and set that
+ // as the map of the allocated object.
+ __ ldr(r2, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
+ __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalContextOffset));
+ __ ldr(r2, MemOperand(r2, Context::SlotOffset(Context::FUNCTION_MAP_INDEX)));
+ __ str(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
+
+ // Clone the rest of the boilerplate fields. We don't have to update
+ // the write barrier because the allocated object is in new space.
+ for (int offset = kPointerSize;
+ offset < JSFunction::kSize;
+ offset += kPointerSize) {
+ if (offset == JSFunction::kContextOffset) {
+ __ str(cp, FieldMemOperand(r0, offset));
+ } else {
+ __ ldr(r1, FieldMemOperand(r3, offset));
+ __ str(r1, FieldMemOperand(r0, offset));
+ }
+ }
+
+ // Return the result. The boilerplate argument has already been popped.
+ __ Ret();
+
+ // Create a new closure through the slower runtime call.
+ __ bind(&gc);
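+ // r3 still holds the boilerplate popped above; push the context and the
+ // boilerplate as the two arguments expected by Runtime::kNewClosure.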
+ __ push(cp);
+ __ push(r3);
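+ // Tail call into the runtime with two arguments and a result size of one;
+ // the runtime call returns directly to this stub's caller.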
+ __ TailCallRuntime(ExternalReference(Runtime::kNewClosure), 2, 1);
+}
+
+
void CodeGenerator::InstantiateBoilerplate(Handle<JSFunction> boilerplate) {
ASSERT(boilerplate->IsBoilerplate());
+ // The inevitable call will sync frame elements to memory anyway, so
+ // we do it eagerly to allow us to push the arguments directly into
+ // place.
+ frame_->SyncRange(0, frame_->element_count() - 1);
+
// Use the fast case closure allocation code that allocates in new
// space for nested functions that don't need literals cloning.
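// Unlike the runtime path below, the context (esi) is not pushed here; the
// stub is expected to read it directly and to leave its result in eax.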
if (scope()->is_function_scope() && boilerplate->NumberOfLiterals() == 0) {
FastNewClosureStub stub;
- frame_->Push(boilerplate);
+ frame_->EmitPush(Immediate(boilerplate));
Result answer = frame_->CallStub(&stub, 1);
frame_->Push(&answer);
} else {
// Call the runtime to instantiate the function boilerplate
- // object. The inevitable call will sync frame elements to memory
- // anyway, so we do it eagerly to allow us to push the arguments
- // directly into place.
- frame_->SyncRange(0, frame_->element_count() - 1);
-
- // Create a new closure.
+ // object.
frame_->EmitPush(esi);
frame_->EmitPush(Immediate(boilerplate));
Result result = frame_->CallRuntime(Runtime::kNewClosure, 2);
frame_->Push(&result);
}
i::Serializer::Enable();
Persistent<Context> context = v8::Context::New();
+ ASSERT(!context.IsEmpty());
// Make sure all builtin scripts are cached.
{ HandleScope scope;
for (int i = 0; i < i::Natives::GetBuiltinsCount(); i++) {
void CodeGenerator::InstantiateBoilerplate(Handle<JSFunction> boilerplate) {
- // Call the runtime to instantiate the function boilerplate object.
+ ASSERT(boilerplate->IsBoilerplate());
+
// The inevitable call will sync frame elements to memory anyway, so
// we do it eagerly to allow us to push the arguments directly into
// place.
- ASSERT(boilerplate->IsBoilerplate());
frame_->SyncRange(0, frame_->element_count() - 1);
- // Create a new closure.
- frame_->EmitPush(rsi);
- __ movq(kScratchRegister, boilerplate, RelocInfo::EMBEDDED_OBJECT);
- frame_->EmitPush(kScratchRegister);
- Result result = frame_->CallRuntime(Runtime::kNewClosure, 2);
- frame_->Push(&result);
+ // Use the fast case closure allocation code that allocates in new
+ // space for nested functions that don't need literals cloning.
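+ // The stub takes the boilerplate as its only stack argument, reads the
+ // context from rsi, and returns the new closure in rax (see
+ // FastNewClosureStub::Generate below).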
+ if (scope()->is_function_scope() && boilerplate->NumberOfLiterals() == 0) {
+ FastNewClosureStub stub;
+ frame_->Push(boilerplate);
+ Result answer = frame_->CallStub(&stub, 1);
+ frame_->Push(&answer);
+ } else {
+ // Call the runtime to instantiate the function boilerplate
+ // object.
+ frame_->EmitPush(rsi);
+ frame_->EmitPush(boilerplate);
+ Result result = frame_->CallRuntime(Runtime::kNewClosure, 2);
+ frame_->Push(&result);
+ }
}
}
+void FastNewClosureStub::Generate(MacroAssembler* masm) {
+ // Clone the boilerplate in new space. Set the context to the
+ // current context in rsi.
+ Label gc;
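+ // rax receives the tagged allocation; rbx and rcx are used as scratch
+ // registers. Unlike the ARM version, the size is given in bytes.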
+ __ AllocateInNewSpace(JSFunction::kSize, rax, rbx, rcx, &gc, TAG_OBJECT);
+
+ // Get the boilerplate function from the stack.
+ __ movq(rdx, Operand(rsp, 1 * kPointerSize));
+
+ // Compute the function map in the current global context and set that
+ // as the map of the allocated object.
+ __ movq(rcx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
+ __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalContextOffset));
+ __ movq(rcx, Operand(rcx, Context::SlotOffset(Context::FUNCTION_MAP_INDEX)));
+ __ movq(FieldOperand(rax, JSObject::kMapOffset), rcx);
+
+ // Clone the rest of the boilerplate fields. We don't have to update
+ // the write barrier because the allocated object is in new space.
+ for (int offset = kPointerSize;
+ offset < JSFunction::kSize;
+ offset += kPointerSize) {
+ if (offset == JSFunction::kContextOffset) {
+ __ movq(FieldOperand(rax, offset), rsi);
+ } else {
+ __ movq(rbx, FieldOperand(rdx, offset));
+ __ movq(FieldOperand(rax, offset), rbx);
+ }
+ }
+
+ // Return and remove the on-stack parameter.
+ __ ret(1 * kPointerSize);
+
+ // Create a new closure through the slower runtime call.
+ __ bind(&gc);
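+ // The boilerplate argument is still on the stack beneath the return
+ // address; rebuild the stack as (context, boilerplate) for the runtime
+ // call while keeping the return address on top.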
+ __ pop(rcx); // Temporarily remove return address.
+ __ pop(rdx);
+ __ push(rsi);
+ __ push(rdx);
+ __ push(rcx); // Restore return address.
+ __ TailCallRuntime(ExternalReference(Runtime::kNewClosure), 2, 1);
+}
+
+
void ToBooleanStub::Generate(MacroAssembler* masm) {
Label false_result, true_result, not_string;
__ movq(rax, Operand(rsp, 1 * kPointerSize));