" test_nesting_calls(test_local_variables(1,3), 42, 47),"
" test_local_variables(-25.3, 2));"
" // return test_recursion_with_base(0, 0, 0, 47);\n"
- " var o = { x: 42 };"
+ " var x_value = 42;"
+ " var o = { x: x_value };"
" var a = [ 1, 2, 3 ];"
+ " var x = true ? 42 : 32;"
" return test_if_then_else(0, 46, 47);"
"})()")),
Factory::NewStringFromAscii(CStrVector("CodeGeneratorTestScript")),
}
-void CodeGenerator::VisitEmptyStatement(EmptyStatement* a) {
- UNIMPLEMENTED();
+void CodeGenerator::VisitEmptyStatement(EmptyStatement* node) {
+ ASSERT(!in_spilled_code());
+ Comment cmnt(masm_, "// EmptyStatement");
+ CodeForStatementPosition(node);
+  // Nothing to do for an empty statement.
}
}
-void CodeGenerator::VisitConditional(Conditional* a) {
- UNIMPLEMENTED();
+void CodeGenerator::VisitConditional(Conditional* node) {
+ Comment cmnt(masm_, "[ Conditional");
+ JumpTarget then;
+ JumpTarget else_;
+ JumpTarget exit;
+ ControlDestination dest(&then, &else_, true);
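+  // Compile the condition. Whichever target ends up as the fall-through
+  // determines which arm of the conditional is compiled first below.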
+ LoadCondition(node->condition(), NOT_INSIDE_TYPEOF, &dest, true);
+
+ if (dest.false_was_fall_through()) {
+ // The else target was bound, so we compile the else part first.
+ Load(node->else_expression(), typeof_state());
+
+ if (then.is_linked()) {
+ exit.Jump();
+ then.Bind();
+ Load(node->then_expression(), typeof_state());
+ }
+ } else {
+ // The then target was bound, so we compile the then part first.
+ Load(node->then_expression(), typeof_state());
+
+ if (else_.is_linked()) {
+ exit.Jump();
+ else_.Bind();
+ Load(node->else_expression(), typeof_state());
+ }
+ }
+
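+  // Both arms join here with the value of the chosen expression left on
+  // the virtual frame.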
+ exit.Bind();
}
+
void CodeGenerator::VisitSlot(Slot* node) {
Comment cmnt(masm_, "[ Slot");
LoadFromSlot(node, typeof_state());
}
-void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* a) {
- UNIMPLEMENTED();
+// Materialize the regexp literal 'node' in the literals array
+// 'literals' of the function. Leave the regexp boilerplate in
+// 'boilerplate'.
+class DeferredRegExpLiteral: public DeferredCode {
+ public:
+ DeferredRegExpLiteral(Register boilerplate,
+ Register literals,
+ RegExpLiteral* node)
+ : boilerplate_(boilerplate), literals_(literals), node_(node) {
+ set_comment("[ DeferredRegExpLiteral");
+ }
+
+ void Generate();
+
+ private:
+ Register boilerplate_;
+ Register literals_;
+ RegExpLiteral* node_;
+};
+
+
+void DeferredRegExpLiteral::Generate() {
+  // Since the entry is undefined, we call the runtime system to
+  // compute the literal.
+ // Literal array (0).
+ __ push(literals_);
+ // Literal index (1).
+ __ push(Immediate(Smi::FromInt(node_->literal_index())));
+ // RegExp pattern (2).
+ __ Push(node_->pattern());
+ // RegExp flags (3).
+ __ Push(node_->flags());
+ __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
+ if (!boilerplate_.is(rax)) __ movq(boilerplate_, rax);
+}
+
+
+void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) {
+ Comment cmnt(masm_, "[ RegExp Literal");
+
+ // Retrieve the literals array and check the allocated entry. Begin
+ // with a writable copy of the function of this activation in a
+ // register.
+ frame_->PushFunction();
+ Result literals = frame_->Pop();
+ literals.ToRegister();
+ frame_->Spill(literals.reg());
+
+ // Load the literals array of the function.
+ __ movq(literals.reg(),
+ FieldOperand(literals.reg(), JSFunction::kLiteralsOffset));
+
+  // Load the literal at the index saved in the AST node.
+ Result boilerplate = allocator_->Allocate();
+ ASSERT(boilerplate.is_valid());
+ int literal_offset =
+ FixedArray::kHeaderSize + node->literal_index() * kPointerSize;
+ __ movq(boilerplate.reg(), FieldOperand(literals.reg(), literal_offset));
+
+ // Check whether we need to materialize the RegExp object. If so,
+ // jump to the deferred code passing the literals array.
+ DeferredRegExpLiteral* deferred =
+ new DeferredRegExpLiteral(boilerplate.reg(), literals.reg(), node);
+ __ Cmp(boilerplate.reg(), Factory::undefined_value());
+ deferred->Branch(equal);
+ deferred->BindExit();
+ literals.Unuse();
+
+ // Push the boilerplate object.
+ frame_->Push(&boilerplate);
}
if (CompileTimeValue::IsCompileTimeValue(property->value())) break;
// else fall through.
case ObjectLiteral::Property::COMPUTED: {
- // TODO(X64): Implement setting of computed values in object literals.
- UNIMPLEMENTED();
+ Handle<Object> key(property->key()->handle());
+ if (key->IsSymbol()) {
+ // Duplicate the object as the IC receiver.
+ frame_->Dup();
+ Load(property->value());
+ frame_->Push(key);
+ Result ignored = frame_->CallStoreIC();
+ // Drop the duplicated receiver and ignore the result.
+ frame_->Drop();
+ break;
+ }
+ // Fall through
}
case ObjectLiteral::Property::PROTOTYPE: {
// Duplicate the object as an argument to the runtime call.
} else if (slot->type() == Slot::LOCAL) {
frame_->StoreToLocalAt(slot->index());
} else {
+    // Stores to slots of the other types (LOOKUP and GLOBAL) never
+    // reach this point.
+ //
+ // The use of SlotOperand below is safe for an unspilled frame
+ // because the slot is a context slot.
ASSERT(slot->type() == Slot::CONTEXT);
frame_->Dup();
Result value = frame_->Pop();
namespace v8 {
namespace internal {
-StackFrame::Type ExitFrame::GetStateForFramePointer(unsigned char* a,
- StackFrame::State* b) {
- // TODO(X64): UNIMPLEMENTED
- return NONE;
+
+StackFrame::Type StackFrame::ComputeType(State* state) {
+ ASSERT(state->fp != NULL);
+ if (StandardFrame::IsArgumentsAdaptorFrame(state->fp)) {
+ return ARGUMENTS_ADAPTOR;
+ }
+  // The marker and function offsets overlap. If the marker isn't a
+  // smi, the frame is a JavaScript frame and the marker is really
+  // the function.
+ const int offset = StandardFrameConstants::kMarkerOffset;
+ Object* marker = Memory::Object_at(state->fp + offset);
+ if (!marker->IsSmi()) return JAVA_SCRIPT;
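+  // Otherwise the marker smi encodes the frame type directly.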
+ return static_cast<StackFrame::Type>(Smi::cast(marker)->value());
}
-int JavaScriptFrame::GetProvidedParametersCount() const {
- UNIMPLEMENTED();
- return 0;
+
+StackFrame::Type ExitFrame::GetStateForFramePointer(Address fp, State* state) {
+ if (fp == 0) return NONE;
+ // Compute the stack pointer.
+ Address sp = Memory::Address_at(fp + ExitFrameConstants::kSPOffset);
+ // Fill in the state.
+ state->fp = fp;
+ state->sp = sp;
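+  // The return address is stored in the word just below the saved sp.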
+ state->pc_address = reinterpret_cast<Address*>(sp - 1 * kPointerSize);
+ // Determine frame type.
+ if (Memory::Address_at(fp + ExitFrameConstants::kDebugMarkOffset) != 0) {
+ return EXIT_DEBUG;
+ } else {
+ return EXIT;
+ }
}
-StackFrame::Type StackFrame::ComputeType(StackFrame::State* a) {
+int JavaScriptFrame::GetProvidedParametersCount() const {
UNIMPLEMENTED();
- return NONE;
+ return 0;
}
byte* ArgumentsAdaptorFrame::GetCallerStackPointer() const {
void Result::ToRegister() {
ASSERT(is_valid());
if (is_constant()) {
- // TODO(X64): Handle constant results.
- /*
Result fresh = CodeGeneratorScope::Current()->allocator()->Allocate();
ASSERT(fresh.is_valid());
- if (CodeGeneratorScope::Current()->IsUnsafeSmi(handle())) {
- CodeGeneratorScope::Current()->LoadUnsafeSmi(fresh.reg(), handle());
- } else {
- CodeGeneratorScope::Current()->masm()->Set(fresh.reg(),
- Immediate(handle()));
- }
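+    // Move() emits the right load for either a smi or a heap object constant.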
+ CodeGeneratorScope::Current()->masm()->Move(fresh.reg(), handle());
// This result becomes a copy of the fresh one.
*this = fresh;
- */
}
ASSERT(is_register());
}
CodeGeneratorScope::Current()->masm()->movq(fresh.reg(), reg());
} else {
ASSERT(is_constant());
- if (handle()->IsSmi()) {
- if (CodeGeneratorScope::Current()->IsUnsafeSmi(handle())) {
- CodeGeneratorScope::Current()->LoadUnsafeSmi(fresh.reg(), handle());
- } else {
- CodeGeneratorScope::Current()->masm()->
- movq(fresh.reg(), handle(), RelocInfo::NONE);
- }
- } else {
- CodeGeneratorScope::Current()->masm()->
- movq(fresh.reg(), handle(), RelocInfo::EMBEDDED_OBJECT);
- }
+ CodeGeneratorScope::Current()->masm()->Move(fresh.reg(), handle());
}
*this = fresh;
} else if (is_register() && reg().is(target)) {