global_context()->set_opaque_reference_function(*opaque_reference_fun);
}
+ if (FLAG_disable_native_files) {
+ PrintF("Warning: Running without installed natives!\n");
+ return true;
+ }
+
// Install natives.
for (int i = Natives::GetDebuggerCount();
i < Natives::GetBuiltinsCount();
DEFINE_string(expose_debug_as, NULL, "expose debug in global object")
DEFINE_bool(expose_gc, false, "expose gc extension")
DEFINE_int(stack_trace_limit, 10, "number of stack frames to capture")
+DEFINE_bool(disable_native_files, false, "disable builtin natives files")
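+// Assumption: with V8's usual flag-name mapping this is passed on the
+// command line as --disable-native-files; when set, the bootstrapper code
+// above skips natives installation and only prints a warning.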
// builtins-ia32.cc
DEFINE_bool(inline_new, true, "use fast inline allocation")
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
bool is_construct) {
- UNIMPLEMENTED_MIPS();
+ // Called from JSEntryStub::GenerateBody
+
+ // Registers:
+ // a0: entry_address
+ // a1: function
+ // a2: receiver_pointer
+ // a3: argc
+ // s0: argv
+ //
+ // Stack:
+ // arguments slots
+ // handler frame
+ // entry frame
+ // callee saved registers + ra
+ // 4 args slots
+ // args
+
+ // Clear the context before we push it when entering the JS frame.
+ __ li(cp, Operand(0));
+
+ // Enter an internal frame.
+ __ EnterInternalFrame();
+
+ // Set up the context from the function argument.
+ __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
+
+ // Set up the roots register.
+ ExternalReference roots_address = ExternalReference::roots_address();
+ __ li(s6, Operand(roots_address));
+
+ // Push the function and the receiver onto the stack.
+ __ MultiPushReversed(a1.bit() | a2.bit());
+
+ // Copy arguments to the stack in a loop.
+ // a3: argc
+ // s0: argv, i.e. points to the first arg
+ Label loop, entry;
+ __ sll(t0, a3, kPointerSizeLog2);
+ __ add(t2, s0, t0);
+ __ b(&entry);
+ __ nop(); // Branch delay slot nop.
+ // t2 points past last arg.
+ __ bind(&loop);
+ __ lw(t0, MemOperand(s0)); // Read next parameter.
+ __ addiu(s0, s0, kPointerSize);
+ __ lw(t0, MemOperand(t0)); // Dereference handle.
+ __ Push(t0); // Push parameter.
+ __ bind(&entry);
+ __ Branch(ne, &loop, s0, Operand(t2));
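+ // In pseudo-C, the loop above reads (illustration only):
+ //   for (; s0 != t2; s0 += kPointerSize) push(*(*s0));
+ // where the inner dereference unwraps the handle stored in each argv slot.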
+
+ // Registers:
+ // a0: entry_address
+ // a1: function
+ // a2: receiver_pointer
+ // a3: argc
+ // s0: argv
+ // s6: roots_address
+ //
+ // Stack:
+ // arguments
+ // receiver
+ // function
+ // arguments slots
+ // handler frame
+ // entry frame
+ // callee saved registers + ra
+ // 4 args slots
+ // args
+
+ // Initialize all JavaScript callee-saved registers, since they will be seen
+ // by the garbage collector as part of handlers.
+ __ LoadRoot(t4, Heap::kUndefinedValueRootIndex);
+ __ mov(s1, t4);
+ __ mov(s2, t4);
+ __ mov(s3, t4);
+ __ mov(s4, t4);
+ __ mov(s5, t4);
+ // s6 holds the root address. Do not clobber.
+ // s7 is cp. Do not init.
+
+ // Invoke the code and pass argc as a0.
+ __ mov(a0, a3);
+ if (is_construct) {
+ UNIMPLEMENTED_MIPS();
+ __ break_(0x164);
+ } else {
+ ParameterCount actual(a0);
+ __ InvokeFunction(a1, actual, CALL_FUNCTION);
+ }
+
+ __ LeaveInternalFrame();
+
+ __ Jump(ra);
}
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
UNIMPLEMENTED_MIPS();
+ __ break_(0x201);
}
// Platform-specific inline functions.
-void DeferredCode::Jump() { __ b(&entry_label_); }
+void DeferredCode::Jump() {
+ __ b(&entry_label_);
+ __ nop();
+}
+
+
+void Reference::GetValueAndSpill() {
+ GetValue();
+}
+
+
+void CodeGenerator::VisitAndSpill(Statement* statement) {
+ Visit(statement);
+}
+
+
+void CodeGenerator::VisitStatementsAndSpill(ZoneList<Statement*>* statements) {
+ VisitStatements(statements);
+}
+
+
+void CodeGenerator::LoadAndSpill(Expression* expression) {
+ Load(expression);
+}
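+// Note: the virtual frame on MIPS currently runs fully spilled (see the
+// VirtualFrame::SpilledScope instances below), so the *AndSpill wrappers
+// above are plain delegations for now.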
+
#undef __
#include "bootstrapper.h"
#include "codegen-inl.h"
+#include "compiler.h"
#include "debug.h"
#include "ic-inl.h"
#include "parser.h"
#include "register-allocator-inl.h"
#include "runtime.h"
#include "scopes.h"
-#include "compiler.h"
+#include "virtual-frame-inl.h"
-// -------------------------------------------------------------------------
+// -----------------------------------------------------------------------------
// Platform-specific DeferredCode functions.
}
-// -------------------------------------------------------------------------
+// -----------------------------------------------------------------------------
+// CodeGenState implementation.
+
+CodeGenState::CodeGenState(CodeGenerator* owner)
+ : owner_(owner),
+ true_target_(NULL),
+ false_target_(NULL),
+ previous_(NULL) {
+ owner_->set_state(this);
+}
+
+
+CodeGenState::CodeGenState(CodeGenerator* owner,
+ JumpTarget* true_target,
+ JumpTarget* false_target)
+ : owner_(owner),
+ true_target_(true_target),
+ false_target_(false_target),
+ previous_(owner->state()) {
+ owner_->set_state(this);
+}
+
+
+CodeGenState::~CodeGenState() {
+ ASSERT(owner_->state() == this);
+ owner_->set_state(previous_);
+}
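+// The constructors and destructor above thread states through previous_,
+// forming a stack of CodeGenStates that follows C++ scoping: constructing
+// a state pushes it, destroying it pops back to the previous one.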
+
+
+// -----------------------------------------------------------------------------
// CodeGenerator implementation
CodeGenerator::CodeGenerator(MacroAssembler* masm)
: deferred_(8),
masm_(masm),
- scope_(NULL),
frame_(NULL),
allocator_(NULL),
cc_reg_(cc_always),
// Calling conventions:
-// s8_fp: caller's frame pointer
+// fp: caller's frame pointer
// sp: stack pointer
// a1: called JS function
// cp: callee's context
-void CodeGenerator::Generate(CompilationInfo* infomode) {
- UNIMPLEMENTED_MIPS();
+void CodeGenerator::Generate(CompilationInfo* info) {
+ // Record the position for debugging purposes.
+ CodeForFunctionPosition(info->function());
+
+ // Initialize state.
+ info_ = info;
+ ASSERT(allocator_ == NULL);
+ RegisterAllocator register_allocator(this);
+ allocator_ = &register_allocator;
+ ASSERT(frame_ == NULL);
+ frame_ = new VirtualFrame();
+ cc_reg_ = cc_always;
+
+ {
+ CodeGenState state(this);
+
+ // Registers:
+ // a1: called JS function
+ // ra: return address
+ // fp: caller's frame pointer
+ // sp: stack pointer
+ // cp: callee's context
+ //
+ // Stack:
+ // arguments
+ // receiver
+
+ frame_->Enter();
+
+ // Allocate space for locals and initialize them.
+ frame_->AllocateStackSlots();
+
+ // Initialize the function return target.
+ function_return_.set_direction(JumpTarget::BIDIRECTIONAL);
+ function_return_is_shadowed_ = false;
+
+ VirtualFrame::SpilledScope spilled_scope;
+ if (scope()->num_heap_slots() > 0) {
+ UNIMPLEMENTED_MIPS();
+ }
+
+ {
+ Comment cmnt2(masm_, "[ copy context parameters into .context");
+
+ // Note that iteration order is relevant here! If we have the same
+ // parameter twice (e.g., function (x, y, x)), and that parameter
+ // needs to be copied into the context, it must be the last argument
+ // passed to the parameter that needs to be copied. This is a rare
+ // case so we don't check for it, instead we rely on the copying
+ // order: such a parameter is copied repeatedly into the same
+ // context location and thus the last value is what is seen inside
+ // the function.
+ for (int i = 0; i < scope()->num_parameters(); i++) {
+ UNIMPLEMENTED_MIPS();
+ }
+ }
+
+ // Store the arguments object. This must happen after context
+ // initialization because the arguments object may be stored in the
+ // context.
+ if (scope()->arguments() != NULL) {
+ UNIMPLEMENTED_MIPS();
+ }
+
+ // Generate code to 'execute' declarations and initialize functions
+ // (source elements). In case of an illegal redeclaration we need to
+ // handle that instead of processing the declarations.
+ if (scope()->HasIllegalRedeclaration()) {
+ Comment cmnt(masm_, "[ illegal redeclarations");
+ scope()->VisitIllegalRedeclaration(this);
+ } else {
+ Comment cmnt(masm_, "[ declarations");
+ ProcessDeclarations(scope()->declarations());
+ // Bail out if a stack-overflow exception occurred when processing
+ // declarations.
+ if (HasStackOverflow()) return;
+ }
+
+ if (FLAG_trace) {
+ UNIMPLEMENTED_MIPS();
+ }
+
+ // Compile the body of the function in a vanilla state. Don't
+ // bother compiling all the code if the scope has an illegal
+ // redeclaration.
+ if (!scope()->HasIllegalRedeclaration()) {
+ Comment cmnt(masm_, "[ function body");
+#ifdef DEBUG
+ bool is_builtin = Bootstrapper::IsActive();
+ bool should_trace =
+ is_builtin ? FLAG_trace_builtin_calls : FLAG_trace_calls;
+ if (should_trace) {
+ UNIMPLEMENTED_MIPS();
+ }
+#endif
+ VisitStatementsAndSpill(info->function()->body());
+ }
+ }
+
+ if (has_valid_frame() || function_return_.is_linked()) {
+ if (!function_return_.is_linked()) {
+ CodeForReturnPosition(info->function());
+ }
+ // Registers:
+ // v0: result
+ // sp: stack pointer
+ // fp: frame pointer
+ // cp: callee's context
+
+ __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
+
+ function_return_.Bind();
+ if (FLAG_trace) {
+ UNIMPLEMENTED_MIPS();
+ }
+
+ // Add a label for checking the size of the code used for returning.
+ Label check_exit_codesize;
+ masm_->bind(&check_exit_codesize);
+
+ masm_->mov(sp, fp);
+ masm_->lw(fp, MemOperand(sp, 0));
+ masm_->lw(ra, MemOperand(sp, 4));
+ masm_->addiu(sp, sp, 8);
+
+ // Here we use masm_-> instead of the __ macro to keep the code coverage
+ // tool from instrumenting this sequence, since we rely on its exact size.
+ // TODO(MIPS): Should we be able to use more than 0x1ffe parameters?
+ masm_->addiu(sp, sp, (scope()->num_parameters() + 1) * kPointerSize);
+ masm_->Jump(ra);
+ // The Jump automatically generates a nop in the branch delay slot.
+
+ // Check that the size of the code used for returning matches what is
+ // expected by the debugger.
+ ASSERT_EQ(kJSReturnSequenceLength,
+ masm_->InstructionsGeneratedSince(&check_exit_codesize));
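+ // For reference, the sequence counted here is mov, lw, lw, addiu, addiu,
+ // jr, plus the delay-slot nop: seven instructions in total, matching the
+ // updated kJSReturnSequenceLength of 7.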
+ }
+
+ // Code generation state must be reset.
+ ASSERT(!has_cc());
+ ASSERT(state_ == NULL);
+ ASSERT(!function_return_is_shadowed_);
+ function_return_.Unuse();
+ DeleteFrame();
+
+ // Process any deferred code using the register allocator.
+ if (!HasStackOverflow()) {
+ ProcessDeferred();
+ }
+
+ allocator_ = NULL;
+}
+
+
+void CodeGenerator::LoadReference(Reference* ref) {
+ VirtualFrame::SpilledScope spilled_scope;
+ Comment cmnt(masm_, "[ LoadReference");
+ Expression* e = ref->expression();
+ Property* property = e->AsProperty();
+ Variable* var = e->AsVariableProxy()->AsVariable();
+
+ if (property != NULL) {
+ UNIMPLEMENTED_MIPS();
+ } else if (var != NULL) {
+ // The expression is a variable proxy that does not rewrite to a
+ // property. Global variables are treated as named property references.
+ if (var->is_global()) {
+ LoadGlobal();
+ ref->set_type(Reference::NAMED);
+ } else {
+ ASSERT(var->slot() != NULL);
+ ref->set_type(Reference::SLOT);
+ }
+ } else {
+ UNIMPLEMENTED_MIPS();
+ }
+}
+
+
+void CodeGenerator::UnloadReference(Reference* ref) {
+ VirtualFrame::SpilledScope spilled_scope;
+ // Pop a reference from the stack while preserving TOS.
+ Comment cmnt(masm_, "[ UnloadReference");
+ int size = ref->size();
+ if (size > 0) {
+ frame_->EmitPop(a0);
+ frame_->Drop(size);
+ frame_->EmitPush(a0);
+ }
+ ref->set_unloaded();
+}
+
+
+MemOperand CodeGenerator::SlotOperand(Slot* slot, Register tmp) {
+ // Currently, this assertion will fail if we try to assign to
+ // a constant variable that is constant because it is read-only
+ // (such as the variable referring to a named function expression).
+ // We need to implement assignments to read-only variables.
+ // Ideally, we should do this during AST generation (by converting
+ // such assignments into expression statements); however, in general
+ // we may not be able to make the decision until past AST generation,
+ // that is, when the entire program is known.
+ ASSERT(slot != NULL);
+ int index = slot->index();
+ switch (slot->type()) {
+ case Slot::PARAMETER:
+ UNIMPLEMENTED_MIPS();
+ return MemOperand(no_reg, 0);
+
+ case Slot::LOCAL:
+ return frame_->LocalAt(index);
+
+ case Slot::CONTEXT: {
+ UNIMPLEMENTED_MIPS();
+ return MemOperand(no_reg, 0);
+ }
+
+ default:
+ UNREACHABLE();
+ return MemOperand(no_reg, 0);
+ }
+}
+
+
+// Loads a value on TOS. If it is a boolean value, the result may have been
+// (partially) translated into branches, or it may have set the condition
+// code register. If force_cc is set, the value is forced to set the
+// condition code register and no value is pushed. If the condition code
+// register was set, has_cc() is true and cc_reg_ contains the condition to
+// test for 'true'.
+void CodeGenerator::LoadCondition(Expression* x,
+ JumpTarget* true_target,
+ JumpTarget* false_target,
+ bool force_cc) {
+ ASSERT(!has_cc());
+ int original_height = frame_->height();
+
+ { CodeGenState new_state(this, true_target, false_target);
+ Visit(x);
+
+ // If we hit a stack overflow, we may not have actually visited
+ // the expression. In that case, we ensure that we have a
+ // valid-looking frame state because we will continue to generate
+ // code as we unwind the C++ stack.
+ //
+ // It's possible to have both a stack overflow and a valid frame
+ // state (e.g., a subexpression overflowed, visiting it returned
+ // with a dummied frame state, and visiting this expression
+ // returned with a normal-looking state).
+ if (HasStackOverflow() &&
+ has_valid_frame() &&
+ !has_cc() &&
+ frame_->height() == original_height) {
+ true_target->Jump();
+ }
+ }
+ if (force_cc && frame_ != NULL && !has_cc()) {
+ // Convert the TOS value to a boolean in the condition code register.
+ UNIMPLEMENTED_MIPS();
+ }
+ ASSERT(!force_cc || !has_valid_frame() || has_cc());
+ ASSERT(!has_valid_frame() ||
+ (has_cc() && frame_->height() == original_height) ||
+ (!has_cc() && frame_->height() == original_height + 1));
+}
+
+
+void CodeGenerator::Load(Expression* x) {
+#ifdef DEBUG
+ int original_height = frame_->height();
+#endif
+ JumpTarget true_target;
+ JumpTarget false_target;
+ LoadCondition(x, &true_target, &false_target, false);
+
+ if (has_cc()) {
+ UNIMPLEMENTED_MIPS();
+ }
+
+ if (true_target.is_linked() || false_target.is_linked()) {
+ UNIMPLEMENTED_MIPS();
+ }
+ ASSERT(has_valid_frame());
+ ASSERT(!has_cc());
+ ASSERT(frame_->height() == original_height + 1);
+}
+
+
+void CodeGenerator::LoadGlobal() {
+ VirtualFrame::SpilledScope spilled_scope;
+ __ lw(a0, GlobalObject());
+ frame_->EmitPush(a0);
+}
+
+
+void CodeGenerator::LoadFromSlot(Slot* slot, TypeofState typeof_state) {
+ VirtualFrame::SpilledScope spilled_scope;
+ if (slot->type() == Slot::LOOKUP) {
+ UNIMPLEMENTED_MIPS();
+ } else {
+ __ lw(a0, SlotOperand(slot, a2));
+ frame_->EmitPush(a0);
+ if (slot->var()->mode() == Variable::CONST) {
+ UNIMPLEMENTED_MIPS();
+ }
+ }
+}
+
+
+void CodeGenerator::StoreToSlot(Slot* slot, InitState init_state) {
+ ASSERT(slot != NULL);
+ if (slot->type() == Slot::LOOKUP) {
+ UNIMPLEMENTED_MIPS();
+ } else {
+ ASSERT(!slot->var()->is_dynamic());
+
+ JumpTarget exit;
+ if (init_state == CONST_INIT) {
+ UNIMPLEMENTED_MIPS();
+ }
+
+ // We must execute the store. Storing a variable must keep the
+ // (new) value on the stack. This is necessary for compiling
+ // assignment expressions.
+ //
+ // Note: We will reach here even with slot->var()->mode() ==
+ // Variable::CONST because of const declarations which will
+ // initialize consts to 'the hole' value and by doing so, end up
+ // calling this code. a2 may be loaded with context; used below in
+ // RecordWrite.
+ frame_->EmitPop(a0);
+ __ sw(a0, SlotOperand(slot, a2));
+ frame_->EmitPush(a0);
+ if (slot->type() == Slot::CONTEXT) {
+ UNIMPLEMENTED_MIPS();
+ }
+ // If we definitely did not jump over the assignment, we do not need
+ // to bind the exit label. Doing so can defeat peephole
+ // optimization.
+ if (init_state == CONST_INIT || slot->type() == Slot::CONTEXT) {
+ exit.Bind();
+ }
+ }
}
void CodeGenerator::VisitStatements(ZoneList<Statement*>* statements) {
- UNIMPLEMENTED_MIPS();
+ VirtualFrame::SpilledScope spilled_scope;
+ for (int i = 0; frame_ != NULL && i < statements->length(); i++) {
+ VisitAndSpill(statements->at(i));
+ }
}
void CodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
- UNIMPLEMENTED_MIPS();
+ VirtualFrame::SpilledScope spilled_scope;
+ frame_->EmitPush(cp);
+ __ li(t0, Operand(pairs));
+ frame_->EmitPush(t0);
+ __ li(t0, Operand(Smi::FromInt(is_eval() ? 1 : 0)));
+ frame_->EmitPush(t0);
+ frame_->CallRuntime(Runtime::kDeclareGlobals, 3);
+ // The result is discarded.
}
void CodeGenerator::VisitExpressionStatement(ExpressionStatement* node) {
- UNIMPLEMENTED_MIPS();
+#ifdef DEBUG
+ int original_height = frame_->height();
+#endif
+ VirtualFrame::SpilledScope spilled_scope;
+ Comment cmnt(masm_, "[ ExpressionStatement");
+ CodeForStatementPosition(node);
+ Expression* expression = node->expression();
+ expression->MarkAsStatement();
+ LoadAndSpill(expression);
+ frame_->Drop();
+ ASSERT(frame_->height() == original_height);
}
void CodeGenerator::VisitReturnStatement(ReturnStatement* node) {
- UNIMPLEMENTED_MIPS();
+ VirtualFrame::SpilledScope spilled_scope;
+ Comment cmnt(masm_, "[ ReturnStatement");
+
+ CodeForStatementPosition(node);
+ LoadAndSpill(node->expression());
+ if (function_return_is_shadowed_) {
+ frame_->EmitPop(v0);
+ function_return_.Jump();
+ } else {
+ // Pop the result from the frame and prepare the frame for
+ // returning, thus making it easier to merge.
+ frame_->EmitPop(v0);
+ frame_->PrepareForReturn();
+
+ function_return_.Jump();
+ }
}
}
-void CodeGenerator::VisitFunctionBoilerplateLiteral(
- FunctionBoilerplateLiteral* node) {
+void CodeGenerator::VisitSharedFunctionInfoLiteral(
+ SharedFunctionInfoLiteral* node) {
UNIMPLEMENTED_MIPS();
}
void CodeGenerator::VisitSlot(Slot* node) {
- UNIMPLEMENTED_MIPS();
+#ifdef DEBUG
+ int original_height = frame_->height();
+#endif
+ VirtualFrame::SpilledScope spilled_scope;
+ Comment cmnt(masm_, "[ Slot");
+ LoadFromSlot(node, typeof_state());
+ ASSERT(frame_->height() == original_height + 1);
}
void CodeGenerator::VisitVariableProxy(VariableProxy* node) {
- UNIMPLEMENTED_MIPS();
+#ifdef DEBUG
+ int original_height = frame_->height();
+#endif
+ VirtualFrame::SpilledScope spilled_scope;
+ Comment cmnt(masm_, "[ VariableProxy");
+
+ Variable* var = node->var();
+ Expression* expr = var->rewrite();
+ if (expr != NULL) {
+ Visit(expr);
+ } else {
+ ASSERT(var->is_global());
+ Reference ref(this, node);
+ ref.GetValueAndSpill();
+ }
+ ASSERT(frame_->height() == original_height + 1);
}
void CodeGenerator::VisitLiteral(Literal* node) {
- UNIMPLEMENTED_MIPS();
+#ifdef DEBUG
+ int original_height = frame_->height();
+#endif
+ VirtualFrame::SpilledScope spilled_scope;
+ Comment cmnt(masm_, "[ Literal");
+ __ li(t0, Operand(node->handle()));
+ frame_->EmitPush(t0);
+ ASSERT(frame_->height() == original_height + 1);
}
void CodeGenerator::VisitAssignment(Assignment* node) {
- UNIMPLEMENTED_MIPS();
+#ifdef DEBUG
+ int original_height = frame_->height();
+#endif
+ VirtualFrame::SpilledScope spilled_scope;
+ Comment cmnt(masm_, "[ Assignment");
+
+ { Reference target(this, node->target());
+ if (target.is_illegal()) {
+ // Fool the virtual frame into thinking that we left the assignment's
+ // value on the frame.
+ frame_->EmitPush(zero_reg);
+ ASSERT(frame_->height() == original_height + 1);
+ return;
+ }
+
+ if (node->op() == Token::ASSIGN ||
+ node->op() == Token::INIT_VAR ||
+ node->op() == Token::INIT_CONST) {
+ LoadAndSpill(node->value());
+ } else {
+ UNIMPLEMENTED_MIPS();
+ }
+
+ Variable* var = node->target()->AsVariableProxy()->AsVariable();
+ if (var != NULL &&
+ (var->mode() == Variable::CONST) &&
+ node->op() != Token::INIT_VAR && node->op() != Token::INIT_CONST) {
+ // Assignment ignored - leave the value on the stack.
+ } else {
+ CodeForSourcePosition(node->position());
+ if (node->op() == Token::INIT_CONST) {
+ // Dynamic constant initializations must use the function context
+ // and initialize the actual constant declared. Dynamic variable
+ // initializations are simply assignments and use SetValue.
+ target.SetValue(CONST_INIT);
+ } else {
+ target.SetValue(NOT_CONST_INIT);
+ }
+ }
+ }
+ ASSERT(frame_->height() == original_height + 1);
}
void CodeGenerator::VisitCall(Call* node) {
- UNIMPLEMENTED_MIPS();
+#ifdef DEBUG
+ int original_height = frame_->height();
+#endif
+ VirtualFrame::SpilledScope spilled_scope;
+ Comment cmnt(masm_, "[ Call");
+
+ Expression* function = node->expression();
+ ZoneList<Expression*>* args = node->arguments();
+
+ // Standard function call.
+ // Check if the function is a variable or a property.
+ Variable* var = function->AsVariableProxy()->AsVariable();
+ Property* property = function->AsProperty();
+
+ // ------------------------------------------------------------------------
+ // Fast-case: Use inline caching.
+ // ---
+ // According to ECMA-262, section 11.2.3, page 44, the function to call
+ // must be resolved after the arguments have been evaluated. The IC code
+ // automatically handles this by loading the arguments before the function
+ // is resolved in cache misses (this also holds for megamorphic calls).
+ // ------------------------------------------------------------------------
+
+ if (var != NULL && var->is_possibly_eval()) {
+ UNIMPLEMENTED_MIPS();
+ } else if (var != NULL && !var->is_this() && var->is_global()) {
+ // ----------------------------------
+ // JavaScript example: 'foo(1, 2, 3)' // foo is global
+ // ----------------------------------
+
+ int arg_count = args->length();
+
+ // We need sp to be 8-byte aligned when calling the stub.
+ __ SetupAlignedCall(t0, arg_count);
+
+ // Pass the global object as the receiver and let the IC stub
+ // patch the stack to use the global proxy as 'this' in the
+ // invoked function.
+ LoadGlobal();
+
+ // Load the arguments.
+ for (int i = 0; i < arg_count; i++) {
+ LoadAndSpill(args->at(i));
+ }
+
+ // Set up the receiver register and call the IC initialization code.
+ __ li(a2, Operand(var->name()));
+ InLoopFlag in_loop = loop_nesting() > 0 ? IN_LOOP : NOT_IN_LOOP;
+ Handle<Code> stub = ComputeCallInitialize(arg_count, in_loop);
+ CodeForSourcePosition(node->position());
+ frame_->CallCodeObject(stub, RelocInfo::CODE_TARGET_CONTEXT,
+ arg_count + 1);
+ __ ReturnFromAlignedCall();
+ __ lw(cp, frame_->Context());
+ // Push the result.
+ frame_->EmitPush(v0);
+
+ } else if (var != NULL && var->slot() != NULL &&
+ var->slot()->type() == Slot::LOOKUP) {
+ UNIMPLEMENTED_MIPS();
+ } else if (property != NULL) {
+ UNIMPLEMENTED_MIPS();
+ } else {
+ UNIMPLEMENTED_MIPS();
+ }
+
+ ASSERT(frame_->height() == original_height + 1);
}
#undef __
#define __ ACCESS_MASM(masm)
+// -----------------------------------------------------------------------------
+// Reference support
-Handle<Code> GetBinaryOpStub(int key, BinaryOpIC::TypeInfo type_info) {
- return Handle<Code>::null();
+Reference::Reference(CodeGenerator* cgen,
+ Expression* expression,
+ bool persist_after_get)
+ : cgen_(cgen),
+ expression_(expression),
+ type_(ILLEGAL),
+ persist_after_get_(persist_after_get) {
+ cgen->LoadReference(this);
+}
+
+
+Reference::~Reference() {
+ ASSERT(is_unloaded() || is_illegal());
+}
+
+
+Handle<String> Reference::GetName() {
+ ASSERT(type_ == NAMED);
+ Property* property = expression_->AsProperty();
+ if (property == NULL) {
+ // Global variable reference treated as a named property reference.
+ VariableProxy* proxy = expression_->AsVariableProxy();
+ ASSERT(proxy->AsVariable() != NULL);
+ ASSERT(proxy->AsVariable()->is_global());
+ return proxy->name();
+ } else {
+ Literal* raw_name = property->key()->AsLiteral();
+ ASSERT(raw_name != NULL);
+ return Handle<String>(String::cast(*raw_name->handle()));
+ }
+}
+
+
+void Reference::GetValue() {
+ ASSERT(cgen_->HasValidEntryRegisters());
+ ASSERT(!is_illegal());
+ ASSERT(!cgen_->has_cc());
+ Property* property = expression_->AsProperty();
+ if (property != NULL) {
+ cgen_->CodeForSourcePosition(property->position());
+ }
+
+ switch (type_) {
+ case SLOT: {
+ UNIMPLEMENTED_MIPS();
+ break;
+ }
+
+ case NAMED: {
+ UNIMPLEMENTED_MIPS();
+ break;
+ }
+
+ case KEYED: {
+ UNIMPLEMENTED_MIPS();
+ break;
+ }
+
+ default:
+ UNREACHABLE();
+ }
}
-// On entry a0 and a1 are the things to be compared. On exit v0 is 0,
+void Reference::SetValue(InitState init_state) {
+ ASSERT(!is_illegal());
+ ASSERT(!cgen_->has_cc());
+ MacroAssembler* masm = cgen_->masm();
+ Property* property = expression_->AsProperty();
+ if (property != NULL) {
+ cgen_->CodeForSourcePosition(property->position());
+ }
+
+ switch (type_) {
+ case SLOT: {
+ Comment cmnt(masm, "[ Store to Slot");
+ Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
+ cgen_->StoreToSlot(slot, init_state);
+ cgen_->UnloadReference(this);
+ break;
+ }
+
+ case NAMED: {
+ UNIMPLEMENTED_MIPS();
+ break;
+ }
+
+ case KEYED: {
+ UNIMPLEMENTED_MIPS();
+ break;
+ }
+
+ default:
+ UNREACHABLE();
+ }
+}
+
+
+// On entry a0 and a1 are the things to be compared. On exit v0 is 0,
// positive or negative to indicate the result of the comparison.
void CompareStub::Generate(MacroAssembler* masm) {
UNIMPLEMENTED_MIPS();
}
+Handle<Code> GetBinaryOpStub(int key, BinaryOpIC::TypeInfo type_info) {
+ UNIMPLEMENTED_MIPS();
+ return Handle<Code>::null();
+}
+
+
void StackCheckStub::Generate(MacroAssembler* masm) {
UNIMPLEMENTED_MIPS();
__ break_(0x790);
Label* throw_out_of_memory_exception,
bool do_gc,
bool always_allocate) {
- UNIMPLEMENTED_MIPS();
- __ break_(0x826);
+ // s0: number of arguments including receiver (C callee-saved)
+ // s1: pointer to the first argument (C callee-saved)
+ // s2: pointer to builtin function (C callee-saved)
+
+ if (do_gc) {
+ UNIMPLEMENTED_MIPS();
+ }
+
+ ExternalReference scope_depth =
+ ExternalReference::heap_always_allocate_scope_depth();
+ if (always_allocate) {
+ UNIMPLEMENTED_MIPS();
+ }
+
+ // Call C built-in.
+ // a0 = argc, a1 = argv
+ __ mov(a0, s0);
+ __ mov(a1, s1);
+
+ __ CallBuiltin(s2);
+
+ if (always_allocate) {
+ UNIMPLEMENTED_MIPS();
+ }
+
+ // Check for failure result.
+ Label failure_returned;
+ ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0);
+ __ addiu(a2, v0, 1);
+ __ andi(t0, a2, kFailureTagMask);
+ __ Branch(eq, &failure_returned, t0, Operand(zero_reg));
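+ // A failure value has all kFailureTagMask bits set, so adding 1 clears
+ // exactly those bits (the ASSERT above guarantees this); t0 is therefore
+ // zero only when v0 holds a failure.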
+
+ // Exit C frame and return.
+ // v0:v1: result
+ // sp: stack pointer
+ // fp: frame pointer
+ __ LeaveExitFrame(mode_);
+
+ // Check if we should retry or throw exception.
+ Label retry;
+ __ bind(&failure_returned);
+ ASSERT(Failure::RETRY_AFTER_GC == 0);
+ __ andi(t0, v0, ((1 << kFailureTypeTagSize) - 1) << kFailureTagSize);
+ __ Branch(eq, &retry, t0, Operand(zero_reg));
+
+ // Special handling of out of memory exceptions.
+ Failure* out_of_memory = Failure::OutOfMemoryException();
+ __ Branch(eq, throw_out_of_memory_exception,
+ v0, Operand(reinterpret_cast<int32_t>(out_of_memory)));
+
+ // Retrieve the pending exception and clear the variable.
+ __ LoadExternalReference(t0, ExternalReference::the_hole_value_location());
+ __ lw(a3, MemOperand(t0));
+ __ LoadExternalReference(t0,
+ ExternalReference(Top::k_pending_exception_address));
+ __ lw(v0, MemOperand(t0));
+ __ sw(a3, MemOperand(t0));
+
+ // Special handling of termination exceptions which are uncatchable
+ // by javascript code.
+ __ Branch(eq, throw_termination_exception,
+ v0, Operand(Factory::termination_exception()));
+
+ // Handle normal exception.
+ __ b(throw_normal_exception);
+ __ nop(); // Branch delay slot nop.
+
+ __ bind(&retry); // Pass the last failure (v0) as parameter (v0) when retrying.
}
void CEntryStub::Generate(MacroAssembler* masm) {
- UNIMPLEMENTED_MIPS();
- __ break_(0x831);
+ // Called from JavaScript; parameters are on stack as if calling JS function
+ // a0: number of arguments including receiver
+ // a1: pointer to builtin function
+ // fp: frame pointer (restored after C call)
+ // sp: stack pointer (restored as callee's sp after C call)
+ // cp: current context (C callee-saved)
+
+ // NOTE: Invocations of builtins may return failure objects
+ // instead of a proper result. The builtin entry handles
+ // this by performing a garbage collection and retrying the
+ // builtin once.
+
+ // Enter the exit frame that transitions from JavaScript to C++.
+ __ EnterExitFrame(mode_, s0, s1, s2);
+
+ // s0: number of arguments (C callee-saved)
+ // s1: pointer to first argument (C callee-saved)
+ // s2: pointer to builtin function (C callee-saved)
+
+ Label throw_normal_exception;
+ Label throw_termination_exception;
+ Label throw_out_of_memory_exception;
+
+ // Call into the runtime system.
+ GenerateCore(masm,
+ &throw_normal_exception,
+ &throw_termination_exception,
+ &throw_out_of_memory_exception,
+ false,
+ false);
+
+ // Do space-specific GC and retry runtime call.
+ GenerateCore(masm,
+ &throw_normal_exception,
+ &throw_termination_exception,
+ &throw_out_of_memory_exception,
+ true,
+ false);
+
+ // Do full GC and retry runtime call one final time.
+ Failure* failure = Failure::InternalError();
+ __ li(v0, Operand(reinterpret_cast<int32_t>(failure)));
+ GenerateCore(masm,
+ &throw_normal_exception,
+ &throw_termination_exception,
+ &throw_out_of_memory_exception,
+ true,
+ true);
+
+ __ bind(&throw_out_of_memory_exception);
+ GenerateThrowUncatchable(masm, OUT_OF_MEMORY);
+
+ __ bind(&throw_termination_exception);
+ GenerateThrowUncatchable(masm, TERMINATION);
+
+ __ bind(&throw_normal_exception);
+ GenerateThrowTOS(masm);
}
void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
- UNIMPLEMENTED_MIPS();
+ Label invoke, exit;
+
+ // Registers:
+ // a0: entry address
+ // a1: function
+ // a2: receiver
+ // a3: argc
+ //
+ // Stack:
+ // 4 args slots
+ // args
// Save callee saved registers on the stack.
- __ MultiPush(kCalleeSaved | ra.bit());
+ __ MultiPush((kCalleeSaved | ra.bit()) & ~sp.bit());
- // ********** State **********
- //
- // * Registers:
+ // We build an EntryFrame.
+ __ li(t3, Operand(-1)); // Push a bad frame pointer to fail if it is used.
+ int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
+ __ li(t2, Operand(Smi::FromInt(marker)));
+ __ li(t1, Operand(Smi::FromInt(marker)));
+ __ LoadExternalReference(t0, ExternalReference(Top::k_c_entry_fp_address));
+ __ lw(t0, MemOperand(t0));
+ __ MultiPush(t0.bit() | t1.bit() | t2.bit() | t3.bit());
+
+ // Set up the frame pointer for the frame to be pushed.
+ __ addiu(fp, sp, -EntryFrameConstants::kCallerFPOffset);
+
+ // Load argv in s0 register.
+ __ lw(s0, MemOperand(sp, (kNumCalleeSaved + 1) * kPointerSize +
+ StandardFrameConstants::kCArgsSlotsSize));
+
+ // Registers:
// a0: entry_address
// a1: function
// a2: receiver_pointer
// a3: argc
+ // s0: argv
//
- // * Stack:
- // ---------------------------
- // args
- // ---------------------------
- // 4 args slots
- // ---------------------------
+ // Stack:
+ // caller fp |
+ // function slot | entry frame
+ // context slot |
+ // bad fp (0xff...f) |
// callee saved registers + ra
- // ---------------------------
+ // 4 args slots
+ // args
+
+ // Call a faked try-block that does the invoke.
+ __ bal(&invoke);
+ __ nop(); // Branch delay slot nop.
+
+ // Caught exception: Store result (exception) in the pending
+ // exception field in the JSEnv and return a failure sentinel.
+ // Coming in here the fp will be invalid because the PushTryHandler below
+ // sets it to 0 to signal the existence of the JSEntry frame.
+ __ LoadExternalReference(t0,
+ ExternalReference(Top::k_pending_exception_address));
+ __ sw(v0, MemOperand(t0)); // We come back from 'invoke'. Result is in v0.
+ __ li(v0, Operand(reinterpret_cast<int32_t>(Failure::Exception())));
+ __ b(&exit);
+ __ nop(); // Branch delay slot nop.
+
+ // Invoke: Link this frame into the handler chain.
+ __ bind(&invoke);
+ __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER);
+ // If an exception not caught by another handler occurs, this handler
+ // returns control to the code after the bal(&invoke) above, which
+ // restores all kCalleeSaved registers (including cp and fp) to their
+ // saved values before returning a failure to C.
+
+ // Clear any pending exceptions.
+ __ LoadExternalReference(t0, ExternalReference::the_hole_value_location());
+ __ lw(t1, MemOperand(t0));
+ __ LoadExternalReference(t0,
+ ExternalReference(Top::k_pending_exception_address));
+ __ sw(t1, MemOperand(t0));
+
+ // Invoke the function by calling through JS entry trampoline builtin.
+ // Notice that we cannot store a reference to the trampoline code directly in
+ // this stub, because runtime stubs are not traversed when doing GC.
+
+ // Registers:
+ // a0: entry_address
+ // a1: function
+ // a2: receiver_pointer
+ // a3: argc
+ // s0: argv
//
- // ***************************
+ // Stack:
+ // handler frame
+ // entry frame
+ // callee saved registers + ra
+ // 4 args slots
+ // args
- __ break_(0x1234);
+ if (is_construct) {
+ ExternalReference construct_entry(Builtins::JSConstructEntryTrampoline);
+ __ LoadExternalReference(t0, construct_entry);
+ } else {
+ ExternalReference entry(Builtins::JSEntryTrampoline);
+ __ LoadExternalReference(t0, entry);
+ }
+ __ lw(t9, MemOperand(t0)); // Deref address.
+
+ // Call JSEntryTrampoline.
+ __ addiu(t9, t9, Code::kHeaderSize - kHeapObjectTag);
+ __ CallBuiltin(t9);
+
+ // Unlink this frame from the handler chain. When reading the
+ // address of the next handler, there is no need to use the address
+ // displacement since the current stack pointer (sp) points directly
+ // to the stack handler.
+ __ lw(t1, MemOperand(sp, StackHandlerConstants::kNextOffset));
+ __ LoadExternalReference(t0, ExternalReference(Top::k_handler_address));
+ __ sw(t1, MemOperand(t0));
+
+ // This restores sp to its position before PushTryHandler.
+ __ addiu(sp, sp, StackHandlerConstants::kSize);
+
+ __ bind(&exit); // v0 holds result
+ // Restore the top frame descriptors from the stack.
+ __ Pop(t1);
+ __ LoadExternalReference(t0, ExternalReference(Top::k_c_entry_fp_address));
+ __ sw(t1, MemOperand(t0));
+
+ // Reset the stack to the callee saved registers.
+ __ addiu(sp, sp, -EntryFrameConstants::kCallerFPOffset);
// Restore callee saved registers from the stack.
- __ MultiPop(kCalleeSaved | ra.bit());
-
- // Load a result.
- __ li(v0, Operand(0x1234));
- __ jr(ra);
- // Return
- __ nop();
+ __ MultiPop((kCalleeSaved | ra.bit()) & ~sp.bit());
+ // Return.
+ __ Jump(ra);
}
// This stub performs an instanceof, calling the builtin function if
-// necessary. Uses a1 for the object, a0 for the function that it may
+// necessary. Uses a1 for the object, a0 for the function that it may
// be an instance of (these are fetched from the stack).
void InstanceofStub::Generate(MacroAssembler* masm) {
UNIMPLEMENTED_MIPS();
enum TypeofState { INSIDE_TYPEOF, NOT_INSIDE_TYPEOF };
-// -------------------------------------------------------------------------
+// -----------------------------------------------------------------------------
+// Reference support
+
+// A reference is a C++ stack-allocated object that keeps an ECMA
+// reference on the execution stack while in scope. For variables
+// the reference is empty, indicating that it isn't necessary to
+// store state on the stack for keeping track of references to those.
+// For properties, we keep either one (named) or two (indexed) values
+// on the execution stack to represent the reference.
+class Reference BASE_EMBEDDED {
+ public:
+ // The values of the types are important, see size().
+ enum Type { UNLOADED = -2, ILLEGAL = -1, SLOT = 0, NAMED = 1, KEYED = 2 };
+ Reference(CodeGenerator* cgen,
+ Expression* expression,
+ bool persist_after_get = false);
+ ~Reference();
+
+ Expression* expression() const { return expression_; }
+ Type type() const { return type_; }
+ void set_type(Type value) {
+ ASSERT_EQ(ILLEGAL, type_);
+ type_ = value;
+ }
+
+ void set_unloaded() {
+ ASSERT_NE(ILLEGAL, type_);
+ ASSERT_NE(UNLOADED, type_);
+ type_ = UNLOADED;
+ }
+ // The size the reference takes up on the stack.
+ int size() const {
+ return (type_ < SLOT) ? 0 : type_;
+ }
+
+ bool is_illegal() const { return type_ == ILLEGAL; }
+ bool is_slot() const { return type_ == SLOT; }
+ bool is_property() const { return type_ == NAMED || type_ == KEYED; }
+ bool is_unloaded() const { return type_ == UNLOADED; }
+
+ // Return the name. Only valid for named property references.
+ Handle<String> GetName();
+
+ // Generate code to push the value of the reference on top of the
+ // expression stack. The reference is expected to be already on top of
+ // the expression stack, and it is consumed by the call unless the
+ // reference is for a compound assignment.
+ // If the reference is not consumed, it is left in place under its value.
+ void GetValue();
+
+ // Generate code to pop a reference, push the value of the reference,
+ // and then spill the stack frame.
+ inline void GetValueAndSpill();
+
+ // Generate code to store the value on top of the expression stack in the
+ // reference. The reference is expected to be immediately below the value
+ // on the expression stack. The value is stored in the location specified
+ // by the reference, and is left on top of the stack, after the reference
+ // is popped from beneath it (unloaded).
+ void SetValue(InitState init_state);
+
+ private:
+ CodeGenerator* cgen_;
+ Expression* expression_;
+ Type type_;
+ // Keep the reference on the stack after get, so it can be used by set later.
+ bool persist_after_get_;
+};
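+// Typical usage is RAII-style (a sketch): { Reference ref(cgen, expr);
+// ref.GetValue(); } loads the reference on construction and pushes its
+// value; the destructor then asserts it was consumed (unloaded) or illegal.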
+
+
+// -----------------------------------------------------------------------------
// Code generation state
// The state is passed down the AST by the code generator (and back up, in
-// -------------------------------------------------------------------------
+// -----------------------------------------------------------------------------
// CodeGenerator
class CodeGenerator: public AstVisitor {
// Number of instructions used for the JS return sequence. The constant is
// used by the debugger to patch the JS return sequence.
- static const int kJSReturnSequenceLength = 6;
+ static const int kJSReturnSequenceLength = 7;
// If the name is an inline runtime function call return the number of
// expected arguments. Otherwise return -1.
AST_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT
+ // Visit a statement and then spill the virtual frame if control flow can
+ // reach the end of the statement (i.e., it does not exit via break,
+ // continue, return, or throw). This function is used temporarily while
+ // the code generator is being transformed.
+ inline void VisitAndSpill(Statement* statement);
+
+ // Visit a list of statements and then spill the virtual frame if control
+ // flow can reach the end of the list.
+ inline void VisitStatementsAndSpill(ZoneList<Statement*>* statements);
+
// Main code generation function
void Generate(CompilationInfo* info);
+ // The following are used by class Reference.
+ void LoadReference(Reference* ref);
+ void UnloadReference(Reference* ref);
+
+ MemOperand ContextOperand(Register context, int index) const {
+ return MemOperand(context, Context::SlotOffset(index));
+ }
+
+ MemOperand SlotOperand(Slot* slot, Register tmp);
+
+ // Expressions
+ MemOperand GlobalObject() const {
+ return ContextOperand(cp, Context::GLOBAL_INDEX);
+ }
+
+ void LoadCondition(Expression* x,
+ JumpTarget* true_target,
+ JumpTarget* false_target,
+ bool force_cc);
+ void Load(Expression* x);
+ void LoadGlobal();
+
+ // Generate code to push the value of an expression on top of the frame
+ // and then spill the frame fully to memory. This function is used
+ // temporarily while the code generator is being transformed.
+ inline void LoadAndSpill(Expression* expression);
+
+ // Read a value from a slot and leave it on top of the expression stack.
+ void LoadFromSlot(Slot* slot, TypeofState typeof_state);
+ // Store the value on top of the stack to a slot.
+ void StoreToSlot(Slot* slot, InitState init_state);
+
struct InlineRuntimeLUT {
void (CodeGenerator::*method)(ZoneList<Expression*>*);
const char* name;
CompilationInfo* info_;
// Code generation state
- Scope* scope_;
VirtualFrame* frame_;
RegisterAllocator* allocator_;
Condition cc_reg_;
Address InternalFrame::GetCallerStackPointer() const {
- UNIMPLEMENTED_MIPS();
- return static_cast<Address>(NULL); // UNIMPLEMENTED RETURN
+ return fp() + StandardFrameConstants::kCallerSPOffset;
}
static const int kCallerPCOffset = +1 * kPointerSize;
// FP-relative displacement of the caller's SP.
- static const int kCallerSPDisplacement = +4 * kPointerSize;
+ static const int kCallerSPDisplacement = +3 * kPointerSize;
};
void CallIC::GenerateMiss(MacroAssembler* masm, int argc) {
UNIMPLEMENTED_MIPS();
+ // Registers:
+ // a2: name
+ // ra: return address
+
+ // Get the receiver of the function from the stack.
+ __ lw(a3, MemOperand(sp, argc * kPointerSize));
+
+ __ EnterInternalFrame();
+
+ // Push the receiver and the name of the function.
+ __ MultiPush(a2.bit() | a3.bit());
+
+ // Call the entry.
+ __ li(a0, Operand(2));
+ __ li(a1, Operand(ExternalReference(IC_Utility(kCallIC_Miss))));
+
+ CEntryStub stub(1);
+ __ CallStub(&stub);
+
+ // Move the result to a1 and leave the internal frame.
+ __ mov(a1, v0);
+ __ LeaveInternalFrame();
+
+ // Check if the receiver is a global object of some sort.
+ Label invoke, global;
+ __ lw(a2, MemOperand(sp, argc * kPointerSize));
+ __ andi(t0, a2, kSmiTagMask);
+ __ Branch(eq, &invoke, t0, Operand(zero_reg));
+ __ GetObjectType(a2, a3, a3);
+ __ Branch(eq, &global, a3, Operand(JS_GLOBAL_OBJECT_TYPE));
+ __ Branch(ne, &invoke, a3, Operand(JS_BUILTINS_OBJECT_TYPE));
+
+ // Patch the receiver on the stack.
+ __ bind(&global);
+ __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalReceiverOffset));
+ __ sw(a2, MemOperand(sp, argc * kPointerSize));
+
+ // Invoke the function.
+ ParameterCount actual(argc);
+ __ bind(&invoke);
+ __ InvokeFunction(a1, actual, JUMP_FUNCTION);
}
// Defined in ic.cc.
#define __ ACCESS_MASM(cgen()->masm())
void JumpTarget::DoJump() {
- UNIMPLEMENTED_MIPS();
+ ASSERT(cgen()->has_valid_frame());
+ // Live non-frame registers are not allowed at unconditional jumps
+ // because we have no way of invalidating the corresponding results
+ // which are still live in the C++ code.
+ ASSERT(cgen()->HasValidEntryRegisters());
+
+ if (is_bound()) {
+ // Backward jump. There is already a frame expectation at the target.
+ ASSERT(direction_ == BIDIRECTIONAL);
+ cgen()->frame()->MergeTo(entry_frame_);
+ cgen()->DeleteFrame();
+ } else {
+ // Use the current frame as the expected one at the target if necessary.
+ if (entry_frame_ == NULL) {
+ entry_frame_ = cgen()->frame();
+ RegisterFile empty;
+ cgen()->SetFrame(NULL, &empty);
+ } else {
+ cgen()->frame()->MergeTo(entry_frame_);
+ cgen()->DeleteFrame();
+ }
+
+ // The predicate is_linked() should be made true. Its implementation
+ // detects the presence of a frame pointer in the reaching_frames_ list.
+ if (!is_linked()) {
+ reaching_frames_.Add(NULL);
+ ASSERT(is_linked());
+ }
+ }
+ __ b(&entry_label_);
+ __ nop(); // Branch delay slot nop.
}
void JumpTarget::DoBind() {
- UNIMPLEMENTED_MIPS();
+ ASSERT(!is_bound());
+
+ // Live non-frame registers are not allowed at the start of a basic
+ // block.
+ ASSERT(!cgen()->has_valid_frame() || cgen()->HasValidEntryRegisters());
+
+ if (cgen()->has_valid_frame()) {
+ // If there is a current frame we can use it on the fall through.
+ if (entry_frame_ == NULL) {
+ entry_frame_ = new VirtualFrame(cgen()->frame());
+ } else {
+ ASSERT(cgen()->frame()->Equals(entry_frame_));
+ }
+ } else {
+ // If there is no current frame we must have an entry frame which we can
+ // copy.
+ ASSERT(entry_frame_ != NULL);
+ RegisterFile empty;
+ cgen()->SetFrame(new VirtualFrame(entry_frame_), &empty);
+ }
+
+ // The predicate is_linked() should be made false. Its implementation
+ // detects the presence (or absence) of frame pointers in the
+ // reaching_frames_ list. If we inserted a bogus frame to make
+ // is_linked() true, remove it now.
+ if (is_linked()) {
+ reaching_frames_.Clear();
+ }
+
+ __ bind(&entry_label_);
}
void BreakTarget::Jump() {
- UNIMPLEMENTED_MIPS();
+ // On MIPS we do not currently emit merge code for jumps, so we need to do
+ // it explicitly here. The only merging necessary is to drop extra
+ // statement state from the stack.
+ ASSERT(cgen()->has_valid_frame());
+ int count = cgen()->frame()->height() - expected_height_;
+ cgen()->frame()->Drop(count);
+ DoJump();
}
void BreakTarget::Bind() {
- UNIMPLEMENTED_MIPS();
+#ifdef DEBUG
+ // All the forward-reaching frames should have been adjusted at the
+ // jumps to this target.
+ for (int i = 0; i < reaching_frames_.length(); i++) {
+ ASSERT(reaching_frames_[i] == NULL ||
+ reaching_frames_[i]->height() == expected_height_);
+ }
+#endif
+ // Drop leftover statement state from the frame before merging, even
+ // on the fall through. This is so we can bind the return target
+ // with state on the frame.
+ if (cgen()->has_valid_frame()) {
+ int count = cgen()->frame()->height() - expected_height_;
+ // On MIPS we do not currently emit merge code at binding sites, so we need
+ // to do it explicitly here. The only merging necessary is to drop extra
+ // statement state from the stack.
+ cgen()->frame()->Drop(count);
+ }
+
+ DoBind();
}
void MacroAssembler::Jump(intptr_t target, RelocInfo::Mode rmode,
Condition cond, Register r1, const Operand& r2) {
- Jump(Operand(target), cond, r1, r2);
+ Jump(Operand(target, rmode), cond, r1, r2);
}
void MacroAssembler::Call(intptr_t target, RelocInfo::Mode rmode,
Condition cond, Register r1, const Operand& r2) {
- Call(Operand(target), cond, r1, r2);
+ Call(Operand(target, rmode), cond, r1, r2);
}
void MacroAssembler::LoadRoot(Register destination,
Heap::RootListIndex index) {
- lw(destination, MemOperand(s4, index << kPointerSizeLog2));
+ lw(destination, MemOperand(s6, index << kPointerSizeLog2));
}
void MacroAssembler::LoadRoot(Register destination,
Condition cond,
Register src1, const Operand& src2) {
Branch(NegateCondition(cond), 2, src1, src2);
- nop();
- lw(destination, MemOperand(s4, index << kPointerSizeLog2));
+ lw(destination, MemOperand(s6, index << kPointerSizeLog2));
}
}
-// load wartd in a register
void MacroAssembler::li(Register rd, Operand j, bool gen2instr) {
ASSERT(!j.is_reg());
int16_t NumToPush = NumberOfBitsSet(regs);
addiu(sp, sp, -4 * NumToPush);
- for (int16_t i = 0; i < kNumRegisters; i++) {
+ for (int16_t i = kNumRegisters - 1; i >= 0; i--) {
if ((regs & (1 << i)) != 0) {
sw(ToRegister(i), MemOperand(sp, 4 * (NumToPush - ++NumSaved)));
}
int16_t NumToPush = NumberOfBitsSet(regs);
addiu(sp, sp, -4 * NumToPush);
- for (int16_t i = kNumRegisters; i > 0; i--) {
+ for (int16_t i = 0; i < kNumRegisters; i++) {
if ((regs & (1 << i)) != 0) {
sw(ToRegister(i), MemOperand(sp, 4 * (NumToPush - ++NumSaved)));
}
void MacroAssembler::MultiPop(RegList regs) {
int16_t NumSaved = 0;
- for (int16_t i = kNumRegisters; i > 0; i--) {
+ for (int16_t i = 0; i < kNumRegisters; i++) {
if ((regs & (1 << i)) != 0) {
lw(ToRegister(i), MemOperand(sp, 4 * (NumSaved++)));
}
void MacroAssembler::MultiPopReversed(RegList regs) {
int16_t NumSaved = 0;
- for (int16_t i = 0; i < kNumRegisters; i++) {
+ for (int16_t i = kNumRegisters - 1; i >= 0; i--) {
if ((regs & (1 << i)) != 0) {
lw(ToRegister(i), MemOperand(sp, 4 * (NumSaved++)));
}
default:
UNREACHABLE();
}
+ // Emit a nop in the branch delay slot.
+ nop();
}
default:
UNREACHABLE();
}
+ // Emit a nop in the branch delay slot.
+ nop();
}
default:
UNREACHABLE();
}
+ // Emit a nop in the branch delay slot.
+ nop();
}
default:
UNREACHABLE();
}
+ // Emit a nop in the branch delay slot.
+ nop();
}
jr(target.rm());
} else {
Branch(NegateCondition(cond), 2, rs, rt);
- nop();
jr(target.rm());
}
} else { // !target.is_reg()
j(target.imm32_);
} else {
Branch(NegateCondition(cond), 2, rs, rt);
- nop();
- j(target.imm32_); // will generate only one instruction.
+ j(target.imm32_); // Will generate only one instruction.
}
} else { // MustUseAt(target)
- li(at, rt);
+ li(at, target);
if (cond == cc_always) {
jr(at);
} else {
Branch(NegateCondition(cond), 2, rs, rt);
- nop();
- jr(at); // will generate only one instruction.
+ jr(at); // Will generate only one instruction.
}
}
}
+ // Emit a nop in the branch delay slot.
+ nop();
}
jalr(target.rm());
} else {
Branch(NegateCondition(cond), 2, rs, rt);
- nop();
jalr(target.rm());
}
} else { // !target.is_reg()
jal(target.imm32_);
} else {
Branch(NegateCondition(cond), 2, rs, rt);
- nop();
- jal(target.imm32_); // will generate only one instruction.
+ jal(target.imm32_); // Will generate only one instruction.
}
} else { // MustUseAt(target)
- li(at, rt);
+ li(at, target);
if (cond == cc_always) {
jalr(at);
} else {
Branch(NegateCondition(cond), 2, rs, rt);
- nop();
- jalr(at); // will generate only one instruction.
+ jalr(at); // Will generate only one instruction.
}
}
}
+ // Emit a nop in the branch delay slot.
+ nop();
}
void MacroAssembler::StackLimitCheck(Label* on_stack_overflow) {
void MacroAssembler::PushTryHandler(CodeLocation try_location,
HandlerType type) {
- UNIMPLEMENTED_MIPS();
+ // Adjust this code if not the case.
+ ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
+ // The return address is passed in register ra.
+ if (try_location == IN_JAVASCRIPT) {
+ if (type == TRY_CATCH_HANDLER) {
+ li(t0, Operand(StackHandler::TRY_CATCH));
+ } else {
+ li(t0, Operand(StackHandler::TRY_FINALLY));
+ }
+ ASSERT(StackHandlerConstants::kStateOffset == 1 * kPointerSize
+ && StackHandlerConstants::kFPOffset == 2 * kPointerSize
+ && StackHandlerConstants::kPCOffset == 3 * kPointerSize
+ && StackHandlerConstants::kNextOffset == 0 * kPointerSize);
+ // Save the current handler as the next handler.
+ LoadExternalReference(t2, ExternalReference(Top::k_handler_address));
+ lw(t1, MemOperand(t2));
+
+ addiu(sp, sp, -StackHandlerConstants::kSize);
+ sw(ra, MemOperand(sp, 12));
+ sw(fp, MemOperand(sp, 8));
+ sw(t0, MemOperand(sp, 4));
+ sw(t1, MemOperand(sp, 0));
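+ // The handler frame now reads, from sp upward: next handler, state
+ // (TRY_CATCH or TRY_FINALLY), fp, ra (one word each), matching the
+ // StackHandlerConstants offsets asserted above.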
+
+ // Link this handler as the new current one.
+ sw(sp, MemOperand(t2));
+
+ } else {
+ // Must preserve a0-a3, and s0 (argv).
+ ASSERT(try_location == IN_JS_ENTRY);
+ ASSERT(StackHandlerConstants::kStateOffset == 1 * kPointerSize
+ && StackHandlerConstants::kFPOffset == 2 * kPointerSize
+ && StackHandlerConstants::kPCOffset == 3 * kPointerSize
+ && StackHandlerConstants::kNextOffset == 0 * kPointerSize);
+
+ // The frame pointer does not point to a JS frame so we save NULL
+ // for fp. We expect the code throwing an exception to check fp
+ // before dereferencing it to restore the context.
+ li(t0, Operand(StackHandler::ENTRY));
+
+ // Save the current handler as the next handler.
+ LoadExternalReference(t2, ExternalReference(Top::k_handler_address));
+ lw(t1, MemOperand(t2));
+
+ addiu(sp, sp, -StackHandlerConstants::kSize);
+ sw(ra, MemOperand(sp, 12));
+ sw(zero_reg, MemOperand(sp, 8));
+ sw(t0, MemOperand(sp, 4));
+ sw(t1, MemOperand(sp, 0));
+
+ // Link this handler as the new current one.
+ sw(sp, MemOperand(t2));
+ }
}
-// ---------------------------------------------------------------------------
+// -----------------------------------------------------------------------------
// Activation frames
+void MacroAssembler::SetupAlignedCall(Register scratch, int arg_count) {
+ Label extra_push, end;
+
+ andi(scratch, sp, 7);
+
+ // The args and the receiver on the stack are all word sized, and we add
+ // one more word for sp, which we also want to store on the stack.
+ if (((arg_count + 1) % 2) == 0) {
+ Branch(ne, &extra_push, scratch, Operand(zero_reg));
+ } else { // ((arg_count + 1) % 2) == 1
+ Branch(eq, &extra_push, scratch, Operand(zero_reg));
+ }
+ }
+
+ // Save sp on the stack.
+ mov(scratch, sp);
+ Push(scratch);
+ b(&end);
+ nop(); // Branch delay slot nop.
+
+ // Align before saving sp on the stack.
+ bind(&extra_push);
+ mov(scratch, sp);
+ addiu(sp, sp, -8);
+ sw(scratch, MemOperand(sp));
+
+ // The stack is aligned and sp is stored on the top.
+ bind(&end);
+}
+
+
+void MacroAssembler::ReturnFromAlignedCall() {
+ lw(sp, MemOperand(sp));
+}
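+// Together these keep stub calls 8-byte aligned: SetupAlignedCall stores
+// the incoming sp on the stack (with one word of padding when needed), and
+// ReturnFromAlignedCall reloads it with a single lw, which also pops the
+// receiver and arguments pushed for the call.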
+
+
+// -----------------------------------------------------------------------------
+// JavaScript invokes
+
+void MacroAssembler::InvokePrologue(const ParameterCount& expected,
+ const ParameterCount& actual,
+ Handle<Code> code_constant,
+ Register code_reg,
+ Label* done,
+ InvokeFlag flag) {
+ bool definitely_matches = false;
+ Label regular_invoke;
+
+ // Check whether the expected and actual arguments count match. If not,
+ // set up registers according to the contract with ArgumentsAdaptorTrampoline:
+ // a0: actual arguments count
+ // a1: function (passed through to callee)
+ // a2: expected arguments count
+ // a3: callee code entry
+
+ // The code below is made a lot easier because the calling code already sets
+ // up actual and expected registers according to the contract if values are
+ // passed in registers.
+ ASSERT(actual.is_immediate() || actual.reg().is(a0));
+ ASSERT(expected.is_immediate() || expected.reg().is(a2));
+ ASSERT((!code_constant.is_null() && code_reg.is(no_reg)) || code_reg.is(a3));
+
+ if (expected.is_immediate()) {
+ ASSERT(actual.is_immediate());
+ if (expected.immediate() == actual.immediate()) {
+ definitely_matches = true;
+ } else {
+ li(a0, Operand(actual.immediate()));
+ const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
+ if (expected.immediate() == sentinel) {
+ // Don't worry about adapting arguments for builtins that
+ // don't want that done. Skip the adaptation code by making it look
+ // like we have a match between expected and actual number of
+ // arguments.
+ definitely_matches = true;
+ } else {
+ li(a2, Operand(expected.immediate()));
+ }
+ }
+ } else if (actual.is_immediate()) {
+ Branch(eq, &regular_invoke, expected.reg(), Operand(actual.immediate()));
+ li(a0, Operand(actual.immediate()));
+ } else {
+ Branch(eq, &regular_invoke, expected.reg(), Operand(actual.reg()));
+ }
+
+ if (!definitely_matches) {
+ if (!code_constant.is_null()) {
+ li(a3, Operand(code_constant));
+ addiu(a3, a3, Code::kHeaderSize - kHeapObjectTag);
+ }
+
+ ExternalReference adaptor(Builtins::ArgumentsAdaptorTrampoline);
+ if (flag == CALL_FUNCTION) {
+ CallBuiltin(adaptor);
+ b(done);
+ nop(); // Branch delay slot nop.
+ } else {
+ JumpToBuiltin(adaptor);
+ }
+ bind(&regular_invoke);
+ }
+}
+
+
+void MacroAssembler::InvokeCode(Register code,
+ const ParameterCount& expected,
+ const ParameterCount& actual,
+ InvokeFlag flag) {
+ Label done;
+
+ InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
+ if (flag == CALL_FUNCTION) {
+ Call(code);
+ } else {
+ ASSERT(flag == JUMP_FUNCTION);
+ Jump(code);
+ }
+ // Continue here if InvokePrologue does handle the invocation due to
+ // mismatched parameter counts.
+ bind(&done);
+}
+
+
+void MacroAssembler::InvokeCode(Handle<Code> code,
+ const ParameterCount& expected,
+ const ParameterCount& actual,
+ RelocInfo::Mode rmode,
+ InvokeFlag flag) {
+ Label done;
+
+ InvokePrologue(expected, actual, code, no_reg, &done, flag);
+ if (flag == CALL_FUNCTION) {
+ Call(code, rmode);
+ } else {
+ Jump(code, rmode);
+ }
+ // Continue here if InvokePrologue does handle the invocation due to
+ // mismatched parameter counts.
+ bind(&done);
+}
+
+
+void MacroAssembler::InvokeFunction(Register function,
+ const ParameterCount& actual,
+ InvokeFlag flag) {
+ // Contract with called JS functions requires that function is passed in a1.
+ ASSERT(function.is(a1));
+ Register expected_reg = a2;
+ Register code_reg = a3;
+
+ lw(code_reg, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
+ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
+ lw(expected_reg,
+ FieldMemOperand(code_reg,
+ SharedFunctionInfo::kFormalParameterCountOffset));
+ lw(code_reg,
+ MemOperand(code_reg, SharedFunctionInfo::kCodeOffset - kHeapObjectTag));
+ addiu(code_reg, code_reg, Code::kHeaderSize - kHeapObjectTag);
+
+ ParameterCount expected(expected_reg);
+ InvokeCode(code_reg, expected, actual, flag);
+}
+
+
+// -----------------------------------------------------------------------------
+// Support functions.
+
+void MacroAssembler::GetObjectType(Register function,
+ Register map,
+ Register type_reg) {
+ lw(map, FieldMemOperand(function, HeapObject::kMapOffset));
+ lbu(type_reg, FieldMemOperand(map, Map::kInstanceTypeOffset));
+}
+
+
+void MacroAssembler::CallBuiltin(ExternalReference builtin_entry) {
+ // Load the builtin address.
+ LoadExternalReference(t9, builtin_entry);
+ lw(t9, MemOperand(t9)); // Deref address.
+ addiu(t9, t9, Code::kHeaderSize - kHeapObjectTag);
+ // Call and allocate argument slots.
+ jalr(t9);
+ // Use the branch delay slot to allocate the argument slots...
+ addiu(sp, sp, -StandardFrameConstants::kRArgsSlotsSize);
+ // ...and release them again after the call returns.
+ addiu(sp, sp, StandardFrameConstants::kRArgsSlotsSize);
+}
+
+
+void MacroAssembler::CallBuiltin(Register target) {
+ // Target already holds the target address.
+ // Call and allocate argument slots.
+ jalr(target);
+ // Use the branch delay slot to allocate the argument slots...
+ addiu(sp, sp, -StandardFrameConstants::kRArgsSlotsSize);
+ // ...and release them again after the call returns.
+ addiu(sp, sp, StandardFrameConstants::kRArgsSlotsSize);
+}
+
+
+void MacroAssembler::JumpToBuiltin(ExternalReference builtin_entry) {
+ // Load the builtin address.
+ LoadExternalReference(t9, builtin_entry);
+ lw(t9, MemOperand(t9)); // Deref address.
+ addiu(t9, t9, Code::kHeaderSize - kHeapObjectTag);
+ // Jump and allocate argument slots.
+ jr(t9);
+ // Use the branch delay slot to allocate the argument slots.
+ addiu(sp, sp, -StandardFrameConstants::kRArgsSlotsSize);
+}
+
+
+void MacroAssembler::JumpToBuiltin(Register target) {
+ // Target already holds the target address.
+ // Jump and allocate argument slots.
+ jr(target);
+ // Use the branch delay slot to allocate the argument slots.
+ addiu(sp, sp, -StandardFrameConstants::kRArgsSlotsSize);
+}
+
+
+// -----------------------------------------------------------------------------
+// Runtime calls
+
void MacroAssembler::CallStub(CodeStub* stub, Condition cond,
Register r1, const Operand& r2) {
- UNIMPLEMENTED_MIPS();
+ ASSERT(allow_stub_calls()); // Stub calls are not allowed in some stubs.
+ Call(stub->GetCode(), RelocInfo::CODE_TARGET, cond, r1, r2);
}
}
+void MacroAssembler::IllegalOperation(int num_arguments) {
+ if (num_arguments > 0) {
+ addiu(sp, sp, num_arguments * kPointerSize);
+ }
+ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
+}
+
+
void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
- UNIMPLEMENTED_MIPS();
+ // All parameters are on the stack. v0 has the return value after the call.
+
+ // If the expected number of arguments of the runtime function is
+ // constant, we check that the actual number of arguments matches the
+ // expectation.
+ if (f->nargs >= 0 && f->nargs != num_arguments) {
+ IllegalOperation(num_arguments);
+ return;
+ }
+
+ // TODO(1236192): Most runtime routines don't need the number of
+ // arguments passed in because it is constant. At some point we
+ // should remove this need and make the runtime routine entry code
+ // smarter.
+ li(a0, num_arguments);
+ LoadExternalReference(a1, ExternalReference(f));
+ CEntryStub stub(1);
+ CallStub(&stub);
}
void MacroAssembler::CallRuntime(Runtime::FunctionId fid, int num_arguments) {
- UNIMPLEMENTED_MIPS();
+ CallRuntime(Runtime::FunctionForId(fid), num_arguments);
}
}
+// -----------------------------------------------------------------------------
+// Debugging
void MacroAssembler::Assert(Condition cc, const char* msg,
Register rs, Operand rt) {
UNIMPLEMENTED_MIPS();
}
+
+void MacroAssembler::EnterFrame(StackFrame::Type type) {
+ addiu(sp, sp, -5 * kPointerSize);
+ li(t0, Operand(Smi::FromInt(type)));
+ li(t1, Operand(CodeObject()));
+ sw(ra, MemOperand(sp, 4 * kPointerSize));
+ sw(fp, MemOperand(sp, 3 * kPointerSize));
+ sw(cp, MemOperand(sp, 2 * kPointerSize));
+ sw(t0, MemOperand(sp, 1 * kPointerSize));
+ sw(t1, MemOperand(sp, 0 * kPointerSize));
+ addiu(fp, sp, 3 * kPointerSize);
+}
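+// For reference, the frame EnterFrame builds (sp-relative, low to high):
+//   [sp + 0 * kPointerSize]  code object
+//   [sp + 1 * kPointerSize]  frame type (as a Smi)
+//   [sp + 2 * kPointerSize]  cp
+//   [sp + 3 * kPointerSize]  caller's fp   <- the new fp points here
+//   [sp + 4 * kPointerSize]  ra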
+
+
+void MacroAssembler::LeaveFrame(StackFrame::Type type) {
+ mov(sp, fp);
+ lw(fp, MemOperand(sp, 0 * kPointerSize));
+ lw(ra, MemOperand(sp, 1 * kPointerSize));
+ addiu(sp, sp, 2 * kPointerSize);
+}
+
+
+void MacroAssembler::EnterExitFrame(ExitFrame::Mode mode,
+ Register hold_argc,
+ Register hold_argv,
+ Register hold_function) {
+ // Compute the argv pointer and keep it in a callee-saved register.
+ // a0 is argc.
+ sll(t0, a0, kPointerSizeLog2);
+ add(hold_argv, sp, t0);
+ addi(hold_argv, hold_argv, -kPointerSize);
+
+ // Compute the callee's stack pointer before making any changes and save
+ // it in register t1 so that it is restored as sp on exit, thereby
+ // popping the arguments.
+ // t1 = sp + kPointerSize * #args
+ add(t1, sp, t0);
+
+ // Align the stack at this point.
+ AlignStack(0);
+
+ // Save registers.
+ addiu(sp, sp, -12);
+ sw(t1, MemOperand(sp, 8));
+ sw(ra, MemOperand(sp, 4));
+ sw(fp, MemOperand(sp, 0));
+ mov(fp, sp); // Set up the new frame pointer.
+
+ // Push debug marker.
+ if (mode == ExitFrame::MODE_DEBUG) {
+ Push(zero_reg);
+ } else {
+ li(t0, Operand(CodeObject()));
+ Push(t0);
+ }
+
+ // Save the frame pointer and the context in top.
+ LoadExternalReference(t0, ExternalReference(Top::k_c_entry_fp_address));
+ sw(fp, MemOperand(t0));
+ LoadExternalReference(t0, ExternalReference(Top::k_context_address));
+ sw(cp, MemOperand(t0));
+
+ // Set up argc and the builtin function in callee-saved registers.
+ mov(hold_argc, a0);
+ mov(hold_function, a1);
+}
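+// For reference, the exit frame as laid out above (fp-relative):
+//   [fp + 8]  sp to restore on exit (t1, points past the arguments)
+//   [fp + 4]  ra
+//   [fp + 0]  caller's fp
+//   [fp - 4]  code object (or zero as the debug marker)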
+
+
+void MacroAssembler::LeaveExitFrame(ExitFrame::Mode mode) {
+ // Clear top frame.
+ LoadExternalReference(t0, ExternalReference(Top::k_c_entry_fp_address));
+ sw(zero_reg, MemOperand(t0));
+
+ // Restore current context from top and clear it in debug mode.
+ LoadExternalReference(t0, ExternalReference(Top::k_context_address));
+ lw(cp, MemOperand(t0));
+#ifdef DEBUG
+ sw(a3, MemOperand(t0));
+#endif
+
+ // Pop the arguments, restore registers, and return.
+ mov(sp, fp); // Respect ABI stack constraint.
+ lw(fp, MemOperand(sp, 0));
+ lw(ra, MemOperand(sp, 4));
+ lw(sp, MemOperand(sp, 8));
+ jr(ra);
+ nop(); // Branch delay slot nop.
+}
+
+
+void MacroAssembler::AlignStack(int offset) {
+ // On MIPS an offset of 0 aligns to 0 modulo 8 bytes,
+ // and an offset of 1 aligns to 4 modulo 8 bytes.
+ int activation_frame_alignment = OS::ActivationFrameAlignment();
+ if (activation_frame_alignment != kPointerSize) {
+ // This code needs to be made more general if this assert doesn't hold.
+ ASSERT(activation_frame_alignment == 2 * kPointerSize);
+ if (offset == 0) {
+ andi(t0, sp, activation_frame_alignment - 1);
+ Push(zero_reg, eq, t0, zero_reg);
+ } else {
+ andi(t0, sp, activation_frame_alignment - 1);
+ addiu(t0, t0, -4);
+ Push(zero_reg, eq, t0, zero_reg);
+ }
+ }
+}
+
} } // namespace v8::internal
// unless we know exactly what we do.
// Registers aliases
+// cp is assumed to be a callee-saved register.
const Register cp = s7; // JavaScript context pointer
const Register fp = s8_fp; // Alias fp
// Jump unconditionally to given label.
  // We NEED a nop in the branch delay slot, as it is used by v8, for example in
// CodeGenerator::ProcessDeferred().
+ // Currently the branch delay slot is filled by the MacroAssembler.
// Use rather b(Label) for code generation.
void jmp(Label* L) {
Branch(cc_always, L);
- nop();
}
// Load an object from the root table.
Heap::RootListIndex index,
Condition cond, Register src1, const Operand& src2);
+ // Load an external reference.
+ void LoadExternalReference(Register reg, ExternalReference ext) {
+ li(reg, Operand(ext));
+ }
+
// Sets the remembered set bit for [address+offset].
void RecordWrite(Register object, Register offset, Register scratch);
void Push(Register src, Condition cond, Register tst1, Register tst2) {
    // Since we don't have conditional execution, we use a Branch.
Branch(cond, 3, tst1, Operand(tst2));
- nop();
Addu(sp, sp, Operand(-kPointerSize));
sw(src, MemOperand(sp, 0));
}
}
+ // ---------------------------------------------------------------------------
+ // Activation frames
+
+ void EnterInternalFrame() { EnterFrame(StackFrame::INTERNAL); }
+ void LeaveInternalFrame() { LeaveFrame(StackFrame::INTERNAL); }
+
+ // Enter a specific kind of exit frame; either normal or debug
+ // (ExitFrame::MODE_DEBUG). Expects the number of arguments in register a0
+ // and the builtin function to call in register a1.
+ // On output hold_argc, hold_function, and hold_argv are set up.
+ void EnterExitFrame(ExitFrame::Mode mode,
+ Register hold_argc,
+ Register hold_argv,
+ Register hold_function);
+
+ // Leave the current exit frame. Expects the return value in v0.
+ void LeaveExitFrame(ExitFrame::Mode mode);
+
+ // Align the stack by optionally pushing a Smi zero.
+ void AlignStack(int offset);
+
+ void SetupAlignedCall(Register scratch, int arg_count = 0);
+ void ReturnFromAlignedCall();
+
+
+ // ---------------------------------------------------------------------------
+ // JavaScript invokes
+
+ // Invoke the JavaScript function code by either calling or jumping.
+ void InvokeCode(Register code,
+ const ParameterCount& expected,
+ const ParameterCount& actual,
+ InvokeFlag flag);
+
+ void InvokeCode(Handle<Code> code,
+ const ParameterCount& expected,
+ const ParameterCount& actual,
+ RelocInfo::Mode rmode,
+ InvokeFlag flag);
+
+ // Invoke the JavaScript function in the given register. Changes the
+ // current context to the context in the function before invoking.
+ void InvokeFunction(Register function,
+ const ParameterCount& actual,
+ InvokeFlag flag);
+
+
#ifdef ENABLE_DEBUGGER_SUPPORT
// ---------------------------------------------------------------------------
// Debugger Support
// Exception handling
// Push a new try handler and link into try handler chain.
- // The return address must be passed in register lr.
- // On exit, r0 contains TOS (code slot).
+ // The return address must be passed in register ra.
void PushTryHandler(CodeLocation try_location, HandlerType type);
// Unlink the stack handler on top of the stack from the try handler chain.
// ---------------------------------------------------------------------------
// Support functions.
+ void GetObjectType(Register function,
+ Register map,
+ Register type_reg);
+
inline void BranchOnSmi(Register value, Label* smi_label,
Register scratch = at) {
ASSERT_EQ(0, kSmiTag);
Branch(ne, not_smi_label, scratch, Operand(zero_reg));
}
+ void CallBuiltin(ExternalReference builtin_entry);
+ void CallBuiltin(Register target);
+ void JumpToBuiltin(ExternalReference builtin_entry);
+ void JumpToBuiltin(Register target);
+
+ // Generates code for reporting that an illegal operation has
+ // occurred.
+ void IllegalOperation(int num_arguments);
+
// ---------------------------------------------------------------------------
// Runtime calls
bool allow_stub_calls() { return allow_stub_calls_; }
private:
+ List<Unresolved> unresolved_;
+ bool generating_stub_;
+ bool allow_stub_calls_;
+ // This handle will be patched with the code object on installation.
+ Handle<Object> code_object_;
+
void Jump(intptr_t target, RelocInfo::Mode rmode, Condition cond = cc_always,
Register r1 = zero_reg, const Operand& r2 = Operand(zero_reg));
void Call(intptr_t target, RelocInfo::Mode rmode, Condition cond = cc_always,
Register r1 = zero_reg, const Operand& r2 = Operand(zero_reg));
+ // Helper functions for generating invokes.
+ void InvokePrologue(const ParameterCount& expected,
+ const ParameterCount& actual,
+ Handle<Code> code_constant,
+ Register code_reg,
+ Label* done,
+ InvokeFlag flag);
+
  // Get the code for the given builtin. Whether the function could be
  // resolved is returned in the 'resolved' flag.
Handle<Code> ResolveBuiltin(Builtins::JavaScript id, bool* resolved);
- List<Unresolved> unresolved_;
- bool generating_stub_;
- bool allow_stub_calls_;
- // This handle will be patched with the code object on installation.
- Handle<Object> code_object_;
+ // Activation support.
+ // EnterFrame clobbers t0 and t1.
+ void EnterFrame(StackFrame::Type type);
+ void LeaveFrame(StackFrame::Type type);
};
Object* StubCompiler::CompileLazyCompile(Code::Flags flags) {
- UNIMPLEMENTED_MIPS();
- return reinterpret_cast<Object*>(NULL); // UNIMPLEMENTED RETURN
+ // Registers:
+ // a1: function
+ // ra: return address
+
+ // Enter an internal frame.
+ __ EnterInternalFrame();
+ // Preserve the function.
+ __ Push(a1);
+ // Setup aligned call.
+ __ SetupAlignedCall(t0, 1);
+ // Push the function on the stack as the argument to the runtime function.
+ __ Push(a1);
+ // Call the runtime function.
+ __ CallRuntime(Runtime::kLazyCompile, 1);
+ __ ReturnFromAlignedCall();
+ // Calculate the entry point.
+ __ addiu(t9, v0, Code::kHeaderSize - kHeapObjectTag);
+ // Restore saved function.
+ __ Pop(a1);
+ // Tear down temporary frame.
+ __ LeaveInternalFrame();
+ // Do a tail-call of the compiled function.
+ __ Jump(t9);
+
+ return GetCodeWithFlags(flags, "LazyCompileStub");
}
}
+Object* CallStubCompiler::CompileArrayPushCall(Object* object,
+ JSObject* holder,
+ JSFunction* function,
+ String* name,
+ CheckType check) {
+ UNIMPLEMENTED_MIPS();
+ return reinterpret_cast<Object*>(NULL); // UNIMPLEMENTED RETURN
+}
+
+
+Object* CallStubCompiler::CompileArrayPopCall(Object* object,
+ JSObject* holder,
+ JSFunction* function,
+ String* name,
+ CheckType check) {
+ UNIMPLEMENTED_MIPS();
+ return reinterpret_cast<Object*>(NULL); // UNIMPLEMENTED RETURN
+}
+
+
Object* CallStubCompiler::CompileCallConstant(Object* object,
JSObject* holder,
JSFunction* function,
void VirtualFrame::SyncRange(int begin, int end) {
- UNIMPLEMENTED_MIPS();
+ // All elements are in memory on MIPS (ie, synced).
+#ifdef DEBUG
+ for (int i = begin; i <= end; i++) {
+ ASSERT(elements_[i].is_synced());
+ }
+#endif
}
void VirtualFrame::Enter() {
- UNIMPLEMENTED_MIPS();
+ // TODO(MIPS): Implement DEBUG
+
+ // We are about to push four values to the frame.
+ Adjust(4);
+ __ MultiPush(ra.bit() | fp.bit() | cp.bit() | a1.bit());
+ // Adjust FP to point to saved FP.
+ __ addiu(fp, sp, 2 * kPointerSize);
}
void VirtualFrame::AllocateStackSlots() {
- UNIMPLEMENTED_MIPS();
+ int count = local_count();
+ if (count > 0) {
+ Comment cmnt(masm(), "[ Allocate space for locals");
+ Adjust(count);
+ // Initialize stack slots with 'undefined' value.
+ __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
+ __ addiu(sp, sp, -count * kPointerSize);
+ for (int i = 0; i < count; i++) {
+ __ sw(t0, MemOperand(sp, (count - i - 1) * kPointerSize));
+ }
+ }
}
void VirtualFrame::CallRuntime(Runtime::Function* f, int arg_count) {
- UNIMPLEMENTED_MIPS();
+ PrepareForCall(arg_count, arg_count);
+ ASSERT(cgen()->HasValidEntryRegisters());
+ __ CallRuntime(f, arg_count);
}
void VirtualFrame::CallRuntime(Runtime::FunctionId id, int arg_count) {
- UNIMPLEMENTED_MIPS();
+ PrepareForCall(arg_count, arg_count);
+ ASSERT(cgen()->HasValidEntryRegisters());
+ __ CallRuntime(id, arg_count);
}
}
-void VirtualFrame::RawCallCodeObject(Handle<Code> code,
- RelocInfo::Mode rmode) {
- UNIMPLEMENTED_MIPS();
-}
-
-
void VirtualFrame::CallCodeObject(Handle<Code> code,
RelocInfo::Mode rmode,
int dropped_args) {
- UNIMPLEMENTED_MIPS();
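+ // Only CALL_IC is supported so far; the remaining code kinds below are
+ // still to be ported.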
+ switch (code->kind()) {
+ case Code::CALL_IC:
+ break;
+ case Code::FUNCTION:
+ UNIMPLEMENTED_MIPS();
+ break;
+ case Code::KEYED_LOAD_IC:
+ UNIMPLEMENTED_MIPS();
+ break;
+ case Code::LOAD_IC:
+ UNIMPLEMENTED_MIPS();
+ break;
+ case Code::KEYED_STORE_IC:
+ UNIMPLEMENTED_MIPS();
+ break;
+ case Code::STORE_IC:
+ UNIMPLEMENTED_MIPS();
+ break;
+ case Code::BUILTIN:
+ UNIMPLEMENTED_MIPS();
+ break;
+ default:
+ UNREACHABLE();
+ break;
+ }
+ Forget(dropped_args);
+ ASSERT(cgen()->HasValidEntryRegisters());
+ __ Call(code, rmode);
}
void VirtualFrame::Drop(int count) {
- UNIMPLEMENTED_MIPS();
+ ASSERT(count >= 0);
+ ASSERT(height() >= count);
+ int num_virtual_elements = (element_count() - 1) - stack_pointer_;
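+ // Elements above the hardware stack pointer exist only virtually, so,
+ // e.g., dropping 5 elements of which 3 are virtual pops just 2 words.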
+
+ // Emit code to lower the stack pointer if necessary.
+ if (num_virtual_elements < count) {
+ int num_dropped = count - num_virtual_elements;
+ stack_pointer_ -= num_dropped;
+ __ addiu(sp, sp, num_dropped * kPointerSize);
+ }
+
+ // Discard elements from the virtual frame and free any registers.
+ for (int i = 0; i < count; i++) {
+ FrameElement dropped = elements_.RemoveLast();
+ if (dropped.is_register()) {
+ Unuse(dropped.reg());
+ }
+ }
}
Result VirtualFrame::Pop() {
UNIMPLEMENTED_MIPS();
Result res = Result();
- return res; // UNIMPLEMENTED RETUR
+ return res; // UNIMPLEMENTED RETURN
}
void VirtualFrame::EmitPop(Register reg) {
- UNIMPLEMENTED_MIPS();
+ ASSERT(stack_pointer_ == element_count() - 1);
+ stack_pointer_--;
+ elements_.RemoveLast();
+ __ Pop(reg);
}
+
void VirtualFrame::EmitMultiPop(RegList regs) {
- UNIMPLEMENTED_MIPS();
+ ASSERT(stack_pointer_ == element_count() - 1);
+ for (int16_t i = 0; i < kNumRegisters; i++) {
+ if ((regs & (1 << i)) != 0) {
+ stack_pointer_--;
+ elements_.RemoveLast();
+ }
+ }
+ __ MultiPop(regs);
}
void VirtualFrame::EmitPush(Register reg) {
- UNIMPLEMENTED_MIPS();
+ ASSERT(stack_pointer_ == element_count() - 1);
+ elements_.Add(FrameElement::MemoryElement(NumberInfo::Unknown()));
+ stack_pointer_++;
+ __ Push(reg);
}
+
void VirtualFrame::EmitMultiPush(RegList regs) {
- UNIMPLEMENTED_MIPS();
+ ASSERT(stack_pointer_ == element_count() - 1);
+ for (int16_t i = kNumRegisters - 1; i >= 0; i--) {
+ if ((regs & (1 << i)) != 0) {
+ elements_.Add(FrameElement::MemoryElement(NumberInfo::Unknown()));
+ stack_pointer_++;
+ }
+ }
+ __ MultiPush(regs);
}
+
void VirtualFrame::EmitArgumentSlots(RegList reglist) {
UNIMPLEMENTED_MIPS();
}
// -------------------------------------------------------------------------
// Virtual frames
//
-// The virtual frame is an abstraction of the physical stack frame. It
+// The virtual frame is an abstraction of the physical stack frame. It
// encapsulates the parameters, frame-allocated locals, and the expression
-// stack. It supports push/pop operations on the expression stack, as well
+// stack. It supports push/pop operations on the expression stack, as well
// as random access to the expression stack elements, locals, and
// parameters.
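// A hypothetical usage sketch (names are illustrative, not from this
// patch): the code generator pushes operands, calls through the frame so
// the bookkeeping stays in sync, then pushes the result:
//   frame->EmitPush(a0);                             // new element on top
//   frame->CallRuntime(Runtime::kNumberToString, 1);  // consumes one
//   frame->EmitPush(v0);                             // push the result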
class VirtualFrame : public ZoneObject {
public:
// A utility class to introduce a scope where the virtual frame is
- // expected to remain spilled. The constructor spills the code
+ // expected to remain spilled. The constructor spills the code
// generator's current frame, but no attempt is made to require it
- // to stay spilled. It is intended as documentation while the code
+ // to stay spilled. It is intended as documentation while the code
// generator is being transformed.
class SpilledScope BASE_EMBEDDED {
public:
}
// Add extra in-memory elements to the top of the frame to match an actual
- // frame (eg, the frame after an exception handler is pushed). No code is
+ // frame (eg, the frame after an exception handler is pushed). No code is
// emitted.
void Adjust(int count);
// Forget elements from the top of the frame to match an actual frame (eg,
- // the frame after a runtime call). No code is emitted.
+ // the frame after a runtime call). No code is emitted.
void Forget(int count) {
ASSERT(count >= 0);
ASSERT(stack_pointer_ == element_count() - 1);
}
// Forget count elements from the top of the frame and adjust the stack
- // pointer downward. This is used, for example, before merging frames at
+ // pointer downward. This is used, for example, before merging frames at
// break, continue, and return targets.
void ForgetElements(int count);
if (is_used(reg)) SpillElementAt(register_location(reg));
}
- // Spill all occurrences of an arbitrary register if possible. Return the
+ // Spill all occurrences of an arbitrary register if possible. Return the
// register spilled or no_reg if it was not possible to free any register
// (ie, they all have frame-external references).
Register SpillAnyRegister();
// Prepare this virtual frame for merging to an expected frame by
// performing some state changes that do not require generating
- // code. It is guaranteed that no code will be generated.
+ // code. It is guaranteed that no code will be generated.
void PrepareMergeTo(VirtualFrame* expected);
// Make this virtual frame have a state identical to an expected virtual
- // frame. As a side effect, code may be emitted to make this frame match
+ // frame. As a side effect, code may be emitted to make this frame match
// the expected one.
void MergeTo(VirtualFrame* expected);
- // Detach a frame from its code generator, perhaps temporarily. This
+ // Detach a frame from its code generator, perhaps temporarily. This
// tells the register allocator that it is free to use frame-internal
- // registers. Used when the code generator's frame is switched from this
+ // registers. Used when the code generator's frame is switched from this
// one to NULL by an unconditional jump.
void DetachFromCodeGenerator() {
RegisterAllocator* cgen_allocator = cgen()->allocator();
}
}
- // (Re)attach a frame to its code generator. This informs the register
+ // (Re)attach a frame to its code generator. This informs the register
// allocator that the frame-internal register references are active again.
// Used when a code generator's frame is switched from NULL to this one by
// binding a label.
}
}
- // Emit code for the physical JS entry and exit frame sequences. After
+ // Emit code for the physical JS entry and exit frame sequences. After
// calling Enter, the virtual frame is ready for use; and after calling
- // Exit it should not be used. Note that Enter does not allocate space in
+ // Exit it should not be used. Note that Enter does not allocate space in
// the physical frame for storing frame-allocated locals.
void Enter();
void Exit();
// Prepare for returning from the frame by spilling locals and
- // dropping all non-locals elements in the virtual frame. This
+ // dropping all non-locals elements in the virtual frame. This
// avoids generating unnecessary merge code when jumping to the
- // shared return site. Emits code for spills.
+ // shared return site. Emits code for spills.
void PrepareForReturn();
// Allocate and initialize the frame-allocated locals.
return MemOperand(sp, index * kPointerSize);
}
- // Random-access store to a frame-top relative frame element. The result
+ // Random-access store to a frame-top relative frame element. The result
// becomes owned by the frame and is invalidated.
void SetElementAt(int index, Result* value);
- // Set a frame element to a constant. The index is frame-top relative.
+ // Set a frame element to a constant. The index is frame-top relative.
void SetElementAt(int index, Handle<Object> value) {
Result temp(value);
SetElementAt(index, &temp);
}
// Push the value of a local frame slot on top of the frame and invalidate
- // the local slot. The slot should be written to before trying to read
+ // the local slot. The slot should be written to before trying to read
// from it again.
void TakeLocalAt(int index) {
TakeFrameSlotAt(local0_index() + index);
}
- // Store the top value on the virtual frame into a local frame slot. The
+ // Store the top value on the virtual frame into a local frame slot. The
// value is left in place on top of the frame.
void StoreToLocalAt(int index) {
StoreToFrameSlotAt(local0_index() + index);
}
  // Push the value of a parameter frame slot on top of the frame and
- // invalidate the parameter slot. The slot should be written to before
+ // invalidate the parameter slot. The slot should be written to before
// trying to read from it again.
void TakeParameterAt(int index) {
TakeFrameSlotAt(param0_index() + index);
RawCallStub(stub);
}
- // Call stub that expects its argument in r0. The argument is given
- // as a result which must be the register r0.
void CallStub(CodeStub* stub, Result* arg);
- // Call stub that expects its arguments in r1 and r0. The arguments
- // are given as results which must be the appropriate registers.
void CallStub(CodeStub* stub, Result* arg0, Result* arg1);
// Call runtime given the number of arguments expected on (and
int arg_count);
// Call into an IC stub given the number of arguments it removes
- // from the stack. Register arguments are passed as results and
+ // from the stack. Register arguments are passed as results and
// consumed by the call.
void CallCodeObject(Handle<Code> ic,
RelocInfo::Mode rmode,
int dropped_args,
bool set_auto_args_slots = false);
- // Drop a number of elements from the top of the expression stack. May
- // emit code to affect the physical frame. Does not clobber any registers
+ // Drop a number of elements from the top of the expression stack. May
+ // emit code to affect the physical frame. Does not clobber any registers
// excepting possibly the stack pointer.
void Drop(int count);
// Similar to VirtualFrame::Drop but we don't modify the actual stack.
// Duplicate the top element of the frame.
void Dup() { PushFrameSlotAt(element_count() - 1); }
- // Pop an element from the top of the expression stack. Returns a
+ // Pop an element from the top of the expression stack. Returns a
// Result, which may be a constant or a register.
Result Pop();
// emit a corresponding pop instruction.
void EmitPop(Register reg);
// Same but for multiple registers
- void EmitMultiPop(RegList regs); // higher indexed registers popped first
- void EmitMultiPopReversed(RegList regs); // lower first
+ void EmitMultiPop(RegList regs);
+ void EmitMultiPopReversed(RegList regs);
// Push an element on top of the expression stack and emit a
// corresponding push instruction.
void EmitPush(Register reg);
// Same but for multiple registers.
- void EmitMultiPush(RegList regs); // lower indexed registers are pushed first
- void EmitMultiPushReversed(RegList regs); // higher first
+ void EmitMultiPush(RegList regs);
+ void EmitMultiPushReversed(RegList regs);
// Push an element on the virtual frame.
inline void Push(Register reg, NumberInfo info = NumberInfo::Unknown());
// Nip removes zero or more elements from immediately below the top
// of the frame, leaving the previous top-of-frame value on top of
- // the frame. Nip(k) is equivalent to x = Pop(), Drop(k), Push(x).
+ // the frame. Nip(k) is equivalent to x = Pop(), Drop(k), Push(x).
inline void Nip(int num_dropped);
  // This pushes 4 argument slots on the stack and saves the requested 'a' registers.
void EmitArgumentSlots(RegList reglist);
inline void SetTypeForLocalAt(int index, NumberInfo info);
+ inline void SetTypeForParamAt(int index, NumberInfo info);
private:
static const int kLocal0Offset = JavaScriptFrameConstants::kLocal0Offset;
int local_count() { return cgen()->scope()->num_stack_slots(); }
// The index of the element that is at the processor's frame pointer
- // (the fp register). The parameters, receiver, function, and context
+ // (the fp register). The parameters, receiver, function, and context
// are below the frame pointer.
int frame_pointer() { return parameter_count() + 3; }
- // The index of the first parameter. The receiver lies below the first
+ // The index of the first parameter. The receiver lies below the first
// parameter.
int param0_index() { return 1; }
- // The index of the context slot in the frame. It is immediately
+ // The index of the context slot in the frame. It is immediately
// below the frame pointer.
int context_index() { return frame_pointer() - 1; }
- // The index of the function slot in the frame. It is below the frame
+ // The index of the function slot in the frame. It is below the frame
// pointer and context slot.
int function_index() { return frame_pointer() - 2; }
- // The index of the first local. Between the frame pointer and the
+ // The index of the first local. Between the frame pointer and the
// locals lies the return address.
int local0_index() { return frame_pointer() + 2; }
return (frame_pointer() - index) * kPointerSize;
}
- // Record an occurrence of a register in the virtual frame. This has the
+ // Record an occurrence of a register in the virtual frame. This has the
// effect of incrementing the register's external reference count and
// of updating the index of the register's location in the frame.
void Use(Register reg, int index) {
cgen()->allocator()->Use(reg);
}
- // Record that a register reference has been dropped from the frame. This
+ // Record that a register reference has been dropped from the frame. This
// decrements the register's external reference count and invalidates the
// index of the register's location in the frame.
void Unuse(Register reg) {
// constant.
void SpillElementAt(int index);
- // Sync the element at a particular index. If it is a register or
+ // Sync the element at a particular index. If it is a register or
// constant that disagrees with the value on the stack, write it to memory.
// Keep the element type as register or constant, and clear the dirty bit.
void SyncElementAt(int index);
void StoreToFrameSlotAt(int index);
// Spill all elements in registers. Spill the top spilled_args elements
- // on the frame. Sync all other frame elements.
+ // on the frame. Sync all other frame elements.
// Then drop dropped_args elements from the virtual frame, to match
// the effect of an upcoming call that will drop them from the stack.
void PrepareForCall(int spilled_args, int dropped_args);
// Make the memory-to-register and constant-to-register moves
// needed to make this frame equal the expected frame.
// Called after all register-to-memory and register-to-register
- // moves have been made. After this function returns, the frames
+ // moves have been made. After this function returns, the frames
// should be equal.
void MergeMoveMemoryToRegisters(VirtualFrame* expected);
// Invalidates a frame slot (puts an invalid frame element in it).
// Copies on the frame are correctly handled, and if this slot was
// the backing store of copies, the index of the new backing store
- // is returned. Otherwise, returns kIllegalIndex.
+ // is returned. Otherwise, returns kIllegalIndex.
// Register counts are correctly updated.
int InvalidateFrameSlotAt(int index);
'arch:x64': ['test-assembler-x64.cc',
'test-macro-assembler-x64.cc',
'test-log-stack-tracer.cc'],
- 'arch:mips': ['test-assembler-mips.cc'],
+ 'arch:mips': ['test-assembler-mips.cc', 'test-mips.cc'],
'os:linux': ['test-platform-linux.cc'],
'os:macos': ['test-platform-macos.cc'],
'os:nullos': ['test-platform-nullos.cc'],
test-alloc: SKIP
test-api: SKIP
test-compiler: SKIP
+test-cpu-profiler: SKIP
test-debug: SKIP
test-decls: SKIP
test-func-name-inference: SKIP
// The test framework does not accept flags on the command line, so we set them.
static void InitializeVM() {
- // Disable compilation of natives by specifying an empty natives file.
- FLAG_natives_file = "";
+ // Disable compilation of natives.
+ FLAG_disable_native_files = true;
// Enable generation of comments.
FLAG_debug_code = true;
--- /dev/null
+// Copyright 2010 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+#include "v8.h"
+#include "execution.h"
+
+#include "cctest.h"
+
+using ::v8::Local;
+using ::v8::String;
+using ::v8::Script;
+
+namespace i = ::v8::internal;
+
+TEST(MIPSFunctionCalls) {
+ // Disable compilation of natives.
+ i::FLAG_disable_native_files = true;
+ i::FLAG_full_compiler = false;
+
+ v8::HandleScope scope;
+ LocalContext env; // from cctest.h
+
+ const char* c_source = "function foo() { return 0x1234; }; foo();";
+ Local<String> source = ::v8::String::New(c_source);
+ Local<Script> script = ::v8::Script::Compile(source);
+ CHECK_EQ(0x1234, script->Run()->Int32Value());
+}