'x64/debug-x64.cc', 'x64/frames-x64.cc', 'x64/ic-x64.cc',
'x64/jump-target-x64.cc', 'x64/macro-assembler-x64.cc',
# 'x64/regexp-macro-assembler-x64.cc',
+ 'x64/register-allocator-x64.cc',
'x64/stub-cache-x64.cc', 'x64/virtual-frame-x64.cc'
],
'simulator:arm': ['arm/simulator-arm.cc'],
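+  // A runtime entry or code target is stored as a position-relative
+  // offset: when this code moves by 'delta' the target stays put, so
+  // the stored value must be adjusted by -delta.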
if (rmode_ == RUNTIME_ENTRY || IsCodeTarget(rmode_)) {
intptr_t* p = reinterpret_cast<intptr_t*>(pc_);
*p -= delta; // relocate entry
- } else if (rmode_ == JS_RETURN && IsCallInstruction()) {
- // Special handling of js_return when a break point is set (call
- // instruction has been inserted).
- intptr_t* p = reinterpret_cast<intptr_t*>(pc_ + 1);
- *p -= delta; // relocate entry
} else if (IsInternalReference(rmode_)) {
// absolute code pointer inside code object moves with the code object.
intptr_t* p = reinterpret_cast<intptr_t*>(pc_);
ASSERT(function.is(rdi));
movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
movq(rsi, FieldOperand(function, JSFunction::kContextOffset));
- movq(rbx, FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
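+  // The formal parameter count is a 32-bit field; a 64-bit load would
+  // also pick up the neighboring field in the high bits.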
+ movl(rbx, FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
movq(rdx, FieldOperand(rdx, SharedFunctionInfo::kCodeOffset));
+  // Advance rdx past the Code object header to the start of the
+  // executable code.
lea(rdx, FieldOperand(rdx, Code::kHeaderSize));
ParameterCount expected(rbx);
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#include "v8.h"
+
+#include "codegen-inl.h"
+#include "register-allocator-inl.h"
+
+namespace v8 {
+namespace internal {
+
+// -------------------------------------------------------------------------
+// Result implementation.
+
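+// Ensure that this Result is backed by a machine register.  Materializing
+// a constant into a register is not yet implemented on x64.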
+void Result::ToRegister() {
+ ASSERT(is_valid());
+  if (is_constant()) {
+    // TODO(X64): Handle constant results. Until that is implemented,
+    // fail loudly here rather than falling through to the ASSERT
+    // below with a non-register Result.
+    UNIMPLEMENTED();
+ /*
+ Result fresh = CodeGeneratorScope::Current()->allocator()->Allocate();
+ ASSERT(fresh.is_valid());
+ if (CodeGeneratorScope::Current()->IsUnsafeSmi(handle())) {
+ CodeGeneratorScope::Current()->LoadUnsafeSmi(fresh.reg(), handle());
+ } else {
+ CodeGeneratorScope::Current()->masm()->Set(fresh.reg(),
+ Immediate(handle()));
+ }
+ // This result becomes a copy of the fresh one.
+ *this = fresh;
+ */
+ }
+ ASSERT(is_register());
+}
+
+
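+// Ensure that this Result is in the fixed register 'target', moving or
+// materializing its value there if necessary.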
+void Result::ToRegister(Register target) {
+ ASSERT(is_valid());
+ if (!is_register() || !reg().is(target)) {
+ Result fresh = CodeGeneratorScope::Current()->allocator()->Allocate(target);
+ ASSERT(fresh.is_valid());
+ if (is_register()) {
+ CodeGeneratorScope::Current()->masm()->movq(fresh.reg(), reg());
+ } else {
+      ASSERT(is_constant());
+      // TODO(X64): Handle constant results. Fail loudly until then.
+      UNIMPLEMENTED();
+      /*
+ if (CodeGeneratorScope::Current()->IsUnsafeSmi(handle())) {
+ CodeGeneratorScope::Current()->LoadUnsafeSmi(fresh.reg(), handle());
+ } else {
+ CodeGeneratorScope::Current()->masm()->Set(fresh.reg(),
+ Immediate(handle()));
+ }
+ */
+ }
+ *this = fresh;
+ } else if (is_register() && reg().is(target)) {
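+    // The result is already in the target register.  Spill the frame's
+    // other uses of that register so this Result becomes its only
+    // reference.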
+ ASSERT(CodeGeneratorScope::Current()->has_valid_frame());
+ CodeGeneratorScope::Current()->frame()->Spill(target);
+ ASSERT(CodeGeneratorScope::Current()->allocator()->count(target) == 1);
+ }
+ ASSERT(is_register());
+ ASSERT(reg().is(target));
+}
+
+
+} } // namespace v8::internal
public:
-  // Register allocation is not yet implemented on x64, but C++
-  // forbids 0-length arrays so we use 1 as the number of registers.
-  static const int kNumRegisters = 16;
+  // The register allocator manages 12 of the 16 general-purpose
+  // registers: rsp, rbp, rsi (which holds the current context), and
+  // the scratch register (r10) are reserved and never allocated.
+  static const int kNumRegisters = 12;
static const int kInvalidRegister = -1;
};
// short. We need the return sequence to be at least the size of a
// call instruction to support patching the exit code in the
// debugger. See VisitReturnStatement for the full return sequence.
+ // TODO(X64): A patched call will be very long now. Make sure we
+ // have enough room.
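+  // (On x64 the patched call is a movq of a 64-bit address into a
+  // register followed by an indirect call, roughly 13 bytes, versus
+  // the 5-byte direct call used on ia32.)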
__ movq(rsp, rbp);
stack_pointer_ = frame_pointer();
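+  // Drop the frame elements above the new stack pointer from the
+  // virtual frame's bookkeeping.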
for (int i = element_count() - 1; i > stack_pointer_; i--) {