From b69591bc0fcc799b01c2ec66b2c2838714aea6ed Mon Sep 17 00:00:00 2001 From: "ager@chromium.org" Date: Tue, 22 Mar 2011 13:20:04 +0000 Subject: [PATCH] Require an isolate parameter for most external reference creation to avoid TLS access in connection with external references. Make the isolate accessible via the assembler. Only for ia32 at this point. If this looks OK to you I will port it. R=vitalyr@chromium.org Review URL: http://codereview.chromium.org/6713074 git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@7305 ce2b1a6d-e550-0410-aec6-3dcde31c8c00 --- src/arm/assembler-arm.cc | 85 ++++++------ src/arm/assembler-arm.h | 2 +- src/arm/builtins-arm.cc | 23 +-- src/arm/code-stubs-arm.cc | 98 ++++++++----- src/arm/codegen-arm.cc | 14 +- src/arm/debug-arm.cc | 6 +- src/arm/deoptimizer-arm.cc | 10 +- src/arm/full-codegen-arm.cc | 4 +- src/arm/ic-arm.cc | 31 +++-- src/arm/lithium-codegen-arm.cc | 12 +- src/arm/macro-assembler-arm.cc | 61 ++++---- src/arm/regexp-macro-assembler-arm.cc | 12 +- src/arm/stub-cache-arm.cc | 49 ++++--- src/assembler.cc | 239 +++++++++++++++++++------------- src/assembler.h | 117 +++++++++------- src/ia32/assembler-ia32.cc | 104 +++++++------- src/ia32/assembler-ia32.h | 5 +- src/ia32/builtins-ia32.cc | 28 ++-- src/ia32/code-stubs-ia32.cc | 128 ++++++++++------- src/ia32/codegen-ia32.cc | 53 +++---- src/ia32/debug-ia32.cc | 7 +- src/ia32/deoptimizer-ia32.cc | 7 +- src/ia32/full-codegen-ia32.cc | 8 +- src/ia32/ic-ia32.cc | 30 ++-- src/ia32/lithium-codegen-ia32.cc | 19 ++- src/ia32/macro-assembler-ia32.cc | 66 +++++---- src/ia32/macro-assembler-ia32.h | 10 +- src/ia32/regexp-macro-assembler-ia32.cc | 22 +-- src/ia32/stub-cache-ia32.cc | 61 ++++---- src/parser.cc | 38 ++--- src/parser.h | 3 + src/serialize.cc | 96 +++++++------ src/x64/assembler-x64.cc | 30 ++-- src/x64/assembler-x64.h | 2 +- src/x64/builtins-x64.cc | 24 ++-- src/x64/code-stubs-x64.cc | 60 ++++---- src/x64/codegen-x64.cc | 6 +- src/x64/debug-x64.cc | 7 +- 
src/x64/deoptimizer-x64.cc | 9 +- src/x64/full-codegen-x64.cc | 2 +- src/x64/ic-x64.cc | 28 ++-- src/x64/lithium-codegen-x64.cc | 12 +- src/x64/macro-assembler-x64.cc | 56 ++++---- src/x64/macro-assembler-x64.h | 9 +- src/x64/regexp-macro-assembler-x64.cc | 13 +- src/x64/stub-cache-x64.cc | 37 +++-- test/cctest/test-disasm-ia32.cc | 3 +- test/cctest/test-serialize.cc | 31 +++-- 48 files changed, 1018 insertions(+), 759 deletions(-) diff --git a/src/arm/assembler-arm.cc b/src/arm/assembler-arm.cc index b57602c..be34df9 100644 --- a/src/arm/assembler-arm.cc +++ b/src/arm/assembler-arm.cc @@ -269,19 +269,19 @@ static const int kMinimalBufferSize = 4*KB; Assembler::Assembler(void* buffer, int buffer_size) - : positions_recorder_(this), + : AssemblerBase(Isolate::Current()), + positions_recorder_(this), allow_peephole_optimization_(false), emit_debug_code_(FLAG_debug_code) { - Isolate* isolate = Isolate::Current(); allow_peephole_optimization_ = FLAG_peephole_optimization; if (buffer == NULL) { // Do our own buffer management. 
if (buffer_size <= kMinimalBufferSize) { buffer_size = kMinimalBufferSize; - if (isolate->assembler_spare_buffer() != NULL) { - buffer = isolate->assembler_spare_buffer(); - isolate->set_assembler_spare_buffer(NULL); + if (isolate()->assembler_spare_buffer() != NULL) { + buffer = isolate()->assembler_spare_buffer(); + isolate()->set_assembler_spare_buffer(NULL); } } if (buffer == NULL) { @@ -314,12 +314,11 @@ Assembler::Assembler(void* buffer, int buffer_size) Assembler::~Assembler() { - Isolate* isolate = Isolate::Current(); ASSERT(const_pool_blocked_nesting_ == 0); if (own_buffer_) { - if (isolate->assembler_spare_buffer() == NULL && + if (isolate()->assembler_spare_buffer() == NULL && buffer_size_ == kMinimalBufferSize) { - isolate->set_assembler_spare_buffer(buffer_); + isolate()->set_assembler_spare_buffer(buffer_); } else { DeleteArray(buffer_); } @@ -823,7 +822,7 @@ void Assembler::addrmod1(Instr instr, Condition cond = Instruction::ConditionField(instr); if ((instr & ~kCondMask) == 13*B21) { // mov, S not set if (x.must_use_constant_pool() || - !Isolate::Current()->cpu_features()->IsSupported(ARMv7)) { + !isolate()->cpu_features()->IsSupported(ARMv7)) { RecordRelocInfo(x.rmode_, x.imm32_); ldr(rd, MemOperand(pc, 0), cond); } else { @@ -1266,7 +1265,7 @@ void Assembler::usat(Register dst, const Operand& src, Condition cond) { // v6 and above. - ASSERT(Isolate::Current()->cpu_features()->IsSupported(ARMv7)); + ASSERT(isolate()->cpu_features()->IsSupported(ARMv7)); ASSERT(!dst.is(pc) && !src.rm_.is(pc)); ASSERT((satpos >= 0) && (satpos <= 31)); ASSERT((src.shift_op_ == ASR) || (src.shift_op_ == LSL)); @@ -1294,7 +1293,7 @@ void Assembler::ubfx(Register dst, int width, Condition cond) { // v7 and above. 
- ASSERT(Isolate::Current()->cpu_features()->IsSupported(ARMv7)); + ASSERT(isolate()->cpu_features()->IsSupported(ARMv7)); ASSERT(!dst.is(pc) && !src.is(pc)); ASSERT((lsb >= 0) && (lsb <= 31)); ASSERT((width >= 1) && (width <= (32 - lsb))); @@ -1314,7 +1313,7 @@ void Assembler::sbfx(Register dst, int width, Condition cond) { // v7 and above. - ASSERT(Isolate::Current()->cpu_features()->IsSupported(ARMv7)); + ASSERT(isolate()->cpu_features()->IsSupported(ARMv7)); ASSERT(!dst.is(pc) && !src.is(pc)); ASSERT((lsb >= 0) && (lsb <= 31)); ASSERT((width >= 1) && (width <= (32 - lsb))); @@ -1329,7 +1328,7 @@ void Assembler::sbfx(Register dst, // bfc dst, #lsb, #width void Assembler::bfc(Register dst, int lsb, int width, Condition cond) { // v7 and above. - ASSERT(Isolate::Current()->cpu_features()->IsSupported(ARMv7)); + ASSERT(isolate()->cpu_features()->IsSupported(ARMv7)); ASSERT(!dst.is(pc)); ASSERT((lsb >= 0) && (lsb <= 31)); ASSERT((width >= 1) && (width <= (32 - lsb))); @@ -1348,7 +1347,7 @@ void Assembler::bfi(Register dst, int width, Condition cond) { // v7 and above. - ASSERT(Isolate::Current()->cpu_features()->IsSupported(ARMv7)); + ASSERT(isolate()->cpu_features()->IsSupported(ARMv7)); ASSERT(!dst.is(pc) && !src.is(pc)); ASSERT((lsb >= 0) && (lsb <= 31)); ASSERT((width >= 1) && (width <= (32 - lsb))); @@ -1620,7 +1619,7 @@ void Assembler::ldrsh(Register dst, const MemOperand& src, Condition cond) { void Assembler::ldrd(Register dst1, Register dst2, const MemOperand& src, Condition cond) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(ARMv7)); + ASSERT(isolate()->cpu_features()->IsEnabled(ARMv7)); ASSERT(src.rm().is(no_reg)); ASSERT(!dst1.is(lr)); // r14. ASSERT_EQ(0, dst1.code() % 2); @@ -1635,7 +1634,7 @@ void Assembler::strd(Register src1, Register src2, ASSERT(!src1.is(lr)); // r14. 
ASSERT_EQ(0, src1.code() % 2); ASSERT_EQ(src1.code() + 1, src2.code()); - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(ARMv7)); + ASSERT(isolate()->cpu_features()->IsEnabled(ARMv7)); addrmod3(cond | B7 | B6 | B5 | B4, src1, dst); } @@ -1871,7 +1870,7 @@ void Assembler::vldr(const DwVfpRegister dst, // Instruction details available in ARM DDI 0406A, A8-628. // cond(31-28) | 1101(27-24)| U001(23-20) | Rbase(19-16) | // Vdst(15-12) | 1011(11-8) | offset - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3)); + ASSERT(isolate()->cpu_features()->IsEnabled(VFP3)); int u = 1; if (offset < 0) { offset = -offset; @@ -1913,7 +1912,7 @@ void Assembler::vldr(const SwVfpRegister dst, // Instruction details available in ARM DDI 0406A, A8-628. // cond(31-28) | 1101(27-24)| U001(23-20) | Rbase(19-16) | // Vdst(15-12) | 1010(11-8) | offset - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3)); + ASSERT(isolate()->cpu_features()->IsEnabled(VFP3)); int u = 1; if (offset < 0) { offset = -offset; @@ -1957,7 +1956,7 @@ void Assembler::vstr(const DwVfpRegister src, // Instruction details available in ARM DDI 0406A, A8-786. // cond(31-28) | 1101(27-24)| U000(23-20) | | Rbase(19-16) | // Vsrc(15-12) | 1011(11-8) | (offset/4) - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3)); + ASSERT(isolate()->cpu_features()->IsEnabled(VFP3)); int u = 1; if (offset < 0) { offset = -offset; @@ -1998,7 +1997,7 @@ void Assembler::vstr(const SwVfpRegister src, // Instruction details available in ARM DDI 0406A, A8-786. // cond(31-28) | 1101(27-24)| U000(23-20) | Rbase(19-16) | // Vdst(15-12) | 1010(11-8) | (offset/4) - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3)); + ASSERT(isolate()->cpu_features()->IsEnabled(VFP3)); int u = 1; if (offset < 0) { offset = -offset; @@ -2097,7 +2096,7 @@ void Assembler::vmov(const DwVfpRegister dst, const Condition cond) { // Dd = immediate // Instruction details available in ARM DDI 0406B, A8-640. 
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3)); + ASSERT(isolate()->cpu_features()->IsEnabled(VFP3)); uint32_t enc; if (FitsVMOVDoubleImmediate(imm, &enc)) { @@ -2134,7 +2133,7 @@ void Assembler::vmov(const SwVfpRegister dst, const Condition cond) { // Sd = Sm // Instruction details available in ARM DDI 0406B, A8-642. - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3)); + ASSERT(isolate()->cpu_features()->IsEnabled(VFP3)); int sd, d, sm, m; dst.split_code(&sd, &d); src.split_code(&sm, &m); @@ -2147,7 +2146,7 @@ void Assembler::vmov(const DwVfpRegister dst, const Condition cond) { // Dd = Dm // Instruction details available in ARM DDI 0406B, A8-642. - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3)); + ASSERT(isolate()->cpu_features()->IsEnabled(VFP3)); emit(cond | 0xE*B24 | 0xB*B20 | dst.code()*B12 | 0x5*B9 | B8 | B6 | src.code()); } @@ -2161,7 +2160,7 @@ void Assembler::vmov(const DwVfpRegister dst, // Instruction details available in ARM DDI 0406A, A8-646. // cond(31-28) | 1100(27-24)| 010(23-21) | op=0(20) | Rt2(19-16) | // Rt(15-12) | 1011(11-8) | 00(7-6) | M(5) | 1(4) | Vm - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3)); + ASSERT(isolate()->cpu_features()->IsEnabled(VFP3)); ASSERT(!src1.is(pc) && !src2.is(pc)); emit(cond | 0xC*B24 | B22 | src2.code()*B16 | src1.code()*B12 | 0xB*B8 | B4 | dst.code()); @@ -2176,7 +2175,7 @@ void Assembler::vmov(const Register dst1, // Instruction details available in ARM DDI 0406A, A8-646. 
// cond(31-28) | 1100(27-24)| 010(23-21) | op=1(20) | Rt2(19-16) | // Rt(15-12) | 1011(11-8) | 00(7-6) | M(5) | 1(4) | Vm - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3)); + ASSERT(isolate()->cpu_features()->IsEnabled(VFP3)); ASSERT(!dst1.is(pc) && !dst2.is(pc)); emit(cond | 0xC*B24 | B22 | B20 | dst2.code()*B16 | dst1.code()*B12 | 0xB*B8 | B4 | src.code()); @@ -2190,7 +2189,7 @@ void Assembler::vmov(const SwVfpRegister dst, // Instruction details available in ARM DDI 0406A, A8-642. // cond(31-28) | 1110(27-24)| 000(23-21) | op=0(20) | Vn(19-16) | // Rt(15-12) | 1010(11-8) | N(7)=0 | 00(6-5) | 1(4) | 0000(3-0) - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3)); + ASSERT(isolate()->cpu_features()->IsEnabled(VFP3)); ASSERT(!src.is(pc)); int sn, n; dst.split_code(&sn, &n); @@ -2205,7 +2204,7 @@ void Assembler::vmov(const Register dst, // Instruction details available in ARM DDI 0406A, A8-642. // cond(31-28) | 1110(27-24)| 000(23-21) | op=1(20) | Vn(19-16) | // Rt(15-12) | 1010(11-8) | N(7)=0 | 00(6-5) | 1(4) | 0000(3-0) - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3)); + ASSERT(isolate()->cpu_features()->IsEnabled(VFP3)); ASSERT(!dst.is(pc)); int sn, n; src.split_code(&sn, &n); @@ -2330,7 +2329,7 @@ void Assembler::vcvt_f64_s32(const DwVfpRegister dst, const SwVfpRegister src, VFPConversionMode mode, const Condition cond) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3)); + ASSERT(isolate()->cpu_features()->IsEnabled(VFP3)); emit(EncodeVCVT(F64, dst.code(), S32, src.code(), mode, cond)); } @@ -2339,7 +2338,7 @@ void Assembler::vcvt_f32_s32(const SwVfpRegister dst, const SwVfpRegister src, VFPConversionMode mode, const Condition cond) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3)); + ASSERT(isolate()->cpu_features()->IsEnabled(VFP3)); emit(EncodeVCVT(F32, dst.code(), S32, src.code(), mode, cond)); } @@ -2348,7 +2347,7 @@ void Assembler::vcvt_f64_u32(const DwVfpRegister dst, const SwVfpRegister src, 
VFPConversionMode mode, const Condition cond) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3)); + ASSERT(isolate()->cpu_features()->IsEnabled(VFP3)); emit(EncodeVCVT(F64, dst.code(), U32, src.code(), mode, cond)); } @@ -2357,7 +2356,7 @@ void Assembler::vcvt_s32_f64(const SwVfpRegister dst, const DwVfpRegister src, VFPConversionMode mode, const Condition cond) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3)); + ASSERT(isolate()->cpu_features()->IsEnabled(VFP3)); emit(EncodeVCVT(S32, dst.code(), F64, src.code(), mode, cond)); } @@ -2366,7 +2365,7 @@ void Assembler::vcvt_u32_f64(const SwVfpRegister dst, const DwVfpRegister src, VFPConversionMode mode, const Condition cond) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3)); + ASSERT(isolate()->cpu_features()->IsEnabled(VFP3)); emit(EncodeVCVT(U32, dst.code(), F64, src.code(), mode, cond)); } @@ -2375,7 +2374,7 @@ void Assembler::vcvt_f64_f32(const DwVfpRegister dst, const SwVfpRegister src, VFPConversionMode mode, const Condition cond) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3)); + ASSERT(isolate()->cpu_features()->IsEnabled(VFP3)); emit(EncodeVCVT(F64, dst.code(), F32, src.code(), mode, cond)); } @@ -2384,7 +2383,7 @@ void Assembler::vcvt_f32_f64(const SwVfpRegister dst, const DwVfpRegister src, VFPConversionMode mode, const Condition cond) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3)); + ASSERT(isolate()->cpu_features()->IsEnabled(VFP3)); emit(EncodeVCVT(F32, dst.code(), F64, src.code(), mode, cond)); } @@ -2414,7 +2413,7 @@ void Assembler::vadd(const DwVfpRegister dst, // Instruction details available in ARM DDI 0406A, A8-536. 
// cond(31-28) | 11100(27-23)| D=?(22) | 11(21-20) | Vn(19-16) | // Vd(15-12) | 101(11-9) | sz(8)=1 | N(7)=0 | 0(6) | M=?(5) | 0(4) | Vm(3-0) - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3)); + ASSERT(isolate()->cpu_features()->IsEnabled(VFP3)); emit(cond | 0xE*B24 | 0x3*B20 | src1.code()*B16 | dst.code()*B12 | 0x5*B9 | B8 | src2.code()); } @@ -2429,7 +2428,7 @@ void Assembler::vsub(const DwVfpRegister dst, // Instruction details available in ARM DDI 0406A, A8-784. // cond(31-28) | 11100(27-23)| D=?(22) | 11(21-20) | Vn(19-16) | // Vd(15-12) | 101(11-9) | sz(8)=1 | N(7)=0 | 1(6) | M=?(5) | 0(4) | Vm(3-0) - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3)); + ASSERT(isolate()->cpu_features()->IsEnabled(VFP3)); emit(cond | 0xE*B24 | 0x3*B20 | src1.code()*B16 | dst.code()*B12 | 0x5*B9 | B8 | B6 | src2.code()); } @@ -2444,7 +2443,7 @@ void Assembler::vmul(const DwVfpRegister dst, // Instruction details available in ARM DDI 0406A, A8-784. // cond(31-28) | 11100(27-23)| D=?(22) | 10(21-20) | Vn(19-16) | // Vd(15-12) | 101(11-9) | sz(8)=1 | N(7)=0 | 0(6) | M=?(5) | 0(4) | Vm(3-0) - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3)); + ASSERT(isolate()->cpu_features()->IsEnabled(VFP3)); emit(cond | 0xE*B24 | 0x2*B20 | src1.code()*B16 | dst.code()*B12 | 0x5*B9 | B8 | src2.code()); } @@ -2459,7 +2458,7 @@ void Assembler::vdiv(const DwVfpRegister dst, // Instruction details available in ARM DDI 0406A, A8-584. // cond(31-28) | 11101(27-23)| D=?(22) | 00(21-20) | Vn(19-16) | // Vd(15-12) | 101(11-9) | sz(8)=1 | N(7)=? | 0(6) | M=?(5) | 0(4) | Vm(3-0) - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3)); + ASSERT(isolate()->cpu_features()->IsEnabled(VFP3)); emit(cond | 0xE*B24 | B23 | src1.code()*B16 | dst.code()*B12 | 0x5*B9 | B8 | src2.code()); } @@ -2472,7 +2471,7 @@ void Assembler::vcmp(const DwVfpRegister src1, // Instruction details available in ARM DDI 0406A, A8-570. 
// cond(31-28) | 11101 (27-23)| D=?(22) | 11 (21-20) | 0100 (19-16) | // Vd(15-12) | 101(11-9) | sz(8)=1 | E(7)=0 | 1(6) | M(5)=? | 0(4) | Vm(3-0) - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3)); + ASSERT(isolate()->cpu_features()->IsEnabled(VFP3)); emit(cond | 0xE*B24 |B23 | 0x3*B20 | B18 | src1.code()*B12 | 0x5*B9 | B8 | B6 | src2.code()); } @@ -2485,7 +2484,7 @@ void Assembler::vcmp(const DwVfpRegister src1, // Instruction details available in ARM DDI 0406A, A8-570. // cond(31-28) | 11101 (27-23)| D=?(22) | 11 (21-20) | 0101 (19-16) | // Vd(15-12) | 101(11-9) | sz(8)=1 | E(7)=0 | 1(6) | M(5)=? | 0(4) | 0000(3-0) - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3)); + ASSERT(isolate()->cpu_features()->IsEnabled(VFP3)); ASSERT(src2 == 0.0); emit(cond | 0xE*B24 |B23 | 0x3*B20 | B18 | B16 | src1.code()*B12 | 0x5*B9 | B8 | B6); @@ -2496,7 +2495,7 @@ void Assembler::vmsr(Register dst, Condition cond) { // Instruction details available in ARM DDI 0406A, A8-652. // cond(31-28) | 1110 (27-24) | 1110(23-20)| 0001 (19-16) | // Rt(15-12) | 1010 (11-8) | 0(7) | 00 (6-5) | 1(4) | 0000(3-0) - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3)); + ASSERT(isolate()->cpu_features()->IsEnabled(VFP3)); emit(cond | 0xE*B24 | 0xE*B20 | B16 | dst.code()*B12 | 0xA*B8 | B4); } @@ -2506,7 +2505,7 @@ void Assembler::vmrs(Register dst, Condition cond) { // Instruction details available in ARM DDI 0406A, A8-652. // cond(31-28) | 1110 (27-24) | 1111(23-20)| 0001 (19-16) | // Rt(15-12) | 1010 (11-8) | 0(7) | 00 (6-5) | 1(4) | 0000(3-0) - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3)); + ASSERT(isolate()->cpu_features()->IsEnabled(VFP3)); emit(cond | 0xE*B24 | 0xF*B20 | B16 | dst.code()*B12 | 0xA*B8 | B4); } @@ -2517,7 +2516,7 @@ void Assembler::vsqrt(const DwVfpRegister dst, const Condition cond) { // cond(31-28) | 11101 (27-23)| D=?(22) | 11 (21-20) | 0001 (19-16) | // Vd(15-12) | 101(11-9) | sz(8)=1 | 11 (7-6) | M(5)=? 
| 0(4) | Vm(3-0) - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3)); + ASSERT(isolate()->cpu_features()->IsEnabled(VFP3)); emit(cond | 0xE*B24 | B23 | 0x3*B20 | B16 | dst.code()*B12 | 0x5*B9 | B8 | 3*B6 | src.code()); } diff --git a/src/arm/assembler-arm.h b/src/arm/assembler-arm.h index cf42dc8..91e6244 100644 --- a/src/arm/assembler-arm.h +++ b/src/arm/assembler-arm.h @@ -549,7 +549,7 @@ extern const Instr kAndBicFlip; -class Assembler : public Malloced { +class Assembler : public AssemblerBase { public: // Create an assembler. Instructions and relocation information are emitted // into a buffer, with the instructions starting from the beginning and the diff --git a/src/arm/builtins-arm.cc b/src/arm/builtins-arm.cc index bbf2e06..e6d6525 100644 --- a/src/arm/builtins-arm.cc +++ b/src/arm/builtins-arm.cc @@ -68,7 +68,7 @@ void Builtins::Generate_Adaptor(MacroAssembler* masm, // JumpToExternalReference expects r0 to contain the number of arguments // including the receiver and the extra arguments. __ add(r0, r0, Operand(num_extra_args + 1)); - __ JumpToExternalReference(ExternalReference(id)); + __ JumpToExternalReference(ExternalReference(id, masm->isolate())); } @@ -442,7 +442,7 @@ void Builtins::Generate_ArrayCode(MacroAssembler* masm) { // Jump to the generic array code if the specialized code cannot handle // the construction. __ bind(&generic_array_code); - Code* code = Isolate::Current()->builtins()->builtin( + Code* code = masm->isolate()->builtins()->builtin( Builtins::ArrayCodeGeneric); Handle array_code(code); __ Jump(array_code, RelocInfo::CODE_TARGET); @@ -475,7 +475,7 @@ void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) { // Jump to the generic construct code in case the specialized code cannot // handle the construction. 
__ bind(&generic_constructor); - Code* code = Isolate::Current()->builtins()->builtin( + Code* code = masm->isolate()->builtins()->builtin( Builtins::JSConstructStubGeneric); Handle generic_construct_stub(code); __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET); @@ -635,7 +635,7 @@ void Builtins::Generate_JSConstructCall(MacroAssembler* masm) { // Set expected number of arguments to zero (not changing r0). __ mov(r2, Operand(0, RelocInfo::NONE)); __ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR); - __ Jump(Handle(Isolate::Current()->builtins()->builtin( + __ Jump(Handle(masm->isolate()->builtins()->builtin( ArgumentsAdaptorTrampoline)), RelocInfo::CODE_TARGET); } @@ -661,7 +661,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, Label undo_allocation; #ifdef ENABLE_DEBUGGER_SUPPORT ExternalReference debug_step_in_fp = - ExternalReference::debug_step_in_fp_address(); + ExternalReference::debug_step_in_fp_address(masm->isolate()); __ mov(r2, Operand(debug_step_in_fp)); __ ldr(r2, MemOperand(r2)); __ tst(r2, r2); @@ -908,7 +908,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, if (is_api_function) { __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); Handle code = Handle( - Isolate::Current()->builtins()->builtin( + masm->isolate()->builtins()->builtin( Builtins::HandleApiCallConstruct)); ParameterCount expected(0); __ InvokeCode(code, expected, expected, @@ -1006,7 +1006,8 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); // Set up the roots register. - ExternalReference roots_address = ExternalReference::roots_address(); + ExternalReference roots_address = + ExternalReference::roots_address(masm->isolate()); __ mov(r10, Operand(roots_address)); // Push the function and the receiver onto the stack. 
@@ -1042,7 +1043,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, // Invoke the code and pass argc as r0. __ mov(r0, Operand(r3)); if (is_construct) { - __ Call(Handle(Isolate::Current()->builtins()->builtin( + __ Call(Handle(masm->isolate()->builtins()->builtin( Builtins::JSConstructCall)), RelocInfo::CODE_TARGET); } else { ParameterCount actual(r0); @@ -1172,7 +1173,7 @@ void Builtins::Generate_NotifyOSR(MacroAssembler* masm) { void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { // Probe the CPU to set the supported features, because this builtin // may be called before the initialization performs CPU setup. - Isolate::Current()->cpu_features()->Probe(false); + masm->isolate()->cpu_features()->Probe(false); // Lookup the function in the JavaScript frame and push it as an // argument to the on-stack replacement function. @@ -1335,7 +1336,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) { // Expected number of arguments is 0 for CALL_NON_FUNCTION. __ mov(r2, Operand(0, RelocInfo::NONE)); __ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION); - __ Jump(Handle(Isolate::Current()->builtins()->builtin( + __ Jump(Handle(masm->isolate()->builtins()->builtin( ArgumentsAdaptorTrampoline)), RelocInfo::CODE_TARGET); __ bind(&function); } @@ -1351,7 +1352,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) { __ mov(r2, Operand(r2, ASR, kSmiTagSize)); __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset)); __ cmp(r2, r0); // Check formal and actual parameter counts. 
- __ Jump(Handle(Isolate::Current()->builtins()->builtin( + __ Jump(Handle(masm->isolate()->builtins()->builtin( ArgumentsAdaptorTrampoline)), RelocInfo::CODE_TARGET, ne); ParameterCount expected(0); diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc index 61d9d83..e7b563c 100644 --- a/src/arm/code-stubs-arm.cc +++ b/src/arm/code-stubs-arm.cc @@ -949,7 +949,8 @@ void FloatingPointHelper::CallCCodeForDoubleOperation( __ push(lr); __ PrepareCallCFunction(4, scratch); // Two doubles are 4 arguments. // Call C routine that may not cause GC or other trouble. - __ CallCFunction(ExternalReference::double_fp_operation(op), 4); + __ CallCFunction(ExternalReference::double_fp_operation(op, masm->isolate()), + 4); // Store answer in the overwritable heap number. #if !defined(USE_ARM_EABI) // Double returned in fp coprocessor register 0 and 1, encoded as @@ -1304,7 +1305,7 @@ static void EmitTwoNonNanDoubleComparison(MacroAssembler* masm, // Call C routine that may not cause GC or other trouble. __ push(lr); __ PrepareCallCFunction(4, r5); // Two doubles count as 4 arguments. - __ CallCFunction(ExternalReference::compare_doubles(), 4); + __ CallCFunction(ExternalReference::compare_doubles(masm->isolate()), 4); __ pop(pc); // Return. } } @@ -2018,7 +2019,8 @@ void GenericBinaryOpStub::HandleBinaryOpSlowCases( __ PrepareCallCFunction(4, r4); // Two doubles count as 4 arguments. // Call C routine that may not cause GC or other trouble. r5 is callee // save. - __ CallCFunction(ExternalReference::double_fp_operation(op_), 4); + __ CallCFunction( + ExternalReference::double_fp_operation(op_, masm->isolate()), 4); // Store answer in the overwritable heap number. 
#if !defined(USE_ARM_EABI) // Double returned in fp coprocessor register 0 and 1, encoded as @@ -2822,7 +2824,7 @@ void GenericBinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { __ Push(r2, r1, r0); __ TailCallExternalReference( - ExternalReference(IC_Utility(IC::kBinaryOp_Patch)), + ExternalReference(IC_Utility(IC::kBinaryOp_Patch), masm->isolate()), 5, 1); } @@ -2853,7 +2855,8 @@ void TypeRecordingBinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { __ Push(r2, r1, r0); __ TailCallExternalReference( - ExternalReference(IC_Utility(IC::kTypeRecordingBinaryOp_Patch)), + ExternalReference(IC_Utility(IC::kTypeRecordingBinaryOp_Patch), + masm->isolate()), 5, 1); } @@ -3797,11 +3800,14 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) { // r2 = low 32 bits of double value. // r3 = high 32 bits of double value. // r1 = TranscendentalCache::hash(double value). - __ mov(cache_entry, - Operand(ExternalReference::transcendental_cache_array_address())); - // r0 points to cache array. - __ ldr(cache_entry, MemOperand(cache_entry, type_ * sizeof( - Isolate::Current()->transcendental_cache()->caches_[0]))); + Isolate* isolate = masm->isolate(); + ExternalReference cache_array = + ExternalReference::transcendental_cache_array_address(isolate); + __ mov(cache_entry, Operand(cache_array)); + // cache_entry points to cache array. + int cache_array_index + = type_ * sizeof(isolate->transcendental_cache()->caches_[0]); + __ ldr(cache_entry, MemOperand(cache_entry, cache_array_index)); // r0 points to the cache for the type type_. // If NULL, the cache hasn't been initialized yet, so go through runtime. 
__ cmp(cache_entry, Operand(0, RelocInfo::NONE)); @@ -3846,7 +3852,9 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) { __ bind(&calculate); if (tagged) { __ bind(&invalid_cache); - __ TailCallExternalReference(ExternalReference(RuntimeFunction()), 1, 1); + ExternalReference runtime_function = + ExternalReference(RuntimeFunction(), masm->isolate()); + __ TailCallExternalReference(runtime_function, 1, 1); } else { if (!Isolate::Current()->cpu_features()->IsSupported(VFP3)) UNREACHABLE(); CpuFeatures::Scope scope(VFP3); @@ -3909,18 +3917,20 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) { void TranscendentalCacheStub::GenerateCallCFunction(MacroAssembler* masm, Register scratch) { + Isolate* isolate = masm->isolate(); + __ push(lr); __ PrepareCallCFunction(2, scratch); __ vmov(r0, r1, d2); switch (type_) { case TranscendentalCache::SIN: - __ CallCFunction(ExternalReference::math_sin_double_function(), 2); + __ CallCFunction(ExternalReference::math_sin_double_function(isolate), 2); break; case TranscendentalCache::COS: - __ CallCFunction(ExternalReference::math_cos_double_function(), 2); + __ CallCFunction(ExternalReference::math_cos_double_function(isolate), 2); break; case TranscendentalCache::LOG: - __ CallCFunction(ExternalReference::math_log_double_function(), 2); + __ CallCFunction(ExternalReference::math_log_double_function(isolate), 2); break; default: UNIMPLEMENTED(); @@ -4140,7 +4150,8 @@ void MathPowStub::Generate(MacroAssembler* masm) { __ PrepareCallCFunction(3, scratch); __ mov(r2, exponent); __ vmov(r0, r1, double_base); - __ CallCFunction(ExternalReference::power_double_int_function(), 3); + __ CallCFunction( + ExternalReference::power_double_int_function(masm->isolate()), 3); __ pop(lr); __ GetCFunctionDoubleResult(double_result); __ vstr(double_result, @@ -4169,7 +4180,8 @@ void MathPowStub::Generate(MacroAssembler* masm) { __ PrepareCallCFunction(4, scratch); __ vmov(r0, r1, double_base); __ vmov(r2, r3, 
double_exponent); - __ CallCFunction(ExternalReference::power_double_double_function(), 4); + __ CallCFunction( + ExternalReference::power_double_double_function(masm->isolate()), 4); __ pop(lr); __ GetCFunctionDoubleResult(double_result); __ vstr(double_result, @@ -4209,15 +4221,16 @@ void CEntryStub::GenerateCore(MacroAssembler* masm, // r4: number of arguments including receiver (C callee-saved) // r5: pointer to builtin function (C callee-saved) // r6: pointer to the first argument (C callee-saved) + Isolate* isolate = masm->isolate(); if (do_gc) { // Passing r0. __ PrepareCallCFunction(1, r1); - __ CallCFunction(ExternalReference::perform_gc_function(), 1); + __ CallCFunction(ExternalReference::perform_gc_function(isolate), 1); } ExternalReference scope_depth = - ExternalReference::heap_always_allocate_scope_depth(); + ExternalReference::heap_always_allocate_scope_depth(isolate); if (always_allocate) { __ mov(r0, Operand(scope_depth)); __ ldr(r1, MemOperand(r0)); @@ -4302,15 +4315,16 @@ void CEntryStub::GenerateCore(MacroAssembler* masm, __ b(eq, throw_out_of_memory_exception); // Retrieve the pending exception and clear the variable. - __ mov(ip, Operand(ExternalReference::the_hole_value_location())); + __ mov(ip, Operand(ExternalReference::the_hole_value_location(isolate))); __ ldr(r3, MemOperand(ip)); - __ mov(ip, Operand(ExternalReference(Isolate::k_pending_exception_address))); + __ mov(ip, Operand(ExternalReference(Isolate::k_pending_exception_address, + isolate))); __ ldr(r0, MemOperand(ip)); __ str(r3, MemOperand(ip)); // Special handling of termination exceptions which are uncatchable // by javascript code. - __ cmp(r0, Operand(FACTORY->termination_exception())); + __ cmp(r0, Operand(isolate->factory()->termination_exception())); __ b(eq, throw_termination_exception); // Handle normal exception. 
@@ -4418,11 +4432,13 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { // r2: receiver // r3: argc // r4: argv + Isolate* isolate = masm->isolate(); __ mov(r8, Operand(-1)); // Push a bad frame pointer to fail if it is used. int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY; __ mov(r7, Operand(Smi::FromInt(marker))); __ mov(r6, Operand(Smi::FromInt(marker))); - __ mov(r5, Operand(ExternalReference(Isolate::k_c_entry_fp_address))); + __ mov(r5, + Operand(ExternalReference(Isolate::k_c_entry_fp_address, isolate))); __ ldr(r5, MemOperand(r5)); __ Push(r8, r7, r6, r5); @@ -4431,7 +4447,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { #ifdef ENABLE_LOGGING_AND_PROFILING // If this is the outermost JS call, set js_entry_sp value. - ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address); + ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address, isolate); __ mov(r5, Operand(ExternalReference(js_entry_sp))); __ ldr(r6, MemOperand(r5)); __ cmp(r6, Operand(0, RelocInfo::NONE)); @@ -4445,7 +4461,8 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { // exception field in the JSEnv and return a failure sentinel. // Coming in here the fp will be invalid because the PushTryHandler below // sets it to 0 to signal the existence of the JSEntry frame. - __ mov(ip, Operand(ExternalReference(Isolate::k_pending_exception_address))); + __ mov(ip, Operand(ExternalReference(Isolate::k_pending_exception_address, + isolate))); __ str(r0, MemOperand(ip)); __ mov(r0, Operand(reinterpret_cast(Failure::Exception()))); __ b(&exit); @@ -4460,9 +4477,10 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { // saved values before returning a failure to C. // Clear any pending exceptions. 
- __ mov(ip, Operand(ExternalReference::the_hole_value_location())); + __ mov(ip, Operand(ExternalReference::the_hole_value_location(isolate))); __ ldr(r5, MemOperand(ip)); - __ mov(ip, Operand(ExternalReference(Isolate::k_pending_exception_address))); + __ mov(ip, Operand(ExternalReference(Isolate::k_pending_exception_address, + isolate))); __ str(r5, MemOperand(ip)); // Invoke the function by calling through JS entry trampoline builtin. @@ -4476,10 +4494,11 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { // r3: argc // r4: argv if (is_construct) { - ExternalReference construct_entry(Builtins::JSConstructEntryTrampoline); + ExternalReference construct_entry(Builtins::JSConstructEntryTrampoline, + isolate); __ mov(ip, Operand(construct_entry)); } else { - ExternalReference entry(Builtins::JSEntryTrampoline); + ExternalReference entry(Builtins::JSEntryTrampoline, isolate); __ mov(ip, Operand(entry)); } __ ldr(ip, MemOperand(ip)); // deref address @@ -4495,7 +4514,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { // displacement since the current stack pointer (sp) points directly // to the stack handler. __ ldr(r3, MemOperand(sp, StackHandlerConstants::kNextOffset)); - __ mov(ip, Operand(ExternalReference(Isolate::k_handler_address))); + __ mov(ip, Operand(ExternalReference(Isolate::k_handler_address, isolate))); __ str(r3, MemOperand(ip)); // No need to restore registers __ add(sp, sp, Operand(StackHandlerConstants::kSize)); @@ -4513,7 +4532,8 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { __ bind(&exit); // r0 holds result // Restore the top frame descriptors from the stack. __ pop(r3); - __ mov(ip, Operand(ExternalReference(Isolate::k_c_entry_fp_address))); + __ mov(ip, + Operand(ExternalReference(Isolate::k_c_entry_fp_address, isolate))); __ str(r3, MemOperand(ip)); // Reset the stack to the callee saved registers. 
@@ -4909,10 +4929,11 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { Register last_match_info_elements = r6; // Ensure that a RegExp stack is allocated. + Isolate* isolate = masm->isolate(); ExternalReference address_of_regexp_stack_memory_address = - ExternalReference::address_of_regexp_stack_memory_address(); + ExternalReference::address_of_regexp_stack_memory_address(isolate); ExternalReference address_of_regexp_stack_memory_size = - ExternalReference::address_of_regexp_stack_memory_size(); + ExternalReference::address_of_regexp_stack_memory_size(isolate); __ mov(r0, Operand(address_of_regexp_stack_memory_size)); __ ldr(r0, MemOperand(r0, 0)); __ tst(r0, Operand(r0)); @@ -5095,7 +5116,8 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { __ str(r0, MemOperand(sp, 2 * kPointerSize)); // Argument 5 (sp[4]): static offsets vector buffer. - __ mov(r0, Operand(ExternalReference::address_of_static_offsets_vector())); + __ mov(r0, + Operand(ExternalReference::address_of_static_offsets_vector(isolate))); __ str(r0, MemOperand(sp, 1 * kPointerSize)); // For arguments 4 and 3 get string length, calculate start of string data and @@ -5143,9 +5165,10 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { // stack overflow (on the backtrack stack) was detected in RegExp code but // haven't created the exception yet. Handle that in the runtime system. // TODO(592): Rerunning the RegExp to get the stack overflow exception. 
- __ mov(r1, Operand(ExternalReference::the_hole_value_location())); + __ mov(r1, Operand(ExternalReference::the_hole_value_location(isolate))); __ ldr(r1, MemOperand(r1, 0)); - __ mov(r2, Operand(ExternalReference(Isolate::k_pending_exception_address))); + __ mov(r2, Operand(ExternalReference(Isolate::k_pending_exception_address, + isolate))); __ ldr(r0, MemOperand(r2, 0)); __ cmp(r0, r1); __ b(eq, &runtime); @@ -5198,7 +5221,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { // Get the static offsets vector filled by the native regexp code. ExternalReference address_of_static_offsets_vector = - ExternalReference::address_of_static_offsets_vector(); + ExternalReference::address_of_static_offsets_vector(isolate); __ mov(r2, Operand(address_of_static_offsets_vector)); // r1: number of capture registers @@ -6793,7 +6816,8 @@ void ICCompareStub::GenerateMiss(MacroAssembler* masm) { __ push(lr); // Call the runtime system in a fresh internal frame. - ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss)); + ExternalReference miss = + ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate()); __ EnterInternalFrame(); __ Push(r1, r0); __ mov(ip, Operand(Smi::FromInt(op_))); diff --git a/src/arm/codegen-arm.cc b/src/arm/codegen-arm.cc index 12d5c08..5e3b7d0 100644 --- a/src/arm/codegen-arm.cc +++ b/src/arm/codegen-arm.cc @@ -2859,7 +2859,7 @@ void CodeGenerator::VisitTryCatchStatement(TryCatchStatement* node) { function_return_is_shadowed_ = function_return_was_shadowed; // Get an external reference to the handler address. - ExternalReference handler_address(Isolate::k_handler_address); + ExternalReference handler_address(Isolate::k_handler_address, isolate()); // If we can fall off the end of the try block, unlink from try chain. 
if (has_valid_frame()) { @@ -2975,7 +2975,7 @@ void CodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* node) { function_return_is_shadowed_ = function_return_was_shadowed; // Get an external reference to the handler address. - ExternalReference handler_address(Isolate::k_handler_address); + ExternalReference handler_address(Isolate::k_handler_address, isolate()); // If we can fall off the end of the try block, unlink from the try // chain and set the state on the frame to FALLING. @@ -5361,7 +5361,7 @@ void CodeGenerator::GenerateRandomHeapNumber( // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)). if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) { __ PrepareCallCFunction(0, r1); - __ CallCFunction(ExternalReference::random_uint32_function(), 0); + __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 0); CpuFeatures::Scope scope(VFP3); // 0x41300000 is the top half of 1.0 x 2^20 as a double. @@ -5382,7 +5382,7 @@ void CodeGenerator::GenerateRandomHeapNumber( __ mov(r0, Operand(r4)); __ PrepareCallCFunction(1, r1); __ CallCFunction( - ExternalReference::fill_heap_number_with_random_function(), 1); + ExternalReference::fill_heap_number_with_random_function(isolate()), 1); frame_->EmitPush(r0); } } @@ -7204,8 +7204,10 @@ void CodeGenerator::EmitKeyedStore(StaticType* key_type, __ ldr(scratch1, FieldMemOperand(receiver, JSObject::kElementsOffset)); if (!value_is_harmless && wb_info != LIKELY_SMI) { Label ok; - __ and_(scratch2, scratch1, Operand(ExternalReference::new_space_mask())); - __ cmp(scratch2, Operand(ExternalReference::new_space_start())); + __ and_(scratch2, + scratch1, + Operand(ExternalReference::new_space_mask(isolate()))); + __ cmp(scratch2, Operand(ExternalReference::new_space_start(isolate()))); __ tst(value, Operand(kSmiTagMask), ne); deferred->Branch(ne); #ifdef DEBUG diff --git a/src/arm/debug-arm.cc b/src/arm/debug-arm.cc index 93fa5c3..e6ad98c 100644 --- a/src/arm/debug-arm.cc +++ b/src/arm/debug-arm.cc 
@@ -159,7 +159,7 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm, __ RecordComment("// Calling from debug break to runtime - come in - over"); #endif __ mov(r0, Operand(0, RelocInfo::NONE)); // no arguments - __ mov(r1, Operand(ExternalReference::debug_break())); + __ mov(r1, Operand(ExternalReference::debug_break(masm->isolate()))); CEntryStub ceb(1); __ CallStub(&ceb); @@ -185,7 +185,9 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm, // Now that the break point has been handled, resume normal execution by // jumping to the target address intended by the caller and that was // overwritten by the address of DebugBreakXXX. - __ mov(ip, Operand(ExternalReference(Debug_Address::AfterBreakTarget()))); + ExternalReference after_break_target = + ExternalReference(Debug_Address::AfterBreakTarget(), masm->isolate()); + __ mov(ip, Operand(after_break_target)); __ ldr(ip, MemOperand(ip)); __ Jump(ip); } diff --git a/src/arm/deoptimizer-arm.cc b/src/arm/deoptimizer-arm.cc index 8e970ec..f1fde92 100644 --- a/src/arm/deoptimizer-arm.cc +++ b/src/arm/deoptimizer-arm.cc @@ -513,6 +513,9 @@ void Deoptimizer::DoComputeFrame(TranslationIterator* iterator, // easily ported. void Deoptimizer::EntryGenerator::Generate() { GeneratePrologue(); + + Isolate* isolate = masm()->isolate(); + CpuFeatures::Scope scope(VFP3); // Save all general purpose registers before messing with them. const int kNumberOfRegisters = Register::kNumRegisters; @@ -567,7 +570,7 @@ void Deoptimizer::EntryGenerator::Generate() { // r3: code address or 0 already loaded. __ str(r4, MemOperand(sp, 0 * kPointerSize)); // Fp-to-sp delta. // Call Deoptimizer::New(). 
- __ CallCFunction(ExternalReference::new_deoptimizer_function(), 5); + __ CallCFunction(ExternalReference::new_deoptimizer_function(isolate), 5); // Preserve "deoptimizer" object in register r0 and get the input // frame descriptor pointer to r1 (deoptimizer->input_); @@ -621,7 +624,8 @@ void Deoptimizer::EntryGenerator::Generate() { // r0: deoptimizer object; r1: scratch. __ PrepareCallCFunction(1, r1); // Call Deoptimizer::ComputeOutputFrames(). - __ CallCFunction(ExternalReference::compute_output_frames_function(), 1); + __ CallCFunction( + ExternalReference::compute_output_frames_function(isolate), 1); __ pop(r0); // Restore deoptimizer object (class Deoptimizer). // Replace the current (input) frame with the output frames. @@ -671,7 +675,7 @@ void Deoptimizer::EntryGenerator::Generate() { __ pop(ip); // remove lr // Set up the roots register. - ExternalReference roots_address = ExternalReference::roots_address(); + ExternalReference roots_address = ExternalReference::roots_address(isolate); __ mov(r10, Operand(roots_address)); __ pop(ip); // remove pc diff --git a/src/arm/full-codegen-arm.cc b/src/arm/full-codegen-arm.cc index c308e3b..2edfd0a 100644 --- a/src/arm/full-codegen-arm.cc +++ b/src/arm/full-codegen-arm.cc @@ -2811,7 +2811,7 @@ void FullCodeGenerator::EmitRandomHeapNumber(ZoneList* args) { // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)). if (isolate()->cpu_features()->IsSupported(VFP3)) { __ PrepareCallCFunction(0, r1); - __ CallCFunction(ExternalReference::random_uint32_function(), 0); + __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 0); CpuFeatures::Scope scope(VFP3); // 0x41300000 is the top half of 1.0 x 2^20 as a double. 
@@ -2832,7 +2832,7 @@ void FullCodeGenerator::EmitRandomHeapNumber(ZoneList<Expression*>* args) { __ mov(r0, Operand(r4)); __ PrepareCallCFunction(1, r1); __ CallCFunction( - ExternalReference::fill_heap_number_with_random_function(), 1); + ExternalReference::fill_heap_number_with_random_function(isolate()), 1); } context()->Plug(r0); diff --git a/src/arm/ic-arm.cc b/src/arm/ic-arm.cc index d92406b..9f9b63c 100644 --- a/src/arm/ic-arm.cc +++ b/src/arm/ic-arm.cc @@ -663,7 +663,7 @@ static void GenerateCallMiss(MacroAssembler* masm, int argc, IC::UtilityId id) { // Call the entry. __ mov(r0, Operand(2)); - __ mov(r1, Operand(ExternalReference(IC_Utility(id)))); + __ mov(r1, Operand(ExternalReference(IC_Utility(id), masm->isolate()))); CEntryStub stub(1); __ CallStub(&stub); @@ -918,7 +918,8 @@ void LoadIC::GenerateMiss(MacroAssembler* masm) { __ Push(r3, r2); // Perform tail call to the entry. - ExternalReference ref = ExternalReference(IC_Utility(kLoadIC_Miss)); + ExternalReference ref = + ExternalReference(IC_Utility(kLoadIC_Miss), masm->isolate()); __ TailCallExternalReference(ref, 2, 1); } @@ -1148,7 +1149,8 @@ void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) { __ Push(r1, r0); - ExternalReference ref = ExternalReference(IC_Utility(kKeyedLoadIC_Miss)); + ExternalReference ref = + ExternalReference(IC_Utility(kKeyedLoadIC_Miss), masm->isolate()); __ TailCallExternalReference(ref, 2, 1); } @@ -1242,7 +1244,8 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) { // Load the key (consisting of map and symbol) from the cache and // check for match. - ExternalReference cache_keys = ExternalReference::keyed_lookup_cache_keys(); + ExternalReference cache_keys = + ExternalReference::keyed_lookup_cache_keys(masm->isolate()); __ mov(r4, Operand(cache_keys)); __ add(r4, r4, Operand(r3, LSL, kPointerSizeLog2 + 1)); __ ldr(r5, MemOperand(r4, kPointerSize, PostIndex)); // Move r4 to symbol. 
@@ -1257,8 +1260,8 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) { // r1 : receiver // r2 : receiver's map // r3 : lookup cache index - ExternalReference cache_field_offsets - = ExternalReference::keyed_lookup_cache_field_offsets(); + ExternalReference cache_field_offsets = + ExternalReference::keyed_lookup_cache_field_offsets(masm->isolate()); __ mov(r4, Operand(cache_field_offsets)); __ ldr(r5, MemOperand(r4, r3, LSL, kPointerSizeLog2)); __ ldrb(r6, FieldMemOperand(r2, Map::kInObjectPropertiesOffset)); @@ -1365,8 +1368,11 @@ void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) { __ Push(r1, r0); // Receiver, key. // Perform tail call to the entry. - __ TailCallExternalReference(ExternalReference( - IC_Utility(kKeyedLoadPropertyWithInterceptor)), 2, 1); + __ TailCallExternalReference( + ExternalReference(IC_Utility(kKeyedLoadPropertyWithInterceptor), + masm->isolate()), + 2, + 1); __ bind(&slow); GenerateMiss(masm); @@ -1384,7 +1390,8 @@ void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) { // Push receiver, key and value for runtime call. __ Push(r2, r1, r0); - ExternalReference ref = ExternalReference(IC_Utility(kKeyedStoreIC_Miss)); + ExternalReference ref = + ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate()); __ TailCallExternalReference(ref, 3, 1); } @@ -1550,7 +1557,8 @@ void StoreIC::GenerateMiss(MacroAssembler* masm) { __ Push(r1, r2, r0); // Perform tail call to the entry. - ExternalReference ref = ExternalReference(IC_Utility(kStoreIC_Miss)); + ExternalReference ref = + ExternalReference(IC_Utility(kStoreIC_Miss), masm->isolate()); __ TailCallExternalReference(ref, 3, 1); } @@ -1595,7 +1603,8 @@ void StoreIC::GenerateArrayLength(MacroAssembler* masm) { // Prepare tail call to StoreIC_ArrayLength. 
__ Push(receiver, value); - ExternalReference ref = ExternalReference(IC_Utility(kStoreIC_ArrayLength)); + ExternalReference ref = + ExternalReference(IC_Utility(kStoreIC_ArrayLength), masm->isolate()); __ TailCallExternalReference(ref, 2, 1); __ bind(&miss); diff --git a/src/arm/lithium-codegen-arm.cc b/src/arm/lithium-codegen-arm.cc index d02b862..48a9f7f 100644 --- a/src/arm/lithium-codegen-arm.cc +++ b/src/arm/lithium-codegen-arm.cc @@ -1289,7 +1289,8 @@ void LCodeGen::DoArithmeticD(LArithmeticD* instr) { __ PrepareCallCFunction(4, scratch0()); __ vmov(r0, r1, left); __ vmov(r2, r3, right); - __ CallCFunction(ExternalReference::double_fp_operation(Token::MOD), 4); + __ CallCFunction( + ExternalReference::double_fp_operation(Token::MOD, isolate()), 4); // Move the result in the double result register. __ GetCFunctionDoubleResult(ToDoubleRegister(instr->result())); @@ -2742,14 +2743,16 @@ void LCodeGen::DoPower(LPower* instr) { __ PrepareCallCFunction(4, scratch); __ vmov(r0, r1, ToDoubleRegister(left)); __ vmov(r2, r3, ToDoubleRegister(right)); - __ CallCFunction(ExternalReference::power_double_double_function(), 4); + __ CallCFunction( + ExternalReference::power_double_double_function(isolate()), 4); } else if (exponent_type.IsInteger32()) { ASSERT(ToRegister(right).is(r0)); // Prepare arguments and call C function. 
__ PrepareCallCFunction(4, scratch); __ mov(r2, ToRegister(right)); __ vmov(r0, r1, ToDoubleRegister(left)); - __ CallCFunction(ExternalReference::power_double_int_function(), 4); + __ CallCFunction( + ExternalReference::power_double_int_function(isolate()), 4); } else { ASSERT(exponent_type.IsTagged()); ASSERT(instr->hydrogen()->left()->representation().IsDouble()); @@ -2782,7 +2785,8 @@ void LCodeGen::DoPower(LPower* instr) { __ PrepareCallCFunction(4, scratch); __ vmov(r0, r1, ToDoubleRegister(left)); __ vmov(r2, r3, result_reg); - __ CallCFunction(ExternalReference::power_double_double_function(), 4); + __ CallCFunction( + ExternalReference::power_double_double_function(isolate()), 4); } // Store the result in the result register. __ GetCFunctionDoubleResult(result_reg); diff --git a/src/arm/macro-assembler-arm.cc b/src/arm/macro-assembler-arm.cc index 8cdbf3b..626c46f 100644 --- a/src/arm/macro-assembler-arm.cc +++ b/src/arm/macro-assembler-arm.cc @@ -461,8 +461,8 @@ void MacroAssembler::InNewSpace(Register object, Condition cond, Label* branch) { ASSERT(cond == eq || cond == ne); - and_(scratch, object, Operand(ExternalReference::new_space_mask())); - cmp(scratch, Operand(ExternalReference::new_space_start())); + and_(scratch, object, Operand(ExternalReference::new_space_mask(isolate()))); + cmp(scratch, Operand(ExternalReference::new_space_start(isolate()))); b(cond, branch); } @@ -739,9 +739,9 @@ void MacroAssembler::EnterExitFrame(bool save_doubles, int stack_space) { str(ip, MemOperand(fp, ExitFrameConstants::kCodeOffset)); // Save the frame pointer and the context in top. - mov(ip, Operand(ExternalReference(Isolate::k_c_entry_fp_address))); + mov(ip, Operand(ExternalReference(Isolate::k_c_entry_fp_address, isolate()))); str(fp, MemOperand(ip)); - mov(ip, Operand(ExternalReference(Isolate::k_context_address))); + mov(ip, Operand(ExternalReference(Isolate::k_context_address, isolate()))); str(cp, MemOperand(ip)); // Optionally save all double registers. 
@@ -817,11 +817,11 @@ void MacroAssembler::LeaveExitFrame(bool save_doubles, // Clear top frame. mov(r3, Operand(0, RelocInfo::NONE)); - mov(ip, Operand(ExternalReference(Isolate::k_c_entry_fp_address))); + mov(ip, Operand(ExternalReference(Isolate::k_c_entry_fp_address, isolate()))); str(r3, MemOperand(ip)); // Restore current context from top and clear it in debug mode. - mov(ip, Operand(ExternalReference(Isolate::k_context_address))); + mov(ip, Operand(ExternalReference(Isolate::k_context_address, isolate()))); ldr(cp, MemOperand(ip)); #ifdef DEBUG str(r3, MemOperand(ip)); @@ -1048,7 +1048,7 @@ void MacroAssembler::IsObjectJSStringType(Register object, void MacroAssembler::DebugBreak() { ASSERT(allow_stub_calls()); mov(r0, Operand(0, RelocInfo::NONE)); - mov(r1, Operand(ExternalReference(Runtime::kDebugBreak))); + mov(r1, Operand(ExternalReference(Runtime::kDebugBreak, isolate()))); CEntryStub ces(1); Call(ces.GetCode(), RelocInfo::DEBUG_BREAK); } @@ -1071,7 +1071,7 @@ void MacroAssembler::PushTryHandler(CodeLocation try_location, && StackHandlerConstants::kPCOffset == 3 * kPointerSize); stm(db_w, sp, r3.bit() | fp.bit() | lr.bit()); // Save the current handler as the next handler. - mov(r3, Operand(ExternalReference(Isolate::k_handler_address))); + mov(r3, Operand(ExternalReference(Isolate::k_handler_address, isolate()))); ldr(r1, MemOperand(r3)); ASSERT(StackHandlerConstants::kNextOffset == 0); push(r1); @@ -1090,7 +1090,7 @@ void MacroAssembler::PushTryHandler(CodeLocation try_location, && StackHandlerConstants::kPCOffset == 3 * kPointerSize); stm(db_w, sp, r6.bit() | ip.bit() | lr.bit()); // Save the current handler as the next handler. 
- mov(r7, Operand(ExternalReference(Isolate::k_handler_address))); + mov(r7, Operand(ExternalReference(Isolate::k_handler_address, isolate()))); ldr(r6, MemOperand(r7)); ASSERT(StackHandlerConstants::kNextOffset == 0); push(r6); @@ -1103,7 +1103,7 @@ void MacroAssembler::PushTryHandler(CodeLocation try_location, void MacroAssembler::PopTryHandler() { ASSERT_EQ(0, StackHandlerConstants::kNextOffset); pop(r1); - mov(ip, Operand(ExternalReference(Isolate::k_handler_address))); + mov(ip, Operand(ExternalReference(Isolate::k_handler_address, isolate()))); add(sp, sp, Operand(StackHandlerConstants::kSize - kPointerSize)); str(r1, MemOperand(ip)); } @@ -1119,7 +1119,7 @@ void MacroAssembler::Throw(Register value) { STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize); // Drop the sp to the top of the handler. - mov(r3, Operand(ExternalReference(Isolate::k_handler_address))); + mov(r3, Operand(ExternalReference(Isolate::k_handler_address, isolate()))); ldr(sp, MemOperand(r3)); // Restore the next handler and frame pointer, discard handler state. @@ -1158,7 +1158,7 @@ void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type, } // Drop sp to the top stack handler. - mov(r3, Operand(ExternalReference(Isolate::k_handler_address))); + mov(r3, Operand(ExternalReference(Isolate::k_handler_address, isolate()))); ldr(sp, MemOperand(r3)); // Unwind the handlers until the ENTRY handler is found. @@ -1183,7 +1183,7 @@ void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type, if (type == OUT_OF_MEMORY) { // Set external caught exception to false. ExternalReference external_caught( - Isolate::k_external_caught_exception_address); + Isolate::k_external_caught_exception_address, isolate()); mov(r0, Operand(false, RelocInfo::NONE)); mov(r2, Operand(external_caught)); str(r0, MemOperand(r2)); @@ -1191,7 +1191,8 @@ void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type, // Set pending exception and r0 to out of memory exception. 
Failure* out_of_memory = Failure::OutOfMemoryException(); mov(r0, Operand(reinterpret_cast<int32_t>(out_of_memory))); - mov(r2, Operand(ExternalReference(Isolate::k_pending_exception_address))); + mov(r2, Operand(ExternalReference(Isolate::k_pending_exception_address, + isolate()))); str(r0, MemOperand(r2)); } @@ -1332,9 +1333,9 @@ void MacroAssembler::AllocateInNewSpace(int object_size, // Also, assert that the registers are numbered such that the values // are loaded in the correct order. ExternalReference new_space_allocation_top = - ExternalReference::new_space_allocation_top_address(); + ExternalReference::new_space_allocation_top_address(isolate()); ExternalReference new_space_allocation_limit = - ExternalReference::new_space_allocation_limit_address(); + ExternalReference::new_space_allocation_limit_address(isolate()); intptr_t top = reinterpret_cast<intptr_t>(new_space_allocation_top.address()); intptr_t limit = @@ -1412,9 +1413,9 @@ void MacroAssembler::AllocateInNewSpace(Register object_size, // Also, assert that the registers are numbered such that the values // are loaded in the correct order. ExternalReference new_space_allocation_top = - ExternalReference::new_space_allocation_top_address(); + ExternalReference::new_space_allocation_top_address(isolate()); ExternalReference new_space_allocation_limit = - ExternalReference::new_space_allocation_limit_address(); + ExternalReference::new_space_allocation_limit_address(isolate()); intptr_t top = reinterpret_cast<intptr_t>(new_space_allocation_top.address()); intptr_t limit = @@ -1473,7 +1474,7 @@ void MacroAssembler::AllocateInNewSpace(Register object_size, void MacroAssembler::UndoAllocationInNewSpace(Register object, Register scratch) { ExternalReference new_space_allocation_top = - ExternalReference::new_space_allocation_top_address(); + ExternalReference::new_space_allocation_top_address(isolate()); // Make sure the object has no tag before resetting top. 
and_(object, object, Operand(~kHeapObjectTagMask)); @@ -1775,7 +1776,7 @@ MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn( // Check if the function scheduled an exception. bind(&leave_exit_frame); LoadRoot(r4, Heap::kTheHoleValueRootIndex); - mov(ip, Operand(ExternalReference::scheduled_exception_address())); + mov(ip, Operand(ExternalReference::scheduled_exception_address(isolate()))); ldr(r5, MemOperand(ip)); cmp(r4, r5); b(ne, &promote_scheduled_exception); @@ -1786,8 +1787,11 @@ MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn( mov(pc, lr); bind(&promote_scheduled_exception); - MaybeObject* result = TryTailCallExternalReference( - ExternalReference(Runtime::kPromoteScheduledException), 0, 1); + MaybeObject* result + = TryTailCallExternalReference( + ExternalReference(Runtime::kPromoteScheduledException, isolate()), + 0, + 1); if (result->IsFailure()) { return result; } @@ -1797,7 +1801,8 @@ MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn( str(r5, MemOperand(r7, kLimitOffset)); mov(r4, r0); PrepareCallCFunction(0, r5); - CallCFunction(ExternalReference::delete_handle_scope_extensions(), 0); + CallCFunction( + ExternalReference::delete_handle_scope_extensions(isolate()), 0); mov(r0, r4); jmp(&leave_exit_frame); @@ -2185,7 +2190,7 @@ void MacroAssembler::CallRuntime(const Runtime::Function* f, // should remove this need and make the runtime routine entry code // smarter. 
mov(r0, Operand(num_arguments)); - mov(r1, Operand(ExternalReference(f))); + mov(r1, Operand(ExternalReference(f, isolate()))); CEntryStub stub(1); CallStub(&stub); } @@ -2199,7 +2204,7 @@ void MacroAssembler::CallRuntime(Runtime::FunctionId fid, int num_arguments) { void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) { const Runtime::Function* function = Runtime::FunctionForId(id); mov(r0, Operand(function->nargs)); - mov(r1, Operand(ExternalReference(function))); + mov(r1, Operand(ExternalReference(function, isolate()))); CEntryStub stub(1); stub.SaveDoubles(); CallStub(&stub); @@ -2242,7 +2247,9 @@ MaybeObject* MacroAssembler::TryTailCallExternalReference( void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid, int num_arguments, int result_size) { - TailCallExternalReference(ExternalReference(fid), num_arguments, result_size); + TailCallExternalReference(ExternalReference(fid, isolate()), + num_arguments, + result_size); } @@ -2820,7 +2827,7 @@ void MacroAssembler::CallCFunction(Register function, Register scratch, int num_arguments) { CallCFunctionHelper(function, - ExternalReference::the_hole_value_location(), + ExternalReference::the_hole_value_location(isolate()), scratch, num_arguments); } diff --git a/src/arm/regexp-macro-assembler-arm.cc b/src/arm/regexp-macro-assembler-arm.cc index 47a3322..8d540d4 100644 --- a/src/arm/regexp-macro-assembler-arm.cc +++ b/src/arm/regexp-macro-assembler-arm.cc @@ -369,7 +369,7 @@ void RegExpMacroAssemblerARM::CheckNotBackReferenceIgnoreCase( __ add(r1, current_input_offset(), Operand(end_of_input_address())); ExternalReference function = - ExternalReference::re_case_insensitive_compare_uc16(); + ExternalReference::re_case_insensitive_compare_uc16(masm_->isolate()); __ CallCFunction(function, argument_count); // Check if function returned non-zero for success or zero for failure. 
@@ -627,7 +627,7 @@ Handle<Object> RegExpMacroAssemblerARM::GetCode(Handle<String> source) { Label stack_ok; ExternalReference stack_limit = - ExternalReference::address_of_stack_limit(); + ExternalReference::address_of_stack_limit(masm_->isolate()); __ mov(r0, Operand(stack_limit)); __ ldr(r0, MemOperand(r0)); __ sub(r0, sp, r0, SetCC); @@ -783,7 +783,7 @@ Handle<Object> RegExpMacroAssemblerARM::GetCode(Handle<String> source) { __ mov(r0, backtrack_stackpointer()); __ add(r1, frame_pointer(), Operand(kStackHighEnd)); ExternalReference grow_stack = - ExternalReference::re_grow_stack(); + ExternalReference::re_grow_stack(masm_->isolate()); __ CallCFunction(grow_stack, num_arguments); // If return NULL, we have failed to grow the stack, and // must exit with a stack-overflow exception. @@ -999,7 +999,7 @@ void RegExpMacroAssemblerARM::CallCheckStackGuardState(Register scratch) { __ mov(r1, Operand(masm_->CodeObject())); // r0 becomes return address pointer. ExternalReference stack_guard_check = - ExternalReference::re_check_stack_guard_state(); + ExternalReference::re_check_stack_guard_state(masm_->isolate()); CallCFunctionUsingStub(stack_guard_check, num_arguments); } @@ -1161,7 +1161,7 @@ void RegExpMacroAssemblerARM::Pop(Register target) { void RegExpMacroAssemblerARM::CheckPreemption() { // Check for preemption. 
ExternalReference stack_limit = - ExternalReference::address_of_stack_limit(); + ExternalReference::address_of_stack_limit(masm_->isolate()); __ mov(r0, Operand(stack_limit)); __ ldr(r0, MemOperand(r0)); __ cmp(sp, r0); @@ -1171,7 +1171,7 @@ void RegExpMacroAssemblerARM::CheckPreemption() { void RegExpMacroAssemblerARM::CheckStackLimit() { ExternalReference stack_limit = - ExternalReference::address_of_regexp_stack_limit(); + ExternalReference::address_of_regexp_stack_limit(masm_->isolate()); __ mov(r0, Operand(stack_limit)); __ ldr(r0, MemOperand(r0)); __ cmp(backtrack_stackpointer(), Operand(r0)); diff --git a/src/arm/stub-cache-arm.cc b/src/arm/stub-cache-arm.cc index 1ee7419..c548aab 100644 --- a/src/arm/stub-cache-arm.cc +++ b/src/arm/stub-cache-arm.cc @@ -453,8 +453,10 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm, __ mov(r2, Operand(Handle(transition))); __ Push(r2, r0); __ TailCallExternalReference( - ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage)), - 3, 1); + ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage), + masm->isolate()), + 3, + 1); return; } @@ -570,7 +572,8 @@ static void CompileCallLoadPropertyWithInterceptor(MacroAssembler* masm, PushInterceptorArguments(masm, receiver, holder, name, holder_obj); ExternalReference ref = - ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly)); + ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly), + masm->isolate()); __ mov(r0, Operand(5)); __ mov(r1, Operand(ref)); @@ -659,8 +662,9 @@ static MaybeObject* GenerateFastApiDirectCall(MacroAssembler* masm, // garbage collection but instead return the allocation failure // object. 
const int kStackUnwindSpace = argc + kFastApiCallArguments + 1; - ExternalReference ref = - ExternalReference(&fun, ExternalReference::DIRECT_API_CALL); + ExternalReference ref = ExternalReference(&fun, + ExternalReference::DIRECT_API_CALL, + masm->isolate()); return masm->TryCallApiFunctionAndReturn(ref, kStackUnwindSpace); } @@ -843,9 +847,9 @@ class CallInterceptorCompiler BASE_EMBEDDED { interceptor_holder); __ CallExternalReference( - ExternalReference( - IC_Utility(IC::kLoadPropertyWithInterceptorForCall)), - 5); + ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForCall), + masm->isolate()), + 5); // Restore the name_ register. __ pop(name_); @@ -1276,7 +1280,9 @@ MaybeObject* StubCompiler::GenerateLoadCallback(JSObject* object, // object. const int kStackUnwindSpace = 4; ExternalReference ref = - ExternalReference(&fun, ExternalReference::DIRECT_GETTER_CALL); + ExternalReference(&fun, + ExternalReference::DIRECT_GETTER_CALL, + masm()->isolate()); return masm()->TryCallApiFunctionAndReturn(ref, kStackUnwindSpace); } @@ -1405,7 +1411,8 @@ void StubCompiler::GenerateLoadInterceptor(JSObject* object, } ExternalReference ref = - ExternalReference(IC_Utility(IC::kLoadCallbackProperty)); + ExternalReference(IC_Utility(IC::kLoadCallbackProperty), + masm()->isolate()); __ TailCallExternalReference(ref, 5, 1); } } else { // !compile_followup_inline @@ -1417,8 +1424,9 @@ void StubCompiler::GenerateLoadInterceptor(JSObject* object, PushInterceptorArguments(masm(), receiver, holder_reg, name_reg, interceptor_holder); - ExternalReference ref = ExternalReference( - IC_Utility(IC::kLoadPropertyWithInterceptorForLoad)); + ExternalReference ref = + ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), + masm()->isolate()); __ TailCallExternalReference(ref, 5, 1); } } @@ -1634,10 +1642,11 @@ MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object, __ b(&call_builtin); } + Isolate* isolate = masm()->isolate(); ExternalReference 
new_space_allocation_top = - ExternalReference::new_space_allocation_top_address(); + ExternalReference::new_space_allocation_top_address(isolate); ExternalReference new_space_allocation_limit = - ExternalReference::new_space_allocation_limit_address(); + ExternalReference::new_space_allocation_limit_address(isolate); const int kAllocationDelta = 4; // Load top and check if it is the end of elements. @@ -1677,7 +1686,8 @@ MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object, __ Ret(); } __ bind(&call_builtin); - __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush), + __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush, + masm()->isolate()), argc + 1, 1); } @@ -1762,7 +1772,8 @@ MaybeObject* CallStubCompiler::CompileArrayPopCall(Object* object, __ Ret(); __ bind(&call_builtin); - __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPop), + __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPop, + masm()->isolate()), argc + 1, 1); @@ -2636,7 +2647,8 @@ MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object, // Do tail-call to the runtime system. ExternalReference store_callback_property = - ExternalReference(IC_Utility(IC::kStoreCallbackProperty)); + ExternalReference(IC_Utility(IC::kStoreCallbackProperty), + masm()->isolate()); __ TailCallExternalReference(store_callback_property, 4, 1); // Handle store cache miss. @@ -2685,7 +2697,8 @@ MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver, // Do tail-call to the runtime system. ExternalReference store_ic_property = - ExternalReference(IC_Utility(IC::kStoreInterceptorProperty)); + ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), + masm()->isolate()); __ TailCallExternalReference(store_ic_property, 4, 1); // Handle store cache miss. 
diff --git a/src/assembler.cc b/src/assembler.cc index a0e3dc6..c6d1cfa 100644 --- a/src/assembler.cc +++ b/src/assembler.cc @@ -552,25 +552,29 @@ void RelocInfo::Verify() { // ----------------------------------------------------------------------------- // Implementation of ExternalReference -ExternalReference::ExternalReference(Builtins::CFunctionId id) - : address_(Redirect(Builtins::c_function_address(id))) {} +ExternalReference::ExternalReference(Builtins::CFunctionId id, Isolate* isolate) + : address_(Redirect(isolate, Builtins::c_function_address(id))) {} ExternalReference::ExternalReference( - ApiFunction* fun, Type type = ExternalReference::BUILTIN_CALL) - : address_(Redirect(fun->address(), type)) {} + ApiFunction* fun, + Type type = ExternalReference::BUILTIN_CALL, + Isolate* isolate = NULL) + : address_(Redirect(isolate, fun->address(), type)) {} -ExternalReference::ExternalReference(Builtins::Name name) - : address_(Isolate::Current()->builtins()->builtin_address(name)) {} +ExternalReference::ExternalReference(Builtins::Name name, Isolate* isolate) + : address_(isolate->builtins()->builtin_address(name)) {} -ExternalReference::ExternalReference(Runtime::FunctionId id) - : address_(Redirect(Runtime::FunctionForId(id)->entry)) {} +ExternalReference::ExternalReference(Runtime::FunctionId id, + Isolate* isolate) + : address_(Redirect(isolate, Runtime::FunctionForId(id)->entry)) {} -ExternalReference::ExternalReference(const Runtime::Function* f) - : address_(Redirect(f->entry)) {} +ExternalReference::ExternalReference(const Runtime::Function* f, + Isolate* isolate) + : address_(Redirect(isolate, f->entry)) {} ExternalReference ExternalReference::isolate_address() { @@ -578,139 +582,154 @@ ExternalReference ExternalReference::isolate_address() { } -ExternalReference::ExternalReference(const IC_Utility& ic_utility) - : address_(Redirect(ic_utility.address())) {} +ExternalReference::ExternalReference(const IC_Utility& ic_utility, + Isolate* isolate) + : 
address_(Redirect(isolate, ic_utility.address())) {} #ifdef ENABLE_DEBUGGER_SUPPORT -ExternalReference::ExternalReference(const Debug_Address& debug_address) - : address_(debug_address.address(Isolate::Current())) {} +ExternalReference::ExternalReference(const Debug_Address& debug_address, + Isolate* isolate) + : address_(debug_address.address(isolate)) {} #endif ExternalReference::ExternalReference(StatsCounter* counter) : address_(reinterpret_cast
(counter->GetInternalPointer())) {} -ExternalReference::ExternalReference(Isolate::AddressId id) - : address_(Isolate::Current()->get_address_from_id(id)) {} +ExternalReference::ExternalReference(Isolate::AddressId id, Isolate* isolate) + : address_(isolate->get_address_from_id(id)) {} ExternalReference::ExternalReference(const SCTableReference& table_ref) : address_(table_ref.address()) {} -ExternalReference ExternalReference::perform_gc_function() { - return ExternalReference(Redirect(FUNCTION_ADDR(Runtime::PerformGC))); +ExternalReference ExternalReference::perform_gc_function(Isolate* isolate) { + return ExternalReference(Redirect(isolate, + FUNCTION_ADDR(Runtime::PerformGC))); } -ExternalReference ExternalReference::fill_heap_number_with_random_function() { - return - ExternalReference(Redirect(FUNCTION_ADDR(V8::FillHeapNumberWithRandom))); +ExternalReference ExternalReference::fill_heap_number_with_random_function( + Isolate* isolate) { + return ExternalReference(Redirect( + isolate, + FUNCTION_ADDR(V8::FillHeapNumberWithRandom))); } -ExternalReference ExternalReference::delete_handle_scope_extensions() { - return ExternalReference(Redirect(FUNCTION_ADDR( - HandleScope::DeleteExtensions))); +ExternalReference ExternalReference::delete_handle_scope_extensions( + Isolate* isolate) { + return ExternalReference(Redirect( + isolate, + FUNCTION_ADDR(HandleScope::DeleteExtensions))); } -ExternalReference ExternalReference::random_uint32_function() { - return ExternalReference(Redirect(FUNCTION_ADDR(V8::Random))); +ExternalReference ExternalReference::random_uint32_function( + Isolate* isolate) { + return ExternalReference(Redirect(isolate, FUNCTION_ADDR(V8::Random))); } -ExternalReference ExternalReference::transcendental_cache_array_address() { - return ExternalReference(Isolate::Current()->transcendental_cache()-> - cache_array_address()); +ExternalReference ExternalReference::transcendental_cache_array_address( + Isolate* isolate) { + return ExternalReference( + 
isolate->transcendental_cache()->cache_array_address()); } -ExternalReference ExternalReference::new_deoptimizer_function() { +ExternalReference ExternalReference::new_deoptimizer_function( + Isolate* isolate) { return ExternalReference( - Redirect(FUNCTION_ADDR(Deoptimizer::New))); + Redirect(isolate, FUNCTION_ADDR(Deoptimizer::New))); } -ExternalReference ExternalReference::compute_output_frames_function() { +ExternalReference ExternalReference::compute_output_frames_function( + Isolate* isolate) { return ExternalReference( - Redirect(FUNCTION_ADDR(Deoptimizer::ComputeOutputFrames))); + Redirect(isolate, FUNCTION_ADDR(Deoptimizer::ComputeOutputFrames))); } -ExternalReference ExternalReference::global_contexts_list() { - return ExternalReference(Isolate::Current()-> - heap()->global_contexts_list_address()); +ExternalReference ExternalReference::global_contexts_list(Isolate* isolate) { + return ExternalReference(isolate->heap()->global_contexts_list_address()); } -ExternalReference ExternalReference::keyed_lookup_cache_keys() { - return ExternalReference(Isolate::Current()-> - keyed_lookup_cache()->keys_address()); +ExternalReference ExternalReference::keyed_lookup_cache_keys(Isolate* isolate) { + return ExternalReference(isolate->keyed_lookup_cache()->keys_address()); } -ExternalReference ExternalReference::keyed_lookup_cache_field_offsets() { - return ExternalReference(Isolate::Current()-> - keyed_lookup_cache()->field_offsets_address()); +ExternalReference ExternalReference::keyed_lookup_cache_field_offsets( + Isolate* isolate) { + return ExternalReference( + isolate->keyed_lookup_cache()->field_offsets_address()); } -ExternalReference ExternalReference::the_hole_value_location() { - return ExternalReference(FACTORY->the_hole_value().location()); +ExternalReference ExternalReference::the_hole_value_location(Isolate* isolate) { + return ExternalReference(isolate->factory()->the_hole_value().location()); } -ExternalReference 
ExternalReference::arguments_marker_location() { - return ExternalReference(FACTORY->arguments_marker().location()); +ExternalReference ExternalReference::arguments_marker_location( + Isolate* isolate) { + return ExternalReference(isolate->factory()->arguments_marker().location()); } -ExternalReference ExternalReference::roots_address() { - return ExternalReference(HEAP->roots_address()); +ExternalReference ExternalReference::roots_address(Isolate* isolate) { + return ExternalReference(isolate->heap()->roots_address()); } -ExternalReference ExternalReference::address_of_stack_limit() { - return ExternalReference( - Isolate::Current()->stack_guard()->address_of_jslimit()); +ExternalReference ExternalReference::address_of_stack_limit(Isolate* isolate) { + return ExternalReference(isolate->stack_guard()->address_of_jslimit()); } -ExternalReference ExternalReference::address_of_real_stack_limit() { - return ExternalReference( - Isolate::Current()->stack_guard()->address_of_real_jslimit()); +ExternalReference ExternalReference::address_of_real_stack_limit( + Isolate* isolate) { + return ExternalReference(isolate->stack_guard()->address_of_real_jslimit()); } -ExternalReference ExternalReference::address_of_regexp_stack_limit() { - return ExternalReference( - Isolate::Current()->regexp_stack()->limit_address()); +ExternalReference ExternalReference::address_of_regexp_stack_limit( + Isolate* isolate) { + return ExternalReference(isolate->regexp_stack()->limit_address()); } -ExternalReference ExternalReference::new_space_start() { - return ExternalReference(HEAP->NewSpaceStart()); +ExternalReference ExternalReference::new_space_start(Isolate* isolate) { + return ExternalReference(isolate->heap()->NewSpaceStart()); } -ExternalReference ExternalReference::new_space_mask() { - return ExternalReference(reinterpret_cast
(HEAP->NewSpaceMask())); +ExternalReference ExternalReference::new_space_mask(Isolate* isolate) { + Address mask = reinterpret_cast
(isolate->heap()->NewSpaceMask()); + return ExternalReference(mask); } -ExternalReference ExternalReference::new_space_allocation_top_address() { - return ExternalReference(HEAP->NewSpaceAllocationTopAddress()); +ExternalReference ExternalReference::new_space_allocation_top_address( + Isolate* isolate) { + return ExternalReference(isolate->heap()->NewSpaceAllocationTopAddress()); } -ExternalReference ExternalReference::heap_always_allocate_scope_depth() { - return ExternalReference(HEAP->always_allocate_scope_depth_address()); +ExternalReference ExternalReference::heap_always_allocate_scope_depth( + Isolate* isolate) { + Heap* heap = isolate->heap(); + return ExternalReference(heap->always_allocate_scope_depth_address()); } -ExternalReference ExternalReference::new_space_allocation_limit_address() { - return ExternalReference(HEAP->NewSpaceAllocationLimitAddress()); +ExternalReference ExternalReference::new_space_allocation_limit_address( + Isolate* isolate) { + return ExternalReference(isolate->heap()->NewSpaceAllocationLimitAddress()); } @@ -729,8 +748,9 @@ ExternalReference ExternalReference::handle_scope_limit_address() { } -ExternalReference ExternalReference::scheduled_exception_address() { - return ExternalReference(Isolate::Current()->scheduled_exception_address()); +ExternalReference ExternalReference::scheduled_exception_address( + Isolate* isolate) { + return ExternalReference(isolate->scheduled_exception_address()); } @@ -766,7 +786,8 @@ ExternalReference ExternalReference::address_of_nan() { #ifndef V8_INTERPRETED_REGEXP -ExternalReference ExternalReference::re_check_stack_guard_state() { +ExternalReference ExternalReference::re_check_stack_guard_state( + Isolate* isolate) { Address function; #ifdef V8_TARGET_ARCH_X64 function = FUNCTION_ADDR(RegExpMacroAssemblerX64::CheckStackGuardState); @@ -777,16 +798,18 @@ ExternalReference ExternalReference::re_check_stack_guard_state() { #else UNREACHABLE(); #endif - return ExternalReference(Redirect(function)); 
+ return ExternalReference(Redirect(isolate, function)); } -ExternalReference ExternalReference::re_grow_stack() { +ExternalReference ExternalReference::re_grow_stack(Isolate* isolate) { return ExternalReference( - Redirect(FUNCTION_ADDR(NativeRegExpMacroAssembler::GrowStack))); + Redirect(isolate, FUNCTION_ADDR(NativeRegExpMacroAssembler::GrowStack))); } -ExternalReference ExternalReference::re_case_insensitive_compare_uc16() { +ExternalReference ExternalReference::re_case_insensitive_compare_uc16( + Isolate* isolate) { return ExternalReference(Redirect( + isolate, FUNCTION_ADDR(NativeRegExpMacroAssembler::CaseInsensitiveCompareUC16))); } @@ -795,19 +818,21 @@ ExternalReference ExternalReference::re_word_character_map() { NativeRegExpMacroAssembler::word_character_map_address()); } -ExternalReference ExternalReference::address_of_static_offsets_vector() { - return ExternalReference(OffsetsVector::static_offsets_vector_address( - Isolate::Current())); +ExternalReference ExternalReference::address_of_static_offsets_vector( + Isolate* isolate) { + return ExternalReference( + OffsetsVector::static_offsets_vector_address(isolate)); } -ExternalReference ExternalReference::address_of_regexp_stack_memory_address() { +ExternalReference ExternalReference::address_of_regexp_stack_memory_address( + Isolate* isolate) { return ExternalReference( - Isolate::Current()->regexp_stack()->memory_address()); + isolate->regexp_stack()->memory_address()); } -ExternalReference ExternalReference::address_of_regexp_stack_memory_size() { - return ExternalReference( - Isolate::Current()->regexp_stack()->memory_size_address()); +ExternalReference ExternalReference::address_of_regexp_stack_memory_size( + Isolate* isolate) { + return ExternalReference(isolate->regexp_stack()->memory_size_address()); } #endif // V8_INTERPRETED_REGEXP @@ -853,20 +878,26 @@ static double math_log_double(double x) { } -ExternalReference ExternalReference::math_sin_double_function() { - return 
ExternalReference(Redirect(FUNCTION_ADDR(math_sin_double), +ExternalReference ExternalReference::math_sin_double_function( + Isolate* isolate) { + return ExternalReference(Redirect(isolate, + FUNCTION_ADDR(math_sin_double), FP_RETURN_CALL)); } -ExternalReference ExternalReference::math_cos_double_function() { - return ExternalReference(Redirect(FUNCTION_ADDR(math_cos_double), +ExternalReference ExternalReference::math_cos_double_function( + Isolate* isolate) { + return ExternalReference(Redirect(isolate, + FUNCTION_ADDR(math_cos_double), FP_RETURN_CALL)); } -ExternalReference ExternalReference::math_log_double_function() { - return ExternalReference(Redirect(FUNCTION_ADDR(math_log_double), +ExternalReference ExternalReference::math_log_double_function( + Isolate* isolate) { + return ExternalReference(Redirect(isolate, + FUNCTION_ADDR(math_log_double), FP_RETURN_CALL)); } @@ -906,14 +937,18 @@ double power_double_double(double x, double y) { } -ExternalReference ExternalReference::power_double_double_function() { - return ExternalReference(Redirect(FUNCTION_ADDR(power_double_double), +ExternalReference ExternalReference::power_double_double_function( + Isolate* isolate) { + return ExternalReference(Redirect(isolate, + FUNCTION_ADDR(power_double_double), FP_RETURN_CALL)); } -ExternalReference ExternalReference::power_double_int_function() { - return ExternalReference(Redirect(FUNCTION_ADDR(power_double_int), +ExternalReference ExternalReference::power_double_int_function( + Isolate* isolate) { + return ExternalReference(Redirect(isolate, + FUNCTION_ADDR(power_double_int), FP_RETURN_CALL)); } @@ -925,7 +960,7 @@ static int native_compare_doubles(double y, double x) { ExternalReference ExternalReference::double_fp_operation( - Token::Value operation) { + Token::Value operation, Isolate* isolate) { typedef double BinaryFPOperation(double x, double y); BinaryFPOperation* function = NULL; switch (operation) { @@ -948,24 +983,28 @@ ExternalReference 
ExternalReference::double_fp_operation( UNREACHABLE(); } // Passing true as 2nd parameter indicates that they return an fp value. - return ExternalReference(Redirect(FUNCTION_ADDR(function), FP_RETURN_CALL)); + return ExternalReference(Redirect(isolate, + FUNCTION_ADDR(function), + FP_RETURN_CALL)); } -ExternalReference ExternalReference::compare_doubles() { - return ExternalReference(Redirect(FUNCTION_ADDR(native_compare_doubles), +ExternalReference ExternalReference::compare_doubles(Isolate* isolate) { + return ExternalReference(Redirect(isolate, + FUNCTION_ADDR(native_compare_doubles), BUILTIN_CALL)); } #ifdef ENABLE_DEBUGGER_SUPPORT -ExternalReference ExternalReference::debug_break() { - return ExternalReference(Redirect(FUNCTION_ADDR(Debug::Break))); +ExternalReference ExternalReference::debug_break(Isolate* isolate) { + return ExternalReference(Redirect(isolate, FUNCTION_ADDR(Debug::Break))); } -ExternalReference ExternalReference::debug_step_in_fp_address() { - return ExternalReference(Isolate::Current()->debug()->step_in_fp_addr()); +ExternalReference ExternalReference::debug_step_in_fp_address( + Isolate* isolate) { + return ExternalReference(isolate->debug()->step_in_fp_addr()); } #endif diff --git a/src/assembler.h b/src/assembler.h index d320704..8147227 100644 --- a/src/assembler.h +++ b/src/assembler.h @@ -44,6 +44,19 @@ namespace internal { // ----------------------------------------------------------------------------- +// Platform independent assembler base class. + +class AssemblerBase: public Malloced { + public: + explicit AssemblerBase(Isolate* isolate) : isolate_(isolate) {} + + Isolate* isolate() const { return isolate_; } + + private: + Isolate* isolate_; +}; + +// ----------------------------------------------------------------------------- // Common double constants. 
class DoubleConstant: public AllStatic { @@ -501,25 +514,25 @@ class ExternalReference BASE_EMBEDDED { typedef void* ExternalReferenceRedirector(void* original, Type type); - explicit ExternalReference(Builtins::CFunctionId id); + ExternalReference(Builtins::CFunctionId id, Isolate* isolate); - explicit ExternalReference(ApiFunction* ptr, Type type); + ExternalReference(ApiFunction* ptr, Type type, Isolate* isolate); - explicit ExternalReference(Builtins::Name name); + ExternalReference(Builtins::Name name, Isolate* isolate); - explicit ExternalReference(Runtime::FunctionId id); + ExternalReference(Runtime::FunctionId id, Isolate* isolate); - explicit ExternalReference(const Runtime::Function* f); + ExternalReference(const Runtime::Function* f, Isolate* isolate); - explicit ExternalReference(const IC_Utility& ic_utility); + ExternalReference(const IC_Utility& ic_utility, Isolate* isolate); #ifdef ENABLE_DEBUGGER_SUPPORT - explicit ExternalReference(const Debug_Address& debug_address); + ExternalReference(const Debug_Address& debug_address, Isolate* isolate); #endif explicit ExternalReference(StatsCounter* counter); - explicit ExternalReference(Isolate::AddressId id); + ExternalReference(Isolate::AddressId id, Isolate* isolate); explicit ExternalReference(const SCTableReference& table_ref); @@ -530,63 +543,67 @@ class ExternalReference BASE_EMBEDDED { // pattern. This means that they have to be added to the // ExternalReferenceTable in serialize.cc manually. 
- static ExternalReference perform_gc_function(); - static ExternalReference fill_heap_number_with_random_function(); - static ExternalReference random_uint32_function(); - static ExternalReference transcendental_cache_array_address(); - static ExternalReference delete_handle_scope_extensions(); + static ExternalReference perform_gc_function(Isolate* isolate); + static ExternalReference fill_heap_number_with_random_function( + Isolate* isolate); + static ExternalReference random_uint32_function(Isolate* isolate); + static ExternalReference transcendental_cache_array_address(Isolate* isolate); + static ExternalReference delete_handle_scope_extensions(Isolate* isolate); // Deoptimization support. - static ExternalReference new_deoptimizer_function(); - static ExternalReference compute_output_frames_function(); - static ExternalReference global_contexts_list(); + static ExternalReference new_deoptimizer_function(Isolate* isolate); + static ExternalReference compute_output_frames_function(Isolate* isolate); + static ExternalReference global_contexts_list(Isolate* isolate); // Static data in the keyed lookup cache. 
- static ExternalReference keyed_lookup_cache_keys(); - static ExternalReference keyed_lookup_cache_field_offsets(); + static ExternalReference keyed_lookup_cache_keys(Isolate* isolate); + static ExternalReference keyed_lookup_cache_field_offsets(Isolate* isolate); // Static variable Factory::the_hole_value.location() - static ExternalReference the_hole_value_location(); + static ExternalReference the_hole_value_location(Isolate* isolate); // Static variable Factory::arguments_marker.location() - static ExternalReference arguments_marker_location(); + static ExternalReference arguments_marker_location(Isolate* isolate); // Static variable Heap::roots_address() - static ExternalReference roots_address(); + static ExternalReference roots_address(Isolate* isolate); // Static variable StackGuard::address_of_jslimit() - static ExternalReference address_of_stack_limit(); + static ExternalReference address_of_stack_limit(Isolate* isolate); // Static variable StackGuard::address_of_real_jslimit() - static ExternalReference address_of_real_stack_limit(); + static ExternalReference address_of_real_stack_limit(Isolate* isolate); // Static variable RegExpStack::limit_address() - static ExternalReference address_of_regexp_stack_limit(); + static ExternalReference address_of_regexp_stack_limit(Isolate* isolate); // Static variables for RegExp. 
- static ExternalReference address_of_static_offsets_vector(); - static ExternalReference address_of_regexp_stack_memory_address(); - static ExternalReference address_of_regexp_stack_memory_size(); + static ExternalReference address_of_static_offsets_vector(Isolate* isolate); + static ExternalReference address_of_regexp_stack_memory_address( + Isolate* isolate); + static ExternalReference address_of_regexp_stack_memory_size( + Isolate* isolate); // Static variable Heap::NewSpaceStart() - static ExternalReference new_space_start(); - static ExternalReference new_space_mask(); - static ExternalReference heap_always_allocate_scope_depth(); + static ExternalReference new_space_start(Isolate* isolate); + static ExternalReference new_space_mask(Isolate* isolate); + static ExternalReference heap_always_allocate_scope_depth(Isolate* isolate); // Used for fast allocation in generated code. - static ExternalReference new_space_allocation_top_address(); - static ExternalReference new_space_allocation_limit_address(); + static ExternalReference new_space_allocation_top_address(Isolate* isolate); + static ExternalReference new_space_allocation_limit_address(Isolate* isolate); - static ExternalReference double_fp_operation(Token::Value operation); - static ExternalReference compare_doubles(); - static ExternalReference power_double_double_function(); - static ExternalReference power_double_int_function(); + static ExternalReference double_fp_operation(Token::Value operation, + Isolate* isolate); + static ExternalReference compare_doubles(Isolate* isolate); + static ExternalReference power_double_double_function(Isolate* isolate); + static ExternalReference power_double_int_function(Isolate* isolate); static ExternalReference handle_scope_next_address(); static ExternalReference handle_scope_limit_address(); static ExternalReference handle_scope_level_address(); - static ExternalReference scheduled_exception_address(); + static ExternalReference 
scheduled_exception_address(Isolate* isolate); // Static variables containing common double constants. static ExternalReference address_of_min_int(); @@ -595,31 +612,31 @@ class ExternalReference BASE_EMBEDDED { static ExternalReference address_of_negative_infinity(); static ExternalReference address_of_nan(); - static ExternalReference math_sin_double_function(); - static ExternalReference math_cos_double_function(); - static ExternalReference math_log_double_function(); + static ExternalReference math_sin_double_function(Isolate* isolate); + static ExternalReference math_cos_double_function(Isolate* isolate); + static ExternalReference math_log_double_function(Isolate* isolate); Address address() const {return reinterpret_cast
(address_);} #ifdef ENABLE_DEBUGGER_SUPPORT // Function Debug::Break() - static ExternalReference debug_break(); + static ExternalReference debug_break(Isolate* isolate); // Used to check if single stepping is enabled in generated code. - static ExternalReference debug_step_in_fp_address(); + static ExternalReference debug_step_in_fp_address(Isolate* isolate); #endif #ifndef V8_INTERPRETED_REGEXP // C functions called from RegExp generated code. // Function NativeRegExpMacroAssembler::CaseInsensitiveCompareUC16() - static ExternalReference re_case_insensitive_compare_uc16(); + static ExternalReference re_case_insensitive_compare_uc16(Isolate* isolate); // Function RegExpMacroAssembler*::CheckStackGuardState() - static ExternalReference re_check_stack_guard_state(); + static ExternalReference re_check_stack_guard_state(Isolate* isolate); // Function NativeRegExpMacroAssembler::GrowStack() - static ExternalReference re_grow_stack(); + static ExternalReference re_grow_stack(Isolate* isolate); // byte NativeRegExpMacroAssembler::word_character_bitmap static ExternalReference re_word_character_map(); @@ -639,21 +656,23 @@ class ExternalReference BASE_EMBEDDED { explicit ExternalReference(void* address) : address_(address) {} - static void* Redirect(void* address, + static void* Redirect(Isolate* isolate, + void* address, Type type = ExternalReference::BUILTIN_CALL) { ExternalReferenceRedirector* redirector = reinterpret_cast( - Isolate::Current()->external_reference_redirector()); + isolate->external_reference_redirector()); if (redirector == NULL) return address; void* answer = (*redirector)(address, type); return answer; } - static void* Redirect(Address address_arg, + static void* Redirect(Isolate* isolate, + Address address_arg, Type type = ExternalReference::BUILTIN_CALL) { ExternalReferenceRedirector* redirector = reinterpret_cast( - Isolate::Current()->external_reference_redirector()); + isolate->external_reference_redirector()); void* address = 
reinterpret_cast(address_arg); void* answer = (redirector == NULL) ? address : diff --git a/src/ia32/assembler-ia32.cc b/src/ia32/assembler-ia32.cc index b323f09..311ac39 100644 --- a/src/ia32/assembler-ia32.cc +++ b/src/ia32/assembler-ia32.cc @@ -297,17 +297,17 @@ static void InitCoverageLog(); #endif Assembler::Assembler(void* buffer, int buffer_size) - : positions_recorder_(this), + : AssemblerBase(Isolate::Current()), + positions_recorder_(this), emit_debug_code_(FLAG_debug_code) { - Isolate* isolate = Isolate::Current(); if (buffer == NULL) { // Do our own buffer management. if (buffer_size <= kMinimalBufferSize) { buffer_size = kMinimalBufferSize; - if (isolate->assembler_spare_buffer() != NULL) { - buffer = isolate->assembler_spare_buffer(); - isolate->set_assembler_spare_buffer(NULL); + if (isolate()->assembler_spare_buffer() != NULL) { + buffer = isolate()->assembler_spare_buffer(); + isolate()->set_assembler_spare_buffer(NULL); } } if (buffer == NULL) { @@ -347,11 +347,10 @@ Assembler::Assembler(void* buffer, int buffer_size) Assembler::~Assembler() { - Isolate* isolate = Isolate::Current(); if (own_buffer_) { - if (isolate->assembler_spare_buffer() == NULL && + if (isolate()->assembler_spare_buffer() == NULL && buffer_size_ == kMinimalBufferSize) { - isolate->set_assembler_spare_buffer(buffer_); + isolate()->set_assembler_spare_buffer(buffer_); } else { DeleteArray(buffer_); } @@ -388,7 +387,7 @@ void Assembler::CodeTargetAlign() { void Assembler::cpuid() { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(CPUID)); + ASSERT(isolate()->cpu_features()->IsEnabled(CPUID)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0x0F); @@ -749,7 +748,7 @@ void Assembler::movzx_w(Register dst, const Operand& src) { void Assembler::cmov(Condition cc, Register dst, int32_t imm32) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(CMOV)); + ASSERT(isolate()->cpu_features()->IsEnabled(CMOV)); EnsureSpace ensure_space(this); last_pc_ = pc_; UNIMPLEMENTED(); 
@@ -760,7 +759,7 @@ void Assembler::cmov(Condition cc, Register dst, int32_t imm32) { void Assembler::cmov(Condition cc, Register dst, Handle handle) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(CMOV)); + ASSERT(isolate()->cpu_features()->IsEnabled(CMOV)); EnsureSpace ensure_space(this); last_pc_ = pc_; UNIMPLEMENTED(); @@ -771,7 +770,7 @@ void Assembler::cmov(Condition cc, Register dst, Handle handle) { void Assembler::cmov(Condition cc, Register dst, const Operand& src) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(CMOV)); + ASSERT(isolate()->cpu_features()->IsEnabled(CMOV)); EnsureSpace ensure_space(this); last_pc_ = pc_; // Opcode: 0f 40 + cc /r. @@ -1452,7 +1451,7 @@ void Assembler::nop() { void Assembler::rdtsc() { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(RDTSC)); + ASSERT(isolate()->cpu_features()->IsEnabled(RDTSC)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0x0F); @@ -1858,7 +1857,7 @@ void Assembler::fistp_s(const Operand& adr) { void Assembler::fisttp_s(const Operand& adr) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE3)); + ASSERT(isolate()->cpu_features()->IsEnabled(SSE3)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0xDB); @@ -1867,7 +1866,7 @@ void Assembler::fisttp_s(const Operand& adr) { void Assembler::fisttp_d(const Operand& adr) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE3)); + ASSERT(isolate()->cpu_features()->IsEnabled(SSE3)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0xDD); @@ -2136,7 +2135,7 @@ void Assembler::setcc(Condition cc, Register reg) { void Assembler::cvttss2si(Register dst, const Operand& src) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2)); + ASSERT(isolate()->cpu_features()->IsEnabled(SSE2)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0xF3); @@ -2147,7 +2146,7 @@ void Assembler::cvttss2si(Register dst, const Operand& src) { void Assembler::cvttsd2si(Register dst, const Operand& src) { - 
ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2)); + ASSERT(isolate()->cpu_features()->IsEnabled(SSE2)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0xF2); @@ -2158,7 +2157,7 @@ void Assembler::cvttsd2si(Register dst, const Operand& src) { void Assembler::cvtsi2sd(XMMRegister dst, const Operand& src) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2)); + ASSERT(isolate()->cpu_features()->IsEnabled(SSE2)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0xF2); @@ -2169,7 +2168,7 @@ void Assembler::cvtsi2sd(XMMRegister dst, const Operand& src) { void Assembler::cvtss2sd(XMMRegister dst, XMMRegister src) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2)); + ASSERT(isolate()->cpu_features()->IsEnabled(SSE2)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0xF3); @@ -2180,7 +2179,7 @@ void Assembler::cvtss2sd(XMMRegister dst, XMMRegister src) { void Assembler::addsd(XMMRegister dst, XMMRegister src) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2)); + ASSERT(isolate()->cpu_features()->IsEnabled(SSE2)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0xF2); @@ -2191,7 +2190,7 @@ void Assembler::addsd(XMMRegister dst, XMMRegister src) { void Assembler::mulsd(XMMRegister dst, XMMRegister src) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2)); + ASSERT(isolate()->cpu_features()->IsEnabled(SSE2)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0xF2); @@ -2202,7 +2201,7 @@ void Assembler::mulsd(XMMRegister dst, XMMRegister src) { void Assembler::subsd(XMMRegister dst, XMMRegister src) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2)); + ASSERT(isolate()->cpu_features()->IsEnabled(SSE2)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0xF2); @@ -2213,7 +2212,7 @@ void Assembler::subsd(XMMRegister dst, XMMRegister src) { void Assembler::divsd(XMMRegister dst, XMMRegister src) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2)); + 
ASSERT(isolate()->cpu_features()->IsEnabled(SSE2)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0xF2); @@ -2224,7 +2223,7 @@ void Assembler::divsd(XMMRegister dst, XMMRegister src) { void Assembler::xorpd(XMMRegister dst, XMMRegister src) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2)); + ASSERT(isolate()->cpu_features()->IsEnabled(SSE2)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0x66); @@ -2255,7 +2254,7 @@ void Assembler::andpd(XMMRegister dst, XMMRegister src) { void Assembler::ucomisd(XMMRegister dst, XMMRegister src) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2)); + ASSERT(isolate()->cpu_features()->IsEnabled(SSE2)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0x66); @@ -2266,7 +2265,7 @@ void Assembler::ucomisd(XMMRegister dst, XMMRegister src) { void Assembler::movmskpd(Register dst, XMMRegister src) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2)); + ASSERT(isolate()->cpu_features()->IsEnabled(SSE2)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0x66); @@ -2277,7 +2276,7 @@ void Assembler::movmskpd(Register dst, XMMRegister src) { void Assembler::cmpltsd(XMMRegister dst, XMMRegister src) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2)); + ASSERT(isolate()->cpu_features()->IsEnabled(SSE2)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0xF2); @@ -2289,7 +2288,7 @@ void Assembler::cmpltsd(XMMRegister dst, XMMRegister src) { void Assembler::movaps(XMMRegister dst, XMMRegister src) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2)); + ASSERT(isolate()->cpu_features()->IsEnabled(SSE2)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0x0F); @@ -2299,7 +2298,7 @@ void Assembler::movaps(XMMRegister dst, XMMRegister src) { void Assembler::movdqa(const Operand& dst, XMMRegister src) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2)); + ASSERT(isolate()->cpu_features()->IsEnabled(SSE2)); EnsureSpace ensure_space(this); last_pc_ = 
pc_; EMIT(0x66); @@ -2310,7 +2309,7 @@ void Assembler::movdqa(const Operand& dst, XMMRegister src) { void Assembler::movdqa(XMMRegister dst, const Operand& src) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2)); + ASSERT(isolate()->cpu_features()->IsEnabled(SSE2)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0x66); @@ -2321,7 +2320,7 @@ void Assembler::movdqa(XMMRegister dst, const Operand& src) { void Assembler::movdqu(const Operand& dst, XMMRegister src ) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2)); + ASSERT(isolate()->cpu_features()->IsEnabled(SSE2)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0xF3); @@ -2332,7 +2331,7 @@ void Assembler::movdqu(const Operand& dst, XMMRegister src ) { void Assembler::movdqu(XMMRegister dst, const Operand& src) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2)); + ASSERT(isolate()->cpu_features()->IsEnabled(SSE2)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0xF3); @@ -2343,7 +2342,7 @@ void Assembler::movdqu(XMMRegister dst, const Operand& src) { void Assembler::movntdqa(XMMRegister dst, const Operand& src) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE4_1)); + ASSERT(isolate()->cpu_features()->IsEnabled(SSE4_1)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0x66); @@ -2355,7 +2354,7 @@ void Assembler::movntdqa(XMMRegister dst, const Operand& src) { void Assembler::movntdq(const Operand& dst, XMMRegister src) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2)); + ASSERT(isolate()->cpu_features()->IsEnabled(SSE2)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0x66); @@ -2391,7 +2390,7 @@ void Assembler::movdbl(const Operand& dst, XMMRegister src) { void Assembler::movsd(const Operand& dst, XMMRegister src ) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2)); + ASSERT(isolate()->cpu_features()->IsEnabled(SSE2)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0xF2); // double @@ -2402,7 +2401,7 @@ 
void Assembler::movsd(const Operand& dst, XMMRegister src ) { void Assembler::movsd(XMMRegister dst, const Operand& src) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2)); + ASSERT(isolate()->cpu_features()->IsEnabled(SSE2)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0xF2); // double @@ -2413,7 +2412,7 @@ void Assembler::movsd(XMMRegister dst, const Operand& src) { void Assembler::movsd(XMMRegister dst, XMMRegister src) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2)); + ASSERT(isolate()->cpu_features()->IsEnabled(SSE2)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0xF2); @@ -2424,7 +2423,7 @@ void Assembler::movsd(XMMRegister dst, XMMRegister src) { void Assembler::movd(XMMRegister dst, const Operand& src) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2)); + ASSERT(isolate()->cpu_features()->IsEnabled(SSE2)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0x66); @@ -2435,7 +2434,7 @@ void Assembler::movd(XMMRegister dst, const Operand& src) { void Assembler::movd(const Operand& dst, XMMRegister src) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2)); + ASSERT(isolate()->cpu_features()->IsEnabled(SSE2)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0x66); @@ -2446,7 +2445,7 @@ void Assembler::movd(const Operand& dst, XMMRegister src) { void Assembler::pand(XMMRegister dst, XMMRegister src) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2)); + ASSERT(isolate()->cpu_features()->IsEnabled(SSE2)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0x66); @@ -2457,7 +2456,7 @@ void Assembler::pand(XMMRegister dst, XMMRegister src) { void Assembler::pxor(XMMRegister dst, XMMRegister src) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2)); + ASSERT(isolate()->cpu_features()->IsEnabled(SSE2)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0x66); @@ -2468,7 +2467,7 @@ void Assembler::pxor(XMMRegister dst, XMMRegister src) { void 
Assembler::por(XMMRegister dst, XMMRegister src) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2)); + ASSERT(isolate()->cpu_features()->IsEnabled(SSE2)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0x66); @@ -2479,7 +2478,7 @@ void Assembler::por(XMMRegister dst, XMMRegister src) { void Assembler::ptest(XMMRegister dst, XMMRegister src) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE4_1)); + ASSERT(isolate()->cpu_features()->IsEnabled(SSE4_1)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0x66); @@ -2491,7 +2490,7 @@ void Assembler::ptest(XMMRegister dst, XMMRegister src) { void Assembler::psllq(XMMRegister reg, int8_t shift) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2)); + ASSERT(isolate()->cpu_features()->IsEnabled(SSE2)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0x66); @@ -2503,7 +2502,7 @@ void Assembler::psllq(XMMRegister reg, int8_t shift) { void Assembler::psllq(XMMRegister dst, XMMRegister src) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2)); + ASSERT(isolate()->cpu_features()->IsEnabled(SSE2)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0x66); @@ -2514,7 +2513,7 @@ void Assembler::psllq(XMMRegister dst, XMMRegister src) { void Assembler::psrlq(XMMRegister reg, int8_t shift) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2)); + ASSERT(isolate()->cpu_features()->IsEnabled(SSE2)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0x66); @@ -2526,7 +2525,7 @@ void Assembler::psrlq(XMMRegister reg, int8_t shift) { void Assembler::psrlq(XMMRegister dst, XMMRegister src) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2)); + ASSERT(isolate()->cpu_features()->IsEnabled(SSE2)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0x66); @@ -2537,7 +2536,7 @@ void Assembler::psrlq(XMMRegister dst, XMMRegister src) { void Assembler::pshufd(XMMRegister dst, XMMRegister src, int8_t shuffle) { - 
ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2)); + ASSERT(isolate()->cpu_features()->IsEnabled(SSE2)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0x66); @@ -2549,7 +2548,7 @@ void Assembler::pshufd(XMMRegister dst, XMMRegister src, int8_t shuffle) { void Assembler::pextrd(const Operand& dst, XMMRegister src, int8_t offset) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE4_1)); + ASSERT(isolate()->cpu_features()->IsEnabled(SSE4_1)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0x66); @@ -2562,7 +2561,7 @@ void Assembler::pextrd(const Operand& dst, XMMRegister src, int8_t offset) { void Assembler::pinsrd(XMMRegister dst, const Operand& src, int8_t offset) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE4_1)); + ASSERT(isolate()->cpu_features()->IsEnabled(SSE4_1)); EnsureSpace ensure_space(this); last_pc_ = pc_; EMIT(0x66); @@ -2618,7 +2617,6 @@ void Assembler::RecordComment(const char* msg, bool force) { void Assembler::GrowBuffer() { - Isolate* isolate = Isolate::Current(); ASSERT(overflow()); if (!own_buffer_) FATAL("external code buffer is too small"); @@ -2655,9 +2653,9 @@ void Assembler::GrowBuffer() { reloc_info_writer.pos(), desc.reloc_size); // Switch buffers. - if (isolate->assembler_spare_buffer() == NULL && + if (isolate()->assembler_spare_buffer() == NULL && buffer_size_ == kMinimalBufferSize) { - isolate->set_assembler_spare_buffer(buffer_); + isolate()->set_assembler_spare_buffer(buffer_); } else { DeleteArray(buffer_); } diff --git a/src/ia32/assembler-ia32.h b/src/ia32/assembler-ia32.h index 4bcc1ec..19d879b 100644 --- a/src/ia32/assembler-ia32.h +++ b/src/ia32/assembler-ia32.h @@ -508,7 +508,7 @@ class CpuFeatures { }; -class Assembler : public Malloced { +class Assembler : public AssemblerBase { private: // We check before assembling an instruction that there is sufficient // space to write an instruction and its relocation information. 
@@ -1010,7 +1010,8 @@ class Assembler : public Malloced { void emit_sse_operand(XMMRegister dst, XMMRegister src); void emit_sse_operand(Register dst, XMMRegister src); - byte* addr_at(int pos) { return buffer_ + pos; } + byte* addr_at(int pos) { return buffer_ + pos; } + private: byte byte_at(int pos) { return buffer_[pos]; } void set_byte_at(int pos, byte value) { buffer_[pos] = value; } diff --git a/src/ia32/builtins-ia32.cc b/src/ia32/builtins-ia32.cc index 09cbe76..cb41646 100644 --- a/src/ia32/builtins-ia32.cc +++ b/src/ia32/builtins-ia32.cc @@ -70,7 +70,7 @@ void Builtins::Generate_Adaptor(MacroAssembler* masm, // JumpToExternalReference expects eax to contain the number of arguments // including the receiver and the extra arguments. __ add(Operand(eax), Immediate(num_extra_args + 1)); - __ JumpToExternalReference(ExternalReference(id)); + __ JumpToExternalReference(ExternalReference(id, masm->isolate())); } @@ -100,7 +100,7 @@ void Builtins::Generate_JSConstructCall(MacroAssembler* masm) { // Set expected number of arguments to zero (not changing eax). 
__ Set(ebx, Immediate(0)); __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR); - __ jmp(Handle(Isolate::Current()->builtins()->builtin( + __ jmp(Handle(masm->isolate()->builtins()->builtin( ArgumentsAdaptorTrampoline)), RelocInfo::CODE_TARGET); } @@ -128,7 +128,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, Label undo_allocation; #ifdef ENABLE_DEBUGGER_SUPPORT ExternalReference debug_step_in_fp = - ExternalReference::debug_step_in_fp_address(); + ExternalReference::debug_step_in_fp_address(masm->isolate()); __ cmp(Operand::StaticVariable(debug_step_in_fp), Immediate(0)); __ j(not_equal, &rt_call); #endif @@ -335,8 +335,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, if (is_api_function) { __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset)); Handle code = Handle( - Isolate::Current()->builtins()->builtin( - Builtins::HandleApiCallConstruct)); + masm->isolate()->builtins()->builtin(Builtins::HandleApiCallConstruct)); ParameterCount expected(0); __ InvokeCode(code, expected, expected, RelocInfo::CODE_TARGET, CALL_FUNCTION); @@ -437,7 +436,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, // Invoke the code. 
if (is_construct) { - __ call(Handle(Isolate::Current()->builtins()->builtin( + __ call(Handle(masm->isolate()->builtins()->builtin( Builtins::JSConstructCall)), RelocInfo::CODE_TARGET); } else { ParameterCount actual(eax); @@ -675,7 +674,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) { __ j(not_zero, &function, taken); __ Set(ebx, Immediate(0)); __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION); - __ jmp(Handle(Isolate::Current()->builtins()->builtin( + __ jmp(Handle(masm->isolate()->builtins()->builtin( ArgumentsAdaptorTrampoline)), RelocInfo::CODE_TARGET); __ bind(&function); } @@ -689,7 +688,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) { __ mov(edx, FieldOperand(edi, JSFunction::kCodeEntryOffset)); __ SmiUntag(ebx); __ cmp(eax, Operand(ebx)); - __ j(not_equal, Handle(Isolate::Current()->builtins()->builtin( + __ j(not_equal, Handle(masm->isolate()->builtins()->builtin( ArgumentsAdaptorTrampoline))); ParameterCount expected(0); @@ -709,7 +708,7 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) { // limit" is checked. Label okay; ExternalReference real_stack_limit = - ExternalReference::address_of_real_stack_limit(); + ExternalReference::address_of_real_stack_limit(masm->isolate()); __ mov(edi, Operand::StaticVariable(real_stack_limit)); // Make ecx the space we have left. The stack might already be overflowed // here which will cause ecx to become negative. @@ -797,7 +796,7 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) { __ mov(edx, Operand(ebp, 2 * kPointerSize)); // load arguments // Use inline caching to speed up access to arguments. 
- Handle ic(Isolate::Current()->builtins()->builtin( + Handle ic(masm->isolate()->builtins()->builtin( Builtins::KeyedLoadIC_Initialize)); __ call(ic, RelocInfo::CODE_TARGET); // It is important that we do not have a test instruction after the @@ -1235,8 +1234,7 @@ void Builtins::Generate_ArrayCode(MacroAssembler* masm) { // Jump to the generic array code in case the specialized code cannot handle // the construction. __ bind(&generic_array_code); - Code* code = Isolate::Current()->builtins()->builtin( - Builtins::ArrayCodeGeneric); + Code* code = masm->isolate()->builtins()->builtin(Builtins::ArrayCodeGeneric); Handle array_code(code); __ jmp(array_code, RelocInfo::CODE_TARGET); } @@ -1270,7 +1268,7 @@ void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) { // Jump to the generic construct code in case the specialized code cannot // handle the construction. __ bind(&generic_constructor); - Code* code = Isolate::Current()->builtins()->builtin( + Code* code = masm->isolate()->builtins()->builtin( Builtins::JSConstructStubGeneric); Handle generic_construct_stub(code); __ jmp(generic_construct_stub, RelocInfo::CODE_TARGET); @@ -1520,7 +1518,7 @@ void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { // We shouldn't be performing on-stack replacement in the first // place if the CPU features we need for the optimized Crankshaft // code aren't supported. 
- CpuFeatures* cpu_features = Isolate::Current()->cpu_features(); + CpuFeatures* cpu_features = masm->isolate()->cpu_features(); cpu_features->Probe(false); if (!cpu_features->IsSupported(SSE2)) { __ Abort("Unreachable code: Cannot optimize without SSE2 support."); @@ -1566,7 +1564,7 @@ void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { __ bind(&stack_check); NearLabel ok; ExternalReference stack_limit = - ExternalReference::address_of_stack_limit(); + ExternalReference::address_of_stack_limit(masm->isolate()); __ cmp(esp, Operand::StaticVariable(stack_limit)); __ j(above_equal, &ok, taken); StackCheckStub stub; diff --git a/src/ia32/code-stubs-ia32.cc b/src/ia32/code-stubs-ia32.cc index c8a1b37..eaff1b8 100644 --- a/src/ia32/code-stubs-ia32.cc +++ b/src/ia32/code-stubs-ia32.cc @@ -763,7 +763,7 @@ void GenericBinaryOpStub::GenerateSmiCode(MacroAssembler* masm, Label* slow) { // number in eax. __ AllocateHeapNumber(eax, ecx, ebx, slow); // Store the result in the HeapNumber and return. 
- if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) { + if (masm->isolate()->cpu_features()->IsSupported(SSE2)) { CpuFeatures::Scope use_sse2(SSE2); __ cvtsi2sd(xmm0, Operand(left)); __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0); @@ -813,7 +813,7 @@ void GenericBinaryOpStub::GenerateSmiCode(MacroAssembler* masm, Label* slow) { } if (runtime_operands_type_ != BinaryOpIC::UNINIT_OR_SMI) { __ AllocateHeapNumber(ecx, ebx, no_reg, slow); - if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) { + if (masm->isolate()->cpu_features()->IsSupported(SSE2)) { CpuFeatures::Scope use_sse2(SSE2); FloatingPointHelper::LoadSSE2Smis(masm, ebx); switch (op_) { @@ -917,7 +917,7 @@ void GenericBinaryOpStub::Generate(MacroAssembler* masm) { } Label not_floats; - if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) { + if (masm->isolate()->cpu_features()->IsSupported(SSE2)) { CpuFeatures::Scope use_sse2(SSE2); if (static_operands_type_.IsNumber()) { if (FLAG_debug_code) { @@ -1051,7 +1051,7 @@ void GenericBinaryOpStub::Generate(MacroAssembler* masm) { default: UNREACHABLE(); } // Store the result in the HeapNumber and return. - if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) { + if (masm->isolate()->cpu_features()->IsSupported(SSE2)) { CpuFeatures::Scope use_sse2(SSE2); __ cvtsi2sd(xmm0, Operand(ebx)); __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0); @@ -1269,7 +1269,7 @@ void GenericBinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { // Patch the caller to an appropriate specialized stub and return the // operation result to the caller of the stub. 
__ TailCallExternalReference( - ExternalReference(IC_Utility(IC::kBinaryOp_Patch)), + ExternalReference(IC_Utility(IC::kBinaryOp_Patch), masm->isolate()), 5, 1); } @@ -1305,7 +1305,8 @@ void TypeRecordingBinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { // Patch the caller to an appropriate specialized stub and return the // operation result to the caller of the stub. __ TailCallExternalReference( - ExternalReference(IC_Utility(IC::kTypeRecordingBinaryOp_Patch)), + ExternalReference(IC_Utility(IC::kTypeRecordingBinaryOp_Patch), + masm->isolate()), 5, 1); } @@ -1328,7 +1329,8 @@ void TypeRecordingBinaryOpStub::GenerateTypeTransitionWithSavedArgs( // Patch the caller to an appropriate specialized stub and return the // operation result to the caller of the stub. __ TailCallExternalReference( - ExternalReference(IC_Utility(IC::kTypeRecordingBinaryOp_Patch)), + ExternalReference(IC_Utility(IC::kTypeRecordingBinaryOp_Patch), + masm->isolate()), 5, 1); } @@ -1646,7 +1648,7 @@ void TypeRecordingBinaryOpStub::GenerateSmiCode(MacroAssembler* masm, // number in eax. __ AllocateHeapNumber(eax, ecx, ebx, slow); // Store the result in the HeapNumber and return. 
- if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) { + if (masm->isolate()->cpu_features()->IsSupported(SSE2)) { CpuFeatures::Scope use_sse2(SSE2); __ cvtsi2sd(xmm0, Operand(left)); __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0); @@ -1691,7 +1693,7 @@ void TypeRecordingBinaryOpStub::GenerateSmiCode(MacroAssembler* masm, break; } __ AllocateHeapNumber(ecx, ebx, no_reg, slow); - if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) { + if (masm->isolate()->cpu_features()->IsSupported(SSE2)) { CpuFeatures::Scope use_sse2(SSE2); FloatingPointHelper::LoadSSE2Smis(masm, ebx); switch (op_) { @@ -1823,7 +1825,7 @@ void TypeRecordingBinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) { case Token::DIV: { Label not_floats; Label not_int32; - if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) { + if (masm->isolate()->cpu_features()->IsSupported(SSE2)) { CpuFeatures::Scope use_sse2(SSE2); FloatingPointHelper::LoadSSE2Operands(masm, ¬_floats); FloatingPointHelper::CheckSSE2OperandsAreInt32(masm, ¬_int32, ecx); @@ -1944,7 +1946,7 @@ void TypeRecordingBinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) { default: UNREACHABLE(); } // Store the result in the HeapNumber and return. 
- if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) { + if (masm->isolate()->cpu_features()->IsSupported(SSE2)) { CpuFeatures::Scope use_sse2(SSE2); __ cvtsi2sd(xmm0, Operand(ebx)); __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0); @@ -2024,7 +2026,7 @@ void TypeRecordingBinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) { case Token::MUL: case Token::DIV: { Label not_floats; - if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) { + if (masm->isolate()->cpu_features()->IsSupported(SSE2)) { CpuFeatures::Scope use_sse2(SSE2); FloatingPointHelper::LoadSSE2Operands(masm, ¬_floats); @@ -2127,7 +2129,7 @@ void TypeRecordingBinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) { default: UNREACHABLE(); } // Store the result in the HeapNumber and return. - if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) { + if (masm->isolate()->cpu_features()->IsSupported(SSE2)) { CpuFeatures::Scope use_sse2(SSE2); __ cvtsi2sd(xmm0, Operand(ebx)); __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0); @@ -2228,7 +2230,7 @@ void TypeRecordingBinaryOpStub::GenerateGeneric(MacroAssembler* masm) { case Token::MUL: case Token::DIV: { Label not_floats; - if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) { + if (masm->isolate()->cpu_features()->IsSupported(SSE2)) { CpuFeatures::Scope use_sse2(SSE2); FloatingPointHelper::LoadSSE2Operands(masm, ¬_floats); @@ -2326,7 +2328,7 @@ void TypeRecordingBinaryOpStub::GenerateGeneric(MacroAssembler* masm) { default: UNREACHABLE(); } // Store the result in the HeapNumber and return. - if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) { + if (masm->isolate()->cpu_features()->IsSupported(SSE2)) { CpuFeatures::Scope use_sse2(SSE2); __ cvtsi2sd(xmm0, Operand(ebx)); __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0); @@ -2524,7 +2526,7 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) { __ bind(&loaded); } else { // UNTAGGED. 
- if (Isolate::Current()->cpu_features()->IsSupported(SSE4_1)) { + if (masm->isolate()->cpu_features()->IsSupported(SSE4_1)) { CpuFeatures::Scope sse4_scope(SSE4_1); __ pextrd(Operand(edx), xmm1, 0x1); // copy xmm1[63..32] to edx. } else { @@ -2555,11 +2557,12 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) { // ebx = low 32 bits of double value. // edx = high 32 bits of double value. // ecx = TranscendentalCache::hash(double value). - __ mov(eax, - Immediate(ExternalReference::transcendental_cache_array_address())); - // Eax points to cache array. - __ mov(eax, Operand(eax, type_ * sizeof( - Isolate::Current()->transcendental_cache()->caches_[0]))); + ExternalReference cache_array = + ExternalReference::transcendental_cache_array_address(masm->isolate()); + __ mov(eax, Immediate(cache_array)); + int cache_array_index = + type_ * sizeof(masm->isolate()->transcendental_cache()->caches_[0]); + __ mov(eax, Operand(eax, cache_array_index)); // Eax points to the cache for the type type_. // If NULL, the cache hasn't been initialized yet, so go through runtime. __ test(eax, Operand(eax)); @@ -2645,7 +2648,9 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) { __ bind(&runtime_call_clear_stack); __ fstp(0); __ bind(&runtime_call); - __ TailCallExternalReference(ExternalReference(RuntimeFunction()), 1, 1); + ExternalReference runtime = + ExternalReference(RuntimeFunction(), masm->isolate()); + __ TailCallExternalReference(runtime, 1, 1); } else { // UNTAGGED. 
__ bind(&runtime_call_clear_stack); __ bind(&runtime_call); @@ -2776,7 +2781,7 @@ void IntegerConvert(MacroAssembler* masm, Register scratch = ebx; Register scratch2 = edi; if (type_info.IsInteger32() && - Isolate::Current()->cpu_features()->IsEnabled(SSE2)) { + masm->isolate()->cpu_features()->IsEnabled(SSE2)) { CpuFeatures::Scope scope(SSE2); __ cvttsd2si(ecx, FieldOperand(source, HeapNumber::kValueOffset)); return; @@ -3321,7 +3326,7 @@ void GenericUnaryOpStub::Generate(MacroAssembler* masm) { IntegerConvert(masm, eax, TypeInfo::Unknown(), - Isolate::Current()->cpu_features()->IsSupported(SSE3), + masm->isolate()->cpu_features()->IsSupported(SSE3), &slow); // Do the bitwise operation and check if the result fits in a smi. @@ -3344,7 +3349,7 @@ void GenericUnaryOpStub::Generate(MacroAssembler* masm) { __ AllocateHeapNumber(ebx, edx, edi, &slow); __ mov(eax, Operand(ebx)); } - if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) { + if (masm->isolate()->cpu_features()->IsSupported(SSE2)) { CpuFeatures::Scope use_sse2(SSE2); __ cvtsi2sd(xmm0, Operand(ecx)); __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0); @@ -3738,9 +3743,10 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { // Ensure that a RegExp stack is allocated. ExternalReference address_of_regexp_stack_memory_address = - ExternalReference::address_of_regexp_stack_memory_address(); + ExternalReference::address_of_regexp_stack_memory_address( + masm->isolate()); ExternalReference address_of_regexp_stack_memory_size = - ExternalReference::address_of_regexp_stack_memory_size(); + ExternalReference::address_of_regexp_stack_memory_size(masm->isolate()); __ mov(ebx, Operand::StaticVariable(address_of_regexp_stack_memory_size)); __ test(ebx, Operand(ebx)); __ j(zero, &runtime, not_taken); @@ -3920,7 +3926,8 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { // Argument 5: static offsets vector buffer. 
__ mov(Operand(esp, 4 * kPointerSize), - Immediate(ExternalReference::address_of_static_offsets_vector())); + Immediate(ExternalReference::address_of_static_offsets_vector( + masm->isolate()))); // Argument 4: End of string data // Argument 3: Start of string data @@ -3972,9 +3979,11 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { // stack overflow (on the backtrack stack) was detected in RegExp code but // haven't created the exception yet. Handle that in the runtime system. // TODO(592): Rerunning the RegExp to get the stack overflow exception. - ExternalReference pending_exception(Isolate::k_pending_exception_address); + ExternalReference pending_exception(Isolate::k_pending_exception_address, + masm->isolate()); __ mov(edx, - Operand::StaticVariable(ExternalReference::the_hole_value_location())); + Operand::StaticVariable(ExternalReference::the_hole_value_location( + masm->isolate()))); __ mov(eax, Operand::StaticVariable(pending_exception)); __ cmp(edx, Operand(eax)); __ j(equal, &runtime); @@ -4033,7 +4042,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { // Get the static offsets vector filled by the native regexp code. ExternalReference address_of_static_offsets_vector = - ExternalReference::address_of_static_offsets_vector(); + ExternalReference::address_of_static_offsets_vector(masm->isolate()); __ mov(ecx, Immediate(address_of_static_offsets_vector)); // ebx: last_match_info backing store (FixedArray) @@ -4162,7 +4171,8 @@ void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm, Register scratch = scratch2; // Load the number string cache. 
- ExternalReference roots_address = ExternalReference::roots_address(); + ExternalReference roots_address = + ExternalReference::roots_address(masm->isolate()); __ mov(scratch, Immediate(Heap::kNumberStringCacheRootIndex)); __ mov(number_string_cache, Operand::StaticArray(scratch, times_pointer_size, roots_address)); @@ -4207,7 +4217,7 @@ void NumberToStringStub::GenerateLookupNumberStringCache(MacroAssembler* masm, FixedArray::kHeaderSize)); __ test(probe, Immediate(kSmiTagMask)); __ j(zero, not_found); - if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) { + if (masm->isolate()->cpu_features()->IsSupported(SSE2)) { CpuFeatures::Scope fscope(SSE2); __ movdbl(xmm0, FieldOperand(object, HeapNumber::kValueOffset)); __ movdbl(xmm1, FieldOperand(probe, HeapNumber::kValueOffset)); @@ -4445,7 +4455,7 @@ void CompareStub::Generate(MacroAssembler* masm) { if (include_number_compare_) { Label non_number_comparison; Label unordered; - if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) { + if (masm->isolate()->cpu_features()->IsSupported(SSE2)) { CpuFeatures::Scope use_sse2(SSE2); CpuFeatures::Scope use_cmov(CMOV); @@ -4664,7 +4674,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) { __ Set(eax, Immediate(argc_)); __ Set(ebx, Immediate(0)); __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION); - Handle adaptor(Isolate::Current()->builtins()->builtin( + Handle adaptor(masm->isolate()->builtins()->builtin( Builtins::ArgumentsAdaptorTrampoline)); __ jmp(adaptor, RelocInfo::CODE_TARGET); } @@ -4711,7 +4721,7 @@ void CEntryStub::GenerateCore(MacroAssembler* masm, } ExternalReference scope_depth = - ExternalReference::heap_always_allocate_scope_depth(); + ExternalReference::heap_always_allocate_scope_depth(masm->isolate()); if (always_allocate_scope) { __ inc(Operand::StaticVariable(scope_depth)); } @@ -4747,14 +4757,14 @@ void CEntryStub::GenerateCore(MacroAssembler* masm, __ j(zero, &failure_returned, not_taken); ExternalReference 
pending_exception_address( - Isolate::k_pending_exception_address); + Isolate::k_pending_exception_address, masm->isolate()); // Check that there is no pending exception, otherwise we // should have returned some failure value. if (FLAG_debug_code) { __ push(edx); __ mov(edx, Operand::StaticVariable( - ExternalReference::the_hole_value_location())); + ExternalReference::the_hole_value_location(masm->isolate()))); NearLabel okay; __ cmp(edx, Operand::StaticVariable(pending_exception_address)); // Cannot use check here as it attempts to generate call into runtime. @@ -4782,9 +4792,10 @@ void CEntryStub::GenerateCore(MacroAssembler* masm, __ j(equal, throw_out_of_memory_exception); // Retrieve the pending exception and clear the variable. + ExternalReference the_hole_location = + ExternalReference::the_hole_value_location(masm->isolate()); __ mov(eax, Operand::StaticVariable(pending_exception_address)); - __ mov(edx, - Operand::StaticVariable(ExternalReference::the_hole_value_location())); + __ mov(edx, Operand::StaticVariable(the_hole_location)); __ mov(Operand::StaticVariable(pending_exception_address), edx); // Special handling of termination exceptions which are uncatchable @@ -4889,12 +4900,13 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { __ push(ebx); // Save copies of the top frame descriptor on the stack. - ExternalReference c_entry_fp(Isolate::k_c_entry_fp_address); + ExternalReference c_entry_fp(Isolate::k_c_entry_fp_address, masm->isolate()); __ push(Operand::StaticVariable(c_entry_fp)); #ifdef ENABLE_LOGGING_AND_PROFILING // If this is the outermost JS call, set js_entry_sp value. 
- ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address); + ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address, + masm->isolate()); __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0)); __ j(not_equal, ¬_outermost_js); __ mov(Operand::StaticVariable(js_entry_sp), ebp); @@ -4906,7 +4918,8 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { // Caught exception: Store result (exception) in the pending // exception field in the JSEnv and return a failure sentinel. - ExternalReference pending_exception(Isolate::k_pending_exception_address); + ExternalReference pending_exception(Isolate::k_pending_exception_address, + masm->isolate()); __ mov(Operand::StaticVariable(pending_exception), eax); __ mov(eax, reinterpret_cast(Failure::Exception())); __ jmp(&exit); @@ -4916,8 +4929,9 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER); // Clear any pending exceptions. - __ mov(edx, - Operand::StaticVariable(ExternalReference::the_hole_value_location())); + ExternalReference the_hole_location = + ExternalReference::the_hole_value_location(masm->isolate()); + __ mov(edx, Operand::StaticVariable(the_hole_location)); __ mov(Operand::StaticVariable(pending_exception), edx); // Fake a receiver (NULL). @@ -4928,10 +4942,13 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { // cannot store a reference to the trampoline code directly in this // stub, because the builtin stubs may not have been generated yet. 
if (is_construct) { - ExternalReference construct_entry(Builtins::JSConstructEntryTrampoline); + ExternalReference construct_entry( + Builtins::JSConstructEntryTrampoline, + masm->isolate()); __ mov(edx, Immediate(construct_entry)); } else { - ExternalReference entry(Builtins::JSEntryTrampoline); + ExternalReference entry(Builtins::JSEntryTrampoline, + masm->isolate()); __ mov(edx, Immediate(entry)); } __ mov(edx, Operand(edx, 0)); // deref address @@ -4940,7 +4957,8 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { // Unlink this frame from the handler chain. __ pop(Operand::StaticVariable(ExternalReference( - Isolate::k_handler_address))); + Isolate::k_handler_address, + masm->isolate()))); // Pop next_sp. __ add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize)); @@ -4956,7 +4974,8 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { // Restore the top frame descriptor from the stack. __ bind(&exit); __ pop(Operand::StaticVariable(ExternalReference( - Isolate::k_c_entry_fp_address))); + Isolate::k_c_entry_fp_address, + masm->isolate()))); // Restore callee-saved registers (C calling conventions). __ pop(ebx); @@ -5005,7 +5024,8 @@ void InstanceofStub::Generate(MacroAssembler* masm) { static const int8_t kCmpEdiImmediateByte2 = BitCast(0xff); static const int8_t kMovEaxImmediateByte = BitCast(0xb8); - ExternalReference roots_address = ExternalReference::roots_address(); + ExternalReference roots_address = + ExternalReference::roots_address(masm->isolate()); ASSERT_EQ(object.code(), InstanceofStub::left().code()); ASSERT_EQ(function.code(), InstanceofStub::right().code()); @@ -5914,7 +5934,8 @@ void StringHelper::GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm, // Load the symbol table. 
Register symbol_table = c2; - ExternalReference roots_address = ExternalReference::roots_address(); + ExternalReference roots_address = + ExternalReference::roots_address(masm->isolate()); __ mov(scratch, Immediate(Heap::kSymbolTableRootIndex)); __ mov(symbol_table, Operand::StaticArray(scratch, times_pointer_size, roots_address)); @@ -6371,7 +6392,7 @@ void ICCompareStub::GenerateHeapNumbers(MacroAssembler* masm) { // Inlining the double comparison and falling back to the general compare // stub if NaN is involved or SS2 or CMOV is unsupported. - CpuFeatures* cpu_features = Isolate::Current()->cpu_features(); + CpuFeatures* cpu_features = masm->isolate()->cpu_features(); if (cpu_features->IsSupported(SSE2) && cpu_features->IsSupported(CMOV)) { CpuFeatures::Scope scope1(SSE2); CpuFeatures::Scope scope2(CMOV); @@ -6437,7 +6458,8 @@ void ICCompareStub::GenerateMiss(MacroAssembler* masm) { __ push(ecx); // Call the runtime system in a fresh internal frame. - ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss)); + ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss), + masm->isolate()); __ EnterInternalFrame(); __ push(edx); __ push(eax); diff --git a/src/ia32/codegen-ia32.cc b/src/ia32/codegen-ia32.cc index ee6f788..7e93493 100644 --- a/src/ia32/codegen-ia32.cc +++ b/src/ia32/codegen-ia32.cc @@ -183,7 +183,7 @@ void CodeGenerator::Generate(CompilationInfo* info) { ASSERT_EQ(0, loop_nesting_); loop_nesting_ = info->is_in_loop() ? 1 : 0; - Isolate::Current()->set_jump_target_compiling_deferred_code(false); + masm()->isolate()->set_jump_target_compiling_deferred_code(false); { CodeGenState state(this); @@ -556,7 +556,7 @@ void CodeGenerator::ConvertInt32ResultToNumber(Result* value) { __ sar(val, 1); // If there was an overflow, bits 30 and 31 of the original number disagree. 
__ xor_(val, 0x80000000u); - if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) { + if (masm()->isolate()->cpu_features()->IsSupported(SSE2)) { CpuFeatures::Scope fscope(SSE2); __ cvtsi2sd(xmm0, Operand(val)); } else { @@ -574,7 +574,7 @@ void CodeGenerator::ConvertInt32ResultToNumber(Result* value) { no_reg, &allocation_failed); VirtualFrame* clone = new VirtualFrame(frame_); scratch.Unuse(); - if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) { + if (masm()->isolate()->cpu_features()->IsSupported(SSE2)) { CpuFeatures::Scope fscope(SSE2); __ movdbl(FieldOperand(val, HeapNumber::kValueOffset), xmm0); } else { @@ -587,7 +587,7 @@ void CodeGenerator::ConvertInt32ResultToNumber(Result* value) { RegisterFile empty_regs; SetFrame(clone, &empty_regs); __ bind(&allocation_failed); - if (!Isolate::Current()->cpu_features()->IsSupported(SSE2)) { + if (!masm()->isolate()->cpu_features()->IsSupported(SSE2)) { // Pop the value from the floating point stack. __ fstp(0); } @@ -614,7 +614,7 @@ void CodeGenerator::Load(Expression* expr) { safe_int32_mode_enabled() && expr->side_effect_free() && expr->num_bit_ops() > 2 && - Isolate::Current()->cpu_features()->IsSupported(SSE2)) { + masm()->isolate()->cpu_features()->IsSupported(SSE2)) { BreakTarget unsafe_bailout; JumpTarget done; unsafe_bailout.set_expected_height(frame_->height()); @@ -995,7 +995,7 @@ class DeferredInlineBinaryOperation: public DeferredCode { Label* DeferredInlineBinaryOperation::NonSmiInputLabel() { if (Token::IsBitOp(op_) && - Isolate::Current()->cpu_features()->IsSupported(SSE2)) { + masm()->isolate()->cpu_features()->IsSupported(SSE2)) { return &non_smi_input_; } else { return entry_label(); @@ -1018,7 +1018,7 @@ void DeferredInlineBinaryOperation::JumpToConstantRhs(Condition cond, void DeferredInlineBinaryOperation::Generate() { // Registers are not saved implicitly for this stub, so we should not // tread on the registers that were not passed to us. 
- if (Isolate::Current()->cpu_features()->IsSupported(SSE2) && + if (masm()->isolate()->cpu_features()->IsSupported(SSE2) && ((op_ == Token::ADD) || (op_ == Token::SUB) || (op_ == Token::MUL) || @@ -1154,7 +1154,7 @@ void DeferredInlineBinaryOperation::GenerateNonSmiInput() { // The left_ and right_ registers have not been initialized yet. __ mov(right_, Immediate(smi_value_)); __ mov(left_, Operand(dst_)); - if (!Isolate::Current()->cpu_features()->IsSupported(SSE2)) { + if (!masm()->isolate()->cpu_features()->IsSupported(SSE2)) { __ jmp(entry_label()); return; } else { @@ -1267,7 +1267,7 @@ void DeferredInlineBinaryOperation::GenerateAnswerOutOfRange() { // This trashes right_. __ AllocateHeapNumber(left_, right_, no_reg, &after_alloc_failure2); __ bind(&allocation_ok); - if (Isolate::Current()->cpu_features()->IsSupported(SSE2) && + if (masm()->isolate()->cpu_features()->IsSupported(SSE2) && op_ != Token::SHR) { CpuFeatures::Scope use_sse2(SSE2); ASSERT(Token::IsBitOp(op_)); @@ -3032,7 +3032,7 @@ void CodeGenerator::ConstantSmiComparison(Condition cc, // constant smi. If the non-smi is a heap number and this is not // a loop condition, inline the floating point code. if (!is_loop_condition && - Isolate::Current()->cpu_features()->IsSupported(SSE2)) { + masm()->isolate()->cpu_features()->IsSupported(SSE2)) { // Right side is a constant smi and left side has been checked // not to be a smi. CpuFeatures::Scope use_sse2(SSE2); @@ -3196,7 +3196,7 @@ void CodeGenerator::GenerateInlineNumberComparison(Result* left_side, ASSERT(right_side->is_register()); JumpTarget not_numbers; - if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) { + if (masm()->isolate()->cpu_features()->IsSupported(SSE2)) { CpuFeatures::Scope use_sse2(SSE2); // Load left and right operand into registers xmm0 and xmm1 and compare. 
@@ -3346,7 +3346,7 @@ void CodeGenerator::CallApplyLazy(Expression* applicand, __ j(not_equal, &build_args); __ mov(ecx, FieldOperand(eax, JSFunction::kCodeEntryOffset)); __ sub(Operand(ecx), Immediate(Code::kHeaderSize - kHeapObjectTag)); - Handle apply_code(Isolate::Current()->builtins()->builtin( + Handle apply_code(masm()->isolate()->builtins()->builtin( Builtins::FunctionApply)); __ cmp(Operand(ecx), Immediate(apply_code)); __ j(not_equal, &build_args); @@ -3473,7 +3473,7 @@ void DeferredStackCheck::Generate() { void CodeGenerator::CheckStack() { DeferredStackCheck* deferred = new DeferredStackCheck; ExternalReference stack_limit = - ExternalReference::address_of_stack_limit(); + ExternalReference::address_of_stack_limit(masm()->isolate()); __ cmp(esp, Operand::StaticVariable(stack_limit)); deferred->Branch(below); deferred->BindExit(); @@ -4647,7 +4647,8 @@ void CodeGenerator::VisitTryCatchStatement(TryCatchStatement* node) { function_return_is_shadowed_ = function_return_was_shadowed; // Get an external reference to the handler address. - ExternalReference handler_address(Isolate::k_handler_address); + ExternalReference handler_address(Isolate::k_handler_address, + masm()->isolate()); // Make sure that there's nothing left on the stack above the // handler structure. @@ -4773,7 +4774,8 @@ void CodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* node) { function_return_is_shadowed_ = function_return_was_shadowed; // Get an external reference to the handler address. - ExternalReference handler_address(Isolate::k_handler_address); + ExternalReference handler_address(Isolate::k_handler_address, + masm()->isolate()); // If we can fall off the end of the try block, unlink from the try // chain and set the state on the frame to FALLING. 
@@ -7446,13 +7448,14 @@ void CodeGenerator::GenerateRandomHeapNumber( __ bind(&heapnumber_allocated); __ PrepareCallCFunction(0, ebx); - __ CallCFunction(ExternalReference::random_uint32_function(), 0); + __ CallCFunction(ExternalReference::random_uint32_function(masm()->isolate()), + 0); // Convert 32 random bits in eax to 0.(32 random bits) in a double // by computing: // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)). // This is implemented on both SSE2 and FPU. - if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) { + if (masm()->isolate()->cpu_features()->IsSupported(SSE2)) { CpuFeatures::Scope fscope(SSE2); __ mov(ebx, Immediate(0x49800000)); // 1.0 x 2^20 as single. __ movd(xmm1, Operand(ebx)); @@ -7669,7 +7672,7 @@ void CodeGenerator::GenerateGetFromCache(ZoneList* args) { int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value(); Handle jsfunction_result_caches( - Isolate::Current()->global_context()->jsfunction_result_caches()); + masm()->isolate()->global_context()->jsfunction_result_caches()); if (jsfunction_result_caches->length() <= cache_id) { __ Abort("Attempt to use undefined cache."); frame_->Push(FACTORY->undefined_value()); @@ -7858,7 +7861,7 @@ void CodeGenerator::GenerateMathPow(ZoneList* args) { ASSERT(args->length() == 2); Load(args->at(0)); Load(args->at(1)); - if (!Isolate::Current()->cpu_features()->IsSupported(SSE2)) { + if (!masm()->isolate()->cpu_features()->IsSupported(SSE2)) { Result res = frame_->CallRuntime(Runtime::kMath_pow, 2); frame_->Push(&res); } else { @@ -8075,7 +8078,7 @@ void CodeGenerator::GenerateMathSqrt(ZoneList* args) { ASSERT_EQ(args->length(), 1); Load(args->at(0)); - if (!Isolate::Current()->cpu_features()->IsSupported(SSE2)) { + if (!masm()->isolate()->cpu_features()->IsSupported(SSE2)) { Result result = frame()->CallRuntime(Runtime::kMath_sqrt, 1); frame()->Push(&result); } else { @@ -9388,7 +9391,7 @@ void DeferredReferenceGetNamedValue::Generate() { __ mov(eax, receiver_); } __ 
Set(ecx, Immediate(name_)); - Handle ic(Isolate::Current()->builtins()->builtin( + Handle ic(masm()->isolate()->builtins()->builtin( Builtins::LoadIC_Initialize)); RelocInfo::Mode mode = is_contextual_ ? RelocInfo::CODE_TARGET_CONTEXT @@ -9468,7 +9471,7 @@ void DeferredReferenceGetKeyedValue::Generate() { // it in the IC initialization code and patch the cmp instruction. // This means that we cannot allow test instructions after calls to // KeyedLoadIC stubs in other places. - Handle ic(Isolate::Current()->builtins()->builtin( + Handle ic(masm()->isolate()->builtins()->builtin( Builtins::KeyedLoadIC_Initialize)); __ call(ic, RelocInfo::CODE_TARGET); // The delta from the start of the map-compare instruction to the @@ -9570,7 +9573,7 @@ void DeferredReferenceSetKeyedValue::Generate() { } // Call the IC stub. - Handle ic(Isolate::Current()->builtins()->builtin( + Handle ic(masm()->isolate()->builtins()->builtin( (strict_mode_ == kStrictMode) ? Builtins::KeyedStoreIC_Initialize_Strict : Builtins::KeyedStoreIC_Initialize)); __ call(ic, RelocInfo::CODE_TARGET); @@ -9595,7 +9598,7 @@ Result CodeGenerator::EmitNamedLoad(Handle name, bool is_contextual) { bool contextual_load_in_builtin = is_contextual && - (Isolate::Current()->bootstrapper()->IsActive() || + (masm()->isolate()->bootstrapper()->IsActive() || (!info_->closure().is_null() && info_->closure()->IsBuiltin())); Result result; @@ -10193,7 +10196,7 @@ MemCopyFunction CreateMemCopyFunction() { __ int3(); __ bind(&ok); } - if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) { + if (masm.isolate()->cpu_features()->IsSupported(SSE2)) { CpuFeatures::Scope enable(SSE2); __ push(edi); __ push(esi); diff --git a/src/ia32/debug-ia32.cc b/src/ia32/debug-ia32.cc index 72edaa7..33c5251 100644 --- a/src/ia32/debug-ia32.cc +++ b/src/ia32/debug-ia32.cc @@ -128,7 +128,7 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm, __ RecordComment("// Calling from debug break to runtime - come in - over"); #endif __ 
Set(eax, Immediate(0)); // No arguments. - __ mov(ebx, Immediate(ExternalReference::debug_break())); + __ mov(ebx, Immediate(ExternalReference::debug_break(masm->isolate()))); CEntryStub ceb(1); __ CallStub(&ceb); @@ -163,7 +163,7 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm, // jumping to the target address intended by the caller and that was // overwritten by the address of DebugBreakXXX. ExternalReference after_break_target = - ExternalReference(Debug_Address::AfterBreakTarget()); + ExternalReference(Debug_Address::AfterBreakTarget(), masm->isolate()); __ jmp(Operand::StaticVariable(after_break_target)); } @@ -279,7 +279,8 @@ void Debug::GeneratePlainReturnLiveEdit(MacroAssembler* masm) { void Debug::GenerateFrameDropperLiveEdit(MacroAssembler* masm) { ExternalReference restarter_frame_function_slot = - ExternalReference(Debug_Address::RestarterFrameFunctionPointer()); + ExternalReference(Debug_Address::RestarterFrameFunctionPointer(), + masm->isolate()); __ mov(Operand::StaticVariable(restarter_frame_function_slot), Immediate(0)); // We do not know our frame height, but set esp based on ebp. diff --git a/src/ia32/deoptimizer-ia32.cc b/src/ia32/deoptimizer-ia32.cc index 82d3f1e..4eaf7fe 100644 --- a/src/ia32/deoptimizer-ia32.cc +++ b/src/ia32/deoptimizer-ia32.cc @@ -510,6 +510,8 @@ void Deoptimizer::EntryGenerator::Generate() { GeneratePrologue(); CpuFeatures::Scope scope(SSE2); + Isolate* isolate = masm()->isolate(); + // Save all general purpose registers before messing with them. const int kNumberOfRegisters = Register::kNumRegisters; @@ -550,7 +552,7 @@ void Deoptimizer::EntryGenerator::Generate() { __ mov(Operand(esp, 2 * kPointerSize), ebx); // Bailout id. __ mov(Operand(esp, 3 * kPointerSize), ecx); // Code address or 0. __ mov(Operand(esp, 4 * kPointerSize), edx); // Fp-to-sp delta. 
- __ CallCFunction(ExternalReference::new_deoptimizer_function(), 5); + __ CallCFunction(ExternalReference::new_deoptimizer_function(isolate), 5); // Preserve deoptimizer object in register eax and get the input // frame descriptor pointer. @@ -598,7 +600,8 @@ void Deoptimizer::EntryGenerator::Generate() { __ push(eax); __ PrepareCallCFunction(1, ebx); __ mov(Operand(esp, 0 * kPointerSize), eax); - __ CallCFunction(ExternalReference::compute_output_frames_function(), 1); + __ CallCFunction( + ExternalReference::compute_output_frames_function(isolate), 1); __ pop(eax); // Replace the current frame with the output frames. diff --git a/src/ia32/full-codegen-ia32.cc b/src/ia32/full-codegen-ia32.cc index 2e2890d..b1c53d6 100644 --- a/src/ia32/full-codegen-ia32.cc +++ b/src/ia32/full-codegen-ia32.cc @@ -234,7 +234,7 @@ void FullCodeGenerator::Generate(CompilationInfo* info) { PrepareForBailout(info->function(), NO_REGISTERS); NearLabel ok; ExternalReference stack_limit = - ExternalReference::address_of_stack_limit(); + ExternalReference::address_of_stack_limit(isolate()); __ cmp(esp, Operand::StaticVariable(stack_limit)); __ j(above_equal, &ok, taken); StackCheckStub stub; @@ -266,7 +266,8 @@ void FullCodeGenerator::ClearAccumulator() { void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) { Comment cmnt(masm_, "[ Stack check"); NearLabel ok; - ExternalReference stack_limit = ExternalReference::address_of_stack_limit(); + ExternalReference stack_limit = + ExternalReference::address_of_stack_limit(isolate()); __ cmp(esp, Operand::StaticVariable(stack_limit)); __ j(above_equal, &ok, taken); StackCheckStub stub; @@ -2723,7 +2724,8 @@ void FullCodeGenerator::EmitRandomHeapNumber(ZoneList* args) { __ bind(&heapnumber_allocated); __ PrepareCallCFunction(0, ebx); - __ CallCFunction(ExternalReference::random_uint32_function(), 0); + __ CallCFunction(ExternalReference::random_uint32_function(isolate()), + 0); // Convert 32 random bits in eax to 0.(32 random bits) in a 
double // by computing: diff --git a/src/ia32/ic-ia32.cc b/src/ia32/ic-ia32.cc index ae8ed6a..f2558bf 100644 --- a/src/ia32/ic-ia32.cc +++ b/src/ia32/ic-ia32.cc @@ -621,8 +621,8 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) { // Load the key (consisting of map and symbol) from the cache and // check for match. - ExternalReference cache_keys - = ExternalReference::keyed_lookup_cache_keys(); + ExternalReference cache_keys = + ExternalReference::keyed_lookup_cache_keys(masm->isolate()); __ mov(edi, ecx); __ shl(edi, kPointerSizeLog2 + 1); __ cmp(ebx, Operand::StaticArray(edi, times_1, cache_keys)); @@ -636,8 +636,8 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) { // ebx : receiver's map // eax : key // ecx : lookup cache index - ExternalReference cache_field_offsets - = ExternalReference::keyed_lookup_cache_field_offsets(); + ExternalReference cache_field_offsets = + ExternalReference::keyed_lookup_cache_field_offsets(masm->isolate()); __ mov(edi, Operand::StaticArray(ecx, times_pointer_size, cache_field_offsets)); __ movzx_b(ecx, FieldOperand(ebx, Map::kInObjectPropertiesOffset)); @@ -745,8 +745,9 @@ void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) { __ push(ecx); // return address // Perform tail call to the entry. - ExternalReference ref = ExternalReference( - IC_Utility(kKeyedLoadPropertyWithInterceptor)); + ExternalReference ref = + ExternalReference(IC_Utility(kKeyedLoadPropertyWithInterceptor), + masm->isolate()); __ TailCallExternalReference(ref, 2, 1); __ bind(&slow); @@ -985,7 +986,7 @@ static void GenerateCallMiss(MacroAssembler* masm, // Call the entry. CEntryStub stub(1); __ mov(eax, Immediate(2)); - __ mov(ebx, Immediate(ExternalReference(IC_Utility(id)))); + __ mov(ebx, Immediate(ExternalReference(IC_Utility(id), masm->isolate()))); __ CallStub(&stub); // Move result to edi and exit the internal frame. 
@@ -1258,7 +1259,8 @@ void LoadIC::GenerateMiss(MacroAssembler* masm) { __ push(ebx); // return address // Perform tail call to the entry. - ExternalReference ref = ExternalReference(IC_Utility(kLoadIC_Miss)); + ExternalReference ref = + ExternalReference(IC_Utility(kLoadIC_Miss), masm->isolate()); __ TailCallExternalReference(ref, 2, 1); } @@ -1444,7 +1446,8 @@ void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) { __ push(ebx); // return address // Perform tail call to the entry. - ExternalReference ref = ExternalReference(IC_Utility(kKeyedLoadIC_Miss)); + ExternalReference ref = + ExternalReference(IC_Utility(kKeyedLoadIC_Miss), masm->isolate()); __ TailCallExternalReference(ref, 2, 1); } @@ -1502,7 +1505,8 @@ void StoreIC::GenerateMiss(MacroAssembler* masm) { __ push(ebx); // Perform tail call to the entry. - ExternalReference ref = ExternalReference(IC_Utility(kStoreIC_Miss)); + ExternalReference ref = + ExternalReference(IC_Utility(kStoreIC_Miss), masm->isolate()); __ TailCallExternalReference(ref, 3, 1); } @@ -1557,7 +1561,8 @@ void StoreIC::GenerateArrayLength(MacroAssembler* masm) { __ push(value); __ push(scratch); // return address - ExternalReference ref = ExternalReference(IC_Utility(kStoreIC_ArrayLength)); + ExternalReference ref = + ExternalReference(IC_Utility(kStoreIC_ArrayLength), masm->isolate()); __ TailCallExternalReference(ref, 2, 1); __ bind(&miss); @@ -1654,7 +1659,8 @@ void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) { __ push(ebx); // Do tail-call to runtime routine. 
- ExternalReference ref = ExternalReference(IC_Utility(kKeyedStoreIC_Miss)); + ExternalReference ref = + ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate()); __ TailCallExternalReference(ref, 3, 1); } diff --git a/src/ia32/lithium-codegen-ia32.cc b/src/ia32/lithium-codegen-ia32.cc index df2a8b3..ef30b68 100644 --- a/src/ia32/lithium-codegen-ia32.cc +++ b/src/ia32/lithium-codegen-ia32.cc @@ -1226,7 +1226,9 @@ void LCodeGen::DoArithmeticD(LArithmeticD* instr) { __ PrepareCallCFunction(4, eax); __ movdbl(Operand(esp, 0 * kDoubleSize), left); __ movdbl(Operand(esp, 1 * kDoubleSize), right); - __ CallCFunction(ExternalReference::double_fp_operation(Token::MOD), 4); + __ CallCFunction( + ExternalReference::double_fp_operation(Token::MOD, isolate()), + 4); // Return value is in st(0) on ia32. // Store it into the (fixed) result register. @@ -1348,7 +1350,7 @@ void LCodeGen::EmitGoto(int block, LDeferredCode* deferred_stack_check) { // Perform stack overflow check if this goto needs it before jumping. if (deferred_stack_check != NULL) { ExternalReference stack_limit = - ExternalReference::address_of_stack_limit(); + ExternalReference::address_of_stack_limit(isolate()); __ cmp(esp, Operand::StaticVariable(stack_limit)); __ j(above_equal, chunk_->GetAssemblyLabel(block)); __ jmp(deferred_stack_check->entry()); @@ -2640,13 +2642,15 @@ void LCodeGen::DoPower(LPower* instr) { LOperand* right = instr->InputAt(1); DoubleRegister result_reg = ToDoubleRegister(instr->result()); Representation exponent_type = instr->hydrogen()->right()->representation(); + if (exponent_type.IsDouble()) { // It is safe to use ebx directly since the instruction is marked // as a call. 
__ PrepareCallCFunction(4, ebx); __ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left)); __ movdbl(Operand(esp, 1 * kDoubleSize), ToDoubleRegister(right)); - __ CallCFunction(ExternalReference::power_double_double_function(), 4); + __ CallCFunction(ExternalReference::power_double_double_function(isolate()), + 4); } else if (exponent_type.IsInteger32()) { // It is safe to use ebx directly since the instruction is marked // as a call. @@ -2654,7 +2658,8 @@ void LCodeGen::DoPower(LPower* instr) { __ PrepareCallCFunction(4, ebx); __ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left)); __ mov(Operand(esp, 1 * kDoubleSize), ToRegister(right)); - __ CallCFunction(ExternalReference::power_double_int_function(), 4); + __ CallCFunction(ExternalReference::power_double_int_function(isolate()), + 4); } else { ASSERT(exponent_type.IsTagged()); CpuFeatures::Scope scope(SSE2); @@ -2679,7 +2684,8 @@ void LCodeGen::DoPower(LPower* instr) { __ PrepareCallCFunction(4, ebx); __ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left)); __ movdbl(Operand(esp, 1 * kDoubleSize), result_reg); - __ CallCFunction(ExternalReference::power_double_double_function(), 4); + __ CallCFunction(ExternalReference::power_double_double_function(isolate()), + 4); } // Return value is in st(0) on ia32. @@ -3978,7 +3984,8 @@ void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) { void LCodeGen::DoStackCheck(LStackCheck* instr) { // Perform stack overflow check. 
NearLabel done; - ExternalReference stack_limit = ExternalReference::address_of_stack_limit(); + ExternalReference stack_limit = + ExternalReference::address_of_stack_limit(isolate()); __ cmp(esp, Operand::StaticVariable(stack_limit)); __ j(above_equal, &done); diff --git a/src/ia32/macro-assembler-ia32.cc b/src/ia32/macro-assembler-ia32.cc index 542533c..950cf31 100644 --- a/src/ia32/macro-assembler-ia32.cc +++ b/src/ia32/macro-assembler-ia32.cc @@ -152,7 +152,7 @@ void MacroAssembler::RecordWrite(Register object, #ifdef ENABLE_DEBUGGER_SUPPORT void MacroAssembler::DebugBreak() { Set(eax, Immediate(0)); - mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak))); + mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak, isolate()))); CEntryStub ces(1); call(ces.GetCode(), RelocInfo::DEBUG_BREAK); } @@ -316,8 +316,10 @@ void MacroAssembler::EnterExitFramePrologue() { push(Immediate(CodeObject())); // Accessed from ExitFrame::code_slot. // Save the frame pointer and the context in top. - ExternalReference c_entry_fp_address(Isolate::k_c_entry_fp_address); - ExternalReference context_address(Isolate::k_context_address); + ExternalReference c_entry_fp_address(Isolate::k_c_entry_fp_address, + isolate()); + ExternalReference context_address(Isolate::k_context_address, + isolate()); mov(Operand::StaticVariable(c_entry_fp_address), ebp); mov(Operand::StaticVariable(context_address), esi); } @@ -395,14 +397,15 @@ void MacroAssembler::LeaveExitFrame(bool save_doubles) { void MacroAssembler::LeaveExitFrameEpilogue() { // Restore current context from top and clear it in debug mode. - ExternalReference context_address(Isolate::k_context_address); + ExternalReference context_address(Isolate::k_context_address, isolate()); mov(esi, Operand::StaticVariable(context_address)); #ifdef DEBUG mov(Operand::StaticVariable(context_address), Immediate(0)); #endif // Clear the top frame. 
- ExternalReference c_entry_fp_address(Isolate::k_c_entry_fp_address); + ExternalReference c_entry_fp_address(Isolate::k_c_entry_fp_address, + isolate()); mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0)); } @@ -436,16 +439,19 @@ void MacroAssembler::PushTryHandler(CodeLocation try_location, push(Immediate(0)); // NULL frame pointer. } // Save the current handler as the next handler. - push(Operand::StaticVariable(ExternalReference(Isolate::k_handler_address))); + push(Operand::StaticVariable(ExternalReference(Isolate::k_handler_address, + isolate()))); // Link this handler as the new current one. - mov(Operand::StaticVariable(ExternalReference(Isolate::k_handler_address)), + mov(Operand::StaticVariable(ExternalReference(Isolate::k_handler_address, + isolate())), esp); } void MacroAssembler::PopTryHandler() { ASSERT_EQ(0, StackHandlerConstants::kNextOffset); - pop(Operand::StaticVariable(ExternalReference(Isolate::k_handler_address))); + pop(Operand::StaticVariable(ExternalReference(Isolate::k_handler_address, + isolate()))); add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize)); } @@ -460,7 +466,8 @@ void MacroAssembler::Throw(Register value) { } // Drop the sp to the top of the handler. - ExternalReference handler_address(Isolate::k_handler_address); + ExternalReference handler_address(Isolate::k_handler_address, + isolate()); mov(esp, Operand::StaticVariable(handler_address)); // Restore next handler and frame pointer, discard handler state. @@ -496,7 +503,8 @@ void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type, } // Drop sp to the top stack handler. - ExternalReference handler_address(Isolate::k_handler_address); + ExternalReference handler_address(Isolate::k_handler_address, + isolate()); mov(esp, Operand::StaticVariable(handler_address)); // Unwind the handlers until the ENTRY handler is found. 
@@ -519,12 +527,14 @@ void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type, if (type == OUT_OF_MEMORY) { // Set external caught exception to false. ExternalReference external_caught( - Isolate::k_external_caught_exception_address); + Isolate::k_external_caught_exception_address, + isolate()); mov(eax, false); mov(Operand::StaticVariable(external_caught), eax); // Set pending exception and eax to out of memory exception. - ExternalReference pending_exception(Isolate::k_pending_exception_address); + ExternalReference pending_exception(Isolate::k_pending_exception_address, + isolate()); mov(eax, reinterpret_cast(Failure::OutOfMemoryException())); mov(Operand::StaticVariable(pending_exception), eax); } @@ -614,7 +624,7 @@ void MacroAssembler::LoadAllocationTopHelper(Register result, Register scratch, AllocationFlags flags) { ExternalReference new_space_allocation_top = - ExternalReference::new_space_allocation_top_address(); + ExternalReference::new_space_allocation_top_address(isolate()); // Just return if allocation top is already known. if ((flags & RESULT_CONTAINS_TOP) != 0) { @@ -646,7 +656,7 @@ void MacroAssembler::UpdateAllocationTopHelper(Register result_end, } ExternalReference new_space_allocation_top = - ExternalReference::new_space_allocation_top_address(); + ExternalReference::new_space_allocation_top_address(isolate()); // Update new top. Use scratch if available. if (scratch.is(no_reg)) { @@ -686,7 +696,7 @@ void MacroAssembler::AllocateInNewSpace(int object_size, // Calculate new top and bail out if new space is exhausted. ExternalReference new_space_allocation_limit = - ExternalReference::new_space_allocation_limit_address(); + ExternalReference::new_space_allocation_limit_address(isolate()); if (!top_reg.is(result)) { mov(top_reg, result); @@ -740,7 +750,7 @@ void MacroAssembler::AllocateInNewSpace(int header_size, // Calculate new top and bail out if new space is exhausted. 
ExternalReference new_space_allocation_limit = - ExternalReference::new_space_allocation_limit_address(); + ExternalReference::new_space_allocation_limit_address(isolate()); // We assume that element_count*element_size + header_size does not // overflow. @@ -786,7 +796,7 @@ void MacroAssembler::AllocateInNewSpace(Register object_size, // Calculate new top and bail out if new space is exhausted. ExternalReference new_space_allocation_limit = - ExternalReference::new_space_allocation_limit_address(); + ExternalReference::new_space_allocation_limit_address(isolate()); if (!object_size.is(result_end)) { mov(result_end, object_size); } @@ -807,7 +817,7 @@ void MacroAssembler::AllocateInNewSpace(Register object_size, void MacroAssembler::UndoAllocationInNewSpace(Register object) { ExternalReference new_space_allocation_top = - ExternalReference::new_space_allocation_top_address(); + ExternalReference::new_space_allocation_top_address(isolate()); // Make sure the object has no tag before resetting top. and_(Operand(object), Immediate(~kHeapObjectTagMask)); @@ -1179,7 +1189,7 @@ void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) { void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) { const Runtime::Function* function = Runtime::FunctionForId(id); Set(eax, Immediate(function->nargs)); - mov(ebx, Immediate(ExternalReference(function))); + mov(ebx, Immediate(ExternalReference(function, isolate()))); CEntryStub ces(1); ces.SaveDoubles(); CallStub(&ces); @@ -1207,7 +1217,7 @@ void MacroAssembler::CallRuntime(const Runtime::Function* f, // should remove this need and make the runtime routine entry code // smarter. 
Set(eax, Immediate(num_arguments)); - mov(ebx, Immediate(ExternalReference(f))); + mov(ebx, Immediate(ExternalReference(f, isolate()))); CEntryStub ces(1); CallStub(&ces); } @@ -1227,7 +1237,7 @@ MaybeObject* MacroAssembler::TryCallRuntime(const Runtime::Function* f, // should remove this need and make the runtime routine entry code // smarter. Set(eax, Immediate(num_arguments)); - mov(ebx, Immediate(ExternalReference(f))); + mov(ebx, Immediate(ExternalReference(f, isolate()))); CEntryStub ces(1); return TryCallStub(&ces); } @@ -1269,7 +1279,9 @@ MaybeObject* MacroAssembler::TryTailCallExternalReference( void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid, int num_arguments, int result_size) { - TailCallExternalReference(ExternalReference(fid), num_arguments, result_size); + TailCallExternalReference(ExternalReference(fid, isolate()), + num_arguments, + result_size); } @@ -1277,7 +1289,7 @@ MaybeObject* MacroAssembler::TryTailCallRuntime(Runtime::FunctionId fid, int num_arguments, int result_size) { return TryTailCallExternalReference( - ExternalReference(fid), num_arguments, result_size); + ExternalReference(fid, isolate()), num_arguments, result_size); } @@ -1377,9 +1389,9 @@ MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn(ApiFunction* function, // Check if the function scheduled an exception. ExternalReference scheduled_exception_address = - ExternalReference::scheduled_exception_address(); + ExternalReference::scheduled_exception_address(isolate()); cmp(Operand::StaticVariable(scheduled_exception_address), - Immediate(FACTORY->the_hole_value())); + Immediate(isolate()->factory()->the_hole_value())); j(not_equal, &promote_scheduled_exception, not_taken); LeaveApiExitFrame(); ret(stack_space * kPointerSize); @@ -1395,11 +1407,13 @@ MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn(ApiFunction* function, jmp(&prologue); // HandleScope limit has changed. Delete allocated extensions. 
+ ExternalReference delete_extensions = + ExternalReference::delete_handle_scope_extensions(isolate()); bind(&delete_allocated_handles); mov(Operand::StaticVariable(limit_address), edi); mov(edi, eax); mov(Operand(esp, 0), Immediate(ExternalReference::isolate_address())); - mov(eax, Immediate(ExternalReference::delete_handle_scope_extensions())); + mov(eax, Immediate(delete_extensions)); call(Operand(eax)); mov(eax, edi); jmp(&leave_exit_frame); diff --git a/src/ia32/macro-assembler-ia32.h b/src/ia32/macro-assembler-ia32.h index 3addba9..bafb175 100644 --- a/src/ia32/macro-assembler-ia32.h +++ b/src/ia32/macro-assembler-ia32.h @@ -695,14 +695,16 @@ void MacroAssembler::InNewSpace(Register object, // The mask isn't really an address. We load it as an external reference in // case the size of the new space is different between the snapshot maker // and the running system. - and_(Operand(scratch), Immediate(ExternalReference::new_space_mask())); - cmp(Operand(scratch), Immediate(ExternalReference::new_space_start())); + and_(Operand(scratch), + Immediate(ExternalReference::new_space_mask(isolate()))); + cmp(Operand(scratch), + Immediate(ExternalReference::new_space_start(isolate()))); j(cc, branch); } else { int32_t new_space_start = reinterpret_cast( - ExternalReference::new_space_start().address()); + ExternalReference::new_space_start(isolate()).address()); lea(scratch, Operand(object, -new_space_start)); - and_(scratch, HEAP->NewSpaceMask()); + and_(scratch, isolate()->heap()->NewSpaceMask()); j(cc, branch); } } diff --git a/src/ia32/regexp-macro-assembler-ia32.cc b/src/ia32/regexp-macro-assembler-ia32.cc index a25aabc..f1c773b 100644 --- a/src/ia32/regexp-macro-assembler-ia32.cc +++ b/src/ia32/regexp-macro-assembler-ia32.cc @@ -393,7 +393,7 @@ void RegExpMacroAssemblerIA32::CheckNotBackReferenceIgnoreCase( __ mov(Operand(esp, 0 * kPointerSize), edx); ExternalReference compare = - ExternalReference::re_case_insensitive_compare_uc16(); + 
ExternalReference::re_case_insensitive_compare_uc16(masm_->isolate()); __ CallCFunction(compare, argument_count); // Pop original values before reacting on result value. __ pop(ebx); @@ -679,7 +679,7 @@ Handle RegExpMacroAssemblerIA32::GetCode(Handle source) { Label stack_ok; ExternalReference stack_limit = - ExternalReference::address_of_stack_limit(); + ExternalReference::address_of_stack_limit(masm_->isolate()); __ mov(ecx, esp); __ sub(ecx, Operand::StaticVariable(stack_limit)); // Handle it if the stack pointer is already below the stack limit. @@ -843,7 +843,8 @@ Handle RegExpMacroAssemblerIA32::GetCode(Handle source) { __ lea(eax, Operand(ebp, kStackHighEnd)); __ mov(Operand(esp, 1 * kPointerSize), eax); __ mov(Operand(esp, 0 * kPointerSize), backtrack_stackpointer()); - ExternalReference grow_stack = ExternalReference::re_grow_stack(); + ExternalReference grow_stack = + ExternalReference::re_grow_stack(masm_->isolate()); __ CallCFunction(grow_stack, num_arguments); // If return NULL, we have failed to grow the stack, and // must exit with a stack-overflow exception. 
@@ -867,12 +868,11 @@ Handle RegExpMacroAssemblerIA32::GetCode(Handle source) { CodeDesc code_desc; masm_->GetCode(&code_desc); - Isolate* isolate = ISOLATE; Handle code = - isolate->factory()->NewCode(code_desc, - Code::ComputeFlags(Code::REGEXP), - masm_->CodeObject()); - PROFILE(isolate, RegExpCodeCreateEvent(*code, *source)); + masm_->isolate()->factory()->NewCode(code_desc, + Code::ComputeFlags(Code::REGEXP), + masm_->CodeObject()); + PROFILE(masm_->isolate(), RegExpCodeCreateEvent(*code, *source)); return Handle::cast(code); } @@ -1027,7 +1027,7 @@ void RegExpMacroAssemblerIA32::CallCheckStackGuardState(Register scratch) { __ lea(eax, Operand(esp, -kPointerSize)); __ mov(Operand(esp, 0 * kPointerSize), eax); ExternalReference check_stack_guard = - ExternalReference::re_check_stack_guard_state(); + ExternalReference::re_check_stack_guard_state(masm_->isolate()); __ CallCFunction(check_stack_guard, num_arguments); } @@ -1201,7 +1201,7 @@ void RegExpMacroAssemblerIA32::CheckPreemption() { // Check for preemption. 
Label no_preempt; ExternalReference stack_limit = - ExternalReference::address_of_stack_limit(); + ExternalReference::address_of_stack_limit(masm_->isolate()); __ cmp(esp, Operand::StaticVariable(stack_limit)); __ j(above, &no_preempt, taken); @@ -1214,7 +1214,7 @@ void RegExpMacroAssemblerIA32::CheckPreemption() { void RegExpMacroAssemblerIA32::CheckStackLimit() { Label no_stack_overflow; ExternalReference stack_limit = - ExternalReference::address_of_regexp_stack_limit(); + ExternalReference::address_of_regexp_stack_limit(masm_->isolate()); __ cmp(backtrack_stackpointer(), Operand::StaticVariable(stack_limit)); __ j(above, &no_stack_overflow); diff --git a/src/ia32/stub-cache-ia32.cc b/src/ia32/stub-cache-ia32.cc index fd5310d..a5243a8 100644 --- a/src/ia32/stub-cache-ia32.cc +++ b/src/ia32/stub-cache-ia32.cc @@ -415,8 +415,9 @@ static void CompileCallLoadPropertyWithInterceptor(MacroAssembler* masm, JSObject* holder_obj) { PushInterceptorArguments(masm, receiver, holder, name, holder_obj); __ CallExternalReference( - ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly)), - 5); + ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly), + masm->isolate()), + 5); } @@ -703,9 +704,9 @@ class CallInterceptorCompiler BASE_EMBEDDED { interceptor_holder); __ CallExternalReference( - ExternalReference( - IC_Utility(IC::kLoadPropertyWithInterceptorForCall)), - 5); + ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForCall), + masm->isolate()), + 5); // Restore the name_ register. __ pop(name_); @@ -731,7 +732,7 @@ class CallInterceptorCompiler BASE_EMBEDDED { __ pop(receiver); // Restore the holder. 
__ LeaveInternalFrame(); - __ cmp(eax, FACTORY->no_interceptor_result_sentinel()); + __ cmp(eax, masm->isolate()->factory()->no_interceptor_result_sentinel()); __ j(not_equal, interceptor_succeeded); } @@ -793,7 +794,10 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm, __ push(eax); __ push(scratch); __ TailCallExternalReference( - ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage)), 3, 1); + ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage), + masm->isolate()), + 3, + 1); return; } @@ -1262,7 +1266,8 @@ void StubCompiler::GenerateLoadInterceptor(JSObject* object, __ push(scratch2); // restore return address ExternalReference ref = - ExternalReference(IC_Utility(IC::kLoadCallbackProperty)); + ExternalReference(IC_Utility(IC::kLoadCallbackProperty), + masm()->isolate()); __ TailCallExternalReference(ref, 5, 1); } } else { // !compile_followup_inline @@ -1276,8 +1281,9 @@ void StubCompiler::GenerateLoadInterceptor(JSObject* object, name_reg, interceptor_holder); __ push(scratch2); // restore old return address - ExternalReference ref = ExternalReference( - IC_Utility(IC::kLoadPropertyWithInterceptorForLoad)); + ExternalReference ref = + ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), + masm()->isolate()); __ TailCallExternalReference(ref, 5, 1); } } @@ -1509,10 +1515,11 @@ MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object, __ jmp(&call_builtin); } + Isolate* isolate = masm()->isolate(); ExternalReference new_space_allocation_top = - ExternalReference::new_space_allocation_top_address(); + ExternalReference::new_space_allocation_top_address(isolate); ExternalReference new_space_allocation_limit = - ExternalReference::new_space_allocation_limit_address(); + ExternalReference::new_space_allocation_limit_address(isolate); const int kAllocationDelta = 4; // Load top. 
@@ -1553,9 +1560,10 @@ MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object, } __ bind(&call_builtin); - __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush), - argc + 1, - 1); + __ TailCallExternalReference( + ExternalReference(Builtins::c_ArrayPush, masm()->isolate()), + argc + 1, + 1); } __ bind(&miss); @@ -1635,9 +1643,10 @@ MaybeObject* CallStubCompiler::CompileArrayPopCall(Object* object, __ ret((argc + 1) * kPointerSize); __ bind(&call_builtin); - __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPop), - argc + 1, - 1); + __ TailCallExternalReference( + ExternalReference(Builtins::c_ArrayPop, masm()->isolate()), + argc + 1, + 1); __ bind(&miss); MaybeObject* maybe_result = GenerateMissBranch(); @@ -1663,7 +1672,9 @@ MaybeObject* CallStubCompiler::CompileStringCharCodeAtCall( // ----------------------------------- // If object is not a string, bail out to regular call. - if (!object->IsString() || cell != NULL) return HEAP->undefined_value(); + if (!object->IsString() || cell != NULL) { + return masm()->isolate()->heap()->undefined_value(); + } const int argc = arguments().immediate(); @@ -2514,14 +2525,14 @@ MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object, __ push(ebx); // restore return address // Do tail-call to the runtime system. + Isolate* isolate = masm()->isolate(); ExternalReference store_callback_property = - ExternalReference(IC_Utility(IC::kStoreCallbackProperty)); + ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate); __ TailCallExternalReference(store_callback_property, 4, 1); // Handle store cache miss. __ bind(&miss); - Handle ic(Isolate::Current()->builtins()->builtin( - Builtins::StoreIC_Miss)); + Handle ic(isolate->builtins()->builtin(Builtins::StoreIC_Miss)); __ jmp(ic, RelocInfo::CODE_TARGET); // Return the generated code. 
@@ -2565,14 +2576,14 @@ MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver, __ push(ebx); // restore return address // Do tail-call to the runtime system. + Isolate* isolate = masm()->isolate(); ExternalReference store_ic_property = - ExternalReference(IC_Utility(IC::kStoreInterceptorProperty)); + ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate); __ TailCallExternalReference(store_ic_property, 4, 1); // Handle store cache miss. __ bind(&miss); - Handle ic(Isolate::Current()->builtins()->builtin( - Builtins::StoreIC_Miss)); + Handle ic(isolate->builtins()->builtin(Builtins::StoreIC_Miss)); __ jmp(ic, RelocInfo::CODE_TARGET); // Return the generated code. diff --git a/src/parser.cc b/src/parser.cc index 46d49f3..a10f9f9 100644 --- a/src/parser.cc +++ b/src/parser.cc @@ -252,7 +252,7 @@ void RegExpBuilder::AddQuantifierToAtom(int min, // allocated and hence used by the pre-parser. class TemporaryScope BASE_EMBEDDED { public: - explicit TemporaryScope(TemporaryScope** variable); + TemporaryScope(TemporaryScope** variable, Isolate* isolate); ~TemporaryScope(); int NextMaterializedLiteralIndex() { @@ -306,12 +306,11 @@ class TemporaryScope BASE_EMBEDDED { }; -TemporaryScope::TemporaryScope(TemporaryScope** variable) +TemporaryScope::TemporaryScope(TemporaryScope** variable, Isolate* isolate) : materialized_literal_count_(0), expected_property_count_(0), only_simple_this_property_assignments_(false), - this_property_assignments_( - Isolate::Current()->factory()->empty_fixed_array()), + this_property_assignments_(isolate->factory()->empty_fixed_array()), loop_count_(0), variable_(variable), parent_(*variable) { @@ -659,7 +658,7 @@ FunctionLiteral* Parser::DoParseProgram(Handle source, { Scope* scope = NewScope(top_scope_, type, inside_with()); LexicalScope lexical_scope(&this->top_scope_, &this->with_nesting_level_, scope); - TemporaryScope temp_scope(&this->temp_scope_); + TemporaryScope temp_scope(&this->temp_scope_, 
isolate()); if (strict_mode == kStrictMode) { top_scope_->EnableStrictMode(); } @@ -749,7 +748,7 @@ FunctionLiteral* Parser::ParseLazy(CompilationInfo* info, } LexicalScope lexical_scope(&this->top_scope_, &this->with_nesting_level_, scope); - TemporaryScope temp_scope(&this->temp_scope_); + TemporaryScope temp_scope(&this->temp_scope_, isolate()); if (shared_info->strict_mode()) { top_scope_->EnableStrictMode(); @@ -952,8 +951,9 @@ class InitializationBlockFinder : public ParserFinder { // function contains only assignments of this type. class ThisNamedPropertyAssigmentFinder : public ParserFinder { public: - ThisNamedPropertyAssigmentFinder() - : only_simple_this_property_assignments_(true), + explicit ThisNamedPropertyAssigmentFinder(Isolate* isolate) + : isolate_(isolate), + only_simple_this_property_assignments_(true), names_(NULL), assigned_arguments_(NULL), assigned_constants_(NULL) {} @@ -984,14 +984,14 @@ class ThisNamedPropertyAssigmentFinder : public ParserFinder { // form this.x = y; Handle GetThisPropertyAssignments() { if (names_ == NULL) { - return FACTORY->empty_fixed_array(); + return isolate_->factory()->empty_fixed_array(); } ASSERT(names_ != NULL); ASSERT(assigned_arguments_ != NULL); ASSERT_EQ(names_->length(), assigned_arguments_->length()); ASSERT_EQ(names_->length(), assigned_constants_->length()); Handle assignments = - FACTORY->NewFixedArray(names_->length() * 3); + isolate_->factory()->NewFixedArray(names_->length() * 3); for (int i = 0; i < names_->length(); i++) { assignments->set(i * 3, *names_->at(i)); assignments->set(i * 3 + 1, Smi::FromInt(assigned_arguments_->at(i))); @@ -1021,7 +1021,8 @@ class ThisNamedPropertyAssigmentFinder : public ParserFinder { uint32_t dummy; if (literal != NULL && literal->handle()->IsString() && - !String::cast(*(literal->handle()))->Equals(HEAP->Proto_symbol()) && + !String::cast(*(literal->handle()))->Equals( + isolate_->heap()->Proto_symbol()) && 
!String::cast(*(literal->handle()))->AsArrayIndex(&dummy)) { Handle key = Handle::cast(literal->handle()); @@ -1055,7 +1056,7 @@ class ThisNamedPropertyAssigmentFinder : public ParserFinder { EnsureAllocation(); names_->Add(name); assigned_arguments_->Add(index); - assigned_constants_->Add(FACTORY->undefined_value()); + assigned_constants_->Add(isolate_->factory()->undefined_value()); } void AssignmentFromConstant(Handle name, Handle value) { @@ -1080,6 +1081,7 @@ class ThisNamedPropertyAssigmentFinder : public ParserFinder { } } + Isolate* isolate_; bool only_simple_this_property_assignments_; ZoneStringList* names_; ZoneList* assigned_arguments_; @@ -1101,7 +1103,7 @@ void* Parser::ParseSourceElements(ZoneList* processor, ASSERT(processor != NULL); InitializationBlockFinder block_finder; - ThisNamedPropertyAssigmentFinder this_property_assignment_finder; + ThisNamedPropertyAssigmentFinder this_property_assignment_finder(isolate()); bool directive_prologue = true; // Parsing directive prologue. while (peek() != end_token) { @@ -1519,11 +1521,13 @@ Block* Parser::ParseVariableStatement(bool* ok) { return result; } -static bool IsEvalOrArguments(Handle string) { - return string.is_identical_to(FACTORY->eval_symbol()) || - string.is_identical_to(FACTORY->arguments_symbol()); + +bool Parser::IsEvalOrArguments(Handle string) { + return string.is_identical_to(isolate()->factory()->eval_symbol()) || + string.is_identical_to(isolate()->factory()->arguments_symbol()); } + // If the variable declaration declares exactly one non-const // variable, then *var is set to that variable. 
In all other cases, // *var is untouched; in particular, it is the caller's responsibility @@ -3540,7 +3544,7 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle var_name, NewScope(top_scope_, Scope::FUNCTION_SCOPE, inside_with()); LexicalScope lexical_scope(&this->top_scope_, &this->with_nesting_level_, scope); - TemporaryScope temp_scope(&this->temp_scope_); + TemporaryScope temp_scope(&this->temp_scope_, isolate()); top_scope_->SetScopeName(name); // FormalParameterList :: diff --git a/src/parser.h b/src/parser.h index 0ebc843..5221752 100644 --- a/src/parser.h +++ b/src/parser.h @@ -470,6 +470,9 @@ class Parser { Mode mode() const { return mode_; } ScriptDataImpl* pre_data() const { return pre_data_; } + // Check if the given string is 'eval' or 'arguments'. + bool IsEvalOrArguments(Handle string); + // All ParseXXX functions take as the last argument an *ok parameter // which is set to false if parsing failed; it is unchanged otherwise. // By making the 'exception handling' explicit, we are forced to check diff --git a/src/serialize.cc b/src/serialize.cc index acf13fb..260abd8 100644 --- a/src/serialize.cc +++ b/src/serialize.cc @@ -102,7 +102,10 @@ class ExternalReferenceTable { void PopulateTable(Isolate* isolate); // For a few types of references, we can get their address from their id. - void AddFromId(TypeCode type, uint16_t id, const char* name); + void AddFromId(TypeCode type, + uint16_t id, + const char* name, + Isolate* isolate); // For other types of references, the caller will figure out the address. 
void Add(Address address, TypeCode type, uint16_t id, const char* name); @@ -114,26 +117,28 @@ class ExternalReferenceTable { void ExternalReferenceTable::AddFromId(TypeCode type, uint16_t id, - const char* name) { + const char* name, + Isolate* isolate) { Address address; switch (type) { case C_BUILTIN: { - ExternalReference ref(static_cast(id)); + ExternalReference ref(static_cast(id), isolate); address = ref.address(); break; } case BUILTIN: { - ExternalReference ref(static_cast(id)); + ExternalReference ref(static_cast(id), isolate); address = ref.address(); break; } case RUNTIME_FUNCTION: { - ExternalReference ref(static_cast(id)); + ExternalReference ref(static_cast(id), isolate); address = ref.address(); break; } case IC_UTILITY: { - ExternalReference ref(IC_Utility(static_cast(id))); + ExternalReference ref(IC_Utility(static_cast(id)), + isolate); address = ref.address(); break; } @@ -221,7 +226,10 @@ void ExternalReferenceTable::PopulateTable(Isolate* isolate) { }; // end of ref_table[]. 
for (size_t i = 0; i < ARRAY_SIZE(ref_table); ++i) { - AddFromId(ref_table[i].type, ref_table[i].id, ref_table[i].name); + AddFromId(ref_table[i].type, + ref_table[i].id, + ref_table[i].name, + isolate); } #ifdef ENABLE_DEBUGGER_SUPPORT @@ -317,122 +325,124 @@ void ExternalReferenceTable::PopulateTable(Isolate* isolate) { "StubCache::secondary_->value"); // Runtime entries - Add(ExternalReference::perform_gc_function().address(), + Add(ExternalReference::perform_gc_function(isolate).address(), RUNTIME_ENTRY, 1, "Runtime::PerformGC"); - Add(ExternalReference::fill_heap_number_with_random_function().address(), + Add(ExternalReference::fill_heap_number_with_random_function( + isolate).address(), RUNTIME_ENTRY, 2, "V8::FillHeapNumberWithRandom"); - Add(ExternalReference::random_uint32_function().address(), + Add(ExternalReference::random_uint32_function(isolate).address(), RUNTIME_ENTRY, 3, "V8::Random"); - Add(ExternalReference::delete_handle_scope_extensions().address(), + Add(ExternalReference::delete_handle_scope_extensions(isolate).address(), RUNTIME_ENTRY, 4, "HandleScope::DeleteExtensions"); // Miscellaneous - Add(ExternalReference::the_hole_value_location().address(), + Add(ExternalReference::the_hole_value_location(isolate).address(), UNCLASSIFIED, 2, "Factory::the_hole_value().location()"); - Add(ExternalReference::roots_address().address(), + Add(ExternalReference::roots_address(isolate).address(), UNCLASSIFIED, 3, "Heap::roots_address()"); - Add(ExternalReference::address_of_stack_limit().address(), + Add(ExternalReference::address_of_stack_limit(isolate).address(), UNCLASSIFIED, 4, "StackGuard::address_of_jslimit()"); - Add(ExternalReference::address_of_real_stack_limit().address(), + Add(ExternalReference::address_of_real_stack_limit(isolate).address(), UNCLASSIFIED, 5, "StackGuard::address_of_real_jslimit()"); #ifndef V8_INTERPRETED_REGEXP - Add(ExternalReference::address_of_regexp_stack_limit().address(), + 
Add(ExternalReference::address_of_regexp_stack_limit(isolate).address(), UNCLASSIFIED, 6, "RegExpStack::limit_address()"); - Add(ExternalReference::address_of_regexp_stack_memory_address().address(), + Add(ExternalReference::address_of_regexp_stack_memory_address( + isolate).address(), UNCLASSIFIED, 7, "RegExpStack::memory_address()"); - Add(ExternalReference::address_of_regexp_stack_memory_size().address(), + Add(ExternalReference::address_of_regexp_stack_memory_size(isolate).address(), UNCLASSIFIED, 8, "RegExpStack::memory_size()"); - Add(ExternalReference::address_of_static_offsets_vector().address(), + Add(ExternalReference::address_of_static_offsets_vector(isolate).address(), UNCLASSIFIED, 9, "OffsetsVector::static_offsets_vector"); #endif // V8_INTERPRETED_REGEXP - Add(ExternalReference::new_space_start().address(), + Add(ExternalReference::new_space_start(isolate).address(), UNCLASSIFIED, 10, "Heap::NewSpaceStart()"); - Add(ExternalReference::new_space_mask().address(), + Add(ExternalReference::new_space_mask(isolate).address(), UNCLASSIFIED, 11, "Heap::NewSpaceMask()"); - Add(ExternalReference::heap_always_allocate_scope_depth().address(), + Add(ExternalReference::heap_always_allocate_scope_depth(isolate).address(), UNCLASSIFIED, 12, "Heap::always_allocate_scope_depth()"); - Add(ExternalReference::new_space_allocation_limit_address().address(), + Add(ExternalReference::new_space_allocation_limit_address(isolate).address(), UNCLASSIFIED, 13, "Heap::NewSpaceAllocationLimitAddress()"); - Add(ExternalReference::new_space_allocation_top_address().address(), + Add(ExternalReference::new_space_allocation_top_address(isolate).address(), UNCLASSIFIED, 14, "Heap::NewSpaceAllocationTopAddress()"); #ifdef ENABLE_DEBUGGER_SUPPORT - Add(ExternalReference::debug_break().address(), + Add(ExternalReference::debug_break(isolate).address(), UNCLASSIFIED, 15, "Debug::Break()"); - Add(ExternalReference::debug_step_in_fp_address().address(), + 
Add(ExternalReference::debug_step_in_fp_address(isolate).address(), UNCLASSIFIED, 16, "Debug::step_in_fp_addr()"); #endif - Add(ExternalReference::double_fp_operation(Token::ADD).address(), + Add(ExternalReference::double_fp_operation(Token::ADD, isolate).address(), UNCLASSIFIED, 17, "add_two_doubles"); - Add(ExternalReference::double_fp_operation(Token::SUB).address(), + Add(ExternalReference::double_fp_operation(Token::SUB, isolate).address(), UNCLASSIFIED, 18, "sub_two_doubles"); - Add(ExternalReference::double_fp_operation(Token::MUL).address(), + Add(ExternalReference::double_fp_operation(Token::MUL, isolate).address(), UNCLASSIFIED, 19, "mul_two_doubles"); - Add(ExternalReference::double_fp_operation(Token::DIV).address(), + Add(ExternalReference::double_fp_operation(Token::DIV, isolate).address(), UNCLASSIFIED, 20, "div_two_doubles"); - Add(ExternalReference::double_fp_operation(Token::MOD).address(), + Add(ExternalReference::double_fp_operation(Token::MOD, isolate).address(), UNCLASSIFIED, 21, "mod_two_doubles"); - Add(ExternalReference::compare_doubles().address(), + Add(ExternalReference::compare_doubles(isolate).address(), UNCLASSIFIED, 22, "compare_doubles"); #ifndef V8_INTERPRETED_REGEXP - Add(ExternalReference::re_case_insensitive_compare_uc16().address(), + Add(ExternalReference::re_case_insensitive_compare_uc16(isolate).address(), UNCLASSIFIED, 23, "NativeRegExpMacroAssembler::CaseInsensitiveCompareUC16()"); - Add(ExternalReference::re_check_stack_guard_state().address(), + Add(ExternalReference::re_check_stack_guard_state(isolate).address(), UNCLASSIFIED, 24, "RegExpMacroAssembler*::CheckStackGuardState()"); - Add(ExternalReference::re_grow_stack().address(), + Add(ExternalReference::re_grow_stack(isolate).address(), UNCLASSIFIED, 25, "NativeRegExpMacroAssembler::GrowStack()"); @@ -442,15 +452,15 @@ void ExternalReferenceTable::PopulateTable(Isolate* isolate) { "NativeRegExpMacroAssembler::word_character_map"); #endif // V8_INTERPRETED_REGEXP // 
Keyed lookup cache. - Add(ExternalReference::keyed_lookup_cache_keys().address(), + Add(ExternalReference::keyed_lookup_cache_keys(isolate).address(), UNCLASSIFIED, 27, "KeyedLookupCache::keys()"); - Add(ExternalReference::keyed_lookup_cache_field_offsets().address(), + Add(ExternalReference::keyed_lookup_cache_field_offsets(isolate).address(), UNCLASSIFIED, 28, "KeyedLookupCache::field_offsets()"); - Add(ExternalReference::transcendental_cache_array_address().address(), + Add(ExternalReference::transcendental_cache_array_address(isolate).address(), UNCLASSIFIED, 29, "TranscendentalCache::caches()"); @@ -466,11 +476,11 @@ void ExternalReferenceTable::PopulateTable(Isolate* isolate) { UNCLASSIFIED, 32, "HandleScope::level"); - Add(ExternalReference::new_deoptimizer_function().address(), + Add(ExternalReference::new_deoptimizer_function(isolate).address(), UNCLASSIFIED, 33, "Deoptimizer::New()"); - Add(ExternalReference::compute_output_frames_function().address(), + Add(ExternalReference::compute_output_frames_function(isolate).address(), UNCLASSIFIED, 34, "Deoptimizer::ComputeOutputFrames()"); @@ -494,15 +504,15 @@ void ExternalReferenceTable::PopulateTable(Isolate* isolate) { UNCLASSIFIED, 39, "LDoubleConstant::negative_infinity"); - Add(ExternalReference::power_double_double_function().address(), + Add(ExternalReference::power_double_double_function(isolate).address(), UNCLASSIFIED, 40, "power_double_double_function"); - Add(ExternalReference::power_double_int_function().address(), + Add(ExternalReference::power_double_int_function(isolate).address(), UNCLASSIFIED, 41, "power_double_int_function"); - Add(ExternalReference::arguments_marker_location().address(), + Add(ExternalReference::arguments_marker_location(isolate).address(), UNCLASSIFIED, 42, "Factory::arguments_marker().location()"); diff --git a/src/x64/assembler-x64.cc b/src/x64/assembler-x64.cc index 26966e6..9a031ee 100644 --- a/src/x64/assembler-x64.cc +++ b/src/x64/assembler-x64.cc @@ -340,18 +340,18 
@@ static void InitCoverageLog(); #endif Assembler::Assembler(void* buffer, int buffer_size) - : code_targets_(100), + : AssemblerBase(Isolate::Current()), + code_targets_(100), positions_recorder_(this), emit_debug_code_(FLAG_debug_code) { - Isolate* isolate = Isolate::Current(); if (buffer == NULL) { // Do our own buffer management. if (buffer_size <= kMinimalBufferSize) { buffer_size = kMinimalBufferSize; - if (isolate->assembler_spare_buffer() != NULL) { - buffer = isolate->assembler_spare_buffer(); - isolate->set_assembler_spare_buffer(NULL); + if (isolate()->assembler_spare_buffer() != NULL) { + buffer = isolate()->assembler_spare_buffer(); + isolate()->set_assembler_spare_buffer(NULL); } } if (buffer == NULL) { @@ -392,11 +392,10 @@ Assembler::Assembler(void* buffer, int buffer_size) Assembler::~Assembler() { - Isolate* isolate = Isolate::Current(); if (own_buffer_) { - if (isolate->assembler_spare_buffer() == NULL && + if (isolate()->assembler_spare_buffer() == NULL && buffer_size_ == kMinimalBufferSize) { - isolate->set_assembler_spare_buffer(buffer_); + isolate()->set_assembler_spare_buffer(buffer_); } else { DeleteArray(buffer_); } @@ -481,7 +480,6 @@ void Assembler::bind(NearLabel* L) { void Assembler::GrowBuffer() { - Isolate* isolate = Isolate::Current(); ASSERT(buffer_overflow()); if (!own_buffer_) FATAL("external code buffer is too small"); @@ -520,9 +518,9 @@ void Assembler::GrowBuffer() { reloc_info_writer.pos(), desc.reloc_size); // Switch buffers. 
- if (isolate->assembler_spare_buffer() == NULL && + if (isolate()->assembler_spare_buffer() == NULL && buffer_size_ == kMinimalBufferSize) { - isolate->set_assembler_spare_buffer(buffer_); + isolate()->set_assembler_spare_buffer(buffer_); } else { DeleteArray(buffer_); } @@ -1035,7 +1033,7 @@ void Assembler::cmpb_al(Immediate imm8) { void Assembler::cpuid() { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(CPUID)); + ASSERT(isolate()->cpu_features()->IsEnabled(CPUID)); EnsureSpace ensure_space(this); last_pc_ = pc_; emit(0x0F); @@ -2386,7 +2384,7 @@ void Assembler::fistp_s(const Operand& adr) { void Assembler::fisttp_s(const Operand& adr) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE3)); + ASSERT(isolate()->cpu_features()->IsEnabled(SSE3)); EnsureSpace ensure_space(this); last_pc_ = pc_; emit_optional_rex_32(adr); @@ -2396,7 +2394,7 @@ void Assembler::fisttp_s(const Operand& adr) { void Assembler::fisttp_d(const Operand& adr) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE3)); + ASSERT(isolate()->cpu_features()->IsEnabled(SSE3)); EnsureSpace ensure_space(this); last_pc_ = pc_; emit_optional_rex_32(adr); @@ -2714,7 +2712,7 @@ void Assembler::movq(Register dst, XMMRegister src) { void Assembler::movdqa(const Operand& dst, XMMRegister src) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2)); + ASSERT(isolate()->cpu_features()->IsEnabled(SSE2)); EnsureSpace ensure_space(this); last_pc_ = pc_; emit(0x66); @@ -2726,7 +2724,7 @@ void Assembler::movdqa(const Operand& dst, XMMRegister src) { void Assembler::movdqa(XMMRegister dst, const Operand& src) { - ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2)); + ASSERT(isolate()->cpu_features()->IsEnabled(SSE2)); EnsureSpace ensure_space(this); last_pc_ = pc_; emit(0x66); diff --git a/src/x64/assembler-x64.h b/src/x64/assembler-x64.h index a7450f0..5d91436 100644 --- a/src/x64/assembler-x64.h +++ b/src/x64/assembler-x64.h @@ -499,7 +499,7 @@ class CpuFeatures { }; -class 
Assembler : public Malloced { +class Assembler : public AssemblerBase { private: // We check before assembling an instruction that there is sufficient // space to write an instruction and its relocation information. diff --git a/src/x64/builtins-x64.cc b/src/x64/builtins-x64.cc index 8f782a8..3db449c 100644 --- a/src/x64/builtins-x64.cc +++ b/src/x64/builtins-x64.cc @@ -69,7 +69,7 @@ void Builtins::Generate_Adaptor(MacroAssembler* masm, // JumpToExternalReference expects rax to contain the number of arguments // including the receiver and the extra arguments. __ addq(rax, Immediate(num_extra_args + 1)); - __ JumpToExternalReference(ExternalReference(id), 1); + __ JumpToExternalReference(ExternalReference(id, masm->isolate()), 1); } @@ -98,7 +98,7 @@ void Builtins::Generate_JSConstructCall(MacroAssembler* masm) { // Set expected number of arguments to zero (not changing rax). __ movq(rbx, Immediate(0)); __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR); - __ Jump(Handle(Isolate::Current()->builtins()->builtin( + __ Jump(Handle(masm->isolate()->builtins()->builtin( ArgumentsAdaptorTrampoline)), RelocInfo::CODE_TARGET); } @@ -127,7 +127,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, #ifdef ENABLE_DEBUGGER_SUPPORT ExternalReference debug_step_in_fp = - ExternalReference::debug_step_in_fp_address(); + ExternalReference::debug_step_in_fp_address(masm->isolate()); __ movq(kScratchRegister, debug_step_in_fp); __ cmpq(Operand(kScratchRegister, 0), Immediate(0)); __ j(not_equal, &rt_call); @@ -339,7 +339,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm, // Call the function. 
if (is_api_function) { __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); - Handle code = Handle(Isolate::Current()->builtins()->builtin( + Handle code = Handle(masm->isolate()->builtins()->builtin( Builtins::HandleApiCallConstruct)); ParameterCount expected(0); __ InvokeCode(code, expected, expected, @@ -492,7 +492,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, // Invoke the code. if (is_construct) { // Expects rdi to hold function pointer. - __ Call(Handle(Isolate::Current()->builtins()->builtin( + __ Call(Handle(masm->isolate()->builtins()->builtin( Builtins::JSConstructCall)), RelocInfo::CODE_TARGET); } else { ParameterCount actual(rax); @@ -733,7 +733,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) { __ j(not_zero, &function); __ Set(rbx, 0); __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION); - __ Jump(Handle(Isolate::Current()->builtins()->builtin( + __ Jump(Handle(masm->isolate()->builtins()->builtin( ArgumentsAdaptorTrampoline)), RelocInfo::CODE_TARGET); __ bind(&function); } @@ -748,7 +748,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) { __ movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset)); __ cmpq(rax, rbx); __ j(not_equal, - Handle(Isolate::Current()->builtins()->builtin( + Handle(masm->isolate()->builtins()->builtin( ArgumentsAdaptorTrampoline)), RelocInfo::CODE_TARGET); ParameterCount expected(0); @@ -863,7 +863,7 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) { __ movq(rdx, Operand(rbp, kArgumentsOffset)); // load arguments // Use inline caching to speed up access to arguments. 
- Handle ic(Isolate::Current()->builtins()->builtin( + Handle ic(masm->isolate()->builtins()->builtin( Builtins::KeyedLoadIC_Initialize)); __ Call(ic, RelocInfo::CODE_TARGET); // It is important that we do not have a test instruction after the @@ -1265,8 +1265,8 @@ void Builtins::Generate_ArrayCode(MacroAssembler* masm) { // Jump to the generic array code in case the specialized code cannot handle // the construction. __ bind(&generic_array_code); - Code* code = Isolate::Current()->builtins()->builtin( - Builtins::ArrayCodeGeneric); + Code* code = + masm->isolate()->builtins()->builtin(Builtins::ArrayCodeGeneric); Handle array_code(code); __ Jump(array_code, RelocInfo::CODE_TARGET); } @@ -1300,8 +1300,8 @@ void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) { // Jump to the generic construct code in case the specialized code cannot // handle the construction. __ bind(&generic_constructor); - Code* code = Isolate::Current()->builtins()->builtin( - Builtins::JSConstructStubGeneric); + Code* code = + masm->isolate()->builtins()->builtin(Builtins::JSConstructStubGeneric); Handle generic_construct_stub(code); __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET); } diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc index 1d35361..5e53360 100644 --- a/src/x64/code-stubs-x64.cc +++ b/src/x64/code-stubs-x64.cc @@ -1000,7 +1000,7 @@ void GenericBinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { // Perform patching to an appropriate fast case and return the result. __ TailCallExternalReference( - ExternalReference(IC_Utility(IC::kBinaryOp_Patch)), + ExternalReference(IC_Utility(IC::kBinaryOp_Patch), masm->isolate()), 5, 1); } @@ -1036,7 +1036,8 @@ void TypeRecordingBinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { // Patch the caller to an appropriate specialized stub and return the // operation result to the caller of the stub. 
__ TailCallExternalReference( - ExternalReference(IC_Utility(IC::kTypeRecordingBinaryOp_Patch)), + ExternalReference(IC_Utility(IC::kTypeRecordingBinaryOp_Patch), + masm->isolate()), 5, 1); } @@ -1590,10 +1591,12 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) { // ST[0] == double value. // rbx = bits of double value. // rcx = TranscendentalCache::hash(double value). - __ movq(rax, ExternalReference::transcendental_cache_array_address()); - // rax points to cache array. - __ movq(rax, Operand(rax, type_ * sizeof( - Isolate::Current()->transcendental_cache()->caches_[0]))); + ExternalReference cache_array = + ExternalReference::transcendental_cache_array_address(masm->isolate()); + __ movq(rax, cache_array); + int cache_array_index = + type_ * sizeof(Isolate::Current()->transcendental_cache()->caches_[0]); + __ movq(rax, Operand(rax, cache_array_index)); // rax points to the cache for the type type_. // If NULL, the cache hasn't been initialized yet, so go through runtime. __ testq(rax, rax); @@ -1674,7 +1677,8 @@ void TranscendentalCacheStub::Generate(MacroAssembler* masm) { __ bind(&runtime_call_clear_stack); __ fstp(0); __ bind(&runtime_call); - __ TailCallExternalReference(ExternalReference(RuntimeFunction()), 1, 1); + __ TailCallExternalReference( + ExternalReference(RuntimeFunction(), masm->isolate()), 1, 1); } else { // UNTAGGED. __ bind(&runtime_call_clear_stack); __ bind(&runtime_call); @@ -2440,10 +2444,11 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { Label runtime; // Ensure that a RegExp stack is allocated. 
+ Isolate* isolate = masm->isolate(); ExternalReference address_of_regexp_stack_memory_address = - ExternalReference::address_of_regexp_stack_memory_address(); + ExternalReference::address_of_regexp_stack_memory_address(isolate); ExternalReference address_of_regexp_stack_memory_size = - ExternalReference::address_of_regexp_stack_memory_size(); + ExternalReference::address_of_regexp_stack_memory_size(isolate); __ movq(kScratchRegister, address_of_regexp_stack_memory_size); __ movq(kScratchRegister, Operand(kScratchRegister, 0)); __ testq(kScratchRegister, kScratchRegister); @@ -2625,7 +2630,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { #endif // Argument 5: static offsets vector buffer. - __ movq(r8, ExternalReference::address_of_static_offsets_vector()); + __ movq(r8, ExternalReference::address_of_static_offsets_vector(isolate)); // Argument 5 passed in r8 on Linux and on the stack on Windows. #ifdef _WIN64 __ movq(Operand(rsp, (argument_slots_on_stack - 4) * kPointerSize), r8); @@ -2729,7 +2734,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { __ RecordWrite(rcx, RegExpImpl::kLastInputOffset, rax, rdi); // Get the static offsets vector filled by the native regexp code. - __ movq(rcx, ExternalReference::address_of_static_offsets_vector()); + __ movq(rcx, ExternalReference::address_of_static_offsets_vector(isolate)); // rbx: last_match_info backing store (FixedArray) // rcx: offsets vector @@ -2762,7 +2767,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) { // haven't created the exception yet. Handle that in the runtime system. // TODO(592): Rerunning the RegExp to get the stack overflow exception. 
ExternalReference pending_exception_address( - Isolate::k_pending_exception_address); + Isolate::k_pending_exception_address, isolate); __ movq(rbx, pending_exception_address); __ movq(rax, Operand(rbx, 0)); __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex); @@ -3378,7 +3383,7 @@ void CEntryStub::GenerateCore(MacroAssembler* masm, } ExternalReference scope_depth = - ExternalReference::heap_always_allocate_scope_depth(); + ExternalReference::heap_always_allocate_scope_depth(masm->isolate()); if (always_allocate_scope) { __ movq(kScratchRegister, scope_depth); __ incl(Operand(kScratchRegister, 0)); @@ -3457,10 +3462,10 @@ void CEntryStub::GenerateCore(MacroAssembler* masm, // Retrieve the pending exception and clear the variable. ExternalReference pending_exception_address( - Isolate::k_pending_exception_address); + Isolate::k_pending_exception_address, masm->isolate()); __ movq(kScratchRegister, pending_exception_address); __ movq(rax, Operand(kScratchRegister, 0)); - __ movq(rdx, ExternalReference::the_hole_value_location()); + __ movq(rdx, ExternalReference::the_hole_value_location(masm->isolate())); __ movq(rdx, Operand(rdx, 0)); __ movq(Operand(kScratchRegister, 0), rdx); @@ -3589,8 +3594,10 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { // TODO(X64): On Win64, if we ever use XMM6-XMM15, the low low 64 bits are // callee save as well. + Isolate* isolate = masm->isolate(); + // Save copies of the top frame descriptor on the stack. - ExternalReference c_entry_fp(Isolate::k_c_entry_fp_address); + ExternalReference c_entry_fp(Isolate::k_c_entry_fp_address, isolate); __ load_rax(c_entry_fp); __ push(rax); @@ -3601,7 +3608,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { #ifdef ENABLE_LOGGING_AND_PROFILING // If this is the outermost JS call, set js_entry_sp value. 
- ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address); + ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address, isolate); __ load_rax(js_entry_sp); __ testq(rax, rax); __ j(not_zero, ¬_outermost_js); @@ -3615,7 +3622,8 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { // Caught exception: Store result (exception) in the pending // exception field in the JSEnv and return a failure sentinel. - ExternalReference pending_exception(Isolate::k_pending_exception_address); + ExternalReference pending_exception(Isolate::k_pending_exception_address, + isolate); __ store_rax(pending_exception); __ movq(rax, Failure::Exception(), RelocInfo::NONE); __ jmp(&exit); @@ -3625,7 +3633,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { __ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER); // Clear any pending exceptions. - __ load_rax(ExternalReference::the_hole_value_location()); + __ load_rax(ExternalReference::the_hole_value_location(isolate)); __ store_rax(pending_exception); // Fake a receiver (NULL). @@ -3637,17 +3645,19 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { // directly in the code, because the builtin stubs may not have been // generated yet at the time this code is generated. if (is_construct) { - ExternalReference construct_entry(Builtins::JSConstructEntryTrampoline); + ExternalReference construct_entry(Builtins::JSConstructEntryTrampoline, + isolate); __ load_rax(construct_entry); } else { - ExternalReference entry(Builtins::JSEntryTrampoline); + ExternalReference entry(Builtins::JSEntryTrampoline, isolate); __ load_rax(entry); } __ lea(kScratchRegister, FieldOperand(rax, Code::kHeaderSize)); __ call(kScratchRegister); // Unlink this frame from the handler chain. 
- __ movq(kScratchRegister, ExternalReference(Isolate::k_handler_address)); + __ movq(kScratchRegister, + ExternalReference(Isolate::k_handler_address, isolate)); __ pop(Operand(kScratchRegister, 0)); // Pop next_sp. __ addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize)); @@ -3664,7 +3674,8 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { // Restore the top frame descriptor from the stack. __ bind(&exit); - __ movq(kScratchRegister, ExternalReference(Isolate::k_c_entry_fp_address)); + __ movq(kScratchRegister, + ExternalReference(Isolate::k_c_entry_fp_address, isolate)); __ pop(Operand(kScratchRegister, 0)); // Restore callee-saved registers (X64 conventions). @@ -5041,7 +5052,8 @@ void ICCompareStub::GenerateMiss(MacroAssembler* masm) { __ push(rcx); // Call the runtime system in a fresh internal frame. - ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss)); + ExternalReference miss = + ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate()); __ EnterInternalFrame(); __ push(rdx); __ push(rax); diff --git a/src/x64/codegen-x64.cc b/src/x64/codegen-x64.cc index 40d6563..33c5752 100644 --- a/src/x64/codegen-x64.cc +++ b/src/x64/codegen-x64.cc @@ -3981,7 +3981,7 @@ void CodeGenerator::VisitTryCatchStatement(TryCatchStatement* node) { function_return_is_shadowed_ = function_return_was_shadowed; // Get an external reference to the handler address. - ExternalReference handler_address(Isolate::k_handler_address); + ExternalReference handler_address(Isolate::k_handler_address, isolate()); // Make sure that there's nothing left on the stack above the // handler structure. @@ -4110,7 +4110,7 @@ void CodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* node) { function_return_is_shadowed_ = function_return_was_shadowed; // Get an external reference to the handler address. 
- ExternalReference handler_address(Isolate::k_handler_address); + ExternalReference handler_address(Isolate::k_handler_address, isolate()); // If we can fall off the end of the try block, unlink from the try // chain and set the state on the frame to FALLING. @@ -6439,7 +6439,7 @@ void CodeGenerator::GenerateRandomHeapNumber( // Return a random uint32 number in rax. // The fresh HeapNumber is in rbx, which is callee-save on both x64 ABIs. __ PrepareCallCFunction(0); - __ CallCFunction(ExternalReference::random_uint32_function(), 0); + __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 0); // Convert 32 random bits in rax to 0.(32 random bits) in a double // by computing: diff --git a/src/x64/debug-x64.cc b/src/x64/debug-x64.cc index 19175bc..0398465 100644 --- a/src/x64/debug-x64.cc +++ b/src/x64/debug-x64.cc @@ -129,7 +129,7 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm, __ RecordComment("// Calling from debug break to runtime - come in - over"); #endif __ Set(rax, 0); // No arguments (argc == 0). - __ movq(rbx, ExternalReference::debug_break()); + __ movq(rbx, ExternalReference::debug_break(masm->isolate())); CEntryStub ceb(1); __ CallStub(&ceb); @@ -168,7 +168,7 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm, // jumping to the target address intended by the caller and that was // overwritten by the address of DebugBreakXXX. 
ExternalReference after_break_target = - ExternalReference(Debug_Address::AfterBreakTarget()); + ExternalReference(Debug_Address::AfterBreakTarget(), masm->isolate()); __ movq(kScratchRegister, after_break_target); __ jmp(Operand(kScratchRegister, 0)); } @@ -284,7 +284,8 @@ void Debug::GeneratePlainReturnLiveEdit(MacroAssembler* masm) { void Debug::GenerateFrameDropperLiveEdit(MacroAssembler* masm) { ExternalReference restarter_frame_function_slot = - ExternalReference(Debug_Address::RestarterFrameFunctionPointer()); + ExternalReference(Debug_Address::RestarterFrameFunctionPointer(), + masm->isolate()); __ movq(rax, restarter_frame_function_slot); __ movq(Operand(rax, 0), Immediate(0)); diff --git a/src/x64/deoptimizer-x64.cc b/src/x64/deoptimizer-x64.cc index 257b7ea..b1c4274 100644 --- a/src/x64/deoptimizer-x64.cc +++ b/src/x64/deoptimizer-x64.cc @@ -654,7 +654,9 @@ void Deoptimizer::EntryGenerator::Generate() { __ movq(r8, arg5); #endif - __ CallCFunction(ExternalReference::new_deoptimizer_function(), 5); + Isolate* isolate = masm()->isolate(); + + __ CallCFunction(ExternalReference::new_deoptimizer_function(isolate), 5); // Preserve deoptimizer object in register rax and get the input // frame descriptor pointer. __ movq(rbx, Operand(rax, Deoptimizer::input_offset())); @@ -699,7 +701,8 @@ void Deoptimizer::EntryGenerator::Generate() { __ push(rax); __ PrepareCallCFunction(1); __ movq(arg1, rax); - __ CallCFunction(ExternalReference::compute_output_frames_function(), 1); + __ CallCFunction( + ExternalReference::compute_output_frames_function(isolate), 1); __ pop(rax); // Replace the current frame with the output frames. @@ -757,7 +760,7 @@ void Deoptimizer::EntryGenerator::Generate() { } // Set up the roots register. 
- ExternalReference roots_address = ExternalReference::roots_address(); + ExternalReference roots_address = ExternalReference::roots_address(isolate); __ InitializeRootRegister(); __ InitializeSmiConstantRegister(); diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc index f04c3cb..2253596 100644 --- a/src/x64/full-codegen-x64.cc +++ b/src/x64/full-codegen-x64.cc @@ -2701,7 +2701,7 @@ void FullCodeGenerator::EmitRandomHeapNumber(ZoneList* args) { // Return a random uint32 number in rax. // The fresh HeapNumber is in rbx, which is callee-save on both x64 ABIs. __ PrepareCallCFunction(0); - __ CallCFunction(ExternalReference::random_uint32_function(), 0); + __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 0); // Convert 32 random bits in rax to 0.(32 random bits) in a double // by computing: diff --git a/src/x64/ic-x64.cc b/src/x64/ic-x64.cc index 2774fbe..64b1f06 100644 --- a/src/x64/ic-x64.cc +++ b/src/x64/ic-x64.cc @@ -630,7 +630,7 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) { // Load the key (consisting of map and symbol) from the cache and // check for match. ExternalReference cache_keys - = ExternalReference::keyed_lookup_cache_keys(); + = ExternalReference::keyed_lookup_cache_keys(masm->isolate()); __ movq(rdi, rcx); __ shl(rdi, Immediate(kPointerSizeLog2 + 1)); __ movq(kScratchRegister, cache_keys); @@ -641,7 +641,7 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) { // Get field offset, which is a 32-bit integer. 
ExternalReference cache_field_offsets - = ExternalReference::keyed_lookup_cache_field_offsets(); + = ExternalReference::keyed_lookup_cache_field_offsets(masm->isolate()); __ movq(kScratchRegister, cache_field_offsets); __ movl(rdi, Operand(kScratchRegister, rcx, times_4, 0)); __ movzxbq(rcx, FieldOperand(rbx, Map::kInObjectPropertiesOffset)); @@ -750,8 +750,11 @@ void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) { __ push(rcx); // return address // Perform tail call to the entry. - __ TailCallExternalReference(ExternalReference( - IC_Utility(kKeyedLoadPropertyWithInterceptor)), 2, 1); + __ TailCallExternalReference( + ExternalReference(IC_Utility(kKeyedLoadPropertyWithInterceptor), + masm->isolate()), + 2, + 1); __ bind(&slow); GenerateMiss(masm); @@ -1006,7 +1009,7 @@ static void GenerateCallMiss(MacroAssembler* masm, int argc, IC::UtilityId id) { // Call the entry. CEntryStub stub(1); __ movq(rax, Immediate(2)); - __ movq(rbx, ExternalReference(IC_Utility(id))); + __ movq(rbx, ExternalReference(IC_Utility(id), masm->isolate())); __ CallStub(&stub); // Move result to rdi and exit the internal frame. @@ -1284,7 +1287,8 @@ void LoadIC::GenerateMiss(MacroAssembler* masm) { __ push(rbx); // return address // Perform tail call to the entry. - ExternalReference ref = ExternalReference(IC_Utility(kLoadIC_Miss)); + ExternalReference ref = + ExternalReference(IC_Utility(kLoadIC_Miss), masm->isolate()); __ TailCallExternalReference(ref, 2, 1); } @@ -1428,7 +1432,8 @@ void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) { __ push(rbx); // return address // Perform tail call to the entry. - ExternalReference ref = ExternalReference(IC_Utility(kKeyedLoadIC_Miss)); + ExternalReference ref + = ExternalReference(IC_Utility(kKeyedLoadIC_Miss), masm->isolate()); __ TailCallExternalReference(ref, 2, 1); } @@ -1487,7 +1492,8 @@ void StoreIC::GenerateMiss(MacroAssembler* masm) { __ push(rbx); // return address // Perform tail call to the entry. 
- ExternalReference ref = ExternalReference(IC_Utility(kStoreIC_Miss)); + ExternalReference ref = + ExternalReference(IC_Utility(kStoreIC_Miss), masm->isolate()); __ TailCallExternalReference(ref, 3, 1); } @@ -1540,7 +1546,8 @@ void StoreIC::GenerateArrayLength(MacroAssembler* masm) { __ push(value); __ push(scratch); // return address - ExternalReference ref = ExternalReference(IC_Utility(kStoreIC_ArrayLength)); + ExternalReference ref = + ExternalReference(IC_Utility(kStoreIC_ArrayLength), masm->isolate()); __ TailCallExternalReference(ref, 2, 1); __ bind(&miss); @@ -1629,7 +1636,8 @@ void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) { __ push(rbx); // return address // Do tail-call to runtime routine. - ExternalReference ref = ExternalReference(IC_Utility(kKeyedStoreIC_Miss)); + ExternalReference ref = + ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate()); __ TailCallExternalReference(ref, 3, 1); } diff --git a/src/x64/lithium-codegen-x64.cc b/src/x64/lithium-codegen-x64.cc index 69eee58..6385416 100644 --- a/src/x64/lithium-codegen-x64.cc +++ b/src/x64/lithium-codegen-x64.cc @@ -1200,7 +1200,8 @@ void LCodeGen::DoArithmeticD(LArithmeticD* instr) { __ PrepareCallCFunction(2); __ movsd(xmm0, left); ASSERT(right.is(xmm1)); - __ CallCFunction(ExternalReference::double_fp_operation(Token::MOD), 2); + __ CallCFunction( + ExternalReference::double_fp_operation(Token::MOD, isolate()), 2); __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); __ movsd(result, xmm0); break; @@ -2621,7 +2622,8 @@ void LCodeGen::DoPower(LPower* instr) { // Move arguments to correct registers __ movsd(xmm0, left_reg); ASSERT(ToDoubleRegister(right).is(xmm1)); - __ CallCFunction(ExternalReference::power_double_double_function(), 2); + __ CallCFunction( + ExternalReference::power_double_double_function(isolate()), 2); } else if (exponent_type.IsInteger32()) { __ PrepareCallCFunction(2); // Move arguments to correct registers: xmm0 and edi (not rdi). 
@@ -2632,7 +2634,8 @@ void LCodeGen::DoPower(LPower* instr) { #else ASSERT(ToRegister(right).is(rdi)); #endif - __ CallCFunction(ExternalReference::power_double_int_function(), 2); + __ CallCFunction( + ExternalReference::power_double_int_function(isolate()), 2); } else { ASSERT(exponent_type.IsTagged()); CpuFeatures::Scope scope(SSE2); @@ -2654,7 +2657,8 @@ void LCodeGen::DoPower(LPower* instr) { // Move arguments to correct registers xmm0 and xmm1. __ movsd(xmm0, left_reg); // Right argument is already in xmm1. - __ CallCFunction(ExternalReference::power_double_double_function(), 2); + __ CallCFunction( + ExternalReference::power_double_double_function(isolate()), 2); } // Return value is in xmm0. __ movsd(result_reg, xmm0); diff --git a/src/x64/macro-assembler-x64.cc b/src/x64/macro-assembler-x64.cc index b4d4d9d..75a0d9a 100644 --- a/src/x64/macro-assembler-x64.cc +++ b/src/x64/macro-assembler-x64.cc @@ -393,7 +393,7 @@ void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) { void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) { const Runtime::Function* function = Runtime::FunctionForId(id); Set(rax, function->nargs); - movq(rbx, ExternalReference(function)); + movq(rbx, ExternalReference(function, isolate())); CEntryStub ces(1); ces.SaveDoubles(); CallStub(&ces); @@ -421,7 +421,7 @@ void MacroAssembler::CallRuntime(const Runtime::Function* f, // should remove this need and make the runtime routine entry code // smarter. Set(rax, num_arguments); - movq(rbx, ExternalReference(f)); + movq(rbx, ExternalReference(f, isolate())); CEntryStub ces(f->result_size); CallStub(&ces); } @@ -441,7 +441,7 @@ MaybeObject* MacroAssembler::TryCallRuntime(const Runtime::Function* f, // should remove this need and make the runtime routine entry code // smarter. 
Set(rax, num_arguments); - movq(rbx, ExternalReference(f)); + movq(rbx, ExternalReference(f, isolate())); CEntryStub ces(f->result_size); return TryCallStub(&ces); } @@ -497,14 +497,16 @@ MaybeObject* MacroAssembler::TryTailCallExternalReference( void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid, int num_arguments, int result_size) { - TailCallExternalReference(ExternalReference(fid), num_arguments, result_size); + TailCallExternalReference(ExternalReference(fid, isolate()), + num_arguments, + result_size); } MaybeObject* MacroAssembler::TryTailCallRuntime(Runtime::FunctionId fid, int num_arguments, int result_size) { - return TryTailCallExternalReference(ExternalReference(fid), + return TryTailCallExternalReference(ExternalReference(fid, isolate()), num_arguments, result_size); } @@ -551,7 +553,7 @@ MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn( ExternalReference::handle_scope_level_address(), next_address); ExternalReference scheduled_exception_address = - ExternalReference::scheduled_exception_address(); + ExternalReference::scheduled_exception_address(isolate()); // Allocate HandleScope in callee-save registers. Register prev_next_address_reg = r14; @@ -615,7 +617,7 @@ MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn( #else movq(rdi, ExternalReference::isolate_address()); #endif - movq(rax, ExternalReference::delete_handle_scope_extensions()); + movq(rax, ExternalReference::delete_handle_scope_extensions(isolate())); call(rax); movq(rax, prev_limit_reg); jmp(&leave_exit_frame); @@ -1615,7 +1617,8 @@ void MacroAssembler::PushTryHandler(CodeLocation try_location, push(Immediate(0)); // NULL frame pointer. } // Save the current handler. - movq(kScratchRegister, ExternalReference(Isolate::k_handler_address)); + movq(kScratchRegister, + ExternalReference(Isolate::k_handler_address, isolate())); push(Operand(kScratchRegister, 0)); // Link this handler. 
movq(Operand(kScratchRegister, 0), rsp); @@ -1625,7 +1628,8 @@ void MacroAssembler::PushTryHandler(CodeLocation try_location, void MacroAssembler::PopTryHandler() { ASSERT_EQ(0, StackHandlerConstants::kNextOffset); // Unlink this handler. - movq(kScratchRegister, ExternalReference(Isolate::k_handler_address)); + movq(kScratchRegister, + ExternalReference(Isolate::k_handler_address, isolate())); pop(Operand(kScratchRegister, 0)); // Remove the remaining fields. addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize)); @@ -1644,7 +1648,7 @@ void MacroAssembler::Throw(Register value) { movq(rax, value); } - ExternalReference handler_address(Isolate::k_handler_address); + ExternalReference handler_address(Isolate::k_handler_address, isolate()); movq(kScratchRegister, handler_address); movq(rsp, Operand(kScratchRegister, 0)); // get next in chain @@ -1672,7 +1676,7 @@ void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type, movq(rax, value); } // Fetch top stack handler. - ExternalReference handler_address(Isolate::k_handler_address); + ExternalReference handler_address(Isolate::k_handler_address, isolate()); movq(kScratchRegister, handler_address); movq(rsp, Operand(kScratchRegister, 0)); @@ -1696,12 +1700,13 @@ void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type, if (type == OUT_OF_MEMORY) { // Set external caught exception to false. ExternalReference external_caught( - Isolate::k_external_caught_exception_address); + Isolate::k_external_caught_exception_address, isolate()); movq(rax, Immediate(false)); store_rax(external_caught); // Set pending exception and rax to out of memory exception. 
- ExternalReference pending_exception(Isolate::k_pending_exception_address); + ExternalReference pending_exception(Isolate::k_pending_exception_address, + isolate()); movq(rax, Failure::OutOfMemoryException(), RelocInfo::NONE); store_rax(pending_exception); } @@ -1921,7 +1926,7 @@ void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) { void MacroAssembler::DebugBreak() { ASSERT(allow_stub_calls()); Set(rax, 0); // No arguments. - movq(rbx, ExternalReference(Runtime::kDebugBreak)); + movq(rbx, ExternalReference(Runtime::kDebugBreak, isolate())); CEntryStub ces(1); Call(ces.GetCode(), RelocInfo::DEBUG_BREAK); } @@ -2075,10 +2080,12 @@ void MacroAssembler::EnterExitFramePrologue(bool save_rax) { movq(r14, rax); // Backup rax in callee-save register. } - movq(kScratchRegister, ExternalReference(Isolate::k_c_entry_fp_address)); + movq(kScratchRegister, + ExternalReference(Isolate::k_c_entry_fp_address, isolate())); movq(Operand(kScratchRegister, 0), rbp); - movq(kScratchRegister, ExternalReference(Isolate::k_context_address)); + movq(kScratchRegister, + ExternalReference(Isolate::k_context_address, isolate())); movq(Operand(kScratchRegister, 0), rsi); } @@ -2170,7 +2177,7 @@ void MacroAssembler::LeaveApiExitFrame() { void MacroAssembler::LeaveExitFrameEpilogue() { // Restore current context from top and clear it in debug mode. - ExternalReference context_address(Isolate::k_context_address); + ExternalReference context_address(Isolate::k_context_address, isolate()); movq(kScratchRegister, context_address); movq(rsi, Operand(kScratchRegister, 0)); #ifdef DEBUG @@ -2178,7 +2185,8 @@ void MacroAssembler::LeaveExitFrameEpilogue() { #endif // Clear the top frame. 
- ExternalReference c_entry_fp_address(Isolate::k_c_entry_fp_address); + ExternalReference c_entry_fp_address(Isolate::k_c_entry_fp_address, + isolate()); movq(kScratchRegister, c_entry_fp_address); movq(Operand(kScratchRegister, 0), Immediate(0)); } @@ -2251,7 +2259,7 @@ void MacroAssembler::LoadAllocationTopHelper(Register result, Register scratch, AllocationFlags flags) { ExternalReference new_space_allocation_top = - ExternalReference::new_space_allocation_top_address(); + ExternalReference::new_space_allocation_top_address(isolate()); // Just return if allocation top is already known. if ((flags & RESULT_CONTAINS_TOP) != 0) { @@ -2288,7 +2296,7 @@ void MacroAssembler::UpdateAllocationTopHelper(Register result_end, } ExternalReference new_space_allocation_top = - ExternalReference::new_space_allocation_top_address(); + ExternalReference::new_space_allocation_top_address(isolate()); // Update new top. if (result_end.is(rax)) { @@ -2333,7 +2341,7 @@ void MacroAssembler::AllocateInNewSpace(int object_size, // Calculate new top and bail out if new space is exhausted. ExternalReference new_space_allocation_limit = - ExternalReference::new_space_allocation_limit_address(); + ExternalReference::new_space_allocation_limit_address(isolate()); Register top_reg = result_end.is_valid() ? result_end : result; @@ -2390,7 +2398,7 @@ void MacroAssembler::AllocateInNewSpace(int header_size, // Calculate new top and bail out if new space is exhausted. ExternalReference new_space_allocation_limit = - ExternalReference::new_space_allocation_limit_address(); + ExternalReference::new_space_allocation_limit_address(isolate()); // We assume that element_count*element_size + header_size does not // overflow. @@ -2437,7 +2445,7 @@ void MacroAssembler::AllocateInNewSpace(Register object_size, // Calculate new top and bail out if new space is exhausted. 
ExternalReference new_space_allocation_limit = - ExternalReference::new_space_allocation_limit_address(); + ExternalReference::new_space_allocation_limit_address(isolate()); if (!object_size.is(result_end)) { movq(result_end, object_size); } @@ -2459,7 +2467,7 @@ void MacroAssembler::AllocateInNewSpace(Register object_size, void MacroAssembler::UndoAllocationInNewSpace(Register object) { ExternalReference new_space_allocation_top = - ExternalReference::new_space_allocation_top_address(); + ExternalReference::new_space_allocation_top_address(isolate()); // Make sure the object has no tag before resetting top. and_(object, Immediate(~kHeapObjectTagMask)); diff --git a/src/x64/macro-assembler-x64.h b/src/x64/macro-assembler-x64.h index b7bd039..9653e1c 100644 --- a/src/x64/macro-assembler-x64.h +++ b/src/x64/macro-assembler-x64.h @@ -186,7 +186,8 @@ class MacroAssembler: public Assembler { void LoadFromSafepointRegisterSlot(Register dst, Register src); void InitializeRootRegister() { - ExternalReference roots_address = ExternalReference::roots_address(); + ExternalReference roots_address = + ExternalReference::roots_address(isolate()); movq(kRootRegister, roots_address); addq(kRootRegister, Immediate(kRootRegisterBias)); } @@ -1797,13 +1798,13 @@ void MacroAssembler::InNewSpace(Register object, // case the size of the new space is different between the snapshot maker // and the running system. 
if (scratch.is(object)) { - movq(kScratchRegister, ExternalReference::new_space_mask()); + movq(kScratchRegister, ExternalReference::new_space_mask(isolate())); and_(scratch, kScratchRegister); } else { - movq(scratch, ExternalReference::new_space_mask()); + movq(scratch, ExternalReference::new_space_mask(isolate())); and_(scratch, object); } - movq(kScratchRegister, ExternalReference::new_space_start()); + movq(kScratchRegister, ExternalReference::new_space_start(isolate())); cmpq(scratch, kScratchRegister); j(cc, branch); } else { diff --git a/src/x64/regexp-macro-assembler-x64.cc b/src/x64/regexp-macro-assembler-x64.cc index 37d93cc..49c1377 100644 --- a/src/x64/regexp-macro-assembler-x64.cc +++ b/src/x64/regexp-macro-assembler-x64.cc @@ -423,7 +423,7 @@ void RegExpMacroAssemblerX64::CheckNotBackReferenceIgnoreCase( __ movq(rdx, rbx); #endif ExternalReference compare = - ExternalReference::re_case_insensitive_compare_uc16(); + ExternalReference::re_case_insensitive_compare_uc16(masm_->isolate()); __ CallCFunction(compare, num_arguments); // Restore original values before reacting on result value. @@ -741,7 +741,7 @@ Handle RegExpMacroAssemblerX64::GetCode(Handle source) { Label stack_ok; ExternalReference stack_limit = - ExternalReference::address_of_stack_limit(); + ExternalReference::address_of_stack_limit(masm_->isolate()); __ movq(rcx, rsp); __ movq(kScratchRegister, stack_limit); __ subq(rcx, Operand(kScratchRegister, 0)); @@ -926,7 +926,8 @@ Handle RegExpMacroAssemblerX64::GetCode(Handle source) { __ movq(rdi, backtrack_stackpointer()); // First argument. __ lea(rsi, Operand(rbp, kStackHighEnd)); // Second argument. #endif - ExternalReference grow_stack = ExternalReference::re_grow_stack(); + ExternalReference grow_stack = + ExternalReference::re_grow_stack(masm_->isolate()); __ CallCFunction(grow_stack, num_arguments); // If return NULL, we have failed to grow the stack, and // must exit with a stack-overflow exception. 
@@ -1128,7 +1129,7 @@ void RegExpMacroAssemblerX64::CallCheckStackGuardState() { __ lea(rdi, Operand(rsp, -kPointerSize)); #endif ExternalReference stack_check = - ExternalReference::re_check_stack_guard_state(); + ExternalReference::re_check_stack_guard_state(masm_->isolate()); __ CallCFunction(stack_check, num_arguments); } @@ -1328,7 +1329,7 @@ void RegExpMacroAssemblerX64::CheckPreemption() { // Check for preemption. Label no_preempt; ExternalReference stack_limit = - ExternalReference::address_of_stack_limit(); + ExternalReference::address_of_stack_limit(masm_->isolate()); __ load_rax(stack_limit); __ cmpq(rsp, rax); __ j(above, &no_preempt); @@ -1342,7 +1343,7 @@ void RegExpMacroAssemblerX64::CheckPreemption() { void RegExpMacroAssemblerX64::CheckStackLimit() { Label no_stack_overflow; ExternalReference stack_limit = - ExternalReference::address_of_regexp_stack_limit(); + ExternalReference::address_of_regexp_stack_limit(masm_->isolate()); __ load_rax(stack_limit); __ cmpq(backtrack_stackpointer(), rax); __ j(above, &no_stack_overflow); diff --git a/src/x64/stub-cache-x64.cc b/src/x64/stub-cache-x64.cc index d57940c..9090516 100644 --- a/src/x64/stub-cache-x64.cc +++ b/src/x64/stub-cache-x64.cc @@ -395,7 +395,8 @@ static void CompileCallLoadPropertyWithInterceptor(MacroAssembler* masm, PushInterceptorArguments(masm, receiver, holder, name, holder_obj); ExternalReference ref = - ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly)); + ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly), + masm->isolate()); __ movq(rax, Immediate(5)); __ movq(rbx, ref); @@ -691,7 +692,8 @@ class CallInterceptorCompiler BASE_EMBEDDED { interceptor_holder); __ CallExternalReference( - ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForCall)), + ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForCall), + masm->isolate()), 5); // Restore the name_ register. 
@@ -779,7 +781,10 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm, __ push(rax); __ push(scratch); __ TailCallExternalReference( - ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage)), 3, 1); + ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage), + masm->isolate()), + 3, + 1); return; } @@ -1233,7 +1238,8 @@ void StubCompiler::GenerateLoadInterceptor(JSObject* object, __ push(scratch2); // restore return address ExternalReference ref = - ExternalReference(IC_Utility(IC::kLoadCallbackProperty)); + ExternalReference(IC_Utility(IC::kLoadCallbackProperty), + masm()->isolate()); __ TailCallExternalReference(ref, 5, 1); } } else { // !compile_followup_inline @@ -1248,7 +1254,7 @@ void StubCompiler::GenerateLoadInterceptor(JSObject* object, __ push(scratch2); // restore old return address ExternalReference ref = ExternalReference( - IC_Utility(IC::kLoadPropertyWithInterceptorForLoad)); + IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), masm()->isolate()); __ TailCallExternalReference(ref, 5, 1); } } @@ -1477,10 +1483,11 @@ MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object, __ jmp(&call_builtin); } + Isolate* isolate = masm()->isolate(); ExternalReference new_space_allocation_top = - ExternalReference::new_space_allocation_top_address(); + ExternalReference::new_space_allocation_top_address(isolate); ExternalReference new_space_allocation_limit = - ExternalReference::new_space_allocation_limit_address(); + ExternalReference::new_space_allocation_limit_address(isolate); const int kAllocationDelta = 4; // Load top. 
@@ -1527,7 +1534,8 @@ MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object, } __ bind(&call_builtin); - __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush), + __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush, + masm()->isolate()), argc + 1, 1); } @@ -1610,9 +1618,10 @@ MaybeObject* CallStubCompiler::CompileArrayPopCall(Object* object, __ ret((argc + 1) * kPointerSize); __ bind(&call_builtin); - __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPop), - argc + 1, - 1); + __ TailCallExternalReference( + ExternalReference(Builtins::c_ArrayPop, masm()->isolate()), + argc + 1, + 1); __ bind(&miss); MaybeObject* maybe_result = GenerateMissBranch(); @@ -2367,7 +2376,8 @@ MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object, // Do tail-call to the runtime system. ExternalReference store_callback_property = - ExternalReference(IC_Utility(IC::kStoreCallbackProperty)); + ExternalReference(IC_Utility(IC::kStoreCallbackProperty), + masm()->isolate()); __ TailCallExternalReference(store_callback_property, 4, 1); // Handle store cache miss. @@ -2417,7 +2427,8 @@ MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver, // Do tail-call to the runtime system. ExternalReference store_ic_property = - ExternalReference(IC_Utility(IC::kStoreInterceptorProperty)); + ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), + masm()->isolate()); __ TailCallExternalReference(store_ic_property, 4, 1); // Handle store cache miss. 
diff --git a/test/cctest/test-disasm-ia32.cc b/test/cctest/test-disasm-ia32.cc index 2f10177..d65656a 100644 --- a/test/cctest/test-disasm-ia32.cc +++ b/test/cctest/test-disasm-ia32.cc @@ -283,7 +283,8 @@ TEST(DisasmIa320) { __ jmp(Operand(ebx, ecx, times_4, 10000)); #ifdef ENABLE_DEBUGGER_SUPPORT ExternalReference after_break_target = - ExternalReference(Debug_Address::AfterBreakTarget()); + ExternalReference(Debug_Address::AfterBreakTarget(), + assm.isolate()); __ jmp(Operand::StaticVariable(after_break_target)); #endif // ENABLE_DEBUGGER_SUPPORT __ jmp(ic, RelocInfo::CODE_TARGET); diff --git a/test/cctest/test-serialize.cc b/test/cctest/test-serialize.cc index b7a6ca9..ff01985 100644 --- a/test/cctest/test-serialize.cc +++ b/test/cctest/test-serialize.cc @@ -83,7 +83,7 @@ static int* counter_function(const char* name) { template static Address AddressOf(T id) { - return ExternalReference(id).address(); + return ExternalReference(id, i::Isolate::Current()).address(); } @@ -100,7 +100,8 @@ static int make_code(TypeCode type, int id) { TEST(ExternalReferenceEncoder) { OS::Setup(); - i::Isolate::Current()->stats_table()->SetCounterFunction(counter_function); + Isolate* isolate = i::Isolate::Current(); + isolate->stats_table()->SetCounterFunction(counter_function); HEAP->Setup(false); ExternalReferenceEncoder encoder; CHECK_EQ(make_code(BUILTIN, Builtins::ArrayCode), @@ -114,31 +115,33 @@ TEST(ExternalReferenceEncoder) { CHECK_EQ(make_code(STATS_COUNTER, Counters::k_keyed_load_function_prototype), encoder.Encode(keyed_load_function_prototype.address())); ExternalReference the_hole_value_location = - ExternalReference::the_hole_value_location(); + ExternalReference::the_hole_value_location(isolate); CHECK_EQ(make_code(UNCLASSIFIED, 2), encoder.Encode(the_hole_value_location.address())); ExternalReference stack_limit_address = - ExternalReference::address_of_stack_limit(); + ExternalReference::address_of_stack_limit(isolate); CHECK_EQ(make_code(UNCLASSIFIED, 4), 
encoder.Encode(stack_limit_address.address())); ExternalReference real_stack_limit_address = - ExternalReference::address_of_real_stack_limit(); + ExternalReference::address_of_real_stack_limit(isolate); CHECK_EQ(make_code(UNCLASSIFIED, 5), encoder.Encode(real_stack_limit_address.address())); #ifdef ENABLE_DEBUGGER_SUPPORT CHECK_EQ(make_code(UNCLASSIFIED, 15), - encoder.Encode(ExternalReference::debug_break().address())); + encoder.Encode(ExternalReference::debug_break(isolate).address())); #endif // ENABLE_DEBUGGER_SUPPORT CHECK_EQ(make_code(UNCLASSIFIED, 10), - encoder.Encode(ExternalReference::new_space_start().address())); + encoder.Encode( + ExternalReference::new_space_start(isolate).address())); CHECK_EQ(make_code(UNCLASSIFIED, 3), - encoder.Encode(ExternalReference::roots_address().address())); + encoder.Encode(ExternalReference::roots_address(isolate).address())); } TEST(ExternalReferenceDecoder) { OS::Setup(); - i::Isolate::Current()->stats_table()->SetCounterFunction(counter_function); + Isolate* isolate = i::Isolate::Current(); + isolate->stats_table()->SetCounterFunction(counter_function); HEAP->Setup(false); ExternalReferenceDecoder decoder; CHECK_EQ(AddressOf(Builtins::ArrayCode), @@ -154,17 +157,17 @@ TEST(ExternalReferenceDecoder) { decoder.Decode( make_code(STATS_COUNTER, Counters::k_keyed_load_function_prototype))); - CHECK_EQ(ExternalReference::the_hole_value_location().address(), + CHECK_EQ(ExternalReference::the_hole_value_location(isolate).address(), decoder.Decode(make_code(UNCLASSIFIED, 2))); - CHECK_EQ(ExternalReference::address_of_stack_limit().address(), + CHECK_EQ(ExternalReference::address_of_stack_limit(isolate).address(), decoder.Decode(make_code(UNCLASSIFIED, 4))); - CHECK_EQ(ExternalReference::address_of_real_stack_limit().address(), + CHECK_EQ(ExternalReference::address_of_real_stack_limit(isolate).address(), decoder.Decode(make_code(UNCLASSIFIED, 5))); #ifdef ENABLE_DEBUGGER_SUPPORT - 
CHECK_EQ(ExternalReference::debug_break().address(), + CHECK_EQ(ExternalReference::debug_break(isolate).address(), decoder.Decode(make_code(UNCLASSIFIED, 15))); #endif // ENABLE_DEBUGGER_SUPPORT - CHECK_EQ(ExternalReference::new_space_start().address(), + CHECK_EQ(ExternalReference::new_space_start(isolate).address(), decoder.Decode(make_code(UNCLASSIFIED, 10))); } -- 2.7.4