Assembler::Assembler(void* buffer, int buffer_size)
- : positions_recorder_(this),
+ : AssemblerBase(Isolate::Current()),
+ positions_recorder_(this),
allow_peephole_optimization_(false),
emit_debug_code_(FLAG_debug_code) {
- Isolate* isolate = Isolate::Current();
allow_peephole_optimization_ = FLAG_peephole_optimization;
if (buffer == NULL) {
// Do our own buffer management.
if (buffer_size <= kMinimalBufferSize) {
buffer_size = kMinimalBufferSize;
- if (isolate->assembler_spare_buffer() != NULL) {
- buffer = isolate->assembler_spare_buffer();
- isolate->set_assembler_spare_buffer(NULL);
+ if (isolate()->assembler_spare_buffer() != NULL) {
+ buffer = isolate()->assembler_spare_buffer();
+ isolate()->set_assembler_spare_buffer(NULL);
}
}
if (buffer == NULL) {
Assembler::~Assembler() {
- Isolate* isolate = Isolate::Current();
ASSERT(const_pool_blocked_nesting_ == 0);
if (own_buffer_) {
- if (isolate->assembler_spare_buffer() == NULL &&
+ if (isolate()->assembler_spare_buffer() == NULL &&
buffer_size_ == kMinimalBufferSize) {
- isolate->set_assembler_spare_buffer(buffer_);
+ isolate()->set_assembler_spare_buffer(buffer_);
} else {
DeleteArray(buffer_);
}
Condition cond = Instruction::ConditionField(instr);
if ((instr & ~kCondMask) == 13*B21) { // mov, S not set
if (x.must_use_constant_pool() ||
- !Isolate::Current()->cpu_features()->IsSupported(ARMv7)) {
+ !isolate()->cpu_features()->IsSupported(ARMv7)) {
RecordRelocInfo(x.rmode_, x.imm32_);
ldr(rd, MemOperand(pc, 0), cond);
} else {
const Operand& src,
Condition cond) {
// v6 and above.
- ASSERT(Isolate::Current()->cpu_features()->IsSupported(ARMv7));
+ ASSERT(isolate()->cpu_features()->IsSupported(ARMv7));
ASSERT(!dst.is(pc) && !src.rm_.is(pc));
ASSERT((satpos >= 0) && (satpos <= 31));
ASSERT((src.shift_op_ == ASR) || (src.shift_op_ == LSL));
int width,
Condition cond) {
// v7 and above.
- ASSERT(Isolate::Current()->cpu_features()->IsSupported(ARMv7));
+ ASSERT(isolate()->cpu_features()->IsSupported(ARMv7));
ASSERT(!dst.is(pc) && !src.is(pc));
ASSERT((lsb >= 0) && (lsb <= 31));
ASSERT((width >= 1) && (width <= (32 - lsb)));
int width,
Condition cond) {
// v7 and above.
- ASSERT(Isolate::Current()->cpu_features()->IsSupported(ARMv7));
+ ASSERT(isolate()->cpu_features()->IsSupported(ARMv7));
ASSERT(!dst.is(pc) && !src.is(pc));
ASSERT((lsb >= 0) && (lsb <= 31));
ASSERT((width >= 1) && (width <= (32 - lsb)));
// bfc dst, #lsb, #width
void Assembler::bfc(Register dst, int lsb, int width, Condition cond) {
// v7 and above.
- ASSERT(Isolate::Current()->cpu_features()->IsSupported(ARMv7));
+ ASSERT(isolate()->cpu_features()->IsSupported(ARMv7));
ASSERT(!dst.is(pc));
ASSERT((lsb >= 0) && (lsb <= 31));
ASSERT((width >= 1) && (width <= (32 - lsb)));
int width,
Condition cond) {
// v7 and above.
- ASSERT(Isolate::Current()->cpu_features()->IsSupported(ARMv7));
+ ASSERT(isolate()->cpu_features()->IsSupported(ARMv7));
ASSERT(!dst.is(pc) && !src.is(pc));
ASSERT((lsb >= 0) && (lsb <= 31));
ASSERT((width >= 1) && (width <= (32 - lsb)));
void Assembler::ldrd(Register dst1, Register dst2,
const MemOperand& src, Condition cond) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(ARMv7));
+ ASSERT(isolate()->cpu_features()->IsEnabled(ARMv7));
ASSERT(src.rm().is(no_reg));
ASSERT(!dst1.is(lr)); // r14.
ASSERT_EQ(0, dst1.code() % 2);
ASSERT(!src1.is(lr)); // r14.
ASSERT_EQ(0, src1.code() % 2);
ASSERT_EQ(src1.code() + 1, src2.code());
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(ARMv7));
+ ASSERT(isolate()->cpu_features()->IsEnabled(ARMv7));
addrmod3(cond | B7 | B6 | B5 | B4, src1, dst);
}
// Instruction details available in ARM DDI 0406A, A8-628.
// cond(31-28) | 1101(27-24)| U001(23-20) | Rbase(19-16) |
// Vdst(15-12) | 1011(11-8) | offset
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
+ ASSERT(isolate()->cpu_features()->IsEnabled(VFP3));
int u = 1;
if (offset < 0) {
offset = -offset;
// Instruction details available in ARM DDI 0406A, A8-628.
// cond(31-28) | 1101(27-24)| U001(23-20) | Rbase(19-16) |
// Vdst(15-12) | 1010(11-8) | offset
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
+ ASSERT(isolate()->cpu_features()->IsEnabled(VFP3));
int u = 1;
if (offset < 0) {
offset = -offset;
// Instruction details available in ARM DDI 0406A, A8-786.
// cond(31-28) | 1101(27-24)| U000(23-20) | | Rbase(19-16) |
// Vsrc(15-12) | 1011(11-8) | (offset/4)
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
+ ASSERT(isolate()->cpu_features()->IsEnabled(VFP3));
int u = 1;
if (offset < 0) {
offset = -offset;
// Instruction details available in ARM DDI 0406A, A8-786.
// cond(31-28) | 1101(27-24)| U000(23-20) | Rbase(19-16) |
// Vdst(15-12) | 1010(11-8) | (offset/4)
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
+ ASSERT(isolate()->cpu_features()->IsEnabled(VFP3));
int u = 1;
if (offset < 0) {
offset = -offset;
const Condition cond) {
// Dd = immediate
// Instruction details available in ARM DDI 0406B, A8-640.
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
+ ASSERT(isolate()->cpu_features()->IsEnabled(VFP3));
uint32_t enc;
if (FitsVMOVDoubleImmediate(imm, &enc)) {
const Condition cond) {
// Sd = Sm
// Instruction details available in ARM DDI 0406B, A8-642.
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
+ ASSERT(isolate()->cpu_features()->IsEnabled(VFP3));
int sd, d, sm, m;
dst.split_code(&sd, &d);
src.split_code(&sm, &m);
const Condition cond) {
// Dd = Dm
// Instruction details available in ARM DDI 0406B, A8-642.
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
+ ASSERT(isolate()->cpu_features()->IsEnabled(VFP3));
emit(cond | 0xE*B24 | 0xB*B20 |
dst.code()*B12 | 0x5*B9 | B8 | B6 | src.code());
}
// Instruction details available in ARM DDI 0406A, A8-646.
// cond(31-28) | 1100(27-24)| 010(23-21) | op=0(20) | Rt2(19-16) |
// Rt(15-12) | 1011(11-8) | 00(7-6) | M(5) | 1(4) | Vm
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
+ ASSERT(isolate()->cpu_features()->IsEnabled(VFP3));
ASSERT(!src1.is(pc) && !src2.is(pc));
emit(cond | 0xC*B24 | B22 | src2.code()*B16 |
src1.code()*B12 | 0xB*B8 | B4 | dst.code());
// Instruction details available in ARM DDI 0406A, A8-646.
// cond(31-28) | 1100(27-24)| 010(23-21) | op=1(20) | Rt2(19-16) |
// Rt(15-12) | 1011(11-8) | 00(7-6) | M(5) | 1(4) | Vm
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
+ ASSERT(isolate()->cpu_features()->IsEnabled(VFP3));
ASSERT(!dst1.is(pc) && !dst2.is(pc));
emit(cond | 0xC*B24 | B22 | B20 | dst2.code()*B16 |
dst1.code()*B12 | 0xB*B8 | B4 | src.code());
// Instruction details available in ARM DDI 0406A, A8-642.
// cond(31-28) | 1110(27-24)| 000(23-21) | op=0(20) | Vn(19-16) |
// Rt(15-12) | 1010(11-8) | N(7)=0 | 00(6-5) | 1(4) | 0000(3-0)
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
+ ASSERT(isolate()->cpu_features()->IsEnabled(VFP3));
ASSERT(!src.is(pc));
int sn, n;
dst.split_code(&sn, &n);
// Instruction details available in ARM DDI 0406A, A8-642.
// cond(31-28) | 1110(27-24)| 000(23-21) | op=1(20) | Vn(19-16) |
// Rt(15-12) | 1010(11-8) | N(7)=0 | 00(6-5) | 1(4) | 0000(3-0)
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
+ ASSERT(isolate()->cpu_features()->IsEnabled(VFP3));
ASSERT(!dst.is(pc));
int sn, n;
src.split_code(&sn, &n);
const SwVfpRegister src,
VFPConversionMode mode,
const Condition cond) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
+ ASSERT(isolate()->cpu_features()->IsEnabled(VFP3));
emit(EncodeVCVT(F64, dst.code(), S32, src.code(), mode, cond));
}
const SwVfpRegister src,
VFPConversionMode mode,
const Condition cond) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
+ ASSERT(isolate()->cpu_features()->IsEnabled(VFP3));
emit(EncodeVCVT(F32, dst.code(), S32, src.code(), mode, cond));
}
const SwVfpRegister src,
VFPConversionMode mode,
const Condition cond) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
+ ASSERT(isolate()->cpu_features()->IsEnabled(VFP3));
emit(EncodeVCVT(F64, dst.code(), U32, src.code(), mode, cond));
}
const DwVfpRegister src,
VFPConversionMode mode,
const Condition cond) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
+ ASSERT(isolate()->cpu_features()->IsEnabled(VFP3));
emit(EncodeVCVT(S32, dst.code(), F64, src.code(), mode, cond));
}
const DwVfpRegister src,
VFPConversionMode mode,
const Condition cond) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
+ ASSERT(isolate()->cpu_features()->IsEnabled(VFP3));
emit(EncodeVCVT(U32, dst.code(), F64, src.code(), mode, cond));
}
const SwVfpRegister src,
VFPConversionMode mode,
const Condition cond) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
+ ASSERT(isolate()->cpu_features()->IsEnabled(VFP3));
emit(EncodeVCVT(F64, dst.code(), F32, src.code(), mode, cond));
}
const DwVfpRegister src,
VFPConversionMode mode,
const Condition cond) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
+ ASSERT(isolate()->cpu_features()->IsEnabled(VFP3));
emit(EncodeVCVT(F32, dst.code(), F64, src.code(), mode, cond));
}
// Instruction details available in ARM DDI 0406A, A8-536.
// cond(31-28) | 11100(27-23)| D=?(22) | 11(21-20) | Vn(19-16) |
// Vd(15-12) | 101(11-9) | sz(8)=1 | N(7)=0 | 0(6) | M=?(5) | 0(4) | Vm(3-0)
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
+ ASSERT(isolate()->cpu_features()->IsEnabled(VFP3));
emit(cond | 0xE*B24 | 0x3*B20 | src1.code()*B16 |
dst.code()*B12 | 0x5*B9 | B8 | src2.code());
}
// Instruction details available in ARM DDI 0406A, A8-784.
// cond(31-28) | 11100(27-23)| D=?(22) | 11(21-20) | Vn(19-16) |
// Vd(15-12) | 101(11-9) | sz(8)=1 | N(7)=0 | 1(6) | M=?(5) | 0(4) | Vm(3-0)
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
+ ASSERT(isolate()->cpu_features()->IsEnabled(VFP3));
emit(cond | 0xE*B24 | 0x3*B20 | src1.code()*B16 |
dst.code()*B12 | 0x5*B9 | B8 | B6 | src2.code());
}
// Instruction details available in ARM DDI 0406A, A8-784.
// cond(31-28) | 11100(27-23)| D=?(22) | 10(21-20) | Vn(19-16) |
// Vd(15-12) | 101(11-9) | sz(8)=1 | N(7)=0 | 0(6) | M=?(5) | 0(4) | Vm(3-0)
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
+ ASSERT(isolate()->cpu_features()->IsEnabled(VFP3));
emit(cond | 0xE*B24 | 0x2*B20 | src1.code()*B16 |
dst.code()*B12 | 0x5*B9 | B8 | src2.code());
}
// Instruction details available in ARM DDI 0406A, A8-584.
// cond(31-28) | 11101(27-23)| D=?(22) | 00(21-20) | Vn(19-16) |
// Vd(15-12) | 101(11-9) | sz(8)=1 | N(7)=? | 0(6) | M=?(5) | 0(4) | Vm(3-0)
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
+ ASSERT(isolate()->cpu_features()->IsEnabled(VFP3));
emit(cond | 0xE*B24 | B23 | src1.code()*B16 |
dst.code()*B12 | 0x5*B9 | B8 | src2.code());
}
// Instruction details available in ARM DDI 0406A, A8-570.
// cond(31-28) | 11101 (27-23)| D=?(22) | 11 (21-20) | 0100 (19-16) |
// Vd(15-12) | 101(11-9) | sz(8)=1 | E(7)=0 | 1(6) | M(5)=? | 0(4) | Vm(3-0)
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
+ ASSERT(isolate()->cpu_features()->IsEnabled(VFP3));
emit(cond | 0xE*B24 |B23 | 0x3*B20 | B18 |
src1.code()*B12 | 0x5*B9 | B8 | B6 | src2.code());
}
// Instruction details available in ARM DDI 0406A, A8-570.
// cond(31-28) | 11101 (27-23)| D=?(22) | 11 (21-20) | 0101 (19-16) |
// Vd(15-12) | 101(11-9) | sz(8)=1 | E(7)=0 | 1(6) | M(5)=? | 0(4) | 0000(3-0)
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
+ ASSERT(isolate()->cpu_features()->IsEnabled(VFP3));
ASSERT(src2 == 0.0);
emit(cond | 0xE*B24 |B23 | 0x3*B20 | B18 | B16 |
src1.code()*B12 | 0x5*B9 | B8 | B6);
// Instruction details available in ARM DDI 0406A, A8-652.
// cond(31-28) | 1110 (27-24) | 1110(23-20)| 0001 (19-16) |
// Rt(15-12) | 1010 (11-8) | 0(7) | 00 (6-5) | 1(4) | 0000(3-0)
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
+ ASSERT(isolate()->cpu_features()->IsEnabled(VFP3));
emit(cond | 0xE*B24 | 0xE*B20 | B16 |
dst.code()*B12 | 0xA*B8 | B4);
}
// Instruction details available in ARM DDI 0406A, A8-652.
// cond(31-28) | 1110 (27-24) | 1111(23-20)| 0001 (19-16) |
// Rt(15-12) | 1010 (11-8) | 0(7) | 00 (6-5) | 1(4) | 0000(3-0)
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
+ ASSERT(isolate()->cpu_features()->IsEnabled(VFP3));
emit(cond | 0xE*B24 | 0xF*B20 | B16 |
dst.code()*B12 | 0xA*B8 | B4);
}
const Condition cond) {
// cond(31-28) | 11101 (27-23)| D=?(22) | 11 (21-20) | 0001 (19-16) |
// Vd(15-12) | 101(11-9) | sz(8)=1 | 11 (7-6) | M(5)=? | 0(4) | Vm(3-0)
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3));
+ ASSERT(isolate()->cpu_features()->IsEnabled(VFP3));
emit(cond | 0xE*B24 | B23 | 0x3*B20 | B16 |
dst.code()*B12 | 0x5*B9 | B8 | 3*B6 | src.code());
}
-class Assembler : public Malloced {
+class Assembler : public AssemblerBase {
public:
// Create an assembler. Instructions and relocation information are emitted
// into a buffer, with the instructions starting from the beginning and the
// JumpToExternalReference expects r0 to contain the number of arguments
// including the receiver and the extra arguments.
__ add(r0, r0, Operand(num_extra_args + 1));
- __ JumpToExternalReference(ExternalReference(id));
+ __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}
// Jump to the generic array code if the specialized code cannot handle
// the construction.
__ bind(&generic_array_code);
- Code* code = Isolate::Current()->builtins()->builtin(
+ Code* code = masm->isolate()->builtins()->builtin(
Builtins::ArrayCodeGeneric);
Handle<Code> array_code(code);
__ Jump(array_code, RelocInfo::CODE_TARGET);
// Jump to the generic construct code in case the specialized code cannot
// handle the construction.
__ bind(&generic_constructor);
- Code* code = Isolate::Current()->builtins()->builtin(
+ Code* code = masm->isolate()->builtins()->builtin(
Builtins::JSConstructStubGeneric);
Handle<Code> generic_construct_stub(code);
__ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
// Set expected number of arguments to zero (not changing r0).
__ mov(r2, Operand(0, RelocInfo::NONE));
__ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
- __ Jump(Handle<Code>(Isolate::Current()->builtins()->builtin(
+ __ Jump(Handle<Code>(masm->isolate()->builtins()->builtin(
ArgumentsAdaptorTrampoline)), RelocInfo::CODE_TARGET);
}
Label undo_allocation;
#ifdef ENABLE_DEBUGGER_SUPPORT
ExternalReference debug_step_in_fp =
- ExternalReference::debug_step_in_fp_address();
+ ExternalReference::debug_step_in_fp_address(masm->isolate());
__ mov(r2, Operand(debug_step_in_fp));
__ ldr(r2, MemOperand(r2));
__ tst(r2, r2);
if (is_api_function) {
__ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
Handle<Code> code = Handle<Code>(
- Isolate::Current()->builtins()->builtin(
+ masm->isolate()->builtins()->builtin(
Builtins::HandleApiCallConstruct));
ParameterCount expected(0);
__ InvokeCode(code, expected, expected,
__ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
// Set up the roots register.
- ExternalReference roots_address = ExternalReference::roots_address();
+ ExternalReference roots_address =
+ ExternalReference::roots_address(masm->isolate());
__ mov(r10, Operand(roots_address));
// Push the function and the receiver onto the stack.
// Invoke the code and pass argc as r0.
__ mov(r0, Operand(r3));
if (is_construct) {
- __ Call(Handle<Code>(Isolate::Current()->builtins()->builtin(
+ __ Call(Handle<Code>(masm->isolate()->builtins()->builtin(
Builtins::JSConstructCall)), RelocInfo::CODE_TARGET);
} else {
ParameterCount actual(r0);
void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
// Probe the CPU to set the supported features, because this builtin
// may be called before the initialization performs CPU setup.
- Isolate::Current()->cpu_features()->Probe(false);
+ masm->isolate()->cpu_features()->Probe(false);
// Lookup the function in the JavaScript frame and push it as an
// argument to the on-stack replacement function.
// Expected number of arguments is 0 for CALL_NON_FUNCTION.
__ mov(r2, Operand(0, RelocInfo::NONE));
__ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION);
- __ Jump(Handle<Code>(Isolate::Current()->builtins()->builtin(
+ __ Jump(Handle<Code>(masm->isolate()->builtins()->builtin(
ArgumentsAdaptorTrampoline)), RelocInfo::CODE_TARGET);
__ bind(&function);
}
__ mov(r2, Operand(r2, ASR, kSmiTagSize));
__ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
__ cmp(r2, r0); // Check formal and actual parameter counts.
- __ Jump(Handle<Code>(Isolate::Current()->builtins()->builtin(
+ __ Jump(Handle<Code>(masm->isolate()->builtins()->builtin(
ArgumentsAdaptorTrampoline)), RelocInfo::CODE_TARGET, ne);
ParameterCount expected(0);
__ push(lr);
__ PrepareCallCFunction(4, scratch); // Two doubles are 4 arguments.
// Call C routine that may not cause GC or other trouble.
- __ CallCFunction(ExternalReference::double_fp_operation(op), 4);
+ __ CallCFunction(ExternalReference::double_fp_operation(op, masm->isolate()),
+ 4);
// Store answer in the overwritable heap number.
#if !defined(USE_ARM_EABI)
// Double returned in fp coprocessor register 0 and 1, encoded as
// Call C routine that may not cause GC or other trouble.
__ push(lr);
__ PrepareCallCFunction(4, r5); // Two doubles count as 4 arguments.
- __ CallCFunction(ExternalReference::compare_doubles(), 4);
+ __ CallCFunction(ExternalReference::compare_doubles(masm->isolate()), 4);
__ pop(pc); // Return.
}
}
__ PrepareCallCFunction(4, r4); // Two doubles count as 4 arguments.
// Call C routine that may not cause GC or other trouble. r5 is callee
// save.
- __ CallCFunction(ExternalReference::double_fp_operation(op_), 4);
+ __ CallCFunction(
+ ExternalReference::double_fp_operation(op_, masm->isolate()), 4);
// Store answer in the overwritable heap number.
#if !defined(USE_ARM_EABI)
// Double returned in fp coprocessor register 0 and 1, encoded as
__ Push(r2, r1, r0);
__ TailCallExternalReference(
- ExternalReference(IC_Utility(IC::kBinaryOp_Patch)),
+ ExternalReference(IC_Utility(IC::kBinaryOp_Patch), masm->isolate()),
5,
1);
}
__ Push(r2, r1, r0);
__ TailCallExternalReference(
- ExternalReference(IC_Utility(IC::kTypeRecordingBinaryOp_Patch)),
+ ExternalReference(IC_Utility(IC::kTypeRecordingBinaryOp_Patch),
+ masm->isolate()),
5,
1);
}
// r2 = low 32 bits of double value.
// r3 = high 32 bits of double value.
// r1 = TranscendentalCache::hash(double value).
- __ mov(cache_entry,
- Operand(ExternalReference::transcendental_cache_array_address()));
- // r0 points to cache array.
- __ ldr(cache_entry, MemOperand(cache_entry, type_ * sizeof(
- Isolate::Current()->transcendental_cache()->caches_[0])));
+ Isolate* isolate = masm->isolate();
+ ExternalReference cache_array =
+ ExternalReference::transcendental_cache_array_address(isolate);
+ __ mov(cache_entry, Operand(cache_array));
+ // cache_entry points to cache array.
+ int cache_array_index
+ = type_ * sizeof(isolate->transcendental_cache()->caches_[0]);
+ __ ldr(cache_entry, MemOperand(cache_entry, cache_array_index));
// r0 points to the cache for the type type_.
// If NULL, the cache hasn't been initialized yet, so go through runtime.
__ cmp(cache_entry, Operand(0, RelocInfo::NONE));
__ bind(&calculate);
if (tagged) {
__ bind(&invalid_cache);
- __ TailCallExternalReference(ExternalReference(RuntimeFunction()), 1, 1);
+ ExternalReference runtime_function =
+ ExternalReference(RuntimeFunction(), masm->isolate());
+ __ TailCallExternalReference(runtime_function, 1, 1);
} else {
if (!Isolate::Current()->cpu_features()->IsSupported(VFP3)) UNREACHABLE();
CpuFeatures::Scope scope(VFP3);
void TranscendentalCacheStub::GenerateCallCFunction(MacroAssembler* masm,
Register scratch) {
+ Isolate* isolate = masm->isolate();
+
__ push(lr);
__ PrepareCallCFunction(2, scratch);
__ vmov(r0, r1, d2);
switch (type_) {
case TranscendentalCache::SIN:
- __ CallCFunction(ExternalReference::math_sin_double_function(), 2);
+ __ CallCFunction(ExternalReference::math_sin_double_function(isolate), 2);
break;
case TranscendentalCache::COS:
- __ CallCFunction(ExternalReference::math_cos_double_function(), 2);
+ __ CallCFunction(ExternalReference::math_cos_double_function(isolate), 2);
break;
case TranscendentalCache::LOG:
- __ CallCFunction(ExternalReference::math_log_double_function(), 2);
+ __ CallCFunction(ExternalReference::math_log_double_function(isolate), 2);
break;
default:
UNIMPLEMENTED();
__ PrepareCallCFunction(3, scratch);
__ mov(r2, exponent);
__ vmov(r0, r1, double_base);
- __ CallCFunction(ExternalReference::power_double_int_function(), 3);
+ __ CallCFunction(
+ ExternalReference::power_double_int_function(masm->isolate()), 3);
__ pop(lr);
__ GetCFunctionDoubleResult(double_result);
__ vstr(double_result,
__ PrepareCallCFunction(4, scratch);
__ vmov(r0, r1, double_base);
__ vmov(r2, r3, double_exponent);
- __ CallCFunction(ExternalReference::power_double_double_function(), 4);
+ __ CallCFunction(
+ ExternalReference::power_double_double_function(masm->isolate()), 4);
__ pop(lr);
__ GetCFunctionDoubleResult(double_result);
__ vstr(double_result,
// r4: number of arguments including receiver (C callee-saved)
// r5: pointer to builtin function (C callee-saved)
// r6: pointer to the first argument (C callee-saved)
+ Isolate* isolate = masm->isolate();
if (do_gc) {
// Passing r0.
__ PrepareCallCFunction(1, r1);
- __ CallCFunction(ExternalReference::perform_gc_function(), 1);
+ __ CallCFunction(ExternalReference::perform_gc_function(isolate), 1);
}
ExternalReference scope_depth =
- ExternalReference::heap_always_allocate_scope_depth();
+ ExternalReference::heap_always_allocate_scope_depth(isolate);
if (always_allocate) {
__ mov(r0, Operand(scope_depth));
__ ldr(r1, MemOperand(r0));
__ b(eq, throw_out_of_memory_exception);
// Retrieve the pending exception and clear the variable.
- __ mov(ip, Operand(ExternalReference::the_hole_value_location()));
+ __ mov(ip, Operand(ExternalReference::the_hole_value_location(isolate)));
__ ldr(r3, MemOperand(ip));
- __ mov(ip, Operand(ExternalReference(Isolate::k_pending_exception_address)));
+ __ mov(ip, Operand(ExternalReference(Isolate::k_pending_exception_address,
+ isolate)));
__ ldr(r0, MemOperand(ip));
__ str(r3, MemOperand(ip));
// Special handling of termination exceptions which are uncatchable
// by javascript code.
- __ cmp(r0, Operand(FACTORY->termination_exception()));
+ __ cmp(r0, Operand(isolate->factory()->termination_exception()));
__ b(eq, throw_termination_exception);
// Handle normal exception.
// r2: receiver
// r3: argc
// r4: argv
+ Isolate* isolate = masm->isolate();
__ mov(r8, Operand(-1)); // Push a bad frame pointer to fail if it is used.
int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
__ mov(r7, Operand(Smi::FromInt(marker)));
__ mov(r6, Operand(Smi::FromInt(marker)));
- __ mov(r5, Operand(ExternalReference(Isolate::k_c_entry_fp_address)));
+ __ mov(r5,
+ Operand(ExternalReference(Isolate::k_c_entry_fp_address, isolate)));
__ ldr(r5, MemOperand(r5));
__ Push(r8, r7, r6, r5);
#ifdef ENABLE_LOGGING_AND_PROFILING
// If this is the outermost JS call, set js_entry_sp value.
- ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address);
+ ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address, isolate);
__ mov(r5, Operand(ExternalReference(js_entry_sp)));
__ ldr(r6, MemOperand(r5));
__ cmp(r6, Operand(0, RelocInfo::NONE));
// exception field in the JSEnv and return a failure sentinel.
// Coming in here the fp will be invalid because the PushTryHandler below
// sets it to 0 to signal the existence of the JSEntry frame.
- __ mov(ip, Operand(ExternalReference(Isolate::k_pending_exception_address)));
+ __ mov(ip, Operand(ExternalReference(Isolate::k_pending_exception_address,
+ isolate)));
__ str(r0, MemOperand(ip));
__ mov(r0, Operand(reinterpret_cast<int32_t>(Failure::Exception())));
__ b(&exit);
// saved values before returning a failure to C.
// Clear any pending exceptions.
- __ mov(ip, Operand(ExternalReference::the_hole_value_location()));
+ __ mov(ip, Operand(ExternalReference::the_hole_value_location(isolate)));
__ ldr(r5, MemOperand(ip));
- __ mov(ip, Operand(ExternalReference(Isolate::k_pending_exception_address)));
+ __ mov(ip, Operand(ExternalReference(Isolate::k_pending_exception_address,
+ isolate)));
__ str(r5, MemOperand(ip));
// Invoke the function by calling through JS entry trampoline builtin.
// r3: argc
// r4: argv
if (is_construct) {
- ExternalReference construct_entry(Builtins::JSConstructEntryTrampoline);
+ ExternalReference construct_entry(Builtins::JSConstructEntryTrampoline,
+ isolate);
__ mov(ip, Operand(construct_entry));
} else {
- ExternalReference entry(Builtins::JSEntryTrampoline);
+ ExternalReference entry(Builtins::JSEntryTrampoline, isolate);
__ mov(ip, Operand(entry));
}
__ ldr(ip, MemOperand(ip)); // deref address
// displacement since the current stack pointer (sp) points directly
// to the stack handler.
__ ldr(r3, MemOperand(sp, StackHandlerConstants::kNextOffset));
- __ mov(ip, Operand(ExternalReference(Isolate::k_handler_address)));
+ __ mov(ip, Operand(ExternalReference(Isolate::k_handler_address, isolate)));
__ str(r3, MemOperand(ip));
// No need to restore registers
__ add(sp, sp, Operand(StackHandlerConstants::kSize));
__ bind(&exit); // r0 holds result
// Restore the top frame descriptors from the stack.
__ pop(r3);
- __ mov(ip, Operand(ExternalReference(Isolate::k_c_entry_fp_address)));
+ __ mov(ip,
+ Operand(ExternalReference(Isolate::k_c_entry_fp_address, isolate)));
__ str(r3, MemOperand(ip));
// Reset the stack to the callee saved registers.
Register last_match_info_elements = r6;
// Ensure that a RegExp stack is allocated.
+ Isolate* isolate = masm->isolate();
ExternalReference address_of_regexp_stack_memory_address =
- ExternalReference::address_of_regexp_stack_memory_address();
+ ExternalReference::address_of_regexp_stack_memory_address(isolate);
ExternalReference address_of_regexp_stack_memory_size =
- ExternalReference::address_of_regexp_stack_memory_size();
+ ExternalReference::address_of_regexp_stack_memory_size(isolate);
__ mov(r0, Operand(address_of_regexp_stack_memory_size));
__ ldr(r0, MemOperand(r0, 0));
__ tst(r0, Operand(r0));
__ str(r0, MemOperand(sp, 2 * kPointerSize));
// Argument 5 (sp[4]): static offsets vector buffer.
- __ mov(r0, Operand(ExternalReference::address_of_static_offsets_vector()));
+ __ mov(r0,
+ Operand(ExternalReference::address_of_static_offsets_vector(isolate)));
__ str(r0, MemOperand(sp, 1 * kPointerSize));
// For arguments 4 and 3 get string length, calculate start of string data and
// stack overflow (on the backtrack stack) was detected in RegExp code but
// haven't created the exception yet. Handle that in the runtime system.
// TODO(592): Rerunning the RegExp to get the stack overflow exception.
- __ mov(r1, Operand(ExternalReference::the_hole_value_location()));
+ __ mov(r1, Operand(ExternalReference::the_hole_value_location(isolate)));
__ ldr(r1, MemOperand(r1, 0));
- __ mov(r2, Operand(ExternalReference(Isolate::k_pending_exception_address)));
+ __ mov(r2, Operand(ExternalReference(Isolate::k_pending_exception_address,
+ isolate)));
__ ldr(r0, MemOperand(r2, 0));
__ cmp(r0, r1);
__ b(eq, &runtime);
// Get the static offsets vector filled by the native regexp code.
ExternalReference address_of_static_offsets_vector =
- ExternalReference::address_of_static_offsets_vector();
+ ExternalReference::address_of_static_offsets_vector(isolate);
__ mov(r2, Operand(address_of_static_offsets_vector));
// r1: number of capture registers
__ push(lr);
// Call the runtime system in a fresh internal frame.
- ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss));
+ ExternalReference miss =
+ ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate());
__ EnterInternalFrame();
__ Push(r1, r0);
__ mov(ip, Operand(Smi::FromInt(op_)));
function_return_is_shadowed_ = function_return_was_shadowed;
// Get an external reference to the handler address.
- ExternalReference handler_address(Isolate::k_handler_address);
+ ExternalReference handler_address(Isolate::k_handler_address, isolate());
// If we can fall off the end of the try block, unlink from try chain.
if (has_valid_frame()) {
function_return_is_shadowed_ = function_return_was_shadowed;
// Get an external reference to the handler address.
- ExternalReference handler_address(Isolate::k_handler_address);
+ ExternalReference handler_address(Isolate::k_handler_address, isolate());
// If we can fall off the end of the try block, unlink from the try
// chain and set the state on the frame to FALLING.
// ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)).
if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
__ PrepareCallCFunction(0, r1);
- __ CallCFunction(ExternalReference::random_uint32_function(), 0);
+ __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 0);
CpuFeatures::Scope scope(VFP3);
// 0x41300000 is the top half of 1.0 x 2^20 as a double.
__ mov(r0, Operand(r4));
__ PrepareCallCFunction(1, r1);
__ CallCFunction(
- ExternalReference::fill_heap_number_with_random_function(), 1);
+ ExternalReference::fill_heap_number_with_random_function(isolate()), 1);
frame_->EmitPush(r0);
}
}
__ ldr(scratch1, FieldMemOperand(receiver, JSObject::kElementsOffset));
if (!value_is_harmless && wb_info != LIKELY_SMI) {
Label ok;
- __ and_(scratch2, scratch1, Operand(ExternalReference::new_space_mask()));
- __ cmp(scratch2, Operand(ExternalReference::new_space_start()));
+ __ and_(scratch2,
+ scratch1,
+ Operand(ExternalReference::new_space_mask(isolate())));
+ __ cmp(scratch2, Operand(ExternalReference::new_space_start(isolate())));
__ tst(value, Operand(kSmiTagMask), ne);
deferred->Branch(ne);
#ifdef DEBUG
__ RecordComment("// Calling from debug break to runtime - come in - over");
#endif
__ mov(r0, Operand(0, RelocInfo::NONE)); // no arguments
- __ mov(r1, Operand(ExternalReference::debug_break()));
+ __ mov(r1, Operand(ExternalReference::debug_break(masm->isolate())));
CEntryStub ceb(1);
__ CallStub(&ceb);
// Now that the break point has been handled, resume normal execution by
// jumping to the target address intended by the caller and that was
// overwritten by the address of DebugBreakXXX.
- __ mov(ip, Operand(ExternalReference(Debug_Address::AfterBreakTarget())));
+ ExternalReference after_break_target =
+ ExternalReference(Debug_Address::AfterBreakTarget(), masm->isolate());
+ __ mov(ip, Operand(after_break_target));
__ ldr(ip, MemOperand(ip));
__ Jump(ip);
}
// easily ported.
void Deoptimizer::EntryGenerator::Generate() {
GeneratePrologue();
+
+ Isolate* isolate = masm()->isolate();
+
CpuFeatures::Scope scope(VFP3);
// Save all general purpose registers before messing with them.
const int kNumberOfRegisters = Register::kNumRegisters;
// r3: code address or 0 already loaded.
__ str(r4, MemOperand(sp, 0 * kPointerSize)); // Fp-to-sp delta.
// Call Deoptimizer::New().
- __ CallCFunction(ExternalReference::new_deoptimizer_function(), 5);
+ __ CallCFunction(ExternalReference::new_deoptimizer_function(isolate), 5);
// Preserve "deoptimizer" object in register r0 and get the input
// frame descriptor pointer to r1 (deoptimizer->input_);
// r0: deoptimizer object; r1: scratch.
__ PrepareCallCFunction(1, r1);
// Call Deoptimizer::ComputeOutputFrames().
- __ CallCFunction(ExternalReference::compute_output_frames_function(), 1);
+ __ CallCFunction(
+ ExternalReference::compute_output_frames_function(isolate), 1);
__ pop(r0); // Restore deoptimizer object (class Deoptimizer).
// Replace the current (input) frame with the output frames.
__ pop(ip); // remove lr
// Set up the roots register.
- ExternalReference roots_address = ExternalReference::roots_address();
+ ExternalReference roots_address = ExternalReference::roots_address(isolate);
__ mov(r10, Operand(roots_address));
__ pop(ip); // remove pc
// ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)).
if (isolate()->cpu_features()->IsSupported(VFP3)) {
__ PrepareCallCFunction(0, r1);
- __ CallCFunction(ExternalReference::random_uint32_function(), 0);
+ __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 0);
CpuFeatures::Scope scope(VFP3);
// 0x41300000 is the top half of 1.0 x 2^20 as a double.
__ mov(r0, Operand(r4));
__ PrepareCallCFunction(1, r1);
__ CallCFunction(
- ExternalReference::fill_heap_number_with_random_function(), 1);
+ ExternalReference::fill_heap_number_with_random_function(isolate()), 1);
}
context()->Plug(r0);
// Call the entry.
__ mov(r0, Operand(2));
- __ mov(r1, Operand(ExternalReference(IC_Utility(id))));
+ __ mov(r1, Operand(ExternalReference(IC_Utility(id), masm->isolate())));
CEntryStub stub(1);
__ CallStub(&stub);
__ Push(r3, r2);
// Perform tail call to the entry.
- ExternalReference ref = ExternalReference(IC_Utility(kLoadIC_Miss));
+ ExternalReference ref =
+ ExternalReference(IC_Utility(kLoadIC_Miss), masm->isolate());
__ TailCallExternalReference(ref, 2, 1);
}
__ Push(r1, r0);
- ExternalReference ref = ExternalReference(IC_Utility(kKeyedLoadIC_Miss));
+ ExternalReference ref =
+ ExternalReference(IC_Utility(kKeyedLoadIC_Miss), masm->isolate());
__ TailCallExternalReference(ref, 2, 1);
}
// Load the key (consisting of map and symbol) from the cache and
// check for match.
- ExternalReference cache_keys = ExternalReference::keyed_lookup_cache_keys();
+ ExternalReference cache_keys =
+ ExternalReference::keyed_lookup_cache_keys(masm->isolate());
__ mov(r4, Operand(cache_keys));
__ add(r4, r4, Operand(r3, LSL, kPointerSizeLog2 + 1));
__ ldr(r5, MemOperand(r4, kPointerSize, PostIndex)); // Move r4 to symbol.
// r1 : receiver
// r2 : receiver's map
// r3 : lookup cache index
- ExternalReference cache_field_offsets
- = ExternalReference::keyed_lookup_cache_field_offsets();
+ ExternalReference cache_field_offsets =
+ ExternalReference::keyed_lookup_cache_field_offsets(masm->isolate());
__ mov(r4, Operand(cache_field_offsets));
__ ldr(r5, MemOperand(r4, r3, LSL, kPointerSizeLog2));
__ ldrb(r6, FieldMemOperand(r2, Map::kInObjectPropertiesOffset));
__ Push(r1, r0); // Receiver, key.
// Perform tail call to the entry.
- __ TailCallExternalReference(ExternalReference(
- IC_Utility(kKeyedLoadPropertyWithInterceptor)), 2, 1);
+ __ TailCallExternalReference(
+ ExternalReference(IC_Utility(kKeyedLoadPropertyWithInterceptor),
+ masm->isolate()),
+ 2,
+ 1);
__ bind(&slow);
GenerateMiss(masm);
// Push receiver, key and value for runtime call.
__ Push(r2, r1, r0);
- ExternalReference ref = ExternalReference(IC_Utility(kKeyedStoreIC_Miss));
+ ExternalReference ref =
+ ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate());
__ TailCallExternalReference(ref, 3, 1);
}
__ Push(r1, r2, r0);
// Perform tail call to the entry.
- ExternalReference ref = ExternalReference(IC_Utility(kStoreIC_Miss));
+ ExternalReference ref =
+ ExternalReference(IC_Utility(kStoreIC_Miss), masm->isolate());
__ TailCallExternalReference(ref, 3, 1);
}
// Prepare tail call to StoreIC_ArrayLength.
__ Push(receiver, value);
- ExternalReference ref = ExternalReference(IC_Utility(kStoreIC_ArrayLength));
+ ExternalReference ref =
+ ExternalReference(IC_Utility(kStoreIC_ArrayLength), masm->isolate());
__ TailCallExternalReference(ref, 2, 1);
__ bind(&miss);
__ PrepareCallCFunction(4, scratch0());
__ vmov(r0, r1, left);
__ vmov(r2, r3, right);
- __ CallCFunction(ExternalReference::double_fp_operation(Token::MOD), 4);
+ __ CallCFunction(
+ ExternalReference::double_fp_operation(Token::MOD, isolate()), 4);
// Move the result in the double result register.
__ GetCFunctionDoubleResult(ToDoubleRegister(instr->result()));
__ PrepareCallCFunction(4, scratch);
__ vmov(r0, r1, ToDoubleRegister(left));
__ vmov(r2, r3, ToDoubleRegister(right));
- __ CallCFunction(ExternalReference::power_double_double_function(), 4);
+ __ CallCFunction(
+ ExternalReference::power_double_double_function(isolate()), 4);
} else if (exponent_type.IsInteger32()) {
ASSERT(ToRegister(right).is(r0));
// Prepare arguments and call C function.
__ PrepareCallCFunction(4, scratch);
__ mov(r2, ToRegister(right));
__ vmov(r0, r1, ToDoubleRegister(left));
- __ CallCFunction(ExternalReference::power_double_int_function(), 4);
+ __ CallCFunction(
+ ExternalReference::power_double_int_function(isolate()), 4);
} else {
ASSERT(exponent_type.IsTagged());
ASSERT(instr->hydrogen()->left()->representation().IsDouble());
__ PrepareCallCFunction(4, scratch);
__ vmov(r0, r1, ToDoubleRegister(left));
__ vmov(r2, r3, result_reg);
- __ CallCFunction(ExternalReference::power_double_double_function(), 4);
+ __ CallCFunction(
+ ExternalReference::power_double_double_function(isolate()), 4);
}
// Store the result in the result register.
__ GetCFunctionDoubleResult(result_reg);
Condition cond,
Label* branch) {
ASSERT(cond == eq || cond == ne);
- and_(scratch, object, Operand(ExternalReference::new_space_mask()));
- cmp(scratch, Operand(ExternalReference::new_space_start()));
+ and_(scratch, object, Operand(ExternalReference::new_space_mask(isolate())));
+ cmp(scratch, Operand(ExternalReference::new_space_start(isolate())));
b(cond, branch);
}
str(ip, MemOperand(fp, ExitFrameConstants::kCodeOffset));
// Save the frame pointer and the context in top.
- mov(ip, Operand(ExternalReference(Isolate::k_c_entry_fp_address)));
+ mov(ip, Operand(ExternalReference(Isolate::k_c_entry_fp_address, isolate())));
str(fp, MemOperand(ip));
- mov(ip, Operand(ExternalReference(Isolate::k_context_address)));
+ mov(ip, Operand(ExternalReference(Isolate::k_context_address, isolate())));
str(cp, MemOperand(ip));
// Optionally save all double registers.
// Clear top frame.
mov(r3, Operand(0, RelocInfo::NONE));
- mov(ip, Operand(ExternalReference(Isolate::k_c_entry_fp_address)));
+ mov(ip, Operand(ExternalReference(Isolate::k_c_entry_fp_address, isolate())));
str(r3, MemOperand(ip));
// Restore current context from top and clear it in debug mode.
- mov(ip, Operand(ExternalReference(Isolate::k_context_address)));
+ mov(ip, Operand(ExternalReference(Isolate::k_context_address, isolate())));
ldr(cp, MemOperand(ip));
#ifdef DEBUG
str(r3, MemOperand(ip));
void MacroAssembler::DebugBreak() {
ASSERT(allow_stub_calls());
mov(r0, Operand(0, RelocInfo::NONE));
- mov(r1, Operand(ExternalReference(Runtime::kDebugBreak)));
+ mov(r1, Operand(ExternalReference(Runtime::kDebugBreak, isolate())));
CEntryStub ces(1);
Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}
&& StackHandlerConstants::kPCOffset == 3 * kPointerSize);
stm(db_w, sp, r3.bit() | fp.bit() | lr.bit());
// Save the current handler as the next handler.
- mov(r3, Operand(ExternalReference(Isolate::k_handler_address)));
+ mov(r3, Operand(ExternalReference(Isolate::k_handler_address, isolate())));
ldr(r1, MemOperand(r3));
ASSERT(StackHandlerConstants::kNextOffset == 0);
push(r1);
&& StackHandlerConstants::kPCOffset == 3 * kPointerSize);
stm(db_w, sp, r6.bit() | ip.bit() | lr.bit());
// Save the current handler as the next handler.
- mov(r7, Operand(ExternalReference(Isolate::k_handler_address)));
+ mov(r7, Operand(ExternalReference(Isolate::k_handler_address, isolate())));
ldr(r6, MemOperand(r7));
ASSERT(StackHandlerConstants::kNextOffset == 0);
push(r6);
void MacroAssembler::PopTryHandler() {
ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
pop(r1);
- mov(ip, Operand(ExternalReference(Isolate::k_handler_address)));
+ mov(ip, Operand(ExternalReference(Isolate::k_handler_address, isolate())));
add(sp, sp, Operand(StackHandlerConstants::kSize - kPointerSize));
str(r1, MemOperand(ip));
}
STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
// Drop the sp to the top of the handler.
- mov(r3, Operand(ExternalReference(Isolate::k_handler_address)));
+ mov(r3, Operand(ExternalReference(Isolate::k_handler_address, isolate())));
ldr(sp, MemOperand(r3));
// Restore the next handler and frame pointer, discard handler state.
}
// Drop sp to the top stack handler.
- mov(r3, Operand(ExternalReference(Isolate::k_handler_address)));
+ mov(r3, Operand(ExternalReference(Isolate::k_handler_address, isolate())));
ldr(sp, MemOperand(r3));
// Unwind the handlers until the ENTRY handler is found.
if (type == OUT_OF_MEMORY) {
// Set external caught exception to false.
ExternalReference external_caught(
- Isolate::k_external_caught_exception_address);
+ Isolate::k_external_caught_exception_address, isolate());
mov(r0, Operand(false, RelocInfo::NONE));
mov(r2, Operand(external_caught));
str(r0, MemOperand(r2));
// Set pending exception and r0 to out of memory exception.
Failure* out_of_memory = Failure::OutOfMemoryException();
mov(r0, Operand(reinterpret_cast<int32_t>(out_of_memory)));
- mov(r2, Operand(ExternalReference(Isolate::k_pending_exception_address)));
+ mov(r2, Operand(ExternalReference(Isolate::k_pending_exception_address,
+ isolate())));
str(r0, MemOperand(r2));
}
// Also, assert that the registers are numbered such that the values
// are loaded in the correct order.
ExternalReference new_space_allocation_top =
- ExternalReference::new_space_allocation_top_address();
+ ExternalReference::new_space_allocation_top_address(isolate());
ExternalReference new_space_allocation_limit =
- ExternalReference::new_space_allocation_limit_address();
+ ExternalReference::new_space_allocation_limit_address(isolate());
intptr_t top =
reinterpret_cast<intptr_t>(new_space_allocation_top.address());
intptr_t limit =
// Also, assert that the registers are numbered such that the values
// are loaded in the correct order.
ExternalReference new_space_allocation_top =
- ExternalReference::new_space_allocation_top_address();
+ ExternalReference::new_space_allocation_top_address(isolate());
ExternalReference new_space_allocation_limit =
- ExternalReference::new_space_allocation_limit_address();
+ ExternalReference::new_space_allocation_limit_address(isolate());
intptr_t top =
reinterpret_cast<intptr_t>(new_space_allocation_top.address());
intptr_t limit =
void MacroAssembler::UndoAllocationInNewSpace(Register object,
Register scratch) {
ExternalReference new_space_allocation_top =
- ExternalReference::new_space_allocation_top_address();
+ ExternalReference::new_space_allocation_top_address(isolate());
// Make sure the object has no tag before resetting top.
and_(object, object, Operand(~kHeapObjectTagMask));
// Check if the function scheduled an exception.
bind(&leave_exit_frame);
LoadRoot(r4, Heap::kTheHoleValueRootIndex);
- mov(ip, Operand(ExternalReference::scheduled_exception_address()));
+ mov(ip, Operand(ExternalReference::scheduled_exception_address(isolate())));
ldr(r5, MemOperand(ip));
cmp(r4, r5);
b(ne, &promote_scheduled_exception);
mov(pc, lr);
bind(&promote_scheduled_exception);
- MaybeObject* result = TryTailCallExternalReference(
- ExternalReference(Runtime::kPromoteScheduledException), 0, 1);
+ MaybeObject* result
+ = TryTailCallExternalReference(
+ ExternalReference(Runtime::kPromoteScheduledException, isolate()),
+ 0,
+ 1);
if (result->IsFailure()) {
return result;
}
str(r5, MemOperand(r7, kLimitOffset));
mov(r4, r0);
PrepareCallCFunction(0, r5);
- CallCFunction(ExternalReference::delete_handle_scope_extensions(), 0);
+ CallCFunction(
+ ExternalReference::delete_handle_scope_extensions(isolate()), 0);
mov(r0, r4);
jmp(&leave_exit_frame);
// should remove this need and make the runtime routine entry code
// smarter.
mov(r0, Operand(num_arguments));
- mov(r1, Operand(ExternalReference(f)));
+ mov(r1, Operand(ExternalReference(f, isolate())));
CEntryStub stub(1);
CallStub(&stub);
}
void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
const Runtime::Function* function = Runtime::FunctionForId(id);
mov(r0, Operand(function->nargs));
- mov(r1, Operand(ExternalReference(function)));
+ mov(r1, Operand(ExternalReference(function, isolate())));
CEntryStub stub(1);
stub.SaveDoubles();
CallStub(&stub);
void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
int num_arguments,
int result_size) {
- TailCallExternalReference(ExternalReference(fid), num_arguments, result_size);
+ TailCallExternalReference(ExternalReference(fid, isolate()),
+ num_arguments,
+ result_size);
}
Register scratch,
int num_arguments) {
CallCFunctionHelper(function,
- ExternalReference::the_hole_value_location(),
+ ExternalReference::the_hole_value_location(isolate()),
scratch,
num_arguments);
}
__ add(r1, current_input_offset(), Operand(end_of_input_address()));
ExternalReference function =
- ExternalReference::re_case_insensitive_compare_uc16();
+ ExternalReference::re_case_insensitive_compare_uc16(masm_->isolate());
__ CallCFunction(function, argument_count);
// Check if function returned non-zero for success or zero for failure.
Label stack_ok;
ExternalReference stack_limit =
- ExternalReference::address_of_stack_limit();
+ ExternalReference::address_of_stack_limit(masm_->isolate());
__ mov(r0, Operand(stack_limit));
__ ldr(r0, MemOperand(r0));
__ sub(r0, sp, r0, SetCC);
__ mov(r0, backtrack_stackpointer());
__ add(r1, frame_pointer(), Operand(kStackHighEnd));
ExternalReference grow_stack =
- ExternalReference::re_grow_stack();
+ ExternalReference::re_grow_stack(masm_->isolate());
__ CallCFunction(grow_stack, num_arguments);
// If return NULL, we have failed to grow the stack, and
// must exit with a stack-overflow exception.
__ mov(r1, Operand(masm_->CodeObject()));
// r0 becomes return address pointer.
ExternalReference stack_guard_check =
- ExternalReference::re_check_stack_guard_state();
+ ExternalReference::re_check_stack_guard_state(masm_->isolate());
CallCFunctionUsingStub(stack_guard_check, num_arguments);
}
void RegExpMacroAssemblerARM::CheckPreemption() {
// Check for preemption.
ExternalReference stack_limit =
- ExternalReference::address_of_stack_limit();
+ ExternalReference::address_of_stack_limit(masm_->isolate());
__ mov(r0, Operand(stack_limit));
__ ldr(r0, MemOperand(r0));
__ cmp(sp, r0);
void RegExpMacroAssemblerARM::CheckStackLimit() {
ExternalReference stack_limit =
- ExternalReference::address_of_regexp_stack_limit();
+ ExternalReference::address_of_regexp_stack_limit(masm_->isolate());
__ mov(r0, Operand(stack_limit));
__ ldr(r0, MemOperand(r0));
__ cmp(backtrack_stackpointer(), Operand(r0));
__ mov(r2, Operand(Handle<Map>(transition)));
__ Push(r2, r0);
__ TailCallExternalReference(
- ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage)),
- 3, 1);
+ ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
+ masm->isolate()),
+ 3,
+ 1);
return;
}
PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
ExternalReference ref =
- ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly));
+ ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly),
+ masm->isolate());
__ mov(r0, Operand(5));
__ mov(r1, Operand(ref));
// garbage collection but instead return the allocation failure
// object.
const int kStackUnwindSpace = argc + kFastApiCallArguments + 1;
- ExternalReference ref =
- ExternalReference(&fun, ExternalReference::DIRECT_API_CALL);
+ ExternalReference ref = ExternalReference(&fun,
+ ExternalReference::DIRECT_API_CALL,
+ masm->isolate());
return masm->TryCallApiFunctionAndReturn(ref, kStackUnwindSpace);
}
interceptor_holder);
__ CallExternalReference(
- ExternalReference(
- IC_Utility(IC::kLoadPropertyWithInterceptorForCall)),
- 5);
+ ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForCall),
+ masm->isolate()),
+ 5);
// Restore the name_ register.
__ pop(name_);
// object.
const int kStackUnwindSpace = 4;
ExternalReference ref =
- ExternalReference(&fun, ExternalReference::DIRECT_GETTER_CALL);
+ ExternalReference(&fun,
+ ExternalReference::DIRECT_GETTER_CALL,
+ masm()->isolate());
return masm()->TryCallApiFunctionAndReturn(ref, kStackUnwindSpace);
}
}
ExternalReference ref =
- ExternalReference(IC_Utility(IC::kLoadCallbackProperty));
+ ExternalReference(IC_Utility(IC::kLoadCallbackProperty),
+ masm()->isolate());
__ TailCallExternalReference(ref, 5, 1);
}
} else { // !compile_followup_inline
PushInterceptorArguments(masm(), receiver, holder_reg,
name_reg, interceptor_holder);
- ExternalReference ref = ExternalReference(
- IC_Utility(IC::kLoadPropertyWithInterceptorForLoad));
+ ExternalReference ref =
+ ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad),
+ masm()->isolate());
__ TailCallExternalReference(ref, 5, 1);
}
}
__ b(&call_builtin);
}
+ Isolate* isolate = masm()->isolate();
ExternalReference new_space_allocation_top =
- ExternalReference::new_space_allocation_top_address();
+ ExternalReference::new_space_allocation_top_address(isolate);
ExternalReference new_space_allocation_limit =
- ExternalReference::new_space_allocation_limit_address();
+ ExternalReference::new_space_allocation_limit_address(isolate);
const int kAllocationDelta = 4;
// Load top and check if it is the end of elements.
__ Ret();
}
__ bind(&call_builtin);
- __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush),
+ __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush,
+ masm()->isolate()),
argc + 1,
1);
}
__ Ret();
__ bind(&call_builtin);
- __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPop),
+ __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPop,
+ masm()->isolate()),
argc + 1,
1);
// Do tail-call to the runtime system.
ExternalReference store_callback_property =
- ExternalReference(IC_Utility(IC::kStoreCallbackProperty));
+ ExternalReference(IC_Utility(IC::kStoreCallbackProperty),
+ masm()->isolate());
__ TailCallExternalReference(store_callback_property, 4, 1);
// Handle store cache miss.
// Do tail-call to the runtime system.
ExternalReference store_ic_property =
- ExternalReference(IC_Utility(IC::kStoreInterceptorProperty));
+ ExternalReference(IC_Utility(IC::kStoreInterceptorProperty),
+ masm()->isolate());
__ TailCallExternalReference(store_ic_property, 4, 1);
// Handle store cache miss.
// -----------------------------------------------------------------------------
// Implementation of ExternalReference
-ExternalReference::ExternalReference(Builtins::CFunctionId id)
- : address_(Redirect(Builtins::c_function_address(id))) {}
+ExternalReference::ExternalReference(Builtins::CFunctionId id, Isolate* isolate)
+ : address_(Redirect(isolate, Builtins::c_function_address(id))) {}
ExternalReference::ExternalReference(
- ApiFunction* fun, Type type = ExternalReference::BUILTIN_CALL)
- : address_(Redirect(fun->address(), type)) {}
+ ApiFunction* fun,
+ Type type = ExternalReference::BUILTIN_CALL,
+ Isolate* isolate = NULL)
+ : address_(Redirect(isolate, fun->address(), type)) {}
-ExternalReference::ExternalReference(Builtins::Name name)
- : address_(Isolate::Current()->builtins()->builtin_address(name)) {}
+ExternalReference::ExternalReference(Builtins::Name name, Isolate* isolate)
+ : address_(isolate->builtins()->builtin_address(name)) {}
-ExternalReference::ExternalReference(Runtime::FunctionId id)
- : address_(Redirect(Runtime::FunctionForId(id)->entry)) {}
+ExternalReference::ExternalReference(Runtime::FunctionId id,
+ Isolate* isolate)
+ : address_(Redirect(isolate, Runtime::FunctionForId(id)->entry)) {}
-ExternalReference::ExternalReference(const Runtime::Function* f)
- : address_(Redirect(f->entry)) {}
+ExternalReference::ExternalReference(const Runtime::Function* f,
+ Isolate* isolate)
+ : address_(Redirect(isolate, f->entry)) {}
ExternalReference ExternalReference::isolate_address() {
}
-ExternalReference::ExternalReference(const IC_Utility& ic_utility)
- : address_(Redirect(ic_utility.address())) {}
+ExternalReference::ExternalReference(const IC_Utility& ic_utility,
+ Isolate* isolate)
+ : address_(Redirect(isolate, ic_utility.address())) {}
#ifdef ENABLE_DEBUGGER_SUPPORT
-ExternalReference::ExternalReference(const Debug_Address& debug_address)
- : address_(debug_address.address(Isolate::Current())) {}
+ExternalReference::ExternalReference(const Debug_Address& debug_address,
+ Isolate* isolate)
+ : address_(debug_address.address(isolate)) {}
#endif
ExternalReference::ExternalReference(StatsCounter* counter)
: address_(reinterpret_cast<Address>(counter->GetInternalPointer())) {}
-ExternalReference::ExternalReference(Isolate::AddressId id)
- : address_(Isolate::Current()->get_address_from_id(id)) {}
+ExternalReference::ExternalReference(Isolate::AddressId id, Isolate* isolate)
+ : address_(isolate->get_address_from_id(id)) {}
ExternalReference::ExternalReference(const SCTableReference& table_ref)
: address_(table_ref.address()) {}
-ExternalReference ExternalReference::perform_gc_function() {
- return ExternalReference(Redirect(FUNCTION_ADDR(Runtime::PerformGC)));
+ExternalReference ExternalReference::perform_gc_function(Isolate* isolate) {
+ return ExternalReference(Redirect(isolate,
+ FUNCTION_ADDR(Runtime::PerformGC)));
}
-ExternalReference ExternalReference::fill_heap_number_with_random_function() {
- return
- ExternalReference(Redirect(FUNCTION_ADDR(V8::FillHeapNumberWithRandom)));
+ExternalReference ExternalReference::fill_heap_number_with_random_function(
+ Isolate* isolate) {
+ return ExternalReference(Redirect(
+ isolate,
+ FUNCTION_ADDR(V8::FillHeapNumberWithRandom)));
}
-ExternalReference ExternalReference::delete_handle_scope_extensions() {
- return ExternalReference(Redirect(FUNCTION_ADDR(
- HandleScope::DeleteExtensions)));
+ExternalReference ExternalReference::delete_handle_scope_extensions(
+ Isolate* isolate) {
+ return ExternalReference(Redirect(
+ isolate,
+ FUNCTION_ADDR(HandleScope::DeleteExtensions)));
}
-ExternalReference ExternalReference::random_uint32_function() {
- return ExternalReference(Redirect(FUNCTION_ADDR(V8::Random)));
+ExternalReference ExternalReference::random_uint32_function(
+ Isolate* isolate) {
+ return ExternalReference(Redirect(isolate, FUNCTION_ADDR(V8::Random)));
}
-ExternalReference ExternalReference::transcendental_cache_array_address() {
- return ExternalReference(Isolate::Current()->transcendental_cache()->
- cache_array_address());
+ExternalReference ExternalReference::transcendental_cache_array_address(
+ Isolate* isolate) {
+ return ExternalReference(
+ isolate->transcendental_cache()->cache_array_address());
}
-ExternalReference ExternalReference::new_deoptimizer_function() {
+ExternalReference ExternalReference::new_deoptimizer_function(
+ Isolate* isolate) {
return ExternalReference(
- Redirect(FUNCTION_ADDR(Deoptimizer::New)));
+ Redirect(isolate, FUNCTION_ADDR(Deoptimizer::New)));
}
-ExternalReference ExternalReference::compute_output_frames_function() {
+ExternalReference ExternalReference::compute_output_frames_function(
+ Isolate* isolate) {
return ExternalReference(
- Redirect(FUNCTION_ADDR(Deoptimizer::ComputeOutputFrames)));
+ Redirect(isolate, FUNCTION_ADDR(Deoptimizer::ComputeOutputFrames)));
}
-ExternalReference ExternalReference::global_contexts_list() {
- return ExternalReference(Isolate::Current()->
- heap()->global_contexts_list_address());
+ExternalReference ExternalReference::global_contexts_list(Isolate* isolate) {
+ return ExternalReference(isolate->heap()->global_contexts_list_address());
}
-ExternalReference ExternalReference::keyed_lookup_cache_keys() {
- return ExternalReference(Isolate::Current()->
- keyed_lookup_cache()->keys_address());
+ExternalReference ExternalReference::keyed_lookup_cache_keys(Isolate* isolate) {
+ return ExternalReference(isolate->keyed_lookup_cache()->keys_address());
}
-ExternalReference ExternalReference::keyed_lookup_cache_field_offsets() {
- return ExternalReference(Isolate::Current()->
- keyed_lookup_cache()->field_offsets_address());
+ExternalReference ExternalReference::keyed_lookup_cache_field_offsets(
+ Isolate* isolate) {
+ return ExternalReference(
+ isolate->keyed_lookup_cache()->field_offsets_address());
}
-ExternalReference ExternalReference::the_hole_value_location() {
- return ExternalReference(FACTORY->the_hole_value().location());
+ExternalReference ExternalReference::the_hole_value_location(Isolate* isolate) {
+ return ExternalReference(isolate->factory()->the_hole_value().location());
}
-ExternalReference ExternalReference::arguments_marker_location() {
- return ExternalReference(FACTORY->arguments_marker().location());
+ExternalReference ExternalReference::arguments_marker_location(
+ Isolate* isolate) {
+ return ExternalReference(isolate->factory()->arguments_marker().location());
}
-ExternalReference ExternalReference::roots_address() {
- return ExternalReference(HEAP->roots_address());
+ExternalReference ExternalReference::roots_address(Isolate* isolate) {
+ return ExternalReference(isolate->heap()->roots_address());
}
-ExternalReference ExternalReference::address_of_stack_limit() {
- return ExternalReference(
- Isolate::Current()->stack_guard()->address_of_jslimit());
+ExternalReference ExternalReference::address_of_stack_limit(Isolate* isolate) {
+ return ExternalReference(isolate->stack_guard()->address_of_jslimit());
}
-ExternalReference ExternalReference::address_of_real_stack_limit() {
- return ExternalReference(
- Isolate::Current()->stack_guard()->address_of_real_jslimit());
+ExternalReference ExternalReference::address_of_real_stack_limit(
+ Isolate* isolate) {
+ return ExternalReference(isolate->stack_guard()->address_of_real_jslimit());
}
-ExternalReference ExternalReference::address_of_regexp_stack_limit() {
- return ExternalReference(
- Isolate::Current()->regexp_stack()->limit_address());
+ExternalReference ExternalReference::address_of_regexp_stack_limit(
+ Isolate* isolate) {
+ return ExternalReference(isolate->regexp_stack()->limit_address());
}
-ExternalReference ExternalReference::new_space_start() {
- return ExternalReference(HEAP->NewSpaceStart());
+ExternalReference ExternalReference::new_space_start(Isolate* isolate) {
+ return ExternalReference(isolate->heap()->NewSpaceStart());
}
-ExternalReference ExternalReference::new_space_mask() {
- return ExternalReference(reinterpret_cast<Address>(HEAP->NewSpaceMask()));
+ExternalReference ExternalReference::new_space_mask(Isolate* isolate) {
+ Address mask = reinterpret_cast<Address>(isolate->heap()->NewSpaceMask());
+ return ExternalReference(mask);
}
-ExternalReference ExternalReference::new_space_allocation_top_address() {
- return ExternalReference(HEAP->NewSpaceAllocationTopAddress());
+ExternalReference ExternalReference::new_space_allocation_top_address(
+ Isolate* isolate) {
+ return ExternalReference(isolate->heap()->NewSpaceAllocationTopAddress());
}
-ExternalReference ExternalReference::heap_always_allocate_scope_depth() {
- return ExternalReference(HEAP->always_allocate_scope_depth_address());
+ExternalReference ExternalReference::heap_always_allocate_scope_depth(
+ Isolate* isolate) {
+ Heap* heap = isolate->heap();
+ return ExternalReference(heap->always_allocate_scope_depth_address());
}
-ExternalReference ExternalReference::new_space_allocation_limit_address() {
- return ExternalReference(HEAP->NewSpaceAllocationLimitAddress());
+ExternalReference ExternalReference::new_space_allocation_limit_address(
+ Isolate* isolate) {
+ return ExternalReference(isolate->heap()->NewSpaceAllocationLimitAddress());
}
}
-ExternalReference ExternalReference::scheduled_exception_address() {
- return ExternalReference(Isolate::Current()->scheduled_exception_address());
+ExternalReference ExternalReference::scheduled_exception_address(
+ Isolate* isolate) {
+ return ExternalReference(isolate->scheduled_exception_address());
}
#ifndef V8_INTERPRETED_REGEXP
-ExternalReference ExternalReference::re_check_stack_guard_state() {
+ExternalReference ExternalReference::re_check_stack_guard_state(
+ Isolate* isolate) {
Address function;
#ifdef V8_TARGET_ARCH_X64
function = FUNCTION_ADDR(RegExpMacroAssemblerX64::CheckStackGuardState);
#else
UNREACHABLE();
#endif
- return ExternalReference(Redirect(function));
+ return ExternalReference(Redirect(isolate, function));
}
-ExternalReference ExternalReference::re_grow_stack() {
+ExternalReference ExternalReference::re_grow_stack(Isolate* isolate) {
return ExternalReference(
- Redirect(FUNCTION_ADDR(NativeRegExpMacroAssembler::GrowStack)));
+ Redirect(isolate, FUNCTION_ADDR(NativeRegExpMacroAssembler::GrowStack)));
}
-ExternalReference ExternalReference::re_case_insensitive_compare_uc16() {
+ExternalReference ExternalReference::re_case_insensitive_compare_uc16(
+ Isolate* isolate) {
return ExternalReference(Redirect(
+ isolate,
FUNCTION_ADDR(NativeRegExpMacroAssembler::CaseInsensitiveCompareUC16)));
}
NativeRegExpMacroAssembler::word_character_map_address());
}
-ExternalReference ExternalReference::address_of_static_offsets_vector() {
- return ExternalReference(OffsetsVector::static_offsets_vector_address(
- Isolate::Current()));
+ExternalReference ExternalReference::address_of_static_offsets_vector(
+ Isolate* isolate) {
+ return ExternalReference(
+ OffsetsVector::static_offsets_vector_address(isolate));
}
-ExternalReference ExternalReference::address_of_regexp_stack_memory_address() {
+ExternalReference ExternalReference::address_of_regexp_stack_memory_address(
+ Isolate* isolate) {
return ExternalReference(
- Isolate::Current()->regexp_stack()->memory_address());
+ isolate->regexp_stack()->memory_address());
}
-ExternalReference ExternalReference::address_of_regexp_stack_memory_size() {
- return ExternalReference(
- Isolate::Current()->regexp_stack()->memory_size_address());
+ExternalReference ExternalReference::address_of_regexp_stack_memory_size(
+ Isolate* isolate) {
+ return ExternalReference(isolate->regexp_stack()->memory_size_address());
}
#endif // V8_INTERPRETED_REGEXP
}
-ExternalReference ExternalReference::math_sin_double_function() {
- return ExternalReference(Redirect(FUNCTION_ADDR(math_sin_double),
+ExternalReference ExternalReference::math_sin_double_function(
+ Isolate* isolate) {
+ return ExternalReference(Redirect(isolate,
+ FUNCTION_ADDR(math_sin_double),
FP_RETURN_CALL));
}
-ExternalReference ExternalReference::math_cos_double_function() {
- return ExternalReference(Redirect(FUNCTION_ADDR(math_cos_double),
+ExternalReference ExternalReference::math_cos_double_function(
+ Isolate* isolate) {
+ return ExternalReference(Redirect(isolate,
+ FUNCTION_ADDR(math_cos_double),
FP_RETURN_CALL));
}
-ExternalReference ExternalReference::math_log_double_function() {
- return ExternalReference(Redirect(FUNCTION_ADDR(math_log_double),
+ExternalReference ExternalReference::math_log_double_function(
+ Isolate* isolate) {
+ return ExternalReference(Redirect(isolate,
+ FUNCTION_ADDR(math_log_double),
FP_RETURN_CALL));
}
}
-ExternalReference ExternalReference::power_double_double_function() {
- return ExternalReference(Redirect(FUNCTION_ADDR(power_double_double),
+ExternalReference ExternalReference::power_double_double_function(
+ Isolate* isolate) {
+ return ExternalReference(Redirect(isolate,
+ FUNCTION_ADDR(power_double_double),
FP_RETURN_CALL));
}
-ExternalReference ExternalReference::power_double_int_function() {
- return ExternalReference(Redirect(FUNCTION_ADDR(power_double_int),
+ExternalReference ExternalReference::power_double_int_function(
+ Isolate* isolate) {
+ return ExternalReference(Redirect(isolate,
+ FUNCTION_ADDR(power_double_int),
FP_RETURN_CALL));
}
ExternalReference ExternalReference::double_fp_operation(
- Token::Value operation) {
+ Token::Value operation, Isolate* isolate) {
typedef double BinaryFPOperation(double x, double y);
BinaryFPOperation* function = NULL;
switch (operation) {
UNREACHABLE();
}
// Passing true as 2nd parameter indicates that they return an fp value.
- return ExternalReference(Redirect(FUNCTION_ADDR(function), FP_RETURN_CALL));
+ return ExternalReference(Redirect(isolate,
+ FUNCTION_ADDR(function),
+ FP_RETURN_CALL));
}
-ExternalReference ExternalReference::compare_doubles() {
- return ExternalReference(Redirect(FUNCTION_ADDR(native_compare_doubles),
+ExternalReference ExternalReference::compare_doubles(Isolate* isolate) {
+ return ExternalReference(Redirect(isolate,
+ FUNCTION_ADDR(native_compare_doubles),
BUILTIN_CALL));
}
#ifdef ENABLE_DEBUGGER_SUPPORT
-ExternalReference ExternalReference::debug_break() {
- return ExternalReference(Redirect(FUNCTION_ADDR(Debug::Break)));
+ExternalReference ExternalReference::debug_break(Isolate* isolate) {
+ return ExternalReference(Redirect(isolate, FUNCTION_ADDR(Debug::Break)));
}
-ExternalReference ExternalReference::debug_step_in_fp_address() {
- return ExternalReference(Isolate::Current()->debug()->step_in_fp_addr());
+ExternalReference ExternalReference::debug_step_in_fp_address(
+ Isolate* isolate) {
+ return ExternalReference(isolate->debug()->step_in_fp_addr());
}
#endif
// -----------------------------------------------------------------------------
+// Platform independent assembler base class.
+
+class AssemblerBase: public Malloced {
+ public:
+ explicit AssemblerBase(Isolate* isolate) : isolate_(isolate) {}
+
+ Isolate* isolate() const { return isolate_; }
+
+ private:
+ Isolate* isolate_;
+};
+
+// -----------------------------------------------------------------------------
// Common double constants.
class DoubleConstant: public AllStatic {
typedef void* ExternalReferenceRedirector(void* original, Type type);
- explicit ExternalReference(Builtins::CFunctionId id);
+ ExternalReference(Builtins::CFunctionId id, Isolate* isolate);
- explicit ExternalReference(ApiFunction* ptr, Type type);
+ ExternalReference(ApiFunction* ptr, Type type, Isolate* isolate);
- explicit ExternalReference(Builtins::Name name);
+ ExternalReference(Builtins::Name name, Isolate* isolate);
- explicit ExternalReference(Runtime::FunctionId id);
+ ExternalReference(Runtime::FunctionId id, Isolate* isolate);
- explicit ExternalReference(const Runtime::Function* f);
+ ExternalReference(const Runtime::Function* f, Isolate* isolate);
- explicit ExternalReference(const IC_Utility& ic_utility);
+ ExternalReference(const IC_Utility& ic_utility, Isolate* isolate);
#ifdef ENABLE_DEBUGGER_SUPPORT
- explicit ExternalReference(const Debug_Address& debug_address);
+ ExternalReference(const Debug_Address& debug_address, Isolate* isolate);
#endif
explicit ExternalReference(StatsCounter* counter);
- explicit ExternalReference(Isolate::AddressId id);
+ ExternalReference(Isolate::AddressId id, Isolate* isolate);
explicit ExternalReference(const SCTableReference& table_ref);
// pattern. This means that they have to be added to the
// ExternalReferenceTable in serialize.cc manually.
- static ExternalReference perform_gc_function();
- static ExternalReference fill_heap_number_with_random_function();
- static ExternalReference random_uint32_function();
- static ExternalReference transcendental_cache_array_address();
- static ExternalReference delete_handle_scope_extensions();
+ static ExternalReference perform_gc_function(Isolate* isolate);
+ static ExternalReference fill_heap_number_with_random_function(
+ Isolate* isolate);
+ static ExternalReference random_uint32_function(Isolate* isolate);
+ static ExternalReference transcendental_cache_array_address(Isolate* isolate);
+ static ExternalReference delete_handle_scope_extensions(Isolate* isolate);
// Deoptimization support.
- static ExternalReference new_deoptimizer_function();
- static ExternalReference compute_output_frames_function();
- static ExternalReference global_contexts_list();
+ static ExternalReference new_deoptimizer_function(Isolate* isolate);
+ static ExternalReference compute_output_frames_function(Isolate* isolate);
+ static ExternalReference global_contexts_list(Isolate* isolate);
// Static data in the keyed lookup cache.
- static ExternalReference keyed_lookup_cache_keys();
- static ExternalReference keyed_lookup_cache_field_offsets();
+ static ExternalReference keyed_lookup_cache_keys(Isolate* isolate);
+ static ExternalReference keyed_lookup_cache_field_offsets(Isolate* isolate);
// Static variable Factory::the_hole_value.location()
- static ExternalReference the_hole_value_location();
+ static ExternalReference the_hole_value_location(Isolate* isolate);
// Static variable Factory::arguments_marker.location()
- static ExternalReference arguments_marker_location();
+ static ExternalReference arguments_marker_location(Isolate* isolate);
// Static variable Heap::roots_address()
- static ExternalReference roots_address();
+ static ExternalReference roots_address(Isolate* isolate);
// Static variable StackGuard::address_of_jslimit()
- static ExternalReference address_of_stack_limit();
+ static ExternalReference address_of_stack_limit(Isolate* isolate);
// Static variable StackGuard::address_of_real_jslimit()
- static ExternalReference address_of_real_stack_limit();
+ static ExternalReference address_of_real_stack_limit(Isolate* isolate);
// Static variable RegExpStack::limit_address()
- static ExternalReference address_of_regexp_stack_limit();
+ static ExternalReference address_of_regexp_stack_limit(Isolate* isolate);
// Static variables for RegExp.
- static ExternalReference address_of_static_offsets_vector();
- static ExternalReference address_of_regexp_stack_memory_address();
- static ExternalReference address_of_regexp_stack_memory_size();
+ static ExternalReference address_of_static_offsets_vector(Isolate* isolate);
+ static ExternalReference address_of_regexp_stack_memory_address(
+ Isolate* isolate);
+ static ExternalReference address_of_regexp_stack_memory_size(
+ Isolate* isolate);
// Static variable Heap::NewSpaceStart()
- static ExternalReference new_space_start();
- static ExternalReference new_space_mask();
- static ExternalReference heap_always_allocate_scope_depth();
+ static ExternalReference new_space_start(Isolate* isolate);
+ static ExternalReference new_space_mask(Isolate* isolate);
+ static ExternalReference heap_always_allocate_scope_depth(Isolate* isolate);
// Used for fast allocation in generated code.
- static ExternalReference new_space_allocation_top_address();
- static ExternalReference new_space_allocation_limit_address();
+ static ExternalReference new_space_allocation_top_address(Isolate* isolate);
+ static ExternalReference new_space_allocation_limit_address(Isolate* isolate);
- static ExternalReference double_fp_operation(Token::Value operation);
- static ExternalReference compare_doubles();
- static ExternalReference power_double_double_function();
- static ExternalReference power_double_int_function();
+ static ExternalReference double_fp_operation(Token::Value operation,
+ Isolate* isolate);
+ static ExternalReference compare_doubles(Isolate* isolate);
+ static ExternalReference power_double_double_function(Isolate* isolate);
+ static ExternalReference power_double_int_function(Isolate* isolate);
static ExternalReference handle_scope_next_address();
static ExternalReference handle_scope_limit_address();
static ExternalReference handle_scope_level_address();
- static ExternalReference scheduled_exception_address();
+ static ExternalReference scheduled_exception_address(Isolate* isolate);
// Static variables containing common double constants.
static ExternalReference address_of_min_int();
static ExternalReference address_of_negative_infinity();
static ExternalReference address_of_nan();
- static ExternalReference math_sin_double_function();
- static ExternalReference math_cos_double_function();
- static ExternalReference math_log_double_function();
+ static ExternalReference math_sin_double_function(Isolate* isolate);
+ static ExternalReference math_cos_double_function(Isolate* isolate);
+ static ExternalReference math_log_double_function(Isolate* isolate);
Address address() const {return reinterpret_cast<Address>(address_);}
#ifdef ENABLE_DEBUGGER_SUPPORT
// Function Debug::Break()
- static ExternalReference debug_break();
+ static ExternalReference debug_break(Isolate* isolate);
// Used to check if single stepping is enabled in generated code.
- static ExternalReference debug_step_in_fp_address();
+ static ExternalReference debug_step_in_fp_address(Isolate* isolate);
#endif
#ifndef V8_INTERPRETED_REGEXP
// C functions called from RegExp generated code.
// Function NativeRegExpMacroAssembler::CaseInsensitiveCompareUC16()
- static ExternalReference re_case_insensitive_compare_uc16();
+ static ExternalReference re_case_insensitive_compare_uc16(Isolate* isolate);
// Function RegExpMacroAssembler*::CheckStackGuardState()
- static ExternalReference re_check_stack_guard_state();
+ static ExternalReference re_check_stack_guard_state(Isolate* isolate);
// Function NativeRegExpMacroAssembler::GrowStack()
- static ExternalReference re_grow_stack();
+ static ExternalReference re_grow_stack(Isolate* isolate);
// byte NativeRegExpMacroAssembler::word_character_bitmap
static ExternalReference re_word_character_map();
explicit ExternalReference(void* address)
: address_(address) {}
- static void* Redirect(void* address,
+ static void* Redirect(Isolate* isolate,
+ void* address,
Type type = ExternalReference::BUILTIN_CALL) {
ExternalReferenceRedirector* redirector =
reinterpret_cast<ExternalReferenceRedirector*>(
- Isolate::Current()->external_reference_redirector());
+ isolate->external_reference_redirector());
if (redirector == NULL) return address;
void* answer = (*redirector)(address, type);
return answer;
}
- static void* Redirect(Address address_arg,
+ static void* Redirect(Isolate* isolate,
+ Address address_arg,
Type type = ExternalReference::BUILTIN_CALL) {
ExternalReferenceRedirector* redirector =
reinterpret_cast<ExternalReferenceRedirector*>(
- Isolate::Current()->external_reference_redirector());
+ isolate->external_reference_redirector());
void* address = reinterpret_cast<void*>(address_arg);
void* answer = (redirector == NULL) ?
address :
#endif
Assembler::Assembler(void* buffer, int buffer_size)
- : positions_recorder_(this),
+ : AssemblerBase(Isolate::Current()),
+ positions_recorder_(this),
emit_debug_code_(FLAG_debug_code) {
- Isolate* isolate = Isolate::Current();
if (buffer == NULL) {
// Do our own buffer management.
if (buffer_size <= kMinimalBufferSize) {
buffer_size = kMinimalBufferSize;
- if (isolate->assembler_spare_buffer() != NULL) {
- buffer = isolate->assembler_spare_buffer();
- isolate->set_assembler_spare_buffer(NULL);
+ if (isolate()->assembler_spare_buffer() != NULL) {
+ buffer = isolate()->assembler_spare_buffer();
+ isolate()->set_assembler_spare_buffer(NULL);
}
}
if (buffer == NULL) {
Assembler::~Assembler() {
- Isolate* isolate = Isolate::Current();
if (own_buffer_) {
- if (isolate->assembler_spare_buffer() == NULL &&
+ if (isolate()->assembler_spare_buffer() == NULL &&
buffer_size_ == kMinimalBufferSize) {
- isolate->set_assembler_spare_buffer(buffer_);
+ isolate()->set_assembler_spare_buffer(buffer_);
} else {
DeleteArray(buffer_);
}
void Assembler::cpuid() {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(CPUID));
+ ASSERT(isolate()->cpu_features()->IsEnabled(CPUID));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
EMIT(0x0F);
void Assembler::cmov(Condition cc, Register dst, int32_t imm32) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(CMOV));
+ ASSERT(isolate()->cpu_features()->IsEnabled(CMOV));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
UNIMPLEMENTED();
void Assembler::cmov(Condition cc, Register dst, Handle<Object> handle) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(CMOV));
+ ASSERT(isolate()->cpu_features()->IsEnabled(CMOV));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
UNIMPLEMENTED();
void Assembler::cmov(Condition cc, Register dst, const Operand& src) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(CMOV));
+ ASSERT(isolate()->cpu_features()->IsEnabled(CMOV));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
// Opcode: 0f 40 + cc /r.
void Assembler::rdtsc() {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(RDTSC));
+ ASSERT(isolate()->cpu_features()->IsEnabled(RDTSC));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
EMIT(0x0F);
void Assembler::fisttp_s(const Operand& adr) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE3));
+ ASSERT(isolate()->cpu_features()->IsEnabled(SSE3));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
EMIT(0xDB);
void Assembler::fisttp_d(const Operand& adr) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE3));
+ ASSERT(isolate()->cpu_features()->IsEnabled(SSE3));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
EMIT(0xDD);
void Assembler::cvttss2si(Register dst, const Operand& src) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
+ ASSERT(isolate()->cpu_features()->IsEnabled(SSE2));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
EMIT(0xF3);
void Assembler::cvttsd2si(Register dst, const Operand& src) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
+ ASSERT(isolate()->cpu_features()->IsEnabled(SSE2));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
EMIT(0xF2);
void Assembler::cvtsi2sd(XMMRegister dst, const Operand& src) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
+ ASSERT(isolate()->cpu_features()->IsEnabled(SSE2));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
EMIT(0xF2);
void Assembler::cvtss2sd(XMMRegister dst, XMMRegister src) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
+ ASSERT(isolate()->cpu_features()->IsEnabled(SSE2));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
EMIT(0xF3);
void Assembler::addsd(XMMRegister dst, XMMRegister src) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
+ ASSERT(isolate()->cpu_features()->IsEnabled(SSE2));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
EMIT(0xF2);
void Assembler::mulsd(XMMRegister dst, XMMRegister src) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
+ ASSERT(isolate()->cpu_features()->IsEnabled(SSE2));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
EMIT(0xF2);
void Assembler::subsd(XMMRegister dst, XMMRegister src) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
+ ASSERT(isolate()->cpu_features()->IsEnabled(SSE2));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
EMIT(0xF2);
void Assembler::divsd(XMMRegister dst, XMMRegister src) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
+ ASSERT(isolate()->cpu_features()->IsEnabled(SSE2));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
EMIT(0xF2);
void Assembler::xorpd(XMMRegister dst, XMMRegister src) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
+ ASSERT(isolate()->cpu_features()->IsEnabled(SSE2));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
EMIT(0x66);
void Assembler::ucomisd(XMMRegister dst, XMMRegister src) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
+ ASSERT(isolate()->cpu_features()->IsEnabled(SSE2));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
EMIT(0x66);
void Assembler::movmskpd(Register dst, XMMRegister src) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
+ ASSERT(isolate()->cpu_features()->IsEnabled(SSE2));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
EMIT(0x66);
void Assembler::cmpltsd(XMMRegister dst, XMMRegister src) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
+ ASSERT(isolate()->cpu_features()->IsEnabled(SSE2));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
EMIT(0xF2);
void Assembler::movaps(XMMRegister dst, XMMRegister src) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
+ ASSERT(isolate()->cpu_features()->IsEnabled(SSE2));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
EMIT(0x0F);
void Assembler::movdqa(const Operand& dst, XMMRegister src) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
+ ASSERT(isolate()->cpu_features()->IsEnabled(SSE2));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
EMIT(0x66);
void Assembler::movdqa(XMMRegister dst, const Operand& src) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
+ ASSERT(isolate()->cpu_features()->IsEnabled(SSE2));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
EMIT(0x66);
void Assembler::movdqu(const Operand& dst, XMMRegister src ) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
+ ASSERT(isolate()->cpu_features()->IsEnabled(SSE2));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
EMIT(0xF3);
void Assembler::movdqu(XMMRegister dst, const Operand& src) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
+ ASSERT(isolate()->cpu_features()->IsEnabled(SSE2));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
EMIT(0xF3);
void Assembler::movntdqa(XMMRegister dst, const Operand& src) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE4_1));
+ ASSERT(isolate()->cpu_features()->IsEnabled(SSE4_1));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
EMIT(0x66);
void Assembler::movntdq(const Operand& dst, XMMRegister src) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
+ ASSERT(isolate()->cpu_features()->IsEnabled(SSE2));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
EMIT(0x66);
void Assembler::movsd(const Operand& dst, XMMRegister src ) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
+ ASSERT(isolate()->cpu_features()->IsEnabled(SSE2));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
EMIT(0xF2); // double
void Assembler::movsd(XMMRegister dst, const Operand& src) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
+ ASSERT(isolate()->cpu_features()->IsEnabled(SSE2));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
EMIT(0xF2); // double
void Assembler::movsd(XMMRegister dst, XMMRegister src) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
+ ASSERT(isolate()->cpu_features()->IsEnabled(SSE2));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
EMIT(0xF2);
void Assembler::movd(XMMRegister dst, const Operand& src) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
+ ASSERT(isolate()->cpu_features()->IsEnabled(SSE2));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
EMIT(0x66);
void Assembler::movd(const Operand& dst, XMMRegister src) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
+ ASSERT(isolate()->cpu_features()->IsEnabled(SSE2));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
EMIT(0x66);
void Assembler::pand(XMMRegister dst, XMMRegister src) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
+ ASSERT(isolate()->cpu_features()->IsEnabled(SSE2));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
EMIT(0x66);
void Assembler::pxor(XMMRegister dst, XMMRegister src) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
+ ASSERT(isolate()->cpu_features()->IsEnabled(SSE2));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
EMIT(0x66);
void Assembler::por(XMMRegister dst, XMMRegister src) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
+ ASSERT(isolate()->cpu_features()->IsEnabled(SSE2));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
EMIT(0x66);
void Assembler::ptest(XMMRegister dst, XMMRegister src) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE4_1));
+ ASSERT(isolate()->cpu_features()->IsEnabled(SSE4_1));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
EMIT(0x66);
void Assembler::psllq(XMMRegister reg, int8_t shift) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
+ ASSERT(isolate()->cpu_features()->IsEnabled(SSE2));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
EMIT(0x66);
void Assembler::psllq(XMMRegister dst, XMMRegister src) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
+ ASSERT(isolate()->cpu_features()->IsEnabled(SSE2));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
EMIT(0x66);
void Assembler::psrlq(XMMRegister reg, int8_t shift) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
+ ASSERT(isolate()->cpu_features()->IsEnabled(SSE2));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
EMIT(0x66);
void Assembler::psrlq(XMMRegister dst, XMMRegister src) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
+ ASSERT(isolate()->cpu_features()->IsEnabled(SSE2));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
EMIT(0x66);
void Assembler::pshufd(XMMRegister dst, XMMRegister src, int8_t shuffle) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
+ ASSERT(isolate()->cpu_features()->IsEnabled(SSE2));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
EMIT(0x66);
void Assembler::pextrd(const Operand& dst, XMMRegister src, int8_t offset) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE4_1));
+ ASSERT(isolate()->cpu_features()->IsEnabled(SSE4_1));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
EMIT(0x66);
void Assembler::pinsrd(XMMRegister dst, const Operand& src, int8_t offset) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE4_1));
+ ASSERT(isolate()->cpu_features()->IsEnabled(SSE4_1));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
EMIT(0x66);
void Assembler::GrowBuffer() {
- Isolate* isolate = Isolate::Current();
ASSERT(overflow());
if (!own_buffer_) FATAL("external code buffer is too small");
reloc_info_writer.pos(), desc.reloc_size);
// Switch buffers.
- if (isolate->assembler_spare_buffer() == NULL &&
+ if (isolate()->assembler_spare_buffer() == NULL &&
buffer_size_ == kMinimalBufferSize) {
- isolate->set_assembler_spare_buffer(buffer_);
+ isolate()->set_assembler_spare_buffer(buffer_);
} else {
DeleteArray(buffer_);
}
};
-class Assembler : public Malloced {
+class Assembler : public AssemblerBase {
private:
// We check before assembling an instruction that there is sufficient
// space to write an instruction and its relocation information.
void emit_sse_operand(XMMRegister dst, XMMRegister src);
void emit_sse_operand(Register dst, XMMRegister src);
- byte* addr_at(int pos) { return buffer_ + pos; }
+ byte* addr_at(int pos) { return buffer_ + pos; }
+
private:
byte byte_at(int pos) { return buffer_[pos]; }
void set_byte_at(int pos, byte value) { buffer_[pos] = value; }
// JumpToExternalReference expects eax to contain the number of arguments
// including the receiver and the extra arguments.
__ add(Operand(eax), Immediate(num_extra_args + 1));
- __ JumpToExternalReference(ExternalReference(id));
+ __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}
// Set expected number of arguments to zero (not changing eax).
__ Set(ebx, Immediate(0));
__ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
- __ jmp(Handle<Code>(Isolate::Current()->builtins()->builtin(
+ __ jmp(Handle<Code>(masm->isolate()->builtins()->builtin(
ArgumentsAdaptorTrampoline)), RelocInfo::CODE_TARGET);
}
Label undo_allocation;
#ifdef ENABLE_DEBUGGER_SUPPORT
ExternalReference debug_step_in_fp =
- ExternalReference::debug_step_in_fp_address();
+ ExternalReference::debug_step_in_fp_address(masm->isolate());
__ cmp(Operand::StaticVariable(debug_step_in_fp), Immediate(0));
__ j(not_equal, &rt_call);
#endif
if (is_api_function) {
__ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
Handle<Code> code = Handle<Code>(
- Isolate::Current()->builtins()->builtin(
- Builtins::HandleApiCallConstruct));
+ masm->isolate()->builtins()->builtin(Builtins::HandleApiCallConstruct));
ParameterCount expected(0);
__ InvokeCode(code, expected, expected,
RelocInfo::CODE_TARGET, CALL_FUNCTION);
// Invoke the code.
if (is_construct) {
- __ call(Handle<Code>(Isolate::Current()->builtins()->builtin(
+ __ call(Handle<Code>(masm->isolate()->builtins()->builtin(
Builtins::JSConstructCall)), RelocInfo::CODE_TARGET);
} else {
ParameterCount actual(eax);
__ j(not_zero, &function, taken);
__ Set(ebx, Immediate(0));
__ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
- __ jmp(Handle<Code>(Isolate::Current()->builtins()->builtin(
+ __ jmp(Handle<Code>(masm->isolate()->builtins()->builtin(
ArgumentsAdaptorTrampoline)), RelocInfo::CODE_TARGET);
__ bind(&function);
}
__ mov(edx, FieldOperand(edi, JSFunction::kCodeEntryOffset));
__ SmiUntag(ebx);
__ cmp(eax, Operand(ebx));
- __ j(not_equal, Handle<Code>(Isolate::Current()->builtins()->builtin(
+ __ j(not_equal, Handle<Code>(masm->isolate()->builtins()->builtin(
ArgumentsAdaptorTrampoline)));
ParameterCount expected(0);
// limit" is checked.
Label okay;
ExternalReference real_stack_limit =
- ExternalReference::address_of_real_stack_limit();
+ ExternalReference::address_of_real_stack_limit(masm->isolate());
__ mov(edi, Operand::StaticVariable(real_stack_limit));
// Make ecx the space we have left. The stack might already be overflowed
// here which will cause ecx to become negative.
__ mov(edx, Operand(ebp, 2 * kPointerSize)); // load arguments
// Use inline caching to speed up access to arguments.
- Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+ Handle<Code> ic(masm->isolate()->builtins()->builtin(
Builtins::KeyedLoadIC_Initialize));
__ call(ic, RelocInfo::CODE_TARGET);
// It is important that we do not have a test instruction after the
// Jump to the generic array code in case the specialized code cannot handle
// the construction.
__ bind(&generic_array_code);
- Code* code = Isolate::Current()->builtins()->builtin(
- Builtins::ArrayCodeGeneric);
+ Code* code = masm->isolate()->builtins()->builtin(Builtins::ArrayCodeGeneric);
Handle<Code> array_code(code);
__ jmp(array_code, RelocInfo::CODE_TARGET);
}
// Jump to the generic construct code in case the specialized code cannot
// handle the construction.
__ bind(&generic_constructor);
- Code* code = Isolate::Current()->builtins()->builtin(
+ Code* code = masm->isolate()->builtins()->builtin(
Builtins::JSConstructStubGeneric);
Handle<Code> generic_construct_stub(code);
__ jmp(generic_construct_stub, RelocInfo::CODE_TARGET);
// We shouldn't be performing on-stack replacement in the first
// place if the CPU features we need for the optimized Crankshaft
// code aren't supported.
- CpuFeatures* cpu_features = Isolate::Current()->cpu_features();
+ CpuFeatures* cpu_features = masm->isolate()->cpu_features();
cpu_features->Probe(false);
if (!cpu_features->IsSupported(SSE2)) {
__ Abort("Unreachable code: Cannot optimize without SSE2 support.");
__ bind(&stack_check);
NearLabel ok;
ExternalReference stack_limit =
- ExternalReference::address_of_stack_limit();
+ ExternalReference::address_of_stack_limit(masm->isolate());
__ cmp(esp, Operand::StaticVariable(stack_limit));
__ j(above_equal, &ok, taken);
StackCheckStub stub;
// number in eax.
__ AllocateHeapNumber(eax, ecx, ebx, slow);
// Store the result in the HeapNumber and return.
- if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
+ if (masm->isolate()->cpu_features()->IsSupported(SSE2)) {
CpuFeatures::Scope use_sse2(SSE2);
__ cvtsi2sd(xmm0, Operand(left));
__ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
}
if (runtime_operands_type_ != BinaryOpIC::UNINIT_OR_SMI) {
__ AllocateHeapNumber(ecx, ebx, no_reg, slow);
- if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
+ if (masm->isolate()->cpu_features()->IsSupported(SSE2)) {
CpuFeatures::Scope use_sse2(SSE2);
FloatingPointHelper::LoadSSE2Smis(masm, ebx);
switch (op_) {
}
Label not_floats;
- if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
+ if (masm->isolate()->cpu_features()->IsSupported(SSE2)) {
CpuFeatures::Scope use_sse2(SSE2);
if (static_operands_type_.IsNumber()) {
if (FLAG_debug_code) {
default: UNREACHABLE();
}
// Store the result in the HeapNumber and return.
- if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
+ if (masm->isolate()->cpu_features()->IsSupported(SSE2)) {
CpuFeatures::Scope use_sse2(SSE2);
__ cvtsi2sd(xmm0, Operand(ebx));
__ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
// Patch the caller to an appropriate specialized stub and return the
// operation result to the caller of the stub.
__ TailCallExternalReference(
- ExternalReference(IC_Utility(IC::kBinaryOp_Patch)),
+ ExternalReference(IC_Utility(IC::kBinaryOp_Patch), masm->isolate()),
5,
1);
}
// Patch the caller to an appropriate specialized stub and return the
// operation result to the caller of the stub.
__ TailCallExternalReference(
- ExternalReference(IC_Utility(IC::kTypeRecordingBinaryOp_Patch)),
+ ExternalReference(IC_Utility(IC::kTypeRecordingBinaryOp_Patch),
+ masm->isolate()),
5,
1);
}
// Patch the caller to an appropriate specialized stub and return the
// operation result to the caller of the stub.
__ TailCallExternalReference(
- ExternalReference(IC_Utility(IC::kTypeRecordingBinaryOp_Patch)),
+ ExternalReference(IC_Utility(IC::kTypeRecordingBinaryOp_Patch),
+ masm->isolate()),
5,
1);
}
// number in eax.
__ AllocateHeapNumber(eax, ecx, ebx, slow);
// Store the result in the HeapNumber and return.
- if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
+ if (masm->isolate()->cpu_features()->IsSupported(SSE2)) {
CpuFeatures::Scope use_sse2(SSE2);
__ cvtsi2sd(xmm0, Operand(left));
__ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
break;
}
__ AllocateHeapNumber(ecx, ebx, no_reg, slow);
- if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
+ if (masm->isolate()->cpu_features()->IsSupported(SSE2)) {
CpuFeatures::Scope use_sse2(SSE2);
FloatingPointHelper::LoadSSE2Smis(masm, ebx);
switch (op_) {
case Token::DIV: {
Label not_floats;
Label not_int32;
- if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
+ if (masm->isolate()->cpu_features()->IsSupported(SSE2)) {
CpuFeatures::Scope use_sse2(SSE2);
FloatingPointHelper::LoadSSE2Operands(masm, ¬_floats);
FloatingPointHelper::CheckSSE2OperandsAreInt32(masm, ¬_int32, ecx);
default: UNREACHABLE();
}
// Store the result in the HeapNumber and return.
- if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
+ if (masm->isolate()->cpu_features()->IsSupported(SSE2)) {
CpuFeatures::Scope use_sse2(SSE2);
__ cvtsi2sd(xmm0, Operand(ebx));
__ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
case Token::MUL:
case Token::DIV: {
Label not_floats;
- if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
+ if (masm->isolate()->cpu_features()->IsSupported(SSE2)) {
CpuFeatures::Scope use_sse2(SSE2);
FloatingPointHelper::LoadSSE2Operands(masm, ¬_floats);
default: UNREACHABLE();
}
// Store the result in the HeapNumber and return.
- if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
+ if (masm->isolate()->cpu_features()->IsSupported(SSE2)) {
CpuFeatures::Scope use_sse2(SSE2);
__ cvtsi2sd(xmm0, Operand(ebx));
__ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
case Token::MUL:
case Token::DIV: {
Label not_floats;
- if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
+ if (masm->isolate()->cpu_features()->IsSupported(SSE2)) {
CpuFeatures::Scope use_sse2(SSE2);
FloatingPointHelper::LoadSSE2Operands(masm, ¬_floats);
default: UNREACHABLE();
}
// Store the result in the HeapNumber and return.
- if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
+ if (masm->isolate()->cpu_features()->IsSupported(SSE2)) {
CpuFeatures::Scope use_sse2(SSE2);
__ cvtsi2sd(xmm0, Operand(ebx));
__ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
__ bind(&loaded);
} else { // UNTAGGED.
- if (Isolate::Current()->cpu_features()->IsSupported(SSE4_1)) {
+ if (masm->isolate()->cpu_features()->IsSupported(SSE4_1)) {
CpuFeatures::Scope sse4_scope(SSE4_1);
__ pextrd(Operand(edx), xmm1, 0x1); // copy xmm1[63..32] to edx.
} else {
// ebx = low 32 bits of double value.
// edx = high 32 bits of double value.
// ecx = TranscendentalCache::hash(double value).
- __ mov(eax,
- Immediate(ExternalReference::transcendental_cache_array_address()));
- // Eax points to cache array.
- __ mov(eax, Operand(eax, type_ * sizeof(
- Isolate::Current()->transcendental_cache()->caches_[0])));
+ ExternalReference cache_array =
+ ExternalReference::transcendental_cache_array_address(masm->isolate());
+ __ mov(eax, Immediate(cache_array));
+ int cache_array_index =
+ type_ * sizeof(masm->isolate()->transcendental_cache()->caches_[0]);
+ __ mov(eax, Operand(eax, cache_array_index));
// Eax points to the cache for the type type_.
// If NULL, the cache hasn't been initialized yet, so go through runtime.
__ test(eax, Operand(eax));
__ bind(&runtime_call_clear_stack);
__ fstp(0);
__ bind(&runtime_call);
- __ TailCallExternalReference(ExternalReference(RuntimeFunction()), 1, 1);
+ ExternalReference runtime =
+ ExternalReference(RuntimeFunction(), masm->isolate());
+ __ TailCallExternalReference(runtime, 1, 1);
} else { // UNTAGGED.
__ bind(&runtime_call_clear_stack);
__ bind(&runtime_call);
Register scratch = ebx;
Register scratch2 = edi;
if (type_info.IsInteger32() &&
- Isolate::Current()->cpu_features()->IsEnabled(SSE2)) {
+ masm->isolate()->cpu_features()->IsEnabled(SSE2)) {
CpuFeatures::Scope scope(SSE2);
__ cvttsd2si(ecx, FieldOperand(source, HeapNumber::kValueOffset));
return;
IntegerConvert(masm,
eax,
TypeInfo::Unknown(),
- Isolate::Current()->cpu_features()->IsSupported(SSE3),
+ masm->isolate()->cpu_features()->IsSupported(SSE3),
&slow);
// Do the bitwise operation and check if the result fits in a smi.
__ AllocateHeapNumber(ebx, edx, edi, &slow);
__ mov(eax, Operand(ebx));
}
- if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
+ if (masm->isolate()->cpu_features()->IsSupported(SSE2)) {
CpuFeatures::Scope use_sse2(SSE2);
__ cvtsi2sd(xmm0, Operand(ecx));
__ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
// Ensure that a RegExp stack is allocated.
ExternalReference address_of_regexp_stack_memory_address =
- ExternalReference::address_of_regexp_stack_memory_address();
+ ExternalReference::address_of_regexp_stack_memory_address(
+ masm->isolate());
ExternalReference address_of_regexp_stack_memory_size =
- ExternalReference::address_of_regexp_stack_memory_size();
+ ExternalReference::address_of_regexp_stack_memory_size(masm->isolate());
__ mov(ebx, Operand::StaticVariable(address_of_regexp_stack_memory_size));
__ test(ebx, Operand(ebx));
__ j(zero, &runtime, not_taken);
// Argument 5: static offsets vector buffer.
__ mov(Operand(esp, 4 * kPointerSize),
- Immediate(ExternalReference::address_of_static_offsets_vector()));
+ Immediate(ExternalReference::address_of_static_offsets_vector(
+ masm->isolate())));
// Argument 4: End of string data
// Argument 3: Start of string data
// stack overflow (on the backtrack stack) was detected in RegExp code but
// haven't created the exception yet. Handle that in the runtime system.
// TODO(592): Rerunning the RegExp to get the stack overflow exception.
- ExternalReference pending_exception(Isolate::k_pending_exception_address);
+ ExternalReference pending_exception(Isolate::k_pending_exception_address,
+ masm->isolate());
__ mov(edx,
- Operand::StaticVariable(ExternalReference::the_hole_value_location()));
+ Operand::StaticVariable(ExternalReference::the_hole_value_location(
+ masm->isolate())));
__ mov(eax, Operand::StaticVariable(pending_exception));
__ cmp(edx, Operand(eax));
__ j(equal, &runtime);
// Get the static offsets vector filled by the native regexp code.
ExternalReference address_of_static_offsets_vector =
- ExternalReference::address_of_static_offsets_vector();
+ ExternalReference::address_of_static_offsets_vector(masm->isolate());
__ mov(ecx, Immediate(address_of_static_offsets_vector));
// ebx: last_match_info backing store (FixedArray)
Register scratch = scratch2;
// Load the number string cache.
- ExternalReference roots_address = ExternalReference::roots_address();
+ ExternalReference roots_address =
+ ExternalReference::roots_address(masm->isolate());
__ mov(scratch, Immediate(Heap::kNumberStringCacheRootIndex));
__ mov(number_string_cache,
Operand::StaticArray(scratch, times_pointer_size, roots_address));
FixedArray::kHeaderSize));
__ test(probe, Immediate(kSmiTagMask));
__ j(zero, not_found);
- if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
+ if (masm->isolate()->cpu_features()->IsSupported(SSE2)) {
CpuFeatures::Scope fscope(SSE2);
__ movdbl(xmm0, FieldOperand(object, HeapNumber::kValueOffset));
__ movdbl(xmm1, FieldOperand(probe, HeapNumber::kValueOffset));
if (include_number_compare_) {
Label non_number_comparison;
Label unordered;
- if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
+ if (masm->isolate()->cpu_features()->IsSupported(SSE2)) {
CpuFeatures::Scope use_sse2(SSE2);
CpuFeatures::Scope use_cmov(CMOV);
__ Set(eax, Immediate(argc_));
__ Set(ebx, Immediate(0));
__ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
- Handle<Code> adaptor(Isolate::Current()->builtins()->builtin(
+ Handle<Code> adaptor(masm->isolate()->builtins()->builtin(
Builtins::ArgumentsAdaptorTrampoline));
__ jmp(adaptor, RelocInfo::CODE_TARGET);
}
}
ExternalReference scope_depth =
- ExternalReference::heap_always_allocate_scope_depth();
+ ExternalReference::heap_always_allocate_scope_depth(masm->isolate());
if (always_allocate_scope) {
__ inc(Operand::StaticVariable(scope_depth));
}
__ j(zero, &failure_returned, not_taken);
ExternalReference pending_exception_address(
- Isolate::k_pending_exception_address);
+ Isolate::k_pending_exception_address, masm->isolate());
// Check that there is no pending exception, otherwise we
// should have returned some failure value.
if (FLAG_debug_code) {
__ push(edx);
__ mov(edx, Operand::StaticVariable(
- ExternalReference::the_hole_value_location()));
+ ExternalReference::the_hole_value_location(masm->isolate())));
NearLabel okay;
__ cmp(edx, Operand::StaticVariable(pending_exception_address));
// Cannot use check here as it attempts to generate call into runtime.
__ j(equal, throw_out_of_memory_exception);
// Retrieve the pending exception and clear the variable.
+ ExternalReference the_hole_location =
+ ExternalReference::the_hole_value_location(masm->isolate());
__ mov(eax, Operand::StaticVariable(pending_exception_address));
- __ mov(edx,
- Operand::StaticVariable(ExternalReference::the_hole_value_location()));
+ __ mov(edx, Operand::StaticVariable(the_hole_location));
__ mov(Operand::StaticVariable(pending_exception_address), edx);
// Special handling of termination exceptions which are uncatchable
__ push(ebx);
// Save copies of the top frame descriptor on the stack.
- ExternalReference c_entry_fp(Isolate::k_c_entry_fp_address);
+ ExternalReference c_entry_fp(Isolate::k_c_entry_fp_address, masm->isolate());
__ push(Operand::StaticVariable(c_entry_fp));
#ifdef ENABLE_LOGGING_AND_PROFILING
// If this is the outermost JS call, set js_entry_sp value.
- ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address);
+ ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address,
+ masm->isolate());
__ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0));
__ j(not_equal, ¬_outermost_js);
__ mov(Operand::StaticVariable(js_entry_sp), ebp);
// Caught exception: Store result (exception) in the pending
// exception field in the JSEnv and return a failure sentinel.
- ExternalReference pending_exception(Isolate::k_pending_exception_address);
+ ExternalReference pending_exception(Isolate::k_pending_exception_address,
+ masm->isolate());
__ mov(Operand::StaticVariable(pending_exception), eax);
__ mov(eax, reinterpret_cast<int32_t>(Failure::Exception()));
__ jmp(&exit);
__ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER);
// Clear any pending exceptions.
- __ mov(edx,
- Operand::StaticVariable(ExternalReference::the_hole_value_location()));
+ ExternalReference the_hole_location =
+ ExternalReference::the_hole_value_location(masm->isolate());
+ __ mov(edx, Operand::StaticVariable(the_hole_location));
__ mov(Operand::StaticVariable(pending_exception), edx);
// Fake a receiver (NULL).
// cannot store a reference to the trampoline code directly in this
// stub, because the builtin stubs may not have been generated yet.
if (is_construct) {
- ExternalReference construct_entry(Builtins::JSConstructEntryTrampoline);
+ ExternalReference construct_entry(
+ Builtins::JSConstructEntryTrampoline,
+ masm->isolate());
__ mov(edx, Immediate(construct_entry));
} else {
- ExternalReference entry(Builtins::JSEntryTrampoline);
+ ExternalReference entry(Builtins::JSEntryTrampoline,
+ masm->isolate());
__ mov(edx, Immediate(entry));
}
__ mov(edx, Operand(edx, 0)); // deref address
// Unlink this frame from the handler chain.
__ pop(Operand::StaticVariable(ExternalReference(
- Isolate::k_handler_address)));
+ Isolate::k_handler_address,
+ masm->isolate())));
// Pop next_sp.
__ add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize));
// Restore the top frame descriptor from the stack.
__ bind(&exit);
__ pop(Operand::StaticVariable(ExternalReference(
- Isolate::k_c_entry_fp_address)));
+ Isolate::k_c_entry_fp_address,
+ masm->isolate())));
// Restore callee-saved registers (C calling conventions).
__ pop(ebx);
static const int8_t kCmpEdiImmediateByte2 = BitCast<int8_t, uint8_t>(0xff);
static const int8_t kMovEaxImmediateByte = BitCast<int8_t, uint8_t>(0xb8);
- ExternalReference roots_address = ExternalReference::roots_address();
+ ExternalReference roots_address =
+ ExternalReference::roots_address(masm->isolate());
ASSERT_EQ(object.code(), InstanceofStub::left().code());
ASSERT_EQ(function.code(), InstanceofStub::right().code());
// Load the symbol table.
Register symbol_table = c2;
- ExternalReference roots_address = ExternalReference::roots_address();
+ ExternalReference roots_address =
+ ExternalReference::roots_address(masm->isolate());
__ mov(scratch, Immediate(Heap::kSymbolTableRootIndex));
__ mov(symbol_table,
Operand::StaticArray(scratch, times_pointer_size, roots_address));
// Inlining the double comparison and falling back to the general compare
// stub if NaN is involved or SS2 or CMOV is unsupported.
- CpuFeatures* cpu_features = Isolate::Current()->cpu_features();
+ CpuFeatures* cpu_features = masm->isolate()->cpu_features();
if (cpu_features->IsSupported(SSE2) && cpu_features->IsSupported(CMOV)) {
CpuFeatures::Scope scope1(SSE2);
CpuFeatures::Scope scope2(CMOV);
__ push(ecx);
// Call the runtime system in a fresh internal frame.
- ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss));
+ ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss),
+ masm->isolate());
__ EnterInternalFrame();
__ push(edx);
__ push(eax);
ASSERT_EQ(0, loop_nesting_);
loop_nesting_ = info->is_in_loop() ? 1 : 0;
- Isolate::Current()->set_jump_target_compiling_deferred_code(false);
+ masm()->isolate()->set_jump_target_compiling_deferred_code(false);
{
CodeGenState state(this);
__ sar(val, 1);
// If there was an overflow, bits 30 and 31 of the original number disagree.
__ xor_(val, 0x80000000u);
- if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
+ if (masm()->isolate()->cpu_features()->IsSupported(SSE2)) {
CpuFeatures::Scope fscope(SSE2);
__ cvtsi2sd(xmm0, Operand(val));
} else {
no_reg, &allocation_failed);
VirtualFrame* clone = new VirtualFrame(frame_);
scratch.Unuse();
- if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
+ if (masm()->isolate()->cpu_features()->IsSupported(SSE2)) {
CpuFeatures::Scope fscope(SSE2);
__ movdbl(FieldOperand(val, HeapNumber::kValueOffset), xmm0);
} else {
RegisterFile empty_regs;
SetFrame(clone, &empty_regs);
__ bind(&allocation_failed);
- if (!Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
+ if (!masm()->isolate()->cpu_features()->IsSupported(SSE2)) {
// Pop the value from the floating point stack.
__ fstp(0);
}
safe_int32_mode_enabled() &&
expr->side_effect_free() &&
expr->num_bit_ops() > 2 &&
- Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
+ masm()->isolate()->cpu_features()->IsSupported(SSE2)) {
BreakTarget unsafe_bailout;
JumpTarget done;
unsafe_bailout.set_expected_height(frame_->height());
Label* DeferredInlineBinaryOperation::NonSmiInputLabel() {
if (Token::IsBitOp(op_) &&
- Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
+ masm()->isolate()->cpu_features()->IsSupported(SSE2)) {
return &non_smi_input_;
} else {
return entry_label();
void DeferredInlineBinaryOperation::Generate() {
// Registers are not saved implicitly for this stub, so we should not
// tread on the registers that were not passed to us.
- if (Isolate::Current()->cpu_features()->IsSupported(SSE2) &&
+ if (masm()->isolate()->cpu_features()->IsSupported(SSE2) &&
((op_ == Token::ADD) ||
(op_ == Token::SUB) ||
(op_ == Token::MUL) ||
// The left_ and right_ registers have not been initialized yet.
__ mov(right_, Immediate(smi_value_));
__ mov(left_, Operand(dst_));
- if (!Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
+ if (!masm()->isolate()->cpu_features()->IsSupported(SSE2)) {
__ jmp(entry_label());
return;
} else {
// This trashes right_.
__ AllocateHeapNumber(left_, right_, no_reg, &after_alloc_failure2);
__ bind(&allocation_ok);
- if (Isolate::Current()->cpu_features()->IsSupported(SSE2) &&
+ if (masm()->isolate()->cpu_features()->IsSupported(SSE2) &&
op_ != Token::SHR) {
CpuFeatures::Scope use_sse2(SSE2);
ASSERT(Token::IsBitOp(op_));
// constant smi. If the non-smi is a heap number and this is not
// a loop condition, inline the floating point code.
if (!is_loop_condition &&
- Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
+ masm()->isolate()->cpu_features()->IsSupported(SSE2)) {
// Right side is a constant smi and left side has been checked
// not to be a smi.
CpuFeatures::Scope use_sse2(SSE2);
ASSERT(right_side->is_register());
JumpTarget not_numbers;
- if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
+ if (masm()->isolate()->cpu_features()->IsSupported(SSE2)) {
CpuFeatures::Scope use_sse2(SSE2);
// Load left and right operand into registers xmm0 and xmm1 and compare.
__ j(not_equal, &build_args);
__ mov(ecx, FieldOperand(eax, JSFunction::kCodeEntryOffset));
__ sub(Operand(ecx), Immediate(Code::kHeaderSize - kHeapObjectTag));
- Handle<Code> apply_code(Isolate::Current()->builtins()->builtin(
+ Handle<Code> apply_code(masm()->isolate()->builtins()->builtin(
Builtins::FunctionApply));
__ cmp(Operand(ecx), Immediate(apply_code));
__ j(not_equal, &build_args);
void CodeGenerator::CheckStack() {
DeferredStackCheck* deferred = new DeferredStackCheck;
ExternalReference stack_limit =
- ExternalReference::address_of_stack_limit();
+ ExternalReference::address_of_stack_limit(masm()->isolate());
__ cmp(esp, Operand::StaticVariable(stack_limit));
deferred->Branch(below);
deferred->BindExit();
function_return_is_shadowed_ = function_return_was_shadowed;
// Get an external reference to the handler address.
- ExternalReference handler_address(Isolate::k_handler_address);
+ ExternalReference handler_address(Isolate::k_handler_address,
+ masm()->isolate());
// Make sure that there's nothing left on the stack above the
// handler structure.
function_return_is_shadowed_ = function_return_was_shadowed;
// Get an external reference to the handler address.
- ExternalReference handler_address(Isolate::k_handler_address);
+ ExternalReference handler_address(Isolate::k_handler_address,
+ masm()->isolate());
// If we can fall off the end of the try block, unlink from the try
// chain and set the state on the frame to FALLING.
__ bind(&heapnumber_allocated);
__ PrepareCallCFunction(0, ebx);
- __ CallCFunction(ExternalReference::random_uint32_function(), 0);
+ __ CallCFunction(ExternalReference::random_uint32_function(masm()->isolate()),
+ 0);
// Convert 32 random bits in eax to 0.(32 random bits) in a double
// by computing:
// ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)).
// This is implemented on both SSE2 and FPU.
- if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
+ if (masm()->isolate()->cpu_features()->IsSupported(SSE2)) {
CpuFeatures::Scope fscope(SSE2);
__ mov(ebx, Immediate(0x49800000)); // 1.0 x 2^20 as single.
__ movd(xmm1, Operand(ebx));
int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
Handle<FixedArray> jsfunction_result_caches(
- Isolate::Current()->global_context()->jsfunction_result_caches());
+ masm()->isolate()->global_context()->jsfunction_result_caches());
if (jsfunction_result_caches->length() <= cache_id) {
__ Abort("Attempt to use undefined cache.");
frame_->Push(FACTORY->undefined_value());
ASSERT(args->length() == 2);
Load(args->at(0));
Load(args->at(1));
- if (!Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
+ if (!masm()->isolate()->cpu_features()->IsSupported(SSE2)) {
Result res = frame_->CallRuntime(Runtime::kMath_pow, 2);
frame_->Push(&res);
} else {
ASSERT_EQ(args->length(), 1);
Load(args->at(0));
- if (!Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
+ if (!masm()->isolate()->cpu_features()->IsSupported(SSE2)) {
Result result = frame()->CallRuntime(Runtime::kMath_sqrt, 1);
frame()->Push(&result);
} else {
__ mov(eax, receiver_);
}
__ Set(ecx, Immediate(name_));
- Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+ Handle<Code> ic(masm()->isolate()->builtins()->builtin(
Builtins::LoadIC_Initialize));
RelocInfo::Mode mode = is_contextual_
? RelocInfo::CODE_TARGET_CONTEXT
// it in the IC initialization code and patch the cmp instruction.
// This means that we cannot allow test instructions after calls to
// KeyedLoadIC stubs in other places.
- Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+ Handle<Code> ic(masm()->isolate()->builtins()->builtin(
Builtins::KeyedLoadIC_Initialize));
__ call(ic, RelocInfo::CODE_TARGET);
// The delta from the start of the map-compare instruction to the
}
// Call the IC stub.
- Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+ Handle<Code> ic(masm()->isolate()->builtins()->builtin(
(strict_mode_ == kStrictMode) ? Builtins::KeyedStoreIC_Initialize_Strict
: Builtins::KeyedStoreIC_Initialize));
__ call(ic, RelocInfo::CODE_TARGET);
bool contextual_load_in_builtin =
is_contextual &&
- (Isolate::Current()->bootstrapper()->IsActive() ||
+ (masm()->isolate()->bootstrapper()->IsActive() ||
(!info_->closure().is_null() && info_->closure()->IsBuiltin()));
Result result;
__ int3();
__ bind(&ok);
}
- if (Isolate::Current()->cpu_features()->IsSupported(SSE2)) {
+ if (masm.isolate()->cpu_features()->IsSupported(SSE2)) {
CpuFeatures::Scope enable(SSE2);
__ push(edi);
__ push(esi);
__ RecordComment("// Calling from debug break to runtime - come in - over");
#endif
__ Set(eax, Immediate(0)); // No arguments.
- __ mov(ebx, Immediate(ExternalReference::debug_break()));
+ __ mov(ebx, Immediate(ExternalReference::debug_break(masm->isolate())));
CEntryStub ceb(1);
__ CallStub(&ceb);
// jumping to the target address intended by the caller and that was
// overwritten by the address of DebugBreakXXX.
ExternalReference after_break_target =
- ExternalReference(Debug_Address::AfterBreakTarget());
+ ExternalReference(Debug_Address::AfterBreakTarget(), masm->isolate());
__ jmp(Operand::StaticVariable(after_break_target));
}
void Debug::GenerateFrameDropperLiveEdit(MacroAssembler* masm) {
ExternalReference restarter_frame_function_slot =
- ExternalReference(Debug_Address::RestarterFrameFunctionPointer());
+ ExternalReference(Debug_Address::RestarterFrameFunctionPointer(),
+ masm->isolate());
__ mov(Operand::StaticVariable(restarter_frame_function_slot), Immediate(0));
// We do not know our frame height, but set esp based on ebp.
GeneratePrologue();
CpuFeatures::Scope scope(SSE2);
+ Isolate* isolate = masm()->isolate();
+
// Save all general purpose registers before messing with them.
const int kNumberOfRegisters = Register::kNumRegisters;
__ mov(Operand(esp, 2 * kPointerSize), ebx); // Bailout id.
__ mov(Operand(esp, 3 * kPointerSize), ecx); // Code address or 0.
__ mov(Operand(esp, 4 * kPointerSize), edx); // Fp-to-sp delta.
- __ CallCFunction(ExternalReference::new_deoptimizer_function(), 5);
+ __ CallCFunction(ExternalReference::new_deoptimizer_function(isolate), 5);
// Preserve deoptimizer object in register eax and get the input
// frame descriptor pointer.
__ push(eax);
__ PrepareCallCFunction(1, ebx);
__ mov(Operand(esp, 0 * kPointerSize), eax);
- __ CallCFunction(ExternalReference::compute_output_frames_function(), 1);
+ __ CallCFunction(
+ ExternalReference::compute_output_frames_function(isolate), 1);
__ pop(eax);
// Replace the current frame with the output frames.
PrepareForBailout(info->function(), NO_REGISTERS);
NearLabel ok;
ExternalReference stack_limit =
- ExternalReference::address_of_stack_limit();
+ ExternalReference::address_of_stack_limit(isolate());
__ cmp(esp, Operand::StaticVariable(stack_limit));
__ j(above_equal, &ok, taken);
StackCheckStub stub;
void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) {
Comment cmnt(masm_, "[ Stack check");
NearLabel ok;
- ExternalReference stack_limit = ExternalReference::address_of_stack_limit();
+ ExternalReference stack_limit =
+ ExternalReference::address_of_stack_limit(isolate());
__ cmp(esp, Operand::StaticVariable(stack_limit));
__ j(above_equal, &ok, taken);
StackCheckStub stub;
__ bind(&heapnumber_allocated);
__ PrepareCallCFunction(0, ebx);
- __ CallCFunction(ExternalReference::random_uint32_function(), 0);
+ __ CallCFunction(ExternalReference::random_uint32_function(isolate()),
+ 0);
// Convert 32 random bits in eax to 0.(32 random bits) in a double
// by computing:
// Load the key (consisting of map and symbol) from the cache and
// check for match.
- ExternalReference cache_keys
- = ExternalReference::keyed_lookup_cache_keys();
+ ExternalReference cache_keys =
+ ExternalReference::keyed_lookup_cache_keys(masm->isolate());
__ mov(edi, ecx);
__ shl(edi, kPointerSizeLog2 + 1);
__ cmp(ebx, Operand::StaticArray(edi, times_1, cache_keys));
// ebx : receiver's map
// eax : key
// ecx : lookup cache index
- ExternalReference cache_field_offsets
- = ExternalReference::keyed_lookup_cache_field_offsets();
+ ExternalReference cache_field_offsets =
+ ExternalReference::keyed_lookup_cache_field_offsets(masm->isolate());
__ mov(edi,
Operand::StaticArray(ecx, times_pointer_size, cache_field_offsets));
__ movzx_b(ecx, FieldOperand(ebx, Map::kInObjectPropertiesOffset));
__ push(ecx); // return address
// Perform tail call to the entry.
- ExternalReference ref = ExternalReference(
- IC_Utility(kKeyedLoadPropertyWithInterceptor));
+ ExternalReference ref =
+ ExternalReference(IC_Utility(kKeyedLoadPropertyWithInterceptor),
+ masm->isolate());
__ TailCallExternalReference(ref, 2, 1);
__ bind(&slow);
// Call the entry.
CEntryStub stub(1);
__ mov(eax, Immediate(2));
- __ mov(ebx, Immediate(ExternalReference(IC_Utility(id))));
+ __ mov(ebx, Immediate(ExternalReference(IC_Utility(id), masm->isolate())));
__ CallStub(&stub);
// Move result to edi and exit the internal frame.
__ push(ebx); // return address
// Perform tail call to the entry.
- ExternalReference ref = ExternalReference(IC_Utility(kLoadIC_Miss));
+ ExternalReference ref =
+ ExternalReference(IC_Utility(kLoadIC_Miss), masm->isolate());
__ TailCallExternalReference(ref, 2, 1);
}
__ push(ebx); // return address
// Perform tail call to the entry.
- ExternalReference ref = ExternalReference(IC_Utility(kKeyedLoadIC_Miss));
+ ExternalReference ref =
+ ExternalReference(IC_Utility(kKeyedLoadIC_Miss), masm->isolate());
__ TailCallExternalReference(ref, 2, 1);
}
__ push(ebx);
// Perform tail call to the entry.
- ExternalReference ref = ExternalReference(IC_Utility(kStoreIC_Miss));
+ ExternalReference ref =
+ ExternalReference(IC_Utility(kStoreIC_Miss), masm->isolate());
__ TailCallExternalReference(ref, 3, 1);
}
__ push(value);
__ push(scratch); // return address
- ExternalReference ref = ExternalReference(IC_Utility(kStoreIC_ArrayLength));
+ ExternalReference ref =
+ ExternalReference(IC_Utility(kStoreIC_ArrayLength), masm->isolate());
__ TailCallExternalReference(ref, 2, 1);
__ bind(&miss);
__ push(ebx);
// Do tail-call to runtime routine.
- ExternalReference ref = ExternalReference(IC_Utility(kKeyedStoreIC_Miss));
+ ExternalReference ref =
+ ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate());
__ TailCallExternalReference(ref, 3, 1);
}
__ PrepareCallCFunction(4, eax);
__ movdbl(Operand(esp, 0 * kDoubleSize), left);
__ movdbl(Operand(esp, 1 * kDoubleSize), right);
- __ CallCFunction(ExternalReference::double_fp_operation(Token::MOD), 4);
+ __ CallCFunction(
+ ExternalReference::double_fp_operation(Token::MOD, isolate()),
+ 4);
// Return value is in st(0) on ia32.
// Store it into the (fixed) result register.
// Perform stack overflow check if this goto needs it before jumping.
if (deferred_stack_check != NULL) {
ExternalReference stack_limit =
- ExternalReference::address_of_stack_limit();
+ ExternalReference::address_of_stack_limit(isolate());
__ cmp(esp, Operand::StaticVariable(stack_limit));
__ j(above_equal, chunk_->GetAssemblyLabel(block));
__ jmp(deferred_stack_check->entry());
LOperand* right = instr->InputAt(1);
DoubleRegister result_reg = ToDoubleRegister(instr->result());
Representation exponent_type = instr->hydrogen()->right()->representation();
+
if (exponent_type.IsDouble()) {
// It is safe to use ebx directly since the instruction is marked
// as a call.
__ PrepareCallCFunction(4, ebx);
__ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left));
__ movdbl(Operand(esp, 1 * kDoubleSize), ToDoubleRegister(right));
- __ CallCFunction(ExternalReference::power_double_double_function(), 4);
+ __ CallCFunction(ExternalReference::power_double_double_function(isolate()),
+ 4);
} else if (exponent_type.IsInteger32()) {
// It is safe to use ebx directly since the instruction is marked
// as a call.
__ PrepareCallCFunction(4, ebx);
__ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left));
__ mov(Operand(esp, 1 * kDoubleSize), ToRegister(right));
- __ CallCFunction(ExternalReference::power_double_int_function(), 4);
+ __ CallCFunction(ExternalReference::power_double_int_function(isolate()),
+ 4);
} else {
ASSERT(exponent_type.IsTagged());
CpuFeatures::Scope scope(SSE2);
__ PrepareCallCFunction(4, ebx);
__ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left));
__ movdbl(Operand(esp, 1 * kDoubleSize), result_reg);
- __ CallCFunction(ExternalReference::power_double_double_function(), 4);
+ __ CallCFunction(ExternalReference::power_double_double_function(isolate()),
+ 4);
}
// Return value is in st(0) on ia32.
void LCodeGen::DoStackCheck(LStackCheck* instr) {
// Perform stack overflow check.
NearLabel done;
- ExternalReference stack_limit = ExternalReference::address_of_stack_limit();
+ ExternalReference stack_limit =
+ ExternalReference::address_of_stack_limit(isolate());
__ cmp(esp, Operand::StaticVariable(stack_limit));
__ j(above_equal, &done);
#ifdef ENABLE_DEBUGGER_SUPPORT
void MacroAssembler::DebugBreak() {
Set(eax, Immediate(0));
- mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak)));
+ mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak, isolate())));
CEntryStub ces(1);
call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}
push(Immediate(CodeObject())); // Accessed from ExitFrame::code_slot.
// Save the frame pointer and the context in top.
- ExternalReference c_entry_fp_address(Isolate::k_c_entry_fp_address);
- ExternalReference context_address(Isolate::k_context_address);
+ ExternalReference c_entry_fp_address(Isolate::k_c_entry_fp_address,
+ isolate());
+ ExternalReference context_address(Isolate::k_context_address,
+ isolate());
mov(Operand::StaticVariable(c_entry_fp_address), ebp);
mov(Operand::StaticVariable(context_address), esi);
}
void MacroAssembler::LeaveExitFrameEpilogue() {
// Restore current context from top and clear it in debug mode.
- ExternalReference context_address(Isolate::k_context_address);
+ ExternalReference context_address(Isolate::k_context_address, isolate());
mov(esi, Operand::StaticVariable(context_address));
#ifdef DEBUG
mov(Operand::StaticVariable(context_address), Immediate(0));
#endif
// Clear the top frame.
- ExternalReference c_entry_fp_address(Isolate::k_c_entry_fp_address);
+ ExternalReference c_entry_fp_address(Isolate::k_c_entry_fp_address,
+ isolate());
mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}
push(Immediate(0)); // NULL frame pointer.
}
// Save the current handler as the next handler.
- push(Operand::StaticVariable(ExternalReference(Isolate::k_handler_address)));
+ push(Operand::StaticVariable(ExternalReference(Isolate::k_handler_address,
+ isolate())));
// Link this handler as the new current one.
- mov(Operand::StaticVariable(ExternalReference(Isolate::k_handler_address)),
+ mov(Operand::StaticVariable(ExternalReference(Isolate::k_handler_address,
+ isolate())),
esp);
}
void MacroAssembler::PopTryHandler() {
ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
- pop(Operand::StaticVariable(ExternalReference(Isolate::k_handler_address)));
+ pop(Operand::StaticVariable(ExternalReference(Isolate::k_handler_address,
+ isolate())));
add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize));
}
}
// Drop the sp to the top of the handler.
- ExternalReference handler_address(Isolate::k_handler_address);
+ ExternalReference handler_address(Isolate::k_handler_address,
+ isolate());
mov(esp, Operand::StaticVariable(handler_address));
// Restore next handler and frame pointer, discard handler state.
}
// Drop sp to the top stack handler.
- ExternalReference handler_address(Isolate::k_handler_address);
+ ExternalReference handler_address(Isolate::k_handler_address,
+ isolate());
mov(esp, Operand::StaticVariable(handler_address));
// Unwind the handlers until the ENTRY handler is found.
if (type == OUT_OF_MEMORY) {
// Set external caught exception to false.
ExternalReference external_caught(
- Isolate::k_external_caught_exception_address);
+ Isolate::k_external_caught_exception_address,
+ isolate());
mov(eax, false);
mov(Operand::StaticVariable(external_caught), eax);
// Set pending exception and eax to out of memory exception.
- ExternalReference pending_exception(Isolate::k_pending_exception_address);
+ ExternalReference pending_exception(Isolate::k_pending_exception_address,
+ isolate());
mov(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException()));
mov(Operand::StaticVariable(pending_exception), eax);
}
Register scratch,
AllocationFlags flags) {
ExternalReference new_space_allocation_top =
- ExternalReference::new_space_allocation_top_address();
+ ExternalReference::new_space_allocation_top_address(isolate());
// Just return if allocation top is already known.
if ((flags & RESULT_CONTAINS_TOP) != 0) {
}
ExternalReference new_space_allocation_top =
- ExternalReference::new_space_allocation_top_address();
+ ExternalReference::new_space_allocation_top_address(isolate());
// Update new top. Use scratch if available.
if (scratch.is(no_reg)) {
// Calculate new top and bail out if new space is exhausted.
ExternalReference new_space_allocation_limit =
- ExternalReference::new_space_allocation_limit_address();
+ ExternalReference::new_space_allocation_limit_address(isolate());
if (!top_reg.is(result)) {
mov(top_reg, result);
// Calculate new top and bail out if new space is exhausted.
ExternalReference new_space_allocation_limit =
- ExternalReference::new_space_allocation_limit_address();
+ ExternalReference::new_space_allocation_limit_address(isolate());
// We assume that element_count*element_size + header_size does not
// overflow.
// Calculate new top and bail out if new space is exhausted.
ExternalReference new_space_allocation_limit =
- ExternalReference::new_space_allocation_limit_address();
+ ExternalReference::new_space_allocation_limit_address(isolate());
if (!object_size.is(result_end)) {
mov(result_end, object_size);
}
void MacroAssembler::UndoAllocationInNewSpace(Register object) {
ExternalReference new_space_allocation_top =
- ExternalReference::new_space_allocation_top_address();
+ ExternalReference::new_space_allocation_top_address(isolate());
// Make sure the object has no tag before resetting top.
and_(Operand(object), Immediate(~kHeapObjectTagMask));
void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
const Runtime::Function* function = Runtime::FunctionForId(id);
Set(eax, Immediate(function->nargs));
- mov(ebx, Immediate(ExternalReference(function)));
+ mov(ebx, Immediate(ExternalReference(function, isolate())));
CEntryStub ces(1);
ces.SaveDoubles();
CallStub(&ces);
// should remove this need and make the runtime routine entry code
// smarter.
Set(eax, Immediate(num_arguments));
- mov(ebx, Immediate(ExternalReference(f)));
+ mov(ebx, Immediate(ExternalReference(f, isolate())));
CEntryStub ces(1);
CallStub(&ces);
}
// should remove this need and make the runtime routine entry code
// smarter.
Set(eax, Immediate(num_arguments));
- mov(ebx, Immediate(ExternalReference(f)));
+ mov(ebx, Immediate(ExternalReference(f, isolate())));
CEntryStub ces(1);
return TryCallStub(&ces);
}
void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
int num_arguments,
int result_size) {
- TailCallExternalReference(ExternalReference(fid), num_arguments, result_size);
+ TailCallExternalReference(ExternalReference(fid, isolate()),
+ num_arguments,
+ result_size);
}
int num_arguments,
int result_size) {
return TryTailCallExternalReference(
- ExternalReference(fid), num_arguments, result_size);
+ ExternalReference(fid, isolate()), num_arguments, result_size);
}
// Check if the function scheduled an exception.
ExternalReference scheduled_exception_address =
- ExternalReference::scheduled_exception_address();
+ ExternalReference::scheduled_exception_address(isolate());
cmp(Operand::StaticVariable(scheduled_exception_address),
- Immediate(FACTORY->the_hole_value()));
+ Immediate(isolate()->factory()->the_hole_value()));
j(not_equal, &promote_scheduled_exception, not_taken);
LeaveApiExitFrame();
ret(stack_space * kPointerSize);
jmp(&prologue);
// HandleScope limit has changed. Delete allocated extensions.
+ ExternalReference delete_extensions =
+ ExternalReference::delete_handle_scope_extensions(isolate());
bind(&delete_allocated_handles);
mov(Operand::StaticVariable(limit_address), edi);
mov(edi, eax);
mov(Operand(esp, 0), Immediate(ExternalReference::isolate_address()));
- mov(eax, Immediate(ExternalReference::delete_handle_scope_extensions()));
+ mov(eax, Immediate(delete_extensions));
call(Operand(eax));
mov(eax, edi);
jmp(&leave_exit_frame);
// The mask isn't really an address. We load it as an external reference in
// case the size of the new space is different between the snapshot maker
// and the running system.
- and_(Operand(scratch), Immediate(ExternalReference::new_space_mask()));
- cmp(Operand(scratch), Immediate(ExternalReference::new_space_start()));
+ and_(Operand(scratch),
+ Immediate(ExternalReference::new_space_mask(isolate())));
+ cmp(Operand(scratch),
+ Immediate(ExternalReference::new_space_start(isolate())));
j(cc, branch);
} else {
int32_t new_space_start = reinterpret_cast<int32_t>(
- ExternalReference::new_space_start().address());
+ ExternalReference::new_space_start(isolate()).address());
lea(scratch, Operand(object, -new_space_start));
- and_(scratch, HEAP->NewSpaceMask());
+ and_(scratch, isolate()->heap()->NewSpaceMask());
j(cc, branch);
}
}
__ mov(Operand(esp, 0 * kPointerSize), edx);
ExternalReference compare =
- ExternalReference::re_case_insensitive_compare_uc16();
+ ExternalReference::re_case_insensitive_compare_uc16(masm_->isolate());
__ CallCFunction(compare, argument_count);
// Pop original values before reacting on result value.
__ pop(ebx);
Label stack_ok;
ExternalReference stack_limit =
- ExternalReference::address_of_stack_limit();
+ ExternalReference::address_of_stack_limit(masm_->isolate());
__ mov(ecx, esp);
__ sub(ecx, Operand::StaticVariable(stack_limit));
// Handle it if the stack pointer is already below the stack limit.
__ lea(eax, Operand(ebp, kStackHighEnd));
__ mov(Operand(esp, 1 * kPointerSize), eax);
__ mov(Operand(esp, 0 * kPointerSize), backtrack_stackpointer());
- ExternalReference grow_stack = ExternalReference::re_grow_stack();
+ ExternalReference grow_stack =
+ ExternalReference::re_grow_stack(masm_->isolate());
__ CallCFunction(grow_stack, num_arguments);
// If return NULL, we have failed to grow the stack, and
// must exit with a stack-overflow exception.
CodeDesc code_desc;
masm_->GetCode(&code_desc);
- Isolate* isolate = ISOLATE;
Handle<Code> code =
- isolate->factory()->NewCode(code_desc,
- Code::ComputeFlags(Code::REGEXP),
- masm_->CodeObject());
- PROFILE(isolate, RegExpCodeCreateEvent(*code, *source));
+ masm_->isolate()->factory()->NewCode(code_desc,
+ Code::ComputeFlags(Code::REGEXP),
+ masm_->CodeObject());
+ PROFILE(masm_->isolate(), RegExpCodeCreateEvent(*code, *source));
return Handle<Object>::cast(code);
}
__ lea(eax, Operand(esp, -kPointerSize));
__ mov(Operand(esp, 0 * kPointerSize), eax);
ExternalReference check_stack_guard =
- ExternalReference::re_check_stack_guard_state();
+ ExternalReference::re_check_stack_guard_state(masm_->isolate());
__ CallCFunction(check_stack_guard, num_arguments);
}
// Check for preemption.
Label no_preempt;
ExternalReference stack_limit =
- ExternalReference::address_of_stack_limit();
+ ExternalReference::address_of_stack_limit(masm_->isolate());
__ cmp(esp, Operand::StaticVariable(stack_limit));
__ j(above, &no_preempt, taken);
void RegExpMacroAssemblerIA32::CheckStackLimit() {
Label no_stack_overflow;
ExternalReference stack_limit =
- ExternalReference::address_of_regexp_stack_limit();
+ ExternalReference::address_of_regexp_stack_limit(masm_->isolate());
__ cmp(backtrack_stackpointer(), Operand::StaticVariable(stack_limit));
__ j(above, &no_stack_overflow);
JSObject* holder_obj) {
PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
__ CallExternalReference(
- ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly)),
- 5);
+ ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly),
+ masm->isolate()),
+ 5);
}
interceptor_holder);
__ CallExternalReference(
- ExternalReference(
- IC_Utility(IC::kLoadPropertyWithInterceptorForCall)),
- 5);
+ ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForCall),
+ masm->isolate()),
+ 5);
// Restore the name_ register.
__ pop(name_);
__ pop(receiver); // Restore the holder.
__ LeaveInternalFrame();
- __ cmp(eax, FACTORY->no_interceptor_result_sentinel());
+ __ cmp(eax, masm->isolate()->factory()->no_interceptor_result_sentinel());
__ j(not_equal, interceptor_succeeded);
}
__ push(eax);
__ push(scratch);
__ TailCallExternalReference(
- ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage)), 3, 1);
+ ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
+ masm->isolate()),
+ 3,
+ 1);
return;
}
__ push(scratch2); // restore return address
ExternalReference ref =
- ExternalReference(IC_Utility(IC::kLoadCallbackProperty));
+ ExternalReference(IC_Utility(IC::kLoadCallbackProperty),
+ masm()->isolate());
__ TailCallExternalReference(ref, 5, 1);
}
} else { // !compile_followup_inline
name_reg, interceptor_holder);
__ push(scratch2); // restore old return address
- ExternalReference ref = ExternalReference(
- IC_Utility(IC::kLoadPropertyWithInterceptorForLoad));
+ ExternalReference ref =
+ ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad),
+ masm()->isolate());
__ TailCallExternalReference(ref, 5, 1);
}
}
__ jmp(&call_builtin);
}
+ Isolate* isolate = masm()->isolate();
ExternalReference new_space_allocation_top =
- ExternalReference::new_space_allocation_top_address();
+ ExternalReference::new_space_allocation_top_address(isolate);
ExternalReference new_space_allocation_limit =
- ExternalReference::new_space_allocation_limit_address();
+ ExternalReference::new_space_allocation_limit_address(isolate);
const int kAllocationDelta = 4;
// Load top.
}
__ bind(&call_builtin);
- __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush),
- argc + 1,
- 1);
+ __ TailCallExternalReference(
+ ExternalReference(Builtins::c_ArrayPush, masm()->isolate()),
+ argc + 1,
+ 1);
}
__ bind(&miss);
__ ret((argc + 1) * kPointerSize);
__ bind(&call_builtin);
- __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPop),
- argc + 1,
- 1);
+ __ TailCallExternalReference(
+ ExternalReference(Builtins::c_ArrayPop, masm()->isolate()),
+ argc + 1,
+ 1);
__ bind(&miss);
MaybeObject* maybe_result = GenerateMissBranch();
// -----------------------------------
// If object is not a string, bail out to regular call.
- if (!object->IsString() || cell != NULL) return HEAP->undefined_value();
+ if (!object->IsString() || cell != NULL) {
+ return masm()->isolate()->heap()->undefined_value();
+ }
const int argc = arguments().immediate();
__ push(ebx); // restore return address
// Do tail-call to the runtime system.
+ Isolate* isolate = masm()->isolate();
ExternalReference store_callback_property =
- ExternalReference(IC_Utility(IC::kStoreCallbackProperty));
+ ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate);
__ TailCallExternalReference(store_callback_property, 4, 1);
// Handle store cache miss.
__ bind(&miss);
- Handle<Code> ic(Isolate::Current()->builtins()->builtin(
- Builtins::StoreIC_Miss));
+ Handle<Code> ic(isolate->builtins()->builtin(Builtins::StoreIC_Miss));
__ jmp(ic, RelocInfo::CODE_TARGET);
// Return the generated code.
__ push(ebx); // restore return address
// Do tail-call to the runtime system.
+ Isolate* isolate = masm()->isolate();
ExternalReference store_ic_property =
- ExternalReference(IC_Utility(IC::kStoreInterceptorProperty));
+ ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate);
__ TailCallExternalReference(store_ic_property, 4, 1);
// Handle store cache miss.
__ bind(&miss);
- Handle<Code> ic(Isolate::Current()->builtins()->builtin(
- Builtins::StoreIC_Miss));
+ Handle<Code> ic(isolate->builtins()->builtin(Builtins::StoreIC_Miss));
__ jmp(ic, RelocInfo::CODE_TARGET);
// Return the generated code.
// allocated and hence used by the pre-parser.
class TemporaryScope BASE_EMBEDDED {
public:
- explicit TemporaryScope(TemporaryScope** variable);
+ TemporaryScope(TemporaryScope** variable, Isolate* isolate);
~TemporaryScope();
int NextMaterializedLiteralIndex() {
};
-TemporaryScope::TemporaryScope(TemporaryScope** variable)
+TemporaryScope::TemporaryScope(TemporaryScope** variable, Isolate* isolate)
: materialized_literal_count_(0),
expected_property_count_(0),
only_simple_this_property_assignments_(false),
- this_property_assignments_(
- Isolate::Current()->factory()->empty_fixed_array()),
+ this_property_assignments_(isolate->factory()->empty_fixed_array()),
loop_count_(0),
variable_(variable),
parent_(*variable) {
{ Scope* scope = NewScope(top_scope_, type, inside_with());
LexicalScope lexical_scope(&this->top_scope_, &this->with_nesting_level_,
scope);
- TemporaryScope temp_scope(&this->temp_scope_);
+ TemporaryScope temp_scope(&this->temp_scope_, isolate());
if (strict_mode == kStrictMode) {
top_scope_->EnableStrictMode();
}
}
LexicalScope lexical_scope(&this->top_scope_, &this->with_nesting_level_,
scope);
- TemporaryScope temp_scope(&this->temp_scope_);
+ TemporaryScope temp_scope(&this->temp_scope_, isolate());
if (shared_info->strict_mode()) {
top_scope_->EnableStrictMode();
// function contains only assignments of this type.
class ThisNamedPropertyAssigmentFinder : public ParserFinder {
public:
- ThisNamedPropertyAssigmentFinder()
- : only_simple_this_property_assignments_(true),
+ explicit ThisNamedPropertyAssigmentFinder(Isolate* isolate)
+ : isolate_(isolate),
+ only_simple_this_property_assignments_(true),
names_(NULL),
assigned_arguments_(NULL),
assigned_constants_(NULL) {}
// form this.x = y;
Handle<FixedArray> GetThisPropertyAssignments() {
if (names_ == NULL) {
- return FACTORY->empty_fixed_array();
+ return isolate_->factory()->empty_fixed_array();
}
ASSERT(names_ != NULL);
ASSERT(assigned_arguments_ != NULL);
ASSERT_EQ(names_->length(), assigned_arguments_->length());
ASSERT_EQ(names_->length(), assigned_constants_->length());
Handle<FixedArray> assignments =
- FACTORY->NewFixedArray(names_->length() * 3);
+ isolate_->factory()->NewFixedArray(names_->length() * 3);
for (int i = 0; i < names_->length(); i++) {
assignments->set(i * 3, *names_->at(i));
assignments->set(i * 3 + 1, Smi::FromInt(assigned_arguments_->at(i)));
uint32_t dummy;
if (literal != NULL &&
literal->handle()->IsString() &&
- !String::cast(*(literal->handle()))->Equals(HEAP->Proto_symbol()) &&
+ !String::cast(*(literal->handle()))->Equals(
+ isolate_->heap()->Proto_symbol()) &&
!String::cast(*(literal->handle()))->AsArrayIndex(&dummy)) {
Handle<String> key = Handle<String>::cast(literal->handle());
EnsureAllocation();
names_->Add(name);
assigned_arguments_->Add(index);
- assigned_constants_->Add(FACTORY->undefined_value());
+ assigned_constants_->Add(isolate_->factory()->undefined_value());
}
void AssignmentFromConstant(Handle<String> name, Handle<Object> value) {
}
}
+ Isolate* isolate_;
bool only_simple_this_property_assignments_;
ZoneStringList* names_;
ZoneList<int>* assigned_arguments_;
ASSERT(processor != NULL);
InitializationBlockFinder block_finder;
- ThisNamedPropertyAssigmentFinder this_property_assignment_finder;
+ ThisNamedPropertyAssigmentFinder this_property_assignment_finder(isolate());
bool directive_prologue = true; // Parsing directive prologue.
while (peek() != end_token) {
return result;
}
-static bool IsEvalOrArguments(Handle<String> string) {
- return string.is_identical_to(FACTORY->eval_symbol()) ||
- string.is_identical_to(FACTORY->arguments_symbol());
+
+bool Parser::IsEvalOrArguments(Handle<String> string) {
+ return string.is_identical_to(isolate()->factory()->eval_symbol()) ||
+ string.is_identical_to(isolate()->factory()->arguments_symbol());
}
+
// If the variable declaration declares exactly one non-const
// variable, then *var is set to that variable. In all other cases,
// *var is untouched; in particular, it is the caller's responsibility
NewScope(top_scope_, Scope::FUNCTION_SCOPE, inside_with());
LexicalScope lexical_scope(&this->top_scope_, &this->with_nesting_level_,
scope);
- TemporaryScope temp_scope(&this->temp_scope_);
+ TemporaryScope temp_scope(&this->temp_scope_, isolate());
top_scope_->SetScopeName(name);
// FormalParameterList ::
Mode mode() const { return mode_; }
ScriptDataImpl* pre_data() const { return pre_data_; }
+ // Check if the given string is 'eval' or 'arguments'.
+ bool IsEvalOrArguments(Handle<String> string);
+
// All ParseXXX functions take as the last argument an *ok parameter
// which is set to false if parsing failed; it is unchanged otherwise.
// By making the 'exception handling' explicit, we are forced to check
void PopulateTable(Isolate* isolate);
// For a few types of references, we can get their address from their id.
- void AddFromId(TypeCode type, uint16_t id, const char* name);
+ void AddFromId(TypeCode type,
+ uint16_t id,
+ const char* name,
+ Isolate* isolate);
// For other types of references, the caller will figure out the address.
void Add(Address address, TypeCode type, uint16_t id, const char* name);
void ExternalReferenceTable::AddFromId(TypeCode type,
uint16_t id,
- const char* name) {
+ const char* name,
+ Isolate* isolate) {
Address address;
switch (type) {
case C_BUILTIN: {
- ExternalReference ref(static_cast<Builtins::CFunctionId>(id));
+ ExternalReference ref(static_cast<Builtins::CFunctionId>(id), isolate);
address = ref.address();
break;
}
case BUILTIN: {
- ExternalReference ref(static_cast<Builtins::Name>(id));
+ ExternalReference ref(static_cast<Builtins::Name>(id), isolate);
address = ref.address();
break;
}
case RUNTIME_FUNCTION: {
- ExternalReference ref(static_cast<Runtime::FunctionId>(id));
+ ExternalReference ref(static_cast<Runtime::FunctionId>(id), isolate);
address = ref.address();
break;
}
case IC_UTILITY: {
- ExternalReference ref(IC_Utility(static_cast<IC::UtilityId>(id)));
+ ExternalReference ref(IC_Utility(static_cast<IC::UtilityId>(id)),
+ isolate);
address = ref.address();
break;
}
}; // end of ref_table[].
for (size_t i = 0; i < ARRAY_SIZE(ref_table); ++i) {
- AddFromId(ref_table[i].type, ref_table[i].id, ref_table[i].name);
+ AddFromId(ref_table[i].type,
+ ref_table[i].id,
+ ref_table[i].name,
+ isolate);
}
#ifdef ENABLE_DEBUGGER_SUPPORT
"StubCache::secondary_->value");
// Runtime entries
- Add(ExternalReference::perform_gc_function().address(),
+ Add(ExternalReference::perform_gc_function(isolate).address(),
RUNTIME_ENTRY,
1,
"Runtime::PerformGC");
- Add(ExternalReference::fill_heap_number_with_random_function().address(),
+ Add(ExternalReference::fill_heap_number_with_random_function(
+ isolate).address(),
RUNTIME_ENTRY,
2,
"V8::FillHeapNumberWithRandom");
- Add(ExternalReference::random_uint32_function().address(),
+ Add(ExternalReference::random_uint32_function(isolate).address(),
RUNTIME_ENTRY,
3,
"V8::Random");
- Add(ExternalReference::delete_handle_scope_extensions().address(),
+ Add(ExternalReference::delete_handle_scope_extensions(isolate).address(),
RUNTIME_ENTRY,
4,
"HandleScope::DeleteExtensions");
// Miscellaneous
- Add(ExternalReference::the_hole_value_location().address(),
+ Add(ExternalReference::the_hole_value_location(isolate).address(),
UNCLASSIFIED,
2,
"Factory::the_hole_value().location()");
- Add(ExternalReference::roots_address().address(),
+ Add(ExternalReference::roots_address(isolate).address(),
UNCLASSIFIED,
3,
"Heap::roots_address()");
- Add(ExternalReference::address_of_stack_limit().address(),
+ Add(ExternalReference::address_of_stack_limit(isolate).address(),
UNCLASSIFIED,
4,
"StackGuard::address_of_jslimit()");
- Add(ExternalReference::address_of_real_stack_limit().address(),
+ Add(ExternalReference::address_of_real_stack_limit(isolate).address(),
UNCLASSIFIED,
5,
"StackGuard::address_of_real_jslimit()");
#ifndef V8_INTERPRETED_REGEXP
- Add(ExternalReference::address_of_regexp_stack_limit().address(),
+ Add(ExternalReference::address_of_regexp_stack_limit(isolate).address(),
UNCLASSIFIED,
6,
"RegExpStack::limit_address()");
- Add(ExternalReference::address_of_regexp_stack_memory_address().address(),
+ Add(ExternalReference::address_of_regexp_stack_memory_address(
+ isolate).address(),
UNCLASSIFIED,
7,
"RegExpStack::memory_address()");
- Add(ExternalReference::address_of_regexp_stack_memory_size().address(),
+ Add(ExternalReference::address_of_regexp_stack_memory_size(isolate).address(),
UNCLASSIFIED,
8,
"RegExpStack::memory_size()");
- Add(ExternalReference::address_of_static_offsets_vector().address(),
+ Add(ExternalReference::address_of_static_offsets_vector(isolate).address(),
UNCLASSIFIED,
9,
"OffsetsVector::static_offsets_vector");
#endif // V8_INTERPRETED_REGEXP
- Add(ExternalReference::new_space_start().address(),
+ Add(ExternalReference::new_space_start(isolate).address(),
UNCLASSIFIED,
10,
"Heap::NewSpaceStart()");
- Add(ExternalReference::new_space_mask().address(),
+ Add(ExternalReference::new_space_mask(isolate).address(),
UNCLASSIFIED,
11,
"Heap::NewSpaceMask()");
- Add(ExternalReference::heap_always_allocate_scope_depth().address(),
+ Add(ExternalReference::heap_always_allocate_scope_depth(isolate).address(),
UNCLASSIFIED,
12,
"Heap::always_allocate_scope_depth()");
- Add(ExternalReference::new_space_allocation_limit_address().address(),
+ Add(ExternalReference::new_space_allocation_limit_address(isolate).address(),
UNCLASSIFIED,
13,
"Heap::NewSpaceAllocationLimitAddress()");
- Add(ExternalReference::new_space_allocation_top_address().address(),
+ Add(ExternalReference::new_space_allocation_top_address(isolate).address(),
UNCLASSIFIED,
14,
"Heap::NewSpaceAllocationTopAddress()");
#ifdef ENABLE_DEBUGGER_SUPPORT
- Add(ExternalReference::debug_break().address(),
+ Add(ExternalReference::debug_break(isolate).address(),
UNCLASSIFIED,
15,
"Debug::Break()");
- Add(ExternalReference::debug_step_in_fp_address().address(),
+ Add(ExternalReference::debug_step_in_fp_address(isolate).address(),
UNCLASSIFIED,
16,
"Debug::step_in_fp_addr()");
#endif
- Add(ExternalReference::double_fp_operation(Token::ADD).address(),
+ Add(ExternalReference::double_fp_operation(Token::ADD, isolate).address(),
UNCLASSIFIED,
17,
"add_two_doubles");
- Add(ExternalReference::double_fp_operation(Token::SUB).address(),
+ Add(ExternalReference::double_fp_operation(Token::SUB, isolate).address(),
UNCLASSIFIED,
18,
"sub_two_doubles");
- Add(ExternalReference::double_fp_operation(Token::MUL).address(),
+ Add(ExternalReference::double_fp_operation(Token::MUL, isolate).address(),
UNCLASSIFIED,
19,
"mul_two_doubles");
- Add(ExternalReference::double_fp_operation(Token::DIV).address(),
+ Add(ExternalReference::double_fp_operation(Token::DIV, isolate).address(),
UNCLASSIFIED,
20,
"div_two_doubles");
- Add(ExternalReference::double_fp_operation(Token::MOD).address(),
+ Add(ExternalReference::double_fp_operation(Token::MOD, isolate).address(),
UNCLASSIFIED,
21,
"mod_two_doubles");
- Add(ExternalReference::compare_doubles().address(),
+ Add(ExternalReference::compare_doubles(isolate).address(),
UNCLASSIFIED,
22,
"compare_doubles");
#ifndef V8_INTERPRETED_REGEXP
- Add(ExternalReference::re_case_insensitive_compare_uc16().address(),
+ Add(ExternalReference::re_case_insensitive_compare_uc16(isolate).address(),
UNCLASSIFIED,
23,
"NativeRegExpMacroAssembler::CaseInsensitiveCompareUC16()");
- Add(ExternalReference::re_check_stack_guard_state().address(),
+ Add(ExternalReference::re_check_stack_guard_state(isolate).address(),
UNCLASSIFIED,
24,
"RegExpMacroAssembler*::CheckStackGuardState()");
- Add(ExternalReference::re_grow_stack().address(),
+ Add(ExternalReference::re_grow_stack(isolate).address(),
UNCLASSIFIED,
25,
"NativeRegExpMacroAssembler::GrowStack()");
"NativeRegExpMacroAssembler::word_character_map");
#endif // V8_INTERPRETED_REGEXP
// Keyed lookup cache.
- Add(ExternalReference::keyed_lookup_cache_keys().address(),
+ Add(ExternalReference::keyed_lookup_cache_keys(isolate).address(),
UNCLASSIFIED,
27,
"KeyedLookupCache::keys()");
- Add(ExternalReference::keyed_lookup_cache_field_offsets().address(),
+ Add(ExternalReference::keyed_lookup_cache_field_offsets(isolate).address(),
UNCLASSIFIED,
28,
"KeyedLookupCache::field_offsets()");
- Add(ExternalReference::transcendental_cache_array_address().address(),
+ Add(ExternalReference::transcendental_cache_array_address(isolate).address(),
UNCLASSIFIED,
29,
"TranscendentalCache::caches()");
UNCLASSIFIED,
32,
"HandleScope::level");
- Add(ExternalReference::new_deoptimizer_function().address(),
+ Add(ExternalReference::new_deoptimizer_function(isolate).address(),
UNCLASSIFIED,
33,
"Deoptimizer::New()");
- Add(ExternalReference::compute_output_frames_function().address(),
+ Add(ExternalReference::compute_output_frames_function(isolate).address(),
UNCLASSIFIED,
34,
"Deoptimizer::ComputeOutputFrames()");
UNCLASSIFIED,
39,
"LDoubleConstant::negative_infinity");
- Add(ExternalReference::power_double_double_function().address(),
+ Add(ExternalReference::power_double_double_function(isolate).address(),
UNCLASSIFIED,
40,
"power_double_double_function");
- Add(ExternalReference::power_double_int_function().address(),
+ Add(ExternalReference::power_double_int_function(isolate).address(),
UNCLASSIFIED,
41,
"power_double_int_function");
- Add(ExternalReference::arguments_marker_location().address(),
+ Add(ExternalReference::arguments_marker_location(isolate).address(),
UNCLASSIFIED,
42,
"Factory::arguments_marker().location()");
#endif
Assembler::Assembler(void* buffer, int buffer_size)
- : code_targets_(100),
+ : AssemblerBase(Isolate::Current()),
+ code_targets_(100),
positions_recorder_(this),
emit_debug_code_(FLAG_debug_code) {
- Isolate* isolate = Isolate::Current();
if (buffer == NULL) {
// Do our own buffer management.
if (buffer_size <= kMinimalBufferSize) {
buffer_size = kMinimalBufferSize;
- if (isolate->assembler_spare_buffer() != NULL) {
- buffer = isolate->assembler_spare_buffer();
- isolate->set_assembler_spare_buffer(NULL);
+ if (isolate()->assembler_spare_buffer() != NULL) {
+ buffer = isolate()->assembler_spare_buffer();
+ isolate()->set_assembler_spare_buffer(NULL);
}
}
if (buffer == NULL) {
Assembler::~Assembler() {
- Isolate* isolate = Isolate::Current();
if (own_buffer_) {
- if (isolate->assembler_spare_buffer() == NULL &&
+ if (isolate()->assembler_spare_buffer() == NULL &&
buffer_size_ == kMinimalBufferSize) {
- isolate->set_assembler_spare_buffer(buffer_);
+ isolate()->set_assembler_spare_buffer(buffer_);
} else {
DeleteArray(buffer_);
}
void Assembler::GrowBuffer() {
- Isolate* isolate = Isolate::Current();
ASSERT(buffer_overflow());
if (!own_buffer_) FATAL("external code buffer is too small");
reloc_info_writer.pos(), desc.reloc_size);
// Switch buffers.
- if (isolate->assembler_spare_buffer() == NULL &&
+ if (isolate()->assembler_spare_buffer() == NULL &&
buffer_size_ == kMinimalBufferSize) {
- isolate->set_assembler_spare_buffer(buffer_);
+ isolate()->set_assembler_spare_buffer(buffer_);
} else {
DeleteArray(buffer_);
}
void Assembler::cpuid() {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(CPUID));
+ ASSERT(isolate()->cpu_features()->IsEnabled(CPUID));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
emit(0x0F);
void Assembler::fisttp_s(const Operand& adr) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE3));
+ ASSERT(isolate()->cpu_features()->IsEnabled(SSE3));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
emit_optional_rex_32(adr);
void Assembler::fisttp_d(const Operand& adr) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE3));
+ ASSERT(isolate()->cpu_features()->IsEnabled(SSE3));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
emit_optional_rex_32(adr);
void Assembler::movdqa(const Operand& dst, XMMRegister src) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
+ ASSERT(isolate()->cpu_features()->IsEnabled(SSE2));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
emit(0x66);
void Assembler::movdqa(XMMRegister dst, const Operand& src) {
- ASSERT(Isolate::Current()->cpu_features()->IsEnabled(SSE2));
+ ASSERT(isolate()->cpu_features()->IsEnabled(SSE2));
EnsureSpace ensure_space(this);
last_pc_ = pc_;
emit(0x66);
};
-class Assembler : public Malloced {
+class Assembler : public AssemblerBase {
private:
// We check before assembling an instruction that there is sufficient
// space to write an instruction and its relocation information.
// JumpToExternalReference expects rax to contain the number of arguments
// including the receiver and the extra arguments.
__ addq(rax, Immediate(num_extra_args + 1));
- __ JumpToExternalReference(ExternalReference(id), 1);
+ __ JumpToExternalReference(ExternalReference(id, masm->isolate()), 1);
}
// Set expected number of arguments to zero (not changing rax).
__ movq(rbx, Immediate(0));
__ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
- __ Jump(Handle<Code>(Isolate::Current()->builtins()->builtin(
+ __ Jump(Handle<Code>(masm->isolate()->builtins()->builtin(
ArgumentsAdaptorTrampoline)), RelocInfo::CODE_TARGET);
}
#ifdef ENABLE_DEBUGGER_SUPPORT
ExternalReference debug_step_in_fp =
- ExternalReference::debug_step_in_fp_address();
+ ExternalReference::debug_step_in_fp_address(masm->isolate());
__ movq(kScratchRegister, debug_step_in_fp);
__ cmpq(Operand(kScratchRegister, 0), Immediate(0));
__ j(not_equal, &rt_call);
// Call the function.
if (is_api_function) {
__ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
- Handle<Code> code = Handle<Code>(Isolate::Current()->builtins()->builtin(
+ Handle<Code> code = Handle<Code>(masm->isolate()->builtins()->builtin(
Builtins::HandleApiCallConstruct));
ParameterCount expected(0);
__ InvokeCode(code, expected, expected,
// Invoke the code.
if (is_construct) {
// Expects rdi to hold function pointer.
- __ Call(Handle<Code>(Isolate::Current()->builtins()->builtin(
+ __ Call(Handle<Code>(masm->isolate()->builtins()->builtin(
Builtins::JSConstructCall)), RelocInfo::CODE_TARGET);
} else {
ParameterCount actual(rax);
__ j(not_zero, &function);
__ Set(rbx, 0);
__ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
- __ Jump(Handle<Code>(Isolate::Current()->builtins()->builtin(
+ __ Jump(Handle<Code>(masm->isolate()->builtins()->builtin(
ArgumentsAdaptorTrampoline)), RelocInfo::CODE_TARGET);
__ bind(&function);
}
__ movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
__ cmpq(rax, rbx);
__ j(not_equal,
- Handle<Code>(Isolate::Current()->builtins()->builtin(
+ Handle<Code>(masm->isolate()->builtins()->builtin(
ArgumentsAdaptorTrampoline)), RelocInfo::CODE_TARGET);
ParameterCount expected(0);
__ movq(rdx, Operand(rbp, kArgumentsOffset)); // load arguments
// Use inline caching to speed up access to arguments.
- Handle<Code> ic(Isolate::Current()->builtins()->builtin(
+ Handle<Code> ic(masm->isolate()->builtins()->builtin(
Builtins::KeyedLoadIC_Initialize));
__ Call(ic, RelocInfo::CODE_TARGET);
// It is important that we do not have a test instruction after the
// Jump to the generic array code in case the specialized code cannot handle
// the construction.
__ bind(&generic_array_code);
- Code* code = Isolate::Current()->builtins()->builtin(
- Builtins::ArrayCodeGeneric);
+ Code* code =
+ masm->isolate()->builtins()->builtin(Builtins::ArrayCodeGeneric);
Handle<Code> array_code(code);
__ Jump(array_code, RelocInfo::CODE_TARGET);
}
// Jump to the generic construct code in case the specialized code cannot
// handle the construction.
__ bind(&generic_constructor);
- Code* code = Isolate::Current()->builtins()->builtin(
- Builtins::JSConstructStubGeneric);
+ Code* code =
+ masm->isolate()->builtins()->builtin(Builtins::JSConstructStubGeneric);
Handle<Code> generic_construct_stub(code);
__ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
}
// Perform patching to an appropriate fast case and return the result.
__ TailCallExternalReference(
- ExternalReference(IC_Utility(IC::kBinaryOp_Patch)),
+ ExternalReference(IC_Utility(IC::kBinaryOp_Patch), masm->isolate()),
5,
1);
}
// Patch the caller to an appropriate specialized stub and return the
// operation result to the caller of the stub.
__ TailCallExternalReference(
- ExternalReference(IC_Utility(IC::kTypeRecordingBinaryOp_Patch)),
+ ExternalReference(IC_Utility(IC::kTypeRecordingBinaryOp_Patch),
+ masm->isolate()),
5,
1);
}
// ST[0] == double value.
// rbx = bits of double value.
// rcx = TranscendentalCache::hash(double value).
- __ movq(rax, ExternalReference::transcendental_cache_array_address());
- // rax points to cache array.
- __ movq(rax, Operand(rax, type_ * sizeof(
- Isolate::Current()->transcendental_cache()->caches_[0])));
+ ExternalReference cache_array =
+ ExternalReference::transcendental_cache_array_address(masm->isolate());
+ __ movq(rax, cache_array);
+ int cache_array_index =
+ type_ * sizeof(Isolate::Current()->transcendental_cache()->caches_[0]);
+ __ movq(rax, Operand(rax, cache_array_index));
// rax points to the cache for the type type_.
// If NULL, the cache hasn't been initialized yet, so go through runtime.
__ testq(rax, rax);
__ bind(&runtime_call_clear_stack);
__ fstp(0);
__ bind(&runtime_call);
- __ TailCallExternalReference(ExternalReference(RuntimeFunction()), 1, 1);
+ __ TailCallExternalReference(
+ ExternalReference(RuntimeFunction(), masm->isolate()), 1, 1);
} else { // UNTAGGED.
__ bind(&runtime_call_clear_stack);
__ bind(&runtime_call);
Label runtime;
// Ensure that a RegExp stack is allocated.
+ Isolate* isolate = masm->isolate();
ExternalReference address_of_regexp_stack_memory_address =
- ExternalReference::address_of_regexp_stack_memory_address();
+ ExternalReference::address_of_regexp_stack_memory_address(isolate);
ExternalReference address_of_regexp_stack_memory_size =
- ExternalReference::address_of_regexp_stack_memory_size();
+ ExternalReference::address_of_regexp_stack_memory_size(isolate);
__ movq(kScratchRegister, address_of_regexp_stack_memory_size);
__ movq(kScratchRegister, Operand(kScratchRegister, 0));
__ testq(kScratchRegister, kScratchRegister);
#endif
// Argument 5: static offsets vector buffer.
- __ movq(r8, ExternalReference::address_of_static_offsets_vector());
+ __ movq(r8, ExternalReference::address_of_static_offsets_vector(isolate));
// Argument 5 passed in r8 on Linux and on the stack on Windows.
#ifdef _WIN64
__ movq(Operand(rsp, (argument_slots_on_stack - 4) * kPointerSize), r8);
__ RecordWrite(rcx, RegExpImpl::kLastInputOffset, rax, rdi);
// Get the static offsets vector filled by the native regexp code.
- __ movq(rcx, ExternalReference::address_of_static_offsets_vector());
+ __ movq(rcx, ExternalReference::address_of_static_offsets_vector(isolate));
// rbx: last_match_info backing store (FixedArray)
// rcx: offsets vector
// haven't created the exception yet. Handle that in the runtime system.
// TODO(592): Rerunning the RegExp to get the stack overflow exception.
ExternalReference pending_exception_address(
- Isolate::k_pending_exception_address);
+ Isolate::k_pending_exception_address, isolate);
__ movq(rbx, pending_exception_address);
__ movq(rax, Operand(rbx, 0));
__ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
}
ExternalReference scope_depth =
- ExternalReference::heap_always_allocate_scope_depth();
+ ExternalReference::heap_always_allocate_scope_depth(masm->isolate());
if (always_allocate_scope) {
__ movq(kScratchRegister, scope_depth);
__ incl(Operand(kScratchRegister, 0));
// Retrieve the pending exception and clear the variable.
ExternalReference pending_exception_address(
- Isolate::k_pending_exception_address);
+ Isolate::k_pending_exception_address, masm->isolate());
__ movq(kScratchRegister, pending_exception_address);
__ movq(rax, Operand(kScratchRegister, 0));
- __ movq(rdx, ExternalReference::the_hole_value_location());
+ __ movq(rdx, ExternalReference::the_hole_value_location(masm->isolate()));
__ movq(rdx, Operand(rdx, 0));
__ movq(Operand(kScratchRegister, 0), rdx);
// TODO(X64): On Win64, if we ever use XMM6-XMM15, the low low 64 bits are
// callee save as well.
+ Isolate* isolate = masm->isolate();
+
// Save copies of the top frame descriptor on the stack.
- ExternalReference c_entry_fp(Isolate::k_c_entry_fp_address);
+ ExternalReference c_entry_fp(Isolate::k_c_entry_fp_address, isolate);
__ load_rax(c_entry_fp);
__ push(rax);
#ifdef ENABLE_LOGGING_AND_PROFILING
// If this is the outermost JS call, set js_entry_sp value.
- ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address);
+ ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address, isolate);
__ load_rax(js_entry_sp);
__ testq(rax, rax);
__ j(not_zero, ¬_outermost_js);
// Caught exception: Store result (exception) in the pending
// exception field in the JSEnv and return a failure sentinel.
- ExternalReference pending_exception(Isolate::k_pending_exception_address);
+ ExternalReference pending_exception(Isolate::k_pending_exception_address,
+ isolate);
__ store_rax(pending_exception);
__ movq(rax, Failure::Exception(), RelocInfo::NONE);
__ jmp(&exit);
__ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER);
// Clear any pending exceptions.
- __ load_rax(ExternalReference::the_hole_value_location());
+ __ load_rax(ExternalReference::the_hole_value_location(isolate));
__ store_rax(pending_exception);
// Fake a receiver (NULL).
// directly in the code, because the builtin stubs may not have been
// generated yet at the time this code is generated.
if (is_construct) {
- ExternalReference construct_entry(Builtins::JSConstructEntryTrampoline);
+ ExternalReference construct_entry(Builtins::JSConstructEntryTrampoline,
+ isolate);
__ load_rax(construct_entry);
} else {
- ExternalReference entry(Builtins::JSEntryTrampoline);
+ ExternalReference entry(Builtins::JSEntryTrampoline, isolate);
__ load_rax(entry);
}
__ lea(kScratchRegister, FieldOperand(rax, Code::kHeaderSize));
__ call(kScratchRegister);
// Unlink this frame from the handler chain.
- __ movq(kScratchRegister, ExternalReference(Isolate::k_handler_address));
+ __ movq(kScratchRegister,
+ ExternalReference(Isolate::k_handler_address, isolate));
__ pop(Operand(kScratchRegister, 0));
// Pop next_sp.
__ addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
// Restore the top frame descriptor from the stack.
__ bind(&exit);
- __ movq(kScratchRegister, ExternalReference(Isolate::k_c_entry_fp_address));
+ __ movq(kScratchRegister,
+ ExternalReference(Isolate::k_c_entry_fp_address, isolate));
__ pop(Operand(kScratchRegister, 0));
// Restore callee-saved registers (X64 conventions).
__ push(rcx);
// Call the runtime system in a fresh internal frame.
- ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss));
+ ExternalReference miss =
+ ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate());
__ EnterInternalFrame();
__ push(rdx);
__ push(rax);
function_return_is_shadowed_ = function_return_was_shadowed;
// Get an external reference to the handler address.
- ExternalReference handler_address(Isolate::k_handler_address);
+ ExternalReference handler_address(Isolate::k_handler_address, isolate());
// Make sure that there's nothing left on the stack above the
// handler structure.
function_return_is_shadowed_ = function_return_was_shadowed;
// Get an external reference to the handler address.
- ExternalReference handler_address(Isolate::k_handler_address);
+ ExternalReference handler_address(Isolate::k_handler_address, isolate());
// If we can fall off the end of the try block, unlink from the try
// chain and set the state on the frame to FALLING.
// Return a random uint32 number in rax.
// The fresh HeapNumber is in rbx, which is callee-save on both x64 ABIs.
__ PrepareCallCFunction(0);
- __ CallCFunction(ExternalReference::random_uint32_function(), 0);
+ __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 0);
// Convert 32 random bits in rax to 0.(32 random bits) in a double
// by computing:
__ RecordComment("// Calling from debug break to runtime - come in - over");
#endif
__ Set(rax, 0); // No arguments (argc == 0).
- __ movq(rbx, ExternalReference::debug_break());
+ __ movq(rbx, ExternalReference::debug_break(masm->isolate()));
CEntryStub ceb(1);
__ CallStub(&ceb);
// jumping to the target address intended by the caller and that was
// overwritten by the address of DebugBreakXXX.
ExternalReference after_break_target =
- ExternalReference(Debug_Address::AfterBreakTarget());
+ ExternalReference(Debug_Address::AfterBreakTarget(), masm->isolate());
__ movq(kScratchRegister, after_break_target);
__ jmp(Operand(kScratchRegister, 0));
}
void Debug::GenerateFrameDropperLiveEdit(MacroAssembler* masm) {
ExternalReference restarter_frame_function_slot =
- ExternalReference(Debug_Address::RestarterFrameFunctionPointer());
+ ExternalReference(Debug_Address::RestarterFrameFunctionPointer(),
+ masm->isolate());
__ movq(rax, restarter_frame_function_slot);
__ movq(Operand(rax, 0), Immediate(0));
__ movq(r8, arg5);
#endif
- __ CallCFunction(ExternalReference::new_deoptimizer_function(), 5);
+ Isolate* isolate = masm()->isolate();
+
+ __ CallCFunction(ExternalReference::new_deoptimizer_function(isolate), 5);
// Preserve deoptimizer object in register rax and get the input
// frame descriptor pointer.
__ movq(rbx, Operand(rax, Deoptimizer::input_offset()));
__ push(rax);
__ PrepareCallCFunction(1);
__ movq(arg1, rax);
- __ CallCFunction(ExternalReference::compute_output_frames_function(), 1);
+ __ CallCFunction(
+ ExternalReference::compute_output_frames_function(isolate), 1);
__ pop(rax);
// Replace the current frame with the output frames.
}
// Set up the roots register.
- ExternalReference roots_address = ExternalReference::roots_address();
+ ExternalReference roots_address = ExternalReference::roots_address(isolate);
__ InitializeRootRegister();
__ InitializeSmiConstantRegister();
// Return a random uint32 number in rax.
// The fresh HeapNumber is in rbx, which is callee-save on both x64 ABIs.
__ PrepareCallCFunction(0);
- __ CallCFunction(ExternalReference::random_uint32_function(), 0);
+ __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 0);
// Convert 32 random bits in rax to 0.(32 random bits) in a double
// by computing:
// Load the key (consisting of map and symbol) from the cache and
// check for match.
ExternalReference cache_keys
- = ExternalReference::keyed_lookup_cache_keys();
+ = ExternalReference::keyed_lookup_cache_keys(masm->isolate());
__ movq(rdi, rcx);
__ shl(rdi, Immediate(kPointerSizeLog2 + 1));
__ movq(kScratchRegister, cache_keys);
// Get field offset, which is a 32-bit integer.
ExternalReference cache_field_offsets
- = ExternalReference::keyed_lookup_cache_field_offsets();
+ = ExternalReference::keyed_lookup_cache_field_offsets(masm->isolate());
__ movq(kScratchRegister, cache_field_offsets);
__ movl(rdi, Operand(kScratchRegister, rcx, times_4, 0));
__ movzxbq(rcx, FieldOperand(rbx, Map::kInObjectPropertiesOffset));
__ push(rcx); // return address
// Perform tail call to the entry.
- __ TailCallExternalReference(ExternalReference(
- IC_Utility(kKeyedLoadPropertyWithInterceptor)), 2, 1);
+ __ TailCallExternalReference(
+ ExternalReference(IC_Utility(kKeyedLoadPropertyWithInterceptor),
+ masm->isolate()),
+ 2,
+ 1);
__ bind(&slow);
GenerateMiss(masm);
// Call the entry.
CEntryStub stub(1);
__ movq(rax, Immediate(2));
- __ movq(rbx, ExternalReference(IC_Utility(id)));
+ __ movq(rbx, ExternalReference(IC_Utility(id), masm->isolate()));
__ CallStub(&stub);
// Move result to rdi and exit the internal frame.
__ push(rbx); // return address
// Perform tail call to the entry.
- ExternalReference ref = ExternalReference(IC_Utility(kLoadIC_Miss));
+ ExternalReference ref =
+ ExternalReference(IC_Utility(kLoadIC_Miss), masm->isolate());
__ TailCallExternalReference(ref, 2, 1);
}
__ push(rbx); // return address
// Perform tail call to the entry.
- ExternalReference ref = ExternalReference(IC_Utility(kKeyedLoadIC_Miss));
+ ExternalReference ref
+ = ExternalReference(IC_Utility(kKeyedLoadIC_Miss), masm->isolate());
__ TailCallExternalReference(ref, 2, 1);
}
__ push(rbx); // return address
// Perform tail call to the entry.
- ExternalReference ref = ExternalReference(IC_Utility(kStoreIC_Miss));
+ ExternalReference ref =
+ ExternalReference(IC_Utility(kStoreIC_Miss), masm->isolate());
__ TailCallExternalReference(ref, 3, 1);
}
__ push(value);
__ push(scratch); // return address
- ExternalReference ref = ExternalReference(IC_Utility(kStoreIC_ArrayLength));
+ ExternalReference ref =
+ ExternalReference(IC_Utility(kStoreIC_ArrayLength), masm->isolate());
__ TailCallExternalReference(ref, 2, 1);
__ bind(&miss);
__ push(rbx); // return address
// Do tail-call to runtime routine.
- ExternalReference ref = ExternalReference(IC_Utility(kKeyedStoreIC_Miss));
+ ExternalReference ref =
+ ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate());
__ TailCallExternalReference(ref, 3, 1);
}
__ PrepareCallCFunction(2);
__ movsd(xmm0, left);
ASSERT(right.is(xmm1));
- __ CallCFunction(ExternalReference::double_fp_operation(Token::MOD), 2);
+ __ CallCFunction(
+ ExternalReference::double_fp_operation(Token::MOD, isolate()), 2);
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
__ movsd(result, xmm0);
break;
// Move arguments to correct registers
__ movsd(xmm0, left_reg);
ASSERT(ToDoubleRegister(right).is(xmm1));
- __ CallCFunction(ExternalReference::power_double_double_function(), 2);
+ __ CallCFunction(
+ ExternalReference::power_double_double_function(isolate()), 2);
} else if (exponent_type.IsInteger32()) {
__ PrepareCallCFunction(2);
// Move arguments to correct registers: xmm0 and edi (not rdi).
#else
ASSERT(ToRegister(right).is(rdi));
#endif
- __ CallCFunction(ExternalReference::power_double_int_function(), 2);
+ __ CallCFunction(
+ ExternalReference::power_double_int_function(isolate()), 2);
} else {
ASSERT(exponent_type.IsTagged());
CpuFeatures::Scope scope(SSE2);
// Move arguments to correct registers xmm0 and xmm1.
__ movsd(xmm0, left_reg);
// Right argument is already in xmm1.
- __ CallCFunction(ExternalReference::power_double_double_function(), 2);
+ __ CallCFunction(
+ ExternalReference::power_double_double_function(isolate()), 2);
}
// Return value is in xmm0.
__ movsd(result_reg, xmm0);
void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
const Runtime::Function* function = Runtime::FunctionForId(id);
Set(rax, function->nargs);
- movq(rbx, ExternalReference(function));
+ movq(rbx, ExternalReference(function, isolate()));
CEntryStub ces(1);
ces.SaveDoubles();
CallStub(&ces);
// should remove this need and make the runtime routine entry code
// smarter.
Set(rax, num_arguments);
- movq(rbx, ExternalReference(f));
+ movq(rbx, ExternalReference(f, isolate()));
CEntryStub ces(f->result_size);
CallStub(&ces);
}
// should remove this need and make the runtime routine entry code
// smarter.
Set(rax, num_arguments);
- movq(rbx, ExternalReference(f));
+ movq(rbx, ExternalReference(f, isolate()));
CEntryStub ces(f->result_size);
return TryCallStub(&ces);
}
// Tail-calls the runtime function identified by fid, forwarding
// num_arguments arguments and expecting a result of result_size words.
// The ExternalReference is bound to this assembler's isolate() rather
// than the TLS-based Isolate::Current(), matching the isolate-threading
// convention used throughout this file.
void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid, isolate()),
                            num_arguments,
                            result_size);
}
// Non-allocating-failure-tolerant variant of TailCallRuntime: returns a
// MaybeObject* so the caller can propagate an allocation failure instead
// of aborting. The external reference is bound to this assembler's
// isolate(), consistent with TailCallRuntime above.
MaybeObject* MacroAssembler::TryTailCallRuntime(Runtime::FunctionId fid,
                                                int num_arguments,
                                                int result_size) {
  return TryTailCallExternalReference(ExternalReference(fid, isolate()),
                                      num_arguments,
                                      result_size);
}
ExternalReference::handle_scope_level_address(),
next_address);
ExternalReference scheduled_exception_address =
- ExternalReference::scheduled_exception_address();
+ ExternalReference::scheduled_exception_address(isolate());
// Allocate HandleScope in callee-save registers.
Register prev_next_address_reg = r14;
#else
movq(rdi, ExternalReference::isolate_address());
#endif
- movq(rax, ExternalReference::delete_handle_scope_extensions());
+ movq(rax, ExternalReference::delete_handle_scope_extensions(isolate()));
call(rax);
movq(rax, prev_limit_reg);
jmp(&leave_exit_frame);
push(Immediate(0)); // NULL frame pointer.
}
// Save the current handler.
- movq(kScratchRegister, ExternalReference(Isolate::k_handler_address));
+ movq(kScratchRegister,
+ ExternalReference(Isolate::k_handler_address, isolate()));
push(Operand(kScratchRegister, 0));
// Link this handler.
movq(Operand(kScratchRegister, 0), rsp);
void MacroAssembler::PopTryHandler() {
ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
// Unlink this handler.
- movq(kScratchRegister, ExternalReference(Isolate::k_handler_address));
+ movq(kScratchRegister,
+ ExternalReference(Isolate::k_handler_address, isolate()));
pop(Operand(kScratchRegister, 0));
// Remove the remaining fields.
addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
movq(rax, value);
}
- ExternalReference handler_address(Isolate::k_handler_address);
+ ExternalReference handler_address(Isolate::k_handler_address, isolate());
movq(kScratchRegister, handler_address);
movq(rsp, Operand(kScratchRegister, 0));
// get next in chain
movq(rax, value);
}
// Fetch top stack handler.
- ExternalReference handler_address(Isolate::k_handler_address);
+ ExternalReference handler_address(Isolate::k_handler_address, isolate());
movq(kScratchRegister, handler_address);
movq(rsp, Operand(kScratchRegister, 0));
if (type == OUT_OF_MEMORY) {
// Set external caught exception to false.
ExternalReference external_caught(
- Isolate::k_external_caught_exception_address);
+ Isolate::k_external_caught_exception_address, isolate());
movq(rax, Immediate(false));
store_rax(external_caught);
// Set pending exception and rax to out of memory exception.
- ExternalReference pending_exception(Isolate::k_pending_exception_address);
+ ExternalReference pending_exception(Isolate::k_pending_exception_address,
+ isolate());
movq(rax, Failure::OutOfMemoryException(), RelocInfo::NONE);
store_rax(pending_exception);
}
// Emits a call into the debugger: invokes Runtime::kDebugBreak with zero
// arguments through a CEntryStub, tagged with DEBUG_BREAK reloc info so
// the debugger can recognize the call site. Only valid where stub calls
// are allowed (checked by the assert).
void MacroAssembler::DebugBreak() {
  ASSERT(allow_stub_calls());
  Set(rax, 0);  // No arguments.
  movq(rbx, ExternalReference(Runtime::kDebugBreak, isolate()));
  CEntryStub ces(1);
  Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}
movq(r14, rax); // Backup rax in callee-save register.
}
- movq(kScratchRegister, ExternalReference(Isolate::k_c_entry_fp_address));
+ movq(kScratchRegister,
+ ExternalReference(Isolate::k_c_entry_fp_address, isolate()));
movq(Operand(kScratchRegister, 0), rbp);
- movq(kScratchRegister, ExternalReference(Isolate::k_context_address));
+ movq(kScratchRegister,
+ ExternalReference(Isolate::k_context_address, isolate()));
movq(Operand(kScratchRegister, 0), rsi);
}
void MacroAssembler::LeaveExitFrameEpilogue() {
// Restore current context from top and clear it in debug mode.
- ExternalReference context_address(Isolate::k_context_address);
+ ExternalReference context_address(Isolate::k_context_address, isolate());
movq(kScratchRegister, context_address);
movq(rsi, Operand(kScratchRegister, 0));
#ifdef DEBUG
#endif
// Clear the top frame.
- ExternalReference c_entry_fp_address(Isolate::k_c_entry_fp_address);
+ ExternalReference c_entry_fp_address(Isolate::k_c_entry_fp_address,
+ isolate());
movq(kScratchRegister, c_entry_fp_address);
movq(Operand(kScratchRegister, 0), Immediate(0));
}
Register scratch,
AllocationFlags flags) {
ExternalReference new_space_allocation_top =
- ExternalReference::new_space_allocation_top_address();
+ ExternalReference::new_space_allocation_top_address(isolate());
// Just return if allocation top is already known.
if ((flags & RESULT_CONTAINS_TOP) != 0) {
}
ExternalReference new_space_allocation_top =
- ExternalReference::new_space_allocation_top_address();
+ ExternalReference::new_space_allocation_top_address(isolate());
// Update new top.
if (result_end.is(rax)) {
// Calculate new top and bail out if new space is exhausted.
ExternalReference new_space_allocation_limit =
- ExternalReference::new_space_allocation_limit_address();
+ ExternalReference::new_space_allocation_limit_address(isolate());
Register top_reg = result_end.is_valid() ? result_end : result;
// Calculate new top and bail out if new space is exhausted.
ExternalReference new_space_allocation_limit =
- ExternalReference::new_space_allocation_limit_address();
+ ExternalReference::new_space_allocation_limit_address(isolate());
// We assume that element_count*element_size + header_size does not
// overflow.
// Calculate new top and bail out if new space is exhausted.
ExternalReference new_space_allocation_limit =
- ExternalReference::new_space_allocation_limit_address();
+ ExternalReference::new_space_allocation_limit_address(isolate());
if (!object_size.is(result_end)) {
movq(result_end, object_size);
}
void MacroAssembler::UndoAllocationInNewSpace(Register object) {
ExternalReference new_space_allocation_top =
- ExternalReference::new_space_allocation_top_address();
+ ExternalReference::new_space_allocation_top_address(isolate());
// Make sure the object has no tag before resetting top.
and_(object, Immediate(~kHeapObjectTagMask));
void LoadFromSafepointRegisterSlot(Register dst, Register src);
// Loads the isolate's roots-array address into kRootRegister and applies
// kRootRegisterBias so that root accesses can use shorter displacement
// encodings. The reference is taken from this assembler's isolate(), not
// Isolate::Current().
void InitializeRootRegister() {
  ExternalReference roots_address =
      ExternalReference::roots_address(isolate());
  movq(kRootRegister, roots_address);
  addq(kRootRegister, Immediate(kRootRegisterBias));
}
// case the size of the new space is different between the snapshot maker
// and the running system.
if (scratch.is(object)) {
- movq(kScratchRegister, ExternalReference::new_space_mask());
+ movq(kScratchRegister, ExternalReference::new_space_mask(isolate()));
and_(scratch, kScratchRegister);
} else {
- movq(scratch, ExternalReference::new_space_mask());
+ movq(scratch, ExternalReference::new_space_mask(isolate()));
and_(scratch, object);
}
- movq(kScratchRegister, ExternalReference::new_space_start());
+ movq(kScratchRegister, ExternalReference::new_space_start(isolate()));
cmpq(scratch, kScratchRegister);
j(cc, branch);
} else {
__ movq(rdx, rbx);
#endif
ExternalReference compare =
- ExternalReference::re_case_insensitive_compare_uc16();
+ ExternalReference::re_case_insensitive_compare_uc16(masm_->isolate());
__ CallCFunction(compare, num_arguments);
// Restore original values before reacting on result value.
Label stack_ok;
ExternalReference stack_limit =
- ExternalReference::address_of_stack_limit();
+ ExternalReference::address_of_stack_limit(masm_->isolate());
__ movq(rcx, rsp);
__ movq(kScratchRegister, stack_limit);
__ subq(rcx, Operand(kScratchRegister, 0));
__ movq(rdi, backtrack_stackpointer()); // First argument.
__ lea(rsi, Operand(rbp, kStackHighEnd)); // Second argument.
#endif
- ExternalReference grow_stack = ExternalReference::re_grow_stack();
+ ExternalReference grow_stack =
+ ExternalReference::re_grow_stack(masm_->isolate());
__ CallCFunction(grow_stack, num_arguments);
// If return NULL, we have failed to grow the stack, and
// must exit with a stack-overflow exception.
__ lea(rdi, Operand(rsp, -kPointerSize));
#endif
ExternalReference stack_check =
- ExternalReference::re_check_stack_guard_state();
+ ExternalReference::re_check_stack_guard_state(masm_->isolate());
__ CallCFunction(stack_check, num_arguments);
}
// Check for preemption.
Label no_preempt;
ExternalReference stack_limit =
- ExternalReference::address_of_stack_limit();
+ ExternalReference::address_of_stack_limit(masm_->isolate());
__ load_rax(stack_limit);
__ cmpq(rsp, rax);
__ j(above, &no_preempt);
void RegExpMacroAssemblerX64::CheckStackLimit() {
Label no_stack_overflow;
ExternalReference stack_limit =
- ExternalReference::address_of_regexp_stack_limit();
+ ExternalReference::address_of_regexp_stack_limit(masm_->isolate());
__ load_rax(stack_limit);
__ cmpq(backtrack_stackpointer(), rax);
__ j(above, &no_stack_overflow);
PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
ExternalReference ref =
- ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly));
+ ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly),
+ masm->isolate());
__ movq(rax, Immediate(5));
__ movq(rbx, ref);
interceptor_holder);
__ CallExternalReference(
- ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForCall)),
+ ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForCall),
+ masm->isolate()),
5);
// Restore the name_ register.
__ push(rax);
__ push(scratch);
__ TailCallExternalReference(
- ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage)), 3, 1);
+ ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
+ masm->isolate()),
+ 3,
+ 1);
return;
}
__ push(scratch2); // restore return address
ExternalReference ref =
- ExternalReference(IC_Utility(IC::kLoadCallbackProperty));
+ ExternalReference(IC_Utility(IC::kLoadCallbackProperty),
+ masm()->isolate());
__ TailCallExternalReference(ref, 5, 1);
}
} else { // !compile_followup_inline
__ push(scratch2); // restore old return address
ExternalReference ref = ExternalReference(
- IC_Utility(IC::kLoadPropertyWithInterceptorForLoad));
+ IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), masm()->isolate());
__ TailCallExternalReference(ref, 5, 1);
}
}
__ jmp(&call_builtin);
}
+ Isolate* isolate = masm()->isolate();
ExternalReference new_space_allocation_top =
- ExternalReference::new_space_allocation_top_address();
+ ExternalReference::new_space_allocation_top_address(isolate);
ExternalReference new_space_allocation_limit =
- ExternalReference::new_space_allocation_limit_address();
+ ExternalReference::new_space_allocation_limit_address(isolate);
const int kAllocationDelta = 4;
// Load top.
}
__ bind(&call_builtin);
- __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush),
+ __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush,
+ masm()->isolate()),
argc + 1,
1);
}
__ ret((argc + 1) * kPointerSize);
__ bind(&call_builtin);
- __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPop),
- argc + 1,
- 1);
+ __ TailCallExternalReference(
+ ExternalReference(Builtins::c_ArrayPop, masm()->isolate()),
+ argc + 1,
+ 1);
__ bind(&miss);
MaybeObject* maybe_result = GenerateMissBranch();
// Do tail-call to the runtime system.
ExternalReference store_callback_property =
- ExternalReference(IC_Utility(IC::kStoreCallbackProperty));
+ ExternalReference(IC_Utility(IC::kStoreCallbackProperty),
+ masm()->isolate());
__ TailCallExternalReference(store_callback_property, 4, 1);
// Handle store cache miss.
// Do tail-call to the runtime system.
ExternalReference store_ic_property =
- ExternalReference(IC_Utility(IC::kStoreInterceptorProperty));
+ ExternalReference(IC_Utility(IC::kStoreInterceptorProperty),
+ masm()->isolate());
__ TailCallExternalReference(store_ic_property, 4, 1);
// Handle store cache miss.
__ jmp(Operand(ebx, ecx, times_4, 10000));
#ifdef ENABLE_DEBUGGER_SUPPORT
ExternalReference after_break_target =
- ExternalReference(Debug_Address::AfterBreakTarget());
+ ExternalReference(Debug_Address::AfterBreakTarget(),
+ assm.isolate());
__ jmp(Operand::StaticVariable(after_break_target));
#endif // ENABLE_DEBUGGER_SUPPORT
__ jmp(ic, RelocInfo::CODE_TARGET);
// Test helper: resolves any id type accepted by an ExternalReference
// constructor to its raw address in the current isolate. Used by the
// serializer tests below to compare encoder/decoder round trips.
template <class T>
static Address AddressOf(T id) {
  return ExternalReference(id, i::Isolate::Current()).address();
}
TEST(ExternalReferenceEncoder) {
OS::Setup();
- i::Isolate::Current()->stats_table()->SetCounterFunction(counter_function);
+ Isolate* isolate = i::Isolate::Current();
+ isolate->stats_table()->SetCounterFunction(counter_function);
HEAP->Setup(false);
ExternalReferenceEncoder encoder;
CHECK_EQ(make_code(BUILTIN, Builtins::ArrayCode),
CHECK_EQ(make_code(STATS_COUNTER, Counters::k_keyed_load_function_prototype),
encoder.Encode(keyed_load_function_prototype.address()));
ExternalReference the_hole_value_location =
- ExternalReference::the_hole_value_location();
+ ExternalReference::the_hole_value_location(isolate);
CHECK_EQ(make_code(UNCLASSIFIED, 2),
encoder.Encode(the_hole_value_location.address()));
ExternalReference stack_limit_address =
- ExternalReference::address_of_stack_limit();
+ ExternalReference::address_of_stack_limit(isolate);
CHECK_EQ(make_code(UNCLASSIFIED, 4),
encoder.Encode(stack_limit_address.address()));
ExternalReference real_stack_limit_address =
- ExternalReference::address_of_real_stack_limit();
+ ExternalReference::address_of_real_stack_limit(isolate);
CHECK_EQ(make_code(UNCLASSIFIED, 5),
encoder.Encode(real_stack_limit_address.address()));
#ifdef ENABLE_DEBUGGER_SUPPORT
CHECK_EQ(make_code(UNCLASSIFIED, 15),
- encoder.Encode(ExternalReference::debug_break().address()));
+ encoder.Encode(ExternalReference::debug_break(isolate).address()));
#endif // ENABLE_DEBUGGER_SUPPORT
CHECK_EQ(make_code(UNCLASSIFIED, 10),
- encoder.Encode(ExternalReference::new_space_start().address()));
+ encoder.Encode(
+ ExternalReference::new_space_start(isolate).address()));
CHECK_EQ(make_code(UNCLASSIFIED, 3),
- encoder.Encode(ExternalReference::roots_address().address()));
+ encoder.Encode(ExternalReference::roots_address(isolate).address()));
}
TEST(ExternalReferenceDecoder) {
OS::Setup();
- i::Isolate::Current()->stats_table()->SetCounterFunction(counter_function);
+ Isolate* isolate = i::Isolate::Current();
+ isolate->stats_table()->SetCounterFunction(counter_function);
HEAP->Setup(false);
ExternalReferenceDecoder decoder;
CHECK_EQ(AddressOf(Builtins::ArrayCode),
decoder.Decode(
make_code(STATS_COUNTER,
Counters::k_keyed_load_function_prototype)));
- CHECK_EQ(ExternalReference::the_hole_value_location().address(),
+ CHECK_EQ(ExternalReference::the_hole_value_location(isolate).address(),
decoder.Decode(make_code(UNCLASSIFIED, 2)));
- CHECK_EQ(ExternalReference::address_of_stack_limit().address(),
+ CHECK_EQ(ExternalReference::address_of_stack_limit(isolate).address(),
decoder.Decode(make_code(UNCLASSIFIED, 4)));
- CHECK_EQ(ExternalReference::address_of_real_stack_limit().address(),
+ CHECK_EQ(ExternalReference::address_of_real_stack_limit(isolate).address(),
decoder.Decode(make_code(UNCLASSIFIED, 5)));
#ifdef ENABLE_DEBUGGER_SUPPORT
- CHECK_EQ(ExternalReference::debug_break().address(),
+ CHECK_EQ(ExternalReference::debug_break(isolate).address(),
decoder.Decode(make_code(UNCLASSIFIED, 15)));
#endif // ENABLE_DEBUGGER_SUPPORT
- CHECK_EQ(ExternalReference::new_space_start().address(),
+ CHECK_EQ(ExternalReference::new_space_start(isolate).address(),
decoder.Decode(make_code(UNCLASSIFIED, 10)));
}