1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #ifndef V8_X64_ASSEMBLER_X64_INL_H_
6 #define V8_X64_ASSEMBLER_X64_INL_H_
8 #include "src/x64/assembler-x64.h"
11 #include "src/debug.h"
12 #include "src/v8memory.h"
17 bool CpuFeatures::SupportsCrankshaft() { return true; }
18 bool CpuFeatures::SupportsSIMD128InCrankshaft() { return true; }
21 // -----------------------------------------------------------------------------
22 // Implementation of Assembler
// Opcode byte of the 32-bit pc-relative CALL instruction.
static const byte kCallOpcode = 0xE8;
// The length of pushq(rbp), movp(rbp, rsp), Push(rsi) and Push(rdi).
static const int kNoCodeAgeSequenceLength = kPointerSize == kInt64Size ? 6 : 17;
// Emits a 32-bit value at the current assembly position and advances pc_.
void Assembler::emitl(uint32_t x) {
  Memory::uint32_at(pc_) = x;
  pc_ += sizeof(uint32_t);
}
// Emits a pointer-sized value, recording relocation information when the
// relocation mode requires it.
void Assembler::emitp(void* x, RelocInfo::Mode rmode) {
  uintptr_t value = reinterpret_cast<uintptr_t>(x);
  Memory::uintptr_at(pc_) = value;
  if (!RelocInfo::IsNone(rmode)) {
    RecordRelocInfo(rmode, value);
  }
  pc_ += sizeof(uintptr_t);
}
// Emits a 64-bit value at the current assembly position and advances pc_.
void Assembler::emitq(uint64_t x) {
  Memory::uint64_at(pc_) = x;
  pc_ += sizeof(uint64_t);
}
// Emits a 16-bit value at the current assembly position and advances pc_.
void Assembler::emitw(uint16_t x) {
  Memory::uint16_at(pc_) = x;
  pc_ += sizeof(uint16_t);
}
58 void Assembler::emit_code_target(Handle<Code> target,
59 RelocInfo::Mode rmode,
60 TypeFeedbackId ast_id) {
61 ASSERT(RelocInfo::IsCodeTarget(rmode) ||
62 rmode == RelocInfo::CODE_AGE_SEQUENCE);
63 if (rmode == RelocInfo::CODE_TARGET && !ast_id.IsNone()) {
64 RecordRelocInfo(RelocInfo::CODE_TARGET_WITH_ID, ast_id.ToInt());
66 RecordRelocInfo(rmode);
68 int current = code_targets_.length();
69 if (current > 0 && code_targets_.last().is_identical_to(target)) {
70 // Optimization if we keep jumping to the same code target.
73 code_targets_.Add(target);
// Emits a runtime entry as a 32-bit offset relative to the start of the
// isolate's code range, and records the relocation info.
void Assembler::emit_runtime_entry(Address entry, RelocInfo::Mode rmode) {
  ASSERT(RelocInfo::IsRuntimeEntry(rmode));
  RecordRelocInfo(rmode);
  emitl(static_cast<uint32_t>(entry - isolate()->code_range()->start()));
}
// Emits REX.W with REX.R from |reg| and REX.B from |rm_reg|.
void Assembler::emit_rex_64(Register reg, Register rm_reg) {
  emit(0x48 | reg.high_bit() << 2 | rm_reg.high_bit());
}
// Emits REX.W with REX.R from the XMM |reg| and REX.B from |rm_reg|.
void Assembler::emit_rex_64(XMMRegister reg, Register rm_reg) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3);
}
// Emits REX.W with REX.R from |reg| and REX.B from the XMM |rm_reg|.
void Assembler::emit_rex_64(Register reg, XMMRegister rm_reg) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | rm_reg.code() >> 3);
}
// Emits REX.W with REX.R from |reg| and REX.X/REX.B from the operand.
void Assembler::emit_rex_64(Register reg, const Operand& op) {
  emit(0x48 | reg.high_bit() << 2 | op.rex_);
}
// Emits REX.W with REX.R from the XMM |reg| and REX.X/REX.B from the operand.
void Assembler::emit_rex_64(XMMRegister reg, const Operand& op) {
  emit(0x48 | (reg.code() & 0x8) >> 1 | op.rex_);
}
// Emits REX.W with REX.B taken from the single register operand.
void Assembler::emit_rex_64(Register rm_reg) {
  ASSERT_EQ(rm_reg.code() & 0xf, rm_reg.code());
  emit(0x48 | rm_reg.high_bit());
}
// Emits REX.W with REX.X/REX.B taken from the operand.
void Assembler::emit_rex_64(const Operand& op) {
  emit(0x48 | op.rex_);
}
// Emits a REX prefix (no REX.W) with REX.R from |reg| and REX.B from |rm_reg|.
void Assembler::emit_rex_32(Register reg, Register rm_reg) {
  emit(0x40 | reg.high_bit() << 2 | rm_reg.high_bit());
}
// Emits a REX prefix (no REX.W) with REX.R from |reg| and operand rex bits.
void Assembler::emit_rex_32(Register reg, const Operand& op) {
  emit(0x40 | reg.high_bit() << 2 | op.rex_);
}
// Emits a REX prefix (no REX.W) with REX.B from the single register operand.
void Assembler::emit_rex_32(Register rm_reg) {
  emit(0x40 | rm_reg.high_bit());
}
// Emits a REX prefix (no REX.W) with the operand's REX.X/REX.B bits.
void Assembler::emit_rex_32(const Operand& op) {
  emit(0x40 | op.rex_);
}
// Emits a REX prefix only if either register needs its high bit encoded.
void Assembler::emit_optional_rex_32(Register reg, Register rm_reg) {
  byte rex_bits = reg.high_bit() << 2 | rm_reg.high_bit();
  if (rex_bits != 0) emit(0x40 | rex_bits);
}
// Emits a REX prefix only if |reg| or the operand carries high register bits.
void Assembler::emit_optional_rex_32(Register reg, const Operand& op) {
  byte rex_bits = reg.high_bit() << 2 | op.rex_;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}
// Emits a REX prefix only if the XMM |reg| or the operand needs one.
void Assembler::emit_optional_rex_32(XMMRegister reg, const Operand& op) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | op.rex_;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}
// Emits a REX prefix only if either XMM register index is >= 8.
void Assembler::emit_optional_rex_32(XMMRegister reg, XMMRegister base) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}
// Emits a REX prefix only if the XMM |reg| or general |base| needs one.
void Assembler::emit_optional_rex_32(XMMRegister reg, Register base) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}
// Emits a REX prefix only if the general |reg| or XMM |base| needs one.
void Assembler::emit_optional_rex_32(Register reg, XMMRegister base) {
  byte rex_bits = (reg.code() & 0x8) >> 1 | (base.code() & 0x8) >> 3;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}
// Emits REX.B (0x41) only if the register is one of r8..r15.
void Assembler::emit_optional_rex_32(Register rm_reg) {
  if (rm_reg.high_bit()) emit(0x41);
}
// Emits a REX prefix only if the XMM register index is >= 8.
void Assembler::emit_optional_rex_32(XMMRegister reg) {
  byte rex_bits = (reg.code() & 0x8) >> 1;
  if (rex_bits != 0) emit(0x40 | rex_bits);
}
// Emits a REX prefix only if the operand carries REX.X/REX.B bits.
void Assembler::emit_optional_rex_32(const Operand& op) {
  if (op.rex_ != 0) emit(0x40 | op.rex_);
}
// Call/jump targets are pc-relative on x64: the target is the signed 32-bit
// displacement at |pc| measured from the end of the 4-byte operand.
// |constant_pool| is unused on this platform.
Address Assembler::target_address_at(Address pc,
                                     ConstantPoolArray* constant_pool) {
  return Memory::int32_at(pc) + pc + 4;
}
198 void Assembler::set_target_address_at(Address pc,
199 ConstantPoolArray* constant_pool,
201 ICacheFlushMode icache_flush_mode) {
202 Memory::int32_at(pc) = static_cast<int32_t>(target - pc - 4);
203 if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
204 CPU::FlushICache(pc, sizeof(int32_t));
// The address of the call target's 32-bit operand precedes the return
// address by kCallTargetAddressOffset.
Address Assembler::target_address_from_return_address(Address pc) {
  return pc - kCallTargetAddressOffset;
}
// The 32-bit value at |pc| is an index into the code_targets_ table written
// by emit_code_target.
Handle<Object> Assembler::code_target_object_handle_at(Address pc) {
  return code_targets_[Memory::int32_at(pc)];
}
// Decodes a runtime entry stored as a 32-bit offset from the code range
// start (the inverse of emit_runtime_entry).
Address Assembler::runtime_entry_at(Address pc) {
  return Memory::int32_at(pc) + isolate()->code_range()->start();
}
223 // -----------------------------------------------------------------------------
224 // Implementation of RelocInfo
// The modes possibly affected by apply must be in kApplyMask.
// Adjusts this relocation entry after the enclosing code object moved by
// |delta| bytes.
void RelocInfo::apply(intptr_t delta, ICacheFlushMode icache_flush_mode) {
  bool flush_icache = icache_flush_mode != SKIP_ICACHE_FLUSH;
  if (IsInternalReference(rmode_)) {
    // absolute code pointer inside code object moves with the code object.
    Memory::Address_at(pc_) += static_cast<int32_t>(delta);
    if (flush_icache) CPU::FlushICache(pc_, sizeof(Address));
  } else if (IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)) {
    // pc-relative displacement: the instruction moved but the target did
    // not, so subtract the delta to keep the target stable.
    Memory::int32_at(pc_) -= static_cast<int32_t>(delta);
    if (flush_icache) CPU::FlushICache(pc_, sizeof(int32_t));
  } else if (rmode_ == CODE_AGE_SEQUENCE) {
    if (*pc_ == kCallOpcode) {
      int32_t* p = reinterpret_cast<int32_t*>(pc_ + 1);
      *p -= static_cast<int32_t>(delta);  // Relocate entry.
      if (flush_icache) CPU::FlushICache(p, sizeof(uint32_t));
    }
  }
}
// Returns the decoded target of a code-target or runtime-entry relocation.
Address RelocInfo::target_address() {
  ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  return Assembler::target_address_at(pc_, host_);
}
// The target is encoded directly at pc_, so the address of the target slot
// is pc_ itself.
Address RelocInfo::target_address_address() {
  ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)
      || rmode_ == EMBEDDED_OBJECT
      || rmode_ == EXTERNAL_REFERENCE);
  return reinterpret_cast<Address>(pc_);
}
260 Address RelocInfo::constant_pool_entry_address() {
266 int RelocInfo::target_address_size() {
267 if (IsCodedSpecially()) {
268 return Assembler::kSpecialTargetSize;
// Patches the relocation target and, for code targets embedded in a host
// code object, notifies the incremental marker via the write barrier.
void RelocInfo::set_target_address(Address target,
                                   WriteBarrierMode write_barrier_mode,
                                   ICacheFlushMode icache_flush_mode) {
  ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
  Assembler::set_target_address_at(pc_, host_, target, icache_flush_mode);
  if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL &&
      IsCodeTarget(rmode_)) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}
// Reads the object pointer embedded directly at pc_.
Object* RelocInfo::target_object() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Memory::Object_at(pc_);
}
295 Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
296 ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
297 if (rmode_ == EMBEDDED_OBJECT) {
298 return Memory::Object_Handle_at(pc_);
300 return origin->code_target_object_handle_at(pc_);
// Reads the external reference address embedded at pc_.
Address RelocInfo::target_reference() {
  ASSERT(rmode_ == RelocInfo::EXTERNAL_REFERENCE);
  return Memory::Address_at(pc_);
}
311 void RelocInfo::set_target_object(Object* target,
312 WriteBarrierMode write_barrier_mode,
313 ICacheFlushMode icache_flush_mode) {
314 ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
315 ASSERT(!target->IsConsString());
316 Memory::Object_at(pc_) = target;
317 if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
318 CPU::FlushICache(pc_, sizeof(Address));
320 if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
322 target->IsHeapObject()) {
323 host()->GetHeap()->incremental_marking()->RecordWrite(
324 host(), &Memory::Object_at(pc_), HeapObject::cast(target));
// Resolves a runtime-entry relocation through the assembler's code range.
Address RelocInfo::target_runtime_entry(Assembler* origin) {
  ASSERT(IsRuntimeEntry(rmode_));
  return origin->runtime_entry_at(pc_);
}
// Retargets a runtime-entry relocation; skips the write when the target is
// already correct to avoid redundant icache flushes.
void RelocInfo::set_target_runtime_entry(Address target,
                                         WriteBarrierMode write_barrier_mode,
                                         ICacheFlushMode icache_flush_mode) {
  ASSERT(IsRuntimeEntry(rmode_));
  if (target_address() != target) {
    set_target_address(target, write_barrier_mode, icache_flush_mode);
  }
}
// Wraps the raw cell-value address stored at pc_ in a Handle.
Handle<Cell> RelocInfo::target_cell_handle() {
  ASSERT(rmode_ == RelocInfo::CELL);
  Address address = Memory::Address_at(pc_);
  return Handle<Cell>(reinterpret_cast<Cell**>(address));
}
// Recovers the Cell from the stored address of its value field.
Cell* RelocInfo::target_cell() {
  ASSERT(rmode_ == RelocInfo::CELL);
  return Cell::FromValueAddress(Memory::Address_at(pc_));
}
358 void RelocInfo::set_target_cell(Cell* cell,
359 WriteBarrierMode write_barrier_mode,
360 ICacheFlushMode icache_flush_mode) {
361 ASSERT(rmode_ == RelocInfo::CELL);
362 Address address = cell->address() + Cell::kValueOffset;
363 Memory::Address_at(pc_) = address;
364 if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
365 CPU::FlushICache(pc_, sizeof(Address));
367 if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
369 // TODO(1550) We are passing NULL as a slot because cell can never be on
370 // evacuation candidate.
371 host()->GetHeap()->incremental_marking()->RecordWrite(
377 void RelocInfo::WipeOut() {
378 if (IsEmbeddedObject(rmode_) || IsExternalReference(rmode_)) {
379 Memory::Address_at(pc_) = NULL;
380 } else if (IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)) {
381 // Effectively write zero into the relocation.
382 Assembler::set_target_address_at(pc_, host_, pc_ + sizeof(int32_t));
389 bool RelocInfo::IsPatchedReturnSequence() {
390 // The recognized call sequence is:
391 // movq(kScratchRegister, address); call(kScratchRegister);
392 // It only needs to be distinguished from a return sequence
393 // movq(rsp, rbp); pop(rbp); ret(n); int3 *6
394 // The 11th byte is int3 (0xCC) in the return sequence and
395 // REX.WB (0x48+register bit) for the call sequence.
396 return pc_[Assembler::kMoveAddressIntoScratchRegisterInstructionLength] !=
// A debug-break slot is filled with nops until patched; any non-nop means
// the slot has been patched.
bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
  return !Assembler::IsNop(pc());
}
// The code-age sequence starts with a CALL opcode; the stub index follows it.
Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) {
  ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  ASSERT(*pc_ == kCallOpcode);
  return origin->code_target_object_handle_at(pc_ + 1);
}
// Decodes the code-age stub from the call target encoded after the CALL
// opcode byte.
Code* RelocInfo::code_age_stub() {
  ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
  ASSERT(*pc_ == kCallOpcode);
  return Code::GetCodeFromTargetAddress(
      Assembler::target_address_at(pc_ + 1, host_));
}
421 void RelocInfo::set_code_age_stub(Code* stub,
422 ICacheFlushMode icache_flush_mode) {
423 ASSERT(*pc_ == kCallOpcode);
424 ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
425 Assembler::set_target_address_at(pc_ + 1, host_, stub->instruction_start(),
// Reads the absolute call target stored inside a patched return sequence or
// debug-break slot.
Address RelocInfo::call_address() {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return Memory::Address_at(
      pc_ + Assembler::kRealPatchReturnSequenceAddressOffset);
}
438 void RelocInfo::set_call_address(Address target) {
439 ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
440 (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
441 Memory::Address_at(pc_ + Assembler::kRealPatchReturnSequenceAddressOffset) =
443 CPU::FlushICache(pc_ + Assembler::kRealPatchReturnSequenceAddressOffset,
445 if (host() != NULL) {
446 Object* target_code = Code::GetCodeFromTargetAddress(target);
447 host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
448 host(), this, HeapObject::cast(target_code));
// Dereferences the object slot inside the patched call sequence.
Object* RelocInfo::call_object() {
  return *call_object_address();
}
// Writes the object slot inside the patched call sequence.
void RelocInfo::set_call_object(Object* target) {
  *call_object_address() = target;
}
// Returns the address of the object slot inside a patched return sequence
// or debug-break slot.
Object** RelocInfo::call_object_address() {
  ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
         (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
  return reinterpret_cast<Object**>(
      pc_ + Assembler::kPatchReturnSequenceAddressOffset);
}
// Dispatches this relocation entry to the matching ObjectVisitor callback,
// flushing the icache for modes whose payloads the visitor may rewrite.
void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
    CPU::FlushICache(pc_, sizeof(Address));
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::CELL) {
    visitor->VisitCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
    CPU::FlushICache(pc_, sizeof(Address));
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    visitor->VisitCodeAgeSequence(this);
  } else if (((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence())) &&
             isolate->debug()->has_break_points()) {
    // Only patched return/debug-break sequences are live debug targets.
    visitor->VisitDebugTarget(this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    visitor->VisitRuntimeEntry(this);
  }
}
// Static-dispatch variant of Visit used by the GC's templated visitors;
// mirrors the ObjectVisitor overload above.
template<typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
    CPU::FlushICache(pc_, sizeof(Address));
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::CELL) {
    StaticVisitor::VisitCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
    CPU::FlushICache(pc_, sizeof(Address));
  } else if (RelocInfo::IsCodeAgeSequence(mode)) {
    StaticVisitor::VisitCodeAgeSequence(heap, this);
  } else if (heap->isolate()->debug()->has_break_points() &&
             ((RelocInfo::IsJSReturn(mode) &&
              IsPatchedReturnSequence()) ||
             (RelocInfo::IsDebugBreakSlot(mode) &&
              IsPatchedDebugBreakSlotSequence()))) {
    StaticVisitor::VisitDebugTarget(heap, this);
  } else if (RelocInfo::IsRuntimeEntry(mode)) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}
524 // -----------------------------------------------------------------------------
525 // Implementation of Operand
// Writes the ModR/M byte (mod in bits 7-6, rm in bits 2-0) into buf_[0] and
// folds the register's high bit into the pending REX prefix.
void Operand::set_modrm(int mod, Register rm_reg) {
  ASSERT(is_uint2(mod));
  buf_[0] = mod << 6 | rm_reg.low_bits();
  // Set REX.B to the high bit of rm.code().
  rex_ |= rm_reg.high_bit();
}
535 void Operand::set_sib(ScaleFactor scale, Register index, Register base) {
537 ASSERT(is_uint2(scale));
538 // Use SIB with no index register only for base rsp or r12. Otherwise we
539 // would skip the SIB byte entirely.
540 ASSERT(!index.is(rsp) || base.is(rsp) || base.is(r12));
541 buf_[1] = (scale << 6) | (index.low_bits() << 3) | base.low_bits();
542 rex_ |= index.high_bit() << 1 | base.high_bit();
546 void Operand::set_disp8(int disp) {
547 ASSERT(is_int8(disp));
548 ASSERT(len_ == 1 || len_ == 2);
549 int8_t* p = reinterpret_cast<int8_t*>(&buf_[len_]);
551 len_ += sizeof(int8_t);
554 void Operand::set_disp32(int disp) {
555 ASSERT(len_ == 1 || len_ == 2);
556 int32_t* p = reinterpret_cast<int32_t*>(&buf_[len_]);
558 len_ += sizeof(int32_t);
562 } } // namespace v8::internal
564 #endif // V8_X64_ASSEMBLER_X64_INL_H_